diff --git a/.circleci/config.yml b/.circleci/config.yml index 1560a9b8b9..5eb5b2953d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -5,78 +5,30 @@ jobs: # Run Python 3 tests working_directory: /python3_test docker: - - image: continuumio/miniconda + - image: continuumio/miniconda3 steps: - checkout - run: command: | - apt-get update -y && apt-get install -y build-essential # Create a file to checksum as cache key date --rfc-3339 date > cache_key.txt cat environment.yml >> cache_key.txt - restore_cache: key: deps3-{{ .Branch }}-{{ checksum "cache_key.txt" }} - run: - # Update/Create Conda Environment + # Update/Create Conda environment and run tests command: | + . /opt/conda/etc/profile.d/conda.sh conda update -y conda - if ! test -d "/opt/conda/envs/esmvaltool"; then - conda create -y --name esmvaltool python=3 - fi - conda env update --name esmvaltool - - save_cache: - key: deps3-{{ .Branch }}-{{ checksum "cache_key.txt" }} - paths: - - "/opt/conda/envs/esmvaltool" - - run: - # Activate Conda environment and run tests - command: | - source activate esmvaltool + conda env update + conda activate esmvaltool + conda install -yS r-lintr python setup.py test - - store_test_results: - path: test-reports/ - - store_artifacts: - path: test-reports/ - - run: - # Upload Python 3 test coverage to codacy, even when the actual - # running of the tests fails. - when: always - command: | - pip install codacy-coverage - python-codacy-coverage -r test-reports/python3/coverage.xml - - python2_test: - # Run Python 2 tests - working_directory: /python2_test - docker: - - image: continuumio/miniconda - steps: - - checkout - - run: - command: | - apt-get update -y && apt-get install -y build-essential - # Create a file to checksum as cache key - date --rfc-3339 date > cache_key.txt - cat environment.yml >> cache_key.txt - - restore_cache: - key: deps2-{{ .Branch }}-{{ checksum "cache_key.txt" }} - - run: - # Update/Create Conda Environment - command: | - conda update -y conda - if ! test -d "/opt/conda/envs/esmvaltool"; then - conda create -y --name esmvaltool python=2 - fi - conda env update --name esmvaltool - save_cache: - key: deps2-{{ .Branch }}-{{ checksum "cache_key.txt" }} + key: deps3-{{ .Branch }}-{{ checksum "cache_key.txt" }} paths: - "/opt/conda/envs/esmvaltool" - - run: - # Activate Conda environment and run tests - command: | - source activate esmvaltool - python setup.py test + - ".eggs" - store_test_results: path: test-reports/ - store_artifacts: @@ -86,93 +38,87 @@ jobs: # Test Python 3 installation working_directory: /python3_install docker: - - image: continuumio/miniconda + - image: continuumio/miniconda3 steps: - checkout + - restore_cache: + key: python3-install-{{ .Branch }} - run: command: | + . /opt/conda/etc/profile.d/conda.sh set -x mkdir /logs # Install - apt-get update > /logs/apt.txt 2>&1 - apt-get install -y build-essential >> /logs/apt.txt 2>&1 + wget https://julialang-s3.julialang.org/bin/linux/x64/1.0/julia-1.0.3-linux-x86_64.tar.gz + tar xfz julia-*-linux-x86_64.tar.gz + ln -s $(pwd)/julia-*/bin/julia /usr/bin/julia conda update -y conda > /logs/conda.txt 2>&1 - conda create -y --name esmvaltool python=3 > /logs/conda.txt 2>&1 - conda env update --name esmvaltool >> /logs/conda.txt 2>&1 - set +x - source activate esmvaltool - set -x + conda env update >> /logs/conda.txt 2>&1 + set +x; conda activate esmvaltool; set -x pip install . 
> /logs/install.txt 2>&1 + Rscript esmvaltool/install/R/setup.R > /logs/R_install.txt 2>&1 + julia esmvaltool/install/Julia/setup.jl > /logs/julia_install.txt 2>&1 # Log versions dpkg -l > /logs/versions.txt conda env export > /logs/environment.yml pip freeze > /logs/requirements.txt # Test installation + python setup.py test --installation esmvaltool -h ncl -V + # cdo test, check that it supports hdf5 + cdo --version + echo 0 | cdo -f nc input,r1x1 tmp.nc + ncdump tmp.nc | ncgen -k hdf5 -o tmp.nc + cdo -f nc copy tmp.nc tmp2.nc + - save_cache: + key: python3-install-{{ .Branch }} + paths: + - "/opt/conda/pkgs" + - ".eggs" - store_artifacts: path: /logs - - python2_install: - # Test Python 2 installation - working_directory: /python2_install - docker: - - image: continuumio/miniconda - steps: - - checkout + - store_artifacts: + path: test-reports/ + - store_test_results: + path: test-reports/ - run: + when: always command: | - set -x - mkdir /logs - # Install - apt-get update > /logs/apt.txt 2>&1 - apt-get install -y build-essential >> /logs/apt.txt 2>&1 - conda update -y conda > /logs/conda.txt 2>&1 - conda create -y --name esmvaltool python=2 > /logs/conda.txt 2>&1 - conda env update --name esmvaltool >> /logs/conda.txt 2>&1 - set +x - source activate esmvaltool - set -x - pip install . > /logs/install.txt 2>&1 - # Log versions - dpkg -l > /logs/versions.txt - conda env export > /logs/environment.yml - pip freeze > /logs/requirements.txt - # Test installation - esmvaltool -h - ncl -V - - store_artifacts: - path: /logs + pip install codacy-coverage + python-codacy-coverage -r test-reports/python3/coverage.xml develop: # Test development installation working_directory: /develop docker: - - image: continuumio/miniconda + - image: continuumio/miniconda3 steps: - checkout - run: command: | + . /opt/conda/etc/profile.d/conda.sh set -x mkdir /logs # Install - apt-get update > /logs/apt.txt 2>&1 - apt-get install -y build-essential >> /logs/apt.txt 2>&1 + wget https://julialang-s3.julialang.org/bin/linux/x64/1.0/julia-1.0.3-linux-x86_64.tar.gz + tar xfz julia-*-linux-x86_64.tar.gz + ln -s $(pwd)/julia-*/bin/julia /usr/bin/julia conda update -y conda > /logs/conda.txt 2>&1 - conda create -y --name esmvaltool python=3 > /logs/conda.txt 2>&1 - conda env update --name esmvaltool >> /logs/conda.txt 2>&1 - set +x - source activate esmvaltool - set -x + conda env update >> /logs/conda.txt 2>&1 + set +x; conda activate esmvaltool; set -x pip install -e .[develop] > /logs/install.txt 2>&1 + Rscript esmvaltool/install/R/setup.R > /logs/R_install.txt 2>&1 + julia esmvaltool/install/Julia/setup.jl > /logs/julia_install.txt 2>&1 # Log versions dpkg -l > /logs/versions.txt conda env export > /logs/environment.yml pip freeze > /logs/requirements.txt # Test installation esmvaltool -h - python setup.py test + python setup.py test --installation ncl -V + cdo --version - store_artifacts: path: /logs @@ -180,23 +126,18 @@ jobs: # Test building documentation working_directory: /doc docker: - - image: continuumio/miniconda + - image: continuumio/miniconda3 steps: - checkout - run: command: | + . /opt/conda/etc/profile.d/conda.sh set -x mkdir /logs # Install - apt-get update > /logs/apt.txt 2>&1 - apt-get install -y build-essential >> /logs/apt.txt 2>&1 - # TODO: change to Python 3 once issue #218 is fixed. 
conda update -y conda > /logs/conda.txt 2>&1 - conda create -y --name esmvaltool python=2 > /logs/conda.txt 2>&1 - conda env update --name esmvaltool >> /logs/conda.txt 2>&1 - set +x - source activate esmvaltool - set -x + conda env update >> /logs/conda.txt 2>&1 + set +x; conda activate esmvaltool; set -x pip install -e .[develop] > /logs/install.txt 2>&1 # Log versions dpkg -l > /logs/versions.txt @@ -211,37 +152,31 @@ jobs: # Test conda build working_directory: /esmvaltool docker: - - image: continuumio/miniconda + - image: continuumio/miniconda3 steps: - checkout - run: command: | + . /opt/conda/etc/profile.d/conda.sh set -x # Install prerequisites mkdir /logs - apt-get update > /logs/apt.txt 2>&1 - apt-get install -y build-essential >> /logs/apt.txt 2>&1 + wget https://julialang-s3.julialang.org/bin/linux/x64/1.0/julia-1.0.3-linux-x86_64.tar.gz + tar xfz julia-*-linux-x86_64.tar.gz + ln -s $(pwd)/julia-*/bin/julia /usr/bin/julia conda update -y conda > /logs/conda_base.txt 2>&1 - conda install -y conda-build >> /logs/conda_base.txt 2>&1 + conda install -y conda-build conda-verify >> /logs/conda_base.txt 2>&1 # Log versions dpkg -l > /logs/versions.txt conda env export -n base > /logs/build_environment.yml # Build conda package conda build . -c conda-forge -c birdhouse > /logs/build_log.txt # Install Python 3 conda package - conda create -y --name esmvaltool3 python=3 > /logs/conda_esmvaltool3.txt 2>&1 - set +x; source activate esmvaltool3; set -x + conda create -y --name esmvaltool3 > /logs/conda_esmvaltool3.txt 2>&1 + set +x; conda activate esmvaltool3; set -x conda install -y esmvaltool --use-local -c conda-forge -c birdhouse conda env export > /logs/test_environment3.yml esmvaltool -h - set +x; source deactivate; set -x - # Install Python 2 conda package - conda create -y --name esmvaltool2 python=2 > /logs/conda_esmvaltool2.txt 2>&1 - set +x; source activate esmvaltool2; set -x - conda install -y esmvaltool --use-local -c conda-forge -c birdhouse - conda env export > /logs/environment2.yml - esmvaltool -h - set +x; source deactivate; set -x - store_artifacts: path: /logs @@ -249,19 +184,21 @@ jobs: # Test conda package installation working_directory: /esmvaltool docker: - - image: continuumio/miniconda + - image: continuumio/miniconda3 steps: - run: command: | + . /opt/conda/etc/profile.d/conda.sh set -x # Install prerequisites mkdir /logs - apt-get update > /logs/apt.txt 2>&1 - apt-get install -y build-essential >> /logs/apt.txt 2>&1 + wget https://julialang-s3.julialang.org/bin/linux/x64/1.0/julia-1.0.3-linux-x86_64.tar.gz + tar xfz julia-*-linux-x86_64.tar.gz + ln -s $(pwd)/julia-*/bin/julia /usr/bin/julia conda update -y conda > /logs/conda.txt 2>&1 # Create and activate conda environment - conda create -y --name esmvaltool python=3 - set +x; source activate esmvaltool; set -x + conda create -y --name esmvaltool + set +x; conda activate esmvaltool; set -x # Install conda install -y esmvaltool -c esmvalgroup -c conda-forge -c birdhouse # Log versions @@ -269,32 +206,31 @@ jobs: # Test installation esmvaltool -h ncl -V + cdo --version - ncl: - # Test ncl conda package + ncl_cdo_test: + # Test ncl and cdo conda packages working_directory: /ncl docker: - - image: continuumio/miniconda + - image: continuumio/miniconda3 steps: - checkout - run: command: | + . 
/opt/conda/etc/profile.d/conda.sh set -x mkdir /logs # Install - apt-get update > /logs/apt.txt 2>&1 - apt-get install -y build-essential >> /logs/apt.txt 2>&1 conda update -y conda > /logs/conda.txt 2>&1 conda create -y --name ncl > /logs/conda.txt 2>&1 - set +x - source activate ncl - set -x - conda install -y --channel conda-forge ncl >> /logs/conda.txt 2>&1 + set +x; conda activate ncl; set -x + conda install -y --channel conda-forge ncl cdo >> /logs/conda.txt 2>&1 # Log versions dpkg -l > /logs/versions.txt conda env export > /logs/environment.yml # Test if NCL installed successfully ncl -V + cdo --version - store_artifacts: path: /logs @@ -303,7 +239,7 @@ workflows: commit: jobs: - python3_test - - python2_test + - python3_install nightly: triggers: - schedule: @@ -315,11 +251,9 @@ workflows: - version2_master jobs: - python3_test - - python2_test - python3_install - - python2_install - develop - doc - conda_build - conda_install - - ncl + - ncl_cdo_test diff --git a/.codacy.yml b/.codacy.yml index 3dc3bb8c6b..06a0ea342f 100644 --- a/.codacy.yml +++ b/.codacy.yml @@ -20,17 +20,6 @@ engines: exclude_paths: [ 'doc/sphinx/**', - 'esmvaltool/doc/sphinx/**', - # cmor tables - 'esmvaltool/interface_scripts/cmip*-cmor-tables/**', - # old stuff - 'backend/**', - 'diag_scripts/**', - 'interface_data/**', - 'interface_scripts/**', - 'main.py', - 'nml/**', - 'plot_scripts/**', - 'reformat_scripts/**', - 'variable_defs/**', + 'esmvaltool/cmor/tables/**', + 'tests/**' ] diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..68623817fa --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,15 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. If you are developing a new diagnostic script, please provide a link to the code/branch on GitHub that you are working in. + +**Please attach** + - The recipe that you are trying to run, you can find a copy in the `run` directory in the output directory + - The `main_log_debug.txt` file, this can also be found in the `run` directory in the output directory diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..8504841b10 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,14 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: enhancement +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. + +**Would you be able to help out?** +Would you have the time and skills to implement the solution yourself? diff --git a/.github/ISSUE_TEMPLATE/new-diagnostic.md b/.github/ISSUE_TEMPLATE/new-diagnostic.md new file mode 100644 index 0000000000..03f9e4e0bf --- /dev/null +++ b/.github/ISSUE_TEMPLATE/new-diagnostic.md @@ -0,0 +1,14 @@ +--- +name: New diagnostic +about: Develop a new diagnostic. +title: '' +labels: diagnostic +assignees: '' + +--- + +**Short description of the diagnostic** +Add a short description of the diagnostic that you would like to add. 
+ +**Branch and pull request** +Once you've started working, add the branch (and pull request) diff --git a/.gitignore b/.gitignore index 484a57a9ee..c0f1f23055 100644 --- a/.gitignore +++ b/.gitignore @@ -94,5 +94,9 @@ doc/sphinx/build *.grib *.RData *.Rdata +*.Rhistory *.rdata *.pkl + +# ESMF log files +*.ESMF_LogFile diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index ea8891a9b4..0000000000 --- a/.gitmodules +++ /dev/null @@ -1,6 +0,0 @@ -[submodule "esmvaltool/cmor/tables/cmip5"] - path = esmvaltool/cmor/tables/cmip5 - url = https://github.com/PCMDI/cmip5-cmor-tables.git -[submodule "esmvaltool/cmor/tables/cmip6"] - path = esmvaltool/cmor/tables/cmip6 - url = https://github.com/PCMDI/cmip6-cmor-tables.git diff --git a/.prospector.yml b/.prospector.yml index 58d4946d00..15fc8b71e9 100644 --- a/.prospector.yml +++ b/.prospector.yml @@ -16,13 +16,7 @@ pep8: full: true pep257: - # see http://pep257.readthedocs.io/en/latest/error_codes.html - disable: [ - # For short descriptions it makes sense not to end with a period: - D400, # First line should end with a period - # Disable because not part of PEP257 official convention: - D203, # 1 blank line required before class docstring - D212, # Multi-line docstring summary should start at the first line - D213, # Multi-line docstring summary should start at the second line - D404, # First word of the docstring should not be This - ] + # disable rules that are allowed by the numpy convention + # see https://github.com/PyCQA/pydocstyle/blob/master/src/pydocstyle/violations.py + # and http://pydocstyle.readthedocs.io/en/latest/error_codes.html + disable: ['D107', 'D203', 'D212', 'D213', 'D402', 'D413'] diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..170e9131c0 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at veronika.eyring@dlr.de. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..2d90788e6d --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,93 @@ +# Contributions are very welcome + +If you would like to contribute a new diagnostic and recipe or a new feature, please discuss your idea with the development team before getting started, to avoid double work and/or disappointment later. A good way to do this is to open an [issue on GitHub](https://github.com/ESMValGroup/ESMValTool/issues). This is also a good way to get help. + +If you have a bug to report, please do so using the [issues tab on the ESMValTool github repository](https://github.com/ESMValGroup/ESMValTool/issues). + +To get started developing, follow the instructions below. More detailed instructions can be found in the [manual](https://esmvaltool.readthedocs.io) under Developer's Guide. + +## Getting started +To install in development mode, follow these instructions. - [Download and install conda](https://conda.io/projects/conda/en/latest/user-guide/install/linux.html) (this should be done even if the system in use already has a preinstalled version of conda, as problems have been reported with NCL when using such a version) + - To make the `conda` command available, add `source /etc/profile.d/conda.sh` to your `.bashrc` file and restart your shell. If using (t)csh shell, add `source /etc/profile.d/conda.csh` to your `.cshrc`/`.tcshrc` file instead. + - Update conda: `conda update -y conda` + - Clone the ESMValTool public github repository: `git clone git@github.com:ESMValGroup/ESMValTool.git`, or one of the private github repositories (e.g.
`git clone git@github.com:ESMValGroup/ESMValTool-private.git`) + - Go to the esmvaltool directory: `cd ESMValTool` + - Create the esmvaltool conda environment: `conda env create --name esmvaltool --file environment.yml` + - Activate the esmvaltool environment: `conda activate esmvaltool` + - Install in development mode: `pip install -e '.[develop]'`. If you are installing behind a proxy that does not trust the usual pip-urls you can declare them with the option `--trusted-host`, e.g. `pip install --trusted-host=pypi.python.org --trusted-host=pypi.org --trusted-host=files.pythonhosted.org -e .[develop]` + - If you want to use R diagnostics, run `Rscript esmvaltool/install/R/setup.R` to install the R dependencies. + - If you want to use Julia diagnostics, run `julia esmvaltool/install/Julia/setup.jl` to install the Julia dependencies. + - Test that your installation was successful by running `esmvaltool -h`. + - If you log into a cluster or other device via `ssh` and your origin machine sends the `locale` environment via the `ssh` connection, make sure that the `LANG` and `LC_ALL` environment variables are set correctly (for GB English UTF-8 encoding these variables must be set to `en_GB.UTF-8`; you can set them by adding `export LANG=en_GB.UTF-8` and `export LC_ALL=en_GB.UTF-8` to the `.profile` on your origin or login machine) + +## Running tests +Go to the directory where the repository is cloned and run `python setup.py test --installation`. Tests will also be run automatically by [CircleCI](https://circleci.com/gh/ESMValGroup/ESMValTool). + +## Code style +To increase the readability and maintainability of the ESMValTool source code, we aim to adhere to best practices and coding standards. All pull requests are reviewed and tested by one or more members of the core development team. For code in all languages, it is highly recommended that you split your code up into functions that are short enough to view without scrolling. + +### Python +The standard document on best practices for Python code is [PEP8](https://www.python.org/dev/peps/pep-0008/) and there is [PEP257](https://www.python.org/dev/peps/pep-0257/) for documentation. We make use of [numpy style docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_numpy.html) to document Python functions that are visible on [readthedocs](https://esmvaltool.readthedocs.io); a short docstring example is given after the NCL section below. + +Most formatting issues in Python code can be fixed automatically by running the commands +``` +isort some_file.py +``` +to sort the imports in the standard way and +``` +yapf -i some_file.py +``` +to add/remove whitespace as required by the standard. + +To check if your code adheres to the standard, go to the directory where the repository is cloned, e.g. `cd ESMValTool`, and run +``` +prospector esmvaltool/diag_scripts/your_diagnostic/your_script.py +``` +Run +``` +python setup.py lint +``` +to see the warnings about the code style of the entire project. + +We use `pycodestyle` on CircleCI to automatically check that there are no formatting mistakes and Codacy for monitoring (Python) code quality. Running prospector locally will give you quicker and sometimes more accurate results. + +### NCL +Because there is no standard best practices document for NCL, we use [PEP8](https://www.python.org/dev/peps/pep-0008/) for NCL code as well, with some minor adjustments to accommodate differences between the languages. The most important difference is that for NCL code the indentation should be 2 spaces instead of 4.
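+
+As a short illustration of the numpy docstring style referenced in the Python section above, here is a minimal sketch (the function itself is purely hypothetical):
+```python
+def anomaly(data, reference):
+    """Compute the anomaly of the input data relative to a reference.
+
+    Parameters
+    ----------
+    data : numpy.ndarray
+        Input values.
+    reference : float
+        Reference value that is subtracted from the input.
+
+    Returns
+    -------
+    numpy.ndarray
+        The input values with the reference subtracted.
+    """
+    return data - reference
+```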
+ +### R +A document on best practices for R is [Hadley Wickham's R Style Guide](http://r-pkgs.had.co.nz/style.html). We partially check adherence to this style guide by using [lintr](https://cran.r-project.org/web/packages/lintr/index.html) on CircleCI. In the future we would also like to make use of [goodpractice](https://cran.r-project.org/web/packages/goodpractice/index.html) to assess the quality of R code. + +### YAML +Please use `yamllint` to check that your YAML files do not contain mistakes. + +## Documentation + +### What should be documented + +Any code documentation that is visible on [readthedocs](https://esmvaltool.readthedocs.io) should be well written and adhere to the standards for documentation for the respective language. Recipes should have a page in the *Recipes* section on readthedocs. This is also the place to document recipe options for the diagnostic scripts used in those recipes. Note that there is no need to write extensive documentation for functions that are not visible on readthedocs. However, adding a one-line docstring describing what a function does is always a good idea. + +### How to build the documentation locally +Go to the directory where the repository is cloned and run +``` +python setup.py build_sphinx -Ea +``` +Make sure that your newly added documentation builds without warnings or errors. + +## Pull requests and code review +New development should preferably be done in a new git branch in the main ESMValTool github repository. However, for scientists requiring confidentiality, private repositories are available. It is recommended that you open a pull request early, as this will cause CircleCI to run the unit tests and Codacy to analyse your code. It's also easier to get help from other developers if your code is visible in a pull request. + +You can view the results of the automatic checks below your pull request. If one of the tests shows a red cross instead of a green approval sign, please click the link and try to solve the issue. Note that these automated checks make it easier to review code, but they are not flawless, so occasionally Codacy will report false positives. + +### Diagnostic script contributions +A pull request with diagnostic code should preferably not introduce new Codacy issues. However, we understand that there is a limit to how much time can be spent on polishing code, so up to 10 new (non-trivial) issues is still an acceptable amount. + +Never make changes to the esmvaltool core, e.g. a new preprocessor function, in diagnostic script pull requests. If you need to make this kind of change, create a separate pull request for it in the public repository. + +### Contributing to the core of ESMValTool +Contributions to the core of ESMValTool should: + - Go into the public repository. + - Preferably be covered by unit tests. Unit tests are mandatory for new preprocessor functions or modifications to existing functions. If you do not know how to start with writing unit tests, let us know in a comment on the pull request and a core development team member will try to help you get started. + - Be accompanied by appropriate documentation. + - Introduce no new issues on Codacy (but note that style issues reported in unit test code are not worth the effort of fixing). diff --git a/NOTICE b/NOTICE index ae97a93604..5fa313af38 100644 --- a/NOTICE +++ b/NOTICE @@ -1,100 +1,120 @@ +Earth System Model Evaluation Tool VERSION 2.0 (the Software) License +PLEASE READ THIS SOFTWARE LICENSE ("LICENSE") CAREFULLY.
USE OF THE SOFTWARE CONSTITUTES ACCEPTANCE OF THIS LICENSE AND TERMS -Earth System Model Evaluation Tool VERSION 2 (the Software) License - - -PLEASE READ THIS SOFTWARE LICENSE ("LICENSE") CAREFULLY. USE - - -OF THE SOFTWARE CONSTITUTES ACCEPTANCE OF THIS LICENSE AND TERMS - - -DATE: 26 January 2017 - +DATE: 6 February 2019 ======================================================================== - -THIS LICENSE APPLIES TO THE ESMValTool Version 2 VERSION THAT IS RELEASED AS -OPEN-SOURCE SOFTWARE UNDER the Apache License, Version 2.0 +THIS LICENSE APPLIES TO THE ESMValTool VERSION 1.0 AND VERSION 2.0 THAT IS RELEASED AS OPEN-SOURCE SOFTWARE UNDER THE APACHE LICENSE ======================================================================== -Copyright 2008- Deutsches Zentrum für Luft- und -Raumfahrt e.V. (DLR) and partners (see below). All rights reserved. +Copyright 2008- Deutsches Zentrum für Luft- und Raumfahrt e.V. (DLR) and partners (see below). All rights reserved. ========================================== +(1) ESMValTool VERSION 2.0 CORE DEVELOPMENT TEAM +========================================== +Copyright 2008- Deutsches Zentrum für Luft- und Raumfahrt e.V. (DLR), Germany - ESMValTool Principal Investigator (PI) +Copyright 2017- Alfred-Wegener-Institute (AWI), Germany +Copyright 2017- Barcelona Supercomputing Center (BSC), Spain +Copyright 2016- Ludwig Maximilian University, Germany +Copyright 2017- Netherlands e-Science Center (NLeSC), Netherlands +Copyright 2019- Plymouth Marine Laboratory, UK +Copyright 2017- University of Reading, UK -Copyright 2008- Deutsches Zentrum für Luft- -und Raumfahrt e.V. (DLR), ESMValTool Project PI “Entire ESMValTool” - -Copyright 2011- Swedish Meteorological and Hydrological Institute (SMHI) “Entire -ESMValTool” - -British Crown Copyright 2015 “recipe_SAMonsoon.yml, recipe_SAMonsoon_AMIP.yml, -recipe_SAMonsoon_daily.yml, SPHINX” - -Copyright 2014-2015 University of Exeter “Carbon Cycle “recipe_anav13jclim.yml” - -Copyright 2014-2015 Tyndall Centre for Climate Change Research, School of Environmental -Sciences, University of East Anglia, Norwich “recipe_GlobalOcean.yml” - -Copyright 2015 Agenzia nazionale per le nuove tecnologie, l’energia e lo sviluppo economico sostenibile (ENEA) “Ozone (recipe_eyring13jgr.yml; recipe_eyring06jgr.yml) - -Copyright 2014-2015 ETH Zurich, Switzerland “recipe_Evapotransport.yml, recipe_SPI.yml” +========================================== +(2) ESMValTool VERSION 2.0 PREPROCESSOR +========================================== +COPYRIGHT 2008- ALL CORE DEVELOPERS (see under (1)) -Copyright 2015 University Corporation for Atmospheric Research (UCAR) “Climate Variability Diagnostic Package (recipe_CVDP.yml)” +========================================== +(3) ESMValTool VERSION 2.0 RECIPES AND DIAGNOSTICS +========================================== +COPYRIGHT 2008- ALL CORE DEVELOPERS (see under (1)) + +recipe_autoassess_landsurface_permafrost.yml +recipe_autoassess_landsurface_snow.yml +recipe_autoassess_landsurface_soilmoisture.yml +recipe_autoassess_landsurface_surfrad.yml +recipe_autoassess_radiation_rms_Amon_all.yml +recipe_autoassess_radiation_rms_Amon_obs.yml +recipe_autoassess_radiation_rms_cfMon_all.yml +recipe_autoassess_stratosphere.yml +Copyright 2017- University of Reading, UK +Copyright 2017- MetOffice, UK + +recipe_my_personal_diagnostic.yml +recipe_validation.yml +Copyright 2017- University of Reading, UK + +recipe_clouds_bias.yml +recipe_clouds_ipcc.yml +recipe_flato13ipcc.yml +recipe_lauer13jclim.yml 
+recipe_perfmetrics_CMIP5.yml +recipe_williams09climdyn_CREM.yml +Copyright 2008- Deutsches Zentrum für Luft- und Raumfahrt e.V. (DLR), Germany + +recipe_cvdp.yml +Copyright 2010- University Corporation for Atmospheric Research (UCAR), USA + +recipe_ocean_amoc.yml +recipe_ocean_bgc.yml +recipe_ocean_example.yml +recipe_ocean_ice_extent.yml +recipe_ocean_quadmap.yml +recipe_ocean_scalar_fields.yml +Copyright 2019- Plymouth Marine Laboratory, Plymouth, UK +Copyright 2018- Swedish Meteorological and Hydrological Institute (SMHI), Sweden + +recipe_runoff_et.yml +Copyright 2015 Max-Planck-Institute for Meteorology (MPI), Germany +Copyright 2016- Ludwig Maximilian University of Munich, Germany +========================================== +ESMValTool VERSION 1.0 COPYRIGHTS +========================================== +Copyright 2008- Deutsches Zentrum für Luft- und Raumfahrt e.V. (DLR), ESMValTool Project PI “Entire ESMValTool” +Copyright 2011- Swedish Meteorological and Hydrological Institute (SMHI) “Technical ESMValTool development” +British Crown Copyright 2015 “namelist_SAMonsoon.xml, namelist_SAMonsoon_AMIP.xml, namelist_SAMonsoon_daily.xml, SPHINX” +Copyright 2014-2015 University of Exeter “Carbon Cycle “namelist_anav13jclim.xml” +Copyright 2014-2015 Tyndall Centre for Climate Change Research, School of Environmental Sciences, University of East Anglia, Norwich “namelist_GlobalOcean.xml” +Copyright 2015 Agenzia nazionale per le nuove tecnologie, l’energia e lo sviluppo economico sostenibile (ENEA) “Ozone (namelist_eyring13jgr.xml; namelist_eyring06jgr.xml) +Copyright 2014-2015 ETH Zurich, Switzerland “namelist_Evapotransport.xml, namelist_SPI.xml” +Copyright 2015 University Corporation for Atmospheric Research (UCAR) “Climate Variability Diagnostic Package (namelist_CVDP.xml)” Copyright 2015 Deutsches Klimarechenzentrum (DKRZ) “ESGF coupling“ - -Copyright 2015 Max-Planck-Institute for Meteorology (MPI) “recipe_runoff_et.yml” - +Copyright 2015 Max-Planck-Institute for Meteorology (MPI) “namelist_runoff_et.xml” Copyright 2015 National Centre for Atmospheric Science, British Atmospheric Data Centre, STFC Rutherford Appleton Laboratory “ESGF coupling” - -Copyright 2015-2016 Ludwig Maximilians University Munich (LMU) “Automated Testing”, contributions to “recipe_lauer17rse.yml” - -Copyright 2015 Finnish Meteorological Institute (FMI) “recipe_TropicalVariability.yml, recipe_SouthernHemisphere.yml” - -Copyright 2015 Institut Pierre Simon Laplace, “recipe_diurnalcycle.yml” - +Copyright 2015 Ludwig Maximilians University Munich (LMU) “Automated Testing” +Copyright 2015 Finnish Meteorological Institute (FMI) “namelist_TropicalVariability.xml, namelist_SouthernHemisphere.xml” +Copyright 2015 Institut Pierre Simon Laplace, “namelist_diurnalcycle.xml” Copyright 2015 University of Reading “ESGF coupling” - -Copyright 2015 CNRM-GAME, Météo France and CNRS, “recipe_WAMonsoon.yml” - -Copyright 2014-2015 Royal Netherlands Meteorological Institute (KNMI) “recipe_SouthernOcean.yml” - -Copyright 2008- 2012 UCAR “Chemistry-Climate Model Validation (CCMVal) Diagnostic Tool” +Copyright 2015 CNRM-GAME, Météo France and CNRS, “namelist_WAMonsoon.xml” +Copyright 2014-2015 Royal Netherlands Meteorological Institute (KNMI) “namelist_SouthernOcean.xml” ========================================== Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - -    http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. - + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ========================================== - Users who apply the Software resulting in presentations or papers are kindly asked to cite the following “Software Documentation Paper” alongside with the Software doi (doi:10.17874/ac8548f0315) and version number: - - -Eyring et al., ESMValTool (v1.0) – a community diagnostic and performance metrics tool for routine evaluation of Earth system models in CMIP, Geosci. Model Dev., 9, 1747-1802, doi:10.5194/gmd-9-1747-2016, 2016. +Eyring et al., ESMValTool (v1.0) – a community diagnostic and performance metrics tool for routine evaluation of Earth System Models in CMIP, Geosci. Model Dev., 2016. Besides the above citation, users are kindly asked to register any journal articles (or other scientific documents) that use the Software at ESMValTool webpage (see http://www.esmvaltool.org/). - Citing the Software Documentation Paper and registering your paper(s) will serve to document the scientific impact of the Software, which is of vital importance for securing future funding. You should consider this an obligation if you have taken advantage of the Software, which represents the end product of considerable effort by the development team. ========================================== -In addition to using the Software, we encourage the community to join the Software Development Team and to contribute additional diagnostics and performance metrics or other software improvements. Contributing back the new diagnostics and performance metrics or -other software improvements will help to enhance the capability of the Software, which is of vital importance for securing future funding. You should consider this an obligation if you have taken advantage of the Software, which represents a product of considerable effort by the development team. +In addition to using the Software, we encourage the community to join the Software Development Team and to contribute additional diagnostics and performance metrics or other software improvements. Contributing back the new diagnostics and performance metrics or other software improvements will help to enhance the capability of the Software, which is of vital importance for securing future funding. You should consider this an obligation if you have taken advantage of the Software, which represents a product of considerable effort by the development team. ========================================== -Please contact Veronika Eyring (veronika.eyring"at"dlr.de) and Axel Lauer (axel.lauer"at"dlr.de) if you want, in addition to its use, join the Software Development Team or if you have any questions on the Software. +To join the ESMValTool Development Team, please contact Prof. Veronika Eyring (veronika.eyring@dlr.de) and Dr. Axel Lauer (axel.lauer@dlr.de). 
========================================== + diff --git a/README.md b/README.md index 1266d14d35..c37fd0e6ad 100644 --- a/README.md +++ b/README.md @@ -1,41 +1,52 @@ # ESMValTool +[![Documentation Status](https://readthedocs.org/projects/esmvaltool/badge/?version=version2_development)](https://esmvaltool.readthedocs.io/en/version2_development/?badge=version2_development) [![DOIBadge](https://img.shields.io/badge/DOI-10.17874%2Fac8548f0315-blue.svg)](https://doi.org/10.17874/ac8548f0315) -[![Codacy Badge](https://api.codacy.com/project/badge/Grade/79bf6932c2e844eea15d0fb1ed7e415c)](https://www.codacy.com/app/ESMValGroup/ESMValTool?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValTool&utm_campaign=Badge_Grade) -[![Codacy Coverage Badge](https://api.codacy.com/project/badge/Coverage/79bf6932c2e844eea15d0fb1ed7e415c)](https://www.codacy.com/app/ESMValGroup/ESMValTool?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValTool&utm_campaign=Badge_Coverage) [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/ESMValGroup?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![CircleCI](https://circleci.com/gh/ESMValGroup/ESMValTool.svg?style=svg)](https://circleci.com/gh/ESMValGroup/ESMValTool) +[![Codacy Coverage Badge](https://api.codacy.com/project/badge/Coverage/79bf6932c2e844eea15d0fb1ed7e415c)](https://www.codacy.com/app/ESMValGroup/ESMValTool?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValTool&utm_campaign=Badge_Coverage) +[![Codacy Badge](https://api.codacy.com/project/badge/Grade/79bf6932c2e844eea15d0fb1ed7e415c)](https://www.codacy.com/app/ESMValGroup/ESMValTool?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValTool&utm_campaign=Badge_Grade) [![Docker Build Status](https://img.shields.io/docker/build/esmvalgroup/esmvaltool.svg)](https://hub.docker.com/r/esmvalgroup/esmvaltool/) +[![Anaconda-Server Badge](https://anaconda.org/esmvalgroup/esmvaltool/badges/installer/conda.svg)](https://conda.anaconda.org/esmvalgroup) + ESMValTool: A community diagnostic and performance metrics tool for routine evaluation of Earth system models in CMIP -## Developing -This is the development branch for version 2 of ESMValTool. To get started developing esmvaltool or developing/porting diagnostics, follow the instructions below. More detailed instructions can be found in the [manual](http://esmvaltool.readthedocs.io/en/refactoring_backend/). - -### Getting started -To install in development mode, follow these instructions. 
-- [Download and install conda](https://conda.io/docs/user-guide/install/linux.html) (this should be done even if the system in use already has a preinstalled version of conda, as problems have been reported with NCL when using such a version) -- If using (t)csh shell, do not prepend the installation path (``) to the environment variable PATH (as recommended by the installation procedure), but add `source /etc/profile.d/conda.csh` to the `.cshrc`/`.tcshrc` file instead -- Update conda: `conda update -y conda` -- Create a conda environment: `conda create -y -n esmvaltool python=3` -- Activate the esmvaltool environment: `source activate esmvaltool` (or `conda activate esmvaltool` if using (t)csh shell) -- Clone the ESMValTool github repository: `git clone git@github.com/ESMValGroup/ESMValTool` -- Go to the esmvaltool directory: `cd ESMValTool` -- Check out the version 2 development branch: `git checkout version2_development` -- Update the esmvaltool conda environment `conda env update` -- Install in development mode: `pip install -e .[develop]` (or `pip install -e '.[develop]'` if using (t)csh shell) -- Test that your installation was succesful by running `esmvaltool -h`. +# Getting started +This is the development branch for version 2 of ESMValTool. ESMValTool version 2 is under rapid development, so an installation from source is currently recommended. + +## Installing from source [recommended] +Please see [CONTRIBUTING.md](https://github.com/ESMValGroup/ESMValTool/blob/version2_development/CONTRIBUTING.md) for instructions on installing ESMValTool from source. + +## Installing from Anaconda +The Anaconda package can be found on the [ESMValGroup Anaconda channel](https://anaconda.org/ESMValGroup). + +First install gcc, g++ and gfortran if these are not available on your system. On Debian-based systems, this can be done by +running `apt install build-essential gfortran`. In order to run +Julia diagnostics, you will also need to [install Julia](https://julialang.org/downloads/) version 1 or greater. + +If you already installed Anaconda, you can install ESMValTool by running: +``` +conda install -c esmvalgroup esmvaltool -c conda-forge -c birdhouse +``` + +## Using Docker +ESMValTool Docker images are automatically generated by Docker Hub. Run the command below to pull the most recent ESMValTool image. +``` +docker pull esmvalgroup/esmvaltool +``` +Example use: +``` +[sudo] docker run -ti esmvalgroup/esmvaltool +``` + +## Running ESMValTool - Review `config-user.yml`. To customize for your system, create a copy, edit and use the command line option `-c` to instruct `esmvaltool` to use your custom configuration. - Available recipes are located in the directory `esmvaltool/recipes`. +- Run e.g. `esmvaltool -c ~/config-user.yml examples/recipe_python.yml` -### Running tests -Go to the directory where the repository is cloned and run `./setup.py test`. Tests will also be run automatically by CircleCI. - -### Code style -First go to the directory where the repository is cloned, e.g. `cd ESMValTool`. -- To review if your own code follows our coding standards, run `prospector esmvaltool/diag_scripts/your_diagnostic/your_script.py`. -- Run `./setup.py lint` to see the warnings about the code style of the entire project. +## Getting help +The easiest way to get help, if you cannot find the answer in the documentation on [readthedocs](https://esmvaltool.readthedocs.io), is to open an [issue on GitHub](https://github.com/ESMValGroup/ESMValTool/issues). -We use Codacy for monitoring (Python) code quality.
However, running prospector locally will generally give you quicker and sometimes more accurate results. Note that Codacy does not install dependencies, so getting a warning "Unable to import 'external_library'" is probably not a real issue. +## Contributing +If you would like to contribute a new diagnostic or feature, please have a look at [CONTRIBUTING.md](https://github.com/ESMValGroup/ESMValTool/blob/version2_development/CONTRIBUTING.md). -### Building documentation -Go to the directory where the repository is cloned and run `./setup.py build_sphinx` diff --git a/conda_build_config.yaml b/conda_build_config.yaml index 89ecc9277d..86d510f6d7 100644 --- a/conda_build_config.yaml +++ b/conda_build_config.yaml @@ -3,5 +3,5 @@ # Python versions python: - - 2.7 - 3.6 + - 3.7 diff --git a/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.ocean.rst b/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.ocean.rst new file mode 100644 index 0000000000..2c7d562239 --- /dev/null +++ b/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.ocean.rst @@ -0,0 +1,21 @@ +Ocean diagnostics toolkit +============================= + +Welcome to the API documentation for the ocean diagnostics tool kit. +This toolkit is built to assist in the evaluation of models of the ocean. + +This toolkit is part of ESMValTool v2. + +Author: Lee de Mora (PML) + ledm@pml.ac.uk + +.. toctree:: + +.. automodule:: esmvaltool.diag_scripts.ocean.diagnostic_maps +.. automodule:: esmvaltool.diag_scripts.ocean.diagnostic_maps_quad +.. automodule:: esmvaltool.diag_scripts.ocean.diagnostic_model_vs_obs +.. automodule:: esmvaltool.diag_scripts.ocean.diagnostic_profiles +.. automodule:: esmvaltool.diag_scripts.ocean.diagnostic_timeseries +.. automodule:: esmvaltool.diag_scripts.ocean.diagnostic_transects +.. automodule:: esmvaltool.diag_scripts.ocean.diagnostic_seaice +.. automodule:: esmvaltool.diag_scripts.ocean.diagnostic_tools diff --git a/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.rst b/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.rst index ef50494e48..70c1c0526a 100644 --- a/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.rst +++ b/doc/sphinx/source/codedoc2/esmvaltool.diag_scripts.rst @@ -1,4 +1,8 @@ Diagnostic scripts ================== +Various diagnostic packages exist as part of ESMValTool. + .. automodule:: esmvaltool.diag_scripts + +.. automodule:: esmvaltool.diag_scripts.ocean diff --git a/doc/sphinx/source/codedoc2/esmvaltool.rst b/doc/sphinx/source/codedoc2/esmvaltool.rst index a5d0a66a90..a2c392d4be 100644 --- a/doc/sphinx/source/codedoc2/esmvaltool.rst +++ b/doc/sphinx/source/codedoc2/esmvaltool.rst @@ -10,3 +10,4 @@ library. This section documents the public API of ESMValTool. esmvaltool.cmor esmvaltool.diag_scripts.shared esmvaltool.diag_scripts + esmvaltool.diag_scripts.ocean diff --git a/doc/sphinx/source/conf.py b/doc/sphinx/source/conf.py index a1f24f6ad2..5a0b88a014 100644 --- a/doc/sphinx/source/conf.py +++ b/doc/sphinx/source/conf.py @@ -15,23 +15,14 @@ import sys import os - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
os.chdir(os.path.abspath(os.path.dirname(__file__))) -sys.path.insert(0, os.path.abspath('./../../..')) +sys.path.insert(0, os.path.abspath('./../../..')) from esmvaltool._version import __version__ -#add process_nl_docs in sphinx documentation source folder -sys.path.insert(0, os.path.abspath('.')) -import process_ncl_docs2 as process_ncl_docs - - -# add custom extensions directory to python path -#sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'extensions')) - # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. @@ -51,7 +42,6 @@ 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', ] -process_ncl_docs.create_doc_files_from_ncl() autodoc_default_flags = [ 'members', @@ -60,6 +50,19 @@ 'show-inheritance', ] +#autodoc_mock_imports = ['cf_units', 'iris', 'matplotlib', 'numpy', 'cartopy', 'cftime', 'netCDF4', 'yaml', 'PIL', 'prov', 'scipy', 'psutil', 'shapely', 'stratify', 'ESMF'] +autodoc_mock_imports = [ + 'iris', + 'cftime', + 'PIL', + 'prov', + 'scipy', + 'stratify', + 'ESMF', + 'cartopy', + 'cf_units', +] + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -123,12 +126,11 @@ # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False - # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -157,7 +159,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = [] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -211,14 +213,15 @@ # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', -# Additional stuff for the LaTeX preamble. -'preamble': r''' + # Additional stuff for the LaTeX preamble. + 'preamble': + r''' \makeatletter \renewcommand{\maketitle}{ \newcommand{\MONTH}{% @@ -260,8 +263,10 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'ESMValTool_Users_Guide.tex', u'ESMValTool User\'s and Developer\'s Guide', - u'Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.', 'manual'), + ('index', 'ESMValTool_Users_Guide.tex', + u'ESMValTool User\'s and Developer\'s Guide', + u'ESMValTool Development Team', + 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -285,7 +290,6 @@ # If false, no module index is generated. #latex_domain_indices = True - # -- Options for manual page output --------------------------------------- # One entry per manual page. 
List of tuples @@ -298,7 +302,6 @@ # If true, show URL addresses after external links. #man_show_urls = False - # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples @@ -322,14 +325,13 @@ # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False - # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'ESMValTool' -epub_author = u'Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.' -epub_publisher = u'Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.' -epub_copyright = u'2015, Veronika Eyring, Axel Lauer, Mattia Righi, Martin Evaldsson et al.' +epub_author = u'ESMValTool Development Team' +epub_publisher = u'ESMValTool Development Team' +epub_copyright = u'ESMValTool Development Team' # The basename for the epub file. It defaults to the project name. #epub_basename = u'ESMValTool' @@ -394,5 +396,10 @@ numfig = True -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'http://docs.python.org/': None} +# Configuration for intersphinx +intersphinx_mapping = { + 'python': ('https://docs.python.org/3/', None), + 'iris': ('https://scitools.org.uk/iris/docs/latest/', None), + 'numpy': ('https://docs.scipy.org/doc/numpy/', None), + 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None), +} diff --git a/doc/sphinx/source/developer_guide2/config_developer.inc b/doc/sphinx/source/developer_guide2/config_developer.inc new file mode 100644 index 0000000000..e6a6dfd1d4 --- /dev/null +++ b/doc/sphinx/source/developer_guide2/config_developer.inc @@ -0,0 +1,100 @@ +.. _config_developer: + +******************* +Configuration files +******************* + +There are several configuration files in ESMValTool: + + - config-user.yml + - config-developer.yml + - config-references.yml + - environment.yml + + +User config file +================ + +The ``config-user.yml`` file is the only user-facing configuration file and is +described in the user guide section of this document. + + +Developer config file +===================== + +This configuration file describes the file system structure for several +key projects (CMIP5, CMIP6) on several key machines (BADC, CP4CDS, DKRZ, ETHZ, +SMHI, BSC). + +The data directory structure of the CMIP5 project is set up differently +at each site. The following code snippet is an example of several path +descriptions for CMIP5 at various sites: + +.. code-block:: yaml + + CMIP5: + input_dir: + default: '/' + BADC: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/latest/[short_name]' + CP4CDS: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/[short_name]/latest/' + DKRZ: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/[latestversion]/[short_name]' + ETHZ: '[exp]/[mip]/[short_name]/[dataset]/[ensemble]/' + SMHI: '[dataset]/[ensemble]/[exp]/[frequency]' + BSC: '[project]/[exp]/[dataset.lower]' + +As an example, the CMIP5 file path on BADC would be: + +.. code-block:: yaml + + [institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/latest/[short_name] + +When loading these files, ESMValTool replaces the placeholders with the true +values. The resulting real path would look something like this: + +..
code-block:: yaml + + MOHC/HadGEM2-CC/rcp85/mon/ocean/Omon/r1i1p1/latest/tos + + +References config file +================================ + +The ``config-references.yml`` file is the full list of ESMValTool authors, +references and projects. Each author, project and reference in the documentation +section of a recipe needs to be in this file in the relevant section. + +For instance, the recipe ``recipe_ocean_example.yml`` file contains the following +documentation section: + +.. code-block:: yaml + + documentation: + authors: + - demo_le + + maintainer: + - demo_le + + references: + - demora2018gmd + + projects: + - ukesm + + +All four items here are named people, references and projects listed in the +``config-references.yml`` file. + + + +Environment config file +================================ + +This is the conda environment file, which informs conda which packages +and which versions are needed in order to install ESMValTool. +It is used by the install and update commands: + +.. code-block:: bash + + conda env create --name esmvaltool --file ESMValTool/environment.yml + conda env update --name esmvaltool --file ESMValTool/environment.yml diff --git a/doc/sphinx/source/developer_guide2/git_repository.inc b/doc/sphinx/source/developer_guide2/git_repository.inc index ab65a21b7d..6e5c832c0b 100644 --- a/doc/sphinx/source/developer_guide2/git_repository.inc +++ b/doc/sphinx/source/developer_guide2/git_repository.inc @@ -1,5 +1,6 @@ .. _git_repository: +************** Git repository ************** @@ -161,7 +162,7 @@ Do-s * Comment your code as much as possible and in English. * Use short but self-explanatory variable names (e.g., model_input and reference_input instead of xm and xr). * Consider a modular/functional programming style. This often makes code easier to read and deletes intermediate variables immediately. If possible, separate diagnostic calculations from plotting routines. -* Consider reusing or extending existing code. General-purpose code can be found in diag_scripts/lib/ and in plot_scripts/. +* Consider reusing or extending existing code. General-purpose code can be found in esmvaltool/diag_scripts/shared/. * Comment all switches and parameters including a list of all possible settings/options in the header section of your code (see also Section :ref:`std_diag`). * Use templates for recipes (Section :ref:`std_recipe`) and diagnostics (Section :ref:`std_diag`) to help with proper documentation. * Keep your *FEATURE BRANCH* regularly synchronized with the *DEVELOPMENT BRANCH* (git merge). @@ -174,5 +175,5 @@ Don't-s * Do not develop without proper version control (see do-s above). * Avoid large (memory, disk space) intermediate results. Delete intermediate files/variables or see modular/functional programming style. * Do not use hard-coded pathnames or filenames. -* Do not mix developments / modifications of the ESMValTool framework and developments / modifications of diagnotics in the same *FEATURE BRANCH*. +* Do not mix developments / modifications of the ESMValTool framework and developments / modifications of diagnostics in the same *FEATURE BRANCH*. diff --git a/doc/sphinx/source/developer_guide2/index.rst b/doc/sphinx/source/developer_guide2/index.rst index d84505e40f..ea65b5dae0 100644 --- a/doc/sphinx/source/developer_guide2/index.rst +++ b/doc/sphinx/source/developer_guide2/index.rst @@ -2,6 +2,8 @@ Developer's Guide ################# +.. include:: new_diagnostic.inc .. include:: porting.inc -.. include:: core_team.inc .. include:: git_repository.inc +..
+.. include:: core_team.inc
+.. include:: config_developer.inc
diff --git a/doc/sphinx/source/developer_guide2/new_diagnostic.inc b/doc/sphinx/source/developer_guide2/new_diagnostic.inc
new file mode 100644
index 0000000000..bf98741688
--- /dev/null
+++ b/doc/sphinx/source/developer_guide2/new_diagnostic.inc
@@ -0,0 +1,202 @@
+.. _new_diagnostic:
+
+***************************************
+Contributing a new diagnostic or recipe
+***************************************
+
+Getting started
+===============
+
+Please discuss your idea for a new diagnostic or recipe with the development team before getting started,
+to avoid disappointment later. A good way to do this is to open an
+`issue on GitHub `_.
+This is also a good way to get help.
+
+Creating a recipe and diagnostic script(s)
+==========================================
+First create a recipe in esmvaltool/recipes to define the input data your analysis script needs
+and optionally preprocessing and other settings. Also create a script in the esmvaltool/diag_scripts directory
+and make sure it is referenced from your recipe. The easiest way to do this is probably to copy the example recipe
+and diagnostic script and adjust those to your needs.
+A good example recipe is esmvaltool/recipes/examples/recipe_python.yml
+and a good example diagnostic is esmvaltool/diag_scripts/examples/diagnostic.py.
+
+If you have no preferred programming language yet, Python 3 is highly recommended, because it is the best supported.
+However, NCL, R, and Julia scripts are also supported.
+
+Unfortunately not much documentation is available at this stage,
+so have a look at the other recipes and diagnostics for further inspiration.
+
+Re-using existing code
+======================
+Always make sure your code is or can be released under a license that is compatible with the Apache 2 license.
+
+If you have existing code in a supported scripting language, you have two options for re-using it. If it is fairly
+mature and a large amount of code, the preferred way is to package and publish it on the
+official package repository for that language and add it as a dependency of ESMValTool.
+If it is just a few simple scripts or packaging is not possible (e.g. for NCL) you can simply copy
+and paste the source code into the esmvaltool/diag_scripts directory.
+
+If you have existing code in a compiled language like
+C, C++, or Fortran that you want to re-use, the recommended way to proceed is to add Python bindings and publish
+the package on PyPI so it can be installed as a Python dependency. You can then call the functions it provides
+using a Python diagnostic.
+
+Interfaces and provenance
+=========================
+When ESMValTool runs a recipe, it will first find all data and run the default preprocessor steps plus any
+additional preprocessing steps defined in the recipe. Next it will run the diagnostic script defined in the recipe
+and finally it will store provenance information. Provenance information is stored in the
+`W3C PROV XML format `_
+and also plotted in an SVG file for human inspection. In addition to provenance information, a caption is also added
+to the plots.
+
+In order to communicate with the diagnostic script, two interfaces have been defined, which are described below.
+Note that for Python and NCL diagnostics much more convenient methods are available than
+directly reading and writing the interface files. For other languages these are not implemented yet.
+
+Using the interfaces from Python
+--------------------------------
+Always use :meth:`esmvaltool.diag_scripts.shared.run_diagnostic` to start your script and make use of a
+:class:`esmvaltool.diag_scripts.shared.ProvenanceLogger` to log provenance. Have a look at the example
+Python diagnostic in esmvaltool/diag_scripts/examples/diagnostic.py for a complete example.
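+For orientation, the overall structure of such a script is sketched below
+(function and variable names are illustrative only; the example diagnostic
+mentioned above is the authoritative reference):
+
+.. code-block:: python
+
+   from esmvaltool.diag_scripts.shared import run_diagnostic
+
+
+   def main(cfg):
+       # cfg is a dictionary holding the settings from settings.yml,
+       # e.g. cfg['input_files'], cfg['work_dir'] and cfg['plot_dir'].
+       ...
+
+
+   if __name__ == '__main__':
+       # run_diagnostic() reads settings.yml and sets up logging.
+       with run_diagnostic() as config:
+           main(config)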
+
+Using the interfaces from NCL
+-----------------------------
+Always call the ``log_provenance`` procedure after plotting from your NCL diag_script. You can find the available shortcuts for
+statistics, domain, plottype, authors and references in the ``config-references.yml`` file.
+
+.. code-block:: bash
+
+   log_provenance(nc-file,plot_file,caption,statistics,domain,plottype,authors,references,input-files)
+
+Have a look at the example NCL diagnostic in ``esmvaltool/diag_scripts/examples/diagnostic.ncl`` for a complete example.
+
+Generic interface between backend and diagnostic
+------------------------------------------------
+To provide the diagnostic script with the information it needs to run (e.g. location of input data, various settings),
+the backend creates a YAML file called settings.yml and provides the path to this file as the first command line
+argument to the diagnostic script.
+
+The most interesting settings provided in this file are
+
+.. code:: yaml
+
+  run_dir: /path/to/recipe_output/run/diagnostic_name/script_name
+  work_dir: /path/to/recipe_output/work/diagnostic_name/script_name
+  plot_dir: /path/to/recipe_output/plots/diagnostic_name/script_name
+  input_files:
+    - /path/to/recipe_output/preproc/diagnostic_name/ta/metadata.yml
+    - /path/to/recipe_output/preproc/diagnostic_name/pr/metadata.yml
+
+Custom settings in the script section of the recipe will also be made available in this file.
+
+There are three directories defined:
+
+- :code:`run_dir` use this for storing temporary files
+- :code:`work_dir` use this for storing NetCDF files containing the data used to make a plot
+- :code:`plot_dir` use this for storing plots
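+
+Diagnostics in languages without convenience functions can read the file
+directly; a minimal sketch, shown in Python for brevity (Python diagnostics
+should use :meth:`esmvaltool.diag_scripts.shared.run_diagnostic` instead):
+
+.. code-block:: python
+
+   import sys
+
+   import yaml
+
+   # The backend passes the path to settings.yml as the first argument.
+   with open(sys.argv[1]) as file:
+       settings = yaml.safe_load(file)
+
+   print(settings['run_dir'], settings['work_dir'], settings['plot_dir'])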
+
+Finally :code:`input_files` is a list of YAML files, containing a description of the preprocessed data. Each entry in these
+YAML files is a path to a preprocessed file in NetCDF format, with a list of various attributes.
+An example preprocessor metadata.yml file could look like this
+
+.. code:: yaml
+
+  ? /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc
+  : cmor_table: CMIP5
+    dataset: GFDL-ESM2G
+    diagnostic: diagnostic_name
+    end_year: 2002
+    ensemble: r1i1p1
+    exp: historical
+    filename: /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc
+    frequency: mon
+    institute: [NOAA-GFDL]
+    long_name: Precipitation
+    mip: Amon
+    modeling_realm: [atmos]
+    preprocessor: preprocessor_name
+    project: CMIP5
+    recipe_dataset_index: 1
+    reference_dataset: MPI-ESM-LR
+    short_name: pr
+    standard_name: precipitation_flux
+    start_year: 2000
+    units: kg m-2 s-1
+    variable_group: pr
+  ? /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc
+  : cmor_table: CMIP5
+    dataset: MPI-ESM-LR
+    diagnostic: diagnostic_name
+    end_year: 2002
+    ensemble: r1i1p1
+    exp: historical
+    filename: /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc
+    frequency: mon
+    institute: [MPI-M]
+    long_name: Precipitation
+    mip: Amon
+    modeling_realm: [atmos]
+    preprocessor: preprocessor_name
+    project: CMIP5
+    recipe_dataset_index: 2
+    reference_dataset: MPI-ESM-LR
+    short_name: pr
+    standard_name: precipitation_flux
+    start_year: 2000
+    units: kg m-2 s-1
+    variable_group: pr
+
+Generic interface between diagnostic and backend
+------------------------------------------------
+
+After the diagnostic script has finished running, the backend will try to store provenance information. In order to
+link the produced files to input data, the diagnostic script needs to store a YAML file called :code:`diagnostic_provenance.yml`
+in its :code:`run_dir`.
+
+For each output file produced by the diagnostic script, there should be an entry in the :code:`diagnostic_provenance.yml` file.
+The name of each entry should be the path to the output file.
+Each file entry should at least contain the following items
+
+- :code:`ancestors` a list of input files used to create the plot
+- :code:`caption` a caption text for the plot
+- :code:`plot_file` if the diagnostic also created a plot file, e.g. in .png format.
+
+Each file entry can also contain items from the categories defined in the file :code:`esmvaltool/config-references.yml`.
+The short entries will automatically be replaced by their longer equivalent in the final provenance records.
+It is possible to add custom provenance information by adding custom items to entries.
+
+An example :code:`diagnostic_provenance.yml` file could look like this
+
+.. code:: yaml
+
+  ? /path/to/recipe_output/work/diagnostic_name/script_name/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002_mean.nc
+  : ancestors:
+      - /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc
+    authors: [ande_bo, righ_ma]
+    caption: Average Precipitation between 2000 and 2002 according to GFDL-ESM2G.
+    domains: [global]
+    plot_file: /path/to/recipe_output/plots/diagnostic_name/script_name/CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_T2Ms_pr_2000-2002_mean.png
+    plot_type: zonal
+    references: [acknow_project]
+    statistics: [mean]
+  ? /path/to/recipe_output/work/diagnostic_name/script_name/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002_mean.nc
+  : ancestors:
+      - /path/to/recipe_output/preproc/diagnostic_name/pr/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002.nc
+    authors: [ande_bo, righ_ma]
+    caption: Average Precipitation between 2000 and 2002 according to MPI-ESM-LR.
+    domains: [global]
+    plot_file: /path/to/recipe_output/plots/diagnostic_name/script_name/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T2Ms_pr_2000-2002_mean.png
+    plot_type: zonal
+    references: [acknow_project]
+    statistics: [mean]
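+
+For diagnostics that cannot use the Python or NCL convenience functions, the
+file can be written directly. A minimal sketch, in Python for brevity (all
+paths and values below are placeholders, not real output):
+
+.. code-block:: python
+
+   import os
+
+   import yaml
+
+   # run_dir is provided to the diagnostic via settings.yml.
+   run_dir = '/path/to/recipe_output/run/diagnostic_name/script_name'
+
+   record = {
+       '/path/to/output_mean.nc': {
+           'ancestors': ['/path/to/input.nc'],
+           'caption': 'A caption describing the output.',
+           'plot_file': '/path/to/output_mean.png',
+       },
+   }
+   with open(os.path.join(run_dir, 'diagnostic_provenance.yml'), 'w') as file:
+       yaml.safe_dump(record, file)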
+
+You can check whether your diagnostic script successfully provided the provenance information to the backend by
+verifying that
+
+- for each output file in the :code:`work_dir`, a file with the same name, but ending with _provenance.xml is created
+- any NetCDF files created by your diagnostic script contain a 'provenance' global attribute
+- any PNG plots created by your diagnostic script contain the provenance information in the 'Image History' attribute
+
+Note that this is done automatically by the ESMValTool backend.
diff --git a/doc/sphinx/source/developer_guide2/porting.inc b/doc/sphinx/source/developer_guide2/porting.inc
index 0487871e85..e707c060f3 100644
--- a/doc/sphinx/source/developer_guide2/porting.inc
+++ b/doc/sphinx/source/developer_guide2/porting.inc
@@ -51,90 +51,91 @@ The new ESMValTool version includes a completely revised interface, handling the
 .. tabularcolumns:: |p{6cm}|p{6cm}|p{3cm}|
 
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| Name in v1.0                                    | Name in v2.0                                        | Affected code    |
-+=================================================+=====================================================+==================+
-| ``getenv("ESMValTool_wrk_dir")``                | ``config_user_info@work_dir``                       | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``getenv(ESMValTool_att)``                      | ``diag_script_info@att`` or                         | all .ncl scripts |
-|                                                 | ``config_user_info@att``                            |                  |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``xml``                                         | ``yml``                                             | all scripts      |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``var_attr_ref(0)``                             | ``variable_info@reference_dataset``                 | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``var_attr_ref(1)``                             | ``variable_info@alternative_dataset``               | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``models``                                      | ``dataset_info`` or ``input_file_info``             | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``models@name``                                 | ``dataset_info@dataset`` or                         | all .ncl scripts |
-|                                                 | ``input_file_info@dataset``                         |                  |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``verbosity``                                   | ``config_user_info@log_level``                      | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``isfilepresent_esmval``                        | ``fileexists``                                      | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``messaging.ncl``                               | ``logging.ncl``                                     | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``info_output(arg1, arg2, arg3)``               | ``log_info(arg1)`` if ``arg3=1``                    | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``info_output(arg1, arg2, arg3)``               | ``log_debug(arg1)`` if ``arg3>1``                   | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``verbosity = config_user_info@verbosity``      | remove this statement                               | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``enter_msg(arg1, arg2, arg3)``                 | ``enter_msg(arg1, arg2)``                           | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``leave_msg(arg1, arg2, arg3)``                 | ``leave_msg(arg1, arg2)``                           | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``noop()``                                      | appropriate ``if-else`` statement                   | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``nooperation()``                               | appropriate ``if-else`` stsatement                  | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``fullpaths``                                   | ``input_file_info@filename``                        | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``get_output_dir(arg1, arg2)``                  | ``config_user_info@plot_dir``                       | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``get_work_dir``                                | ``config_user_info@work_dir``                       | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``inlist(arg1, arg2)``                          | ``any(arg1.eq.arg2)``                               | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``load interface_scripts/*.ncl``                | ``load interface_scripts/interface.ncl``            | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``_info.tmp``                                   | ``_info.ncl`` in ``preproc`` dir                    | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``ncl.interface``                               | ``settings.ncl`` in ``run_dir`` and                 | all .ncl scripts |
-|                                                 | ``interface_scripts/interface.ncl``                 |                  |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``load diag_scripts/lib/ncl/``                  | ``load diag_scripts/shared/``                       | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``load plot_scripts/ncl/``                      | ``load diag_scripts/shared/plot/``                  | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``load diag_scripts/lib/ncl/rgb/``              | ``load diag_scripts/shared/plot/rgb/``              | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``load diag_scripts/lib/ncl/styles/``           | ``load diag_scripts/shared/plot/styles``            | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``load diag_scripts/lib/ncl/misc_function.ncl`` | ``load diag_scripts/shared/plot/misc_function.ncl`` | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``LW_CRE``, ``SW_CRE``                          | ``lwcre``, ``swcre``                                | some yml recipes |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``check_min_max_models``                        | ``check_min_max_datasets``                          | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``get_ref_model_idx``                           | ``get_ref_dataset_idx``                             | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
-| ``get_model_minus_ref``                         | ``get_dataset_minus_ref``                           | all .ncl scripts |
-+-------------------------------------------------+-----------------------------------------------------+------------------+
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| Name in v1.0                                    | Name in v2.0                                               | Affected code    |
++=================================================+============================================================+==================+
+| ``getenv("ESMValTool_wrk_dir")``                | ``config_user_info@work_dir``                              | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``getenv(ESMValTool_att)``                      | ``diag_script_info@att`` or                                | all .ncl scripts |
+|                                                 | ``config_user_info@att``                                   |                  |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``xml``                                         | ``yml``                                                    | all scripts      |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``var_attr_ref(0)``                             | ``variable_info@reference_dataset``                        | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``var_attr_ref(1)``                             | ``variable_info@alternative_dataset``                      | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``models``                                      | ``input_file_info``                                        | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``models@name``                                 | ``input_file_info@dataset``                                | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``verbosity``                                   | ``config_user_info@log_level``                             | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``isfilepresent_esmval``                        | ``fileexists``                                             | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``messaging.ncl``                               | ``logging.ncl``                                            | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``info_output(arg1, arg2, arg3)``               | ``log_info(arg1)`` if ``arg3=1``                           | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``info_output(arg1, arg2, arg3)``               | ``log_debug(arg1)`` if ``arg3>1``                          | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``verbosity = config_user_info@verbosity``      | remove this statement                                      | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``enter_msg(arg1, arg2, arg3)``                 | ``enter_msg(arg1, arg2)``                                  | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``leave_msg(arg1, arg2, arg3)``                 | ``leave_msg(arg1, arg2)``                                  | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``noop()``                                      | appropriate ``if-else`` statement                          | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``nooperation()``                               | appropriate ``if-else`` statement                          | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``fullpaths``                                   | ``input_file_info@filename``                               | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``get_output_dir(arg1, arg2)``                  | ``config_user_info@plot_dir``                              | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``get_work_dir``                                | ``config_user_info@work_dir``                              | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``inlist(arg1, arg2)``                          | ``any(arg1.eq.arg2)``                                      | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``load interface_scripts/*.ncl``                | ``load $diag_scripts/../interface_scripts/interface.ncl`` | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``_info.tmp``                                   | ``_info.ncl`` in ``preproc`` dir                           | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``ncl.interface``                               | ``settings.ncl`` in ``run_dir`` and                        | all .ncl scripts |
+|                                                 | ``interface_scripts/interface.ncl``                        |                  |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``load diag_scripts/lib/ncl/``                  | ``load $diag_scripts/shared/``                             | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``load plot_scripts/ncl/``                      | ``load $diag_scripts/shared/plot/``                        | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``load diag_scripts/lib/ncl/rgb/``              | ``load $diag_scripts/shared/plot/rgb/``                    | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``load diag_scripts/lib/ncl/styles/``           | ``load $diag_scripts/shared/plot/styles``                  | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``load diag_scripts/lib/ncl/misc_function.ncl`` | ``load $diag_scripts/shared/plot/misc_function.ncl``       | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``LW_CRE``, ``SW_CRE``                          | ``lwcre``, ``swcre``                                       | some yml recipes |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``check_min_max_models``                        | ``check_min_max_datasets``                                 | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``get_ref_model_idx``                           | ``get_ref_dataset_idx``                                    | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
+| ``get_model_minus_ref``                         | ``get_dataset_minus_ref``                                  | all .ncl scripts |
++-------------------------------------------------+------------------------------------------------------------+------------------+
 
 The following changes may also have to be considered:
 
 - namelists are now called recipes and collected in ``esmvaltool/recipes``;
 - models are now called datasets and all files have been updated accordingly, including NCL functions (see table above);
 - ``run_dir`` (previous ``interface_data``), ``plot_dir``, ``work_dir`` are now unique to each diagnostic script, so it is no longer necessary to define specific paths in the diagnostic scripts to prevent file collision;
-- the interface functions ``interface_get_*`` and ``get_figure_filename`` are no longer available: their functionalities can be easily reproduced using the ``dataset_info`` and ``input_file_info`` logicals and their attributes;
+- ``input_file_info`` is now a list of a list of logicals, where each element describes one dataset and one variable.
Convenience functions to extract the required elements (e.g., all datasets of a given variable) are provided in ``esmvaltool/interface_scripts/interface.ncl``;
+- the interface functions ``interface_get_*`` and ``get_figure_filename`` are no longer available: their functionalities can be easily reproduced using the ``input_file_info`` and the convenience functions in ``esmvaltool/interface_scripts/interface.ncl`` to access the required attributes;
 - there are now only 4 log levels (``debug``, ``info``, ``warning``, and ``error``) instead of (infinite) numerical values in ``verbosity``
 - diagnostic scripts are now organized in subdirectories in ``esmvaltool/diag_scripts/``: all scripts belonging to the same diagnostics are to be collected in a single subdirectory (see ``esmvaltool/diag_scripts/perfmetrics/`` for example). This applies also to the ``aux_`` scripts, unless they are shared among multiple diagnostics (in this case they go in ``shared/``);
+- the relevant input_file_info items required by a plot routine should be passed as arguments to the routine itself;
 - upper case characters have to be avoided in script names, if possible.
 
-As for the recipe, the diagnostic script ``./esmvaltool/diag_scripts/perfmetrics_main.ncl`` can be followed as working example.
+As for the recipe, the diagnostic script ``./esmvaltool/diag_scripts/perfmetrics/main.ncl`` can be followed as a working example.
 
 Move preprocessing from the diagnostic script to the backend
 ============================================================
 
diff --git a/doc/sphinx/source/figures/schematic.png b/doc/sphinx/source/figures/schematic.png
index c112604106..fce8f16c69 100644
Binary files a/doc/sphinx/source/figures/schematic.png and b/doc/sphinx/source/figures/schematic.png differ
diff --git a/doc/sphinx/source/process_ncl_docs2.py b/doc/sphinx/source/process_ncl_docs2.py
deleted file mode 100644
index 006ae4c032..0000000000
--- a/doc/sphinx/source/process_ncl_docs2.py
+++ /dev/null
@@ -1,282 +0,0 @@
-"""
-This script is part of the ESMValTool distribution. It's been added as part of
-the incorporation of the Sphinx documentation generator. Sphinx was originally
-developed for documenting Python code, and one of its features is that it is
-able - using the so-called autodoc extension - to extract documentation strings
-from Python source files and use them in the documentation it generates.
-
-The autodoc feature apparently does not exist for NCL source files (such as
-those which are used in ESMValTool), but it has been mimicked
-(or - more-or-less - reverse-engineered) here via this script, which walks
-through a subset of the ESMValTool NCL scripts, extracts function names,
-argument lists and descriptions (from the comments immediately following the
-function definition), and assembles them in a subdirectory of
-doc/sphinx/source. These output files are in the so-called reStructuredText
-format (see, e.g., http://docutils.sourceforge.net/rst.html), which is the
-markup language used by Sphinx; running make in doc/sphinx builds the
-ESMValTool documentation from them, as noted above.
-
-Created on July 14, 2015
-
-@author: jeremy.walton@metoffice.gov.uk
-"""
-
-import os
-import glob
-import re
-import string
-import collections
-
-
-def make_param_details(params):
-    """
-    Create a list of parameter names and types from the params string.
-    :param params:
-    :return:
-    """
-
-    # We'll store the parameter names and their types in a dictionary.
- # Note that it has to be an ordered dictionary, because later on we want - # to pull the entries out in the same order - # that we added them. - param_details = collections.OrderedDict() - for param in params: - - # Extract the type if it's specified, - # otherwise default to integer (say). - if ':' in param: - [pname, ptype] = param.split(':') - else: - pname = param - ptype = 'integer' - - # If the parameter is an array, - # we only want its name in the description. - pname = pname.split('[')[0] - pname = pname.strip() - - # Tie the name and the type of the parameter together. - param_details[pname] = ptype - - return param_details - - -def process_params(params, inp, oup): - """ - Extract the parameter names and types from the params string, pull their - descriptions out from the input file and reformat the lot in the output. - """ - # Get the names and types. - param_details = make_param_details(params) - - # We assume we're at the line before the first parameter description. - # Bump it, then check to see if we're really at the right location and - # issue a warning if not. - line = next(inp) - param_keys = list(param_details.keys()) - if param_keys[0] not in line: - print("Warning - parameter " + param_keys[0] + - " not found in this line:\n" + line) - - # Want a blank line just before parameter descriptions. - oup.write('\n') - - # Loop over all parameters in the argument list. - for i, pname in enumerate(param_keys): - - # Now assemble the description from the line(s). - if pname in line: - - # Get the text in the line which follows the first occurrence - # (reading from the left) of the parameter name, then strip - # trailing spaces (including the CR). - pdesc = line.split(pname, 1)[1] - pdesc = pdesc.rstrip() - - # The description could continue on the following lines, which - # need to be concatenated together. For all except the last - # parameter, the end of the description is signaled by the name of - # the next parameter. For the last (or maybe the only) parameter, - # it's signaled by a blank line. - line = next(inp) - if i < len(param_keys)-1: - pnext = param_keys[i + 1] - if pnext not in line: - # Do the concatenation, stripping whitespace - # (including the CR) as we go. - while pnext not in line: - pdesc += " " + line.replace(';;', ' ', 1).strip() - line = next(inp) - else: - while not line.replace(';;', ' ', 1).isspace(): - pdesc += " " + line.replace(';;', ' ', 1).strip() - line = next(inp) - - # Ensure the description starts with a colon. - if pdesc[0] != ':': - pdesc = ':' + pdesc - - # Write out the complete description of this parameter. - oup.write(' :param ' + param_details[pname] + ' ' - + pname + pdesc + '\n') - - # Want a blank line just after parameter descriptions. - oup.write('\n') - - -def find_argument(inp): - """ - Find the start of the Arguments list. - """ - - line = next(inp) - count = 1 - while 'Arguments' not in line: - line = next(inp) - - # We assume we're going to find this within two lines of the original - # location of the input - # - stop looking if we don't. - count += 1 - if count > 2: - return False - - return True - - -def parse_file(in_filename, out_filename): - """ - Processes an ncl file and produces an rst file as output, which contains - documentation of the ncl functions in a form suitable for input to - the Sphinx documentation generator. - :param in_filename: - :param out_filename: - :return: - """ - - # Open the files. 
- try: - inp = open(in_filename, "r") - except IOError: - print("Couldn't open", in_filename) - return - - try: - oup = open(out_filename, "w") - except IOError: - print("Couldn't open", out_filename) - return - - # We assume the file name has the form /path/to/foo.ncl, and the - # module name is foo. Pull it out, and write it to the output file - # as the title. - mod_name = os.path.splitext(os.path.basename(in_filename))[0] - - oup.write(':mod:' + '`' + mod_name + '`' + '\n') - oup.write("=" * (7+len(mod_name)) + '\n') - - for line in inp: - - # Is this the start of a function? - if re.match('^function', line) or re.match('^procedure', line): - - # The function could have parameters on the following lines. - # Concatenate them up until the closing bracket, stripping - # whitespace (including the CR) as we go. - fname = line.rstrip() - while ')' not in fname: - line = next(inp) - fname += " " + line.strip() - - # Some ncl files have backslashes in the function declaration to - # indicate continuation to the next line (even though this isn't - # necessary in ncl). These will mess up our processing of - # the argument list, and don't look good in the doc. so we pull - # them out here. - fname = fname.replace('\\', '') - - # Write the line out from the word 'function' onwards, and suitably - # decorated for rst. Need the CR at the end, as we've been pulling - # that off throughout the assembly of this line. - oup.write('.. function:: ' + fname[len('function')+1:] + '\n') - - # Now extract the list of parameters from the function declaration. - # First, pullout the text between the brackets, then split that - # into individual parameter names. - plist = fname.split('(')[1].split(')')[0] - params = plist.split(',') - - # Position the input just after the line containing 'Arguments'. - if not find_argument(inp): - print("Warning - argument list not found for " + fname) - else: - - # Here's where we check whether this function has any - # parameters. If it doesn't, then we don't need to - # process any. - if len(plist) > 0: - # Read the parameter descriptions and reformat them - # before writing them out. - process_params(params, inp, oup) - - # We assume the first batch of comments immediately following - # the function arepart of the documentation. - line = next(inp) - while re.match('^;;', line): - - # Write out this line, replacing the comments with spaces. - oup.write(line.replace(';;', ' ', 1)) - line = next(inp) - - # Close the files. - inp.close() - oup.close() - - -def create_doc_files_from_ncl(): - # Do some rudimentary checking of where this script is being run from, - # because we're going to be using relative paths below to find the - # directories containing the input & output. - file_path = os.path.dirname(os.path.realpath(__file__)) - esmval_root_folder = os.path.abspath(os.path.join(file_path, '..', '..', - '..')) - - # List the directories containing input files, then loop over them. - ncl_folders = {'diag_scripts': 'esmvaltool/diag_scripts/lib/ncl', - 'plot_scripts': 'esmvaltool/plot_scripts/ncl'} - for ncl_folder in ncl_folders: - in_dir = os.path.join(esmval_root_folder, ncl_folders[ncl_folder]) - # Form the output directory name from the input directory name - # (NB we assume the latter are all named ../../../foo/bar, where foo - # is the useful part of the name. - out_dir = os.path.join(esmval_root_folder, "doc/sphinx/source/", - ncl_folder) - if not os.path.isdir(out_dir): - os.makedirs(out_dir) - - # Find all the ncl files in the input directory, and loop over them. 
- in_files = glob.glob(os.path.join(in_dir, '*.ncl')) - index_file = open(os.path.join(out_dir, 'index.rst'), 'w') - write_index_header(index_file, ncl_folder) - - for nclFile in in_files: - print("Processing " + nclFile) - rst_filename = os.path.basename(nclFile).replace('.ncl', '.rst') - rst_file = os.path.join(out_dir, rst_filename) - parse_file(nclFile, rst_file) - index_file.write(' ') - index_file.write(os.path.basename(nclFile).replace('.ncl', '')) - index_file.write('\n') - - -def write_index_header(index_file, ncl_folder): - index_file.write(ncl_folder.upper()) - index_file.write('\n') - index_file.write('-' * len(ncl_folder)) - index_file.write('\n') - index_file.write('\n') - index_file.write('.. toctree::\n :maxdepth: 2\n\n') - - -if __name__ == '__main__': - create_doc_files_from_ncl() diff --git a/doc/sphinx/source/recipes/figures/Index_NAO.png b/doc/sphinx/source/recipes/figures/Index_NAO.png new file mode 100644 index 0000000000..f5ed01aa9a Binary files /dev/null and b/doc/sphinx/source/recipes/figures/Index_NAO.png differ diff --git a/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-LR_1980-2005.png b/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-LR_1980-2005.png new file mode 100644 index 0000000000..f65013283b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-LR_1980-2005.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/cloud_lweffect.png b/doc/sphinx/source/recipes/figures/clouds/cloud_lweffect.png new file mode 100644 index 0000000000..74bb43dab2 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/cloud_lweffect.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/cloud_neteffect.png b/doc/sphinx/source/recipes/figures/clouds/cloud_neteffect.png new file mode 100644 index 0000000000..79f4696245 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/cloud_neteffect.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/cloud_sweffect.png b/doc/sphinx/source/recipes/figures/clouds/cloud_sweffect.png new file mode 100644 index 0000000000..52b00c8c45 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/cloud_sweffect.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/cloud_var_multi.png b/doc/sphinx/source/recipes/figures/clouds/cloud_var_multi.png new file mode 100644 index 0000000000..54a85593ae Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/cloud_var_multi.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/liq_h2o_path_multi.png b/doc/sphinx/source/recipes/figures/clouds/liq_h2o_path_multi.png new file mode 100644 index 0000000000..57052e850d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/liq_h2o_path_multi.png differ diff --git a/doc/sphinx/source/recipes/figures/clouds/liq_h2o_taylor.png b/doc/sphinx/source/recipes/figures/clouds/liq_h2o_taylor.png new file mode 100644 index 0000000000..7e1e27ab1b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/clouds/liq_h2o_taylor.png differ diff --git a/doc/sphinx/source/recipes/figures/combined_climate_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png b/doc/sphinx/source/recipes/figures/combined_climate_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png new file mode 100644 index 0000000000..cb48e377ec Binary files /dev/null and b/doc/sphinx/source/recipes/figures/combined_climate_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png differ diff 
--git a/doc/sphinx/source/recipes/figures/crem/crem_error_metric.png b/doc/sphinx/source/recipes/figures/crem/crem_error_metric.png new file mode 100644 index 0000000000..98bfb8e6b2 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/crem/crem_error_metric.png differ diff --git a/doc/sphinx/source/recipes/figures/cvdp/nam.prreg.ann.png b/doc/sphinx/source/recipes/figures/cvdp/nam.prreg.ann.png new file mode 100755 index 0000000000..70e788bec6 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/cvdp/nam.prreg.ann.png differ diff --git a/doc/sphinx/source/recipes/figures/diurnal_temp_index/rcp85_diurnal.png b/doc/sphinx/source/recipes/figures/diurnal_temp_index/rcp85_diurnal.png new file mode 100644 index 0000000000..0747de51b0 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/diurnal_temp_index/rcp85_diurnal.png differ diff --git a/doc/sphinx/source/recipes/figures/ensclus/ensclus.png b/doc/sphinx/source/recipes/figures/ensclus/ensclus.png new file mode 100644 index 0000000000..9e721eeea7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ensclus/ensclus.png differ diff --git a/doc/sphinx/source/recipes/figures/extreme_events/cdd_timeseries.png b/doc/sphinx/source/recipes/figures/extreme_events/cdd_timeseries.png new file mode 100644 index 0000000000..e67e0f0180 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/extreme_events/cdd_timeseries.png differ diff --git a/doc/sphinx/source/recipes/figures/extreme_events/gleckler.png b/doc/sphinx/source/recipes/figures/extreme_events/gleckler.png new file mode 100644 index 0000000000..9120303ea0 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/extreme_events/gleckler.png differ diff --git a/doc/sphinx/source/recipes/figures/heatwaves/tasmax_extreme_spell_durationsummer_IPSL-CM5A-LR_rcp85_2020_2040.png b/doc/sphinx/source/recipes/figures/heatwaves/tasmax_extreme_spell_durationsummer_IPSL-CM5A-LR_rcp85_2020_2040.png new file mode 100644 index 0000000000..04c843fc22 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/heatwaves/tasmax_extreme_spell_durationsummer_IPSL-CM5A-LR_rcp85_2020_2040.png differ diff --git a/doc/sphinx/source/recipes/figures/hyint/hyint_maps.png b/doc/sphinx/source/recipes/figures/hyint/hyint_maps.png new file mode 100644 index 0000000000..7d2075514f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hyint/hyint_maps.png differ diff --git a/doc/sphinx/source/recipes/figures/hyint/hyint_timeseries.png b/doc/sphinx/source/recipes/figures/hyint/hyint_timeseries.png new file mode 100644 index 0000000000..549678d667 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hyint/hyint_timeseries.png differ diff --git a/doc/sphinx/source/recipes/figures/hyint/hyint_trends.png b/doc/sphinx/source/recipes/figures/hyint/hyint_trends.png new file mode 100644 index 0000000000..018c0bc903 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/hyint/hyint_trends.png differ diff --git a/doc/sphinx/source/recipes/figures/landcover/area_treeFrac.png b/doc/sphinx/source/recipes/figures/landcover/area_treeFrac.png new file mode 100644 index 0000000000..e77017e48b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/landcover/area_treeFrac.png differ diff --git a/doc/sphinx/source/recipes/figures/landcover/bias_CMIP5_MPI-ESM-LR_rcp85_r1i1p1.png b/doc/sphinx/source/recipes/figures/landcover/bias_CMIP5_MPI-ESM-LR_rcp85_r1i1p1.png new file mode 100644 index 0000000000..16fb12cea7 Binary files /dev/null and 
b/doc/sphinx/source/recipes/figures/landcover/bias_CMIP5_MPI-ESM-LR_rcp85_r1i1p1.png differ diff --git a/doc/sphinx/source/recipes/figures/landcover/frac_grassFrac.png b/doc/sphinx/source/recipes/figures/landcover/frac_grassFrac.png new file mode 100644 index 0000000000..5abb1a74ee Binary files /dev/null and b/doc/sphinx/source/recipes/figures/landcover/frac_grassFrac.png differ diff --git a/doc/sphinx/source/recipes/figures/miles/miles_block.png b/doc/sphinx/source/recipes/figures/miles/miles_block.png new file mode 100644 index 0000000000..713e1a7d1f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/miles/miles_block.png differ diff --git a/doc/sphinx/source/recipes/figures/miles/miles_eof1.png b/doc/sphinx/source/recipes/figures/miles/miles_eof1.png new file mode 100644 index 0000000000..a9184599d9 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/miles/miles_eof1.png differ diff --git a/doc/sphinx/source/recipes/figures/modes_of_variability/DJF-psl_observed_regimes.png b/doc/sphinx/source/recipes/figures/modes_of_variability/DJF-psl_observed_regimes.png new file mode 100644 index 0000000000..fb2abe6f5b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/modes_of_variability/DJF-psl_observed_regimes.png differ diff --git a/doc/sphinx/source/recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png b/doc/sphinx/source/recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png new file mode 100644 index 0000000000..9ca805dd9b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png differ diff --git a/doc/sphinx/source/recipes/figures/ocean/amoc_fig_1.png b/doc/sphinx/source/recipes/figures/ocean/amoc_fig_1.png new file mode 100644 index 0000000000..484ba66371 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ocean/amoc_fig_1.png differ diff --git a/doc/sphinx/source/recipes/figures/ocean/ocean_quad_plot1.png b/doc/sphinx/source/recipes/figures/ocean/ocean_quad_plot1.png new file mode 100644 index 0000000000..7e270e5a52 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ocean/ocean_quad_plot1.png differ diff --git a/doc/sphinx/source/recipes/figures/ocean/ocean_sea_ice1.png b/doc/sphinx/source/recipes/figures/ocean/ocean_sea_ice1.png new file mode 100644 index 0000000000..095e707705 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ocean/ocean_sea_ice1.png differ diff --git a/doc/sphinx/source/recipes/figures/ocean/stream_function1.png b/doc/sphinx/source/recipes/figures/ocean/stream_function1.png new file mode 100644 index 0000000000..7213bfa6e4 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ocean/stream_function1.png differ diff --git a/doc/sphinx/source/recipes/figures/ocean/stream_function2.png b/doc/sphinx/source/recipes/figures/ocean/stream_function2.png new file mode 100644 index 0000000000..3debf2f537 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/ocean/stream_function2.png differ diff --git a/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_1.png b/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_1.png index 54cb707f8c..87a83561f9 100644 Binary files a/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_1.png and b/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_1.png differ diff --git a/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_2.png 
b/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_2.png index 690da3b7a8..a531e23804 100644 Binary files a/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_2.png and b/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_2.png differ diff --git a/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_3.png b/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_3.png index da2c1eee5c..24ede914b3 100644 Binary files a/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_3.png and b/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_3.png differ diff --git a/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_4.png b/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_4.png index b4f733b7f2..26b563d158 100644 Binary files a/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_4.png and b/doc/sphinx/source/recipes/figures/perfmetrics/perfmetrics_fig_4.png differ diff --git a/doc/sphinx/source/recipes/figures/runoff_et/MPI-ESM-LR_historical_r1i1p1_bias-plot_mrro.png b/doc/sphinx/source/recipes/figures/runoff_et/MPI-ESM-LR_historical_r1i1p1_bias-plot_mrro.png new file mode 100644 index 0000000000..46c7cfcf78 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/runoff_et/MPI-ESM-LR_historical_r1i1p1_bias-plot_mrro.png differ diff --git a/doc/sphinx/source/recipes/figures/runoff_et/MPI-ESM-LR_historical_r1i1p1_rocoef-vs-relprbias.png b/doc/sphinx/source/recipes/figures/runoff_et/MPI-ESM-LR_historical_r1i1p1_rocoef-vs-relprbias.png new file mode 100644 index 0000000000..57ba1ed185 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/runoff_et/MPI-ESM-LR_historical_r1i1p1_rocoef-vs-relprbias.png differ diff --git a/doc/sphinx/source/recipes/figures/runoff_et/catchments.png b/doc/sphinx/source/recipes/figures/runoff_et/catchments.png new file mode 100644 index 0000000000..f95c18d988 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/runoff_et/catchments.png differ diff --git a/doc/sphinx/source/recipes/figures/smpi/reichlerkim08bams_smpi.png b/doc/sphinx/source/recipes/figures/smpi/reichlerkim08bams_smpi.png new file mode 100644 index 0000000000..e98e69c2e7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/smpi/reichlerkim08bams_smpi.png differ diff --git a/doc/sphinx/source/recipes/figures/spei/histogram_spei.png b/doc/sphinx/source/recipes/figures/spei/histogram_spei.png new file mode 100644 index 0000000000..9aed50720f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/spei/histogram_spei.png differ diff --git a/doc/sphinx/source/recipes/figures/spei/histogram_spi.png b/doc/sphinx/source/recipes/figures/spei/histogram_spi.png new file mode 100644 index 0000000000..94cd9ea5df Binary files /dev/null and b/doc/sphinx/source/recipes/figures/spei/histogram_spi.png differ diff --git a/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_2458_lec_diagram.pdf b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_2458_lec_diagram.pdf new file mode 100644 index 0000000000..b364d37bdd Binary files /dev/null and b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_2458_lec_diagram.pdf differ diff --git a/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_wmb_transp.png b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_wmb_transp.png new file mode 100644 index 0000000000..9a2288c73b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/CanESM2_wmb_transp.png differ diff --git 
a/doc/sphinx/source/recipes/figures/thermodyn_diagtool/IPSL-CM5A-MR_energy_climap.png b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/IPSL-CM5A-MR_energy_climap.png new file mode 100644 index 0000000000..e816d727ca Binary files /dev/null and b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/IPSL-CM5A-MR_energy_climap.png differ diff --git a/doc/sphinx/source/recipes/figures/thermodyn_diagtool/meridional_transp.png b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/meridional_transp.png new file mode 100644 index 0000000000..7397d070c4 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/thermodyn_diagtool/meridional_transp.png differ diff --git a/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_IPSL-CM5A-LR_day_historical_r1i1p1_T2M_tasmax_1999-2000.jpg b/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_IPSL-CM5A-LR_day_historical_r1i1p1_T2M_tasmax_1999-2000.jpg new file mode 100644 index 0000000000..17ad159aa8 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/toymodel/synthetic_CMIP5_IPSL-CM5A-LR_day_historical_r1i1p1_T2M_tasmax_1999-2000.jpg differ diff --git a/doc/sphinx/source/recipes/figures/zmnam/zmnam_reg.png b/doc/sphinx/source/recipes/figures/zmnam/zmnam_reg.png new file mode 100644 index 0000000000..2251dcd883 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/zmnam/zmnam_reg.png differ diff --git a/doc/sphinx/source/recipes/figures/zmnam/zmnam_ts.png b/doc/sphinx/source/recipes/figures/zmnam/zmnam_ts.png new file mode 100644 index 0000000000..2997a5a4d7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/zmnam/zmnam_ts.png differ diff --git a/doc/sphinx/source/recipes/index.rst b/doc/sphinx/source/recipes/index.rst index 7aa47cdc48..e40732e61a 100644 --- a/doc/sphinx/source/recipes/index.rst +++ b/doc/sphinx/source/recipes/index.rst @@ -1,7 +1,36 @@ +.. _recipes: + Recipes ------------- +------- .. toctree:: :maxdepth: 1 + recipe_capacity_factor + recipe_clouds + recipe_combined_climate_extreme_index + recipe_combined_indices + recipe_consecdrydays + recipe_crem + recipe_cvdp + recipe_diurnal_temperature_index + recipe_ensclus + recipe_extreme_events + recipe_flato13ipcc + recipe_heatwaves_coldwaves + recipe_hyint + recipe_landcover + recipe_miles + recipe_modes_of_variability + recipe_multimodel_products + recipe_oceans recipe_perfmetrics + recipe_quantilebias + recipe_runoff_et + recipe_rainfarm + recipe_shapeselect + recipe_smpi + recipe_spei + recipe_toymodel + recipe_thermodyn_diagtool + recipe_zmnam diff --git a/doc/sphinx/source/recipes/recipe_capacity_factor.rst b/doc/sphinx/source/recipes/recipe_capacity_factor.rst new file mode 100644 index 0000000000..9ad4b667a6 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_capacity_factor.rst @@ -0,0 +1,67 @@ +.. _yml_capacity_factor: + +Capacity factor of wind power: Ratio of average estimated power to theoretical maximum power +============================================================================================ + +Overview +-------- + +The goal of this diagnostic is to compute the wind capacity factor, taking as input the daily instantaneous surface wind speed, which is then extrapolated to obtain the wind speed at a height of 100 m as described in Lledó (2017). + +The capacity factor is a normalized indicator of the suitability of wind speed conditions to produce electricity, irrespective of the size and number of installed turbines. 
This indicator is provided for three different classes of wind turbines (IEC, 2005) that are designed specifically for low, medium and high wind speed conditions.
+
+The user can select the region, temporal range and season of interest.
+
+The output of the recipe is a NetCDF file containing the capacity factor for each of the three turbine classes.
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_capacity_factor.yml
+
+Diagnostics are stored in diag_scripts/magic_bsc/
+
+* capacity_factor.R: calculates the capacity factor for the three turbine classes.
+* PC.r: calculates the power curves for the three turbine classes.
+
+
+User settings
+-------------
+
+User setting files are stored in recipes/
+
+#. recipe_capacity_factor.yml
+
+   *Required settings for script*
+
+   * power_curves: (should not be changed)
+
+Variables
+---------
+
+* sfcWind (atmos, daily, longitude, latitude, time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+*None*
+
+References
+----------
+
+* IEC. (2005). International Standard IEC 61400-1, third edition, International Electrotechnical Commission. https://webstore.iec.ch/preview/info_iec61400-1%7Bed3.0%7Den.pdf
+
+* Lledó, L. (2017). Computing capacity factor. Technical note BSC-ESS-2017-001, Barcelona Supercomputing Center. Available online at https://earth.bsc.es/wiki/lib/exe/fetch.php?media=library:external:bsc-ess-2017-001-c4e_capacity_factor.pdf [last accessed 11 October 2018]
+
+Example plots
+-------------
+
+.. _fig_capfactor1:
+.. figure:: /recipes/figures/capacity_factor/capacity_factor_IPSL-CM5A-LR_1980-2005.png
+   :align: center
+   :width: 14cm
+
diff --git a/doc/sphinx/source/recipes/recipe_clouds.rst b/doc/sphinx/source/recipes/recipe_clouds.rst
new file mode 100644
index 0000000000..ee4e1bc182
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_clouds.rst
@@ -0,0 +1,317 @@
+Clouds
+======
+
+Overview
+--------
+
+The recipe recipe_lauer13jclim.yml computes the climatology and interannual
+variability of climate relevant cloud variables such as cloud radiative effect
+(CRE), liquid water path (lwp), cloud amount (clt), and total precipitation (pr),
+reproducing some of the evaluation results of Lauer and Hamilton (2013). The
+recipe includes a comparison of the geographical distribution of multi-year
+average cloud parameters from individual models and the multi-model mean with
+satellite observations. Taylor diagrams are generated that show the multi-year
+annual or seasonal average performance of individual models and the multi-model
+mean in reproducing satellite observations. The diagnostic also facilitates the
+assessment of the bias of the multi-model mean and zonal averages of individual
+models compared with satellite observations. Interannual variability is
+estimated as the relative temporal standard deviation of multi-year time series
+of data, with the temporal standard deviation calculated from monthly anomalies
+after subtracting the climatological mean seasonal cycle.
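+
+The interannual variability estimate described above amounts to only a few
+lines of code; the following sketch with synthetic data (not the diagnostic's
+actual implementation) illustrates the calculation for a single grid cell:
+
+.. code-block:: python
+
+   import numpy as np
+
+   # Synthetic example: 20 years of monthly values for one grid cell.
+   data = np.random.rand(240)
+   monthly = data.reshape(-1, 12)         # shape (years, 12)
+   seasonal_cycle = monthly.mean(axis=0)  # climatological mean seasonal cycle
+   anomalies = monthly - seasonal_cycle   # monthly anomalies
+   # Relative temporal standard deviation, in percent of the mean.
+   rel_std = 100 * anomalies.std() / monthly.mean()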
+ + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_lauer13jclim.yml + +Diagnostics are stored in diag_scripts/clouds/ + + * clouds.ncl: global maps of (multi-year) annual means including multi-model + mean + * clouds_bias.ncl: global maps of the multi-model mean and the multi-model + mean bias + * clouds_interannual.ncl: global maps of the interannual variability + * clouds_ipcc.ncl: global maps of multi-model mean minus observations + zonal + averages of individual models, multi-model mean and observations + * clouds_taylor.ncl: Taylor diagrams + + +User settings in recipe +----------------------- + +#. Script clouds.ncl + + *Required settings (scripts)* + + none + + *Optional settings (scripts)* + + * embracesetup: true = 2 plots per line, false = 4 plots per line (default) + * explicit_cn_levels: explicit contour levels (array) + * extralegend: plot legend(s) to extra file(s) + * filename_add: optionally add this string to plot filenames + * panel_labels: label individual panels (true, false) + * PanelTop: manual override for "@gsnPanelTop" used by panel plot(s) + * projection: map projection for plotting (default = + "CylindricalEquidistant") + * showdiff: calculate and plot differences model - reference + (default = false) + * rel_diff: if showdiff = true, then plot relative differences (%) + (default = false) + * ref_diff_min: lower cutoff value in case of calculating relative + differences (in units of input variable) + * region: show only selected geographic region given as latmin, latmax, + lonmin, lonmax + * timemean: time averaging - "seasonal" = (DJF, MAM, JJA, SON), + "annual" = annual mean + * treat_var_as_error: treat variable as error when averaging (true, false); + true: avg = sqrt(mean(var*var)), false: avg = mean(var) + + *Required settings (variables)* + + none + + *Optional settings (variables)* + + * long_name: variable description + * reference_dataset: reference dataset; REQUIRED when calculating + differences (showdiff = true) + * units: variable units (for labeling plot only) + + *Color tables* + + * variable "lwp": diag_scripts/shared/plot/rgb/qcm3.rgb + +#. Script clouds_bias.ncl + + *Required settings (scripts)* + + none + + *Optional settings (scripts)* + + * plot_abs_diff: additionally also plot absolute differences (true, false) + * plot_rel_diff: additionally also plot relative differences (true, false) + * projection: map projection, e.g., Mollweide, Mercator + * timemean: time averaging, i.e. "seasonalclim" (DJF, MAM, JJA, SON), + "annualclim" (annual mean) + + *Required settings (variables)* + + * reference_dataset: name of reference dataset + + *Optional settings (variables)* + + * long_name: description of variable + + *Color tables* + + * variable "tas": diag_scripts/shared/plot/rgb/ipcc-tas.rgb, + diag_scripts/shared/plot/rgb/ipcc-tas-delta.rgb + * variable "pr-mmday": diag_scripts/shared/plot/rgb/ipcc-precip.rgb, + diag_scripts/shared/plot/rgb/ipcc-precip-delta.rgb + +#.
Script clouds_interannual.ncl + + *Required settings (scripts)* + + none + + *Optional settings (scripts)* + + * colormap: e.g., WhiteBlueGreenYellowRed, rainbow + * explicit_cn_levels: use these contour levels for plotting + * extrafiles: write plots for individual models to separate files + (true, false) + * projection: map projection, e.g., Mollweide, Mercator + + *Required settings (variables)* + + none + + *Optional settings (variables)* + + * long_name: description of variable + * reference_dataset: name of reference dataset + + *Color tables* + + * variable "lwp": diag_scripts/shared/plot/rgb/qcm3.rgb + +#. Script clouds_ipcc.ncl + + *Required settings (scripts)* + + none + + *Optional settings (scripts)* + + * explicit_cn_levels: contour levels + * mask_ts_sea_ice: true = mask T < 272 K as sea ice (only for variable "ts"); + false = no additional grid cells masked for variable "ts" + * projection: map projection, e.g., Mollweide, Mercator + * styleset: style set for zonal mean plot ("CMIP5", "DEFAULT") + * timemean: time averaging, i.e. "seasonalclim" (DJF, MAM, JJA, SON), + "annualclim" (annual mean) + * valid_fraction: used for creating sea ice mask (mask_ts_sea_ice = true): + fraction of valid time steps required to mask grid cell as valid data + + *Required settings (variables)* + + * reference_dataset: name of reference data set + + *Optional settings (variables)* + + * long_name: description of variable + * units: variable units + + *Color tables* + + * variables "pr", "pr-mmday": diag_scripts/shared/plot/rgb/ipcc-precip-delta.rgb + +#. Script clouds_taylor.ncl + + *Required settings (scripts)* + + none + + *Optional settings (scripts)* + + * embracelegend: false (default) = include legend in plot, max. 2 columns + with dataset names in legend; true = write extra file with legend, max. 7 + dataset names per column in legend, alternative observational dataset(s) + will be plotted as a red star and labeled "altern. ref. dataset" in legend + (only if dataset is of class "OBS") + * estimate_obs_uncertainty: true = estimate observational uncertainties + from mean values (assuming fractions of obs. RMSE from documentation of + the obs data); only available for "CERES-EBAF", "MODIS", "MODIS-L3"; + false = do not estimate obs.
uncertainties from mean values + * filename_add: legacy feature: arbitrary string to be added to all + filenames of plots and netcdf output produced (default = "") + * mask_ts_sea_ice: true = mask T < 272 K as sea ice (only for variable "ts"); + false = no additional grid cells masked for variable "ts" + * styleset: "CMIP5", "DEFAULT" (if not set, clouds_taylor.ncl will create a + color table and symbols for plotting) + * timemean: time averaging; annualclim (default) = 1 plot annual mean; + seasonalclim = 4 plots (DJF, MAM, JJA, SON) + * valid_fraction: used for creating sea ice mask (mask_ts_sea_ice = true): + fraction of valid time steps required to mask grid cell as valid data + + *Required settings (variables)* + + * reference_dataset: name of reference data set + + *Optional settings (variables)* + + none + + +Variables +--------- + +* clwvi (atmos, monthly mean, longitude latitude time) +* clivi (atmos, monthly mean, longitude latitude time) +* clt (atmos, monthly mean, longitude latitude time) +* pr (atmos, monthly mean, longitude latitude time) +* rlut, rlutcs (atmos, monthly mean, longitude latitude time) +* rsut, rsutcs (atmos, monthly mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +*Note: (1) obs4mips data can be used directly without any preprocessing; +(2) see headers of reformat scripts for non-obs4mips data for download +instructions.* + +* CERES-EBAF (obs4mips) - CERES TOA radiation fluxes (used for calculation of + cloud forcing) +* GPCP-SG (obs4mips) - Global Precipitation Climatology Project total + precipitation +* MODIS (obs4mips) - MODIS total cloud fraction +* UWisc - University of Wisconsin-Madison liquid water path climatology, based + on satellite observations from TMI, SSM/I, and AMSR-E, reference: O'Dell et + al. (2008), J. Clim. + + *Reformat script:* reformat_scripts/obs/reformat_obs_UWisc.ncl + +References +---------- + +* Flato, G., J. Marotzke, B. Abiodun, P. Braconnot, S.C. Chou, W. Collins, P. + Cox, F. Driouech, S. Emori, V. Eyring, C. Forest, P. Gleckler, E. Guilyardi, + C. Jakob, V. Kattsov, C. Reason and M. Rummukainen, 2013: Evaluation of + Climate Models. In: Climate Change 2013: The Physical Science Basis. + Contribution of Working Group I to the Fifth Assessment Report of the + Intergovernmental Panel on Climate Change [Stocker, T.F., D. Qin, G.-K. + Plattner, M. Tignor, S.K. Allen, J. Boschung, A. Nauels, Y. Xia, V. Bex and + P.M. Midgley (eds.)]. Cambridge University Press, Cambridge, United Kingdom + and New York, NY, USA. + +* Lauer A., and K. Hamilton (2013), Simulating clouds with global climate + models: A comparison of CMIP5 results with CMIP3 and satellite data, J. Clim., + 26, 3823-3845, doi: 10.1175/JCLI-D-12-00451.1. + +* O’Dell, C.W., F.J. Wentz, and R. Bennartz (2008), Cloud liquid water path + from satellite-based passive microwave observations: A new climatology over + the global oceans, J. Clim., 21, 1721-1739, doi:10.1175/2007JCLI1958.1. + +* Pincus, R., S. Platnick, S.A. Ackerman, R.S. Hemler, Robert J. Patrick + Hofmann (2012), Reconciling simulated and observed views of clouds: MODIS, + ISCCP, and the limits of instrument simulators. J. Climate, 25, 4699-4720, + doi: 10.1175/JCLI-D-11-00267.1. + + +Example plots +------------- + +.. _fig_cloud_1: +..
figure:: /recipes/figures/clouds/liq_h2o_path_multi.png + :align: center + + The 20-yr average LWP (1986-2005) from the CMIP5 historical model runs and + the multi-model mean in comparison with the UWisc satellite climatology + (1988-2007) based on SSM/I, TMI, and AMSR-E (O'Dell et al. 2008). + +.. _fig_cloud_2: +.. figure:: /recipes/figures/clouds/liq_h2o_taylor.png + :align: center + :width: 7cm + + Taylor diagram showing the 20-yr annual average performance of CMIP5 models + for total cloud fraction as compared to MODIS satellite observations. + +.. _fig_cloud_3: +.. figure:: /recipes/figures/clouds/cloud_sweffect.png + :align: center + :width: 9cm + +.. figure:: /recipes/figures/clouds/cloud_lweffect.png + :align: center + :width: 9cm + +.. figure:: /recipes/figures/clouds/cloud_neteffect.png + :align: center + :width: 9cm + + 20-year average (1986-2005) annual mean cloud radiative effects of CMIP5 + models against the CERES EBAF (2001–2012). Top row shows the shortwave + effect; middle row the longwave effect, and bottom row the net effect. + Multi-model mean biases against CERES EBAF are shown on the left, whereas the + right panels show zonal averages from CERES EBAF (thick black), the + individual CMIP5 models (thin gray lines) and the multi-model mean (thick + red line). Similar to Figure 9.5 of Flato et al. (2013). + +.. _fig_cloud_4: +.. figure:: /recipes/figures/clouds/cloud_var_multi.png + :align: center + + Interannual variability of modeled and observed (GPCP) precipitation rates + estimated as relative temporal standard deviation from 20 years (1986-2005) + of data. The temporal standard deviations are calculated from monthly + anomalies after subtracting the climatological mean seasonal cycle. + diff --git a/doc/sphinx/source/recipes/recipe_combined_climate_extreme_index.rst b/doc/sphinx/source/recipes/recipe_combined_climate_extreme_index.rst new file mode 100644 index 0000000000..4186538669 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_combined_climate_extreme_index.rst @@ -0,0 +1,92 @@ +.. _recipes_extreme_index: + +Combined Climate Extreme Index +==================================================== + +Overview +-------- + +The goal of this diagnostic is to compute time series of a number of extreme events: heatwave, coldwave, heavy precipitation, drought and high wind. Then, the user can combine these different components (with or without weights). The result is an index similar to the Climate Extremes Index (CEI; Karl et al., 1996), the modified CEI (mCEI; Gleason et al., 2008) or the Actuaries Climate Index (ACI; American Academy of Actuaries, 2018). The output consists of a netcdf file containing the area-weighted and multi-model multi-metric index. This recipe can be applied to data with any temporal resolution, and the running average is computed based on the user-defined window length (e.g. a window length of 5 computes the 5-day running mean when applied to daily data, or the 5-month running mean when applied to monthly data). + +In recipe_extreme_index.yml, after defining the area and reference and projection period, the weights for each metric are selected.
The options are: +* weight_t90p: the weight of the number of days when the maximum temperature exceeds the 90th percentile, +* weight_t10p: the weight of the number of days when the minimum temperature falls below the 10th percentile, +* weight_Wx: the weight of the number of days when wind power (third power of wind speed) exceeds the 90th percentile, +* weight_cdd: the weight of the maximum length of a dry spell, defined as the maximum number of consecutive days when the daily precipitation is lower than 1 mm, and +* weight_rx5day: the weight of the maximum precipitation accumulated during 5 consecutive days. + +Available recipes and diagnostics +----------------------------------- + +Recipes are stored in recipes/ + +* recipe_extreme_index.yml + +Diagnostics are stored in diag_scripts/magic_bsc/ + +* extreme_index.r + + +User settings +------------- + +User setting files are stored in recipes/ + +#. recipe_extreme_index.yml + + *Required settings for script* + + * weight_t90p: 0.2 (from 0 to 1, the total sum of the weights should be 1) + * weight_t10p: 0.2 (from 0 to 1, the total sum of the weights should be 1) + * weight_Wx: 0.2 (from 0 to 1, the total sum of the weights should be 1) + * weight_rx5day: 0.2 (from 0 to 1, the total sum of the weights should be 1) + * weight_cdd: 0.2 (from 0 to 1, the total sum of the weights should be 1) + * running_mean: 5 (depends on the length of the future projection period selected, but recommended not greater than 11) + +Variables +--------- + +* tasmax (atmos, daily, longitude, latitude, time) +* tasmin (atmos, daily, longitude, latitude, time) +* sfcWind (atmos, daily, longitude, latitude, time) +* pr (atmos, daily, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + +References +---------- + +* Alexander L.V. and Coauthors (2006). Global observed changes in daily climate extremes of temperature and precipitation. J. Geophys. Res., 111, D05109. https://doi.org/10.1029/2005JD006290 + +* American Academy of Actuaries, Canadian Institute of Actuaries, Casualty Actuarial Society and Society of Actuaries. Actuaries Climate Index. http://actuariesclimateindex.org (2018-10-06). + +* Donat, M., and Coauthors (2013). Updated analyses of temperature and precipitation extreme indices since the beginning of the twentieth century: The HadEX2 dataset. J. Geophys. Res., 118, 2098–2118, https://doi.org/10.1002/jgrd.50150. + +* Fouillet, A., Rey, G., Laurent, F., Pavillon, G. Bellec, S., Guihenneuc-Jouyaux, C., Clavel J., Jougla, E. and Hémon, D. (2006) Excess mortality related to the August 2003 heat wave in France. Int. Arch. Occup. Environ. Health, 80, 16–24. https://doi.org/10.1007/s00420-006-0089-4 + +* Gleason, K.L., J.H. Lawrimore, D.H. Levinson, T.R. Karl, and D.J. Karoly (2008). A Revised U.S. Climate Extremes Index. J. Climate, 21, 2124-2137 https://doi.org/10.1175/2007JCLI1883.1 + +* Meehl, G. A., and Coauthors (2000). An introduction to trends in extreme weather and climate events: Observations, socio-economic impacts, terrestrial ecological impacts, and model projections. Bull. Amer. Meteor. Soc., 81, 413–416. doi: 10.1175/1520-0477(2000)081<0413:AITTIE>2.3.CO;2 + +* Whitman, S., G. Good, E. R. Donoghue, N. Benbow, W. Y. Shou and S. X. Mou (1997). Mortality in Chicago attributed to the July 1995 heat wave. Amer. J. Public Health, 87, 1515–1518. https://doi.org/10.2105/AJPH.87.9.1515 + +* Zhang, Y., M. Nitschke, and P. Bi (2013).
Risk factors for direct heat-related hospitalization during the 2009 Adelaide heat-wave: A case crossover study. Sci. Total Environ., 442, 1–5. https://doi.org/10.1016/j.scitotenv.2012.10.042 + +* Zhang, X., Alexander, L., Hegerl, G. C., Jones, P., Tank, A. K., Peterson, T. C., Trewin, B. and Zwiers, F. W. (2011). Indices for monitoring changes in extremes based on daily temperature and precipitation data. WIREs Clim Change, 2: 851-870. https://doi.org/10.1002/wcc.147 + + + +Example plots +------------- + +.. _fig_extremeindex1: +.. figure:: /recipes/figures/combined_climate_extreme_index/t90p_IPSL-CM5A-LR_rcp85_2020_2040.png + :align: center + :width: 14cm + + + diff --git a/doc/sphinx/source/recipes/recipe_combined_indices.rst b/doc/sphinx/source/recipes/recipe_combined_indices.rst new file mode 100644 index 0000000000..342356c3a2 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_combined_indices.rst @@ -0,0 +1,76 @@ +.. _recipes_combined_indices: + +Indices based on area averages +==================================================== + +Overview +-------- + +The goal of this diagnostic is to compute indices based on area averages. + +In recipe_combined_indices.yml, after defining the period (historical or +future projection), the variable is selected. The predefined indices are: + +* Nino 3 +* Nino 3.4 +* Nino 4 +* North Atlantic Oscillation (NAO) +* Southern Oscillation Index (SOI) + +Available recipes and diagnostics +----------------------------------- + +Recipes are stored in recipes/ + +* recipe_combined_indices.yml + +Diagnostics are stored in diag_scripts/magic_bsc/ + +* combined_indices.r: calculates the area-weighted means and multi-model means, with or without weights + + + +User settings +------------- + +User setting files are stored in recipes/ + +#. recipe_combined_indices.yml + + *Required settings for script* + + * region: one of the following strings: Nino3, Nino3.4, Nino4, NAO, SOI + * running_mean: an integer specifying the length of the window (in months) to be used for computing the running mean. + * moninf: an integer specifying the first month of the seasonal mean to be computed (from 1 to 12, corresponding to January to December respectively). + * monsup: an integer specifying the last month to be computed (from 1 to 12, corresponding to January to December respectively). + * standardized: ‘true’ or ‘false’ to specify whether to compute the standardization of the variable. + + +Variables +--------- + +* psl, tasmax, tasmin, pr or sfcWind (atmos, monthly, longitude, latitude, time) +* tos (ocean, monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + +References +---------- + +* Trenberth, Kevin & National Center for Atmospheric Research Staff (Eds). Last modified 11 Jan 2019. "The Climate Data Guide: Nino SST Indices (Nino 1+2, 3, 3.4, 4; ONI and TNI)." Retrieved from https://climatedataguide.ucar.edu/climate-data/nino-sst-indices-nino-12-3-34-4-oni-and-tni. + + +Example plots +------------- + +.. _fig_combinedindices1: +..
figure:: /recipes/figures/Index_NAO.png + :align: center + :width: 14cm + + + diff --git a/doc/sphinx/source/recipes/recipe_consecdrydays.rst b/doc/sphinx/source/recipes/recipe_consecdrydays.rst new file mode 100644 index 0000000000..b5d04b0ab4 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_consecdrydays.rst @@ -0,0 +1,37 @@ +Consecutive dry days +==================== + +Overview +-------- +Meteorological drought can in its simplest form be described by a lack of precipitation. First, a wet day threshold is set, which can be either a limit related to measurement accuracy, or more directly process-related to an amount that would break the drought. The diagnostic calculates the longest period of consecutive dry days, which is an indicator of the worst drought in the time series. Further, the diagnostic calculates the frequency of dry periods longer than a user-defined number of days. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_consecdrydays.yml + +Diagnostics are stored in diag_scripts/droughtindex/ + + * diag_cdd.py: calculates the longest period of consecutive dry days, and + the frequency of dry day periods longer than a user-defined length + + +User settings in recipe +----------------------- + +#. Script diag_cdd.py + + *Required settings (script)* + + * plim: limit for a day to be considered dry [mm/day] + + * frlim: the shortest number of consecutive dry days for entering the statistics on the frequency of dry periods. + + +Variables +--------- + +* pr (atmos, daily mean, time latitude longitude) diff --git a/doc/sphinx/source/recipes/recipe_crem.rst b/doc/sphinx/source/recipes/recipe_crem.rst new file mode 100644 index 0000000000..28838502c0 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_crem.rst @@ -0,0 +1,109 @@ +Cloud Regime Error Metric (CREM) +================================ + +Overview +-------- + +The radiative feedback from clouds remains the largest source of uncertainty +in determining the climate sensitivity. Traditionally, cloud has been +evaluated in terms of its impact on the mean top-of-atmosphere fluxes. +However, it is quite possible to achieve good performance on these criteria +through compensating errors, with boundary layer clouds being too reflective +but having insufficient horizontal coverage being a common example (e.g., +Nam et al., 2012). Williams and Webb (2009) (WW09) propose a Cloud Regime +Error Metric (CREM), which critically tests the ability of a model to +simulate both the relative frequency of occurrence and the radiative +properties correctly for a set of cloud regimes determined by the daily +mean cloud top pressure, cloud albedo and fractional coverage at each +grid-box. WW09 describe in detail how to calculate their metrics and we +have included the CREMpd metric from their paper in ESMValTool, with clear +references in the lodged code to tables in their paper. This has been +applied to those CMIP5 models that have submitted the required diagnostics +for their AMIP simulation (see Figure 8 below). As documented by WW09, a +perfect score with respect to ISCCP would be zero. WW09 also compared +MODIS/ERBE to ISCCP in order to provide an estimate of observational +uncertainty.
This was found to be 0.96 and this is marked on Figure 8, +hence a model with a CREM similar to this value could be considered to have +an error comparable with observational uncertainty, although it should be +noted that this does not necessarily mean that the model lies within the +observations for each regime. A limitation of the metric is that it requires +a model to be good enough to simulate each regime. If a model is so poor +that the simulated frequency of occurrence of a particular regime is zero, +then a NaN will be returned from the code and no bar will be plotted on the +figure for that model. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_williams09climdyn_CREM.yml + +Diagnostics are stored in diag_scripts/crem/ + +* ww09_esmvaltool.py + + + +User settings +------------- + +None. + + +Variables +--------- + +* albisccp (atmos, daily mean, longitude latitude time) +* cltisccp (atmos, daily mean, longitude latitude time) +* pctisccp (atmos, daily mean, longitude latitude time) +* rlut (atmos, daily mean, longitude latitude time) +* rlutcs (atmos, daily mean, longitude latitude time) +* rsut (atmos, daily mean, longitude latitude time) +* rsutcs (atmos, daily mean, longitude latitude time) +* sic/siconc (seaice, daily mean, longitude latitude time) +* snc (atmos, daily mean, longitude latitude time) + +If snc is not available then snw can be used instead. For AMIP simulations, +sic/siconc is often not submitted as it is a boundary condition and effectively +the same for every model. In this case the same daily sic data set can be +used for each model. + +**Note: in case of using sic/siconc data from a different model (AMIP), it has to +be checked by the user that the calendar definitions of all data sets are +compatible, in particular whether leap days are included or not.** + + + +Observations and reformat scripts +--------------------------------- + +All observational data have been pre-processed and included within the +routine. These are ISCCP, ISCCP-FD, MODIS, ERBE. No additional observational +data are required at runtime. + + + +References +---------- + +* Nam, C., Bony, S., Dufresne, J.-L., and Chepfer, H.: The 'too few, too bright' + tropical low-cloud problem in CMIP5 models, Geophys. Res. Lett., 39, L21801, + doi: 10.1029/2012GL053421, 2012. +* Williams, K.D. and Webb, M.J.: A quantitative performance assessment of + cloud regimes in climate models. Clim. Dyn. 33, 141-157, doi: + 10.1007/s00382-008-0443-1, 2009. + + +Example plots +------------- + +.. figure:: /recipes/figures/crem/crem_error_metric.png + :width: 10cm + :alt: xxxxx + + Cloud Regime Error Metrics (CREMpd) from Williams and Webb (2009) applied + to those CMIP5 AMIP simulations with the required data in the archive. A + perfect score with respect to ISCCP is zero; the dashed red line is an + indication of observational uncertainty.
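+
+To make the structure of such a regime-based comparison concrete, a heavily
+simplified sketch is shown below. This is *not* the WW09 CREMpd formula
+implemented in ww09_esmvaltool.py; the function and the equal weighting of
+frequency and property errors are purely illustrative assumptions::
+
+    import numpy as np
+
+    def regime_error(freq_mod, freq_obs, prop_mod, prop_obs):
+        """Combine regime frequency and property errors (conceptual sketch)."""
+        # freq_*: relative frequency of occurrence per regime (sums to 1)
+        # prop_*: per-regime radiative properties, e.g. cloud albedo,
+        # cloud top pressure and fractional coverage
+        freq_err = np.mean((freq_mod - freq_obs) ** 2)
+        prop_err = np.mean((prop_mod - prop_obs) ** 2)
+        return np.sqrt(freq_err + prop_err)  # zero = perfect agreement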
diff --git a/doc/sphinx/source/recipes/recipe_cvdp.rst b/doc/sphinx/source/recipes/recipe_cvdp.rst new file mode 100644 index 0000000000..7e84cd8212 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_cvdp.rst @@ -0,0 +1,53 @@ +The Climate Variability Diagnostics Package (CVDP) +================================================== + +Overview +-------- +The Climate Variability Diagnostics Package (CVDP) developed by NCAR's Climate Analysis Section is an analysis tool that documents the major modes of climate variability in models and observations, including ENSO, the Pacific Decadal Oscillation, the Atlantic Multi-decadal Oscillation, the Northern and Southern Annular Modes, the North Atlantic Oscillation, and the Pacific North and South American teleconnection patterns. For details please refer to [1] and [2]. + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_cvdp.yml + +Diagnostics are stored in diag_scripts/cvdp/ + + * cvdp_wrapper.py + +User settings in recipe +----------------------- + +Currently, the recipe must be used with a single dataset entry. + +Variables +--------- + +* ts (atmos, monthly mean, longitude latitude time) +* tas (atmos, monthly mean, longitude latitude time) +* pr (atmos, monthly mean, longitude latitude time) +* psl (atmos, monthly mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +*Note: (1) obs4mips data can be used directly without any preprocessing; +(2) see headers of reformat scripts for non-obs4mips data for download +instructions.* + + +References +---------- +[1] http://www.cesm.ucar.edu/working_groups/CVC/cvdp/ + +[2] https://github.com/NCAR/CVDP-ncl + +Example plots +------------- + +.. figure:: /recipes/figures/cvdp/nam.prreg.ann.png + :align: center + + Atmospheric Modes of Variability; pr (annual) diff --git a/doc/sphinx/source/recipes/recipe_diurnal_temperature_index.rst b/doc/sphinx/source/recipes/recipe_diurnal_temperature_index.rst new file mode 100644 index 0000000000..b04cf039fd --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_diurnal_temperature_index.rst @@ -0,0 +1,74 @@ +.. _recipe_diurnal_temperature_index: + +Diurnal temperature variation indicator: Difference between Tmax and Tmin for a specific day +============================================================================================ + +Overview +-------- + +The goal of this diagnostic is to compute a vulnerability indicator for the diurnal temperature range (DTR); the maximum variation in temperature within a period of 24 hours at a given location. This indicator was first proposed by the energy sector to identify locations which may experience increased diurnal temperature variation in the future, which would put additional stress on the operational management of district heating systems. This indicator was defined as the DTR exceeding 5 degrees Celsius at a given location and day of the year (Deandreis et al., N.D.). Projections of this indicator currently present high uncertainties, associated with both Tmax and Tmin in future climate projections. + +As well as being of use to the energy sector, the global‐average DTR has been evaluated using both observations and climate model simulations (Braganza et.
al., 2004) and changes in the mean and variability of the DTR have been shown to have a wide range of impacts on society, such as on the transmission of diseases (Lambrechts et al., 2011; Paaijmans et al., 2010) and energy consumption (Deandreis et al., N.D.). + +The recipe recipe_diurnal_temperature_index.yml first computes a mean DTR for a reference period using historical simulations and then the number of days when the DTR from the future climate projections exceeds that of the reference period by 5 degrees or more. The user can define both the reference and projection periods, and the region to be considered. The output produced by this recipe consists of a four-panel plot showing the maps of the projected mean DTR indicator for each season and a netcdf file containing the corresponding data. + + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_diurnal_temperature_index.yml + +Diagnostics are stored in diag_scripts/magic_bsc/ + +* diurnal_temp_index.r: calculates the diurnal temperature vulnerability index. + + +User settings +------------- + +User setting files are stored in recipes/ + +#. recipe_diurnal_temperature_index.yml + + *Required settings for script* + + * None + +Variables +--------- + +* tasmin and tasmax (atmos, daily, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + +References +---------- + +* Amiri, S. (2013). Economic and Environmental Benefits of CHP-based District Heating Systems in Sweden. Retrieved from http://www.sgc.se/ckfinder/userfiles/files/sokmotor/LiU67.pdf + +* Braganza, K., Karoly, D. J., & Arblaster, J. M. (2004). Diurnal temperature range as an index of global climate change during the twentieth century. Geophysical Research Letters, 31(13), n/a – n/a. https://doi.org/10.1029/2004GL019998 + +* Déandreis C. (IPSL), Braconnot P. (IPSL), Planton S. (CNRMGAME). Study performed for the DALKIA company. http://secif.ipsl.fr/images/SECIF/documents/Communication/fiche_invulnerable/RC_indicateur_EN.pdf + +* Lambrechts, L., Paaijmans, K. P., Fansiri, T., Carrington, L. B., Kramer, L. D., Thomas, M. B., & Scott, T. W. (2011). Impact of daily temperature fluctuations on dengue virus transmission by Aedes aegypti. Proceedings of the National Academy of Sciences of the United States of America, 108(18), 7460–7465. https://doi.org/10.1073/pnas.1101377108 + +* Paaijmans, K. P., Blanford, S., Bell, A. S., Blanford, J. I., Read, A. F., & Thomas, M. B. (2010). Influence of climate on malaria transmission depends on daily temperature variation. Proceedings of the National Academy of Sciences of the United States of America, 107(34), 15135–15139. https://doi.org/10.1073/pnas.1006422107 + +* Kalnay, E., & Cai, M. (2003). Impact of urbanization and land-use change on climate. Nature, 423(6939), 528–531. https://doi.org/10.1038/nature01675 + +* Thyholt, M., & Hestnes, A. G. (2008). Heat supply to low-energy buildings in district heating areas: Analyses of CO2 emissions and electricity supply security. Energy and Buildings, 40(2), 131–139. https://doi.org/10.1016/J.ENBUILD.2007.01.016 + +Example plots +------------- + +.. _fig_diurnal: +..
figure:: /recipes/figures/diurnal_temp_index/rcp85_diurnal.png + :align: center + :width: 14cm diff --git a/doc/sphinx/source/recipes/recipe_ensclus.rst b/doc/sphinx/source/recipes/recipe_ensclus.rst new file mode 100644 index 0000000000..7b0f05ff1f --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_ensclus.rst @@ -0,0 +1,82 @@ +EnsClus - Ensemble Clustering - a cluster analysis tool for climate model simulations +===================================================================================== + + +Overview +-------- +EnsClus is a cluster analysis tool in Python, based on the k-means algorithm, for ensembles of climate model simulations. + +Multi-model studies make it possible to investigate climate processes beyond the limitations of individual models by means of inter-comparison or averages of several members of an ensemble. With large ensembles, it is often an advantage to be able to group members according to similar characteristics and to select the most representative member for each cluster. + +The user chooses which feature of the data is used to group the ensemble members by clustering: time mean, maximum, a certain percentile (e.g., 75% as in the examples below), standard deviation and trend over the time period. For each ensemble member this value is computed at each grid point, obtaining N lat-lon maps, where N is the number of ensemble members. The anomaly is computed by subtracting the ensemble mean of these maps from each of the individual maps. The anomaly is therefore computed with respect to the ensemble members (and not with respect to the time) and the Empirical Orthogonal Function (EOF) analysis is applied to these anomaly maps. + +Regarding the EOF analysis, the user can choose either how many Principal Components (PCs) to retain or the percentage of explained variance to keep. After reducing dimensionality via EOF analysis, k-means analysis is applied using the desired subset of PCs. + +The major final outputs are the classification into clusters, i.e. which member belongs to which cluster (in k-means analysis the number k of clusters needs to be defined prior to the analysis) and the most representative member for each cluster, which is the closest member to the cluster centroid. + +Other outputs refer to the statistics of clustering: in the PC space, the minimum and the maximum distance between a member in a cluster and the cluster centroid (i.e. the closest and the furthest member), and the intra-cluster standard deviation for each cluster (i.e. how compact the cluster is). + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_ensclus.yml + +Diagnostics are stored in diag_scripts/ensclus/ + +* ensclus.py + +and subroutines + +* ens_anom.py +* ens_eof_kmeans.py +* ens_plots.py +* eof_tool.py +* read_netcdf.py +* sel_season_area.py + + +User settings +------------- + +*Required settings for script* + +* season: season over which to perform seasonal averaging (DJF, DJFM, NDJFM, JJA) +* area: region of interest (EAT=Euro-Atlantic, PNA=Pacific North American, NH=Northern Hemisphere, EU=Europe) +* extreme: extreme to consider: XXth_percentile (XX can be set arbitrarily, e.g.
75th_percentile), mean (mean value over the period), maximum (maximum value over the period), std (standard deviation), trend (linear trend over the period) +* numclus: number of clusters to be computed +* perc: percentage of variance to be explained by PCs (select either this or numpcs, default=80) +* numpcs: number of PCs to retain (has priority over perc unless it is set to 0 (default)) + +*Optional settings for script* + +* max_plot_panels: maximum number of panels (datasets) in a plot. When exceeded, multiple plots are created. Default: 72 + + +Variables +--------- + +* chosen by user (e.g., precipitation as in the example) + + +Observations and reformat scripts +--------------------------------- + +None. + + +References +---------- + +* Straus, D. M., S. Corti, and F. Molteni: Circulation regimes: Chaotic variability vs. SST forced predictability. J. Climate, 20, 2251–2272, 2007. https://doi.org/10.1175/JCLI4070.1 + + +Example plots +------------- + +.. figure:: /recipes/figures/ensclus/ensclus.png + :width: 10cm + +Clustering based on historical JJA precipitation rate (mm/day), 75th percentile, CMIP5 models, 3 clusters, 80% variance explained by PCs. diff --git a/doc/sphinx/source/recipes/recipe_extreme_events.rst b/doc/sphinx/source/recipes/recipe_extreme_events.rst new file mode 100644 index 0000000000..5f80794af1 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_extreme_events.rst @@ -0,0 +1,135 @@ +Extreme Events Indices - Computation of ETCCDI extreme indices and plotting +===================================================================================== + + +Overview +-------- + +This diagnostic uses the standard climdex.pcic.ncdf R library to +compute the 27 climate change indices specified by +the joint CCl/CLIVAR/JCOMM Expert Team (ET) on Climate Change Detection and Indices http://etccdi.pacificclimate.org/. +The needed input fields are daily average precipitation flux and minimum, maximum and average daily surface temperatures. +The recipe reproduces panels of Figure 9.37 of the IPCC AR5 report, producing both a Gleckler plot, +with relative error metrics for the CMIP5 temperature and precipitation extreme indices, +and timeseries plots comparing the ensemble spread with observations. +For plotting, 1 to 4 observational reference datasets are supported. If no observational reference datasets are given, the plotting routines do not work; however, index generation without plotting is still possible. +All datasets are regridded to a common grid and considered only over land. + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_extreme_events.yml + +Diagnostics are stored in diag_scripts/extreme_events/ + +* ExtremeEvents.r + +and subroutines + +* common_climdex_preprocessing_for_plots.r +* make_Glecker_plot2.r +* make_timeseries_plot.r +* cfg_climdex.r +* cfg_extreme.r + +User settings +------------- + +*Required settings for script* + +* reference_datasets: list containing the reference datasets to compare with +* timeseries_idx: list of indices to compute for timeseries plot. + The syntax is "XXXETCCDI_TT", where "TT" can be either "yr" or "mon" + (yearly or monthly indices are computed) and "XXX" can be one of the following: + "altcdd", "altcsdi", "altcwd", "altwsdi", "cdd", "csdi", "cwd", + "dtr", "fd", "gsl", "id", "prcptot", "r10mm", "r1mm", "r20mm", + "r95p", "r99p", "rx1day", "rx5day", "sdii", "su", "tn10p", + "tn90p", "tnn", "tnx", "tr", "tx10p", "tx90p", "txn", "txx", "wsdi".
The option "mon" for "TT" can only be used in combination with one of: + "txx", "tnx", "txn", "tnn", "tn10p", "tx10p", "tn90p", "tx90p", "dtr", "rx1day", "rx5day". +* gleckler_idx: list of indices to compute for Gleckler plot. Same syntax as above. + The diagnostic computes all unique indices specified in either ``gleckler_idx`` or ``timeseries_idx``. + If at least one "mon" index is selected, the indices are computed but no plots are produced. +* base_range: a list of two years to specify the range to be used as "base range" for climdex + (the period in which for example reference percentiles are computed) + +*Optional settings for script* + +* regrid_dataset: name of dataset to be used as common target for regridding. If missing, the first reference dataset is used +* mip_name: string containing the name of the model ensemble, used for titles and labels in the plots (default: "CMIP") +* analysis_range: a list of two years to specify the range to be used for the analysis in the plots. + The input data will need to cover both ``analysis_range`` and ``base_range``. If missing, the full period covered by the + input datasets will be used. +* ts_plt: (logical) whether to produce the timeseries plots (default: true) +* glc_plt: (logical) whether to produce the Gleckler plot (default: true) +* climdex_parallel: number of parallel threads to be used for climdex calculation (default: 4). Also the logical ``false`` can be passed to switch off parallel computation. +* normalize: (logical) whether to detrend and normalize with the standard deviation for the datasets for use in the timeseries plot. When this option is used the data for the following indices are detrended and normalized in the timeseries plots: "altcdd", "altcsdi", "altcwd", "altwsdi", "cdd", "cwd", "dtr", "fd", "gsl", "id", "prcptot", "r10mm", "r1mm", "r20mm", "r95p", "r99p", "rx1day", "rx5day", "sdii", "su", "tnn", "tnx", "tr", "txn", "txx" (default: false) + +Additional optional settings controlling the plots: + +* Timeseries plots: + + * ts_png_width: width for png figures (default: 640) + * ts_png_height: height for png figures (default: 480) + * ts_png_units: units for figure size (default: "px") + * ts_png_pointsize: fontsize (default: 12) + * ts_png_bg: background color (default: "white") + * ts_col_list: list of colors for lines (default: ["dodgerblue2", "darkgreen", "firebrick2", "darkorchid", "aquamarine3"]) + * ts_lty_list: list of linetypes (default: [1, 4, 2, 3, 5]) + * ts_lwd_list: list of linewidths (default: [2, 2, 2, 2, 2]) + +* Gleckler plot: + + * gl_png_res: height for png figures (default: 480). + The width of the figure is computed automatically.
* gl_png_units: units for figure size (default: "px") + * gl_png_pointsize: fontsize (default: 12) + * gl_png_bg: background color (default: "white") + * gl_mar_par: page margins vector (default: [10, 4, 3, 14]) + * gl_rmsespacer: spacing of RMSE column (default: 0.01) + * gl_scaling_factor: scaling factor for colorscale height (default: 0.9) + * gl_text_scaling_factor: scaling factor for text size (default: 1.0) + * gl_xscale_spacer_rmse: horizontal position of coloured colorbar (default: 0.05) + * gl_xscale_spacer_rmsestd: horizontal position of gray colorbar (default: 0.05) + * gl_symb_scaling_factor: scaling factor for white "symbol" square explaining the partition (default: 1.0) + * gl_symb_xshift: horizontal position of the symbol box (default: 0.2) + * gl_symb_yshift: vertical position of the symbol box (default: 0.275) + * gl_text_symb_scaling_factor: scaling factor for text to be used for symbol box (default: 0.5) + +Variables +--------- + +* tas (atmos, daily mean, longitude latitude time) +* tasmin (atmos, daily minimum, longitude latitude time) +* tasmax (atmos, daily maximum, longitude latitude time) +* pr (atmos, daily mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +None. + + +References +---------- + +* Zhang, X., Alexander, L., Hegerl, G. C., Jones, P., Klein Tank, A., Peterson, T. C., Trewin, B., Zwiers, F. W., Indices for monitoring changes in extremes based on daily temperature and precipitation data, WIREs Clim. Change, doi:10.1002/wcc.147, 2011 + +* Sillmann, J., V. V. Kharin, X. Zhang, and F. W. Zwiers, Climate extreme indices in the CMIP5 multi-model ensemble. Part 1: Model evaluation in the present climate. J. Geophys. Res., doi:10.1029/2012JD018390, 2013 + + +Example plots +------------- + +.. figure:: /recipes/figures/extreme_events/gleckler.png + :width: 10cm + +Portrait plot of relative error metrics for the CMIP5 temperature and precipitation extreme indices. Reproduces Fig. 9.37 of the IPCC AR5 report, Chapter 9. + +.. figure:: /recipes/figures/extreme_events/cdd_timeseries.png + :width: 10cm + +Timeseries of Consecutive Dry Days index for CMIP5 models. diff --git a/doc/sphinx/source/recipes/recipe_flato13ipcc.rst b/doc/sphinx/source/recipes/recipe_flato13ipcc.rst new file mode 100644 index 0000000000..80f1d19f11 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_flato13ipcc.rst @@ -0,0 +1,4 @@ +IPCC AR5 Chapter 9 +================== + +Description will be ported from v1 diff --git a/doc/sphinx/source/recipes/recipe_heatwaves_coldwaves.rst b/doc/sphinx/source/recipes/recipe_heatwaves_coldwaves.rst new file mode 100644 index 0000000000..91b372a160 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_heatwaves_coldwaves.rst @@ -0,0 +1,71 @@ +.. _recipes_heatwaves_coldwaves: + +Heat wave and cold wave duration +==================================================== + +Overview +-------- + +The goal of this diagnostic is to estimate the relative change in heat/cold wave characteristics in future climates compared to a reference period using daily maximum or minimum temperatures. + +The user can select whether to compute the frequency of exceedances or non-exceedances, which correspond to extreme high or extreme low temperature events, respectively. The user can also select the minimum duration for an event to be classified as a heat/cold wave and the season of interest.
+ +The diagnostic calculates the number of days in which the temperature exceeds or does not exceed the necessary threshold for a consecutive number of days in future climate projections. The result is an annual time series of the total number of heat/cold wave days for the selected season at each grid point. The final output is the average number of heat/cold wave days for the selected season in the future climate projections. + +Available recipes and diagnostics +----------------------------------- + +Recipes are stored in recipes/ + +* recipe_heatwaves_coldwaves.yml + +Diagnostics are stored in diag_scripts/magic_bsc/ + +* extreme_spells.r: calculates the heatwave or coldwave duration. + + +User settings +------------- + +User setting files are stored in recipes/ + +#. recipe_heatwaves_coldwaves.yml + + *Required settings for script* + + * quantile: quantile defining the exceedance/non-exceedance threshold + * min_duration: minimum duration in days of a heatwave/coldwave event + * operator: either '>' for exceedances or '<' for non-exceedances + * season: 'summer' or 'winter' + +Variables +--------- + +* tasmax or tasmin (atmos, daily, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + +References +---------- + +* Cardoso, S., Marta-Almeida, M., Carvalho, A.C., & Rocha, A. (2017). Heat wave and cold spell changes in Iberia for a future climate scenario. International Journal of Climatology, 37(15), 5192-5205. https://doi.org/10.1002/joc.5158 + +* Ouzeau, G., Soubeyroux, J.-M., Schneider, M., Vautard, R., & Planton, S. (2016). Heat waves analysis over France in present and future climate: Application of a new method on the EURO-CORDEX ensemble. Climate Services, 4, 1-12. https://doi.org/10.1016/J.CLISER.2016.09.002 + +* Wang, Y., Shi, L., Zanobetti, A., & Schwartz, J. D. (2016). Estimating and projecting the effect of cold waves on mortality in 209 US cities. Environment International, 94, 141-149. https://doi.org/10.1016/j.envint.2016.05.008 + +* Zhang, X., Hegerl, G., Zwiers, F. W., & Kenyon, J. (2005). Avoiding inhomogeneity in percentile-based indices of temperature extremes. Journal of Climate, 18(11), 1641-1651. https://doi.org/10.1175/JCLI3366.1 + + +Example plots +------------- + +.. _fig_heatwaves: +.. figure:: /recipes/figures/heatwaves/tasmax_extreme_spell_durationsummer_IPSL-CM5A-LR_rcp85_2020_2040.png + :align: center + :width: 14cm + diff --git a/doc/sphinx/source/recipes/recipe_hyint.rst b/doc/sphinx/source/recipes/recipe_hyint.rst new file mode 100644 index 0000000000..70c90392c9 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_hyint.rst @@ -0,0 +1,128 @@ +Hydroclimatic intensity and extremes (HyInt) +============================================== + + +Overview +-------- +The Earth’s hydrological cycle is of key importance both for the climate system and society. For example, the intensity and distribution of precipitation determine the availability or scarcity of fresh water in a certain region, and they are also related to the severity of hazardous events such as flooding or droughts. The simple investigation of average precipitation quantities can clearly hide some of the most relevant aspects of the hydrological cycle and its extremes (e.g., Giorgi et al., 2014). More generally, temperature and precipitation extremes have been the focus of recent climate studies attempting to capture the most relevant component of climate variability and impact on society in a changing climate (e.g., Alexander, 2016).
A particular effort has been dedicated to developing and standardising indices that can be adopted for investigation studies with observations and climate models. This tool was developed to calculate a number of hydroclimatic and climate extremes indices and allow a multi-index evaluation of climate models. The tool first computes a set of 6 indices that allow evaluation of the response of the hydrological cycle to global warming with a joint view of both wet and dry extremes. The indices were selected following Giorgi et al. (2014) and include the simple precipitation intensity index (SDII), the maximum dry spell length (DSL) and wet spell length (WSL), the hydroclimatic intensity index (HY-INT), which is a measure of the overall behaviour of the hydroclimatic cycle (Giorgi et al., 2011), and the precipitation area (PA), i.e. the area over which on any given day precipitation occurs (Giorgi et al., 2014). Second, a selection of the 27 temperature- and precipitation-based indices of extremes from the Expert Team on Climate Change Detection and Indices (ETCCDI) produced by the climdex (https://www.climdex.org) library can also be ingested to produce a multi-index analysis. The tool then performs a subsequent analysis of the selected indices, calculating timeseries and trends over predefined continental areas, normalized to a reference period. Trends are calculated using the R `lm` function and significance testing is performed with a Student's t-test on the hypothesis of non-null coefficients. Trend coefficients are stored together with their statistics, which include standard error, t value and Pr(>|t|). The tool can then produce a variety of types of plots including global and regional maps, maps of comparison between models and a reference dataset, timeseries with their spread, trend lines and summary plots of trend coefficients. + +The hydroclimatic indices calculated by the diagnostic and included in the output are defined as follows: + +* PRY = mean annual precipitation +* INT = mean annual precipitation intensity (intensity during wet days, or simple precipitation intensity index SDII) +* WSL = mean annual wet spell length (number of consecutive days during each wet spell) +* DSL = mean annual dry spell length (number of consecutive days during each dry spell) +* PA = precipitation area (area over which on any given day precipitation occurs) +* R95 = heavy precipitation index (percent of total precipitation above the 95th percentile of the reference distribution) +* HY-INT = hydroclimatic intensity. HY-INT = normalized(INT) x normalized(DSL). + + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_hyint.yml (evaluating the 6 hydroclimatic indices) + +Diagnostics are stored in diag_scripts/hyint/ + +* hyint.R + +and subroutines + +* hyint_diagnostic.R +* hyint_functions.R +* hyint_parameters.R +* hyint_plot_trends.R +* hyint_etccdi_preproc.R +* hyint_metadata.R +* hyint_plot_maps.R +* hyint_preproc.R +* hyint_trends.R + + +User settings +------------- + +*Required settings for script* + + +* norm_years: first and last year of reference normalization period to be used for normalized indices + +* select_indices: indices to be analysed and plotted.
Select one or more fields from the following list (order-sensitive): "pa_norm", "hyint", "int_norm", "r95_norm", "wsl_norm", "dsl_norm", "int", "dsl", "wsl" + +* select_regions: Select regions for timeseries and maps from the following list: GL=Globe, GL60=Global 60S/60N, TR=Tropics (30S/30N), SA=South America, AF=Africa, NA=North America, IN=India, EU=Europe, EA=East-Asia, AU=Australia + +* plot_type: type of figures to be plotted. Select one or more from: 1=lon/lat maps per individual field/exp/multi-year mean, 2=lon/lat maps per individual field exp-ref-diff/multi-year mean, 3=lon/lat maps multi-field/exp-ref-diff/multi-year mean, 11=timeseries over required individual region/exp, 12=timeseries over multiple regions/exp, 13=timeseries with multiple models, 14=summary trend coefficients multiple regions, 15=summary trend coefficients multiple models + +*Optional settings for script (with default setting)* + +#. Data + + * rgrid (false): Define whether model data should be regridded: (a) false to keep original resolution; (b) set desired regridding resolution in cdo format, e.g., "r320x160"; (c) "REF" to use resolution of reference model + +#. Plotting + + * npancol (2): number of columns in timeseries/trends multipanel figures + * npanrow (3): number of rows in timeseries/trends multipanel figures + * autolevels (true): select automated (true) or pre-set (false) range of values in plots + * autolevels_scale (1): factor multiplying automated range for maps and timeseries + * autolevels_scale_t (1.5): factor multiplying automated range for trend coefficients + +#. Maps + + * oplot_grid (false): plot grid points over maps + * boxregion (false): !=0 plot region boxes over global maps with thickness = abs(boxregion); white (>0) or grey (<0). + * removedesert (false): remove (flag as NA) grid points with mean annual pr < 0.5 mm/day (deserts; Giorgi et al., 2014). This affects timeseries and trends calculations too. + +#. Timeseries and trends + + * weight_tseries (true): adopt area weights in timeseries + * trend_years (false): (a) false = apply trend to all years in dataset; (b) [year1, year2] to apply trend calculation and plotting only to a limited time interval + * add_trend (true): add linear trend to plot + * add_trend_sd (false): add dashed lines of stdev range to timeseries + * add_trend_sd_shade (false): add shade of stdev range to timeseries + * add_tseries_lines (true): plot lines connecting timeseries points + * add_zeroline (true): plot a dashed line at y=0 + * trend_years_only (false): limit timeseries plotting to the time interval adopted for trend calculation (excluding the normalization period) + * scale100years (true): plot trends scaled as 1/100 years + * scalepercent (false): plot trends as percent change + + +Variables +--------- + +* pr (atmos, daily mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +None. + + +References +---------- + +* Giorgi et al., 2014, J. Geophys. Res. Atmos., 119, 11,695–11,708, doi:10.1002/2014JD022238 +* Giorgi et al., 2011, J. Climate 24, 5309-5324, doi:10.1175/2011JCLI3979.1 + + +Example plots +------------- + +.. figure:: figures/hyint/hyint_maps.png + :width: 10cm + +Mean hydroclimatic intensity (figure type 1) for the EC-EARTH model historical + rcp8.5 projection over 1976-2099. + +.. figure:: figures/hyint/hyint_timeseries.png + :width: 10cm + +Timeseries for multiple indices and regions (figure type 12) for the ACCESS1-0 model historical + RCP8.5 projection over 1976-2099. + +..
figure:: figures/hyint/hyint_trends.png + :width: 10cm + +Multi-model trend coefficients over selected indices (figure type 14) for rcp85 2006-2099 future projection normalized to the 1976-2005 historical period. diff --git a/doc/sphinx/source/recipes/recipe_landcover.rst b/doc/sphinx/source/recipes/recipe_landcover.rst new file mode 100644 index 0000000000..97528f1b46 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_landcover.rst @@ -0,0 +1,127 @@ +Landcover +========= + + +Overview +-------- + +The diagnostic computes the accumulated and fractional extent of major land cover classes, +namely bare soil, crops, grasses, shrubs and trees. The numbers are compiled for the whole +land surface as well as separated into Tropics, northern Extratropics and southern Extratropics. +The cover fractions are compared to ESA-CCI land cover data. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_landcover.yml + +Diagnostics are stored in diag_scripts/landcover/ + + * landcover.py: bar plots showing the accumulated area and mean fractional coverage for five land + cover classes for all experiments as well as their bias compared to observations. + + +User settings +------------- + +script landcover.py + + *Required settings for script* + + * reference_dataset: land cover extent dataset for comparison. The script was developed using + ESACCI-LANDCOVER observations. + + *Optional settings for script* + + * comparison: [variable, model] Choose whether one plot per land cover class is generated comparing + the different experiments (default) or one plot per model comparing the different + land cover classes. + * colorscheme: Plot style used for the bar plots. A list of available styles is found at + https://matplotlib.org/gallery/style_sheets/style_sheets_reference.html. Seaborn is used as default. + + +Variables +--------- + +* baresoilFrac (land, monthly mean, time latitude longitude) +* grassFrac (land, monthly mean, time latitude longitude) +* treeFrac (land, monthly mean, time latitude longitude) +* shrubFrac (land, monthly mean, time latitude longitude) +* cropFrac (land, monthly mean, time latitude longitude) + + +Observations and reformat scripts +--------------------------------- + +ESA-CCI land cover data (Defourny et al., 2015) needs to be downloaded manually by the user and converted to netCDF files +containing the grid cell fractions for the five major land cover types. The data and a conversion tool +are available at https://maps.elie.ucl.ac.be/CCI/viewer/ upon registration. After obtaining the data and the user +tool, the remapping to 0.5 degree can be done with:: + + ./bin/aggregate-map.sh + -PgridName=GEOGRAPHIC_LAT_LON + -PnumRows=360 + -PoutputLCCSClasses=true + -PnumMajorityClasses=0 + ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7b.nc + +Next, the data needs to be aggregated into the five major classes (PFT) similar to the study of Georgievski & Hagemann (2018) +and converted from grid cell fraction into percentage.
+
+
+Variables
+---------
+
+* baresoilFrac (land, monthly mean, time latitude longitude)
+* grassFrac (land, monthly mean, time latitude longitude)
+* treeFrac (land, monthly mean, time latitude longitude)
+* shrubFrac (land, monthly mean, time latitude longitude)
+* cropFrac (land, monthly mean, time latitude longitude)
+
+
+Observations and reformat scripts
+---------------------------------
+
+ESA-CCI land cover data (Defourny et al., 2015) needs to be downloaded manually by the user and converted to netCDF files
+containing the grid cell fractions for the five major land cover types. The data and a conversion tool
+are available at https://maps.elie.ucl.ac.be/CCI/viewer/ upon registration. After obtaining the data and the user
+tool, the remapping to 0.5 degree can be done with::
+
+  ./bin/aggregate-map.sh
+  -PgridName=GEOGRAPHIC_LAT_LON
+  -PnumRows=360
+  -PoutputLCCSClasses=true
+  -PnumMajorityClasses=0
+  ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7b.nc
+
+Next, the data needs to be aggregated into the five major classes (PFT) similarly to the study of Georgievski & Hagemann (2018)
+and converted from grid cell fraction into percentage.
+
++--------------+-------------------------------------------------------------------------------------------------------------+
+| PFT          | ESA-CCI Landcover Classes                                                                                   |
++==============+=============================================================================================================+
+| baresoilFrac | Bare_Soil                                                                                                   |
++--------------+-------------------------------------------------------------------------------------------------------------+
+| cropFrac     | Managed_Grass                                                                                               |
++--------------+-------------------------------------------------------------------------------------------------------------+
+| grassFrac    | Natural_Grass                                                                                               |
++--------------+-------------------------------------------------------------------------------------------------------------+
+| shrubFrac    | Shrub_Broadleaf_Deciduous + Shrub_Broadleaf_Evergreen + Shrub_Needleleaf_Evergreen                          |
++--------------+-------------------------------------------------------------------------------------------------------------+
+| treeFrac     | Tree_Broadleaf_Deciduous + Tree_Broadleaf_Evergreen + Tree_Needleleaf_Deciduous + Tree_Needleleaf_Evergreen |
++--------------+-------------------------------------------------------------------------------------------------------------+
+
+Finally, it might be necessary to adapt the grid structure to that of the experiment files, e.g. converting the -180 --> 180 degree grid
+to 0 --> 360 degree and inverting the order of latitudes. Note that all experiments will be regridded onto the grid of the land
+cover observations; it is therefore recommended to convert to the coarsest resolution that is sufficient for the planned study.
+For the script development, ESA-CCI data at 0.5 degree resolution were used, with land cover data averaged over the
+2008-2012 period.
+
+
+References
+----------
+
+* Defourny et al. (2015): ESA Land Cover Climate Change Initiative (ESA LC_cci) data:
+  ESACCI-LC-L4-LCCS-Map-300m-P5Y-[2000,2005,2010]-v1.6.1 via Centre for Environmental Data Analysis
+* Georgievski, G. & Hagemann, S.: Characterizing uncertainties in the ESA-CCI land cover map of the epoch 2010 and their impacts on MPI-ESM climate simulations,
+  Theor Appl Climatol (2018). https://doi.org/10.1007/s00704-018-2675-2
+
+
+Example plots
+-------------
+
+.. _fig_landcover_1:
+.. figure:: /recipes/figures/landcover/area_treeFrac.png
+   :align: center
+   :width: 14cm
+
+   Accumulated tree covered area for different regions and experiments.
+
+.. _fig_landcover_2:
+.. figure:: /recipes/figures/landcover/frac_grassFrac.png
+   :align: center
+   :width: 14cm
+
+   Average grass cover fraction for different regions and experiments.
+
+.. _fig_landcover_3:
+.. figure:: /recipes/figures/landcover/bias_CMIP5_MPI-ESM-LR_rcp85_r1i1p1.png
+   :align: center
+   :width: 14cm
+
+   Biases in five major land cover fractions for different regions and one experiment.
+
diff --git a/doc/sphinx/source/recipes/recipe_miles.rst b/doc/sphinx/source/recipes/recipe_miles.rst
new file mode 100644
index 0000000000..c8ddbefd10
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_miles.rst
@@ -0,0 +1,131 @@
+Blocking metrics and indices, teleconnections and weather regimes (MiLES)
+=========================================================================
+
+
+Overview
+--------
+
+Atmospheric blocking is a recurrent mid-latitude weather pattern identified by a large-amplitude, quasi-stationary, long-lasting, high-pressure anomaly that “blocks” the westerly flow, forcing the jet stream to split or meander
+`(Rex, 1950) <https://doi.org/10.1111/j.2153-3490.1950.tb00331.x>`_.
+
+It is typically initiated by the breaking of a Rossby wave in a diffluence region at the exit of the storm track, where it amplifies the underlying stationary ridge `(Tibaldi and Molteni, 1990) <https://doi.org/10.1034/j.1600-0870.1990.t01-2-00003.x>`_.
+Blocking occurs more frequently in the Northern Hemisphere cold season, with larger frequencies observed over the Euro-Atlantic and North Pacific sectors. Its lifetime ranges from a few days up to several weeks `(Davini et al., 2012) <https://doi.org/10.1175/JCLI-D-12-00032.1>`_, sometimes leading to winter cold spells or summer heat waves.
+
+To this end, the MId-Latitude Evaluation System (MiLES) was developed as a stand-alone package (https://github.com/oloapinivad/MiLES) to support the analysis of mid-latitude weather patterns in terms of atmospheric blocking, teleconnections and weather regimes. The package was then implemented as a recipe for ESMValTool.
+
+The tool works on daily 500hPa geopotential height data (with data interpolated on a common 2.5x2.5 grid) and calculates the following diagnostics:
+
+1D Atmospheric Blocking
+***********************
+`Tibaldi and Molteni (1990) <https://doi.org/10.1034/j.1600-0870.1990.t01-2-00003.x>`_ index for the Northern Hemisphere. Computed at a fixed latitude of 60N, with delta of -5,-2.5,0,2.5,5 deg, fiN=80N and fiS=40N. Full timeseries and climatologies are provided in NetCDF4 Zip format.
+
+2D Atmospheric blocking
+***********************
+Following the index by `Davini et al. (2012) <https://doi.org/10.1175/JCLI-D-12-00032.1>`_. It is a 2D version of the `Tibaldi and Molteni (1990) <https://doi.org/10.1034/j.1600-0870.1990.t01-2-00003.x>`_ index for Northern Hemisphere atmospheric blocking, evaluating the meridional gradient reversal at 500hPa. It computes both the Instantaneous Blocking and the Blocking Events frequency, where the latter allows the estimation of the duration of each blocking event. It also includes two blocking intensity indices, i.e. the Meridional Gradient Index and the Blocking Intensity index. In addition, the orientation (i.e. cyclonic or anticyclonic) of the Rossby wave breaking is computed. A supplementary Instantaneous Blocking index with the GHGS2 condition (see `Davini et al., 2012 <https://doi.org/10.1175/JCLI-D-12-00032.1>`_) is also evaluated.
+Full timeseries and climatologies are provided in NetCDF4 Zip format.
+
+Z500 Empirical Orthogonal Functions
+***********************************
+Based on SVD. The first 4 EOFs for the North Atlantic (over the 90W-40E 20N-85N box) and the Northern Hemisphere (20N-85N) or a custom region are computed. The North Atlantic Oscillation, the East Atlantic Pattern, and the Arctic Oscillation can be evaluated.
+Figures showing the linear regression of the PCs on monthly Z500 are provided. PCs and eigenvectors, as well as the explained variances, are provided in NetCDF4 Zip format.
+
+North Atlantic Weather Regimes
+******************************
+Following k-means clustering of 500hPa geopotential height. 4 weather regimes over the North Atlantic (80W-40E 30N-87.5N) are evaluated using anomalies from the daily seasonal cycle. This is done by retaining the first North Atlantic EOFs which explain 80% of the variance, to reduce the phase-space dimensions, and then applying k-means clustering with the Hartigan-Wong algorithm and k=4. Figures report the patterns and frequencies of occurrence of the regimes. NetCDF4 Zip data are saved. Only 4 regimes and DJF are supported so far.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_miles_block.yml
+* recipe_miles_eof.yml
+* recipe_miles_regimes.yml
+
+Diagnostics are stored in diag_scripts/miles/
+
+* miles_block.R
+* miles_eof.R
+* miles_regimes.R
+
+and subroutines
+
+* basis_functions.R
+* block_figures.R
+* eof_figures.R
+* regimes_figures.R
+* block_fast.R
+* eof_fast.R
+* miles_parameters.R
+* regimes_fast.R
+
+`miles_parameters.R` contains additional internal parameters which affect plot sizes, colour tables, etc.
+
+
+User settings
+-------------
+
+#. miles_block.R
+
+   *Required settings for variables*
+
+   * reference_dataset: reference dataset for comparison
+
+   *Required settings for script*
+
+   * seasons: selected season ('DJF', 'MAM', 'JJA', 'SON', 'ALL') or a custom period, e.g. 'Jan_Feb_Mar'
+
+#. miles_eof.R
+
+   *Required settings for variables*
+
+   * reference_dataset: reference dataset for comparison
+
+   *Required settings for script*
+
+   * seasons: selected season ('DJF', 'MAM', 'JJA', 'SON', 'ALL') or a custom period, e.g. 'Jan_Feb_Mar'
+   * teles: select EOFs ('NAO', 'AO', 'PNA') or specify a custom area as "lon1_lon2_lat1_lat2"
+
+#. miles_regimes.R
+
+   *Required settings for variables*
+
+   * reference_dataset: reference dataset
+
+   *Required or optional settings for script*
+
+   * None (the two parameters seasons and nclusters in the recipe should not be changed)
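+
+As an illustration, the script section for the blocking diagnostic might look like this (a minimal sketch: the script path follows the locations given above, and the season value is just an example):
+
+.. code-block:: yaml
+
+   scripts:
+     miles_diag_block:
+       script: miles/miles_block.R
+       seasons: 'DJF'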
+
+
+Variables
+---------
+
+* zg (atmos, daily mean, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+* ERA-Interim
+
+
+References
+----------
+* Rex, D. F. (1950): Blocking Action in the Middle Troposphere and its Effect upon Regional Climate. Tellus, 2, 196-211, doi: https://doi.org/10.1111/j.2153-3490.1950.tb00331.x
+* Davini, P., C. Cagnazzo, S. Gualdi, and A. Navarra (2012): Bidimensional Diagnostics, Variability, and Trends of Northern Hemisphere Blocking. J. Climate, 25, 6496-6509, doi: https://doi.org/10.1175/JCLI-D-12-00032.1
+* Tibaldi, S. and Molteni, F. (1990): On the operational predictability of blocking. Tellus A, 42(3), 343-365, doi: https://doi.org/10.1034/j.1600-0870.1990.t01-2-00003.x
+* Davini, P. (2018, April 30): MiLES - Mid Latitude Evaluation System (Version v0.51). Zenodo, doi: https://doi.org/10.5281/zenodo.1237838
+
+
+Example plots
+-------------
+
+.. figure:: /recipes/figures/miles/miles_block.png
+   :width: 10cm
+
+   Blocking events frequency for the EC-Earth model (1980-1989), compared to ERA-Interim.
+
+.. figure:: /recipes/figures/miles/miles_eof1.png
+   :width: 10cm
+
+   Teleconnection indices as Z500 empirical orthogonal functions for the North Atlantic (the figure shows EOF1).
diff --git a/doc/sphinx/source/recipes/recipe_modes_of_variability.rst b/doc/sphinx/source/recipes/recipe_modes_of_variability.rst
new file mode 100644
index 0000000000..c7d8b76596
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_modes_of_variability.rst
@@ -0,0 +1,91 @@
+.. _recipes_modes_of_variability:
+
+Modes of variability
+====================
+
+Overview
+--------
+
+The goal of this recipe is to compute modes of variability from a reference or observational dataset and from a set of climate projections, and to calculate the root-mean-square error between the mean anomalies obtained for the clusters from the reference and projection data sets.
+This is done through k-means or hierarchical clustering, applied either directly to the spatial data or after computing the EOFs.
+
+The user can specify the number of clusters to be computed.
+
+The recipe's output consists of three netCDF files, for the observed and projected weather regimes and for the RMSE between them.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_modes_of_variability.yml
+
+
+Diagnostics are stored in diag_scripts/magic_bsc/
+
+* WeatherRegime.r - function for computing the EOFs and the k-means and hierarchical clusters.
+
+* weather_regime.r - applies the above weather regime function to the datasets
+
+
+
+User settings
+-------------
+
+User setting files are stored in recipes/
+
+#. recipe_modes_of_variability.yml
+
+   *Required settings for script*
+
+   * plot type: rectangular or polar
+   * ncenters: number of centers to be computed by the clustering algorithm (maximum 4)
+   * cluster_method: kmeans (only psl variable) or hierarchical clustering (for psl or sic variables)
+   * detrend_order: the order of the polynomial detrending to be applied (0, 1 or 2)
+   * EOFs: logical indicating whether the k-means clustering algorithm is applied directly to the spatial data ('false') or to the EOFs ('true')
+   * frequency: select the month (format: JAN, FEB, ...) or season (format: JJA, SON, MAM, DJF) for which the diagnostic is computed (does not work yet for MAM with daily data).
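+
+   For illustration, a script section using these settings might look like this (a minimal sketch: values are examples only, the script path follows the location given above, and key spellings such as plot_type are assumptions based on the list above):
+
+   .. code-block:: yaml
+
+      scripts:
+        weather_regime:
+          script: magic_bsc/weather_regime.r
+          plot_type: rectangular
+          ncenters: 3
+          cluster_method: kmeans
+          detrend_order: 2
+          EOFs: true
+          frequency: JAN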
+
+
+Variables
+---------
+
+* psl (atmos, monthly/daily, longitude, latitude, time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+*None*
+
+References
+----------
+
+* Dawson, A., T. N. Palmer, and S. Corti, 2012: Simulating regime structures in weather and climate prediction models. Geophysical Research Letters, 39 (21), https://doi.org/10.1029/2012GL053284.
+
+* Ferranti, L., S. Corti, and M. Janousek, 2015: Flow-dependent verification of the ECMWF ensemble over the Euro-Atlantic sector. Quarterly Journal of the Royal Meteorological Society, 141 (688), 916-924, https://doi.org/10.1002/qj.2411.
+
+* Grams, C. M., Beerli, R., Pfenninger, S., Staffell, I., & Wernli, H. (2017). Balancing Europe's wind-power output through spatial deployment informed by weather regimes. Nature Climate Change, 7(8), 557, https://doi.org/10.1038/nclimate3338.
+
+* Hannachi, A., D. M. Straus, C. L. E. Franzke, S. Corti, and T. Woollings, 2017: Low Frequency Nonlinearity and Regime Behavior in the Northern Hemisphere Extra-Tropical Atmosphere. Reviews of Geophysics, https://doi.org/10.1002/2015RG000509.
+
+* Michelangeli, P.-A., R. Vautard, and B. Legras, 1995: Weather regimes: Recurrence and quasi stationarity. Journal of the Atmospheric Sciences, 52 (8), 1237-1256, doi: 10.1175/1520-0469(1995)052<1237:WRRAQS>2.0.CO;2.
+
+* Vautard, R., 1990: Multiple weather regimes over the North Atlantic: Analysis of precursors and successors. Monthly Weather Review, 118 (10), 2056-2081, doi: 10.1175/1520-0493(1990)118<2056:MWROTN>2.0.CO;2.
+
+* Yiou, P., K. Goubanova, Z. X. Li, and M. Nogaj, 2008: Weather regime dependence of extreme value statistics for summer temperature and precipitation. Nonlinear Processes in Geophysics, 15 (3), 365-378, https://doi.org/10.5194/npg-15-365-2008.
+
+
+Example plots
+-------------
+
+.. _fig_modesofvar:
+.. figure:: /recipes/figures/modes_of_variability/DJF-psl_observed_regimes.png
+   :align: center
+   :width: 14cm
+
+
diff --git a/doc/sphinx/source/recipes/recipe_multimodel_products.rst b/doc/sphinx/source/recipes/recipe_multimodel_products.rst
new file mode 100644
index 0000000000..20d1219c24
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_multimodel_products.rst
@@ -0,0 +1,78 @@
+.. _recipes_multimodel_products:
+
+Generic multi-model products
+====================================================
+
+Overview
+--------
+
+The goal of this diagnostic is to compute the multi-model ensemble mean for a set of models selected by the user for individual variables and different temporal resolutions (annual, seasonal, monthly).
+
+After selecting the region (defined by the lowermost and uppermost longitudes and latitudes), the mean for the selected reference period is subtracted from the projections in order to obtain the anomalies for the desired period. In addition, the recipe computes the percentage of models agreeing on the sign of this anomaly, thus providing some indication of the robustness of the climate signal.
+
+The output of the recipe consists of a colored map showing the time average of the multi-model mean anomaly and stippling to indicate locations where the percentage of models agreeing on the sign of the anomaly exceeds a threshold selected by the user. Furthermore, a time series of the area-weighted mean anomaly for the projections is plotted. For the plots, the user can select the length of the running window for temporal smoothing, and can choose to display the ensemble mean with a light shading to represent the spread of the ensemble, or to display each individual model.
+
+
+
+Available recipes and diagnostics
+-----------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_multimodel_products.yml
+
+
+Diagnostics are stored in diag_scripts/magic_bsc/
+
+* multimodel_products.r - script for computing multi-model anomalies and their agreement.
+
+
+
+
+User settings
+-------------
+
+User setting files are stored in recipes/
+
+#. recipe_multimodel_products.yml
+
+   *Required settings for script*
+
+   * moninf: integer specifying the first month of the seasonal mean period to be computed
+   * monsup: integer specifying the last month of the seasonal mean period to be computed; if it is null, only the anomaly of the month indicated in moninf will be computed
+   * agreement_threshold: integer between 0 and 100 indicating the threshold in percent for the minimum agreement between models on the sign of the multi-model mean anomaly for the stippling to be plotted
+   * running_mean: integer indicating the length of the window for the running mean to be computed
+   * time_series_plot: either single or maxmin (plot the individual models or the mean with shading between the max and min).
+
+
+Variables
+---------
+
+* any Amon variable (atmos, monthly mean, longitude latitude time)
+
+
+Observations and reformat scripts
+---------------------------------
+
+*None*
+
+References
+----------
+
+* Hagedorn, R., Doblas-Reyes, F. J., & Palmer, T. N. (2005). The rationale behind the success of multi-model ensembles in seasonal forecasting - I. Basic concept. Tellus A, 57, 219–233. https://doi.org/10.3402/tellusa.v57i3.14657
+
+* Weigel, A. P., Liniger, M. A., & Appenzeller, C. (2008). Can multi-model combination really enhance the prediction skill of probabilistic ensemble forecasts? Quarterly Journal of the Royal Meteorological Society, 134(630), 241–260. https://doi.org/10.1002/qj.210
+
+
+
+
+
+Example plots
+-------------
+
+.. _fig_multimodprod:
+.. figure:: /recipes/figures/multimodel_products/tas_JUN_multimodel-anomaly_2006_2099_1961_1990.png
+
+
+
diff --git a/doc/sphinx/source/recipes/recipe_oceans.rst b/doc/sphinx/source/recipes/recipe_oceans.rst
new file mode 100644
index 0000000000..d09074c30e
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_oceans.rst
@@ -0,0 +1,785 @@
+.. _XML_oceans:
+
+Recipes for evaluating models of the ocean
+==========================================
+
+Overview
+........
+
+These recipes are used for evaluating the marine component of models of the
+Earth system. Using these recipes, it should be possible to evaluate both the
+physical models and biogeochemistry models. All these recipes use the
+ocean diagnostics package.
+
+The ocean diagnostics package contains several diagnostics which produce
+figures and statistical information of models of the ocean. The datasets have
+been pre-processed by ESMValTool, based on recipes in the recipes directory.
+Most of the diagnostics produce two or fewer types of figure, and several
+diagnostics are called by multiple recipes.
+
+Each diagnostic script expects a metadata file, automatically generated by
+ESMValTool, and one or more pre-processed datasets. These are passed to the
+diagnostic by ESMValTool in the settings.yml and metadata.yml files.
+
+The ocean diagnostics toolkit cannot figure out how to plot data by itself.
+The current version requires the recipe to produce the correct pre-processed
+data for each diagnostic script, i.e. to produce a time series plot,
+the preprocessor must produce a time-dimensional dataset.
+
+While these tools were built to evaluate the ocean component models, they can
+also be used to produce figures for other domains. However, there are some ocean
+specific elements, such as the z-direction being positive and reversed, and
+some of the map plots have the continents coloured in by default.
+
+As elsewhere, both the model and observational datasets need to be
+compliant with the CMOR standard.
+
+Available recipes
+.................
+
+* recipe_ocean_amoc.yml_
+* recipe_ocean_example.yml_
+* recipe_ocean_scalar_fields.yml_
+* recipe_ocean_bgc.yml_
+* recipe_ocean_quadmap.yml_
+* recipe_ocean_Landschutzer2014.yml_
+
+
+recipe_ocean_amoc.yml
+---------------------
+
+The recipe_ocean_amoc.yml_ is a recipe that produces figures describing the
+Atlantic Meridional Overturning Circulation (AMOC) and the Drake Passage
+current.
+
+The recipe produces time series of the AMOC at 26 North and of the
+Drake Passage current.
+
+.. centered:: |pic_amoc|
+
+.. |pic_amoc| image:: /recipes/figures/ocean/amoc_fig_1.png
+
+
+This figure shows the multi-model comparison of the AMOC from several CMIP5
+historical simulations, with a 6 year moving average (3 years either side of the
+central value). A similar figure is produced for each individual model, and
+for the Drake Passage current.
+
+This recipe also produces a contour transect and a coloured transect plot
+showing the Atlantic stream function for each individual model, and a
+multi-model contour is also produced:
+
+.. centered:: |pic_ocean_sf3| |pic_ocean_sf4|
+
+.. |pic_ocean_sf3| image:: /recipes/figures/ocean/stream_function1.png
+.. |pic_ocean_sf4| image:: /recipes/figures/ocean/stream_function2.png
+
+
+recipe_ocean_example.yml
+------------------------
+
+The recipe_ocean_example.yml_ is an example recipe which shows several examples
+of how to manipulate marine model data using the ocean diagnostics tools.
+
+While several of the diagnostics here have specific uses in evaluating models,
+it is meant to be a catch-all recipe demonstrating many different ways to
+evaluate models.
+
+All example calculations are performed using the ocean temperature in a three
+dimensional field (thetao), or at the surface (tos). This recipe demonstrates
+the use of a range of preprocessors in a marine context, and also shows many
+of the standard model-only diagnostics (no observational component is included).
+
+This recipe includes examples of how to manipulate both 2D and 3D fields to
+produce:
+
+* Time series:
+
+  * Global surface area weighted mean time series
+  * Volume weighted average time series within a specific depth range
+  * Area weighted average time series at a specific depth
+  * Area weighted average time series at a specific depth in a specific region.
+  * Global volume weighted average time series
+  * Regional volume weighted average time series
+
+* Maps:
+
+  * Global surface map (from 2D and 3D initial fields)
+  * Global surface map using re-gridding to a regular grid
+  * Global map using re-gridding to a regular grid at a specific depth level
+  * Regional map using re-gridding to a regular grid at a specific depth level
+
+* Transects:
+
+  * Produce various transect figures showing a re-gridded transect plot, and multi-model comparisons
+
+* Profile:
+
+  * Produce a global area-weighted depth profile figure
+  * Produce a regional area-weighted depth profile figure
+
+All these fields can be expanded using a
+
+recipe_ocean_bgc.yml
+--------------------
+
+The recipe_ocean_bgc.yml_ is an example recipe which shows several simple examples of how to
+manipulate marine biogeochemical model data.
+
+This recipe includes the following fields:
+
+* Global total volume-weighted average time series:
+
+  * temperature, salinity, nitrate, oxygen, silicate (vs WOA data) `*`
+  * chlorophyll, iron, total alkalinity (no observations)
+
+* Surface area-weighted average time series:
+
+  * temperature, salinity, nitrate, oxygen, silicate (vs WOA data) `*`
+  * fgco2 (global total), integrated primary production, chlorophyll,
+    iron, total alkalinity (no observations)
+
+* Scalar fields time series:
+
+  * mfo (including straits such as the Drake Passage)
+
+* Profiles:
+
+  * temperature, salinity, nitrate, oxygen, silicate (vs WOA data) `*`
+  * chlorophyll, iron, total alkalinity (no observations)
+
+* Maps + contours:
+
+  * temperature, salinity, nitrate, oxygen, silicate (vs WOA data) `*`
+  * chlorophyll, iron, total alkalinity (no observations)
+
+* Transects + contours:
+
+  * temperature, salinity, nitrate, oxygen, silicate (vs WOA data) `*`
+  * chlorophyll, iron (no observations)
+
+`*` Note that phosphate is also available as a WOA diagnostic, but it is not
+included here, as HadGEM2-ES does not provide a phosphate field.
+
+This recipe uses the World Ocean Atlas data, which can be downloaded from:
+https://www.nodc.noaa.gov/OC5/woa13/woa13data.html
+(last access 10/25/2018)
+
+Instructions: Select the "All fields data links (1° grid)" netCDF file,
+which contains all fields.
+
+
+.. recipe_OxygenMinimumZones.yml
+.. ------------------------------------------
+.. This recipe will appear in a future version.
+
+.. This recipe produces an analysis of marine oxygen. The diagnostics are based on
+.. figure 1 of the following work:
+.. Cabré, A., Marinov, I., Bernardello, R., and Bianchi, D.: Oxygen minimum zones
+.. in the tropical Pacific across CMIP5 models: mean state differences and climate
+.. change trends, Biogeosciences, 12, 5429-5454,
+.. https://doi.org/10.5194/bg-12-5429-2015, 2015.
+
+
+recipe_ocean_quadmap.yml
+------------------------
+
+The recipe_ocean_quadmap.yml_ is an example recipe showing the
+diagnostic_maps_quad.py_ diagnostic.
+This diagnostic produces an image showing four maps. Each of these four maps
+shows latitude vs longitude, with the cube value used as the colour scale.
+The four plots are:
+
+================= ====================
+model1            model 1 minus model2
+----------------- --------------------
+model2 minus obs  model1 minus obs
+================= ====================
+
+These figures are also known as Model vs Model vs Obs plots.
+
+
+The figure produced by this recipe compares two versions of the HadGEM2 model
+against ATSR sea surface temperature:
+
+.. centered:: |pic_quad_plot|
+
+.. |pic_quad_plot| image:: /recipes/figures/ocean/ocean_quad_plot1.png
+
+This kind of figure can be very useful when developing a model, as it
+allows model developers to quickly see the impact of recent changes
+to the model.
+
+
+recipe_ocean_ice_extent.yml
+---------------------------
+
+The recipe_ocean_ice_extent.yml_ recipe produces several metrics describing
+the behaviour of sea ice in a model, or in multiple models.
+
+This recipe has four preprocessors, covering all combinations of
+
+* Regions: Northern or Southern Hemisphere
+* Seasons: December-January-February or June-July-August
+
+Once the seasonal hemispheric fractional ice cover has been processed,
+the resulting cube is passed 'as is' to the diagnostic_seaice.py_
+diagnostic.
+
+This diagnostic produces the plots:
+
+* Polar Stereographic projection extent plots of individual model years.
+* Polar Stereographic projection maps of the ice cover and ice extent for
+  individual models.
+* A time series of Polar Stereographic projection extent plots - see below.
+* Time series plots of the total ice area and the total ice extent.
+
+
+The following image shows an example of the sea ice extent plot, showing the
+summer Northern Hemisphere ice extent for the HadGEM2-CC model, in the
+historical scenario.
+
+.. centered:: |pic_sea_ice1|
+
+.. |pic_sea_ice1| image:: /recipes/figures/ocean/ocean_sea_ice1.png
+
+
+The sea ice diagnostic is unlike the other diagnostics in the ocean diagnostics
+toolkit. The other tools are built to be generic plotting tools which
+work with any field (i.e. ``diagnostic_timeseries.py`` works equally well for
+temperature, chlorophyll, or any other field). The sea ice diagnostic, on the
+other hand, is the only tool that performs a field-specific evaluation.
+
+The diagnostic_seaice.py_ diagnostic is more fully described below.
+
+
+
+Available diagnostics
+........................
+
+Diagnostics are stored in the diag_scripts directory: ocean_.
+
+The following python modules are included in the ocean diagnostics package.
+Each module is described in more detail both below and inside the module.
+
+- diagnostic_maps.py
+- diagnostic_maps_quad.py
+- diagnostic_model_vs_obs.py
+- diagnostic_profiles.py
+- diagnostic_seaice.py
+- diagnostic_timeseries.py
+- diagnostic_tools.py
+- diagnostic_transects.py
+
+
+diagnostic_maps.py
+------------------
+
+The diagnostic_maps.py_ produces a spatial map from a NetCDF. It requires the
+input netCDF to have one of the following sets of dimensions:
+
+- A two dimensional file: latitude, longitude.
+- A three dimensional file: depth, latitude, longitude.
+
+In the case of a 3D netCDF file, this diagnostic produces a map for EVERY layer.
+For this reason, we recommend extracting a small number of specific layers in
+the preprocessor, using the `extract_layer` preprocessor.
+
+This script cannot process NetCDFs with multiple time steps. Please use the
+`time_average` preprocessor to collapse the time dimension.
+
+This diagnostic also includes the optional arguments, `threshold` and
+`thresholds`.
+
+- threshold: a single float.
+- thresholds: a list of floats.
+
+Only one of these arguments should be provided at a time. These two arguments
+produce a second kind of diagnostic map plot: a contour map showing the spatial
+distribution of the threshold value, for each dataset. Alternatively, if the
+thresholds argument is used instead of threshold, the single-dataset contour
+map shows the contours of all the values in the thresholds list.
+
+If multiple datasets are provided, in addition to the single dataset contour,
+a multi-dataset contour map is also produced for each value in the thresholds
+list.
+
+Some appropriate preprocessors for this diagnostic would be:
+
+For a global 2D field:
+
+  .. code-block:: yaml
+
+     prep_map_1:
+       time_average:
+
+
+For a regional 2D field:
+
+  .. code-block:: yaml
+
+     prep_map_2:
+       extract_region:
+         start_longitude: -80.
+         end_longitude: 30.
+         start_latitude: -80.
+         end_latitude: 80.
+       time_average:
+
+For a global 3D field at the surface and 10m depth:
+
+  .. code-block:: yaml
+
+     prep_map_3:
+       custom_order: true
+       extract_levels:
+         levels: [0., 10.]
+         scheme: linear_horizontal_extrapolate_vertical
+       time_average:
+
+
+For a multi-model comparison of the mean of 2D global fields, including contour thresholds:
+
+  .. code-block:: yaml
+
+     prep_map_4:
+       custom_order: true
+       time_average:
+       regrid:
+         target_grid: 1x1
+         scheme: linear
+
+And this also requires the thresholds key in the diagnostic:
+
+  .. code-block:: yaml
+
+     diagnostic_map:
+       variables:
+         tos: # Temperature ocean surface
+           preprocessor: prep_map_4
+           field: TO2M
+       scripts:
+         Ocean_regrid_map:
+           script: ocean/diagnostic_maps.py
+           thresholds: [5, 10, 15, 20]
+
+
+diagnostic_maps_quad.py
+--------------------------------
+
+The diagnostic_maps_quad.py_ diagnostic produces an image showing four maps.
+Each of these four maps shows latitude vs longitude, with the cube value used
+as the colour scale. The four plots are:
+
+================= ====================
+model1            model 1 minus model2
+----------------- --------------------
+model2 minus obs  model1 minus obs
+================= ====================
+
+
+These figures are also known as Model vs Model vs Obs plots.
+
+This diagnostic assumes that the preprocessors do the bulk of the
+hard work, and that the cubes received by this diagnostic (via the settings.yml
+and metadata.yml files) have no time component, a small number of depth layers,
+and latitude and longitude coordinates.
+
+An appropriate preprocessor for a 2D field would be:
+
+  .. code-block:: yaml
+
+     prep_quad_map:
+       time_average:
+
+and an example of an appropriate diagnostic section of the recipe would be:
+
+  .. code-block:: yaml
+
+     diag_map_1:
+       variables:
+         tos: # Temperature ocean surface
+           preprocessor: prep_quad_map
+           field: TO2Ms
+           mip: Omon
+       additional_datasets:
+         # filename: tos_ATSR_L3_ARC-v1.1.1_199701-201112.nc
+         # download from: https://datashare.is.ed.ac.uk/handle/10283/536
+         - {dataset: ATSR, project: obs4mips, level: L3, version: ARC-v1.1.1, start_year: 2001, end_year: 2003, tier: 3}
+       scripts:
+         Global_Ocean_map:
+           script: ocean/diagnostic_maps_quad.py
+           control_model: {dataset: HadGEM2-CC, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1}
+           exper_model: {dataset: HadGEM2-ES, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1}
+           observational_dataset: {dataset: ATSR, project: obs4mips}
+
+Note that the details about the control model, the experiment models
+and the observational dataset are all provided in the script section of the
+recipe.
+
+
+
+diagnostic_model_vs_obs.py
+--------------------------------
+
+The diagnostic_model_vs_obs.py_ diagnostic makes model vs observations maps
+and scatter plots. The map plot shows four latitude vs longitude maps:
+
+======================== =======================
+Model                    Observations
+------------------------ -----------------------
+Model minus Observations Model over Observations
+======================== =======================
+
+Note that this diagnostic assumes that the preprocessors do the bulk of the
+hard work, and that the cube received by this diagnostic (via the settings.yml
+and metadata.yml files) has no time component, a small number of depth layers,
+and latitude and longitude coordinates.
+
+This diagnostic also includes the optional arguments, `maps_range` and
+`diff_range`, to manually define plot ranges. Both arguments take a list of two
+floats setting the minimum and maximum values of the plot range: `maps_range`
+for the Model and Observations maps (top panels) and `diff_range` for the Model
+minus Observations panel (bottom left).
+Note that if the input data have negative values, the Model over Observations map
+(bottom right) is not produced.
+
+The scatter plots plot the matched model coordinate on the x-axis and the
+observational dataset on the y-axis, then perform a linear
+regression of those data and plot the line of best fit on the plot.
+The parameters of the fit are also shown on the figure.
+
+An appropriate preprocessor for a 3D+time field would be:
+
+  .. code-block:: yaml
+
+     preprocessors:
+       prep_map:
+         extract_levels:
+           levels: [100., ]
+           scheme: linear_extrap
+         time_average:
+         regrid:
+           target_grid: 1x1
+           scheme: linear
+
+
+
+diagnostic_profiles.py
+--------------------------------
+
+The diagnostic_profiles.py_ diagnostic produces images of the profile over time from a cube.
+These plots show cube value (e.g. temperature) on the x-axis, and depth/height
+on the y-axis. The colour scale is the annual mean of the cube data.
+Note that this diagnostic assumes that the preprocessors do the bulk of the
+hard work, and that the cube received by this diagnostic (via the settings.yml
+and metadata.yml files) has a time component and a depth component, but no
+latitude or longitude coordinates.
+
+An appropriate preprocessor for a 3D+time field would be:
+
+  .. code-block:: yaml
+
+     preprocessors:
+       prep_profile:
+         extract_volume:
+           long1: 0.
+           long2: 20.
+           lat1: -30.
+           lat2: 30.
+           z_min: 0.
+           z_max: 3000.
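+         # Note (assumed behaviour): average_region collapses the longitude and
+         # latitude coordinates, leaving the time/depth cube that
+         # diagnostic_profiles.py expects.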
+         average_region:
+           coord1: longitude
+           coord2: latitude
+
+
+
+diagnostic_timeseries.py
+--------------------------------
+
+The diagnostic_timeseries.py_ diagnostic produces images of the time development
+of a metric from a cube. These plots show time on the x-axis and cube value
+(e.g. temperature) on the y-axis.
+
+Two types of plots are produced: individual model timeseries plots and
+multi model time series plots. The individual plots show the results from a
+single cube, even if this cube is a multi-model mean made by the `multimodel`
+preprocessor.
+
+The multi model time series plots show several models on the same axes, where
+each model is represented by a different line colour. The line colours are
+determined by the number of models, their alphabetical order and the `jet`
+colour scale. Observational datasets and multimodel means are shown as black
+lines.
+
+This diagnostic assumes that the preprocessors do the bulk of the work,
+and that the cube received by this diagnostic (via the settings.yml
+and metadata.yml files) is a time-dimensional cube. This means that the pre-processed
+netCDF has a time component, no depth component, and no latitude or longitude
+coordinates.
+
+Some appropriate preprocessors would be:
+
+For a global area-weighted average 2D field:
+
+  .. code-block:: yaml
+
+     average_area:
+       coord1: longitude
+       coord2: latitude
+
+For a global volume-weighted average 3D field:
+
+  .. code-block:: yaml
+
+     average_volume:
+       coord1: longitude
+       coord2: latitude
+       coordz: depth
+
+For a global area-weighted surface of a 3D field:
+
+  .. code-block:: yaml
+
+     extract_levels:
+       levels: [0., ]
+       scheme: linear_horizontal_extrapolate_vertical
+     average_area:
+       coord1: longitude
+       coord2: latitude
+
+
+An example of the multi-model time series plots can be seen here:
+
+.. centered:: |pic_amoc2|
+
+.. |pic_amoc2| image:: /recipes/figures/ocean/amoc_fig_1.png
+
+
+
+diagnostic_transects.py
+--------------------------------
+
+
+
+The diagnostic_transects.py_ diagnostic produces images of a transect,
+typically along a constant latitude or longitude.
+
+These plots show 2D plots with either latitude or longitude along the x-axis,
+depth along the y-axis, and the cube value as the colour scale.
+
+
+This diagnostic assumes that the preprocessors do the bulk of the hard work,
+and that the cube received by this diagnostic (via the settings.yml and
+metadata.yml files) has no time component, and one of the latitude or
+longitude coordinates has been reduced to a single value.
+
+An appropriate preprocessor for a 3D+time field would be:
+
+  .. code-block:: yaml
+
+     time_average:
+     extract_slice:
+       latitude: [-50., 50.]
+       longitude: 332.
+
+Here is an example of the transect figure:
+
+.. centered:: |pic_ocean_sf1|
+
+.. |pic_ocean_sf1| image:: /recipes/figures/ocean/stream_function1.png
+
+And here is an example of the multi-model transect contour figure:
+
+.. centered:: |pic_ocean_sf2|
+
+.. |pic_ocean_sf2| image:: /recipes/figures/ocean/stream_function2.png
+
+
+
+diagnostic_seaice.py
+--------------------------------
+
+
+
+The diagnostic_seaice.py_ diagnostic is unique in this module, as it produces
+several different kinds of images, including time series, maps, and contours.
+It is a good example of a diagnostic where the preprocessor does very little
+work, and the diagnostic does a lot of the hard work.
+
+This was done purposely, firstly to demonstrate the flexibility of ESMValTool,
+and secondly because sea ice is a unique field where several metrics can be
+calculated from the sea ice cover fraction.
+
+The recipe associated with this diagnostic is recipe_ocean_ice_extent.yml.
+This recipe contains 4 preprocessors which all perform approximately the same
+calculation. All four preprocessors extract a season:
+
+- December, January and February (DJF)
+- June, July and August (JJA)
+
+and they also extract either the Northern or Southern hemisphere. The four
+preprocessors are combinations of DJF or JJA and the Northern or Southern hemisphere.
+
+One of the four preprocessors is the Northern Hemisphere Winter ice extent:
+
+.. code-block:: yaml
+
+   timeseries_NHW_ice_extent: # North Hemisphere Winter ice_extent
+     custom_order: true
+     extract_time: &time_anchor # declare time here.
+       start_year: 1960
+       start_month: 12
+       start_day: 1
+       end_year: 2005
+       end_month: 9
+       end_day: 30
+     extract_season:
+       season: DJF
+     extract_region:
+       start_longitude: -180.
+       end_longitude: 180.
+       start_latitude: 0.
+       end_latitude: 90.
+
+Note that the default settings for ESMValTool assume that the year starts on the
+first of January. This causes a problem for this preprocessor, as the first
+DJF season would not include the first month, December, and the final season would
+not include January and February. For this reason, we also add the
+`extract_time` preprocessor.
+
+This preprocessor group produces a 2D field with a time component, allowing
+the diagnostic to investigate the time development of the sea ice extent.
+
+The diagnostic section of the recipe should look like this:
+
+.. code-block:: yaml
+
+   diag_ice_NHW:
+     description: North Hemisphere Winter Sea Ice diagnostics
+     variables:
+       sic: # surface ice cover
+         preprocessor: timeseries_NHW_ice_extent
+         field: TO2M
+         mip: OImon
+     scripts:
+       Global_seaice_timeseries:
+         script: ocean/diagnostic_seaice.py
+         threshold: 15.
+
+Note that the threshold here is 15%, which is the standard cut-off for the
+ice extent.
+
+The sea ice diagnostic script produces three kinds of plots, using the
+methods:
+
+- `make_map_extent_plots`: extent map plots of individual models using a Polar Stereographic projection.
+- `make_map_plots`: map plots of individual models using a Polar Stereographic projection.
+- `make_ts_plots`: time series plots of individual models
+
+There are no multi-model comparisons included here (yet).
+
+
+
+diagnostic_tools.py
+-------------------
+
+
+
+The diagnostic_tools.py_ is a module that contains several python tools used
+by the ocean diagnostics tools.
+
+These tools are:
+
+- folder: produces a directory at the path provided and returns a string.
+- get_input_files: loads a dictionary from the input files in the metadata.yml.
+- bgc_units: converts to sensible units where appropriate (e.g. Celsius, mmol/m3)
+- timecoord_to_float: converts a time coordinate to decimal time, e.g. midnight on January 1st, 1970 is 1970.0
+- add_legend_outside_right: a plotting tool, which adds a legend outside the axes.
+- get_image_format: loads the image format, as defined in the global user config.yml.
+- get_image_path: creates a path for an image output.
+- make_cube_layer_dict: makes a dictionary for several layers of a cube.
+
+We just show a simple description here; each individual function is more fully
+documented in the diagnostic_tools.py_ module.
+
+
+A note on the auxiliary data directory
+......................................
+
+Some of these diagnostic scripts may not function on machines with no access
+to the internet, as cartopy may try to download the shape files. The solution
+to this issue is to put the relevant cartopy shapefiles in a directory which
+is visible to ESMValTool, and then link that path to ESMValTool via
+the `auxiliary_data_dir` variable in your config-user.yml file.
+
+The cartopy masking files can be downloaded from:
+https://www.naturalearthdata.com/downloads/
+
+
+In these recipes, cartopy uses the 110m physical coastline and land files::
+
+    110m_coastline.dbf
+    110m_coastline.shp
+    110m_coastline.shx
+    110m_land.dbf
+    110m_land.shp
+    110m_land.shx
+
+
+Associated Observational datasets
+........................................
+
+The following observational datasets are used by these recipes:
+
+World Ocean ATLAS
+-----------------
+These data can be downloaded from:
+https://www.nodc.noaa.gov/OC5/woa13/woa13data.html
+(last access 10/25/2018)
+Select the "All fields data links (1° grid)" netCDF file, which contains all
+fields.
+
+The following WOA datasets are used by the ocean diagnostics:
+ - Temperature
+ - Salinity
+ - Nitrate
+ - Phosphate
+ - Silicate
+ - Dissolved Oxygen
+
+These files need to be reformatted using the `cmorize_obs_py` script with output name `WOA`.
+
+
+Landschutzer 2014
+-----------------
+These data can be downloaded from:
+ftp://ftp.nodc.noaa.gov/nodc/archive/arc0105/0160558/1.1/data/0-data/spco2_1998-2011_ETH_SOM-FFN_CDIAC_G05.nc
+(last access 02/28/2019)
+
+The following variables are used by the ocean diagnostics:
+ - fgco2, Surface Downward Flux of Total CO2
+ - spco2, Surface Aqueous Partial Pressure of CO2
+ - dpco2, Delta CO2 Partial Pressure
+
+The file needs to be reformatted using the `cmorize_obs_py` script with output name `Landschutzer2014`.
+
+
+
+.. Links:
+
+.. Recipes:
+.. _recipe_ocean_amoc.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_amoc.yml
+.. _recipe_ocean_example.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_example.yml
+.. _recipe_ocean_scalar_fields.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_scalar_fields.yml
+.. _recipe_ocean_bgc.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_bgc.yml
+.. _recipe_ocean_quadmap.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_quadmap.yml
+.. _recipe_ocean_Landschutzer2014.yml: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/recipes/recipe_ocean_Landschutzer2014.yml
+
+.. Diagnostics:
+.. _ocean: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/
+.. _diagnostic_maps.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_maps.py
+.. _diagnostic_maps_quad.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py
+.. _diagnostic_model_vs_obs.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py
+.. _diagnostic_profiles.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py
+.. _diagnostic_timeseries.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py
+.. _diagnostic_transects.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_transects.py
+.. _diagnostic_seaice.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py
+.. _diagnostic_tools.py: https://github.com/ESMValGroup/ESMValTool/tree/version2_development/esmvaltool/diag_scripts/ocean/diagnostic_tools.py
diff --git a/doc/sphinx/source/recipes/recipe_perfmetrics.rst b/doc/sphinx/source/recipes/recipe_perfmetrics.rst
index 1f8ae91718..81ea101d28 100644
--- a/doc/sphinx/source/recipes/recipe_perfmetrics.rst
+++ b/doc/sphinx/source/recipes/recipe_perfmetrics.rst
@@ -6,93 +6,104 @@ Performance metrics for essential climate parameters
 Overview
 --------
-The goal is to create a standard recipe for the calculation of performance metrics to quantify the ability of the models to reproduce the climatological mean annual cycle for selected "Essential Climate Variables" (ECVs) plus some additional corresponding diagnostics and plots to better understand and interpret the results. The recipe can be used to calculate performance metrics at different vertical levels (e.g., 5, 30, 200, 850 hPa as in Gleckler et al., 2008) and in four regions (global, tropics 20°N-20°S, northern extratropics 20°-90°N, southern extratropics 20°-90°S). As an additional reference, we consider the Righi et al. (2015) paper.
+The goal is to create a standard recipe for the calculation of performance metrics to quantify the ability of the models to reproduce the climatological mean annual cycle for selected "Essential Climate Variables" (ECVs) plus some additional corresponding diagnostics and plots to better understand and interpret the results.
+
+The recipe can be used to calculate performance metrics at different vertical levels (e.g., 5, 30, 200, 850 hPa as in `Gleckler et al. (2008) <https://doi.org/10.1029/2007JD008972>`_) and in different regions. As an additional reference, we consider `Righi et al. (2015) <https://doi.org/10.5194/gmd-8-733-2015>`_.
 Available recipes and diagnostics
 -----------------------------------
-Recipes are stored in nml/
-
-* recipe_perfmetrics_CMIP5.xml
-
-Diagnostics are stored in diag_scripts/
-
-* perfmetrics_grading.ncl: calculates grades according to a given metric, with different options for normalization. It requires fields precalculated by perfmetrics_main.ncl.
-* perfmetrics_grading_collect.ncl: collects results from metrics previously calculated by perfmetrics_grading.ncl and passes them to the plotting functions.
-* perfmetrics_main.ncl: calculates and (optionally) plots annual/seasonal cycles, zonal means, lat-lon fields and time-lat-lon fields from input monthly 2-d or 3-d ("T2M", "T3Ms") data. The calculated fields can be also plotted as difference w.r.t. a given reference model. They are also used as input to calculate grading metrics (see perfmetrics_grading.ncl).
-* perfmetrics_taylor.ncl: calculates grades according to a given metric, with different options for normalization. It requires fields precalculated by perfmetrics_main.ncl.
-* perfmetrics_taylor_collect.ncl: collects results from metrics previously calculated by perfmetrics_taylor.ncl and passes them to the plotting functions.
-
-User settings
--------------
-
-User setting files (cfg files) are stored in nml/cfg_perfmetrics/CMIP5/
-
-#. perfmetrics_grading.ncl
-
-   *diag_script_info attributes*
-
-   * MultiModelMean: calculate multi-model mean (True, False)
-   * MultiModelMedian: calculate multi-model median (True, False)
-   * metric: applied metric ("RMSD" = root-mean square difference, "BIAS" = mean bias, "stddev_ratio" = ratio of standard deviations of var and ref (for Taylor diagrams only), "correlation" = pattern correlation of var and ref (for Taylor diagrams only)).
-   * normalization: applied normalization ("mean" = normalization with mean, "median" = normalization with media, "centered_median" = substracting and dividing by the median, "stddev_mean" = normalization with substracting the mean and dividing by the standard deviation)
-
-#. perfmetrics_grading_collect.ncl
-
-   *Required diag_script_info attributes*
-
-   * label_bounds: min and max of the labelbar
-   * label_scale: bin width of the labelbar
-   * disp_values: switch on/off the grading values on the plot
-
-   *Optional diag_script_info attributes*
-
-   * sort: sort models in alphabetic order (excluding multi-model mean)
-   * title: plot title
+Recipes are stored in recipes/
+
+* recipe_perfmetrics_CMIP5.yml
+
+Diagnostics are stored in diag_scripts/perfmetrics/
+
+* main.ncl: calculates and (optionally) plots annual/seasonal cycles, zonal means, lat-lon fields and time-lat-lon fields. The calculated fields can also be plotted as differences w.r.t. a given reference dataset. main.ncl also calculates RMSD, bias and Taylor metrics. Input data have to be regridded to a common grid in the preprocessor. Each plot type is created by a separate routine, as detailed below.
+* cycle.ncl: creates an annual/seasonal cycle plot.
+* zonal.ncl: creates a zonal (lat-pressure) plot.
+* latlon.ncl: creates a lat-lon plot.
+* cycle_latlon.ncl: precalculates the metrics for a time-lat-lon field, with different options for normalization.
+* collect.ncl: collects and plots the metrics previously calculated by cycle_latlon.ncl.
+
+User settings in recipe
+-----------------------
+
+#. Script main.ncl
+
+   *Required settings (scripts)*
+
+   * plot_type: cycle (time), zonal (plev, lat), latlon (lat, lon), cycle_latlon (time, lat, lon), cycle_zonal (time, plev, lat)
+   * time_avg: type of time average (monthlyclim, seasonalclim, annualclim)
+   * region: selected region (global, trop, nhext, shext, nhtrop, shtrop, nh, sh, nhmidlat, shmidlat, nhpolar, shpolar, eq)
+
+   *Optional settings (scripts)*
+
+   * styleset: for plot_type cycle only (cmip5, righi15gmd, cmip6, default)
+   * plot_stddev: for plot_type cycle only, plots standard deviation as shading
+   * legend_outside: for plot_type cycle only, plots the legend in a separate file
+   * t_test: for plot_type zonal or latlon, calculates t-test in difference plots (default: False)
+   * conf_level: for plot_type zonal or latlon, adds the confidence level for the t-test to the plot (default: False)
+   * projection: map projection for plot_type latlon (default: CylindricalEquidistant)
+   * plot_diff: draws difference plots (default: False)
+   * calc_grading: calculates grading metrics (default: False)
+   * stippling: uses stippling to mark statistically significant differences (default: False = mask out non-significant differences in gray)
+   * show_global_avg: displays the global average of the input field as a string at the top-right of lat-lon plots (default: False)
+   * metric: chosen grading metric(s) (if calc_grading is True)
+   * normalization: metric normalization (for RMSD and BIAS metrics only)
+   * abs_levs: list of contour levels for absolute plot
+   * diff_levs: list of contour levels for difference plot
+   * zonal_cmap: for plot_type zonal only, chosen color table (default: "amwg_blueyellowred")
+   * zonal_ymin: for plot_type zonal only, minimum pressure level on the y-axis (default: 5. hPa)
+   * latlon_cmap: for plot_type latlon only, chosen color table (default: "amwg_blueyellowred")
+   * plot_units: plotting units (if different from standard CMOR units)
+
+   *Required settings (variables)*
+
+   * reference_dataset: reference dataset to compare with (usually the observations).
+
+   *Optional settings (variables)*
+
+   * alternative_dataset: a second dataset to compare with.
+
+   These settings are passed to the other scripts by main.ncl, depending on the selected plot_type.
+
+#. Script collect.ncl
+
+   *Required settings (scripts)*
+
+   * metric: selected metric (RMSD, BIAS or taylor)
+   * label_bounds: for RMSD and BIAS metrics, min and max of the labelbar
+   * label_scale: for RMSD and BIAS metrics, bin width of the labelbar
+   * colormap: for RMSD and BIAS metrics, color table of the labelbar
+
+   *Optional settings (scripts)*
+
+   * label_lo: adds lower triangle for values outside range
+   * label_hi: adds upper triangle for values outside range
+   * cm_interval: min and max color of the color table
+   * cm_reverse: reverses the color table
+   * sort: sorts datasets in alphabetic order (excluding MMM)
+   * diag_order: sorts diagnostics in a specific order (name = 'diagnostic'-'region')
+   * title: plot title
    * scale_font: scaling factor applied to the default font size
-
-#. perfmetrics_main.ncl
-
-   *diag_script_info attributes*
-
-   * plot_type: plot type ("cycle" (time), "zonal" (plev, lat), "latlon" (lat, lon), "cycle_latlon" (time, lat, lon))
-   * time_avg: time averaging ("monthlyclim", "seasonalclim")
-   * valid_fraction: required fraction of valid values
-   * level: vertical level (hPa, "all" for no selection; set to "all" for zonal mean plots)
-   * region: averaging region ("Global", "Tropics", "NH extratropics", "SH extratropics")
-   * grid: regridding option ("finest", "coarsest", "ref")
-   * draw_plots: create plots (True, False)
-   * plot_diff: create difference plots (only for zonal and lat-lon plots) (True, False)
-   * plot_stddev: plot standard deviation ("all", "none", "ref_model" or given model name)
-   * legend_outside: plot legend in a separate file (only for cycle plots) (True, False)
-   * styleset: plot style (only for cycle plots) ("CMIP5", "DEFAULT", "EMAC")
-   * t_test: calculate t-test for difference plots (only for zonal and lat-lon plots) (True, False)
-   * conf_level: confidence level for the t-test (only for zonal and lat-lon plots)
-
-#. perfmetrics_taylor.ncl
-
-   *Required diag_script_info attributes*
-
-   * region: averaging region ("Global", "Tropics", "NH extratropics", "SH extratropics")
-   * time_avg: time averaging ("monthlyclim", "seasonalclim")
-   * metric: selected metric (required but ignored by permetrics_taylor.ncl)
-   * normalization: type of metric normalization (required but ignored by permetrics_taylor.ncl)
-
-#. perfmetrics_taylor_collect.ncl
-
-   *diag_script_info attributes*
-
-   * None.
+   * disp_values: switches on/off the grading values on the plot
+   * disp_rankings: switches on/off the rankings on the plot
+   * rank_order: displays rankings in increasing (1) or decreasing (-1) order
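+
+For illustration, a grading diagnostic using main.ncl might be configured as follows (a minimal sketch: the preprocessor name is a placeholder, all values are examples only, and the script path follows the locations given above):
+
+.. code-block:: yaml
+
+   ta850:
+     variables:
+       ta:
+         preprocessor: pp850
+         reference_dataset: ERA-Interim
+         alternative_dataset: NCEP
+     scripts:
+       grading:
+         script: perfmetrics/main.ncl
+         plot_type: cycle_latlon
+         time_avg: monthlyclim
+         region: global
+         calc_grading: true
+         metric: [RMSD]
+         normalization: [centered_median]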
 
 Variables
 ---------
+* clt (atmos, monthly mean, longitude latitude time)
 * hus (atmos, monthly mean, longitude latitude lev time)
-* od550aer (aero, monthly mean, longitude latitude time)
+* od550aer, od870aer, od550abs, od550lt1aer (aero, monthly mean, longitude latitude time)
 * pr (atmos, monthly mean, longitude latitude time)
 * rlut, rlutcs, rsut, rsutcs (atmos, monthly mean, longitude latitude time)
+* sm (land, monthly mean, longitude latitude time)
 * ta (atmos, monthly mean, longitude latitude lev time)
 * tas (atmos, monthly mean, longitude latitude time)
+* toz (atmos, monthly mean, longitude latitude time)
+* ts (atmos, monthly mean, longitude latitude time)
 * ua (atmos, monthly mean, longitude latitude lev time)
 * va (atmos, monthly mean, longitude latitude lev time)
 * zg (atmos, monthly mean, longitude latitude lev time)
@@ -100,15 +111,22 @@ Variables
 Observations and reformat scripts
 ---------------------------------
-*Note: (1) obs4mips data can be used directly without any preprocessing; (2) see headers of reformat scripts for non-obs4mips data for download instructions.*
-
-* AIRS L3 (hus – obs4mips)
-* CERES-EBAF (rlut, rlutcs, rsut, rsutcs – obs4mips)
-* ERA-Interim (tas, ta, ua, va, zg, hus – reformat_scripts/obs/reformat_obs_ERA-Interim.ncl)
-* ESACCI-AEROSOL (od550aer – reformat_scripts/obs/reformat_obs_ESACCI-AEROSOL.ncl)
-* GPCP-SG (pr – obs4mips)
-* MODIS-L3 (od550aer – obs4mips)
-* NCEP (tas, ta, ua, va, zg – reformat_scripts/obs/reformat_obs_NCEP.ncl)
+*Note: (1) obs4mips data can be used directly without any preprocessing; (2) for non-obs4mips data, see the headers of the cmorization scripts (in esmvaltool/utils/cmorizers/obs) for download instructions.*
+
+* AIRS (hus - obs4mips)
+* CERES-EBAF (rlut, rlutcs, rsut, rsutcs - obs4mips)
+* ERA-Interim (tas, ta, ua, va, zg, hus - esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl)
+* ESACCI-AEROSOL (od550aer, od870aer, od550abs, od550lt1aer - esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-AEROSOL.ncl)
+* ESACCI-CLOUD (clt - esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-CLOUD.ncl)
+* ESACCI-OZONE (toz - esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OZONE.ncl)
+* ESACCI-SOILMOISTURE (sm - esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SOILMOISTURE.ncl)
+* ESACCI-SST (ts - esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SST.ncl)
+* GPCP-SG (pr - obs4mips)
+* HadISST (ts - esmvaltool/utils/cmorizers/obs/cmorize_obs_HadISST.ncl)
+* MODIS (od550aer - esmvaltool/utils/cmorizers/obs/cmorize_obs_MODIS.ncl)
+* NCEP (tas, ta, ua, va, zg - esmvaltool/utils/cmorizers/obs/cmorize_obs_NCEP.ncl)
+* NIWA-BS (toz - esmvaltool/utils/cmorizers/obs/cmorize_obs_NIWA-BS.ncl)
+* PATMOS-x (clt - esmvaltool/utils/cmorizers/obs/cmorize_obs_PATMOS-x.ncl)
 References
 ----------
diff --git a/doc/sphinx/source/recipes/recipe_quantilebias.rst b/doc/sphinx/source/recipes/recipe_quantilebias.rst
new file mode 100644
index 0000000000..0d72457bba
--- /dev/null
+++ b/doc/sphinx/source/recipes/recipe_quantilebias.rst
@@ -0,0 +1,46 @@
+Precipitation quantile bias
+===========================
+
+
+Overview
+--------
+
+Precipitation is a dominant component of the hydrological cycle, and as such a main driver of the climate system and human development. The reliability of climate projections and water resources strategies therefore depends on how well precipitation can be reproduced by the models used for simulations. While global circulation models from the CMIP5 project can reproduce the main patterns of mean precipitation when compared to observations, they often show shortages and biases in their ability to reproduce the strong precipitation tails of the distribution. Most models underestimate precipitation over arid regions and overestimate it over regions of complex topography, and these shortages are amplified at high precipitation quantiles. The quantilebias recipe implements the calculation of the quantile bias to allow the evaluation of the precipitation bias based on a user-defined quantile in models as compared to a reference dataset, following Mehran et al. (2014). The quantile bias (QB) is defined as the ratio of monthly precipitation amounts in each simulation to that of the reference dataset (GPCP observations in the example) above a specified threshold t (e.g., the 75th percentile of all the local monthly values). A quantile bias equal to 1 indicates no bias in the simulations, whereas a value above (below) 1 corresponds to a climate model's overestimation (underestimation) of the precipitation amount above the specified threshold t, with respect to that of the reference dataset.
+
+
+Available recipes and diagnostics
+---------------------------------
+
+Recipes are stored in recipes/
+
+* recipe_quantilebias.yml
+
+Diagnostics are stored in diag_scripts/quantilebias/
+
+* quantilebias.R
+
+
+User settings
+-------------
+
+*Required settings for script*
+
+* perc_lev: quantile (in %), e.g. 50
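+
+For illustration, the script section in the recipe might look like this (a minimal sketch: the script path follows the location given above, and the perc_lev value is just an example, tied to the 75th-percentile threshold mentioned in the overview):
+
+.. code-block:: yaml
+
+   scripts:
+     quantilebias:
+       script: quantilebias/quantilebias.R
+       perc_lev: 75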
diff --git a/doc/sphinx/source/recipes/recipe_rainfarm.rst b/doc/sphinx/source/recipes/recipe_rainfarm.rst new file mode 100644 index 0000000000..8c12734409 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_rainfarm.rst @@ -0,0 +1,53 @@ +RainFARM stochastic downscaling +=============================== + + +Overview +-------- + +Precipitation extremes and small-scale variability are essential drivers in many climate change impact studies. However, the spatial resolution currently achieved by global and regional climate models is still insufficient to correctly identify the fine structure of precipitation intensity fields. In the absence of a proper physically based representation, this scale gap can be at least temporarily bridged by adopting a stochastic rainfall downscaling technique (Rebora et al., 2006). With this aim, the Rainfall Filtered Autoregressive Model (RainFARM) was developed to apply the stochastic precipitation downscaling method to climate models. The RainFARM Julia library and command-line tool version (https://github.com/jhardenberg/RainFARM.jl) was implemented as a recipe. The stochastic method allows prediction of climate variables at the local scale from information simulated by climate models at the regional scale: it first evaluates the statistical distribution of precipitation fields at the regional scale and then applies the relationship to the boundary conditions of the climate model to produce synthetic fields at the requested higher resolution. RainFARM exploits the nonlinear transformation of a Gaussian random precipitation field, conserving the information present in the fields at larger scales (Rebora et al., 2006; D’Onofrio et al., 2014). + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_rainfarm.yml + +Diagnostics are stored in diag_scripts/rainfarm/ + +* rainfarm.R + + +User settings +------------- + +*Required settings for script* + +* slope: spatial spectral slope (set to 0 to compute automatically from large scales) +* nens: number of ensemble members to be calculated +* nf: number of subdivisions for downscaling (e.g. 8 will produce output fields with linear resolution increased by a factor of 8) +* conserv_glob: logical, whether to conserve precipitation over the full domain +* conserv_smooth: logical, whether to conserve precipitation using convolution (if neither conserv_glob nor conserv_smooth is chosen, box conservation is used) +* weights_climo: set to false if no orographic weights are to be used, else set it to the full path to a fine-scale precipitation climatology file. The file is expected to be in NetCDF format and should contain at least one precipitation field. If several fields at different times are provided, a climatology is derived by time averaging. Suitable climatology files could be, for example, a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (http://www.worldclim.org) or CHELSA (http://chelsa-climate.org) websites. The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools, https://www.gdal.org). + + +Variables +--------- + +* pr (atmos, daily mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +None. + + +References +---------- + +* Terzago et al. 2018, Nat. Hazards Earth Syst.
Sci., 18, 2825-2840 +* D'Onofrio et al. 2014, J of Hydrometeorology 15, 830-843 +* Rebora et al. 2006, JHM 7, 724 diff --git a/doc/sphinx/source/recipes/recipe_runoff_et.rst b/doc/sphinx/source/recipes/recipe_runoff_et.rst new file mode 100644 index 0000000000..3ce3fdc5bb --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_runoff_et.rst @@ -0,0 +1,130 @@ +Runoff_ET +========= + +Overview +-------- +This diagnostic calculates biases of long-term climatological annual means of total runoff R, +precipitation P and evapotranspiration E for 12 large-scale catchments on different continents +and climates. For total runoff, catchment averaged model values are compared to climatological +GRDC station observations of river runoff (Duemenil Gates et al., 2000). Due to the incompleteness +of these station data, a year-to-year correspondence of data cannot be achieved in a generalized way, +so only climatological data are considered, as was done in Hagemann et al. (2013). +For precipitation, catchment-averaged WFDEI precipitation data (Weedon et al., 2014) for 1979-2010 +are used as reference. For evapotranspiration, observations are estimated using the difference of the +above mentioned precipitation reference minus the climatological GRDC river runoff. + +The catchments are Amazon, Congo, Danube, Ganges-Brahmaputra, Lena, Mackenzie, Mississippi, Murray, +Niger, Nile, Parana and Yangtze-Kiang. Variable names are expected to follow CMOR standard, e.g. +precipitation as pr, total runoff as mrro and evapotranspiration as evspsbl with all fluxes given in +kg m-2 s-1. Evapotranspiration furthermore has to be defined positive upwards. + +The diagnostic produces text files with absolute and relative bias to the observations, as well as the +respective absolute values. Furthermore, it creates a bar plot for relative and absolute bias, +calculates and plots biases in runoff coefficient (R/P) and evapotranspiration coefficient (E/P) and +saves everything as one pdf file per model or one png file per model and analysis. + +The bias of the runoff coefficient is calculated via: +:math:`C_R = \frac{R_{model}}{P_{model}} - \frac{R_{GRDC}}{P_{WFDEI}}` and similarly for the +evapotranspiration coefficient. To a first approximation, evapotranspiration +and runoff are determined only by precipitation; in other words, :math:`R = P - E`. Hence, the runoff coefficient +(and similarly the evapotranspiration coefficient) tells you how important runoff (or evapotranspiration) +is in this region. By plotting the bias of the runoff coefficient against the evapotranspiration coefficient +we can immediately see whether there is a shift from runoff to evapotranspiration. On the other hand, by +plotting the bias of the runoff coefficient against the relative bias of precipitation we can see whether +an error in runoff is due to an error in precipitation. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_runoff_et.yml + +Diagnostics are stored in diag_scripts/runoff_et/ + + * catchment_analysis.py: bar and scatter plots for catchment averages of + runoff, evapotranspiration and precipitation + + +User settings in recipe +----------------------- + +#. Script catchment_analysis.py + + *Required settings (scripts)* + + * catchmentmask: netCDF file indicating the grid cell for a specific catchment. Mode of + distribution not yet clarified. ESGF?
+ + *Optional settings (variables)* + + * reference_dataset: dataset_name + Datasets can be used as reference instead of defaults provided with the diagnostics. + Must be identical for all variables. + + +Variables +--------- + +* evspsbl (atmos, monthly mean, time latitude longitude) +* pr (atmos, monthly mean, time latitude longitude) +* mrro (land, monthly mean, time latitude longitude) + + +Observations and reformat scripts +--------------------------------- + +Default reference data based on GRDC and WFDEI are included in the diagnostic script +as catchment averages. They can be replaced with any gridded dataset by defining a +reference_dataset. The necessary catchment mask is available at + +.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.2025776.svg + :target: https://doi.org/10.5281/zenodo.2025776 + +All other datasets are remapped onto the catchment mask grid as part +of the diagnostics. + + +References +---------- +* Duemenil Gates, L., S. Hagemann and C. Golz, + Observed historical discharge data from major rivers for climate model validation. + Max Planck Institute for Meteorology Report 307, Hamburg, Germany, 2000. + +* Hagemann, S., A. Loew, A. Andersson, + Combined evaluation of MPI-ESM land surface water and energy fluxes, + J. Adv. Model. Earth Syst., 5, doi:10.1029/2012MS000173, 2013. + +* Weedon, G. P., G. Balsamo, N. Bellouin, S. Gomes, M. J. Best, and P. Viterbo, + The WFDEI meteorological forcing data set: WATCH Forcing Data methodology applied + to ERA-Interim reanalysis data, + Water Resour. Res., 50, 7505–7514, doi: 10.1002/2014WR015638, 2014. + + +Example plots +------------- + +.. _fig_runoff_et_1: +.. figure:: /recipes/figures/runoff_et/catchments.png + :align: center + :width: 14cm + + Catchment definitions used in the diagnostics. + +.. _fig_runoff_et_2: +.. figure:: /recipes/figures/runoff_et/MPI-ESM-LR_historical_r1i1p1_bias-plot_mrro.png + :align: center + :width: 14cm + + Barplot indicating the absolute and relative bias in annual runoff between MPI-ESM-LR (1970-2000) + and long term GRDC data for specific catchments. + +.. _fig_runoff_et_3: +.. figure:: /recipes/figures/runoff_et/MPI-ESM-LR_historical_r1i1p1_rocoef-vs-relprbias.png + :align: center + :width: 14cm + + Biases in runoff coefficient (runoff/precipitation) and precipitation for major catchments of + the globe. The MPI-ESM-LR historical simulation (1970-2000) is used as an example. + diff --git a/doc/sphinx/source/recipes/recipe_shapeselect.rst b/doc/sphinx/source/recipes/recipe_shapeselect.rst new file mode 100644 index 0000000000..91be26c549 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_shapeselect.rst @@ -0,0 +1,42 @@ +Shapeselect +=========== + +Overview +-------- +Impact modelers are often interested in data for irregular regions best defined by a shapefile. With the shapefile selector tool, the user can extract time series or CII data for a user-defined region. The region is defined by a user-provided shapefile that includes one or several polygons. For each polygon, a single new time series, or CII, is produced. The spatial information is reduced to a representative point for the polygon ('representative') or to an average of all grid points within the polygon boundaries ('mean_inside'). If there are no grid points strictly inside the polygon, the 'mean_inside' method defaults to 'representative' for that polygon. An option for displaying the grid points together with the shapefile polygon allows the user to assess which method is most suitable.
If interpolation to a higher resolution input grid is necessary, this can be done in a pre-processing stage. Outputs are in the form of a NetCDF file, or as ASCII in CSV format. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_shapeselect.yml + + +Diagnostics are stored in diag_scripts/shapeselect/ + + * diag_shapeselect.py: calculates the average of grid points inside the + user provided shapefile and returns the result as a NetCDF or Excel sheet. + + +User settings in recipe +----------------------- + +#. Script diag_shapeselect.py + + *Required settings (scripts)* + + * shapefile: path to the user provided shapefile. A relative path is relative to the auxiliary_data_dir as configured in config-user.yml. + + * weighting_method: the preferred weighting method: 'mean_inside' - mean of all grid points inside polygon; 'representative' - one point inside or close to the polygon is used to represent the complete area. + + * write_xlsx: true or false to write output as Excel sheet or not. + + * write_netcdf: true or false to write output as NetCDF or not. + +Variables +--------- + +* pr, tas (daily) + diff --git a/doc/sphinx/source/recipes/recipe_smpi.rst b/doc/sphinx/source/recipes/recipe_smpi.rst new file mode 100644 index 0000000000..c0f8cb0f31 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_smpi.rst @@ -0,0 +1,90 @@ +Single Model Performance Index (SMPI) +===================================== + +Overview +-------- + +This diagnostic calculates the Single Model Performance Index (SMPI) following Reichler and Kim (2008). The SMPI (called "I\ :sup:`2`") is based on the comparison of several different climate variables (atmospheric, surface and oceanic) between climate model simulations and observations or reanalyses, and it focuses on the validation of the time-mean state of climate. For I\ :sup:`2` to be determined, the differences between the climatological mean of each model variable and observations at each of the available data grid points are calculated, and scaled to the interannual variance from the validating observations. This interannual variability is determined by performing a bootstrapping method (random selection with replacement) for the creation of a large synthetic ensemble of observational climatologies. The results are then scaled to the average error from a reference ensemble of models, and in a final step the mean over all climate variables is calculated for each model. The plot shows the I\ :sup:`2` values for each model (orange circles) and the multi-model mean (black circle), with the diameter of each circle representing the range of I\ :sup:`2` values encompassed by the 5th and 95th percentiles of the bootstrap ensemble. The I\ :sup:`2` values vary around one, with values greater than one for underperforming models, and values less than one for more accurate models. + +Note: The SMPI diagnostic needs all indicated variables from all added models for exactly the same time period to be calculated correctly. If one model does not provide a specific variable, either that model cannot be added to the SMPI calculations, or the missing variable has to be removed from the diagnostics altogether.
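The steps above can be made concrete with a small sketch. The actual computation is done in NCL (see the diagnostics listed below); the following NumPy fragment is only an illustration for a single variable, with hypothetical function and argument names, and with area weighting omitted:

.. code-block:: python

    import numpy as np

    def smpi_one_variable(model_clim, obs_yearly, ref_error, n_bootstrap=100):
        """Illustrative SMPI step for one variable (Reichler and Kim, 2008).

        model_clim : (npoints,) model climatological mean
        obs_yearly : (nyears, npoints) yearly means from the observations
        ref_error  : scalar, average error of a reference ensemble of models
        """
        rng = np.random.default_rng(0)
        nyears = obs_yearly.shape[0]
        obs_clim = obs_yearly.mean(axis=0)
        # Bootstrap (random selection with replacement) a synthetic ensemble
        # of observational climatologies to estimate interannual variance.
        boot = np.stack([
            obs_yearly[rng.integers(0, nyears, size=nyears)].mean(axis=0)
            for _ in range(n_bootstrap)
        ])
        var_obs = boot.var(axis=0)
        # Squared model-obs differences, scaled to the interannual variance
        # and normalised by the reference-ensemble error.
        e2 = np.mean((model_clim - obs_clim) ** 2 / var_obs)
        return e2 / ref_error  # contribution of this variable to I**2

The final I\ :sup:`2` of a model would then be the average of such single-variable contributions, and repeating the calculation over the bootstrap ensemble yields the 5th-95th percentile range shown as circle diameters in the plot.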
+ +Available recipes and diagnostics +----------------------------------- + +Recipes are stored in recipes/ + +* recipe_smpi.yml +* recipe_smpi_4cds.yml + +Diagnostics are stored in diag_scripts/perfmetrics/ + +* main.ncl: calculates and (optionally) plots annual/seasonal cycles, zonal means, lat-lon fields and time-lat-lon fields. The calculated fields can also be plotted as the difference w.r.t. a given reference dataset. main.ncl also calculates RMSD, bias and Taylor metrics. Input data have to be regridded to a common grid in the preprocessor. Each plot type is created by a separate routine, as detailed below. +* cycle_zonal.ncl: calculates the single model performance index (Reichler and Kim, 2008). It requires fields precalculated by main.ncl. +* collect.ncl: collects the metrics previously calculated by cycle_latlon.ncl and passes them to the plotting functions. + +User settings +------------- + +#. perfmetrics/main.ncl + + *Required settings for script* + + * plot_type: only "cycle_latlon (time, lat, lon)" and "cycle_zonal (time, plev, lat)" available for SMPI; usage is defined in the recipe and is dependent on the used variable (2D variable: cycle_latlon, 3D variable: cycle_zonal) + * time_avg: type of time average (only "yearly" allowed for SMPI, any other settings are not supported for this diagnostic) + * region: selected region (only "global" allowed for SMPI, any other settings are not supported for this diagnostic) + * normalization: metric normalization ("CMIP5" for analysis of CMIP5 simulations; to be adjusted accordingly for a different CMIP phase) + * calc_grading: calculates grading metrics (has to be set to "true" in the recipe) + * metric: chosen grading metric(s) (if calc_grading is True; has to be set to "SMPI") + * smpi_n_bootstrap: number of bootstrapping members used to determine uncertainties on model-reference differences (typical number of bootstrapping members: 100) + + *Required settings for variables* + + * reference_dataset: reference dataset to compare with (usually the observations). + +These settings are passed to the other scripts by main.ncl, depending on the selected plot_type. + +#. collect.ncl + + *Required settings for script* + + * metric: selected metric (has to be "SMPI") + + +Variables +--------- + +* hfds (ocean, monthly mean, longitude latitude time) +* hus (atmos, monthly mean, longitude latitude lev time) +* pr (atmos, monthly mean, longitude latitude time) +* psl (atmos, monthly mean, longitude latitude time) +* sic (ocean-ice, monthly mean, longitude latitude time) +* ta (atmos, monthly mean, longitude latitude lev time) +* tas (atmos, monthly mean, longitude latitude time) +* tauu (atmos, monthly mean, longitude latitude time) +* tauv (atmos, monthly mean, longitude latitude time) +* tos (ocean, monthly mean, longitude latitude time) +* ua (atmos, monthly mean, longitude latitude lev time) +* va (atmos, monthly mean, longitude latitude lev time) + + +Observations and reformat scripts +--------------------------------- + +*Note: (1) obs4mips data can be used directly without any preprocessing; (2) see headers of reformat scripts for non-obs4mips data for download instructions.* + +* ERA-Interim (hfds, hus, psl, ta, tas, tauu, tauv, ua, va - esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl) +* HadISST (sic, tos - reformat_scripts/obs/reformat_obs_HadISST.ncl) +* GPCP-SG (pr - obs4mips) + +References +---------- + +* Reichler, T. and J. Kim, How well do coupled models simulate today's climate? Bull. Amer. Meteor.
Soc., 89, 303-311, doi: 10.1175/BAMS-89-3-303, 2008. + +Example plots +------------- + +.. figure:: /recipes/figures/smpi/reichlerkim08bams_smpi.png + :width: 70 % + + Performance index I\ :sup:`2` for individual models (circles). Circle sizes indicate the length of the 95% confidence intervals. The black circle indicates the I\ :sup:`2` of the multi-model mean (similar to Reichler and Kim (2008), Figure 1). diff --git a/doc/sphinx/source/recipes/recipe_spei.rst b/doc/sphinx/source/recipes/recipe_spei.rst new file mode 100644 index 0000000000..ff71c4a594 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_spei.rst @@ -0,0 +1,77 @@ +SPEI +==== + +Overview +-------- +Droughts can be separated into three main types: meteorological, hydrological, and agricultural drought. + +Common for all types is that a drought needs to be put in the context of local and seasonal characteristics, i.e. a drought should not be defined with an absolute threshold, but as an anomalous condition. + +Meteorological droughts are often described using the standardized precipitation index (SPI; McKee et al., 1993), which in a standardized way describes local precipitation anomalies. It is calculated on monthly mean precipitation, and therefore does not account for the intensity of precipitation and the runoff process. Because SPI does not account for evaporation from the ground, it lacks one component of the water fluxes at the surface and is therefore not compatible with the concept of hydrological drought. + +A hydrological drought occurs when low water supply becomes evident, especially in streams, reservoirs, and groundwater levels, usually after extended periods of meteorological drought. GCMs normally do not simulate hydrological processes in sufficient detail to give deeper insights into hydrological drought processes. Neither do they properly describe agricultural droughts, when crops become affected by the hydrological drought. However, hydrological drought can be estimated by accounting for evapotranspiration, thereby estimating the surface retention of water. The standardized precipitation-evapotranspiration index (SPEI; Vicente-Serrano et al., 2010) has been developed to also account for temperature effects on the surface water fluxes. Evapotranspiration is not normally calculated in GCMs, so SPEI often takes other inputs to estimate the evapotranspiration. Here, the Thornthwaite (Thornthwaite, 1948) method based on temperature is applied. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_spei.yml + + +Diagnostics are stored in diag_scripts/droughtindex/ + + * diag_spi.r: calculates the SPI index + + * diag_spei.r: calculates the SPEI index + + +User settings +------------- + +#. Script diag_spi.r + + *Required settings (script)* + + * reference_dataset: dataset_name + The reference data set acts as a baseline for calculating model bias. + +#. Script diag_spei.r + + *Required settings (script)* + + * reference_dataset: dataset_name + The reference data set acts as a baseline for calculating model bias. + + +Variables +--------- + +* pr (atmos, monthly mean, time latitude longitude) +* tas (atmos, monthly mean, time latitude longitude) + + +References +---------- +* McKee, T. B., Doesken, N. J., & Kleist, J. (1993). The relationship of drought frequency and duration to time scales. In Proceedings of the 8th Conference on Applied Climatology (Vol. 17, No. 22, pp. 179-183). Boston, MA: American Meteorological Society.
+ +* Vicente-Serrano, S. M., Beguería, S., & López-Moreno, J. I. (2010). A multiscalar drought index sensitive to global warming: the standardized precipitation evapotranspiration index. Journal of Climate, 23(7), 1696-1718. + + +Example plots +------------- + +.. _fig_spei: +.. figure:: /recipes/figures/spei/histogram_spei.png + :align: center + :width: 14cm + + (top) Probability distribution of the standardized precipitation-evapotranspiration index of a sub-set of the CMIP5 models, and (bottom) bias relative to the CRU reference data set. + +.. _fig_spi: +.. figure:: /recipes/figures/spei/histogram_spi.png + :align: center + :width: 14cm + + (top) Probability distribution of the standardized precipitation index of a sub-set of the CMIP5 models, and (bottom) bias relative to the CRU reference data set. diff --git a/doc/sphinx/source/recipes/recipe_template.rst.template b/doc/sphinx/source/recipes/recipe_template.rst.template new file mode 100644 index 0000000000..99bef29df5 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_template.rst.template @@ -0,0 +1,75 @@ +Title +===== + +Overview +-------- + +Brief description of the diagnostic. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + + * recipe_.yml + +Diagnostics are stored in esmvaltool/diag_scripts// + + * : one line description + + +User settings in recipe +----------------------- + +#. Script + + *Required settings for script* + + * xxx: zzz + + *Optional settings for script* + + *Required settings for variables* + + *Optional settings for variables* + + *Required settings for preprocessor* + + *Optional settings for preprocessor* + + *Color tables* + + * list required color tables (if any) here + + +Variables +--------- + +* var1 (realm, frequency, dimensions), e.g. pr (atmos, monthly mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +*Note: (1) obs4mips data can be used directly without any preprocessing; +(2) see headers of reformat scripts for non-obs4mips data for download +instructions.* + +* xxx + + *Reformat script:* + +References +---------- + +* xxx + +Example plots +------------- + +.. _fig_mynewdiag_1: +.. figure:: /recipes/figures//awesome1.png + :align: center + + Add figure caption here. diff --git a/doc/sphinx/source/recipes/recipe_thermodyn_diagtool.rst b/doc/sphinx/source/recipes/recipe_thermodyn_diagtool.rst new file mode 100644 index 0000000000..da8352b37b --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_thermodyn_diagtool.rst @@ -0,0 +1,123 @@ +TheDiaTo v1.0 - A Diagnostic Tool for the Thermodynamics of the Climate System +============================================================================== + +Overview +-------- + +The tool allows the computation of TOA, atmospheric and surface energy budgets, latent energy and water mass budgets, +meridional heat transports, the Lorenz Energy Cycle (LEC), and the material entropy production with the direct +and indirect methods. + +The energy budgets are computed from monthly mean radiative and heat fluxes at the TOA and at the surface +(cf. Wild et al., 2013). The meridional heat transports are obtained from the latitudinal integration +of the zonal mean energy budgets. When a land-sea mask is provided, results are also available for +land and oceans, separately. + +The water mass budget is obtained from monthly mean latent heat fluxes (for evaporation), total and snowfall +precipitation (cf. Liepert et al., 2012).
The latent energy budget is obtained by multiplying each component of +the water mass budget by the respective latent heat constant. When a land-sea mask is provided, results are +also available for land and oceans, separately. + +The LEC is computed from 3D fields of daily mean velocity and temperature in the troposphere over +pressure levels. The analysis is carried out in spectral space, converting lonlat grids into Fourier coefficients. +The components of the LEC are computed as in Ulbrich and Speth, 1991. In order to account for possible gaps +in pressure levels, the daily fields of 2D near-surface temperature and horizontal velocities are also used. + +The material entropy production is computed by using the indirect or the direct method (or both). The former +method relies on the convergence of radiative heat in the atmosphere (cf. Lucarini et al., 2011; Pascale et al., 2011), +the latter on all viscous and non-viscous dissipative processes occurring in the atmosphere +(namely the sensible heat fluxes, the hydrological cycle with its components and the kinetic energy dissipation). + +For a comprehensive report on the methods used and some descriptive results, please refer to Lembo et al., 2019. + + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + + * recipe_thermodyn_diagtool.yml + +Diagnostics are stored in diag_scripts/thermodyn_diagtool/ + + * thermodyn_diagnostics.py: the main script, handling input files, calling computation and plotting scripts; + + * computations.py: a module containing all the main computations that are carried out by the program; + + * fluxogram.py: a module for producing the block diagrams displaying the reservoirs and conversion terms of the LEC + + * fourier_coefficients.py: a module for the computation of the Fourier coefficients from the lonlat input grid + + * lorenz_cycle.py: a module for the computation of the LEC components in Fourier coefficients + + * mkthe.py: a module for the computation of indirect variables obtained from the input fields, such as LCL height, boundary layer top height and temperature, potential temperature + + * plot_script.py: a module for the computation of maps, scatter plots, time series and meridional sections of some derived quantities for each model in the ensemble. The meridional heat and water mass transports are also computed here, as well as the peak magnitudes and locations; + + * provenance_meta.py: a module for collecting metadata and writing them to produced outputs; + +User settings +------------- + +Besides the datasets, to be set according to the usual ESMValTool convention, the user can set the following optional variables in recipe_thermodyn_diagtool.yml: + + * wat: if set to 'true', computations of the water mass and latent energy budgets and transports are performed + * lsm: if set to 'true', the computations of the energy budgets, meridional energy transports, water mass and latent energy budgets and transports are performed separately over land and oceans + * lec: if set to 'true', computations of the LEC are performed + * entr: if set to 'true', computations of the material entropy production are performed + * met (1, 2 or 3): the computation of the material entropy production must be performed with the indirect method (1), the direct method (2), or both methods (3). If option 2 or 3 is chosen, the intensity of the LEC is needed for the entropy production related to the kinetic energy dissipation. If lec is set to 'false', a default value is provided.
+ + These options apply to all models provided for the multi-model ensemble computations. + + +Variables +--------- + +* hfls (atmos, monthly mean, time latitude longitude) +* hfss (atmos, monthly mean, time latitude longitude) +* hus (atmos, monthly mean, time plev latitude longitude) +* pr (atmos, monthly mean, time latitude longitude) +* prsn (atmos, monthly mean, time latitude longitude) +* ps (atmos, monthly mean, time latitude longitude) +* rlds (atmos, monthly mean, time latitude longitude) +* rlus (atmos, monthly mean, time latitude longitude) +* rlut (atmos, monthly mean, time latitude longitude) +* rsds (atmos, monthly mean, time latitude longitude) +* rsdt (atmos, monthly mean, time latitude longitude) +* rsus (atmos, monthly mean, time latitude longitude) +* rsut (atmos, monthly mean, time latitude longitude) +* ta (atmos, daily mean, time plev latitude longitude) +* tas (atmos, daily mean, time latitude longitude) +* ts (atmos, monthly mean, time latitude longitude) +* ua (atmos, daily mean, time plev latitude longitude) +* uas (atmos, daily mean, time latitude longitude) +* va (atmos, daily mean, time plev latitude longitude) +* vas (atmos, daily mean, time latitude longitude) +* wap (atmos, daily mean, time plev latitude longitude) + + +References +---------- +* Lembo V, Lunkeit F, Lucarini V (2019) A new diagnostic tool for diagnosing water, energy and entropy budgets in climate models. Geosci. Model Dev. Discuss., doi:10.5194/gmd-2019-37, in review. +* Liepert BG, Previdi M (2012) Inter-model variability and biases of the global water cycle in CMIP3 coupled climate models. Environ Res Lett 7:014006. doi: 10.1088/1748-9326/7/1/014006 +* Lorenz EN (1955) Available Potential Energy and the Maintenance of the General Circulation. Tellus 7:157–167. doi: 10.1111/j.2153-3490.1955.tb01148.x +* Lucarini V, Fraedrich K, Ragone F (2010) New Results on the Thermodynamical Properties of the Climate System. J Atmos Sci 68. doi: 10.1175/2011JAS3713.1 +* Lucarini V, Blender R, Herbert C, et al (2014) Mathematical and physical ideas for climate science. Rev Geophys. doi: 10.1002/2013RG000446 +* Pascale S, Gregory JM, Ambaum M, Tailleux R (2011) Climate entropy budget of the HadCM3 atmosphere–ocean general circulation model and of FAMOUS, its low-resolution version. Clim Dyn 36:1189–1206. doi: 10.1007/s00382-009-0718-1 +* Ulbrich U, Speth P (1991) The global energy cycle of stationary and transient atmospheric waves: Results from ECMWF analyses. Meteorol Atmos Phys 45:125–138. doi: 10.1007/BF01029650 +* Wild M, Folini D, Schär C, et al (2013) The global energy balance from a surface perspective. Clim Dyn 40:3107–3134. doi: 10.1007/s00382-012-1569-8 + + +Example plots +------------- + +.. _fig_1: +.. figure:: /recipes/figures/thermodyn_diagtool/meridional_transp.png + :align: left + :width: 14cm + +.. _fig_2: +.. figure:: /recipes/figures/thermodyn_diagtool/CanESM2_wmb_transp.png + :align: right + :width: 14cm diff --git a/doc/sphinx/source/recipes/recipe_toymodel.rst b/doc/sphinx/source/recipes/recipe_toymodel.rst new file mode 100644 index 0000000000..f830380458 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_toymodel.rst @@ -0,0 +1,79 @@ +.. _recipes_toymodel: + +Toymodel +==================================================== + +Overview +-------- + +The goal of this diagnostic is to simulate single-model ensembles from an observational dataset to investigate the effect of observational uncertainty.
For further discussion of this synthetic value generator, its general application to forecasts and its limitations, see Weigel et al. (2008). The output is a netcdf file containing the synthetic observations. Due to the sampling of the perturbations from a Gaussian distribution, running the recipe multiple times, with the same observation dataset and input parameters, will result in different outputs. + + +Available recipes and diagnostics +----------------------------------- + +Recipes are stored in recipes/ + +* recipe_toymodel.yml + + +Diagnostics are stored in diag_scripts/magic_bsc/ + +* toymodel.R: generates a single model ensemble of synthetic observations + + + + +User settings +------------- + +User setting files are stored in recipes/ + +#. recipe_toymodel.yml + + *Required settings for preprocessor* + + extract_region: + + * start_longitude: minimum longitude + * end_longitude: maximum longitude + * start_latitude: minimum latitude + * end_latitude: maximum latitude + + *Required settings for script* + + * number_of_members: integer specifying the number of members to be generated + * beta: the user defined underdispersion (beta >= 0) + + +Variables +--------- + +* psl (atmos, daily-monthly, longitude, latitude, time) +* tas (atmos, daily-monthly, longitude, latitude, time) + + +Observations and reformat scripts +--------------------------------- + +*None* + +References +---------- + +* Bellprat, O., Massonnet, F., Siegert, S., Prodhomme, C., Macias-Gómez, D., Guemas, V., & Doblas-Reyes, F. (2017). Uncertainty propagation in observational references to climate model scales. Remote Sensing of Environment, 203, 101-108. + +* Massonnet, F., Bellprat, O., Guemas, V., & Doblas-Reyes, F. J. (2016). Using climate models to estimate the quality of global observational data sets. Science, aaf6369. + +* Weigel, A. P., Liniger, M. A., & Appenzeller, C. (2008). Can multi-model combinations really enhance the prediction skill of probabilistic ensemble forecasts? Quarterly Journal of the Royal Meteorological Society, 134(630), 241-260. + + +Example plots +------------- + +.. _fig_toymodel: +.. figure:: /recipes/figures/toymodel/synthetic_CMIP5_IPSL-CM5A-LR_day_historical_r1i1p1_T2M_tasmax_1999-2000.jpg + + + + diff --git a/doc/sphinx/source/recipes/recipe_zmnam.rst b/doc/sphinx/source/recipes/recipe_zmnam.rst new file mode 100644 index 0000000000..b3303d195c --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_zmnam.rst @@ -0,0 +1,76 @@ +Stratosphere-troposphere coupling and annular modes indices (ZMNAM) +=================================================================== + + +Overview +-------- + +The current generation of climate models includes the representation of stratospheric processes, as the vertical coupling with the troposphere is important for the weather and climate at the surface (e.g., `Baldwin and Dunkerton, 2001 `_). + +The recipe recipe_zmnam.yml can be used to evaluate the representation of the Northern Annular Mode (NAM, e.g., `Wallace, 2000 `_) in climate simulations, using reanalysis datasets as reference. + +The calculation is based on the “zonal mean algorithm” of `Baldwin and Thompson (2009) `_, and is an alternative to pressure-based or height-dependent methods. + +This approach provides a robust description of the stratosphere-troposphere coupling on daily timescales, requiring fewer subjective choices and a reduced amount of input data.
+Starting from daily mean geopotential height on pressure levels, the leading empirical orthogonal function and principal component are computed from zonal mean daily anomalies, with the leading principal component representing the zonal mean NAM index. The regression of the monthly mean geopotential height onto this monthly averaged index represents the NAM pattern for each selected pressure level. + +The outputs of the procedure are the monthly time series and the histogram of the daily zonal-mean NAM index, and the monthly regression maps for selected pressure levels. Users can select the specific datasets (climate model simulation and/or reanalysis) to be evaluated, and a subset of pressure levels of interest. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in recipes/ + +* recipe_zmnam.yml + +Diagnostics are stored in diag_scripts/zmnam/ + +* zmnam.py + +and subroutines + +* zmnam_calc.py +* zmnam_plot.py +* zmnam_preproc.py + + +User settings +------------- + +None. + + +Variables +--------- + +* zg (atmos, daily mean, longitude latitude time) + + +Observations and reformat scripts +--------------------------------- + +None. + + +References +---------- + +* Baldwin, M. P. and Thompson, D. W. (2009), A critical comparison of stratosphere–troposphere coupling indices. Q.J.R. Meteorol. Soc., 135: 1661-1672. `doi:10.1002/qj.479 `_. +* Baldwin, M. P. and Dunkerton, T. J. (2001), Stratospheric Harbingers of Anomalous Weather Regimes. Science 294 (5542): 581-584. `doi:10.1126/science.1063315 `_. +* Wallace, J. M. (2000), North Atlantic Oscillation/annular mode: Two paradigms-one phenomenon. Q.J.R. Meteorol. Soc., 126 (564): 791-805. `doi:10.1002/qj.49712656402 `_. + + + +Example plots +------------- + +.. figure:: /recipes/figures/zmnam/zmnam_reg.png + :width: 10cm + + Example output: regression map for a selected pressure level. + +.. figure:: /recipes/figures/zmnam/zmnam_ts.png + :width: 10cm + + Example output: time series of the zonal-mean NAM index. diff --git a/doc/sphinx/source/requirements.txt b/doc/sphinx/source/requirements.txt new file mode 100644 index 0000000000..23ebc0ead2 --- /dev/null +++ b/doc/sphinx/source/requirements.txt @@ -0,0 +1,18 @@ +cdo +cython +matplotlib<3 +nc-time-axis +netCDF4 +numba +numpy +pillow +prov[dot] +psutil +pyyaml +shapely +six +xarray +yamale +sklearn +pandas +eofs diff --git a/doc/sphinx/source/user_guide2/config.inc b/doc/sphinx/source/user_guide2/config.inc new file mode 100644 index 0000000000..dc9a537801 --- /dev/null +++ b/doc/sphinx/source/user_guide2/config.inc @@ -0,0 +1,102 @@ +.. _config: + +******************* +Configuration files +******************* + +There are several configuration files in ESMValTool: + + - config-user.yml + - config-developer.yml + - config-references.yml + - environment.yml + +The ``config-user.yml`` file is described here; the other three files are beyond the scope of the user guide and are described in the developer guide. + +User config file +================ + +The ``config-user.yml`` configuration file contains all the global level information needed by ESMValTool. The following shows the default settings from the ``config-user.yml`` file. + +.. code-block:: yaml + + # Diagnostics create plots? [true]/false + write_plots: true + # Diagnostics write NetCDF files?
[true]/false + write_netcdf: true + # Set the console log level debug, [info], warning, error + log_level: info + # verbosity is deprecated and will be removed in the future + # verbosity: 1 + # Exit on warning? true/[false] + exit_on_warning: false + # Plot file format? [ps]/pdf/png/eps/epsi + output_file_type: pdf + # Destination directory + output_dir: ./esmvaltool_output + # Auxiliary data directory (used for some additional datasets) + auxiliary_data_dir: ./auxiliary_data + # Use netCDF compression true/[false] + compress_netcdf: false + # Save intermediary cubes in the preprocessor true/[false] + save_intermediary_cubes: false + # Remove the preproc dir if all fine + remove_preproc_dir: true + # Run at most this many tasks in parallel null/[1]/2/3/4/.. + # Set to null to use the number of available CPUs. + # Make sure your system has enough memory for the specified number of tasks. + max_parallel_tasks: 1 + # Path to custom config-developer file, to customise project configurations. + # See config-developer.yml for an example. Set to None to use the default + config_developer_file: null + # Get profiling information for diagnostics + # Only available for Python diagnostics + profile_diagnostic: false + + # Rootpaths to the data from different projects (lists are also possible) + rootpath: + CMIP5: [~/cmip5_inputpath1, ~/cmip5_inputpath2] + OBS: ~/obs_inputpath + default: ~/default_inputpath + + # Directory structure for input data: [default]/BADC/DKRZ/ETHZ/etc + # See config-developer.yml for definitions. + drs: + CMIP5: default + +Most of these settings are fairly self-explanatory, e.g.: + +.. code-block:: yaml + + # Diagnostics create plots? [true]/false + write_plots: true + # Diagnostics write NetCDF files? [true]/false + write_netcdf: true + +The ``write_plots`` setting is used to inform ESMValTool about your preference for saving figures. Similarly, the ``write_netcdf`` setting is a boolean which turns on or off the writing of netCDF files. + +.. code-block:: yaml + + # Auxiliary data directory (used for some additional datasets) + auxiliary_data_dir: ./auxiliary_data + +The ``auxiliary_data_dir`` setting is the path to place any required additional auxiliary data files. This method was necessary because certain Python toolkits such as cartopy will attempt to download data files at run time, typically geographic data files such as coastlines or land surface maps. This can fail if the machine does not have access to the wider internet. This location allows us to tell cartopy (and other similar tools) where to find the files if they cannot be downloaded at runtime. To reiterate, this setting is not for model or observational datasets; rather, it is for data files used in plotting, such as coastline descriptions and so on. + + +Tip: You choose your ``config-user.yml`` file at run time, so you could have several available for different purposes: one for formal runs, one for debugging, etc.
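As an illustration of how a tool might consume this file, the short sketch below (not ESMValTool's actual loading code; just a hedged example using PyYAML) reads the YAML and applies the documented defaults:

.. code-block:: python

    import os
    import yaml  # PyYAML

    # Read the user configuration described above.
    with open("config-user.yml") as stream:
        cfg = yaml.safe_load(stream)

    # Fall back to the documented defaults when a key is missing.
    write_plots = cfg.get("write_plots", True)
    max_tasks = cfg.get("max_parallel_tasks", 1)
    if max_tasks is None:  # YAML null: use all available CPUs
        max_tasks = os.cpu_count()
    print(write_plots, max_tasks)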
diff --git a/doc/sphinx/source/user_guide2/dirstruct.inc b/doc/sphinx/source/user_guide2/dirstruct.inc new file mode 100644 index 0000000000..767788c6d6 --- /dev/null +++ b/doc/sphinx/source/user_guide2/dirstruct.inc @@ -0,0 +1,5 @@ +.. _dirstruct: + +************************************* +Directory structure of the ESMValTool +************************************* diff --git a/doc/sphinx/source/user_guide2/first_diagnostic.inc b/doc/sphinx/source/user_guide2/first_diagnostic.inc new file mode 100644 index 0000000000..aeccebdaa0 --- /dev/null +++ b/doc/sphinx/source/user_guide2/first_diagnostic.inc @@ -0,0 +1,96 @@ +******************************************** +How to prepare and run your first diagnostic +******************************************** + +Instructions for a personal diagnostic +====================================== + +Anyone can run a personal diagnostic, no matter where it is located; +there is no need to install esmvaltool in developer mode, nor to +git push or, for that matter, perform any git operations; the example recipe + +.. code-block:: bash + esmvaltool/recipes/recipe_my_personal_diagnostic.yml + +shows how to run a personal diagnostic; the example + +.. code-block:: bash + esmvaltool/diag_scripts/examples/my_little_diagnostic.py + +and any of its alterations may be used as training wheels for the future ESMValTool +diagnostic developer. The purpose of this example is to familiarize the user with +the framework of ESMValTool without the constraints of installing and running the +tool as a developer. + +Functionality +============= + +`my_little_diagnostic` (or whatever the user will call their diagnostic) makes full use +of ESMValTool's preprocessor output (both physical files and run variables); this output +comes in the form of a nested dictionary, or config dictionary (see an example below); +it also makes full use of the ability to call any of the preprocessor's functions; +note that relative imports of modules from the esmvaltool package are allowed and +work without altering the $PYTHONPATH. + +The user may parse this dictionary so that they execute a number of operations on the +preprocessed data; for example, `my_little_diagnostic.plot_time_series` grabs the +preprocessed data output, computes global area averages for each model, then plots +a time series for each model. Different manipulation functionalities for grouping, +sorting, etc. of the data in the config dictionary are available; +please consult the ESMValTool User Manual. + + +Writing a basic recipe +====================== +The user will need to write a basic recipe to be able to run their own personal diagnostic. +An example of such a recipe is found in `esmvaltool/recipes/recipe_my_personal_diagnostic.yml`. +For general guidelines with regards to ESMValTool recipes please consult the User Guide; +the specific parameters needed by a recipe that runs a personal diagnostic are: + +.. code-block:: yaml + scripts: + my_diagnostic: + script: /path/to/your/my_little_diagnostic.py + +i.e. the full path to the personal diagnostic that the user needs to run.
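To make the diagnostic interface concrete, here is a hedged sketch of a minimal personal diagnostic. It assumes only the convention visible in the config dictionary example below: the tool calls the script with the path to a settings file whose ``input_files`` entry lists the preprocessor's metadata.yml files (all other names are illustrative):

.. code-block:: python

    import sys
    import yaml

    def main():
        # ESMValTool invokes the diagnostic with the path to a settings file.
        with open(sys.argv[1]) as stream:
            settings = yaml.safe_load(stream)
        # Each input_files entry is a metadata.yml mapping each preprocessed
        # NetCDF filename to its attributes (dataset, short_name, ...).
        for metadata_file in settings["input_files"]:
            with open(metadata_file) as stream:
                metadata = yaml.safe_load(stream)
            for filename, attributes in metadata.items():
                print(attributes["dataset"], attributes["short_name"], filename)
                # ... load the file (e.g. with iris), compute and plot ...

    if __name__ == "__main__":
        main()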
+ +Example of config dictionary +============================ +``` +{'input_files': +['/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/preproc/simple_pp_ta/metadata.yml'], +'log_level': 'info', +'max_data_filesize': 100, +'myDiag': 'my_personal_diagnostic_example', +'myDiagPlace': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu', +'output_file_type': 'pdf', +'plot_dir': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/plots/simple/my_diagnostic', 'recipe': 'recipe_my_personal_diagnostic.yml', +'run_dir': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/run/simple/my_diagnostic', +'script': 'my_diagnostic', +'title': 'My First Diagnostic', +'version': '2.0a1', +'work_dir': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/work/simple/my_diagnostic', +'write_netcdf': True, +'write_plots': True, +'input_data': {'/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/preproc/simple_pp_ta/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T3M_ta_2000-2002.nc': + {'cmor_table': 'CMIP5', + 'dataset': 'MPI-ESM-LR', + 'diagnostic': 'simple', + 'end_year': 2002, + 'ensemble': 'r1i1p1', + 'exp': 'historical', + 'field': 'T3M', + 'filename': '/group_workspaces/jasmin2/cmip6_prep/esmvaltool_users/valeriu/MyDIAG/recipe_my_personal_diagnostic_20181001_112918/preproc/simple_pp_ta/CMIP5_MPI-ESM-LR_Amon_historical_r1i1p1_T3M_ta_2000-2002.nc', + 'fx_files': {'areacello': '/badc/cmip5/data/cmip5/output1/MPI-M/MPI-ESM-LR/historical/fx/ocean/fx/r0i0p0/latest/areacello/areacello_fx_MPI-ESM-LR_historical_r0i0p0.nc', 'sftlf': '/badc/cmip5/data/cmip5/output1/MPI-M/MPI-ESM-LR/historical/fx/atmos/fx/r0i0p0/latest/sftlf/sftlf_fx_MPI-ESM-LR_historical_r0i0p0.nc', 'sftof': '/badc/cmip5/data/cmip5/output1/MPI-M/MPI-ESM-LR/historical/fx/ocean/fx/r0i0p0/latest/sftof/sftof_fx_MPI-ESM-LR_historical_r0i0p0.nc'}, + 'long_name': 'Air Temperature', + 'mip': 'Amon', + 'preprocessor': 'pp', + 'project': 'CMIP5', + 'short_name': 'ta', + 'standard_name': 'air_temperature', + 'start_year': 2000, + 'units': 'K' + } -- end of input_data member value (key: preprocessed file) + } -- end of input_data dictionary +} -- end of config dictionary +``` diff --git a/doc/sphinx/source/user_guide2/index.rst b/doc/sphinx/source/user_guide2/index.rst index 899edc57f8..5728044b44 100644 --- a/doc/sphinx/source/user_guide2/index.rst +++ b/doc/sphinx/source/user_guide2/index.rst @@ -4,3 +4,11 @@ User's guide .. include:: intro.inc .. include:: install.inc +.. include:: config.inc +.. include:: inputdata.inc +.. include:: outputdata.inc +.. include:: observations.inc +.. include:: recipe.inc +.. include:: preprocessor.inc +.. include:: dirstruct.inc +.. include:: running.inc diff --git a/doc/sphinx/source/user_guide2/inputdata.inc b/doc/sphinx/source/user_guide2/inputdata.inc new file mode 100644 index 0000000000..b12d1e54ea --- /dev/null +++ b/doc/sphinx/source/user_guide2/inputdata.inc @@ -0,0 +1,7 @@ +.. _inputdata: + +********************** +ESMValTool data finder +********************** + +Documentation of the _data_finder.py module (incl. _download.py?)
\ No newline at end of file diff --git a/doc/sphinx/source/user_guide2/install.inc b/doc/sphinx/source/user_guide2/install.inc index 991727b45c..d4b883fad9 100644 --- a/doc/sphinx/source/user_guide2/install.inc +++ b/doc/sphinx/source/user_guide2/install.inc @@ -4,15 +4,13 @@ Installing ESMValTool ********************* -ESMValTool 2.0 requires a Unix(-like) operating system and Python 2.7+ or 3.6+. -Python 2.7+ will be discontinued in the near future, so we encourage you to use -Python 3.6+ if possible. +ESMValTool 2.0 requires a Unix(-like) operating system and Python 3.6+. The ESMValTool supports three different installation methods: -* Installation through Conda package manager (see https://www.continuum.io/); +* Installation through Conda package manager (see https://www.continuum.io); -* Deployment through a Docker container (see https://www.docker.com/); +* Deployment through a Docker container (see https://www.docker.com); * From the source code available at https://github.com/ESMValGroup/ESMValTool. @@ -45,20 +43,21 @@ The ESMValTool source code is available on a public GitHub repository: https://github.com/ESMValGroup/ESMValTool The easiest way to obtain it is to clone the repository using git -(see https://git-scm.com/): +(see https://git-scm.com/). To clone the public repository: .. code-block:: bash git clone https://github.com/ESMValGroup/ESMValTool.git +It is also possible to work in one of the ESMValTool private repositories, e.g.: + +.. code-block:: bash + + git clone https://github.com/ESMValGroup/ESMValTool-private.git + By default, this command will create a folder called ESMValTool containing the source code of the tool. -.. attention:: - The newly created clone of the git repository will point by default - to the master branch. To change to another branch or release execute: - git checkout origin/$BRANCH_OR_RELEASE_NAME, i.e git checkout origin/2.0.0. - GitHub also allows to download the source code in as a tar.gz or zip file. If you choose to use this option, download the compressed file and extract its contents at the desired location. @@ -67,21 +66,26 @@ contents at the desired location. Prerequisites ------------- -It is strongly recommended to use conda to manage ESMValTool dependencies. +It is recommended to use conda to manage ESMValTool dependencies. For a minimal conda installation go to https://conda.io/miniconda.html. To -simplify the process, an environment definition file is provided within the -repository (``environment.yml`` in the root folder). +simplify the installation process, an environment definition file is provided +in the repository (``environment.yml`` in the root folder). .. attention:: Some systems provides a preinstalled version of conda (e.g., via the module environment). Several users however reported problems when installing NCL with such versions. It is therefore preferable to use a local, fully user-controlled conda installation. + Using an older version of conda can also be a source of problems, so if you have conda + installed already, make sure it is up to date by running ``conda update -n base conda``. -Note that the standard conda installation has some issues with the ``csh``/``tcsh`` -login shell. If you are using such shell, do not prepend the install -location (````) to PATH in your ``.tcshrc`` file (as suggested by -the standard installation procedure which assumes ``bash``). 
Instead, add -the following line to your ``.cshrc``/``.tcshrc`` file: +To enable the ``conda`` command, please source the appropriate configuration file +from your ``~/.bashrc`` file: + +.. code-block:: bash + + source /etc/profile.d/conda.sh + +or ``~/.cshrc``/``~/.tcshrc`` file: .. code-block:: bash @@ -92,43 +96,39 @@ for those cases in which a conda installation is not possible or advisable. From now on, we will assume that the installation is going to be done through conda. -Ideally, you should want to create a conda environment for ESMValTool, so it is +Ideally, you should create a conda environment for ESMValTool, so it is independent from any other Python tools present in the system. -To create a environment using Python 3.x +Note that it is advisable to update conda to the latest version before +installing ESMValTool, using the command .. code-block:: bash - conda create --name esmvaltool python=3 - conda env update --name esmvaltool --file ESMValTool/environment.yml + conda update --name base conda -To create a environment using Python 2.x +To create an environment, go to the directory containing the ESMValTool source +code (called ESMValTool if you did not choose a different name) and run .. code-block:: bash - conda create --name esmvaltool python=2 - conda env update --name esmvaltool --file ESMValTool/environment.yml + conda env create --name esmvaltool --file environment.yml The environment is called ``esmvaltool`` by default, but it is possible to use -the option -n $(ENVIRONMENT_NAME) to define a custom name. If you are using the -``bash`` shell, you can activate the environment using the command: +the option ``--name ENVIRONMENT_NAME`` to define a custom name. You can activate +the environment using the command: .. code-block:: bash - source activate esmvaltool - -while for the ``csh``/``tcsh`` you need to use: - -.. code-block:: bash - conda activate esmvaltool It is also possible to update an existing environment from the environment -file. This can be very useful when updating an older installation of ESMValTool: +file. This may be useful when updating an older installation of ESMValTool: .. code-block:: bash - conda env update --file environment.yml --name $(ENVIRONMENT_TO_UPDATE) + conda env update --name esmvaltool --file environment.yml + +but if you run into trouble, please try creating a new environment. .. attention:: From now on, we assume that the conda environment for ESMValTool is @@ -137,27 +137,42 @@ file. This can be very useful when updating an older installation of ESMValTool: Software installation --------------------- -Once all prerequisites are fulfilled, ESMValTool 2.0 can be installed using -the following command: +Once all prerequisites are fulfilled, ESMValTool can be installed by running +the following commands in the directory containing the ESMValTool source code +(called ESMValTool if you did not choose a different name): + +.. code-block:: bash + + pip install . + +If you would like to run Julia diagnostic scripts, you will also need to +`install Julia `_ and the Julia dependencies: + +.. code-block:: bash + + julia esmvaltool/install/Julia/setup.jl + +If you would like to run R diagnostic scripts, you will also need to install the R +dependencies. Install the R dependency packages: .. code-block:: bash - python ESMValTool/setup.py install + Rscript esmvaltool/install/R/setup.R The next step is to check that the installation works properly. -To do this, run the tool with --version: +To do this, run the tool with: .. 
code-block:: bash - esmvaltool --version + esmvaltool --help -If everything was installed properly, ESMValTool should have printed the -version number at the console and exited. +If everything was installed properly, ESMValTool should have printed a +help message to the console. For a more complete installation verification, run the automated tests and confirm that no errors are reported: .. code-block:: bash - python ESMValTool/setup.py test + python setup.py test --installation diff --git a/doc/sphinx/source/user_guide2/observations.inc b/doc/sphinx/source/user_guide2/observations.inc new file mode 100644 index 0000000000..c7285a0df6 --- /dev/null +++ b/doc/sphinx/source/user_guide2/observations.inc @@ -0,0 +1,87 @@ +.. _observations: + +********************************************** +Obtaining and cmorizing observational datasets +********************************************** + +Observational and reanalysis products in the standard CF/CMOR format used in CMIP and required by the ESMValTool are available via the obs4mips (https://esgf-node.llnl.gov/projects/obs4mips/) and ana4mips (https://esgf.nccs.nasa.gov/projects/ana4mips/) projects, respectively. Their use is strongly recommended when possible. + +Other datasets not available in these archives can be obtained by the user from the respective sources and reformatted to the CF/CMOR standard using the cmorizers included in the ESMValTool. The cmorizers are dataset-specific scripts that can be run once to generate a local pool of observational datasets for usage with the ESMValTool. The necessary information to download and process the data is provided in the header of each cmorizing script. These scripts also serve as templates to create new cmorizers for datasets not yet included. Note that datasets cmorized for ESMValTool v1 may not work with v2, due to the much stronger constraints on metadata set by the Iris library. + +To cmorize one or more datasets, run: + +.. code-block:: bash + + cmorize_obs -c [CONFIG_FILE] -o [DATASET_LIST] + +The path to the raw data to be cmorized must be specified in the CONFIG_FILE as RAWOBS. Within this path, the data are expected to be organized in subdirectories corresponding to the data tier: Tier2 for freely-available datasets (other than obs4mips and ana4mips) and Tier3 for restricted datasets (i.e., datasets which require registration to be retrieved or are provided upon request to the respective contact or PI). The cmorization follows the CMIP5 CMOR tables. The resulting output is saved in the output_dir, again following the Tier structure. The output file names follow the definition given in ``config-developer.yml`` for the ``OBS`` project: ``OBS_[dataset]_[type]_[version]_[mip]_[short_name]_YYYYMM_YYYYMM.nc``, where ``type`` may be ``sat`` (satellite data), ``reanaly`` (reanalysis data), ``ground`` (ground observations), ``clim`` (derived climatologies), or ``campaign`` (aircraft campaign). + + +At the moment, cmorize_obs supports Python and NCL scripts. + +A list of the datasets for which a cmorizer is available is provided in the following table. + ..
tabularcolumns:: |p{3cm}|p{6cm}|p{3cm}|p{3cm}| + ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| Dataset | Variables (MIP) | Tier | Script language | ++==============================+======================================================================================================+======+=================+ +| AURA-TES | tro3 (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-SATELLITE-SOIL-MOISTURE | sm (Lmon), smStderr (Lmon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-XCH4 | xch4 (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CDS-XCO2 | xco2 (Amon) | 3 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CERES-SYN1deg | rlds, rldscs, rlus, rluscs, rlut, rlutcs, rsds, rsdscs, rsus, rsuscs, rsut, rsutcs (3hr) | 3 | NCL | +| | rlds, rldscs, rlus, rlut, rlutcs, rsds, rsdt, rsus, rsut, rsutcs (Amon) | | | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CRU | tas, pr (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ERA-Interim | clivi, clt, clwvi, hfds, hur, hus, pr, prw, ps, psl, ta, tas, tauu, tauv, ts, ua, va, wap, zg (Amon) | 3 | NCL | +| | pr, psl, tas, tasmin, tasmax, zg (day), sftlf (fx), tos (Omon) | | | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-AEROSOL | abs550aer, od550aer, od550aerStderr, od550lt1aer, od870aer, od870aerStderr (aero) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-CLOUD | clivi, clt, cltStderr, clwvi (Amon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-FIRE | burntArea (Lmon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-LANDCOVER | baresoilFrac, cropFrac, grassFrac, shrubFrac, treeFrac (Lmon) | 2 | NCL | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-OC | chl | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-OZONE | toz, tozStderr, tro3prof, tro3profStderr (Amon) | 2 | NCL | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| ESACCI-SOILMOISTURE          | dos, dosStderr, sm, smStderr (Lmon)                                                                 | 2    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| ESACCI-SST                   | ts, tsStderr (Amon)                                                                                  | 2    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| GHCN                         | pr (Amon)                                                                                            | 2    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| HadCRUT3                     | tas, tasa (Amon)                                                                                     | 2    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| HadCRUT4                     | tas, tasa (Amon)                                                                                     | 2    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| HadISST                      | sic (OImon), tos (Omon), ts (Amon)                                                                   | 2    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| LandFlux-EVAL                | et, etStderr (Lmon)                                                                                  | 3    | Python          |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| Landschuetzer2016            | fgco2 (Omon), spco2 (Omon), dpco2 (Omon)                                                             | 2    | Python          |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| MODIS                        | clivi, clt, clwvi, iwpStderr, lwpStderr (Amon), od550aer (aero)                                      | 3    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| MTE                          | gpp, gppStderr (Lmon)                                                                                | 3    | Python          |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| NCEP                         | hur, hus, pr, ta, tas, ua, va, wap, zg (Amon)                                                        | 2    | NCL             |
+|                              | pr, rlut, ua, va (day)                                                                               |      |                 |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| NIWA-BS                      | toz, tozStderr (Amon)                                                                                | 3    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| PATMOS-x                     | clt (Amon)                                                                                           | 2    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| UWisc                        | clwvi, lwpStderr (Amon)                                                                              | 3    | NCL             |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
+| WOA                          | no3, o2, po4, si (Oyr), so, thetao (Omon)                                                            | 2    | Python          |
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+
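+
+For example, to run only the CRU cmorizer from the table above (assuming the
+raw CRU data have been placed under RAWOBS/Tier2/CRU, as described above),
+the invocation could look like:
+
+.. code-block:: bash
+
+    cmorize_obs -c config-user.yml -o CRU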
diff --git a/doc/sphinx/source/user_guide2/outputdata.inc b/doc/sphinx/source/user_guide2/outputdata.inc
new file mode 100644
index 0000000000..4acd1eb531
--- /dev/null
+++ b/doc/sphinx/source/user_guide2/outputdata.inc
@@ -0,0 +1,22 @@
+.. _outputdata:
+
+*****************
+ESMValTool output
+*****************
+
+Preprocessed datasets
+=====================
+preproc/
+
+Diagnostic output
+=================
+work/
+
+Plots
+=====
+plots/
+
+Log files
+=========
+run/
+
diff --git a/doc/sphinx/source/user_guide2/preprocessor.inc b/doc/sphinx/source/user_guide2/preprocessor.inc
new file mode 100644
index 0000000000..12965d47df
--- /dev/null
+++ b/doc/sphinx/source/user_guide2/preprocessor.inc
@@ -0,0 +1,400 @@
+.. _preprocessor:
+
+***********************
+ESMValTool Preprocessor
+***********************
+The ESMValTool preprocessor can be used to perform all types of climate data pre-processing needed before indices or diagnostics can be calculated. It is a base component for many other diagnostics and metrics shown on this portal. It can be applied to tailor the climate model data to the needs of the user for their own calculations.
+
+Features of the ESMValTool climate data pre-processor are:
+
+* Regridding
+* Geographical area selection
+* Aggregation of data
+* Provenance tracking of the calculations
+* Model statistics
+* Multi-model mean
+* and many more
+
+Variable derivation
+===================
+Documentation of _derive.py
+
+
+Time manipulation
+=================
+The _time.py module contains the following preprocessor functions:
+
+* extract_time: Extract a time range from a cube.
+* extract_season: Extract only the times that occur within a specific season.
+* extract_month: Extract only the times that occur within a specific month.
+* time_average: Take the weighted average over the time dimension.
+* seasonal_mean: Produces a mean for each season (DJF, MAM, JJA, SON).
+* annual_mean: Produces an annual or decadal mean.
+* regrid_time: Aligns the time axis of each dataset to have common time points and calendars.
+
+1. extract_time
+---------------
+
+This function subsets a dataset between two points in time. It removes all
+times in the dataset before the first time point and after the last time
+point. The required arguments are relatively self-explanatory:
+
+* start_year
+* start_month
+* start_day
+* end_year
+* end_month
+* end_day
+
+These start and end points are set using the dataset's native calendar.
+All six arguments should be given as integers - a named month string
+will not be accepted.
+
+See also :func:`esmvaltool.preprocessor.extract_time`.
+
+
+2. extract_season
+-----------------
+
+Extract only the times that occur within a specific season.
+
+This function only has one argument: `season`, the named season to
+extract, i.e. DJF, MAM, JJA or SON.
+
+Note that this function does not change the time resolution. If your original
+data is in monthly time resolution, then this function will return three
+monthly datapoints per year.
+
+If you want the seasonal average, then this function needs to be combined with
+the seasonal_mean function, below.
+
+See also :func:`esmvaltool.preprocessor.extract_season`.
+
+
+3. extract_month
+----------------
+
+This function extracts the times that occur within a specific month.
+It only has one argument: `month`. This value should be an integer
+between 1 and 12, as a named month string will not be accepted.
+
+See also :func:`esmvaltool.preprocessor.extract_month`.
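+
+As an illustration, the extraction functions above can be combined in a
+preprocessor like any other preprocessing step; the following is only a
+sketch, with arbitrary example values:
+
+.. code-block:: bash
+
+    preprocessors:
+      my_time_preprocessor:
+        extract_time:
+          start_year: 2000
+          start_month: 1
+          start_day: 1
+          end_year: 2009
+          end_month: 12
+          end_day: 31
+        extract_season:
+          season: DJF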
+
+
+4. time_average
+---------------
+
+This function takes the weighted average over the time dimension. It
+requires no arguments and removes the time dimension of the cube.
+
+See also :func:`esmvaltool.preprocessor.time_average`.
+
+
+5. seasonal_mean
+----------------
+
+This function produces a seasonal mean for each season (DJF, MAM, JJA, SON).
+Note that this function will not check for missing time points. For instance,
+if you are looking at the DJF field, but your dataset starts on January 1st,
+the first DJF field will only contain data from January and February.
+
+We recommend using extract_time to start the dataset from the following
+December and so remove such biased initial data points.
+
+See also :func:`esmvaltool.preprocessor.seasonal_mean`.
+
+
+6. annual_mean
+--------------
+
+This function produces an annual or a decadal mean. The only argument is the
+decadal boolean switch. When this switch is set to true, this function
+will output the decadal averages.
+
+See also :func:`esmvaltool.preprocessor.annual_mean`.
+
+
+7. regrid_time
+--------------
+
+This function aligns the time points of each component dataset so that the
+iris cubes of different datasets can be subtracted. The operation makes the
+datasets' time points common and sets common calendars; it also resets the
+time bounds and auxiliary coordinates to reflect the artificially shifted
+time points. The current implementation works for monthly and daily data;
+the frequency is set automatically from the variable's CMOR table, unless a
+custom frequency is set manually by the user in the recipe.
+
+
+Area manipulation
+=================
+The _area.py module contains the following preprocessor functions:
+
+* extract_region: Extract a region from a cube based on lat/lon corners.
+* zonal_means: Calculates the zonal or meridional means.
+* average_region: Calculates the average value over a region.
+* extract_named_regions: Extract a specific region from the region coordinate.
+
+
+1. extract_region
+-----------------
+
+This function masks data outside of the requested rectangular region. The
+boundaries of the region are provided as latitude and longitude coordinates
+in the arguments:
+
+* start_longitude
+* end_longitude
+* start_latitude
+* end_latitude
+
+Note that this function can only be used to extract a rectangular region.
+
+See also :func:`esmvaltool.preprocessor.extract_region`.
+
+
+2. zonal_means
+--------------
+
+The function calculates the zonal or meridional means. While this function is
+named `zonal_mean`, it can be used to apply several different operations in
+a zonal or meridional direction.
+This function takes two arguments:
+
+* coordinate: Which direction to apply the operation: latitude or longitude
+* mean_type: Which operation to apply: mean, std_dev, variance, median, min or max
+
+See also :func:`esmvaltool.preprocessor.zonal_means`.
+
+
+3. average_region
+-----------------
+
+This function calculates the average value over a region, weighted by the
+cell areas of the region.
+
+This function takes three arguments:
+
+* coord1: the name of the coordinate in the first direction.
+* coord2: the name of the coordinate in the second direction.
+* operator: the name of the operation to apply (default: mean).
+
+While this function is named `average_region`, it can be used to apply several
+different operations in the horizontal plane: mean, standard deviation,
+median, variance, minimum and maximum.
+
+Note that this function is applied over the entire dataset. If only a specific
+region, depth layer or time period is required, then those parts need to be
+removed using other preprocessor operations in advance, as in the sketch
+below.
+
+See also :func:`esmvaltool.preprocessor.average_region`.
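+
+For instance, a preprocessor that first extracts a region and then computes
+its area-weighted mean could be sketched as follows (names and values are
+arbitrary):
+
+.. code-block:: bash
+
+    preprocessors:
+      my_area_preprocessor:
+        extract_region:
+          start_longitude: 40.
+          end_longitude: 100.
+          start_latitude: -10.
+          end_latitude: 10.
+        average_region:
+          coord1: latitude
+          coord2: longitude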
+
+
+4. extract_named_regions
+------------------------
+
+This function extracts a specific named region from the data. This function
+takes the following argument: `regions`, which is either a string or a list
+of strings of named regions. Note that the dataset must have a `region`
+coordinate which includes a list of strings as values. This function then
+matches the named regions against the requested string.
+
+See also :func:`esmvaltool.preprocessor.extract_named_regions`.
+
+
+Volume manipulation
+===================
+The _volume.py module contains the following preprocessor functions:
+
+* extract_volume: Extract a specific depth range from a cube.
+* average_volume: Calculate the volume-weighted average.
+* depth_integration: Integrate over the depth dimension.
+* extract_transect: Extract data along a line of constant latitude or longitude.
+* extract_trajectory: Extract data along a specified trajectory.
+
+
+1. extract_volume
+-----------------
+
+Extract a specific range in the z-direction from a cube. This function
+takes two arguments, a minimum and a maximum (`z_min` and `z_max`,
+respectively) in the z-direction.
+
+Note that this requires the requested z-coordinate range to have the
+same sign as the iris cube, i.e. if the cube's z-coordinate is
+negative, then z_min and z_max need to be negative numbers.
+
+See also :func:`esmvaltool.preprocessor.extract_volume`.
+
+
+2. average_volume
+-----------------
+
+This function calculates the volume-weighted average across three dimensions,
+but maintains the time dimension. The following arguments are required:
+
+* coord1: the name of the coordinate in the first direction.
+* coord2: the name of the coordinate in the second direction.
+
+No depth coordinate is required, as this is determined by iris. This
+function works best when the fx_files provide the cell volume.
+
+See also :func:`esmvaltool.preprocessor.average_volume`.
+
+
+3. depth_integration
+--------------------
+
+This function integrates over the depth dimension. It does a weighted sum
+along the z-coordinate and removes the z-direction from the output cube.
+This preprocessor takes no arguments.
+
+See also :func:`esmvaltool.preprocessor.depth_integration`.
+
+
+4. extract_transect
+-------------------
+
+This function extracts data along a line of constant latitude or longitude.
+It takes two arguments, although only one is strictly required:
+`latitude` and `longitude`. One of these arguments needs to be set to a
+float, and the other can then be either ignored or set to a minimum or
+maximum value.
+For example, if we set latitude to 0 N and leave longitude blank, this will
+produce a cube along the equator. On the other hand, if we set latitude to
+0 N and longitude to `[40., 100.]`, this will produce a transect of the
+equator in the Indian Ocean.
+
+See also :func:`esmvaltool.preprocessor.extract_transect`.
+
+
+5. extract_trajectory
+---------------------
+
+This function extracts data along a specified trajectory.
+The three arguments are `latitudes`, `longitudes` and `number_points`; the
+first two are the coordinates of the trajectory.
+
+If two points are provided, the `number_points` argument is used to set
+the number of places to extract between the two end points.
+
+If more than two points are provided, then
+extract_trajectory will produce a cube which has interpolated the data
+of the cube to those points, and `number_points` is not needed.
+
+Note that this function uses the expensive interpolate method, but it may be
+necessary for irregular grids.
+
+See also :func:`esmvaltool.preprocessor.extract_trajectory`.
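+
+As a sketch (with arbitrary values, and assuming a positive, downward
+z-coordinate), a preprocessor using the volume functions above to keep the
+top 100 m of the water column and integrate it over depth could look like:
+
+.. code-block:: bash
+
+    preprocessors:
+      my_volume_preprocessor:
+        extract_volume:
+          z_min: 0.
+          z_max: 100.
+        depth_integration: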
+
+
+CMORization and dataset-specific fixes
+======================================
+Documentation of _reformat.py, check.py and fix.py
+
+Vertical interpolation
+======================
+Documentation of _regrid.py (part 1)
+
+Land/Sea/Ice Masking
+====================
+Documentation of _mask.py (part 1)
+
+Certain metrics and diagnostics need to be computed on restricted regions of the globe; ESMValTool supports subsetting the input data on land mass, oceans and seas, and ice. This is achieved by masking the model data and keeping only the values associated with grid points that correspond to, e.g., land mass
+or oceans and seas; masking is done either by using standard mask files that have the same grid resolution as the model data (these files are usually produced
+at the same time as the model data and are called fx files) or, in the absence of these files, by using Natural Earth masks. Natural Earth masks, even if they are not model-specific, represent a good approximation, since their grid resolution is almost always much higher than the model data and they are constantly updated with changing
+geographical features.
+
+In ESMValTool v2, land/sea/ice masking can be done in two places: in the preprocessor, to apply a mask to the data before any subsequent preprocessing step and before
+running the diagnostic, or in the diagnostic phase itself. We present both implementations below.
+
+To mask out seas in the preprocessor step, simply add `mask_landsea:` as a preprocessor step to the preprocessor of your choice in the recipe, for example:
+
+.. code-block:: bash
+
+    preprocessors:
+      my_masking_preprocessor:
+        mask_landsea:
+          mask_out: sea
+
+The tool will retrieve the corresponding `fx: sftof` type of mask for each of the used variables and apply the mask so that only the land mass points are
+kept in the data after applying the mask; conversely, it will retrieve the `fx: sftlf` files when land needs to be masked out.
+`mask_out` accepts land or sea as values. If the corresponding fx file is not found (some models are missing these
+types of files; observational data are missing them altogether), then the tool attempts to mask using Natural Earth mask files (which are vectorized rasters).
+Note that the resolutions for the Natural Earth masks are much higher than any usual CMIP model: 10m for land and 50m for ocean masks.
+
+Note that for masking out ice the preprocessor uses a different function, so that both land and sea or ice can be masked out without
+losing generality. To mask out ice, add the preprocessing step much as above:
+
+.. code-block:: bash
+
+    preprocessors:
+      my_masking_preprocessor:
+        mask_landseaice:
+          mask_out: ice
+
+To keep only the ice, one needs to mask out landsea, so use that as the value for mask_out (see the example below). As in the case of mask_landsea, the tool will automatically
+retrieve the `fx: sftgif` file corresponding to the used variable and extract the ice mask from it.
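+
+For example, to keep only the ice:
+
+.. code-block:: bash
+
+    preprocessors:
+      my_masking_preprocessor:
+        mask_landseaice:
+          mask_out: landsea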
+
+At the core of the land/sea/ice masking in the preprocessor are the mask files (whether fx type or Natural Earth type files); these files (bar Natural Earth)
+can also be retrieved and used in the diagnostic phase, or only there. By specifying the `fx_files:` key for the variable in the diagnostic section of the recipe and populating it
+with a list of desired files, e.g.:
+
+.. code-block:: bash
+
+    variables:
+      ta:
+        preprocessor: my_masking_preprocessor
+        fx_files: [sftlf, sftof, sftgif, areacello, areacella]
+
+Such a recipe will automatically retrieve all the `[sftlf, sftof, sftgif, areacello, areacella]`-type fx files for each of the variables they are needed for;
+then, in the diagnostic phase, these mask files will be available for the developer to use as needed. The `fx_files` attribute of the `variable`
+nested dictionary that gets passed to the diagnostic is, in turn, a dictionary of its own, and its members can be accessed in the diagnostic
+through a simple loop over the ``config['input_data']`` items, e.g.:
+
+.. code-block:: python
+
+    for filename, attributes in config['input_data'].items():
+        sftlf_file = attributes['fx_files']['sftlf']
+        areacello_file = attributes['fx_files']['areacello']
+
+
+Horizontal regridding
+=====================
+Documentation of _regrid.py (part 2)
+
+Masking of missing values
+=========================
+Documentation of _mask.py (part 2)
+
+Multi-model statistics
+======================
+Documentation of _multimodel.py
+
+Time-area statistics
+====================
+Documentation of _area_pp.py and _volume_pp.py
+
+Information on maximum memory required
+======================================
+In the most general case, we can set upper limits on the maximum memory the analysis will require:
+
+* Ms = (R + N) x F_eff - F_eff : maximum memory when no multi-model analysis is performed;
+* Mm = (2R + N) x F_eff - 2F_eff : maximum memory when multi-model analysis is performed;
+
+where
+
+* Ms: maximum memory for the non-multi-model module;
+* Mm: maximum memory for the multi-model module;
+* R: computational efficiency of the module; R is typically 2-3;
+* N: number of datasets;
+* F_eff: average size of the data per dataset, where F_eff = e x f x F. Here e is the factor that describes how lazy the data is (e = 1 for fully realized data) and f describes how much the data was shrunk by the immediately preceding module, e.g. time extraction, area selection or level extraction. Note that for fix_data, f relates only to time extraction, so if the data is exact in time (no time selection) f = 1 for fix_data.
+
+So for cases when we deal with a lot of datasets (so that R + N ≈ N) and fully realized data, assuming an average size of 1.5GB for 10 years of 3D netCDF data, N datasets will require
+
+Ms = 1.5 x (N - 1) GB
+Mm = 1.5 x (N - 2) GB
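+
+As a quick worked example under these assumptions: for N = 20 fully realized
+datasets (e = 1, f = 1) of 1.5 GB each, the formulas above give
+
+Ms = 1.5 x (20 - 1) = 28.5 GB
+Mm = 1.5 x (20 - 2) = 27 GB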
diff --git a/doc/sphinx/source/user_guide2/recipe.inc b/doc/sphinx/source/user_guide2/recipe.inc
new file mode 100644
index 0000000000..23311999e2
--- /dev/null
+++ b/doc/sphinx/source/user_guide2/recipe.inc
@@ -0,0 +1,5 @@
+.. _recipe:
+
+*****************
+ESMValTool recipe
+*****************
diff --git a/doc/sphinx/source/user_guide2/running.inc b/doc/sphinx/source/user_guide2/running.inc
new file mode 100644
index 0000000000..3f63f0dd8f
--- /dev/null
+++ b/doc/sphinx/source/user_guide2/running.inc
@@ -0,0 +1,30 @@
+.. _running:
+
+**********************
+Running the ESMValTool
+**********************
+
+To run ESMValTool, use the command
+
+.. code:: bash
+
+    esmvaltool -c /path/to/config-user.yml examples/recipe_python.yml
+
+This will run the example recipe_python.yml. The path to the recipe
+can either be the path to a recipe file, or a path relative to the
+esmvaltool/recipes directory of your installed ESMValTool. See the chapter
+`User config file`_ for an explanation of how to create your own
+config-user.yml file.
+
+To get help on additional commands, please use
+
+.. code:: bash
+
+    esmvaltool --help
+
+
+
+Available diagnostics and metrics
+=================================
+
+See :ref:`recipes` for a description of all available recipes.
diff --git a/environment.yml b/environment.yml
index c9d7fde8bd..cfb41cda75
--- a/environment.yml
+++ b/environment.yml
@@ -1,21 +1,26 @@
 ---
-
 name: esmvaltool
 channels:
   - conda-forge

 dependencies:
   # Python packages that cannot be installed from PyPI:
-  # TODO when switching to iris 2.x, switch straight to 2.1+
-  - basemap
-  - iris=1.13
+  - esmpy
+  - iris>=2.2
+  - matplotlib<3  # Can be installed from PyPI, but is a dependency of iris and should be pinned.
   - python-stratify
-  # Multi language support:
-  - ncl
-  - ncurses=6.1=hfc679d8_1
-  # TODO: add R, julia
+  - xarray>=0.12.0  # Can be installed from PyPI, but here to get a consistent set of dependencies with iris.
+  # Non-Python dependencies
+  - graphviz
   - cdo
+  - imagemagick
   - nco
-  - pip:
-    # TODO: include dummydata in esmvaltool repository and move to setup.py
-    # dummydata fetched from github as a zip file, contains latest version
-    - https://github.com/ESMValGroup/dummydata/archive/dev.zip
+  # Multi language support:
+  - python>=3.6
+  - libunwind  # Needed for Python3.7+
+  - ncl>=6.5.0
+  - r-base
+  - r-curl  # Dependency of lintr, but fails to compile because it cannot find libcurl installed from conda.
+  - r-udunits2  # Fails to compile because it cannot find udunits2 installed from conda.
+ # - julia>=1.0.0 # The Julia package on conda is apparently broken diff --git a/esmvaltool/__init__.py b/esmvaltool/__init__.py index c2a4eae5e5..db052f292e 100644 --- a/esmvaltool/__init__.py +++ b/esmvaltool/__init__.py @@ -8,5 +8,5 @@ def get_script_root(): - """ Return the location of the ESMValTool installation.""" + """Return the location of the ESMValTool installation.""" return os.path.abspath(os.path.dirname(__file__)) diff --git a/esmvaltool/_config.py b/esmvaltool/_config.py index 757abec7ec..221c06d026 100644 --- a/esmvaltool/_config.py +++ b/esmvaltool/_config.py @@ -4,14 +4,23 @@ import logging.config import os import time -import six +from distutils.version import LooseVersion +import iris +import six import yaml + from .cmor.table import read_cmor_tables logger = logging.getLogger(__name__) CFG = {} +CFG_USER = {} + + +def use_legacy_iris(): + """Return True if legacy iris is used.""" + return LooseVersion(iris.__version__) < LooseVersion("2.0.0") def read_config_user_file(config_file, recipe_name): @@ -28,26 +37,35 @@ def read_config_user_file(config_file, recipe_name): 'max_data_filesize': 100, 'output_file_type': 'ps', 'output_dir': './output_dir', + 'auxiliary_data_dir': './auxiliary_data', 'save_intermediary_cubes': False, 'remove_preproc_dir': False, 'max_parallel_tasks': 1, 'run_diagnostic': True, + 'profile_diagnostic': False, 'config_developer_file': None, 'drs': {}, } for key in defaults: if key not in cfg: - logger.warning("No %s specification in config file, " - "defaulting to %s", key, defaults[key]) + logger.info( + "No %s specification in config file, " + "defaulting to %s", key, defaults[key]) cfg[key] = defaults[key] cfg['output_dir'] = _normalize_path(cfg['output_dir']) + cfg['auxiliary_data_dir'] = _normalize_path(cfg['auxiliary_data_dir']) + cfg['config_developer_file'] = _normalize_path( cfg['config_developer_file']) for key in cfg['rootpath']: - cfg['rootpath'][key] = _normalize_path(cfg['rootpath'][key]) + root = cfg['rootpath'][key] + if isinstance(root, six.string_types): + cfg['rootpath'][key] = [_normalize_path(root)] + else: + cfg['rootpath'][key] = [_normalize_path(path) for path in root] # insert a directory date_time_recipe_usertag in the output paths now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S") @@ -60,6 +78,11 @@ def read_config_user_file(config_file, recipe_name): cfg['plot_dir'] = os.path.join(cfg['output_dir'], 'plots') cfg['run_dir'] = os.path.join(cfg['output_dir'], 'run') + # Save user configuration in global variable + for key, value in six.iteritems(cfg): + CFG_USER[key] = value + + # Read developer configuration file cfg_developer = read_config_developer_file(cfg['config_developer_file']) for key, value in six.iteritems(cfg_developer): CFG[key] = value @@ -68,11 +91,15 @@ def read_config_user_file(config_file, recipe_name): return cfg +def get_config_user_file(): + """Return user configuration dictionary.""" + return CFG_USER + + def _normalize_path(path): - """ - Normalize paths + """Normalize paths. - Expand ~ character and environment variables and convert path to absolute + Expand ~ character and environment variables and convert path to absolute. 
    Parameters
    ----------
@@ -83,6 +110,7 @@
     -------
     str:
         Normalized path
+
     """
     if path is None:
         return None
@@ -104,7 +132,7 @@ def read_config_developer_file(cfg_file=None):


 def configure_logging(cfg_file=None, output=None, console_log_level=None):
-    """Set up logging"""
+    """Set up logging."""
     if cfg_file is None:
         cfg_file = os.path.join(
             os.path.dirname(__file__), 'config-logging.yml')
@@ -128,6 +156,7 @@ def configure_logging(cfg_file=None, output=None, console_log_level=None):

     logging.config.dictConfig(cfg)
     logging.Formatter.converter = time.gmtime
+    logging.captureWarnings(True)

     return log_files

@@ -138,13 +167,52 @@
     return CFG[project]


-def cmip5_dataset2inst(dataset):
-    """Return the institute given the dataset name in CMIP5."""
-    logger.debug("Retrieving institute for CMIP5 dataset %s", dataset)
-    return CFG['CMIP5']['institute'][dataset]
+def get_institutes(variable):
+    """Return the institutes given the dataset name in CMIP5."""
+    dataset = variable['dataset']
+    project = variable['project']
+    logger.debug("Retrieving institutes for dataset %s", dataset)
+    return CFG.get(project, {}).get('institutes', {}).get(dataset, [])
+
+
+def replace_mip_fx(fx_file):
+    """Replace MIP so as to retrieve correct fx files."""
+    default_mip = 'Amon'
+    if fx_file not in CFG['CMIP5']['fx_mip_change']:
+        logger.warning(
+            'mip for fx variable %s is not specified in '
+            'config_developer.yml, using default (%s)', fx_file, default_mip)
+    new_mip = CFG['CMIP5']['fx_mip_change'].get(fx_file, default_mip)
+    logger.debug("Switching mip for fx file finding to %s", new_mip)
+    return new_mip
+
+
+TAGS_CONFIG_FILE = os.path.join(
+    os.path.dirname(__file__), 'config-references.yml')
+
+
+def _load_tags(filename=TAGS_CONFIG_FILE):
+    """Load the reference tags used for provenance recording."""
+    logger.debug("Loading tags from %s", filename)
+    with open(filename) as file:
+        return yaml.safe_load(file)
+
+
+TAGS = _load_tags()
+
+
+def get_tag_value(section, tag):
+    """Retrieve the value of a tag."""
+    if section not in TAGS:
+        raise ValueError("Section '{}' does not exist in {}".format(
+            section, TAGS_CONFIG_FILE))
+    if tag not in TAGS[section]:
+        raise ValueError(
+            "Tag '{}' does not exist in section '{}' of {}".format(
+                tag, section, TAGS_CONFIG_FILE))
+    return TAGS[section][tag]


-def cmip5_mip2realm_freq(mip):
-    """Return realm and frequency given the mip in CMIP5."""
-    logger.debug("Retrieving realm and frequency for CMIP5 mip %s", mip)
-    return CFG['CMIP5']['realm_frequency'][mip]
+def replace_tags(section, tags):
+    """Replace a list of tags with their values."""
+    return tuple(get_tag_value(section, tag) for tag in tags)
diff --git a/esmvaltool/_data_finder.py b/esmvaltool/_data_finder.py
index 563da8c2c3..ba6e21d532 100644
--- a/esmvaltool/_data_finder.py
+++ b/esmvaltool/_data_finder.py
@@ -11,21 +11,22 @@

 import six

-from ._config import (cmip5_dataset2inst, cmip5_mip2realm_freq,
-                      get_project_config)
+from ._config import get_project_config, replace_mip_fx
+from .cmor.table import CMOR_TABLES

 logger = logging.getLogger(__name__)


-def find_files(dirname, filename):
-    """Find files matching filename."""
-    logger.debug("Looking for files matching %s in %s", filename, dirname)
+def find_files(dirnames, filenames):
+    """Find files matching filenames in dirnames."""
+    logger.debug("Looking for files matching %s in %s", filenames, dirnames)
     result = []
-    for path, _, files in os.walk(dirname, followlinks=True):
-        files = fnmatch.filter(files, filename)
- if files: - result.extend(os.path.join(path, f) for f in files) + for dirname in dirnames: + for path, _, files in os.walk(dirname, followlinks=True): + for filename in filenames: + matches = fnmatch.filter(files, filename) + result.extend(os.path.join(path, f) for f in matches) return result @@ -94,56 +95,43 @@ def select_files(filenames, start_year, end_year): return selection -def replace_tags(path, variable, j=None, i=None): +def _replace_tags(path, variable, fx_var=None): """Replace tags in the config-developer's file with actual values.""" path = path.strip('/') tlist = re.findall(r'\[([^]]*)\]', path) + paths = [path] for tag in tlist: original_tag = tag - tag, lower, upper = _get_caps_options(tag) - - if tag == 'var': - replacewith = variable['short_name'] - elif tag == 'fx_var': - replacewith = variable['fx_files'][i] - elif tag == 'field': - replacewith = variable['field'] - elif tag in ('institute', 'freq', 'realm'): - if tag in variable: - replacewith = str(variable[tag]) - else: - if tag == 'institute': - replacewith = cmip5_dataset2inst(variable['dataset']) - elif tag == 'freq': - replacewith = cmip5_mip2realm_freq(variable['mip'])[1] - elif tag == 'realm': - replacewith = cmip5_mip2realm_freq(variable['mip'])[0] + tag, _, _ = _get_caps_options(tag) + + if tag == 'fx_var': + replacewith = fx_var elif tag == 'latestversion': # handled separately later continue - elif tag == 'tier': - replacewith = ''.join(('Tier', str(variable['tier']))) - elif tag == 'dataset': - replacewith = variable['dataset'] - else: # all other cases use the corresponding dataset dictionary key - if tag in variable: - replacewith = str(variable[tag]) - else: - raise KeyError( - "Dataset key {} must be specified for project {}, check " - "your recipe entry".format(tag, variable['project'])) - - if not isinstance(replacewith, list): - path = path.replace('[' + original_tag + ']', - _apply_caps(replacewith, lower, upper)) + elif tag in variable: + replacewith = variable[tag] else: - path = [ - path.replace('[' + original_tag + ']', - _apply_caps(dkrz_place, lower, upper)) - for dkrz_place in replacewith - ][j] - return path + raise KeyError("Dataset key {} must be specified for {}, check " + "your recipe entry".format(tag, variable)) + + paths = _replace_tag(paths, original_tag, replacewith) + + return paths + + +def _replace_tag(paths, tag, replacewith): + """Replace tag by replacewith in paths.""" + _, lower, upper = _get_caps_options(tag) + result = [] + if isinstance(replacewith, (list, tuple)): + for item in replacewith: + result.extend(_replace_tag(paths, tag, item)) + else: + text = _apply_caps(str(replacewith), lower, upper) + result.extend(p.replace('[' + tag + ']', text) for p in paths) + return result def _get_caps_options(tag): @@ -161,288 +149,149 @@ def _get_caps_options(tag): def _apply_caps(original, lower, upper): if lower: return original.lower() - elif upper: + if upper: return original.upper() return original -def get_input_dirname_template(variable, rootpath, drs): - """Return a template of the full path to input directory.""" - project = variable['project'] +def _resolve_latestversion(dirname_template): + """Resolve the 'latestversion' tag.""" + if '[latestversion]' not in dirname_template: + return dirname_template - cfg = get_project_config(project) - - # Set the rootpath - if project in rootpath: - dir1 = rootpath[project] - elif 'default' in rootpath: - dir1 = rootpath['default'] - else: - raise KeyError( - 'default rootpath must be specified in config-user file') - - # Set the 
drs - _drs = drs.get(project, 'default') - input_dir = cfg['input_dir'] - if isinstance(input_dir, six.string_types): - dir2 = replace_tags(input_dir, variable) - elif _drs in input_dir: - try: - insts = cmip5_dataset2inst(variable['dataset']) - except KeyError as msg: - logger.debug('CMIP5 dataset2inst: %s', msg) - insts = 0 - dirs2 = [] - if isinstance(insts, list): - for j in range(len(insts)): - dir2 = replace_tags(input_dir[_drs], variable, j) - dirs2.append(dir2) - else: - dir2 = replace_tags(input_dir[_drs], variable) - dirs2.append(dir2) - else: - raise KeyError( - 'drs {} for {} project not specified in config-developer file' - .format(_drs, project)) - - dirname_template = [os.path.join(dir1, dir_2) for dir_2 in dirs2] + # Find latest version + part1, part2 = dirname_template.split('[latestversion]') + part2 = part2.lstrip(os.sep) + if os.path.exists(part1): + versions = os.listdir(part1) + versions.sort(reverse=True) + for version in ['latest'] + versions: + dirname = os.path.join(part1, version, part2) + if os.path.isdir(dirname): + return dirname return dirname_template -def get_input_fx_dirname_template(variable, rootpath, drs): - """Return a template of the full path to input directory.""" - project = variable['project'] - +def _select_drs(input_type, drs, project): + """Select the directory structure of input path.""" cfg = get_project_config(project) + input_path = cfg[input_type] + if isinstance(input_path, six.string_types): + return input_path - dirs = [] - # Set the rootpath - if project in rootpath: - dir1 = rootpath[project] - elif 'default' in rootpath: - dir1 = rootpath['default'] - else: - raise KeyError( - 'default rootpath must be specified in config-user file') - - # Set the drs - _drs = drs.get(project, 'default') - input_dir = cfg['fx_dir'] - for fx_ind in range(len(variable['fx_files'])): - if isinstance(input_dir, six.string_types): - dir2 = replace_tags(input_dir, variable, i=fx_ind) - elif _drs in input_dir: - dir2 = replace_tags(input_dir[_drs], variable, i=fx_ind) - else: - raise KeyError( - 'drs {} for {} project not specified in config-developer file' - .format(_drs, project)) - - # Replace seaIce realm by ocean realm - path_elements = dir2.split(os.path.sep) - if "seaIce" in path_elements: - old_dir = dir2 - dir2 = dir2.replace("seaIce", "ocean") - logger.info("Replaced path to fx files %s by %s for seaIce" - "diagnostics", old_dir, dir2) + structure = drs.get(project, 'default') + if structure in input_path: + return input_path[structure] - dirname_template = os.path.join(dir1, dir2) - dirs.append(dirname_template) + raise KeyError( + 'drs {} for {} project not specified in config-developer file'.format( + structure, project)) - return dirs +def get_rootpath(rootpath, project): + """Select the rootpath.""" + if project in rootpath: + return rootpath[project] + if 'default' in rootpath: + return rootpath['default'] + raise KeyError('default rootpath must be specified in config-user file') -def get_input_filename(variable, rootpath, drs): - """Simulate a path to input file. - This function should match the function get_input_filelist below. 
-    """
-    dirname_templates = get_input_dirname_template(variable, rootpath, drs)
-    for dirname_template in dirname_templates:
-        # Simulate a latest version if required
-        if '[latestversion]' in dirname_template:
-            part1, part2 = dirname_template.split('[latestversion]')
-            dirname = os.path.join(part1, 'latestversion', part2)
-        else:
-            dirname = dirname_template
+def _find_input_dirs(variable, rootpath, drs, fx_var=None):
+    """Return the full paths to input directories."""
+    project = variable['project']

-        # Set the filename
-        filename = _get_filename(variable, drs)
-        if filename.endswith('*'):
-            filename = filename.rstrip(
-                '*') + "{start_year}01-{end_year}12.nc".format(**variable)
+    root = get_rootpath(rootpath, project)
+    input_type = 'input_{}dir'.format('fx_' if fx_var else '')
+    path_template = _select_drs(input_type, drs, project)
+
+    dirnames = []
+    for dirname_template in _replace_tags(path_template, variable, fx_var):
+        for base_path in root:
+            dirname = os.path.join(base_path, dirname_template)
+            dirname = _resolve_latestversion(dirname)
+            if os.path.exists(dirname):
+                logger.debug("Found %s", dirname)
+                dirnames.append(dirname)
+            else:
+                logger.debug("Skipping non-existent %s", dirname)

-        # Full path to files
-        return os.path.join(dirname, filename)
+    return dirnames


-def get_input_fx_filename(variable, rootpath, drs):
-    """Simulate a path to input file.
+def _get_filenames_glob(variable, drs, fx_var=None):
+    """Return patterns that can be used to look for input files."""
+    input_type = 'input_{}file'.format('fx_' if fx_var else '')
+    path_template = _select_drs(input_type, drs, variable['project'])
+    filenames_glob = _replace_tags(path_template, variable, fx_var)
+    return filenames_glob

     This function should match the function get_input_filelist below.
- """ - files = [] - dirname_templates = get_input_fx_dirname_template(variable, rootpath, drs) - for j, dirname_template in zip( - range(len(dirname_templates)), dirname_templates): - # Simulate a latest version if required - if '[latestversion]' in dirname_template: - part1, part2 = dirname_template.split('[latestversion]') - dirname = os.path.join(part1, 'latestversion', part2) - else: - dirname = dirname_template - # Set the filename - filename = _get_fx_filename(variable, drs, j) +def _find_input_files(variable, rootpath, drs, fx_var=None): + logger.debug("Looking for input %sfiles for variable %s of dataset %s", + fx_var + ' fx ' if fx_var else '', variable['short_name'], + variable['dataset']) - # Full path to files - files.append(os.path.join(dirname, filename)) + input_dirs = _find_input_dirs(variable, rootpath, drs, fx_var) + filenames_glob = _get_filenames_glob(variable, drs, fx_var) + files = find_files(input_dirs, filenames_glob) return files -def _get_filename(variable, drs): - project = variable['project'] - cfg = get_project_config(project) - - input_file = cfg['input_file'] - _drs = drs.get(project, 'default') - if not isinstance(input_file, six.string_types): - if _drs in input_file: - input_file = input_file[_drs] - else: - raise KeyError( - 'drs {} for {} project not specified for input_file ' - 'in config-developer file'.format(_drs, project)) - filename = replace_tags(input_file, variable) - return filename - - -def _get_fx_filename(variable, drs, j): - project = variable['project'] - cfg = get_project_config(project) - - input_file = cfg['fx_file'] - _drs = drs.get(project, 'default') - if not isinstance(input_file, six.string_types): - if _drs in input_file: - input_file = input_file[_drs] - else: - raise KeyError( - 'drs {} for {} project not specified for input_file ' - 'in config-developer file'.format(_drs, project)) - filename = replace_tags(input_file, variable, i=j) - return filename - - def get_input_filelist(variable, rootpath, drs): """Return the full path to input files.""" - all_files = [] - dirname_templates = get_input_dirname_template(variable, rootpath, drs) - valid_dirs = [] - - for dirname_template in dirname_templates: - # Find latest version if required - if '[latestversion]' not in dirname_template: - valid_dirs.append(dirname_template) - else: - part1, part2 = dirname_template.split('[latestversion]') - part2 = part2.lstrip(os.sep) - if os.path.exists(part1): - list_versions = os.listdir(part1) - list_versions.sort(reverse=True) - for version in list_versions: - dirname = os.path.join(part1, version, part2) - if os.path.isdir(dirname): - valid_dirs.append(dirname) - break - - # Set the filename glob - filename_glob = _get_filename(variable, drs) - - for dir_name in valid_dirs: - # Find files - files = find_files(dir_name, filename_glob) - - # Select files within the required time interval - files = select_files(files, variable['start_year'], - variable['end_year']) - all_files.extend(files) - - return all_files + files = _find_input_files(variable, rootpath, drs) + files = select_files(files, variable['start_year'], variable['end_year']) + return files def get_input_fx_filelist(variable, rootpath, drs): - """Return the full path to input files.""" - dirname_templates = get_input_fx_dirname_template(variable, rootpath, drs) + """Return a dict with the full path to fx input files.""" fx_files = {} + for fx_var in variable['fx_files']: + var = dict(variable) + var['mip'] = replace_mip_fx(fx_var) + table = 
CMOR_TABLES[var['cmor_table']].get_table(var['mip']) + var['frequency'] = table.frequency + realm = getattr(table.get(var['short_name']), 'modeling_realm', None) + var['modeling_realm'] = realm if realm else table.realm - for j, dirname_template in zip( - range(len(dirname_templates)), dirname_templates): - # Find latest version if required - if '[latestversion]' in dirname_template: - part1, part2 = dirname_template.split('[latestversion]') - part2 = part2.lstrip(os.sep) - # root part1 could not exist at all - if not os.path.exists(part1): - fx_files[variable['fx_files'][j]] = None - return fx_files - list_versions = os.listdir(part1) - list_versions.sort(reverse=True) - if 'latest' in list_versions: - list_versions.insert( - 0, list_versions.pop(list_versions.index('latest'))) - for version in list_versions: - if version == 'latest': - dirname = os.path.join(part1, version, part2) - if os.path.isdir(dirname): - break - else: - dirname = os.path.join(part1, version, part2) - if os.path.isdir(dirname): - break - else: - dirname = dirname_template - - # Set the filename glob - filename_glob = _get_fx_filename(variable, drs, j) - - # Find files - fx_file_list = find_files(dirname, filename_glob) - if fx_file_list: - # Grab the first file only; fx vars should have a single file - fx_files[variable['fx_files'][j]] = fx_file_list[0] - else: - # No files - fx_files[variable['fx_files'][j]] = None + files = _find_input_files(var, rootpath, drs, fx_var) + fx_files[fx_var] = files[0] if files else None return fx_files def get_output_file(variable, preproc_dir): - """Return the full path to the output (preprocessed) file""" + """Return the full path to the output (preprocessed) file.""" cfg = get_project_config(variable['project']) + # Join different experiment names + if isinstance(variable.get('exp'), (list, tuple)): + variable = dict(variable) + variable['exp'] = '-'.join(variable['exp']) + outfile = os.path.join( preproc_dir, - '{diagnostic}_{preprocessor}_{short_name}'.format(**variable), - replace_tags(cfg['output_file'], variable) + '.nc') + variable['diagnostic'], + variable['variable_group'], + _replace_tags(cfg['output_file'], variable)[0] + '.nc', + ) return outfile -def get_statistic_output_file(variable, statistic, preproc_dir): - """Get multi model statistic filename depending on settings""" - values = dict(variable) - values['stat'] = statistic.title() - +def get_statistic_output_file(variable, preproc_dir): + """Get multi model statistic filename depending on settings.""" template = os.path.join( preproc_dir, - '{diagnostic}_{preprocessor}_{short_name}', - 'MultiModel{stat}_{field}_{short_name}_{start_year}-{end_year}.nc', + '{diagnostic}', + '{variable_group}', + '{dataset}_{mip}_{short_name}_{start_year}-{end_year}.nc', ) - outfile = template.format(**values) + outfile = template.format(**variable) return outfile diff --git a/esmvaltool/_main.py b/esmvaltool/_main.py index df2ab672c9..c8a2dab022 100755 --- a/esmvaltool/_main.py +++ b/esmvaltool/_main.py @@ -1,4 +1,4 @@ -"""ESMValTool - Earth System Model Evaluation Tool +"""ESMValTool - Earth System Model Evaluation Tool. 
http://www.esmvaltool.org @@ -6,12 +6,14 @@ Veronika Eyring (PI; DLR, Germany - veronika.eyring@dlr.de) Bouwe Andela (NLESC, Netherlands - b.andela@esciencecenter.nl) Bjoern Broetz (DLR, Germany - bjoern.broetz@dlr.de) + Lee de Mora (PML, UK - ledm@pml.ac.uk) Niels Drost (NLESC, Netherlands - n.drost@esciencecenter.nl) Nikolay Koldunov (AWI, Germany - nikolay.koldunov@awi.de) Axel Lauer (DLR, Germany - axel.lauer@dlr.de) Benjamin Mueller (LMU, Germany - b.mueller@iggf.geo.uni-muenchen.de) Valeriu Predoi (URead, UK - valeriu.predoi@ncas.ac.uk) Mattia Righi (DLR, Germany - mattia.righi@dlr.de) + Manuel Schlund (DLR, Germany - manuel.schlund@dlr.de) Javier Vegas-Regidor (BSC, Spain - javier.vegas@bsc.es) For further help, please read the documentation at @@ -25,8 +27,6 @@ # Valeriu Predoi (URead, UK - valeriu.predoi@ncas.ac.uk) # Mattia Righi (DLR, Germany - mattia.righi@dlr.de) -from __future__ import print_function - import argparse import datetime import errno @@ -39,7 +39,7 @@ from . import __version__ from ._config import configure_logging, read_config_user_file -from ._recipe import read_recipe_file +from ._recipe import TASKSEP, read_recipe_file from ._task import resource_usage_logger # set up logging @@ -58,7 +58,7 @@ def get_args(): - """Define the `esmvaltool` command line""" + """Define the `esmvaltool` command line.""" # parse command line args parser = argparse.ArgumentParser( description=HEADER, @@ -85,12 +85,24 @@ def get_args(): '--max-datasets', type=int, help='Try to limit the number of datasets used to MAX_DATASETS.') + parser.add_argument( + '--max-years', + type=int, + help='Limit the number of years to MAX_YEARS.') + parser.add_argument( + '--skip-nonexistent', + action='store_true', + help="Skip datasets that cannot be found.") + parser.add_argument( + '--diagnostics', + nargs='*', + help="Only run the named diagnostics from the recipe.") args = parser.parse_args() return args def main(args): - """Define the `esmvaltool` program""" + """Define the `esmvaltool` program.""" recipe = args.recipe if not os.path.exists(recipe): installed_recipe = os.path.join( @@ -125,8 +137,19 @@ def main(args): logger.info("Using config file %s", config_file) logger.info("Writing program log files to:\n%s", "\n".join(log_files)) + cfg['skip-nonexistent'] = args.skip_nonexistent + cfg['diagnostics'] = { + pattern if TASKSEP in pattern else pattern + TASKSEP + '*' + for pattern in args.diagnostics or () + } cfg['synda_download'] = args.synda_download - cfg['max_datasets'] = args.max_datasets + for limit in ('max_datasets', 'max_years'): + value = getattr(args, limit) + if value is not None: + if value < 1: + raise ValueError("--{} should be larger than 0.".format( + limit.replace('_', '-'))) + cfg[limit] = value resource_log = os.path.join(cfg['run_dir'], 'resource_usage.txt') with resource_usage_logger(pid=os.getpid(), filename=resource_log): @@ -135,7 +158,7 @@ def main(args): def process_recipe(recipe_file, config_user): - """Process recipe""" + """Process recipe.""" if not os.path.isfile(recipe_file): raise OSError(errno.ENOENT, "Specified recipe file does not exist", recipe_file) @@ -193,8 +216,9 @@ def process_recipe(recipe_file, config_user): out_refs = glob.glob( os.path.join(config_user['output_dir'], '*', '*', 'references-acknowledgements.txt')) - logger.info("For the required references/acknowledgements of these " - "diagnostics see:\n%s", '\n'.join(out_refs)) + logger.info( + "For the required references/acknowledgements of these " + "diagnostics see:\n%s", '\n'.join(out_refs)) 
def run():
@@ -203,10 +227,18 @@ def run():
     try:
         conf = main(args)
     except:  # noqa
+        if not logger.handlers:
+            # Add a logging handler if main failed to do so.
+            logging.basicConfig()
         logger.exception(
             "Program terminated abnormally, see stack trace "
             "below for more information",
             exc_info=True)
+        logger.info(
+            "If you suspect this is a bug or need help, please open an issue "
+            "on https://github.com/ESMValGroup/ESMValTool/issues and attach "
+            "the run/recipe_*.yml and run/main_log_debug.txt files from the "
+            "output directory.")
         sys.exit(1)
     else:
         if conf["remove_preproc_dir"]:
@@ -214,4 +246,4 @@ def run():
             logger.info("If this data is further needed, then")
             logger.info("set remove_preproc_dir to false in config")
             shutil.rmtree(conf["preproc_dir"])
-        logger.info("Run was succesful")
+        logger.info("Run was successful")
diff --git a/esmvaltool/_provenance.py b/esmvaltool/_provenance.py
new file mode 100644
index 0000000000..6a6685d135
--- /dev/null
+++ b/esmvaltool/_provenance.py
@@ -0,0 +1,258 @@
+"""Provenance module."""
+import copy
+import logging
+import os
+
+from netCDF4 import Dataset
+from PIL import Image
+from PIL.PngImagePlugin import PngInfo
+from prov.dot import prov_to_dot
+from prov.model import ProvDocument
+
+from ._version import __version__
+
+logger = logging.getLogger(__name__)
+
+ESMVALTOOL_URI_PREFIX = 'https://www.esmvaltool.org/'
+
+
+def update_without_duplicating(bundle, other):
+    """Add new records from other provenance bundle."""
+    for record in other.records:
+        if record not in bundle.records:
+            bundle.add_record(record)
+
+
+def create_namespace(provenance, namespace):
+    """Create an esmvaltool namespace."""
+    provenance.add_namespace(namespace, uri=ESMVALTOOL_URI_PREFIX + namespace)
+
+
+def get_esmvaltool_provenance():
+    """Create an esmvaltool run activity."""
+    provenance = ProvDocument()
+    namespace = 'software'
+    create_namespace(provenance, namespace)
+    attributes = {}  # TODO: add dependencies with versions here
+    activity = provenance.activity(
+        namespace + ':esmvaltool==' + __version__, other_attributes=attributes)
+
+    return activity
+
+
+ESMVALTOOL_PROVENANCE = get_esmvaltool_provenance()
+
+
+def attribute_to_authors(entity, authors):
+    """Attribute entity to authors."""
+    namespace = 'author'
+    create_namespace(entity.bundle, namespace)
+
+    for author in authors:
+        agent = entity.bundle.agent(
+            namespace + ':' + author['name'],
+            {'attribute:' + k: author[k]
+             for k in author if k != 'name'})
+        entity.wasAttributedTo(agent)
+
+
+def attribute_to_projects(entity, projects):
+    """Attribute entity to projects."""
+    namespace = 'project'
+    create_namespace(entity.bundle, namespace)
+
+    for project in projects:
+        agent = entity.bundle.agent(namespace + ':' + project)
+        entity.wasAttributedTo(agent)
+
+
+def get_recipe_provenance(documentation, filename):
+    """Create a provenance entity describing a recipe."""
+    provenance = ProvDocument()
+
+    for namespace in ('recipe', 'attribute'):
+        create_namespace(provenance, namespace)
+
+    entity = provenance.entity(
+        'recipe:{}'.format(filename), {
+            'attribute:description': documentation.get('description', ''),
+            'attribute:references': ', '.join(
+                documentation.get('references', [])),
+        })
+
+    attribute_to_authors(entity, documentation.get('authors', []))
+    attribute_to_projects(entity, documentation.get('projects', []))
+
+    return entity
+
+
+def get_task_provenance(task, recipe_entity):
+    """Create a provenance activity describing a task."""
+    provenance = ProvDocument()
+    create_namespace(provenance, 
'task') + + activity = provenance.activity('task:' + task.name) + + trigger = recipe_entity + update_without_duplicating(provenance, recipe_entity.bundle) + + starter = ESMVALTOOL_PROVENANCE + update_without_duplicating(provenance, starter.bundle) + + activity.wasStartedBy(trigger, starter) + + return activity + + +class TrackedFile(object): + """File with provenance tracking.""" + + def __init__(self, filename, attributes, ancestors=None): + """Create an instance of a file with provenance tracking.""" + self._filename = filename + self.attributes = copy.deepcopy(attributes) + + self.provenance = None + self.entity = None + self.activity = None + self._ancestors = [] if ancestors is None else ancestors + + def __str__(self): + """Return summary string.""" + return "{}: {}".format(self.__class__.__name__, self.filename) + + def copy_provenance(self, target=None): + """Create a copy with identical provenance information.""" + if self.provenance is None: + raise ValueError("Provenance of {} not initialized".format(self)) + if target is None: + new = TrackedFile(self.filename, self.attributes) + else: + if target.filename != self.filename: + raise ValueError( + "Attempt to copy provenance to incompatible file.") + new = target + new.attributes = copy.deepcopy(self.attributes) + new.provenance = copy.deepcopy(self.provenance) + new.entity = new.provenance.get_record(self.entity.identifier)[0] + new.activity = new.provenance.get_record(self.activity.identifier)[0] + return new + + @property + def filename(self): + """Filename.""" + return self._filename + + def initialize_provenance(self, activity): + """Initialize the provenance document. + + Note: this also copies the ancestor provenance. Therefore, changes + made to ancestor provenance after calling this function will not + propagate into the provenance of this file. 
+        """
+        if self.provenance is not None:
+            raise ValueError(
+                "Provenance of {} already initialized".format(self))
+        self.provenance = ProvDocument()
+        self._initialize_namespaces()
+        self._initialize_activity(activity)
+        self._initialize_entity()
+        self._initialize_ancestors(activity)
+
+    def _initialize_namespaces(self):
+        """Initialize the namespaces."""
+        for namespace in ('file', 'attribute', 'preprocessor', 'task'):
+            create_namespace(self.provenance, namespace)
+
+    def _initialize_activity(self, activity):
+        """Copy the preprocessor task activity."""
+        self.activity = activity
+        update_without_duplicating(self.provenance, activity.bundle)
+
+    def _initialize_entity(self):
+        """Initialize the entity representing the file."""
+        attributes = {
+            'attribute:' + k: str(v)
+            for k, v in self.attributes.items()
+            if k not in ('authors', 'projects')
+        }
+        self.entity = self.provenance.entity('file:' + self.filename,
+                                             attributes)
+        attribute_to_authors(self.entity, self.attributes.get('authors', []))
+        attribute_to_projects(self.entity,
+                              self.attributes.get('projects', []))
+
+    def _initialize_ancestors(self, activity):
+        """Register ancestor files for provenance tracking."""
+        for ancestor in self._ancestors:
+            if ancestor.provenance is None:
+                ancestor.initialize_provenance(activity)
+            update_without_duplicating(self.provenance, ancestor.provenance)
+            self.wasderivedfrom(ancestor)
+
+    def wasderivedfrom(self, other):
+        """Let the file know that it was derived from other."""
+        if isinstance(other, TrackedFile):
+            other_entity = other.entity
+        else:
+            other_entity = other
+        update_without_duplicating(self.provenance, other_entity.bundle)
+        if not self.activity:
+            raise ValueError("Activity not initialized.")
+        self.entity.wasDerivedFrom(other_entity, self.activity)
+
+    def _select_for_include(self):
+        attributes = {
+            'provenance': self.provenance.serialize(format='xml'),
+            'software': "Created with ESMValTool v{}".format(__version__),
+        }
+        if 'caption' in self.attributes:
+            attributes['caption'] = self.attributes['caption']
+        return attributes
+
+    @staticmethod
+    def _include_provenance_nc(filename, attributes):
+        with Dataset(filename, 'a') as dataset:
+            for key, value in attributes.items():
+                setattr(dataset, key, value)
+
+    @staticmethod
+    def _include_provenance_png(filename, attributes):
+        pnginfo = PngInfo()
+        exif_tags = {
+            'provenance': 'ImageHistory',
+            'caption': 'ImageDescription',
+            'software': 'Software',
+        }
+        for key, value in attributes.items():
+            pnginfo.add_text(exif_tags.get(key, key), value, zip=True)
+        with Image.open(filename) as image:
+            image.save(filename, pnginfo=pnginfo)
+
+    def _include_provenance(self):
+        """Include provenance information as metadata."""
+        attributes = self._select_for_include()
+
+        # List of files to attach provenance to
+        files = [self.filename]
+        if 'plot_file' in self.attributes:
+            files.append(self.attributes['plot_file'])
+
+        # Attach provenance to supported file types
+        for filename in files:
+            ext = os.path.splitext(filename)[1].lstrip('.').lower()
+            write = getattr(self, '_include_provenance_' + ext, None)
+            if write:
+                write(filename, attributes)
+
+    def save_provenance(self):
+        """Export provenance information."""
+        self._include_provenance()
+        filename = os.path.splitext(self.filename)[0] + '_provenance'
+        self.provenance.serialize(filename + '.xml', format='xml')
+        # Only plot provenance if there are not too many records.
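+        # Editor's note: the SVG below is drawn with prov.dot, which needs
+        # the Graphviz binaries at runtime. The XML written above can be
+        # loaded back for inspection, e.g. (path hypothetical):
+        #     from prov.model import ProvDocument
+        #     doc = ProvDocument.deserialize('tas_provenance.xml',
+        #                                    format='xml')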
+ if len(self.provenance.records) > 100: + logger.debug("Not plotting large provenance tree of %s", + self.filename) + else: + figure = prov_to_dot(self.provenance) + figure.write_svg(filename + '.svg') diff --git a/esmvaltool/_recipe.py b/esmvaltool/_recipe.py index 5ff380f330..6ab0236f55 100644 --- a/esmvaltool/_recipe.py +++ b/esmvaltool/_recipe.py @@ -1,43 +1,44 @@ -"""Recipe parser""" -import copy +"""Recipe parser.""" import fnmatch -import inspect import logging import os -import subprocess from collections import OrderedDict +from copy import deepcopy -import yamale import yaml - -from . import __version__, preprocessor -from ._data_finder import (get_input_filelist, get_input_filename, - get_input_fx_filelist, get_output_file, - get_start_end_year, get_statistic_output_file) -from ._task import DiagnosticTask, get_independent_tasks, run_tasks, which +from netCDF4 import Dataset + +from . import __version__ +from . import _recipe_checks as check +from ._config import TAGS, get_institutes, replace_tags +from ._data_finder import (get_input_filelist, get_input_fx_filelist, + get_output_file, get_statistic_output_file) +from ._provenance import TrackedFile, get_recipe_provenance +from ._recipe_checks import RecipeError +from ._task import (DiagnosticTask, get_flattened_tasks, get_independent_tasks, + run_tasks) from .cmor.table import CMOR_TABLES -from .preprocessor import DEFAULT_ORDER, FINAL_STEPS, INITIAL_STEPS +from .preprocessor import (DEFAULT_ORDER, FINAL_STEPS, INITIAL_STEPS, + MULTI_MODEL_FUNCTIONS, PreprocessingTask, + PreprocessorFile) from .preprocessor._derive import get_required from .preprocessor._download import synda_search from .preprocessor._io import DATASET_KEYS, concatenate_callback -from .preprocessor._regrid import get_cmor_levels, get_reference_levels +from .preprocessor._regrid import (get_cmor_levels, get_reference_levels, + parse_cell_spec) logger = logging.getLogger(__name__) TASKSEP = os.sep -class RecipeError(Exception): - """Recipe contains an error.""" - - def ordered_safe_load(stream): - """Load a YAML file using OrderedDict instead of dict""" + """Load a YAML file using OrderedDict instead of dict.""" class OrderedSafeLoader(yaml.SafeLoader): - """Loader class that uses OrderedDict to load a map""" + """Loader class that uses OrderedDict to load a map.""" def construct_mapping(loader, node): - """Load a map as an OrderedDict""" + """Load a map as an OrderedDict.""" loader.flatten_mapping(node) return OrderedDict(loader.construct_pairs(node)) @@ -47,174 +48,26 @@ def construct_mapping(loader, node): return yaml.load(stream, OrderedSafeLoader) -def read_recipe_file(filename, config_user, initialize_tasks=True): - """Read a recipe from file.""" - raw_recipe = check_recipe(filename) - return Recipe( - raw_recipe, config_user, initialize_tasks, recipe_file=filename) - - -def check_ncl_version(): - """Check the NCL version""" - ncl = which('ncl') - if not ncl: - raise RecipeError("Recipe contains NCL scripts, but cannot find " - "an NCL installation.") - try: - cmd = [ncl, '-V'] - version = subprocess.check_output(cmd, universal_newlines=True) - except subprocess.CalledProcessError: - logger.error("Failed to execute '%s'", ' '.join(' '.join(cmd))) - raise RecipeError("Recipe contains NCL scripts, but your NCL " - "installation appears to be broken.") - - version = version.strip() - logger.info("Found NCL version %s", version) - - major, minor = (int(i) for i in version.split('.')[:2]) - if major < 6 or (major == 6 and minor < 4): - raise RecipeError("NCL 
version 6.4 or higher is required to run " - "a recipe containing NCL scripts.") - - -def check_recipe_with_schema(filename): - """Check if the recipe content matches schema.""" - schema_file = os.path.join(os.path.dirname(__file__), 'recipe_schema.yml') - logger.debug("Checking recipe against schema %s", schema_file) - recipe = yamale.make_data(filename) - schema = yamale.make_schema(schema_file) - yamale.validate(schema, recipe) - - -def check_recipe(filename): +def load_raw_recipe(filename): """Check a recipe file and return it in raw form.""" # Note that many checks can only be performed after the automatically # computed entries have been filled in by creating a Recipe object. - check_recipe_with_schema(filename) + check.recipe_with_schema(filename) with open(filename, 'r') as file: contents = file.read() raw_recipe = yaml.safe_load(contents) raw_recipe['preprocessors'] = ordered_safe_load(contents).get( 'preprocessors', {}) - check_preprocessors(raw_recipe['preprocessors']) - check_diagnostics(raw_recipe['diagnostics']) + check.diagnostics(raw_recipe['diagnostics']) return raw_recipe -def check_preprocessors(preprocessors): - """Check preprocessors in recipe""" - valid_functions = set(preprocessor.DEFAULT_ORDER) - for name, profile in preprocessors.items(): - invalid_functions = set(profile) - {'custom_order'} - valid_functions - if invalid_functions: - raise RecipeError( - "Unknown function(s) {} in preprocessor {}, choose from: " - "{}".format(', '.join(invalid_functions), name, - ', '.join(preprocessor.DEFAULT_ORDER))) - - -def check_diagnostics(diagnostics): - """Check diagnostics in recipe""" - for name, diagnostic in diagnostics.items(): - if 'scripts' not in diagnostic: - raise RecipeError("Missing scripts section in diagnostic {}" - .format(name)) - variable_names = tuple(diagnostic.get('variables', {})) - scripts = diagnostic.get('scripts') - if scripts is None: - scripts = {} - for script_name, script in scripts.items(): - if script_name in variable_names: - raise RecipeError( - "Invalid script name {} encountered in diagnostic {}: " - "scripts cannot have the same name as variables.".format( - script_name, name)) - if not script.get('script'): - raise RecipeError( - "No script defined for script {} in diagnostic {}".format( - script_name, name)) - - -def check_preprocessor_settings(settings): - """Check preprocessor settings.""" - # The inspect functions getargspec and getcallargs are deprecated - # in Python 3, but their replacements are not available in Python 2. - # TODO: Use the new Python 3 inspect API - for step in settings: - if step not in preprocessor.DEFAULT_ORDER: - raise RecipeError( - "Unknown preprocessor function '{}', choose from: {}".format( - step, ', '.join(preprocessor.DEFAULT_ORDER))) - - function = getattr(preprocessor, step) - argspec = inspect.getargspec(function) - args = argspec.args[1:] - # Check for invalid arguments - invalid_args = set(settings[step]) - set(args) - if invalid_args: - raise RecipeError( - "Invalid argument(s): {} encountered for preprocessor " - "function {}. 
\nValid arguments are: [{}]".format( - ', '.join(invalid_args), step, ', '.join(args))) - - # Check for missing arguments - defaults = argspec.defaults - end = None if defaults is None else -len(defaults) - missing_args = set(args[:end]) - set(settings[step]) - if missing_args: - raise RecipeError( - "Missing required argument(s) {} for preprocessor " - "function {}".format(missing_args, step)) - # Final sanity check in case the above fails to catch a mistake - try: - inspect.getcallargs(function, None, **settings[step]) - except TypeError: - logger.error( - "Wrong preprocessor function arguments in " - "function '%s'", step) - raise - - -def check_duplicate_datasets(datasets): - """Check for duplicate datasets.""" - checked_datasets_ = [] - for dataset in datasets: - if dataset in checked_datasets_: - raise RecipeError( - "Duplicate dataset {} in datasets section".format(dataset)) - checked_datasets_.append(dataset) - - -def check_variable(variable, required_keys): - """Check variables as derived from recipe""" - required = set(required_keys) - missing = required - set(variable) - if missing: - raise RecipeError( - "Missing keys {} from variable {} in diagnostic {}".format( - missing, variable.get('short_name'), - variable.get('diagnostic'))) - - -def check_data_availability(input_files, variable): - """Check if the required input data is available""" - if not input_files: - raise RecipeError("No input files found for variable {}" - .format(variable)) - - required_years = set( - range(variable['start_year'], variable['end_year'] + 1)) - available_years = set() - for filename in input_files: - start, end = get_start_end_year(filename) - available_years.update(range(start, end + 1)) - - missing_years = required_years - available_years - if missing_years: - raise RecipeError( - "No input data available for years {} in files {}".format( - ", ".join(str(year) for year in missing_years), input_files)) +def read_recipe_file(filename, config_user, initialize_tasks=True): + """Read a recipe from file.""" + raw_recipe = load_raw_recipe(filename) + return Recipe( + raw_recipe, config_user, initialize_tasks, recipe_file=filename) def _get_value(key, datasets): @@ -241,26 +94,6 @@ def _update_from_others(variable, keys, datasets): variable[key] = value -def _update_cmor_table(table, mip, short_name): - """Try to add an ESMValTool custom CMOR table file.""" - cmor_table = CMOR_TABLES[table] - var_info = cmor_table.get_variable(mip, short_name) - - if var_info is None and hasattr(cmor_table, 'add_custom_table_file'): - table_file = os.path.join( - os.path.dirname(__file__), 'cmor', 'tables', 'custom', - 'CMOR_' + short_name + '.dat') - if os.path.exists(table_file): - logger.debug("Loading custom CMOR table from %s", table_file) - cmor_table.add_custom_table_file(table_file, mip) - var_info = cmor_table.get_variable(mip, short_name) - - if var_info is None: - raise RecipeError( - "Unable to load CMOR table '{}' for variable '{}' with mip '{}'" - .format(table, short_name, mip)) - - def _add_cmor_info(variable, override=False): """Add information from CMOR tables to variable.""" logger.debug("If not present: adding keys from CMOR table to %s", variable) @@ -272,10 +105,28 @@ def _add_cmor_info(variable, override=False): if variable['cmor_table'] not in CMOR_TABLES: logger.warning("Unknown CMOR table %s", variable['cmor_table']) + derive = variable.get('derive', False) # Copy the following keys from CMOR table - cmor_keys = ['standard_name', 'long_name', 'units'] - table_entry = 
CMOR_TABLES[variable['cmor_table']].get_variable(
-        variable['mip'], variable['short_name'])
+    cmor_keys = [
+        'standard_name', 'long_name', 'units', 'modeling_realm', 'frequency'
+    ]
+    cmor_table = variable['cmor_table']
+    mip = variable['mip']
+    short_name = variable['short_name']
+    table_entry = CMOR_TABLES[cmor_table].get_variable(mip, short_name)
+
+    if derive and table_entry is None:
+        custom_table = CMOR_TABLES['custom']
+        table_entry = custom_table.get_variable(mip, short_name)
+
+    if table_entry is None:
+        raise RecipeError(
+            "Unable to load CMOR table '{}' for variable '{}' with mip '{}'".
+            format(cmor_table, short_name, mip))
+
+    mip_info = CMOR_TABLES[cmor_table].get_table(mip)
+    if mip_info:
+        table_entry.frequency = mip_info.frequency
 
     for key in cmor_keys:
         if key not in variable or override:
@@ -288,7 +139,7 @@ def _add_cmor_info(variable, override=False):
             variable)
 
     # Check that keys are available
-    check_variable(variable, required_keys=cmor_keys)
+    check.variable(variable, required_keys=cmor_keys)
 
 
 def _special_name_to_dataset(variable, special_name):
@@ -325,14 +176,17 @@ def _update_target_levels(variable, variables, settings, config_user):
         settings['extract_levels']['levels'] = get_cmor_levels(
             levels['cmor_table'], levels['coordinate'])
     elif 'dataset' in levels:
-        if variable['dataset'] == levels['dataset']:
+        dataset = levels['dataset']
+        if variable['dataset'] == dataset:
             del settings['extract_levels']
         else:
+            variable_data = _get_dataset_info(dataset, variables)
             filename = \
-                _dataset_to_file(levels['dataset'], variables, config_user)
-            coordinate = levels.get('coordinate', 'air_pressure')
+                _dataset_to_file(variable_data, config_user)
             settings['extract_levels']['levels'] = get_reference_levels(
-                filename, coordinate)
+                filename, variable_data['project'], dataset,
+                variable_data['short_name'],
+                os.path.splitext(variable_data['filename'])[0] + '_fixed')
 
 
 def _update_target_grid(variable, variables, settings, config_user):
@@ -347,53 +201,70 @@ def _update_target_grid(variable, variables, settings, config_user):
         del settings['regrid']
     elif any(grid == v['dataset'] for v in variables):
         settings['regrid']['target_grid'] = _dataset_to_file(
-            grid, variables, config_user)
+            _get_dataset_info(grid, variables), config_user)
+    else:
+        # Check that MxN grid spec is correct
+        parse_cell_spec(settings['regrid']['target_grid'])
 
 
-def _dataset_to_file(dataset, variables, config_user):
-    """Find the first file belonging to dataset."""
-    for variable in variables:
-        if variable['dataset'] == dataset:
-            files = get_input_filelist(
-                variable=variable,
-                rootpath=config_user['rootpath'],
-                drs=config_user['drs'])
-            if not files and variable.get('derive'):
-                variable = copy.deepcopy(variable)
-                variable['short_name'], variable['field'] = get_required(
-                    variable['short_name'], variable['field'])[0]
-                files = get_input_filelist(
-                    variable=variable,
-                    rootpath=config_user['rootpath'],
-                    drs=config_user['drs'])
-            check_data_availability(files, variable)
-            return files[0]
+def _update_regrid_time(variable, settings):
+    """Set the input data frequency for the regrid_time preprocessor."""
+    regrid_time = settings.get('regrid_time')
+    if regrid_time is None:
+        return
+    frequency = settings.get('regrid_time', {}).get('frequency')
+    if not frequency:
+        settings['regrid_time']['frequency'] = variable['frequency']
+
 
-    raise RecipeError(
-        "Unable to find matching file for dataset {}".format(dataset))
+def _get_dataset_info(dataset, variables):
+    for var in variables:
+        if var['dataset'] == dataset:
+            return var
+    raise RecipeError("Unable to find matching file for dataset "
+                      "{}".format(dataset))
 
 
-def _limit_datasets(variables, profile, max_datasets=None):
+def _augment(base, update):
+    """Update dict base with values from dict update."""
+    for key in update:
+        if key not in base:
+            base[key] = update[key]
+
+
+def _dataset_to_file(variable, config_user):
+    """Find the first file belonging to dataset from variable info."""
+    files = get_input_filelist(
+        variable=variable,
+        rootpath=config_user['rootpath'],
+        drs=config_user['drs'])
+    if not files and variable.get('derive'):
+        first_required = get_required(variable['short_name'])[0]
+        _augment(first_required, variable)
+        files = get_input_filelist(
+            variable=first_required,
+            rootpath=config_user['rootpath'],
+            drs=config_user['drs'])
+    check.data_availability(files, variable)
+    return files[0]
+
+
+def _limit_datasets(variables, profile, max_datasets=0):
     """Try to limit the number of datasets to max_datasets."""
     if not max_datasets:
         return variables
 
     logger.info("Limiting the number of datasets to %s", max_datasets)
 
-    required_datasets = (
-        profile.get('extract_levels', {}).get('levels'),
-        profile.get('regrid', {}).get('target_grid'),
+    required_datasets = [
+        (profile.get('extract_levels') or {}).get('levels'),
+        (profile.get('regrid') or {}).get('target_grid'),
         variables[0].get('reference_dataset'),
         variables[0].get('alternative_dataset'),
-    )
-
-    limited = []
+    ]
 
+    limited = [v for v in variables if v['dataset'] in required_datasets]
     for variable in variables:
-        if variable['dataset'] in required_datasets:
-            limited.append(variable)
-
-    for variable in variables[::-1]:
         if len(limited) >= max_datasets:
             break
         if variable not in limited:
@@ -411,23 +282,16 @@ def _get_default_settings(variable, config_user, derive=False):
 
     # Set up downloading using synda if requested.
     if config_user['synda_download']:
-        local_dir = os.path.dirname(
-            get_input_filename(
-                variable=variable,
-                rootpath=config_user['rootpath'],
-                drs=config_user['drs']))
+        # TODO: make this respect drs or download to preproc dir?
+        download_folder = os.path.join(config_user['preproc_dir'], 'downloads')
         settings['download'] = {
-            'dest_folder': local_dir,
+            'dest_folder': download_folder,
         }
 
     # Configure loading
-    settings['load_cubes'] = {
+    settings['load'] = {
         'callback': concatenate_callback,
-        'filename': variable['filename'],
-        'metadata': variable,
     }
-    if not derive:
-        settings['load_cubes']['constraints'] = variable['standard_name']
 
     # Configure merge
    settings['concatenate'] = {}
@@ -439,29 +303,34 @@ def _get_default_settings(variable, config_user, derive=False):
     }
     # File fixes
     fix_dir = os.path.splitext(variable['filename'])[0] + '_fixed'
-    if not derive:
-        settings['fix_file'] = dict(fix)
-        settings['fix_file']['output_dir'] = fix_dir
+    settings['fix_file'] = dict(fix)
+    settings['fix_file']['output_dir'] = fix_dir
     # Cube fixes
     # Only supply mip if the CMOR check fixes are implemented.
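+    # Editor's illustration: with hypothetical values, the fix settings
+    # built below end up looking roughly like
+    #     {'project': 'CMIP5', 'dataset': 'GFDL-ESM2G', 'short_name': 'tas',
+    #      'cmor_table': 'CMIP5', 'mip': 'Amon', 'frequency': 'mon'}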
if variable.get('cmor_table'): fix['cmor_table'] = variable['cmor_table'] fix['mip'] = variable['mip'] + fix['frequency'] = variable['frequency'] settings['fix_data'] = dict(fix) settings['fix_metadata'] = dict(fix) # Configure time extraction settings['extract_time'] = { - 'yr1': variable['start_year'], - 'yr2': variable['end_year'] + 1, - 'mo1': 1, - 'mo2': 1, - 'd1': 1, - 'd2': 1, + 'start_year': variable['start_year'], + 'end_year': variable['end_year'] + 1, + 'start_month': 1, + 'end_month': 1, + 'start_day': 1, + 'end_day': 1, } if derive: - settings['derive'] = {'variable': variable} + settings['derive'] = { + 'short_name': variable['short_name'], + 'standard_name': variable['standard_name'], + 'long_name': variable['long_name'], + 'units': variable['units'], + } # Configure CMOR metadata check if variable.get('cmor_table'): @@ -469,6 +338,7 @@ def _get_default_settings(variable, config_user, derive=False): 'cmor_table': variable['cmor_table'], 'mip': variable['mip'], 'short_name': variable['short_name'], + 'frequency': variable['frequency'], } # Configure final CMOR data check if variable.get('cmor_table'): @@ -476,6 +346,7 @@ def _get_default_settings(variable, config_user, derive=False): 'cmor_table': variable['cmor_table'], 'mip': variable['mip'], 'short_name': variable['short_name'], + 'frequency': variable['frequency'], } # Clean up fixed files @@ -491,20 +362,31 @@ def _get_default_settings(variable, config_user, derive=False): def _update_fx_settings(settings, variable, config_user): - """Find and set the FX mask settings""" - if 'mask_landsea' in settings.keys(): + """Find and set the FX derive/mask settings.""" + # update for derive + if 'derive' in settings: + fx_files = {} + for var in get_required(variable['short_name']): + if 'fx_files' in var: + _augment(var, variable) + fx_files.update( + get_input_fx_filelist( + variable=var, + rootpath=config_user['rootpath'], + drs=config_user['drs'])) + settings['derive']['fx_files'] = fx_files + + # update for landsea + if 'mask_landsea' in settings: # Configure ingestion of land/sea masks logger.debug('Getting fx mask settings now...') - # settings[mask_landsea][fx_file] is a list to store ALL - # available masks settings['mask_landsea']['fx_files'] = [] - # fx_files already in variable - variable = dict(variable) - variable['fx_files'] = ['sftlf', 'sftof'] + var = dict(variable) + var['fx_files'] = ['sftlf', 'sftof'] fx_files_dict = get_input_fx_filelist( - variable=variable, + variable=var, rootpath=config_user['rootpath'], drs=config_user['drs']) @@ -514,9 +396,47 @@ def _update_fx_settings(settings, variable, config_user): if fx_files_dict['sftof']: settings['mask_landsea']['fx_files'].append(fx_files_dict['sftof']) + if 'mask_landseaice' in settings: + logger.debug('Getting fx mask settings now...') + + settings['mask_landseaice']['fx_files'] = [] + + var = dict(variable) + var['fx_files'] = ['sftgif'] + fx_files_dict = get_input_fx_filelist( + variable=var, + rootpath=config_user['rootpath'], + drs=config_user['drs']) + + # allow sftgif (only, for now) + if fx_files_dict['sftgif']: + settings['mask_landseaice']['fx_files'].append( + fx_files_dict['sftgif']) + + for step in ('average_region', 'average_volume'): + if settings.get(step, {}).get('fx_files'): + settings[step]['fx_files'] = get_input_fx_filelist( + variable=variable, + rootpath=config_user['rootpath'], + drs=config_user['drs'], + ) + + +def _read_attributes(filename): + """Read the attributes from a netcdf file.""" + attributes = {} + if not 
(os.path.exists(filename) + and os.path.splitext(filename)[1].lower() == '.nc'): + return attributes + + with Dataset(filename, 'r') as dataset: + for attr in dataset.ncattrs(): + attributes[attr] = getattr(dataset, attr) + return attributes + def _get_input_files(variable, config_user): - """Get the input files for a single dataset""" + """Get the input files for a single dataset.""" # Find input files locally. input_files = get_input_filelist( variable=variable, @@ -531,13 +451,21 @@ def _get_input_files(variable, config_user): logger.info("Using input files for variable %s of dataset %s:\n%s", variable['short_name'], variable['dataset'], '\n'.join(input_files)) - check_data_availability(input_files, variable) + if (not config_user.get('skip-nonexistent') + or variable['dataset'] == variable.get('reference_dataset')): + check.data_availability(input_files, variable) + + # Set up provenance tracking + for i, filename in enumerate(input_files): + attributes = _read_attributes(filename) + input_files[i] = TrackedFile(filename, attributes) return input_files -def _apply_preprocessor_settings(settings, profile_settings): +def _apply_preprocessor_profile(settings, profile_settings): """Apply settings from preprocessor profile.""" + profile_settings = deepcopy(profile_settings) for step, args in profile_settings.items(): # Remove disabled preprocessor functions if args is False: @@ -551,83 +479,194 @@ def _apply_preprocessor_settings(settings, profile_settings): settings[step].update(args) -def _update_multi_model_statistics(variables, settings, preproc_dir): +def _get_statistic_attributes(products): + """Get attributes for the statistic output products.""" + attributes = {} + some_product = next(iter(products)) + for key, value in some_product.attributes.items(): + if all(p.attributes.get(key, object()) == value for p in products): + attributes[key] = value + + # Ensure start_year and end_year attributes are available + for product in products: + start = product.attributes['start_year'] + if 'start_year' not in attributes or start < attributes['start_year']: + attributes['start_year'] = start + end = product.attributes['end_year'] + if 'end_year' not in attributes or end > attributes['end_year']: + attributes['end_year'] = end + + return attributes + + +def _get_remaining_common_settings(step, order, products): + """Get preprocessor settings that are shared between products.""" + settings = {} + remaining_steps = order[order.index(step) + 1:] + some_product = next(iter(products)) + for key, value in some_product.settings.items(): + if key in remaining_steps: + if all(p.settings.get(key, object()) == value for p in products): + settings[key] = value + return settings + + +def _update_multi_dataset_settings(variable, settings): """Configure multi dataset statistics.""" - if settings.get('multi_model_statistics', False): - if settings['multi_model_statistics'] is True: - settings['multi_model_statistics'] = {} - stat_settings = settings['multi_model_statistics'] - - variable = variables[0] - - # Define output files - stat_settings['filenames'] = {} - for statistic in stat_settings['statistics']: - stat_settings['filenames'][statistic] = get_statistic_output_file( - variable, statistic, preproc_dir) - - # Define datasets to exclude - exclude_datasets = set(stat_settings.get('exclude', {})) - for key in 'reference_dataset', 'alternative_dataset': - if key in exclude_datasets and key in variable: - exclude_datasets.remove(key) - exclude_datasets.add(variable[key]) - exclude_files = { - v['filename'] 
- for v in variables if v['dataset'] in exclude_datasets + for step in MULTI_MODEL_FUNCTIONS: + if not settings.get(step): + continue + # Exclude dataset if requested + exclude = { + _special_name_to_dataset(variable, dataset) + for dataset in settings[step].pop('exclude', []) } - logger.debug('Multidataset excludes files %s', exclude_files) - stat_settings['exclude'] = {'_filename': exclude_files} + if variable['dataset'] in exclude: + settings.pop(step) -def _get_preprocessor_settings(variables, profile, config_user): - """Get preprocessor settings for a set of datasets.""" - all_settings = {} - profile = copy.deepcopy(profile) - _update_multi_model_statistics(variables, profile, - config_user['preproc_dir']) +def _update_statistic_settings(products, order, preproc_dir): + """Define statistic output products.""" + # TODO: move this to multi model statistics function? + # But how to check, with a dry-run option? + step = 'multi_model_statistics' + + products = {p for p in products if step in p.settings} + if not products: + return + + some_product = next(iter(products)) + for statistic in some_product.settings[step]['statistics']: + attributes = _get_statistic_attributes(products) + attributes['dataset'] = 'MultiModel{}'.format(statistic.title()) + attributes['filename'] = get_statistic_output_file( + attributes, preproc_dir) + common_settings = _get_remaining_common_settings(step, order, products) + statistic_product = PreprocessorFile(attributes, common_settings) + for product in products: + settings = product.settings[step] + if 'output_products' not in settings: + settings['output_products'] = {} + settings['output_products'][statistic] = statistic_product + + +def _match_products(products, variables): + """Match a list of input products to output product attributes.""" + grouped_products = {} + + def get_matching(attributes): + """Find the output filename which matches input attributes best.""" + score = 0 + filenames = [] + for variable in variables: + filename = variable['filename'] + tmp = sum(v == variable.get(k) for k, v in attributes.items()) + if tmp > score: + score = tmp + filenames = [filename] + elif tmp == score: + filenames.append(filename) + if not filenames: + logger.warning( + "Unable to find matching output file for input file %s", + filename) + return filenames + + # Group input files by output file + for product in products: + for filename in get_matching(product.attributes): + if filename not in grouped_products: + grouped_products[filename] = [] + grouped_products[filename].append(product) + + return grouped_products + + +def _get_preprocessor_products(variables, profile, order, ancestor_products, + config_user): + """Get preprocessor product definitions for a set of datasets.""" + products = set() for variable in variables: - derive = 'derive' in profile - settings = _get_default_settings(variable, config_user, derive=derive) - _apply_preprocessor_settings(settings, profile) - # if the target grid is a dataset name, replace it with a file name - # TODO: call _update_target_grid only once per variable? 
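+        # Editor's note: get_output_file derives the preprocessed filename
+        # from the variable attributes; a typical (illustrative) result is
+        #     <preproc_dir>/<diagnostic>/<variable_group>/
+        #         CMIP5_GFDL-ESM2G_Amon_historical_r1i1p1_tas_2000-2002.nc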
+ variable['filename'] = get_output_file(variable, + config_user['preproc_dir']) + + if ancestor_products: + grouped_ancestors = _match_products(ancestor_products, variables) + else: + grouped_ancestors = {} + + for variable in variables: + settings = _get_default_settings( + variable, config_user, derive='derive' in profile) + _apply_preprocessor_profile(settings, profile) + _update_multi_dataset_settings(variable, settings) _update_target_levels( variable=variable, variables=variables, settings=settings, config_user=config_user) - _update_fx_settings(settings=settings, - variable=variable, - config_user=config_user) + _update_fx_settings( + settings=settings, variable=variable, config_user=config_user) _update_target_grid( variable=variable, variables=variables, settings=settings, config_user=config_user) - check_preprocessor_settings(settings) - all_settings[variable['filename']] = settings - - _check_multi_model_settings(all_settings) - return all_settings - - -def _check_multi_model_settings(all_settings): - """Check that multi dataset settings are identical for all datasets.""" - multi_model_steps = (step for step in preprocessor.MULTI_MODEL_FUNCTIONS - if any(step in settings - for settings in all_settings.values())) - for step in multi_model_steps: - result = None - for filename, settings in all_settings.items(): - if result is None: - result = settings[step] - elif result != settings[step]: - raise RecipeError( - "Unable to combine differing multi-dataset settings " - "{} and {} for output file {}".format( - result, settings[step], filename)) + _update_regrid_time(variable, settings) + ancestors = grouped_ancestors.get(variable['filename']) + if not ancestors: + ancestors = _get_input_files(variable, config_user) + if config_user.get('skip-nonexistent') and not ancestors: + logger.info("Skipping: no data found for %s", variable) + continue + product = PreprocessorFile( + attributes=variable, settings=settings, ancestors=ancestors) + products.add(product) + + _update_statistic_settings(products, order, config_user['preproc_dir']) + + for product in products: + product.check() + + return products + + +def _get_single_preprocessor_task(variables, + profile, + config_user, + name, + ancestor_tasks=None): + """Create preprocessor tasks for a set of datasets.""" + if ancestor_tasks is None: + ancestor_tasks = [] + order = _extract_preprocessor_order(profile) + ancestor_products = [p for task in ancestor_tasks for p in task.products] + products = _get_preprocessor_products( + variables=variables, + profile=profile, + order=order, + ancestor_products=ancestor_products, + config_user=config_user, + ) + + if not products: + raise RecipeError( + "Did not find any input data for task {}".format(name)) + + task = PreprocessingTask( + products=products, + ancestors=ancestor_tasks, + name=name, + order=order, + debug=config_user['save_intermediary_cubes'], + write_ncl_interface=config_user['write_ncl_interface'], + ) + + logger.info("PreprocessingTask %s created. 
It will create the files:\n%s", + task.name, '\n'.join(p.filename for p in task.products)) + + return task def _extract_preprocessor_order(profile): @@ -639,52 +678,66 @@ def _extract_preprocessor_order(profile): return INITIAL_STEPS + order + FINAL_STEPS +def _split_settings(settings, step, order=DEFAULT_ORDER): + """Split settings, using step as a separator.""" + before = {} + for _step in order: + if _step == step: + break + if _step in settings: + before[_step] = settings[_step] + after = { + k: v + for k, v in settings.items() if not (k == step or k in before) + } + return before, after + + def _split_derive_profile(profile): - """Split the derive preprocessor profile""" + """Split the derive preprocessor profile.""" order = _extract_preprocessor_order(profile) - before, after = preprocessor.split_settings(profile, 'derive', order) - after['derive'] = {} + before, after = _split_settings(profile, 'derive', order) + after['derive'] = True + after['fix_file'] = False + after['fix_metadata'] = False + after['fix_data'] = False if order != DEFAULT_ORDER: before['custom_order'] = True after['custom_order'] = True return before, after -def _get_single_preprocessor_task(variables, - profile, - config_user, - ancestors=None): - """Create preprocessor tasks for a set of datasets.""" - # Configure preprocessor - order = _extract_preprocessor_order(profile) - all_settings = _get_preprocessor_settings( - variables=variables, profile=profile, config_user=config_user) - - # Input files, used by tasks without ancestors - input_files = None - if not ancestors: - input_files = [ - filename for variable in variables - for filename in _get_input_files(variable, config_user) - ] - - output_dir = os.path.dirname(variables[0]['filename']) - - task = preprocessor.PreprocessingTask( - settings=all_settings, - output_dir=output_dir, - ancestors=ancestors, - input_files=input_files, - order=order, - debug=config_user['save_intermediary_cubes']) +def _get_derive_input_variables(variables, config_user): + """Determine the input sets of `variables` needed for deriving.""" + derive_input = {} - return task + def append(group_prefix, var): + """Append variable `var` to a derive input group.""" + group = group_prefix + var['short_name'] + var['variable_group'] = group + if group not in derive_input: + derive_input[group] = [] + derive_input[group].append(var) + for variable in variables: + group_prefix = variable['variable_group'] + '_derive_input_' + if not variable.get('force_derivation') and get_input_filelist( + variable=variable, + rootpath=config_user['rootpath'], + drs=config_user['drs']): + # No need to derive, just process normally up to derive step + var = deepcopy(variable) + append(group_prefix, var) + else: + # Process input data needed to derive variable + for var in get_required(variable['short_name']): + _augment(var, variable) + append(group_prefix, var) + + return derive_input -def _get_preprocessor_task(variables, - profiles, - config_user, - write_ncl_interface=False): + +def _get_preprocessor_task(variables, profiles, config_user, task_name): """Create preprocessor task(s) for a set of datasets.""" # First set up the preprocessor profile variable = variables[0] @@ -693,67 +746,45 @@ def _get_preprocessor_task(variables, raise RecipeError( "Unknown preprocessor {} in variable {} of diagnostic {}".format( preproc_name, variable['short_name'], variable['diagnostic'])) - profile = copy.deepcopy(profiles[variable['preprocessor']]) + profile = deepcopy(profiles[variable['preprocessor']]) 
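+    # Editor's note: profiles are shared between variables, so work on a
+    # copy. A profile is the plain dict parsed from the recipe, e.g. a
+    # hypothetical {'regrid': {'target_grid': '1x1', 'scheme': 'linear'}}.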
logger.info("Creating preprocessor '%s' task for variable '%s'", variable['preprocessor'], variable['short_name']) variables = _limit_datasets(variables, profile, config_user.get('max_datasets')) - + for variable in variables: + _add_cmor_info(variable) # Create preprocessor task(s) derive_tasks = [] if variable.get('derive'): # Create tasks to prepare the input data for the derive step derive_profile, profile = _split_derive_profile(profile) - - derive_input = {} - for variable in variables: - _update_cmor_table( - table=variable['cmor_table'], - mip=variable['mip'], - short_name=variable['short_name']) - _add_cmor_info(variable) - if not variable.get('force_derivation') and get_input_filelist( - variable=variable, - rootpath=config_user['rootpath'], - drs=config_user['drs']): - # No need to derive, just process normally up to derive step - short_name = variable['short_name'] - if short_name not in derive_input: - derive_input[short_name] = [] - derive_input[short_name].append(variable) - else: - # Process input data needed to derive variable - for short_name, field in get_required(variable['short_name'], - variable['field']): - if short_name not in derive_input: - derive_input[short_name] = [] - variable = copy.deepcopy(variable) - variable['short_name'] = short_name - variable['field'] = field - variable['filename'] = get_output_file( - variable, config_user['preproc_dir']) - _add_cmor_info(variable, override=True) - derive_input[short_name].append(variable) + derive_input = _get_derive_input_variables(variables, config_user) for derive_variables in derive_input.values(): - task = _get_single_preprocessor_task(derive_variables, - derive_profile, config_user) + for derive_variable in derive_variables: + _add_cmor_info(derive_variable, override=True) + derive_name = task_name.split( + TASKSEP)[0] + TASKSEP + derive_variables[0]['variable_group'] + task = _get_single_preprocessor_task( + derive_variables, + derive_profile, + config_user, + name=derive_name) derive_tasks.append(task) - # Add CMOR info - for variable in variables: - _add_cmor_info(variable) - # Create (final) preprocessor task - profile['extract_metadata'] = {'write_ncl': write_ncl_interface} task = _get_single_preprocessor_task( - variables, profile, config_user, ancestors=derive_tasks) + variables, + profile, + config_user, + ancestor_tasks=derive_tasks, + name=task_name) return task -class Recipe(object): - """Recipe object""" +class Recipe: + """Recipe object.""" def __init__(self, raw_recipe, @@ -761,14 +792,17 @@ def __init__(self, initialize_tasks=True, recipe_file=None): """Parse a recipe file into an object.""" - self._cfg = config_user - self._recipe_file = os.path.basename(recipe_file) + self._cfg = deepcopy(config_user) + self._cfg['write_ncl_interface'] = self._need_ncl( + raw_recipe['diagnostics']) + self._filename = os.path.basename(recipe_file) self._preprocessors = raw_recipe.get('preprocessors', {}) if 'default' not in self._preprocessors: self._preprocessors['default'] = {} - self._support_ncl = self._need_ncl(raw_recipe['diagnostics']) self.diagnostics = self._initialize_diagnostics( raw_recipe['diagnostics'], raw_recipe.get('datasets', [])) + self.entity = self._initalize_provenance( + raw_recipe.get('documentation', {})) self.tasks = self.initialize_tasks() if initialize_tasks else None @staticmethod @@ -781,12 +815,21 @@ def _need_ncl(raw_diagnostics): for script in diagnostic['scripts'].values(): if script.get('script', '').lower().endswith('.ncl'): logger.info("NCL script detected, checking NCL 
version") - check_ncl_version() + check.ncl_version() return True return False + def _initalize_provenance(self, raw_documentation): + """Initialize the recipe provenance.""" + doc = deepcopy(raw_documentation) + for key in doc: + if key in TAGS: + doc[key] = replace_tags(key, doc[key]) + + return get_recipe_provenance(doc, self._filename) + def _initialize_diagnostics(self, raw_diagnostics, raw_datasets): - """Define diagnostics in recipe""" + """Define diagnostics in recipe.""" logger.debug("Retrieving diagnostics from recipe") diagnostics = {} @@ -803,47 +846,64 @@ def _initialize_diagnostics(self, raw_diagnostics, raw_datasets): variable_names = tuple(raw_diagnostic.get('variables', {})) diagnostic['scripts'] = self._initialize_scripts( name, raw_diagnostic.get('scripts'), variable_names) + for key in ('themes', 'realms'): + if key in raw_diagnostic: + for script in diagnostic['scripts'].values(): + script['settings'][key] = raw_diagnostic[key] diagnostics[name] = diagnostic return diagnostics @staticmethod def _initialize_datasets(raw_datasets): - """Define datasets used by variable""" - datasets = copy.deepcopy(raw_datasets) + """Define datasets used by variable.""" + datasets = deepcopy(raw_datasets) for dataset in datasets: for key in dataset: DATASET_KEYS.add(key) - check_duplicate_datasets(datasets) + check.duplicate_datasets(datasets) return datasets def _initialize_variables(self, raw_variable, raw_datasets): """Define variables for all datasets.""" variables = [] + raw_variable = deepcopy(raw_variable) datasets = self._initialize_datasets( raw_datasets + raw_variable.pop('additional_datasets', [])) - for dataset in datasets: - variable = dict(raw_variable) + for index, dataset in enumerate(datasets): + variable = deepcopy(raw_variable) variable.update(dataset) + variable['recipe_dataset_index'] = index if ('cmor_table' not in variable and variable.get('project') in CMOR_TABLES): variable['cmor_table'] = variable['project'] + if 'end_year' in variable and 'max_years' in self._cfg: + variable['end_year'] = min( + variable['end_year'], + variable['start_year'] + self._cfg['max_years'] - 1) variables.append(variable) required_keys = { - 'short_name', 'field', 'dataset', 'project', 'start_year', - 'end_year', 'preprocessor', 'diagnostic' + 'short_name', + 'mip', + 'dataset', + 'project', + 'start_year', + 'end_year', + 'preprocessor', + 'diagnostic', } for variable in variables: _update_from_others(variable, ['cmor_table', 'mip'], datasets) - check_variable(variable, required_keys) - variable['filename'] = get_output_file(variable, - self._cfg['preproc_dir']) + institute = get_institutes(variable) + if institute: + variable['institute'] = institute + check.variable(variable, required_keys) if 'fx_files' in variable: for fx_file in variable['fx_files']: DATASET_KEYS.add(fx_file) @@ -860,26 +920,31 @@ def _initialize_variables(self, raw_variable, raw_datasets): def _initialize_preprocessor_output(self, diagnostic_name, raw_variables, raw_datasets): - """Define variables in diagnostic""" + """Define variables in diagnostic.""" logger.debug("Populating list of variables for diagnostic %s", diagnostic_name) preprocessor_output = {} - for variable_name, raw_variable in raw_variables.items(): + for variable_group, raw_variable in raw_variables.items(): + if raw_variable is None: + raw_variable = {} + else: + raw_variable = deepcopy(raw_variable) + raw_variable['variable_group'] = variable_group if 'short_name' not in raw_variable: - raw_variable['short_name'] = variable_name + 
raw_variable['short_name'] = variable_group raw_variable['diagnostic'] = diagnostic_name raw_variable['preprocessor'] = str( raw_variable.get('preprocessor', 'default')) - preprocessor_output[variable_name] = \ + preprocessor_output[variable_group] = \ self._initialize_variables(raw_variable, raw_datasets) return preprocessor_output def _initialize_scripts(self, diagnostic_name, raw_scripts, variable_names): - """Define script in diagnostic""" + """Define script in diagnostic.""" if not raw_scripts: return {} @@ -888,14 +953,14 @@ def _initialize_scripts(self, diagnostic_name, raw_scripts, scripts = {} for script_name, raw_settings in raw_scripts.items(): - raw_script = raw_settings.pop('script') + settings = deepcopy(raw_settings) + script = settings.pop('script') ancestors = [] - for id_glob in raw_settings.pop('ancestors', variable_names): + for id_glob in settings.pop('ancestors', variable_names): if TASKSEP not in id_glob: id_glob = diagnostic_name + TASKSEP + id_glob ancestors.append(id_glob) - settings = dict(copy.deepcopy(raw_settings)) - settings['recipe'] = self._recipe_file + settings['recipe'] = self._filename settings['version'] = __version__ settings['script'] = script_name # Add output dirs to settings @@ -903,14 +968,21 @@ def _initialize_scripts(self, diagnostic_name, raw_scripts, settings[dir_name] = os.path.join(self._cfg[dir_name], diagnostic_name, script_name) # Copy other settings - if self._support_ncl: + if self._cfg['write_ncl_interface']: settings['exit_on_ncl_warning'] = self._cfg['exit_on_warning'] - for key in ('max_data_filesize', 'output_file_type', 'log_level', - 'write_plots', 'write_netcdf'): + for key in ( + 'max_data_filesize', + 'output_file_type', + 'log_level', + 'write_plots', + 'write_netcdf', + 'profile_diagnostic', + 'auxiliary_data_dir', + ): settings[key] = self._cfg[key] scripts[script_name] = { - 'script': raw_script, + 'script': script, 'output_dir': settings['work_dir'], 'settings': settings, 'ancestors': ancestors, @@ -919,7 +991,8 @@ def _initialize_scripts(self, diagnostic_name, raw_scripts, return scripts def _resolve_diagnostic_ancestors(self, tasks): - """Resolve diagnostic ancestors""" + """Resolve diagnostic ancestors.""" + tasks = {t.name: t for t in tasks} for diagnostic_name, diagnostic in self.diagnostics.items(): for script_name, script_cfg in diagnostic['scripts'].items(): task_id = diagnostic_name + TASKSEP + script_name @@ -931,52 +1004,72 @@ def _resolve_diagnostic_ancestors(self, tasks): ancestor_ids = fnmatch.filter(tasks, id_glob) if not ancestor_ids: raise RecipeError( - "Could not find any ancestors matching {}" - .format(id_glob)) + "Could not find any ancestors matching {}". 
+ format(id_glob)) logger.debug("Pattern %s matches %s", id_glob, ancestor_ids) ancestors.extend(tasks[a] for a in ancestor_ids) tasks[task_id].ancestors = ancestors def initialize_tasks(self): - """Define tasks in recipe""" + """Define tasks in recipe.""" logger.info("Creating tasks from recipe") - tasks = {} + tasks = set() for diagnostic_name, diagnostic in self.diagnostics.items(): logger.info("Creating tasks for diagnostic %s", diagnostic_name) # Create preprocessor tasks - for variable_name in diagnostic['preprocessor_output']: - task_id = diagnostic_name + TASKSEP + variable_name - logger.info("Creating preprocessor task %s", task_id) + for variable_group in diagnostic['preprocessor_output']: + task_name = diagnostic_name + TASKSEP + variable_group + logger.info("Creating preprocessor task %s", task_name) task = _get_preprocessor_task( - variables=diagnostic['preprocessor_output'][variable_name], + variables=diagnostic['preprocessor_output'] + [variable_group], profiles=self._preprocessors, config_user=self._cfg, - write_ncl_interface=self._support_ncl) - tasks[task_id] = task - - if not self._cfg['run_diagnostic']: - continue + task_name=task_name) + tasks.add(task) # Create diagnostic tasks for script_name, script_cfg in diagnostic['scripts'].items(): - task_id = diagnostic_name + TASKSEP + script_name - logger.info("Creating diagnostic task %s", task_id) + task_name = diagnostic_name + TASKSEP + script_name + logger.info("Creating diagnostic task %s", task_name) task = DiagnosticTask( script=script_cfg['script'], output_dir=script_cfg['output_dir'], - settings=script_cfg['settings']) - tasks[task_id] = task + settings=script_cfg['settings'], + name=task_name) + tasks.add(task) + + check.tasks_valid(tasks) # Resolve diagnostic ancestors self._resolve_diagnostic_ancestors(tasks) + # Select only requested tasks + tasks = get_flattened_tasks(tasks) + if not self._cfg.get('run_diagnostic'): + tasks = {t for t in tasks if isinstance(t, PreprocessingTask)} + if self._cfg.get('diagnostics'): + names = {t.name for t in tasks} + selection = set() + for pattern in self._cfg.get('diagnostics'): + selection |= set(fnmatch.filter(names, pattern)) + tasks = {t for t in tasks if t.name in selection} + + tasks = get_flattened_tasks(tasks) + logger.info("These tasks will be executed: %s", + ', '.join(t.name for t in tasks)) + + # Initialize task provenance + for task in tasks: + task.initialize_provenance(self.entity) + # TODO: check that no loops are created (will throw RecursionError) # Return smallest possible set of tasks - return get_independent_tasks(tasks.values()) + return get_independent_tasks(tasks) def __str__(self): """Get human readable summary.""" diff --git a/esmvaltool/_recipe_checks.py b/esmvaltool/_recipe_checks.py new file mode 100644 index 0000000000..a6ab607df0 --- /dev/null +++ b/esmvaltool/_recipe_checks.py @@ -0,0 +1,120 @@ +"""Module with functions to check a recipe.""" +import logging +import os +import subprocess + +import yamale + +from ._data_finder import get_start_end_year +from ._task import get_flattened_tasks, which +from .preprocessor import PreprocessingTask + +logger = logging.getLogger(__name__) + + +class RecipeError(Exception): + """Recipe contains an error.""" + + +def ncl_version(): + """Check the NCL version.""" + ncl = which('ncl') + if not ncl: + raise RecipeError("Recipe contains NCL scripts, but cannot find " + "an NCL installation.") + try: + cmd = [ncl, '-V'] + version = subprocess.check_output(cmd, universal_newlines=True) + except 
subprocess.CalledProcessError:
+        logger.error("Failed to execute '%s'", ' '.join(cmd))
+        raise RecipeError("Recipe contains NCL scripts, but your NCL "
+                          "installation appears to be broken.")
+
+    version = version.strip()
+    logger.info("Found NCL version %s", version)
+
+    major, minor = (int(i) for i in version.split('.')[:2])
+    if major < 6 or (major == 6 and minor < 4):
+        raise RecipeError("NCL version 6.4 or higher is required to run "
+                          "a recipe containing NCL scripts.")
+
+
+def recipe_with_schema(filename):
+    """Check if the recipe content matches schema."""
+    schema_file = os.path.join(os.path.dirname(__file__), 'recipe_schema.yml')
+    logger.debug("Checking recipe against schema %s", schema_file)
+    recipe = yamale.make_data(filename)
+    schema = yamale.make_schema(schema_file)
+    yamale.validate(schema, recipe)
+
+
+def diagnostics(diags):
+    """Check diagnostics in recipe."""
+    for name, diagnostic in diags.items():
+        if 'scripts' not in diagnostic:
+            raise RecipeError(
+                "Missing scripts section in diagnostic {}".format(name))
+        variable_names = tuple(diagnostic.get('variables', {}))
+        scripts = diagnostic.get('scripts')
+        if scripts is None:
+            scripts = {}
+        for script_name, script in scripts.items():
+            if script_name in variable_names:
+                raise RecipeError(
+                    "Invalid script name {} encountered in diagnostic {}: "
+                    "scripts cannot have the same name as variables.".format(
+                        script_name, name))
+            if not script.get('script'):
+                raise RecipeError(
+                    "No script defined for script {} in diagnostic {}".format(
+                        script_name, name))
+
+
+def duplicate_datasets(datasets):
+    """Check for duplicate datasets."""
+    checked_datasets_ = []
+    for dataset in datasets:
+        if dataset in checked_datasets_:
+            raise RecipeError(
+                "Duplicate dataset {} in datasets section".format(dataset))
+        checked_datasets_.append(dataset)
+
+
+def variable(var, required_keys):
+    """Check variables as derived from recipe."""
+    required = set(required_keys)
+    missing = required - set(var)
+    if missing:
+        raise RecipeError(
+            "Missing keys {} from variable {} in diagnostic {}".format(
+                missing, var.get('short_name'), var.get('diagnostic')))
+
+
+def data_availability(input_files, var):
+    """Check if the required input data is available."""
+    if not input_files:
+        raise RecipeError("No input files found for variable {}".format(var))
+
+    required_years = set(range(var['start_year'], var['end_year'] + 1))
+    available_years = set()
+    for filename in input_files:
+        start, end = get_start_end_year(filename)
+        available_years.update(range(start, end + 1))
+
+    missing_years = required_years - available_years
+    if missing_years:
+        raise RecipeError(
+            "No input data available for years {} in files {}".format(
+                ", ".join(str(year) for year in missing_years), input_files))
+
+
+def tasks_valid(tasks):
+    """Check that tasks are consistent."""
+    filenames = set()
+    msg = "Duplicate preprocessor filename {}, please file a bug report."
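+    # Editor's note: product filenames must be unique across all tasks;
+    # two products writing the same file would silently overwrite each
+    # other, hence the hard failure below.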
+ for task in get_flattened_tasks(tasks): + if isinstance(task, PreprocessingTask): + for product in task.products: + if product.filename in filenames: + raise ValueError(msg.format(product.filename)) + filenames.add(product.filename) diff --git a/esmvaltool/_task.py b/esmvaltool/_task.py index 05947401c5..90aa3c4c0f 100644 --- a/esmvaltool/_task.py +++ b/esmvaltool/_task.py @@ -1,18 +1,23 @@ -"""ESMValtool task definition""" +"""ESMValtool task definition.""" import contextlib import datetime import errno import logging +import numbers import os import pprint import subprocess import threading import time +from copy import deepcopy from multiprocessing import Pool, cpu_count import psutil import yaml +from ._config import TAGS, replace_tags +from ._provenance import TrackedFile, get_task_provenance + logger = logging.getLogger(__name__) DATASET_KEYS = { @@ -112,60 +117,95 @@ def _log_resource_usage(): thread.join() -def write_ncl_settings(settings, filename, mode='wt'): - """Write settings to NCL file.""" - logger.debug("Writing NCL configuration file %s", filename) - - def _format(value): - """Format string or list as NCL""" - if value is None or isinstance(value, str): - txt = '"{}"'.format(value) - elif isinstance(value, (list, tuple)): - # TODO: convert None to fill value? - # If an array contains a str, make all items str - if any(isinstance(v, str) or v is None for v in value): - value = [(str(v)) for v in value] - txt = '(/{}/)'.format(', '.join(_format(v) for v in value)) +def _py2ncl(value, var_name=''): + """Format a structure of Python list/dict/etc items as NCL.""" + txt = var_name + ' = ' if var_name else '' + if value is None: + txt += '_Missing' + elif isinstance(value, str): + txt += '"{}"'.format(value) + elif isinstance(value, (list, tuple)): + if not value: + txt += '_Missing' else: - txt = str(value) - return txt + if isinstance(value[0], numbers.Real): + type_ = numbers.Real + else: + type_ = type(value[0]) + if any(not isinstance(v, type_) for v in value): + raise ValueError( + "NCL array cannot be mixed type: {}".format(value)) + txt += '(/{}/)'.format(', '.join(_py2ncl(v) for v in value)) + elif isinstance(value, dict): + if not var_name: + raise ValueError( + "NCL does not support nested dicts: {}".format(value)) + txt += 'True\n' + for key in value: + txt += '{}@{} = {}\n'.format(var_name, key, _py2ncl(value[key])) + else: + txt += str(value) + return txt - def _format_dict(name, dictionary): - """Format dict as NCL""" - lines = ['{} = True'.format(name)] - for key, value in sorted(dictionary.items()): - lines.append('{}@{} = {}'.format(name, key, _format(value))) - txt = '\n'.join(lines) - return txt - def _header(name): - """Delete any existing NCL variable known as `name`.""" - return ('if (isvar("{name}")) then\n' - ' delete({name})\n' - 'end if\n'.format(name=name)) +def write_ncl_settings(settings, filename, mode='wt'): + """Write a dictionary with generic settings to NCL file.""" + logger.debug("Writing NCL configuration file %s", filename) + + def _ncl_type(value): + """Convert some Python types to NCL types.""" + typemap = { + bool: 'logical', + str: 'string', + float: 'double', + int: 'int64', + dict: 'logical', + } + for type_ in typemap: + if isinstance(value, type_): + return typemap[type_] + raise ValueError("Unable to map {} to an NCL type".format(type(value))) lines = [] - for key, value in sorted(settings.items()): - txt = _header(name=key) - if isinstance(value, dict): - txt += _format_dict(name=key, dictionary=value) + for var_name, value in 
sorted(settings.items()):
+        if isinstance(value, (list, tuple)):
+            # Create an NCL list that can span multiple files
+            lines.append('if (.not. isdefined("{var_name}")) then\n'
+                         '  {var_name} = NewList("fifo")\n'
+                         'end if\n'.format(var_name=var_name))
+            for item in value:
+                lines.append('ListAppend({var_name}, new(1, {type}))\n'
+                             'i = ListCount({var_name}) - 1'.format(
+                                 var_name=var_name, type=_ncl_type(item)))
+                lines.append(_py2ncl(item, var_name + '[i]'))
         else:
-            txt += '{} = {}'.format(key, _format(value))
-        lines.append(txt)
+            # Create an NCL variable that overwrites previous variables
+            lines.append('if (isvar("{var_name}")) then\n'
+                         '  delete({var_name})\n'
+                         'end if\n'.format(var_name=var_name))
+            lines.append(_py2ncl(value, var_name))
+
     with open(filename, mode) as file:
-        file.write('\n\n'.join(lines))
+        file.write('\n'.join(lines))
         file.write('\n')
 
 
-class AbstractTask(object):
-    """Base class for defining task classes"""
+class BaseTask:
+    """Base class for defining task classes."""
 
-    def __init__(self, settings, output_dir, ancestors=None):
+    def __init__(self, ancestors=None, name=''):
         """Initialize task."""
-        self.settings = settings
         self.ancestors = [] if ancestors is None else ancestors
-        self.output_dir = output_dir
         self.output_files = None
+        self.name = name
+        self.activity = None
+
+    def initialize_provenance(self, recipe_entity):
+        """Initialize task provenance activity."""
+        if self.activity is not None:
+            raise ValueError(
+                "Provenance of {} already initialized".format(self))
+        self.activity = get_task_provenance(self, recipe_entity)
 
     def flatten(self):
         """Return a flattened set of all ancestor tasks and task itself."""
@@ -182,7 +222,10 @@ def run(self, input_files=None):
             input_files = []
             for task in self.ancestors:
                 input_files.extend(task.run())
+            logger.info("Starting task %s in process [%s]", self.name,
+                        os.getpid())
             self.output_files = self._run(input_files)
+            logger.info("Successfully completed task %s", self.name)
 
         return self.output_files
 
@@ -196,33 +239,32 @@ def str(self):
         def _indent(txt):
             return '\n'.join('\t' + line for line in txt.split('\n'))
 
-        txt = 'settings:\n{}\nancestors:\n{}'.format(
-            pprint.pformat(self.settings, indent=2),
-            '\n\n'.join(_indent(str(task)) for task in self.ancestors)
-            if self.ancestors else 'None',
-        )
+        txt = 'ancestors:\n{}'.format('\n\n'.join(
+            _indent(str(task))
+            for task in self.ancestors) if self.ancestors else 'None')
         return txt
 
 
 class DiagnosticError(Exception):
-    """Error in diagnostic"""
+    """Error in diagnostic."""
 
 
-class DiagnosticTask(AbstractTask):
-    """Task for running a diagnostic"""
+class DiagnosticTask(BaseTask):
+    """Task for running a diagnostic."""
 
-    def __init__(self, script, settings, output_dir, ancestors=None):
-        """Initialize"""
-        super(DiagnosticTask, self).__init__(
-            settings=settings, output_dir=output_dir, ancestors=ancestors)
+    def __init__(self, script, settings, output_dir, ancestors=None, name=''):
+        """Create a diagnostic task."""
+        super(DiagnosticTask, self).__init__(ancestors=ancestors, name=name)
         self.script = script
+        self.settings = settings
+        self.products = set()
+        self.output_dir = output_dir
         self.cmd = self._initialize_cmd(script)
         self.log = os.path.join(settings['run_dir'], 'log.txt')
         self.resource_log = os.path.join(settings['run_dir'],
                                          'resource_usage.txt')
 
-    @staticmethod
-    def _initialize_cmd(script):
+    def _initialize_cmd(self, script):
         """Create an executable command from script."""
         diagnostics_root = os.path.join(
             os.path.dirname(__file__), 'diag_scripts')
@@ -236,11 +278,26 @@ def _initialize_cmd(self, script):
         cmd = []
         if not os.access(script_file, os.X_OK):  # if not executable
             extension = os.path.splitext(script)[1].lower()[1:]
-            executables = {
-                'py': [which('python')],
-                'ncl': [which('ncl'), '-n', '-p'],
-                'r': [which('Rscript'), '--slave', '--quiet'],
-            }
+            if not self.settings['profile_diagnostic']:
+                executables = {
+                    'py': [which('python')],
+                    'ncl': [which('ncl'), '-n', '-p'],
+                    'r': [which('Rscript')],
+                    'jl': [which('julia')],
+                }
+            else:
+                profile_file = os.path.join(self.settings['run_dir'],
                                            'profile.bin')
+                executables = {
+                    'py': [
+                        which('python'), '-m', 'vmprof', '--lines', '-o',
+                        profile_file
+                    ],
+                    'ncl': [which('ncl'), '-n', '-p'],
+                    'r': [which('Rscript')],
+                    'jl': [which('julia')],
+                }
+
             if extension not in executables:
                 raise DiagnosticError(
                     "Cannot execute script {} ({}): non-executable file "
@@ -253,7 +310,7 @@ def _initialize_cmd(self, script):
         return cmd
 
     def write_settings(self):
-        """Write settings to file"""
+        """Write settings to file."""
         run_dir = self.settings['run_dir']
         if not os.path.exists(run_dir):
             os.makedirs(run_dir)
@@ -271,7 +328,7 @@ def write_settings(self):
         return filename
 
     def _write_ncl_settings(self):
-        """Write settings to NCL file"""
+        """Write settings to NCL file."""
         filename = os.path.join(self.settings['run_dir'],
                                 'settings.ncl')
 
         config_user_keys = {
@@ -339,16 +396,17 @@ def _control_ncl_execution(self, process, lines):
                 "There were warnings during the execution of NCL script %s, "
                 "for details, see the log %s", self.script, self.log)
 
-    def _start_diagnostic_script(self, cmd, env, cwd):
+    def _start_diagnostic_script(self, cmd, env):
         """Start the diagnostic script."""
         logger.info("Running command %s", cmd)
         logger.debug("in environment\n%s", pprint.pformat(env))
+        cwd = self.settings['run_dir']
         logger.debug("in current working directory: %s", cwd)
         logger.info("Writing output to %s", self.output_dir)
         logger.info("Writing plots to %s", self.settings['plot_dir'])
         logger.info("Writing log to %s", self.log)
 
-        rerun_msg = '' if cwd is None else 'cd {}; '.format(cwd)
+        rerun_msg = 'cd {}; '.format(cwd)
         if env:
             rerun_msg += ' '.join('{}="{}"'.format(k, env[k]) for k in env
                                   if k not in os.environ)
@@ -358,6 +416,7 @@ def _start_diagnostic_script(self, cmd, env):
         try:
             process = subprocess.Popen(
                 cmd,
+                bufsize=2**20,  # Use a large buffer to prevent NCL crash
                 stdout=subprocess.PIPE,
                 stderr=subprocess.STDOUT,
                 cwd=cwd,
@@ -381,36 +440,33 @@ def _run(self, input_files):
         is_ncl_script = self.script.lower().endswith('.ncl')
         if is_ncl_script:
-            input_files = [
+            self.settings['input_files'] = [
                 f for f in input_files
                 if f.endswith('.ncl') or os.path.isdir(f)
             ]
         else:
-            input_files = [
+            self.settings['input_files'] = [
                 f for f in input_files
                 if f.endswith('.yml') or os.path.isdir(f)
             ]
-        self.settings['input_files'] = input_files
-
-        cmd = list(self.cmd)
-        cwd = None
-        env = None
-
-        settings_file = self.write_settings()
-
-        if not self.script.lower().endswith('.py'):
-            env = dict(os.environ)
+        env = dict(os.environ)
+        if self.script.lower().endswith('.py'):
+            # Set non-interactive matplotlib backend
+            env['MPLBACKEND'] = 'Agg'
+        else:
+            # Make diag_scripts path available to diagnostic scripts
             env['diag_scripts'] = os.path.join(
                 os.path.dirname(__file__), 'diag_scripts')
 
+        cmd = list(self.cmd)
+        settings_file = self.write_settings()
         if is_ncl_script:
-            cwd = os.path.dirname(__file__)
             env['settings'] = settings_file
         else:
            cmd.append(settings_file)
 
-        process = self._start_diagnostic_script(cmd, env, cwd)
+        process =
self._start_diagnostic_script(cmd, env) returncode = None last_line = [''] @@ -436,17 +492,82 @@ def _run(self, input_files): time.sleep(0.001) if returncode == 0: + logger.debug("Script %s completed successfully", self.script) + self._collect_provenance() return [self.output_dir] raise DiagnosticError( "Diagnostic script {} failed with return code {}. See the log " "in {}".format(self.script, returncode, self.log)) + def _collect_provenance(self): + """Process provenance information provided by the diagnostic script.""" + provenance_file = os.path.join(self.settings['run_dir'], + 'diagnostic_provenance.yml') + if not os.path.exists(provenance_file): + logger.warning("No provenance information was written to %s", + provenance_file) + return + + logger.debug("Collecting provenance from %s", provenance_file) + start = time.time() + with open(provenance_file, 'r') as file: + table = yaml.safe_load(file) + + ignore = ( + 'auxiliary_data_dir', + 'exit_on_ncl_warning', + 'input_files', + 'log_level', + 'max_data_filesize', + 'output_file_type', + 'plot_dir', + 'profile_diagnostic', + 'recipe', + 'run_dir', + 'version', + 'write_netcdf', + 'write_ncl_interface', + 'write_plots', + 'work_dir', + ) + attrs = { + 'script_file': self.script, + } + for key in self.settings: + if key not in ignore: + attrs[key] = self.settings[key] + + ancestor_products = {p for a in self.ancestors for p in a.products} + + for filename, attributes in table.items(): + # copy to avoid updating other entries if file contains anchors + attributes = deepcopy(attributes) + ancestor_files = attributes.pop('ancestors', []) + ancestors = { + p + for p in ancestor_products if p.filename in ancestor_files + } + + attributes.update(deepcopy(attrs)) + for key in attributes: + if key in TAGS: + attributes[key] = replace_tags(key, attributes[key]) + + product = TrackedFile(filename, attributes, ancestors) + product.initialize_provenance(self.activity) + product.save_provenance() + self.products.add(product) + logger.debug("Collecting provenance of task %s took %.1f seconds", + self.name, + time.time() - start) + def __str__(self): """Get human readable description.""" - txt = "{}:\nscript: {}\n{}".format( + txt = "{}:\nscript: {}\n{}\nsettings:\n{}\n".format( self.__class__.__name__, self.script, + pprint.pformat(self.settings, indent=2), super(DiagnosticTask, self).str(), ) return txt @@ -476,7 +597,7 @@ def run_tasks(tasks, max_parallel_tasks=None): def _run_tasks_sequential(tasks): - """Run tasks sequentially""" + """Run tasks sequentially.""" n_tasks = len(get_flattened_tasks(tasks)) logger.info("Running %s tasks sequentially", n_tasks) @@ -485,7 +606,7 @@ def _run_tasks_sequential(tasks): def _run_tasks_parallel(tasks, max_parallel_tasks=None): - """Run tasks in parallel""" + """Run tasks in parallel.""" scheduled = get_flattened_tasks(tasks) running = [] results = [] @@ -517,7 +638,14 @@ def done(task): # Handle completed tasks for task, result in zip(running, results): if result.ready(): - task.output_files = result.get() + task.output_files, updated_products = result.get() + for updated in updated_products: + for original in task.products: + if original.filename == updated.filename: + updated.copy_provenance(target=original) + break + else: + task.products.add(updated) running.remove(task) results.remove(result) @@ -529,9 +657,10 @@ def done(task): if len(scheduled) != n_scheduled or len(running) != n_running: n_scheduled, n_running = len(scheduled), len(running) n_done = n_tasks - n_scheduled - n_running - 
logger.info("Progress: %s tasks running or queued, %s tasks " - "waiting for ancestors, %s/%s done", n_running, - n_scheduled, n_done, n_tasks) + logger.info( + "Progress: %s tasks running or queued, %s tasks waiting for " + "ancestors, %s/%s done", n_running, n_scheduled, n_done, + n_tasks) pool.close() pool.join() @@ -539,4 +668,5 @@ def done(task): def _run_task(task): """Run task and return the result.""" - return task.run() + output_files = task.run() + return output_files, task.products diff --git a/esmvaltool/_version.py b/esmvaltool/_version.py index 0c0137f85a..40605f3912 100644 --- a/esmvaltool/_version.py +++ b/esmvaltool/_version.py @@ -1,2 +1,2 @@ """ESMValTool version""" -__version__ = '2.0a1' +__version__ = '2.0a2' diff --git a/esmvaltool/cmor/_fixes/CMIP5/BNU_ESM.py b/esmvaltool/cmor/_fixes/CMIP5/BNU_ESM.py index ecf981dc97..22c86aaee2 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/BNU_ESM.py +++ b/esmvaltool/cmor/_fixes/CMIP5/BNU_ESM.py @@ -1,33 +1,35 @@ -"""Fixes for BNU ESM model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for BNU ESM model.""" from cf_units import Unit +from dask import array as da from ..fix import Fix class fgco2(Fix): - """Fixes for fgco2""" + """Fixes for fgco2.""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix metadata + Fix metadata. Fixes cube units Parameters ---------- - cube: iris.cube.Cube + cube: iris.cube.CubeList Returns ------- iris.cube.Cube """ - cube.units = Unit('kg m-2 s-1') - return cube + self.get_cube_from_list(cubes).units = Unit('kg m-2 s-1') + return cubes def fix_data(self, cube): """ - Fix data + Fix data. Fixes cube units @@ -47,29 +49,29 @@ def fix_data(self, cube): class ch4(Fix): - """Fixes for ch4""" + """Fixes for ch4.""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix metadata + Fix metadata. Fixes cube units Parameters ---------- - cube: iris.cube.Cube + cubes: iris.cube.CubeList Returns ------- - iris.cube.Cube + iris.cube.CubeList """ - cube.units = Unit('1e-9') - return cube + self.get_cube_from_list(cubes).units = Unit('1e-9') + return cubes def fix_data(self, cube): """ - Fix metadata + Fix metadata. Fixes cube units @@ -89,29 +91,29 @@ def fix_data(self, cube): class co2(Fix): - """Fixes for co2""" + """Fixes for co2.""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix metadata + Fix metadata. Fixes cube units Parameters ---------- - cube: iris.cube.Cube + cubes: iris.cube.CubeList Returns ------- - iris.cube.Cube + iris.cube.CubeList """ - cube.units = Unit('1e-6') - return cube + self.get_cube_from_list(cubes).units = Unit('1e-6') + return cubes def fix_data(self, cube): """ - Fix data + Fix data. Fixes cube units @@ -131,11 +133,11 @@ def fix_data(self, cube): class spco2(Fix): - """Fixes for spco2""" + """Fixes for spco2.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes cube units @@ -154,6 +156,28 @@ def fix_data(self, cube): return cube +class od550aer(Fix): + """Fixes for od550aer.""" + + def fix_data(self, cube): + """ + Fix data. + + Masks invalid values. 
+ + Parameters + ---------- + cube: iris.cube.Cube + + Returns + ------- + iris.cube.Cube + + """ + data = da.ma.masked_equal(cube.core_data(), 1.e36) + return cube.copy(data) + + # No clear way to apply this fix now that we are working with cubes, not files # class sftlf(Fix): diff --git a/esmvaltool/cmor/_fixes/CMIP5/CCSM4.py b/esmvaltool/cmor/_fixes/CMIP5/CCSM4.py index ae681624df..995db84a0b 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/CCSM4.py +++ b/esmvaltool/cmor/_fixes/CMIP5/CCSM4.py @@ -1,4 +1,5 @@ -"""Fixes for CCSM4 model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for CCSM4 model.""" import numpy as np from ..fix import Fix @@ -6,53 +7,59 @@ # noinspection PyPep8Naming class rlut(Fix): - """Fixes for rlut""" + """Fixes for rlut.""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units Parameters ---------- - cube: iris.cube.Cube + cube: iris.cube.CubeList Returns ------- iris.cube.Cube """ + cube = self.get_cube_from_list(cubes) lat = cube.coord('latitude') lat.points = np.round(lat.points, 3) lat.bounds = np.round(lat.bounds, 3) - return cube + return cubes class rlutcs(rlut): - """Fixes for rlut""" + """Fixes for rlutcs.""" - pass + +class rsut(rlut): + """Fixes for rsut.""" + + +class rsutcs(rlut): + """Fixes for rsutcs.""" class so(Fix): - """Fixes for so""" + """Fixes for so.""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units Parameters ---------- - cube: iris.cube.Cube + cube: iris.cube.CubeList Returns ------- iris.cube.Cube """ - cube.units = '1e3' - - return cube + self.get_cube_from_list(cubes).units = '1e3' + return cubes diff --git a/esmvaltool/cmor/_fixes/CMIP5/CESM1_BGC.py b/esmvaltool/cmor/_fixes/CMIP5/CESM1_BGC.py index 18458df963..7c5b0dfe74 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/CESM1_BGC.py +++ b/esmvaltool/cmor/_fixes/CMIP5/CESM1_BGC.py @@ -1,73 +1,17 @@ -"""Fixes for CESM1-BGC model""" -import shutil +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for CESM1-BGC model.""" -import six from cf_units import Unit -from netCDF4 import Dataset from ..fix import Fix -class nbp(Fix): - """Fixes for nbp variable""" - - def fix_file(self, filepath, output_dir): - """ - Apply fixes to the files prior to creating the cube. - - Should be used only to fix errors that prevent loading or can - not be fixed in the cube (i.e. those related with missing_value - and _FillValue) - - Parameters - ---------- - filepath: basestring - file to fix - output_dir: basestring - path to the folder to store the fixe files, if required - - Returns - ------- - basestring - Path to the corrected file. 
It can be different from the original - filepath if a fix has been applied, but if not it should be the - original filepath - - """ - temp = Fix.get_fixed_filepath(output_dir, filepath) - - original_dataset = Dataset(filepath) - new_dataset = Dataset(temp, mode='w') - - for dim_name, dimension in six.iteritems(original_dataset.dimensions): - new_dataset.createDimension(dim_name, dimension.size) - - for var_name, variable in six.iteritems(original_dataset.variables): - fill_value = variable._FillValue - if var_name == 'nbp': - fill_value = 1e+33 - new_var = new_dataset.createVariable( - var_name, - variable.datatype, - variable.dimensions, - zlib=True, - fill_value=fill_value) - attr = {k: variable.getncattr(k) for k in variable.ncattrs()} - del attr['_FillValue'] - attr['missing_value'] = 1e+33 - new_var.setncatts(attr) - new_var[...] = variable[...] - original_dataset.close - new_dataset.close() - return temp - - class co2(Fix): - """Fixes for co2 variable""" + """Fixes for co2 variable.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -84,28 +28,3 @@ def fix_data(self, cube): cube *= 28.966 / 44.0 cube.metadata = metadata return cube - - -class allvars(Fix): - """Fixes common to all vars""" - - def fix_metadata(self, cube): - """ - Fix metadata - - Fixes time units - - Parameters - ---------- - cube: iris.cube.Cube - - Returns - ------- - iris.cube.Cube - - """ - time = cube.coord('time') - if time.units.name == 'day since 1-01-01 00:00:00.000000 UTC': - time.units = Unit('days since 1850-01-01 00:00:00', - time.units.calendar) - return cube diff --git a/esmvaltool/cmor/_fixes/CMIP5/CNRM_CM5.py b/esmvaltool/cmor/_fixes/CMIP5/CNRM_CM5.py index dfc7a7abad..5c47ec88d2 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/CNRM_CM5.py +++ b/esmvaltool/cmor/_fixes/CMIP5/CNRM_CM5.py @@ -1,13 +1,14 @@ -"""Fixes for CNRM-CM5 model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for CNRM-CM5 model.""" from ..fix import Fix class msftmyz(Fix): - """Fixes for msftmyz""" + """Fixes for msftmyz.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -27,6 +28,4 @@ def fix_data(self, cube): class msftmyzba(msftmyz): - """Fixes for msftmyzba""" - - pass + """Fixes for msftmyzba.""" diff --git a/esmvaltool/cmor/_fixes/CMIP5/CanESM2.py b/esmvaltool/cmor/_fixes/CMIP5/CanESM2.py index a5ae077763..048b957376 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/CanESM2.py +++ b/esmvaltool/cmor/_fixes/CMIP5/CanESM2.py @@ -1,14 +1,15 @@ -"""Fixes for CanESM2 model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for CanESM2 model.""" from ..fix import Fix # noinspection PyPep8Naming class fgco2(Fix): - """Fixes for fgco2""" + """Fixes for fgco2.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units diff --git a/esmvaltool/cmor/_fixes/CMIP5/EC_EARTH.py b/esmvaltool/cmor/_fixes/CMIP5/EC_EARTH.py index 2129576da2..adc8c02c41 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/EC_EARTH.py +++ b/esmvaltool/cmor/_fixes/CMIP5/EC_EARTH.py @@ -1,13 +1,14 @@ -"""Fixes for EC-Earth model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for EC-Earth model.""" from ..fix import Fix class sic(Fix): - """Fixes for sic""" + """Fixes for sic.""" def fix_data(self, cube): """ - Fix data + Fix data. 
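Several fix_data methods in this patch share one idiom: arithmetic on an iris cube discards its metadata, so the metadata is saved and restored around the scaling. A minimal sketch with illustrative numbers and names:

    import iris.cube
    import numpy as np

    cube = iris.cube.Cube(np.array([0.5, 0.7]), var_name='sic', units='1')

    # Save metadata, scale, restore: cube arithmetic in iris resets
    # names and attributes, which the fixes must keep intact.
    metadata = cube.metadata
    cube *= 100  # e.g. a fraction scaled to the declared percent
    cube.metadata = metadata
    print(cube.data, cube.var_name)  # [50. 70.] sic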
Fixes discrepancy between declared units and real units @@ -27,11 +28,11 @@ def fix_data(self, cube): class sftlf(Fix): - """Fixes for sftlf""" + """Fixes for sftlf.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units diff --git a/esmvaltool/cmor/_fixes/CMIP5/FGOALS_g2.py b/esmvaltool/cmor/_fixes/CMIP5/FGOALS_g2.py index 98e4f981ad..7395c5e6b8 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/FGOALS_g2.py +++ b/esmvaltool/cmor/_fixes/CMIP5/FGOALS_g2.py @@ -1,3 +1,4 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods """Fixes for FGOALS-g2 model""" from cf_units import Unit @@ -7,21 +8,22 @@ class allvars(Fix): """Fixes common to all vars""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix metadata + Fix metadata. Fixes time units Parameters ---------- - cube: iris.cube.Cube + cube: iris.cube.CubeList Returns ------- iris.cube.Cube """ - time = cube.coord('time') - time.units = Unit(time.units.name, time.units.calendar) - return cube + for cube in cubes: + time = cube.coord('time') + time.units = Unit(time.units.name, time.units.calendar) + return cubes diff --git a/esmvaltool/cmor/_fixes/CMIP5/FIO_ESM.py b/esmvaltool/cmor/_fixes/CMIP5/FIO_ESM.py index 3f3cf0a488..491af3c861 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/FIO_ESM.py +++ b/esmvaltool/cmor/_fixes/CMIP5/FIO_ESM.py @@ -1,13 +1,14 @@ -"""Fixes for FIO ESM model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for FIO ESM model.""" from ..fix import Fix class co2(Fix): - """Fixes for co2""" + """Fixes for co2.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -27,11 +28,11 @@ def fix_data(self, cube): class ch4(Fix): - """Fixes for co2""" + """Fixes for ch4.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units diff --git a/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM2p1.py b/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM2p1.py index 2c1b5a1dd6..af97ded041 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM2p1.py +++ b/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM2p1.py @@ -1,13 +1,19 @@ -"""Fixes for GFDL CM2p1 model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for GFDL CM2p1 model.""" from ..fix import Fix +from ..CMIP5.GFDL_ESM2G import allvars as base_allvars + + +class allvars(base_allvars): + """Fixes for all variables.""" class sftof(Fix): - """Fixes for sftof""" + """Fixes for sftof.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units diff --git a/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM3.py b/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM3.py index 88c6676710..b796f8b0f7 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM3.py +++ b/esmvaltool/cmor/_fixes/CMIP5/GFDL_CM3.py @@ -1,13 +1,20 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods """Fixes for GFDL CM3 model""" from ..fix import Fix +from ..CMIP5.GFDL_ESM2G import allvars as base_allvars + + +class allvars(base_allvars): + """Fixes for all variables.""" + class sftof(Fix): """Fixes for sftof""" def fix_data(self, cube): """ - Fix data + Fix data. 
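The FGOALS-g2 fix above rebuilds each time unit from its own name and calendar. For context, a hedged illustration of why the calendar matters when working with time units in cf_units:

    from cf_units import Unit

    # Two units with the same origin but different calendars are not
    # interchangeable; equality (and conversion) is calendar-aware.
    std = Unit('days since 1850-01-01', calendar='standard')
    noleap = Unit('days since 1850-01-01', calendar='365_day')
    print(std == noleap)  # False

Rebuilding Unit(time.units.name, time.units.calendar), as the fix does, yields a freshly normalised unit object for files whose headers spell an equivalent unit slightly differently.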
Fixes discrepancy between declared units and real units diff --git a/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2G.py b/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2G.py index 202b1b4ce2..b2e861d89d 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2G.py +++ b/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2G.py @@ -1,15 +1,49 @@ -"""Fixes for GFDL ESM2G""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for GFDL ESM2G.""" + +import iris + from ..fix import Fix +def _get_and_remove(cubes, long_name): + try: + cube = cubes.extract_strict(long_name) + cubes.remove(cube) + except iris.exceptions.ConstraintMismatchError: + pass + + +class allvars(Fix): + """Common fixes.""" + + def fix_metadata(self, cubes): + """Fix metadata. + + Fixes bad standard names. + + Parameters + ---------- + cubes: iris.cube.CubeList + + Returns + ------- + iris.cube.CubeList + + """ + _get_and_remove(cubes, 'Start time for average period') + _get_and_remove(cubes, 'End time for average period') + _get_and_remove(cubes, 'Length of average period') + return cubes + + class co2(Fix): - """Fixes for co2""" + """Fixes for co2.""" def fix_data(self, cube): - """ - Fix data + """Fix data. - Fixes discrepancy between declared units and real units + Fixes discrepancy between declared units and real units. Parameters ---------- @@ -24,3 +58,25 @@ def fix_data(self, cube): cube *= 1e6 cube.metadata = metadata return cube + + +class fgco2(Fix): + """Fixes for fgco2.""" + + def fix_metadata(self, cubes): + """Fix metadata. + + Remove unnecessary variables prohibiting cube concatenation. + + Parameters + ---------- + cubes: iris.cube.CubeList + + Returns + ------- + iris.cube.CubeList + + """ + _get_and_remove(cubes, 'Latitude of tracer (h) points') + _get_and_remove(cubes, 'Longitude of tracer (h) points') + return cubes diff --git a/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2M.py b/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2M.py index ecc558f7d4..ec161a6e70 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2M.py +++ b/esmvaltool/cmor/_fixes/CMIP5/GFDL_ESM2M.py @@ -1,33 +1,13 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods """Fixes for GFDL ESM2M""" from cf_units import Unit from ..fix import Fix +from ..CMIP5.GFDL_ESM2G import allvars as base_allvars -class allvars(Fix): - """Fixes common to all vars""" - - def fix_metadata(self, cube): - """ - Fix metadata - - Fixes errors in time units - - Parameters - ---------- - cube: iris.cube.Cube - - Returns - ------- - iris.cube.Cube - - """ - time = cube.coord('time') - if time.units.origin in ("days since 0001-01-01 00:00:00", - "days since 1-01-01 00:00:00"): - time.units = Unit('days since 1850-01-01 00:00:00', - time.units.calendar) - return cube +class allvars(base_allvars): + """Fixes for all variables""" class sftof(Fix): @@ -35,7 +15,7 @@ class sftof(Fix): def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -59,7 +39,7 @@ class co2(Fix): def fix_data(self, cube): """ - Fix data + Fix data. 
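The GFDL-ESM2G helper above strips bookkeeping cubes before concatenation. A short sketch of the same pattern against the iris 2 API this patch targets (extract_strict was later renamed extract_cube):

    import iris
    import iris.cube
    import numpy as np

    cubes = iris.cube.CubeList([
        iris.cube.Cube(np.zeros(2), var_name='fgco2'),
        iris.cube.Cube(np.zeros(1),
                       long_name='Start time for average period'),
    ])
    try:
        # A plain string matches against each cube's name().
        extra = cubes.extract_strict('Start time for average period')
        cubes.remove(extra)
    except iris.exceptions.ConstraintMismatchError:
        pass  # nothing to strip from this file
    print([c.name() for c in cubes])  # ['fgco2']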
Fixes discrepancy between declared units and real units diff --git a/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_CC.py b/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_CC.py index 7665ea28e0..33ed75be27 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_CC.py +++ b/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_CC.py @@ -1,4 +1,6 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods """Fixes for HadGEM2_CC""" +import iris import numpy as np from ..fix import Fix @@ -7,9 +9,9 @@ class allvars(Fix): """Fixes common to all vars""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fixes latitude + Fixes latitude. Parameters ---------- @@ -20,8 +22,47 @@ def fix_metadata(self, cube): iris.cube.Cube """ - lat = cube.coord('latitude') - lat.points = np.clip(lat.points, -90., 90.) - lat.bounds = np.clip(lat.bounds, -90., 90.) + for cube in cubes: + lats = cube.coords('latitude') + if lats: + lat = cube.coord('latitude') + lat.points = np.clip(lat.points, -90., 90.) + lat.bounds = np.clip(lat.bounds, -90., 90.) - return cube + return cubes + + +class o2(Fix): + """Fixes for o2""" + + def fix_file(self, filepath, output_dir): + """ + Apply fixes to the files prior to creating the cube. + + Should be used only to fix errors that prevent loading or can + not be fixed in the cube (i.e. those related with missing_value + and _FillValue or missing standard_name). + Parameters + ---------- + filepath: basestring + file to fix. + output_dir: basestring + path to the folder to store the fix files, if required. + Returns + ------- + basestring + Path to the corrected file. It can be different from the original + filepath if a fix has been applied, but if not it should be the + original filepath. + """ + new_path = Fix.get_fixed_filepath(output_dir, filepath) + cube = iris.load_cube(filepath) + + std = 'mole_concentration_of_dissolved_molecular_oxygen_in_sea_water' + long_name = 'Dissolved Oxygen Concentration' + + cube.long_name = long_name + cube.standard_name = std + + iris.save(cube, new_path) + return new_path diff --git a/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_ES.py b/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_ES.py index a6352bc7f2..b9a4e3c0e6 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_ES.py +++ b/esmvaltool/cmor/_fixes/CMIP5/HadGEM2_ES.py @@ -1,27 +1,68 @@ -"""Fixes for HadGEM2_ES""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for HadGEM2_ES.""" import numpy as np +import iris from ..fix import Fix class allvars(Fix): - """Fixes common to all vars""" + """Fixes common to all vars.""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fixes latitude + Fixes latitude. Parameters ---------- - cube: iris.cube.Cube + cube: iris.cube.CubeList Returns ------- iris.cube.Cube """ - lat = cube.coord('latitude') - lat.points = np.clip(lat.points, -90., 90.) - lat.bounds = np.clip(lat.bounds, -90., 90.) + for cube in cubes: + lats = cube.coords('latitude') + if lats: + lat = cube.coord('latitude') + lat.points = np.clip(lat.points, -90., 90.) + lat.bounds = np.clip(lat.bounds, -90., 90.) - return cube + return cubes + + +class o2(Fix): + """Fixes for o2.""" + + def fix_file(self, filepath, output_dir): + """ + Apply fixes to the files prior to creating the cube. + + Should be used only to fix errors that prevent loading or can + not be fixed in the cube (i.e. those related with missing_value + and _FillValue or missing standard_name). + Parameters + ---------- + filepath: basestring + file to fix. 
+ output_dir: basestring + path to the folder to store the fix files, if required. + Returns + ------- + basestring + Path to the corrected file. It can be different from the original + filepath if a fix has been applied, but if not it should be the + original filepath. + """ + new_path = Fix.get_fixed_filepath(output_dir, filepath) + cube = iris.load_cube(filepath) + + std = 'mole_concentration_of_dissolved_molecular_oxygen_in_sea_water' + long_name = 'Dissolved Oxygen Concentration' + + cube.long_name = long_name + cube.standard_name = std + + iris.save(cube, new_path) + return new_path diff --git a/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py b/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py index 0f46f4b46a..352b3bd2b2 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py +++ b/esmvaltool/cmor/_fixes/CMIP5/MIROC5.py @@ -1,13 +1,15 @@ -"""Fixes for MIROC5 model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for MIROC5 model.""" +import numpy as np from ..fix import Fix class sftof(Fix): - """Fixes for sftof""" + """Fixes for sftof.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -27,11 +29,11 @@ def fix_data(self, cube): class snw(Fix): - """Fixes for snw""" + """Fixes for snw.""" - def fix_metadata(self, cube): + def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -51,9 +53,7 @@ def fix_metadata(self, cube): class snc(snw): - """Fixes for snc""" - - pass + """Fixes for snc.""" # dayspermonth = (/31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31/) @@ -83,3 +83,28 @@ class snc(snw): # end do # ret = 0 # end if + + +class msftmyz(Fix): + """Fixes for msftmyz.""" + + def fix_data(self, cube): + """ + Fix data. + + Fixes mask + + Parameters + ---------- + cube: iris.cube.Cube + + Returns + ------- + iris.cube.Cube + + """ + cube.data = np.ma.array(cube.data) + cube.data = np.ma.masked_where(cube.data.mask + (cube.data == 0.), + cube.data) + + return cube diff --git a/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py b/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py index c458c97d4f..a976fb1aba 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py +++ b/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py @@ -1,3 +1,4 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods """Fixes for MIROC ESM model""" import cf_units from iris.coords import DimCoord @@ -7,11 +8,11 @@ class tro3(Fix): - """Fixes for tro3""" + """Fixes for tro3.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -33,118 +34,78 @@ def fix_data(self, cube): class co2(Fix): """Fixes for co2""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix metadata + Fix metadata. Fixes error in cube units Parameters ---------- - cube: iris.cube.Cube + cube: iris.cube.CubeList Returns ------- iris.cube.Cube """ - cube.units = cf_units.Unit('1.0e-6') - return cube + self.get_cube_from_list(cubes).units = '1.0e-6' + return cubes class gpp(Fix): """Fixes for gpp""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix metadata + Fix metadata. 
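Both HadGEM2 o2 fixes use fix_file because, as their docstrings note, that hook is reserved for problems best repaired before a cube exists. A condensed sketch with hypothetical file names standing in for filepath and the Fix.get_fixed_filepath() result:

    import iris

    # 'o2_raw.nc' and 'o2_fixed.nc' are placeholder paths.
    cube = iris.load_cube('o2_raw.nc')
    cube.standard_name = (
        'mole_concentration_of_dissolved_molecular_oxygen_in_sea_water')
    cube.long_name = 'Dissolved Oxygen Concentration'
    iris.save(cube, 'o2_fixed.nc')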
Fixes error in cube units Parameters ---------- - cube: iris.cube.Cube + cube: iris.cube.CubeList Returns ------- - iris.cube.Cube + iris.cube.CubeList """ # Fixing the metadata, automatic unit conversion should do the trick - cube.units = cf_units.Unit('g m-2 day-1') - return cube + self.get_cube_from_list(cubes).units = cf_units.Unit('g m-2 day-1') + return cubes class allvars(Fix): """Common fixes to all vars""" - def fix_metadata(self, cube): - """ - Fix metadata - - Fixes errors in time units and correct air_pressure coordinate, - sometimes called AR5PL35 - - Parameters - ---------- - cube: iris.cube.Cube - - Returns - ------- - iris.cube.Cube - - """ - try: - time = cube.coord('time') - if time.units.calendar: - calendar = time.units.calendar - else: - calendar = 'standard' - - if time.units.origin == 'days since 0000-01-01 00:00:00': - time.units = cf_units.Unit( - 'days since 1849-01-01 00:00:00', calendar=calendar) - elif time.units.origin == 'days since 1-1-1': - time.units = cf_units.Unit( - 'days since 1850-01-01 00:00:00', calendar=calendar) - except CoordinateNotFoundError: - pass - - try: - old = cube.coord('AR5PL35') - dims = cube.coord_dims(old) - cube.remove_coord(old) - - plev = DimCoord.from_coord(old) - plev.var_name = plev - plev.standard_name = 'air_pressure' - plev.long_name = 'Pressure ' - cube.add_dim_coord(plev, dims) - except CoordinateNotFoundError: - pass - - return cube - - -class tos(allvars): - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fix metadata + Fix metadata. - Fixes errors in time units. + Fixes error in air_pressure coordinate, sometimes called AR5PL35 Parameters ---------- - cube: iris.cube.Cube + cube: iris.cube.CubeList Returns ------- - iris.cube.Cube + iris.cube.CubeList """ - time = cube.coord('time') - calendar = time.units.calendar - if time.units.origin == 'days since 1850-1-1': - time.units = cf_units.Unit( - 'days since 1850-1-1 00:00:00', calendar=calendar) - return cube + for cube in cubes: + try: + old = cube.coord('AR5PL35') + dims = cube.coord_dims(old) + cube.remove_coord(old) + + plev = DimCoord.from_coord(old) + plev.var_name = plev + plev.standard_name = 'air_pressure' + plev.long_name = 'Pressure ' + cube.add_dim_coord(plev, dims) + except CoordinateNotFoundError: + pass + + return cubes diff --git a/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM_CHEM.py b/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM_CHEM.py index 18b9b59f77..072f510ee5 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM_CHEM.py +++ b/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM_CHEM.py @@ -1,3 +1,4 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods """Fixes for MIROC ESM CHEM""" from ..fix import Fix @@ -7,7 +8,7 @@ class tro3(Fix): def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units diff --git a/esmvaltool/cmor/_fixes/CMIP5/MPI_ESM_LR.py b/esmvaltool/cmor/_fixes/CMIP5/MPI_ESM_LR.py index d0e7323330..c0a5b483e9 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/MPI_ESM_LR.py +++ b/esmvaltool/cmor/_fixes/CMIP5/MPI_ESM_LR.py @@ -1,13 +1,14 @@ -"""Fixes for MPI ESM LR model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for MPI ESM LR model.""" from ..fix import Fix class pctisccp(Fix): - """Fixes for pctisccp""" + """Fixes for pctisccp.""" def fix_data(self, cube): """ - Fix data + Fix data. 
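The MIROC-ESM allvars hunk above rebuilds the mislabelled AR5PL35 coordinate as air_pressure. A sketch of the intended replacement; note that var_name must be a string, so the hunk's assignment of the coordinate object itself to plev.var_name looks like a typo for 'plev':

    import iris.cube
    import numpy as np
    from iris.coords import DimCoord

    # Toy cube whose vertical coordinate carries the bogus AR5PL35 name.
    cube = iris.cube.Cube(
        np.zeros(2),
        dim_coords_and_dims=[(DimCoord([100000., 85000.],
                                       long_name='AR5PL35',
                                       units='Pa'), 0)])
    old = cube.coord('AR5PL35')
    dims = cube.coord_dims(old)
    cube.remove_coord(old)

    plev = DimCoord.from_coord(old)
    plev.var_name = 'plev'
    plev.standard_name = 'air_pressure'
    cube.add_dim_coord(plev, dims)
    print(cube.coord('air_pressure').points)  # [100000.  85000.]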
Fixes discrepancy between declared units and real units diff --git a/esmvaltool/cmor/_fixes/CMIP5/MRI_CGCM3.py b/esmvaltool/cmor/_fixes/CMIP5/MRI_CGCM3.py new file mode 100644 index 0000000000..ab03ae2d99 --- /dev/null +++ b/esmvaltool/cmor/_fixes/CMIP5/MRI_CGCM3.py @@ -0,0 +1,55 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for MRI-CGCM3 model.""" +import numpy as np +from ..fix import Fix + + +class msftmyz(Fix): + """Fixes for msftmyz.""" + + def fix_data(self, cube): + """ + Fix msftmyz data. + + Fixes mask + + Parameters + ---------- + cube: iris.cube.Cube + + Returns + ------- + iris.cube.Cube + + """ + cube.data = np.ma.array(cube.data) + cube.data = np.ma.masked_where(cube.data.mask + (cube.data == 0.), + cube.data) + + return cube + + +class thetao(Fix): + """Fixes for thetao.""" + + def fix_data(self, cube): + """ + Fix thetao data. + + Fixes mask + + Parameters + ---------- + cube: iris.cube.Cube + + Returns + ------- + iris.cube.Cube + + """ + cube.data = np.ma.array(cube.data) + cube.data = np.ma.masked_where(np.logical_or(cube.data.mask, + cube.data == 0.), + cube.data) + + return cube diff --git a/esmvaltool/cmor/_fixes/CMIP5/MRI_ESM1.py b/esmvaltool/cmor/_fixes/CMIP5/MRI_ESM1.py new file mode 100644 index 0000000000..4484dd7c0e --- /dev/null +++ b/esmvaltool/cmor/_fixes/CMIP5/MRI_ESM1.py @@ -0,0 +1,29 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for MRI-ESM1 model.""" +import numpy as np +from ..fix import Fix + + +class msftmyz(Fix): + """Fixes for msftmyz.""" + + def fix_data(self, cube): + """ + Fix msftmyz data. + + Fixes mask + + Parameters + ---------- + cube: iris.cube.Cube + + Returns + ------- + iris.cube.Cube + + """ + cube.data = np.ma.array(cube.data) + cube.data = np.ma.masked_where(cube.data.mask + (cube.data == 0.), + cube.data) + + return cube diff --git a/esmvaltool/cmor/_fixes/CMIP5/NorESM1_ME.py b/esmvaltool/cmor/_fixes/CMIP5/NorESM1_ME.py new file mode 100644 index 0000000000..cea4578746 --- /dev/null +++ b/esmvaltool/cmor/_fixes/CMIP5/NorESM1_ME.py @@ -0,0 +1,32 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for NorESM1-ME model.""" + +import numpy as np + +from ..fix import Fix + + +class tas(Fix): + """Fixes for tas.""" + + def fix_metadata(self, cubes): + """Fix metadata. + + Some coordinate points vary for different files of this dataset (for + different time range). This fix removes these inaccuracies by rounding + the coordinates. 
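To make the described rounding concrete, with values at the precision level this fix targets:

    import numpy as np

    # Points that differ only at the ~1e-13 level in different
    # NorESM1-ME files collapse to identical values after rounding to
    # 12 decimals, so the resulting cubes concatenate cleanly.
    points_a = np.array([59.9999999999999])
    points_b = np.array([60.0000000000001])
    print(np.round(points_a, 12) == np.round(points_b, 12))  # [ True]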
+ + Parameters + ---------- + cubes: iris.cube.CubeList + + Returns + ------- + iris.cube.CubeList + + """ + for cube in cubes: + for coord in cube.coords(dim_coords=True): + for attr in ('points', 'bounds'): + setattr(coord, attr, np.round(getattr(coord, attr), 12)) + return cubes diff --git a/esmvaltool/cmor/_fixes/CMIP5/__init__.py b/esmvaltool/cmor/_fixes/CMIP5/__init__.py index 3eca4e8911..7f4ba3d544 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/__init__.py +++ b/esmvaltool/cmor/_fixes/CMIP5/__init__.py @@ -1 +1 @@ -"""Fixes for CMIP5 data""" +"""Fixes for CMIP5 data.""" diff --git a/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1.py b/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1.py new file mode 100644 index 0000000000..c5bb894908 --- /dev/null +++ b/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1.py @@ -0,0 +1,61 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for bcc-csm1-1.""" +import numpy as np +from scipy.interpolate import InterpolatedUnivariateSpline +from scipy.ndimage import map_coordinates + +from ..fix import Fix + + +class tos(Fix): + """Fixes for tos.""" + + def fix_data(self, cube): + """Fix data. + + Calculate missing latitude/longitude boundaries using interpolation. + + Parameters + ---------- + cube: iris.cube.Cube + + Returns + ------- + iris.cube.Cube + + """ + rlat = cube.coord('grid_latitude').points + rlon = cube.coord('grid_longitude').points + + # Transform grid latitude/longitude to array indices [0, 1, 2, ...] + rlat_to_idx = InterpolatedUnivariateSpline( + rlat, np.arange(len(rlat)), k=1) + rlon_to_idx = InterpolatedUnivariateSpline( + rlon, np.arange(len(rlon)), k=1) + rlat_idx_bnds = rlat_to_idx(cube.coord('grid_latitude').bounds) + rlon_idx_bnds = rlon_to_idx(cube.coord('grid_longitude').bounds) + + # Calculate latitude/longitude vertices by interpolation + lat_vertices = [] + lon_vertices = [] + for (i, j) in [(0, 0), (0, 1), (1, 1), (1, 0)]: + (rlat_v, rlon_v) = np.meshgrid( + rlat_idx_bnds[:, i], rlon_idx_bnds[:, j], indexing='ij') + lat_vertices.append( + map_coordinates( + cube.coord('latitude').points, [rlat_v, rlon_v], + mode='nearest')) + lon_vertices.append( + map_coordinates( + cube.coord('longitude').points, [rlat_v, rlon_v], + mode='wrap')) + lat_vertices = np.array(lat_vertices) + lon_vertices = np.array(lon_vertices) + lat_vertices = np.moveaxis(lat_vertices, 0, -1) + lon_vertices = np.moveaxis(lon_vertices, 0, -1) + + # Copy vertices to cube + cube.coord('latitude').bounds = lat_vertices + cube.coord('longitude').bounds = lon_vertices + + return cube diff --git a/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1_m.py b/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1_m.py new file mode 100644 index 0000000000..e1ec8b43e2 --- /dev/null +++ b/esmvaltool/cmor/_fixes/CMIP5/bcc_csm1_1_m.py @@ -0,0 +1,61 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for bcc-csm1-1-m.""" +import numpy as np +from scipy.interpolate import InterpolatedUnivariateSpline +from scipy.ndimage import map_coordinates + +from ..fix import Fix + + +class tos(Fix): + """Fixes for tos.""" + + def fix_data(self, cube): + """Fix data. + + Calculate missing latitude/longitude boundaries using interpolation. + + Parameters + ---------- + cube: iris.cube.Cube + + Returns + ------- + iris.cube.Cube + + """ + rlat = cube.coord('grid_latitude').points + rlon = cube.coord('grid_longitude').points + + # Transform grid latitude/longitude to array indices [0, 1, 2, ...] 
+ rlat_to_idx = InterpolatedUnivariateSpline( + rlat, np.arange(len(rlat)), k=1) + rlon_to_idx = InterpolatedUnivariateSpline( + rlon, np.arange(len(rlon)), k=1) + rlat_idx_bnds = rlat_to_idx(cube.coord('grid_latitude').bounds) + rlon_idx_bnds = rlon_to_idx(cube.coord('grid_longitude').bounds) + + # Calculate latitude/longitude vertices by interpolation + lat_vertices = [] + lon_vertices = [] + for (i, j) in [(0, 0), (0, 1), (1, 1), (1, 0)]: + (rlat_v, rlon_v) = np.meshgrid( + rlat_idx_bnds[:, i], rlon_idx_bnds[:, j], indexing='ij') + lat_vertices.append( + map_coordinates( + cube.coord('latitude').points, [rlat_v, rlon_v], + mode='nearest')) + lon_vertices.append( + map_coordinates( + cube.coord('longitude').points, [rlat_v, rlon_v], + mode='wrap')) + lat_vertices = np.array(lat_vertices) + lon_vertices = np.array(lon_vertices) + lat_vertices = np.moveaxis(lat_vertices, 0, -1) + lon_vertices = np.moveaxis(lon_vertices, 0, -1) + + # Copy vertices to cube + cube.coord('latitude').bounds = lat_vertices + cube.coord('longitude').bounds = lon_vertices + + return cube diff --git a/esmvaltool/cmor/_fixes/CMIP5/inmcm4.py b/esmvaltool/cmor/_fixes/CMIP5/inmcm4.py index 77c2a3977e..91d340846a 100644 --- a/esmvaltool/cmor/_fixes/CMIP5/inmcm4.py +++ b/esmvaltool/cmor/_fixes/CMIP5/inmcm4.py @@ -1,13 +1,16 @@ -"""Fixes for inmcm4 model""" +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for inmcm4 model.""" +import iris + from ..fix import Fix class gpp(Fix): - """Fixes for gpp""" + """Fixes for gpp.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -27,11 +30,11 @@ def fix_data(self, cube): class lai(Fix): - """Fixes for lai""" + """Fixes for lai.""" def fix_data(self, cube): """ - Fix data + Fix data. Fixes discrepancy between declared units and real units @@ -48,3 +51,67 @@ def fix_data(self, cube): cube *= 0.01 cube.metadata = metadata return cube + + +class nbp(Fix): + """Fixes for nbp.""" + + def fix_file(self, filepath, output_dir): + """ + Apply fixes to the files prior to creating the cube. + + Should be used only to fix errors that prevent loading or can + not be fixed in the cube (i.e. those related with missing_value + and _FillValue or missing standard_name). + + Parameters + ---------- + filepath: basestring + file to fix. + output_dir: basestring + path to the folder to store the fix files, if required. + + Returns + ------- + basestring + Path to the corrected file. It can be different from the original + filepath if a fix has been applied, but if not it should be the + original filepath. + + """ + new_path = Fix.get_fixed_filepath(output_dir, filepath) + cube = iris.load_cube(filepath) + cube.standard_name = ('surface_net_downward_mass_flux_of_carbon_' + 'dioxide_expressed_as_carbon_due_to_all_land_' + 'processes') + iris.save(cube, new_path) + return new_path + + +class baresoilFrac(Fix): + """Fixes for baresoilFrac.""" + + def fix_metadata(self, cubelist): + """ + Fix missing scalar dimension. 
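The two tos bounds fixes (bcc-csm1-1 and bcc-csm1-1-m are deliberately identical) derive cell vertices by sampling in index space. A toy reduction of the two building blocks:

    import numpy as np
    from scipy.interpolate import InterpolatedUnivariateSpline
    from scipy.ndimage import map_coordinates

    # 1) A k=1 spline maps grid-coordinate values to fractional indices.
    rlat = np.array([0.0, 1.0, 2.0])
    to_idx = InterpolatedUnivariateSpline(rlat, np.arange(len(rlat)), k=1)
    print(to_idx([0.5, 1.5]))  # [0.5 1.5]

    # 2) map_coordinates samples a 2-D field at fractional (row, col)
    # positions; order=1 keeps this illustration linear, whereas the
    # fix relies on the scipy default spline order.
    field = np.arange(9.0).reshape(3, 3)
    print(map_coordinates(field, [[0.5], [1.0]], order=1, mode='nearest'))
    # [2.5]: halfway between field[0, 1] and field[1, 1]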
+ + Parameters + ---------- + cubelist: iris CubeList + List of cubes to fix + + Returns + ------- + iris.cube.CubeList + + """ + typebare = iris.coords.AuxCoord( + 'bare_ground', + standard_name='area_type', + long_name='surface type', + var_name='type', + units='1', + bounds=None) + for cube in cubelist: + cube.add_aux_coord(typebare) + return cubelist diff --git a/esmvaltool/cmor/_fixes/CMIP6/__init__.py b/esmvaltool/cmor/_fixes/CMIP6/__init__.py new file mode 100644 index 0000000000..4348e35490 --- /dev/null +++ b/esmvaltool/cmor/_fixes/CMIP6/__init__.py @@ -0,0 +1 @@ +"""Fixes for CMIP6 data.""" diff --git a/esmvaltool/cmor/_fixes/OBS/BDBP.py b/esmvaltool/cmor/_fixes/OBS/BDBP.py deleted file mode 100644 index d13c0705c4..0000000000 --- a/esmvaltool/cmor/_fixes/OBS/BDBP.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Fixes for BDBP""" - -import cf_units - -from ..fix import Fix - - -class tro3prof(Fix): - """Class to fix tro3prof""" - - def fix_metadata(self, cube): - """ - Fix metadata for tro3prof - - Fix air_pressure coordinate - """ - old = cube.coord('air_pressure') - dims = cube.coord_dims(old) - cube.remove_coord(old) - points = old.points * 100 - if old.bounds is None: - bounds = None - else: - bounds = old.bounds * 100 - plev = old.copy(points, bounds) - plev.var_name = 'plev' - plev.standard_name = 'air_pressure' - plev.long_name = 'Pressure ' - plev.units = cf_units.Unit('Pa') - cube.add_dim_coord(plev, dims) - return cube diff --git a/esmvaltool/cmor/_fixes/OBS/ESACCI_GHG.py b/esmvaltool/cmor/_fixes/OBS/ESACCI_GHG.py deleted file mode 100644 index 4111abc32a..0000000000 --- a/esmvaltool/cmor/_fixes/OBS/ESACCI_GHG.py +++ /dev/null @@ -1,104 +0,0 @@ -"""Fixes for ESA-CCI GHG""" -import cf_units - -from ..fix import Fix - - -class xco2Stderr(Fix): - """Fixes for xco2Stderr""" - - def fix_metadata(self, cube): - """ - Fix metadata - - Fix cube units - - Parameters - ---------- - cube: iris.cube.Cube - Cube to fix - - Returns - ------- - iris.cube.Cube - - """ - cube.units = cf_units.Unit('1.0e-6') - return cube - - def fix_data(self, cube): - """ - Fix data - - Fix cube units - - Parameters - ---------- - cube: iris.cube.Cube - Cube to fix - - Returns - ------- - iris.cube.Cube - - """ - metadata = cube.metadata - cube *= 1.0e6 - cube.metadata = metadata - return cube - - -class xco2Stddev(xco2Stderr): - """Fixes for xco2Stddev""" - - pass - - -class xch4Stderr(Fix): - """Fixes for xch4Stderr""" - - def fix_metadata(self, cube): - """ - Fix metadata - - Fix cube units - - Parameters - ---------- - cube: iris.cube.Cube - Cube to fix - - Returns - ------- - iris.cube.Cube - - """ - cube.units = cf_units.Unit('1.0e-9') - return cube - - def fix_data(self, cube): - """ - Fix data - - Fix cube units - - Parameters - ---------- - cube: iris.cube.Cube - Cube to fix - - Returns - ------- - iris.cube.Cube - - """ - metadata = cube.metadata - cube *= 1.0e9 - cube.metadata = metadata - return cube - - -class xch4Stddev(xch4Stderr): - """Fixes for xch4Stddev""" - - pass diff --git a/esmvaltool/cmor/_fixes/OBS/ESACCI_OZONE.py b/esmvaltool/cmor/_fixes/OBS/ESACCI_OZONE.py deleted file mode 100644 index 934043b9d3..0000000000 --- a/esmvaltool/cmor/_fixes/OBS/ESACCI_OZONE.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Fixes for ESA CCI ozone""" -import cf_units - -from ..fix import Fix - - -class tro3prof(Fix): - """Fixes for tro3prof""" - - def fix_metadata(self, cube): - """ - Fix metadata - - Fixes air_pressure coordinate - - Parameters - ---------- - cube: iris.cube.Cube - - Returns - ------- - iris.cube.Cube 
- - """ - old = cube.coord('air_pressure') - dims = cube.coord_dims(old) - cube.remove_coord(old) - points = old.points * 100 - if old.bounds is None: - bounds = None - else: - bounds = old.bounds * 100 - plev = old.copy(points, bounds) - plev.var_name = 'plev' - plev.standard_name = 'air_pressure' - plev.long_name = 'Pressure ' - plev.units = cf_units.Unit('Pa') - cube.add_dim_coord(plev, dims) - return cube diff --git a/esmvaltool/cmor/_fixes/OBS/__init__.py b/esmvaltool/cmor/_fixes/OBS/__init__.py deleted file mode 100644 index 0dd1ff7983..0000000000 --- a/esmvaltool/cmor/_fixes/OBS/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Fixes for OBS project""" diff --git a/esmvaltool/cmor/_fixes/PRIMAVERA/EC_Earth3_HR.py b/esmvaltool/cmor/_fixes/PRIMAVERA/EC_Earth3_HR.py index 721f0b4398..6f31db6ada 100644 --- a/esmvaltool/cmor/_fixes/PRIMAVERA/EC_Earth3_HR.py +++ b/esmvaltool/cmor/_fixes/PRIMAVERA/EC_Earth3_HR.py @@ -1,13 +1,14 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods """Fixes for EC-Earth3-HR PRIMAVERA project data""" from ..fix import Fix class allvars(Fix): - """Fixes common to all variables""" + """Fixes common to all variables.""" - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ - Fixes cube metadata + Fix cube metadata. Parameters ---------- @@ -19,9 +20,10 @@ def fix_metadata(self, cube): Cube: Fixed cube. It is the same instance that was received """ - latitude = cube.coord('latitude') - latitude.var_name = 'lat' + for cube in cubes: + latitude = cube.coord('latitude') + latitude.var_name = 'lat' - longitude = cube.coord('longitude') - longitude.var_name = 'lon' - return cube + longitude = cube.coord('longitude') + longitude.var_name = 'lon' + return cubes diff --git a/esmvaltool/cmor/_fixes/PRIMAVERA/__init__.py b/esmvaltool/cmor/_fixes/PRIMAVERA/__init__.py index b34934c56e..74148590d1 100644 --- a/esmvaltool/cmor/_fixes/PRIMAVERA/__init__.py +++ b/esmvaltool/cmor/_fixes/PRIMAVERA/__init__.py @@ -1 +1 @@ -"""Fixes for PRIMAVERA project data""" +"""Fixes for PRIMAVERA project data.""" diff --git a/esmvaltool/cmor/_fixes/fix.py b/esmvaltool/cmor/_fixes/fix.py index c498200e62..50e2477960 100644 --- a/esmvaltool/cmor/_fixes/fix.py +++ b/esmvaltool/cmor/_fixes/fix.py @@ -33,7 +33,7 @@ def fix_file(self, filepath, output_dir): """ return filepath - def fix_metadata(self, cube): + def fix_metadata(self, cubes): """ Apply fixes to the metadata of the cube. @@ -43,16 +43,44 @@ def fix_metadata(self, cube): Parameters ---------- - cube: iris.cube.Cube - Cube to fix + cubes: iris.cube.CubeList + Cubes to fix Returns ------- - iris.cube.Cube - Fixed cube. It can be a difference instance. + iris.cube.CubeList + Fixed cubes. They can be different instances. """ - return cube + return cubes + + def get_cube_from_list(self, cubes, short_name=None): + """ + Get a cube from the list with a given short name. + + Parameters + ---------- + cubes : iris.cube.CubeList + List of cubes to search + short_name : str + Cube's variable short name. 
If None, short name is the class name + + Raises + ------ + Exception + If no cube is found + + Returns + ------- + iris.Cube + Variable's cube + """ + if short_name is None: + short_name = self.__class__.__name__ + for cube in cubes: + if cube.var_name == short_name: + return cube + raise Exception('Cube for variable "{}" not found'.format(short_name)) def fix_data(self, cube): """ diff --git a/esmvaltool/cmor/_fixes/obs4mips/SSMI.py b/esmvaltool/cmor/_fixes/obs4mips/SSMI.py new file mode 100644 index 0000000000..5343af0826 --- /dev/null +++ b/esmvaltool/cmor/_fixes/obs4mips/SSMI.py @@ -0,0 +1,16 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for SSMI model.""" +from ..fix import Fix + + +class prw(Fix): + """Fixes for prw.""" + + def fix_metadata(self, cubes): + for cube in cubes: + latitude = cube.coord('latitude') + latitude.var_name = 'lat' + + longitude = cube.coord('longitude') + longitude.var_name = 'lon' + return cubes diff --git a/esmvaltool/cmor/_fixes/obs4mips/SSMI_MERIS.py b/esmvaltool/cmor/_fixes/obs4mips/SSMI_MERIS.py new file mode 100644 index 0000000000..56c88daa8e --- /dev/null +++ b/esmvaltool/cmor/_fixes/obs4mips/SSMI_MERIS.py @@ -0,0 +1,28 @@ +# pylint: disable=invalid-name, no-self-use, too-few-public-methods +"""Fixes for CCSM4 model.""" +from iris.cube import CubeList + +from ..fix import Fix + + +# noinspection PyPep8Naming +class prw(Fix): + """Fixes for prw.""" + + def fix_metadata(self, cubes): + """ + Fix metadata. + + Remove error and number of observations cubes + + Parameters + ---------- + cube: iris.cube.CubeList + + Returns + ------- + iris.cube.Cube + + """ + cube = self.get_cube_from_list(cubes) + return CubeList([cube]) diff --git a/esmvaltool/cmor/_fixes/obs4mips/__init__.py b/esmvaltool/cmor/_fixes/obs4mips/__init__.py new file mode 100644 index 0000000000..255025f5a8 --- /dev/null +++ b/esmvaltool/cmor/_fixes/obs4mips/__init__.py @@ -0,0 +1 @@ +"""Fixes for obs4mips data.""" diff --git a/esmvaltool/cmor/check.py b/esmvaltool/cmor/check.py index 42f96d93db..b46d1cb93d 100644 --- a/esmvaltool/cmor/check.py +++ b/esmvaltool/cmor/check.py @@ -1,11 +1,9 @@ -"""Module for checking iris cubes against their CMOR definitions""" +"""Module for checking iris cubes against their CMOR definitions.""" import logging import cf_units -import iris import iris.coord_categorisation import iris.coords -import iris.cube import iris.exceptions import iris.util import numpy as np @@ -14,12 +12,11 @@ class CMORCheckError(Exception): - """Exception raised when a cube does not pass the CMORCheck""" + """Exception raised when a cube does not pass the CMORCheck.""" -class CMORCheck(object): - """ - Class used to check the CMOR-compliance of the data. +class CMORCheck(): + """Class used to check the CMOR-compliance of the data. It can also fix some minor errors and does some minor data homogeneization: @@ -27,25 +24,25 @@ class CMORCheck(object): Parameters ---------- cube: iris.cube.Cube: - Iris cube to check + Iris cube to check. var_info: variables_info.VariableInfo - Variable info to check + Variable info to check. frequency: str - Expected frequency for the data + Expected frequency for the data. fail_on_error: bool If true, CMORCheck stops on the first error. If false, it collects - all possible errors before stopping + all possible errors before stopping. automatic_fixes: bool If True, CMORCheck will try to apply automatic fixes for any - detected error, if possible + detected error, if possible. 
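get_cube_from_list ties a fix class to its cube through the var_name convention. A minimal sketch of that lookup:

    import iris.cube
    import numpy as np

    cubes = iris.cube.CubeList([
        iris.cube.Cube(np.zeros(2), var_name='prw'),
        iris.cube.Cube(np.zeros(2), var_name='prw_err'),
    ])
    # A fix class named prw selects its target by matching var_name.
    cube = next(c for c in cubes if c.var_name == 'prw')
    print(cube.var_name)  # prw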
Attributes ---------- frequency: str - Expected frequency for the data + Expected frequency for the data. automatic_fixes: bool If True, CMORCheck will try to apply automatic fixes for any - detected error, if possible + detected error, if possible. """ @@ -73,21 +70,20 @@ def __init__(self, self.automatic_fixes = automatic_fixes def check_metadata(self, logger=None): - """ - Check the cube metadata + """Check the cube metadata. - Perform all the tests that do not require - to have the data in memory + Perform all the tests that do not require to have the data in memory. It will also report some warnings in case of minor errors and homogenize some data: - - Equivalent calendars will all default to the same name + + - Equivalent calendars will all default to the same name. - Auxiliary coordinates year, month_number, day_of_month and - day_of_year will be added for the time axis + day_of_year will be added for the time axis. Raises ------ - CMORCheckException: + CMORCheckError If errors are found. If fail_on_error attribute is set to True, raises as soon as an error is detected. If set to False, it perform all checks and then raises. @@ -107,14 +103,14 @@ def check_metadata(self, logger=None): self.report_errors() self._add_auxiliar_time_coordinates() + return self._cube def report_errors(self): - """ - Report detected errors + """Report detected errors. Raises ------ - CMORCheckError: + CMORCheckError If any errors were reported before calling this method. """ @@ -125,8 +121,7 @@ def report_errors(self): raise CMORCheckError(msg) def report_warnings(self, logger): - """ - Report detected warnings to the given logger + """Report detected warnings to the given logger. Parameters ---------- @@ -134,25 +129,22 @@ def report_warnings(self, logger): """ if self.has_warnings(): - msg = ('There were warnings in variable {}:\n{}\n' - 'in the cube that will be saved to file: {}') - msg = msg.format(self._cube.var_name, '\n '.join(self._warnings), - self._cube.attributes.get('_filename')) + msg = 'There were warnings in variable {}:\n{}\n'.format( + self._cube.var_name, '\n '.join(self._warnings)) logger.warning(msg) def check_data(self, logger=None): - """ - Check the cube data + """Check the cube data. Performs all the tests that require to have the data in memory. Assumes that metadata is correct, so you must call check_metadata prior to this. - It will also report some warnings in case of minor errors + It will also report some warnings in case of minor errors. Raises ------ - CMORCheckException: + CMORCheckError If errors are found. If fail_on_error attribute is set to True, raises as soon as an error is detected. If set to False, it perform all checks and then raises. @@ -162,26 +154,26 @@ def check_data(self, logger=None): logger = logging.getLogger(__name__) if self._cmor_var.units: - units = self._get_efective_units() + units = self._get_effective_units() if str(self._cube.units) != units: self._cube.convert_units(units) - self._check_data_range() self._check_coords_data() self.report_warnings(logger) self.report_errors() + return self._cube def _check_fill_value(self): + """Check fill value.""" # Iris removes _FillValue/missing_value information if data has none # of these values. If there are values == _FillValue then it will # be encoded in the numpy.ma object created. # # => Very difficult to check! 
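A hedged usage sketch of the two-phase check as restructured here; cube and var_info (a variable definition from the CMOR tables) are assumed to exist already:

    from esmvaltool.cmor.check import CMORCheck, CMORCheckError

    # cube: iris.cube.Cube, var_info: CMOR table entry (both assumed).
    checker = CMORCheck(cube, var_info, frequency='mon',
                        automatic_fixes=True)
    try:
        cube = checker.check_metadata()  # tests that avoid loading data
        cube = checker.check_data()      # tests that need data in memory
    except CMORCheckError as exc:
        print(exc)

Both methods now return the (possibly fixed) cube, so callers can chain them.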
- pass def _check_var_metadata(self): - + """Check metadata of variable.""" # Check standard_name if self._cmor_var.standard_name: if self._cube.standard_name != self._cmor_var.standard_name: @@ -190,17 +182,18 @@ def _check_var_metadata(self): self._cmor_var.standard_name, self._cube.standard_name) # Check units - if self._cube.attributes.get('invalid_units', '').lower() == 'psu': + if (self.automatic_fixes and self._cube.attributes.get( + 'invalid_units', '').lower() == 'psu'): self._cube.units = '1.0' del self._cube.attributes['invalid_units'] if self._cmor_var.units: - units = self._get_efective_units() + units = self._get_effective_units() if not self._cube.units.is_convertible(units): - self.report_error('Variable {0} units () can not be ' - 'converted to {2}', self._cube.var_name, - self._cmor_var.units, self._cube.units) + self.report_error(f'Variable {self._cube.var_name} units ' + f'{self._cube.units} can not be ' + f'converted to {self._cmor_var.units}') # Check other variable attributes that match entries in cube.attributes attrs = ('positive', ) @@ -208,42 +201,23 @@ def _check_var_metadata(self): attr_value = getattr(self._cmor_var, attr) if attr_value: if attr not in self._cube.attributes: - # It is usually missing in CMIP5 data, so we only report - # a warning in that case - if self._cmor_var.table_type == 'CMIP5': - self.report_warning('{}: attribute {} not present', - self._cube.var_name, attr) - else: - self.report_error('{}: attribute {} not present', - self._cube.var_name, attr) + self.report_warning('{}: attribute {} not present', + self._cube.var_name, attr) elif self._cube.attributes[attr] != attr_value: self.report_error(self._attr_msg, self._cube.var_name, attr, attr_value, self._cube.attributes[attr]) - def _get_efective_units(self): + def _get_effective_units(self): + """Get effective units.""" if self._cmor_var.units.lower() == 'psu': units = '1.0' else: units = self._cmor_var.units return units - def _check_data_range(self): - # Check data is not less than valid_min - if self._cmor_var.valid_min: - valid_min = float(self._cmor_var.valid_min) - if self._cube.data.min() < valid_min: - self.report_warning(self._vals_msg, self._cube.var_name, - '< {} ='.format('valid_min'), valid_min) - # Check data is not greater than valid_max - if self._cmor_var.valid_max: - valid_max = float(self._cmor_var.valid_max) - if self._cube.data.max() > valid_max: - self.report_warning(self._vals_msg, self._cube.var_name, - '> {} ='.format('valid_max'), valid_max) - def _check_rank(self): - # Count rank, excluding scalar dimensions + """Check rank, excluding scalar dimensions.""" rank = 0 dimensions = [] for coordinate in self._cmor_var.coordinates.values(): @@ -264,6 +238,7 @@ def _check_rank(self): 'match coordinate rank') def _check_dim_names(self): + """Check dimension names.""" for (_, coordinate) in self._cmor_var.coordinates.items(): if coordinate.generic_level: continue @@ -271,14 +246,28 @@ def _check_dim_names(self): try: cube_coord = self._cube.coord(var_name=coordinate.out_name) if cube_coord.standard_name != coordinate.standard_name: - self.report_error(self._attr_msg, coordinate.out_name, - 'standard_name', - coordinate.standard_name, - cube_coord.standard_name) + self.report_error( + self._attr_msg, + coordinate.out_name, + 'standard_name', + coordinate.standard_name, + cube_coord.standard_name, + ) except iris.exceptions.CoordinateNotFoundError: - self.report_error(self._does_msg, coordinate.name, 'exist') + try: + coord = self._cube.coord(coordinate.standard_name) + 
-    def _check_data_range(self):
-        # Check data is not less than valid_min
-        if self._cmor_var.valid_min:
-            valid_min = float(self._cmor_var.valid_min)
-            if self._cube.data.min() < valid_min:
-                self.report_warning(self._vals_msg, self._cube.var_name,
-                                    '< {} ='.format('valid_min'), valid_min)
-        # Check data is not greater than valid_max
-        if self._cmor_var.valid_max:
-            valid_max = float(self._cmor_var.valid_max)
-            if self._cube.data.max() > valid_max:
-                self.report_warning(self._vals_msg, self._cube.var_name,
-                                    '> {} ='.format('valid_max'), valid_max)
-
     def _check_rank(self):
-        # Count rank, excluding scalar dimensions
+        """Check rank, excluding scalar dimensions."""
         rank = 0
         dimensions = []
         for coordinate in self._cmor_var.coordinates.values():
@@ -264,6 +238,7 @@ def _check_rank(self):
                               'match coordinate rank')

     def _check_dim_names(self):
+        """Check dimension names."""
         for (_, coordinate) in self._cmor_var.coordinates.items():
             if coordinate.generic_level:
                 continue
@@ -271,14 +246,28 @@ def _check_dim_names(self):
             try:
                 cube_coord = self._cube.coord(var_name=coordinate.out_name)
                 if cube_coord.standard_name != coordinate.standard_name:
-                    self.report_error(self._attr_msg, coordinate.out_name,
-                                      'standard_name',
-                                      coordinate.standard_name,
-                                      cube_coord.standard_name)
+                    self.report_error(
+                        self._attr_msg,
+                        coordinate.out_name,
+                        'standard_name',
+                        coordinate.standard_name,
+                        cube_coord.standard_name,
+                    )
             except iris.exceptions.CoordinateNotFoundError:
-                self.report_error(self._does_msg, coordinate.name, 'exist')
+                try:
+                    coord = self._cube.coord(coordinate.standard_name)
+                    self.report_error(
+                        'Coordinate {0} has var name {1} instead of {2}',
+                        coordinate.name,
+                        coord.var_name,
+                        coordinate.out_name,
+                    )
+                except iris.exceptions.CoordinateNotFoundError:
+                    self.report_error(self._does_msg, coordinate.name,
+                                      'exist')

     def _check_coords(self):
+        """Check coordinates."""
         for coordinate in self._cmor_var.coordinates.values():
             # Cannot check generic_level coords as no CMOR information
             if coordinate.generic_level:
@@ -294,6 +283,7 @@ def _check_coords(self):
                 self._check_coord(coordinate, coord, var_name)

     def _check_coords_data(self):
+        """Check coordinate data."""
         for coordinate in self._cmor_var.coordinates.values():
             # Cannot check generic_level coords as no CMOR information
             if coordinate.generic_level:
@@ -310,6 +300,7 @@ def _check_coords_data(self):
                                           coordinate, coord, var_name)

     def _check_coord(self, cmor, coord, var_name):
+        """Check single coordinate."""
         if coord.var_name == 'time':
             return
         if cmor.units:
@@ -331,9 +322,11 @@ def _check_coord(self, cmor, coord, var_name):
         self._check_coord_monotonicity_and_direction(cmor, coord, var_name)

     def _check_coord_monotonicity_and_direction(self, cmor, coord, var_name):
+        """Check monotonicity and direction of coordinate."""
         if not coord.is_monotonic():
             self.report_error(self._is_msg, var_name, 'monotonic')
-
+        if len(coord.points) == 1:
+            return
         if cmor.stored_direction:
             if cmor.stored_direction == 'increasing':
                 if coord.points[0] > coord.points[1]:
@@ -349,12 +342,13 @@ def _check_coord_monotonicity_and_direction(self, cmor, coord, var_name):
                     self._reverse_coord(coord)

     def _reverse_coord(self, coord):
+        """Reverse coordinate."""
         if coord.ndim == 1:
-            self._cube.data = iris.util.reverse(self._cube.data,
-                                                self._cube.coord_dims(coord))
-            coord.points = iris.util.reverse(coord.points, 0)
+            self._cube = iris.util.reverse(self._cube,
+                                           self._cube.coord_dims(coord))
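A side note on the _reverse_coord change: reversing the whole cube with iris.util.reverse keeps data, coordinate points, and bounds consistent in one call, instead of reversing the data array and the points separately. A minimal sketch with an invented one-dimensional cube:

```python
import iris.util
import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube

# Invented cube with a decreasing latitude coordinate.
lat = DimCoord(np.array([60.0, 30.0, 0.0]), standard_name='latitude',
               units='degrees_north')
cube = Cube(np.array([3.0, 2.0, 1.0]), dim_coords_and_dims=[(lat, 0)])

# Reverse along the latitude dimension, as _reverse_coord does.
cube = iris.util.reverse(cube, cube.coord_dims(lat))
print(cube.coord('latitude').points)  # [ 0. 30. 60.]
print(cube.data)                      # [1. 2. 3.]
```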
     def _check_coord_values(self, coord_info, coord, var_name):
+        """Check coordinate values."""
         # Check requested coordinate values exist in coord.points
         self._check_requested_values(coord, coord_info, var_name)

@@ -386,6 +380,7 @@ def _check_coord_values(self, coord_info, coord, var_name):
             self._cube = self._cube.intersection(lon_extent)

     def _check_requested_values(self, coord, coord_info, var_name):
+        """Check requested values."""
         if coord_info.requested:
             cmor_points = [float(val) for val in coord_info.requested]
             coord_points = list(coord.points)
@@ -395,6 +390,7 @@ def _check_requested_values(self, coord, coord_info, var_name):
                                         str(point), str(coord.units))

     def _check_time_coord(self):
+        """Check time coordinate."""
         try:
             coord = self._cube.coord('time', dim_coords=True)  # , axis='T')
             var_name = coord.var_name
@@ -407,116 +403,122 @@ def _check_time_coord(self):
         else:
             coord.convert_units(
                 cf_units.Unit(
-                    'days since 1950-01-01 00:00:00',
+                    'days since 1950-1-1 00:00:00',
                     calendar=coord.units.calendar))
-            simplified_cal = self._simplify_calendars(coord.units.calendar)
-            coord.units = cf_units.Unit(coord.units.name, simplified_cal)
+            simplified_cal = self._simplify_calendar(coord.units.calendar)
+            coord.units = cf_units.Unit(coord.units.origin, simplified_cal)

         tol = 0.001
-        intervals = {
-            'dec': (3600, 3660),
-            'yr': (360, 366),
-            'mon': (28, 31),
-            'day': (1, 1)
-        }
-        if self.frequency in intervals:
-            interval = intervals[self.frequency]
-            target_interval = (interval[0] - tol, interval[1] + tol)
-        elif self.frequency.endswith('hr'):
-
-            frequency = self.frequency[:-2]
-            if frequency == 'sub':
-                frequency = 1.0 / 24
-                target_interval = (-tol, frequency + tol)
-            else:
-                frequency = float(frequency) / 24
-                target_interval = (frequency - tol, frequency + tol)
+        intervals = {'dec': (3600, 3660), 'day': (1, 1)}
+        if self.frequency == 'mon':
+            for i in range(len(coord.points) - 1):
+                first = coord.cell(i).point
+                second = coord.cell(i + 1).point
+                second_month = first.month + 1
+                second_year = first.year
+                if second_month == 13:
+                    second_month = 1
+                    second_year += 1
+                if second_month != second.month or \
+                        second_year != second.year:
+                    msg = '{}: Frequency {} does not match input data'
+                    self.report_error(msg, var_name, self.frequency)
+                    break
+        elif self.frequency == 'yr':
+            for i in range(len(coord.points) - 1):
+                first = coord.cell(i).point
+                second = coord.cell(i + 1).point
+                second_month = first.month + 1
+                if first.year + 1 != second.year:
+                    msg = '{}: Frequency {} does not match input data'
+                    self.report_error(msg, var_name, self.frequency)
+                    break
         else:
-            msg = '{}: Frequency {} not supported by checker'
-            self.report_error(msg, var_name, self.frequency)
-            return
-        for i in range(len(coord.points) - 1):
-            interval = coord.points[i + 1] - coord.points[i]
-            if (interval < target_interval[0]
-                    or interval > target_interval[1]):
-                msg = '{}: Frequency {} does not match input data'
+            if self.frequency in intervals:
+                interval = intervals[self.frequency]
+                target_interval = (interval[0] - tol, interval[1] + tol)
+            elif self.frequency.endswith('hr'):
+                frequency = self.frequency[:-2]
+                if frequency == 'sub':
+                    frequency = 1.0 / 24
+                    target_interval = (-tol, frequency + tol)
+                else:
+                    frequency = float(frequency) / 24
+                    target_interval = (frequency - tol, frequency + tol)
+            else:
+                msg = '{}: Frequency {} not supported by checker'
                 self.report_error(msg, var_name, self.frequency)
-                break
-
-    CALENDARS = [
-        ['gregorian', 'standard'],
-        ['proleptic_gregorian'],
-        ['365_day', 'noleap'],
-        ['366_day', 'all_leap'],
-        ['360_day'],
-        ['julian'],
-        ['none'],
-    ]
+                return
+            for i in range(len(coord.points) - 1):
+                interval = coord.points[i + 1] - coord.points[i]
+                if (interval < target_interval[0]
+                        or interval > target_interval[1]):
+                    msg = '{}: Frequency {} does not match input data'
+                    self.report_error(msg, var_name, self.frequency)
+                    break

     @staticmethod
-    def _simplify_calendars(calendar):
-        for calendar_type in CMORCheck.CALENDARS:
-            if calendar in calendar_type:
-                return calendar_type[0]
+    def _simplify_calendar(calendar):
+        calendar_aliases = {
+            'all_leap': '366_day',
+            'noleap': '365_day',
+            'standard': 'gregorian',
+        }
+        return calendar_aliases.get(calendar, calendar)
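To make the interval arithmetic concrete, a worked example of the endswith('hr') branch (values invented): '3hr' maps to 3/24 of a day, and consecutive time points must be that far apart within the tolerance.

```python
tol = 0.001
frequency = '3hr'

# '3hr' -> expected spacing in days, as in the branch above.
step = float(frequency[:-2]) / 24
target_interval = (step - tol, step + tol)

points = [0.0, 0.125, 0.25, 0.375]  # days since the reference date
ok = all(target_interval[0] <= second - first <= target_interval[1]
         for first, second in zip(points, points[1:]))
print(ok)  # True: a 0.125-day spacing matches '3hr'
```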
""" msg = message.format(*args) if self._failerr: raise CMORCheckError(msg + '\nin cube:\n{}'.format(self._cube)) - else: - self._errors.append(msg) + self._errors.append(msg) def report_warning(self, message, *args): - """ - Report a warning. + """Report a warning. If fail_on_error is set to True, logs it automatically. - If fail_on_error is set to False, stores it for later reports + If fail_on_error is set to False, stores it for later reports. Parameters ---------- message: str: unicode - Message for the warning + Message for the warning. *args: - arguments to format the message string + arguments to format the message string. """ msg = message.format(*args) @@ -540,91 +542,100 @@ def _add_auxiliar_time_coordinates(self): def _get_cmor_checker(table, mip, short_name, + frequency, fail_on_error=True, automatic_fixes=False): """Get a CMOR checker/fixer.""" if table not in CMOR_TABLES: - raise NotImplementedError("No CMOR checker implemented for table {}." - "\nThe following options are available: {}" - .format(table, ', '.join(CMOR_TABLES))) + raise NotImplementedError( + "No CMOR checker implemented for table {}." + "\nThe following options are available: {}".format( + table, ', '.join(CMOR_TABLES))) cmor_table = CMOR_TABLES[table] var_info = cmor_table.get_variable(mip, short_name) + if var_info is None: + var_info = CMOR_TABLES['custom'].get_variable(mip, short_name) def _checker(cube): return CMORCheck( cube, var_info, + frequency=frequency, fail_on_error=fail_on_error, automatic_fixes=automatic_fixes) return _checker -def cmor_check_metadata(cube, cmor_table, mip, short_name): - """ - Check if metadata conforms to variable's CMOR definiton. +def cmor_check_metadata(cube, cmor_table, mip, short_name, frequency): + """Check if metadata conforms to variable's CMOR definiton. - None of the checks at this step will force the cube to load the data + None of the checks at this step will force the cube to load the data. Parameters ---------- cube: iris.cube.Cube - Data cube to check + Data cube to check. cmor_table: basestring - CMOR definitions to use + CMOR definitions to use. mip: - Variable's mip + Variable's mip. short_name: basestring - Variable's short name + Variable's short name. + frequency: basestring + Data frequency. """ - checker = _get_cmor_checker(cmor_table, mip, short_name) + checker = _get_cmor_checker(cmor_table, mip, short_name, frequency) checker(cube).check_metadata() return cube -def cmor_check_data(cube, cmor_table, mip, short_name): - """ - Check if data conforms to variable's CMOR definiton. +def cmor_check_data(cube, cmor_table, mip, short_name, frequency): + """Check if data conforms to variable's CMOR definiton. - The checks performed at this step require the data in memory + The checks performed at this step require the data in memory. Parameters ---------- cube: iris.cube.Cube - Data cube to check + Data cube to check. cmor_table: basestring - CMOR definitions to use + CMOR definitions to use. mip: - Variable's mip + Variable's mip. short_name: basestring Variable's short name + frequency: basestring + Data frequency """ - checker = _get_cmor_checker(cmor_table, mip, short_name) + checker = _get_cmor_checker(cmor_table, mip, short_name, frequency) checker(cube).check_data() return cube -def cmor_check(cube, cmor_table, mip, short_name): - """ - Check if cube conforms to variable's CMOR definiton. +def cmor_check(cube, cmor_table, mip, short_name, frequency): + """Check if cube conforms to variable's CMOR definiton. 


-def cmor_check_metadata(cube, cmor_table, mip, short_name):
-    """
-    Check if metadata conforms to variable's CMOR definiton.
+def cmor_check_metadata(cube, cmor_table, mip, short_name, frequency):
+    """Check if metadata conforms to variable's CMOR definition.

-    None of the checks at this step will force the cube to load the data
+    None of the checks at this step will force the cube to load the data.

     Parameters
     ----------
     cube: iris.cube.Cube
-        Data cube to check
+        Data cube to check.
     cmor_table: basestring
-        CMOR definitions to use
+        CMOR definitions to use.
     mip:
-        Variable's mip
+        Variable's mip.
     short_name: basestring
-        Variable's short name
+        Variable's short name.
+    frequency: basestring
+        Data frequency.

     """
-    checker = _get_cmor_checker(cmor_table, mip, short_name)
+    checker = _get_cmor_checker(cmor_table, mip, short_name, frequency)
     checker(cube).check_metadata()
     return cube


-def cmor_check_data(cube, cmor_table, mip, short_name):
-    """
-    Check if data conforms to variable's CMOR definiton.
+def cmor_check_data(cube, cmor_table, mip, short_name, frequency):
+    """Check if data conforms to variable's CMOR definition.

-    The checks performed at this step require the data in memory
+    The checks performed at this step require the data in memory.

     Parameters
     ----------
     cube: iris.cube.Cube
-        Data cube to check
+        Data cube to check.
     cmor_table: basestring
-        CMOR definitions to use
+        CMOR definitions to use.
     mip:
-        Variable's mip
+        Variable's mip.
     short_name: basestring
         Variable's short name
+    frequency: basestring
+        Data frequency

     """
-    checker = _get_cmor_checker(cmor_table, mip, short_name)
+    checker = _get_cmor_checker(cmor_table, mip, short_name, frequency)
     checker(cube).check_data()
     return cube


-def cmor_check(cube, cmor_table, mip, short_name):
-    """
-    Check if cube conforms to variable's CMOR definiton.
+def cmor_check(cube, cmor_table, mip, short_name, frequency):
+    """Check if cube conforms to variable's CMOR definition.

-    Equivalent to calling cmor_check_metadata and cmor_check_data consecutively
+    Equivalent to calling cmor_check_metadata and cmor_check_data
+    consecutively.

     Parameters
     ----------
     cube: iris.cube.Cube
-        Data cube to check
+        Data cube to check.
     cmor_table: basestring
-        CMOR definitions to use
+        CMOR definitions to use.
     mip:
-        Variable's mip
+        Variable's mip.
     short_name: basestring
-        Variable's short name
+        Variable's short name.
+    frequency: basestring
+        Data frequency.

     """
-    cmor_check_metadata(cube, cmor_table, mip, short_name)
-    cmor_check_data(cube, cmor_table, mip, short_name)
+    cmor_check_metadata(cube, cmor_table, mip, short_name, frequency)
+    cmor_check_data(cube, cmor_table, mip, short_name, frequency)
     return cube
diff --git a/esmvaltool/cmor/fix.py b/esmvaltool/cmor/fix.py
index 30e5ec9473..976d1594d0 100644
--- a/esmvaltool/cmor/fix.py
+++ b/esmvaltool/cmor/fix.py
@@ -7,13 +7,17 @@
 fixed.
 """
+from collections import defaultdict
+
+from iris.cube import CubeList
+
 from ._fixes.fix import Fix
 from .check import _get_cmor_checker


-def fix_file(filename, short_name, project, dataset, output_dir):
+def fix_file(file, short_name, project, dataset, output_dir):
     """
-    Fix files before ESMValTool can load them
+    Fix files before ESMValTool can load them.

     These fixes are only for issues that prevent iris from loading the cube or
     that cannot be fixed after the cube is loaded.

@@ -22,7 +26,7 @@
     Parameters
     ----------
-    filename: str
+    file: str
         Path to the original file
     short_name: str
         Variable's short name
@@ -39,14 +43,19 @@
     """
     for fix in Fix.get_fixes(
             project=project, dataset=dataset, variable=short_name):
-        filename = fix.fix_file(filename, output_dir)
-    return filename
+        file = fix.fix_file(file, output_dir)
+    return file
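A hypothetical call, to show where fix_file sits in the pipeline; the path, project, and dataset names are invented for illustration:

```python
# Runs before iris ever sees the file; if no fix is registered for the
# project/dataset/variable combination, the original path comes back.
path = fix_file(
    file='/data/CMIP5/tas_Amon_SOMEMODEL_historical.nc',  # invented path
    short_name='tas',
    project='CMIP5',
    dataset='SOMEMODEL',
    output_dir='/tmp/fixed_files',
)
```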
""" - for fix in Fix.get_fixes( - project=project, dataset=dataset, variable=short_name): - cube = fix.fix_metadata(cube) - if cmor_table and mip: - checker = _get_cmor_checker( - table=cmor_table, - mip=mip, - short_name=short_name, - fail_on_error=False, - automatic_fixes=True) - checker(cube).check_metadata() - return cube - - -def fix_data(cube, short_name, project, dataset, cmor_table=None, mip=None): + fixes = Fix.get_fixes( + project=project, dataset=dataset, variable=short_name) + fixed_cubes = [] + by_file = defaultdict(list) + for cube in cubes: + by_file[cube.attributes.get('source_file', '')].append(cube) + + for cube_list in by_file.values(): + cube_list = CubeList(cube_list) + for fix in fixes: + cube_list = fix.fix_metadata(cube_list) + + if len(cube_list) != 1: + raise ValueError('Cubes were not reduced to one after' + 'fixing: %s' % cube_list) + cube = cube_list[0] + + if cmor_table and mip: + checker = _get_cmor_checker( + frequency=frequency, + table=cmor_table, + mip=mip, + short_name=short_name, + fail_on_error=False, + automatic_fixes=True) + cube = checker(cube).check_metadata() + cube.attributes.pop('source_file', None) + fixed_cubes.append(cube) + return fixed_cubes + + +def fix_data(cube, + short_name, + project, + dataset, + cmor_table=None, + mip=None, + frequency=None): """ Fix cube data if fixes add present and check it anyway. @@ -109,7 +144,7 @@ def fix_data(cube, short_name, project, dataset, cmor_table=None, mip=None): ---------- cube: iris.cube.Cube Cube to fix - short_name; str + short_name: str Variable's short name project: str @@ -121,6 +156,9 @@ def fix_data(cube, short_name, project, dataset, cmor_table=None, mip=None): mip: str, optional Variable's MIP, if available + frequency: str, optional + Variable's data frequency, if available + Returns ------- iris.cube.Cube: @@ -128,7 +166,7 @@ def fix_data(cube, short_name, project, dataset, cmor_table=None, mip=None): Raises ------ - CMORCheckError: + CMORCheckError If the checker detects errors in the data that it can not fix. """ @@ -137,10 +175,11 @@ def fix_data(cube, short_name, project, dataset, cmor_table=None, mip=None): cube = fix.fix_data(cube) if cmor_table and mip: checker = _get_cmor_checker( + frequency=frequency, table=cmor_table, mip=mip, short_name=short_name, fail_on_error=False, automatic_fixes=True) - checker(cube).check_data() + cube = checker(cube).check_data() return cube diff --git a/esmvaltool/cmor/table.py b/esmvaltool/cmor/table.py index f53ee418e7..805ede3a6a 100644 --- a/esmvaltool/cmor/table.py +++ b/esmvaltool/cmor/table.py @@ -1,5 +1,5 @@ """ -CMOR information reader for ESMValTool +CMOR information reader for ESMValTool. Read variable information from CMOR 2 and CMOR 3 tables and make it easily available for the other components of ESMValTool @@ -13,34 +13,46 @@ logger = logging.getLogger(__name__) CMOR_TABLES = {} -"""dict of str, obj: CMOR info objects""" +"""dict of str, obj: CMOR info objects.""" def read_cmor_tables(cfg_developer): - """Read cmor tables required in the configuration + """Read cmor tables required in the configuration. 
diff --git a/esmvaltool/cmor/table.py b/esmvaltool/cmor/table.py
index f53ee418e7..805ede3a6a 100644
--- a/esmvaltool/cmor/table.py
+++ b/esmvaltool/cmor/table.py
@@ -1,5 +1,5 @@
 """
-CMOR information reader for ESMValTool
+CMOR information reader for ESMValTool.

 Read variable information from CMOR 2 and CMOR 3 tables and make it easily
 available for the other components of ESMValTool
@@ -13,34 +13,46 @@
 logger = logging.getLogger(__name__)

 CMOR_TABLES = {}
-"""dict of str, obj: CMOR info objects"""
+"""dict of str, obj: CMOR info objects."""


 def read_cmor_tables(cfg_developer):
-    """Read cmor tables required in the configuration
+    """Read cmor tables required in the configuration.

     Parameters
     ----------
     cfg_developer : dict of str
         Parsed config-developer file
+
     """
-    for table in cfg_developer.keys():
-        project = cfg_developer[table]
+    custom = CustomInfo()
+    CMOR_TABLES['custom'] = custom

-        table_path = project.get('cmor_tables', '')
-        table_path = os.path.expandvars(os.path.expanduser(table_path))
+    for table in cfg_developer:
+        project = cfg_developer[table]
         cmor_type = project.get('cmor_type', 'CMIP5')
+        table_path = project.get('cmor_tables', cmor_type.lower())
+        table_path = os.path.expandvars(os.path.expanduser(table_path))
+        cmor_strict = project.get('cmor_strict', True)
+        if cmor_strict:
+            default = None
+        else:
+            default = custom
         if cmor_type == 'CMIP5':
-            CMOR_TABLES[table] = CMIP5Info(table_path)
+            CMOR_TABLES[table] = CMIP5Info(
+                table_path, default=default,
+            )
         elif cmor_type == 'CMIP6':
-            CMOR_TABLES[table] = CMIP6Info(table_path)
+            CMOR_TABLES[table] = CMIP6Info(
+                table_path, default=default,
+            )


 class CMIP6Info(object):
     """
-    Class to read CMIP6-like data request
+    Class to read CMIP6-like data request.

     This uses CMOR 3 json format

@@ -57,10 +69,11 @@ class CMIP6Info(object):
         'tro3': 'o3',
     }

-    def __init__(self, cmor_tables_path=None):
+    def __init__(self, cmor_tables_path, default=None):
         cmor_tables_path = self._get_cmor_path(cmor_tables_path)

         self._cmor_folder = os.path.join(cmor_tables_path, 'Tables')
+        self.default = default

         self.tables = {}

@@ -72,9 +85,10 @@ def __init__(self, cmor_tables_path=None):

     @staticmethod
     def _get_cmor_path(cmor_tables_path):
-        if not cmor_tables_path:
-            cwd = os.path.dirname(os.path.realpath(__file__))
-            cmor_tables_path = os.path.join(cwd, 'tables', 'cmip6')
+        if os.path.isdir(cmor_tables_path):
+            return cmor_tables_path
+        cwd = os.path.dirname(os.path.realpath(__file__))
+        cmor_tables_path = os.path.join(cwd, 'tables', cmor_tables_path)
         return cmor_tables_path

     def _load_table(self, json_file):
@@ -82,25 +96,24 @@ def _load_table(self, json_file):
             raw_data = json.loads(inf.read())
             if not self._is_table(raw_data):
                 return
+            table = TableInfo()

             header = raw_data['Header']
-            name = header['table_id'][6:]
-            self.tables[name] = {}
+            table.name = header['table_id'][6:].split('_')[-1]
+            self.tables[table.name] = table

             generic_levels = header['generic_levels'].split()
-            if 'frequency' in header:
-                frequency = header['frequency']
-            else:
-                frequency = None
+            table.frequency = header.get('frequency', '')
+            table.realm = header.get('realm', '')

             for var_name, var_data in raw_data['variable_entry'].items():
                 var = VariableInfo('CMIP6', var_name)
                 if 'frequency' in var_data:
                     var.frequency = var_data['frequency']
                 else:
-                    var.frequency = frequency
+                    var.frequency = table.frequency
                 var.read_json(var_data)
                 self._assign_dimensions(var, generic_levels)
-                self.tables[name][var_name] = var
+                table[var_name] = var

     def _assign_dimensions(self, var, generic_levels):
         for dimension in var.dimensions:
@@ -128,9 +141,27 @@ def _load_coordinates(self):
                     coord.read_json(table_data['axis_entry'][coord_name])
                     self.coords[coord_name] = coord

+    def get_table(self, table):
+        """
+        Search and return the table info.
+
+        Parameters
+        ----------
+        table: basestring
+            Table name
+
+        Returns
+        -------
+        TableInfo
+            Return the TableInfo object for the requested table if
+            found, returns None if not
+
+        """
+        return self.tables.get(table)
+
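A minimal sketch of the header handling in _load_table, using an invented, trimmed CMOR 3 JSON snippet: the table name is the last underscore-separated token of table_id, and frequency/realm default to empty strings.

```python
import json

# Invented, trimmed CMOR 3 table snippet for illustration only.
raw_data = json.loads("""
{
  "Header": {
    "table_id": "Table CMIP6_Amon",
    "generic_levels": "alevel alevhalf",
    "frequency": "mon",
    "realm": "atmos"
  },
  "variable_entry": {"tas": {"units": "K"}}
}
""")

header = raw_data['Header']
print(header['table_id'][6:].split('_')[-1])   # Amon
print(header.get('frequency', ''))             # mon
print(header.get('realm', ''))                 # atmos
```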
     def get_variable(self, table, short_name):
         """
-        Search and return the variable info
+        Search and return the variable info.

         Parameters
         ----------
@@ -152,6 +183,8 @@ def get_variable(self, table, short_name):
             if short_name in CMIP6Info._CMIP_5to6_varname:
                 new_short_name = CMIP6Info._CMIP_5to6_varname[short_name]
                 return self.get_variable(table, new_short_name)
+            if self.default:
+                return self.default.get_variable(table, short_name)
             return None

     @staticmethod
@@ -163,6 +196,17 @@ def _is_table(table_data):
         return True


+class TableInfo(dict):
+    """Container class for storing a CMOR table."""
+
+    def __init__(self, *args, **kwargs):
+        """Create a new TableInfo object for storing VariableInfo objects."""
+        super(TableInfo, self).__init__(*args, **kwargs)
+        self.name = ''
+        self.frequency = ''
+        self.realm = ''
+
+
 class JsonInfo(object):
     """
     Base class for the info classes.

@@ -171,11 +215,11 @@ class JsonInfo(object):
     """

     def __init__(self):
-        self._json_data = None
+        self._json_data = {}

     def _read_json_variable(self, parameter):
         """
-        Read a json parameter in json_data
+        Read a json parameter in json_data.

         Parameters
         ----------
@@ -194,7 +238,7 @@ def _read_json_variable(self, parameter):

     def _read_json_list_variable(self, parameter):
         """
-        Read a json list parameter in json_data
+        Read a json list parameter in json_data.

         Parameters
         ----------
@@ -213,9 +257,11 @@ def _read_json_list_variable(self, parameter):


 class VariableInfo(JsonInfo):
+    """Class to read and store variable information."""
+
     def __init__(self, table_type, short_name):
         """
-        Class to read and store variable information
+        Class to read and store variable information.

         Parameters
         ----------
@@ -225,6 +271,8 @@ def __init__(self, table_type, short_name):
         """
         super(VariableInfo, self).__init__()
         self.table_type = table_type
+        self.modeling_realm = []
+        """Modeling realm"""
         self.short_name = short_name
         """Short name"""
         self.standard_name = ''
@@ -270,14 +318,18 @@ def read_json(self, json_data):

         self.valid_min = self._read_json_variable('valid_min')
         self.valid_max = self._read_json_variable('valid_max')
         self.positive = self._read_json_variable('positive')
+        self.modeling_realm = \
+            self._read_json_variable('modeling_realm').split()

         self.dimensions = self._read_json_variable('dimensions').split()
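The _read_json_variable helper is not shown in full in this diff, but its contract can be inferred from the callers above: it must return a string (results are passed to .split()), and a missing key must yield something falsy. A toy sketch under those assumptions, not the real class:

```python
class JsonInfoSketch:
    """Toy stand-in for JsonInfo; the real class lives in table.py."""

    def __init__(self, json_data):
        self._json_data = json_data

    def _read_json_variable(self, parameter):
        # Assumption: missing keys default to an empty string, so callers
        # can safely call .split() on the result.
        if parameter not in self._json_data:
            return ''
        return str(self._json_data[parameter])


info = JsonInfoSketch({'modeling_realm': 'atmos land'})
print(info._read_json_variable('modeling_realm').split())  # ['atmos', 'land']
print(info._read_json_variable('positive').split())        # []
```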

 class CoordinateInfo(JsonInfo):
+    """Class to read and store coordinate information."""
+
     def __init__(self, name):
         """
-        Class to read and store coordinate information
+        Class to read and store coordinate information.

         Parameters
         ----------
@@ -346,7 +398,7 @@ def read_json(self, json_data):

 class CMIP5Info(object):
     """
-    Class to read CMIP5-like data request
+    Class to read CMIP5-like data request.

     Parameters
     ----------
@@ -355,7 +407,7 @@ class CMIP5Info(object):

     """

-    def __init__(self, cmor_tables_path=None):
+    def __init__(self, cmor_tables_path, default=None):
         cmor_tables_path = self._get_cmor_path(cmor_tables_path)

         self._cmor_folder = os.path.join(cmor_tables_path, 'Tables')
@@ -365,6 +417,8 @@ def __init__(self, cmor_tables_path=None):
         self.tables = {}
         self.coords = {}

+        self.default = default
+
         self._current_table = None
         self._last_line_read = None

         for table_file in glob.glob(os.path.join(self._cmor_folder, '*')):
@@ -374,21 +428,35 @@ def __init__(self, cmor_tables_path=None):

     @staticmethod
     def _get_cmor_path(cmor_tables_path):
-        if not cmor_tables_path:
-            cwd = os.path.dirname(os.path.realpath(__file__))
-            cmor_tables_path = os.path.join(cwd, 'tables', 'cmip5')
+        if os.path.isdir(cmor_tables_path):
+            return cmor_tables_path
+        cwd = os.path.dirname(os.path.realpath(__file__))
+        cmor_tables_path = os.path.join(cwd, 'tables', cmor_tables_path)
         return cmor_tables_path

-    def _load_table(self, table_file, table_name='', frequency=''):
+    def _load_table(self, table_file, table_name=''):
+        if table_name and table_name in self.tables:
+            # special case used for updating a table with custom variable file
+            table = self.tables[table_name]
+        else:
+            # default case: table name is first line of table file
+            table = None
+
+        self._read_table_file(table_file, table)
+
+    def _read_table_file(self, table_file, table=None):
         with open(table_file) as self._current_table:
             self._read_line()
             while True:
                 key, value = self._last_line_read
                 if key == 'table_id':
-                    table_name = value[len('Table '):]
-                    self.tables[table_name] = {}
+                    table = TableInfo()
+                    table.name = value[len('Table '):]
+                    self.tables[table.name] = table
                 elif key == 'frequency':
-                    frequency = value
+                    table.frequency = value
+                elif key == 'modeling_realm':
+                    table.realm = value
                 elif key == 'generic_levels':
                     for dim in value.split(' '):
                         coord = CoordinateInfo(dim)
@@ -399,32 +467,11 @@ def _load_table(self, table_file, table_name='', frequency=''):
                         self.coords[value] = self._read_coordinate(value)
                     continue
                 elif key == 'variable_entry':
-                    variable = self._read_variable(value)
-                    variable.frequency = frequency
-                    for dim in variable.dimensions:
-                        variable.coordinates[dim] = self.coords[dim]
-                    self.tables[table_name][value] = variable
+                    table[value] = self._read_variable(value, table.frequency)
                     continue
                 if not self._read_line():
                     return
-    def add_custom_table_file(self, table_file, table_name):
-        """
-        Add a file with custom definitions to table.
-
-        Parameters
-        ----------
-        table_file: basestring
-            Path to the file containing the custom table
-        table_name: basestring
-            Name of the the custom table to add
-
-        """
-        random_variable_key = next(iter(self.tables[table_name]))
-        random_variable = self.tables[table_name][random_variable_key]
-        frequency = random_variable.frequency
-        self._load_table(table_file, table_name, frequency)
-
     def _read_line(self):
         line = self._current_table.readline()
         if line == '':
@@ -454,23 +501,44 @@ def _read_coordinate(self, value):
                 continue
             if hasattr(coord, key):
                 setattr(coord, key, value)
+        return coord

-    def _read_variable(self, value):
-        var = VariableInfo('CMIP5', value)
+    def _read_variable(self, short_name, frequency):
+        var = VariableInfo('CMIP5', short_name)
+        var.frequency = frequency
         while self._read_line():
             key, value = self._last_line_read
             if key in ('variable_entry', 'axis_entry'):
-                return var
-            if key == 'dimensions':
-                var.dimensions = value.split(' ')
-                continue
-            if hasattr(var, key):
+                break
+            if key in ('dimensions', 'modeling_realm'):
+                setattr(var, key, value.split())
+            elif hasattr(var, key):
                 setattr(var, key, value)
+        for dim in var.dimensions:
+            var.coordinates[dim] = self.coords[dim]
         return var

+    def get_table(self, table):
+        """
+        Search and return the table info.
+
+        Parameters
+        ----------
+        table: basestring
+            Table name
+
+        Returns
+        -------
+        TableInfo
+            Return the TableInfo object for the requested table if
+            found, returns None if not
+
+        """
+        return self.tables.get(table)
+
     def get_variable(self, table, short_name):
         """
-        Search and return the variable info
+        Search and return the variable info.

         Parameters
         ----------
@@ -486,7 +554,96 @@ def get_variable(self, table, short_name):
             found, returns None if not

         """
-        try:
-            return self.tables[table][short_name]
-        except KeyError:
-            return None
+        var_info = self.tables.get(table, {}).get(short_name, None)
+        if not var_info and self.default:
+            return self.default.get_variable(table, short_name)
+        return var_info
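The rewritten get_variable is a lookup with an optional fallback; because self.tables.get(table, {}) returns an empty dict for unknown tables, a missing table and a missing variable are handled by the same line. A minimal standalone sketch:

```python
tables = {'Amon': {'tas': 'tas-info'}}  # invented stand-in tables
custom = {'alb': 'alb-info'}            # stand-in for the custom table


def get_variable(table, short_name, default=custom):
    var_info = tables.get(table, {}).get(short_name, None)
    if not var_info and default is not None:
        return default.get(short_name)
    return var_info


print(get_variable('Amon', 'tas'))  # tas-info
print(get_variable('Amon', 'alb'))  # alb-info, found via the fallback
print(get_variable('Omon', 'xyz'))  # None
```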


+class CustomInfo(CMIP5Info):
+    """
+    Class to read custom var info for ESMVal.
+
+    Parameters
+    ----------
+    cmor_tables_path: basestring or None
+        Full path to the table or name for the table if it is present in
+        ESMValTool repository
+
+    """
+
+    def __init__(self, cmor_tables_path=None):
+        cwd = os.path.dirname(os.path.realpath(__file__))
+        self._cmor_folder = os.path.join(cwd, 'tables', 'custom')
+        self.tables = {}
+        table = TableInfo()
+        table.name = 'custom'
+        self.tables[table.name] = table
+        self._coordinates_file = os.path.join(
+            self._cmor_folder,
+            'CMOR_coordinates.dat',
+        )
+        self.coords = {}
+        self._read_table_file(self._coordinates_file, self.tables['custom'])
+        for dat_file in glob.glob(os.path.join(self._cmor_folder, '*.dat')):
+            if dat_file == self._coordinates_file:
+                continue
+            self._read_table_file(dat_file, self.tables['custom'])
+
+    def get_table(self, table):
+        """
+        Search and return the table info.
+
+        Parameters
+        ----------
+        table: basestring
+            Table name
+
+        Returns
+        -------
+        TableInfo
+            Return the TableInfo object for the requested table if
+            found, returns None if not
+
+        """
+        return self.tables.get(table)
+
+    def get_variable(self, table, short_name):
+        """
+        Search and return the variable info.
+
+        Parameters
+        ----------
+        table: basestring
+            Table name
+        short_name: basestring
+            Variable's short name
+
+        Returns
+        -------
+        VariableInfo
+            Return the VariableInfo object for the requested variable if
+            found, returns None if not
+
+        """
+        return self.tables['custom'].get(short_name, None)
+
+    def _read_table_file(self, table_file, table=None):
+        with open(table_file) as self._current_table:
+            self._read_line()
+            while True:
+                key, value = self._last_line_read
+                if key == 'generic_levels':
+                    for dim in value.split(' '):
+                        coord = CoordinateInfo(dim)
+                        coord.generic_level = True
+                        coord.axis = 'Z'
+                        self.coords[dim] = coord
+                elif key == 'axis_entry':
+                    self.coords[value] = self._read_coordinate(value)
+                    continue
+                elif key == 'variable_entry':
+                    table[value] = self._read_variable(value, None)
+                    continue
+                if not self._read_line():
+                    return
diff --git a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Omon b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Omon
index 2ccc1a269d..e64f3a68a2 100644
--- a/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Omon
+++ b/esmvaltool/cmor/tables/cmip5/Tables/CMIP5_Omon
@@ -142,9 +142,9 @@ long_name: ocean basin
 !----------------------------------
 ! Additional axis information:
 !----------------------------------
-out_name: basin
+out_name: region
 type: character
-requested: atlantic_arctic_ocean indian_pacific_ocean global_ocean ! space-separated list of requested coordinates
+requested: atlantic_arctic_ocean indian_pacific_ocean global_ocean ! space-separated list of requested coordinates
 must_have_bounds: no
 coords_attrib: region
 !----------------------------------
@@ -180,11 +180,11 @@ axis_entry: oline
 ! Axis attributes:
 !----------------------------------
 standard_name: region
-long_name: ocean passage
+long_name: ocean passage
 !----------------------------------
 ! Additional axis information:
 !----------------------------------
-out_name: line
+out_name: passage
 type: character
 requested: barents_opening bering_strait canadian_archipelago denmark_strait drake_passage english_channel pacific_equatorial_undercurrent faroe_scotland_channel florida_bahamas_strait fram_strait iceland_faroe_channel indonesian_throughflow mozambique_channel taiwan_luzon_straits windward_passage ! space-separated list of requested coordinates
 must_have_bounds: no
@@ -3795,7 +3795,7 @@ long_name: Sea Water Transport
 !----------------------------------
 ! Additional variable information:
 !----------------------------------
-dimensions: oline time
+dimensions: time oline
 out_name: mfo
 type: real
 !----------------------------------
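For orientation, the CMOR 2 style tables above and the custom .dat files below are flat "key: value" files in which '!' starts a comment. The diff does not show _read_line itself, so the following is only an assumed sketch of how such a line can be split, not the tool's implementation:

```python
line = 'requested: atlantic_arctic_ocean global_ocean ! requested coordinates'

content = line.split('!', 1)[0]                       # strip the comment
key, value = (part.strip() for part in content.split(':', 1))
print((key, value))
# ('requested', 'atlantic_arctic_ocean global_ocean')
```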
Variable attributes: !---------------------------------- -standard_name: mole_concentration_of_molecular_oxygen_in_sea_water +standard_name: mole_concentration_of_dissolved_molecular_oxygen_in_sea_water units: mol m-3 cell_methods: time: mean area: mean where sea cell_measures: area: areacello volume: volcello diff --git a/esmvaltool/cmor/tables/cmip6/README.md b/esmvaltool/cmor/tables/cmip6/README.md index f4fbfae2c4..19d535de2c 100644 --- a/esmvaltool/cmor/tables/cmip6/README.md +++ b/esmvaltool/cmor/tables/cmip6/README.md @@ -1 +1,9 @@ # cmip6-cmor-tables + +## Data Request 01.00.30 (March 11, 2019) + +Source is https://github.com/PCMDI/cmip6-cmor-tables + +- branch: 01.00.30 +- commit: c9089bf17e81d5dcc41a839e8299cee69da890d3 + diff --git a/esmvaltool/cmor/tables/custom/CMOR_alb.dat b/esmvaltool/cmor/tables/custom/CMOR_alb.dat new file mode 100644 index 0000000000..caa3513fd5 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_alb.dat @@ -0,0 +1,20 @@ +SOURCE: CMIP5 +!============ +variable_entry: alb +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: 1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: albedo at the surface +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +type: real +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_amoc.dat b/esmvaltool/cmor/tables/custom/CMOR_amoc.dat new file mode 100644 index 0000000000..0f951078df --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_amoc.dat @@ -0,0 +1,21 @@ +SOURCE: CMIP5 +!============ +variable_entry: amoc +!============ +modeling_realm: ocean +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg s-1 +cell_methods: time: mean area: where sea +cell_measures: area: areacello +long_name: Atlantic Meridional Overturning Circulation +comment: AMOC at the Rapid array (26.5 N) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: time +type: real +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_clhmtisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clhmtisccp.dat index 9e84eb79dd..588264e55a 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_clhmtisccp.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_clhmtisccp.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: ISCCP_High_Level_Medium_Thickness_Cloud_Area_Fraction +standard_name: units: % cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_clhtkisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clhtkisccp.dat index 567bd123a9..080fe2e76a 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_clhtkisccp.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_clhtkisccp.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! 
Variable attributes: !---------------------------------- -standard_name: ISCCP_high_level_thick_cloud_area_fraction +standard_name: units: % cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_clisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clisccp.dat new file mode 100644 index 0000000000..b86353b3ed --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_clisccp.dat @@ -0,0 +1,21 @@ +SOURCE: CMIP5 +!============ +variable_entry: clisccp +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: % +cell_methods: time: mean +cell_measures: area: areacella +long_name: ISCCP Cloud Area Fraction +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude plevs tau time +out_name: clisccp +type: real +!---------------------------------- +! \ No newline at end of file diff --git a/esmvaltool/cmor/tables/custom/CMOR_cllmtisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_cllmtisccp.dat index 6ebf9a9d94..de48310ff6 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_cllmtisccp.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_cllmtisccp.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: ISCCP_Low_Level_Medium_Thickness_Cloud_Area_Fraction +standard_name: units: % cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_clltkisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clltkisccp.dat index 0bb5ed7ec6..01affe0170 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_clltkisccp.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_clltkisccp.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: ISCCP_low_level_thick_cloud_area_fraction +standard_name: units: % cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_clmmtisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clmmtisccp.dat index 3533ea5698..e2fa6bde2c 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_clmmtisccp.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_clmmtisccp.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: ISCCP_Middle_Level_Medium_Thickness_Cloud_Area_Fraction +standard_name: units: % cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_clmtkisccp.dat b/esmvaltool/cmor/tables/custom/CMOR_clmtkisccp.dat index fe136404e5..4e3e41ceea 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_clmtkisccp.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_clmtkisccp.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! 
Variable attributes: !---------------------------------- -standard_name: ISCCP_Middle_Level_Thick_Cloud_Area_Fraction +standard_name: units: % cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_cltStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_cltStderr.dat new file mode 100644 index 0000000000..507e62e425 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_cltStderr.dat @@ -0,0 +1,26 @@ +SOURCE: CMIP5 +!============ +variable_entry: cltStderr +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: % +cell_methods: time: mean +cell_measures: area: areacella +long_name: Total Cloud Fraction Error +comment: for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Include both large-scale and convective cloud. +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: cltStderr +type: real +valid_min: 0 +valid_max: 0.01 +ok_min_mean_abs: 0 +ok_max_mean_abs: 0.01 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_coordinates.dat b/esmvaltool/cmor/tables/custom/CMOR_coordinates.dat new file mode 100644 index 0000000000..8ad131fb4b --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_coordinates.dat @@ -0,0 +1,106 @@ +!============ +axis_entry: longitude +!============ +!---------------------------------- +! Axis attributes: +!---------------------------------- +standard_name: longitude +units: degrees_east +axis: X ! X, Y, Z, T (default: undeclared) +long_name: longitude +!---------------------------------- +! Additional axis information: +!---------------------------------- +out_name: lon +valid_min: 0.0 +valid_max: 360.0 +stored_direction: increasing +type: double +must_have_bounds: yes +!---------------------------------- +! + +!============ +axis_entry: latitude +!============ +!---------------------------------- +! Axis attributes: +!---------------------------------- +standard_name: latitude +units: degrees_north +axis: Y ! X, Y, Z, T (default: undeclared) +long_name: latitude +!---------------------------------- +! Additional axis information: +!---------------------------------- +out_name: lat +valid_min: -90.0 +valid_max: 90.0 +stored_direction: increasing +type: double +must_have_bounds: yes +!---------------------------------- +! + +!============ +axis_entry: plevs +!============ +!---------------------------------- +! Axis attributes: +!---------------------------------- +standard_name: air_pressure +units: Pa +axis: Z ! X, Y, Z, T (default: undeclared) +positive: down +long_name: pressure +!---------------------------------- +! Additional axis information: +!---------------------------------- +out_name: plev +valid_min: 0.0 +valid_max: 110000.0 +stored_direction: decreasing +type: double +must_have_bounds: no +!---------------------------------- +! + + +!============ +axis_entry: time +!============ +!---------------------------------- +! Axis attributes: +!---------------------------------- +standard_name: time +units: days since ? +axis: T ! X, Y, Z, T (default: undeclared) +long_name: time +!---------------------------------- +! Additional axis information: +!---------------------------------- +out_name: time +stored_direction: increasing +type: double +must_have_bounds: yes +!---------------------------------- +! 
+ +!============ +axis_entry: tau +!============ +!---------------------------------- +! Axis attributes: +!---------------------------------- +standard_name: atmosphere_optical_thickness_due_to_cloud +units: 1 +long_name: cloud optical thickness +!---------------------------------- +! Additional axis information: +!---------------------------------- +out_name: tau +stored_direction: increasing +type: double +must_have_bounds: yes +!---------------------------------- +! \ No newline at end of file diff --git a/esmvaltool/cmor/tables/custom/CMOR_dos.dat b/esmvaltool/cmor/tables/custom/CMOR_dos.dat new file mode 100644 index 0000000000..ef24f2e740 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_dos.dat @@ -0,0 +1,27 @@ +SOURCE: CMIP5 (adapted from mrso) +!============ +variable_entry: dos +!============ +modeling_realm: land +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: m3 m-3 +cell_methods: time: mean area: mean where land +cell_measures: area: areacella +long_name: Degree of Soil Saturation +comment: (unitless) degree of soil saturation for comparing mass based models with volumetric observations. +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: dos +type: real +valid_min: 0 +valid_max: 2 +ok_min_mean_abs: 0 +ok_max_mean_abs: 1 +!---------------------------------- +! + diff --git a/esmvaltool/cmor/tables/custom/CMOR_dosStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_dosStderr.dat new file mode 100644 index 0000000000..41cbe68923 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_dosStderr.dat @@ -0,0 +1,24 @@ +SOURCE: CMIP5 (adapted from mrso) +!============ +variable_entry: dosStderr +!============ +modeling_realm: land +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: m3 m-3 +cell_methods: time: mean area: mean where land +cell_measures: area: areacella +long_name: Degree of Soil Saturation Error +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: dosStderr +type: real +valid_min: 0.0 +valid_max: 1.0 +!---------------------------------- +! + diff --git a/esmvaltool/cmor/tables/custom/CMOR_et.dat b/esmvaltool/cmor/tables/custom/CMOR_et.dat new file mode 100644 index 0000000000..a962ae3f29 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_et.dat @@ -0,0 +1,21 @@ +SOURCE: CMIP5 +!============ +variable_entry: et +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: mm day-1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Evapotranspiration +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +type: real +positive: down +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_etStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_etStderr.dat new file mode 100644 index 0000000000..685da2a8de --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_etStderr.dat @@ -0,0 +1,24 @@ +SOURCE: CMIP5 +!============ +variable_entry: etStderr +!============ +modeling_realm: land +!---------------------------------- +! 
Variable attributes: +!---------------------------------- +standard_name: +units: mm day-1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Evapotranspiration Error +comment: Standard deviation +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: etStderr +type: real +valid_min: 0 +!---------------------------------- +! + diff --git a/esmvaltool/cmor/tables/custom/CMOR_fgco2_grid.dat b/esmvaltool/cmor/tables/custom/CMOR_fgco2_grid.dat new file mode 100644 index 0000000000..0ec33ca258 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_fgco2_grid.dat @@ -0,0 +1,23 @@ +SOURCE: CMIP5 +!============ +variable_entry: fgco2_grid +!============ +modeling_realm: ocnBgchem +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg m-2 s-1 +cell_methods: time: mean area: mean +cell_measures: area: areacello +long_name: Surface Downward CO2 Flux relative to grid cell area +comment: Gas exchange flux of CO2 (positive into ocean) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: fgco2_grid +type: real +positive: down +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_gppStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_gppStderr.dat new file mode 100644 index 0000000000..b33d2c19c0 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_gppStderr.dat @@ -0,0 +1,23 @@ +SOURCE: CMIP5 +!============ +variable_entry: gppStderr +!============ +modeling_realm: land +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg m-2 s-1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Carbon Mass Flux out of Atmosphere due to Gross Primary Production on Land Error +comment: Standard deviation calculated based on median absolute deviation +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: gppStderr +type: real +valid_min: 0 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_gtfgco2.dat b/esmvaltool/cmor/tables/custom/CMOR_gtfgco2.dat new file mode 100644 index 0000000000..dba2aa00c6 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_gtfgco2.dat @@ -0,0 +1,21 @@ +SOURCE: CMIP5 +!============ +variable_entry: gtfgco2 +!============ +modeling_realm: ocnBgchem +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg s-1 +cell_methods: time: mean area: where sea +cell_measures: area: areacello +long_name: Global Total Surface Downward CO2 Flux +comment: Gas exchange flux of CO2 (positive into ocean) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: time +type: real +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_husStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_husStderr.dat new file mode 100644 index 0000000000..b053d1d4fd --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_husStderr.dat @@ -0,0 +1,25 @@ +SOURCE: obs4mips +!============ +variable_entry: husStderr +!============ +modeling_realm: atmos +!---------------------------------- +! 
Variable attributes: +!---------------------------------- +standard_name: +units: 1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Specific Humidity Error +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude plevs time +out_name: husStderr +type: real +valid_min: -0.000299 +valid_max: 0.02841 +ok_min_mean_abs: -0.0003539 +ok_max_mean_abs: 0.01041 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_iwpStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_iwpStderr.dat new file mode 100644 index 0000000000..088b372487 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_iwpStderr.dat @@ -0,0 +1,25 @@ +SOURCE: CMIP5 +!============ +variable_entry: iwpStderr +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg m-2 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Condensed Ice Path Error +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: iwpStderr +type: real +valid_min: 0.0 +valid_max: 5.0 +ok_min_mean_abs: 0.0 +ok_max_mean_abs: 1.0 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_lwcre.dat b/esmvaltool/cmor/tables/custom/CMOR_lwcre.dat index f274bf2771..1088e87bb0 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_lwcre.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_lwcre.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: toa_longwave_cloud_radiative_effect +standard_name: units: W m-2 cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_lwp.dat b/esmvaltool/cmor/tables/custom/CMOR_lwp.dat index 491c76acc8..2e4dc38d24 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_lwp.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_lwp.dat @@ -6,16 +6,15 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: atmosphere_cloud_liquid_water_content +standard_name: units: kg m-2 cell_methods: time: mean cell_measures: area: areacella -long_name: liquid water path +long_name: Liquid Water Path !---------------------------------- ! Additional variable information: !---------------------------------- dimensions: longitude latitude time -out_name: lwp type: real valid_min: 0.0 valid_max: 5.0 diff --git a/esmvaltool/cmor/tables/custom/CMOR_lwpStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_lwpStderr.dat new file mode 100644 index 0000000000..fd2bf63555 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_lwpStderr.dat @@ -0,0 +1,25 @@ +SOURCE: CMIP5 +!============ +variable_entry: lwpStderr +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg m-2 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Liquid Water Path Error +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: lwpStderr +type: real +valid_min: 0.0 +valid_max: 5.0 +ok_min_mean_abs: 0.0 +ok_max_mean_abs: 1.0 +!---------------------------------- +! 
diff --git a/esmvaltool/cmor/tables/custom/CMOR_nbp_grid.dat b/esmvaltool/cmor/tables/custom/CMOR_nbp_grid.dat new file mode 100644 index 0000000000..9c70aebc26 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_nbp_grid.dat @@ -0,0 +1,23 @@ +SOURCE: CMIP5 +!============ +variable_entry: nbp_grid +!============ +modeling_realm: land +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg m-2 s-1 +cell_methods: time: mean area: mean +cell_measures: area: areacella +long_name: Carbon Mass Flux out of Atmosphere due to Net Biospheric Production on Land relative to grid cell area +comment: This is the net mass flux of carbon between land and atmosphere calculated as photosynthesis MINUS the sum of plant and soil respiration, carbonfluxes from fire, harvest, grazing and land use change. Positive flux is into the land. +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: nbp_grid +type: real +positive: down +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_netcre.dat b/esmvaltool/cmor/tables/custom/CMOR_netcre.dat new file mode 100644 index 0000000000..6525abd2b6 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_netcre.dat @@ -0,0 +1,22 @@ +SOURCE: CCMI1 +!============ +variable_entry: netcre +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: W m-2 +cell_methods: time: mean +cell_measures: area: areacella +long_name: TOA Net Cloud Radiative Effect +comment: at the top of the atmosphere (to be compared with satellite measurements) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +type: real +positive: up +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_od550aerStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_od550aerStderr.dat new file mode 100644 index 0000000000..5a43465e9a --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_od550aerStderr.dat @@ -0,0 +1,22 @@ +SOURCE: CMIP5 +!============ +variable_entry: od550aerStderr +!============ +modeling_realm: aerosol +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: 1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Ambient Aerosol Optical Thickness at 550 nm Error +comment: AOD error from the ambient aerosls (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: od550aerStderr +type: real +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_od870aerStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_od870aerStderr.dat new file mode 100644 index 0000000000..fb8c316ca4 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_od870aerStderr.dat @@ -0,0 +1,22 @@ +SOURCE: CMIP5 +!============ +variable_entry: od870aerStderr +!============ +modeling_realm: aerosol +!---------------------------------- +! 
Variable attributes: +!---------------------------------- +standard_name: +units: 1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Ambient Aerosol Optical Thickness at 870 nm Error +comment: AOD error from the ambient aerosls (i.e., includes aerosol water). Does not include AOD from stratospheric aerosols if these are prescribed but includes other possible background aerosol types. +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: od870aerStderr +type: real +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_prStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_prStderr.dat new file mode 100644 index 0000000000..4e8ecd30e7 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_prStderr.dat @@ -0,0 +1,26 @@ +SOURCE: CMIP5 +!============ +variable_entry: prStderr +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: kg m-2 s-1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Precipitation Standard Error +comment: at surface; includes both liquid and solid phases from all types of clouds (both large-scale and convective) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: prStderr +type: real +valid_min: 0 +valid_max: 0.001 +ok_min_mean_abs: 0 +ok_max_mean_abs: 0.001 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rlns.dat b/esmvaltool/cmor/tables/custom/CMOR_rlns.dat index 2b27a03fbe..05841ab7e2 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_rlns.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_rlns.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: surface_net_downward_longwave_radiation +standard_name: units: W m-2 cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_rluscs.dat b/esmvaltool/cmor/tables/custom/CMOR_rluscs.dat new file mode 100644 index 0000000000..df4d2601ed --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_rluscs.dat @@ -0,0 +1,26 @@ +SOURCE: CMIP5 +!============ +variable_entry: rluscs +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: W m-2 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Surface Upwelling Clear-Sky Longwave Radiation +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: rluscs +type: real +positive: up +valid_min: 43.75 +valid_max: 658 +ok_min_mean_abs: 325.6 +ok_max_mean_abs: 376.3 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rlut.dat b/esmvaltool/cmor/tables/custom/CMOR_rlut.dat new file mode 100644 index 0000000000..66cae228e6 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_rlut.dat @@ -0,0 +1,27 @@ +SOURCE: CMIP5 +!============ +variable_entry: rlut +!============ +modeling_realm: atmos +!---------------------------------- +! 
Variable attributes: +!---------------------------------- +standard_name: toa_outgoing_longwave_flux +units: W m-2 +cell_methods: time: mean +cell_measures: area: areacella +long_name: TOA Outgoing Longwave Radiation +comment: at the top of the atmosphere (to be compared with satellite measurements) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: rlut +type: real +positive: up +valid_min: 67.48 +valid_max: 383.2 +ok_min_mean_abs: 207.4 +ok_max_mean_abs: 234.4 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rlutcs.dat b/esmvaltool/cmor/tables/custom/CMOR_rlutcs.dat new file mode 100644 index 0000000000..204e4bb4b0 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_rlutcs.dat @@ -0,0 +1,26 @@ +SOURCE: CMIP5 +!============ +variable_entry: rlutcs +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: toa_outgoing_longwave_flux_assuming_clear_sky +units: W m-2 +cell_methods: time: mean +cell_measures: area: areacella +long_name: TOA Outgoing Clear-Sky Longwave Radiation +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: rlutcs +type: real +positive: up +valid_min: 70.59 +valid_max: 377.5 +ok_min_mean_abs: 228.9 +ok_max_mean_abs: 260.4 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rsns.dat b/esmvaltool/cmor/tables/custom/CMOR_rsns.dat index 357bcf0d47..966c3bb6cf 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_rsns.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_rsns.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: surface_net_downward_shortwave_radiation +standard_name: units: W m-2 cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_rsnt.dat b/esmvaltool/cmor/tables/custom/CMOR_rsnt.dat index 2f935e6ce8..32a7da45de 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_rsnt.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_rsnt.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: toa_net_downward_shortwave_radiation +standard_name: units: W m-2 cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_rsut.dat b/esmvaltool/cmor/tables/custom/CMOR_rsut.dat new file mode 100644 index 0000000000..a2fd561b34 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_rsut.dat @@ -0,0 +1,27 @@ +SOURCE: CMIP5 +!============ +variable_entry: rsut +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: toa_outgoing_shortwave_flux +units: W m-2 +cell_methods: time: mean +cell_measures: area: areacella +long_name: TOA Outgoing Shortwave Radiation +comment: at the top of the atmosphere +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: rsut +type: real +positive: up +valid_min: -0.02689 +valid_max: 421.9 +ok_min_mean_abs: 96.72 +ok_max_mean_abs: 114.1 +!---------------------------------- +! 
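All of the custom .dat tables above share one simple layout: lines beginning with "!" are separators or comments, and every other line is a "key: value" pair whose value may be empty (an empty "standard_name:" marks a variable without an official CF standard name, which is presumably why the rlns, rsns, rsnt, rtnt, swcre and toz edits in this patch blank that field). The following minimal reader for this layout is an illustrative sketch only; read_custom_cmor_table is a hypothetical helper name, not ESMValTool's actual CMOR-table API.

def read_custom_cmor_table(path):
    """Parse one custom CMOR .dat table into a flat dict of attributes."""
    entry = {}
    with open(path) as table:
        for line in table:
            line = line.strip()
            # '!' lines are separators or comments; blank lines carry no data.
            if not line or line.startswith('!'):
                continue
            # Split on the first ':' only, so values such as
            # 'cell_methods: time: mean' keep their embedded colons.
            key, _, value = line.partition(':')
            entry[key.strip()] = value.strip()
    return entry

# Hypothetical usage, against the CMOR_rlut.dat file added above:
# attrs = read_custom_cmor_table('esmvaltool/cmor/tables/custom/CMOR_rlut.dat')
# attrs['standard_name'] == 'toa_outgoing_longwave_flux'
# attrs['units'] == 'W m-2'
# attrs['positive'] == 'up'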
diff --git a/esmvaltool/cmor/tables/custom/CMOR_rsutcs.dat b/esmvaltool/cmor/tables/custom/CMOR_rsutcs.dat new file mode 100644 index 0000000000..3039856aec --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_rsutcs.dat @@ -0,0 +1,26 @@ +SOURCE: CMIP5 +!============ +variable_entry: rsutcs +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: toa_outgoing_shortwave_flux_assuming_clear_sky +units: W m-2 +cell_methods: time: mean +cell_measures: area: areacella +long_name: TOA Outgoing Clear-Sky Shortwave Radiation +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: rsutcs +type: real +positive: up +valid_min: 0 +valid_max: 444 +ok_min_mean_abs: 54.7 +ok_max_mean_abs: 73.36 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_rtnt.dat b/esmvaltool/cmor/tables/custom/CMOR_rtnt.dat index 59a6474013..b3697aa344 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_rtnt.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_rtnt.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: toa_net_downward_total_radiation +standard_name: units: W m-2 cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_sm.dat b/esmvaltool/cmor/tables/custom/CMOR_sm.dat new file mode 100644 index 0000000000..31858bb292 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_sm.dat @@ -0,0 +1,26 @@ +SOURCE: CMIP5 (adapted from mrsos) +!============ +variable_entry: sm +!============ +modeling_realm: land +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: m3 m-3 +cell_methods: time: mean area: mean where land +cell_measures: area: areacella +long_name: Volumetric Moisture in Upper Portion of Soil Column +comment: the volume of water in all phases in a thin surface soil layer. +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: sm +type: real +valid_min: 0 +valid_max: 1 +ok_min_mean_abs: 0 +ok_max_mean_abs: 1 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_smStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_smStderr.dat new file mode 100644 index 0000000000..7da194878c --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_smStderr.dat @@ -0,0 +1,25 @@ +SOURCE: CMIP5 (adapted from mrsos) +!============ +variable_entry: smStderr +!============ +modeling_realm: land +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: m3 m-3 +cell_methods: time: mean area: mean where land +cell_measures: area: areacella +long_name: Volumetric Moisture in Upper Portion of Soil Column Error +comment: Error of the volume of water in all phases in a thin surface soil layer. +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: smStderr +type: real +valid_min: 0.0 +valid_max: 1.0 +!---------------------------------- +! 
+ diff --git a/esmvaltool/cmor/tables/custom/CMOR_swcre.dat b/esmvaltool/cmor/tables/custom/CMOR_swcre.dat index 65be74787e..096d6ca77e 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_swcre.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_swcre.dat @@ -6,7 +6,7 @@ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: toa_shortwave_cloud_radiative_effect +standard_name: units: W m-2 cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_tasa.dat b/esmvaltool/cmor/tables/custom/CMOR_tasa.dat new file mode 100644 index 0000000000..1ad03eb64e --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_tasa.dat @@ -0,0 +1,25 @@ +SOURCE: CMIP5 (adapted from tas) +!============ +variable_entry: tasa +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: K +cell_methods: time: mean +cell_measures: area: areacella +long_name: Near-Surface Air Temperature Anomaly +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: tasa +type: real +valid_min: -20.0 +valid_max: 20.0 +ok_min_mean_abs: -20.0 +ok_max_mean_abs: 20.0 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_toz.dat b/esmvaltool/cmor/tables/custom/CMOR_toz.dat index 75ca0178a0..6d319a171b 100644 --- a/esmvaltool/cmor/tables/custom/CMOR_toz.dat +++ b/esmvaltool/cmor/tables/custom/CMOR_toz.dat @@ -1,12 +1,12 @@ SOURCE: CCMI1 !============ -variable_entry: toz +variable_entry: toz !============ modeling_realm: atmos !---------------------------------- ! Variable attributes: !---------------------------------- -standard_name: equivalent_thickness_at_stp_of_atmosphere_ozone_content +standard_name: units: DU cell_methods: time: mean cell_measures: area: areacella diff --git a/esmvaltool/cmor/tables/custom/CMOR_tozStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_tozStderr.dat new file mode 100644 index 0000000000..240247c84c --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_tozStderr.dat @@ -0,0 +1,23 @@ +SOURCE: CCMI1 +!============ +variable_entry: tozStderr +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: DU +cell_methods: time: mean +cell_measures: area: areacella +long_name: Total Ozone Column Error +comment: total ozone column in DU +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +type: real +valid_min: 0.0 +valid_max: 5000.0 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_tro3prof.dat b/esmvaltool/cmor/tables/custom/CMOR_tro3prof.dat new file mode 100644 index 0000000000..efe6e651c0 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_tro3prof.dat @@ -0,0 +1,23 @@ +SOURCE: CMIP5 +!============ +variable_entry: tro3prof +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: 1e-9 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Ozone Volume Mixing Ratio +comment: +!---------------------------------- +! 
Additional variable information: +!---------------------------------- +dimensions: latitude plevs time +type: real +valid_min: 0.0 +valid_max: 1.0 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_tro3profStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_tro3profStderr.dat new file mode 100644 index 0000000000..d5fc63027d --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_tro3profStderr.dat @@ -0,0 +1,23 @@ +SOURCE: CMIP5 +!============ +variable_entry: tro3profStderr +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: 1e-9 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Ozone Volume Mixing Ratio Error +comment: +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: latitude plevs time +type: real +valid_min: 0.0 +valid_max: 1.0 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_tsStderr.dat b/esmvaltool/cmor/tables/custom/CMOR_tsStderr.dat new file mode 100644 index 0000000000..4bd6530a17 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_tsStderr.dat @@ -0,0 +1,26 @@ +SOURCE: CMIP5 +!============ +variable_entry: tsStderr +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: K +cell_methods: time: mean +cell_measures: area: areacella +long_name: Surface Temperature Error +comment: ""skin"" temperature error (i.e., SST for open ocean) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: tsStderr +type: real +valid_min: 0 +valid_max: 10 +ok_min_mean_abs: 0 +ok_max_mean_abs: 10 +!---------------------------------- +! diff --git a/esmvaltool/cmor/tables/custom/CMOR_xch4.dat b/esmvaltool/cmor/tables/custom/CMOR_xch4.dat new file mode 100644 index 0000000000..779a6da6df --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_xch4.dat @@ -0,0 +1,22 @@ +SOURCE: CMIP5 +!============ +variable_entry: xch4 +!============ +modeling_realm: atmos +!---------------------------------- +! Variable attributes: +!---------------------------------- +standard_name: +units: 1 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Column-average Dry-air Mole Fraction of Atmospheric Methane +comment: Satellite retrieved column-average dry-air mole fraction of atmospheric methane (XCH4) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: xch4 +type: real +!---------------------------------- +! \ No newline at end of file diff --git a/esmvaltool/cmor/tables/custom/CMOR_xco2.dat b/esmvaltool/cmor/tables/custom/CMOR_xco2.dat new file mode 100644 index 0000000000..dcdde3bbf7 --- /dev/null +++ b/esmvaltool/cmor/tables/custom/CMOR_xco2.dat @@ -0,0 +1,22 @@ +SOURCE: CMIP5 +!============ +variable_entry: xco2 +!============ +modeling_realm: atmos +!---------------------------------- +! 
Variable attributes: +!---------------------------------- +standard_name: +units: 1e-6 +cell_methods: time: mean +cell_measures: area: areacella +long_name: Column-average Dry-air Mole Fraction of Atmospheric Carbon Dioxide +comment: Satellite retrieved column-average dry-air mole fraction of atmospheric carbon dioxide (XCO2) +!---------------------------------- +! Additional variable information: +!---------------------------------- +dimensions: longitude latitude time +out_name: xco2 +type: real +!---------------------------------- +! \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Aday.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Aday.json new file mode 100644 index 0000000000..0f945e4327 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Aday.json @@ -0,0 +1,631 @@ +{ + "Header":{ + "#dataRequest_specs_version":"01.00.21", + "#mip_era":"CMIP6", + "Conventions":"CF-1.7 ODS-2.1", + "approx_interval":"1.00000", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "generic_levels":"", + "int_missing_value":"-2147483648", + "missing_value":"1e20", + "product":"observations", + "realm":"atmos", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_Aday" + }, + "variable_entry":{ + "clt":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Total Cloud Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clt", + "positive":"", + "standard_name":"cloud_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "hfls":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Surface Upward Latent Heat Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfls", + "positive":"up", + "standard_name":"surface_upward_latent_heat_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfss":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Surface Upward Sensible Heat Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfss", + "positive":"up", + "standard_name":"surface_upward_sensible_heat_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hur":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", + "dimensions":"longitude latitude plev8 time", + "frequency":"day", + "long_name":"Relative Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hur", + "positive":"", + "standard_name":"relative_humidity", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "hurs":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", + "dimensions":"longitude latitude time height2m", + "frequency":"day", + "long_name":"Near-Surface Relative Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hurs", + "positive":"", + 
"standard_name":"relative_humidity", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "hursmax":{ + "cell_measures":"", + "cell_methods":"area: mean time: maximum", + "comment":"", + "dimensions":"longitude latitude time height2m", + "frequency":"day", + "long_name":"Surface Daily Maximum Relative Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hursmax", + "positive":"", + "standard_name":"relative_humidity", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "hursmin":{ + "cell_measures":"", + "cell_methods":"area: mean time: minimum", + "comment":"", + "dimensions":"longitude latitude time height2m", + "frequency":"day", + "long_name":"Surface Daily Minimum Relative Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hursmin", + "positive":"", + "standard_name":"relative_humidity", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "hus":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev8 time", + "frequency":"day", + "long_name":"Specific Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hus", + "positive":"", + "standard_name":"specific_humidity", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "huss":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Near-surface (usually, 2 meter) specific humidity.", + "dimensions":"longitude latitude time height2m", + "frequency":"day", + "long_name":"Near-Surface Specific Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"huss", + "positive":"", + "standard_name":"specific_humidity", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "mrro":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The total run-off (including drainage through the base of the soil model) per unit area leaving the land portion of the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Total Runoff", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrro", + "positive":"", + "standard_name":"runoff_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "mrso":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"the mass per unit area (summed over all soil layers) of water in all phases.", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Total Soil Moisture Content", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrso", + "positive":"", + "standard_name":"soil_moisture_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "mrsos":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The mass of water in all phases in the upper 10cm of the soil layer.", + "dimensions":"longitude latitude time sdepth1", + "frequency":"day", + "long_name":"Moisture in Upper Portion of Soil Column", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrsos", + "positive":"", + "standard_name":"moisture_content_of_soil_layer", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "pr":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"includes both liquid and solid phases", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Precipitation", + 
"ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pr", + "positive":"", + "standard_name":"precipitation_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "prc":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Convective precipitation at surface; includes both liquid and solid phases.", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Convective Precipitation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"prc", + "positive":"", + "standard_name":"convective_precipitation_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "prsn":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"at surface; includes precipitation of all forms of water in the solid phase", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Snowfall Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"prsn", + "positive":"", + "standard_name":"snowfall_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "psl":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Sea Level Pressure", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Sea Level Pressure", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"psl", + "positive":"", + "standard_name":"air_pressure_at_sea_level", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "rlds":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Surface Downwelling Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rlds", + "positive":"down", + "standard_name":"surface_downwelling_longwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rlus":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Surface Upwelling Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rlus", + "positive":"up", + "standard_name":"surface_upwelling_longwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rlut":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"at the top of the atmosphere (to be compared with satellite measurements)", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"TOA Outgoing Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rlut", + "positive":"up", + "standard_name":"toa_outgoing_longwave_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsds":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"surface solar irradiance for UV calculations", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Surface Downwelling Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsds", + "positive":"down", + "standard_name":"surface_downwelling_shortwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsus":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Surface Upwelling 
Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsus", + "positive":"up", + "standard_name":"surface_upwelling_shortwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "sfcWind":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"near-surface (usually, 10 meters) wind speed.", + "dimensions":"longitude latitude time height10m", + "frequency":"day", + "long_name":"Daily-Mean Near-Surface Wind Speed", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sfcWind", + "positive":"", + "standard_name":"wind_speed", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "sfcWindmax":{ + "cell_measures":"", + "cell_methods":"area: mean time: maximum", + "comment":"Daily maximum near-surface (usually, 10 meters) wind speed.", + "dimensions":"longitude latitude time height10m", + "frequency":"day", + "long_name":"Daily Maximum Near-Surface Wind Speed", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sfcWindmax", + "positive":"", + "standard_name":"wind_speed", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "snc":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Fraction of each grid cell that is occupied by snow that rests on land portion of cell.", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Snow Area Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"snc", + "positive":"", + "standard_name":"surface_snow_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "snw":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The mass of surface snow on the land portion of the grid cell divided by the land area in the grid cell; reported as missing where the land fraction is 0; excludes snow on vegetation canopy or on sea ice.", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Surface Snow Amount", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"snw", + "positive":"", + "standard_name":"surface_snow_amount", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "ta":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Air Temperature", + "dimensions":"longitude latitude plev8 time", + "frequency":"day", + "long_name":"Air Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ta", + "positive":"", + "standard_name":"air_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "tas":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"near-surface (usually, 2 meter) air temperature", + "dimensions":"longitude latitude time height2m", + "frequency":"day", + "long_name":"Near-Surface Air Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tas", + "positive":"", + "standard_name":"air_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "tasmax":{ + "cell_measures":"", + "cell_methods":"area: mean time: maximum", + "comment":"maximum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: max')", + "dimensions":"longitude latitude time height2m", + "frequency":"day", + "long_name":"Daily Maximum Near-Surface Air Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tasmax", + "positive":"", + "standard_name":"air_temperature", 
+ "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "tasmin":{ + "cell_measures":"", + "cell_methods":"area: mean time: minimum", + "comment":"minimum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: min')", + "dimensions":"longitude latitude time height2m", + "frequency":"day", + "long_name":"Daily Minimum Near-Surface Air Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tasmin", + "positive":"", + "standard_name":"air_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "tslsi":{ + "cell_measures":"", + "cell_methods":"area: time: mean (comment: over land and sea ice)", + "comment":"Surface temperature of all surfaces except open ocean.", + "dimensions":"longitude latitude time", + "frequency":"day", + "long_name":"Surface Temperature Where Land or Sea Ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tslsi", + "positive":"", + "standard_name":"surface_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "ua":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev8 time", + "frequency":"day", + "long_name":"Eastward Wind", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ua", + "positive":"", + "standard_name":"eastward_wind", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "uas":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Eastward component of the near-surface (usually, 10 meters) wind", + "dimensions":"longitude latitude time height10m", + "frequency":"day", + "long_name":"Eastward Near-Surface Wind", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"uas", + "positive":"", + "standard_name":"eastward_wind", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "va":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev8 time", + "frequency":"day", + "long_name":"Northward Wind", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"va", + "positive":"", + "standard_name":"northward_wind", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "vas":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Northward component of the near surface wind", + "dimensions":"longitude latitude time height10m", + "frequency":"day", + "long_name":"Northward Near-Surface Wind", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vas", + "positive":"", + "standard_name":"northward_wind", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "wap":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Omega (vertical velocity in pressure coordinates, positive downwards)", + "dimensions":"longitude latitude plev8 time", + "frequency":"day", + "long_name":"omega (=dp/dt)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"wap", + "positive":"", + "standard_name":"lagrangian_tendency_of_air_pressure", + "type":"real", + "units":"Pa s-1", + "valid_max":"", + "valid_min":"" + }, + "zg":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev8 time", + "frequency":"day", + "long_name":"Geopotential Height", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zg", + "positive":"", + "standard_name":"geopotential_height", + "type":"real", + "units":"m", + 
"valid_max":"", + "valid_min":"" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Amon.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Amon.json new file mode 100644 index 0000000000..8862bb46d7 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Amon.json @@ -0,0 +1,1464 @@ +{ + "Header":{ + "#dataRequest_specs_version":"01.00.21", + "#mip_era":"CMIP6", + "Conventions":"CF-1.7 ODS-2.1", + "approx_interval":"30.00000", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "generic_levels":"alevel alevhalf", + "int_missing_value":"-2147483648", + "missing_value":"1e20", + "product":"observations", + "realm":"atmos", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_Amon" + }, + "variable_entry":{ + "ccb":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Where convective cloud is present in the grid cell, the instantaneous cloud base altitude should be that of the bottom of the lowest level containing convective cloud. Missing data should be reported in the absence of convective cloud. The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Air Pressure at Convective Cloud Base", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ccb", + "positive":"", + "standard_name":"air_pressure_at_convective_cloud_base", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "cct":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Where convective cloud is present in the grid cell, the instantaneous cloud top altitude should be that of the top of the highest level containing convective cloud. Missing data should be reported in the absence of convective cloud. 
The time mean should be calculated from these quantities averaging over occasions when convective cloud is present only, and should contain missing data for occasions when no convective cloud is present during the meaning period.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Air Pressure at Convective Cloud Top", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cct", + "positive":"", + "standard_name":"air_pressure_at_convective_cloud_top", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "cfc113global":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Mean Mole Fraction of CFC113", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cfc113global", + "positive":"", + "standard_name":"mole_fraction_of_cfc113_in_air", + "type":"real", + "units":"1e-12", + "valid_max":"", + "valid_min":"" + }, + "cfc11global":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Mean Mole Fraction of CFC11", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cfc11global", + "positive":"", + "standard_name":"mole_fraction_of_cfc11_in_air", + "type":"real", + "units":"1e-12", + "valid_max":"", + "valid_min":"" + }, + "cfc12global":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Mean Mole Fraction of CFC12", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cfc12global", + "positive":"", + "standard_name":"mole_fraction_of_cfc12_in_air", + "type":"real", + "units":"1e-12", + "valid_max":"", + "valid_min":"" + }, + "ch4":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Mole Fraction of CH4", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ch4", + "positive":"", + "standard_name":"mole_fraction_of_methane_in_air", + "type":"real", + "units":"mol mol-1", + "valid_max":"", + "valid_min":"" + }, + "ch4Clim":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean within years time: mean over years", + "comment":"", + "dimensions":"longitude latitude plev19 time2", + "frequency":"monC", + "long_name":"Mole Fraction of CH4", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ch4", + "positive":"", + "standard_name":"mole_fraction_of_methane_in_air", + "type":"real", + "units":"mol mol-1", + "valid_max":"", + "valid_min":"" + }, + "ch4global":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Global Mean Mole Fraction of CH4", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Mean Mole Fraction of CH4", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ch4global", + "positive":"", + "standard_name":"mole_fraction_of_methane_in_air", + "type":"real", + "units":"1e-09", + "valid_max":"", + "valid_min":"" + }, + "ch4globalClim":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean within years time: mean over years", + "comment":"Global Mean Mole Fraction of CH4", + "dimensions":"time2", + "frequency":"monC", + "long_name":"Global Mean Mole Fraction of CH4", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ch4global", + "positive":"", + "standard_name":"mole_fraction_of_methane_in_air", + "type":"real", + "units":"1e-09", + 
"valid_max":"", + "valid_min":"" + }, + "ci":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Fraction of time that convection occurs in the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Fraction of Time Convection Occurs", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ci", + "positive":"", + "standard_name":"convection_time_fraction", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "cl":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Percentage cloud cover, including both large-scale and convective cloud.", + "dimensions":"longitude latitude alevel time", + "frequency":"mon", + "long_name":"Cloud Area Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cl", + "positive":"", + "standard_name":"cloud_area_fraction_in_atmosphere_layer", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "clCCI":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Percentage cloud cover in optical depth categories.", + "dimensions":"longitude latitude plev7c tau time", + "frequency":"mon", + "long_name":"CCI Cloud Area Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clCCI", + "positive":"", + "standard_name":"cloud_area_fraction_in_atmosphere_layer", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "clCLARA":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean", + "comment":"Percentage cloud cover in optical depth categories.", + "dimensions":"longitude latitude plev7c tau time", + "frequency":"mon", + "long_name":"CLARA Cloud Area Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clCLARA", + "positive":"", + "standard_name":"cloud_area_fraction_in_atmosphere_layer", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "cli":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Includes both large-scale and convective cloud. This is calculated as the mass of cloud ice in the grid cell divided by the mass of air (including the water in all phases) in the grid cell. It includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", + "dimensions":"longitude latitude alevel time", + "frequency":"mon", + "long_name":"Mass Fraction of Cloud Ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cli", + "positive":"", + "standard_name":"mass_fraction_of_cloud_ice_in_air", + "type":"real", + "units":"kg kg-1", + "valid_max":"", + "valid_min":"" + }, + "clivi":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"mass of ice water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating frozen hydrometeors ONLY if the precipitating hydrometeor affects the calculation of radiative transfer in model.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Ice Water Path", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clivi", + "positive":"", + "standard_name":"atmosphere_cloud_ice_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "clt":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. 
Includes both large-scale and convective cloud.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Total Cloud Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clt", + "positive":"", + "standard_name":"cloud_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "cltCCI":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"CCI Total Cloud Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cltCCI", + "positive":"", + "standard_name":"cloud_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "cltCLARA":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean", + "comment":"Total cloud area fraction for the whole atmospheric column, as seen from the surface or the top of the atmosphere. Includes both large-scale and convective cloud.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"CLARA Total Cloud Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cltCLARA", + "positive":"", + "standard_name":"cloud_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "clw":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Includes both large-scale and convective cloud. Calculate as the mass of cloud liquid water in the grid cell divided by the mass of air (including the water in all phases) in the grid cells. Precipitating hydrometeors are included ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", + "dimensions":"longitude latitude alevel time", + "frequency":"mon", + "long_name":"Mass Fraction of Cloud Liquid Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clw", + "positive":"", + "standard_name":"mass_fraction_of_cloud_liquid_water_in_air", + "type":"real", + "units":"kg kg-1", + "valid_max":"", + "valid_min":"" + }, + "clwCCI":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Percentage liquid cloud cover in optical depth categories.", + "dimensions":"longitude latitude plev7c tau time", + "frequency":"mon", + "long_name":"CCI Liquid Cloud Area Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clwCCI", + "positive":"", + "standard_name":"liquid_water_cloud_area_fraction_in_atmosphere_layer", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "clwCLARA":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean", + "comment":"Percentage liquid cloud cover in optical depth categories.", + "dimensions":"longitude latitude plev7c tau time", + "frequency":"mon", + "long_name":"CLARA Liquid Cloud Area Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clwCLARA", + "positive":"", + "standard_name":"liquid_water_cloud_area_fraction_in_atmosphere_layer", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "clwtCCI":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"CCI Total Liquid Cloud Area Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clwtCCI", + "positive":"", + 
"standard_name":"liquid_water_cloud_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "clwtCLARA":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"CLARA Total Liquid Cloud Area Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clwtCLARA", + "positive":"", + "standard_name":"liquid_water_cloud_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "clwvi":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Mass of condensed (liquid + ice) water in the column divided by the area of the column (not just the area of the cloudy portion of the column). Includes precipitating hydrometeors ONLY if the precipitating hydrometeors affect the calculation of radiative transfer in model.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Condensed Water Path", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"clwvi", + "positive":"", + "standard_name":"atmosphere_cloud_condensed_water_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "co2":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Mole Fraction of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co2", + "positive":"", + "standard_name":"mole_fraction_of_carbon_dioxide_in_air", + "type":"real", + "units":"mol mol-1", + "valid_max":"", + "valid_min":"" + }, + "co2Clim":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean within years time: mean over years", + "comment":"", + "dimensions":"longitude latitude plev19 time2", + "frequency":"monC", + "long_name":"Mole Fraction of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co2", + "positive":"", + "standard_name":"mole_fraction_of_carbon_dioxide_in_air", + "type":"real", + "units":"mol mol-1", + "valid_max":"", + "valid_min":"" + }, + "co2mass":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total atmospheric mass of Carbon Dioxide", + "dimensions":"time", + "frequency":"mon", + "long_name":"Total Atmospheric Mass of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co2mass", + "positive":"", + "standard_name":"atmosphere_mass_of_carbon_dioxide", + "type":"real", + "units":"kg", + "valid_max":"", + "valid_min":"" + }, + "co2massClim":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean within years time: mean over years", + "comment":"Total atmospheric mass of Carbon Dioxide", + "dimensions":"time2", + "frequency":"monC", + "long_name":"Total Atmospheric Mass of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co2mass", + "positive":"", + "standard_name":"atmosphere_mass_of_carbon_dioxide", + "type":"real", + "units":"kg", + "valid_max":"", + "valid_min":"" + }, + "evspsbl":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Evaporation at surface: flux of water into the atmosphere due to conversion of both liquid and solid phases to vapor (from underlying surface and vegetation)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Evaporation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"evspsbl", + "positive":"", + "standard_name":"water_evaporation_flux", + "type":"real", + "units":"kg m-2 s-1", + 
"valid_max":"", + "valid_min":"" + }, + "fco2antt":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"This is requested only for the emission-driven coupled carbon climate model runs. Does not include natural fire sources but, includes all anthropogenic sources, including fossil fuel use, cement production, agricultural burning, and sources associated with anthropogenic land use change excluding forest regrowth.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere Due to All Anthropogenic Emissions of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fco2antt", + "positive":"", + "standard_name":"tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_anthropogenic_emission", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fco2fos":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"This is the prescribed anthropogenic CO2 flux from fossil fuel use, including cement production, and flaring (but not from land-use changes, agricultural burning, forest regrowth, etc.)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere Due to Fossil Fuel Emissions of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fco2fos", + "positive":"", + "standard_name":"tendency_of_atmosphere_mass_content_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fossil_fuel_combustion", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fco2nat":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"This is what the atmosphere sees (on its own grid). This field should be equivalent to the combined natural fluxes of carbon that account for natural exchanges between the atmosphere and land (nep) or ocean (fgco2) reservoirs.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Carbon Mass Flux into the Atmosphere Due to Natural Sources", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fco2nat", + "positive":"", + "standard_name":"surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_natural_sources", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "hcfc22global":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Mean Mole Fraction of HCFC22", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hcfc22global", + "positive":"", + "standard_name":"mole_fraction_of_hcfc22_in_air", + "type":"real", + "units":"1e-12", + "valid_max":"", + "valid_min":"" + }, + "hfls":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Upward Latent Heat Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfls", + "positive":"up", + "standard_name":"surface_upward_latent_heat_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfss":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Upward Sensible Heat Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfss", + "positive":"up", + "standard_name":"surface_upward_sensible_heat_flux", + 
"type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hur":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Relative Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hur", + "positive":"", + "standard_name":"relative_humidity", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "hurs":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The relative humidity with respect to liquid water for T> 0 C, and with respect to ice for T<0 C.", + "dimensions":"longitude latitude time height2m", + "frequency":"mon", + "long_name":"Near-Surface Relative Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hurs", + "positive":"", + "standard_name":"relative_humidity", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "hus":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Specific Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hus", + "positive":"", + "standard_name":"specific_humidity", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "huss":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Near-surface (usually, 2 meter) specific humidity.", + "dimensions":"longitude latitude time height2m", + "frequency":"mon", + "long_name":"Near-Surface Specific Humidity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"huss", + "positive":"", + "standard_name":"specific_humidity", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "mc":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The net mass flux should represent the difference between the updraft and downdraft components. 
The flux is computed as the mass divided by the area of the grid cell.", + "dimensions":"longitude latitude alevhalf time", + "frequency":"mon", + "long_name":"Convective Mass Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mc", + "positive":"up", + "standard_name":"atmosphere_net_upward_convective_mass_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "n2o":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Mole Fraction of N2O", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"n2o", + "positive":"", + "standard_name":"mole_fraction_of_nitrous_oxide_in_air", + "type":"real", + "units":"mol mol-1", + "valid_max":"", + "valid_min":"" + }, + "n2oClim":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean within years time: mean over years", + "comment":"", + "dimensions":"longitude latitude plev19 time2", + "frequency":"monC", + "long_name":"Mole Fraction of N2O", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"n2o", + "positive":"", + "standard_name":"mole_fraction_of_nitrous_oxide_in_air", + "type":"real", + "units":"mol mol-1", + "valid_max":"", + "valid_min":"" + }, + "n2oglobal":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Global mean Nitrous Oxide (N2O)", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Mean Mole Fraction of N2O", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"n2oglobal", + "positive":"", + "standard_name":"mole_fraction_of_nitrous_oxide_in_air", + "type":"real", + "units":"1e-09", + "valid_max":"", + "valid_min":"" + }, + "n2oglobalClim":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean within years time: mean over years", + "comment":"Global mean Nitrous Oxide (N2O)", + "dimensions":"time2", + "frequency":"monC", + "long_name":"Global Mean Mole Fraction of N2O", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"n2oglobal", + "positive":"", + "standard_name":"mole_fraction_of_nitrous_oxide_in_air", + "type":"real", + "units":"1e-09", + "valid_max":"", + "valid_min":"" + }, + "o3":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Mole Fraction of O3", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"o3", + "positive":"", + "standard_name":"mole_fraction_of_ozone_in_air", + "type":"real", + "units":"mol mol-1", + "valid_max":"", + "valid_min":"" + }, + "o3Clim":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean within years time: mean over years", + "comment":"", + "dimensions":"longitude latitude plev19 time2", + "frequency":"monC", + "long_name":"Mole Fraction of O3", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"o3", + "positive":"", + "standard_name":"mole_fraction_of_ozone_in_air", + "type":"real", + "units":"mol mol-1", + "valid_max":"", + "valid_min":"" + }, + "pctCCI":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"CCI Mean Cloud Top Pressure", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pctCCI", + "positive":"", + "standard_name":"air_pressure_at_cloud_top", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "pctCLARA":{ + "cell_measures":"", + "cell_methods":"area: mean time: mean", + 
"comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"CLARA Mean Cloud Top Pressure", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pctCLARA", + "positive":"", + "standard_name":"air_pressure_at_cloud_top", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "pme":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Net flux of water (in all phases) between the atmosphere and underlying surface including vegetation), mainly resulting from the difference of precipitation and evaporation", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward Freshwater Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pme", + "positive":"", + "standard_name":"surface_downward_water_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "pr":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"includes both liquid and solid phases", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Precipitation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pr", + "positive":"", + "standard_name":"precipitation_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "prc":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Convective precipitation at surface; includes both liquid and solid phases.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Convective Precipitation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"prc", + "positive":"", + "standard_name":"convective_precipitation_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "prsn":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"at surface; includes precipitation of all forms of water in the solid phase", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snowfall Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"prsn", + "positive":"", + "standard_name":"snowfall_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "prw":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"vertically integrated through the atmospheric column", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Vapor Path", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"prw", + "positive":"", + "standard_name":"atmosphere_water_vapor_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "ps":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"surface pressure (not mean sea-level pressure), 2-D field to calculate the 3-D pressure field from hybrid coordinates", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Air Pressure", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ps", + "positive":"", + "standard_name":"surface_air_pressure", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "psl":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Sea Level Pressure", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea Level Pressure", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"psl", + "positive":"", + 
"standard_name":"air_pressure_at_sea_level", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "rlds":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downwelling Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rlds", + "positive":"down", + "standard_name":"surface_downwelling_longwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rldscs":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Surface downwelling clear-sky longwave radiation", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downwelling Clear-Sky Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rldscs", + "positive":"down", + "standard_name":"surface_downwelling_longwave_flux_in_air_assuming_clear_sky", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rlus":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Upwelling Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rlus", + "positive":"up", + "standard_name":"surface_upwelling_longwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rlut":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"at the top of the atmosphere (to be compared with satellite measurements)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"TOA Outgoing Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rlut", + "positive":"up", + "standard_name":"toa_outgoing_longwave_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rlutcs":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Upwelling clear-sky longwave radiation at top of atmosphere", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"TOA Outgoing Clear-Sky Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rlutcs", + "positive":"up", + "standard_name":"toa_outgoing_longwave_flux_assuming_clear_sky", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsds":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"surface solar irradiance for UV calculations", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downwelling Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsds", + "positive":"down", + "standard_name":"surface_downwelling_shortwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsdscs":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"surface solar irradiance clear sky for UV calculations", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downwelling Clear-Sky Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsdscs", + "positive":"down", + "standard_name":"surface_downwelling_shortwave_flux_in_air_assuming_clear_sky", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsdt":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + 
"comment":"Shortwave radiation incident at the top of the atmosphere", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"TOA Incident Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsdt", + "positive":"down", + "standard_name":"toa_incoming_shortwave_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsus":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Upwelling Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsus", + "positive":"up", + "standard_name":"surface_upwelling_shortwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsuscs":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Surface Upwelling Clear-sky Shortwave Radiation", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Upwelling Clear-Sky Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsuscs", + "positive":"up", + "standard_name":"surface_upwelling_shortwave_flux_in_air_assuming_clear_sky", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsut":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"at the top of the atmosphere", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"TOA Outgoing Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsut", + "positive":"up", + "standard_name":"toa_outgoing_shortwave_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsutcs":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Calculated in the absence of clouds.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"TOA Outgoing Clear-Sky Shortwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsutcs", + "positive":"up", + "standard_name":"toa_outgoing_shortwave_flux_assuming_clear_sky", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rtmt":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Net Downward Radiative Flux at Top of Model : I.e., at the top of that portion of the atmosphere where dynamics are explicitly treated by the model. 
This is reported only if it differs from the net downward radiative flux at the top of the atmosphere.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net Downward Flux at Top of Model", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rtmt", + "positive":"down", + "standard_name":"net_downward_radiative_flux_at_top_of_atmosphere_model", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "sbl":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The snow and ice sublimation flux is the loss of snow and ice mass per unit area from the surface resulting from their direct conversion to water vapor that enters the atmosphere.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Snow and Ice Sublimation Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sbl", + "positive":"", + "standard_name":"surface_snow_and_ice_sublimation_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sci":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Fraction of time that shallow convection occurs in the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Fraction of Time Shallow Convection Occurs", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sci", + "positive":"", + "standard_name":"shallow_convection_time_fraction", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "sfcWind":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"near-surface (usually, 10 meters) wind speed.", + "dimensions":"longitude latitude time height10m", + "frequency":"mon", + "long_name":"Near-Surface Wind Speed", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sfcWind", + "positive":"", + "standard_name":"wind_speed", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "ta":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Air Temperature", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Air Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ta", + "positive":"", + "standard_name":"air_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "tas":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"near-surface (usually, 2 meter) air temperature", + "dimensions":"longitude latitude time height2m", + "frequency":"mon", + "long_name":"Near-Surface Air Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tas", + "positive":"", + "standard_name":"air_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "tasmax":{ + "cell_measures":"", + "cell_methods":"area: mean time: maximum within days time: mean over days", + "comment":"maximum near-surface (usually, 2 meter) air temperature (add cell_method attribute 'time: max')", + "dimensions":"longitude latitude time height2m", + "frequency":"mon", + "long_name":"Daily Maximum Near-Surface Air Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tasmax", + "positive":"", + "standard_name":"air_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "tasmin":{ + "cell_measures":"", + "cell_methods":"area: mean time: minimum within days time: mean over days", + "comment":"minimum near-surface (usually, 2 meter) air 
temperature (add cell_method attribute 'time: min')", + "dimensions":"longitude latitude time height2m", + "frequency":"mon", + "long_name":"Daily Minimum Near-Surface Air Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tasmin", + "positive":"", + "standard_name":"air_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "tauu":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Downward eastward wind stress at the surface", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward Eastward Wind Stress", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tauu", + "positive":"down", + "standard_name":"surface_downward_eastward_stress", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "tauv":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Downward northward wind stress at the surface", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward Northward Wind Stress", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tauv", + "positive":"down", + "standard_name":"surface_downward_northward_stress", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "ts":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Temperature of the lower boundary of the atmosphere", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ts", + "positive":"", + "standard_name":"surface_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "ttbr":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Top of Atmosphere Brightness Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ttbr", + "positive":"", + "standard_name":"toa_brightness_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "ua":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Eastward Wind", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ua", + "positive":"", + "standard_name":"eastward_wind", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "uas":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Eastward component of the near-surface (usually, 10 meters) wind", + "dimensions":"longitude latitude time height10m", + "frequency":"mon", + "long_name":"Eastward Near-Surface Wind", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"uas", + "positive":"", + "standard_name":"eastward_wind", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "va":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Northward Wind", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"va", + "positive":"", + "standard_name":"northward_wind", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "vas":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Northward component of the near surface wind", + "dimensions":"longitude latitude time height10m", + 
"frequency":"mon", + "long_name":"Northward Near-Surface Wind", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vas", + "positive":"", + "standard_name":"northward_wind", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "wap":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Omega (vertical velocity in pressure coordinates, positive downwards)", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"omega (=dp/dt)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"wap", + "positive":"", + "standard_name":"lagrangian_tendency_of_air_pressure", + "type":"real", + "units":"Pa s-1", + "valid_max":"", + "valid_min":"" + }, + "zg":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude plev19 time", + "frequency":"mon", + "long_name":"Geopotential Height", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zg", + "positive":"", + "standard_name":"geopotential_height", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_CV.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_CV.json new file mode 100644 index 0000000000..5f30ba78aa --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_CV.json @@ -0,0 +1,351 @@ +{ + "CV":{ + "activity_id":"obs4MIPs", + "frequency":{ + "1hr":"sampled hourly", + "1hrCM":"monthly-mean diurnal cycle resolving each day into 1-hour means", + "1hrPt":"sampled hourly, at specified time point within an hour", + "3hr":"sampled every 3 hours", + "3hrPt":"sampled 3 hourly, at specified time point within the time period", + "6hr":"sampled every 6 hours", + "6hrPt":"sampled 6 hourly, at specified time point within the time period", + "day":"daily mean samples", + "dec":"decadal mean samples", + "fx":"fixed (time invariant) field", + "mon":"monthly mean samples", + "monC":"monthly climatology computed from monthly mean samples", + "monPt":"sampled monthly, at specified time point within the time period", + "subhrPt":"sampled sub-hourly, at specified time point within an hour", + "yr":"annual mean samples", + "yrPt":"sampled yearly, at specified time point within the time period" + }, + "grid_label":{ + "gm":"global mean data", + "gn":"data reported on a model's native grid", + "gna":"data reported on a native grid in the region of Antarctica", + "gng":"data reported on a native grid in the region of Greenland", + "gnz":"zonal mean data reported on a model's native latitude grid", + "gr":"regridded data reported on the data provider's preferred target grid", + "gr1":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr1a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr1g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr1z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr2":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr2a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr2g":"regridded data reported in the region of 
Greenland on a grid other than the native grid and other than the preferred target grid", + "gr2z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr3":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr3a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr3g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr3z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr4":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr4a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr4g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr4z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr5":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr5a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr5g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr5z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr6":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr6a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr6g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr6z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr7":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr7a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr7g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr7z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr8":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr8a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr8g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr8z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr9":"regridded data reported on a grid other than the native grid and other than 
the preferred target grid", + "gr9a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr9g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr9z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gra":"regridded data in the region of Antarctica reported on the data provider's preferred target grid", + "grg":"regridded data in the region of Greenland reported on the data provider's preferred target grid", + "grz":"regridded zonal mean data reported on the data provider's preferred latitude target grid" + }, + "institution_id":{ + "DWD":"Deutscher Wetterdienst, Offenbach 63067, Germany", + "NASA-JPL":"NASA's Jet Propulsion Laboratory, Pasadena, CA 91109, USA", + "NOAA-NCEI":"NOAA's National Centers for Environmental Information, Asheville, NC 28801, USA", + "PCMDI":"Program for Climate Model Diagnosis and Intercomparison, Lawrence Livermore National Laboratory, Livermore, CA 94550, USA", + "RSS":"Remote Sensing Systems, Santa Rosa, CA 95401, USA" + }, + "license":"Data in this file produced by is licensed under a Creative Commons Attribution-ShareAlike 4.0 International License (https://creativecommons.org/licenses/). Use of the data must be acknowledged following guidelines found at . Further information about this data, including some limitations, can be found via .)", + "nominal_resolution":[ + "0.5 km", + "1 km", + "10 km", + "100 km", + "1000 km", + "10000 km", + "1x1 degree", + "2.5 km", + "25 km", + "250 km", + "2500 km", + "5 km", + "50 km", + "500 km", + "5000 km" + ], + "product":[ + "observations", + "reanalysis" + ], + "realm":[ + "aerosol", + "atmos", + "atmosChem", + "land", + "landIce", + "ocean", + "ocnBgchem", + "seaIce" + ], + "region":[ + "africa", + "antarctica", + "arabian_sea", + "aral_sea", + "arctic_ocean", + "asia", + "atlantic_ocean", + "australia", + "baltic_sea", + "barents_opening", + "barents_sea", + "beaufort_sea", + "bellingshausen_sea", + "bering_sea", + "bering_strait", + "black_sea", + "canadian_archipelago", + "caribbean_sea", + "caspian_sea", + "central_america", + "chukchi_sea", + "contiguous_united_states", + "denmark_strait", + "drake_passage", + "east_china_sea", + "english_channel", + "eurasia", + "europe", + "faroe_scotland_channel", + "florida_bahamas_strait", + "fram_strait", + "global", + "global_land", + "global_ocean", + "great_lakes", + "greenland", + "gulf_of_alaska", + "gulf_of_mexico", + "hudson_bay", + "iceland_faroe_channel", + "indian_ocean", + "indo_pacific_ocean", + "indonesian_throughflow", + "irish_sea", + "lake_baykal", + "lake_chad", + "lake_malawi", + "lake_tanganyika", + "lake_victoria", + "mediterranean_sea", + "mozambique_channel", + "north_america", + "north_sea", + "norwegian_sea", + "pacific_equatorial_undercurrent", + "pacific_ocean", + "persian_gulf", + "red_sea", + "ross_sea", + "sea_of_japan", + "sea_of_okhotsk", + "south_america", + "south_china_sea", + "southern_ocean", + "taiwan_luzon_straits", + "weddell_sea", + "windward_passage", + "yellow_sea" + ], + "required_global_attributes":[ + "Conventions", + "activity_id", + "contact", + "creation_date", + "data_specs_version", + "frequency", + "grid", + "grid_label", + "institution", + "institution_id", + "license", + "nominal_resolution", + "product", + "realm", + "source_id", + "table_id", + "tracking_id", + 
"variable_id", + "variant_label" + ], + "source_id":{ + "AIRS-1-0":{ + "region":"global", + "source":"AIRS 1.0 (2011): Atmospheric Infrared Sounder", + "source_label":"AIRS", + "source_type":"satellite_retrieval", + "source_version_number":"1.0" + }, + "Aura-MLS-v04-2":{ + "region":"global", + "source":"Aura-MLS v04.2 (2018): EOS Aura Microwave Limb Sounder", + "source_label":"Aura-MLS", + "source_type":"satellite_retrieval", + "source_version_number":"v04.2" + }, + "CMSAF-CLARA-A-2-0":{ + "region":"global", + "source":"CMSAF-CLARA-A 2.0 (2017): CM SAF cLoud, Albedo and surface RAdiation dataset from AVHRR data", + "source_label":"CMSAF-CLARA-A", + "source_type":"satellite_retrieval", + "source_version_number":"2.0" + }, + "CMSAF-HOAPS-4-0":{ + "region":"global_ocean", + "source":"CMSAF-HOAPS 4.0 (2017): Hamburg Ocean Atmosphere Parameters and fluxes from Satellite data, based on SSM/I and SSMIS aboard DMSP", + "source_label":"CMSAF-HOAPS", + "source_type":"satellite_retrieval", + "source_version_number":"4.0" + }, + "CMSAF-SARAH-2-0":{ + "region":"africa, atlantic_ocean, europe", + "source":"CMSAF-SARAH 2.0 (2017): Surface solAr RAdiation data set - Heliosat, based on MVIRI/SEVIRI aboard METEOSAT", + "source_label":"CMSAF-SARAH", + "source_type":"satellite_retrieval", + "source_version_number":"2.0" + }, + "ESACCI-CLOUD-ATSR2-AATSR-2-0":{ + "region":"global", + "source":"ESACCI-CLOUD-ATSR2-AATSR 2.0 (2017): Cloud properties derived from ATSR2 and AATSR (aboard ERS2 and ENVISAT) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets.", + "source_label":"ESACCI-CLOUD-ATSR2-AATSR", + "source_type":"satellite_retrieval", + "source_version_number":"2.0" + }, + "ESACCI-CLOUD-AVHRR-AM-2-0":{ + "region":"global", + "source":"ESACCI-CLOUD-AVHRR-AM 2.0 (2017): Cloud properties derived from AVHRR (aboard NOAA and MetOp AM) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets", + "source_label":"ESACCI-CLOUD-AVHRR-AM", + "source_type":"satellite_retrieval", + "source_version_number":"2.0" + }, + "ESACCI-CLOUD-AVHRR-PM-2-0":{ + "region":"global", + "source":"ESACCI-CLOUD-AVHRR-PM 2.0 (2017): Cloud properties derived from AVHRR (aboard NOAA and MetOp PM) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets", + "source_label":"ESACCI-CLOUD-AVHRR-PM", + "source_type":"satellite_retrieval", + "source_version_number":"2.0" + }, + "ESACCI-CLOUD-MERIS-AATSR-2-0":{ + "region":"global", + "source":"ESACCI-CLOUD-MERIS-AATSR 2.0 (2017): Cloud properties derived from MERIS and AATSR (aboard ENVISAT) measurements. 
This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets.", + "source_label":"ESACCI-CLOUD-MERIS-AATSR", + "source_type":"satellite_retrieval", + "source_version_number":"2.0" + }, + "GNSS-RO-1-3":{ + "region":"global", + "source":"GNSS-RO 1.3 (2016): Global Navigation Satellite Systems Radio Occultation", + "source_label":"GNSS-RO", + "source_type":"satellite_retrieval", + "source_version_number":"1.3" + }, + "NOAA-NCEI-AVHRR-NDVI-4-0":{ + "region":"global_land", + "source":"NOAA-NCEI-AVHRR-NDVI 4.0 (2013): Normalized Difference Vegetation Index", + "source_label":"NOAA-NCEI-AVHRR-NDVI", + "source_type":"satellite_retrieval", + "source_version_number":"4.0" + }, + "NOAA-NCEI-ERSST-4-0":{ + "region":"global_ocean", + "source":"NOAA-NCEI-ERSST 4.0 (2015): Extended Reconstructed Sea Surface Temperatures", + "source_label":"NOAA-NCEI-ERSST", + "source_type":"gridded_insitu", + "source_version_number":"4.0" + }, + "NOAA-NCEI-FAPAR-4-0":{ + "region":"global_land", + "source":"NOAA-NCEI-FAPAR 4.0 (2014): AVHRR Fraction of Absorbed Photosynthetically Active Radiation", + "source_label":"NOAA-NCEI-FAPAR", + "source_type":"satellite_retrieval", + "source_version_number":"4.0" + }, + "NOAA-NCEI-GridSat-4-0":{ + "region":"global", + "source":"NOAA-NCEI-GridSat 4.0 (2015): Gridded Satellite ISCCP B1 11 Micron Brightness Temperature", + "source_label":"NOAA-NCEI-GridSat", + "source_type":"satellite_retrieval", + "source_version_number":"4.0" + }, + "NOAA-NCEI-LAI-4-0":{ + "region":"global_land", + "source":"NOAA-NCEI-LAI 4.0 (2014): AVHRR Leaf Area Index", + "source_label":"NOAA-NCEI-LAI", + "source_type":"satellite_retrieval", + "source_version_number":"4.0" + }, + "NOAA-NCEI-PERSIANN-1-1":{ + "region":"global", + "source":"NOAA-NCEI-PERSIANN 1.1 (2014): Precipitation Estimation from Remotely Sensed Information using Artificial Neural Network", + "source_label":"NOAA-NCEI-PERSIANN", + "source_type":"satellite_retrieval", + "source_version_number":"1.1" + }, + "NOAA-NCEI-SeaWinds-1-2":{ + "region":"global_ocean", + "source":"NOAA-NCEI-SeaWinds 1.2 (2008): Blended Sea Surface Winds", + "source_label":"NOAA-NCEI-SeaWinds", + "source_type":"satellite_blended", + "source_version_number":"1.2" + }, + "QuikSCAT-v20110531":{ + "region":"global", + "source":"QuikSCAT v20110531 (2011): QuikSCAT winds monthly climatology derived from QuikSCAT L2B using the 2006 model function and processing algorithms.", + "source_label":"QuikSCAT", + "source_type":"satellite_retrieval", + "source_version_number":"v20110531" + }, + "REMSS-PRW-6-6-0":{ + "region":"global", + "source":"REMSS-PRW 6.6.0 (2017): Water Vapor Path", + "source_label":"REMSS-PRW", + "source_type":"satellite_blended", + "source_version_number":"6.6.0" + } + }, + "source_type":{ + "gridded_insitu":"gridded product based on measurements collected from in-situ instruments", + "reanalysis":"gridded product generated from a model reanalysis based on in-situ instruments and possibly satellite measurements", + "satellite_blended":"gridded product based on both in-situ instruments and satellite measurements", + "satellite_retrieval":"gridded product based on satellite measurements" + }, + "table_id":[ + "obs4MIPs_Aday", + "obs4MIPs_Amon", + "obs4MIPs_Lmon", + "obs4MIPs_Omon", + "obs4MIPs_SImon", + "obs4MIPs_fx", + "obs4MIPs_monNobs", + "obs4MIPs_monStderr" + ] + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Lmon.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Lmon.json new 
file mode 100644 index 0000000000..55666ea58e --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Lmon.json @@ -0,0 +1,954 @@ +{ + "Header":{ + "#dataRequest_specs_version":"01.00.21", + "#mip_era":"CMIP6", + "Conventions":"CF-1.7 ODS-2.1", + "approx_interval":"30.00000", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "generic_levels":"", + "int_missing_value":"-2147483648", + "missing_value":"1e20", + "product":"observations", + "realm":"land", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_Lmon" + }, + "variable_entry":{ + "baresoilFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by bare soil.", + "dimensions":"longitude latitude time typebare", + "frequency":"mon", + "long_name":"Bare Soil Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"baresoilFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "burntFractionAll":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of grid cell burned due to all fires including natural and anthropogenic fires and those associated with anthropogenic land use change", + "dimensions":"longitude latitude time typeburnt", + "frequency":"mon", + "long_name":"Percentage of Entire Grid cell that is Covered by Burnt Vegetation (All Classes)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"burntFractionAll", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "c3PftFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by C3 PFTs (including grass, crops, and trees).", + "dimensions":"longitude latitude time typec3pft", + "frequency":"mon", + "long_name":"Total C3 PFT Cover Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"c3PftFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "c4PftFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by C4 PFTs (including grass and crops).", + "dimensions":"longitude latitude time typec4pft", + "frequency":"mon", + "long_name":"Total C4 PFT Cover Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"c4PftFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "cCwd":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass per unit area in woody debris (dead organic matter composed of coarse wood. 
It is distinct from litter)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Coarse Woody Debris", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cCwd", + "positive":"", + "standard_name":"wood_debris_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cLeaf":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass per unit area in leaves.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Leaves", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cLeaf", + "positive":"", + "standard_name":"leaf_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cLitter":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Litter Pool", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cLitter", + "positive":"", + "standard_name":"litter_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cLitterAbove":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Above-Ground Litter", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cLitterAbove", + "positive":"", + "standard_name":"surface_litter_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cLitterBelow":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Below-Ground Litter", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cLitterBelow", + "positive":"", + "standard_name":"subsurface_litter_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cProduct":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass per unit area in products that have been removed from the environment through land use change.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Products of Land Use Change", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cProduct", + "positive":"", + "standard_name":"carbon_content_of_products_of_anthropogenic_land_use_change", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cRoot":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass per unit area in roots, including fine and coarse roots.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Roots", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cRoot", + "positive":"", + "standard_name":"root_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cSoilFast":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass per unit area in fast soil pool. 
Fast means a lifetime of less than 10 years for reference climate conditions (20th century) in the absence of water limitations.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Fast Soil Pool", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cSoilFast", + "positive":"", + "standard_name":"fast_soil_pool_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cSoilMedium":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass per unit area in medium (rate) soil pool. Medium means a lifetime of more than 10 years and less than 100 years for reference climate conditions (20th century) in the absence of water limitations.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Medium Soil Pool", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cSoilMedium", + "positive":"", + "standard_name":"medium_soil_pool_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cSoilSlow":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass per unit area in slow soil pool. Slow means a lifetime of more than 100 years for reference climate (20th century) in the absence of water limitations.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Slow Soil Pool", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cSoilSlow", + "positive":"", + "standard_name":"slow_soil_pool_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cVeg":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass per unit area in vegetation.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass in Vegetation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cVeg", + "positive":"", + "standard_name":"vegetation_carbon_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "cropFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by crop.", + "dimensions":"longitude latitude time typecrop", + "frequency":"mon", + "long_name":"Crop Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cropFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "evspsblsoi":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Water evaporation from soil (including sublimation).", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Evaporation from Soil", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"evspsblsoi", + "positive":"", + "standard_name":"water_evaporation_flux_from_soil", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "evspsblveg":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The canopy evaporation and sublimation (if present in model); may include dew formation as a negative flux.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Evaporation from Canopy", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"evspsblveg", + 
"positive":"", + "standard_name":"water_evaporation_flux_from_canopy", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fFire":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"CO2 emissions (expressed as a carbon mass flux per unit area) from natural fires and human ignition fires as calculated by the fire module of the dynamic vegetation model, but excluding any CO2 flux from fire included in fLuc (CO2 Flux to Atmosphere from Land Use Change).", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere due to CO2 Emission from Fire", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fFire", + "positive":"up", + "standard_name":"surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_fires_excluding_anthropogenic_land_use_change", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fGrazing":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass flux per unit area due to grazing on land", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere due to Grazing on Land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fGrazing", + "positive":"up", + "standard_name":"surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_grazing", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fHarvest":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass flux per unit area due to crop harvesting", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere due to Crop Harvesting", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fHarvest", + "positive":"up", + "standard_name":"surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_crop_harvesting", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fLitterSoil":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass flux per unit area into soil from litter (dead plant material in or above the soil).", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Total Carbon Mass Flux from Litter to Soil", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fLitterSoil", + "positive":"", + "standard_name":"carbon_mass_flux_into_soil_from_litter", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fVegLitter":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Total Carbon Mass Flux from Vegetation to Litter", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fVegLitter", + "positive":"", + "standard_name":"litter_carbon_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fVegSoil":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass flux per unit area from vegetation directly into soil, without intermediate conversion to litter.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Total Carbon Mass Flux from Vegetation Directly to Soil", + "ok_max_mean_abs":"", + 
"ok_min_mean_abs":"", + "out_name":"fVegSoil", + "positive":"", + "standard_name":"carbon_mass_flux_into_soil_from_vegetation_excluding_litter", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fapar":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The fraction of incoming solar radiation in the photosynthetically active radiation spectral region that is absorbed by a vegetation canopy.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Fraction of Absorbed Photosynthetically Active Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fapar", + "positive":"", + "standard_name":"fraction_of_surface_downwelling_photosynthetic_radiative_flux_absorbed_by_vegetation", + "type":"real", + "units":"1", + "valid_max":"1.0", + "valid_min":"0.0" + }, + "gpp":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux out of Atmosphere due to Gross Primary Production on Land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"gpp", + "positive":"", + "standard_name":"gross_primary_productivity_of_biomass_expressed_as_carbon", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "grassFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by natural grass.", + "dimensions":"longitude latitude time typenatgr", + "frequency":"mon", + "long_name":"Natural Grass Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"grassFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "lai":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Leaf Area Index", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"lai", + "positive":"", + "standard_name":"leaf_area_index", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "landCoverFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of grid cell area occupied by different model vegetation/land cover categories. The categories may differ from model to model, depending on each model's subgrid land cover category definitions. Categories may include natural vegetation, anthropogenic vegetation, bare soil, lakes, urban areas, glaciers, etc. 
Sum of all should equal the fraction of the grid-cell that is land.", + "dimensions":"longitude latitude vegtype time", + "frequency":"mon", + "long_name":"Plant Functional Type Grid Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"landCoverFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "mrfso":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The mass per unit area (summed over all model layers) of frozen water.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Soil Frozen Water Content", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrfso", + "positive":"", + "standard_name":"soil_frozen_water_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "mrro":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The total run-off (including drainage through the base of the soil model) per unit area leaving the land portion of the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Total Runoff", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrro", + "positive":"", + "standard_name":"runoff_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "mrros":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The total surface run off leaving the land portion of the grid cell (excluding drainage through the base of the soil model).", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Runoff", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrros", + "positive":"", + "standard_name":"surface_runoff_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "mrso":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"the mass per unit area (summed over all soil layers) of water in all phases.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Total Soil Moisture Content", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrso", + "positive":"", + "standard_name":"soil_moisture_content", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "mrsos":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The mass of water in all phases in the upper 10cm of the soil layer.", + "dimensions":"longitude latitude time sdepth1", + "frequency":"mon", + "long_name":"Moisture in Upper Portion of Soil Column", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrsos", + "positive":"", + "standard_name":"moisture_content_of_soil_layer", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "nbp":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"This is the net mass flux of carbon from atmosphere into land, calculated as photosynthesis MINUS the sum of plant and soil respiration, carbon fluxes from fire, harvest, grazing and land use change. 
Positive flux is into the land.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux out of Atmosphere due to Net Biospheric Production on Land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"nbp", + "positive":"down", + "standard_name":"surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "ndvi":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Normalized Difference Vegetation Index", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ndvi", + "positive":"", + "standard_name":"normalized_difference_vegetation_index", + "type":"real", + "units":"1", + "valid_max":"1.0", + "valid_min":"-0.1" + }, + "npp":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux out of Atmosphere due to Net Primary Production on Land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"npp", + "positive":"down", + "standard_name":"net_primary_productivity_of_biomass_expressed_as_carbon", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "nppLeaf":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"This is the rate of carbon uptake by leaves due to NPP", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux due to NPP Allocation to Leaf", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"nppLeaf", + "positive":"down", + "standard_name":"net_primary_productivity_of_carbon_accumulated_in_leaves", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "nppRoot":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"This is the rate of carbon uptake by roots due to NPP", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux due to NPP Allocation to Roots", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"nppRoot", + "positive":"down", + "standard_name":"net_primary_productivity_of_carbon_accumulated_in_roots", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "nppWood":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"This is the rate of carbon uptake by wood due to NPP", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux due to NPP Allocation to Wood", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"nppWood", + "positive":"down", + "standard_name":"net_primary_productivity_of_carbon_accumulated_in_wood", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "pastureFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by anthropogenic pasture.", + "dimensions":"longitude latitude time typepasture", + "frequency":"mon", + "long_name":"Anthropogenic Pasture Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pastureFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + 
}, + "prveg":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"The precipitation flux that is intercepted by the vegetation canopy (if present in model) before reaching the ground.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Precipitation onto Canopy", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"prveg", + "positive":"", + "standard_name":"precipitation_flux_onto_canopy", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "rGrowth":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere due to Growth Autotrophic Respiration on Land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rGrowth", + "positive":"up", + "standard_name":"surface_upward_carbon_mass_flux_due_to_plant_respiration_for_biomass_growth", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "rMaint":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere due to Maintenance Autotrophic Respiration on Land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rMaint", + "positive":"up", + "standard_name":"surface_upward_carbon_mass_flux_due_to_plant_respiration_for_biomass_maintenance", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "ra":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass flux per unit area into atmosphere due to autotrophic respiration on land (respiration by producers) [see rh for heterotrophic production]", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere due to Autotrophic (Plant) Respiration on Land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ra", + "positive":"up", + "standard_name":"plant_respiration_carbon_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "residualFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is land and is covered by neither vegetation nor bare-soil (e.g., urban, ice, lakes, etc.)", + "dimensions":"longitude latitude time typeresidual", + "frequency":"mon", + "long_name":"Fraction of Grid Cell that is Land but Neither Vegetation-Covered nor Bare Soil", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"residualFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "rh":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Carbon mass flux per unit area into atmosphere due to heterotrophic respiration on land (respiration by consumers)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Carbon Mass Flux into Atmosphere due to Heterotrophic Respiration on Land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rh", + "positive":"up", + "standard_name":"heterotrophic_respiration_carbon_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "shrubFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land 
over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by shrub.", + "dimensions":"longitude latitude time typeshrub", + "frequency":"mon", + "long_name":"Shrub Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"shrubFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "tran":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Transpiration (may include dew formation as a negative flux).", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Transpiration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tran", + "positive":"up", + "standard_name":"transpiration_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "treeFrac":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by trees.", + "dimensions":"longitude latitude time typetree", + "frequency":"mon", + "long_name":"Tree Cover Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"treeFrac", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "treeFracPrimDec":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of the entire grid cell that is covered by total primary deciduous trees.", + "dimensions":"longitude latitude time typepdec", + "frequency":"mon", + "long_name":"Total Primary Deciduous Tree Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"treeFracPrimDec", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "treeFracPrimEver":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by primary evergreen trees.", + "dimensions":"longitude latitude time typepever", + "frequency":"mon", + "long_name":"Total Primary Evergreen Tree Cover Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"treeFracPrimEver", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "treeFracSecDec":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by secondary deciduous trees.", + "dimensions":"longitude latitude time typesdec", + "frequency":"mon", + "long_name":"Total Secondary Deciduous Tree Cover Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"treeFracSecDec", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "treeFracSecEver":{ + "cell_measures":"", + "cell_methods":"area: mean where land over all_area_types time: mean", + "comment":"Percentage of entire grid cell that is covered by secondary evergreen trees.", + "dimensions":"longitude latitude time typesever", + "frequency":"mon", + "long_name":"Total Secondary Evergreen Tree Cover Fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"treeFracSecEver", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + 
"tsl":{ + "cell_measures":"", + "cell_methods":"area: mean where land time: mean", + "comment":"Temperature of soil. Reported as missing for grid cells with no land.", + "dimensions":"longitude latitude sdepth time", + "frequency":"mon", + "long_name":"Temperature of Soil", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tsl", + "positive":"", + "standard_name":"soil_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Omon.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Omon.json new file mode 100644 index 0000000000..2c5306fcb9 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_Omon.json @@ -0,0 +1,4609 @@ +{ + "Header":{ + "#dataRequest_specs_version":"01.00.21", + "#mip_era":"CMIP6", + "Conventions":"CF-1.7 ODS-2.1", + "approx_interval":"30.00000", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "generic_levels":"olevel", + "int_missing_value":"-2147483648", + "missing_value":"1e20", + "product":"observations", + "realm":"ocean", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_Omon" + }, + "variable_entry":{ + "agessc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Time elapsed since water was last in surface layer of the ocean.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sea Water Age Since Surface Contact", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"agessc", + "positive":"", + "standard_name":"sea_water_age_since_surface_contact", + "type":"real", + "units":"yr", + "valid_max":"", + "valid_min":"" + }, + "arag":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of particulate aragonite components (e.g. 
Phytoplankton, Detrital, etc.)", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Aragonite Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"arag", + "positive":"", + "standard_name":"mole_concentration_of_aragonite_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "bacc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of bacterial carbon component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Bacterial Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"bacc", + "positive":"", + "standard_name":"mole_concentration_of_bacteria_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "bfe":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of particulate organic iron component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Particulate Organic Matter expressed as Iron in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"bfe", + "positive":"", + "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "bfeos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of particulate organic iron component concentrations", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mole Concentration of Particulate Organic Matter expressed as Iron in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"bfeos", + "positive":"", + "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_iron_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "bigthetao":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sea water conservative temperature (this should be contributed only for models using conservative temperature as prognostic field)", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sea Water Conservative Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"bigthetao", + "positive":"", + "standard_name":"sea_water_conservative_temperature", + "type":"real", + "units":"degC", + "valid_max":"", + "valid_min":"" + }, + "bigthetaoga":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Diagnostic should be contributed only for models using conservative temperature as prognostic field.", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Average Sea Water Conservative Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"bigthetaoga", + "positive":"", + "standard_name":"sea_water_conservative_temperature", + "type":"real", + "units":"degC", + "valid_max":"", + "valid_min":"" + }, + "bsi":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of particulate silica component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Particulate Organic Matter expressed as silicon in sea water", + 
"ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"bsi", + "positive":"", + "standard_name":"mole_concentration_of_particulate_matter_expressed_as_silicon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "bsios":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of particulate silica component concentrations", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mole Concentration of Particulate Organic Matter expressed as Silicon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"bsios", + "positive":"", + "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_silicon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "calc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of particulate calcite component concentrations (e.g. Phytoplankton, Detrital, etc.)", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Calcite Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"calc", + "positive":"", + "standard_name":"mole_concentration_of_calcite_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "cfc11":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Moles Per Unit Mass of CFC-11 in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cfc11", + "positive":"", + "standard_name":"mole_concentration_of_cfc11_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "cfc12":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Moles Per Unit Mass of CFC-12 in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"cfc12", + "positive":"", + "standard_name":"mole_concentration_of_cfc12_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "chl":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of chlorophyll from all phytoplankton group concentrations. 
In most models this is equal to chldiat+chlmisc, that is the sum of Diatom Chlorophyll Mass Concentration and Other Phytoplankton Chlorophyll Mass Concentration", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mass Concentration of Total Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chl", + "positive":"", + "standard_name":"mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chlcalc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"chlorophyll concentration from the calcite-producing phytoplankton component alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chlcalc", + "positive":"", + "standard_name":"mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chlcalcos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"chlorophyll concentration from the calcite-producing phytoplankton component alone", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mass Concentration of Calcareous Phytoplankton expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chlcalcos", + "positive":"", + "standard_name":"mass_concentration_of_calcareous_phytoplankton_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chldiat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Chlorophyll from diatom phytoplankton component concentration alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mass Concentration of Diatom expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chldiat", + "positive":"", + "standard_name":"mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chldiatos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"chlorophyll from diatom phytoplankton component concentration alone", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mass Concentration of Diatoms expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chldiatos", + "positive":"", + "standard_name":"mass_concentration_of_diatoms_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chldiaz":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Chlorophyll concentration from the diazotrophic phytoplankton component alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mass Concentration of Diazotrophs expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chldiaz", + "positive":"", + "standard_name":"mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water", + "type":"real", + 
"units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chldiazos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"chlorophyll concentration from the diazotrophic phytoplankton component alone", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mass Concentration of Diazotrophs expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chldiazos", + "positive":"", + "standard_name":"mass_concentration_of_diazotrophs_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chlmisc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Chlorophyll from additional phytoplankton component concentrations alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mass Concentration of Other Phytoplankton expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chlmisc", + "positive":"", + "standard_name":"mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chlmiscos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"chlorophyll from additional phytoplankton component concentrations alone", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mass Concentration of Other Phytoplankton expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chlmiscos", + "positive":"", + "standard_name":"mass_concentration_of_miscellaneous_phytoplankton_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chlos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of chlorophyll from all phytoplankton group concentrations at the sea surface. 
In most models this is equal to chldiat+chlmisc, that is the sum of 'Diatom Chlorophyll Mass Concentration' plus 'Other Phytoplankton Chlorophyll Mass Concentration'", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mass Concentration of Total Phytoplankton expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chlos", + "positive":"", + "standard_name":"mass_concentration_of_phytoplankton_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chlpico":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"chlorophyll concentration from the picophytoplankton (<2 um) component alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mass Concentration of Picophytoplankton expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chlpico", + "positive":"", + "standard_name":"mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "chlpicoos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"chlorophyll concentration from the picophytoplankton (<2 um) component alone", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mass Concentration of Picophytoplankton expressed as Chlorophyll in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"chlpicoos", + "positive":"", + "standard_name":"mass_concentration_of_picophytoplankton_expressed_as_chlorophyll_in_sea_water", + "type":"real", + "units":"kg m-3", + "valid_max":"", + "valid_min":"" + }, + "co3":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Carbonate ion Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co3", + "positive":"", + "standard_name":"mole_concentration_of_carbonate_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "co3abio":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Abiotic Carbonate ion Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co3abio", + "positive":"", + "standard_name":"mole_concentration_of_carbonate_abiotic_analogue_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "co3nat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Natural Carbonate ion Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co3nat", + "positive":"", + "standard_name":"mole_concentration_of_carbonate_natural_analogue_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "co3satarag":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Carbonate ion Concentration for sea water in equilibrium with pure 
Aragonite", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co3satarag", + "positive":"", + "standard_name":"mole_concentration_of_carbonate_expressed_as_carbon_at_equilibrium_with_pure_aragonite_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "co3satcalc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Carbonate ion Concentration for sea water in equilibrium with pure Calcite", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"co3satcalc", + "positive":"", + "standard_name":"mole_concentration_of_carbonate_expressed_as_carbon_at_equilibrium_with_pure_calcite_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "detoc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of detrital organic carbon component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Detrital Organic Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"detoc", + "positive":"", + "standard_name":"mole_concentration_of_organic_detritus_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dfe":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Dissolved iron in sea water, including both Fe2+ and Fe3+ ions (but not particulate detrital iron)", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Dissolved Iron in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dfe", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_iron_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dfeos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"dissolved iron in sea water is meant to include both Fe2+ and Fe3+ ions (but not, e.g., particulate detrital iron)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Dissolved Iron Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dfeos", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_iron_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissi13c":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Dissolved Inorganic 13Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissi13c", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_carbon13_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissi13cos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Dissolved Inorganic 13Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissi13cos", + "positive":"", + 
"standard_name":"mole_concentration_of_dissolved_inorganic_carbon13_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissi14cabio":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Abiotic Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Abiotic Dissolved Inorganic 14Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissi14cabio", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_carbon14_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissi14cabioos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Abiotic Dissolved inorganic 14carbon (CO3+HCO3+H2CO3) concentration", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Abiotic Dissolved Inorganic 14Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissi14cabioos", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_carbon14_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissic":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Dissolved Inorganic Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissic", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissicabio":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Abiotic Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Abiotic Dissolved Inorganic Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissicabio", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_abiotic_analogue_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissicabioos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Abiotic Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Abiotic Dissolved Inorganic Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissicabioos", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_abiotic_analogue_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissicnat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration at preindustrial atmospheric xCO2", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Natural Dissolved Inorganic Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissicnat", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_natural_analogue_in_sea_water", + 
"type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissicnatos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Dissolved inorganic carbon (CO3+HCO3+H2CO3) concentration at preindustrial atmospheric xCO2", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Natural Dissolved Inorganic Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissicnatos", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_carbon_natural_analogue_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dissoc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sum of dissolved carbon component concentrations explicitly represented (i.e. not ~40 uM refractory unless explicit)", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Dissolved Organic Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dissoc", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_organic_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dmso":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Mole concentration of dimethyl sulphide in water", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Dimethyl Sulphide in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dmso", + "positive":"", + "standard_name":"mole_concentration_of_dimethyl_sulfide_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dmsos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mole Concentration of Dimethyl Sulphide in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dmsos", + "positive":"", + "standard_name":"mole_concentration_of_dimethyl_sulfide_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "dpco2":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Delta PCO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dpco2", + "positive":"", + "standard_name":"surface_carbon_dioxide_partial_pressure_difference_between_sea_water_and_air", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "dpco2abio":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Abiotic Delta PCO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dpco2abio", + "positive":"", + "standard_name":"surface_carbon_dioxide_abiotic_analogue_partial_pressure_difference_between_sea_water_and_air", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "dpco2nat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Natural Delta PCO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dpco2nat", + "positive":"", + 
"standard_name":"surface_carbon_dioxide_natural_analogue_partial_pressure_difference_between_sea_water_and_air", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "dpo2":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Delta PO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"dpo2", + "positive":"", + "standard_name":"surface_molecular_oxygen_partial_pressure_difference_between_sea_water_and_air", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "eparag100":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth100m", + "frequency":"mon", + "long_name":"Downward Flux of Aragonite", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"eparag100", + "positive":"", + "standard_name":"sinking_mole_flux_of_aragonite_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "epc100":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth100m", + "frequency":"mon", + "long_name":"Downward Flux of Particle Organic Carbon", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"epc100", + "positive":"", + "standard_name":"sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "epcalc100":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth100m", + "frequency":"mon", + "long_name":"Downward Flux of Calcite", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"epcalc100", + "positive":"", + "standard_name":"sinking_mole_flux_of_calcite_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "epfe100":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth100m", + "frequency":"mon", + "long_name":"Downward Flux of Particulate Iron", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"epfe100", + "positive":"", + "standard_name":"sinking_mole_flux_of_particulate_iron_in_sea_water", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "epn100":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth100m", + "frequency":"mon", + "long_name":"Downward Flux of Particulate Nitrogen", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"epn100", + "positive":"", + "standard_name":"sinking_mole_flux_of_particulate_organic_nitrogen_in_sea_water", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "epp100":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth100m", + "frequency":"mon", + "long_name":"Downward Flux of Particulate Phosphorus", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"epp100", + "positive":"", + "standard_name":"sinking_mole_flux_of_particulate_organic_phosphorus_in_sea_water", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + 
"epsi100":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth100m", + "frequency":"mon", + "long_name":"Downward Flux of Particulate Silica", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"epsi100", + "positive":"", + "standard_name":"sinking_mole_flux_of_particulate_silicon_in_sea_water", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "evs":{ + "cell_measures":"", + "cell_methods":"area: mean where ice_free_sea over sea time: mean", + "comment":"computed as the total mass of water vapor evaporating from the ice-free portion of the ocean divided by the area of the ocean portion of the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Evaporation Flux Where Ice Free Ocean over Sea", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"evs", + "positive":"", + "standard_name":"water_evaporation_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "expc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Downward flux of particulate organic carbon", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sinking Particulate Organic Carbon Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"expc", + "positive":"down", + "standard_name":"sinking_mole_flux_of_particulate_organic_matter_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fbddtalk":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net biological terms in time rate of change of alkalinity", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Biological Alkalinity due to Biological Activity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fbddtalk", + "positive":"", + "standard_name":"integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent_due_to_biological_processes", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fbddtdic":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net biological terms in time rate of change of dissolved inorganic carbon", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Dissolved Inorganic Carbon due to Biological Activity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fbddtdic", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon_due_to_biological_processes", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fbddtdife":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net biological terms in time rate of change of dissolved inorganic iron", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Dissolved Inorganic Iron due to Biological Activity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fbddtdife", + "positive":"", + 
"standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_iron_due_to_biological_processes", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fbddtdin":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net biological terms in time rate of change of nitrogen nutrients (e.g. NO3+NH4)", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Dissolved Inorganic Nitrogen due to Biological Activity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fbddtdin", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen_due_to_biological_processes", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fbddtdip":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net biological terms in time rate of change of phosphate", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Dissolved Inorganic Phosphorus due to Biological Activity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fbddtdip", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus_due_to_biological_processes", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fbddtdisi":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net biological terms in time rate of change of dissolved inorganic silicate", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Dissolved Inorganic Silicon due to Biological Activity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fbddtdisi", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon_due_to_biological_processes", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fddtalk":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net time rate of change of alkalinity", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Total Alkalinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fddtalk", + "positive":"", + "standard_name":"integral_wrt_depth_of_tendency_of_sea_water_alkalinity_expressed_as_mole_equivalent", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fddtdic":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Net Dissolved Inorganic Carbon", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fddtdic", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_carbon", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fddtdife":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net time rate of 
change of dissolved inorganic iron", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Net Dissolved Inorganic Iron", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fddtdife", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_iron", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fddtdin":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"Net time rate of change of nitrogen nutrients (e.g. NO3+NH4)", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Net Dissolved Inorganic Nitrogen", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fddtdin", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_nitrogen", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fddtdip":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net time rate of change of phosphate", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Net Dissolved Inorganic Phosphate", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fddtdip", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_phosphorus", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fddtdisi":{ + "cell_measures":"", + "cell_methods":"area: mean where sea depth: sum where sea (top 100m only) time: mean", + "comment":"vertical integral of net time rate of change of dissolved inorganic silicate", + "dimensions":"longitude latitude time olayer100m", + "frequency":"mon", + "long_name":"Rate of Change of Net Dissolved Inorganic Silicon", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fddtdisi", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_dissolved_inorganic_silicon", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fg13co2":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Downward Flux of Abiotic 13CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fg13co2", + "positive":"down", + "standard_name":"surface_downward_mass_flux_of_carbon13_dioxide_abiotic_analogue_expressed_as_carbon13", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fg14co2abio":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Gas exchange flux of abiotic 14CO2 (positive into ocean)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Downward Flux of Abiotic 14CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fg14co2abio", + "positive":"down", + "standard_name":"surface_downward_mass_flux_of_carbon14_dioxide_abiotic_analogue_expressed_as_carbon", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fgcfc11":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"gas exchange flux of CFC11", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward CFC11 
flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fgcfc11", + "positive":"down", + "standard_name":"surface_downward_mole_flux_of_cfc11", + "type":"real", + "units":"mol sec-1 m-2", + "valid_max":"", + "valid_min":"" + }, + "fgcfc12":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"gas exchange flux of CFC12", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward CFC12 flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fgcfc12", + "positive":"down", + "standard_name":"surface_downward_mole_flux_of_cfc12", + "type":"real", + "units":"mol sec-1 m-2", + "valid_max":"", + "valid_min":"" + }, + "fgco2":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Gas exchange flux of CO2 (positive into ocean)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Downward Flux of Total CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fgco2", + "positive":"down", + "standard_name":"surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fgco2abio":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Gas exchange flux of abiotic CO2 (positive into ocean)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Downward Flux of Abiotic CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fgco2abio", + "positive":"down", + "standard_name":"surface_downward_mass_flux_of_carbon_dioxide_abiotic_analogue_expressed_as_carbon", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fgco2nat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Gas exchange flux of natural CO2 (positive into ocean)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Downward Flux of Natural CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fgco2nat", + "positive":"down", + "standard_name":"surface_downward_mass_flux_of_carbon_dioxide_natural_analogue_expressed_as_carbon", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fgdms":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Gas exchange flux of DMS (positive into atmosphere)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Upward Flux of DMS", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fgdms", + "positive":"up", + "standard_name":"surface_upward_mole_flux_of_dimethyl_sulfide", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fgo2":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Gas exchange flux of O2 (positive into ocean)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Downward Flux of O2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fgo2", + "positive":"down", + "standard_name":"surface_downward_mole_flux_of_molecular_oxygen", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fgsf6":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"gas exchange flux of SF6", + "dimensions":"longitude latitude time", + 
"frequency":"mon", + "long_name":"Surface Downward SF6 flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fgsf6", + "positive":"down", + "standard_name":"surface_downward_mole_flux_of_sulfur_hexafluoride", + "type":"real", + "units":"mol sec-1 m-2", + "valid_max":"", + "valid_min":"" + }, + "ficeberg":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"computed as the iceberg melt water flux into the ocean divided by the area of the ocean portion of the grid cell.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Water Flux into Sea Water From Icebergs", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ficeberg", + "positive":"", + "standard_name":"water_flux_into_sea_water_from_icebergs", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "ficeberg2d":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"computed as the iceberg melt water flux into the ocean divided by the area of the ocean portion of the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Flux into Sea Water From Icebergs", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ficeberg", + "positive":"", + "standard_name":"water_flux_into_sea_water_from_icebergs", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "frfe":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Iron Loss to Sediments", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"frfe", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_iron_due_to_sedimentation", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fric":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Inorganic Carbon loss to sediments", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Downward Inorganic Carbon Flux at Ocean Bottom", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fric", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_inorganic_carbon_due_to_sedimentation", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "friver":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"computed as the river flux of water into the ocean divided by the area of the ocean portion of the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Flux into Sea Water From Rivers", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"friver", + "positive":"", + "standard_name":"water_flux_into_sea_water_from_rivers", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "frn":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Nitrogen Loss to Sediments and through Denitrification", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"frn", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_denitrification_and_sedimentation", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "froc":{ + "cell_measures":"", + 
"cell_methods":"area: mean where sea time: mean", + "comment":"Organic Carbon loss to sediments", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Downward Organic Carbon Flux at Ocean Bottom", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"froc", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_organic_carbon_due_to_sedimentation", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fsfe":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Iron supply through deposition flux onto sea surface, runoff, coasts, sediments, etc", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Downward Net Flux of Iron", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fsfe", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_iron_due_to_deposition_and_runoff_and_sediment_dissolution", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fsitherm":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"computed as the sea ice thermodynamic water flux into the ocean divided by the area of the ocean portion of the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Flux into Sea Water due to Sea Ice Thermodynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fsitherm", + "positive":"", + "standard_name":"water_flux_into_sea_water_due_to_sea_ice_thermodynamics", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "fsn":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Downward Net Flux of Nitrogen", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"fsn", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_deposition_and_fixation_and_runoff", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "graz":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Total Grazing of Phytoplankton by Zooplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"graz", + "positive":"", + "standard_name":"tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_grazing_of_phytoplankton", + "type":"real", + "units":"mol m-3 s-1", + "valid_max":"", + "valid_min":"" + }, + "hfbasin":{ + "cell_measures":"", + "cell_methods":"longitude: mean (basin) time: mean", + "comment":"Contains contributions from all physical processes affecting the northward heat transport, including resolved advection, parameterized advection, lateral diffusion, etc. Diagnosed here as a function of latitude and basin. 
Use Celsius for temperature scale.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"Northward Ocean Heat Transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfbasin", + "positive":"", + "standard_name":"northward_ocean_heat_transport", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "hfbasinpadv":{ + "cell_measures":"", + "cell_methods":"longitude: mean (basin) time: mean", + "comment":"Contributions to heat transport from parameterized eddy-induced advective transport due to any subgrid advective process. Diagnosed here as a function of latitude and basin. Use Celsius for temperature scale.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"northward ocean heat transport due to parameterized eddy advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfbasinpadv", + "positive":"", + "standard_name":"northward_ocean_heat_transport_due_to_parameterized_eddy_advection", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "hfbasinpmadv":{ + "cell_measures":"", + "cell_methods":"longitude: mean (basin) time: mean", + "comment":"Contributions to heat transport from parameterized mesoscale eddy-induced advective transport. Diagnosed here as a function of latitude and basin. Use Celsius for temperature scale.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"northward ocean heat transport due to parameterized mesoscale advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfbasinpmadv", + "positive":"", + "standard_name":"northward_ocean_heat_transport_due_to_parameterized_mesoscale_eddy_advection", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "hfbasinpmdiff":{ + "cell_measures":"", + "cell_methods":"longitude: mean (basin) time: mean", + "comment":"Contributions to heat transport from parameterized mesoscale eddy-induced diffusive transport (i.e., neutral diffusion). Diagnosed here as a function of latitude and basin.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"northward ocean heat transport due to parameterized mesoscale diffusion", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfbasinpmdiff", + "positive":"", + "standard_name":"northward_ocean_heat_transport_due_to_parameterized_mesoscale_eddy_diffusion", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "hfbasinpsmadv":{ + "cell_measures":"", + "cell_methods":"longitude: mean (basin) time: mean", + "comment":"Contributions to heat transport from parameterized submesoscale eddy-induced advective transport. Diagnosed here as a function of latitude and basin. 
Use Celsius for temperature scale.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"northward ocean heat transport due to parameterized submesoscale advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfbasinpsmadv", + "positive":"", + "standard_name":"northward_ocean_heat_transport_due_to_parameterized_submesoscale_eddy_advection", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "hfcorr":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Heat Flux Correction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfcorr", + "positive":"down", + "standard_name":"heat_flux_correction", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfds":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"This is the net flux of heat entering the liquid water column through its upper surface (excluding any 'flux adjustment').", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Downward Heat Flux at Sea Water Surface", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfds", + "positive":"down", + "standard_name":"surface_downward_heat_flux_in_sea_water", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfevapds":{ + "cell_measures":"", + "cell_methods":"area: mean where ice_free_sea over sea time: mean", + "comment":"This is defined as 'where ice_free_sea over sea'", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Temperature Flux due to Evaporation Expressed as Heat Flux Out of Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfevapds", + "positive":"up", + "standard_name":"temperature_flux_due_to_evaporation_expressed_as_heat_flux_out_of_sea_water", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfgeou":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Upward Geothermal Heat Flux at Sea Floor", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfgeou", + "positive":"up", + "standard_name":"upward_geothermal_heat_flux_at_sea_floor", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfibthermds":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Heat Flux into Sea Water due to Iceberg Thermodynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfibthermds", + "positive":"", + "standard_name":"heat_flux_into_sea_water_due_to_iceberg_thermodynamics", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfibthermds2d":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Heat Flux into Sea Water due to Iceberg Thermodynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfibthermds", + "positive":"", + "standard_name":"heat_flux_into_sea_water_due_to_iceberg_thermodynamics", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hflso":{ + "cell_measures":"", + "cell_methods":"area: mean where ice_free_sea 
over sea time: mean", + "comment":"This is defined as with the cell methods string: where ice_free_sea over sea", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward Latent Heat Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hflso", + "positive":"up", + "standard_name":"surface_downward_latent_heat_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfrainds":{ + "cell_measures":"", + "cell_methods":"area: mean where ice_free_sea over sea time: mean", + "comment":"This is defined as 'where ice_free_sea over sea'; i.e., the total flux (considered here) entering the ice-free portion of the grid cell divided by the area of the ocean portion of the grid cell. All such heat fluxes are computed based on Celsius scale.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Temperature Flux due to Rainfall Expressed as Heat Flux into Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfrainds", + "positive":"down", + "standard_name":"temperature_flux_due_to_rainfall_expressed_as_heat_flux_into_sea_water", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfrunoffds":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Temperature Flux due to Runoff Expressed as Heat Flux into Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfrunoffds", + "positive":"", + "standard_name":"temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfrunoffds2d":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Temperature Flux due to Runoff Expressed as Heat Flux into Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfrunoffds", + "positive":"", + "standard_name":"temperature_flux_due_to_runoff_expressed_as_heat_flux_into_sea_water", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfsifrazil":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Heat Flux into Sea Water due to Frazil Ice Formation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfsifrazil", + "positive":"", + "standard_name":"heat_flux_into_sea_water_due_to_freezing_of_frazil_ice", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfsifrazil2d":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Heat Flux into Sea Water due to Frazil Ice Formation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfsifrazil", + "positive":"", + "standard_name":"heat_flux_into_sea_water_due_to_freezing_of_frazil_ice", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfsnthermds":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Heat Flux into Sea Water due to Snow Thermodynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + 
"out_name":"hfsnthermds", + "positive":"", + "standard_name":"heat_flux_into_sea_water_due_to_snow_thermodynamics", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfsnthermds2d":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Heat Flux into Sea Water due to Snow Thermodynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfsnthermds", + "positive":"", + "standard_name":"heat_flux_into_sea_water_due_to_snow_thermodynamics", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfsso":{ + "cell_measures":"", + "cell_methods":"area: mean where ice_free_sea over sea time: mean", + "comment":"This is defined as 'where ice_free_sea over sea'", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward Sensible Heat Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfsso", + "positive":"up", + "standard_name":"surface_downward_sensible_heat_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "hfx":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Contains all contributions to 'x-ward' heat transport from resolved and parameterized processes. Use Celsius for temperature scale.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Ocean Heat X Transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfx", + "positive":"", + "standard_name":"ocean_heat_x_transport", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "hfy":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Contains all contributions to 'y-ward' heat transport from resolved and parameterized processes. 
Use Celsius for temperature scale.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Ocean Heat Y Transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"hfy", + "positive":"", + "standard_name":"ocean_heat_y_transport", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "htovgyre":{ + "cell_measures":"", + "cell_methods":"longitude: mean time: mean", + "comment":"From all advective mass transport processes, resolved and parameterized.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"Northward Ocean Heat Transport due to Gyre", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"htovgyre", + "positive":"", + "standard_name":"northward_ocean_heat_transport_due_to_gyre", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "htovovrt":{ + "cell_measures":"", + "cell_methods":"longitude: mean time: mean", + "comment":"From all advective mass transport processes, resolved and parameterized.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"Northward Ocean Heat Transport due to Overturning", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"htovovrt", + "positive":"", + "standard_name":"northward_ocean_heat_transport_due_to_overturning", + "type":"real", + "units":"W", + "valid_max":"", + "valid_min":"" + }, + "icfriver":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Inorganic Carbon supply to ocean through runoff (separate from gas exchange)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Flux of Inorganic Carbon Into Ocean Surface by Runoff", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"icfriver", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_inorganic_carbon_due_to_runoff_and_sediment_dissolution", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intdic":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated DIC", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Dissolved Inorganic Carbon Content", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intdic", + "positive":"", + "standard_name":"ocean_mass_content_of_dissolved_inorganic_carbon", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "intdoc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated DOC (explicit pools only)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Dissolved Organic Carbon Content", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intdoc", + "positive":"", + "standard_name":"ocean_mass_content_of_dissolved_organic_carbon", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "intparag":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated aragonite production", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Aragonite Production", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intparag", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_aragonite_expressed_as_carbon_due_to_biological_production", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intpbfe":{ + "cell_measures":"", + 
"cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated biogenic iron production", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Iron Production", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpbfe", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_iron_due_to_biological_production", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intpbn":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated biogenic nitrogen production", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Nitrogen Production", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpbn", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_nitrogen_due_to_biological_production", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intpbp":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated biogenic phosphorus production", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Phosphorus Production", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpbp", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_phosphorus_due_to_biological_production", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intpbsi":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated biogenic silica production", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Silica Production", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpbsi", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_silicon_due_to_biological_production", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intpcalcite":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated calcite production", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Calcite Production", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpcalcite", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_calcite_expressed_as_carbon_due_to_biological_production", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intpn2":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated nitrogen fixation", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Nitrogen Fixation Rate in Ocean", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpn2", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_elemental_nitrogen_due_to_fixation", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intpoc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated POC", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Particulate Organic Carbon Content", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpoc", + "positive":"", + "standard_name":"ocean_mass_content_of_particulate_organic_matter_expressed_as_carbon", + "type":"real", + "units":"kg m-2", + "valid_max":"", + 
"valid_min":"" + }, + "intpp":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated total primary (organic carbon) production by phytoplankton. This should equal the sum of intpdiat+intpphymisc, but those individual components may be unavailable in some models.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Primary Organic Carbon Production by All Types of Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpp", + "positive":"", + "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_phytoplankton", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intppcalc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net Primary Mole Productivity of Carbon by Calcareous Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intppcalc", + "positive":"", + "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_calcareous_phytoplankton", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intppdiat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated primary (organic carbon) production by the diatom phytoplankton component alone", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net Primary Organic Carbon Production by Diatoms", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intppdiat", + "positive":"", + "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diatoms", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intppdiaz":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net Primary Mole Productivity of Carbon by Diazotrophs", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intppdiaz", + "positive":"", + "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophs", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intppmisc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated total primary (organic carbon) production by other phytoplankton components alone", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net Primary Organic Carbon Production by Other Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intppmisc", + "positive":"", + "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intppnitrate":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Vertically integrated primary (organic carbon) production by phytoplankton based on nitrate uptake alone", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Primary Organic Carbon Production by Phytoplankton Based on Nitrate Uptake Alone", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intppnitrate", + "positive":"", + 
"standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_due_to_nitrate_utilization", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "intpppico":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net Primary Mole Productivity of Carbon by Picophytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"intpppico", + "positive":"", + "standard_name":"net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_picophytoplankton", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "limfecalc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Iron limitation of Calcareous Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limfecalc", + "positive":"", + "standard_name":"iron_growth_limitation_of_calcareous_phytoplankton", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limfediat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Iron limitation of Diatoms", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limfediat", + "positive":"", + "standard_name":"iron_growth_limitation_of_diatoms", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limfediaz":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Iron limitation of Diazotrophs", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limfediaz", + "positive":"", + "standard_name":"iron_growth_limitation_of_diazotrophs", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limfemisc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Iron Limitation of Other Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limfemisc", + "positive":"", + "standard_name":"iron_growth_limitation_of_miscellaneous_phytoplankton", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limfepico":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Iron limitation of Picophytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limfepico", + "positive":"", + "standard_name":"iron_growth_limitation_of_picophytoplankton", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limirrcalc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Irradiance limitation of Calcareous Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limirrcalc", + "positive":"", + "standard_name":"growth_limitation_of_calcareous_phytoplankton_due_to_solar_irradiance", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limirrdiat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + 
"comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Irradiance limitation of Diatoms", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limirrdiat", + "positive":"", + "standard_name":"growth_limitation_of_diatoms_due_to_solar_irradiance", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limirrdiaz":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Irradiance limitation of Diazotrophs", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limirrdiaz", + "positive":"", + "standard_name":"growth_limitation_of_diazotrophs_due_to_solar_irradiance", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limirrmisc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Irradiance Limitation of Other Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limirrmisc", + "positive":"", + "standard_name":"growth_limitation_of_miscellaneous_phytoplankton_due_to_solar_irradiance", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limirrpico":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Irradiance limitation of Picophytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limirrpico", + "positive":"", + "standard_name":"growth_limitation_of_picophytoplankton_due_to_solar_irradiance", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limncalc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Nitrogen limitation of Calcareous Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limncalc", + "positive":"", + "standard_name":"nitrogen_growth_limitation_of_calcareous_phytoplankton", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limndiat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Nitrogen limitation of Diatoms", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limndiat", + "positive":"", + "standard_name":"nitrogen_growth_limitation_of_diatoms", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limndiaz":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Nitrogen limitation of Diazotrophs", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limndiaz", + "positive":"", + "standard_name":"nitrogen_growth_limitation_of_diazotrophs", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limnmisc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Nitrogen Limitation of Other Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limnmisc", + "positive":"", + "standard_name":"nitrogen_growth_limitation_of_miscellaneous_phytoplankton", + 
"type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "limnpico":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Nitrogen limitation of Picophytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"limnpico", + "positive":"", + "standard_name":"nitrogen_growth_limitation_of_picophytoplankton", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "masscello":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Tracer grid-cell mass per unit area used for computing tracer budgets. For Boussinesq models with static ocean grid cell thickness, masscello = rhozero*thickcello, where thickcello is static cell thickness and rhozero is constant Boussinesq reference density. More generally, masscello is time dependent and reported as part of Omon.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sea Water Mass Per Unit Area", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"masscello", + "positive":"", + "standard_name":"sea_water_mass_per_unit_area", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "masso":{ + "cell_measures":"", + "cell_methods":"area: sum where sea time: mean", + "comment":"Total mass of liquid sea water. For Boussinesq models, report this diagnostic as Boussinesq reference density times total volume.", + "dimensions":"time", + "frequency":"mon", + "long_name":"Sea Water Mass", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"masso", + "positive":"", + "standard_name":"sea_water_mass", + "type":"real", + "units":"kg", + "valid_max":"", + "valid_min":"" + }, + "mfo":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"oline time", + "frequency":"mon", + "long_name":"Sea Water Transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mfo", + "positive":"", + "standard_name":"sea_water_transport_across_line", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "mlotst":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Sigma T is potential density referenced to ocean surface.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Ocean Mixed Layer Thickness Defined by Sigma T", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mlotst", + "positive":"", + "standard_name":"ocean_mixed_layer_thickness_defined_by_sigma_t", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "mlotstmax":{ + "cell_measures":"", + "cell_methods":"area: mean time: maximum", + "comment":"Sigma T is potential density referenced to ocean surface.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Maximum Ocean Mixed Layer Thickness Defined by Sigma T", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mlotstmax", + "positive":"", + "standard_name":"ocean_mixed_layer_thickness_defined_by_sigma_t", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "mlotstmin":{ + "cell_measures":"", + "cell_methods":"area: mean time: minimum", + "comment":"Sigma T is potential density referenced to ocean surface.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Minimum Ocean Mixed Layer Thickness Defined by Sigma T", + "ok_max_mean_abs":"", + 
"ok_min_mean_abs":"", + "out_name":"mlotstmin", + "positive":"", + "standard_name":"ocean_mixed_layer_thickness_defined_by_sigma_t", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "mlotstsq":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Square of Ocean Mixed Layer Thickness Defined by Sigma T", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mlotstsq", + "positive":"", + "standard_name":"square_of_ocean_mixed_layer_thickness_defined_by_sigma_t", + "type":"real", + "units":"m2", + "valid_max":"", + "valid_min":"" + }, + "msftbarot":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Streamfunction or its approximation for free surface models. See OMDP document for details.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Ocean Barotropic Mass Streamfunction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftbarot", + "positive":"", + "standard_name":"ocean_barotropic_mass_streamfunction", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftmrho":{ + "cell_measures":"", + "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", + "comment":"Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", + "dimensions":"latitude rho basin time", + "frequency":"mon", + "long_name":"Ocean Meridional Overturning Mass Streamfunction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftmrho", + "positive":"", + "standard_name":"ocean_meridional_overturning_mass_streamfunction", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftmrhompa":{ + "cell_measures":"", + "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", + "comment":"CMIP5 called this 'due to Bolus Advection'. Name change respects the more general physics of the mesoscale parameterizations.", + "dimensions":"latitude rho basin time", + "frequency":"mon", + "long_name":"ocean meridional overturning mass streamfunction due to parameterized mesoscale advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftmrhompa", + "positive":"", + "standard_name":"ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_mesoscale_eddy_advection", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftmz":{ + "cell_measures":"", + "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", + "comment":"Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", + "dimensions":"latitude olevel basin time", + "frequency":"mon", + "long_name":"Ocean Meridional Overturning Mass Streamfunction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftmz", + "positive":"", + "standard_name":"ocean_meridional_overturning_mass_streamfunction", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftmzmpa":{ + "cell_measures":"", + "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", + "comment":"CMIP5 called this 'due to Bolus Advection'. 
Name change respects the more general physics of the mesoscale parameterizations.", + "dimensions":"latitude olevel basin time", + "frequency":"mon", + "long_name":"ocean meridional overturning mass streamfunction due to parameterized mesoscale advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftmzmpa", + "positive":"", + "standard_name":"ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_mesoscale_eddy_advection", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftmzsmpa":{ + "cell_measures":"", + "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", + "comment":"Report only if there is a submesoscale eddy parameterization.", + "dimensions":"latitude olevel basin time", + "frequency":"mon", + "long_name":"ocean meridional overturning mass streamfunction due to parameterized submesoscale advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftmzsmpa", + "positive":"", + "standard_name":"ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_submesoscale_eddy_advection", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftyrho":{ + "cell_measures":"", + "cell_methods":"time: mean grid_longitude: mean", + "comment":"Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", + "dimensions":"latitude rho basin time", + "frequency":"mon", + "long_name":"Ocean Y Overturning Mass Streamfunction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftyrho", + "positive":"", + "standard_name":"ocean_y_overturning_mass_streamfunction", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftyrhompa":{ + "cell_measures":"", + "cell_methods":"time: mean grid_longitude: mean", + "comment":"CMIP5 called this 'due to Bolus Advection'. Name change respects the more general physics of the mesoscale parameterizations.", + "dimensions":"latitude rho basin time", + "frequency":"mon", + "long_name":"ocean Y overturning mass streamfunction due to parameterized mesoscale advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftyrhompa", + "positive":"", + "standard_name":"ocean_y_overturning_mass_streamfunction_due_to_parameterized_mesoscale_eddy_advection", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftyz":{ + "cell_measures":"", + "cell_methods":"time: mean grid_longitude: mean", + "comment":"Overturning mass streamfunction arising from all advective mass transport processes, resolved and parameterized.", + "dimensions":"latitude olevel basin time", + "frequency":"mon", + "long_name":"Ocean Y Overturning Mass Streamfunction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftyz", + "positive":"", + "standard_name":"ocean_y_overturning_mass_streamfunction", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftyzmpa":{ + "cell_measures":"", + "cell_methods":"time: mean grid_longitude: mean", + "comment":"CMIP5 called this 'due to Bolus Advection'. 
Name change respects the more general physics of the mesoscale parameterizations.", + "dimensions":"latitude olevel basin time", + "frequency":"mon", + "long_name":"ocean Y overturning mass streamfunction due to parameterized mesoscale advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftyzmpa", + "positive":"", + "standard_name":"ocean_y_overturning_mass_streamfunction_due_to_parameterized_mesoscale_eddy_advection", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "msftyzsmpa":{ + "cell_measures":"", + "cell_methods":"longitude: mean (comment: basin mean[ along zig-zag grid path]) time: mean", + "comment":"Report only if there is a submesoscale eddy parameterization.", + "dimensions":"latitude olevel basin time", + "frequency":"mon", + "long_name":"ocean Y overturning mass streamfunction due to parameterized submesoscale advection", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"msftyzsmpa", + "positive":"", + "standard_name":"ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_submesoscale_eddy_advection", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "nh4":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Dissolved Ammonium Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"nh4", + "positive":"", + "standard_name":"mole_concentration_of_ammonium_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "no3":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Dissolved Nitrate Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"no3", + "positive":"", + "standard_name":"mole_concentration_of_nitrate_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "no3os":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Dissolved Nitrate Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"no3os", + "positive":"", + "standard_name":"mole_concentration_of_nitrate_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "o2":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Dissolved Oxygen Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"o2", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "o2min":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Oxygen Minimum Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"o2min", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_shallowest_local_minimum_in_vertical_profile", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "o2os":{ + "cell_measures":"", + 
"cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Dissolved Oxygen Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"o2os", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "o2sat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Dissolved Oxygen Concentration at Saturation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"o2sat", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_saturation", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "o2satos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Dissolved Oxygen Concentration at Saturation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"o2satos", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_molecular_oxygen_in_sea_water_at_saturation", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "obvfsq":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Square of Brunt Vaisala Frequency in Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"obvfsq", + "positive":"", + "standard_name":"square_of_brunt_vaisala_frequency_in_sea_water", + "type":"real", + "units":"s-2", + "valid_max":"", + "valid_min":"" + }, + "ocfriver":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Organic Carbon supply to ocean through runoff (separate from gas exchange)", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Flux of Organic Carbon Into Ocean Surface by Runoff", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ocfriver", + "positive":"", + "standard_name":"tendency_of_ocean_mole_content_of_organic_carbon_due_to_runoff_and_sediment_dissolution", + "type":"real", + "units":"mol m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "pbfe":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Biogenic Iron Production", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pbfe", + "positive":"", + "standard_name":"tendency_of_mole_concentration_of_iron_in_sea_water_due_to_biological_production", + "type":"real", + "units":"mol m-3 s-1", + "valid_max":"", + "valid_min":"" + }, + "pbo":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea Water Pressure at Sea floor", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pbo", + "positive":"", + "standard_name":"sea_water_pressure_at_sea_floor", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "pbsi":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude 
time depth0m", + "frequency":"mon", + "long_name":"Biogenic Silica Production", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pbsi", + "positive":"", + "standard_name":"tendency_of_mole_concentration_of_silicon_in_sea_water_due_to_biological_production", + "type":"real", + "units":"mol m-3 s-1", + "valid_max":"", + "valid_min":"" + }, + "ph":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"negative log of hydrogen ion concentration with the concentration expressed as mol H kg-1.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"pH", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ph", + "positive":"", + "standard_name":"sea_water_ph_reported_on_total_scale", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "phabio":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1 (abiotic component)..", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Abiotic pH", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phabio", + "positive":"", + "standard_name":"sea_water_ph_abiotic_analogue_reported_on_total_scale", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "phabioos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Abiotic pH", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phabioos", + "positive":"", + "standard_name":"sea_water_ph_abiotic_analogue_reported_on_total_scale", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "phnat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Natural pH", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phnat", + "positive":"", + "standard_name":"sea_water_ph_natural_analogue_reported_on_total_scale", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "phnatos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"negative log10 of hydrogen ion concentration with the concentration expressed as mol H kg-1.", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Natural pH", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phnatos", + "positive":"", + "standard_name":"sea_water_ph_natural_analogue_reported_on_total_scale", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "phyc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton carbon component concentrations. In most (all?) 
cases this is the sum of phycdiat and phycmisc (i.e., 'Diatom Carbon Concentration' and 'Non-Diatom Phytoplankton Carbon Concentration'", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Phytoplankton Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phyc", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phycalc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"carbon concentration from calcareous (calcite-producing) phytoplankton component alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Calcareous Phytoplankton expressed as Carbon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phycalc", + "positive":"", + "standard_name":"mole_concentration_of_calcareous_phytoplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phycos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton organic carbon component concentrations at the sea surface", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Phytoplankton Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phycos", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phydiat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"carbon from the diatom phytoplankton component concentration alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Diatoms expressed as Carbon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phydiat", + "positive":"", + "standard_name":"mole_concentration_of_diatoms_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phydiaz":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"carbon concentration from the diazotrophic phytoplankton component alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Diazotrophs Expressed as Carbon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phydiaz", + "positive":"", + "standard_name":"mole_concentration_of_diazotrophs_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phyfe":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton iron component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Total Phytoplankton expressed as Iron in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phyfe", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phyfeos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton 
iron component concentrations", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mole Concentration of Total Phytoplankton expressed as Iron in Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phyfeos", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_iron_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phymisc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"carbon concentration from additional phytoplankton component alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Miscellaneous Phytoplankton expressed as Carbon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phymisc", + "positive":"", + "standard_name":"mole_concentration_of_miscellaneous_phytoplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phyn":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton nitrogen component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Total Phytoplankton expressed as Nitrogen in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phyn", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phynos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton nitrogen component concentrations", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mole Concentration of Phytoplankton Nitrogen in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phynos", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_nitrogen_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phyp":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton phosphorus components", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Total Phytoplankton expressed as Phosphorus in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phyp", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phypico":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"carbon concentration from the picophytoplankton (<2 um) component alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Picophytoplankton expressed as Carbon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phypico", + "positive":"", + "standard_name":"mole_concentration_of_picophytoplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "phypos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton phosphorus components", + "dimensions":"longitude latitude 
time depth0m", + "frequency":"mon", + "long_name":"Surface Mole Concentration of Total Phytoplankton expressed as Phosphorus in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"phypos", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_phosphorus_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "physi":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton silica component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Total Phytoplankton expressed as Silicon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"physi", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "physios":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of phytoplankton silica component concentrations", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mole Concentration of Total Phytoplankton expressed as Silicon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"physios", + "positive":"", + "standard_name":"mole_concentration_of_phytoplankton_expressed_as_silicon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "pnitrate":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Primary (organic carbon) production by phytoplankton due to nitrate uptake alone", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Primary Carbon Production by Phytoplankton due to Nitrate Uptake Alone", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pnitrate", + "positive":"", + "standard_name":"tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_nitrate_utilization", + "type":"real", + "units":"mol m-3 s-1", + "valid_max":"", + "valid_min":"" + }, + "po4":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Total Dissolved Inorganic Phosphorus Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"po4", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "pon":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of particulate organic nitrogen component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Particulate Organic Matter expressed as Nitrogen in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pon", + "positive":"", + "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "ponos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of particulate organic nitrogen component concentrations", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + 
"long_name":"Surface Mole Concentration of Particulate Organic Matter expressed as Nitrogen in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"ponos", + "positive":"", + "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_nitrogen_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "pop":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of particulate organic phosphorus component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Particulate Organic Matter expressed as Phosphorus in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pop", + "positive":"", + "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "popos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of particulate organic phosphorus component concentrations", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Mole Concentration of Particulate Organic Matter expressed as Phosphorus in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"popos", + "positive":"", + "standard_name":"mole_concentration_of_particulate_organic_matter_expressed_as_phosphorus_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "pp":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"total primary (organic carbon) production by phytoplankton", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Primary Carbon Production by Total Phytoplankton", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pp", + "positive":"", + "standard_name":"tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production", + "type":"real", + "units":"mol m-3 s-1", + "valid_max":"", + "valid_min":"" + }, + "prra":{ + "cell_measures":"", + "cell_methods":"area: mean where ice_free_sea over sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Rainfall Flux where Ice Free Ocean over Sea", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"prra", + "positive":"", + "standard_name":"rainfall_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "prsn":{ + "cell_measures":"", + "cell_methods":"area: mean where ice_free_sea over sea time: mean", + "comment":"at surface; includes precipitation of all forms of water in the solid phase", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snowfall Flux where Ice Free Ocean over Sea", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"prsn", + "positive":"", + "standard_name":"snowfall_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "pso":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea Water Pressure at Sea Water Surface", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"pso", + "positive":"", + "standard_name":"sea_water_pressure_at_sea_water_surface", + "type":"real", + 
"units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "rlntds":{ + "cell_measures":"", + "cell_methods":"area: mean where ice_free_sea over sea time: mean", + "comment":"This is defined as 'where ice_free_sea over sea'", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Net Downward Longwave Radiation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rlntds", + "positive":"down", + "standard_name":"surface_net_downward_longwave_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsdo":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Downwelling Shortwave Radiation in Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsdo", + "positive":"down", + "standard_name":"downwelling_shortwave_flux_in_sea_water", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "rsntds":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"This is the flux into the surface of liquid sea water only. This excludes shortwave flux absorbed by sea ice, but includes any light that passes through the ice and is absorbed by the ocean.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net Downward Shortwave Radiation at Sea Water Surface", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rsntds", + "positive":"down", + "standard_name":"net_downward_shortwave_flux_at_sea_water_surface", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "sf6":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Moles Per Unit Mass of SF6 in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sf6", + "positive":"", + "standard_name":"mole_concentration_of_sulfur_hexafluoride_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "sfdsi":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"This field is physical, and it arises since sea ice has a nonzero salt content, so it exchanges salt with the liquid ocean upon melting and freezing.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Downward Sea Ice Basal Salt Flux", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sfdsi", + "positive":"down", + "standard_name":"downward_sea_ice_basal_salt_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sfriver":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"This field is physical, and it arises when rivers carry a nonzero salt content. 
Often this is zero, with rivers assumed to be fresh.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Salt Flux into Sea Water from Rivers", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sfriver", + "positive":"", + "standard_name":"salt_flux_into_sea_water_from_rivers", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "si":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Total Dissolved Inorganic Silicon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"si", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_silicon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "sios":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Total Dissolved Inorganic Silicon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sios", + "positive":"", + "standard_name":"mole_concentration_of_dissolved_inorganic_silicon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "sltovgyre":{ + "cell_measures":"", + "cell_methods":"longitude: mean time: mean", + "comment":"From all advective mass transport processes, resolved and parameterized.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"Northward Ocean Salt Transport due to Gyre", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sltovgyre", + "positive":"", + "standard_name":"northward_ocean_salt_transport_due_to_gyre", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "sltovovrt":{ + "cell_measures":"", + "cell_methods":"longitude: mean time: mean", + "comment":"From all advective mass transport processes, resolved and parameterized.", + "dimensions":"latitude basin time", + "frequency":"mon", + "long_name":"Northward Ocean Salt Transport due to Overturning", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sltovovrt", + "positive":"", + "standard_name":"northward_ocean_salt_transport_due_to_overturning", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "so":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sea Water Salinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"so", + "positive":"", + "standard_name":"sea_water_salinity", + "type":"real", + "units":"0.001", + "valid_max":"", + "valid_min":"" + }, + "sob":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Model prognostic salinity at bottom-most model grid cell", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea water salinity at sea floor", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sob", + "positive":"", + "standard_name":"sea_water_salinity_at_sea_floor", + "type":"real", + "units":"0.001", + "valid_max":"", + "valid_min":"" + }, + "soga":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Mean Sea Water Salinity", + "ok_max_mean_abs":"", + 
"ok_min_mean_abs":"", + "out_name":"soga", + "positive":"", + "standard_name":"sea_water_salinity", + "type":"real", + "units":"0.001", + "valid_max":"", + "valid_min":"" + }, + "sos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea Surface Salinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sos", + "positive":"", + "standard_name":"sea_surface_salinity", + "type":"real", + "units":"0.001", + "valid_max":"", + "valid_min":"" + }, + "sosga":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Average Sea Surface Salinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sosga", + "positive":"", + "standard_name":"sea_surface_salinity", + "type":"real", + "units":"0.001", + "valid_max":"", + "valid_min":"" + }, + "sossq":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Square of Sea Surface Salinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sossq", + "positive":"", + "standard_name":"square_of_sea_surface_salinity", + "type":"real", + "units":"1e-06", + "valid_max":"", + "valid_min":"" + }, + "spco2":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Aqueous Partial Pressure of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"spco2", + "positive":"", + "standard_name":"surface_partial_pressure_of_carbon_dioxide_in_sea_water", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "spco2abio":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Abiotic Surface Aqueous Partial Pressure of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"spco2abio", + "positive":"", + "standard_name":"surface_partial_pressure_of_carbon_dioxide_abiotic_analogue_in_sea_water", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "spco2nat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Natural Surface Aqueous Partial Pressure of CO2", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"spco2nat", + "positive":"", + "standard_name":"surface_partial_pressure_of_carbon_dioxide_natural_analogue_in_sea_water", + "type":"real", + "units":"Pa", + "valid_max":"", + "valid_min":"" + }, + "talk":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components)", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Total Alkalinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"talk", + "positive":"", + "standard_name":"sea_water_alkalinity_expressed_as_mole_equivalent", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "talknat":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"total alkalinity equivalent 
concentration (including carbonate, borate, phosphorus, silicon, and nitrogen components) at preindustrial atmospheric xCO2", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Natural Total Alkalinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"talknat", + "positive":"", + "standard_name":"sea_water_alkalinity_natural_analogue_expressed_as_mole_equivalent", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "talknatos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"total alkalinity equivalent concentration (including carbonate, borate, phosphorus, silicon, and nitrogen components) at preindustrial atmospheric xCO2", + "dimensions":"longitude latitude time depth0m", + "frequency":"mon", + "long_name":"Surface Natural Total Alkalinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"talknatos", + "positive":"", + "standard_name":"sea_water_alkalinity_natural_analogue_expressed_as_mole_equivalent", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "tauucorr":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward X Stress Correction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tauucorr", + "positive":"down", + "standard_name":"surface_downward_x_stress_correction", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "tauuo":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward X Stress", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tauuo", + "positive":"down", + "standard_name":"surface_downward_x_stress", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "tauvcorr":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward Y Stress Correction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tauvcorr", + "positive":"down", + "standard_name":"surface_downward_y_stress_correction", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "tauvo":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"This is the stress on the liquid ocean from overlying atmosphere, sea ice, ice shelf, etc.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface Downward Y Stress", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tauvo", + "positive":"down", + "standard_name":"surface_downward_y_stress", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "thetao":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Diagnostic should be contributed even for models using conservative temperature as prognostic field.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sea Water Potential Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"thetao", + 
"positive":"", + "standard_name":"sea_water_potential_temperature", + "type":"real", + "units":"degC", + "valid_max":"", + "valid_min":"" + }, + "thetaoga":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Diagnostic should be contributed even for models using conservative temperature as prognostic field", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Average Sea Water Potential Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"thetaoga", + "positive":"", + "standard_name":"sea_water_potential_temperature", + "type":"real", + "units":"degC", + "valid_max":"", + "valid_min":"" + }, + "thkcello":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Ocean Model Cell Thickness", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"thkcello", + "positive":"", + "standard_name":"cell_thickness", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "tob":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Potential temperature at the ocean bottom-most grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea Water Potential Temperature at Sea Floor", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tob", + "positive":"", + "standard_name":"sea_water_potential_temperature_at_sea_floor", + "type":"real", + "units":"degC", + "valid_max":"", + "valid_min":"" + }, + "tos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Temperature of upper boundary of the liquid ocean, including temperatures below sea-ice and floating ice shelves.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea Surface Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tos", + "positive":"", + "standard_name":"sea_surface_temperature", + "type":"real", + "units":"degC", + "valid_max":"", + "valid_min":"" + }, + "tosga":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Temperature of upper boundary of the liquid ocean, including temperatures below sea-ice and floating ice shelves.", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Average Sea Surface Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tosga", + "positive":"", + "standard_name":"sea_surface_temperature", + "type":"real", + "units":"degC", + "valid_max":"", + "valid_min":"" + }, + "tossq":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Square of temperature of liquid ocean.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Square of Sea Surface Temperature", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"tossq", + "positive":"", + "standard_name":"square_of_sea_surface_temperature", + "type":"real", + "units":"degC2", + "valid_max":"", + "valid_min":"" + }, + "umo":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"X-ward mass transport from resolved and parameterized advective transport.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Ocean Mass X Transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"umo", + "positive":"", + "standard_name":"ocean_mass_x_transport", + "type":"real", + "units":"kg s-1", + 
"valid_max":"", + "valid_min":"" + }, + "uo":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Prognostic x-ward velocity component resolved by the model.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sea Water X Velocity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"uo", + "positive":"", + "standard_name":"sea_water_x_velocity", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "vmo":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Y-ward mass transport from resolved and parameterized advective transport.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Ocean Mass Y Transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vmo", + "positive":"", + "standard_name":"ocean_mass_y_transport", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "vo":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Prognostic x-ward velocity component resolved by the model.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sea Water Y Velocity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vo", + "positive":"", + "standard_name":"sea_water_y_velocity", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "volcello":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"grid-cell volume ca. 2000.", + "dimensions":"longitude latitude olevel time", + "frequency":"fx", + "long_name":"Ocean Grid-Cell Volume", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"volcello", + "positive":"", + "standard_name":"ocean_volume", + "type":"real", + "units":"m3", + "valid_max":"", + "valid_min":"" + }, + "volo":{ + "cell_measures":"", + "cell_methods":"area: sum where sea time: mean", + "comment":"Total volume of liquid sea water.", + "dimensions":"time", + "frequency":"mon", + "long_name":"Sea Water Volume", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"volo", + "positive":"", + "standard_name":"sea_water_volume", + "type":"real", + "units":"m3", + "valid_max":"", + "valid_min":"" + }, + "vsf":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"It is set to zero in models which receive a real water flux.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Virtual Salt Flux into Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vsf", + "positive":"", + "standard_name":"virtual_salt_flux_into_sea_water", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "vsfcorr":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"It is set to zero in models which receive a real water flux.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Virtual Salt Flux Correction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vsfcorr", + "positive":"", + "standard_name":"virtual_salt_flux_correction", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "vsfevap":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"zero for models using real water fluxes.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Virtual Salt Flux into Sea Water due to Evaporation", + "ok_max_mean_abs":"", + 
"ok_min_mean_abs":"", + "out_name":"vsfevap", + "positive":"", + "standard_name":"virtual_salt_flux_into_sea_water_due_to_evaporation", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "vsfpr":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"zero for models using real water fluxes.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Virtual Salt Flux into Sea Water due to Rainfall", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vsfpr", + "positive":"", + "standard_name":"virtual_salt_flux_into_sea_water_due_to_rainfall", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "vsfriver":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"zero for models using real water fluxes.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Virtual Salt Flux into Sea Water From Rivers", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vsfriver", + "positive":"", + "standard_name":"virtual_salt_flux_into_sea_water_from_rivers", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "vsfsit":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"This variable measures the virtual salt flux into sea water due to the melting of sea ice. It is set to zero in models which receive a real water flux.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Virtual Salt Flux into Sea Water due to Sea Ice Thermodynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"vsfsit", + "positive":"", + "standard_name":"virtual_salt_flux_into_sea_water_due_to_sea_ice_thermodynamics", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "wfcorr":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Positive flux implies correction adds water to ocean.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Flux Correction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"wfcorr", + "positive":"down", + "standard_name":"water_flux_correction", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "wfo":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"computed as the water flux into the ocean divided by the area of the ocean portion of the grid cell. 
This is the sum of the next two variables in this table.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Flux into Sea Water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"wfo", + "positive":"", + "standard_name":"water_flux_into_sea_water", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "wfonocorr":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"computed as the water flux (without flux correction) into the ocean divided by the area of the ocean portion of the grid cell.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Water Flux into Sea Water Without Flux Correction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"wfonocorr", + "positive":"", + "standard_name":"water_flux_into_sea_water_without_flux_correction", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "wmo":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Upward mass transport from resolved and parameterized advective transport.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Upward Ocean Mass Transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"wmo", + "positive":"", + "standard_name":"upward_ocean_mass_transport", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "wo":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Sea Water Z Velocity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"wo", + "positive":"", + "standard_name":"upward_sea_water_velocity", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "zfullo":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Depth below geoid", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Depth Below Geoid of Ocean Layer", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zfullo", + "positive":"", + "standard_name":"depth_below_geoid", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "zhalfo":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Depth below geoid", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Depth Below Geoid of Interfaces Between Ocean Layers", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zhalfo", + "positive":"", + "standard_name":"depth_below_geoid", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "zmeso":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"carbon concentration from mesozooplankton (20-200 um) component alone", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Mesozooplankton expressed as Carbon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zmeso", + "positive":"", + "standard_name":"mole_concentration_of_mesozooplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "zmicro":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"carbon concentration from the microzooplankton (<20 um) component alone", + 
"dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concentration of Microzooplankton expressed as Carbon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zmicro", + "positive":"", + "standard_name":"mole_concentration_of_microzooplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "zmisc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"carbon from additional zooplankton component concentrations alone (e.g. Micro, meso). Since the models all have different numbers of components, this variable has been included to provide a check for intercomparison between models since some phytoplankton groups are supersets.", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Mole Concetration of Other Zooplankton expressed as Carbon in sea water", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zmisc", + "positive":"", + "standard_name":"mole_concentration_of_miscellaneous_zooplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "zo2min":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Depth of vertical minimum concentration of dissolved oxygen gas (if two, then the shallower)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Depth of Oxygen Minimum Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zo2min", + "positive":"", + "standard_name":"depth_at_shallowest_local_minimum_in_vertical_profile_of_mole_concentration_of_dissolved_molecular_oxygen_in_sea_water", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "zooc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"sum of zooplankton carbon component concentrations", + "dimensions":"longitude latitude olevel time", + "frequency":"mon", + "long_name":"Zooplankton Carbon Concentration", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zooc", + "positive":"", + "standard_name":"mole_concentration_of_zooplankton_expressed_as_carbon_in_sea_water", + "type":"real", + "units":"mol m-3", + "valid_max":"", + "valid_min":"" + }, + "zos":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"This is the dynamic sea level, so should have zero global area mean. 
It should not include inverse barometer depressions from sea ice.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea Surface Height Above Geoid", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zos", + "positive":"", + "standard_name":"sea_surface_height_above_geoid", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "zossq":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Surface ocean geoid defines z=0.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Square of Sea Surface Height Above Geoid", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zossq", + "positive":"", + "standard_name":"square_of_sea_surface_height_above_geoid", + "type":"real", + "units":"m2", + "valid_max":"", + "valid_min":"" + }, + "zostoga":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"There is no CMIP6 request for zosga nor zossga.", + "dimensions":"time", + "frequency":"mon", + "long_name":"Global Average Thermosteric Sea Level Change", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zostoga", + "positive":"", + "standard_name":"global_average_thermosteric_sea_level_change", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "zsatarag":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Depth of aragonite saturation horizon (0 if undersaturated at all depths, 'missing' if supersaturated at all depths; if multiple horizons exist, the shallowest should be taken).", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Aragonite Saturation Depth", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zsatarag", + "positive":"", + "standard_name":"minimum_depth_of_aragonite_undersaturation_in_sea_water", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "zsatcalc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Depth of calcite saturation horizon (0 if undersaturated at all depths, and 'missing' if saturated through whole depth; if two or more horizons exist, then the shallowest is reported)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Calcite Saturation Depth", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zsatcalc", + "positive":"", + "standard_name":"minimum_depth_of_calcite_undersaturation_in_sea_water", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_SImon.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_SImon.json new file mode 100644 index 0000000000..bd5e3733a2 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_SImon.json @@ -0,0 +1,1532 @@ +{ + "Header":{ + "#dataRequest_specs_version":"01.00.21", + "#mip_era":"CMIP6", + "Conventions":"CF-1.7 ODS-2.1", + "approx_interval":"30.00000", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "generic_levels":"", + "int_missing_value":"-2147483648", + "missing_value":"1e20", + "product":"observations", + "realm":"seaIce", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_SImon" + }, + "variable_entry":{ + "sfdsi":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"This field is physical, and it arises since sea ice has a nonzero salt
content, so it exchanges salt with the liquid ocean upon melting and freezing.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Salt flux from sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sfdsi", + "positive":"down", + "standard_name":"downward_sea_ice_basal_salt_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "siage":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Age of sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Age of sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siage", + "positive":"", + "standard_name":"age_of_sea_ice", + "type":"real", + "units":"s", + "valid_max":"", + "valid_min":"" + }, + "siareaacrossline":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"net (sum of transport in all directions) sea ice area transport through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N) to (10.5E,79.6N) 2. Canadian Archipelago = (128.2W,70.6N) to (59.3W,82.1N) 3. Barents opening = (16.8E,76.5N) to (19.2E,70.2N) 4. Bering Strait = (171W,66.2N) to (166W,65N)", + "dimensions":"siline time", + "frequency":"mon", + "long_name":"Sea ice area flux through straits", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siareaacrossline", + "positive":"", + "standard_name":"sea_ice_area_transport_across_line", + "type":"real", + "units":"m2 s-1", + "valid_max":"", + "valid_min":"" + }, + "siarean":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"total area of sea ice in the Northern hemisphere", + "dimensions":"time", + "frequency":"mon", + "long_name":"Sea ice area North", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siarean", + "positive":"", + "standard_name":"sea_ice_area", + "type":"real", + "units":"1e6 km2", + "valid_max":"", + "valid_min":"" + }, + "siareas":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"total area of sea ice in the Southern hemisphere", + "dimensions":"time", + "frequency":"mon", + "long_name":"Sea ice area South", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siareas", + "positive":"", + "standard_name":"sea_ice_area", + "type":"real", + "units":"1e6 km2", + "valid_max":"", + "valid_min":"" + }, + "sicompstren":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Computed strength of the ice pack, defined as the energy (J m-2) dissipated per unit area removed from the ice pack under compression, and assumed proportional to the change in potential energy caused by ridging.
For Hibler-type models, this is P (= P* h exp(-C(1-A)))", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Compressive sea ice strength", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sicompstren", + "positive":"", + "standard_name":"compressive_strength_of_sea_ice", + "type":"real", + "units":"N m-1", + "valid_max":"", + "valid_min":"" + }, + "siconc":{ + "cell_measures":"", + "cell_methods":"area: mean where sea time: mean", + "comment":"Area fraction of grid cell covered by sea ice", + "dimensions":"longitude latitude time typesi", + "frequency":"mon", + "long_name":"Sea Ice Area Fraction (Ocean Grid)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siconc", + "positive":"", + "standard_name":"sea_ice_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "siconca":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Area fraction of grid cell covered by sea ice", + "dimensions":"longitude latitude time typesi", + "frequency":"mon", + "long_name":"Sea Ice Area Fraction (Atmospheric Grid)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siconca", + "positive":"", + "standard_name":"sea_ice_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "sidconcdyn":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total change in sea-ice area fraction through dynamics-related processes (advection, divergence...)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice area fraction change from dynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidconcdyn", + "positive":"", + "standard_name":"tendency_of_sea_ice_area_fraction_due_to_dynamics", + "type":"real", + "units":"s-1", + "valid_max":"", + "valid_min":"" + }, + "sidconcth":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total change in sea-ice area fraction through thermodynamic processes", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice area fraction change from thermodynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidconcth", + "positive":"", + "standard_name":"tendency_of_sea_ice_area_fraction_due_to_thermodynamics", + "type":"real", + "units":"s-1", + "valid_max":"", + "valid_min":"" + }, + "sidivvel":{ + "cell_measures":"", + "cell_methods":"area: mean where sea_ice (comment: mask=siconc) time: point", + "comment":"Divergence of sea-ice velocity field (first shear strain invariant)", + "dimensions":"longitude latitude time1", + "frequency":"monPt", + "long_name":"Divergence of the sea-ice velocity field", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidivvel", + "positive":"", + "standard_name":"divergence_of_sea_ice_velocity", + "type":"real", + "units":"s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmassdyn":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total change in sea-ice mass through dynamics-related processes (advection,...)
divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice mass change from dynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmassdyn", + "positive":"", + "standard_name":"tendency_of_sea_ice_amount_due_to_dynamics", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmassevapsubl":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The rate of change of sea-ice mass through evaporation and sublimation divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice mass change through evaporation and sublimation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmassevapsubl", + "positive":"up", + "standard_name":"water_evaporation_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmassgrowthbot":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The rate of change of sea ice mass due to vertical growth of existing sea ice at its base divided by grid-cell area.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice mass change through basal growth", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmassgrowthbot", + "positive":"", + "standard_name":"tendency_of_sea_ice_amount_due_to_congelation_ice_accumulation", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmassgrowthwat":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The rate of change of sea ice mass due to sea ice formation in supercooled water (often through frazil formation) divided by grid-cell area. Together, sidmassgrowthwat and sidmassgrowthbot should give total ice growth", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice mass change through growth in supercooled open water (aka frazil)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmassgrowthwat", + "positive":"", + "standard_name":"tendency_of_sea_ice_amount_due_to_freezing_in_open_water", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmasslat":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The rate of change of sea ice mass through lateral melting divided by grid-cell area (report 0 if not explicitly calculated thermodynamically)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Lateral sea ice melt rate", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmasslat", + "positive":"", + "standard_name":"tendency_of_sea_ice_amount_due_to_lateral_melting", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmassmeltbot":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The rate of change of sea ice mass through melting at the ice bottom divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice mass change through bottom melting", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmassmeltbot", + "positive":"", + "standard_name":"tendency_of_sea_ice_amount_due_to_basal_melting", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmassmelttop":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The rate of change of sea ice mass through melting at
the ice surface divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice mass change through surface melting", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmassmelttop", + "positive":"", + "standard_name":"tendency_of_sea_ice_amount_due_to_surface_melting", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmasssi":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"The rate of change of sea ice mass due to transformation of snow to sea ice divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice mass change through snow-to-ice conversion", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmasssi", + "positive":"", + "standard_name":"tendency_of_sea_ice_amount_due_to_snow_conversion", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmassth":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total change in sea-ice mass from thermodynamic processes divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"sea-ice mass change from thermodynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmassth", + "positive":"", + "standard_name":"tendency_of_sea_ice_amount_due_to_thermodynamics", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmasstranx":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Includes transport of both sea ice and snow by advection", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"X-component of sea-ice mass transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmasstranx", + "positive":"", + "standard_name":"sea_ice_x_transport", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "sidmasstrany":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"Includes transport of both sea ice and snow by advection", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Y-component of sea-ice mass transport", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidmasstrany", + "positive":"", + "standard_name":"sea_ice_y_transport", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "sidragbot":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Oceanic drag coefficient that is used to calculate the oceanic momentum drag on sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Ocean drag coefficient", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidragbot", + "positive":"", + "standard_name":"surface_drag_coefficient_for_momentum_in_water", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "sidragtop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Atmospheric drag coefficient that is used to calculate the atmospheric momentum drag on sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Atmospheric drag coefficient", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sidragtop", + "positive":"", + "standard_name":"surface_drag_coefficient_for_momentum_in_air", + "type":"real", + 
"units":"1", + "valid_max":"", + "valid_min":"" + }, + "siextentn":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total area of all Northern-Hemisphere grid cells that are covered by at least 15 % areal fraction of sea ice", + "dimensions":"time", + "frequency":"mon", + "long_name":"Sea ice extent North", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siextentn", + "positive":"", + "standard_name":"sea_ice_extent", + "type":"real", + "units":"1e6 km2", + "valid_max":"", + "valid_min":"" + }, + "siextents":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total area of all Southern-Hemisphere grid cells that are covered by at least 15 % areal fraction of sea ice", + "dimensions":"time", + "frequency":"mon", + "long_name":"Sea ice extent South", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siextents", + "positive":"", + "standard_name":"sea_ice_extent", + "type":"real", + "units":"1e6 km2", + "valid_max":"", + "valid_min":"" + }, + "sifb":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Mean height of sea-ice surface (=snow-ice interface when snow covered) above sea level", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea-ice freeboard", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sifb", + "positive":"", + "standard_name":"sea_ice_freeboard", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "siflcondbot":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"the net heat conduction flux at the ice base", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net conductive heat fluxes in ice at the bottom", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siflcondbot", + "positive":"down", + "standard_name":"conductive_heat_flux_at_sea_ice_bottom", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "siflcondtop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"the net heat conduction flux at the ice surface", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net conductive heat flux in ice at the surface", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siflcondtop", + "positive":"down", + "standard_name":"conductive_heat_flux_at_sea_ice_surface", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "siflfwbot":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Total flux of fresh water from water into sea ice divided by grid-cell area; This flux is negative during ice growth (liquid water mass decreases, hence upward flux of freshwater), positive during ice melt (liquid water mass increases, hence downward flux of freshwater)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Freshwater flux from sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siflfwbot", + "positive":"", + "standard_name":"freshwater_flux_from_ice", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "siflfwdrain":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Total flux of fresh water from sea-ice 
surface into underlying ocean. This combines both surface melt water that drains directly into the ocean and the drainage of surface melt pond. By definition, this flux is always positive.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Freshwater flux from sea-ice surface", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siflfwdrain", + "positive":"", + "standard_name":"freshwater_flux_from_ice_surface", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sifllatstop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"the net latent heat flux over sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net latent heat flux over sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sifllatstop", + "positive":"up", + "standard_name":"surface_upward_latent_heat_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "sifllwdtop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"the downwelling longwave flux over sea ice (always positive)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Downwelling longwave flux over sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sifllwdtop", + "positive":"down", + "standard_name":"surface_downwelling_longwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "sifllwutop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"the upwelling longwave flux over sea ice (always negative)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Upwelling Longwave Flux over Sea Ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sifllwutop", + "positive":"up", + "standard_name":"surface_upwelling_longwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "siflsenstop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"the net sensible heat flux over sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Net upward sensible heat flux over sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siflsenstop", + "positive":"up", + "standard_name":"surface_upward_sensible_heat_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "siflsensupbot":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"the net sensible heat flux under sea ice from the ocean", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siflsensupbot", + "positive":"up", + "standard_name":"upward_sea_ice_basal_heat_flux", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "siflswdbot":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"The downwelling shortwave flux underneath sea ice (always positive)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Downwelling shortwave flux under sea ice", + "ok_max_mean_abs":"", + 
"ok_min_mean_abs":"", + "out_name":"siflswdbot", + "positive":"down", + "standard_name":"bottom_downwelling_shortwave_flux_into_ocean", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "siflswdtop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"The downwelling shortwave flux over sea ice (always positive by sign convention)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Downwelling shortwave flux over sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siflswdtop", + "positive":"down", + "standard_name":"surface_downwelling_shortwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "siflswutop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"The upwelling shortwave flux over sea ice (always negative)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Upwelling Shortwave Flux over Sea Ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siflswutop", + "positive":"up", + "standard_name":"surface_upwelling_shortwave_flux_in_air", + "type":"real", + "units":"W m-2", + "valid_max":"", + "valid_min":"" + }, + "siforcecoriolx":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"X-component of force on sea ice caused by coriolis force", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Coriolis force term in force balance (x-component)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siforcecoriolx", + "positive":"", + "standard_name":"coriolis_force_on_sea_ice_x", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "siforcecorioly":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"Y-component of force on sea ice caused by coriolis force", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Coriolis force term in force balance (y-component)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siforcecorioly", + "positive":"", + "standard_name":"coriolis_force_on_sea_ice_y", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "siforceintstrx":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"X-component of force on sea ice caused by internal stress (divergence of sigma)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Internal stress term in force balance (x-component)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siforceintstrx", + "positive":"", + "standard_name":"internal_stress_in_sea_ice_x", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "siforceintstry":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"Y-component of force on sea ice caused by internal stress (divergence of sigma)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Internal stress term in force balance (y-component)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siforceintstry", + "positive":"", + "standard_name":"internal_stress_in_sea_ice_y", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + 
"siforcetiltx":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"X-component of force on sea ice caused by sea-surface tilt", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea-surface tilt term in force balance (x-component)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siforcetiltx", + "positive":"", + "standard_name":"sea_surface_tilt_force_on_sea_ice_x", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "siforcetilty":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"Y-component of force on sea ice caused by sea-surface tilt", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea-surface tilt term in force balance (y-component)", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siforcetilty", + "positive":"", + "standard_name":"sea_surface_tilt_force_on_sea_ice_y", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "sihc":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Heat content of all ice in grid cell divided by total grid-cell area. Water at 0 Celsius is assumed to have a heat content of 0 J. Does not include heat content of snow, but does include heat content of brine. Heat content is always negative, since both the sensible and the latent heat content of ice are less than that of water", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea-ice heat content per unit area", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sihc", + "positive":"", + "standard_name":"integral_of_sea_ice_temperature_wrt_depth_expressed_as_heat_content", + "type":"real", + "units":"J m-2", + "valid_max":"", + "valid_min":"" + }, + "siitdconc":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"Area fraction of grid cell covered by each ice-thickness category (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of the categories as third coordinate axis)", + "dimensions":"longitude latitude iceband time", + "frequency":"mon", + "long_name":"Sea-ice area fractions in thickness categories", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siitdconc", + "positive":"", + "standard_name":"sea_ice_area_fraction_over_categories", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "siitdsnconc":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siitdconc)", + "comment":"Area fraction of grid cell covered by snow in each ice-thickness category (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of the categories as third coordinate axis)", + "dimensions":"longitude latitude iceband time", + "frequency":"mon", + "long_name":"Snow area fractions in thickness categories", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siitdsnconc", + "positive":"", + "standard_name":"snow_area_fraction_over_categories", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "siitdsnthick":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siitdconc)", + "comment":"Actual thickness of snow in each category (NOT volume divided by grid area), (vector with one 
entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of categories as third coordinate axis)", + "dimensions":"longitude latitude iceband time", + "frequency":"mon", + "long_name":"Snow thickness in thickness categories", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siitdsnthick", + "positive":"", + "standard_name":"snow_thickness_over_categories", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "siitdthick":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siitdconc)", + "comment":"Actual (floe) thickness of sea ice in each category (NOT volume divided by grid area), (vector with one entry for each thickness category starting from the thinnest category, netcdf file should use thickness bounds of categories as third coordinate axis)", + "dimensions":"longitude latitude iceband time", + "frequency":"mon", + "long_name":"Sea-ice thickness in thickness categories", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siitdthick", + "positive":"", + "standard_name":"sea_ice_thickness_over_categories", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "simass":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total mass of sea ice divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea-ice mass per area", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"simass", + "positive":"", + "standard_name":"sea_ice_amount", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "simassacrossline":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"net (sum of transport in all directions) sea ice area transport through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N to (10.5E,79.6N) 2. Canadian Archipelago = (128.2W,70.6N) to (59.3W,82.1N) 3. Barents opening = (16.8E,76.5N) to (19.2E,70.2N) 4. 
Bering Strait = (171W,66.2N) to (166W,65N)", + "dimensions":"siline time", + "frequency":"mon", + "long_name":"Sea mass area flux through straits", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"simassacrossline", + "positive":"", + "standard_name":"sea_ice_transport_across_line", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + }, + "simpconc":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"Fraction of sea ice, by area, which is covered by melt ponds, giving equal weight to every square metre of sea ice .", + "dimensions":"longitude latitude time typemp", + "frequency":"mon", + "long_name":"Percentage Cover of Sea-Ice by Meltpond", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"simpconc", + "positive":"", + "standard_name":"area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "simpmass":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice_melt_pond (comment: mask=simpconc)", + "comment":"Meltpond mass per area of sea ice.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Meltpond Mass per Unit Area", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"simpmass", + "positive":"", + "standard_name":"surface_liquid_water_amount", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "simprefrozen":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice_melt_pond (comment: mask=simpconc)", + "comment":"Volume of refrozen ice on melt ponds divided by meltpond covered area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Thickness of Refrozen Ice on Melt Pond", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"simprefrozen", + "positive":"", + "standard_name":"melt_pond_refrozen_ice", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "sipr":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"mass of liquid precipitation falling onto sea ice divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Rainfall rate over sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sipr", + "positive":"", + "standard_name":"rainfall_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sirdgconc":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Fraction of sea ice, by area, which is covered by sea ice ridges, giving equal weight to every square metre of sea ice .", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Percentage Cover of Sea-Ice by Ridging", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sirdgconc", + "positive":"", + "standard_name":"fraction_of_ridged_sea_ice", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "sirdgthick":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=sirdgconc - ridges only)", + "comment":"Sea Ice Ridge Height (representing mean height over the ridged area)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Ridged ice thickness", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sirdgthick", + "positive":"", + "standard_name":"thickness_of_ridged_sea_ice", + 
"type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "sisali":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Mean sea-ice salinity of all sea ice in grid cell", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea ice salinity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sisali", + "positive":"", + "standard_name":"sea_ice_salinity", + "type":"real", + "units":"0.001", + "valid_max":"", + "valid_min":"" + }, + "sisaltmass":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total mass of all salt in sea ice divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Mass of salt in sea ice per area", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sisaltmass", + "positive":"", + "standard_name":"sea_ice_salt_mass", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "sishevel":{ + "cell_measures":"", + "cell_methods":"area: mean where sea_ice (comment: mask=siconc) time: point", + "comment":"Maximum shear of sea-ice velocity field (second shear strain invariant)", + "dimensions":"longitude latitude time1", + "frequency":"monPt", + "long_name":"Maximum shear of sea-ice velocity field", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sishevel", + "positive":"", + "standard_name":"maximum_shear_of_sea_ice_velocity", + "type":"real", + "units":"s-1", + "valid_max":"", + "valid_min":"" + }, + "sisnconc":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Fraction of sea ice, by area, which is covered by snow, giving equal weight to every square metre of sea ice . Exclude snow that lies on land or land ice.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow area fraction", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sisnconc", + "positive":"", + "standard_name":"surface_snow_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "sisnhc":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Heat-content of all snow in grid cell divided by total grid-cell area. Snow-water equivalent at 0 Celsius is assumed to have a heat content of 0 J. 
Does not include heat content of sea ice.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow-heat content per unit area", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sisnhc", + "positive":"", + "standard_name":"thermal_energy_content_of_surface_snow", + "type":"real", + "units":"J m-2", + "valid_max":"", + "valid_min":"" + }, + "sisnmass":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Total mass of snow on sea ice divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow mass per area", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sisnmass", + "positive":"", + "standard_name":"liquid_water_content_of_surface_snow", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "sisnthick":{ + "cell_measures":"", + "cell_methods":"area: mean where snow over sea_ice area: time: mean where sea_ice", + "comment":"Actual thickness of snow (snow volume divided by snow-covered area)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow thickness", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sisnthick", + "positive":"", + "standard_name":"surface_snow_thickness", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "sispeed":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Speed of ice (i.e. mean absolute velocity) to account for back-and-forth movement of the ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea-ice speed", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sispeed", + "positive":"", + "standard_name":"sea_ice_speed", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "sistremax":{ + "cell_measures":"", + "cell_methods":"area: mean where sea_ice (comment: mask=siconc) time: point", + "comment":"Maximum shear stress in sea ice (second stress invariant)", + "dimensions":"longitude latitude time1", + "frequency":"monPt", + "long_name":"Maximum shear stress in sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sistremax", + "positive":"", + "standard_name":"maximum_shear_stress", + "type":"real", + "units":"N m-1", + "valid_max":"", + "valid_min":"" + }, + "sistresave":{ + "cell_measures":"", + "cell_methods":"area: mean where sea_ice (comment: mask=siconc) time: point", + "comment":"Average normal stress in sea ice (first stress invariant)", + "dimensions":"longitude latitude time1", + "frequency":"monPt", + "long_name":"Average normal stress in sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sistresave", + "positive":"", + "standard_name":"average_normal_stress", + "type":"real", + "units":"N m-1", + "valid_max":"", + "valid_min":"" + }, + "sistrxdtop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"X-component of atmospheric stress on sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"X-component of atmospheric stress on sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sistrxdtop", + "positive":"down", + "standard_name":"surface_downward_x_stress", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "sistrxubot":{ + "cell_measures":"", + "cell_methods":"area: time: mean 
where sea_ice (comment: mask=siconc or siconca)", + "comment":"X-component of ocean stress on sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"X-component of ocean stress on sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sistrxubot", + "positive":"up", + "standard_name":"upward_x_stress_at_sea_ice_base", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "sistrydtop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"Y-component of atmospheric stress on sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Y-component of atmospheric stress on sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sistrydtop", + "positive":"down", + "standard_name":"surface_downward_y_stress", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "sistryubot":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Y-component of ocean stress on sea ice", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Y-component of ocean stress on sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sistryubot", + "positive":"up", + "standard_name":"upward_y_stress_at_sea_ice_base", + "type":"real", + "units":"N m-2", + "valid_max":"", + "valid_min":"" + }, + "sitempbot":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Report temperature at interface, NOT temperature within lowermost model layer", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Temperature at ice-ocean interface", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sitempbot", + "positive":"", + "standard_name":"sea_ice_bottom_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "sitempsnic":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Report surface temperature of ice where snow thickness is zero", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Temperature at snow-ice interface", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sitempsnic", + "positive":"", + "standard_name":"sea_ice_surface_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "sitemptop":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Report surface temperature of snow where snow covers the sea ice.", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Surface temperature of sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sitemptop", + "positive":"", + "standard_name":"sea_ice_surface_temperature", + "type":"real", + "units":"K", + "valid_max":"", + "valid_min":"" + }, + "sithick":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc or siconca)", + "comment":"Actual (floe) thickness of sea ice (NOT volume divided by grid area as was done in CMIP5)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea Ice Thickness", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sithick", + "positive":"", + "standard_name":"sea_ice_thickness", + "type":"real", + 
"units":"m", + "valid_max":"", + "valid_min":"" + }, + "sitimefrac":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Fraction of time steps of the averaging period during which sea ice is present (siconc >0 ) in a grid cell", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Fraction of time steps with sea ice", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sitimefrac", + "positive":"", + "standard_name":"sea_ice_time_fraction", + "type":"real", + "units":"1", + "valid_max":"", + "valid_min":"" + }, + "siu":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"The x-velocity of ice on native model grid", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"X-component of sea ice velocity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siu", + "positive":"", + "standard_name":"sea_ice_x_velocity", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "siv":{ + "cell_measures":"", + "cell_methods":"area: time: mean where sea_ice (comment: mask=siconc)", + "comment":"The y-velocity of ice on native model grid", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Y-component of sea ice velocity", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"siv", + "positive":"", + "standard_name":"sea_ice_y_velocity", + "type":"real", + "units":"m s-1", + "valid_max":"", + "valid_min":"" + }, + "sivol":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"Total volume of sea ice divided by grid-cell area (this used to be called ice thickness in CMIP5)", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Sea-ice volume per area", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sivol", + "positive":"", + "standard_name":"sea_ice_thickness", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "sivoln":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"total volume of sea ice in the Northern hemisphere", + "dimensions":"time", + "frequency":"mon", + "long_name":"Sea ice volume North", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sivoln", + "positive":"", + "standard_name":"sea_ice_volume", + "type":"real", + "units":"1e3 km3", + "valid_max":"", + "valid_min":"" + }, + "sivols":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"total volume of sea ice in the Southern hemisphere", + "dimensions":"time", + "frequency":"mon", + "long_name":"Sea ice volume South", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sivols", + "positive":"", + "standard_name":"sea_ice_volume", + "type":"real", + "units":"1e3 km3", + "valid_max":"", + "valid_min":"" + }, + "sndmassdyn":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"the rate of change of snow mass through advection with sea ice divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow Mass Rate of Change through Avection by Sea-ice Dynamics", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sndmassdyn", + "positive":"", + "standard_name":"tendency_of_snow_mass_due_to_sea_ice_dynamics", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sndmassmelt":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"the rate of change of snow mass through melt 
divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow Mass Rate of Change through Melt", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sndmassmelt", + "positive":"", + "standard_name":"surface_snow_melt_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sndmasssi":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"the rate of change of snow mass due to transformation of snow to sea ice divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow Mass Rate of Change through Snow-to-Ice Conversion", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sndmasssi", + "positive":"", + "standard_name":"tendency_of_surface_snow_amount_due_to_conversion_of_snow_to_sea_ice", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sndmasssnf":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"mass of solid precipitation falling onto sea ice divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"snow mass change through snow fall", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sndmasssnf", + "positive":"", + "standard_name":"snowfall_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sndmasssubl":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"the rate of change of snow mass through sublimation and evaporation divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow Mass Rate of Change through Evaporation or Sublimation", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sndmasssubl", + "positive":"", + "standard_name":"surface_snow_sublimation_flux", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "sndmasswindrif":{ + "cell_measures":"", + "cell_methods":"area: time: mean", + "comment":"the rate of change of snow mass through wind drift of snow (from sea-ice into the sea) divided by grid-cell area", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Snow Mass Rate of Change through Wind Drift of Snow", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sndmasswindrif", + "positive":"", + "standard_name":"tendency_of_snow_mass_due_to_drifting_snow", + "type":"real", + "units":"kg m-2 s-1", + "valid_max":"", + "valid_min":"" + }, + "snmassacrossline":{ + "cell_measures":"", + "cell_methods":"time: mean", + "comment":"net (sum of transport in all directions) snow mass transport through the following four passages, positive into the Arctic Ocean 1. Fram Strait = (11.5W,81.3N to (10.5E,79.6N) 2. 
Canadian Archipela", + "dimensions":"siline time", + "frequency":"mon", + "long_name":"Snow mass flux through straits", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"snmassacrossline", + "positive":"", + "standard_name":"snow_mass_transport_across_line", + "type":"real", + "units":"kg s-1", + "valid_max":"", + "valid_min":"" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_coordinate.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_coordinate.json new file mode 100644 index 0000000000..cafd418bd0 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_coordinate.json @@ -0,0 +1,2900 @@ +{ + "axis_entry":{ + "alev1":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"lowest atmospheric model level", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "alt16":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"altitude", + "must_have_bounds":"yes", + "out_name":"alt16", + "positive":"up", + "requested":[ + "0", + "250", + "750", + "1250", + "1750", + "2250", + "2750", + "3500", + "4500", + "6000", + "8000", + "10000", + "12000", + "14500", + "16000", + "18000" + ], + "requested_bounds":[ + "-99000.0", + "0.0", + "0.0", + "500.0", + "500.0", + "1000.0", + "1000.0", + "1500.0", + "1500.0", + "2000.0", + "2000.0", + "2500.0", + "2500.0", + "3000.0", + "3000.0", + "4000.0", + "4000.0", + "5000.0", + "5000.0", + "7000.0", + "7000.0", + "9000.0", + "9000.0", + "11000.0", + "11000.0", + "13000.0", + "13000.0", + "15000.0", + "15000.0", + "17000.0", + "17000.0", + "99000.0" + ], + "standard_name":"altitude", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "alt40":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"altitude", + "must_have_bounds":"yes", + "out_name":"alt40", + "positive":"up", + "requested":[ + "240.", + "720.", + "1200.", + "1680.", + "2160.", + "2640.", + "3120.", + "3600.", + "4080.", + "4560.", + "5040.", + "5520.", + "6000.", + "6480.", + "6960.", + "7440.", + "7920.", + "8400.", + "8880.", + "9360.", + "9840.", + "10320.", + "10800.", + "11280.", + "11760.", + "12240.", + "12720.", + "13200.", + "13680.", + "14160.", + "14640.", + "15120.", + "15600.", + "16080.", + "16560.", + "17040.", + "17520.", + "18000.", + "18480.", + "18960." 
+ ], + "requested_bounds":[ + "0.0", + "480.0", + "480.0", + "960.0", + "960.0", + "1440.0", + "1440.0", + "1920.0", + "1920.0", + "2400.0", + "2400.0", + "2880.0", + "2880.0", + "3360.0", + "3360.0", + "3840.0", + "3840.0", + "4320.0", + "4320.0", + "4800.0", + "4800.0", + "5280.0", + "5280.0", + "5760.0", + "5760.0", + "6240.0", + "6240.0", + "6720.0", + "6720.0", + "7200.0", + "7200.0", + "7680.0", + "7680.0", + "8160.0", + "8160.0", + "8640.0", + "8640.0", + "9120.0", + "9120.0", + "9600.0", + "9600.0", + "10080.0", + "10080.0", + "10560.0", + "10560.0", + "11040.0", + "11040.0", + "11520.0", + "11520.0", + "12000.0", + "12000.0", + "12480.0", + "12480.0", + "12960.0", + "12960.0", + "13440.0", + "13440.0", + "13920.0", + "13920.0", + "14400.0", + "14400.0", + "14880.0", + "14880.0", + "15360.0", + "15360.0", + "15840.0", + "15840.0", + "16320.0", + "16320.0", + "16800.0", + "16800.0", + "17280.0", + "17280.0", + "17760.0", + "17760.0", + "18240.0", + "18240.0", + "18720.0", + "18720.0", + "19200.0" + ], + "standard_name":"altitude", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "alternate_hybrid_sigma":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"p = ap + b*ps", + "long_name":"hybrid sigma pressure coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"atmosphere_hybrid_sigma_pressure_coordinate", + "stored_direction":"decreasing", + "tolerance":"", + "type":"", + "units":"1", + "valid_max":"1.0", + "valid_min":"0.0", + "value":"", + "z_bounds_factors":"ap: ap_bnds b: b_bnds ps: ps", + "z_factors":"ap: ap b: b ps: ps" + }, + "basin":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"ocean basin", + "must_have_bounds":"no", + "out_name":"basin", + "positive":"", + "requested":[ + "atlantic_arctic_ocean", + "indian_pacific_ocean", + "global_ocean" + ], + "requested_bounds":"", + "standard_name":"region", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "dbze":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"CloudSat simulator equivalent radar reflectivity factor", + "must_have_bounds":"yes", + "out_name":"dbze", + "positive":"", + "requested":[ + "-47.5", + "-42.5", + "-37.5", + "-32.5", + "-27.5", + "-22.5", + "-17.5", + "-12.5", + "-7.5", + "-2.5", + "2.5", + "7.5", + "12.5", + "17.5", + "22.5" + ], + "requested_bounds":[ + "-50.0", + "-45.0", + "-45.0", + "-40.0", + "-40.0", + "-35.0", + "-35.0", + "-30.0", + "-30.0", + "-25.0", + "-25.0", + "-20.0", + "-20.0", + "-15.0", + "-15.0", + "-10.0", + "-10.0", + "-5.0", + "-5.0", + "0.0", + "0.0", + "5.0", + "5.0", + "10.0", + "10.0", + "15.0", + "15.0", + "20.0", + "20.0", + "25.0" + ], + "standard_name":"equivalent_reflectivity_factor", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"dBZ", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "depth0m":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"no", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + 
"stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"100.0", + "valid_min":"0.0", + "value":"0.", + "z_bounds_factors":"", + "z_factors":"" + }, + "depth100m":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"no", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"120.0", + "valid_min":"80.0", + "value":"100.", + "z_bounds_factors":"", + "z_factors":"" + }, + "depth2000m":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"no", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"2200.0", + "valid_min":"1980.0", + "value":"2000", + "z_bounds_factors":"", + "z_factors":"" + }, + "depth300m":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"no", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"320.0", + "valid_min":"280.0", + "value":"300", + "z_bounds_factors":"", + "z_factors":"" + }, + "depth700m":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"no", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"720.0", + "valid_min":"680.0", + "value":"700", + "z_bounds_factors":"", + "z_factors":"" + }, + "depth_coord":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"ocean depth coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"", + "units":"m", + "valid_max":"12000.0", + "valid_min":"0.0", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "effectRadIc":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Effective Radius [Values to be specified]", + "must_have_bounds":"", + "out_name":"effectRadIc", + "positive":"", + "requested":[ + "5.", + "15.", + "25.", + "35.", + "50.", + "75." + ], + "requested_bounds":[ + "0.0", + "10.0", + "10.0", + "20.0", + "20.0", + "30.0", + "30.0", + "40.0", + "40.0", + "60.0", + "60.0", + "90.0" + ], + "standard_name":"", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"micron", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "effectRadLi":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Effective Radius [Values to be specified]", + "must_have_bounds":"", + "out_name":"effectRadLi", + "positive":"", + "requested":[ + "4.", + "9.", + "11.5", + "14.", + "17.5", + "25." 
+ ], + "requested_bounds":[ + "0.0", + "8.0", + "8.0", + "10.0", + "10.0", + "13.0", + "13.0", + "15.0", + "15.0", + "20.0", + "20.0", + "30.0" + ], + "standard_name":"", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"micron", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "height100m":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"height", + "must_have_bounds":"no", + "out_name":"height", + "positive":"up", + "requested":"", + "requested_bounds":"", + "standard_name":"height", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"120.0", + "valid_min":"80.0", + "value":"100.", + "z_bounds_factors":"", + "z_factors":"" + }, + "height10m":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"height", + "must_have_bounds":"no", + "out_name":"height", + "positive":"up", + "requested":"", + "requested_bounds":"", + "standard_name":"height", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"30.0", + "valid_min":"1.0", + "value":"10.", + "z_bounds_factors":"", + "z_factors":"" + }, + "height2m":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"height", + "must_have_bounds":"no", + "out_name":"height", + "positive":"up", + "requested":"", + "requested_bounds":"", + "standard_name":"height", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"10.0", + "valid_min":"1.0", + "value":"2.", + "z_bounds_factors":"", + "z_factors":"" + }, + "hybrid_height":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"z = a + b*orog", + "long_name":"hybrid height coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"up", + "requested":"", + "requested_bounds":"", + "standard_name":"atmosphere_hybrid_height_coordinate", + "stored_direction":"increasing", + "tolerance":"", + "type":"", + "units":"m", + "valid_max":"", + "valid_min":"0.0", + "value":"", + "z_bounds_factors":"a: lev_bnds b: b_bnds orog: orog", + "z_factors":"a: lev b: b orog: orog" + }, + "iceband":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Ice Depth Band", + "must_have_bounds":"yes", + "out_name":"iceband", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"sea_ice_thickness", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "landUse":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Land use type", + "must_have_bounds":"no", + "out_name":"landuse", + "positive":"", + "requested":[ + "primary_and_secondary_land", + "pastures", + "crops", + "urban" + ], + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "latitude":{ + "axis":"Y", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"latitude", + "must_have_bounds":"yes", + "out_name":"lat", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"latitude", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"degrees_north", + 
"valid_max":"90.0", + "valid_min":"-90.0", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "location":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"location index", + "must_have_bounds":"no", + "out_name":"loc", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"", + "stored_direction":"increasing", + "tolerance":"", + "type":"integer", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "longitude":{ + "axis":"X", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"longitude", + "must_have_bounds":"yes", + "out_name":"lon", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"longitude", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"degrees_east", + "valid_max":"360.0", + "valid_min":"0.0", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "natural_log_pressure":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"p = p0 * exp(-lev)", + "long_name":"atmosphere natural log pressure coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"atmosphere_ln_pressure_coordinate", + "stored_direction":"decreasing", + "tolerance":"", + "type":"", + "units":"", + "valid_max":"20.0", + "valid_min":"-1.0", + "value":"", + "z_bounds_factors":"p0: p0 lev: lev_bnds", + "z_factors":"p0: p0 lev: lev" + }, + "ocean_double_sigma":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"for k <= k_c:\n z(k,j,i)= sigma(k)*f(j,i) \n for k > k_c:\n z(k,j,i)= f(j,i) + (sigma(k)-1)*(depth(j,i)-f(j,i)) \n f(j,i)= 0.5*(z1+ z2) + 0.5*(z1-z2)* tanh(2*a/(z1-z2)*(depth(j,i)-href))", + "long_name":"ocean double sigma coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"up", + "requested":"", + "requested_bounds":"", + "standard_name":"ocean_double_sigma", + "stored_direction":"", + "tolerance":"", + "type":"", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"sigma: sigma_bnds depth: depth z1: z1 z2: z2 a: a href: href k_c: k_c", + "z_factors":"sigma: sigma depth: depth z1: z1 z2: z2 a: a_coeff href: href k_c: k_c" + }, + "ocean_s":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"z(n,k,j,i) = eta(n,j,i)*(1+s(k)) + depth_c*s(k) + (depth(j,i)-depth_c)*C(k) \n where \n C(k)=(1-b)*sinh(a*s(k))/sinh(a) +\n b*(tanh(a*(s(k)+0.5))/(2*tanh(0.5*a)) - 0.5)", + "long_name":"ocean s-coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"up", + "requested":"", + "requested_bounds":"", + "standard_name":"ocean_s_coordinate", + "stored_direction":"decreasing", + "tolerance":"", + "type":"", + "units":"", + "valid_max":"0.0", + "valid_min":"-1.0", + "value":"", + "z_bounds_factors":"s: lev_bnds eta: eta depth: depth a: a b: b depth_c: depth_c", + "z_factors":"s: lev eta: eta depth: depth a: a_coeff b: b_coeff depth_c: depth_c" + }, + "ocean_sigma":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"z(n,k,j,i) = eta(n,j,i) + sigma(k)*(depth(j,i)+eta(n,j,i))", + "long_name":"ocean sigma coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"up", + "requested":"", + "requested_bounds":"", + "standard_name":"ocean_sigma_coordinate", + "stored_direction":"decreasing", + "tolerance":"", + "type":"", + "units":"", + "valid_max":"0.0", + 
"valid_min":"-1.0", + "value":"", + "z_bounds_factors":"sigma: lev_bnds eta: eta depth: depth", + "z_factors":"sigma: lev eta: eta depth: depth" + }, + "ocean_sigma_z":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"for k <= nsigma: z(n,k,j,i) = eta(n,j,i) + sigma(k)*(min(depth_c,depth(j,i))+eta(n,j,i)) ; for k > nsigma: z(n,k,j,i) = zlev(k)", + "long_name":"ocean sigma over z coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"ocean_sigma_z", + "stored_direction":"", + "tolerance":"", + "type":"", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"sigma: sigma_bnds eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev_bnds", + "z_factors":"sigma: sigma eta: eta depth: depth depth_c: depth_c nsigma: nsigma zlev: zlev" + }, + "olayer100m":{ + "axis":"Z", + "bounds_values":"0.0 100.0", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"no", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"100.0", + "valid_min":"0.0", + "value":"50.", + "z_bounds_factors":"", + "z_factors":"" + }, + "oline":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"ocean passage", + "must_have_bounds":"no", + "out_name":"line", + "positive":"", + "requested":[ + "barents_opening", + "bering_strait", + "canadian_archipelago", + "denmark_strait", + "drake_passage", + "english_channel", + "pacific_equatorial_undercurrent", + "faroe_scotland_channel", + "florida_bahamas_strait", + "fram_strait", + "iceland_faroe_channel", + "indonesian_throughflow", + "mozambique_channel", + "taiwan_luzon_straits", + "windward_passage" + ], + "requested_bounds":"", + "standard_name":"region", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "p10":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"1000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p100":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"10000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p1000":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"100000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p200":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", 
+ "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"20000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p220":{ + "axis":"Z", + "bounds_values":"44000.0 0.0", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"yes", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"22000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p500":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"50000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p560":{ + "axis":"Z", + "bounds_values":"68000.0 44000.0", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"yes", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"56000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p700":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"70000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p840":{ + "axis":"Z", + "bounds_values":"100000.0 68000.0", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"yes", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"84000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "p850":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"85000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "pl700":{ + "axis":"Z", + "bounds_values":"85000.0 60000.0", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"yes", + "out_name":"plev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"70000.", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev10":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + 
"formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "100000.", + "85000.", + "70000.", + "50000.", + "25000.", + "15000.", + "10000.", + "7000.", + "5000.", + "1000." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev19":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "100000.", + "92500.", + "85000.", + "70000.", + "60000.", + "50000.", + "40000.", + "30000.", + "25000.", + "20000.", + "15000.", + "10000.", + "7000.", + "5000.", + "3000.", + "2000.", + "1000.", + "500.", + "100." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev23":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "100000.", + "92500.", + "85000.", + "70000.", + "60000.", + "50000.", + "40000.", + "30000.", + "25000.", + "20000.", + "15000.", + "10000.", + "7000.", + "5000.", + "3000.", + "2000.", + "1000.", + "700.", + "500.", + "300.", + "200.", + "100.", + "40." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev27":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "100000.", + "97500.", + "95000.", + "92500.", + "90000.", + "87500.", + "85000.", + "82500.", + "80000.", + "77500.", + "75000.", + "70000.", + "65000.", + "60000.", + "55000.", + "50000.", + "45000.", + "40000.", + "35000.", + "30000.", + "25000.", + "22500.", + "20000.", + "17500.", + "15000.", + "12500.", + "10000." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev3":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "85000.", + "50000.", + "25000." 
+ ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev39":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "100000.", + "92500.", + "85000.", + "70000.", + "60000.", + "50000.", + "40000.", + "30000.", + "25000.", + "20000.", + "17000.", + "15000.", + "13000.", + "11500.", + "10000.", + "9000.", + "8000.", + "7000.", + "5000.", + "3000.", + "2000.", + "1500.", + "1000.", + "700.", + "500.", + "300.", + "200.", + "150.", + "100.", + "70.", + "50.", + "40.", + "30.", + "20.", + "15.", + "10.", + "7.", + "5.", + "3." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev3h":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "10000.", + "1000.", + "100." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev4":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "92500.", + "85000.", + "50000.", + "25000." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev7":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"yes", + "out_name":"plev", + "positive":"down", + "requested":[ + "90000.", + "74000.", + "62000.", + "50000.", + "37500.", + "24500.", + "9000." + ], + "requested_bounds":[ + "100000.", + "80000.", + "80000.", + "68000.", + "68000.", + "56000.", + "56000.", + "44000.", + "44000.", + "31000.", + "31000.", + "18000.", + "18000.", + " 0." + ], + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"0.001", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev7c":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"yes", + "out_name":"plev", + "positive":"down", + "requested":[ + "90000.", + "74000.", + "62000.", + "50000.", + "37500.", + "24500.", + "9000." 
+ ], + "requested_bounds":[ + "100000.0", + "80000.0", + "80000.0", + "68000.0", + "68000.0", + "56000.0", + "56000.0", + "44000.0", + "44000.0", + "31000.0", + "31000.0", + "18000.0", + "18000.0", + "0.0" + ], + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev7h":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "92500.", + "85000.", + "70000.", + "60000.", + "50000.", + "25000.", + "5000." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "plev8":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"pressure", + "must_have_bounds":"no", + "out_name":"plev", + "positive":"down", + "requested":[ + "100000.", + "85000.", + "70000.", + "50000.", + "25000.", + "10000.", + "5000.", + "1000." + ], + "requested_bounds":"", + "standard_name":"air_pressure", + "stored_direction":"decreasing", + "tolerance":"", + "type":"double", + "units":"Pa", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "rho":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"potential density referenced to 2000 dbar", + "must_have_bounds":"yes", + "out_name":"rho", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"sea_water_potential_density", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"kg m-3", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "scatratio":{ + "axis":"", + "bounds_values":"0.0 0.01 1.2 3.0 5.0 7.0 10.0 15.0 20.0 25.0 30.0 40.0 50.0 60.0 80.0 100000.0", + "climatology":"", + "formula":"", + "long_name":"lidar backscattering ratio", + "must_have_bounds":"yes", + "out_name":"scatratio", + "positive":"", + "requested":[ + "0.005", + "0.605", + "2.1", + "4.", + "6.", + "8.5", + "12.5", + "17.5", + "22.5", + "27.5", + "35.", + "45.", + "55.", + "70.", + "50040." 
+ ], + "requested_bounds":[ + "0.0", + "0.01", + "0.01", + "1.2", + "1.2", + "3.0", + "3.0", + "5.0", + "5.0", + "7.0", + "7.0", + "10.0", + "10.0", + "15.0", + "15.0", + "20.0", + "20.0", + "25.0", + "25.0", + "30.0", + "30.0", + "40.0", + "40.0", + "50.0", + "50.0", + "60.0", + "60.0", + "80.0", + "80.0", + "100000.0" + ], + "standard_name":"backscattering_ratio", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"1", + "valid_max":"", + "valid_min":"", + "value":"0.005, 0.605, 2.1, 4, 6, 8.5, 12.5, 17.5, 22.5, 27.5, 35, 45, 55, 70, 50040", + "z_bounds_factors":"", + "z_factors":"" + }, + "sdepth":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"yes", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"200.0", + "valid_min":"0.0", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "sdepth1":{ + "axis":"Z", + "bounds_values":"0.0 0.1", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"yes", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"0.2", + "valid_min":"0.0", + "value":"0.05", + "z_bounds_factors":"", + "z_factors":"" + }, + "siline":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"ocean passage", + "must_have_bounds":"no", + "out_name":"line", + "positive":"", + "requested":[ + "fram_strait,", + "canadian_archipelego,", + "barents_opening,", + "bering_strait" + ], + "requested_bounds":"", + "standard_name":"region", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "site":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"site index", + "must_have_bounds":"no", + "out_name":"site", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"", + "stored_direction":"", + "tolerance":"", + "type":"integer", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "smooth_level":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"z = a*ztop + b1*zsurf1 + b2*zsurf2", + "long_name":"atmosphere smooth level vertical (SLEVE) coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"up", + "requested":"", + "requested_bounds":"", + "standard_name":"atmosphere_sleve_coordinate", + "stored_direction":"increasing", + "tolerance":"", + "type":"", + "units":"m", + "valid_max":"800000.0", + "valid_min":"-200.0", + "value":"", + "z_bounds_factors":"a: a_bnds b1: b1_bnds b2: b2_bnds ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2", + "z_factors":"a: a b1: b1 b2: b2 ztop: ztop zsurf1: zsurf1 zsurf2: zsurf2" + }, + "snowband":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Snow Depth Band", + "must_have_bounds":"yes", + "out_name":"snowband", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"surface_snow_thickness", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", 
+ "z_factors":"" + }, + "snowdepth":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"depth", + "must_have_bounds":"yes", + "out_name":"depth", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"depth", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"m", + "valid_max":"200.0", + "valid_min":"0.0", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "soilpools":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Soil Pools", + "must_have_bounds":"no", + "out_name":"soilpools", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "spectband":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Spectral Frequency Band", + "must_have_bounds":"yes", + "out_name":"spectband", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"sensor_band_central_radiation_wavenumber", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"m-1", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "standard_hybrid_sigma":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"p = a*p0 + b*ps", + "long_name":"hybrid sigma pressure coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"atmosphere_hybrid_sigma_pressure_coordinate", + "stored_direction":"decreasing", + "tolerance":"", + "type":"", + "units":"1", + "valid_max":"1.0", + "valid_min":"0.0", + "value":"", + "z_bounds_factors":"p0: p0 a: a_bnds b: b_bnds ps: ps", + "z_factors":"p0: p0 a: a b: b ps: ps" + }, + "standard_sigma":{ + "axis":"Z", + "bounds_values":"", + "climatology":"", + "formula":"p = ptop + sigma*(ps - ptop)", + "long_name":"sigma coordinate", + "must_have_bounds":"yes", + "out_name":"lev", + "positive":"down", + "requested":"", + "requested_bounds":"", + "standard_name":"atmosphere_sigma_coordinate", + "stored_direction":"decreasing", + "tolerance":"", + "type":"", + "units":"", + "valid_max":"1.0", + "valid_min":"0.0", + "value":"", + "z_bounds_factors":"ptop: ptop sigma: lev_bnds ps: ps", + "z_factors":"ptop: ptop sigma: lev ps: ps" + }, + "sza5":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"solar zenith angle", + "must_have_bounds":"no", + "out_name":"sza", + "positive":"", + "requested":[ + "0.", + "20.", + "40.", + "60.", + "80." + ], + "requested_bounds":"", + "standard_name":"solar_zenith_angle", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"degree", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "tau":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"cloud optical thickness", + "must_have_bounds":"yes", + "out_name":"tau", + "positive":"", + "requested":[ + "0.15", + "0.8", + "2.45", + "6.5", + "16.2", + "41.5", + "100." 
+ ], + "requested_bounds":[ + "0.0", + "0.3", + "0.3", + "1.3", + "1.3", + "3.6", + "3.6", + "9.4", + "9.4", + "23.0", + "23.0", + "60.0", + "60.0", + "100000.0" + ], + "standard_name":"atmosphere_optical_thickness_due_to_cloud", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"1", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "time":{ + "axis":"T", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"time", + "must_have_bounds":"yes", + "out_name":"time", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"time", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"days since ?", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "time1":{ + "axis":"T", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"time", + "must_have_bounds":"no", + "out_name":"time", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"time", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"days since ?", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "time2":{ + "axis":"T", + "bounds_values":"", + "climatology":"yes", + "formula":"", + "long_name":"time", + "must_have_bounds":"yes", + "out_name":"time", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"time", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"days since ?", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "time3":{ + "axis":"T", + "bounds_values":"", + "climatology":"yes", + "formula":"", + "long_name":"time", + "must_have_bounds":"yes", + "out_name":"time", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"time", + "stored_direction":"increasing", + "tolerance":"", + "type":"double", + "units":"days since ?", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "typebare":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"surface type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"bare_ground", + "z_bounds_factors":"", + "z_factors":"" + }, + "typeburnt":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Burnt vegetation area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"burnt_vegetation", + "z_bounds_factors":"", + "z_factors":"" + }, + "typec3pft":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"surface type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"c3_plant_functional_types", + "z_bounds_factors":"", + "z_factors":"" + }, + "typec4pft":{ + "axis":"", + 
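The standard_hybrid_sigma and standard_sigma entries above carry their defining formulas in the "formula" field (p = a*p0 + b*ps and p = ptop + sigma*(ps - ptop)). A minimal numpy sketch of evaluating the hybrid-sigma case once the formula terms have been read from file; all values and array shapes here are illustrative, not taken from any dataset:

```python
import numpy as np

# Illustrative formula terms for a 3-level hybrid sigma coordinate.
a = np.array([0.1, 0.4, 0.8])          # a(k), dimensionless
b = np.array([0.9, 0.6, 0.2])          # b(k), dimensionless
p0 = 100000.0                          # reference pressure [Pa]
ps = np.array([[101325.0, 98000.0]])   # surface pressure [Pa], shape (lat, lon)

# p = a*p0 + b*ps, broadcast to (lev, lat, lon)
p = a[:, None, None] * p0 + b[:, None, None] * ps[None, :, :]
print(p.shape)  # (3, 1, 2)
```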
"bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"surface type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"c4_plant_functional_types", + "z_bounds_factors":"", + "z_factors":"" + }, + "typecloud":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Cloud area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"cloud", + "z_bounds_factors":"", + "z_factors":"" + }, + "typecrop":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Crop area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"crops", + "z_bounds_factors":"", + "z_factors":"" + }, + "typefis":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Floating Ice Shelf area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"floating_ice_shelf", + "z_bounds_factors":"", + "z_factors":"" + }, + "typegis":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Grounded Ice Sheet area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"grounded_ice_sheet", + "z_bounds_factors":"", + "z_factors":"" + }, + "typeland":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Land area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"land", + "z_bounds_factors":"", + "z_factors":"" + }, + "typeli":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Land Ice area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"land_ice", + "z_bounds_factors":"", + "z_factors":"" + }, + "typemp":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Melt pond area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"sea_ice_melt_pond", + "z_bounds_factors":"", + 
"z_factors":"" + }, + "typenatgr":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Natural grass area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"natural_grasses", + "z_bounds_factors":"", + "z_factors":"" + }, + "typenwd":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Non-Woody Vegetation area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"non_woody_vegetation", + "z_bounds_factors":"", + "z_factors":"" + }, + "typepasture":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Pasture area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"pastures", + "z_bounds_factors":"", + "z_factors":"" + }, + "typepdec":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"surface type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"primary_deciduous_trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typepever":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"surface type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"primary_evergreen_trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typeresidual":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Residual area", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"residual", + "z_bounds_factors":"", + "z_factors":"" + }, + "typesdec":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"surface type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"secondary_decidous_trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typesea":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Ocean area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + 
"valid_min":"", + "value":"sea", + "z_bounds_factors":"", + "z_factors":"" + }, + "typesever":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"surface type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"secondary_evergreen_trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typeshrub":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Shrub area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"shrubs", + "z_bounds_factors":"", + "z_factors":"" + }, + "typesi":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Sea Ice area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"sea_ice", + "z_bounds_factors":"", + "z_factors":"" + }, + "typetree":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Tree area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typetreebd":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Tree area type (Broadleaf Deciduous)", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typetreebe":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Tree area type (Broadleaf Evergreen)", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typetreend":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Tree area type (Narrowleaf Deciduous)", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typetreene":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Tree area type (Narrowleaf Evergreen)", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + 
"tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"trees", + "z_bounds_factors":"", + "z_factors":"" + }, + "typeveg":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Vegetation area type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"vegetation", + "z_bounds_factors":"", + "z_factors":"" + }, + "typewetla":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"Wetland", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"area_type", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "vegtype":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"plant functional type", + "must_have_bounds":"no", + "out_name":"type", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"", + "stored_direction":"", + "tolerance":"", + "type":"character", + "units":"", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "xant":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"__unset__", + "must_have_bounds":"", + "out_name":"xant", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"projection_x_coordinate", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"km", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "xgre":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"__unset__", + "must_have_bounds":"", + "out_name":"xgre", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"projection_x_coordinate", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"km", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "yant":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"__unset__", + "must_have_bounds":"", + "out_name":"yant", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"projection_y_coordinate", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"km", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + }, + "ygre":{ + "axis":"", + "bounds_values":"", + "climatology":"", + "formula":"", + "long_name":"__unset__", + "must_have_bounds":"", + "out_name":"ygre", + "positive":"", + "requested":"", + "requested_bounds":"", + "standard_name":"projection_y_coordinate", + "stored_direction":"", + "tolerance":"", + "type":"double", + "units":"km", + "valid_max":"", + "valid_min":"", + "value":"", + "z_bounds_factors":"", + "z_factors":"" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_formula_terms.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_formula_terms.json new file mode 100644 index 0000000000..08da1580cb --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_formula_terms.json @@ -0,0 +1,186 @@ +{ + 
"formula_entry":{ + "a":{ + "dimensions":"alevel", + "long_name":"vertical coordinate formula term: a(k)", + "out_name":"a", + "type":"double", + "units":"" + }, + "a_bnds":{ + "dimensions":"alevel", + "long_name":"vertical coordinate formula term: a(k+1/2)", + "out_name":"a_bnds", + "type":"double", + "units":"" + }, + "ap":{ + "dimensions":"alevel", + "long_name":"vertical coordinate formula term: ap(k)", + "out_name":"ap", + "type":"double", + "units":"Pa" + }, + "ap_bnds":{ + "dimensions":"alevel", + "long_name":"vertical coordinate formula term: ap(k+1/2)", + "out_name":"ap_bnds", + "type":"double", + "units":"Pa" + }, + "b":{ + "dimensions":"alevel", + "long_name":"vertical coordinate formula term: b(k)", + "out_name":"b", + "type":"double", + "units":"" + }, + "b_bnds":{ + "dimensions":"alevel", + "long_name":"vertical coordinate formula term: b(k+1/2)", + "out_name":"b_bnds", + "type":"double", + "units":"" + }, + "depth":{ + "dimensions":"longitude latitude", + "long_name":"Sea Floor Depth: formula term: thetao", + "out_name":"depth", + "type":"real", + "units":"m" + }, + "depth_c":{ + "dimensions":"", + "long_name":"vertical coordinate formula term: depth_c", + "out_name":"depth_c", + "type":"double", + "units":"" + }, + "eta":{ + "dimensions":"longitude latitude time", + "long_name":"Sea Surface Height formula term: thetao", + "out_name":"eta", + "type":"real", + "units":"m" + }, + "eta2":{ + "dimensions":"longitude latitude time2", + "long_name":"Sea Surface Height formula term: thetao", + "out_name":"eta", + "type":"real", + "units":"m" + }, + "href":{ + "dimensions":"", + "long_name":"vertical coordinate formula term: href", + "out_name":"href", + "type":"double", + "units":"" + }, + "k_c":{ + "dimensions":"", + "long_name":"vertical coordinate formula term: k_c", + "out_name":"k_c", + "type":"integer", + "units":"" + }, + "nsigma":{ + "dimensions":"", + "long_name":"vertical coordinate formula term: nsigma", + "out_name":"nsigma", + "type":"integer", + "units":"" + }, + "orog":{ + "dimensions":"longitude latitude", + "long_name":"Surface Altitude", + "out_name":"orog", + "type":"real", + "units":"m" + }, + "p0":{ + "dimensions":"", + "long_name":"vertical coordinate formula term: reference pressure", + "out_name":"p0", + "type":"", + "units":"Pa" + }, + "ps":{ + "dimensions":"longitude latitude time", + "long_name":"Surface Air Pressure", + "out_name":"ps", + "type":"real", + "units":"Pa" + }, + "ps1":{ + "dimensions":"longitude latitude time1", + "long_name":"vertical coordinate formula term: ps", + "out_name":"ps", + "type":"real", + "units":"Pa" + }, + "ps2":{ + "dimensions":"longitude latitude time2", + "long_name":"vertical coordinate formula term: ps", + "out_name":"ps", + "type":"real", + "units":"Pa" + }, + "ptop":{ + "dimensions":"", + "long_name":"pressure at top of model", + "out_name":"ptop", + "type":"", + "units":"Pa" + }, + "sigma":{ + "dimensions":"olevel", + "long_name":"vertical coordinate formula term: sigma(k)", + "out_name":"sigma", + "type":"double", + "units":"" + }, + "sigma_bnds":{ + "dimensions":"olevel", + "long_name":"vertical coordinate formula term: sigma(k+1/2)", + "out_name":"sigma_bnds", + "type":"double", + "units":"" + }, + "z1":{ + "dimensions":"", + "long_name":"vertical coordinate formula term: z1", + "out_name":"z1", + "type":"double", + "units":"" + }, + "z2":{ + "dimensions":"", + "long_name":"vertical coordinate formula term: z2", + "out_name":"z2", + "type":"double", + "units":"" + }, + "zlev":{ + "dimensions":"olevel", + 
"long_name":"vertical coordinate formula term: zlev(k)", + "out_name":"zlev", + "type":"double", + "units":"" + }, + "zlev_bnds":{ + "dimensions":"olevel", + "long_name":"vertical coordinate formula term: zlev(k+1/2)", + "out_name":"zlev_bnds", + "type":"double", + "units":"" + }, + "ztop":{ + "dimensions":"", + "long_name":"height of top of model", + "out_name":"ztop", + "type":"", + "units":"m" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_fx.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_fx.json new file mode 100644 index 0000000000..d36c2cae21 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_fx.json @@ -0,0 +1,163 @@ +{ + "Header":{ + "#dataRequest_specs_version":"01.00.21", + "#mip_era":"CMIP6", + "Conventions":"CF-1.7 ODS-2.1", + "approx_interval":"0.00000", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "generic_levels":"alevel", + "int_missing_value":"-2147483648", + "missing_value":"1e20", + "product":"observations", + "realm":"fx", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_fx" + }, + "variable_entry":{ + "areacella":{ + "cell_measures":"", + "cell_methods":"area: sum", + "comment":"For atmospheres with more than 1 mesh (e.g., staggered grids), report areas that apply to surface vertical fluxes of energy.", + "dimensions":"longitude latitude", + "frequency":"fx", + "long_name":"Grid-Cell Area for Atmospheric Variables", + "modeling_realm":"atmos land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"areacella", + "positive":"", + "standard_name":"cell_area", + "type":"real", + "units":"m2", + "valid_max":"", + "valid_min":"" + }, + "areacellr":{ + "cell_measures":"", + "cell_methods":"area: sum", + "comment":"For river routing model, if grid differs from the atmospheric grid.", + "dimensions":"longitude latitude", + "frequency":"fx", + "long_name":"Grid-Cell Area for River Model Variables", + "modeling_realm":"land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"areacellr", + "positive":"", + "standard_name":"cell_area", + "type":"real", + "units":"m2", + "valid_max":"", + "valid_min":"" + }, + "mrsofc":{ + "cell_measures":"area: areacella", + "cell_methods":"area: mean where land", + "comment":"The bulk water content retained by the soil at -33 J/kg of suction pressure, expressed as mass per unit land area; report as missing where there is no land", + "dimensions":"longitude latitude", + "frequency":"fx", + "long_name":"Capacity of Soil to Store Water", + "modeling_realm":"land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"mrsofc", + "positive":"", + "standard_name":"soil_moisture_content_at_field_capacity", + "type":"real", + "units":"kg m-2", + "valid_max":"", + "valid_min":"" + }, + "orog":{ + "cell_measures":"area: areacella", + "cell_methods":"area: mean", + "comment":"The surface called 'surface' means the lower boundary of the atmosphere. Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. 
The geoid is similar to mean sea level.", + "dimensions":"longitude latitude", + "frequency":"fx", + "long_name":"Surface Altitude", + "modeling_realm":"land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"orog", + "positive":"", + "standard_name":"surface_altitude", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "rootd":{ + "cell_measures":"area: areacella", + "cell_methods":"area: mean", + "comment":"report the maximum soil depth reachable by plant roots (if defined in model), i.e., the maximum soil depth from which they can extract moisture; report as *missing* where the land fraction is 0.", + "dimensions":"longitude latitude", + "frequency":"fx", + "long_name":"Maximum Root Depth", + "modeling_realm":"land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"rootd", + "positive":"", + "standard_name":"root_depth", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + }, + "sftgif":{ + "cell_measures":"area: areacella", + "cell_methods":"area: mean", + "comment":"Fraction of grid cell covered by land ice (ice sheet, ice shelf, ice cap, glacier)", + "dimensions":"longitude latitude typeli", + "frequency":"fx", + "long_name":"Fraction of Grid Cell Covered with Glacier", + "modeling_realm":"land", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sftgif", + "positive":"", + "standard_name":"land_ice_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "sftlf":{ + "cell_measures":"area: areacella", + "cell_methods":"area: mean", + "comment":"Please express 'X_area_fraction' as the percentage of horizontal area occupied by X.", + "dimensions":"longitude latitude typeland", + "frequency":"fx", + "long_name":"Land Area Fraction", + "modeling_realm":"atmos", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"sftlf", + "positive":"", + "standard_name":"land_area_fraction", + "type":"real", + "units":"%", + "valid_max":"", + "valid_min":"" + }, + "zfull":{ + "cell_measures":"area: areacella", + "cell_methods":"area: mean", + "comment":"", + "dimensions":"longitude latitude alevel", + "frequency":"fx", + "long_name":"Altitude of Model Full-Levels", + "modeling_realm":"atmos", + "ok_max_mean_abs":"", + "ok_min_mean_abs":"", + "out_name":"zfull", + "positive":"", + "standard_name":"height_above_reference_ellipsoid", + "type":"real", + "units":"m", + "valid_max":"", + "valid_min":"" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_grids.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_grids.json new file mode 100644 index 0000000000..276beaf467 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_grids.json @@ -0,0 +1,155 @@ +{ + "Header":{ + "#dataRequest_specs_version":"01.00.21", + "Conventions":"CF-1.7 ODS-2.1", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "missing_value":"1e20", + "product":"observations", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_grids" + }, + "axis_entry":{ + "grid_latitude":{ + "axis":"Y", + "long_name":"latitude in rotated pole grid", + "out_name":"rlat", + "standard_name":"grid_latitude", + "type":"", + "units":"degrees" + }, + "grid_longitude":{ + "axis":"X", + "long_name":"longitude in rotated pole grid", + "out_name":"rlon", + "standard_name":"grid_longitude", + "type":"", + "units":"degrees" + }, + "i_index":{ + "axis":"", + "long_name":"cell index along first dimension", + "out_name":"i", + "standard_name":"", + "type":"integer", + 
"units":"1" + }, + "j_index":{ + "axis":"", + "long_name":"cell index along second dimension", + "out_name":"j", + "standard_name":"", + "type":"integer", + "units":"1" + }, + "k_index":{ + "axis":"", + "long_name":"cell index along third dimension", + "out_name":"k", + "standard_name":"", + "type":"integer", + "units":"1" + }, + "l_index":{ + "axis":"", + "long_name":"cell index along fourth dimension", + "out_name":"l", + "standard_name":"", + "type":"integer", + "units":"1" + }, + "m_index":{ + "axis":"", + "long_name":"cell index along fifth dimension", + "out_name":"m", + "standard_name":"", + "type":"integer", + "units":"1" + }, + "vertices":{ + "axis":"", + "long_name":"", + "out_name":"", + "standard_name":"", + "type":"", + "units":"" + }, + "x":{ + "axis":"X", + "long_name":"x coordinate of projection", + "out_name":"", + "standard_name":"projection_x_coordinate", + "type":"", + "units":"m" + }, + "x_deg":{ + "axis":"X", + "long_name":"x coordinate of projection", + "out_name":"x", + "standard_name":"projection_x_coordinate", + "type":"", + "units":"degrees" + }, + "y":{ + "axis":"Y", + "long_name":"y coordinate of projection", + "out_name":"", + "standard_name":"projection_y_coordinate", + "type":"", + "units":"m" + }, + "y_deg":{ + "axis":"Y", + "long_name":"y coordinate of projection", + "out_name":"y", + "standard_name":"projection_y_coordinate", + "type":"", + "units":"degrees" + } + }, + "mapping_entry":{ + "sample_user_mapping":{ + "coordinates":"rlon rlat", + "parameter1":"false_easting", + "parameter2":"false_northing" + } + }, + "variable_entry":{ + "latitude":{ + "dimensions":"longitude latitude", + "long_name":"", + "out_name":"latitude", + "standard_name":"", + "units":"degrees_north", + "valid_max":"90.0", + "valid_min":"-90.0" + }, + "longitude":{ + "dimensions":"longitude latitude", + "long_name":"", + "out_name":"longitude", + "standard_name":"", + "units":"degrees_east", + "valid_max":"360.0", + "valid_min":"0.0" + }, + "vertices_latitude":{ + "dimensions":"vertices longitude latitude", + "long_name":"", + "out_name":"vertices_latitude", + "standard_name":"", + "units":"degrees_north", + "valid_max":"90.0", + "valid_min":"-90.0" + }, + "vertices_longitude":{ + "dimensions":"vertices longitude latitude", + "long_name":"", + "out_name":"vertices_longitude", + "standard_name":"", + "units":"degrees_east", + "valid_max":"360.0", + "valid_min":"0.0" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monNobs.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monNobs.json new file mode 100644 index 0000000000..da00c57753 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monNobs.json @@ -0,0 +1,29 @@ +{ + "Header":{ + "#mip_era":"CMIP6", + "Conventions":"CF-1.7 ODS-2.1", + "approx_interval":"30.00000", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "generic_levels":"alevel alevhalf", + "int_missing_value":"-2147483648", + "missing_value":"1e20", + "product":"observations", + "realm":"aerosol atmos atmosChem land landIce ocean ocnBgchem seaIce", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_monNobs" + }, + "variable_entry":{ + "ndviNobs":{ + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Normalized Difference Vegetation Index Number of Observations", + "modeling_realm":"atmos", + "out_name":"ndviNobs", + "standard_name":"number_of_observations", + "type":"integer", + "units":"1" + } + } +} \ No newline at end of file diff --git 
a/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monStderr.json b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monStderr.json new file mode 100644 index 0000000000..3c4b3814b8 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/Tables/obs4MIPs_monStderr.json @@ -0,0 +1,29 @@ +{ + "Header":{ + "#mip_era":"CMIP6", + "Conventions":"CF-1.7 ODS-2.1", + "approx_interval":"30.00000", + "cmor_version":"3.2", + "data_specs_version":"2.1.0", + "generic_levels":"alevel alevhalf", + "int_missing_value":"-2147483648", + "missing_value":"1e20", + "product":"observations", + "realm":"aerosol atmos atmosChem land landIce ocean ocnBgchem seaIce", + "table_date":"07 March 2018", + "table_id":"Table obs4MIPs_monStderr" + }, + "variable_entry":{ + "ndviStderr":{ + "comment":"", + "dimensions":"longitude latitude time", + "frequency":"mon", + "long_name":"Normalized Difference Vegetation Index Standard Error", + "modeling_realm":"atmos", + "out_name":"ndviStderr", + "standard_name":"normalized_difference_vegetation_index standard_error", + "type":"real", + "units":"" + } + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_frequency.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_frequency.json new file mode 100644 index 0000000000..d749596a35 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_frequency.json @@ -0,0 +1,20 @@ +{ + "frequency":{ + "1hr":"sampled hourly", + "1hrCM":"monthly-mean diurnal cycle resolving each day into 1-hour means", + "1hrPt":"sampled hourly, at specified time point within an hour", + "3hr":"sampled every 3 hours", + "3hrPt":"sampled 3 hourly, at specified time point within the time period", + "6hr":"sampled every 6 hours", + "6hrPt":"sampled 6 hourly, at specified time point within the time period", + "day":"daily mean samples", + "dec":"decadal mean samples", + "fx":"fixed (time invariant) field", + "mon":"monthly mean samples", + "monC":"monthly climatology computed from monthly mean samples", + "monPt":"sampled monthly, at specified time point within the time period", + "subhrPt":"sampled sub-hourly, at specified time point within an hour", + "yr":"annual mean samples", + "yrPt":"sampled yearly, at specified time point within the time period" + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_grid_label.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_grid_label.json new file mode 100644 index 0000000000..fa079918e2 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_grid_label.json @@ -0,0 +1,49 @@ +{ + "grid_label":{ + "gm":"global mean data", + "gn":"data reported on a model's native grid", + "gna":"data reported on a native grid in the region of Antarctica", + "gng":"data reported on a native grid in the region of Greenland", + "gnz":"zonal mean data reported on a model's native latitude grid", + "gr":"regridded data reported on the data provider's preferred target grid", + "gr1":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr1a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr1g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr1z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr2":"regridded data reported on a grid other than the native grid and other 
than the preferred target grid", + "gr2a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr2g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr2z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr3":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr3a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr3g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr3z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr4":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr4a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr4g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr4z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr5":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr5a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr5g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr5z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr6":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr6a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr6g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr6z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr7":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr7a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr7g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr7z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr8":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr8a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr8g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + 
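The grid_label controlled vocabulary enumerated here is verbose but regular: a base label (gm, gn, gr, or gr1 through gr9) optionally followed by a region or zonal suffix (a for Antarctica, g for Greenland, z for zonal mean). A hedged sketch of a validator built on that pattern, inferred from the entries rather than stated anywhere in the tables:

```python
import re

# Pattern inferred from the CV entries above: gm | gn[agz]? | gr[1-9]?[agz]?
GRID_LABEL = re.compile(r"^(gm|gn[agz]?|gr[1-9]?[agz]?)$")

for label in ("gn", "gr", "gr7z", "gm", "gr10", "grx"):
    print(label, bool(GRID_LABEL.match(label)))  # gr10 and grx are rejected
```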
"gr8z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gr9":"regridded data reported on a grid other than the native grid and other than the preferred target grid", + "gr9a":"regridded data reported in the region of Antarctica on a grid other than the native grid and other than the preferred target grid", + "gr9g":"regridded data reported in the region of Greenland on a grid other than the native grid and other than the preferred target grid", + "gr9z":"regridded zonal mean data reported on a grid other than the native latitude grid and other than the preferred latitude target grid", + "gra":"regridded data in the region of Antarctica reported on the data provider's preferred target grid", + "grg":"regridded data in the region of Greenland reported on the data provider's preferred target grid", + "grz":"regridded zonal mean data reported on the data provider's preferred latitude target grid" + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_institution_id.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_institution_id.json new file mode 100644 index 0000000000..8ac6bca123 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_institution_id.json @@ -0,0 +1,9 @@ +{ + "institution_id":{ + "DWD":"Deutscher Wetterdienst, Offenbach 63067, Germany", + "NASA-JPL":"NASA's Jet Propulsion Laboratory, Pasadena, CA 91109, USA", + "NOAA-NCEI":"NOAA's National Centers for Environmental Information, Asheville, NC 28801, USA", + "PCMDI":"Program for Climate Model Diagnosis and Intercomparison, Lawrence Livermore National Laboratory, Livermore, CA 94550, USA", + "RSS":"Remote Sensing Systems, Santa Rosa, CA 95401, USA" + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_license.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_license.json new file mode 100644 index 0000000000..b4060ed779 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_license.json @@ -0,0 +1,3 @@ +{ + "license":"Data in this file produced by is licensed under a Creative Commons Attribution-ShareAlike 4.0 International License (https://creativecommons.org/licenses/). Use of the data must be acknowledged following guidelines found at . 
Further information about this data, including some limitations, can be found via .)" +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_nominal_resolution.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_nominal_resolution.json new file mode 100644 index 0000000000..4de5620573 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_nominal_resolution.json @@ -0,0 +1,19 @@ +{ + "nominal_resolution":[ + "0.5 km", + "1 km", + "10 km", + "100 km", + "1000 km", + "10000 km", + "1x1 degree", + "2.5 km", + "25 km", + "250 km", + "2500 km", + "5 km", + "50 km", + "500 km", + "5000 km" + ] +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_product.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_product.json new file mode 100644 index 0000000000..5b74000b98 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_product.json @@ -0,0 +1,6 @@ +{ + "product":[ + "observations", + "reanalysis" + ] +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_realm.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_realm.json new file mode 100644 index 0000000000..ffe16ec257 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_realm.json @@ -0,0 +1,12 @@ +{ + "realm":[ + "aerosol", + "atmos", + "atmosChem", + "land", + "landIce", + "ocean", + "ocnBgchem", + "seaIce" + ] +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_region.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_region.json new file mode 100644 index 0000000000..69c23d6444 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_region.json @@ -0,0 +1,72 @@ +{ + "region":[ + "africa", + "antarctica", + "arabian_sea", + "aral_sea", + "arctic_ocean", + "asia", + "atlantic_ocean", + "australia", + "baltic_sea", + "barents_opening", + "barents_sea", + "beaufort_sea", + "bellingshausen_sea", + "bering_sea", + "bering_strait", + "black_sea", + "canadian_archipelago", + "caribbean_sea", + "caspian_sea", + "central_america", + "chukchi_sea", + "contiguous_united_states", + "denmark_strait", + "drake_passage", + "east_china_sea", + "english_channel", + "eurasia", + "europe", + "faroe_scotland_channel", + "florida_bahamas_strait", + "fram_strait", + "global", + "global_land", + "global_ocean", + "great_lakes", + "greenland", + "gulf_of_alaska", + "gulf_of_mexico", + "hudson_bay", + "iceland_faroe_channel", + "indian_ocean", + "indo_pacific_ocean", + "indonesian_throughflow", + "irish_sea", + "lake_baykal", + "lake_chad", + "lake_malawi", + "lake_tanganyika", + "lake_victoria", + "mediterranean_sea", + "mozambique_channel", + "north_america", + "north_sea", + "norwegian_sea", + "pacific_equatorial_undercurrent", + "pacific_ocean", + "persian_gulf", + "red_sea", + "ross_sea", + "sea_of_japan", + "sea_of_okhotsk", + "south_america", + "south_china_sea", + "southern_ocean", + "taiwan_luzon_straits", + "weddell_sea", + "windward_passage", + "yellow_sea" + ] +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_required_global_attributes.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_required_global_attributes.json new file mode 100644 index 0000000000..1c78c5af89 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_required_global_attributes.json @@ -0,0 +1,23 @@ +{ + "required_global_attributes":[ + "Conventions", + "activity_id", + "contact", + "creation_date", + "data_specs_version", + "frequency", + "grid", + "grid_label", + "institution", + "institution_id", + "license", 
+ "nominal_resolution", + "product", + "realm", + "source_id", + "table_id", + "tracking_id", + "variable_id", + "variant_label" + ] +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_id.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_id.json new file mode 100644 index 0000000000..21ea8cef00 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_id.json @@ -0,0 +1,353 @@ +{ + "source_id":{ + "AIRS-1-0":{ + "institution_id":"NASA-JPL", + "region":[ + "global" + ], + "release_year":"2011", + "source_description":"Atmospheric Infrared Sounder", + "source_label":"AIRS", + "source_name":"AIRS", + "source_type":"satellite_retrieval", + "source_variables":[ + "hus", + "ta" + ], + "source_version_number":"1.0" + }, + "Aura-MLS-v04-2":{ + "institution_id":"NASA-JPL", + "region":[ + "global" + ], + "release_year":"2018", + "source_description":"EOS Aura Microwave Limb Sounder", + "source_label":"Aura-MLS", + "source_name":"Aura MLS", + "source_type":"satellite_retrieval", + "source_variables":[ + "cli", + "hus", + "ta" + ], + "source_version_number":"v04.2" + }, + "CMSAF-CLARA-A-2-0":{ + "institution_id":"DWD", + "region":[ + "global" + ], + "release_year":"2017", + "source_description":"CM SAF cLoud, Albedo and surface RAdiation dataset from AVHRR data", + "source_id":"CMSAF-CLARA-A-2-0", + "source_label":"CMSAF-CLARA-A", + "source_name":"CMSAF CLARA A", + "source_type":"satellite_retrieval", + "source_variables":[ + "clCLARA", + "clivi", + "cltCLARA", + "clwCLARA", + "clwtCLARA", + "clwvi", + "pctCLARA", + "rsds", + "rsdscs" + ], + "source_version_number":"2.0" + }, + "CMSAF-HOAPS-4-0":{ + "institution_id":"DWD", + "region":[ + "global_ocean" + ], + "release_year":"2017", + "source_description":"Hamburg Ocean Atmosphere Parameters and fluxes from Satellite data, based on SSM/I and SSMIS aboard DMSP", + "source_id":"CMSAF-HOAPS-4-0", + "source_label":"CMSAF-HOAPS", + "source_name":"CMSAF HOAPS", + "source_type":"satellite_retrieval", + "source_variables":[ + "evspsbl", + "hfls", + "hfss", + "huss", + "pme", + "pr", + "prw", + "sfcWind" + ], + "source_version_number":"4.0" + }, + "CMSAF-SARAH-2-0":{ + "institution_id":"DWD", + "region":[ + "africa", + "atlantic_ocean", + "europe" + ], + "release_year":"2017", + "source_description":"Surface solAr RAdiation data set - Heliosat, based on MVIRI/SEVIRI aboard METEOSAT", + "source_id":"CMSAF-SARAH-2.0", + "source_label":"CMSAF-SARAH", + "source_name":"CMSAF SARAH", + "source_type":"satellite_retrieval", + "source_variables":[ + "rsds" + ], + "source_version_number":"2.0" + }, + "ESACCI-CLOUD-ATSR2-AATSR-2-0":{ + "institution_id":"DWD", + "region":[ + "global" + ], + "release_year":"2017", + "source_description":"Cloud properties derived from ATSR2 and AATSR (aboard ERS2 and ENVISAT) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets.", + "source_id":"ESACCI-CLOUD-ATSR2-AATSR-2-0", + "source_label":"ESACCI-CLOUD-ATSR2-AATSR", + "source_name":"ESACCI CLOUD ATSR2 AATSR", + "source_type":"satellite_retrieval", + "source_variables":[ + "clCCI", + "clivi", + "cltCCI", + "clwCCI", + "clwtCCI", + "clwvi", + "pctCCI" + ], + "source_version_number":"2.0" + }, + "ESACCI-CLOUD-AVHRR-AM-2-0":{ + "institution_id":"DWD", + "region":[ + "global" + ], + "release_year":"2017", + "source_description":"Cloud properties derived from AVHRR (aboard NOAA and MetOp AM) measurements. 
This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets", + "source_id":"ESACCI-CLOUD-AVHRR-AM-2-0", + "source_label":"ESACCI-CLOUD-AVHRR-AM", + "source_name":"ESACCI CLOUD AVHRR AM", + "source_type":"satellite_retrieval", + "source_variables":[ + "clCCI", + "clivi", + "cltCCI", + "clwCCI", + "clwtCCI", + "clwvi", + "pctCCI" + ], + "source_version_number":"2.0" + }, + "ESACCI-CLOUD-AVHRR-PM-2-0":{ + "institution_id":"DWD", + "region":[ + "global" + ], + "release_year":"2017", + "source_description":"Cloud properties derived from AVHRR (aboard NOAA and MetOp PM) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets", + "source_id":"ESACCI-CLOUD-AVHRR-PM-2-0", + "source_label":"ESACCI-CLOUD-AVHRR-PM", + "source_name":"ESACCI CLOUD AVHRR PM", + "source_type":"satellite_retrieval", + "source_variables":[ + "clCCI", + "clivi", + "cltCCI", + "clwCCI", + "clwtCCI", + "clwvi", + "pctCCI" + ], + "source_version_number":"2.0" + }, + "ESACCI-CLOUD-MERIS-AATSR-2-0":{ + "institution_id":"DWD", + "region":[ + "global" + ], + "release_year":"2017", + "source_description":"Cloud properties derived from MERIS and AATSR (aboard ENVISAT) measurements. This dataset belongs to the ESA Cloud_cci suite of long-term coherent cloud property datasets.", + "source_id":"ESACCI-CLOUD-MERIS-AATSR-2-0", + "source_label":"ESACCI-CLOUD-MERIS-AATSR", + "source_name":"ESACCI CLOUD MERIS AATSR", + "source_type":"satellite_retrieval", + "source_variables":[ + "clCCI", + "clivi", + "cltCCI", + "clwCCI", + "clwtCCI", + "clwvi", + "pctCCI" + ], + "source_version_number":"2.0" + }, + "GNSS-RO-1-3":{ + "institution_id":"NASA-JPL", + "region":[ + "global" + ], + "release_year":"2016", + "source_description":"Global Navigation Satellite Systems Radio Occultation", + "source_label":"GNSS-RO", + "source_name":"GNSS RO", + "source_type":"satellite_retrieval", + "source_variables":[ + "ta", + "zg" + ], + "source_version_number":"1.3" + }, + "NOAA-NCEI-AVHRR-NDVI-4-0":{ + "institution_id":"NOAA-NCEI", + "region":[ + "global_land" + ], + "release_year":"2013", + "source_description":"Normalized Difference Vegetation Index", + "source_id":"NOAA-NCEI-AVHRR-NDVI-4-0", + "source_label":"NOAA-NCEI-AVHRR-NDVI", + "source_name":"NOAA NCEI AVHRR NDVI", + "source_type":"satellite_retrieval", + "source_variables":[ + "ndvi" + ], + "source_version_number":"4.0" + }, + "NOAA-NCEI-ERSST-4-0":{ + "institution_id":"NOAA-NCEI", + "region":[ + "global_ocean" + ], + "release_year":"2015", + "source_description":"Extended Reconstructed Sea Surface Temperatures", + "source_id":"NOAA-NCEI-ERSST-4-0", + "source_label":"NOAA-NCEI-ERSST", + "source_name":"NOAA NCEI ERSST", + "source_type":"gridded_insitu", + "source_variables":[ + "tos" + ], + "source_version_number":"4.0" + }, + "NOAA-NCEI-FAPAR-4-0":{ + "institution_id":"NOAA-NCEI", + "region":[ + "global_land" + ], + "release_year":"2014", + "source_description":"AVHRR Fraction of Absorbed Photosynthetically Active Radiation", + "source_id":"NOAA-NCEI-FAPAR-4-0", + "source_label":"NOAA-NCEI-FAPAR", + "source_name":"NOAA NCEI FAPAR", + "source_type":"satellite_retrieval", + "source_variables":[ + "fapar" + ], + "source_version_number":"4.0" + }, + "NOAA-NCEI-GridSat-4-0":{ + "institution_id":"NOAA-NCEI", + "region":[ + "global" + ], + "release_year":"2015", + "source_description":"Gridded Satellite ISCCP B1 11 Micron Brightness Temperature", + "source_id":"NOAA-NCEI-GridSat-4-0", + "source_label":"NOAA-NCEI-GridSat", + 
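Each source_id entry in this registry ties a dataset to its institution_id, region, and the variables it provides. A minimal sketch of how a tool might consume the file added by this diff; `load_source` is an illustrative name, not an ESMValTool API:

```python
import json

def load_source(path, source_id):
    """Look up one obs4MIPs source_id entry and return its key fields."""
    with open(path) as infile:
        registry = json.load(infile)["source_id"]
    entry = registry[source_id]
    return entry["institution_id"], entry["source_variables"]

institute, variables = load_source(
    "esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_id.json",
    "NOAA-NCEI-GridSat-4-0")
print(institute, variables)  # NOAA-NCEI ['ttbr']
```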
"source_name":"NOAA NCEI GridSat", + "source_type":"satellite_retrieval", + "source_variables":[ + "ttbr" + ], + "source_version_number":"4.0" + }, + "NOAA-NCEI-LAI-4-0":{ + "institution_id":"NOAA-NCEI", + "region":[ + "global_land" + ], + "release_year":"2014", + "source_description":"AVHRR Leaf Area Index", + "source_id":"NOAA-NCEI-LAI-4-0", + "source_label":"NOAA-NCEI-LAI", + "source_name":"NOAA NCEI LAI", + "source_type":"satellite_retrieval", + "source_variables":[ + "lai" + ], + "source_version_number":"4.0" + }, + "NOAA-NCEI-PERSIANN-1-1":{ + "institution_id":"NOAA-NCEI", + "region":[ + "global" + ], + "release_year":"2014", + "source_description":"Precipitation Estimation from Remotely Sensed Information using Artificial Neural Network", + "source_id":"NOAA-NCEI-PERSIANN-1-1", + "source_label":"NOAA-NCEI-PERSIANN", + "source_name":"NOAA NCEI PERSIANN", + "source_type":"satellite_retrieval", + "source_variables":[ + "pr" + ], + "source_version_number":"1.1" + }, + "NOAA-NCEI-SeaWinds-1-2":{ + "institution_id":"NOAA-NCEI", + "region":[ + "global_ocean" + ], + "release_year":"2008", + "source_description":"Blended Sea Surface Winds", + "source_id":"NOAA-NCEI-SeaWinds-1-2", + "source_label":"NOAA-NCEI-SeaWinds", + "source_name":"NOAA NCEI SeaWinds", + "source_type":"satellite_blended", + "source_variables":[ + "sfcWind", + "uas", + "vas" + ], + "source_version_number":"1.2" + }, + "QuikSCAT-v20110531":{ + "institution_id":"NASA-JPL", + "region":[ + "global" + ], + "release_year":"2011", + "source_description":"QuikSCAT winds monthly climatology derived from QuikSCAT L2B using the 2006 model function and processing algorithms.", + "source_label":"QuikSCAT", + "source_name":"QuikSCAT", + "source_type":"satellite_retrieval", + "source_variables":[ + "sfcWind", + "uas", + "vas" + ], + "source_version_number":"v20110531" + }, + "REMSS-PRW-6-6-0":{ + "institution_id":"RSS", + "region":[ + "global" + ], + "release_year":"2017", + "source_description":"Water Vapor Path", + "source_id":"REMSS-PRW-6-6-0", + "source_label":"REMSS-PRW", + "source_name":"REMSS PRW", + "source_type":"satellite_blended", + "source_variables":[ + "prw" + ], + "source_version_number":"6.6.0" + } + } +} diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_type.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_type.json new file mode 100644 index 0000000000..28d98f5756 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_source_type.json @@ -0,0 +1,8 @@ +{ + "source_type":{ + "gridded_insitu":"gridded product based on measurements collected from in-situ instruments", + "reanalysis":"gridded product generated from a model reanalysis based on in-situ instruments and possibly satellite measurements", + "satellite_blended":"gridded product based on both in-situ instruments and satellite measurements", + "satellite_retrieval":"gridded product based on satellite measurements" + } +} \ No newline at end of file diff --git a/esmvaltool/cmor/tables/obs4mips/obs4MIPs_table_id.json b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_table_id.json new file mode 100644 index 0000000000..bbb23e82c8 --- /dev/null +++ b/esmvaltool/cmor/tables/obs4mips/obs4MIPs_table_id.json @@ -0,0 +1,12 @@ +{ + "table_id":[ + "obs4MIPs_Aday", + "obs4MIPs_Amon", + "obs4MIPs_Lmon", + "obs4MIPs_Omon", + "obs4MIPs_SImon", + "obs4MIPs_fx", + "obs4MIPs_monNobs", + "obs4MIPs_monStderr" + ] +} \ No newline at end of file diff --git a/esmvaltool/config-developer.yml b/esmvaltool/config-developer.yml index 26fade00c7..06433da1bf 100644 --- 
a/esmvaltool/config-developer.yml +++ b/esmvaltool/config-developer.yml @@ -16,148 +16,260 @@ # # Only the default drs is mandatory, the others are optional ############################################################################### +--- + CMIP6: - input_dir: 'CMIP/[institute]/[dataset]/[exp]/[ensemble]/[mip]/[var]/[grid]/[version]' - input_file: '[var]_[mip]_[dataset]_[exp]_[ensemble]_[grid]_*.nc' - output_file: '[project]_[dataset]_[mip]_[exp]_[ensemble]_[field]_[var]_[start_year]-[end_year]' + input_dir: + default: '/' + BADC: '[institute]/[dataset]/[exp]/[ensemble]/[mip]/[short_name]/[grid]/[latestversion]' + DKRZ: '[institute]/[dataset]/[exp]/[ensemble]/[mip]/[short_name]/[grid]/[latestversion]' + ETHZ: '[exp]/[mip]/[short_name]/[dataset]/[ensemble]/[grid]/' + input_file: '[short_name]_[mip]_[dataset]_[exp]_[ensemble]_[grid]_*.nc' + output_file: '[project]_[dataset]_[mip]_[exp]_[ensemble]_[short_name]_[start_year]-[end_year]' cmor_type: 'CMIP6' + institutes: + 'ACCESS-CM2': ['CSIRO-ARCCSS-BoM'] + 'ACCESS-ESM1-5': ['CSIRO'] + 'ARTS-2-3': ['UHH'] + 'AWI-CM-1-1-HR': ['AWI'] + 'AWI-CM-1-1-LR': ['AWI'] + 'AWI-CM-1-1-MR': ['AWI'] + 'AWI-ESM-1-1-LR': ['AWI'] + 'BCC-CSM2-HR': ['BCC'] + 'BCC-CSM2-MR': ['BCC'] + 'BCC-ESM1': ['BCC'] + 'BESM-2-7': ['INPE'] + 'BNU-ESM-1-1': ['BNU'] + 'CAMS-CSM1-0': ['CAMS'] + 'CanESM5': ['CCCma'] + 'CAS-ESM1-0': ['CAS'] + 'CESM2': ['NCAR'] + 'CESM2-SE': ['NCAR'] + 'CESM2-WACCM': ['NCAR'] + 'CIESM': ['THU'] + 'CMCC-CM2-HR4': ['CMCC'] + 'CMCC-CM2-HR5': ['CMCC'] + 'CMCC-CM2-SR5': ['CMCC'] + 'CMCC-CM2-VHR4': ['CMCC'] + 'CMCC-ESM2-HR5': ['CMCC'] + 'CMCC-ESM2-SR5': ['CMCC'] + 'CNRM-CM6-1': ['CNRM-CERFACS'] + 'CNRM-CM6-1-HR': ['CNRM-CERFACS'] + 'CNRM-ESM2-1': ['CNRM-CERFACS'] + 'CNRM-ESM2-1-HR': ['CNRM-CERFACS'] + 'CSIRO-Mk3L-1-3': ['UTAS'] + 'E3SM-1-0': ['E3SM-Project'] + 'EC-Earth3-AerChem': ['EC-Earth-Consortium'] + 'EC-Earth3-CC': ['EC-Earth-Consortium'] + 'EC-Earth3': ['EC-Earth-Consortium'] + 'EC-Earth3-GrIS': ['EC-Earth-Consortium'] + 'EC-Earth3-HR': ['EC-Earth-Consortium'] + 'EC-Earth3-LR': ['EC-Earth-Consortium'] + 'EC-Earth3P': ['EC-Earth-Consortium'] + 'EC-Earth3P-HR': ['EC-Earth-Consortium'] + 'EC-Earth3P-VHR': ['EC-Earth-Consortium'] + 'EC-Earth3-Veg': ['EC-Earth-Consortium'] + 'EC-Earth3-Veg-LR': ['EC-Earth-Consortium'] + 'ECMWF-IFS-HR': ['ECMWF'] + 'ECMWF-IFS-LR': ['ECMWF'] + 'ECMWF-IFS-MR': ['ECMWF'] + 'EMAC-2-53-AerChem': ['MESSy-Consortium'] + 'EMAC-2-53-Vol': ['MESSy-Consortium'] + 'FGOALS-f3-H': ['CAS'] + 'FGOALS-f3-L': ['CAS'] + 'FGOALS-g3': ['CAS'] + 'FIO-ESM-2-0': ['FIO-QLNM'] + 'GFDL-AM4': ['NOAA-GFDL'] + 'GFDL-CM4C192': ['NOAA-GFDL'] + 'GFDL-CM4': ['NOAA-GFDL'] + 'GFDL-ESM2M': ['NOAA-GFDL'] + 'GFDL-ESM4': ['NOAA-GFDL'] + 'GFDL-OM4p5B': ['NOAA-GFDL'] + 'GISS-E2-1-G': ['NASA-GISS'] + 'GISS-E2-1-H': ['NASA-GISS'] + 'GISS-E2-1-MA-G': ['NASA-GISS'] + 'GISS-E3-G': ['NASA-GISS'] + 'HadGEM3-GC31-HH': ['MOHC', 'NERC'] + 'HadGEM3-GC31-HM': ['MOHC', 'NERC'] + 'HadGEM3-GC31-LL': ['MOHC'] + 'HadGEM3-GC31-LM': ['MOHC'] + 'HadGEM3-GC31-MH': ['MOHC'] + 'HadGEM3-GC31-MM': ['MOHC'] + 'ICON-ESM-LR': ['MPI-M'] + 'IITM-ESM': ['CCCR-IITM'] + 'INM-CM4-8': ['INM'] + 'INM-CM5-0': ['INM'] + 'INM-CM5-H': ['INM'] + 'IPSL-CM6A-ATM-HR': ['IPSL'] + 'IPSL-CM6A-LR': ['IPSL'] + 'KACE-1-0-G': ['NIMS-KMA'] + 'KIOST-ESM': ['KIOST'] + 'LBLRTM-12-8': ['AER'] + 'MCM-UA-1-0': ['UA'] + 'MIROC6': ['MIROC'] + 'MIROC-ES2H': ['MIROC'] + 'MIROC-ES2L': ['MIROC'] + 'MPI-ESM-1-2-HAM': ['HAMMOZ-Consortium'] + 'MPI-ESM1-2-HR': ['MPI-M', 'DWD', 'DKRZ'] + 'MPI-ESM1-2-LR': ['MPI-M', 'AWI'] + 
'MRI-AGCM3-2': ['MRI'] + 'MRI-ESM2-0': ['MRI'] + 'NESM3': ['NUIST'] + 'NICAM16-7S': ['MIROC'] + 'NICAM16-8S': ['MIROC'] + 'NICAM16-9D-L78': ['MIROC'] + 'NICAM16-9S': ['MIROC'] + 'NorESM2-HH': ['NCC'] + 'NorESM2-LMEC': ['NCC'] + 'NorESM2-LME': ['NCC'] + 'NorESM2-LM': ['NCC'] + 'NorESM2-MH': ['NCC'] + 'NorESM2-MM': ['NCC'] + 'PCMDI-test-1-0': ['PCMDI'] + 'RRTMG-LW-4-91': ['AER'] + 'RRTMG-SW-4-02': ['AER'] + 'RTE-RRTMGP-181204': ['AER'] + 'SAM0-UNICON': ['SNU'] + 'TaiESM1': ['AS-RCEC'] + 'UKESM1-0-LL': ['MOHC', 'NERC', 'NIMS-KMA', 'NIWA'] + 'UKESM1-0-MMh': ['MOHC', 'NERC'] + 'UofT-CCSM4': ['UofT'] + 'VRESM-1-0': ['CSIR-CSIRO'] + CMIP5: + cmor_strict: true input_dir: default: '/' - BADC: '[institute]/[dataset]/[exp]/[freq]/[realm]/[mip]/[ensemble]/latest/[var]' - DKRZ: '[institute]/[dataset]/[exp]/[freq]/[realm]/[mip]/[ensemble]/[latestversion]/[var]' - ETHZ: '[exp]/[mip]/[var]/[dataset]/[ensemble]/' - SMHI: '[dataset]/[ensemble]/[exp]/[freq]' - BSC: '[project]/[exp]/[dataset.lower]' - input_file: '[var]_[mip]_[dataset]_[exp]_[ensemble]_*' - fx_dir: + BADC: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/latest/[short_name]' + CP4CDS: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/[short_name]/latest/' + DKRZ: '[institute]/[dataset]/[exp]/[frequency]/[modeling_realm]/[mip]/[ensemble]/[latestversion]/[short_name]' + ETHZ: '[exp]/[mip]/[short_name]/[dataset]/[ensemble]/' + SMHI: '[dataset]/[ensemble]/[exp]/[frequency]' + BSC: '[type]/[project]/[exp]/[dataset.lower]' + input_file: '[short_name]_[mip]_[dataset]_[exp]_[ensemble]_*.nc' + input_fx_dir: default: '/' - BADC: '[institute]/[dataset]/[exp]/fx/[realm]/fx/r0i0p0/[latestversion]/[fx_var]' - DKRZ: '[institute]/[dataset]/[exp]/fx/[realm]/fx/r0i0p0/[latestversion]/[fx_var]' + BADC: '[institute]/[dataset]/[exp]/fx/[modeling_realm]/fx/r0i0p0/[latestversion]/[fx_var]' + CP4CDS: '[institute]/[dataset]/[exp]/fx/[modeling_realm]/fx/r0i0p0/[fx_var]/latest/' + DKRZ: '[institute]/[dataset]/[exp]/fx/[modeling_realm]/fx/r0i0p0/[latestversion]/[fx_var]' ETHZ: '[exp]/fx/[fx_var]/[dataset]/r0i0p0' - fx_file: '[fx_var]_fx_[dataset]_[exp]_r0i0p0.nc' - output_file: '[project]_[dataset]_[mip]_[exp]_[ensemble]_[field]_[var]_[start_year]-[end_year]' - institute: - 'ACCESS1-0': 'CSIRO-BOM' - 'ACCESS1-3': 'CSIRO-BOM' - 'bcc-csm1-1': 'BCC' - 'bcc-csm1-1-m': 'BCC' - 'BNU-ESM': 'BNU' - 'CanAM4': 'CCCma' - 'CanCM4': 'CCCma' - 'CanESM2': 'CCCma' - 'CCSM4': 'NCAR' - 'CESM1-BGC': 'NSF-DOE-NCAR' - 'CESM1-CAM5': 'NSF-DOE-NCAR' - 'CESM1-CAM5-1-FV2': 'NSF-DOE-NCA' - 'CESM1-FASTCHEM': 'NSF-DOE-NCAR' - 'CESM1-WACCM': 'NSF-DOE-NCAR' + input_fx_file: '[fx_var]_fx_[dataset]_[exp]_r0i0p0.nc' + fx_mip_change: + 'areacella': 'Amon' + 'areacello': 'Omon' + 'basin': 'Omon' + 'deptho': 'Omon' + 'mrsofc': 'Lmon' + 'orog': 'Amon' + 'rootd': 'Lmon' + 'sftgif': 'Lmon' + 'sftlf': 'Amon' + 'sftof': 'Omon' + 'volcello': 'Omon' + output_file: '[project]_[dataset]_[mip]_[exp]_[ensemble]_[short_name]_[start_year]-[end_year]' + institutes: + 'ACCESS1-0': ['CSIRO-BOM'] + 'ACCESS1-3': ['CSIRO-BOM'] + 'bcc-csm1-1': ['BCC'] + 'bcc-csm1-1-m': ['BCC'] + 'BNU-ESM': ['BNU'] + 'CanAM4': ['CCCma'] + 'CanCM4': ['CCCma'] + 'CanESM2': ['CCCma'] + 'CCSM4': ['NCAR'] + 'CESM1-BGC': ['NSF-DOE-NCAR'] + 'CESM1-CAM5': ['NSF-DOE-NCAR'] + 'CESM1-CAM5-1-FV2': ['NSF-DOE-NCAR'] + 'CESM1-FASTCHEM': ['NSF-DOE-NCAR'] + 'CESM1-WACCM': ['NSF-DOE-NCAR'] 'CFSv2-2011': ['COLA-CFS', 'NOAA-NCEP'] - 'CMCC-CESM': 'CMCC' - 'CMCC-CM': 'CMCC' - 'CMCC-CMS': 'CMCC' - 'CNRM-CM5': 
'CNRM-CERFACS' - 'CNRM-CM5-2': 'CNRM-CERFACS' - 'CSIRO-Mk3-6-0': 'CSIRO-QCCCE' - 'EC-EARTH': 'ICHEC' - 'FGOALS-g2': 'LASG-CESS' - 'FGOALS-gl': 'LASG-IAP' - 'FGOALS-s2': 'LASG-IAP' - 'FIO-ESM': 'FIO' - 'fio-esm': 'FIO' - 'GEOS-5': 'NASA-GMAO' - 'GFDL-CM2p1': 'NOAA-GFDL' - 'GFDL-CM3': 'NOAA-GFDL' - 'GFDL-ESM2G': 'NOAA-GFDL' - 'GFDL-ESM2M': 'NOAA-GFDL' - 'GFDL-HIRAM-C180': 'NOAA-GFDL' - 'GFDL-HIRAM-C360': 'NOAA-GFDL' - 'GISS-E2-H': 'NASA-GISS' - 'GISS-E2-H-CC': 'NASA-GISS' - 'GISS-E2-R': 'NASA-GISS' - 'GISS-E2-R-CC': 'NASA-GISS' - 'HadCM3': 'MOHC' - 'HadGEM2-A': 'MOHC' - 'HadGEM2-AO': 'NIMR-KMA' - 'HadGEM2-CC': 'MOHC' + 'CMCC-CESM': ['CMCC'] + 'CMCC-CM': ['CMCC'] + 'CMCC-CMS': ['CMCC'] + 'CNRM-CM5': ['CNRM-CERFACS'] + 'CNRM-CM5-2': ['CNRM-CERFACS'] + 'CSIRO-Mk3-6-0': ['CSIRO-QCCCE'] + 'EC-EARTH': ['ICHEC'] + 'FGOALS-g2': ['LASG-CESS'] + 'FGOALS-gl': ['LASG-IAP'] + 'FGOALS-s2': ['LASG-IAP'] + 'FIO-ESM': ['FIO'] + 'fio-esm': ['FIO'] + 'GEOS-5': ['NASA-GMAO'] + 'GFDL-CM2p1': ['NOAA-GFDL'] + 'GFDL-CM3': ['NOAA-GFDL'] + 'GFDL-ESM2G': ['NOAA-GFDL'] + 'GFDL-ESM2M': ['NOAA-GFDL'] + 'GFDL-HIRAM-C180': ['NOAA-GFDL'] + 'GFDL-HIRAM-C360': ['NOAA-GFDL'] + 'GISS-E2-H': ['NASA-GISS'] + 'GISS-E2-H-CC': ['NASA-GISS'] + 'GISS-E2-R': ['NASA-GISS'] + 'GISS-E2-R-CC': ['NASA-GISS'] + 'HadCM3': ['MOHC'] + 'HadGEM2-A': ['MOHC'] + 'HadGEM2-AO': ['NIMR-KMA'] + 'HadGEM2-CC': ['MOHC'] 'HadGEM2-ES': ['INPE', 'MOHC'] - 'inmcm4': 'INM' - 'IPSL-CM5A-LR': 'IPSL' - 'IPSL-CM5A-MR': 'IPSL' - 'IPSL-CM5B-LR': 'IPSL' - 'MIROC-ESM': 'MIROC' - 'MIROC-ESM-CHEM': 'MIROC' - 'MIROC4h': 'MIROC' - 'MIROC5': 'MIROC' - 'MPI-ESM-LR': 'MPI-M' - 'MPI-ESM-MR': 'MPI-M' - 'MPI-ESM-P': 'MPI-M' - 'MRI-AGCM3-2H': 'MRI' - 'MRI-AGCM3-2S': 'MRI' - 'MRI-CGCM3': 'MRI' - 'MRI-ESM1': 'MRI' - 'NICAM-09': 'NICAM' - 'NorESM1-M': 'NCC' - 'NorESM1-ME': 'NCC' - realm_frequency: - 'Amon': ['atmos', 'mon'] - 'Omon': ['ocean', 'mon'] - 'Lmon': ['land', 'mon'] - 'LImon': ['landIce', 'mon'] - 'OImon': ['seaIce', 'mon'] - 'aero': ['aerosol', 'mon'] - # '3hr': ??? 
- 'cfDay': ['atmos', 'day'] - 'cfMon': ['atmos', 'mon'] - 'day': ['atmos', 'day'] - 'fx': ['*', 'fx'] + 'inmcm4': ['INM'] + 'IPSL-CM5A-LR': ['IPSL'] + 'IPSL-CM5A-MR': ['IPSL'] + 'IPSL-CM5B-LR': ['IPSL'] + 'MIROC-ESM': ['MIROC'] + 'MIROC-ESM-CHEM': ['MIROC'] + 'MIROC4h': ['MIROC'] + 'MIROC5': ['MIROC'] + 'MPI-ESM-LR': ['MPI-M'] + 'MPI-ESM-MR': ['MPI-M'] + 'MPI-ESM-P': ['MPI-M'] + 'MRI-AGCM3-2H': ['MRI'] + 'MRI-AGCM3-2S': ['MRI'] + 'MRI-CGCM3': ['MRI'] + 'MRI-ESM1': ['MRI'] + 'NICAM-09': ['NICAM'] + 'NorESM1-M': ['NCC'] + 'NorESM1-ME': ['NCC'] OBS: + cmor_strict: false input_dir: - default: '[tier]/[dataset]' - BSC: '[type]/[institute.lower]/[dataset.lower]/[freq_folder]/[var][freq_base]' + default: 'Tier[tier]/[dataset]' + BSC: '[type]/[institute.lower]/[dataset.lower]/[freq_folder]/[short_name][freq_base]' input_file: - default: '[project]_[dataset]_[type]_[version]_[field]_[var]_*' - BSC: '[var]_*.nc' - fx_dir: - default: '[tier]/[dataset]' - fx_file: - default: '[project]_[dataset]_[type]_fx_[fx_var].nc' - output_file: '[project]_[dataset]_[type]_[version]_[field]_[var]_[start_year]-[end_year]' + default: '[project]_[dataset]_[type]_[version]_[mip]_[short_name]_*.nc' + BSC: '[short_name]_*.nc' + input_fx_dir: + default: 'Tier[tier]/[dataset]' + input_fx_file: + default: '[project]_[dataset]_[type]_[version]_fx_[fx_var].nc' + output_file: '[project]_[dataset]_[type]_[version]_[mip]_[short_name]_[start_year]-[end_year]' cmor_type: 'CMIP5' obs4mips: + cmor_strict: false input_dir: - default: '[tier]/[dataset]' - input_file: '[var]_[dataset]_[level]_[version]_*' - output_file: '[project]_[dataset]_[level]_[version]_[field]_[var]_[start_year]-[end_year]' - cmor_type: 'CMIP5' + default: 'Tier[tier]/[dataset]' + input_file: '[short_name]_[dataset]_[level]_[version]_*.nc' + input_fx_dir: + default: 'Tier[tier]/[dataset]' + input_fx_file: + default: '[project]_[dataset]_fx_[fx_var].nc' + output_file: '[project]_[dataset]_[level]_[version]_[short_name]_[start_year]-[end_year]' + cmor_type: 'CMIP6' + cmor_path: 'obs4mips' ana4mips: + cmor_strict: false input_dir: - default: '[tier]/[dataset]' - input_file: '[var]_[mip]_[type]_[dataset]_*' - output_file: '[project]_[mip]_[type]_[dataset]_[field]_[var]_[start_year]-[end_year]' - cmor_type: 'CMIP5' - -CCMVal1: - input_dir: - default: '[exp]/[dataset]' - input_file: '[project]_[exp]_[name]_[ensemble]_[field]_[var]' - output_file: '[project]_[exp]_[name]_[ensemble]_[field]_[var]_[start_year]-[end_year]' - cmor_type: 'CMIP5' - -CCMVal2: - input_dir: - default: '[exp]/[dataset]' - input_file: '[project]_[exp]_[name]_[ensemble]_[field]_[var]' - output_file: '[project]_[exp]_[name]_[ensemble]_[field]_[var]_[start_year]-[end_year]' + default: 'Tier[tier]/[dataset]' + input_file: '[short_name]_[mip]_[type]_[dataset]_*.nc' + output_file: '[project]_[mip]_[type]_[dataset]_[short_name]_[start_year]-[end_year]' cmor_type: 'CMIP5' EMAC: input_dir: default: '[dataset]' input_file: '' - output_file: '[dataset]_[ensemble]_[field]_[var]_[start_year]-[end_year]' + output_file: '[dataset]_[ensemble]_[short_name]_[start_year]-[end_year]' cmor_type: 'CMIP5' diff --git a/esmvaltool/config-references.yml b/esmvaltool/config-references.yml index bbbcf5faa1..3895298b84 100644 --- a/esmvaltool/config-references.yml +++ b/esmvaltool/config-references.yml @@ -6,22 +6,26 @@ # authors: -### Core Team (PI and core developers) + # Core Team (PI and core developers) eyri_ve: name: Eyring, Veronika institute: DLR, Germany email: veronika.eyring 'at' dlr.de ande_bo: name: Andela, Bouwe 
- institute: NLESC, Netherlands + institute: NLeSC, Netherlands email: b.andela 'at' esciencecenter.nl broe_bj: name: Broetz, Bjoern institute: DLR, Germany email: bjoern.broetz 'at' dlr.de + demo_le: + name: de Mora, Lee + institute: PML, UK + email: ledm 'at' pml.ac.uk dros_ni: name: Drost, Niels - institute: NLESC, Netherlands + institute: NLeSC, Netherlands email: n.drost 'at' esciencecenter.nl kold_ni: name: Koldunov, Nikolay @@ -43,11 +47,15 @@ authors: name: Righi, Mattia institute: DLR, Germany email: mattia.righi 'at' dlr.de + schl_ma: + name: Schlund, Manuel + institute: DLR, Germany + email: manuel.schlund 'at' dlr.de vega_ja: name: Vegas-Regidor, Javier institute: BSC, Spain email: javier.vegas 'at' bsc.es -### Development team + # Development team anav_al: name: Anav, Alessandro institute: Univ. of Exeter, UK @@ -56,10 +64,34 @@ authors: name: Andrews, Oliver institute: Tyndall Centre, UK email: oliverdandrews 'at' googlemail.com + arno_en: + name: Arnone, Enrico + institute: ISAC-CNR, Torino, Italy + email: e.arnone 'at' isac.cnr.it + bell_om: + name: Bellprat, Omar + institute: BSC, Spain + email: omar.bellprat 'at' bsc.es + berg_pe: + name: Berg, Peter + institute: SMHI, Sweden + email: peter.berg 'at' smhi.se bock_ls: name: Bock, Lisa institute: DLR, Germany email: lisa.bock 'at' dlr.de + bojo_dr: + name: Bojovic, Dragana + institute: BSC, Spain + email: dragana.bojovic 'at' bsc.es + cagn_ch: + name: Cagnazzo, Chiara + institute: CNR, Italy + email: chiara.cagnazzo 'at' cnr.it + caro_lo: + name: Caron, Louis-Philippe + institute: BSC, Spain + email: louis-philippe.caron 'at' bsc.es chen_ja: name: Chen, Jack institute: NCAR, USA @@ -68,10 +100,26 @@ authors: name: Cionni, Irene institute: ENEA, Italy email: irene.cionni 'at' enea.it + cort_ni: + name: Cortesi, Nicola + institute: BSC, Spain + email: nicola.cortesi 'at' bsc.es + crez_ba: + name: Crezee, Bas + institute: ETH Zurich, Switzerland + email: bas.crezee 'at' env.ethz.ch + mohr_cw: + name: Mohr, Christian Wilhelm + institute: Cicero, Norway + email: c.w.mohr 'at' cicero.oslo.no davi_ed: name: Davin, Edouard Leopold - institute: ETH, CH + institute: ETH Zurich, Switzerland email: edouard.davin 'at' env.ethz.ch + davi_pa: + name: Davini, Paolo + institute: CNR-ISAC, Italy + email: p.davini 'at' isac.cnr.it ehbr_ca: name: Ehbrecht, Carsten institute: DKRZ, Germany @@ -80,6 +128,10 @@ authors: name: Frank, Franziska institute: DLR, Germany email: franziska.frank 'at' dlr.de + fuck_ne: + name: Fuckar, Neven + institute: BSC, Spain + email: neven.fuckar 'at' bsc.es gain_al: name: Gainusa-Bogdan, Alina institute: France @@ -91,10 +143,18 @@ authors: name: Gettelman, Andrew institute: NCAR, USA email: andrew 'at' ucar.edu + gier_be: + name: Gier, Bettina + institute: University of Bremen, Germany + email: bettina.gier 'at' dlr.de gott_kl: name: Gottschaldt, Klaus-Dirk institute: DLR, Germany email: klaus-dirk.gottschaldt 'at' dlr.de + guem_vi: + name: Guemas, Virginie + institute: BSC, Spain + email: virginie.guemas 'at' bsc.es hage_st: name: Hagemann, Stefan institute: MPI-M, Germany @@ -102,10 +162,22 @@ authors: hann_ul: name: Hansson, Ulf institute: SMHI, Sweden + hard_jo: + name: von Hardenberg, Jost + institute: ISAC-CNR, Torino, Italy + email: j.vonhardenberg 'at' isac.cnr.it + hass_bg: + name: Hassler, Birgit + institute: DLR, Germany + email: birgit.hassler 'at' dlr.de hemp_ni: name: Hempelmann, Nils institute: IPSL, France email: nils.hempelmann 'at' ipsl.jussieu.fr + hunt_al: + name: Hunter, Alasdair + institute:
BSC, Spain + email: alasdair.hunter 'at' bsc.es hutj_ro: name: Hutjes, Ronald institute: Univ. of Wageningen, NL @@ -130,6 +202,10 @@ authors: name: Kunert, Dominik institute: DLR, Germany email: dominik.kunert 'at' dlr.de + lemb_va: + name: Lembo, Valerio + institute: CEN, University of Hamburg, Germany + email: valerio.lembo 'at' uni-hamburg.de levi_ri: name: Levine, Richard institute: MetOffice, UK @@ -138,16 +214,36 @@ authors: name: Little, Bill institute: MetOffice, UK email: bill.little 'at' metoffice.gov.uk + lled_ll: + name: Lledó, Llorenç + institute: BSC, Spain + email: llorenç.lledo 'at' bsc.es + lore_ru: + name: Lorenz, Ruth + institute: ETH Zurich, Switzerland + email: ruth.lorenz 'at' env.ethz.ch + lova_to: + name: Lovato, Tomas + institute: CMCC, IT + email: tomas.lovato 'at' cmcc.it maek_ja: name: Makela, Jarmo malo_er: name: Maloney, Eric institute: Colorado State University email: emaloney 'at' atmos.colostate.edu + manu_ni: + name: Manubens, Nicolau + institute: BSC, Spain + email: nicolau.manubens 'at' bsc.es maso_er: name: Mason, Erik institute: NOAA, USA email: erik.mason 'at' noaa.gov + sand_ma: + name: Sandstad, Marit + institute: Cicero, Norway + email: marit.sandstad 'at' cicero.oslo.no mass_fr: name: Massonnet, Francois institute: Spain @@ -155,6 +251,10 @@ authors: name: Martin, Gill institute: MetOffice, UK email: gill.martin 'at' metoffice.gov.uk + mavi_ir: + name: Mavilia, Irene + institute: ISAC-CNR, Bologna, Italy + email: i.mavilia 'at' isac.cnr.it mell_fe: name: Mello, Felipe institute: INPE, Brazil @@ -169,6 +269,10 @@ authors: nobr_pa: name: Nobre, Paulo institute: INPE, Brazil + gonz_nu: + name: González-Reviriego, Nube + institute: BSC, Spain + email: nube.gonzalez 'at' bsc.es oliv_ar: name: Oliveira, Arildo institute: INPE, Brazil @@ -196,26 +300,38 @@ authors: name: Roehrig, Romain institute: MeteoFr, France email: romain.roehrig 'at' meteo.fr - schl_ma: - name: Schlund, Manuel - institute: DLR, Germany - email: manuel.schlund 'at' dlr.de senf_da: name: Senftleben, Daniel institute: DLR, Germany email: daniel.senftleben 'at' dlr.de + serv_fe: + name: Serva, Federico + institute: CNR, Italy + email: federico.serva 'at' artov.isac.cnr.it somm_ph: name: Sommer, Philipp institute: Univ. of Hamburg, Germany email: 'at' + stac_to: + name: Stacke, Tobias + institute: MPI-M, Germany + email: tobias.stacke 'at' mpimet.mpg.de ster_an: name: Sterl, Andreas institute: KNMI, Netherlands email: sterl 'at' knmi.nl + swam_ra: + name: Swaminathan, Ranjini + institute: University of Reading, UK + email: r.swaminathan 'at' reading.ac.uk teic_ca: name: Teichmann, Claas institute: CSC2, Germany email: claas.teichmann 'at' hzg.de + torr_ve: + name: Torralba, Veronica + institute: BSC, Spain + email: veronica.torralba 'at' bsc.es tsus_yo: name: Tsushima, Yoko institute: MetOffice, UK @@ -243,7 +359,7 @@ authors: name: Williams, Keith institute: MetOffice, UK email: keith.williams 'at' metoffice.gov.uk -### Viewers (not active developers) + # Viewers (not active developers) bala_ve: name: Balaji, Venkatramani institute: GFDL, USA @@ -256,6 +372,10 @@ authors: name: Charlton-Perez, Andrew institute: Univ.
of Reading, UK email: a.j.charlton-perez 'at' reading.ac.uk + cort_su: + name: Corti, Susanna + institute: ISAC-CNR, Torino, Italy + email: s.corti 'at' isac.cnr.it denn_jo: name: Dennis, John institute: NCAR, USA @@ -312,7 +432,7 @@ authors: name: Wyser, Klaus institute: SMHI, Sweden email: klaus.wyser 'at' smhi.se -### Former developers + # Former developers brae_me: name: Braeu, Melanie institute: DLR, Germany @@ -347,6 +467,9 @@ authors: email: carolin.klinger 'at' physik.uni-muenchen.de kola_mi: name: Kolax, Michael + loew_al: + name: Loew, Alexander + institute: LMU, Germany neal_ri: name: Neale, Richard institute: NCAR, US @@ -359,11 +482,15 @@ authors: name: Pascoe, Stephen institute: STFC, UK email: stephen.pascoe 'at' stfc.ac.uk + pere_nu: + name: Perez-Zanon, Nuria + institute: BSC, Spain + email: nuria.perez 'at' bsc.es stev_ma: name: Stevens, Mark institute: NCAR, US email: stevens 'at' ucar.edu -### Former viewers (not active viewers) + # Former viewers (not active viewers) butc_ne: name: Butchart, Neal institute: MetOffice, UK @@ -392,106 +519,118 @@ authors: name: Tilmes, Simone institute: NCAR, US email: tilmes 'at' ucar.edu + + + references: acknow_author: "Please acknowledge the author(s)." - contact_authors:: "Please contact the author(s) to discuss acknowledgment or co-authorship." + contact_authors: "Please contact the author(s) to discuss acknowledgment or co-authorship." acknow_project: "Please acknowledge the project(s)." + alexander: "Alexander L.V., et al., J. Geophys. Res., 111, D05109, doi:10.1029/2005JD006290" anav13jclim: "Anav et al., J. Clim., 26, 6801-6843, doi:10.1175/JCLI-D-12-00417.1, 2013." + andrews12grl: "Andrews et al., Geophys. Res. Lett., 39, L09712, doi:10.1029/2012GL051607, 2012." antonov10usgov: "Antonov, J. I. et al., World Ocean Atlas 2009, Volume 2: Salinity. S. Levitus, Ed. NOAA Atlas NESDIS 69, U.S. Government Printing Office, Washington, D.C., 184 pp., 2010." aquila11gmd: "Aquila et al., Geosci. Model Dev. 4, 325-355, doi:10.5194/gmd-4-325-2011, 2011." bakker14essd: "Bakker, D. C. E. et al., Earth Syst. Sci. Data, 6, 69-90, doi:10.5194/essd-6-69-2014, 2014." + baldwin09qjrms: "Baldwin, D. P. et al., Q. J. R. Meteorol. Soc., 135, 1661-1672, doi:10.1002/qj.479, 2009" bianchi12gbc: "Bianchi, D. et al., Global Biogeochem. Cy., 26, GB2009, doi:10.1029/2011GB004209, 2012." cionni11acp: "Cionni et al., Atmos. Chem. Phys., 11, 11267-11292, doi:10.5194/acp-11-11267-2011, 2011." - clivar09jclim: "CLIVAR Madden-Julian Oscillation Working Group, J. Climate, 22, 3006–3030, doi:10.1175/2008JCLI2731.1, 2009." + clivar09jclim: "CLIVAR Madden-Julian Oscillation Working Group, J. Climate, 22, 3006-3030, doi:10.1175/2008JCLI2731.1, 2009." + collins13ipcc: "Collins, M. et al., Long-term climate change: Projections, Commitments, and Irreversibility, in: Climate Change 2013: the Physical Science Basis, contribution of Working Group I to the Fifth Assessment Report of the Intergovernmental Panel on Climate Change, edited by: Stocker, T. F., Qin, D., Plattner, G.-K., Tignor, M., Allen, S. K., Boschung, J., Nauels, A., Xia, Y., Bex, V., and Midgley, P. M., Cambridge University Press, Cambridge, UK and New York, NY, USA (2013)." + corti99nat: "Corti, S. et al. Nature 398, 799-801, doi:10.1038/19745" + cox18nature: "Cox, P. M. et al., Nature, 553, 319-322, doi:10.1038/nature25450, 2018." + davini12jclim: "Davini P., C. Cagnazzo, S. Gualdi, and A. Navarra. J.
Climate, 25, 6496-6509, doi: 10.1175/JCLI-D-12-00032.1, 2012" + davini18: "Davini, P. MiLES - Mid Latitude Evaluation System. Zenodo. doi:10.5281/zenodo.1237837, 2018" + demora2018gmd: "de Mora et al., Geosci. Model Dev., 11, 4215-4240, doi:10.5194/gmd-11-4215-2018, 2018." dong08grl: "Dong, S. et al., J. Geophys. Res., 113, C06013, doi:10.1029/2006JC004051, 2008." + donofrio14jh: "D'Onofrio et al., J. of Hydrometeorology 15, 830-843, 2014." duemenil00mpimr: "Duemenil Gates et al., Observed historical discharge data from major rivers for climate model validation. Max Planck Institute for Meteorology Report 307, Hamburg, Germany, 2000." emmons00jgr: "Emmons et al., J. Geophys. Res., 105, D16, 20497-20538, 2000." eyring06jgr: "Eyring et al., J. Geophys. Res., 111, D22308, doi:10.1029/2006JD007327, 2006." eyring13jgr: "Eyring et al., J. Geophys. Res., 118, 5029-5060, doi:10.1002/jgrd.50316, 2013." flato13ipcc: "Flato, G. et al., Evaluation of climate models, in: Climate Change 2013: the Physical Science Basis, 2013." + fuckar: "Fuckar et al., Clim. Dynam., 47, 5-6, 1527-1543, doi:10.1007/s00382-015-2917-2, 2016." gen14jclim: "Gen, L. et al., J. Climate, 27, 1765-1780, doi:10.1175/JCLI-D-13-00337.1, 2014." + georgievski18tac: "Georgievski, G. & Hagemann, S. Theor Appl Climatol (2018). https://doi.org/10.1007/s00704-018-2675-2" + giorgi11jc: "Giorgi et al., J. Climate 24, 5309-5324, doi:10.1175/2011JCLI3979.1, 2011." + giorgi14jgr: "Giorgi et al., J. Geophys. Res. Atmos., 119, 11695-11708, doi:10.1002/2014JD022238, 2014." gleckler08jgr: "Gleckler et al., J. Geophys. Res., 113, D06104, doi:10.1029/2007JD008972, 2008." - goswami99qjrms: "Goswami, B., V. Krishnamurthy, and H. Annamalai, Q. J. R. Meteorol. Soc., 125, 611–633, doi:10.1002/qj.49712555412, 1999." - hagemann13jadvmodelearthsyst: "Hagemann et al., J. Adv. Model. Earth Syst., 5, doi:10.1029/2012MS000173, 2013." + goswami99qjrms: "Goswami, B., V. Krishnamurthy, and H. Annamalai, Q. J. R. Meteorol. Soc., 125, 611-633, doi:10.1002/qj.49712555412, 1999." + hagemann13james: "Hagemann et al., J. Adv. Model. Earth Syst., 5, doi:10.1029/2012MS000173, 2013." jones15james: "Jones et al., J. Adv. Model. Earth Syst., 7, 1554-1575, doi:10.1002/2014MS000416, 2015." kerry06jclim: "Kerry H. et al, J. Climate, 19, 3681-3703, doi:10.1175/JCLI3814.1, 2006." kim09jclim: "Kim, D. et al., J. Climate, 22, 6413-6436, doi:10.1175/2009JCLI3063.1, 2009." kim12grl: "Kim and Yu, Geophys. Res. Lett., 39, L11704, doi:10.1029/2012GL052006, 2012." key04gbc: "Key, R. M. et al., Global Biogeochem. Cy., 18, GB4031, doi:10.109/2004GB002247, 2004." + landschuetzer2016: "Landschuetzer, P., Gruber, N., Bakker, D. C. E.: Decadal variations and trends of the global ocean carbon sink, Global Biogeochemical Cycles, 30, doi:10.1002/2015GB005359 (2016)" lauer05acp: "Lauer et al., Atmos. Chem. Phys., 5, 3251-3276, doi:10.5194/acp-5-3251-2005, 2005." lauer13jclim: "Lauer and Hamilton, J. Climate, 26, 3823-3845, doi:10.1175/JCLI-D-12-00451.1, 2013." - lauer17rse: "Lauer et al., Remote Sens. Environ., in press, 2017." - li14jclim: "Li and Xie, J. Climate, 27, 1765-1780, doi:https://doi.org/10.1175/JCLI-D-13-00337.1, 2014." + lauer17rse: "Lauer et al., Remote Sens. Environ., 203, 9-39, doi:10.1016/j.rse.2017.01.007, 2017." + lembo16climdyn: "Lembo et al., Clim. Dyn., 48, 1793-1812, doi:10.1007/s00382-016-3173-9" + lembo19gmdd: "Lembo et al., Geosci. Model Dev. Discuss., doi:10.5194/gmd-2019-37, 2019" + li14jclim: "Li and Xie, J. Climate, 27, 1765-1780, doi:10.1175/JCLI-D-13-00337.1, 2014."
lin08jclim: "Lin, J-L. et al., J. Climate, 21, 4541-4567, doi: 10.1175/2008JCLI1816.1, 2008." lloyd-hughes02jclim: "Lloyd-Hughes, B. and Saunders, M. A., Int. J. Climatol., 22, 1571-1592, doi:10.1002/joc.846, 2002." locarini10usgov: "Locarnini, R. A. et al., World Ocean Atlas 2009, Volume 1: Temperature. S. Levitus, Ed. NOAA Atlas NESDIS 68, U.S. Government Printing Office, Washington, D.C., 184 pp.,2010." + lucarini14revgeop: "Lucarini et al., Rev. Geophys., 52, 809-859, doi:10.1002/2013RG000446" + mehran14jgr: "Mehran, A. et al., J. Geophys. Res., 119, 4, 1695-1707, doi:10.1002/2013JD021152, 2014." + manubens: "Manubens, N., et al., ENVIRON MODELL SOFTW 103, 29-42. doi:10.1016/j.envsoft.2018.01.018" + mckee93: "McKee, T. B. and Doesken, N. J. and Kleist, J. In Proceedings of the 8th Conference on Applied Climatology, 17(22), 179-183, Boston, MA: American Meteorological Society, 1993." mueller14grl: "Mueller, B. and Seneviratne, S. I. Geophys. Res. Lett., 41, 128-134, doi:10.1002/2013GL058055, 2014." mueller13hess: "Mueller, B. et al., Hydrol. Earth Syst. Sci., 17, 3707-3720, doi:10.5194/hess-17-3707-2013, 2013." phillips14eos: "Phillips, A. S. et al., EOS T. Am. Geophys. Un., 95, 453-455, 2014." + rebora06jhm: "Rebora et al., JHM 7, 724, 2006." righi13acp: "Righi et al., Atmos. Chem. Phys., 13, 9939-9970, doi:10.5194/acp-13-9939-2013, 2013." righi15gmd: "Righi et al., Geosci. Model Dev., 8, 733-768 doi:10.5194/gmd-8-733-2015, 2015." + rk2008bams: "Reichler and Kim, Bull. Amer. Meteor. Soc., 89, 303-312, doi:10.1175/BAMS-89-3-303, 2008." roedenbeck13os: "Roedenbeck, C. et al., Ocean Sci., 9, 193-216, doi:10.5194/os-9-193-2013, 2013." roehrig13jclim: "Roehrig, R. et al., J. Climate, 26, 6471-6505, doi:10.1175/JCLI-D-12-00505.1, 2013." sperber12asl: "Sperber and Kim, Atmos. Sci. Lett., 13, 3, 187-193, doi:10.1002/asl.378, 2012." + straus07jcli: "Straus, D.M., S. Corti, and F. Molteni. J. Climate, 20, 2251-2272, doi:10.1175/JCLI4070.1, 2007" stroeve07grl: "Stroeve, J. et al., Geophys. Res. Lett., 34, L09501, doi:10.1029/2007GL029703, 2007." + tibaldi90tel: "Tibaldi S. and Molteni F. Tellus A 42(3): 343-365, doi:10.1034/j.1600-0870.1990.t01-2-00003.x, 1990." taylor12: "Taylor et al., Nature, 489, 423-426, doi:10.1038/nature11377, 2012." + terzago18nhess: "Terzago, S. et al., Nat. Hazards Earth Syst. Sci., 18, 2825-2840, doi:10.5194/nhess-18-2825-2018, 2018." takahashi14marchem: "Takahashi et al., Mar. Chem., 164, 95-125, doi:10.1016/j.marchem.2014.06.004, 2014." + vicente10jclim: "Vicente-Serrano, S. M. and Beguería, S. and López-Moreno, J. I., Journal of Climate, 23(7), 1696-1718, doi:10.1175/2009JCLI2909.1, 2010" wang99bams: "Wang, B. and Z. Fan, Bull. Amer. Meteor. Soc., 80, 629-638, doi:10.1175/1520-0477(1999)080<0629:COSASM>2.0.CO;2, 1999." wang11climdyn: "Wang, B. et al., Clim. Dyn., 39, 1123-1135, doi:10.1007/s00382-011-1266-z, 2011." webster92qjrms: "Webster, P. J. and Yang, S., Q.J.R. Meteorol. Soc., 118: 877-926. doi:10.1002/qj.49711850705, 1992." weedon14wrr: "Weedon, G. P. et al., Water Resour. Res., 50, 7505-7514, doi:10.1002/2014WR015638, 2014." + weigel: "Weigel, A. P., et al., Q. J. Royal Meteorol. Soc. 134, 630, 241-260. doi:10.1002/qj.210" wenzel14jgr: "Wenzel et al., J. Geophys. Res. Biogeosci., 119(5), doi:2013JG002591, 2014." williams09climdyn: "Williams and Webb, Clim. Dynam., 33, 141-157, doi:10.1007/s00382-008-0443-1, 2009."
- -observation references: - access2: "ACCESS2 - Roiger et al., accepted" - aeronet: "AERONET - See http://aeronet.gsfc.nasa.gov/new_web/data_usage.html" - airs: "AIRS" - auramls: "Aura MLS" - cirrus: "CIRRUS - Baumgardner et al., Environ. Res. Lett., 3, 025010, 2008." - castnet: "CASTNET - See http://epa.gov/castnet/javaweb/index.html" - ceresebaf: "CERES-EBAF" - concert: "CONCERT - Dahlkoetter et al., Atmos. Chem. Phys., 14, 6111-6137, 2014." - cr-ave: "CR-AVE - Schwarz et al., J. Geophys. Res., 113, D03203, 2008." - dc3: "DC3 - B. Weinzierl (DLR)" - duemenil00mpi: "GRDC station observations - Duemenil et al., Max Planck Institute for Meteorology Report 307, Hamburg, Germany, 2000." - eanet: "EANET - See http://www.eanet.cc/" - emep: "EMEP - See http://www.emep.int/" - emmons00jgr: "Emmons et al., J. Geophys. Res., 105, D16, 20497-20538, 2000." - era40: "ERA40" - erainterim: "ERA-Interim" - esacci-aerosol: "ESACCI-AEROSOL - Popp, T. et al. (2015): ESA Aerosol Climate Change Initiative (ESA Aerosol_cci) data: AOD v4.21 via Centre for Environmental Data Analysis, May 2016" - esacci-landcover: "ESACCI-LANDCOVER - Defourny et al. (2015): ESA Land Cover Climate Change Initiative (ESA LC_cci) data: ESACCI-LC-L4-LCCS-Map-300m-P5Y-[2000,2005,2010]-v1.6.1 via Centre for Environmental Data Analysis" - esacci-sm: "ESACCI-SOILMOISTURE - Liu, et al. (2011), Hydrology and Earth System Sciences, 15, 425-436; Liu, et al. (2012), Remote Sensing of Environment, 123, 280-297; Wagner, et al. (2012), Remote Sensing and Spatial Information Sciences, Volume I-7" - esacci-sst: "ESACCI-SST - Merchant, et al. (2014): ESA Sea Surface Temperature Climate Change Initiative (ESA SST CCI): Analysis long term product version 1.0, NERC Earth Observation Data Centre, 24th February 2014, doi:10.5285/878bef44-d32a-40cd-a02d-49b6286f0ea4." - haloe: "HALOE" - hippo: "HIPPO - Schwarz et al., Geophys. Res. Lett., 40, 5542-5547, 2013." - improve: "IMPROVE - See http://vista.cira.colostate.edu/improve/data/improve/improve_data.htm" - inca: "INCA - Minikin et al., Geophys. Res. Lett., 30, 1503, 2003." - isccp_d1: "ISCCP D1 - Rossow and Schiffer, Bull. Amer. Meteor. Soc., 80, 2261-2287, 1999." - king03tgrs: "MODIS-CFMIP - King, M. D., et al., IEEE Trans. Geosci. Remote Sens., 41, 442-458, 2003." - ncep: "NCEP - Kalnay et al., Bull. Amer. Meteor. Soc., 77, 437-470, 1996." - misr: "MISR" - modis: "MODIS" - niwa: "NIWA" - odell08jcli: "UWisc - O'Dell et al., J. Climate, 21, 1721-1739, 2008." - lace1: "LACE - Petzold et al., J. Geophys. Res., 107, D21, 8128, 2002." - lace2: "LACE - Schroeder et al., J. Geophys. Res., 107, D21, 8126, 2002." - oct-ave: "Oct-AVE - Schwarz et al., J. Geophys. Res., 111, D16207, 2006." - pincus08jcli: "MODIS-CFMIP - Pincus et al., J. Climate, 25, 4699-4720, 2012." - saltrace1: "SALTRACE - B. Weinzierl (DLR)" - saltrace2: "SALTRACE - K. Fomba (TROPOS)" - samum1: "SAMUM - Weinzierl et al., Tellus B, 2009." - samum2: "SAMUM - K. Kandler (TU Darmstadt)" - srb: "SRB" - tc4: "TC4 - Spackman et al., Geophys. Res. Lett., 38, L09803, 2011." - ucnpacific: "UCN-Pacific - Clarke and Kapustin, J. Atmos. Sci. 59, 363, 2002." - ukmo: "UKMO" - weedon14waterresourres: "WFDEI - Weedon et al., Water Resour. Res. 50, 2014." + # Observations + aura-tes: "Beer, R., IEEE Trans. Geosci. Rem. Sens., doi:10.1109/TGRS.2005.863716, 2006." + ceres-syn1deg: "Wielicki et al., Bull. Amer. Meteor. Soc., doi: 10.1175/1520-0477(1996)077<0853:CATERE>2.0.CO;2, 1996." + cru: "Harris, I. et al., Int. J. 
Climatol., 34, doi: 10.1002/joc.3711, 2014." + era-interim: "Dee, D. P. et al., Q. J. Roy. Meteor. Soc., doi:10.1002/qj.828, 2011." + esacci-aerosol: "Popp et al., ESA Aerosol Climate Change Initiative (ESA Aerosol_cci) data: AOD v4.21 via Centre for Environmental Data Analysis, 2016." + esacci-cloud: "Stengel et al., Earth Syst. Sci. Data, doi:10.5194/essd-9-881-2017, 2017." + esacci-fire: "Not available." + esacci-landcover: "Defourny et al.. ESA Land Cover Climate Change Initiative (ESA LC_cci) data, 2015." + esacci-oc: "Volpe, G., et al., 2018. PRODUCT USER MANUAL. http://resources.marine.copernicus.eu/documents/PUM/CMEMS-OC-PUM-009-ALL.pdf" + esacci-ozone: "Loyola et al., Int. J. Remote Sens. doi:10.1080/01431160902825016, 2009." + esacci-soilmoisture: "Not available." + esacci-sst: "Merchant et al., Geosci. Data J., doi:10.1002/gdj3.20, 2014." + hadisst: "Rayner et al., J. Geophys. Res., doi:10.1029/2002JD002670, 2013." + landflux-eval: "Mueller, B. et al., Hydrol. Earth Syst. Sci., 17, 3707-3720, doi:10.5194/hess-17-3707-2013, 2013." + modis: "Not available." + mte: "Jung et al., J. Geophys. Res., 116, doi:10.1029/2010JG001566, 2011." + ncep: "Kalnay et al., B. Am. Meteorol. Soc., doi:10.1175/1520-0477(1996)077<0437:TNYRP>2.0.CO;2, 1996." + niwa-bs: "Bodeker et al., Atmos. Chem. Phys., doi:10.5194/acp-5-2603-2005, 2005." + patmos-x: "Heidinger et al., NOAA National Centers for Environmental Information, doi:10.7289/V5348HCK, last access: 10 February 2019." + woa: "Locarnini et al., World Ocean Atlas 2013, Vol. 1: Temperature, 2013." + zhang-2011: "Zhang et al., WIREs Clim. Change, doi:10.1002/wcc.147, 2011" + sillman-2013: "Sillmann et al., J. Geophys. Res., doi:10.1029/2012JD018390, 2013" projects: + c3s-magic: Copernicus Climate Change Service 34a Lot 2 (MAGIC) project climval: BMBF MiKlip Project ClimVal cmip6dicad: BMBF CMIP6 Project Germany cmug: ESA CMUG @@ -501,6 +640,8 @@ projects: embrace: EU FP7 project EMBRACE esmval: DLR project ESMVal qa4ecv: QA4ECV + trr181: DFG Project TRR-181 + ukesm: UKESM, UK Earth System Model project (NERC) realms: aerosol: aerosol @@ -528,30 +669,41 @@ themes: varmodes: modes of variability domains: + eq: equatorial et: extra tropics global: global - midlat: mid-latittudes + midlat: mid-latitudes nh: northern hemisphere + nhext: northern extra tropics + nhmidlat: northern mid-latitudes + nhpolar: northern polar + nhtrop: northern tropics polar: polar reg: regional sh: southern hemisphere + shext: southern extra tropics + shmidlat: southern mid-latitudes + shpolar: southern polar + shtrop: southern tropics trop: tropics plot_types: errorbar: error bar plot bar: bar chart + circle: different overlapping circles diurn: diurnal cycle geo: geographical distribution portrait: portrait diagram polar: polar-stereographic plot scatter: scatter plot seas: seasonal cycle + sect: meridional section size: size-distribution vert: vertical profile taylor: taylor diagram times: time series zonal: zonal mean - pro: profile #(any other kind of line chart) + pro: profile # (any other kind of line chart) other: other plot types statistics: @@ -570,4 +722,5 @@ statistics: perc: percentiles median: median detrend: detrend + smpi: single metric performance index statistics other: other statistics diff --git a/esmvaltool/config-user.yml b/esmvaltool/config-user.yml index 182b184fd1..55968d5ce2 100644 --- a/esmvaltool/config-user.yml +++ b/esmvaltool/config-user.yml @@ -17,6 +17,8 @@ exit_on_warning: false output_file_type: pdf # Destination directory output_dir: 
./esmvaltool_output +# Auxiliary data directory (used for some additional datasets) +auxiliary_data_dir: ./auxiliary_data # Use netCDF compression true/[false] compress_netcdf: false # Save intermediary cubes in the preprocessor true/[false] @@ -30,14 +32,16 @@ max_parallel_tasks: 1 # Path to custom config-developer file, to customise project configurations. # See config-developer.yml for an example. Set to None to use the default config_developer_file: null +# Get profiling information for diagnostics +# Only available for Python diagnostics +profile_diagnostic: false +# Rootpaths to the data from different projects (lists are also possible) rootpath: - # Rootpath to CMIP5 data - CMIP5: ~/esmvaltool_input - # Rootpath to OBS data - OBS: ~/esmvaltool_input - # Default - default: ~/esmvaltool_input + CMIP5: [~/cmip5_inputpath1, ~/cmip5_inputpath2] + OBS: ~/obs_inputpath + RAWOBS: ~/rawobs_inputpath + default: ~/default_inputpath # Directory structure for input data: [default]/BADC/DKRZ/ETHZ/etc # See config-developer.yml for definitions. diff --git a/esmvaltool/diag_scripts/autoassess/__init__.py b/esmvaltool/diag_scripts/autoassess/__init__.py new file mode 100644 index 0000000000..b6a3eec44d --- /dev/null +++ b/esmvaltool/diag_scripts/autoassess/__init__.py @@ -0,0 +1 @@ +"""Initialize the ESMValTool autoassess package.""" diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/plot_norm_ac.py b/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py similarity index 77% rename from esmvaltool/diag_scripts/autoassess/autoassess_source/plot_norm_ac.py rename to esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py index 3f292347d6..d26c03847b 100644 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/plot_norm_ac.py +++ b/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py @@ -1,23 +1,16 @@ -#!/usr/bin/env python2.7 -# -*- coding: utf-8 -*- - """ -(C) Crown Copyright 2017, the Met Office +Plot autoassess standard metrics. +This is a modified version of plot_norm_ac +Author: Valeriu Predoi (UREAD, valeriu.predoi@ncas.ac.uk) Create normalised assessment criteria plot (NAC plot). """ -from __future__ import division, print_function - -import os -import os.path -import sys -import matplotlib as mpl -mpl.use('Agg') # noqa -import matplotlib.pyplot as plt -import argparse import csv import errno +import os + +import matplotlib.pyplot as plt import numpy as np # Define some colours @@ -41,33 +34,11 @@ for marker in MARKERS ] -# List of TODO: -# -# 1) What if we want acceptable range only? i.e. no trusted obs but a -# sensible idea of what the range should be. -# 2) How to get plots to display nicely in matplotlib viewer and non-png -# images. Issue is legend external to plot falling off screen. - - -class CommentedFile: - '''Class to help deal with comments in CSV files''' - - def __init__(self, f, commentstring="#"): - self.f = f - self.commentstring = commentstring - - def next(self): - line = self.f.next() - while line.startswith(self.commentstring): - line = self.f.next() - return line - - def __iter__(self): - return self - def merge_obs_acc(obs, acc): - ''' + """ + Merge observation errors. + Routine to merge observational uncertainty and acceptable range dictionaries into one dictionary. Returned dictionary will only contain metrics from the obs dictionary. 
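A minimal sketch of the merge behaviour described in the docstring above, using hypothetical metric names and ranges (the real routine is merge_obs_acc in _plot_mo_metrics.py):

    # Sketch of what merge_obs_acc does; metric names and values are made up.
    obs = {'rms_tas_global': (0.8, 1.2)}    # observational uncertainty (min, max)
    acc = {'rms_tas_global': (0.5, 1.5),    # acceptable range (min, max)
           'rms_pr_trop': (0.7, 1.3)}       # dropped: no matching obs entry

    merged = {}
    for metric in obs:
        values = list(obs[metric])
        if metric in acc:
            values.extend(acc[metric])      # [obs_min, obs_max, acc_min, acc_max]
        merged[metric] = values

    assert merged == {'rms_tas_global': [0.8, 1.2, 0.5, 1.5]}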
@@ -75,8 +46,8 @@ def merge_obs_acc(obs, acc): :param dict obs: Dictonary of observational uncertainties :param dict acc: Dictonary of acceptable ranges :returns: A merge of the obs and acc dictionaries - :rtype: dict - ''' + :rtype: dict. + """ metrics = {} for metric in obs.keys(): values = list(obs[metric]) @@ -87,21 +58,23 @@ def write_order_metrics(csvfile, metrics): - ''' - Routine to write out an ordered list of metrics csv file + """ + Write out ordered metrics. + + Routine to write out an ordered list of metrics csv file. Not really csv but easily written out by csv package. This is a line by line ordered list of the metrics that will be plotted on a NAC plot. It should be read in and out of a list object. :param str csvfile: CSV file name - :param list metrics: Ordered list of metrics - ''' + :param list metrics: Ordered list of metrics. + """ if metrics: try: outf = open(csvfile, 'w') - except IOError as e: - if e.errno == errno.EACCES: + except IOError as ioerr: + if ioerr.errno == errno.EACCES: pass # Raise Error else: with outf: @@ -111,8 +84,10 @@ def write_model_metrics(csvfile, metrics): - ''' - Routine to write out model metrics csv file + """ + Write out model metrics. + + Routine to write out model metrics csv file. An unordered list of metrics with a single value metric that are obtained from processing model output. Note that the model uncertainty also fits @@ -120,13 +95,13 @@ with metric name as key and single float as value. :param str csvfile: CSV file name - :param dict metrics: Dictionary containing metric values - ''' + :param dict metrics: Dictionary containing metric values. + """ if metrics: try: outf = open(csvfile, 'w') - except IOError as e: - if e.errno == errno.EACCES: + except IOError as ioerr: + if ioerr.errno == errno.EACCES: pass # Raise Error else: with outf: @@ -136,8 +111,10 @@ def write_obs_metrics(csvfile, obs, acc): - ''' - Routine to read in observation metrics csv file + """ + Write out observation metrics. + + Routine to write out observation metrics csv file. An unordered list of metrics with either 2 or 4 values. The first 2 vals are the observation range and must exist for any entry. The second 2 vals, @@ -150,14 +127,14 @@ :param str csvfile: CSV file name :param dict obs: Dictonary of observational uncertainties - :param dict acc: Dictonary of acceptable ranges - ''' + :param dict acc: Dictionary of acceptable ranges. + """ metrics = merge_obs_acc(obs, acc) if metrics: try: outf = open(csvfile, 'w') - except IOError as e: - if e.errno == errno.EACCES: + except IOError as ioerr: + if ioerr.errno == errno.EACCES: pass # Raise Error else: with outf: @@ -167,8 +144,10 @@ def read_order_metrics(csvfile, required=False): - ''' - Routine to read in ordered list of metrics csv file + """ + Read ordered metrics. + + Routine to read in ordered list of metrics csv file. Not really csv but easily read in by csv package. This is a line by line ordered list of the metrics that will be plotted on a NAC plot. It should @@ -177,24 +156,21 @@ :param str csvfile: CSV file name containing an ordered list of metrics :param bool required: If True then raise error if file does not exist :returns: An ordered list containing metric names - :rtype: list - ''' + :rtype: list.
+ """ metrics = [] if csvfile is not None: try: inf = open(csvfile, 'rb') - except IOError as e: - if e.errno == errno.EACCES: + except IOError as ioerr: + if ioerr.errno == errno.EACCES: if required: pass # Raise Error else: pass # Raise Warning else: with inf: - reader = csv.reader( - CommentedFile(inf), delimiter=',', quotechar='"') - # TODO: Must be a better way of unpacking data that does not - # rely on testing number of elements on line + reader = csv.reader(inf, delimiter=',', quotechar='"') for row in reader: if len(row) == 1: metrics.append(row[0]) @@ -206,8 +182,10 @@ def read_order_metrics(csvfile, required=False): def read_model_metrics(csvfile, required=False): - ''' - Routine to read in model metrics csv file + """ + Read model metrics. + + Routine to read in model metrics csv file. An unordered list of metrics with a single value metric that are obtained from processing model output. Note that the model uncertainty also fits @@ -217,24 +195,21 @@ def read_model_metrics(csvfile, required=False): :param str csvfile: CSV file name containing model data :param bool required: If True then raise error if file does not exist :returns: Dictionary containing metric values - :rtype: dict - ''' + :rtype: dict. + """ metrics = {} if csvfile is not None: try: - inf = open(csvfile, 'rb') - except IOError as e: - if e.errno == errno.EACCES: + inf = open(csvfile, 'rt') + except IOError as ioerr: + if ioerr.errno == errno.EACCES: if required: pass # Raise Error else: pass # Raise Warning else: with inf: - reader = csv.reader( - CommentedFile(inf), delimiter=',', quotechar='"') - # TODO: Must be a better way of unpacking data that does not - # rely on testing number of elements on line + reader = csv.reader(inf, delimiter=',', quotechar='"') for row in reader: metric = row.pop(0) if len(row) == 1: @@ -247,8 +222,8 @@ def read_model_metrics(csvfile, required=False): def read_obs_metrics(csvfile, required=False): - ''' - Routine to read in observation metrics csv file + """ + Routine to read in observation metrics csv file. An unordered list of metrics with either 2 or 4 values. The first 2 values are the observation range and must exist for any entry. The second 2 value @@ -263,25 +238,22 @@ def read_obs_metrics(csvfile, required=False): :param bool required: If True then raise error if file does not exist :returns: A pair of metric dictionaries containing observational uncertainties and acceptable ranges - :rtype: tuple - ''' + :rtype: tuple. + """ obs = {} acc = {} if csvfile is not None: try: - inf = open(csvfile, 'rb') - except IOError as e: - if e.errno == errno.EACCES: + inf = open(csvfile, 'rt') + except IOError as ioerr: + if ioerr.errno == errno.EACCES: if required: pass # Raise Error else: pass # Raise Warning else: with inf: - reader = csv.reader( - CommentedFile(inf), delimiter=',', quotechar='"') - # TODO: Must be a better way of unpacking data that does not - # rely on testing number of elements on line + reader = csv.reader(inf, delimiter=',', quotechar='"') for row in reader: metric = row.pop(0) # Contrary to documentation, allowing a single entry when @@ -311,8 +283,8 @@ def read_obs_metrics(csvfile, required=False): def metric_colour(test, ref=1.0, var=None, obs=None, acc=None): - ''' - Routine to determine whether to colour metric as: + """ + Routine to determine whether to colour metric. 
GREEN = test within observational uncertainty or acceptable range AMBER = within model uncertainty, or better than reference but neither ref @@ -332,9 +304,8 @@ def metric_colour(test, ref=1.0, var=None, obs=None, acc=None): :param tuple obs: Observational uncertainty as (min, max) :param tuple acc: Acceptable range as (min, max) :returns: Colour to use in plot indicating performance of metric - :rtype: str - ''' - + :rtype: str. + """ # Default colour to NOOBS_GREY indicating no observational uncertainty colour = NOOBS_GREY @@ -392,8 +363,8 @@ def metric_colour(test, ref=1.0, var=None, obs=None, acc=None): def metric_colours(test, ref=None, var=None, obs=None, acc=None): - ''' - Routine to loop over metrics and generate list of colours + """ + Routine to loop over metrics and generate list of colours. :param dict test: Dictionary of test metrics :param dict ref: Dictionary of reference metrics @@ -401,9 +372,8 @@ def metric_colours(test, ref=None, var=None, obs=None, acc=None): :param dict obs: Dictionary of observation uncertainties as (min, max) :param dict acc: Dictionary of acceptable ranges as (min, max) :returns: Dictionary of colours for test metrics - :rtype: dict - ''' - + :rtype: dict. + """ # initialize if ref is None: ref = {} @@ -437,16 +407,15 @@ def metric_colours(test, ref=None, var=None, obs=None, acc=None): def normalise(test, ref, strict=False): - ''' - Routine to normalise contents of test by contents of ref + """ + Routine to normalise contents of test by contents of ref. :param dict test: Dictionary of test metrics :param dict ref: Dictionary of reference metrics :param bool strict: if True then test and ref must have same metrics :returns: Dictionary of normalised test metrics - :rtype: dict - ''' - + :rtype: dict. + """ if strict: # Test to make sure reference metrics dictionary contains the same # metrics as test metrics dictionary @@ -471,16 +440,15 @@ def normalise(test, ref, strict=False): def plot_std(ax, metrics, data, color=STD_GREY, zorder=0): - ''' - Plot model uncertainty as filled bars about nac=1 line + """ + Plot model uncertainty as filled bars about nac=1 line. :param axes ax: ``matplotlib.axes`` to plot data in :param list metrics: List of metrics to plot in order :param dict data: Metrics dictionary :param str color: Colour to plot bars - :param int zorder: Matplotlib plot layer - ''' - + :param int zorder: Matplotlib plot layer. + """ # Extract metric data and line up with requested metrics coord = [i + 1 for (i, metric) in enumerate(metrics) if metric in data] std = [data[metric] for metric in metrics if metric in data] @@ -502,16 +470,15 @@ def plot_std(ax, metrics, data, color=STD_GREY, zorder=0): def plot_obs(ax, metrics, data, color=OBS_GREY, zorder=1): - ''' - Plot obs range as error bars + """ + Plot obs range as error bars. :param axes ax: ``matplotlib.axes`` to plot data in :param list metrics: List of metrics to plot in order :param dict data: Metrics dictionary :param str color: Colour to plot error bars - :param int zorder: Matplotlib plot layer - ''' - + :param int zorder: Matplotlib plot layer. + """ # Extract metric data and line up with requested metrics coord = [i + 1 for (i, metric) in enumerate(metrics) if metric in data] obsmin = [data[metric][0] for metric in metrics if metric in data] @@ -538,17 +505,16 @@ def plot_obs(ax, metrics, data, color=OBS_GREY, zorder=1): def plot_metrics(ax, metrics, data, cols, marker, zorder=3): - ''' - Plot metrics using symbols + """ + Plot metrics using symbols. 
:param axes ax: ``matplotlib.axes`` to plot data in :param list metrics: List of metrics to plot in order :param dict data: Metrics dictionary :param dict cols: Metric colours dictionary :param str marker: Matplotlib symbol to use in plot - :param int zorder: Matplotlib plot layer - ''' - + :param int zorder: Matplotlib plot layer. + """ # Extract metric data and line up with requested metrics coord = [i + 1 for (i, metric) in enumerate(metrics) if metric in data] pdata = [data[metric] for metric in metrics if metric in data] @@ -571,19 +537,18 @@ def plot_get_limits(tests, obs, acc, extend_y=False): - ''' - Determine data axis limits + """ + Determine data axis limits. :param list tests: Test experiment metrics dictionary list :param dict obs: Observational uncertainty metrics dictionary :param dict acc: Acceptable range metrics dictionary - :param bool extend_y: Extend y-axis to include obs/acc ranges - ''' - + :param bool extend_y: Extend y-axis to include obs/acc ranges. + """ # Calculate absmax/max/min for experiments minval = min([min(test.values()) for test in tests]) maxval = max([max(test.values()) for test in tests]) - maxabs = max([abs(x) for x in test.values() for test in tests]) + maxabs = max([max(np.abs(list(test.values()))) for test in tests]) # If want to extend beyond range of observations if extend_y: @@ -626,8 +591,8 @@ def plot_nac(cref, extend_y=False, title=None, ofile=None): - ''' - Routine to produce NAC plot + """ + Routine to produce NAC plot. :param str cref: Reference experiment name :param list ctests: Test experiment names list @@ -640,8 +605,7 @@ :param bool extend_y: Extend y-axis to include obs/acc ranges :param str title: Plot title :param str ofile: Plot file name - ''' - + """ # initialize if metrics is None: metrics = [] @@ -721,91 +685,6 @@ plt.savefig(ofile, bbox_extra_artists=(legend, ), bbox_inches='tight') else: # Need the following to attempt to display legend in frame - # TODO: Is there a better way of doing this? fig.subplots_adjust(right=0.85) plt.show() plt.close() - - -def parse_args(cli_args): - """ - Parse arguments in a function to facilitate testing. Contains all command - line options. - - :param list cli_args: Command line arguments from sys.argv. - :returns: Checked command line arguments.
- :rtype: argparse.Namespace - """ - - # Parse command line arguments - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - - parser.add_argument( - '--exp', - required=True, - help='Test experiment names (commma separated)') - parser.add_argument( - '--ref', required=True, help='Reference experiment name') - parser.add_argument( - '--file-exp', - required=True, - help='Experiment metric files (commma separated)') - parser.add_argument( - '--file-ref', required=True, help='Reference metric file') - parser.add_argument('--file-ord', default=None, help='Metric order file') - parser.add_argument( - '--file-var', default=None, help='Model uncertainty metric file') - parser.add_argument( - '--file-obs', default=None, help='Observations metric file') - parser.add_argument('--plot', default=None, help='Plot file to be created') - parser.add_argument('--title', default=None, help='Plot title') - parser.add_argument( - '--exty', - default=False, - action='store_true', - help='Extend y axis to include observation uncertainties') - - # Return parsed args - return parser.parse_args(cli_args) - - -def main(): - '''Creating plots from existing metrics files at the command line''' - - # Parse script arguments - args = parse_args(sys.argv[1:]) - if args.plot: - args.plot = os.path.abspath(args.plot) - - # Check size of experiment inputs - expt_files = args.file_exp.split(',') - expt_names = args.exp.split(',') - assert len(expt_files) == len(expt_names), \ - 'Number of experiments and experiment files must be the same' - - # Read metrics files - metrics = read_order_metrics(args.file_ord) - ref = read_model_metrics(args.file_ref) - tests = [read_model_metrics(expt_file) for expt_file in expt_files] - var = read_model_metrics(args.file_var) - (obs, acc) = read_obs_metrics(args.file_obs) - - # Produce plot - plot_nac( - args.ref, - expt_names, - ref, - tests, - metrics=metrics, - var=var, - obs=obs, - acc=acc, - extend_y=args.exty, - title=args.title, - ofile=args.plot) - - -if __name__ == '__main__': - main() diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/rms.py b/esmvaltool/diag_scripts/autoassess/_rms_radiation.py similarity index 93% rename from esmvaltool/diag_scripts/autoassess/autoassess_source/rms.py rename to esmvaltool/diag_scripts/autoassess/_rms_radiation.py index df09867ae8..a96f06cb23 100644 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/rms.py +++ b/esmvaltool/diag_scripts/autoassess/_rms_radiation.py @@ -1,5 +1,5 @@ """ -Port for ESMValTool v2 from v1 +Port for ESMValTool v2 from v1. Uses: ESMValTool v2, Python 3.x Valeriu Predoi, UREAD, July 2018 @@ -17,27 +17,27 @@ import logging import numpy.ma as ma import iris -from .valmod_radiation import area_avg +from esmvaltool.diag_scripts.autoassess._valmod_radiation import area_avg logger = logging.getLogger(os.path.basename(__file__)) class RMSLISTCLASS(list): """ - Construct the regions class + Construct the regions class. - This is the class for a list of RMSCLASS (i.e. for lots of regions) + This is the class for a list of RMSCLASS (i.e. for lots of regions). 
""" def __init__(self, *args): - """Init""" + """Init.""" if not args: super(RMSLISTCLASS, self).__init__() else: super(RMSLISTCLASS, self).__init__(args[0]) def __repr__(self): - """Repr""" + """Repr.""" rms_out = "[" for rms_item in self: rms_out += "rms.RMSCLASS for " + rms_item.region + ", \n" @@ -47,7 +47,7 @@ def __repr__(self): return rms_out def __call__(self, region=False): - """Call""" + """Call.""" rms_found = False region_list = [] for rms_item in self: @@ -70,17 +70,17 @@ def __call__(self, region=False): # This is the class for one set of rms values (i.e. for one region) class RMSCLASS: - """Class per region""" + """Class per region.""" def __init__(self, region, exper='experiment', control='control'): """ - Create instances of this class but also start making + Create instances of this class but also start making. html files that will contain all the rms data. (old) region = the region name exper = experiment jobid - control = control jobid + control = control jobid. """ # Store the region name, experiment and control self.region = region @@ -130,12 +130,12 @@ def __init__(self, region, exper='experiment', control='control'): # Allow iterations over this def __iter__(self): - """Iter""" + """Iter.""" return self # This defines how this class is shown on the screen if you print it def __repr__(self): - """Repr""" + """Repr.""" rms_out = "rms.RMSCLASS for {0}".format(self.region) return rms_out @@ -229,7 +229,7 @@ def calc_wrapper(self, toplot_cube, mask_cube, page_title): return rms_float def tofile(self, csv_dir): - """Output all the RMS statistics to csv files""" + """Output all the RMS statistics to csv files.""" csv_file = 'summary_' + self.region + '_RMS_' + self.exper + '.csv' csv_path = os.path.join(csv_dir, csv_file) with open(csv_path, 'a') as out_file: @@ -242,10 +242,10 @@ def tofile(self, csv_dir): def start(exper='experiment', control='control'): """ - Make some instances of the rms class + Make some instances of the rms class. exper = experiment jobid (optional) - control = control jobid (optional) + control = control jobid (optional). """ # Loop over all regions. Regions are: # 0 = globe @@ -283,13 +283,13 @@ def start(exper='experiment', control='control'): def calc_all(rms_list, toplot_cube, mask_cube, page_title): """ - Loop through all the regions + Loop through all the regions. Calculate rms values and store them in the class. rms_list = list of rms classes that stores all the information to do with the rms regions and the resulting answers. toplot_cube = (cube) cube that is to be plotted - page_title = (str) the page title for this plot + page_title = (str) the page title for this plot. """ # Run through the loop, calculating rms values for each region rms_float_list = [] @@ -305,7 +305,7 @@ def calc_all(rms_list, toplot_cube, mask_cube, page_title): def end(rms_list, csv_dir): """ - Finish using the rms class + Finish using the rms class. rms_list = list of rms classes that stores all the information to do with the rms regions and the resulting answers. 
diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/valmod_radiation.py b/esmvaltool/diag_scripts/autoassess/_valmod_radiation.py
similarity index 94%
rename from esmvaltool/diag_scripts/autoassess/autoassess_source/valmod_radiation.py
rename to esmvaltool/diag_scripts/autoassess/_valmod_radiation.py
index b635a873e6..dd1f330623 100644
--- a/esmvaltool/diag_scripts/autoassess/autoassess_source/valmod_radiation.py
+++ b/esmvaltool/diag_scripts/autoassess/_valmod_radiation.py
@@ -1,6 +1,6 @@
 #! /usr/local/sci/bin/python
 """
-Port for ESMValTool v2 from v1
+Port for ESMValTool v2 from v1.
 
 Uses: ESMValTool v2, Python 3.x
 Valeriu Predoi, UREAD, July 2018
@@ -15,7 +15,7 @@
 
 def get_cube_ready(cube):
-    """Remve unwanted coords and check bounds"""
+    """Remove unwanted coords and check bounds."""
     to_remove_list = [
         'forecast_reference_time', 'forecast_period', 'source', 'season',
         'time'
@@ -33,7 +33,7 @@ def get_cube_ready(cube):
 
 def area_avg(cube, coord1=None, coord2=None):
     """
-    Get area average
+    Get area average.
 
     Perform an area average of a cube using weights to account for
     changes in latitude.
@@ -50,7 +50,7 @@ def area_avg(cube, coord1=None, coord2=None):
 
 def perform_equation(dataset_1, dataset_2, analysis_type):
     """
-    Perform a simple cube operation
+    Perform a simple cube operation.
 
     analysis_type = type of analysis (zonal_mean, vertical_mean,...)
     This can be easily adapted for more than one type of operation
diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_area_base.py b/esmvaltool/diag_scripts/autoassess/autoassess_area_base.py
new file mode 100644
index 0000000000..d3e50b0c55
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/autoassess_area_base.py
@@ -0,0 +1,403 @@
+"""
+Base autoassess area metrics diagnostic.
+
+Wrapper that takes two datasets (control_model and exp_model)
+and observational data (optionally); base for all area diags for
+autoassess metrics. Current areas:
+
+version.September.2018
+-----------------------
+monsoon -- not yet implemented
+stratosphere -- implemented
+hydrocycle -- not yet implemented
+conservation -- implemented
+globaltrop -- not yet implemented
+land_surface_surfrad -- implemented
+land_surface_snow -- implemented
+land_surface_soilmoisture -- implemented
+land_surface_permafrost -- implemented
+custom -- not yet implemented
+
+Author: Valeriu Predoi, UREAD (valeriu.predoi@ncas.ac.uk)
+First version: September 2018.
+"""
+import os
+import datetime
+import logging
+import importlib
+import csv
+import tempfile
+import iris
+from esmvaltool.diag_scripts.shared import run_diagnostic
+
+logger = logging.getLogger(__name__)
+
+
+def _import_package(area):
+    """Import the right area package."""
+    root_import = 'esmvaltool.diag_scripts.autoassess.'
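# importlib resolves the dotted module path at run time, so area='stratosphere'
# loads esmvaltool.diag_scripts.autoassess.stratosphere exactly as a static
# 'from esmvaltool.diag_scripts.autoassess import stratosphere' would.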
+    available_areas = [
+        'monsoon', 'stratosphere', 'hydrocycle', 'conservation', 'globaltrop',
+        'land_surface_surfrad', 'land_surface_snow',
+        'land_surface_soilmoisture', 'land_surface_permafrost'
+    ]
+    if area in available_areas:
+        module = root_import + area
+        area_package = importlib.import_module(module)
+        return area_package
+    else:
+        raise Exception('Unknown area: ' + area)
+
+
+def _fix_cube(cube_list):
+    """Apply some ad hoc fixes to cubes."""
+    # force add a long_name; supermeans uses extract_strict
+    # and for derived vars there is only
+    # invalid_standard_name which is an attribute
+    for cube in cube_list:
+        if 'invalid_standard_name' in cube.attributes:
+            cube.long_name = cube.attributes['invalid_standard_name']
+        coord_names = [coord.standard_name for coord in cube.coords()]
+        if 'time' in coord_names:
+            if not cube.coord('time').has_bounds():
+                cube.coord('time').guess_bounds()
+    return cube_list
+
+
+def _make_tmp_dir(cfg):
+    """Make the tmp and ancil dirs."""
+    tmp_dir = os.path.join(cfg['work_dir'], 'tmp')
+    ancil_dir = os.path.join(cfg['work_dir'], 'ancil')
+    if not os.path.exists(tmp_dir):
+        os.makedirs(tmp_dir)
+    if not os.path.exists(ancil_dir):
+        os.makedirs(ancil_dir)
+    return tmp_dir, ancil_dir
+
+
+def _make_main_dirs(cfg):
+    """Create main dirs to hold analysis."""
+    locations = {}  # locations for control, exp and any additional metrics
+    suite_loc_m1 = os.path.join(cfg['work_dir'], cfg['control_model'])
+    if not os.path.exists(suite_loc_m1):
+        os.makedirs(suite_loc_m1)
+    locations['control_model'] = suite_loc_m1
+    suite_loc_m2 = os.path.join(cfg['work_dir'], cfg['exp_model'])
+    if not os.path.exists(suite_loc_m2):
+        os.makedirs(suite_loc_m2)
+    locations['exp_model'] = suite_loc_m2
+    if 'additional_metrics' in cfg:
+        if cfg['additional_metrics']:
+            for add_model in cfg['additional_metrics']:
+                suite_loc_add = os.path.join(cfg['work_dir'], add_model)
+                if not os.path.exists(suite_loc_add):
+                    os.makedirs(suite_loc_add)
+                locations[add_model] = suite_loc_add
+    obs_loc = os.path.join(cfg['work_dir'], 'OBS')
+    if not os.path.exists(obs_loc):
+        os.makedirs(obs_loc)
+
+    return locations, obs_loc
+
+
+def _make_concatenated_data_dirs(suite_locs, area):
+    """Create dirs to hold cubeList files."""
+    suites_locations = {}
+    supermeans_locations = {}
+    for suite_dir in suite_locs:
+        suite_data = os.path.join(suite_locs[suite_dir], area)
+        if not os.path.exists(suite_data):
+            os.makedirs(suite_data)
+        suites_locations[suite_dir] = suite_data
+
+        # create supermeans directory: [area]_supermeans
+        sup_data = os.path.join(suite_locs[suite_dir], area + '_supermeans')
+        if not os.path.exists(sup_data):
+            os.makedirs(sup_data)
+        supermeans_locations[suite_dir] = sup_data
+
+    return suites_locations, supermeans_locations
+
+
+def _setup_data_dict(cfg):
+    """Set a dictionary to hold data."""
+    metrics_dict = {}  # dict keyed on datasets for metrics
+    metrics_dict['control_model'] = []
+    metrics_dict['exp_model'] = []
+
+    # set the additional_metrics parameter
+    additional_metrics = False
+    if 'additional_metrics' in cfg:
+        if cfg['additional_metrics']:
+            additional_metrics = True
+            for add_mod in cfg['additional_metrics']:
+                metrics_dict[add_mod] = []
+
+    # set the obs types
+    obs_types = None
+    if 'obs_models' in cfg:
+        if cfg['obs_models']:
+            obs_types = cfg['obs_models']
+
+    return metrics_dict, additional_metrics, obs_types
+
+
+def _get_filelists(cfg):
+    """Put files in dict(lists) and return them."""
+    metrics_dict, additional_metrics, obs_types = _setup_data_dict(cfg)
+    obs_list = []
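# Note: filenames are assumed here to follow the preprocessor convention
# <project>_<dataset>_..., so split('_')[1] in the loop below recovers the
# dataset name that is matched against control_model, exp_model and obs lists.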
+
+    for filename, attributes in cfg['input_data'].items():
+        base_file = os.path.basename(filename)
+        fullpath_file = filename
+        if base_file.split('_')[1] == cfg['control_model']:
+            metrics_dict['control_model'].append(fullpath_file)
+            if 'fx_files' in attributes:
+                for fx_file in cfg['fx']:
+                    metrics_dict['control_model'].append(
+                        attributes['fx_files'][fx_file])
+        if base_file.split('_')[1] == cfg['exp_model']:
+            metrics_dict['exp_model'].append(fullpath_file)
+            if 'fx_files' in attributes:
+                for fx_file in cfg['fx']:
+                    metrics_dict['exp_model'].append(
+                        attributes['fx_files'][fx_file])
+        if additional_metrics and base_file.split(
+                '_')[1] in cfg['additional_metrics']:
+            metrics_dict[base_file.split('_')[1]].append(fullpath_file)
+            if 'fx_files' in attributes:
+                for fx_file in cfg['fx']:
+                    metrics_dict[base_file.split('_')[1]].append(
+                        attributes['fx_files'][fx_file])
+        if obs_types and base_file.split('_')[1] in obs_types:
+            obs_list.append(fullpath_file)
+
+    return metrics_dict, obs_list
+
+
+def _process_obs(cfg, obs_list, obs_loc):
+    """Gather obs files and save them applying specific cases."""
+    group_files = [[
+        ofile for ofile in obs_list
+        if os.path.basename(ofile).split('_')[1] == obs
+    ] for obs in cfg['obs_models']]
+    for obs_file_group, obs_name in zip(group_files, cfg['obs_models']):
+        cubes_list_obs = iris.load(obs_file_group)
+        cubes_list_obs = _fix_cube(cubes_list_obs)
+        obs_file_name = obs_name + '_cubeList.nc'
+        iris.save(cubes_list_obs, os.path.join(obs_loc, obs_file_name))
+
+
+def _process_metrics_data(all_files, suites, smeans):
+    """Create and save concatenated cubes for ctrl and exp."""
+    cubes_lists_paths = []
+    for key in all_files.keys():
+        filelist = all_files[key]
+        if filelist:
+            cubelist = iris.load(filelist)
+
+            # save to concatenated files; save twice for supermeans as well
+            cubes_list_path = os.path.join(suites[key], 'cubeList.nc')
+            cubes_list_smean_path = os.path.join(smeans[key], 'cubeList.nc')
+            cubelist = _fix_cube(cubelist)
+            iris.save(cubelist, cubes_list_path)
+            iris.save(cubelist, cubes_list_smean_path)
+            cubes_lists_paths.append(cubes_list_path)
+
+    return cubes_lists_paths
+
+
+def create_output_tree(out_dir, ref_suite_id, exp_suite_id, area):
+    """
+    Create directory tree for area output according to the following scheme.
+
+        `out_dir`/`exp_suite_id`_vs_`ref_suite_id`/`area`
+
+    Missing directories are created on the way.
+
+    :param str out_dir: Base directory for output.
+    :param str ref_suite_id: Suite ID of reference model run.
+    :param str exp_suite_id: Suite ID of test model run.
+    :param str area: Name of assessment area.
+    :returns: Path to area output directory.
+    :rtype: str
+    """
+    assessment_name = exp_suite_id + '_vs_' + ref_suite_id
+    # make sure out_dir exists in output folder
+    _out_dir = os.path.join(out_dir, assessment_name)
+    if not os.path.exists(out_dir):
+        os.makedirs(out_dir)
+
+    # create output folder for area
+    area_out_dir = os.path.join(_out_dir, area)
+    if not os.path.exists(area_out_dir):
+        os.makedirs(area_out_dir)
+    return area_out_dir
+
+
+def create_tmp_dir(tmp_dir, ref_suite_id, exp_suite_id, area):
+    """
+    Create directory tree for temporary data according to the following scheme.
+
+        `tmp_dir`/`exp_suite_id`_vs_`ref_suite_id`_random/`area`_random
+
+    :param str tmp_dir: Base temporary directory.
+    :param str ref_suite_id: Suite ID of reference model run.
+    :param str exp_suite_id: Suite ID of test model run.
+    :param str area: Name of assessment area.
+    :returns: Path to area temporary directory.
+    :rtype: str
+    """
+    assessment_name = exp_suite_id + '_vs_' + ref_suite_id
+    # create unique temporary folder in tmp dir
+    _tmp_dir = tempfile.mkdtemp(prefix=assessment_name + '_', dir=tmp_dir)
+
+    # create temporary folder for area
+    area_tmp_dir = tempfile.mkdtemp(prefix=area + '_', dir=_tmp_dir)
+    return area_tmp_dir
+
+
+def _setup_input(cfg):
+    """Assemble all data structures."""
+    logger.setLevel(cfg['log_level'].upper())
+
+    # set the main data dirs
+    target_locs, obs_loc = _make_main_dirs(cfg)
+    suites, smeans = _make_concatenated_data_dirs(target_locs, cfg['area'])
+
+    # create the ancil and tmp dirs
+    tmp_dir, ancil_dir = _make_tmp_dir(cfg)
+
+    # get the file lists
+    metrics_dict, obs_list = _get_filelists(cfg)
+
+    # spell out the files used
+    logger.info("Files for control model for metrics: %s",
+                metrics_dict['control_model'])
+    logger.info("Files for exp model for metrics: %s",
+                metrics_dict['exp_model'])
+    logger.info("Files for ALL metrics: %s", metrics_dict)
+    logger.info("Files for obs model NOT for metrics: %s", obs_list)
+
+    # load and save control and exp cubelists
+    all_cubelists = _process_metrics_data(metrics_dict, suites, smeans)
+
+    # print the paths
+    logger.info("Saved control data cubes: %s", str(all_cubelists))
+
+    # separately process the obs datasets that don't need metrics
+    if cfg['obs_models']:
+        _process_obs(cfg, obs_list, obs_loc)
+
+    return tmp_dir, obs_loc, ancil_dir
+
+
+def _create_run_dict(cfg):
+    """Create the run dictionary."""
+    tmp_dir, obs_loc, ancil_dir = _setup_input(cfg)
+    run = {}
+    # general parameters (necessary)
+    run['suite_id1'] = cfg['control_model']
+    run['suite_id2'] = cfg['exp_model']
+    run['out_dir'] = cfg['plot_dir']
+    run['tmp_dir'] = tmp_dir
+    run['_area'] = cfg['area']
+    run['_start_date'] = cfg['start']
+    run['_end_date'] = cfg['end']
+    run['runid'] = cfg['area']
+    run['data_root'] = cfg['work_dir']
+    run['clim_root'] = obs_loc
+    run['ancil_root'] = ancil_dir
+    run['start'] = cfg['start']
+    run['end'] = cfg['end']
+
+    # optional parameters
+    if 'climfiles_root' in cfg:
+        run['climfiles_root'] = cfg['climfiles_root']
+    if 'additional_metrics' in cfg:
+        run['additional_metrics'] = cfg['additional_metrics']
+
+    # specific parameters needed by some areas
+    start_year = int(run['start'][0:4])
+    end_year = int(run['end'][0:4])
+    run['nyear'] = end_year - start_year
+    run['period'] = '{:04d}_{:03d}'.format(start_year, run['nyear'])
+    year, month, day = [int(s) for s in run['start'].split('/')]
+    run['from_instantaneous'] = datetime.datetime(year, month, day)
+    run['from_daily'] = datetime.datetime(year, month, day)
+    run['from_monthly'] = datetime.datetime(year, month, day)
+    run['from_seasonal'] = datetime.datetime(year, month, day)
+    run['from_annual'] = datetime.datetime(year, month, day)
+
+    year, month, day = [int(s) for s in run['end'].split('/')]
+    run['to_instantaneous'] = datetime.datetime(year, 11, 30)
+    run['to_daily'] = datetime.datetime(year, 11, 30)
+    run['to_monthly'] = datetime.datetime(year, 11, 1)
+    run['to_seasonal'] = datetime.datetime(year, 9, 1)
+    run['to_annual'] = datetime.datetime(year - 1, 12, 1)
+
+    return run
+
+
+def run_area(cfg):
+    """Kick start the area diagnostic."""
+    run_obj = _create_run_dict(cfg)
+    area_out_dir = create_output_tree(run_obj['out_dir'], run_obj['suite_id1'],
+                                      run_obj['suite_id2'], run_obj['_area'])
+
+    # the areas write all output to the cwd
+    os.chdir(area_out_dir)
+
+    # import area here to allow removal of areas
+    area_package =
_import_package(run_obj['_area']) + + # assemble the work subjects + suite_ids = [run_obj['suite_id1'], run_obj['suite_id2']] + if 'additional_metrics' in cfg: + if run_obj['additional_metrics']: + suite_ids.extend(run_obj['additional_metrics']) + + # run the metrics generation + for suite_id in suite_ids: + logger.info('Calculating metrics for %s', suite_id) + # setup for file dumping + run_obj['runid'] = suite_id + run_obj['dump_output'] = os.path.join(area_out_dir, suite_id) + if not os.path.exists(run_obj['dump_output']): + os.makedirs(run_obj['dump_output']) + all_metrics = {} + + # run each metric function + for metric_function in area_package.metrics_functions: + logger.info('# Call: %s', metric_function) + + # run the metric + metrics = metric_function(run_obj) + # check duplication + duplicate_metrics = list( + set(all_metrics.keys()) & set(metrics.keys())) + if duplicate_metrics: + raise AssertionError('Duplicate Metrics ' + + str(duplicate_metrics)) + all_metrics.update(metrics) + + # write metrics to file + with open(os.path.join(run_obj['dump_output'], 'metrics.csv'), + 'w') as file_handle: + writer = csv.writer(file_handle) + for metric in all_metrics.items(): + writer.writerow(metric) + + # multimodel functions + if hasattr(area_package, 'multi_functions'): + for multi_function in area_package.multi_functions: + multi_function(run_obj) + else: + logger.info('# Area has no multi functions.') + + +if __name__ == '__main__': + + with run_diagnostic() as config: + run_area(config) diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_radiation_rms.py b/esmvaltool/diag_scripts/autoassess/autoassess_radiation_rms.py index a3ba6aab7b..5687518dd7 100644 --- a/esmvaltool/diag_scripts/autoassess/autoassess_radiation_rms.py +++ b/esmvaltool/diag_scripts/autoassess/autoassess_radiation_rms.py @@ -1,5 +1,5 @@ """ -Port to Version 2 with implementation of v2-specific changes +Port to Version 2 with implementation of v2-specific changes. Uses: ESMValTool v2, Python3.x Valeriu Predoi, UREAD, July 2018 @@ -9,84 +9,48 @@ Original Description from Version 1 Diagnostic: ;;########################################################################### ;; AutoAssess_radiation_rms.py -;; Author: Yoko Tsushima (Met Office, UK) -;; CMUG project ;;########################################################################### ;; Description ;; This script is the RMS error metric script of ;; AutoAssess radiation -;; -;; -;; Modification history -;; 20180712- autoassess_radiation_rms: porting to v2 -;; 20170323-_AutoAssess_radiation_rms: Test finished. -;; 20160819-_test_AutoAssess_radiation_rms: written based on calc_rms code. -;; ;; ########################################################################### + +This diagnostic uses CMIP5 data; to switch to CMIP6 change _CMIP_TYPE. 
""" import os import logging import iris -import autoassess_source.rms as rms -import autoassess_source.valmod_radiation as vm -from esmvaltool.diag_scripts.shared import (group_metadata, run_diagnostic, - select_metadata) +from esmvaltool.diag_scripts.autoassess._rms_radiation import (start, end, + calc_all) +from esmvaltool.diag_scripts.autoassess._valmod_radiation import ( + perform_equation) +from esmvaltool.diag_scripts.shared import ( + group_metadata, run_diagnostic, get_control_exper_obs, apply_supermeans) logger = logging.getLogger(os.path.basename(__file__)) - -def apply_supermeans(ctrl, exper, obs_list): - """Apply supermeans on data components""" - ctrl_file = ctrl['filename'] - exper_file = exper['filename'] - logger.info("Loading %s", ctrl_file) - logger.info("Loading %s", exper_file) - ctrl_cube = iris.load_cube(ctrl_file) - exper_cube = iris.load_cube(exper_file) - ctrl_cube = ctrl_cube.collapsed('time', iris.analysis.MEAN) - logger.debug("Time-averaged control %s", ctrl_cube) - exper_cube = exper_cube.collapsed('time', iris.analysis.MEAN) - logger.debug("Time-averaged experiment %s", exper_cube) - if obs_list: - obs_cube_list = [] - for obs in obs_list: - obs_file = obs['filename'] - logger.info("Loading %s", obs_file) - obs_cube = iris.load_cube(obs_file) - obs_cube = obs_cube.collapsed('time', iris.analysis.MEAN) - logger.debug("Time-averaged obs %s", obs_cube) - obs_cube_list.append(obs_cube) - else: - obs_cube_list = None - - return ctrl_cube, exper_cube, obs_cube_list +_CMIP_TYPE = 'CMIP5' def apply_rms(data_1, data_2, cfg, component_dict, var_name): - """Compute RMS for any data1-2 combination""" + """Compute RMS for any data1-2 combination.""" data_names = [model['dataset'] for model in component_dict.values()] plot_title = var_name + ': ' + data_names[0] + ' vs ' + data_names[1] - rms_list = rms.start(data_names[0], data_names[1]) + rms_list = start(data_names[0], data_names[1]) analysis_type = cfg['analysis_type'] landsea_mask_file = os.path.join( os.path.dirname(__file__), 'autoassess_source', cfg['landsea_mask']) landsea_mask_cube = iris.load_cube(landsea_mask_file) - data1_vs_data2 = vm.perform_equation(data_1, data_2, analysis_type) + data1_vs_data2 = perform_equation(data_1, data_2, analysis_type) # call to rms.calc_all() to compute rms; rms.end() to write results - rms.calc_all(rms_list, data1_vs_data2, landsea_mask_cube, plot_title) - rms.end(rms_list, cfg['work_dir']) + calc_all(rms_list, data1_vs_data2, landsea_mask_cube, plot_title) + end(rms_list, cfg['work_dir']) def do_preamble(cfg): - """Execute some preamble functionality""" - # prepare output dirs - if not os.path.exists(cfg['plot_dir']): - os.makedirs(cfg['plot_dir']) - if not os.path.exists(cfg['work_dir']): - os.makedirs(cfg['work_dir']) - + """Execute some preamble functionality.""" # get data input_data = cfg['input_data'].values() grouped_input_data = group_metadata( @@ -95,39 +59,8 @@ def do_preamble(cfg): return input_data, grouped_input_data -def get_all_datasets(short_name, input_data, cfg): - """Get control, exper and obs datasets""" - dataset_selection = select_metadata( - input_data, short_name=short_name, project='CMIP5') - - # get the obs datasets - if 'observational_datasets' in cfg.keys(): - obs_selection = [ - select_metadata(input_data, short_name=short_name, - dataset=obs_dataset)[0] - for obs_dataset in cfg['observational_datasets'] - ] - else: - obs_selection = [] - - # determine CONTROL and EXPERIMENT datasets - for model in dataset_selection: - if model['dataset'] == 
cfg['control_model']: - logger.info("Control dataset %s", model['dataset']) - control = model - elif model['dataset'] == cfg['exper_model']: - logger.info("Experiment dataset %s", model['dataset']) - experiment = model - - if obs_selection: - logger.info("Observations dataset(s) %s", - [obs['dataset'] for obs in obs_selection]) - - return control, experiment, obs_selection - - def main(cfg): - """Execute the radiation rms diag""" + """Execute the radiation rms diag.""" logger.setLevel(cfg['log_level'].upper()) input_data, grouped_input_data = do_preamble(cfg) @@ -137,7 +70,8 @@ def main(cfg): logger.info("Processing variable %s", short_name) # control, experiment and obs's - ctrl, exper, obslist = get_all_datasets(short_name, input_data, cfg) + ctrl, exper, obslist = get_control_exper_obs(short_name, input_data, + cfg, _CMIP_TYPE) # apply the supermeans ctrl_sm, exper_sm, obs_sm_list = apply_supermeans(ctrl, exper, obslist) diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/__init__.py b/esmvaltool/diag_scripts/autoassess/autoassess_source/__init__.py deleted file mode 100644 index 40d033a799..0000000000 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Initialize for autoassess_source""" diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/area_utils.py b/esmvaltool/diag_scripts/autoassess/autoassess_source/area_utils.py deleted file mode 100644 index 25e4d7bfa5..0000000000 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/area_utils.py +++ /dev/null @@ -1,109 +0,0 @@ -''' -Module to hold functions useful to area assessments -''' - -import numpy as np -import numpy.ma as ma - -import iris -import iris.analysis.cartography as iac - -from . import iris_updates as newiris - - -def area_average(cube, - weighted=True, - mask=None, - logicmask=False, - coords=None, - aggregator=iris.analysis.MEAN, - **aggkeys): - ''' - Routine to calculate weighted horizontal area aggregations - - Routine defaults to longitude and latitude, but can be configured to - collapse over any coordinate in the cube - - Inputs: - - cube = cube to aggregate - - Keywords: - - weighted = perform area weighted aggregation (default: True) - mask = cube containing mask data (default: None) - logicmask = Does mask contain logical data (default: False) - aggregator = aggregator for collapsed method (default: iris.analysis.MEAN) - coords = list of coordinates to collapse cube over - (default: ["latitude", "longitude"]) - "coord" = (coord_min, coord_max) - range of coordinate to collapse over - **kwargs = any keywords required for the aggregator - - Return: - - aggregated cube - - ''' - if coords is None: - coords = ['latitude', 'longitude'] - - # Make sure that aggregator is an Aggregator instance - assert isinstance(aggregator, iris.analysis.Aggregator) - # If doing weighted aggregation make sure that aggregator - # is a WeightAggregator instance - if weighted: - assert isinstance(aggregator, iris.analysis.WeightedAggregator) - - # Extract region specification if available - intkeys = {} - for coord in coords: - if coord in aggkeys: - intkeys[coord] = aggkeys.pop(coord) - - # Extract region if required - if intkeys: - newcube = cube.intersection(ignore_bounds=True, **intkeys) - # For some reason cube.intersection() promotes dtype of coordinate - # arrays to float64, whereas cube.extract() doesn't. Need to make - # sure behaviour is identical. 
- for coord in intkeys.keys(): - newcube.coord(coord).points = \ - newcube.coord(coord).points.astype(np.float32, copy=False) - else: - newcube = cube.copy() - - # If doing area-weighted aggregation then calculate area weights - if weighted: - # Coords need bounding - for coord in coords: - if not newcube.coord(coord).has_bounds(): - # Test to make sure latitude bounds do not wrap over pole. - if coord in ['latitude', 'grid_latitude']: - newiris.guess_bounds( - newcube.coord(coord), bound_min=-90., bound_max=90.) - else: - newcube.coord(coord).guess_bounds() - aggkeys['weights'] = iac.area_weights(newcube) - - # Apply mask - if mask: - # Extract region of mask to match data - if intkeys: - newmask = mask.intersection(ignore_bounds=True, **intkeys) - else: - newmask = mask.copy() - # Apply mask to weights if they exist, else apply to data - # Do I really need two methods here? - if 'weights' in aggkeys: - if logicmask: - aggkeys['weights'] = ma.array( - data=aggkeys['weights'], mask=newmask.data) - else: - aggkeys['weights'] *= newmask.data - else: - if logicmask: - newcube.data = ma.array(data=newcube.data, mask=newmask.data) - else: - newcube.data *= newmask.data - - return newcube.collapsed(coords, aggregator, **aggkeys) diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/iris_updates.py b/esmvaltool/diag_scripts/autoassess/autoassess_source/iris_updates.py deleted file mode 100644 index 4e5d4f6809..0000000000 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/iris_updates.py +++ /dev/null @@ -1,123 +0,0 @@ -''' -Use this module to store code that should be added to the main Iris code -base. - -This should be copies of iris routines with appropriate changes. -''' - -import numpy as np - -import iris.exceptions - - -def _guess_bounds(coord, bound_position=0.5, bound_min=None, bound_max=None): - """ - A copy of the iris.coords._guess_bounds() method, but applying physical - limits to coordinates. - - Return bounds for this coordinate based on its points. - - Kwargs: - - * bound_position - The desired position of the bounds relative to the - position of the points. - - * bound_min - A bound minimum beyond which a bound cannot be extrapolated - - * bound_max - A bound maximum beyond which a bound cannot be extrapolated - - Returns: - A numpy array of shape (len(coord.points), 2). - - .. note:: - - This method only works for coordinates with ``coord.ndim == 1``. - - """ - # XXX Consider moving into DimCoord - # ensure we have monotonic points - if not coord.is_monotonic(): - raise ValueError("Need monotonic points to generate bounds for %s" - % coord.name()) - - if coord.ndim != 1: - raise iris.exceptions.CoordinateMultiDimError(coord) - - if coord.shape[0] < 2: - raise ValueError('Cannot guess bounds for a coordinate of length ' - '1.') - - if coord.bounds is not None: - raise ValueError('Coord already has bounds. 
Remove the bounds ' - 'before guessing new ones.') - - if getattr(coord, 'circular', False): - points = np.empty(coord.points.shape[0] + 2) - points[1:-1] = coord.points - direction = 1 if coord.points[-1] > coord.points[0] else -1 - points[0] = coord.points[-1] - (coord.units.modulus * direction) - points[-1] = coord.points[0] + (coord.units.modulus * direction) - diffs = np.diff(points) - else: - diffs = np.diff(coord.points) - diffs = np.insert(diffs, 0, diffs[0]) - diffs = np.append(diffs, diffs[-1]) - - min_bounds = coord.points - diffs[:-1] * bound_position - max_bounds = coord.points + diffs[1:] * (1 - bound_position) - - # Apply given minimum bound - # Using explicit test for bound_min as bound_min=0.0 fails test - if bound_min is not None: - min_bounds = np.maximum(min_bounds, bound_min) - max_bounds = np.maximum(max_bounds, bound_min) - - # Apply given maximum bound - # Using explicit test for bound_max as bound_max=0.0 fails test - if bound_max is not None: - min_bounds = np.minimum(min_bounds, bound_max) - max_bounds = np.minimum(max_bounds, bound_max) - - bounds = np.array([min_bounds, max_bounds]).transpose() - - return bounds - - -def guess_bounds(coord, bound_position=0.5, bound_min=None, bound_max=None): - ''' - A copy of the iris.coords.guess_bounds() method, but applying physical - limits to coordinates. - - Add contiguous bounds to a coordinate, calculated from its points. - - Puts a cell boundary at the specified fraction between each point and - the next, plus extrapolated lowermost and uppermost bound points, so - that each point lies within a cell. - - With regularly spaced points, the resulting bounds will also be - regular, and all points lie at the same position within their cell. - With irregular points, the first and last cells are given the same - widths as the ones next to them. - - Kwargs: - - * bound_position - The desired position of the bounds relative to the - position of the points. - - * bound_min - A bound minimum beyond which a bound cannot be extrapolated - - * bound_max - A bound maximum beyond which a bound cannot be extrapolated - - .. note:: - - An error is raised if the coordinate already has bounds, is not - one-dimensional, or is not monotonic. - - .. note:: - - Unevenly spaced values, such from a wrapped longitude range, can - produce unexpected results : In such cases you should assign - suitable values directly to the bounds property, instead. - - ''' - coord.bounds = _guess_bounds(coord, bound_position, bound_min, bound_max) diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/run_area.py b/esmvaltool/diag_scripts/autoassess/autoassess_source/run_area.py deleted file mode 100755 index 041e62bd1e..0000000000 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/run_area.py +++ /dev/null @@ -1,331 +0,0 @@ -""" -(C) Crown Copyright 2017, the Met Office - -Wrapper for existing assessment areas of AutoAssess. - -Replicates some behaviours of the old version of AutoAssess -which are all deprecated. -All lot of this data is not required for data loading anymore, -but just to not break -the assessment area code. 
- -""" - -import sys -import os -import os.path -import argparse -import csv -import datetime -from pprint import pprint -import re -import tempfile - -# use Agg backend for non-interactive runs; this is propagated to all funcs -# called from this module -import matplotlib -matplotlib.use('Agg') - - -def create_dir(path): - """ - Make a dir - """ - try: - os.makedirs(path) - except OSError: - if os.path.isdir(path): - pass - else: - raise - - -def create_run_object(args, area, suite_id): - """ - Create run object containing all necessary information for Ass - Areas of the previous version of AutoAssess. - Use only information provided through command line options. - - # Private entries - run['_area'] - assessment area name - run['_start_date'] - start date - run['_end_date'] - end date - - run['runid'] - name of the run (5-char UM runid,suite ID) - run['data_root'] - dir with data to be assessed - run['clim_root'] - dir for obs and climatologies - run['ancil_root'] - directory for ancillary files - run['nyear'] - length of the assessment period as full years - run['start'] - start year - - run['from_monthly'] - Date ranges for different mean periods. - run['to_monthly'] Climatologic years: - run['from_daily'] all from_* start on 01/12/start_year-1 - run['to_daily'] The to_* date will always be: - run['from_annual'] 1st date + nyears - mean period length - run['to_annual'] daily: 30/11/XX, monthly: 01/11/XX, - seasonal: 01/09/XX - run['from_seasonal'] annual: 01/12/XX-1 - run['to_seasonal'] - - # only use by Stratosphere area - run.id - suite ID - run.title - '# TITLE #' - - :param argparse.Namespace args: Command line arguments. - :param str area: Name of assessment area. - :param str suite_id: Model run suite ID. - :returns: Run dictionary. - :rtype: Dictionary with attributes - """ - - class run(dict): - pass - - run = run() - # added private entries required for replicating data retrieval with the - # previous API (loaddata) - run['_area'] = area - run['_start_date'] = args.start_date - run['_end_date'] = args.end_date - - run.title = '# TITLE #' # stratosphere - run['runid'] = suite_id - run.id = run['runid'] # stratosphere - run['data_root'] = args.data_dir - run['clim_root'] = args.obs_dir - run['ancil_root'] = args.ancil_dir - - start_year = int(args.start_date[0:4]) - end_year = int(args.end_date[0:4]) - run['start'] = start_year - run['nyear'] = end_year - start_year - run.period = '{:04d}_{:03d}'.format(run['start'], - run['nyear']) # stratosphere uses this - - year, month, day = map(int, args.start_date.split('/')) - run['from_instantaneous'] = datetime.datetime(year, month, day) - run['from_daily'] = datetime.datetime(year, month, day) - run['from_monthly'] = datetime.datetime(year, month, day) - run['from_seasonal'] = datetime.datetime(year, month, day) - run['from_annual'] = datetime.datetime(year, month, day) - - year, month, day = map(int, args.end_date.split('/')) - assert month == 12 and day == 1 # Climatological year - run['to_instantaneous'] = datetime.datetime(year, 11, 30) - run['to_daily'] = datetime.datetime(year, 11, 30) - run['to_monthly'] = datetime.datetime(year, 11, 1) - run['to_seasonal'] = datetime.datetime(year, 9, 1) - run['to_annual'] = datetime.datetime(year - 1, 12, 1) - return run - - -def parse_args(args): - """ - Parse arguments in a function to facilitate testing. Contains all command - line options. - - :param list args: Command line arguments from sys.argv. - :returns: Checked command line arguments. 
- :rtype: argparse.Namespace - """ - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument( - '--area', required=True, help='Name of assessment area.') - parser.add_argument( - '--suite-id1', - required=True, - help='The name of a Rose suite, something like: u-ab123') - parser.add_argument( - '--suite-id2', - required=True, - help='The name of a Rose suite, something like: u-ab123') - parser.add_argument( - '--start-date', - required=True, - help=('Start of assessment period. Must be the beginning ' - 'of a climatologic year.' - 'Format: YYYY/12/01')) - parser.add_argument( - '--end-date', - required=True, - help=('End of assessment period. Beginning of the ' - 'first climatologic year after the end of the ' - 'assessment period.' - 'Format: YYYY/12/01')) - parser.add_argument( - '--obs-dir', - required=True, - help='Directory with Observations and Climatologies.') - parser.add_argument( - '--ancil-dir', required=True, help='Directory with Ancillary files.') - parser.add_argument( - '--out-dir', required=True, help='Write results into this directory.') - parser.add_argument( - '--data-dir', required=True, help='Directory tree with climate data.') - parser.add_argument( - '--tmp-dir', - default='tmp', - help='Place for temporary files. Default is $TMPDIR.') - args = parser.parse_args(args) - - regex = '^[a-zA-Z_]+$' - assert re.match(regex, - args.area), regex + ' does not match ' + str(args.area) - - regex = '^[a-z0-9-]+$' - - # at least ONE year - year, month, day = map(int, args.start_date.split('/')) - start = datetime.date(year, month, day) - year, month, day = map(int, args.end_date.split('/')) - end = datetime.date(year, month, day) - assert end.year - start.year >= 1 and \ - end.month >= start.month and \ - end.day >= start.day, \ - 'Assessment requires at least two years of data.' - # As collapsing over a single year/season/month fails. - # Cubes also loose the DimCoord 'time' if only one time point is left. - - # climatologic years - regex = '^[0-9]{4}/12/01$' - assert re.match( - regex, - args.start_date), regex + ' does not match ' + str(args.start_date) - assert re.match( - regex, args.end_date), regex + ' does not match ' + str(args.end_date) - assert args.start_date < args.end_date, 'Start must be before end.' - regex = '^(/[a-zA-Z0-9_-]*)+$' - assert re.match( - regex, args.obs_dir), regex + ' does not match ' + str(args.obs_dir) - assert re.match( - regex, - args.ancil_dir), regex + ' does not match ' + str(args.ancil_dir) - assert re.match( - regex, args.out_dir), regex + ' does not match ' + str(args.out_dir) - assert re.match( - regex, args.data_dir), regex + ' does not match ' + str(args.data_dir) - assert re.match( - regex, args.tmp_dir), regex + ' does not match ' + str(args.tmp_dir) - - return args - - -def create_output_tree(out_dir, ref_suite_id, exp_suite_id, area): - """ - Create directory tree for area output according to the following scheme: - - `out_dir`/`exp_suite_id`_vs_`ref_suite_id`/`area` - - If the leaf directory `area` exists raises OSError. - - :param str out_dir: Base directory for output. - :param str suite_id1: Suite Id of reference model run. - :param str suite_id2: Suite Id of test model run. - :param str area: Name of asssessment area. - :returns: Path to area output directory. 
- :rtype: str - :raises: OSError - """ - assessment_name = exp_suite_id + '_vs_' + ref_suite_id - # make sure out_dir exists in output folder - _out_dir = os.path.join(out_dir, assessment_name) - create_dir(out_dir) - - # create output folder for area - area_out_dir = os.path.join(_out_dir, area) - create_dir(area_out_dir) - return area_out_dir - - -def create_tmp_dir(tmp_dir, ref_suite_id, exp_suite_id, area): - """ - Create directory tree for temporary data according to the following scheme: - - `tmp_dir`/`exp_suite_id`_vs_`ref_suite_id`_random/`area`_random - - :param str tmp_dir: Base temporary directory. - :param str suite_id1: Suite ID of reference model run. - :param str suite_id2: Suite ID of test model run. - :param str area: Name of asssessment area. - :returns: Path to area temporary directory. - :rtype: str - """ - assessment_name = exp_suite_id + '_vs_' + ref_suite_id - # create unique temporary folder in tmp dir - _tmp_dir = tempfile.mkdtemp(prefix=assessment_name + '_', dir=tmp_dir) - - # create temporary folder for area - area_tmp_dir = tempfile.mkdtemp(prefix=area + '_', dir=_tmp_dir) - return area_tmp_dir - - -def run_area(): - """ """ - args = parse_args(sys.argv[1:]) - - area = str(args.area).lower() - - # import area here to allow removal of areas - if area == 'monsoon': - import monsoon as area_package - elif area == 'stratosphere': - import stratosphere as area_package - elif area == 'hydrocycle': - import hydrocycle as area_package - elif area == 'conservation': - import conservation as area_package - elif area == 'globaltrop': - import globaltrop as area_package - elif area == 'land_surface': - import land_surface as area_package - else: - raise Exception('Unknown area: ' + str(area)) - - area_tmp_dir = create_tmp_dir(args.tmp_dir, args.suite_id1, args.suite_id2, - area) - area_out_dir = create_output_tree(args.out_dir, args.suite_id1, - args.suite_id2, area) - - # the areas write all output to the cwd - os.chdir(area_out_dir) - - for suite_id in [args.suite_id1, args.suite_id2]: - all_metrics = {} - # run each metric function - for metric_function in area_package.metrics_functions: - print('# Call:', metric_function.__name__) - run_object = create_run_object(args, area, suite_id) - metrics = metric_function(run_object) - print('# metrics: ', pprint(metrics)) - - duplicate_metrics = set(all_metrics.keys()) & set(metrics.keys()) - if len(duplicate_metrics) != 0: - raise AssertionError('Duplicate Metrics ' + - str(duplicate_metrics)) - all_metrics.update(metrics) - - # write metrics to file - create_dir(os.path.join(area_out_dir, suite_id)) - with open(os.path.join(area_out_dir, suite_id, 'metrics.csv'), - 'w') as fh: - writer = csv.writer(fh) - for metric in all_metrics.items(): - writer.writerow(metric) - - # multimodel functions - if hasattr(area_package, 'multi_functions'): - ref_run_object = create_run_object(args, area, args.suite_id1) - test_run_object = create_run_object(args, area, args.suite_id2) - run_objects = [ref_run_object, - test_run_object] # reference must be first - for multi_function in area_package.multi_functions: - multi_function(run_objects) - else: - print('# Area has no multi functions.') - - -if __name__ == '__main__': - run_area() diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/__init__.py b/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/__init__.py deleted file mode 100644 index b92abc842e..0000000000 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/__init__.py +++ 
/dev/null @@ -1,42 +0,0 @@ -''' -List the functions which are to be called by auto-assess -There are two types of functions: - 1. those which compute metrics (and optionally make plots) for a single run - 2. those which make plots comparing multiple runs - - -Each metrics function (type 1) has the following interface: - Arguments: - run - dictionary containing metadata for a single model run - (see auto_assess.model_run for description of - the contents of this dictionary) - - Returns: - metrics - dictionary of metrics names and values - optionally also writes image files to the current working dir - - -Each "multi" function (type 2) has the following interface: - Arguments: - runs - list of run dictionaries. Each dictionary contains - metadata for a single model run. The first dictionary - in this list is the control experiment. - (see auto_assess.model_run for description of - the contents of this dictionary) - - Returns: - doesn't return any objects - it only writes image files to the - current working dir - -''' - -# local modules -from . import age_of_air -from . import strat_metrics_1 - -metrics_functions = [strat_metrics_1.mainfunc, age_of_air.age_of_air] - -multi_functions = [strat_metrics_1.multi_qbo_plot, - strat_metrics_1.multi_teq_plot, - strat_metrics_1.multi_t100_vs_q70_plot, - age_of_air.multi_age_plot] diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/assessment_plot_obs/stratosphere_obs.csv b/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/assessment_plot_obs/stratosphere_obs.csv deleted file mode 100644 index b7b7ce409e..0000000000 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/assessment_plot_obs/stratosphere_obs.csv +++ /dev/null @@ -1,14 +0,0 @@ -Polar night jet: northern hem (January), 50.1258, 54.2734 -Polar night jet: southern hem (July), 101.7834, 97.2603 -Easterly jet: southern hem (January), 69.3691, 66.1543 -Easterly jet: northern hem (July), 52.1979, 49.7164 -50 hPa temperature: 60N-90N (DJF), 29.1470, 29.0325 -50 hPa temperature: 60N-90N (MAM), 40.0610, 40.2215 -50 hPa temperature: 90S-60S (JJA), 13.4659, 13.4147 -50 hPa temperature: 90S-60S (SON), 31.8144, 32.0790 -QBO period at 30 hPa, 27.1429, 27.0000 -QBO amplitude at 30 hPa (westward), 32.7533, 27.0271 -QBO amplitude at 30 hPa (eastward), 13.6788, 11.5840 -100 hPa equatorial temp (annual mean), 12.2478, 13.1725 -100 hPa equatorial temp (annual cycle strength), 2.42345, 2.31389 -70 hPa 10Sto10N wv (annual mean), 3.7585, 3.5249 \ No newline at end of file diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/assessment_plot_selections/stratosphere_overview.csv b/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/assessment_plot_selections/stratosphere_overview.csv deleted file mode 100644 index cc60131773..0000000000 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/assessment_plot_selections/stratosphere_overview.csv +++ /dev/null @@ -1,17 +0,0 @@ -Polar night jet: northern hem (January) -Polar night jet: southern hem (July) -Easterly jet: southern hem (January) -Easterly jet: northern hem (July) -50 hPa temperature: 60N-90N (DJF) -50 hPa temperature: 60N-90N (MAM) -50 hPa temperature: 90S-60S (JJA) -50 hPa temperature: 90S-60S (SON) -QBO period at 30 hPa -QBO amplitude at 30 hPa (westward) -QBO amplitude at 30 hPa (eastward) -100 hPa equatorial temp (annual mean) -100 hPa equatorial temp (annual cycle strength) -70 hPa 10Sto10N wv (annual mean) -RMS error: tropical Age of 
Air -RMS error: NH midlatitude Age of Air -Summary diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_stratosphere.py b/esmvaltool/diag_scripts/autoassess/autoassess_stratosphere.py deleted file mode 100644 index 8c47cedf87..0000000000 --- a/esmvaltool/diag_scripts/autoassess/autoassess_stratosphere.py +++ /dev/null @@ -1,139 +0,0 @@ -"""autoassess stratosphere diagnostic.""" -import os -import logging -import inspect -import sys -import subprocess -import numpy as np - -import iris -import yaml - -from esmvaltool.diag_scripts.shared import run_diagnostic - -logger = logging.getLogger(__name__) - -# Diagnostic that takes two datasets (control_model and exp_model -# and observational data (ERA-Interim and MERRA) - - -def _check_p30(cubelist): - """Check for plev = 30hPa""" - p30 = iris.Constraint(air_pressure=3000.) - for cube in cubelist: - qbo30 = cube.extract(p30) - # extract the masked array and check for missing data - qbo30.data = np.ma.array(qbo30.data) - if np.all(qbo30.data.mask): - logger.info('Cube metadata:', qbo30.metadata) - logger.error('All data is masked at 30hPa! Exiting.') - sys.exit(1) - - -def main(cfg): - """Execute the stratosphere area""" - logger.setLevel(cfg['log_level'].upper()) - if not os.path.exists(cfg['plot_dir']): - os.makedirs(cfg['plot_dir']) - if not os.path.exists(cfg['work_dir']): - os.makedirs(cfg['work_dir']) - suite_loc_m1 = os.path.join(cfg['work_dir'], cfg['control_model']) - if not os.path.exists(suite_loc_m1): - os.makedirs(suite_loc_m1) - suite_loc_m2 = os.path.join(cfg['work_dir'], cfg['exp_model']) - if not os.path.exists(suite_loc_m2): - os.makedirs(suite_loc_m2) - suite_data_m1 = os.path.join(suite_loc_m1, 'stratosphere') - if not os.path.exists(suite_data_m1): - os.makedirs(suite_data_m1) - suite_data_m2 = os.path.join(suite_loc_m2, 'stratosphere') - if not os.path.exists(suite_data_m2): - os.makedirs(suite_data_m2) - tmp_dir = os.path.join(cfg['work_dir'], 'tmp') - ancil_dir = os.path.join(cfg['work_dir'], 'ancil') - if not os.path.exists(tmp_dir): - os.makedirs(tmp_dir) - if not os.path.exists(ancil_dir): - os.makedirs(ancil_dir) - - files_list_m1 = [] - files_list_m2 = [] - obs_list = [] - for filename, attributes in cfg['input_data'].items(): - base_file = os.path.basename(filename) - fullpath_file = filename - if base_file.split('_')[1] == cfg[ - 'control_model']: - files_list_m1.append(fullpath_file) - elif base_file.split('_')[1] == cfg['exp_model']: - files_list_m2.append(fullpath_file) - elif base_file.split('_')[0] == 'OBS': - obs_list.append(fullpath_file) - - # spell out the files used - logger.info("Files for control model: %s", files_list_m1) - logger.info("Files for exp model: %s", files_list_m2) - logger.info("Files for obs model: %s", obs_list) - - # load cubelists - cubelist_m1 = iris.load(files_list_m1) - cubelist_m2 = iris.load(files_list_m2) - - # STRATOSPHERE computes QBO at 30hPa - # go through cubes and make sure they have 30hPa levels - # that have at least one unmasked value - _check_p30(cubelist_m1) - _check_p30(cubelist_m2) - - # save to congragated files - cubes_list_path_m1 = os.path.join(suite_data_m1, 'cubeList.nc') - iris.save(cubelist_m1, cubes_list_path_m1) - cubes_list_path_m2 = os.path.join(suite_data_m2, 'cubeList.nc') - iris.save(cubelist_m2, cubes_list_path_m2) - logger.info("Saved control data cube: %s", cubes_list_path_m1) - logger.info("Saved exp data cube: %s", cubes_list_path_m2) - - cwd = os.path.dirname( - os.path.abspath(inspect.getfile(inspect.currentframe()))) - command_call = 
'python ' + os.path.join(cwd,
-                                          'autoassess_source/run_area.py')
-    args = {}
-    args['--area'] = cfg['area']
-    args['--suite-id1'] = cfg['control_model']
-    args['--suite-id2'] = cfg['exp_model']
-    args['--start-date'] = cfg['start']
-    args['--end-date'] = cfg['end']
-    args['--obs-dir'] = os.path.dirname(files_list_m1[0])
-    if cfg['obs_models'] is not None:
-        group_files = [[
-            ofile for ofile in obs_list
-            if os.path.basename(ofile).split('_')[1] == obs
-        ] for obs in cfg['obs_models']]
-        for obs_file_group in group_files:
-            cubes_list_obs = iris.load(obs_file_group)
-            cubes_list_obs_path = os.path.join(
-                os.path.dirname(obs_file_group[0]),
-                os.path.basename(obs_file_group[0]).split('_')[1] +
-                '_tropical_area_avg.nc')
-            iris.save(cubes_list_obs, cubes_list_obs_path)
-    args['--out-dir'] = cfg['plot_dir']
-    args['--data-dir'] = cfg['work_dir']
-    args['--tmp-dir'] = tmp_dir
-    args['--ancil-dir'] = ancil_dir
-    args_collection = [key + ' ' + args[key] for key in args.keys()]
-    sys_call = command_call + ' ' + ' '.join(args_collection)
-    logger.info(sys_call)
-    # run the thing
-    proc = subprocess.Popen(sys_call, stdout=subprocess.PIPE, shell=True)
-    (out, err) = proc.communicate()
-    rc = proc.returncode
-    logger.info("Diagnostic output: %s", out)
-    if int(rc) != 0:
-        logger.info("Diagnostic has failed!")
-        sys.exit(1)
-
-
-if __name__ == '__main__':
-
-    with run_diagnostic() as config:
-        main(config)
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/__init__.py b/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/__init__.py
new file mode 100644
index 0000000000..95f7d719b6
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/__init__.py
@@ -0,0 +1,4 @@
+"""Import permafrost and its metrics."""
+from . import permafrost
+
+metrics_functions = [permafrost.land_permafrost_top, ]
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost.py b/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost.py
new file mode 100644
index 0000000000..2aa56b3daf
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost.py
@@ -0,0 +1,329 @@
+"""Module for permafrost metrics."""
+
+import os
+
+import cartopy.crs as ccrs
+import matplotlib.pyplot as plt
+import numpy as np
+
+import iris
+import iris.coord_categorisation
+import iris.quickplot as qplt
+import iris.util as ut
+
+from esmvaltool.diag_scripts.autoassess.loaddata import load_run_ss
+# from esmvaltool.diag_scripts.shared._supermeans import get_supermean
+from . import permafrost_koven_sites
+
+
+# main permafrost subroutine
+def land_permafrost_top(run):
+    """
+    Make permafrost metrics.
+
+    Code development Eleanor Burke.
+
+    Arguments:
+        run - dictionary containing model run metadata
+              (see auto_assess/model_run.py for description)
+
+    Returns:
+        metrics - dictionary of metrics names and values
+                  also produces image files in the current working directory
+
+    """
+    metrics = dict()
+
+    # Load the whole monthly soil temperature time-sequence as a single cube.
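# load_run_ss is autoassess's loaddata helper; as used here it is assumed to
# return one cube with dims (time, depth, latitude, longitude) covering the
# whole assessment period.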
+    # STASH m01s08i225
+    period = "monthly"
+    soiltemp = load_run_ss(
+        run, period, 'soil_temperature')  # has dims(time, depth, lat, long)
+
+    # check soil depths
+    expected_soil_depths = [0.05, 0.225, 0.675, 2.0]
+    soil_depths = soiltemp.coord('depth')
+    if not np.array_equal(soil_depths.points, expected_soil_depths):
+        msg = ('Soil has changed levels from usual {}, '
+               'following not supported: {}'.format(expected_soil_depths,
+                                                    soil_depths))
+        raise Exception(msg)
+
+    # load the whole monthly air temperature (STASH m01s03i236)
+    airtemp = load_run_ss(run, period, 'air_temperature')
+
+    # get the land fraction mask using whole cube
+    landfrac = get_landfr_mask(run)
+
+    # extract northern latitudes
+    airtemp = airtemp.extract(iris.Constraint(latitude=lambda cell: cell > 0))
+    soiltemp = soiltemp.extract(iris.Constraint(
+        latitude=lambda cell: cell > 0))
+
+    # calculate the permafrost area and fraction less than zero
+    # permafrost_area returns a dict, which is added to the main metrics dict
+    # by metrics.update()
+    metrics.update(permafrost_area(soiltemp, airtemp, landfrac, run))
+
+    # calculate the koven temperature metrics
+    metrics.update(koven_temp_offsets(soiltemp, airtemp))
+    metrics.update(koven_temp_atten(soiltemp, airtemp))
+
+    return metrics
+
+
+def permafrost_area(soiltemp, airtemp, landfrac, run):
+    """Calculate the permafrost area and make a plot."""
+    # Define parameters of the test to calculate the existence of permafrost
+    thresh_temperature = 273.2
+    frozen_months = 24
+    prop_months_frozen = 0.5  # frozen for at least half of the simulation
+
+    # make a mask of land fraction over non iced areas and extract northern
+    # latitudes
+    nonice = get_nonice_mask(run)
+    mask = iris.analysis.maths.multiply(nonice, landfrac)
+    mask = mask.extract(iris.Constraint(latitude=lambda cell: cell > 0))
+
+    # extract northern high latitudes [and deepest soil level]
+    soiltemp = soiltemp.extract(iris.Constraint(depth=2.0))  # from 1m to 3m
+
+    # Make an aggregator that counts rolling windows of frozen_months
+    # consecutive months all below the freezing threshold (see num_frozen)
+    frozen_count = iris.analysis.Aggregator(
+        'frozen_count', num_frozen, units_func=lambda units: 1)
+
+    # Calculate the permafrost locations
+    pf_periods = soiltemp.collapsed(
+        'time',
+        frozen_count,
+        threshold=thresh_temperature,
+        frozen_length=frozen_months)
+    tot_time = len(soiltemp.coord('time').points)
+    pf_periods = pf_periods / float(tot_time)
+    pf_periods.rename('Fraction of months layer 4 (-1m to -3m) soil is frozen')
+
+    # mask out non permafrost points, sea points and ice points
+    pf_periods.data = np.ma.masked_less(pf_periods.data, prop_months_frozen)
+    # set all non-masked values to 1 for area calculation
+    pf_periods = pf_periods / pf_periods
+    # mask for land area also
+    pf_periods = pf_periods * mask
+
+    # calculate the area of permafrost
+    # Generate area-weights array. This method requires bounds on lat/lon
+    # coords, add some in sensible locations using the "guess_bounds"
+    # method.
+    for coord in ['latitude', 'longitude']:
+        if not pf_periods.coord(coord).has_bounds():
+            pf_periods.coord(coord).guess_bounds()
+    grid_areas = iris.analysis.cartography.area_weights(pf_periods)
+    # calculate the areas not masked in pf_periods
+    pf_area = pf_periods.collapsed(
+        ['longitude', 'latitude'], iris.analysis.SUM, weights=grid_areas).data
+
+    # what is the area where the temperature is less than 0 degrees C?
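# The 0 degC isotherm of time-mean air temperature defines the reference
# area; weighted by the same grid_areas, it becomes the denominator of the
# 'fraction area permafrost over zerodeg' metric below.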
+ airtemp = airtemp.collapsed('time', iris.analysis.MEAN) + # if more than 2 dims, select the ground level + if airtemp.ndim > 2: + airtemp = airtemp[0] + airtemp_below_zero = np.where(airtemp.data < 273.2, 1, 0) + airtemp_area = np.sum(airtemp_below_zero * grid_areas) + + pf_prop = pf_area / airtemp_area + pf_area = pf_area / 1e12 + + # Figure Permafrost extent north america + plt.figure(figsize=(8, 8)) + ax = plt.axes( + projection=ccrs.Orthographic( + central_longitude=-80.0, central_latitude=60.0)) + qplt.pcolormesh(pf_periods) + ax.gridlines() + ax.coastlines() + levels = [thresh_temperature] + qplt.contour(airtemp, levels, colors='k', linewidths=3) + plt.title('Permafrost extent & zero degree isotherm ({})'.format( + run['runid'])) + plt.savefig('pf_extent_north_america_' + run['runid'] + '.png') + + # Figure Permafrost extent asia + plt.figure(figsize=(8, 8)) + ax = plt.axes( + projection=ccrs.Orthographic( + central_longitude=100.0, central_latitude=50.0)) + qplt.pcolormesh(pf_periods) + ax.gridlines() + ax.coastlines() + levels = [thresh_temperature] + qplt.contour(airtemp, levels, colors='k', linewidths=3) + plt.title('Permafrost extent & zero degree isotherm ({})'.format( + run['runid'])) + plt.savefig('pf_extent_asia_' + run['runid'] + '.png') + + # defining metrics for return up to top level + metrics = { + 'permafrost area': pf_area, + 'fraction area permafrost over zerodeg': pf_prop, + } + + return metrics + + +# define the frozen area +def num_frozen(data, threshold, axis, frozen_length): + """ + Count valid frozen points. + + Function to calculate the number of points in a sequence where the value + is less than freezing for at least a certain number of timepoints. + + Generalised to operate on multiple time sequences arranged on a specific + axis of a multidimensional array. + """ + if axis < 0: + # just cope with negative axis numbers + axis += data.ndim + + # Threshold the data to find the 'significant' points. + data_hits = data < threshold + # Make an array with data values "windowed" along the time axis. + hit_windows = ut.rolling_window(data_hits, window=frozen_length, axis=axis) + # Find the windows "full of True-s" (along the added 'window axis'). + full_windows = np.all(hit_windows, axis=axis + 1) + # Count points fulfilling the condition (along the time axis). + frozen_point_counts = np.sum(full_windows, axis=axis, dtype=int) + + return frozen_point_counts + + +# land fraction +def get_landfr_mask(run): + """Get the land fraction mask.""" + supermean_data_dir = os.path.join(run['data_root'], run['runid'], + run['_area'] + '_supermeans') + # m01s03i395 + # TODO: replacing time-varying mask with fixed sftfx + # cube = get_supermean('land_area_fraction', 'ann', supermean_data_dir) + # replaced momentarily with: + name_constraint = iris.Constraint(name='land_area_fraction') + cubes_path = os.path.join(supermean_data_dir, 'cubeList.nc') + cubes = iris.load(cubes_path) + cube = cubes.extract_strict(name_constraint) + + return cube + + +# land ice mask +def get_nonice_mask(run): + """ + Get the land points without ice. 
+
+    Need to read the soil moisture data from the supermeans.
+    """
+    # TODO: currently set to mrsofc: soil_moisture_content_at_field_capacity
+    supermean_data_dir = os.path.join(run['data_root'], run['runid'],
+                                      run['_area'] + '_supermeans')
+
+    # m01s08i223
+    # TODO: original code
+    # cube = get_supermean('moisture_content_of_soil_layer', 'ann',
+    #                      supermean_data_dir)
+    # replaced with new time-invariant variable
+    name_constraint = iris.Constraint(
+        name='soil_moisture_content_at_field_capacity')
+    cubes_path = os.path.join(supermean_data_dir, 'cubeList.nc')
+    cubes = iris.load(cubes_path)
+    cube = cubes.extract_strict(name_constraint)
+
+    # TODO: mrsofc does not have depth
+    # cube = cube.extract(iris.Constraint(depth=2.0))  # layer from 1m to 3m
+
+    # make it into a mask of ones - extract first layer
+    # use masked_values for floating point fuzzy equals
+    cube.data = np.ma.masked_values(cube.data, 0.0)
+    cube = cube / cube
+
+    return cube
+
+
+def extract_sites(ex_points, cube):
+    """Extract points for the sites given."""
+    tempsite = cube.interpolate(ex_points, iris.analysis.Linear())
+    tempsite = np.diagonal(tempsite.data)
+    tempsite = np.ma.masked_array(tempsite)
+    tempsite = np.ma.masked_less(tempsite, 0.0)
+    return tempsite
+
+
+def koven_temp_offsets(soiltemp, airtemp):
+    """Define thermal offsets as in Koven et al. 2013."""
+    # read in list of observed lats and lons from Koven paper
+    ex_points = permafrost_koven_sites.site_points
+
+    # interpolate to depth required
+    # the soil temperatures are for the middle of the layer not the bottom of
+    # the layer
+    soiltemp_surf = iris.analysis.interpolate.linear(soiltemp,
+                                                     [('depth', 0.0)])
+    soiltemp_1m = iris.analysis.interpolate.linear(soiltemp, [('depth', 1.0)])
+
+    # extract points for each site
+    airtemp_1d = extract_sites(ex_points, airtemp)
+    if len(airtemp_1d.shape) > 2:
+        airtemp_1d = airtemp_1d[:, :, 0]
+    soiltemp_surf_1d = extract_sites(ex_points, soiltemp_surf)
+    soiltemp_1m_1d = extract_sites(ex_points, soiltemp_1m)
+
+    # assign metrics
+    metrics = {}
+    metrics['offset 1m minus surface'] = np.median(soiltemp_1m_1d -
+                                                   soiltemp_surf_1d)
+    metrics['offset surface minus air'] = np.median(soiltemp_surf_1d -
+                                                    airtemp_1d)
+    return metrics
+
+
+def make_monthly_amp(cube):
+    """Make monthly climatology."""
+    iris.coord_categorisation.add_month(cube, 'time', name='month')
+    cube_clim = cube.aggregated_by('month', iris.analysis.MEAN)
+    cube_ampl = cube_clim.collapsed('time', iris.analysis.MAX) - \
+        cube_clim.collapsed('time', iris.analysis.MIN)
+    return cube_ampl
+
+
+def koven_temp_atten(soiltemp, airtemp):
+    """Define thermal attenuation ratios as in Koven et al. 2013."""
+    # read in list of observed lats and lons from Koven paper
+    ex_points = permafrost_koven_sites.site_points
+
+    # make amplitudes
+    airtemp_ampl = make_monthly_amp(airtemp)
+    soiltemp_ampl = make_monthly_amp(soiltemp)
+
+    # interpolate the log to the correct depth
+    soiltemp_log = iris.analysis.maths.log(soiltemp_ampl)
+    soiltemp_log_surf = iris.analysis.interpolate.linear(
+        soiltemp_log, [('depth', 0.0)])
+    soiltemp_ampl_surf = iris.analysis.maths.exp(soiltemp_log_surf)
+    soiltemp_log_1m = iris.analysis.interpolate.linear(soiltemp_log,
+                                                       [('depth', 1.0)])
+    soiltemp_ampl_1m = iris.analysis.maths.exp(soiltemp_log_1m)
+
+    # extract points for each site
+    airtemp_ampl_1d = extract_sites(ex_points, airtemp_ampl)
+    if len(airtemp_ampl_1d.shape) > 1:
+        airtemp_ampl_1d = airtemp_ampl_1d[:, 0]
+    soiltemp_ampl_surf_1d = extract_sites(ex_points, soiltemp_ampl_surf)
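# Same site extraction at 1 m depth follows; the medians computed below are
# the Koven et al. (2013) attenuation ratios (1 m vs surface, surface vs air).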
+    soiltemp_ampl_1m_1d = extract_sites(ex_points, soiltemp_ampl_1m)
+
+    # assign metrics
+    metrics = {}
+    metrics['attenuation 1m over surface'] = np.median(
+        soiltemp_ampl_1m_1d / soiltemp_ampl_surf_1d)
+    metrics['attenuation surface over air'] = np.median(
+        soiltemp_ampl_surf_1d / airtemp_ampl_1d)
+
+    return metrics
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost_koven_sites.py b/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost_koven_sites.py
new file mode 100644
index 0000000000..4ad9489c10
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_permafrost/permafrost_koven_sites.py
@@ -0,0 +1,29 @@
+"""
+Module containing coordinates of observed sites from the Koven paper.
+
+Written in the format required by the Iris interpolator:
+    (('latitude', ), ('longitude', ))
+"""
+
+import numpy as np
+
+lats = np.array([
+    71.320575, 64.9076, 64.86781, 68.0691666666667, 70.161283, 70.161283,
+    70.1612833333333, 69.6551333333333, 69.6741402, 69.6741402, 69.65513,
+    69.65513, 68.4776666666667, 70.31516, 70.31516, 69.14664082, 69.14664082,
+    69.239, 68.6983, 67.0134, 65.3158, 68.2875, 68.2903, 65.3134, 65.3146,
+    56.761, 69.4283, 69.43303, 66.9357, 66.9381, 64.86936667, 64.86611667,
+    64.86751667, 64.86936, 64.867516, 64.867516, 64.8669, 70.3744666666667
+])
+
+lons = np.array([
+    -156.6493305, -163.674483, -147.78486111, -149.580333333333, -148.4653,
+    -148.4653, -148.4653, -148.722016666667, -148.72076632, -148.72076632,
+    -148.722016, -148.722016, -149.501666666667, -147.99316, -147.99316,
+    -148.8483008, -148.8483008, -51.0623, -149.35181, -50.7175, 72.8745,
+    54.5026, 54.5026, 72.8872, 72.8629, 118.1901, -148.70015, -148.67385,
+    -53.6416, -53.5957, -147.8608, -147.8567667, -147.8588333, -147.8608,
+    -147.85676, -147.85883, -147.858383, -148.552166666667
+])
+
+site_points = (('latitude', lats), ('longitude', lons))
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_snow/README.txt b/esmvaltool/diag_scripts/autoassess/land_surface_snow/README.txt
new file mode 100644
index 0000000000..05404c9761
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_snow/README.txt
@@ -0,0 +1,35 @@
+Metrics are (with obs-based estimates in brackets):
+    permafrost area (17.46 million square km)
+    area of permafrost / area northwards of zero degree isotherm (0.47)
+    soil temperature at 1m - soil temperature at surface (-0.53 degrees C)
+    soil temperature at surface - air temperature (6.15 degrees C)
+    annual amplitude at 1m / annual amplitude at the surface (0.40 unitless)
+    annual amplitude at the surface / annual air temperature (0.57 unitless)
+
+Observed permafrost extent is from http://nsidc.org/data/ggd318.html: Brown, J.,
+O. Ferrians, J. A. Heginbottom, and E. Melnikov. 2002. Circum-Arctic Map of
+Permafrost and Ground-Ice Conditions, Version 2. Boulder, Colorado USA. NSIDC:
+National Snow and Ice Data Center. When calculating the global area of
+permafrost the grid cells are weighted by the proportion of permafrost within
+them.
+
+Annual mean air temperature is from: Legates, D. R., and C. J. Willmott, 1990:
+Mean seasonal and spatial variability in global surface air temperature. Theor.
+Appl. Climatol., 41, 11-21. The annual mean is calculated from the seasonal
+mean data available at the Met Office.
+
+The soil temperature metrics are calculated following: Charles D. Koven, William
+J. Riley, and Alex Stern, 2013: Analysis of Permafrost Thermal Dynamics and
+Response to Climate Change in the CMIP5 Earth System Models. J. Climate, 26,
+1877–1900. (Table 3) doi: http://dx.doi.org/10.1175/JCLI-D-12-00228.1 The
+locations used for Table 3 were extracted from the model and the modelled
+metrics calculated. The observed metrics were extracted from Table 3.
+
+The soil temperature metrics represent the ability of the model to accurately
+simulate the coupling between the soil and the air temperatures at high
+latitudes, which reflects the accuracy of the snow physics and the soil
+hydrology.
+
+The permafrost extent represents a rough evaluation of the land surface physics
+in the northern high latitudes. A more in-depth evaluation would also include a
+comparison with active layer thicknesses.
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_snow/__init__.py b/esmvaltool/diag_scripts/autoassess/land_surface_snow/__init__.py
new file mode 100644
index 0000000000..40aef137cf
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_snow/__init__.py
@@ -0,0 +1,4 @@
+"""Import snow and its metrics."""
+from . import snow
+
+metrics_functions = [snow.land_swe_top, ]
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_snow/snow.py b/esmvaltool/diag_scripts/autoassess/land_surface_snow/snow.py
new file mode 100644
index 0000000000..6d2c4f52bf
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_snow/snow.py
@@ -0,0 +1,61 @@
+"""Module for snow metrics."""
+
+import os
+import numpy as np
+import iris
+
+from esmvaltool.preprocessor._regrid import regrid
+from esmvaltool.diag_scripts.shared._supermeans import get_supermean
+
+
+def land_swe_top(run):
+    """
+    Compute median-absolute difference of SWE against GlobSnow.
+
+    Arguments:
+        run - dictionary containing model run metadata
+              (see auto_assess/model_run.py for description)
+
+    Returns:
+        metrics - dictionary of metrics names and values
+
+    """
+    supermean_data_dir = os.path.join(run['data_root'], run['runid'],
+                                      run['_area'] + '_supermeans')
+
+    snow_seasons = ['son', 'djf', 'mam']
+
+    # Calculate median absolute errors for seasons with snow.
+    metrics = dict()
+    for season in snow_seasons:
+        clim_file = os.path.join(run['climfiles_root'],
+                                 'SWE_clm_{}.pp'.format(season))
+        swe_clim = iris.load_cube(clim_file)
+        swe_clim.data = np.ma.masked_array(
+            swe_clim.data, mask=(swe_clim.data == -1e20))
+
+        # model snow water equivalent (surface snow amount)
+        swe_run = get_supermean('surface_snow_amount', season,
+                                supermean_data_dir)
+
+        # Force same coord_system
+        swe_run.coord('longitude').coord_system = swe_clim.coord(
+            'longitude').coord_system
+        swe_run.coord('latitude').coord_system = swe_clim.coord(
+            'latitude').coord_system
+
+        # Force the units for SWE to match the model
+        swe_clim.units = swe_run.units
+
+        # form the difference
+        # active regridding here
+        swe_run = regrid(swe_run, swe_clim, 'linear')
+        dff = swe_run - swe_clim
+        iris.save(dff, os.path.join(run['dump_output'],
+                                    'snow_diff_{}.nc'.format(season)))
+
+        # Calculate median absolute error of the difference
+        name = "snow MedAbsErr {}".format(season)
+        metrics[name] = float(np.ma.median(np.ma.abs(dff.data)))
+
+    return metrics
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/__init__.py b/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/__init__.py
new file mode 100644
index 0000000000..94686845a5
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/__init__.py
@@ -0,0 +1,4 @@
+"""Import correct modules and functions."""
+from . import soilmoisture
+
+metrics_functions = [soilmoisture.land_sm_top, ]
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py b/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py
new file mode 100644
index 0000000000..be08fa9f50
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py
@@ -0,0 +1,130 @@
+"""Run module for soil moisture metrics."""
+
+import os
+import logging
+import numpy as np
+import iris
+from esmvaltool.preprocessor._regrid import regrid
+from esmvaltool.diag_scripts.shared._supermeans import get_supermean
+
+
+logger = logging.getLogger(__name__)
+
+
+def land_sm_top(run):
+    """
+    Calculate median absolute errors for soil moisture against CCI data.
+
+    Arguments:
+        run - dictionary containing model run metadata
+              (see auto_assess/model_run.py for description)
+
+    Returns:
+        metrics - dictionary of metrics names and values
+
+    """
+    supermean_data_dir = os.path.join(run['data_root'], run['runid'],
+                                      run['_area'] + '_supermeans')
+
+    seasons = ['djf', 'mam', 'jja', 'son']
+
+    # Constants
+    # density of water and ice
+    rhow = 1000.
+    rhoi = 917.
+    # first soil layer depth
+    dz1 = 0.1
+
+    # Work through each season
+    metrics = dict()
+    for season in seasons:
+        fname = 'ecv_soil_moisture_{}.nc'.format(season)
+        clim_file = os.path.join(run['climfiles_root'], fname)
+        ecv_clim = iris.load_cube(clim_file)
+        # correct invalid units
+        if (ecv_clim.units == 'unknown' and
+                'invalid_units' in ecv_clim.attributes):
+            if ecv_clim.attributes['invalid_units'] == 'm^3m^-3':
+                ecv_clim.units = 'm3 m-3'
+
+        # m01s08i223
+        # standard_name: mrsos
+        smcl_run = get_supermean('moisture_content_of_soil_layer', season,
+                                 supermean_data_dir)
+
+        # m01s08i229
+        # standard_name: ???
+        # TODO: uncomment when implemented
+        # sthu_run = get_supermean(
+        #     'mass_fraction_of_unfrozen_water_in_soil_moisture', season,
+        #     supermean_data_dir)
+
+        # m01s08i230
+        # standard_name: ??? soil_frozen_water_content - mrfso
+        # TODO: uncomment when implemented
+        # sthf_run = get_supermean(
+        #     'mass_fraction_of_frozen_water_in_soil_moisture', season,
+        #     supermean_data_dir)
+
+        # TODO: remove after correct implementation
+        sthu_run = smcl_run
+        sthf_run = smcl_run
+
+        # extract top soil layer
+        cubes = [smcl_run, sthu_run, sthf_run]
+        for i, cube in enumerate(cubes):
+            if cube.coord('depth').attributes['positive'] != 'down':
+                logger.warning('Cube %s depth attribute is not down', cube)
+            top_level = min(cube.coord('depth').points)
+            topsoil = iris.Constraint(depth=top_level)
+            cubes[i] = cube.extract(topsoil)
+        smcl_run, sthu_run, sthf_run = cubes
+
+        # Set all sea points to missing data np.nan
+        smcl_run.data[smcl_run.data < 0] = np.nan
+        sthu_run.data[sthu_run.data < 0] = np.nan
+        sthf_run.data[sthf_run.data < 0] = np.nan
+
+        # set soil moisture to missing data on ice points (i.e. no soil)
+        sthu_plus_sthf = (dz1 * rhow * sthu_run) + (dz1 * rhoi * sthf_run)
+        ice_pts = sthu_plus_sthf.data == 0
+        sthu_plus_sthf.data[ice_pts] = np.nan
+
+        # Calculate the volumetric soil moisture in m3/m3
+        theta_s_run = smcl_run / sthu_plus_sthf
+        vol_sm1_run = theta_s_run * sthu_run
+        vol_sm1_run.units = "m3 m-3"
+        vol_sm1_run.long_name = "Top layer Soil Moisture"
+
+        # update the coordinate system of the ECV data with a WGS84
+        # coord system
+        # TODO: ask Heather why this is needed
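+        # (Likely answer to the TODO above: the regridder needs both cubes
+        # to carry the same horizontal coordinate system, so an explicit
+        # WGS84 GeogCS is attached to the model and the ECV data below.)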
+
+        # unify coord systems for regridder
+        vol_sm1_run.coord('longitude').coord_system = \
+            iris.coord_systems.GeogCS(semi_major_axis=6378137.0,
+                                      inverse_flattening=298.257223563)
+        vol_sm1_run.coord('latitude').coord_system = \
+            iris.coord_systems.GeogCS(semi_major_axis=6378137.0,
+                                      inverse_flattening=298.257223563)
+        ecv_clim.coord('longitude').coord_system = \
+            iris.coord_systems.GeogCS(semi_major_axis=6378137.0,
+                                      inverse_flattening=298.257223563)
+        ecv_clim.coord('latitude').coord_system = \
+            iris.coord_systems.GeogCS(semi_major_axis=6378137.0,
+                                      inverse_flattening=298.257223563)
+
+        # Interpolate to the grid of the climatology and form the difference
+        vol_sm1_run = regrid(vol_sm1_run, ecv_clim, 'linear')
+        # diff the cubes
+        dff = vol_sm1_run - ecv_clim
+
+        # Remove NaNs from data before aggregating statistics
+        dff.data = np.ma.masked_invalid(dff.data)
+
+        # save output
+        iris.save(dff, os.path.join(run['dump_output'],
+                                    'soilmoist_diff_{}.nc'.format(season)))
+        name = 'soilmoisture MedAbsErr {}'.format(season)
+        metrics[name] = float(np.ma.median(np.ma.abs(dff.data)))
+
+    return metrics
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/README.txt b/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/README.txt
new file mode 100644
index 0000000000..05404c9761
--- /dev/null
+++ b/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/README.txt
@@ -0,0 +1,35 @@
+Metrics are (with obs-based estimates in brackets):
+    permafrost area (17.46 million square km)
+    area of permafrost / area northwards of zero degree isotherm (0.47)
+    soil temperature at 1m - soil temperature at surface (-0.53 degrees C)
+    soil temperature at surface - air temperature (6.15 degrees C)
+    annual amplitude at 1m / annual amplitude at the surface (0.40 unitless)
+    annual amplitude at the surface / annual air temperature (0.57 unitless)
+
+Observed permafrost extent is from http://nsidc.org/data/ggd318.html: Brown, J.,
+O. Ferrians, J. A. Heginbottom, and E. Melnikov. 2002. Circum-Arctic Map of
+Permafrost and Ground-Ice Conditions, Version 2. Boulder, Colorado USA. NSIDC:
+National Snow and Ice Data Center. When calculating the global area of
+permafrost the grid cells are weighted by the proportion of permafrost within
+them.
+
+Annual mean air temperature is from: Legates, D. R., and C. J. Willmott, 1990:
+Mean seasonal and spatial variability in global surface air temperature. Theor.
+Appl. Climatol., 41, 11-21. The annual mean is calculated from the seasonal
+mean data available at the Met Office.
+
+The soil temperature metrics are calculated following: Charles D. Koven, William
+J. Riley, and Alex Stern, 2013: Analysis of Permafrost Thermal Dynamics and
+Response to Climate Change in the CMIP5 Earth System Models. J. Climate, 26,
+1877–1900. (Table 3) doi: http://dx.doi.org/10.1175/JCLI-D-12-00228.1 The
+locations used for Table 3 were extracted from the model and the modelled
+metrics calculated. The observed metrics were extracted from Table 3.
+
+The soil temperature metrics represent the ability of the model to accurately
+simulate the coupling between the soil and the air temperatures at high
+latitudes, which reflects the accuracy of the snow physics and the soil
+hydrology.
+
+The permafrost extent represents a rough evaluation of the land surface physics
+in the northern high latitudes. A more in-depth evaluation would also include a
+comparison with active layer thicknesses.
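The snow, soil moisture and surface radiation assessments in this part of the
patch all reduce a regridded model-minus-reference field to a single median
absolute error. As a minimal, self-contained sketch of that shared pattern
(using iris's own regridding rather than the ESMValTool preprocessor wrapper,
and assuming two hypothetical cubes that differ only in their horizontal grid):

    import iris
    import numpy as np
    from iris.analysis import Linear


    def med_abs_err(model_cube, ref_cube):
        """Median absolute error of a model field against a reference."""
        # Put the model field on the reference grid so the cubes can be
        # differenced point by point.
        model_on_ref = model_cube.regrid(ref_cube, Linear())
        diff = model_on_ref - ref_cube
        # Mask invalid points before aggregating the statistic.
        data = np.ma.masked_invalid(diff.data)
        return float(np.ma.median(np.ma.abs(data)))

The individual modules differ only in how the two cubes are loaded,
unit-matched and masked before this step.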
diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/__init__.py b/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/__init__.py new file mode 100644 index 0000000000..1646da2e97 --- /dev/null +++ b/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/__init__.py @@ -0,0 +1,4 @@ +"""Import surfrad and its metrics.""" +from . import surfrad + +metrics_functions = [surfrad.land_surf_rad, ] diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/surfrad.py b/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/surfrad.py new file mode 100644 index 0000000000..717326450d --- /dev/null +++ b/esmvaltool/diag_scripts/autoassess/land_surface_surfrad/surfrad.py @@ -0,0 +1,85 @@ +"""Module to compute surface radiation metrics.""" + +import os + +import numpy as np + +import iris + +from esmvaltool.preprocessor._regrid import regrid +from esmvaltool.diag_scripts.shared._supermeans import get_supermean + + +def land_surf_rad(run): + """ + Compute median absolute errors against CERES-EBAF data. + + Arguments: + run - dictionary containing model run metadata + (see auto_assess/model_run.py for description) + + Returns: + metrics - dictionary of metrics names and values. + """ + supermean_data_dir = os.path.join(run['data_root'], run['runid'], + run['_area'] + '_supermeans') + + rad_seasons = ['ann', 'djf', 'mam', 'jja', 'son'] + rad_fld = ['SurfRadNSW', 'SurfRadNLW'] + + # Land mask: Use fractional mask for now. + # Fraction of Land m01s03i395 + # replaced with a constant sftlf mask; original was + # lnd = get_supermean('land_area_fraction', 'ann', supermean_data_dir) + cubes = iris.load(os.path.join(supermean_data_dir, 'cubeList.nc')) + lnd = cubes.extract_strict(iris.Constraint(name='land_area_fraction')) + + metrics = dict() + for season in rad_seasons: + for fld in rad_fld: + if fld == 'SurfRadNSW': + ebaf_fld = get_supermean( + 'Surface Net downward Shortwave Radiation', season, + run['clim_root'], obs_flag='CERES-EBAF') + run_fld_rad = get_supermean( + 'Surface Net downward Shortwave Radiation', season, + supermean_data_dir) + + elif fld == 'SurfRadNLW': + ebaf_fld = get_supermean( + 'Surface Net downward Longwave Radiation', season, + run['clim_root'], obs_flag='CERES-EBAF') + run_fld_rad = get_supermean( + 'Surface Net downward Longwave Radiation', season, + supermean_data_dir) + + else: + raise Exception('Skipping unassigned case.') + + # Regrid both to land points and mask out where this is below + # a threshold. Force the coordinate system on model. 
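+            # (lnd holds the land area fraction in [0, 1] on the model grid;
+            # after regridding, points with less than 98% land are masked so
+            # coastal and ocean cells do not enter the land-only statistics.)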
+ ebaf_fld.coord('latitude').coord_system = \ + run_fld_rad.coord('latitude').coord_system + ebaf_fld.coord('longitude').coord_system = \ + run_fld_rad.coord('longitude').coord_system + lnd.coord('latitude').coord_system = \ + run_fld_rad.coord('latitude').coord_system + lnd.coord('longitude').coord_system = \ + run_fld_rad.coord('longitude').coord_system + + reg_run_fld = regrid(run_fld_rad, lnd, 'linear') + reg_ebaf_fld = regrid(ebaf_fld, lnd, 'linear') + + # apply the mask + reg_run_fld.data = np.ma.masked_array( + reg_run_fld.data, mask=(lnd.data < 0.98)) + reg_ebaf_fld.data = np.ma.masked_array( + reg_ebaf_fld.data, mask=(lnd.data < 0.98)) + + # do a simple diff + dff = reg_run_fld - reg_ebaf_fld + + name = "{} MedAbsErr {}".format(fld, season) + metrics[name] = float(np.ma.median(np.abs(dff.data))) + + return metrics diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/loaddata.py b/esmvaltool/diag_scripts/autoassess/loaddata.py similarity index 83% rename from esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/loaddata.py rename to esmvaltool/diag_scripts/autoassess/loaddata.py index b3bc2437a8..7552a3c1f4 100644 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/loaddata.py +++ b/esmvaltool/diag_scripts/autoassess/loaddata.py @@ -1,7 +1,5 @@ -#!/usr/bin/env python2.7 -# -*- coding: utf-8 -*- """ -(C) Crown Copyright 2017, the Met Office +(C) Crown Copyright 2017, the Met Office. Module to replicate data access API of the previous version of AutoAssess: @@ -10,25 +8,24 @@ For information on the PP-Header attributes, see: Unified Model Documentation Paper F03: "Input and Output File Formats" -available here: https://code.metoffice.gov.uk/doc/um/vn10.5/umdp.html - +available here: https://code.metoffice.gov.uk/doc/um/vn10.5/umdp.html. """ -import cf_units import os.path import re import datetime from datetime import timedelta as td from datetime import datetime as dd +import cf_units import iris import iris.coord_categorisation as coord_cat def is_daily(cube): """Test whether the time coordinate contains only daily bound periods.""" - def is_day(bound): + """Check if day.""" time_span = td(hours=(bound[1] - bound[0])) return td(days=1) == time_span @@ -37,8 +34,8 @@ def is_day(bound): def is_monthly(cube): """A month is a period of at least 28 days, up to 31 days.""" - def is_month(bound): + """Check if month.""" time_span = td(days=(bound[1] - bound[0])) return td(days=31) >= time_span >= td(days=28) @@ -46,12 +43,10 @@ def is_month(bound): def is_seasonal(cube): - """ - A season is a period of 3 months, i.e. at least 89 days, and up to 92 days. - """ - + """Season is 3 months, i.e. 
at least 89 days, and up to 92 days."""
     def is_season(bound):
-        time_span = td(hours=(bound[1] - bound[0]))
+        """Check if season."""
+        time_span = td(days=(bound[1] - bound[0]))
         return td(days=31 + 30 + 31) >= time_span >= td(days=28 + 31 + 30)

     return all([is_season(bound) for bound in cube.coord('time').bounds])
@@ -59,9 +54,9 @@ def is_season(bound):

 def is_yearly(cube):
     """A year is a period of at least 360 days, up to 366 days."""
-
     def is_year(bound):
-        time_span = td(hours=(bound[1] - bound[0]))
+        """Check if year."""
+        time_span = td(days=(bound[1] - bound[0]))
         return td(days=365) == time_span or td(days=360) == time_span

     return all([is_year(bound) for bound in cube.coord('time').bounds])
@@ -113,6 +108,47 @@ def select_by_variable_name(cubes, variable_name):
     return cubes.extract(constraint)


+# get the seasonal mean
+def seasonal_mean(mycube):
+    """
+    Compute seasonal means with iris.analysis.MEAN.
+
+    Chunks time in 3-month periods and computes means over them;
+    Returns a cube.
+    """
+    if not mycube.coords('clim_season'):
+        coord_cat.add_season(mycube, 'time', name='clim_season')
+    if not mycube.coords('season_year'):
+        coord_cat.add_season_year(mycube, 'time', name='season_year')
+    annual_seasonal_mean = mycube.aggregated_by(['clim_season', 'season_year'],
+                                                iris.analysis.MEAN)
+
+    def spans_three_months(time):
+        """Check for three months."""
+        return (time.bound[1] - time.bound[0]) == 90  # days
+
+    three_months_bound = iris.Constraint(time=spans_three_months)
+    return annual_seasonal_mean.extract(three_months_bound)
+
+
+# get annual mean
+def annual_mean(mycube):
+    """
+    Compute the annual mean with iris.analysis.MEAN.
+
+    Chunks time in 365-day periods and computes means over them;
+    Returns a cube.
+    """
+    if not mycube.coords('year'):
+        coord_cat.add_year(mycube, 'time', name='year')
+    yr_mean = mycube.aggregated_by('year', iris.analysis.MEAN)
+
+    def spans_year(time):
+        """Check for 12 months."""
+        return (time.bound[1] - time.bound[0]) == 365
+
+    t_bound = iris.Constraint(time=spans_year)
+    return yr_mean.extract(t_bound)
+
+
 def select_by_averaging_period(cubes, averaging_period):
     """
     Select subset from CubeList depending on averaging period.
@@ -131,9 +167,20 @@ def select_by_averaging_period(cubes, averaging_period):
         'seasonal': is_seasonal,
         'annual': is_yearly
     }
-    selected_cubes = [
-        cube for cube in cubes if select_period[averaging_period](cube)
-    ]
+    if averaging_period == 'seasonal':
+        selected_cubes = [
+            cube for cube in cubes
+            if select_period[averaging_period](seasonal_mean(cube))
+        ]
+    elif averaging_period == 'annual':
+        selected_cubes = [
+            cube for cube in cubes
+            if select_period[averaging_period](annual_mean(cube))
+        ]
+    else:
+        selected_cubes = [
+            cube for cube in cubes if select_period[averaging_period](cube)
+        ]
     return iris.cube.CubeList(selected_cubes)

@@ -144,7 +191,7 @@ def select_by_pressure_level(cubes, lblev):
     :param CubeList cubes: Iris CubeList.
     :param list lblev: List of pressure levels in hPa.
     :returns: CubeList with Cubes only containing specified pressure levels.
-    :rtype: CubeList
+    :rtype: CubeList.
     """
     pressure_level = iris.Constraint(pressure=lblev)
     return cubes.extract(
@@ -200,6 +247,8 @@ def select_by_processing(cubes, lbproc):

 def select_by_initial_meaning_period(cubes, lbtim):
     """
+    Select cubes by the time indicator `lbtim`.
+
     Select subset from CubeList by matching some of the information
     encoded in the 'Time indicator' `lbtim`. Namely, the initial meaning
     period and the used calendar.
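For reference, `lbtim` packs three digits, IA, IB and IC (meaning interval,
time-mean flag and calendar). A small illustrative sketch of the decoding,
with the digit meanings taken from the UM documentation comments that the
hunk below removes:

    def decode_lbtim(lbtim):
        """Split a UM time indicator such as 122 into its IA, IB, IC digits.

        IA: interval in hours between the fields that were meaned (e.g. 1, 6).
        IB: 2 if the field is a time mean between T1 and T2.
        IC: 1 for the Proleptic Gregorian calendar, 2 for the 360-day
            calendar.
        """
        i_a, i_b, i_c = (int(digit) for digit in str(lbtim))
        return i_a, i_b, i_c

    # decode_lbtim(122) -> (1, 2, 2): hourly means on a 360-day calendar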
@@ -229,34 +278,19 @@ def select_by_initial_meaning_period(cubes, lbtim):

     selected_cubes = iris.cube.CubeList()
     for lbtim in lbtims:
-        IA, IB, IC = str(lbtim)[:]  # pylint: disable=unused-variable
-        # IA - time interval in hours between the individual fields from which
-        #      the mean was calculated
-        # IB - = 2 if the field is a time mean between T1 and T2, or represents
-        #      a sequence of times between T1 and T2.
-        # IC - = 1 if the Proleptic Gregorian calendar is used for T1 and T2.
-        #      = 2 if the '360-day' calendar (i.e. 12 30-day months) is used
-        #      for T1 and T2.
+        # IA: meaning interval in hours; IC: calendar indicator
+        i_a, i_c = str(lbtim)[0], str(lbtim)[2]

         for cube in cubes:
             # select by original meaning interval (IA)
             select_meaning_interval = {1: ('1 hour', ), 6: ('6 hour', )}
             if select_meaning_interval[int(
-                    IA)] != cube.cell_methods[0].intervals:
+                    i_a)] != cube.cell_methods[0].intervals:
                 continue
-            # select by IB
-            # Iris cubes have no T1 and T2 attributes, or equivalent
-            # Unclear how to select Iris cubes on IB
-            pass  # pylint: disable=unnecessary-pass
-
-            # select calendar (IC)
+            # select calendar (IC)
             # see cf_units.CALENDARS for possible cube calendars
-            select_calendar = {
-                1: 'gregorian',  # TODO does iris distinguish between
-                2: '360_day'
-            }  # proleptic_greorian and gregorian?
-            if select_calendar[int(IC)] == cube.coord('time').units.calendar:
+            select_calendar = {1: 'gregorian', 2: '360_day'}
+            if select_calendar[int(i_c)] == cube.coord('time').units.calendar:
                 selected_cubes.append(cube)
     return selected_cubes
@@ -274,10 +308,8 @@ def select_certain_months(cubes, lbmon):
     """
     # add 'month number' coordinate
     add_time_coord = {
-        'monthly': lambda cube:
-        coord_cat.add_month_number(cube,
-                                   'time',
-                                   name='month_number'),
+        'monthly': lambda cube: coord_cat.add_month_number(
+            cube, 'time', name='month_number'),
         'seasonal': lambda cube:
         coord_cat.add_season(cube, 'time', name='clim_season'),
@@ -297,7 +329,7 @@

 def get_time_offset(time_unit):
-    """Return a datetime object equivalent to tunit"""
+    """Return a datetime object equivalent to tunit."""
    # tunit e.g. 'day since 1950-01-01 00:00:00.0000000 UTC'
     cfunit = cf_units.Unit(time_unit, calendar=cf_units.CALENDAR_STANDARD)
     time_offset = cfunit.num2date(0)
@@ -305,7 +337,7 @@

 def datetime_to_int_days(date_obj, tunit):
-    """Return time point converted from cube datetime cell"""
+    """Return time point converted from cube datetime cell."""
     if float(iris.__version__.split('.')[0]) >= 2.0:
         time_offset = get_time_offset(tunit)
         real_date = dd(date_obj.year, date_obj.month, date_obj.day, 0, 0, 0)
@@ -316,7 +348,7 @@

 def extract_time_range(cubes, start, end):
-    """Extract time ranged data"""
+    """Extract time ranged data."""
     time_ranged_cubes = []
     iris.util.unify_time_units(cubes)
     time_unit = cubes[0].coord('time').units.name
@@ -326,8 +358,8 @@
     t_2 = cf_units.date2num(dd_end, time_unit, cf_units.CALENDAR_STANDARD)
     for cube in cubes:
         time_constraint = iris.Constraint(
-            time=lambda t:
-            (t_1 <= datetime_to_int_days(t.point, time_unit) <= t_2))
+            time=lambda t: (t_1 <= datetime_to_int_days(t.point,
+                                                        time_unit) <= t_2))
         cube_slice = cube.extract(time_constraint)
         time_ranged_cubes.append(cube_slice)
     return time_ranged_cubes
@@ -343,6 +375,8 @@

 def load_run_ss(run_object, from_dt=None, to_dt=None):
     """
+    Note: although deprecated, this loader is still used by the ported
+    AutoAssess areas.
+
     DEPRECATED: Do not use for new Assessment Areas.
Instead, read the CubeList `cubeList.nc` in the directory with the retrieved data. diff --git a/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py b/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py new file mode 100644 index 0000000000..1bdd9f6428 --- /dev/null +++ b/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py @@ -0,0 +1,75 @@ +"""Standard MO metrics plotter.""" +import os +import logging +import sys + +import iris +import yaml +from esmvaltool.diag_scripts.autoassess._plot_mo_metrics import ( + read_model_metrics, read_obs_metrics, plot_nac) + +logger = logging.getLogger(__name__) + +# Diagnostic that takes two datasets (control_model and exp_model +# and observational data (ERA-Interim and MERRA); +# plotting OBS is not yet supported; it will be, hold your horses + + +def get_cfg(): + """Read diagnostic script configuration from settings.yml.""" + settings_file = sys.argv[1] + with open(settings_file) as file: + cfg = yaml.safe_load(file) + return cfg + + +def main(): + """Call the plotting script via command line.""" + cfg = get_cfg() + logger.setLevel(cfg['log_level'].upper()) + + control_model = cfg['control_model'] + exp_model = cfg['exp_model'] + + vsloc = exp_model + '_vs_' + control_model + file_exp = os.path.join( + os.path.dirname(os.path.dirname(cfg['plot_dir'])), cfg['diag_tag'], + cfg['diag_name'], vsloc, cfg['area'], exp_model, 'metrics.csv') + file_ref = os.path.join( + os.path.dirname(os.path.dirname(cfg['plot_dir'])), cfg['diag_tag'], + cfg['diag_name'], vsloc, cfg['area'], control_model, 'metrics.csv') + + plot_title = ' '.join([cfg['area'], control_model, 'vs', exp_model]) + # Read metrics files + # metrics = read_order_metrics(args.file_ord) + ref = read_model_metrics(file_ref) + tests = [read_model_metrics(file_exp)] + # var = read_model_metrics(args.file_var) + obs, acc = None, None + if 'additional_metrics' in cfg: + # choose the obs file to get the metrics from + file_obs = os.path.join( + os.path.dirname(os.path.dirname(cfg['plot_dir'])), cfg['diag_tag'], + cfg['diag_name'], vsloc, cfg['area'], cfg['error_metric'], + 'metrics.csv') + (obs, acc) = read_obs_metrics(file_obs) + + # Produce plot + plot_nac( + control_model, [exp_model], + ref, + tests, + metrics=None, + var=None, + obs=obs, + acc=acc, + extend_y=False, + title=plot_title, + ofile=os.path.join(cfg['plot_dir'], cfg['plot_name'] + '.png')) + + +if __name__ == '__main__': + iris.FUTURE.netcdf_promote = True + logging.basicConfig(format="%(asctime)s [%(process)d] %(levelname)-8s " + "%(name)s,%(lineno)s\t%(message)s") + main() diff --git a/esmvaltool/diag_scripts/autoassess/plot_autoassess_stratosphere.py b/esmvaltool/diag_scripts/autoassess/plot_autoassess_stratosphere.py deleted file mode 100644 index 9cafc82c6f..0000000000 --- a/esmvaltool/diag_scripts/autoassess/plot_autoassess_stratosphere.py +++ /dev/null @@ -1,62 +0,0 @@ -"""autoassess stratosphere diagnostic.""" -import os -import logging -import inspect -import sys - -import iris -import yaml - -logger = logging.getLogger(__name__) - -# Diagnostic that takes two datasets (control_model and exp_model -# and observational data (ERA-Interim and MERRA) - - -def get_cfg(): - """Read diagnostic script configuration from settings.yml.""" - settings_file = sys.argv[1] - with open(settings_file) as file: - cfg = yaml.safe_load(file) - return cfg - - -def main(): - - cfg = get_cfg() - logger.setLevel(cfg['log_level'].upper()) - - control_model = cfg['control_model'] - exp_model = cfg['exp_model'] - - vsloc = 
exp_model + '_vs_' + control_model - file_exp = os.path.join( - os.path.dirname(os.path.dirname(cfg['plot_dir'])), cfg['diag_tag'], - cfg['diag_name'], vsloc, 'stratosphere', exp_model, 'metrics.csv') - file_ref = os.path.join( - os.path.dirname(os.path.dirname(cfg['plot_dir'])), cfg['diag_tag'], - cfg['diag_name'], vsloc, 'stratosphere', control_model, 'metrics.csv') - - cwd = os.path.dirname( - os.path.abspath(inspect.getfile(inspect.currentframe()))) - plotter_script = os.path.join(cwd, 'autoassess_source/plot_norm_ac.py') - os.system('chmod +x ' + plotter_script) - command_call = plotter_script - args = {} - args['--exp'] = exp_model - args['--ref'] = control_model - args['--plot'] = os.path.join(cfg['plot_dir'], cfg['plot_name'] + '.png') - args['--title'] = cfg['plot_title'] - args['--file-exp'] = file_exp - args['--file-ref'] = file_ref - args_collection = [key + ' ' + args[key] for key in args.keys()] - sys_call = command_call + ' ' + ' '.join(args_collection) - logger.info(sys_call) - os.system(sys_call) - - -if __name__ == '__main__': - iris.FUTURE.netcdf_promote = True - logging.basicConfig(format="%(asctime)s [%(process)d] %(levelname)-8s " - "%(name)s,%(lineno)s\t%(message)s") - main() diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/DATA_SPECIFICATION.txt b/esmvaltool/diag_scripts/autoassess/stratosphere/DATA_SPECIFICATION.txt similarity index 100% rename from esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/DATA_SPECIFICATION.txt rename to esmvaltool/diag_scripts/autoassess/stratosphere/DATA_SPECIFICATION.txt diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/README.txt b/esmvaltool/diag_scripts/autoassess/stratosphere/README.txt similarity index 100% rename from esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/README.txt rename to esmvaltool/diag_scripts/autoassess/stratosphere/README.txt diff --git a/esmvaltool/diag_scripts/autoassess/stratosphere/__init__.py b/esmvaltool/diag_scripts/autoassess/stratosphere/__init__.py new file mode 100644 index 0000000000..942df726bf --- /dev/null +++ b/esmvaltool/diag_scripts/autoassess/stratosphere/__init__.py @@ -0,0 +1,10 @@ +"""Import stratosphere module and its functions.""" +from . import age_of_air +from . 
import strat_metrics_1
+
+metrics_functions = [strat_metrics_1.mainfunc, age_of_air.age_of_air]
+
+multi_functions = [strat_metrics_1.multi_qbo_plot,
+                   strat_metrics_1.multi_teq_plot,
+                   strat_metrics_1.multi_t100_vs_q70_plot,
+                   age_of_air.multi_age_plot]
diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/age_of_air.py b/esmvaltool/diag_scripts/autoassess/stratosphere/age_of_air.py
similarity index 81%
rename from esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/age_of_air.py
rename to esmvaltool/diag_scripts/autoassess/stratosphere/age_of_air.py
index 6d4be3e9f0..861a99829f 100644
--- a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/age_of_air.py
+++ b/esmvaltool/diag_scripts/autoassess/stratosphere/age_of_air.py
@@ -1,23 +1,26 @@
-'''
-Stratospheric age-of-air assessment code
-'''
+"""Stratospheric age-of-air assessment code."""
+import datetime
+import logging
 import os
-import matplotlib as mpl
-mpl.use('Agg')  # noqa
-import matplotlib.pyplot as plt
+import warnings
+
 import iris
 import iris.analysis as iai
-from .loaddata import load_run_ss
-from .strat_metrics_1 import weight_lat_ave
-import warnings
+import matplotlib.pyplot as plt
 import numpy as np
-import datetime
+
+from esmvaltool.diag_scripts.autoassess.loaddata import load_run_ss
+
+from .strat_metrics_1 import weight_lat_ave
+
+logger = logging.getLogger(__name__)
+
 # Constant for number of seconds in a 360 day calendar year
-# TODO Wrong if gregorian calendar!
+# Wrong if gregorian calendar!
 RSECS_PER_360DAY_YEAR = float(60 * 60 * 24 * 360)
-# TODO what is the source of the reference data???
+# What is the source of the reference data???
 # Diag 1
 # SF6 based data
 AGE_YRS = [
@@ -64,7 +67,7 @@


 def calculate_analysis_years(run):
-
+    """Calculate the analysis period."""
     # 1) Discard first 10 years of run.
     analysis_start_year = int(run['start']) + 10
     analysis_end_year = int(run['start']) + int(run['nyear'])
@@ -87,10 +90,7 @@ def calculate_analysis_years(run):


 def age_of_air(run):
-    '''
-    Routine to calculate the age of air metrics
-    '''
-
+    """Calculate the age of air metrics."""
     # Create metrics dictionary with MDI in case age of air
     # diagnostics not available
     metrics = {
@@ -101,7 +101,6 @@
     try:
         # Set up to only run for 5 year period
         analysis_start_dt, analysis_end_dt = calculate_analysis_years(run)
-        print(analysis_start_dt, analysis_end_dt)
         constraint = dict(
             from_dt=analysis_start_dt, to_dt=analysis_end_dt, lbproc=128)
         # Calculate age of air metrics if appropriate diagnostic available
@@ -110,12 +109,11 @@
         agecube = load_run_ss(run, 'monthly', 'age_of_stratospheric_air',
                               **constraint)  # m01s34i150
     except iris.exceptions.ConstraintMismatchError:
-        print('Age of air fields absent.  Skipping this diagnostic.')
+        logger.warning('Age of air fields absent. Skipping this diagnostic.')
    except ValueError:
-        print("Run length is less than 12 years: Can't assess age of air")
+        logger.warning("Run length < 12 years: Can't assess age of air")
     else:
         # Create time/zonal means of age data
-        # TODO: Memory issue here?
         agecube = agecube.collapsed(['longitude', 'time'], iris.analysis.MEAN)
         # Convert units of data from seconds to years
         agecube.data /= RSECS_PER_360DAY_YEAR
@@ -159,13 +157,13 @@
     return metrics


-def multi_age_plot(runs):
+def multi_age_plot(run):
     """
+    Plot results.
+
     This function plots the results of the age_of_air function
     for each run against observations.
""" - # TODO avoid running age_of_air twice - # Run age_of_air for each run. # Age_of_air returns metrics and writes results into an *.nc in the current # working directory. @@ -176,16 +174,8 @@ def multi_age_plot(runs): # return metric values, multi_functions are supposed to # only produce plots (see __init__.py). - # rerun age_of_air for each run - for run in runs: - _ = age_of_air(run) - ###################################### - # Split up control and experiments - run_cntl = runs[0] - run_expts = runs[1:] - # Set up constraints to deal with loading data trop_cons = iris.Constraint( cube_func=lambda c: c.var_name == 'tropical_age_of_air') @@ -195,17 +185,15 @@ def multi_age_plot(runs): # Set up generic input file name infile = '{0}_age_of_air_{1}.nc' - cntlfile = infile.format(run_cntl['runid'], run_cntl.period) + # Create control filename + cntlfile = infile.format(run['suite_id1'], run['period']) - # Create experiment filenames - exptfiles = dict() - for run_expt in run_expts: - exptfiles[run_expt.id] = infile.format(run_expt['runid'], - run_expt.period) + # Create experiment filename + exptfile = infile.format(run['suite_id2'], run['period']) # If no control data then stop ... if not os.path.exists(cntlfile): - print('Age of air for control absent. skipping ...') + logger.warning('Age of air for control absent. skipping ...') return # Create tropics plot @@ -229,14 +217,12 @@ def multi_age_plot(runs): # Plot control diag = iris.load_cube(cntlfile, trop_cons) levs = diag.coord('level_height').points - plt.plot(diag.data, levs, label=run_cntl.id) - # Plot experiments - for run_expt in run_expts: - exptfile = exptfiles[run_expt.id] - if os.path.exists(exptfile): - diag = iris.load_cube(exptfile, trop_cons) - levs = diag.coord('level_height').points - plt.plot(diag.data, levs, label=run_expt.id) + plt.plot(diag.data, levs, label=run['suite_id1']) + # Plot experiment + if os.path.exists(exptfile): + diag = iris.load_cube(exptfile, trop_cons) + levs = diag.coord('level_height').points + plt.plot(diag.data, levs, label=run['suite_id2']) ax1.set_title('Tropical mean age profile (10S-10N)') ax1.set_xlabel('Mean age (years)') ax1.set_ylabel('Height (km)') @@ -266,14 +252,12 @@ def multi_age_plot(runs): # Plot control diag = iris.load_cube(cntlfile, midl_cons) levs = diag.coord('level_height').points - plt.plot(diag.data, levs, label=run_cntl.id) - # Plot experiments - for run_expt in run_expts: - exptfile = exptfiles[run_expt.id] - if os.path.exists(exptfile): - diag = iris.load_cube(exptfile, midl_cons) - levs = diag.coord('level_height').points - plt.plot(diag.data, levs, label=run_expt.id) + plt.plot(diag.data, levs, label=run['suite_id1']) + # Plot experiment + if os.path.exists(exptfile): + diag = iris.load_cube(exptfile, midl_cons) + levs = diag.coord('level_height').points + plt.plot(diag.data, levs, label=run['suite_id2']) ax1.set_title('Midlatitude mean age profile (35N-45N)') ax1.set_xlabel('Mean age (years)') ax1.set_ylabel('Height (km)') diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/plotting.py b/esmvaltool/diag_scripts/autoassess/stratosphere/plotting.py similarity index 94% rename from esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/plotting.py rename to esmvaltool/diag_scripts/autoassess/stratosphere/plotting.py index 0dd1248621..be2c2ff99f 100644 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/plotting.py +++ b/esmvaltool/diag_scripts/autoassess/stratosphere/plotting.py @@ -1,6 +1,8 @@ -''' -Module for 
functions useful for image manipulation and plotting -''' +""" +Plot tool for stratosphere. + +Module for functions useful for image manipulation and plotting. +""" import matplotlib.colors as mcol import numpy as np @@ -25,7 +27,6 @@ def segment2list(cmap, numcol, reverse=False): behaviour for linear colour scales, and actually get required behaviour for non-linear colour scales. """ - cdict = cmap._segmentdata.copy() # numcol colors colors_i = np.linspace(0, 1., numcol + 1) diff --git a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/strat_metrics_1.py b/esmvaltool/diag_scripts/autoassess/stratosphere/strat_metrics_1.py similarity index 81% rename from esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/strat_metrics_1.py rename to esmvaltool/diag_scripts/autoassess/stratosphere/strat_metrics_1.py index 5898976302..500e0747a6 100644 --- a/esmvaltool/diag_scripts/autoassess/autoassess_source/stratosphere/strat_metrics_1.py +++ b/esmvaltool/diag_scripts/autoassess/stratosphere/strat_metrics_1.py @@ -1,43 +1,41 @@ -''' -Stratospheric assessment code; ESMValTool-autoassess version -''' +"""Stratospheric assessment code; ESMValTool-autoassess version.""" +import logging import os -import sys -import matplotlib as mpl +import iris +import iris.analysis.cartography as iac +import iris.coord_categorisation as icc +import iris.plot as iplt import matplotlib.cm as mpl_cm import matplotlib.colors as mcol import matplotlib.dates as mdates import matplotlib.pyplot as plt -from matplotlib.patches import Rectangle import numpy as np - from cartopy.mpl.gridliner import LATITUDE_FORMATTER -import iris -import iris.analysis.cartography as iac -import iris.coord_categorisation as icc -import iris.plot as iplt -from .loaddata import load_run_ss +from matplotlib.patches import Rectangle + +from esmvaltool.diag_scripts.autoassess.loaddata import load_run_ss + from .plotting import segment2list -MARKERS = 'ops*dh^v<>+xDH.,' +logger = logging.getLogger(__name__) # Candidates for general utility functions def weight_lat_ave(cube): - ''' - Routine to calculate weighted latitudinal average - ''' + """Routine to calculate weighted latitudinal average.""" grid_areas = iac.area_weights(cube) return cube.collapsed('latitude', iris.analysis.MEAN, weights=grid_areas) def cmap_and_norm(cmap, levels, reverse=False): - ''' + """ + Generate interpolated colour map. + Routine to generate interpolated colourmap and normalisation from given colourmap and level set. - ''' + """ # cmap must be a registered colourmap tcmap = mpl_cm.get_cmap(cmap) colourmap = segment2list(tcmap, levels.size, reverse=reverse) @@ -46,16 +44,18 @@ def cmap_and_norm(cmap, levels, reverse=False): def plot_zmean(cube, levels, title, log=False, ax1=None): - ''' + """ + Plot zonal means. + Routine to plot zonal mean fields as latitude-pressure contours with given - contour levels - Option to plot against log(pressure) - ''' + contour levels. + Option to plot against log(pressure). + """ (colormap, normalisation) = cmap_and_norm('brewer_RdBu_11', levels) if ax1 is None: ax1 = plt.gca() ax1.set_title(title) - cf1 = iplt.contourf(cube, levels=levels, cmap=colormap, norm=normalisation) + iplt.contourf(cube, levels=levels, cmap=colormap, norm=normalisation) lwid = 1. 
* np.ones_like(levels) cl1 = iplt.contour(cube, colors='k', linewidths=lwid, levels=levels) plt.clabel(cl1, cl1.levels, inline=1, fontsize=6, fmt='%1.0f') @@ -70,16 +70,18 @@ def plot_zmean(cube, levels, title, log=False, ax1=None): def plot_timehgt(cube, levels, title, log=False, ax1=None): - ''' + """ + Plot fields as time-pressure. + Routine to plot fields as time-pressure contours with given - contour levels - Option to plot against log(pressure) - ''' + contour levels. + Option to plot against log(pressure). + """ (colormap, normalisation) = cmap_and_norm('brewer_RdBu_11', levels) if ax1 is None: ax1 = plt.gca() ax1.set_title(title) - cf1 = iplt.contourf(cube, levels=levels, cmap=colormap, norm=normalisation) + iplt.contourf(cube, levels=levels, cmap=colormap, norm=normalisation) lwid = 1. * np.ones_like(levels) cl1 = iplt.contour(cube, colors='k', linewidths=lwid, levels=levels) plt.clabel(cl1, cl1.levels, inline=1, fontsize=6, fmt='%1.0f') @@ -100,9 +102,7 @@ def plot_timehgt(cube, levels, title, log=False, ax1=None): # Routines specific to stratosphere assessment def plot_uwind(cube, month, filename): - ''' - Routine to plot zonal mean zonal wind on log pressure scale - ''' + """Routine to plot zonal mean zonal wind on log pressure scale.""" levels = np.arange(-120, 121, 10) title = 'Zonal mean zonal wind ({})'.format(month) fig = plt.figure() @@ -112,9 +112,7 @@ def plot_uwind(cube, month, filename): def plot_temp(cube, season, filename): - ''' - Routine to plot zonal mean temperature on log pressure scale - ''' + """Routine to plot zonal mean temperature on log pressure scale.""" levels = np.arange(160, 321, 10) title = 'Temperature ({})'.format(season) fig = plt.figure() @@ -124,9 +122,7 @@ def plot_temp(cube, season, filename): def plot_qbo(cube, filename): - ''' - Routine to create time-height plot of 5S-5N mean zonal mean U - ''' + """Routine to create time-height plot of 5S-5N mean zonal mean U.""" levels = np.arange(-80, 81, 10) title = 'QBO' fig = plt.figure(figsize=(12, 6)) @@ -141,8 +137,8 @@ def plot_qbo(cube, filename): def calc_qbo_index(qbo): - ''' - Routine to calculate QBO indices + """ + Routine to calculate QBO indices. The segment of code you include scans the timeseries of U(30hPa) and looks for the times where this crosses the zero line. Essentially U(30hPa) @@ -150,7 +146,7 @@ def calc_qbo_index(qbo): defined as the length of time between where U becomes positive and then negative and then becomes positive again (or negative/positive/negative). Also, periods less than 12 months are discounted. - ''' + """ ufin = qbo.data indiciesdown, indiciesup = find_zero_crossings(ufin) @@ -158,7 +154,7 @@ def calc_qbo_index(qbo): counterdown = len(indiciesdown) # Did we start on an upwards or downwards cycle? - if (indiciesdown[0] < indiciesup[0]): + if indiciesdown[0] < indiciesup[0]: (kup, kdown) = (0, 1) else: (kup, kdown) = (1, 0) @@ -176,10 +172,10 @@ def calc_qbo_index(qbo): totvals = 0 # valsup limit was initially hardcoded to +10.0 for i in range(periodsmax): - if (valsup[i] > 0.): + if valsup[i] > 0.: totvals = totvals + valsup[i] counter = counter + 1 - if (counter == 0): + if counter == 0: ampl_east = 0. else: totvals = totvals / counter @@ -189,10 +185,10 @@ def calc_qbo_index(qbo): totvals = 0 for i in range(periodsmin): # valdown limit was initially hardcoded to -20.0 - if (valsdown[i] < 0.): + if valsdown[i] < 0.: totvals = totvals + valsdown[i] counter = counter + 1 - if (counter == 0): + if counter == 0: ampl_west = 0. 
    else:
        totvals = totvals / counter
@@ -215,6 +211,8 @@

 def flatten_list(list_):
     """
+    Flatten list.
+
     Turn list of lists into a list of all elements.

     [[1], [2, 3]] -> [1, 2, 3]
     """
@@ -223,7 +221,7 @@ def flatten_list(list_):

 def find_zero_crossings(array):
     """
-    Finds zero crossings in 1D iterable.
+    Find zero crossings in 1D iterable.

     Returns two lists with indices, last_pos and last_neg.
     If a zero crossing includes zero, zero is used as last positive
@@ -256,10 +254,12 @@ def find_zero_crossings(array):


 def pnj_strength(cube, winter=True):
-    '''
+    """
+    Calculate PNJ.
+
     Calculate PNJ and ENJ strength as max/(-min) of zonal mean U wind
     for nh/sh in winter and sh/nh in summer respectively.
-    '''
+    """
     # Extract regions of interest
     notrop = iris.Constraint(air_pressure=lambda p: p < 8000.0)
     nh_cons = iris.Constraint(latitude=lambda l: l > 0)
@@ -279,10 +279,12 @@


 def pnj_metrics(run, ucube, metrics):
-    '''
+    """
+    Calculate PNJ strength.
+
     Routine to calculate PNJ strength metrics from zonal mean U
     Also produce diagnostic plots of zonal mean U
-    '''
+    """
     # TODO side effect: changes metrics without returning

     # Extract U for January and average over years
@@ -309,9 +311,7 @@


 def qbo_metrics(run, ucube, metrics):
-    '''
-    Routine to calculate QBO metrics from zonal mean U
-    '''
+    """Routine to calculate QBO metrics from zonal mean U."""
     # TODO side effect: changes metrics without returning
     # Extract equatorial zonal mean U
     # tropics = iris.Constraint(latitude=lambda lat: -5 <= lat <= 5)
@@ -324,7 +324,7 @@
     # write results to current working directory
     outfile = '{0}_qbo30_{1}.nc'
     with iris.FUTURE.context(netcdf_no_unlimited=True):
-        iris.save(qbo30, outfile.format(run['runid'], run.period))
+        iris.save(qbo30, outfile.format(run['runid'], run['period']))

     # Calculate QBO metrics
     (period, amp_west, amp_east) = calc_qbo_index(qbo30)
@@ -339,11 +339,13 @@


 def tpole_metrics(run, tcube, metrics):
-    '''
+    """
+    Compute 50hPa polar temp.
+
     Routine to calculate polar 50hPa temperature metrics from zonal mean
-    temperature
-    Also produce diagnostic plots of zonal mean temperature
-    '''
+    temperature.
+    Also produce diagnostic plots of zonal mean temperature.
+    """
     # TODO side effect: changes metrics without returning
     # Calculate and extract seasonal mean temperature
     t_seas_mean = tcube.aggregated_by('clim_season', iris.analysis.MEAN)
@@ -376,10 +378,7 @@


 def mean_and_strength(cube):
-    '''
-    Calculate mean and strength of equatorial temperature seasonal
-    cycle
-    '''
+    """Calculate mean/strength of equatorial temperature seasonal cycle."""
     # Calculate mean, max and min values of seasonal timeseries
     tmean = cube.collapsed('time', iris.analysis.MEAN)
     tmax = cube.collapsed('time', iris.analysis.MAX)
@@ -390,26 +389,20 @@


 def t_mean(cube):
-    '''
-    Calculate mean equatorial 100hPa temperature
-    '''
+    """Calculate mean equatorial 100hPa temperature."""
     tmean = cube.collapsed('time', iris.analysis.MEAN)
     return tmean.data


 def q_mean(cube):
-    '''
-    Calculate mean tropical 70hPa water vapour
-    '''
+    """Calculate mean tropical 70hPa water vapour."""
     qmean = cube.collapsed('time', iris.analysis.MEAN)
     # TODO magic numbers
-    return ((1000000. * 29. / 18.) * qmean.data)  # ppmv
+    return (1000000. * 29. / 18.)
* qmean.data # ppmv def teq_metrics(run, tcube, metrics): - ''' - Routine to calculate equatorial 100hPa temperature metrics - ''' + """Routine to calculate equatorial 100hPa temperature metrics.""" # Extract equatorial temperature at 100hPa equator = iris.Constraint(latitude=lambda lat: -2 <= lat <= 2) p100 = iris.Constraint(air_pressure=10000.) @@ -422,7 +415,7 @@ def teq_metrics(run, tcube, metrics): # write results to current working directory outfile = '{0}_teq100_{1}.nc' with iris.FUTURE.context(netcdf_no_unlimited=True): - iris.save(t_months, outfile.format(run['runid'], run.period)) + iris.save(t_months, outfile.format(run['runid'], run['period'])) # Calculate metrics (tmean, tstrength) = mean_and_strength(t_months) @@ -433,9 +426,7 @@ def teq_metrics(run, tcube, metrics): def t_metrics(run, tcube, metrics): - ''' - Routine to calculate 10S-10N 100hPa temperature metrics - ''' + """Routine to calculate 10S-10N 100hPa temperature metrics.""" # TODO side effect: changes metrics without returning # Extract 10S-10N temperature at 100hPa equator = iris.Constraint(latitude=lambda lat: -10 <= lat <= 10) @@ -449,7 +440,7 @@ def t_metrics(run, tcube, metrics): # write results to current working directory outfile = '{0}_t100_{1}.nc' with iris.FUTURE.context(netcdf_no_unlimited=True): - iris.save(t_months, outfile.format(run['runid'], run.period)) + iris.save(t_months, outfile.format(run['runid'], run['period'])) # Calculate metrics (tmean, tstrength) = mean_and_strength(t_months) @@ -460,9 +451,7 @@ def t_metrics(run, tcube, metrics): def q_metrics(run, qcube, metrics): - ''' - Routine to calculate 10S-10N 70hPa water vapour metrics - ''' + """Routine to calculate 10S-10N 70hPa water vapour metrics.""" # TODO side effect: changes metrics without returning # Extract 10S-10N humidity at 100hPa tropics = iris.Constraint(latitude=lambda lat: -10 <= lat <= 10) @@ -476,7 +465,7 @@ def q_metrics(run, qcube, metrics): # write results to current working directory outfile = '{0}_q70_{1}.nc' with iris.FUTURE.context(netcdf_no_unlimited=True): - iris.save(q_months, outfile.format(run['runid'], run.period)) + iris.save(q_months, outfile.format(run['runid'], run['period'])) # Calculate metrics qmean = q_mean(q_months) @@ -486,11 +475,13 @@ def q_metrics(run, qcube, metrics): def summary_metric(metrics): - ''' + """ + Compute weighted avg of metrics. + This is a weighted average of all 13 metrics, giving equal weights to the averages of extratropical U, extratropical T, QBO, and equatorial T metrics. - ''' + """ # TODO side effect: changes metrics without returning pnj_metric = metrics['Polar night jet: northern hem (January)'] \ + metrics['Polar night jet: southern hem (July)'] \ @@ -516,9 +507,7 @@ def summary_metric(metrics): def mainfunc(run): - ''' - Main function in stratospheric assessment code - ''' + """Main function in stratospheric assessment code.""" metrics = dict() # Set up to only run for 10 year period (eventually) @@ -605,10 +594,8 @@ def mainfunc(run): return metrics -def multi_qbo_plot(runs): - ''' - Plot 30hPa QBO (5S to 5N) timeseries comparing experiments on one plot - ''' +def multi_qbo_plot(run): + """Plot 30hPa QBO (5S to 5N) timeseries on one plot.""" # TODO avoid running mainfunc # Run mainfunc for each run. @@ -621,30 +608,20 @@ def multi_qbo_plot(runs): # return metric values, multi_functions are supposed to # only produce plots (see __init__.py). 
- # rerun mainfunc for each run - for run in runs: - _ = mainfunc(run) - - # Split up control and experiments - run_cntl = runs[0] - run_expts = runs[1:] - # QBO at 30hPa timeseries plot # Set up generic input file name infile = '{0}_qbo30_{1}.nc' # Create control filename - cntlfile = infile.format(run_cntl['runid'], run_cntl.period) + cntlfile = infile.format(run['suite_id1'], run['period']) - # Create experiment filenames - exptfiles = dict() - for run_expt in run_expts: - exptfiles[run_expt.id] = infile.format(run['runid'], run_expt.period) + # Create experiment filename + exptfile = infile.format(run['suite_id2'], run['period']) # If no control data then stop ... if not os.path.exists(cntlfile): - print('30hPa QBO for control absent. skipping ...') + logger.warning('QBO30 Control absent. skipping ...') return # Create plot @@ -654,13 +631,11 @@ def multi_qbo_plot(runs): qbo30_cntl = iris.load_cube(cntlfile) ivlist = iris.__version__.split('.') if float('.'.join([ivlist[0], ivlist[1]])) >= 2.1: - iplt.plot(qbo30_cntl, label=run_cntl.id) + iplt.plot(qbo30_cntl, label=run['suite_id1']) # Plot experiments - for run_expt in run_expts: - exptfile = exptfiles[run_expt.id] - if os.path.exists(exptfile): - qbo30_expt = iris.load_cube(exptfile) - iplt.plot(qbo30_expt, label=run_expt.id) + if os.path.exists(exptfile): + qbo30_expt = iris.load_cube(exptfile) + iplt.plot(qbo30_expt, label=run['suite_id2']) ax1.set_title('QBO at 30hPa') ax1.set_xlabel('Time', fontsize='small') ax1.set_ylabel('U (m/s)', fontsize='small') @@ -669,11 +644,13 @@ def multi_qbo_plot(runs): plt.close() -def multi_teq_plot(runs): - ''' +def multi_teq_plot(run): + """ + Plot temperature. + Plot 100hPa equatorial temperature seasonal cycle comparing - experiments on one plot - ''' + experiments on one plot. + """ # TODO avoid running mainfunc # Run mainfunc for each run. @@ -686,29 +663,18 @@ def multi_teq_plot(runs): # return metric values, multi_functions are supposed to # only produce plots (see __init__.py). - # rerun mainfunc for each run - for run in runs: - _ = mainfunc(run) - - # Split up control and experiments - run_cntl = runs[0] - run_expts = runs[1:] - # Set up generic input file name infile = '{0}_teq100_{1}.nc' # Create control filename - cntlfile = infile.format(run_cntl['runid'], run_cntl.period) + cntlfile = infile.format(run['suite_id1'], run['period']) - # Create experiment filenames - exptfiles = dict() - for run_expt in run_expts: - exptfiles[run_expt.id] = infile.format(run_expt['runid'], - run_expt.period) + # Create experiment filename + exptfile = infile.format(run['suite_id2'], run['period']) # If no control data then stop ... if not os.path.exists(cntlfile): - print('100hPa Teq for control absent. skipping ...') + logger.warning('100hPa Teq for control absent. 
skipping ...') return # Set up generic plot label @@ -721,16 +687,14 @@ def multi_teq_plot(runs): # Plot control tmon = iris.load_cube(cntlfile) (tmean, tstrg) = mean_and_strength(tmon) - label = plotlabel.format(run_cntl.id, float(tmean), float(tstrg)) + label = plotlabel.format(run['suite_id1'], float(tmean), float(tstrg)) plt.plot(times, tmon.data, linewidth=2, label=label) # Plot experiments - for run_expt in run_expts: - exptfile = exptfiles[run_expt.id] - if os.path.exists(exptfile): - tmon = iris.load_cube(exptfile) - (tmean, tstrg) = mean_and_strength(tmon) - label = plotlabel.format(run_expt.id, float(tmean), float(tstrg)) - plt.plot(times, tmon.data, linewidth=2, label=label) + if os.path.exists(exptfile): + tmon = iris.load_cube(exptfile) + (tmean, tstrg) = mean_and_strength(tmon) + label = plotlabel.format(run['suite_id2'], float(tmean), float(tstrg)) + plt.plot(times, tmon.data, linewidth=2, label=label) ax1.set_title('Equatorial 100hPa temperature, Multi-annual monthly means') ax1.set_xlabel('Month', fontsize='small') ax1.set_xlim(0, 11) @@ -743,11 +707,9 @@ def multi_teq_plot(runs): def calc_merra(run): + """Use MERRA as obs to compare.""" # Load data - # VPREDOI::FIXME - # this is a hack - merrafile = os.path.join(run['clim_root'], - 'ERA-Interim_tropical_area_avg.nc') + merrafile = os.path.join(run['clim_root'], 'ERA-Interim_cubeList.nc') (t, q) = iris.load_cubes(merrafile, ['air_temperature', 'specific_humidity']) # Strip out required times @@ -763,16 +725,14 @@ def calc_merra(run): q = q.collapsed('time', iris.analysis.MEAN) # Create return values tmerra = t.data # K - # TODO magic numbers qmerra = ((1000000. * 29. / 18.) * q.data) # ppmv return tmerra, qmerra def calc_erai(run): + """Use ERA-Interim as obs to compare.""" # Load data - eraidir = run['clim_root'] - eraifile = os.path.join(run['clim_root'], - 'ERA-Interim_tropical_area_avg.nc') + eraifile = os.path.join(run['clim_root'], 'ERA-Interim_cubeList.nc') (t, q) = iris.load_cubes(eraifile, ['air_temperature', 'specific_humidity']) # Strip out required times @@ -788,15 +748,12 @@ def calc_erai(run): q = q.collapsed('time', iris.analysis.MEAN) # Create return values terai = t.data # K - # TODO magic numbers qerai = ((1000000. * 29. / 18.) * q.data) # ppmv return terai, qerai -def multi_t100_vs_q70_plot(runs): - ''' - Plot mean 100hPa temperature against mean 70hPa humidity - ''' +def multi_t100_vs_q70_plot(run): + """Plot mean 100hPa temperature against mean 70hPa humidity.""" # TODO avoid running mainfunc # Run mainfunc for each run. @@ -809,46 +766,33 @@ def multi_t100_vs_q70_plot(runs): # return metric values, multi_functions are supposed to # only produce plots (see __init__.py). 
- # rerun mainfunc for each run - for run in runs: - _ = mainfunc(run) - - # Split up control and experiments - run_cntl = runs[0] - run_expts = runs[1:] - # Set up generic input file name t_file = '{0}_t100_{1}.nc' q_file = '{0}_q70_{1}.nc' # Create control filenames - t_cntl = t_file.format(run_cntl['runid'], run_cntl.period) - q_cntl = q_file.format(run_cntl['runid'], run_cntl.period) + t_cntl = t_file.format(run['suite_id1'], run['period']) + q_cntl = q_file.format(run['suite_id1'], run['period']) # Create experiment filenames - t_expts = dict() - q_expts = dict() - for run_expt in run_expts: - t_expts[run_expt.id] = t_file.format(run_expt['runid'], - run_expt.period) - q_expts[run_expt.id] = q_file.format(run_expt['runid'], - run_expt.period) + t_expt = t_file.format(run['suite_id2'], run['period']) + q_expt = q_file.format(run['suite_id2'], run['period']) # If no control data then stop ... if not os.path.exists(t_cntl): - print('100hPa T for control absent. skipping ...') + logger.warning('100hPa T for control absent. skipping ...') return # If no control data then stop ... if not os.path.exists(q_cntl): - print('70hPa q for control absent. skipping ...') + logger.warning('70hPa q for control absent. skipping ...') return # Load MERRA data (currently set to pre-calculated values) - (t_merra, q_merra) = calc_merra(run_cntl) + (t_merra, q_merra) = calc_merra(run) # Load ERA-I data (currently set to pre-calculated values) - (t_erai, q_erai) = calc_erai(run_cntl) + (t_erai, q_erai) = calc_erai(run) # Create plot # Axes @@ -861,10 +805,10 @@ def multi_t100_vs_q70_plot(runs): merra_xmax = 4.0 merra_ymin = -1.0 merra_ymax = 3.0 - erai_xmin = merra_xmin + (t_merra - t_erai) - erai_xmax = merra_xmax + (t_merra - t_erai) - erai_ymin = merra_ymin + (q_merra - q_erai) - erai_ymax = merra_ymax + (q_merra - q_erai) + # erai_xmin = merra_xmin + (t_merra - t_erai) + # erai_xmax = merra_xmax + (t_merra - t_erai) + # erai_ymin = merra_ymin + (q_merra - q_erai) + # erai_ymax = merra_ymax + (q_merra - q_erai) fig = plt.figure() @@ -900,19 +844,16 @@ def multi_t100_vs_q70_plot(runs): tmean = t_mean(tmon) - t_merra qmon = iris.load_cube(q_cntl) qmean = q_mean(qmon) - q_merra - label = '{1} ({0})'.format(run_cntl.id, run_cntl.title) + label = run['suite_id1'] ax1.scatter(tmean, qmean, s=100, label=label, marker='^') - # Plot experiments - for i, run_expt in enumerate(run_expts): - t_expt = t_expts[run_expt.id] - q_expt = q_expts[run_expt.id] - if os.path.exists(t_expt) and os.path.exists(q_expt): - tmon = iris.load_cube(t_expt) - tmean = t_mean(tmon) - t_merra - qmon = iris.load_cube(q_expt) - qmean = q_mean(qmon) - q_merra - label = '{1} ({0})'.format(run_expt.id, run_expt.title) - ax1.scatter(tmean, qmean, s=100, label=label, marker=MARKERS[i]) + # Plot experiment + if os.path.exists(t_expt) and os.path.exists(q_expt): + tmon = iris.load_cube(t_expt) + tmean = t_mean(tmon) - t_merra + qmon = iris.load_cube(q_expt) + qmean = q_mean(qmon) - q_merra + label = run['suite_id2'] + ax1.scatter(tmean, qmean, s=100, label=label, marker='v') ax1.legend(loc='upper right', scatterpoints=1, fontsize='medium') fig.savefig('t100_vs_q70.png') diff --git a/esmvaltool/diag_scripts/climate_metrics/ecs.py b/esmvaltool/diag_scripts/climate_metrics/ecs.py index a214bd56e0..4ff2102676 100644 --- a/esmvaltool/diag_scripts/climate_metrics/ecs.py +++ b/esmvaltool/diag_scripts/climate_metrics/ecs.py @@ -1,209 +1,308 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- - - -"""Calculate ECS following Andrews et al. (2012). 
-
-###############################################################################
-climate_metrics/ecs.py
-Author: Manuel Schlund (DLR, Germany)
-CRESCENDO project
-###############################################################################
+"""Diagnostic script to calculate ECS following Andrews et al. (2012).

 Description
 -----------
- Calculate the equilibrium climate sensitivity (ECS) using the regression
- method proposed by Andrews et al. (2012).
+Calculate the effective climate sensitivity (ECS) using the regression method
+proposed by Andrews et al. (2012).

-Configuration options
----------------------
- plot_regression : Switch to plot the linear regression.
- output_name : Name of the output files.
+Author
+------
+Manuel Schlund (DLR, Germany)

-###############################################################################
+Project
+-------
+CRESCENDO

-"""
+Configuration options in recipe
+-------------------------------
+read_external_file : str, optional
+ Read ECS from external file.
+"""

 import logging
 import os
-from collections import OrderedDict
-from datetime import datetime
+from pprint import pformat

+import cf_units
 import iris
 import numpy as np
+import yaml
 from scipy import stats

-import esmvaltool.diag_scripts.shared as e
-import esmvaltool.diag_scripts.shared.names as n
+from esmvaltool.diag_scripts.shared import (
+ ProvenanceLogger, extract_variables, get_diagnostic_filename,
+ get_plot_filename, group_metadata, io, plot, run_diagnostic,
+ select_metadata, variables_available)

 logger = logging.getLogger(os.path.basename(__file__))

-
-def plot_ecs_regression(cfg, dataset_name, data, variables, regression_stats):
+EXP_4XCO2 = {
+ 'CMIP5': 'abrupt4xCO2',
+ 'CMIP6': 'abrupt-4xCO2',
+}
+
+
+def check_input_data(cfg):
+ """Check input data."""
+ if not variables_available(cfg, ['tas', 'rtnt']):
+ raise ValueError("This diagnostic needs 'tas' and 'rtnt' "
+ "variables if 'read_external_file' is not given")
+ input_data = cfg['input_data'].values()
+ project_group = group_metadata(input_data, 'project')
+ projects = list(project_group.keys())
+ if len(projects) > 1:
+ raise ValueError("This diagnostic supports only unique 'project' "
+ "attributes, got {}".format(projects))
+ project = projects[0]
+ if project not in EXP_4XCO2:
+ raise ValueError("Project '{}' not supported yet".format(project))
+ exp_group = group_metadata(input_data, 'exp')
+ exps = set(exp_group.keys())
+ if exps != {'piControl', EXP_4XCO2[project]}:
+ raise ValueError("This diagnostic needs 'piControl' and '{}' "
+ "experiments, got {}".format(EXP_4XCO2[project],
+ exps))
+
+
+def get_anomaly_data(tas_data, rtnt_data, dataset):
+ """Calculate anomaly data for both variables."""
+ project = tas_data[0]['project']
+ exp_4xco2 = EXP_4XCO2[project]
+ paths = {
+ 'tas_4x': select_metadata(tas_data, dataset=dataset, exp=exp_4xco2),
+ 'tas_pi': select_metadata(tas_data, dataset=dataset, exp='piControl'),
+ 'rtnt_4x': select_metadata(rtnt_data, dataset=dataset, exp=exp_4xco2),
+ 'rtnt_pi': select_metadata(
+ rtnt_data, dataset=dataset, exp='piControl'),
+ }
+ ancestor_files = []
+ cubes = {}
+ for (key, [path]) in paths.items():
+ ancestor_files.append(path['filename'])
+ cube = iris.load_cube(path['filename'])
+ cube = cube.aggregated_by('year', iris.analysis.MEAN)
+ cubes[key] = cube
+
+ # Subtract linear fit of piControl run from abrupt4xCO2 experiment
+ shape = None
+ for cube in cubes.values():
+ if shape is None:
+ shape = cube.shape
+ else:
+ if cube.shape != shape:
+ raise ValueError(
+ "Expected all cubes of dataset '{}' to have identical "
+ "shapes, got {} and {}".format(dataset, shape, cube.shape))
+ tas_pi_reg = stats.linregress(cubes['tas_pi'].coord('year').points,
+ cubes['tas_pi'].data)
+ rtnt_pi_reg = stats.linregress(cubes['rtnt_pi'].coord('year').points,
+ cubes['rtnt_pi'].data)
+ cubes['tas_4x'].data -= (
+ tas_pi_reg.slope * cubes['tas_pi'].coord('year').points +
+ tas_pi_reg.intercept)
+ cubes['rtnt_4x'].data -= (
+ rtnt_pi_reg.slope * cubes['rtnt_pi'].coord('year').points +
+ rtnt_pi_reg.intercept)
+ return (cubes['tas_4x'], cubes['rtnt_4x'], ancestor_files)
+
+
+def get_provenance_record(caption):
+ """Create a provenance record describing the diagnostic data and plot."""
+ record = {
+ 'caption': caption,
+ 'statistics': ['mean', 'diff'],
+ 'domains': ['global'],
+ 'authors': ['schl_ma'],
+ 'references': ['andrews12grl'],
+ 'realms': ['atmos'],
+ 'themes': ['phys'],
+ }
+ return record
+
+
+def read_external_file(cfg):
+ """Read external file to get ECS."""
+ ecs = {}
+ feedback_parameter = {}
+ if not cfg.get('read_external_file'):
+ return (ecs, feedback_parameter, None)
+ base_dir = os.path.dirname(__file__)
+ filepath = os.path.join(base_dir, cfg['read_external_file'])
+ if os.path.isfile(filepath):
+ with open(filepath, 'r') as infile:
+ external_data = yaml.safe_load(infile)
+ else:
+ logger.error("Desired external file %s does not exist", filepath)
+ return (ecs, feedback_parameter, None)
+ ecs = external_data.get('ecs', {})
+ feedback_parameter = external_data.get('feedback_parameter', {})
+ logger.info("External file %s", filepath)
+ logger.info("Found ECS (K):")
+ logger.info("%s", pformat(ecs))
+ logger.info("Found climate feedback parameters (W m-2 K-1):")
+ logger.info("%s", pformat(feedback_parameter))
+ return (ecs, feedback_parameter, filepath)
+
+
+def plot_ecs_regression(cfg, dataset_name, tas_cube, rtnt_cube, reg_stats):
 """Plot linear regression used to calculate ECS."""
- if not (cfg[n.WRITE_PLOTS] and cfg.get('plot_ecs_regression')):
- return
- ecs = -regression_stats.intercept / (2 * regression_stats.slope)
- filepath = os.path.join(cfg[n.PLOT_DIR],
- dataset_name + '.'
+ cfg[n.OUTPUT_FILE_TYPE]) + if not cfg['write_plots']: + return (None, None) + ecs = -reg_stats.intercept / (2 * reg_stats.slope) # Regression line - x_reg = np.linspace(-1.0, 8.0, 2) - y_reg = regression_stats.slope * x_reg + regression_stats.intercept + x_reg = np.linspace(-1.0, 9.0, 2) + y_reg = reg_stats.slope * x_reg + reg_stats.intercept # Plot data - text = 'r = {:.2f}, '.format(regression_stats.rvalue) + \ - r'$\alpha$ = {:.2f}, '.format(-regression_stats.slope) + \ - 'F = {:.2f}, '.format(regression_stats.intercept) + \ - 'ECS = {:.2f}'.format(ecs) - e.plot.scatterplot( - [data[0], x_reg], - [data[1], y_reg], - filepath, - plot_kwargs=[{'linestyle': 'none', - 'markeredgecolor': 'b', - 'markerfacecolor': 'none', - 'marker': 's'}, - {'color': 'k', - 'linestyle': '-'}], + text = r'r = {:.2f}, $\lambda$ = {:.2f}, F = {:.2f}, ECS = {:.2f}'.format( + reg_stats.rvalue, -reg_stats.slope, reg_stats.intercept, ecs) + plot_path = get_plot_filename(dataset_name, cfg) + plot.scatterplot( + [tas_cube.data, x_reg], + [rtnt_cube.data, y_reg], + plot_path, + plot_kwargs=[{ + 'linestyle': 'none', + 'markeredgecolor': 'b', + 'markerfacecolor': 'none', + 'marker': 's', + }, { + 'color': 'k', + 'linestyle': '-', + }], save_kwargs={ 'bbox_inches': 'tight', - 'orientation': 'landscape'}, + 'orientation': 'landscape', + }, axes_functions={ 'set_title': dataset_name, - 'set_xlabel': 'tas / ' + variables.units('tas'), - 'set_ylabel': 'rtmt / ' + variables.units('rtmt'), - 'set_xlim': [0.0, 7.0], + 'set_xlabel': 'tas / ' + tas_cube.units.origin, + 'set_ylabel': 'rtnt / ' + rtnt_cube.units.origin, + 'set_xlim': [0.0, 8.0], 'set_ylim': [-2.0, 10.0], - 'text': {'args': [0.05, 0.9, text], - 'kwargs': {'transform': 'transAxes'}}}) + 'text': { + 'args': [0.05, 0.9, text], + 'kwargs': { + 'transform': 'transAxes' + }, + }, + }, + ) # Write netcdf file for every plot - if not cfg[n.WRITE_NETCDF]: - return - tas_coord = iris.coords.AuxCoord(data[0], **variables.iris_dict('tas')) - attr = {'model': dataset_name, - 'regression_r_value': regression_stats.rvalue, - 'regression_slope': regression_stats.slope, - 'regression_interception': regression_stats.intercept, - 'climate_sensitivity': -regression_stats.slope, - 'ECS': ecs, - 'created_by': 'ESMValTool version {}'.format(cfg[n.VERSION]) + - ', diagnostic {}'.format(cfg[n.SCRIPT]), - 'creation_date': datetime.utcnow().isoformat(' ') + 'UTC'} - cube = iris.cube.Cube(data[1], - attributes=attr, - aux_coords_and_dims=[(tas_coord, 0)], - **variables.iris_dict('rtmt')) - filepath = os.path.join(cfg[n.WORK_DIR], - 'ecs_regression_' + dataset_name + '.nc') - iris.save(cube, filepath) - logger.info("Writing %s", filepath) - return - - -############################################################################### -# Setup diagnostic -############################################################################### - -# Variables -ECS = e.Variable('ecs', - 'equilibrium_climate_sensitivity', - 'Change in global mean surface temperature at equilibrium ' - 'caused by a doubling of the atmospheric CO2 concentration', - 'K') - -# Experiments -PICONTROL = 'piControl' -ABRUPT4XCO2 = 'abrupt4xCO2' + tas_coord = iris.coords.AuxCoord( + tas_cube.data, + **extract_variables(cfg, as_iris=True)['tas']) + attrs = { + 'model': dataset_name, + 'regression_r_value': reg_stats.rvalue, + 'regression_slope': reg_stats.slope, + 'regression_interception': reg_stats.intercept, + 'Climate Feedback Parameter': -reg_stats.slope, + 'ECS': ecs, + } + cube = iris.cube.Cube( + rtnt_cube.data, + 
attributes=attrs, + aux_coords_and_dims=[(tas_coord, 0)], + **extract_variables(cfg, as_iris=True)['rtnt']) + netcdf_path = get_diagnostic_filename('ecs_regression_' + dataset_name, + cfg) + io.iris_save(cube, netcdf_path) + + # Provenance + provenance_record = get_provenance_record( + "Scatterplot between TOA radiance and global mean surface temperature " + "anomaly for 150 years of the abrupt 4x CO2 experiment including " + "linear regression to calculate ECS for {}.".format(dataset_name)) + provenance_record.update({ + 'plot_file': plot_path, + 'plot_types': ['scatter'], + }) + + return (netcdf_path, provenance_record) + + +def write_data(ecs_data, feedback_parameter_data, ancestor_files, cfg): + """Write netcdf files.""" + data = [ecs_data, feedback_parameter_data] + var_attrs = [ + { + 'short_name': 'ecs', + 'long_name': 'Effective Climate Sensitivity (ECS)', + 'units': cf_units.Unit('K'), + }, + { + 'short_name': 'lambda', + 'long_name': 'Climate Feedback Parameter', + 'units': cf_units.Unit('W m-2 K-1'), + }, + ] + for (idx, var_attr) in enumerate(var_attrs): + path = get_diagnostic_filename(var_attr['short_name'], cfg) + io.save_scalar_data(data[idx], path, var_attr) + caption = "{long_name} for multiple climate models.".format(**var_attr) + provenance_record = get_provenance_record(caption) + provenance_record['ancestors'] = ancestor_files + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) def main(cfg): - """Run the diagnostic. - - Parameters - ---------- - cfg : dict - Configuration dictionary of the recipe. + """Run the diagnostic.""" + input_data = cfg['input_data'].values() + + # Read external file if desired + if cfg.get('read_external_file'): + (ecs, feedback_parameter, external_file) = read_external_file(cfg) + else: + check_input_data(cfg) + ecs = {} + feedback_parameter = {} + external_file = None - """ - ########################################################################### - # Read recipe data - ########################################################################### - - # Dataset data containers - data = e.Datasets(cfg) - logging.debug("Found datasets in recipe:\n%s", data) - - # Variables - var = e.Variables(cfg) - logging.debug("Found variables in recipe:\n%s", var) - var.add_vars(ecs=ECS) - - # Check for tas and rtmt - if not var.vars_available('tas', 'rtmt'): - raise ValueError("This diagnostic needs 'tas' and 'rtmt' variables") - - ########################################################################### # Read data - ########################################################################### - - # Create iris cube for each dataset and save annual means - for dataset_path in data: - cube = iris.load(dataset_path, var.standard_names())[0] - cube = cube.aggregated_by(n.YEAR, iris.analysis.MEAN) - data.set_data(cube.data, dataset_path) - - ########################################################################### - # Process data - ########################################################################### - data_ecs = OrderedDict() - - for dataset_path in \ - data.get_path_list(short_name='tas', exp=PICONTROL): - - # Substract piControl experiment from abrupt4xCO2 experiment - dataset = data.get_info(n.DATASET, dataset_path) - data_rtmt_pic = data.get_data(short_name='rtmt', exp=PICONTROL, - dataset=dataset) - data_tas = data.get_data(short_name='tas', exp=ABRUPT4XCO2, - dataset=dataset) - data.get_data(dataset_path) - data_rtmt = data.get_data(short_name='rtmt', exp=ABRUPT4XCO2, - dataset=dataset) - 
data_rtmt_pic + tas_data = select_metadata(input_data, short_name='tas') + rtnt_data = select_metadata(input_data, short_name='rtnt') + + # Iterate over all datasets and save ECS and feedback parameter + for dataset in group_metadata(tas_data, 'dataset'): + logger.info("Processing %s", dataset) + (tas_cube, rtnt_cube, ancestor_files) = get_anomaly_data( + tas_data, rtnt_data, dataset) # Perform linear regression - reg = stats.linregress(data_tas, data_rtmt) + reg = stats.linregress(tas_cube.data, rtnt_cube.data) # Plot ECS regression if desired - plot_ecs_regression(cfg, dataset, [data_tas, data_rtmt], var, reg) + (path, provenance_record) = plot_ecs_regression( + cfg, dataset, tas_cube, rtnt_cube, reg) + + # Provenance + if path is not None: + provenance_record['ancestors'] = ancestor_files + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(path, provenance_record) # Save data - data_ecs[dataset] = -reg.intercept / (2 * reg.slope) + if cfg.get('read_external_file') and dataset in ecs: + logger.info( + "Overwriting external given ECS and climate feedback " + "parameter for %s", dataset) + ecs[dataset] = -reg.intercept / (2 * reg.slope) + feedback_parameter[dataset] = -reg.slope - ########################################################################### # Write data - ########################################################################### - if cfg[n.WRITE_NETCDF]: - dataset_coord = iris.coords.AuxCoord(list(data_ecs), - long_name='datasets') - attr = {'created_by': 'ESMValTool version {}'.format(cfg[n.VERSION]) + - ', diagnostic {}'.format(cfg[n.SCRIPT]), - 'creation_date': datetime.utcnow().isoformat(' ') + 'UTC'} - cube = iris.cube.Cube(list(data_ecs.values()), - long_name=var.long_name('ecs'), - var_name='ecs', - units=var.units('ecs'), - aux_coords_and_dims=[(dataset_coord, 0)], - attributes=attr) - - # Save file - filepath = os.path.join(cfg[n.WORK_DIR], - cfg.get('output_name', 'ecs') + '.nc') - iris.save(cube, filepath) - logger.info("Writing %s", filepath) + ancestor_files = [d['filename'] for d in tas_data + rtnt_data] + if external_file is not None: + ancestor_files.append(external_file) + write_data(ecs, feedback_parameter, ancestor_files, cfg) if __name__ == '__main__': - with e.run_diagnostic() as config: + with run_diagnostic() as config: main(config) diff --git a/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar5.yml b/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar5.yml new file mode 100644 index 0000000000..9a8ff54381 --- /dev/null +++ b/esmvaltool/diag_scripts/climate_metrics/external_sources/ipcc_ar5.yml @@ -0,0 +1,49 @@ +# ECS (IPCC AR5 table 9.5, units: K) +ecs: + ACCESS1-0: 3.8 + bcc-csm1-1-m: 2.9 + bcc-csm1-1: 2.8 + BNU-ESM: 4.1 + CanESM2: 3.7 + CCSM4: 2.9 + CNRM-CM5: 3.3 + CSIRO-Mk3-6-0: 4.1 + GFDL-CM3: 4.0 + GFDL-ESM2G: 2.4 + GFDL-ESM2M: 2.4 + GISS-E2-H: 2.3 + GISS-E2-R: 2.1 + HadGEM2-ES: 4.6 + inmcm4: 2.1 + IPSL-CM5A-LR: 4.1 + IPSL-CM5B-LR: 2.6 + MIROC-ESM: 4.7 + MIROC5: 2.7 + MPI-ESM-LR: 3.6 + MRI-CGCM3: 2.6 + NorESM1-M: 2.8 + +# Climate feedback parameter lambda (IPCC AR5 table 9.5, units: W m-2 K-1) +feedback_parameter: + ACCESS1-0: 0.8 + bcc-csm1-1-m: 1.2 + bcc-csm1-1: 1.1 + BNU-ESM: 1.0 + CanESM2: 1.0 + CCSM4: 1.2 + CNRM-CM5: 1.1 + CSIRO-Mk3-6-0: 0.6 + GFDL-CM3: 0.8 + GFDL-ESM2G: 1.3 + GFDL-ESM2M: 1.4 + GISS-E2-H: 1.7 + GISS-E2-R: 1.8 + HadGEM2-ES: 0.6 + inmcm4: 1.4 + IPSL-CM5A-LR: 0.8 + IPSL-CM5B-LR: 1.0 + MIROC-ESM: 0.9 + MIROC5: 1.5 + MPI-ESM-LR: 1.1 + MRI-CGCM3: 1.2 + NorESM1-M: 1.1 diff --git 
a/esmvaltool/diag_scripts/climate_metrics/psi.py b/esmvaltool/diag_scripts/climate_metrics/psi.py
new file mode 100644
index 0000000000..04238f9496
--- /dev/null
+++ b/esmvaltool/diag_scripts/climate_metrics/psi.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""Diagnostic script to calculate temperature variability metric psi (Cox et al., 2018).
+
+Description
+-----------
+Calculate global temperature variability metric psi following Cox et al.
+(2018).
+
+Author
+------
+Manuel Schlund (DLR, Germany)
+
+Project
+-------
+CRESCENDO
+
+Configuration options in recipe
+-------------------------------
+window_length : int, optional (default: 55)
+ Number of years used for the moving window average.
+lag : int, optional (default: 1)
+ Lag (in years) for the autocorrelation function.
+
+"""
+
+import logging
+import os
+
+import cf_units
+import iris
+import numpy as np
+from scipy import stats
+
+from esmvaltool.diag_scripts.shared import (
+ ProvenanceLogger, get_diagnostic_filename, group_metadata, io,
+ run_diagnostic, select_metadata)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def calculate_psi(cube, cfg):
+ """Calculate temperature variability metric psi for a given cube."""
+ window_length = cfg.get('window_length', 55)
+ lag = cfg.get('lag', 1)
+ psi_years = []
+ psis = []
+
+ # Moving window
+ for yr_idx in range(cube.shape[0] - window_length):
+ slc = slice(yr_idx, yr_idx + window_length)
+ years = cube.coord('year').points[slc]
+ tas = np.copy(cube.data[slc])
+
+ # De-trend data
+ reg = stats.linregress(years, tas)
+ tas -= reg.slope * years + reg.intercept
+
+ # Autocorrelation
+ norm = np.sum(np.square(tas))
+ [autocorr] = np.correlate(tas[:-lag], tas[lag:], mode='valid') / norm
+
+ # Psi
+ psi_years.append(years[-1])
+ psis.append(np.std(tas) / np.sqrt(-np.log(autocorr)))
+
+ # Return new cube
+ year_coord = iris.coords.DimCoord(
+ np.array(psi_years),
+ var_name='year',
+ long_name='year',
+ units=cf_units.Unit('year'))
+ psi_cube = iris.cube.Cube(
+ np.array(psis),
+ dim_coords_and_dims=[(year_coord, 0)],
+ attributes={
+ 'window_length': window_length,
+ 'lag': lag
+ })
+ return psi_cube
+
+
+def get_provenance_record(caption, ancestor_files):
+ """Create a provenance record describing the diagnostic data and plot."""
+ record = {
+ 'caption': caption,
+ 'statistics': ['var', 'diff', 'corr', 'detrend'],
+ 'domains': ['global'],
+ 'authors': ['schl_ma'],
+ 'references': ['cox18nature'],
+ 'realms': ['atmos'],
+ 'themes': ['phys'],
+ 'ancestors': ancestor_files,
+ }
+ return record
+
+
+def main(cfg):
+ """Run the diagnostic."""
+ input_data = (
+ select_metadata(cfg['input_data'].values(), short_name='tas') +
+ select_metadata(cfg['input_data'].values(), short_name='tasa'))
+ if not input_data:
+ raise ValueError("This diagnostic needs the 'tas' or 'tasa' variable")
+
+ # Calculate psi for every dataset
+ psis = {}
+ psi_attrs = {
+ 'short_name': 'psi',
+ 'long_name': 'Temperature variability metric',
+ 'units': 'K',
+ }
+ grouped_data = group_metadata(input_data, 'dataset')
+ for (dataset, [data]) in grouped_data.items():
+ logger.info("Processing %s", dataset)
+ cube = iris.load_cube(data['filename'])
+ cube = cube.aggregated_by('year', iris.analysis.MEAN)
+ psi_cube = calculate_psi(cube, cfg)
+ data.update(psi_attrs)
+ data.pop('standard_name', '')
+
+ # Provenance
+ caption = ("Temporal evolution of temperature variability metric psi "
+ "between {start_year} and {end_year} for {dataset}.".format(
+ **data))
+ provenance_record = get_provenance_record(caption, [data['filename']])
+ out_path = get_diagnostic_filename('psi_' + dataset, cfg)
+ with ProvenanceLogger(cfg) as provenance_logger:
+ provenance_logger.log(out_path, provenance_record)
+
+ # Save psi for every dataset
+ data['filename'] = out_path
+ io.metadata_to_netcdf(psi_cube, data)
+
+ # Save averaged psi
+ psis[dataset] = np.mean(psi_cube.data)
+
+ # Save averaged psis for every dataset in one file
+ out_path = get_diagnostic_filename('psi', cfg)
+ io.save_scalar_data(
+ psis, out_path, psi_attrs, attributes=psi_cube.attributes)
+
+ # Provenance
+ caption = "{long_name} for multiple climate models.".format(**psi_attrs)
+ ancestor_files = [d['filename'] for d in input_data]
+ provenance_record = get_provenance_record(caption, ancestor_files)
+ with ProvenanceLogger(cfg) as provenance_logger:
+ provenance_logger.log(out_path, provenance_record)
+
+
+if __name__ == '__main__':
+ with run_diagnostic() as config:
+ main(config)
diff --git a/esmvaltool/diag_scripts/clouds/clouds.ncl b/esmvaltool/diag_scripts/clouds/clouds.ncl
new file mode 100644
index 0000000000..9159c781d2
--- /dev/null
+++ b/esmvaltool/diag_scripts/clouds/clouds.ncl
@@ -0,0 +1,1317 @@
+; CLOUDS
+; ############################################################################
+; Author: Axel Lauer (DLR, Germany)
+; PROJECT-NAME EMBRACE
+; ############################################################################
+; Description
+; Calculates annual/seasonal means of 2-d (cloud) parameters for comparison
+; with a reference data set. Optionally, differences to the reference data
+; set are also plotted.
+;
+; Required diag_script_info attributes (diagnostic specific)
+; none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+; embrace_setup: True = 2 plots per line, False = 4 plots per line
+; (default)
+; explicit_cn_levels: explicit contour levels (array)
+; extralegend: plot legend(s) to extra file(s)
+; filename_add: optionally add this string to plot filenames
+; panel_labels: label individual panels (true, false)
+; PanelTop: manual override for "@gsnPanelTop" used by panel
+; plot(s)
+; projection: map projection for plotting (default =
+; "CylindricalEquidistant")
+; showdiff: calculate and plot differences (default = False)
+; rel_diff: if showdiff = True, then plot relative differences (%)
+; (default = False)
+; rel_diff_min: lower cutoff value in case of calculating relative
+; differences
+; (in units of input variable)
+; region: show only selected geographic region given as latmin,
+; latmax, lonmin, lonmax
+; timemean: time averaging - "seasonalclim" (DJF, MAM, JJA, SON),
+; "annualclim" = annual mean (default)
+; treat_var_as_error: treat variable as error when averaging (true, false)
+; true: avg = sqrt(mean(var*var))
+; false: avg = mean(var)
+;
+; Required variable attributes (variable specific)
+; none
+;
+; Optional variable_info attributes (variable specific)
+; long_name: variable description
+; reference_dataset: reference dataset; REQUIRED when calculating
+; differences (showdiff = True)
+; units: variable units (for labeling plot only)
+;
+; Caveats
+; none
+;
+; Modification history
+; 20190220-A_laue_ax: added output of provenance (v2.0)
+; 20181119-A_laue_ax: adapted code to multi-variable capable framework
+; 20180923-A_laue_ax: added writing of results to netcdf
+; 20180518-A_laue_ax: code rewritten for ESMValTool v2.0
+; 20170621-A_laue_ax: reworked code to add tags for reporting
+; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg
+; 20151027-A_laue_ax: moved call to 'write_references' to the beginning
+; of the code
+; 20150415-A-laue_ax: written.
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+
+begin
+
+ enter_msg(DIAG_SCRIPT, "")
+
+ var0 = variable_info[0]@short_name
+ info0 = select_metadata_by_name(input_file_info, var0)
+ dim_MOD = ListCount(info0)
+ if (isatt(variable_info[0], "reference_dataset")) then
+ refname = variable_info[0]@reference_dataset
+ end if
+ names = metadata_att_as_array(info0, "dataset")
+ projects = metadata_att_as_array(info0, "project")
+
+ log_info("++++++++++++++++++++++++++++++++++++++++++")
+ log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+ log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+ ; Set default values for non-required diag_script_info attributes
+
+ set_default_att(diag_script_info, "embrace_setup", False)
+ set_default_att(diag_script_info, "extralegend", False)
+ set_default_att(diag_script_info, "filename_add", "")
+ set_default_att(diag_script_info, "panel_labels", True)
+ set_default_att(diag_script_info, "rel_diff", False)
+ set_default_att(diag_script_info, "rel_diff_min", -1.0e19)
+ set_default_att(diag_script_info, "showdiff", False)
+ set_default_att(diag_script_info, "timemean", "annualclim")
+ set_default_att(diag_script_info, "treat_var_as_error", False)
+
+ flag_diff = diag_script_info@showdiff
+ flag_rel_diff = diag_script_info@rel_diff
+ rel_diff_min = diag_script_info@rel_diff_min
+
+ if (.not.flag_diff .and. flag_rel_diff) then
+ log_info("flag_rel_diff = True has no effect until flag_diff is also " \
+ + "set to True")
+ end if
+
+ if (diag_script_info@filename_add .ne. "") then
+ filename_add = "_" + diag_script_info@filename_add
+ else
+ filename_add = ""
+ end if
+
+ embracesetup = diag_script_info@embrace_setup
+
+ if (isatt(diag_script_info, "projection")) then
+ projection = diag_script_info@projection
+ perim = False
+ else
+ projection = "CylindricalEquidistant"
+ perim = True
+ end if
+
+ ; time averaging: at the moment, only "annualclim" and "seasonalclim"
+ ; are supported
+
+ timemean = diag_script_info@timemean
+ numseas = 1 ; default
+ season = (/"annual"/)
+
+ if (timemean.eq."seasonalclim") then
+ numseas = 4
+ delete(season)
+ season = (/"DJF", "MAM", "JJA", "SON"/)
+ end if
+
+ ; create string for caption (netcdf provenance)
+
+ allseas = season(0)
+ do is = 1, numseas - 1
+ allseas = allseas + "/" + season(is)
+ end do
+
+ panel_labels = diag_script_info@panel_labels
+
+ treat_var_as_error = diag_script_info@treat_var_as_error
+
+ extralegend = diag_script_info@extralegend
+
+ ; make sure path for (mandatory) netcdf output exists
+
+ work_dir = config_user_info@work_dir + "/"
+ ; Create work dir
+ system("mkdir -p " + work_dir)
+
+ if (config_user_info@write_plots.eq."True") then
+ write_plots = True
+ else
+ write_plots = False
+ end if
+
+ ref_ind = -1 ; set to invalid value
+
+ ; if attribute is present, use it so correlations can be calculated
+ if (isvar("refname")) then
+ ; set reference model
+ ref_ind = ind(names .eq.
refname) + if (ismissing(ref_ind)) then + log_info("warning: reference dataset (" + refname + ") not found.") + ref_ind = -1 + end if + end if + + climofiles = metadata_att_as_array(info0, "filename") + + outfile = new(numseas, string) + outfile(:) = "" + + if (flag_diff) then + outfile_d = new(numseas, string) + outfile_d(:) = "" + + ; check for reference model definition + if (.not.isvar("refname")) then + error_msg("f", DIAG_SCRIPT, "", \ + "no reference dataset defined in recipe") + end if + + ; set reference model + + ref_ind = ind(names .eq. refname) + if (ismissing(ref_ind)) then + error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \ + + refname + ") is missing") + end if + end if + +end + +begin + ; ########################################### + ; # get data and average time # + ; ########################################### + + maps = new((/dim_MOD, 4/), graphic) + maps_d = new((/dim_MOD, 4/), graphic) + + ind_all_sorted = ispan(0, dim_MOD - 1, 1) ; create array + + if (ref_ind .ge. 0) then + ind_wo_ref = ind(names .ne. refname) + ind_all_sorted(0) = ref_ind + ind_all_sorted(1:dim_MOD - 1) = ind_wo_ref + end if + + corr = new((/numseas/), float) + gavg = new((/numseas/), float) + rmsd = new((/numseas/), float) + bias = new((/numseas/), float) + + ; filenames for netcdf output + + nc_filename_bias = work_dir + "clouds_" + var0 + "_bias.nc" + nc_filename_bias@existing = "append" + nc_filename_mean = work_dir + "clouds_" + var0 + "_mean.nc" + nc_filename_mean@existing = "append" + + do ii = 0, dim_MOD - 1 + + imod = ind_all_sorted(ii) + log_info("processing " + names(imod)) + + if (isvar("data1")) then + delete(data1) + end if + + if (isvar("A0")) then + delete(A0) + end if + + A0 = read_data(info0[imod]) + + ; check dimensions + + dims = getvardims(A0) + if (dimsizes(dims) .lt. 2) then + error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + \ + " dimensions, need 2 or 3") + end if + idx = ind(dims .eq. "lat") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lat dimension") + end if + idx = ind(dims .eq. "lon") + if (ismissing(idx)) then + error_msg("f", DIAG_SCRIPT, "", "no lon dimension") + end if + + ; average over time + + ; if variable is an error variable, we have to square it before + ; averaging and then calculate the square-root afterwards + + if (treat_var_as_error) then + log_info(" ++++++++++++++ Treating variable as error " + \ + "variable when averaging ") + A0 = A0 * A0 + end if + + data1 = time_operations(A0, -1, -1, "average", timemean, True) + + if (treat_var_as_error) then + data1 = sqrt(data1) + end if + + delete(A0) + + ; if requested, select geographical region + + if (isatt(diag_script_info, "region")) then + region = diag_script_info@region + data1 := area_operations(data1, region(0), region(1), region(2), \ + region(3), "extract", False) + if (region(2).eq.0. .and. region(3).eq.360.) 
then + else + data1@res_gsnAddCyclic = False + end if + data1@res_mpMinLatF = region(0) ; range to zoom in on + data1@res_mpMaxLatF = region(1) + data1@res_mpMinLonF = region(2) + data1@res_mpMaxLonF = region(3) + data1@res_mpCenterLonF = 0.5 * (region(2) + region(3)) + delete(region) + end if + + ; ########################################### + ; # Style dependent annotation # + ; ########################################### + ; retrieve unique strings describing the data + ; function in ./diag_scripts/shared/plot/style.ncl + + ; ########################################### + ; # plot ressources # + ; ########################################### + + data1@res_cnFillOn = True ; color plot desired + data1@res_cnLineLabelsOn = False ; contour lines + + ; colors + ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml + + ; annotation + + data1@res_tiMainOn = False + data1@res_cnLevelSelectionMode = "ExplicitLevels" + data1@res_cnLinesOn = False + + data1@res_mpOutlineOn = True + data1@res_mpFillOn = False + + ; variable specific plotting settings + + if (var0.eq."pr") then + data1@res_cnLevels = fspan(0.5, 10, 20) + ; convert from kg m-2 s-1 to mm day-1 + data1 = data1 * 86400.0 + data1@units = "mm day-1" + end if + + if (var0.eq."lwp") then + data1@res_cnLevels = ispan(10, 200, 10) * 0.001 + data1@res_mpOutlineOn = False + data1@res_mpFillOn = True + data1@res_mpLandFillColor = "Black" + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/qcm3.rgb") + data1@res_cnFillColors = pal + end if + + if (var0.eq."tas") then + data1@res_cnLevels = ispan(-30, 30, 3) + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/ipcc-tas.rgb") + data1@res_cnFillColors = pal + ; convert from K to degC + data1 = data1 - 273.15 + data1@units = "degC" + end if + + if (var0.eq."clt") then + data1@res_cnLevels = fspan(5, 100, 20) + end if + + if (var0.eq."clivi") then + data1@res_cnLevels = ispan(10, 200, 10) * 0.001 + end if + + if (var0.eq."clwvi") then + data1@res_cnLevels = ispan(10, 300, 10) * 0.001 + end if + + if (var0.eq."swcre") then + data1@res_cnLevels = ispan(-100, 0, 10) + end if + + if (var0.eq."lwcre") then + data1@res_cnLevels = ispan(0, 100, 10) + end if + + if (var0.eq."netcre") then + data1@res_cnLevels = ispan(-70, 70, 10) + end if + + data1@res_lbLabelBarOn = False + data1@res_gsnRightString = "" + + data1@res_mpFillDrawOrder = "PostDraw" ; draw map last + data1@res_cnMissingValFillColor = "Gray" + + ; no tickmarks and no labels + + data1@res_tmYLLabelsOn = False + data1@res_tmYLOn = False + data1@res_tmYRLabelsOn = False + data1@res_tmYROn = False + data1@res_tmXBLabelsOn = False + data1@res_tmXBOn = False + data1@res_tmXTLabelsOn = False + data1@res_tmXTOn = False + data1@res_cnInfoLabelOn = False ; turn off cn info label + data1@res_mpPerimOn = perim ; draw line around map + + ; specified in namelist + + data1@res_mpProjection = projection + + ; set explicit contour levels + + if (isatt(diag_script_info, "explicit_cn_levels")) then + data1@res_cnLevelSelectionMode = "ExplicitLevels" + data1@res_cnLevels = diag_script_info@explicit_cn_levels + end if + + if (.not. 
isatt(data1, "res_cnLevels")) then + log_info(DIAG_SCRIPT + " (var: " + var0 + "):") + log_info("info: using default contour levels") + data1@res_cnLevels = fspan(min(data1), max(data1), 20) + end if + + ; ########################################### + ; # other Metadata: diag_script, var # + ; ########################################### + ; add to data1 as attributes without prefix + + if (isatt(data1, "diag_script")) then ; add to existing entries + temp = data1@diag_script + delete(data1@diag_script) + data1@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0) + delete(temp) + else ; add as new attribute + data1@diag_script = (/DIAG_SCRIPT/) + end if + + if (isatt(variable_info[0], "long_name")) then + data1@var_long_name = variable_info[0]@long_name + end if + + data1@var = var0 + + if (isatt(variable_info[0], "units")) then + data1@var_units = variable_info[0]@units + else + data1@var_units = "" + end if + + if (.not. isvar("ref_data")) then + ref_data = data1 + end if + + ; check if data are on same grid (for calculating difference, RMSD, + ; correlation) + + same_grid = False + + if (all(dimsizes(ref_data) .eq. dimsizes(data1))) then + if (max(abs(ref_data&lat - data1&lat)) .le. 1.0e-6) then + if (max(abs(ref_data&lon - data1&lon)) .le. 1.0e-6) then + same_grid = True + end if + end if + end if + + if (flag_diff .and. .not.same_grid) then + flag_diff = False + error_msg("f", DIAG_SCRIPT, "", \ + "Data are not on same grid, cannot calculate differences. " \ + + "Set showdiff to False in namelist or regrid data to " \ + + "common grid (check/adjust " \ + + "preprocessor settings in recipe).") + end if + + corr = corr@_FillValue + gavg = gavg@_FillValue + + if (.not.all(ismissing(data1))) then + if (numseas.gt.1) then + do is = 0, numseas - 1 + if (same_grid .and. (ref_ind .ge. 0)) then + corr(is) = calculate_metric(ref_data(is, :, :), data1(is, :, :), \ + "correlation") + end if + gavg(is) = area_operations(data1(is, :, :), -90., 90., 0., 360., \ + "average", True) + end do + else + if (same_grid .and. (ref_ind .ge. 0)) then + corr(0) = calculate_metric(ref_data, data1, "correlation") + end if + gavg(0) = area_operations(data1, -90., 90., 0., 360., "average", True) + end if + end if + + data1@res_gsnLeftStringFontHeightF = min((/0.025, 0.015 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + data1@res_gsnRightStringFontHeightF = min((/0.025, 0.015 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + + ; ########################################### + ; # create the plot # + ; ########################################### + + data1@res_gsnDraw = False ; do not draw yet + data1@res_gsnFrame = False ; don't advance frame + + ; function in aux_plotting.ncl + + if (ii.eq.0) then + ; note: an array of workspaces (i.e. 
wks(numseas)) does not work as + ; attributes cannot be assigned to each array element + ; individually + wks0 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 + \ + "_" + season(0) + filename_add) + ; difference plots will be saved to a different file + if (flag_diff) then + wks0d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 + \ + "_bias_" + season(0) + filename_add) + end if + if (numseas.gt.1) then + wks1 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 + \ + "_" + season(1) + filename_add) + wks2 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 + \ + "_" + season(2) + filename_add) + wks3 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 + \ + "_" + season(3) + filename_add) + ; difference plots will be saved to a different files + if (flag_diff) then + wks1d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 + \ + "_bias_" + season(1) + filename_add) + wks2d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 + \ + "_bias_" + season(2) + filename_add) + wks3d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 + \ + "_bias_" + season(3) + filename_add) + end if + end if + end if + + if (numseas.gt.1) then + do is = 0, numseas - 1 + if (.not.ismissing(corr(is))) then + data1@res_gsnRightString = "corr = " + sprintf("%6.3f", corr(is)) + else + data1@res_gsnRightString = "" + end if + if (.not.ismissing(gavg(is))) then + data1@res_gsnLeftString = "mean = " + sprintf("%6.3f", gavg(is)) + else + data1@res_gsnLeftString = "" + end if + + if (imod.eq.ref_ind) then ; remove corr. string for reference dataset + data1@res_gsnRightString = "" + end if + + if (is.eq.0) then + maps(imod, is) = contour_map(wks0, data1(is, :, :), var0) + end if + if (is.eq.1) then + maps(imod, is) = contour_map(wks1, data1(is, :, :), var0) + end if + if (is.eq.2) then + maps(imod, is) = contour_map(wks2, data1(is, :, :), var0) + end if + if (is.eq.3) then + maps(imod, is) = contour_map(wks3, data1(is, :, :), var0) + end if + end do + else + if (.not.ismissing(corr(0))) then + data1@res_gsnRightString = "corr = " + sprintf("%6.3f", corr(0)) + else + data1@res_gsnRightString = "" + end if + if (.not.ismissing(gavg(0))) then + data1@res_gsnLeftString = "mean = " + sprintf("%6.3f", gavg(0)) + else + data1@res_gsnLeftString = "" + end if + + if (imod.eq.ref_ind) then ; remove corr. string for reference dataset + data1@res_gsnRightString = "" + end if + + maps(imod, 0) = contour_map(wks0, data1, var0) + end if + + ; mandatory netcdf output + + data1@var = var0 + "_mean_" + names(imod) + nc_outfile_mean = ncdf_write(data1, nc_filename_mean) + + ; ======================================================================= + ; Create difference plots (if requested) + ; ======================================================================= + + if (flag_diff .and. (imod .ne. ref_ind)) then + + diff = data1 + if (flag_rel_diff) then + diff = (diff - ref_data) / ref_data * 100.0 + diff = where(ref_data .le. rel_diff_min, diff@_FillValue, diff) + else + diff = diff - ref_data + end if + + diff@res_gsnLeftString = "" + diff@res_gsnRightString = "" + + rmsd = rmsd@_FillValue + bias = bias@_FillValue + + if (numseas.gt.1) then + do is = 0, numseas - 1 + if (.not. flag_rel_diff) then + if (same_grid) then + rmsd(is) = calculate_metric(ref_data(is, :, :), \ + data1(is, :, :), "RMSD") + end if + bias(is) = area_operations(diff(is, :, :), -90., 90., 0., 360., \ + "average", True) + end if + end do + else + if (.not. 
flag_rel_diff) then
+ if (same_grid) then
+ rmsd(0) = calculate_metric(ref_data, data1, "RMSD")
+ end if
+ bias(0) = area_operations(diff, -90., 90., 0., 360., "average", \
+ True)
+ end if
+ end if
+
+ ; ----------------------------------------------------------------------
+
+ ; ###########################################
+ ; # plot resources #
+ ; ###########################################
+
+ diff@res_gsnLeftStringFontHeightF = min((/0.025, 0.015 * 6.0 \
+ / tofloat((dim_MOD + 1) / 2)/))
+ diff@res_gsnRightStringFontHeightF = min((/0.025, 0.015 * 6.0 \
+ / tofloat((dim_MOD + 1) / 2)/))
+
+ diff@res_tiMainOn = False
+
+ diff@res_cnFillOn = True ; color plot desired
+ diff@res_cnLineLabelsOn = False ; contour lines
+ diff@res_cnLinesOn = False
+
+ ; colors
+ ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml
+
+ ; annotation
+
+ diff@res_cnLevelSelectionMode = "ExplicitLevels"
+ diff@res_mpOutlineOn = True
+ diff@res_mpFillOn = False
+
+ ; variable specific plotting settings
+
+ ; set contour levels / colors
+
+ if (.not.isvar("cnLevels")) then
+
+ if (isatt(diff, "res_cnLevels")) then
+ delete(diff@res_cnLevels)
+ end if
+ if (isatt(diff, "res_cnFillColors")) then
+ delete(diff@res_cnFillColors)
+ end if
+ if (isvar("pal")) then
+ delete(pal)
+ end if
+
+ if (var0.eq."pr") then
+ diff@res_cnLevels = ispan(-30, 30, 5) * 0.1
+ pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \
+ + "ipcc-precip-delta.rgb")
+ diff@res_cnFillColors = pal
+ diff@res_lbOrientation = "horizontal"
+ end if
+
+ if ((var0.eq."tas") .or. (var0.eq."ts")) then
+ pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \
+ + "ipcc-tas-delta.rgb")
+ diff@res_cnFillPalette = pal
+ if (var0.eq."ts") then
+ diff@res_cnLevels = ispan(-5, 5, 1) * 0.5
+ end if
+ end if
+
+ if (var0.eq."lwp") then
+ diff@res_cnLevels = ispan(-50, 50, 10) * 0.001
+ diff@res_mpOutlineOn = False
+ diff@res_mpFillOn = True
+ diff@res_mpLandFillColor = "Black"
+ pal = read_colormap_file("$diag_scripts/shared/plot/rgb/qcm3.rgb")
+ diff@res_cnFillColors = pal
+ end if
+
+ if (var0.eq."clt") then
+ diff@res_cnLevels = fspan(-25, 25, 11)
+ end if
+
+ if (var0.eq."clivi") then
+ diff@res_cnLevels = ispan(-70, 70, 10) * 0.001
+ end if
+
+ if (var0.eq."clwvi") then
+ diff@res_cnLevels = ispan(-50, 50, 10) * 0.001
+ end if
+
+ if (var0.eq."swcre") then
+ diff@res_cnLevels = ispan(-30, 30, 5)
+ end if
+
+ if (var0.eq."lwcre") then
+ diff@res_cnLevels = ispan(-30, 30, 5)
+ end if
+
+ if (var0.eq."netcre") then
+ diff@res_cnLevels = ispan(-30, 30, 5)
+ end if
+
+ ; ******************************************************
+ ; *** relative differences: use specific color table ***
+ ; ******************************************************
+
+ if (flag_rel_diff) then
+ if (isatt(diff, "res_cnLevels")) then
+ delete(diff@res_cnLevels)
+ end if
+ if (isatt(diff, "res_cnFillColors")) then
+ delete(diff@res_cnFillColors)
+ end if
+ diff@res_cnLevels = fspan(-100, 100, 21)
+ if (isvar("pal")) then
+ delete(pal)
+ end if
+ pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \
+ + "percent100.rgb")
+ diff@res_cnFillColors = pal
+ end if
+
+ ; ******************************************************
+
+ if (.not.
isatt(diff, "res_cnLevels")) then + log_info(DIAG_SCRIPT + " (var: " + var0 + "):") + log_info("info: using default contour levels") + diff@res_cnLevels = fspan(min(diff), max(diff), 20) + end if + + cnLevels = diff@res_cnLevels + if (isatt(diff, "res_cnFillColors")) then + cnFillColors = diff@res_cnFillColors + end if + + else ; use previously defined colors and contour intervals + + if (isatt(diff, "res_cnLevels")) then + delete(diff@res_cnLevels) + end if + if (isatt(diff, "res_cnFillColors")) then + delete(diff@res_cnFillColors) + end if + + diff@res_cnLevels = cnLevels + + if (isvar("cnFillColors")) then + diff@res_cnFillColors = cnFillColors + end if + + end if ; if .not.isvar("cnLevels") + + if (imod.eq.ref_ind) then + diff@res_lbLabelBarOn = True + else + diff@res_lbLabelBarOn = False + end if + + ; map attributes + + diff@res_mpFillDrawOrder = "PostDraw" ; draw map last + diff@res_cnMissingValFillColor = "Gray" + + ; no tickmarks and no labels + + diff@res_tmYLLabelsOn = False + diff@res_tmYLOn = False + diff@res_tmYRLabelsOn = False + diff@res_tmYROn = False + diff@res_tmXBLabelsOn = False + diff@res_tmXBOn = False + diff@res_tmXTLabelsOn = False + diff@res_tmXTOn = False + diff@res_cnInfoLabelOn = False ; turn off cn info label + + ; specified in namelist + + diff@res_mpProjection = projection + + ; set explicit contour levels + + if (isatt(diag_script_info, "explicit_cn_levels")) then + diff@res_cnLevelSelectionMode = "ExplicitLevels" + if (isatt(diff, "res_cnLevels")) then + delete(diff@res_cnLevels) + end if + diff@res_cnLevels = diag_script_info@explicit_cn_levels + end if + + ; ########################################### + ; # other Metadata: diag_script, var # + ; ########################################### + ; add to diff as attributes without prefix + + if (isatt(variable_info, "long_name")) then + diff@var_long_name = variable_info@long_name + end if + if (isatt(variable_info, "units")) then + diff@var_units = variable_info@units + else + diff@var_units = "" + end if + + ; ########################################### + ; # create the plot # + ; ########################################### + + diff@res_gsnDraw = False ; do not draw yet + diff@res_gsnFrame = False ; don't advance frame + + ; ---------------------------------------------------------------------- + + if (numseas.gt.1) then + do is = 0, numseas - 1 + if (.not.ismissing(rmsd(is))) then + diff@res_gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd(is)) + else + diff@res_gsnRightString = "" + end if + if (.not.ismissing(bias(is))) then + diff@res_gsnLeftString = "bias = " + sprintf("%6.3f", bias(is)) + else + diff@res_gsnLeftString = "" + end if + + if (is.eq.0) then + maps_d(imod, is) = contour_map(wks0d, diff(is, :, :), var0) + end if + if (is.eq.1) then + maps_d(imod, is) = contour_map(wks1d, diff(is, :, :), var0) + end if + if (is.eq.2) then + maps_d(imod, is) = contour_map(wks2d, diff(is, :, :), var0) + end if + if (is.eq.3) then + maps_d(imod, is) = contour_map(wks3d, diff(is, :, :), var0) + end if + end do + else + if (.not.ismissing(rmsd(0))) then + diff@res_gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd(0)) + else + diff@res_gsnRightString = "" + end if + if (.not.ismissing(bias(0))) then + diff@res_gsnLeftString = "bias = " + sprintf("%6.3f", bias(0)) + else + diff@res_gsnLeftString = "" + end if + maps_d(imod, 0) = contour_map(wks0d, diff, var0) + end if + + ; mandatory netcdf output + + diff@var = var0 + "_bias_" + names(imod) + nc_outfile_bias = ncdf_write(diff, nc_filename_bias) + + end if ; if 
flag_diff + + ; ======================================================================= + + end do ; ii-loop (models) + + if (write_plots) then + ; save default color map in case it is needed later for optionally + ; plotting color bar to a separate file + + tmp_colors = gsn_retrieve_colormap(wks0) + cdims = dimsizes(tmp_colors) + nboxes = dimsizes(data1@res_cnLevels) + clen = cdims(0) + stride = max((/1, ((clen(0)-1) - 2) / nboxes /)) + fill_colors = ispan(2, clen(0) - 1, stride) + mean_colors = tmp_colors(fill_colors, :) + delete(tmp_colors) + delete(fill_colors) + delete(cdims) + + ; sort plots if needed (observations go first) + + plottmp = ispan(0, dim_MOD - 1, 1) + plotind = plottmp + + ; move plots of observational datasets (if present) into the first line(s) + ; of the panel plot + + j = 0 + do i = 0, dimsizes(plottmp) - 1 + if (i.eq.ref_ind) then + plotind(j) = plottmp(i) + j = j + 1 + else if (plottmp(i) .lt. dimsizes(projects)) then + if (isStrSubset(str_lower(projects(plottmp(i))), \ + "obs")) then + plotind(j) = plottmp(i) + j = j + 1 + end if + end if + end if + end do + + do i = 0, dimsizes(plottmp) - 1 + if ((isStrSubset(str_lower(projects(plottmp(i))), \ + "obs")).or.(i.eq.ref_ind)) then + else + plotind(j) = plottmp(i) + j = j + 1 + end if + end do + + pres = True ; needed to override + ; panelling defaults + pres@gsnPanelLabelBar = True ; add common colorbar + if (panel_labels) then + ; print dataset name on each panel + pres@gsnPanelFigureStrings = names(plotind) + end if + pres@gsnPanelFigureStringsFontHeightF = min((/0.01, 0.01 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + pres@lbLabelFontHeightF = min((/0.015, 0.01 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + pres@lbAutoManage = False + pres@lbTopMarginF = 0.1 + pres@lbTitleOn = True + pres@lbTitleFontHeightF = min((/0.015, 0.01 * 6.0 \ + / tofloat((dim_MOD + 1) / 2)/)) + pres@lbTitlePosition = "Bottom" + pres@lbTitleString = data1@long_name + " (" \ + + data1@units + ")" + pres@lbPerimOn = False ; draw line around label + ; bar area + pres@gsnPanelCenter = False + if (dim_MOD.le.8) then + pres@pmLabelBarOrthogonalPosF = -0.03 + else + pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to + ; the bottom + end if + + if (embracesetup) then + if (numseas.gt.1) then + pres@txString = season(0) + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(1) + outfile(1) = panelling(wks1, maps(plotind, 1), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(2) + outfile(2) = panelling(wks2, maps(plotind, 2), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(3) + outfile(3) = panelling(wks3, maps(plotind, 3), (dim_MOD + 3) / 4, \ + 4, pres) + log_info(" Wrote " + outfile) + else + pres@gsnPanelRowSpec = True ; tell panel what order to plt + pres@gsnPanelYWhiteSpacePercent = 5 + pres@gsnPanelXWhiteSpacePercent = 5 + if (isatt(diag_script_info, "PanelTop")) then + top = tofloat(diag_script_info@PanelTop) + else + top = 0.99 ; default + end if + pres@gsnPanelTop = top + + if (isvar("plotsperline")) then + delete(plotsperline) + end if + + plotsperline = new((dim_MOD + 1) / 2, integer) + plotsperline = 2 + + if ((isStrSubset(str_lower(projects(plotind(0))), \ + "obs")).and. 
\ + .not.(isStrSubset(str_lower(projects(plotind(1))), \ + "obs"))) then + plotsperline(0) = 1 + end if + + if (sum(plotsperline).gt.dimsizes(plotind)) then + plotsperline(dimsizes(plotsperline) - 1) = 1 + end if + + if (sum(plotsperline).lt.dimsizes(plotind)) then + xadd = 1 + xtmp = array_append_record(plotsperline, xadd, 0) + delete(plotsperline) + plotsperline = xtmp + delete(xtmp) + end if + + gsn_panel(wks0, maps(plotind, 0), plotsperline, pres) + outfile(0) = wks0@fullname + end if + else ; if embracesetup + if (numseas.gt.1) then + pres@txString = season(0) + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(1) + outfile(1) = panelling(wks1, maps(plotind, 1), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(2) + outfile(2) = panelling(wks2, maps(plotind, 2), (dim_MOD + 3) / 4, \ + 4, pres) + + pres@txString = season(3) + outfile(3) = panelling(wks3, maps(plotind, 3), (dim_MOD + 3) / 4, \ + 4, pres) + else + outfile(0) = panelling(wks0, maps(plotind, 0), (dim_MOD + 3) / 4, \ + 4, pres) + end if + end if ; if embracesetup + + do is = 0, numseas - 1 + log_info("Wrote " + outfile(is)) + end do + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) (mean) + ; ------------------------------------------------------------------------ + + statistics = (/"clim", "mean"/) + if (isatt(diag_script_info, "region")) then + domain = "reg" + else + domain = "global" + end if + plottype = "geo" + + do is = 0, numseas - 1 + caption = "Mean values for variable " + var0 \ + + " (" + allseas + ")." + log_provenance(nc_outfile_mean, outfile(is), caption, statistics, \ + domain, plottype, "", "", climofiles) + end do + + ; ======================================================================== + + if (flag_diff) then + pres@lbTitleString = "~F33~D~F21~" + diff@long_name + " (" + \ + diff@units + ")" + + ; save default color map in case it is needed later for optionally + ; plotting color bar to a separate file + + if (isvar("nboxes")) then + delete(nboxes) + end if + + tmp_colors = gsn_retrieve_colormap(wks0d) + cdims = dimsizes(tmp_colors) + nboxes = dimsizes(diff@res_cnLevels) + clen = cdims(0) + stride = max((/1, ((clen(0)-1) - 2) / nboxes /)) + fill_colors = ispan(2, clen(0) - 1, stride) + diff_colors = tmp_colors(fill_colors, :) + delete(tmp_colors) + delete(fill_colors) + delete(cdims) + + if (isvar("plottmp")) then + delete(plottmp) + end if + + if (isvar("plotind")) then + delete(plotind) + end if + + plottmp = ind(ispan(0, dim_MOD - 1, 1).ne.ref_ind) + plotind = plottmp + + ; if there is a second observational dataset, move the corresponding + ; plot to the first line of the panel plot + + j = 0 + do i = 0, dimsizes(plottmp) - 1 + if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then + plotind(j) = plottmp(i) + j = j + 1 + end if + end do + do i = 0, dimsizes(plottmp) - 1 + if (isStrSubset(str_lower(projects(plottmp(i))), "obs")) then + else + plotind(j) = plottmp(i) + j = j + 1 + end if + end do + + if (isatt(pres, "gsnPanelFigureStrings")) then + delete(pres@gsnPanelFigureStrings) + end if + if (panel_labels) then + pres@gsnPanelFigureStrings = names(plotind) + end if + + if (dimsizes(plotind).eq.1) then + pres@gsnPanelRight = 0.5 + end if + + if (embracesetup) then + if (numseas.gt.1) then + pres@txString = season(0) + outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \ + (dim_MOD + 3) / 4, 4, pres) + + pres@txString = season(1) + 
outfile_d(1) = panelling(wks1d, maps_d(plotind, 1), \
+ (dim_MOD + 3) / 4, 4, pres)
+
+ pres@txString = season(2)
+ outfile_d(2) = panelling(wks2d, maps_d(plotind, 2), \
+ (dim_MOD + 3) / 4, 4, pres)
+
+ pres@txString = season(3)
+ outfile_d(3) = panelling(wks3d, maps_d(plotind, 3), \
+ (dim_MOD + 3) / 4, 4, pres)
+ else
+ pres@gsnPanelRowSpec = True ; tell panel what order to plt
+ pres@gsnPanelYWhiteSpacePercent = 5
+ pres@gsnPanelXWhiteSpacePercent = 5
+ pres@gsnPanelTop = tofloat(diag_script_info@PanelTop)
+
+ if (isvar("plotsperline")) then
+ delete(plotsperline)
+ end if
+
+ plotsperline = new(max((/1, dim_MOD / 2/)), integer)
+ plotsperline = 2
+
+ if (dimsizes(plotind).gt.1) then
+ if ((isStrSubset(str_lower(projects(plotind(0))), "obs")).and. \
+ .not. \
+ (isStrSubset(str_lower(projects(plotind(1))), "obs"))) then
+ plotsperline(0) = 1
+ end if
+ end if
+
+ if (sum(plotsperline).gt.dimsizes(plotind)) then
+ plotsperline(dimsizes(plotsperline) - 1) = 1
+ end if
+
+ if (sum(plotsperline).lt.dimsizes(plotind)) then
+ xadd = 1
+ xtmp = array_append_record(plotsperline, xadd, 0)
+ delete(plotsperline)
+ plotsperline = xtmp
+ delete(xtmp)
+ end if
+
+ gsn_panel(wks0d, maps_d(plotind, 0), plotsperline, pres)
+ outfile_d(0) = wks0d@fullname
+ end if
+ else ; embracesetup = False
+ if (numseas.gt.1) then
+ pres@txString = season(0)
+ outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \
+ (dim_MOD + 3) / 4, 4, pres)
+
+ pres@txString = season(1)
+ outfile_d(1) = panelling(wks1d, maps_d(plotind, 1), \
+ (dim_MOD + 3) / 4, 4, pres)
+
+ pres@txString = season(2)
+ outfile_d(2) = panelling(wks2d, maps_d(plotind, 2), \
+ (dim_MOD + 3) / 4, 4, pres)
+
+ pres@txString = season(3)
+ outfile_d(3) = panelling(wks3d, maps_d(plotind, 3), \
+ (dim_MOD + 3) / 4, 4, pres)
+ else
+ outfile_d(0) = panelling(wks0d, maps_d(plotind, 0), \
+ (dim_MOD + 3) / 4, 4, pres)
+ end if
+ end if ; end if embracesetup
+
+ do is = 0, numseas - 1
+ log_info("Wrote " + outfile_d(is))
+
+ ; --------------------------------------------------------------------
+ ; write provenance to netcdf output and plot file(s) (bias)
+ ; --------------------------------------------------------------------
+
+ statistics = (/"clim", "diff"/)
+ if (isatt(diag_script_info, "region")) then
+ domain = "reg"
+ else
+ domain = "global"
+ end if
+ plottype = "geo"
+
+ ; note: because function log_provenance does not yet support attaching
+ ; different captions to netcdf (contains all seasons) and plots
+ ; (contain one season each), the caption cannot specify the
+ ; season plotted; using "annual" or "DJF/MAM/JJA/SON" instead.
+
+ caption = "Differences for variable " + var0 \
+ + " (" + allseas + "), reference = " + refname + "."
+ log_provenance(nc_outfile_bias, outfile_d(is), caption, statistics, \ + domain, plottype, "", "", climofiles) + end do + + end if ; if flag_diff + + ; optionally save legend(s) to extra file(s) + + if (extralegend) then + nboxes = dimsizes(data1@res_cnLevels) + 1 + wksleg = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 \ + + "_legend") + pres@lbMonoFillPattern = True + pres@lbOrientation = "Horizontal" + pres@vpWidthF = 0.7 + pres@vpHeightF = 0.1 + pres@lbLabelFontHeightF = 0.015 + pres@lbLabelAlignment = "InteriorEdges" + pres@lbTitleFontHeightF = 0.015 + pres@lbTitleString = data1@long_name + " (" + data1@units + ")" + + labels = tostring(data1@res_cnLevels) + + ; remove trailing zeros from strings + + do i = 0, dimsizes(labels) - 1 + i1 = str_index_of_substr(labels(i), ".", -1) + if (.not.ismissing(i1)) then + tmp = stringtochar(labels(i)) + do j = dimsizes(tmp) - 2, i1, 1 + if ((tmp(j).ne.".").and.(tmp(j).ne."0")) then + break + end if + end do + labels(i) = chartostring(tmp(0:j)) + delete(tmp) + end if + end do + + if (isatt(data1, "res_cnFillColors")) then + pres@lbFillColors = data1@res_cnFillColors + else if (isatt(data1, "res_cnFillPalette")) then + pres@lbFillColors = data1@res_cnFillPalette + else + pres@lbFillColors = mean_colors ; default colors + end if + end if + + gsn_labelbar_ndc(wksleg, nboxes, labels, 0.1, 0.9, pres) + + delete(wksleg) + delete(labels) + delete(pres@lbFillColors) + + if (flag_diff) then + nboxes = dimsizes(diff@res_cnLevels) + 1 + wksleg = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_" + var0 \ + + "_diff_legend") + + labels = tostring(diff@res_cnLevels) + + ; remove trailing zeros from strings + + do i = 0, dimsizes(labels) - 1 + i1 = str_index_of_substr(labels(i), ".", -1) + if (.not.ismissing(i1)) then + tmp = stringtochar(labels(i)) + do j = dimsizes(tmp) - 2, i1, 1 + if ((tmp(j).ne.".").and.(tmp(j).ne."0")) then + break + end if + end do + labels(i) = chartostring(tmp(0:j)) + delete(tmp) + end if + end do + + if (flag_rel_diff) then + pres@lbTitleString = "~F33~D~F21~" + data1@long_name + " (%)" + else + pres@lbTitleString = "~F33~D~F21~" + data1@long_name + " (" + \ + data1@units + ")" + end if + + if (isatt(diff, "res_cnFillColors")) then + pres@lbFillColors = diff@res_cnFillColors + else if (isatt(diff, "res_cnFillPalette")) then + pres@lbFillColors = diff@res_cnFillPalette + else + pres@lbFillColors = diff_colors ; default colors + end if + end if + + gsn_labelbar_ndc(wksleg, nboxes, labels, 0.1, 0.9, pres) + end if ; if (flag_diff) + end if ; if (extralegend) + end if ; if write_plots + + ; ========================================================================== + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/clouds/clouds_bias.ncl b/esmvaltool/diag_scripts/clouds/clouds_bias.ncl new file mode 100644 index 0000000000..ea5ba405ff --- /dev/null +++ b/esmvaltool/diag_scripts/clouds/clouds_bias.ncl @@ -0,0 +1,450 @@ +; CLOUDS_BIAS +; ############################################################################ +; Author: Axel Lauer (DLR, Germany) +; PROJECT-NAME EMBRACE +; ############################################################################ +; Description +; Calculates the multi-model mean bias, absolute difference and relative +; difference of annual mean 2-d cloud variables compared with a +; reference dataset (observations). 
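+;
+; In essence, the calculation boils down to the following sketch
+; (illustrative only: A_mmm and A_ref are placeholder names for the
+; multi-model mean and reference fields read via the ESMValTool
+; interface; the code below also handles seasonal climatologies):
+;
+;   mmdata = time_operations(A_mmm, -1, -1, "average", "annualclim", True)
+;   refdata = time_operations(A_ref, -1, -1, "average", "annualclim", True)
+;   diff = mmdata - refdata            ; multi-model mean bias
+;   absdiff = abs(diff)                ; optional absolute difference
+;   reldiff = 100.0 * diff / mmdata    ; optional relative difference (%),
+;                                      ; near-zero means are masked first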
+;
+; Required diag_script_info attributes (diagnostic specific)
+; none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+; plot_abs_diff: additionally also plot absolute differences (True, False)
+; plot_rel_diff: additionally also plot relative differences (True, False)
+; projection: map projection, e.g., Mollweide, Mercator
+; timemean: time averaging, i.e. "seasonalclim" (DJF, MAM, JJA, SON),
+; "annualclim" (annual mean)
+;
+; Required variable_info attributes (variable specific)
+; reference_dataset: name of reference dataset
+;
+; Optional variable_info attributes (variable specific)
+; long_name: description of variable
+;
+; Caveats
+; none
+;
+; Modification history
+; 20190222-A_laue_ax: added output of provenance (v2.0)
+; 20181119-A_laue_ax: adapted code to multi-variable capable framework
+; 20180923-A_laue_ax: added writing of results to netcdf
+; 20180914-A_laue_ax: code rewritten for ESMValTool v2.0
+; 20170620-A_laue_ax: added tags for reporting
+; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg
+; 20151027-A_laue_ax: moved call to 'write_references' to the beginning
+; of the code
+; 20150428-A_laue_ax: written.
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+
+begin
+
+ enter_msg(DIAG_SCRIPT, "")
+
+ var0 = variable_info[0]@short_name
+ info0 = select_metadata_by_name(input_file_info, var0)
+ dim_MOD = ListCount(info0)
+ if (isatt(variable_info[0], "reference_dataset")) then
+ refname = variable_info[0]@reference_dataset
+ end if
+ names = metadata_att_as_array(info0, "dataset")
+ infiles = metadata_att_as_array(info0, "filename")
+
+ log_info("++++++++++++++++++++++++++++++++++++++++++")
+ log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+ log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+ ; time averaging: at the moment, only "annualclim" and "seasonalclim"
+ ; are supported
+
+ ; Set default values for non-required diag_script_info attributes
+ set_default_att(diag_script_info, "plot_abs_diff", False)
+ set_default_att(diag_script_info, "plot_rel_diff", False)
+ set_default_att(diag_script_info, "projection", "CylindricalEquidistant")
+ set_default_att(diag_script_info, "timemean", "annualclim")
+
+ timemean = diag_script_info@timemean
+
+ if (timemean.eq."seasonalclim") then
+ numseas = 4
+ season = (/"DJF", "MAM", "JJA", "SON"/)
+ else
+ numseas = 1 ; default
+ season = (/"annual"/)
+ end if
+
+ ; create string for caption (netcdf provenance)
+
+ allseas = season(0)
+ do is = 1, numseas - 1
+ allseas = allseas + "/" + season(is)
+ end do
+
+ ; make sure path for (mandatory) netcdf output exists
+
+ work_dir = config_user_info@work_dir + "/"
+ ; Create work dir
+ system("mkdir -p " + work_dir)
+
+ if (config_user_info@write_plots.eq."True") then
+ write_plots = True
+ else
+ write_plots = False
+ end if
+
+end
+
+begin
+ ; ========================================================================
+ ; ========================== initialization ==============================
+ ; ========================================================================
+
+ ; check for reference dataset definition
+
+ if (.not.(isvar("refname"))) then
+ error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe")
+ end if
+
+ plot_abs_diff = diag_script_info@plot_abs_diff
+ plot_rel_diff = diag_script_info@plot_rel_diff + + ; get reference dataset + + ref_ind = ind(names .eq. refname) + if (ismissing(ref_ind)) then + error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \ + + refname + ") is missing") + end if + + ; get multi-model mean index + + mm_ind = ind(names .eq. "MultiModelMean") + + if (ismissing(mm_ind)) then + error_msg("f", DIAG_SCRIPT, "", "multi-model mean is missing (required)") + end if + + ; basename of diag_script + + diag_script_base = basename(DIAG_SCRIPT) + + ; ======================================================================== + ; =========================== calculations =============================== + ; ======================================================================== + + ; note: 1) masking is handled by the backend + ; 2) multi-model mean is calculated by the backend + + ; read data + + A0 = read_data(info0[mm_ind]) + mmdata = time_operations(A0, -1, -1, "average", timemean, True) + delete(A0) + + A0 = read_data(info0[ref_ind]) + refdata = time_operations(A0, -1, -1, "average", timemean, True) + delete(A0) + + ; differences between multi-model mean and reference data set + + diff = mmdata - refdata + mmdata@diag_script = DIAG_SCRIPT + copy_VarMeta(mmdata, diff) + + delete(refdata) + + ; convert units for plotting (precipitation: kg m-2 s-1 --> mm day-1) + + if (var0.eq."pr") then + mmdata = convert_units(mmdata, "mm/day") + diff = convert_units(diff, "mm/day") + ; for plotting IPCC AR5 fig 9.4 + diff@res_cnLevels = ispan(-30, 30, 5) * 0.1 + end if + + if (var0.eq."tas") then + mmdata = convert_units(mmdata, "degC") + diff@units = "degC" + ; for plotting IPCC AR5 fig 9.2 + diff@res_cnLevels = ispan(-5, 5, 1) + end if + + ; ======================================================================== + ; ============================= plotting ================================= + ; ======================================================================== + + climofiles = new(2, string) + climofiles(0) = infiles(mm_ind) + climofiles(1) = infiles(ref_ind) + + diff@res_gsnMaximize = True ; use full page for the plot + diff@res_cnFillOn = True ; color plot desired + diff@res_cnLineLabelsOn = False ; contour lines + diff@res_cnLinesOn = False + diff@res_tiMainOn = True + diff@res_gsnLeftStringFontHeightF = 0.015 + diff@res_cnLevelSelectionMode = "ExplicitLevels" + diff@res_mpOutlineOn = True + if (.not.isatt(diff, "res_cnLevels")) then + diff@res_cnLevels = fspan(min(diff), max(diff), 20) + end if + diff@res_mpFillOn = False + diff@res_lbLabelBarOn = True + diff@res_gsnRightString = "" + diff@res_mpFillDrawOrder = "PostDraw" ; draw map fill last + diff@res_cnMissingValFillColor = "Gray" + diff@res_tmYLLabelsOn = False + diff@res_tmYLOn = False + diff@res_tmYRLabelsOn = False + diff@res_tmYROn = False + diff@res_tmXBLabelsOn = False + diff@res_tmXBOn = False + diff@res_tmXTLabelsOn = False + diff@res_tmXTOn = False + diff@res_cnInfoLabelOn = False ; turn off cn info label + diff@res_mpProjection = diag_script_info@projection + + diff@var = var0 ; Overwrite existing entry + if (isatt(variable_info[0], "long_name")) then + diff@var_long_name = variable_info[0]@long_name + end if + diff@var_units = diff@units + + plots = new((/4, numseas/), graphic) + + ; -------------------------------------------------------------------- + ; plot contour map + + diff@res_gsnDraw = False ; Do not draw yet + diff@res_gsnFrame = False ; Don't advance frame. 
+ diff@res_mpPerimOn = False + + diff@res_lbTitleString = "(" + diff@units + ")" + diff@res_lbTitlePosition = "Bottom" + + diff@res_lbLabelFontHeightF = 0.014 + diff@res_lbTopMarginF = 0.1 + diff@res_lbTitleFontHeightF = 0.014 + + diff@res_tiMainFontHeightF = 0.016 + + diff@res_tiMainString = "Multi Model Mean Bias" + + copy_VarMeta(diff, mmdata) + delete(mmdata@res_cnLevels) + + if (var0.eq."tas") then + mmdata@res_cnLevels = ispan(-30, 30, 3) + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-tas.rgb") + mmdata@res_cnFillColors = pal + pal2 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-tas-delta.rgb") + diff@res_cnFillColors = pal2 + if (plot_abs_diff) then + pal3 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-tas-absdelta.rgb") + end if + else if (var0.eq."pr") then + mmdata@res_cnLevels = ispan(10, 100, 15) * 0.1 + + pal = read_colormap_file("$diag_scripts/shared/plot/rgb/ipcc-precip.rgb") + mmdata@res_cnFillColors = pal + pal2 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-precip-delta.rgb") + diff@res_cnFillColors = pal2 + if (plot_abs_diff) then + pal3 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-precip-absdelta.rgb") + end if + if (plot_rel_diff) then + pal4 = read_colormap_file("$diag_scripts/shared/plot/rgb/" \ + + "ipcc-precip-reldelta.rgb") + end if + else + mmdata@res_cnLevels = fspan(min(mmdata), max(mmdata), 20) + end if + end if + + mmdata@res_tiMainString = "Multi Model Mean" + + plotsperline = (/2, 0/) + plotind = (/0, 1/) ; mmm and mean bias are always plotted + + ; absolute differences + + if (plot_abs_diff) then + absdiff = abs(diff) + copy_VarMeta(diff, absdiff) + if (isvar("pal3")) then + delete(absdiff@res_cnFillColors) + absdiff@res_cnFillColors = pal3 + nt = dimsizes(pal3) + n = nt(0) + else + n = dimsizes(diff@res_cnLevels) + end if + mx = max(diff@res_cnLevels) + delete(absdiff@res_cnLevels) + + tmp = fspan(0.0, mx, n) + + absdiff@res_cnLevels = tmp(1:dimsizes(tmp)-1) + delete(tmp) + + absdiff@res_tiMainString = "Multi Model Mean of Absolute Error" + + iadd = 2 + itmp = array_append_record(plotind, iadd, 0) + delete(plotind) + plotind = itmp + delete(itmp) + plotsperline(1) = plotsperline(1) + 1 + end if + + ; relative differences + + if (plot_rel_diff) then + ; replace "epsilon" values with missing value + mm = where(abs(mmdata).lt.1.0e-6, mmdata@_FillValue, mmdata) + + reldiff = 100.0 * diff / mm + + copy_VarMeta(diff, reldiff) + delete(reldiff@res_cnLevels) + reldiff@res_cnLevels = fspan(-90.0, 90.0, 13) + reldiff@res_tiMainString = "Multi Model Mean of Relative Error" + reldiff@units = "%" + reldiff@res_lbTitleString = "(" + reldiff@units + ")" + if (isvar("pal4")) then + delete(reldiff@res_cnFillColors) + reldiff@res_cnFillColors = pal4 + end if + + iadd = 3 + itmp = array_append_record(plotind, iadd, 0) + delete(plotind) + plotind = itmp + delete(itmp) + plotsperline(1) = plotsperline(1) + 1 + end if + + ; panelling resources + + pres = True + pres@gsnPanelCenter = False + pres@gsnPanelRowSpec = True ; tell panel what order to plot + pres@gsnPanelYWhiteSpacePercent = 5 + pres@gsnPanelXWhiteSpacePercent = 5 + + plotfile = new(numseas, string) + plotfile(:) = "" + + if (write_plots) then + do is = 0, numseas - 1 + ; -------------------------------------------------------------------- + ; create workspace + + if (isvar("wks")) then + delete(wks) + end if + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_bias_" + var0 \ + + "_" + season(is)) + + 
plotfile(is) = wks@fullname
+
+ if (numseas.gt.1) then
+ pres@txString = season(is)
+ plots(0, is) = contour_map(wks, mmdata(is, :, :), var0)
+ plots(1, is) = contour_map(wks, diff(is, :, :), var0)
+ if (plot_abs_diff) then
+ plots(2, is) = contour_map(wks, absdiff(is, :, :), var0)
+ end if
+ if (plot_rel_diff) then
+ plots(3, is) = contour_map(wks, reldiff(is, :, :), var0)
+ end if
+ gsn_panel(wks, plots(plotind, is), plotsperline, pres)
+ else
+ plots(0, 0) = contour_map(wks, mmdata, var0)
+ plots(1, 0) = contour_map(wks, diff, var0)
+ if (plot_abs_diff) then
+ plots(2, 0) = contour_map(wks, absdiff, var0)
+ end if
+ if (plot_rel_diff) then
+ plots(3, 0) = contour_map(wks, reldiff, var0)
+ end if
+ gsn_panel(wks, plots(plotind, 0), plotsperline, pres)
+ end if
+
+ end do ; is-loop (seasons)
+ end if ; if write_plots
+
+ ; build caption for reporting and provenance; done outside the
+ ; write_plots block so that provenance is also written when
+ ; write_plots is False
+
+ caption = "Multi model values, from top left to bottom right: " \
+ + "mean, bias"
+ if (plot_abs_diff) then
+ caption = caption + ", absolute error"
+ end if
+ if (plot_rel_diff) then
+ caption = caption + ", relative error"
+ end if
+
+ ; ###########################################
+ ; # output to netCDF #
+ ; ###########################################
+
+ nc_filename = work_dir + "clouds_bias_" + var0 + ".nc"
+ nc_filename@existing = "append"
+
+ mmdata@var = var0 + "_mean"
+ mmdata@long_name = var0 + " (multi-model mean)"
+ nc_outfile = ncdf_write(mmdata, nc_filename)
+
+ diff@var = var0 + "_bias"
+ diff@long_name = var0 + " (multi-model bias)"
+ nc_outfile = ncdf_write(diff, nc_filename)
+
+ if (isvar("absdiff")) then
+ absdiff@var = var0 + "_abs_bias"
+ absdiff@long_name = var0 + " (multi-model absolute bias)"
+ nc_outfile = ncdf_write(absdiff, nc_filename)
+ end if
+
+ if (isvar("reldiff")) then
+ reldiff@var = var0 + "_rel_bias"
+ reldiff@long_name = var0 + " (multi-model relative bias)"
+ nc_outfile = ncdf_write(reldiff, nc_filename)
+ end if
+
+ ; ------------------------------------------------------------------------
+ ; write provenance to netcdf output and plot file(s)
+ ; ------------------------------------------------------------------------
+
+ statistics = (/"clim", "diff"/)
+ domain = "global"
+ plottype = "geo"
+ prov_caption = caption + " for variable " + var0 \
+ + " (" + allseas + "), reference = " + names(ref_ind) + "."
+
+ do is = 0, numseas - 1
+ log_provenance(nc_outfile, plotfile(is), prov_caption, statistics, \
+ domain, plottype, "", "", climofiles)
+ end do
+
+ leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl b/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl
new file mode 100644
index 0000000000..6c6835129d
--- /dev/null
+++ b/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl
@@ -0,0 +1,441 @@
+; CLOUDS_INTERANNUAL
+; ############################################################################
+; Author: Axel Lauer (DLR, Germany)
+; PROJECT-NAME EMBRACE
+; ############################################################################
+; Description
+; Calculates the interannual variability estimated as the temporal standard
+; deviation calculated from monthly mean anomalies after subtracting the
+; climatological mean seasonal cycle.
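+;
+; The estimate reduces to the following sketch (A0 is a placeholder for
+; a monthly mean input field; the code below additionally normalizes by
+; the annual mean climatology and converts to percent):
+;
+;   anom = calcMonAnomTLL(A0, time_operations(A0, -1, -1, "average", \
+;                                             "monthlyclim", True))
+;   iav = dim_stddev_n_Wrap(anom, 0)   ; temporal std. dev. per grid cell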
+;
+; Required diag_script_info attributes (diagnostic specific)
+; none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+; colormap: e.g., WhiteBlueGreenYellowRed, rainbow
+; explicit_cn_levels: use these contour levels for plotting
+; extrafiles: write plots for individual models to separate files
+; (True, False)
+; projection: map projection, e.g., Mollweide, Mercator
+;
+; Required variable_info attributes (variable specific)
+; none
+;
+; Optional variable_info attributes (variable specific)
+; long_name: description of variable
+; reference_dataset: name of reference dataset
+;
+; Caveats
+; none
+;
+; Modification history
+; 20190220-A_laue_ax: added provenance to output (v2.0)
+; 20181120-A_laue_ax: adapted code to multi-variable capable framework
+; 20180923-A_laue_ax: added writing of results to netcdf
+; 20180611-A_laue_ax: code rewritten for ESMValTool v2.0
+; 20170620-A_laue_ax: added tags for reporting
+; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg
+; 20151027-A_laue_ax: moved call to 'write_references' to the beginning
+; of the code
+; 20150415-A_laue_ax: written.
+;
+; ############################################################################
+
+; #####################################
+; # load external NCL code, if needed #
+; #####################################
+
+; A temporary file written by the invoking Python script
+; Passes on a number of variables from Python to NCL
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/plot/aux_plotting.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+
+begin
+ enter_msg(DIAG_SCRIPT, "")
+
+ var0 = variable_info[0]@short_name
+ info0 = select_metadata_by_name(input_file_info, var0)
+ dim_MOD = ListCount(info0)
+ if (isatt(variable_info[0], "reference_dataset")) then
+ refname = variable_info[0]@reference_dataset
+ end if
+ names = metadata_att_as_array(info0, "dataset")
+ infiles = metadata_att_as_array(info0, "filename")
+
+ log_info("++++++++++++++++++++++++++++++++++++++++++")
+ log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+ log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+ set_default_att(diag_script_info, "colormap", "WhiteBlueGreenYellowRed")
+ set_default_att(diag_script_info, "extrafiles", False)
+ set_default_att(diag_script_info, "projection", "CylindricalEquidistant")
+
+ extrafiles = diag_script_info@extrafiles
+
+ ; make sure path for (mandatory) netcdf output exists
+
+ work_dir = config_user_info@work_dir + "/"
+ ; Create work dir
+ system("mkdir -p " + work_dir)
+
+ if (config_user_info@write_plots.eq."True") then
+ write_plots = True
+ else
+ write_plots = False
+ end if
+
+ ; get multi-model mean index (if present)
+
+ mm_ind = ind(names .eq. "MultiModelMean")
+
+ if (ismissing(mm_ind)) then
+ mm_ind = -1
+ end if
+
+ ref_ind = -1 ; set to invalid value
+
+ ; if reference dataset has been defined, use it so plots can be sorted
+ if (isvar("refname")) then
+ ref_ind = ind(names .eq. refname)
+ end if
+
+end
+
+begin
+ ind_all_sorted = ispan(0, dim_MOD - 1, 1) ; create array
+
+ if (ref_ind .ge. 0) then
+ ind_wo_ref = ind(names .ne. refname)
+ ind_all_sorted(0) = ref_ind
+ ind_all_sorted(1:dim_MOD - 1) = ind_wo_ref
+ end if
+
+ maps = new(dim_MOD, graphic) ; collect individual maps in a graphic array
+
+ ; ###########################################
+ ; # get data and average time #
+ ; ###########################################
+
+ do ii = 0, dim_MOD - 1
+
+ imod = ind_all_sorted(ii)
+
+ if (isvar("data1")) then
+ delete(data1)
+ end if
+
+ log_info("processing " + names(imod))
+
+ if (isvar("A0")) then
+ delete(A0)
+ end if
+
+ A0 = read_data(info0[imod])
+
+ ; check dimensions
+
+ dims = getvardims(A0)
+ if (dimsizes(dims) .lt. 2) then
+ error_msg("f", DIAG_SCRIPT, "", dimsizes(dims) + \
+ " dimensions, need 2 or 3")
+ end if
+ idx = ind(dims .eq. "lat")
+ if (ismissing(idx)) then
+ error_msg("f", DIAG_SCRIPT, "", "no lat dimension")
+ end if
+ idx = ind(dims .eq. "lon")
+ if (ismissing(idx)) then
+ error_msg("f", DIAG_SCRIPT, "", "no lon dimension")
+ end if
+
+ if (var0.eq."pr") then
+ ; convert from kg m-2 s-1 to mm day-1
+ A0 = A0 * 86400.0
+ A0@units = "mm day-1"
+ end if
+
+ ; subtract climatological seasonal cycle from time series
+
+ if (isvar("timeseries")) then
+ delete(timeseries)
+ end if
+
+ timeseries = calcMonAnomTLL(A0, time_operations(A0, -1, -1, \
+ "average", "monthlyclim", True))
+
+ ; calculate temporal standard deviation for each grid cell
+
+ data1 = dim_stddev_n_Wrap(timeseries, 0)
+
+ ; normalize standard deviation and convert to percent
+
+ if (isvar("mean")) then
+ delete(mean)
+ end if
+ mean = time_operations(A0, -1, -1, "average", "annualclim", True)
+ ; replace "epsilon" values with missing value
+ mean = where(abs(mean).lt.1.0e-4, mean@_FillValue, mean)
+ data1 = 100.0 * data1 / abs(mean)
+
+ ; ###########################################
+ ; # Style dependent annotation #
+ ; ###########################################
+ ; retrieve unique strings describing the data
+ ; function in diag_scripts/shared/plot/style.ncl
+
+ annots = project_style(info0, diag_script_info, "annots")
+
+ ; ###########################################
+ ; # plot resources #
+ ; ###########################################
+
+ data1@res_gsnMaximize = True ; use full page for the plot
+ data1@res_cnFillOn = True ; color plot desired
+ data1@res_cnLineLabelsOn = False ; contour lines
+ data1@res_cnLinesOn = False
+
+ ; colors
+ ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml
+
+ if (isdefined("pal")) then
+ delete(pal)
+ end if
+ pal = read_colormap_file(diag_script_info@colormap)
+
+ ; annotation
+
+ data1@res_tiMainOn = False
+ data1@res_gsnLeftStringFontHeightF = 0.015
+ data1@res_cnLevelSelectionMode = "ExplicitLevels"
+
+ if (diag_script_info@projection.eq."Robinson") then
+ data1@res_mpPerimOn = False ; turn off perimeter around map
+ data1@res_mpGridLineColor = -1
+ data1@res_mpGridAndLimbOn = True
+ end if
+
+ data1@res_mpOutlineOn = True
+ data1@res_mpFillOn = False
+
+ ; variable specific plotting settings
+
+ if (any((/"clt"/).eq.var0)) then
+ data1@res_cnLevels = ispan(5, 50, 5)
+ else
+ data1@res_cnLevels = ispan(5, 100, 5)
+ end if
+
+ if (var0.eq."lwp") then
+ data1@res_mpOutlineOn = False
+ data1@res_mpFillOn = True
+ data1@res_mpLandFillColor = "Black"
+; delete(pal)
+; pal = read_colormap_file("$diag_scripts/shared/plot/rgb/qcm3.rgb")
+ end if
+
+ nboxes = dimsizes(data1@res_cnLevels)
+ clen = dimsizes(pal)
+ stride = max((/1, ((clen(0)-1) - 2) / nboxes /))
+ fill_colors = ispan(2, clen(0) - 1, stride)
+ data1@res_cnFillColors = fill_colors
+
+ data1@res_lbLabelBarOn = False
+ data1@res_gsnRightString = ""
+
+ ; map attributes
+
+ data1@res_mpFillDrawOrder = "PostDraw" ; draw map last
+ data1@res_cnMissingValFillColor = "Gray"
+
+ ; no tickmarks and no labels
+
+ data1@res_tmYLLabelsOn = False
+ data1@res_tmYLOn = False
+ data1@res_tmYRLabelsOn = False
+ data1@res_tmYROn = False
+ data1@res_tmXBLabelsOn = False
+ data1@res_tmXBOn = False
+ data1@res_tmXTLabelsOn = False
+ data1@res_tmXTOn = False
+ data1@res_cnInfoLabelOn = False ; turn off cn info label
+
+ data1@res_mpProjection = diag_script_info@projection
+
+ ; set explicit contour levels
+
+ if (isatt(diag_script_info, "explicit_cn_levels")) then
+ data1@res_cnLevelSelectionMode = "ExplicitLevels"
+ data1@res_cnLevels = diag_script_info@explicit_cn_levels
+ end if
+
+ ; ###########################################
+ ; # other Metadata: diag_script, var #
+ ; ###########################################
+ ; add to data1, as attributes without prefix
+
+ if (isatt(data1, "diag_script")) then ; add to existing entries
+ temp = data1@diag_script
+ delete(data1@diag_script)
+ data1@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0)
+ delete(temp)
+ else ; add as new attribute
+ data1@diag_script = (/DIAG_SCRIPT/)
+ end if
+ data1@var = var0 ; Overwrite existing entry
+ if (isatt(variable_info[0], "long_name")) then
+ data1@var_long_name = variable_info[0]@long_name
+ else
+ data1@var_long_name = var0
+ end if
+ data1@var_units = "%"
+
+ ; copy attributes for netCDF output
+
+ data1@long_name = "interannual variability " + data1@var_long_name
+ data1@units = data1@var_units
+
+ ; ###########################################
+ ; # create the plot #
+ ; ###########################################
+
+ data1@res_gsnFrame = False ; don't advance frame
+ data1@res_gsnDraw = False
+
+ ; function in aux_plotting.ncl
+
+ if (ii.eq.0) then
+ if (.not.extrafiles) then
+ wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_interannual_" \
+ + var0)
+ end if
+; drawNDCGrid(wks) ; debugging option
+ end if
+
+ if (extrafiles) then
+ if (isvar("wks")) then
+ delete(wks)
+ end if
+ wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_interannual_" \
+ + var0 + "_" + annots(imod))
+ end if
+
+ maps(ii) = contour_map(wks, data1, var0)
+
+ if (extrafiles) then
+ if (write_plots) then ; add labels
+ txres = True
+ txres@txFontHeightF = 0.03
+ txres@txJust = "BottomRight"
+ txres@txPerimOn = True
+ txres@txBackgroundFillColor = "white"
+ text = gsn_add_text(wks, maps(ii), annots(imod), 170, -80, txres)
+ draw(maps(ii))
+ frame(wks)
+ plotfile = maps@outfile
+ else
+ plotfile = ""
+ end if
+
+ ; ##########################################
+ ; # output each dataset to separate netCDF #
+ ; ##########################################
+
+ nc_filename = work_dir + "clouds_interannual_" + var0 + "_" \
+ + annots(imod) + ".nc"
+ nc_outfile = ncdf_write(data1, nc_filename)
+
+ ; -------------------------------------------------------------------
+ ; write provenance info
+ ; -------------------------------------------------------------------
+
+ statistics = (/"clim", "var"/)
+ domain = "global"
+ plottype = "geo"
+ climofile = infiles(imod)
+ caption = "Interannual variability of variable " + var0 + \
+ " from dataset " + annots(imod) + "."
+
+ log_provenance(nc_outfile, plotfile, caption, statistics, domain, \
+ plottype, "", "", climofile)
+
+ else ; extrafiles .eq.
false + + ; ######################################### + ; # output all datasets to common netCDF # + ; ######################################### + + nc_filename = work_dir + "clouds_interannual_" + var0 + ".nc" + nc_filename@existing = "append" + data1@var = var0 + "_var_" + annots(imod) + nc_outfile = ncdf_write(data1, nc_filename) + + end if ; if extrafiles + end do ; ii-loop (datasets) + + if (write_plots) then + pres = True ; needed to override + ; panelling defaults + pres@gsnPanelLabelBar = True ; add common colorbar + ; print dataset name on each panel + pres@gsnPanelFigureStrings = annots(ind_all_sorted) + pres@gsnPanelFigureStringsFontHeightF = 0.007 + pres@lbLabelFontHeightF = 0.01 + pres@lbAutoManage = False + pres@lbTopMarginF = 0.1 + pres@lbTitleOn = True + pres@lbTitleFontHeightF = 0.009 + pres@lbTitlePosition = "Bottom" + pres@lbTitleString = "~F8~s~F21~" + var0 + " (%)" + pres@lbPerimOn = False ; draw line around label + ; bar area + pres@gsnPanelCenter = False + pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to + ; the bottom +; pres@gsnPanelDebug = True + + if (extrafiles) then + if (isvar("wks")) then + delete(wks) + end if + ; plot legend + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_interannual_" \ + + var0 + "_legend") + pres@lbMonoFillPattern = True + pres@lbOrientation = "Horizontal" + pres@vpWidthF = 0.7 + pres@vpHeightF = 0.1 + pres@lbLabelFontHeightF = 0.015 + pres@lbLabelAlignment = "InteriorEdges" + pres@lbTitleFontHeightF = 0.015 + labels = tostring(data1@res_cnLevels) + pres@lbFillColors = fill_colors + gsn_labelbar_ndc(wks, nboxes, labels, 0.1, 0.9, pres) + else + outfile = panelling(wks, maps, (dim_MOD + 3) / 4, 4, pres) + log_info(" Wrote " + outfile) + end if + else + outfile = "" + end if ; if write_plots + + ; ------------------------------------------------------------------------ + ; write provenance to common netcdf and plot file + ; ------------------------------------------------------------------------ + + if (.not. extrafiles) then + statistics = (/"clim", "var"/) + domain = "global" + plottype = "geo" + caption = "Interannual variability of variable " + var0 + "." + log_provenance(nc_outfile, outfile, caption, statistics, domain, \ + plottype, "", "", infiles) + end if + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/clouds/clouds_ipcc.ncl b/esmvaltool/diag_scripts/clouds/clouds_ipcc.ncl new file mode 100644 index 0000000000..23629e3af1 --- /dev/null +++ b/esmvaltool/diag_scripts/clouds/clouds_ipcc.ncl @@ -0,0 +1,761 @@ +; CLOUDS_IPCC +; ############################################################################ +; Author: Axel Lauer (DLR, Germany) +; PROJECT-NAME EMBRACE +; ############################################################################ +; Description +; Calculates the multi-model mean bias of annual mean 2-d cloud variables +; compared with a reference data set (observations). In addition, +; zonal averages of the individual models, the multi-model mean and the +; reference data set (observations) are calculated. +; If more than one variable is specified in the namelist, variables 2 to n +; are assumed to be observational error estimates. These error estimates are +; added to the zonal mean plot as light red shading. 
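+;
+; Schematically, the two plot panels are obtained as follows (sketch
+; with placeholder names; "data" is a time-averaged 2-d field with
+; longitude as last dimension; the code below also supports seasonal
+; climatologies and optional sea-ice masking):
+;
+;   diff = mmdata - refdata                                  ; bias map
+;   zm = dim_avg_n_Wrap(data, dimsizes(dimsizes(data)) - 1)  ; zonal mean
+;
+; Observational error estimates are averaged as variances (squared
+; before the time average, square root taken afterwards) and drawn as
+; light red +/- 1 sigma shading in the zonal mean plot.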
+;
+; Required diag_script_info attributes (diagnostic specific)
+; none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+; explicit_cn_levels: contour levels
+; mask_ts_sea_ice: - True = mask T < 272 K as sea ice (only for
+; variable "ts")
+; - False = no additional grid cells masked for
+; variable "ts"
+; projection: map projection, e.g., Mollweide, Mercator
+; styleset: style set for zonal mean plot ("CMIP5", "DEFAULT")
+; timemean: time averaging, i.e. "seasonalclim" (DJF, MAM, JJA,
+; SON), "annualclim" (annual mean)
+; valid_fraction: used for creating sea ice mask
+; (mask_ts_sea_ice = true): fraction of valid time steps
+; required to mask grid cell as valid data
+;
+; Required variable_info attributes (variable specific)
+; reference_dataset: name of reference data set
+;
+; Optional variable_info attributes (variable specific)
+; long_name: description of variable
+; units: variable units
+;
+; Caveats
+; KNOWN ISSUES
+; 1) specifying more than one data set for the observational uncertainties
+; may lead to unexpected or undefined results
+; 2) Bias and zonal means cannot be written to the same netCDF because
+; function ncdf_write does not support writing variables with different
+; dimensions to the same output file yet. As the Python function for
+; writing the provenance information called by function log_provenance
+; does not support different captions for netCDF and (associated)
+; plotfile, the caption does not represent the contents of the netCDF
+; exactly but rather those of the plot. The data used for creating the
+; plot is written to 2 separate netCDFs.
+;
+; Modification history
+; 20190222-A_laue_ax: added output of provenance (v2.0)
+; 20181119-A_laue_ax: adapted code to multi-variable capable framework
+; 20180923-A_laue_ax: added writing of results to netcdf
+; 20180529-A_laue_ax: code rewritten for ESMValTool v2.0
+; 20170620-A_laue_ax: added tags for reporting
+; 20160920-A_laue_ax: added optional shading of observational uncertainties
+; to the zonal mean plot
+; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg
+; 20151027-A_laue_ax: moved call to 'write_references' to the beginning
+; of the code
+; 20150428-A_laue_ax: written.
+;
+; ############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/contour_maps.ncl"
+
+begin
+
+ enter_msg(DIAG_SCRIPT, "")
+
+ tmp = metadata_att_as_array(input_file_info, "short_name")
+ variables = get_unique_values(tmp)
+ delete(tmp)
+ n = dimsizes(variables)
+
+ if (n.gt.2) then
+ log_info(DIAG_SCRIPT + ": WARNING - max 2 variables supported. " \
+ + "Using only first two variables.")
+ end if
+
+ mainvarind = 0
+ errvarind = -1
+
+ if (n .gt. 1) then
+ do i = 0, 1
+ if (isStrSubset(variables(i), "err")) then
+ errvarind = i
+ break
+ end if
+ end do
+ if (errvarind .eq. 0) then
+ mainvarind = 1
+ end if
+ end if
+
+ var0 = variable_info[mainvarind]@short_name
+ info0 = select_metadata_by_name(input_file_info, var0)
+ dim_MOD0 = ListCount(info0)
+ if (isatt(variable_info[mainvarind], "reference_dataset")) then
+ refname = variable_info[mainvarind]@reference_dataset
+ end if
+ if (isatt(variable_info[mainvarind], "alternative_dataset")) then
+ refname2 = variable_info[mainvarind]@alternative_dataset
+ end if
+ names = metadata_att_as_array(info0, "dataset")
+ infiles = metadata_att_as_array(info0, "filename")
+
+ if (errvarind .ge. 0) then
+ var1 = variable_info[errvarind]@short_name
+ info1 = select_metadata_by_name(input_file_info, var1)
+ dim_MOD1 = ListCount(info1)
+ names_err = metadata_att_as_array(info1, "dataset")
+ infiles_err = metadata_att_as_array(info1, "filename")
+ else
+ dim_MOD1 = 0
+ end if
+
+ log_info("++++++++++++++++++++++++++++++++++++++++++")
+ log_info(DIAG_SCRIPT + " (var: " + var0 + ")")
+ log_info("++++++++++++++++++++++++++++++++++++++++++")
+
+ ; Set default values for non-required diag_script_info attributes
+ set_default_att(diag_script_info, "mask_ts_sea_ice", False)
+ set_default_att(diag_script_info, "projection", "CylindricalEquidistant")
+ set_default_att(diag_script_info, "timemean", "annualclim")
+ set_default_att(diag_script_info, "valid_fraction", 0.5)
+
+ ; time averaging: at the moment, only "annualclim" and "seasonalclim"
+ ; are supported
+
+ numseas = 1 ; default
+ season = (/"annual"/)
+
+ timemean = diag_script_info@timemean
+
+ if (timemean.eq."seasonalclim") then
+ delete(season)
+ numseas = 4
+ season = (/"DJF", "MAM", "JJA", "SON"/)
+ end if
+
+ ; create string for caption (netcdf provenance)
+
+ allseas = season(0)
+ do is = 1, numseas - 1
+ allseas = allseas + "/" + season(is)
+ end do
+
+ ; make sure path for (mandatory) netcdf output exists
+
+ write_nc = True
+ work_dir = config_user_info@work_dir + "/"
+ ; Create work dir
+ system("mkdir -p " + work_dir)
+
+ if (config_user_info@write_plots.eq."True") then
+ write_plots = True
+ else
+ write_plots = False
+ end if
+
+end
+
+begin
+ ; ========================================================================
+ ; ========================== initialization ==============================
+ ; ========================================================================
+
+ diag_script_base = basename(DIAG_SCRIPT)
+
+ ; check for reference dataset definition
+ if (.not.(isvar("refname"))) then
+ error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe")
+ end if
+
+ ; set reference dataset
+
+ ref_ind = ind(names .eq. refname)
+ if (ismissing(ref_ind)) then
+ error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \
+ + refname + ") is missing")
+ end if
+
+ if (isvar("refname2")) then
+ ref_ind2 = ind(names .eq. refname2)
+ if (ismissing(ref_ind2)) then
+ ref_ind2 = -1
+ end if
+ else
+ ref_ind2 = -1
+ end if
+
+ ; get multi-model mean index
+
+ mm_ind = ind(names .eq. "MultiModelMean")
+
+ if (ismissing(mm_ind)) then
+ error_msg("f", DIAG_SCRIPT, "", "multi-model mean is missing (required)")
+ end if
+
+ mask_ts_sea_ice = diag_script_info@mask_ts_sea_ice
+
+ if ((var0 .eq. "ts") .and. (mask_ts_sea_ice)) then
+ ; Create a missing value mask (from reference data)
+ ; The dim_avg_wgt function with option limit is used on the time
+ ; coordinate: in this way each grid box containing more than
+ ; valid_fraction of missing values along the time series is
+ ; set to missing.
+
+ data_temp = read_data(info0[ref_ind])
+
+ ww = data_temp&time
+ ww = 1.
+ limit = toint(diag_script_info@valid_fraction * \ + dimsizes(data_temp&time)) + + if (limit.lt.1) then + limit = 1 + end if + + ; temperatures below 272 K are sea ice ---> mask + data_temp = where(data_temp.lt.272.0, data_temp@_FillValue, data_temp) + + ; dim_avg_wgt_n_Wrap produces an invalid result for rank = 1 + if (dimsizes(dimsizes(data_temp)).eq.1) then + tmp = dim_avg_wgt_Wrap(data_temp, ww, limit) + else + tmp = dim_avg_wgt_n_Wrap(data_temp, ww, limit, 0) + end if + + global_mask = where(ismissing(tmp), tmp, 0.) + + delete(tmp) + delete(ww) + delete(data_temp) + end if + + ; process uncertainty estimate (if present) + + if (errvarind .ge. 0) then + do imod = 0, dim_MOD1 - 1 + log_info("processing error estimate from " + names_err(imod)) + + A0 = read_data(info1[imod]) + + log_info(" ++++++++++++++ Treating variable " + \ + var1 + " as error variable when averaging ") + A0 = A0 * A0 + + ; time mean + + data1 = time_operations(A0, -1, -1, "average", timemean, True) + delete(A0) + + ; zonal mean + + err = dim_avg_n_Wrap(data1, dimsizes(dimsizes(data1)) - 1) + ; err is still the squared error (squared for averaging)! + err = sqrt(err) + delete(data1) + + if (.not.isdefined("err_zm")) then + tmp = dimsizes(err) + dims = array_append_record(dim_MOD0, tmp, 0) + delete(tmp) + err_zm = new(dims, float) + err_zm!0 = "model" + err_zm&model = names + if (numseas.eq.1) then + err_zm!1 = "lat" + err_zm&lat = err&lat + else + err_zm!1 = "season" + err_zm&season = err&season + err_zm!2 = "lat" + err_zm&lat = err&lat + end if + delete(dims) + end if + + idx = ind(names .eq. names_err(imod)) + if (all(ismissing(idx)) .or. (dimsizes(idx) .gt. 1)) then + error_msg("f", DIAG_SCRIPT, "", "error estimates for dataset " + \ + names_err(imod) + " do not have a (unique) corresponding " \ + + "dataset for variable " + var0) + end if + ; err_zm&model(imod) = names_err(imod) + if (numseas.gt.1) then + err_zm(idx, :, :) = err(:, :) + else + err_zm(idx, :) = err(:) + end if + + delete(err) + + end do ; imod-loop + end if ; if error estimate is present + + ; ======================================================================== + ; =========================== calculations =============================== + ; ======================================================================== + + ; debug output + +; system ("rm debug.nc") +; debugfile = addfile("debug.nc","c") +; debugfile->mask = global_mask + + ; calculate zonal means (main variable) + + do imod = 0, dim_MOD0 - 1 + ; note: 1) masking is handled by the backend + ; 2) multi-model mean is calculated by the backend + + A0 = read_data(info0[imod]) + data = time_operations(A0, -1, -1, "average", timemean, True) + delete(A0) + + if (isdefined("global_mask")) then + if (numseas.eq.1) then + data = data + global_mask + else + do is = 0, numseas - 1 + data(is, :, :) = data(is, :, :) + global_mask + end do + end if + end if + + ; zonal mean + + tmp = dim_avg_n_Wrap(data, dimsizes(dimsizes(data)) - 1) + + if (.not.isdefined("zm")) then + tmpdim = dimsizes(tmp) + dims = array_append_record(dim_MOD0, tmpdim, 0) + delete(tmpdim) + zm = new(dims, float) + zm!0 = "model" + zm&model = names + if (numseas.eq.1) then + zm!1 = "lat" + zm&lat = tmp&lat + else + zm!1 = "season" + zm&season = tmp&season + zm!2 = "lat" + zm&lat = tmp&lat + end if + delete(dims) + end if + + if (numseas.gt.1) then + zm(imod, :, :) = tmp(:, :) + else + zm(imod, :) = tmp(:) + end if + + delete(tmp) + + ; save maps of multi-model mean and reference data + + if (imod.eq.mm_ind) then + mmdata = data + 
copy_VarMeta(data, mmdata) + end if + if (imod.eq.ref_ind) then + refdata = data + copy_VarMeta(data, refdata) + end if + + ; debug output + +; debugfile->$names(imod)$ = data1 + + delete(data) + + end do ; imod + + ; differences between multi-model mean and reference data set + + diff = mmdata - refdata + copy_VarMeta(refdata, diff) + +; debugfile->diff = diff + + ; we order the zonal mean array in a way so that + ; the lines for the multi-model mean and reference model will + ; be drawn on top of the lines for the individual models, i.e.: + ; (1) individual model(s) + ; (2) reference model(s) (observations) + ; (3) multi-model mean + + dims = dimsizes(zm) + zonalmean = new(dims, float) + copy_VarMeta(zm, zonalmean) + + ; model indices with no reference model(s) and no multi-model mean + + model_ind = ispan(0, dim_MOD0 - 1, 1) + model_ind(ref_ind) = -1 + if (ref_ind2 .ge. 0) then + model_ind(ref_ind2) = -1 + end if + model_ind(mm_ind) = -1 + + modelsonly_ind = ind(model_ind.ge.0) + delete(model_ind) + + n = dimsizes(modelsonly_ind) - 1 + + ; first entries in "zonalmean" = individual models + + if (numseas.gt.1) then + zonalmean(0:n, :, :) = zm(modelsonly_ind, :, :) + else + zonalmean(0:n, :) = zm(modelsonly_ind, :) + end if + + zonalmean&model(0:n) = zm&model(modelsonly_ind) + + ; observation(s) + + n = n + 1 + zonalmean&model(n) = zm&model(ref_ind) + + if (numseas.gt.1) then + zonalmean(n, :, :) = zm(ref_ind, :, :) + if (ref_ind2 .ge. 0) then + n = n + 1 + zonalmean(n, :, :) = zm(ref_ind2, :, :) + end if + else + zonalmean(n, :) = zm(ref_ind, :) + if (ref_ind2 .ge. 0) then + n = n + 1 + zonalmean(n, :) = zm(ref_ind2, :) + end if + end if + + if (ref_ind2 .ge. 0) then + zonalmean&model(n) = zm&model(ref_ind2) + end if + + ; last entry in "zonalmean" = multi-model mean + + n = n + 1 + + if (numseas.gt.1) then + zonalmean(n, :, :) = zm(mm_ind, :, :) ; multi-model mean + else + zonalmean(n, :) = zm(mm_ind, :) ; multi-model mean + end if + + zonalmean&model(n) = zm&model(mm_ind) + + ; update indices for array "zonalmean" + + mm_ind = n + + delete(zm) + + ; ======================================================================== + ; ============================= plotting ================================= + ; ======================================================================== + + climofiles = new(dim_MOD0 + dim_MOD1, string) + climofiles(0:dim_MOD0 - 1) = infiles + if (dim_MOD1 .gt. 0) then + climofiles(dim_MOD0:dim_MOD0 + dim_MOD1 - 1) = infiles_err + end if + + if (.not. 
isatt(diag_script_info, "explicit_cn_levels")) then
+ log_info(DIAG_SCRIPT + " (var: " + var0 + "):")
+ log_info("info: using default contour levels")
+ cnLevels = fspan(min(diff), max(diff), 20)
+ else
+ cnLevels = diag_script_info@explicit_cn_levels
+ end if
+
+ diff@diag_script = DIAG_SCRIPT
+ diff@res = True
+
+ diff@res_gsnMaximize = True ; use full page for the plot
+ diff@res_cnFillOn = True ; color plot desired
+ diff@res_cnLineLabelsOn = False ; contour lines
+ diff@res_cnLinesOn = False
+ diff@res_tiMainOn = False
+ diff@res_mpPerimOn = False
+ diff@res_mpGridLineColor = -1
+ diff@res_mpGridAndLimbOn = True
+
+ diff@res_gsnLeftStringFontHeightF = 0.015
+ diff@res_cnLevelSelectionMode = "ExplicitLevels"
+ diff@res_mpOutlineOn = True
+ diff@res_cnLevels = cnLevels
+ diff@res_mpFillOn = False
+
+ diff@res_lbLabelBarOn = True
+ diff@res_gsnRightString = ""
+ diff@res_mpFillDrawOrder = "PostDraw" ; draw map fill last
+ diff@res_cnMissingValFillColor = "Gray"
+ diff@res_tmYLLabelsOn = False
+ diff@res_tmYLOn = False
+ diff@res_tmYRLabelsOn = False
+ diff@res_tmYROn = False
+ diff@res_tmXBLabelsOn = False
+ diff@res_tmXBOn = False
+ diff@res_tmXTLabelsOn = False
+ diff@res_tmXTOn = False
+ diff@res_cnInfoLabelOn = False ; turn off cn info label
+ diff@res_mpProjection = diag_script_info@projection
+ if (isatt(diag_script_info, "explicit_cn_levels")) then
+ if (isatt(diff, "res_cnLevels")) then
+ delete(diff@res_cnLevels)
+ end if
+ diff@res_cnLevelSelectionMode = "ExplicitLevels"
+ diff@res_cnLevels = diag_script_info@explicit_cn_levels
+ end if
+
+ diff@var = var0 ; overwrite existing entry
+ if (.not. isatt(variable_info[mainvarind], "long_name")) then
+ variable_info[mainvarind]@long_name = ""
+ end if
+ if (.not. isatt(variable_info[mainvarind], "units")) then
+ variable_info[mainvarind]@units = ""
+ end if
+
+ diff@var_long_name = variable_info[mainvarind]@long_name
+ diff@var_units = variable_info[mainvarind]@units
+
+ if (var0.eq."clt") then
+ if (isatt(diff, "res_cnLevels")) then
+ delete(diff@res_cnLevels)
+ end if
+ diff@res_cnLevels = fspan(-25, 25, 11)
+ end if
+
+ if ((var0.eq."lwp").or.(var0.eq."clivi")) then
+ if (isatt(diff, "res_cnLevels")) then
+ delete(diff@res_cnLevels)
+ end if
+ diff@res_cnLevels = fspan(-100, 100, 11)
+ diff = diff * 1000
+ zonalmean = zonalmean * 1000
+ diff@var_units = "g m-2"
+ variable_info[mainvarind]@units = diff@var_units
+ zonalmean@units = diff@var_units
+ end if
+
+ if ((var0.eq."pr").or.(var0.eq."pr-mmday")) then
+ diff@res_mpLandFillColor = "Black"
+ pal = read_colormap_file("$diag_scripts/shared/plot/rgb/" \
+ + "ipcc-precip-delta.rgb")
+ diff@res_cnFillColors = pal
+ end if
+
+ plots = new((/2, numseas/), graphic)
+ plotfile = new(numseas, string)
+ plotfile(:) = ""
+
+ do is = 0, numseas - 1
+
+ ; --------------------------------------------------------------------
+ ; create workspace
+
+ if (isvar("wks")) then
+ delete(wks)
+ end if
+
+ wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_ipcc_" + var0 \
+ + "_" + season(is))
+; drawNDCGrid(wks) ; debugging option
+
+ ; --------------------------------------------------------------------
+ ; plot contour map
+
+ diff@res_gsnDraw = False ; do not draw yet
+ diff@res_gsnFrame = False ; don't advance frame
+
+ diff@res_lbTitleString = "~F8~D~F21~" + diff@var_long_name + \
+ " (" + diff@var_units + ")"
+ diff@res_lbTitlePosition = "Bottom"
+ diff@res_lbLabelFontHeightF = 0.015
+ diff@res_lbAutoManage = False
+; diff@res_lbTopMarginF = 0.1
+ diff@res_lbTitleFontHeightF = 0.015
+
+ ; plot contour map
+
+ if (numseas.gt.1) then
+ plots(0, is) = contour_map(wks, diff(is, :, :), var0)
+ else
+ plots(0, 0) = contour_map(wks, diff, var0)
+ end if
+
+ ; --------------------------------------------------------------------
+ ; plot zonal means
+
+ lat = (/zonalmean&lat/)
+
+ ; the attribute "long_name" will be used as title string for the x-axis
+
+ lat@long_name = "Latitude"
+ lat@units = "degrees_north"
+
+ ; the attribute "long_name" will be used as title string for the y-axis
+
+ zonalmean@long_name = zonalmean@long_name + " (" + \
+ zonalmean@units + ")"
+
+ linethickness = new(dim_MOD0, float)
+ linecolor = new(dim_MOD0, string)
+ linedash = new(dim_MOD0, integer)
+
+ n = dimsizes(modelsonly_ind) - 1
+
+ ; settings for all models that have been used to calculate the
+ ; multi-model mean (= first entries in "zonalmean")
+
+ linethickness(0:n) = 1.0
+ linecolor(0:n) = "(/0.5, 0.5, 0.5/)"
+ linedash = 0
+
+ ; for all other models, styleset defined in config file is
+ ; used (if defined), otherwise use default values
+
+ if (isatt(diag_script_info, "styleset")) then
+ colortab = project_style(info0, diag_script_info, "colors")
+ dashtab = project_style(info0, diag_script_info, "dashes")
+ thicknesstab = project_style(info0, diag_script_info, "thicks")
+ do i = n + 1, dim_MOD0 - 1
+ if (isdefined("idx")) then
+ delete(idx)
+ end if
+ idx = ind(names .eq. zonalmean&model(i))
+ if (.not. all(ismissing(idx))) then
+ linecolor(i) = colortab(idx(0))
+ linethickness(i) = thicknesstab(idx(0))
+ linedash(i) = dashtab(idx(0))
+ end if
+ end do
+ else
+ linethickness(n+1:dim_MOD0-1) = 4.0 ; reference dataset(s)
+ linethickness(mm_ind) = 4.0 ; multi-model mean
+ linecolor(n+1:dim_MOD0-1) = "Black" ; reference data set
+ linecolor(mm_ind) = "Red" ; multi-model mean
+ end if
+
+ res = True
+ res@xyDashPattern = linedash
+ res@xyMonoLineColor = False
+ res@xyLineThicknesses = linethickness
+ res@xyLineColors = linecolor
+ res@tmXBLabelFontHeightF = 0.023
+ res@tmYLLabelFontHeightF = 0.023
+ res@gsnDraw = False ; do not draw yet
+ res@gsnFrame = False ; don't advance frame
+
+ ; this controls the size and location of the second plot
+
+ res@vpWidthF = 0.6
+ res@vpHeightF = 0.475
+
+ if (numseas.gt.1) then
+ plots(1, is) = gsn_csm_xy(wks, lat, zonalmean(:, is, :), res)
+ if (isdefined("err_zm")) then
+ y = new((/2, dimsizes(err_zm&lat)/), float)
+ res@gsnXYFillColors = "(/1.0, 0.8, 0.8/)"
+ res@xyLineColors(:) = "transparent"
+ do imod = 0, dim_MOD0 - 1
+ erridx = ind(err_zm&model.eq.zonalmean&model(imod))
+ if (.not.all(ismissing(erridx))) then
+ y(0, :) = zonalmean(imod, is, :) - err_zm(erridx, is, :)
+ y(1, :) = zonalmean(imod, is, :) + err_zm(erridx, is, :)
+ errorshade = gsn_csm_xy(wks, err_zm&lat, y, res)
+ overlay(plots(1, is), errorshade)
+ end if
+ end do
+ end if
+ else
+ plots(1, 0) = gsn_csm_xy(wks, lat, zonalmean, res)
+
+ if (isdefined("err_zm")) then
+ y = new((/2, dimsizes(err_zm&lat)/), float)
+ res@gsnXYFillColors = "(/1.0, 0.8, 0.8/)"
+ res@xyLineColors(:) = "transparent"
+ do imod = 0, dim_MOD0 - 1
+ erridx = ind(err_zm&model.eq.zonalmean&model(imod))
+ if (.not.all(ismissing(erridx))) then
+ y(0, :) = zonalmean(imod, :) - err_zm(erridx, :)
+ y(1, :) = zonalmean(imod, :) + err_zm(erridx, :)
+ errorshade = gsn_csm_xy(wks, err_zm&lat, y, res)
+ overlay(plots(1, 0), errorshade)
+ end if
+ end do
+ end if
+ end if
+
+ ; --------------------------------------------------------------------
+
+ if (write_plots) then
+ pres = True
+ pres@gsnPanelCenter = False
+ pres@gsnPanelXF = (/0.075, 0.625/) ; hor. pos. of sub-plots
+ pres@txString = season(is)
+
+ outfile = panelling(wks, plots(:, is), 1, 2, pres)
+ log_info("Wrote " + wks@fullname)
+
+ plotfile(is) = wks@fullname
+ end if ; if write_plots
+ end do ; is-loop (seasons)
+
+ ; ###########################################
+ ; # Output to netCDF #
+ ; ###########################################
+
+ ; note: function ncdf_write currently does not support writing variables
+ ; diff and zonalmean to the same netCDF.
+
+ nc_filename = work_dir + "clouds_ipcc_" + var0 + "_bias.nc"
+ diff@var = var0 + "_bias"
+ diff@diag_script = DIAG_SCRIPT
+ nc_outfile_bias = ncdf_write(diff, nc_filename)
+
+ nc_filename = work_dir + "clouds_ipcc_" + var0 + "_zonal.nc"
+ nc_filename@existing = "append"
+ zonalmean@var = var0 + "_zonal"
+ zonalmean@diag_script = DIAG_SCRIPT
+ nc_outfile_zonal = ncdf_write(zonalmean, nc_filename)
+ if (isvar("err_zm")) then
+ err_zm@var = var1 + "_zonal"
+ nc_outfile_zonal = ncdf_write(err_zm, nc_filename)
+ end if
+
+ ; ------------------------------------------------------------------------
+ ; write provenance to netcdf output and plot file(s)
+ ; ------------------------------------------------------------------------
+
+ statistics = "clim"
+ domain = "global"
+ plottype = (/"geo", "zonal"/)
+ caption = "Multi model mean bias (left) and zonal averages (right) " \
+ + "for variable " + var0 + " (" + allseas \
+ + "), reference = " + names(ref_ind) + "."
+
+ do is = 0, numseas - 1
+ log_provenance(nc_outfile_bias, plotfile(is), caption, statistics, \
+ domain, plottype, "", "", climofiles)
+ log_provenance(nc_outfile_zonal, plotfile(is), caption, statistics, \
+ domain, plottype, "", "", climofiles)
+ end do
+
+ leave_msg(DIAG_SCRIPT, "")
+
+end
diff --git a/esmvaltool/diag_scripts/clouds/clouds_taylor.ncl b/esmvaltool/diag_scripts/clouds/clouds_taylor.ncl
new file mode 100644
index 0000000000..fd1c6777f9
--- /dev/null
+++ b/esmvaltool/diag_scripts/clouds/clouds_taylor.ncl
@@ -0,0 +1,922 @@
+; ############################################################################
+; CLOUDS_TAYLOR
+; ############################################################################
+; Author: Axel Lauer (DLR, Germany)
+; PROJECT-NAME EMBRACE
+; ############################################################################
+; Description
+; Calculates the performance of models in reproducing 2-d fields of annual
+; mean or seasonal (DJF, MAM, JJA, SON) mean cloud properties
+; (Taylor diagrams). The code is based on
+; 1) perfmetrics_main.ncl
+; 2) perfmetrics_taylor.ncl
+; 3) perfmetrics_taylor_collect.ncl
+; originally written by Franziska Frank (DLR, Germany).
+; Note: This code requires that all data are provided on the same grid.
+;
+; Required diag_script_info attributes (diagnostic specific)
+; none
+;
+; Optional diag_script_info attributes (diagnostic specific)
+; embracelegend: - False (default) = include legend in plot, max.
+; 2 columns with dataset names in legend
+; - True = write extra file with legend, max. 7 dataset
+; names per column in legend, alternative observational
+; dataset(s) will be plotted as a red star and labeled
+; "altern. ref. dataset" in legend (only if dataset is
+; of class "OBS")
+; estimate_obs_uncertainty: - True = estimate observational uncertainties
+; from mean values (assuming fractions of obs. RMSE from
+; documentation of the obs data); only available for
+; "CERES-EBAF", "MODIS", "MODIS-L3"
+; - False = do not estimate obs. uncertainties from mean
+; values
+; filename_add: legacy feature: arbitrary string to be added to all
+; filenames of plots and netcdf output produced
+; (default = "")
+; mask_ts_sea_ice: - True = mask T < 272 K as sea ice (only for
+; variable "ts")
+; - False = no additional grid cells masked for
+; variable "ts"
+; styleset: "CMIP5", "DEFAULT"
+; (if not set, CLOUDS_TAYLOR will create a color table and
+; symbols for plotting)
+; timemean: time averaging
+; - annualclim (default) = 1 plot annual mean
+; - seasonalclim = 4 plots (DJF, MAM, JJA, SON)
+; valid_fraction: used for creating sea ice mask (mask_ts_sea_ice = true):
+; fraction of valid time steps required to mask grid cell
+; as valid data
+;
+; Required variable_info attributes (variable specific)
+; reference_dataset: name of reference data set
+;
+; Optional variable attributes (variable specific)
+; none
+;
+; Caveats
+; KNOWN LIMITATIONS
+; 1) only 2-dim variables are currently supported
+; 2) observational uncertainties are regridded like standard variables
+; 3) for derived variables (e.g. SW_CRE), also the original variables have
+; to be specified in the namelist for the reference model (for the
+; reference dataset only) in order to estimate the observational
+; uncertainty (if estimate_obs_uncertainty = True)
+; 4) the variable to plot must be the *first* variable in the namelist,
+; the observational uncertainty (or the variables from which a derived
+; variable has been calculated) the *second* (third, ...)
+;
+; Modification history
+; 20190221-A_laue_ax: added provenance to output (v2.0)
+; 20181120-A_laue_ax: adapted code to multi-variable capable framework
+; 20180923-A_laue_ax: added writing of results to netcdf
+; 20180611-A_laue_ax: code rewritten for ESMValTool v2.0
+; 20170620-A_laue_ax: added tags for reporting
+; 20161104-A_laue_ax: changed calculation of correlation and standard
+; deviation ratio from unweighted to grid area weighted
+; 20160901-A_laue_ax: added regridding option 1 deg x 1 deg
+; 20160818-A_laue_ax: added support for variable attribute "ref_model"
+; 20160404-A_laue_ax: added optional drawing of observational
+; uncertainties
+; 20151027-A_laue_ax: moved call to 'write_references' to the beginning
+; of the code
+; 20150415-A_laue_ax: written.
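+;
+; For orientation: the grid-area weighted statistics (see the 20161104
+; entry above) can be sketched with standard NCL calls as follows
+; (placeholder names "model", "ref" and "lat"; the actual implementation
+; below may differ in detail):
+;
+;   wgt = latRegWgt(lat, "double", 0)      ; area weights (regular grid)
+;   cc = pattern_cor(model, ref, wgt, 0)   ; centered pattern correlation
+;   ma = wgt_areaave(model, wgt, 1.0, 0)   ; weighted spatial means
+;   ra = wgt_areaave(ref, wgt, 1.0, 0)
+;   ratio = sqrt(wgt_areaave((model - ma) ^ 2, wgt, 1.0, 0)) \
+;           / sqrt(wgt_areaave((ref - ra) ^ 2, wgt, 1.0, 0))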
+; +; ########################################################################### + + +load "$diag_scripts/../interface_scripts/interface.ncl" + +load "$diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl" + +begin + + enter_msg(DIAG_SCRIPT, "") + + set_default_att(diag_script_info, "embracelegend", False) + set_default_att(diag_script_info, "estimate_obs_uncertainty", False) + set_default_att(diag_script_info, "mask_ts_sea_ice", False) + set_default_att(diag_script_info, "timemean", "annualclim") + set_default_att(diag_script_info, "valid_fraction", 0.5) + + estimate_obs_uncertainty = diag_script_info@estimate_obs_uncertainty + + variables = metadata_att_as_array(variable_info, "short_name") + numvars = dimsizes(variables) + + ; find "main" variable and if present uncertainty estimates or auxiliary + ; variables that can optionally be used to estimate the uncertainties + + mainvarind = 0 ; default = main variable is first (and only) variable + errvarind = -1 ; default = no error estimate available + + if (numvars.gt.1) then + ; uncertainty estimates are identified by "err" in their short_name + do i = 0, numvars - 1 + if (isStrSubset(variables(i), "err")) then + errvarind = i + break + end if + end do + ; now find the main variable: + ; it is assumed that the main variable is either the first non-error + ; variable or the first "derived" variable + idx = ind(ispan(0, numvars - 1, 1) .ne. errvarind) + mainvarind = idx(0) ; first non-error variable + ; now check for possibly derived variables + do ii = 1, dimsizes(idx) - 1 + i = idx(ii) + ; the "derived" attribute is stored in input_file_info only + do j = 0, ListCount(input_file_info) - 1 + if (input_file_info[j]@short_name .eq. variables(i)) then + if isatt(input_file_info[j], "derive") then + if (input_file_info[j]@derive) then + mainvarind = i + break + end if + end if + end if + end do + end do + else + end if + + var0 = variable_info[mainvarind]@short_name + info0 = select_metadata_by_name(input_file_info, var0) + dim_MOD = ListCount(info0) + if (isatt(variable_info[mainvarind], "reference_dataset")) then + refname = variable_info[mainvarind]@reference_dataset + end if + if (isatt(variable_info[mainvarind], "alternative_dataset")) then + refname2 = variable_info[mainvarind]@alternative_dataset + end if + names = metadata_att_as_array(info0, "dataset") + infiles = metadata_att_as_array(input_file_info, "filename") + + ; check for reference model definition + + if (.not.isvar("refname")) then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset defined in recipe") + end if + + ; print info on variable(s) to log file + + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info(DIAG_SCRIPT) + log_info("++++++++++++++++++++++++++++++++++++++++++") + log_info("main variable: " + variables(mainvarind)) + if (errvarind .ge. 0) then + log_info("error variable: " + variables(errvarind)) + else + log_info("error variable: none") + end if + tmp = ispan(0, numvars - 1, 1) + auxind = ind(tmp .ne. mainvarind .and. tmp .ne. errvarind) + if(.not. 
all(ismissing(auxind))) then + if (estimate_obs_uncertainty) then + log_info("aux variable: " + variables(auxind)) + end if + else + if (estimate_obs_uncertainty) then + log_info("no aux variable(s): cannot estimate obs uncertainty") + end if + end if + log_info("++++++++++++++++++++++++++++++++++++++++++") + + delete(tmp) + delete(auxind) + + ; optional uncertainty estimates are only supported for the reference + ; dataset, i.e. max 1 dataset + + if (estimate_obs_uncertainty) then + auxind = ind(ispan(0, numvars - 1, 1) .ne. mainvarind) + if (.not. all(ismissing(auxind))) then + do i = 0, dimsizes(auxind) - 1 + tmpinfo = select_metadata_by_name(input_file_info, \ + variables(auxind(i))) + items = ListCount(tmpinfo) + if (items .gt. 1) then + error_msg("f", DIAG_SCRIPT, "", "uncertainty estimate only " + \ + "supported for 1 dataset (i.e. reference dataset)") + end if + tmpname = metadata_att_as_array(tmpinfo, "dataset") + if (tmpname .ne. refname) then + error_msg("f", DIAG_SCRIPT, "", "uncertainty estimate only " + \ + "supported for the reference dataset (" + refname + ")") + end if + delete(tmpinfo) + end do + end if + delete(auxind) + end if + + ; time averaging: at the moment, only "annualclim" and "seasonalclim" + ; are supported + + timemean = diag_script_info@timemean + numseas = 1 ; default + season = (/"annual"/) + + if (timemean.eq."seasonalclim") then + numseas = 4 + delete(season) + season = (/"DJF", "MAM", "JJA", "SON"/) + end if + + ; create string for caption (netcdf provenance) + + allseas = season(0) + do is = 1, numseas - 1 + allseas = allseas + "/" + season(is) + end do + + ; make sure path for (mandatory) netcdf output exists + + work_dir = config_user_info@work_dir + "/" + ; Create work dir + system("mkdir -p " + work_dir) + + if (config_user_info@write_plots.eq."True") then + write_plots = True + else + write_plots = False + end if + +end + +begin + ; ======================================================================== + ; ========================== initialization ============================== + ; ======================================================================== + + ; get reference model + + ref_ind = ind(names .eq. refname) + if (ismissing(ref_ind)) then + error_msg("f", DIAG_SCRIPT, "", "reference dataset (" \ + + refname + ") is missing") + end if + + ; get multi-model mean index (if present) + + mm_ind = ind(names .eq. "MultiModelMean") + + if (ismissing(mm_ind)) then + mm_ind = -1 + end if + + mask_ts_sea_ice = diag_script_info@mask_ts_sea_ice + + if (isatt(diag_script_info, "filename_add")) then + filename_add = "_" + diag_script_info@filename_add + else + filename_add = "" + end if + + embracelegend = diag_script_info@embracelegend + + ; ======================================================================== + ; ============================ statistics ================================ + ; ======================================================================== + + if ((var0 .eq. "ts") .and. (mask_ts_sea_ice)) then + ; Create a missing value mask (from reference data) + ; The dim_avg_wgt function with option limit is used on the time + ; coordinate: each grid box with fewer than valid_fraction of valid + ; time steps along the time series is set to missing. + + data_temp = read_data(info0[ref_ind]) + + ww = data_temp&time + ww = 1.
+ limit = toint(diag_script_info@valid_fraction * \ + dimsizes(data_temp&time)) + + if (limit.lt.1) then + limit = 1 + end if + + ; temperatures below 272 K are sea ice ---> mask + data_temp = where(data_temp.lt.272.0, data_temp@_FillValue, data_temp) + + ; dim_avg_wgt_n_Wrap produces an invalid result for rank = 1 + if (dimsizes(dimsizes(data_temp)).eq.1) then + tmp = dim_avg_wgt_Wrap(data_temp, ww, limit) + else + tmp = dim_avg_wgt_n_Wrap(data_temp, ww, limit, 0) + end if + + global_mask = where(ismissing(tmp), tmp, 0.) + + delete(tmp) + delete(ww) + delete(data_temp) + end if + + ; ; debug output + ; system ("rm debug.nc") + ; debugfile = addfile("debug.nc","c") + ; debugfile->mask = global_mask + + ; read data and calculate time average + + ierridx = 0 + + do ivar = 0, numvars - 1 + + ; ignore auxiliary variables if estimate_obs_uncertainty = False + + if (.not.estimate_obs_uncertainty .and. ivar .ne. mainvarind .and. \ + ivar .ne. errvarind) then + continue + end if + + ; process all models for first variable + if (ivar.eq.mainvarind) then + modelf = 0 + modell = dim_MOD - 1 + ; process error estimates / auxiliary variables for error estimate + ; (reference model only) + else + modelf = 0 + modell = 0 + end if + + info = select_metadata_by_name(input_file_info, variables(ivar)) + + do imod = modelf, modell + data_temp = read_data(info[imod]) + + ; The uncertainty estimates are assumed to be given as + ; 1-sigma of normally distributed error estimates + ; Note: uncertainty estimates must have "err" in their + ; variable name, otherwise variables are assumed + ; to be fields used for calculating derived variables + ; (in this case, fields are not squared before averaging) + ; ---> square sigma before averaging over time, then + ; calculate square-root to get average sigma + + if (ivar.eq.errvarind) then + data_temp = data_temp * data_temp + end if + + data1 = time_operations(data_temp, -1, -1, "average", \ + timemean, True) + + if (ivar.eq.errvarind) then + data1 = sqrt(data1) + end if + + if (isdefined("global_mask")) then + if (numseas.eq.1) then + data1 = data1 + global_mask + else + do is = 0, numseas - 1 + data1(is, :, :) = data1(is, :, :) + global_mask + end do + end if + end if + + if (ivar.eq.mainvarind) then + if (.not.isdefined("data")) then + dim_data = array_append_record((/dim_MOD/), dimsizes(data1), 0) + data = new(dim_data, float) + data!0 = "model" + data&model = names + if (numseas.eq.1) then + copy_VarCoords(data1, data(imod, :, :)) + else + copy_VarCoords(data1, data(imod, :, :, :)) + end if + end if + dim_data = dimsizes(data) + rank = dimsizes(dim_data) + if (numseas.eq.1) then + data(imod, :, :) = data1 + else + data(imod, :, :, :) = data1 + end if + delete(data_temp) + delete(data1) + else + if (.not.isdefined("err")) then + dim_data = array_append_record((/numvars - 1/), dimsizes(data1), 0) + err = new(dim_data, float) + err!0 = "var" + err&var = variables(ind(variables .ne. var0)) + end if + if (numseas.eq.1) then + err(ierridx, :, :) = data1 + else + err(ierridx, :, :, :) = data1 + end if + end if + + ; debug output + + ; debugfile->$input_file_info@dataset(imod)$ = data1 + end do ; imod loop + if (ivar .ne. 
mainvarind) then + ierridx = ierridx + 1 + end if + end do ; ivar loop + + ; number of used reference data sets + + dim_REF = 1 + if (isvar("refname2")) then + dim_REF = dim_REF + 1 + end if + + ; define result variable + + val = new((/dim_MOD - dim_REF, 2, numseas/), float) + val!0 = "models" + val!1 = "statistic" + val!2 = "time" + val&models = names(ind((names .ne. refname))) + val&statistic = (/"stddev_ratio", "correlation"/) + val&time = ispan(0, numseas - 1, 1) + + ; extract data + + do is = 0, numseas - 1 + + if (numseas.eq.1) then + ref = data(ref_ind, :, :) + else + ref = data(ref_ind, is, :, :) + end if + + ; loop over models + ; (second model index, for models only, skip reference) + + mod_i = 0 + do imod = 0, dim_MOD - 1 + + ; skip reference + + if (imod.eq.ref_ind) then + log_info("Ref_dataset = " + data&model(imod)) + continue + end if + + if (numseas.eq.1) then + var = data(imod, :, :) + else + var = data(imod, is, :, :) + end if + + ; calculate rate of standard deviations + + ; val(mod_i, 0, is) = calculate_metric(var, ref, "stddev_ratio_nowgt") + val(mod_i, 0, is) = calculate_metric(var, ref, "stddev_ratio") + + ; calculate pattern correlation + + ; val(mod_i, 1, is) = calculate_metric(var, ref, "correlation_nowgt") + val(mod_i, 1, is) = calculate_metric(var, ref, "correlation") + + delete(var) + mod_i = mod_i + 1 + + end do ; loop over datasets + end do ; loop over seasons + + ; attach attributes to the results + + val@title = "taylor metrics" + val@long_name = "Taylor diagram" + val@diag_script = (/DIAG_SCRIPT/) + val@var = "var" + + ; ------------------- calculate/estimate RMS of observations --------------- + + rmsobs = new((/numseas/), float) + + ; optionally, if there are no variable(s) available for the error estimate, + ; the reference data can be used to create one for selected data sets + + if (estimate_obs_uncertainty) then + if (any(refname.eq.(/"CERES-EBAF", "MODIS", "MODIS-L3"/))) then + if (numvars.eq.1) then ; no variable(s) for error estimate + numvars = 2 + variables0 = (/variables(0), variables(0)/) + delete(variables) + variables = variables0 + dim_data = dimsizes(data) + dim_data(0) = numvars - 1 + err = new(dim_data, float) + if (numseas.eq.1) then + err(0, :, :) = data(ref_ind, :, :) + else + err(0, :, :, :) = data(ref_ind, :, :, :) + end if + end if + end if + end if ; if estimate_obs_uncertainty + + do is = 0, numseas - 1 + if (numseas.eq.1) then + ref = data(ref_ind, :, :) + else + ref = data(ref_ind, is, :, :) + end if + + ; This code is equivalent to the function "calculate_metric" + ; (statistics.ncl) + weights = map_area(ref&lat, ref&lon) + ; optional: no weights --> reset weights + ; wgt1d = 1.0 + + ; convert to 1-D arrays + ref1d = ndtooned(ref) + wgt1d = ndtooned(weights) + wgt1d = wgt1d / dim_avg_n_Wrap(wgt1d, 0) + + rmsobs(is) = 0.0 + + if (errvarind .ge. 0 .or. estimate_obs_uncertainty) then + ierridx = 0 + do iv = 0, numvars - 1 + if (iv .eq. mainvarind) then + continue + end if + + if (numseas.eq.1) then + var = err(ierridx, :, :) + else + var = err(ierridx, is, :, :) + end if + + ; If the variable name contains "err", the variable is assumed to + ; contain an estimate of the observational uncertainty (e.g. UWisc, + ; GPCP-SG, and SSMI-MERIS provide error estimates, MODIS-L3-C6 provides + ; error estimates for lwp and iwp but not for clt). In this case, there + ; is nothing to do here. + ; If the variable name does not contain "err", the variable is assumed + ; to be a field (e.g. 
rsut, rsutcs) used to calculate a derived + ; variable (e.g. swcre). In this case, the observational uncertainty + ; can be estimated if the fractional average error is known + ; (e.g. CERES-EBAF, MODIS). + + if (errvarind .ge. 0) then + ; variable contains error estimate provided by data set + ; ---> nothing to do + factor = 1.0 + else + ; variable(s) contain(s) fields used to derive the variable plotted + ; --> observational uncertainty might be estimated from these fields + ; if the average error(s) of these fields are known + + ; by default, uncertainty cannot be estimated + + factor = 0.0 + + ; for CERES-EBAF, we estimate the errors from the original variables + ; (rsut+rsutcs / rlut+rlutcs) assuming variable specific factors + + if (refname.eq."CERES-EBAF") then + if (any(variables(iv).eq.(/"rsut", "rsutcs"/))) then + ; From the "Technical Note on CERES EBAF Ed2.7": + ; rsutcs: 1% ~ 0.5 W/m2 + ; estimate total error = 2.6 W/m2 ---> ~5.2% + ; rsut: 4-5 W/m2 ---> ~4-5% + ; + ; ---> use 5% for rsut and rsutcs + factor = 0.05 + end if + if (any(variables(iv).eq.(/"rlut", "rlutcs"/))) then + ; From the Technical Note on CERES EBAF Ed2.7: + ; "For CERES, calibration uncertainty is 0.5% (1 sigma) + ; which for a typical global mean clear-sky LW flux corresponds + ; to ~1 W/m2. [...] The total error in TOA outgoing clear-sky LW + ; radiation in a region is estimated as + ; sqrt(1^2 + 1.74^2 + 0.7^2 + 1^2 + 2.75^2) or approximately + ; 3.6 W/m2." + ; + ; --> here, we estimate the total error as + ; 0.5% * 3.6 W/m2 / 1.0 W/m2 = 1.8% + factor = 0.018 + end if + end if + + if (refname.eq."MODIS") then + if (any(variables(iv).eq.(/"clt"/))) then + ; From the technical note on obs4MIPS MODIS satellite data + ; "Moderate Resolution Imaging Spectroradiometer (MODIS) + ; Cloud Fraction Technical Document": + ; + ; Global day/night fractional agreement of cloud detection + ; between MODIS and CALIPSO lidar (CALIOP): + ; Aug 2006 (clear): 0.84 + ; Aug 2006 (cloudy): 0.88 + ; Feb 2006 (clear): 0.85 + ; Feb 2006 (cloudy): 0.88 + ; + ; ---> using fractional agreement as error estimate (1 sigma) + ; ---> average error estimate = + ; sqrt((0.84^2 + 0.88^2 + 0.85^2 + 0.88^2) / 4) = 0.86 + ; ---> factor = 1 - 0.86 = 0.14 + factor = 0.14 + end if + end if + + if (refname.eq."MODIS-L3") then + if (any(variables(iv).eq.(/"clt"/))) then + factor = 0.0 ; no error estimates for clt from MODIS_C6_L3 + end if + end if + end if + + ; convert to 1-D arrays + var1d = ndtooned(var * factor) + + ; --- RMS --- + ; in case of derived variables (e.g., SW_CRE), the squared error + ; estimates of all input variables are added before calculating + ; the square root + + rmsobs(is) = rmsobs(is) + dim_avg_wgt_Wrap(var1d ^ 2, wgt1d, 1) + + ierridx = ierridx + 1 + + end do ; iv-loop + end if + + ; finish calculation of RMS + rmsobs(is) = sqrt(rmsobs(is)) + + ; normalize RMS by dividing by standard deviation of obs (ref) + avg_ref = dim_avg_wgt_Wrap(ref1d, wgt1d, 1) + rmsobs(is) = rmsobs(is) / sqrt(dim_avg_wgt_Wrap((ref1d - avg_ref) ^ 2, \ + wgt1d, 1)) + + end do ; loop over seasons + + if (isvar("var")) then + delete(var) + end if + if (isvar("ref")) then + delete(ref) + end if + if (isvar("weights")) then + delete(weights) + end if + if (isvar("var1d")) then + delete(var1d) + end if + if (isvar("ref1d")) then + delete(ref1d) + end if + if (isvar("wgt1d")) then + delete(wgt1d) + end if + + ; ======================================================================== + ; ============================= plotting 
================================= + ; ======================================================================== + + nummods = dim_MOD - dim_REF + + colors = new(nummods, string) + markers = new(nummods, integer) + + ratio = new((/nummods, 1, numseas/), float) ; standard deviation ratios + ratio = val(:, 0, :) + cc = new((/nummods, 1, numseas/), float) ; correlations + cc = val(:, 1, :) + + if (isatt(diag_script_info, "styleset")) then + colortab = project_style(info0, diag_script_info, "colors") + markertab = project_style(info0, diag_script_info, "markers") + else + colortab = (/"(/0.00, 0.00, 0.59/)", "(/0.00, 0.39, 1.00/)", \ + "(/0.20, 1.00, 1.00/)", "(/0.20, 0.88, 0.00/)", \ + "(/1.00, 0.88, 0.00/)", "(/1.00, 0.59, 0.00/)", \ + "(/1.00, 0.20, 0.00/)", "(/0.59, 0.00, 0.00/)", \ + "(/0.78, 0.00, 0.78/)", "(/0.59, 0.00, 0.59/)", \ + "(/0.90, 0.90, 0.90/)", "(/0.70, 0.70, 0.70/)", \ + "(/0.50, 0.50, 0.50/)", "(/0.30, 0.30, 0.30/)"/) + markertab = (/16, 4, 5, 0/) + end if + + plotfile = new((/numseas/), string) + + do is = 0, numseas - 1 + if (isvar("wks")) then + delete(wks) + end if + + if (isvar("legendwks")) then + delete(legendwks) + end if + + if (isvar("plot")) then + delete(plot) + end if + + wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_taylor_" + var0 \ + + "_" + season(is) + filename_add) + + ; create new marker: filled star + + mstring = "z" + fontnum = 35 + size = 1.5 + angle = 0.0 + + new_index = NhlNewMarker(wks, mstring, fontnum, 0.0, 0.0, \ + 1.0, size, angle) + + if (embracelegend) then + legendwks = get_wks("legend_wks", DIAG_SCRIPT, "clouds_taylor_" \ + + "legend" + filename_add) + new_index = NhlNewMarker(legendwks, mstring, fontnum, 0.0, 0.0, \ + 1.0, size, angle) + else + legendwks = wks + end if + + i = 0 + idx = 0 + + if (isatt(diag_script_info, "styleset")) then + do ii = 0, dim_MOD - 1 + + if (ii.eq.ref_ind) then + continue + end if + + colors(i) = colortab(i) + markers(i) = markertab(i) + + ; override multi-model mean (if present) + if (ii .eq. mm_ind) then + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + markers(i) = new_index + end if + ; override alternative obs (if "EMBRACE" legend) + if (embracelegend) then + if ((isStrSubset(str_lower(input_file_info@project(i)), "obs"))) \ + then + if (mm_ind .ge. 0) then + colors(i) = "(/1.00, 0.00, 0.00/)" ; red + else + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + end if + markers(i) = new_index + val&models(i) = "altern. ref. dataset" + end if + end if + i = i + 1 + end do + else + do ii = 0, dim_MOD - 1 + + if (ii.eq.ref_ind) then + continue + end if + + if (ii .eq. mm_ind) then + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + markers(i) = new_index + i = i + 1 + else + if (embracelegend) then + colors(i) = colortab(idx) + if ((isStrSubset(str_lower(input_file_info@project(i)), "obs"))) \ + then + if (mm_ind .ge. 0) then + colors(i) = "(/1.00, 0.00, 0.00/)" ; red + else + colors(i) = "(/0.00, 0.00, 0.00/)" ; black + end if + markers(i) = new_index + val&models(i) = "altern. ref. 
dataset" + else + markers(i) = markertab(mod(i, 2)) + end if + i = i + 1 + if (i.ge.dim_MOD) then + break + end if + idx = idx + 1 + if (idx.ge.dimsizes(colortab)) then + idx = dimsizes(colortab) - 1 + end if + else + do n = 0, dim_MOD / dimsizes(colortab) + colors(i) = colortab(idx) + markers(i) = markertab(n) + i = i + 1 + if (i.ge.dim_MOD) then + break + end if + end do + idx = idx + 1 + if (idx.ge.dimsizes(colortab)) then + idx = dimsizes(colortab) - 1 + end if + end if + end if + end do + end if + + ropts = True + ropts@Colors = colors + ropts@Markers = markers ; marker styles + ropts@gsMarkerSizeF = 0.0125 + + modelnames = val&models + ropts@caseLabels = modelnames + + ropts@stnRad = (/ 0.5, 1.5, 2.0, 2.5 /) + ropts@OneX = "1.00" + ropts@ccRays = (/ 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, \ + 0.9, 0.95, 0.99 /) + ropts@ccRays_color = "Black" + ropts@centerDiffRMS = True + ropts@centerDiffRMS_color = "LightGray" ; "PaleGreen1" + + ropts@printDataLabels = False ; print numeric label for each data pt. + ropts@reverseOrder = True ; reverse order of items in legend + ropts@legendBox = True ; draw box around legend + + ; legend parameters + + ropts@legendXpos = 0.575 + ropts@legendYpos = -0.5 + ropts@legendWidth = 0.2 + + ropts@caseLabelsFontHeightF = 0.1 + ropts@plotSize = 0.6 + + if (embracelegend) then + ropts@legendExtraFile = True ; save legend to extra file + ropts@modelsperrow = 7 ; 7 models per column in the legend + else + ropts@legendExtraFile = False + if (dim_MOD.ge.20) then + ropts@caseLabelsFontHeightF = 0.06 + ropts@plotSize = 0.45 + end if + end if + + ; plot + if (write_plots) then + ropts@tiMainString = var0 + " (" + season(is) + ")" ; title + ; observational uncertainties as normalized RMS error + ropts@rmsobs = rmsobs(is) + + plot = taylor_diagram(wks, legendwks, ratio(:, :, is), cc(:, :, is), \ + ropts) + + plotfile(is) = wks@fullname + log_info("Wrote " + plotfile) + + if (embracelegend.and.(is.eq.(numseas-1))) then + frame(legendwks) + end if + else + plotfile(is) = "" + end if ; if write_plots + end do ; is-loop (seasons) + + ; write netCDF output + + if (any(rmsobs .gt. 0.0)) then + val@RMSE_observations = rmsobs + end if + + nc_filename = work_dir + "clouds_taylor_" + var0 + filename_add + ".nc" + nc_outfile = ncdf_write(val, nc_filename) + + ; ------------------------------------------------------------------------ + ; write provenance to netcdf output and plot file(s) + ; ------------------------------------------------------------------------ + + statistics = "clim" + domain = "global" + plottype = "taylor" + + do is = 0, numseas - 1 + ; note: because function log_provenance does not yet support to attach + ; different captions to netcdf (contains all seasons) and plots + ; (contain one season each), the caption cannot specifiy the + ; season plotted; using "annual" or "DJF/MAM/JJA/SON" instead. + + caption = "Taylor diagram for variable " + var0 + " (" + allseas \ + + "), reference = " + refname + "." + + log_provenance(nc_outfile, plotfile, caption, statistics, domain, \ + plottype, "", "", infiles) + end do + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/crem/ww09_esmvaltool.py b/esmvaltool/diag_scripts/crem/ww09_esmvaltool.py new file mode 100644 index 0000000000..8fe9f0b0da --- /dev/null +++ b/esmvaltool/diag_scripts/crem/ww09_esmvaltool.py @@ -0,0 +1,595 @@ +""" +Cloud Regime Error Metrics (CREM). 
+ + Author: Keith Williams (Metoffice, UK) + + Project: ESA-CMUG + + Description + Calculates the Cloud Regime Error Metric (CREM) following Williams and + Webb (2009, Clim. Dyn.). Regridding to the 2.5x2.5 degree ISCCP grid is + done by the ESMValTool preprocessor. + + Required diag_script_info attributes (diagnostics specific) + none + + Optional diag_script_info attributes (diagnostic specific) + none + + Required variable_info attributes (variable specific) + none + + Optional variable_info attributes (variable specific) + none + + Caveats + none + + Modification history + 20190216-A_laue_ax: outsourced regridding to preprocessor + 20190215-A_laue_ax: added metadata to netcdf output and plot + 20190213-A_laue_ax: made code more flexible to support CMIP6 data + 20181012-A_laue_ax: extended (optional) netCDF output + 20180920-A_laue_ax: code adapted for ESMValTool v2.0 + 20171128-A_laue_ax: added author and diagname to meta data + switched off "replacing of exact values" + in regridding function + 20170713-A_laue_ax: added tagging (for reporting) + 20151117-A_laue_ax: added parameters for call to "write_references" + 20151113-A_laue_ax: added creation of directory for plots if needed + (code was crashing if directory does not exist) + 20151029-A_laue_ax: added output of acknowledgements + processed files + to log-file + 20150903-A_laue_ax: ESMValTool implementation. + 20150521-A_will_ke: CREM routines written. +""" +import logging +import os +import sys +from pprint import pformat + +import matplotlib.pyplot as plt +import numpy as np +from netCDF4 import Dataset + +from esmvaltool.diag_scripts.shared import ( + group_metadata, ProvenanceLogger, run_diagnostic, select_metadata) + +logger = logging.getLogger(os.path.basename(__file__)) + + +def main(cfg): + """Run the diagnostic. + + Parameters + ---------- + cfg : dict + Configuration dictionary of the recipe. 
+ + """ + # get description of the preprocessed data + input_data = cfg['input_data'].values() + + grouped_input_data = group_metadata(input_data, 'dataset') + nummod = len(grouped_input_data) + crems = np.empty(nummod) + + # list of variables needed for CREM calculations + ww_vars = ('albisccp', 'pctisccp', 'cltisccp', 'rsut', 'rsutcs', 'rlut', + 'rlutcs', 'sic') + ww_vars_plus = ('snc', 'snw') + # alternative variable names to check if variable was not found (CMIP6) + ww_vars_alternative = {'sic': 'siconc'} + + # for human readable output + # regions/regimes as they come from the CREM calculation + regions = {'tropics': ['shallow cumulus', 'congestus', 'thin cirrus', + 'stratocumulus/cumulus transition', + 'anvil cirrus', 'deep convection', + 'stratocumulus'], + 'ice-free-extra-tropics': ['shallow cumulus', 'congestus', + 'stratocumulus/cumulus transition', + 'cirrus', 'stratocumulus', + 'frontal', 'thin cirrus'], + 'snow-ice-covered': ['shallow cumulus', 'stratocumulus', + 'thick mid-level', 'frontal', + 'thin mid-level', 'thin cirrus']} + # regimes as we write them to netCDF + allregimes = ['shallow cumulus', 'congestus', 'thin cirrus', + 'stratocumulus/cumulus transition', 'anvil cirrus', + 'deep convection', 'stratocumulus', 'cirrus', + 'frontal', 'thick mid-level', 'thin mid-level'] + # field for (optional) netCDF output of individual regions and regimes + r_crems = np.empty((nummod, len(regions), len(allregimes))) + r_crems[:] = 999.9 + + # provenance information + climofiles = [] + + # create list of dataset names (plot labels) + models = [] + + i = 0 + missing_vars = [] + + for dataset in grouped_input_data: + models.append(dataset) + pointers = {} + + for var in ww_vars: + selection = select_metadata(input_data, dataset=dataset, + short_name=var) + alt_var = None + if not selection: + # try alternative variable name (if defined) + if var in ww_vars_alternative: + alt_var = ww_vars_alternative[var] + selection = select_metadata(input_data, dataset=dataset, + short_name=alt_var) + if not selection: + missing_vars.append(var) + else: + key_nc = var + '_nc' + key_var = var + pointers[key_nc] = selection[0]['filename'] + if alt_var is None: + pointers[key_var] = var + else: + pointers[key_var] = alt_var + + # snow variable: use 'snc' if available or alternatively use 'snw' + + missing_snow = True + + for var in ww_vars_plus: + selection = select_metadata(input_data, dataset=dataset, + short_name=var) + key_nc = var + '_nc' + key_var = var + if not selection: + logger.info("%s: no data for variable snc found, trying " + "variable snw instead", dataset) + pointers[key_nc] = "" + pointers[key_var] = "" + else: + pointers[key_nc] = selection[0]["filename"] + pointers[key_var] = var + missing_snow = False + break + + if missing_snow: + missing_vars.append(ww_vars_plus[0] + " or " + ww_vars_plus[1]) + + for key in pointers: + if key[-3:] == '_nc': + climofiles.append(pointers[key]) + + # check if all variables are available + + if missing_vars: + printlist = ', '.join(missing_vars) + logger.error("error: the following variables are not " + "available: %s", printlist) + raise Exception('Variables missing (see log file for details).') + + # calculate CREM + + (crem_pd, r_crem_pd) = crem_calc(pointers) + + crems[i] = crem_pd + + # sort results into output array + + j = 0 + for region in regions: + regime = regions[region] + k = 0 + for reg in regime: + idx = allregimes.index(reg) + r_crems[i, j, idx] = r_crem_pd[j, k] + k = k + 1 + j = j + 1 + + i = i + 1 + + 
logger.info("==================================") + logger.info("*** Cloud Regime Error Metrics ***") + logger.info("==================================") + logger.info(crems) + logger.info("==================================") + + # define diagnostic internal provenance data + + provenance_record = { + 'caption': 'Cloud Regime Error Metric (CREM) following Williams ' + + 'and Webb (2009, Clim. Dyn.).', + 'statistics': ['other'], + 'domains': ['global'], + 'plot_type': 'bar', + 'authors': [ + 'will_ke', + 'laue_ax', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': climofiles, + } + + # plot results + + if cfg['write_plots']: + plotname = os.path.join( + cfg['plot_dir'], + 'ww09_metric_multimodel.' + cfg['output_file_type'], + ) + logger.debug("Plotting results to %s", plotname) + + plt.figure() + ypos = np.arange(nummod) + plt.barh(ypos, crems, align='center') + plt.yticks(ypos, models) + plt.xlabel('Cloud Regime Error Metric') + + # draw observational uncertainties (dashed red line) + plt.plot([0.96, 0.96], [-0.5, nummod - 0.5], 'r--') + + plt.savefig(plotname, bbox_inches='tight') + + provenance_record['plot_file'] = plotname + + # save results to netcdf + + oname = os.path.join(cfg['work_dir'], 'ww09_metric_multimodel.nc') + logger.debug("Saving results to %s", oname) + # convert strings + modstr_out = np.array(models, dtype=object) + regionstr_out = np.array(list(regions.keys()), dtype=object) + regimestr_out = np.array(allregimes, dtype=object) + # open a new netCDF file for writing + ncfile = Dataset(oname, 'w') + # create dimensions + ncfile.createDimension('model', nummod) + ncfile.createDimension('region', len(regions)) + ncfile.createDimension('regime', len(allregimes)) + # create variables + data = ncfile.createVariable('crem', np.dtype('float32').char, ('model')) + r_data = ncfile.createVariable('r_crem', np.dtype('float32').char, + ('model', 'region', 'regime'), + fill_value=999.9) + mod = ncfile.createVariable('model', np.dtype('int32').char, ('model')) + reg = ncfile.createVariable('region', np.dtype('int32').char, ('region')) + rgm = ncfile.createVariable('regime', np.dtype('int32').char, ('regime')) + mod_name = ncfile.createVariable('model_name', str, ('model')) + reg_name = ncfile.createVariable('region_name', str, ('region')) + rgm_name = ncfile.createVariable('regime_name', str, ('regime')) + # write data to variable + data[:] = crems + r_data[:, :, :] = r_crems + mod[:] = range(nummod) + reg[:] = range(len(regions)) + rgm[:] = range(len(allregimes)) + mod_name[:] = modstr_out + reg_name[:] = regionstr_out + rgm_name[:] = regimestr_out + # close the file + ncfile.close() + + # add provenance data to netcdf and plot + + logger.info("Recording provenance of %s:\n%s", oname, + pformat(provenance_record)) + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(oname, provenance_record) + + +def read_and_check(srcfilename, varname, lons2, lats2, time2): + """ + Function for reading and checking for correct regridding of input data. 
+ + Parameters + ---------- + srcfilename : str + filename containing input data + varname : str + variable name in netcdf + lons2 : numpy.ndarray + longitudes of target grid (ISCCP) + lats2 : numpy.ndarray + latitudes of target grid (ISCCP) + time2 : int + number of time steps + """ + nlon = len(lons2) + nlat = len(lats2) + + src_dataset = Dataset(srcfilename, 'r') + + n_time = len(src_dataset.variables['time'][:]) + logger.debug('Number of data times in file %s is %i', srcfilename, n_time) + + # check that the number of time steps matches + + if n_time != time2: + logger.error("error: the number of time steps in the input files " + "is not equal") + raise Exception('Variables contain different numbers of time steps ' + '(see log file for details).') + + grid_mismatch = False + coord_mismatch = False + + # check longitudes (coordinates can only be compared if the sizes match) + + lons = src_dataset.variables['lon'][:] + if nlon != len(lons): + grid_mismatch = True + elif np.amax(np.absolute(lons - lons2)) > 1.0e-3: + coord_mismatch = True + + # check latitudes + + lats = src_dataset.variables['lat'][:] + if nlat != len(lats): + grid_mismatch = True + elif np.amax(np.absolute(lats - lats2)) > 1.0e-3: + coord_mismatch = True + + if grid_mismatch: + logger.error("error: input data are not on 2.5x2.5 deg ISCCP grid. " + "lons = %i (required: %i), lats = %i (required: %i)", + len(lons), nlon, len(lats), nlat) + + if coord_mismatch: + logger.error("error: input data are not on 2.5x2.5 deg ISCCP grid, " + "longitudes and/or latitudes differ from ISCCP grid by " + "more than 1.0e-3") + + if (grid_mismatch or coord_mismatch): + raise Exception('Input variables are not on 2.5x2.5 deg ISCCP grid ' + '(see log file for details).') + + # read data + src_data = src_dataset.variables[varname] + + # create mask (missing values) + try: + data = np.ma.masked_equal(src_data, getattr(src_data, "_FillValue")) + rgmasked = np.ma.masked_invalid(data) + except AttributeError: + rgmasked = np.ma.masked_invalid(src_data) + np.ma.set_fill_value(rgmasked, 0.0) + + return np.ma.filled(rgmasked) + + +def crem_calc(pointers): + """ + Calculate the Cloud Regime Error Metric. + + Following equation 4 in Williams and Webb (2009) (WW09). + + Parameters + ---------- + pointers : dict + Keys in dictionary are: albisccp_nc, pctisccp_nc, cltisccp_nc, + rsut_nc, rsutcs_nc, rlut_nc, rlutcs_nc, snc_nc, sic_nc + + For CMIP5, snc is in the CMIP5 table 'day'. All other variables + are in the CMIP5 table 'cfday'. A minimum of 2 years, and ideally 5 + years, of data are required. The observational regime characteristics + were calculated for the period Mar 1985 - Feb 1990. + + If snc is not available then snw can be used instead. In this case + pointers[snc_nc] should be set to None and snw_nc set. + + Returns + ------- + crem_pd : float + present-day cloud regime error metric of WW09. + r_crem_pd : numpy.ndarray + contribution from each region and regime. + """ + # Lookup arrays + # Observational regime centroids for assignment of the model data. + # These are taken from Table 3 of Williams and Webb (2009) + # (999.9 represents missing data). The observational regime + # characteristics were calculated for the period Mar 1985 - Feb 1990.
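For reference, these lookup tables feed equations 3 and 4 of WW09 as implemented at the end of this function. In the notation used there, with $a_{w,j}$ the area weight of region $j$, and $\mathrm{RFO}$ / $\mathrm{NCF}$ the relative frequency of occurrence and net cloud forcing of regime $i$ in region $j$ (superscripts $m$ and $o$ for model and observations):

$$ r\mathrm{CREM}_{j,i} = a_{w,j}\,\sqrt{\bigl[(\mathrm{NCF}^{m}_{j,i}-\mathrm{NCF}^{o}_{j,i})\,\mathrm{RFO}^{o}_{j,i}\bigr]^{2} + \bigl[(\mathrm{RFO}^{m}_{j,i}-\mathrm{RFO}^{o}_{j,i})\,\mathrm{NCF}^{o}_{j,i}\bigr]^{2}} $$

$$ \mathrm{CREM}_{pd} = \sqrt{\frac{1}{20}\sum_{j=1}^{3}\sum_{i=1}^{n_j} r\mathrm{CREM}_{j,i}^{2}}, \qquad n_j = 7,\,7,\,6 $$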
+ + obs_alb = np.array([[0.261, 0.339, 0.211, 0.338, 0.313, 0.532, 0.446], + [0.286, 0.457, 0.375, 0.325, 0.438, 0.581, 0.220], + [0.433, 0.510, 0.576, 0.505, 0.343, 0.247, 999.9]]) + + obs_pct = np.array([[0.652, 0.483, 0.356, 0.784, 0.327, 0.285, 0.722], + [0.643, 0.607, 0.799, 0.430, 0.723, 0.393, 0.389], + [0.582, 0.740, 0.620, 0.458, 0.595, 0.452, 999.9]]) + + obs_clt = np.array([[0.314, 0.813, 0.740, 0.640, 0.944, 0.979, 0.824], + [0.473, 0.932, 0.802, 0.914, 0.900, 0.978, 0.713], + [0.356, 0.747, 0.778, 0.884, 0.841, 0.744, 999.9]]) + + # Observed regime RFO's taken from Table 3 of WW09 + obs_rfo = np.array([[0.375, 0.195, 0.119, 0.103, 0.091, 0.064, 0.052], + [0.354, 0.170, 0.114, 0.104, 0.091, 0.083, 0.083], + [0.423, 0.191, 0.139, 0.111, 0.094, 0.042, 999.9]]) + + # Observed regime net cloud forcing (Figure 2f of WW09) + obs_ncf = np.array([[-10.14, -25.45, -5.80, -27.40, -16.83, -48.45, + -55.84], + [-13.67, -58.28, -36.26, -25.34, -64.27, -56.91, + -11.63], + [-3.35, -16.66, -13.76, -8.63, -12.17, 1.45, 999.9]]) + + # aw in eq 3 of WW09 + area_weights = np.array([0.342, 0.502, 0.156]) + # weighting for swcf to account for lack of ISCCP diagnostics + # during polar night (p153 of WW09) + solar_weights = np.array([1.000, 0.998, 0.846]) + + # number of regimes in each region (Table 3 of WW09) + nregimes = {'tropics': 7, 'extra-tropics': 7, 'snow-ice': 6} + + # ----------------------------------------------------------- + + # Section to re-grid onto 2.5 degr lat long grid. + # Note this has been tested with regular lat-long grids - other grid + # types may need changes to the regrid subroutine. + + # target grid spec + npts = 144 + nrows = 72 + z_x = -1.25 + d_x = 2.5 + z_y = -91.25 + d_y = 2.5 + + lons2 = np.array([z_x + d_x * (i + 1.0) for i in range(npts)]) + lats2 = np.array([z_y + d_y * (j + 1.0) for j in range(nrows)]) + + # Read input data + # --------------- + # pointers['xxx_nc'] = file name of input file + # pointers['xxx'] = actual variable name in input file + + logger.debug('Reading albisccp') + ntime2 = len(Dataset(pointers['albisccp_nc'], 'r').variables['time'][:]) + albisccp_data = read_and_check(pointers['albisccp_nc'], + pointers['albisccp'], lons2, lats2, ntime2) + logger.debug('Reading pctisccp') + pctisccp_data = read_and_check(pointers['pctisccp_nc'], + pointers['pctisccp'], lons2, lats2, ntime2) + logger.debug('Reading cltisccp') + cltisccp_data = read_and_check(pointers['cltisccp_nc'], + pointers['cltisccp'], lons2, lats2, ntime2) + logger.debug('Reading rsut') + rsut_data = read_and_check(pointers['rsut_nc'], + pointers['rsut'], lons2, lats2, ntime2) + logger.debug('Reading rsutcs') + rsutcs_data = read_and_check(pointers['rsutcs_nc'], + pointers['rsutcs'], lons2, lats2, ntime2) + logger.debug('Reading rlut') + rlut_data = read_and_check(pointers['rlut_nc'], + pointers['rlut'], lons2, lats2, ntime2) + logger.debug('Reading rlutcs') + rlutcs_data = read_and_check(pointers['rlutcs_nc'], + pointers['rlutcs'], lons2, lats2, ntime2) + logger.debug('Reading sic') + sic_data = read_and_check(pointers['sic_nc'], + pointers['sic'], lons2, lats2, ntime2) + if not pointers['snc_nc']: + logger.debug('Reading snw') + snc_data = read_and_check(pointers['snw_nc'], + pointers['snw'], lons2, lats2, ntime2) + else: + logger.debug('Reading snc') + snc_data = read_and_check(pointers['snc_nc'], + pointers['snc'], lons2, lats2, ntime2) + + # ----------------------------------------------------------- + + # Set up storage arrays + numreg = len(nregimes) # = 3 + numrgm = 
max(nregimes.values())  # maximum number of regimes per region (= 7) + + model_rfo = np.zeros((numreg, numrgm)) + model_ncf = np.zeros((numreg, numrgm)) + r_crem_pd = np.zeros((numreg, numrgm)) + model_rfo[:] = 999.9 + model_ncf[:] = 999.9 + r_crem_pd[:] = 999.9 + + # Normalize data used for assignment to regimes to be in the range 0-1 + pctisccp_data = pctisccp_data / 100000.0 + cltisccp_data = cltisccp_data / 100.0 + + # Calculate cloud forcing + swcf_data = rsutcs_data - rsut_data + lwcf_data = rlutcs_data - rlut_data + + # loop over 3 regions + # (0 = tropics, 1 = ice-free extra-tropics, 2 = snow/ice covered) + for idx_region, (region, regime) in enumerate(nregimes.items()): + + # Set up validity mask for region + + mask = pctisccp_data.copy() + if region == 'tropics': + mask[:, (lats2 < -20) | (lats2 > 20), :] = np.NAN + elif region == 'extra-tropics': + mask[:, (lats2 >= -20) & (lats2 <= 20), :] = np.NAN + mask[(snc_data >= 0.1) | (sic_data >= 0.1)] = np.NAN + elif region == 'snow-ice': + mask[:, (lats2 >= -20) & (lats2 <= 20), :] = np.NAN + mask[(snc_data < 0.1) & (sic_data < 0.1)] = np.NAN + + mask[cltisccp_data == 0.0] = np.NAN + + points = np.isfinite(mask) + npoints = len(mask[points])  # Number of valid data points in region + + group = np.zeros(npoints) + e_d = np.zeros((npoints, regime)) + + swcf_data_pts = swcf_data[points] + lwcf_data_pts = lwcf_data[points] + + # Assign model data to observed regimes + + for i in range(regime): + e_d[:, i] = \ + ((albisccp_data[points] - obs_alb[idx_region, i]) ** 2) + \ + ((pctisccp_data[points] - obs_pct[idx_region, i]) ** 2) + \ + ((cltisccp_data[points] - obs_clt[idx_region, i]) ** 2) + + group[:] = np.argmin(e_d, axis=1) + + for i in range(regime): + mem = (group == i) + + count = len(group[mem]) + + if count > 0: + + model_rfo[idx_region, i] = float(count) / float(npoints) + model_ncf[idx_region, i] = np.average(swcf_data_pts[mem]) \ + * solar_weights[idx_region] + \ + np.average(lwcf_data_pts[mem]) + else: + logger.error("Model does not reproduce all observed cloud " + "regimes. Cannot calculate CREM. Abort.") + sys.exit() + + # Calculation of eq 3 in WW09 + for idx_region, (region, regime) in enumerate(nregimes.items()): + r_crem_pd[idx_region, 0:regime] = area_weights[idx_region] * \ + (((model_ncf[idx_region, 0:regime] - + obs_ncf[idx_region, 0:regime]) * + obs_rfo[idx_region, 0:regime]) ** 2 + + ((model_rfo[idx_region, 0:regime] - + obs_rfo[idx_region, 0:regime]) * + obs_ncf[idx_region, 0:regime]) ** 2) ** 0.5 + + # Calculation of eq 4 in WW09 (sum over all 20 regimes: 7 + 7 + 6) + crem_pd = ((np.sum(r_crem_pd[0, :] ** 2) + np.sum(r_crem_pd[1, :] ** 2) + + np.sum(r_crem_pd[2, 0:6] ** 2)) / 20.0) ** 0.5 + + # A perfect crem_pd with respect to ISCCP would be 0.0 + # An estimate of observational uncertainty (obtained by calculating + # crem_pd wrt MODIS/ERBE) is 0.96 (i.e. models with crem_pd less than + # 0.96 may be regarded as within observational uncertainty overall, + # although not necessarily for every regime).
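As a usage illustration of the observational-uncertainty estimate quoted above (a hypothetical caller, not part of this module):

```python
# Hypothetical caller: compare the returned metric against the 0.96
# observational-uncertainty estimate noted in the comments above.
crem_pd, r_crem_pd = crem_calc(pointers)  # 'pointers' as documented above
verdict = "within" if crem_pd < 0.96 else "outside"
print(f"CREM = {crem_pd:.2f} ({verdict} estimated observational uncertainty)")
```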
+ # Interrogation of the r_crem_pd array from this program will indicate + # which regimes contribute most to the total crem_pd (elements ordered + # as Table 3 of WW09)' + + return crem_pd, r_crem_pd + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/tests/integration/cmor/_fixes/OBS/__init__.py b/esmvaltool/diag_scripts/cvdp/__init__.py similarity index 100% rename from tests/integration/cmor/_fixes/OBS/__init__.py rename to esmvaltool/diag_scripts/cvdp/__init__.py diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/CVDP_readme.pdf b/esmvaltool/diag_scripts/cvdp/cvdp/CVDP_readme.pdf new file mode 100644 index 0000000000..ba0e6c74b5 Binary files /dev/null and b/esmvaltool/diag_scripts/cvdp/cvdp/CVDP_readme.pdf differ diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/LICENSE b/esmvaltool/diag_scripts/cvdp/cvdp/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/driver.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/driver.ncl new file mode 100644 index 0000000000..58a632832f --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/driver.ncl @@ -0,0 +1,214 @@ +; +; CVDP driver script. To run the CVDP at the command line type: ncl driver.ncl +; To run the CVDP at the command line, put it in background mode, and write the terminal output +; to a file named file.out, type: ncl driver.ncl >&! file.out & +; +;============================================================================================ + outdir = "/project/CVDP/" ; location of output files (must end in a "/") + ; It is recommended that a new or empty directory be pointed to here + ; as existing files in outdir can get removed. + + namelists_only = "False" ; Set to True to only create the variable namelists. Useful + ; upon running the package for the first time to verify that the correct + ; files are being selected by the package. (See files in namelist_byvar/ directory) + ; Set to False to run the entire package. + + obs = "True" ; True = analyze and plot observations (specified in namelist_obs), False = do not + scale_timeseries = "False" ; True = scale timeseries so that x-axis length is comparable across timeseries, False = do not + output_data = "True" ; True = output selected calculated data to a netCDF file. Make sure .nc files from previous CVDP + ; runs are not in outdir or they will get added to or modified. + compute_modes_mon = "True" ; True = compute DJF, MAM, JJA, SON, Annual and Monthly Atmospheric Modes of Variability + ; False = do not compute the Monthly Atmospheric Modes of Variability (saves computation time) +;- - - - - - - - - - - - - - - - - - + opt_climo = "Full" ; Full = remove climatology based on full record of each simulation, + ; Custom = set climatological period using climo_syear (climatological start year) and climo_eyear (climatological end year) + + if (opt_climo.eq."Custom") then ; When climo_syear and climo_eyear are positive, remove the climatology/annual cycle based on these years. + climo_syear = -30 ; Both settings should be within the range of years of all specified model runs and observational datasets. 
climo_eyear = 0 ; When climo_syear is negative, remove the climatology/annual cycle relative to the end of each model run + end if ; or observational dataset. Example: climo_syear = -25, climo_eyear = 0 will result in the climatology + ; being removed from the last 26 years of each model run and observations. +;- - - - - - - - - - - - - - - - - - + colormap = 0 ; 0 = default colormaps, 1 = colormaps better for color blindness + + output_type = "png" ; png = create png files, ps = create postscript files as well as png files (for web viewing). + + png_scale = 1.5 ; Set the output .png size. Value between 0.1 and 5. Any value > 1 (< 1) increases (decreases) png size. + ; When output_type = "png" a value of 1 will result in a png sized 1500 (H) x 1500 (W) before automatic cropping of white space + ; When output_type = "ps" a value of 1 will result in a png density setting of 144 before automatic cropping of white space + webpage_title = "Title goes here" ; Set webpage title + + tar_output = "False" ; True = tar up all output in outdir and remove individual files, False = do not + ; Note: ALL files in outdir will be tarred up and then removed from the outdir directory. + +;---Advanced Options---------------------------------------------------------------------- + zp = "ncl_scripts/" ; directory path of CVDP NCL scripts. (must end in a "/") + ; Examples: "ncl_scripts/" if all code is local, or on CGD or CISL systems: "~asphilli/CESM-diagnostics/CVDP/Release/v4.1.0/ncl_scripts/" + ; Regardless of this setting the following files should be in one directory: namelist, driver.ncl, and namelist_obs. + ; If pointing to code in ~asphilli make sure the driver script version #s match between this script and the script in ~asphilli. + + ncl_exec = "ncl" ; This can be changed to a different path if a different version of NCL needs to be used, such as "/different/path/to/bin/ncl" + + run_style = "parallel" ; parallel = allow simple python-based parallelization to occur. X number of CVDP NCL scripts will be called at once. + ; X is set via max_num_tasks. Terminal output will be harder to follow. + ; serial = call CVDP NCL scripts serially. (Default) + + max_num_tasks = 4 ; if run_style = "parallel", how many CVDP NCL scripts can be called at once? (If unsure set to 3) + + modular = "False" ; True = Run only those CVDP scripts specified in modular_list. + ; False = Run all CVDP scripts (Default) + + modular_list = "pdo,aice.trends_timeseries,sst.indices" ; When modular = "True" list the CVDP scripts that will be run. + ; Example: modular_list = "amoc,amo,pr.trends_timeseries" + ; For a list of available scripts see complete_list at line 72.
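The parallel mode hands the script list to runTasks.py, whose implementation is not part of this patch; the sketch below shows the kind of dispatch it implies (at most max_num_tasks NCL subprocesses at a time), under the assumption that plain `ncl <script>` invocations are sufficient:

```python
# Hedged sketch of parallel CVDP script dispatch; the real mechanism is
# runTasks.py, which this patch does not include.
import subprocess
from multiprocessing.pool import ThreadPool

max_num_tasks = 4
scripts = ["ncl_scripts/pdo.ncl", "ncl_scripts/sst.indices.ncl"]  # example subset

def run_script(script):
    """Run one CVDP NCL script and return its exit code."""
    return subprocess.call(["ncl", "-n", "-Q", script])

with ThreadPool(max_num_tasks) as pool:
    exit_codes = pool.map(run_script, scripts)
print(dict(zip(scripts, exit_codes)))
```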
+ + machine_casesen = "True" ; True = Your filesystem is case sensitive (Default) + ; False = Your filesystem is case insensitive +;========END USER MODIFICATIONS=========================================================== + version = "5.0.0" + + print("Starting: Climate Variability Diagnostics Package ("+systemfunc("date")+")") + + complete_list = "psl.nam_nao,psl.pna_npo,tas.trends_timeseries,snd.trends,psl.trends,amo,pdo,sst.indices,pr.trends_timeseries,"+\ + "psl.sam_psa,sst.mean_stddev,psl.mean_stddev,pr.mean_stddev,sst.trends_timeseries,amoc,tas.mean_stddev,"+\ + "snd.mean_stddev,aice.mean_stddev,aice.trends_timeseries,ipo" + + loadscript(zp+"functions.ncl") + outfiles = (/"ts","trefht","psl","prect","snowdp","moc","maxnum","aice_nh","aice_sh"/) + do gg = 0,dimsizes(outfiles)-1 + if (isfilepresent2("obs_"+outfiles(gg))) then + system("rm obs_"+outfiles(gg)) + end if + end do + + if (isfilepresent2(outdir+"metrics_orig.txt")) then ; remove metrics_orig.txt file if present + system("rm "+outdir+"metrics_orig.txt") + end if + + if (opt_climo.eq."Custom") then + if (climo_syear.ge.climo_eyear) then + print("Specified custom climatology start year (climo_syear) cannot be greater than or equal to the specified end year (climo_eyear), exiting CVDP.") + exit + end if + else + climo_syear = -999 + climo_eyear = -999 + end if + + if (.not.isfilepresent2(outdir)) then + system("mkdir "+outdir) + end if + envvar_str = " export OUTDIR="+outdir+"; export OBS="+obs+"; export SCALE_TIMESERIES="+scale_timeseries+"; "+\ + "export OUTPUT_DATA="+output_data+"; export VERSION="+version+"; export PNG_SCALE="+png_scale+"; "+\ + "export OPT_CLIMO="+opt_climo+"; export CLIMO_SYEAR="+climo_syear+"; export CLIMO_EYEAR="+climo_eyear+"; "+\ + "export COMPUTE_MODES_MON="+compute_modes_mon+"; export OUTPUT_TYPE="+output_type+"; export MACHINE="+machine_casesen+"; "+\ + "export COLORMAP="+colormap+"; export CVDP_SCRIPTS="+zp+"; export MAX_TASKS="+max_num_tasks+";" + ncl_exec = ncl_exec+" -n -Q" + + system(envvar_str + " "+str_sub_str(ncl_exec," -Q","")+" "+zp+"namelist.ncl") ; create variable namelists + if (namelists_only.eq."True") then + print("Variable namelists have been created. 
Examine files in namelist_byvar/ directory to verify CVDP file selection.")
+ print("Finished: Climate Variability Diagnostics Package ("+systemfunc("date")+")")
+ do gg = 0,dimsizes(outfiles)-1
+ if (isfilepresent2("obs_"+outfiles(gg))) then
+ system("rm obs_"+outfiles(gg))
+ end if
+ end do
+ exit
+ end if
+
+ if (modular.eq."True") then
+ modular_list = str_sub_str(modular_list," ","") ; remove spaces if present
+ modular_list = str_sub_str(modular_list,",",".ncl,") ; add .ncl to the end of each script name
+ modular_list = modular_list+".ncl" ; add .ncl to the last script name
+ if (run_style.eq."parallel") then
+ modular_list = str_sub_str(modular_list,","," "+zp)
+ system(envvar_str+" python "+zp+"runTasks.py "+zp+modular_list)
+ else
+ modular_list = str_sub_str(modular_list,","," "+ncl_exec+" "+zp)
+ modular_list = str_sub_str(modular_list,".ncl",".ncl;")
+ system(envvar_str+" "+ncl_exec+" "+zp+modular_list)
+ end if
+ else
+ complete_list = str_sub_str(complete_list,",",".ncl,") ; add .ncl to the end of each script name
+ complete_list = complete_list+".ncl" ; add .ncl to the last script name
+ if (run_style.eq."parallel") then
+ complete_list = str_sub_str(complete_list,","," "+zp)
+ system(envvar_str+" python "+zp+"runTasks.py "+zp+complete_list)
+ else
+ complete_list = str_sub_str(complete_list,","," "+ncl_exec+" "+zp)
+ complete_list = str_sub_str(complete_list,".ncl",".ncl;")
+ system(envvar_str+" "+ncl_exec+" "+zp+complete_list)
+ end if
+ end if
+
+ if (output_data.eq."True") then
+ system(envvar_str + " "+ncl_exec+" "+zp+"ncfiles.append.ncl")
+ end if
+
+ if (output_type.eq."png") then
+ ofiles = systemfunc("ls "+outdir+"*.png")
+ do gg = 0,dimsizes(ofiles)-1
+ system("convert -trim +repage -border 8 -bordercolor white "+ofiles(gg)+" "+ofiles(gg))
+ end do
+ else
+ ofilesS = systemfunc("ls "+outdir+"*."+output_type)
+ ofilesT = systemfunc("ls -l "+outdir+"*."+output_type)
+ do gg = 0,dimsizes(ofilesT)-1 ; check for empty .ps files, remove
+ filesize = tofloat(str_get_field(ofilesT(gg),5," "))
+ if (filesize.lt.10000) then
+ print("Removing: "+ofilesT(gg))
+ system("rm "+ofilesS(gg))
+ end if
+ end do
+ delete([/filesize,ofilesS,ofilesT/])
+
+ ofiles = systemfunc("ls "+outdir+"*."+output_type)
+ ofiles_png = str_sub_str(ofiles,"."+output_type,".png")
+ d_opt = 144*png_scale
+ print("Converting "+output_type+" files to .png")
+ do gg = 0,dimsizes(ofiles)-1
+ system("convert -density "+d_opt+" -trim +repage -border 8 -bordercolor white -background white -flatten "+ofiles(gg)+" "+ofiles_png(gg))
+ end do
+ print("Done with "+output_type+"->png conversion")
+ end if
+
+ system("cp "+zp+"cas-cvdp.png "+outdir)
+ system("cp namelist_byvar/* "+outdir)
+ system("cp namelist "+outdir)
+ if (obs.eq."True") then
+ system("cp namelist_obs "+outdir)
+ end if
+
+ met_files = systemfunc("ls "+outdir+"metrics.*.txt 2> /dev/null")
+ if (dimsizes(met_files).eq.9) then ; all 9 metrics text files are present, create metrics table(s)
+ system(" export OUTDIR="+outdir+"; "+ncl_exec+" "+zp+"metrics.ncl")
+ end if
+
+ quote = str_get_dq()
+ system(" export OUTDIR="+outdir+"; export VERSION="+version+"; export OUTPUT_DATA="+output_data+"; "+\
+ "export OPT_CLIMO="+opt_climo+"; export CLIMO_SYEAR="+climo_syear+"; export CLIMO_EYEAR="+climo_eyear+"; "+\
+ "export OBS="+obs+"; export CVDP_SCRIPTS="+zp+"; "+ncl_exec+" 'webtitle="+quote+webpage_title+quote+"' "+zp+"webpage.ncl")
+ delete(quote)
+;-------------------------------
+ if (tar_output.eq."True") then
+ if (isfilepresent2(outdir+"cvdp.tar")) then
+ system("rm "+outdir+"cvdp.tar") + end if +; print("cd "+outdir+"; tar -cf cvdp.tar *") + system("cd "+outdir+"; tar -cf cvdp.tar *") + system("cd "+outdir+"; rm *.png *.ps *.txt *.html *.nc namelist*") + end if +;------------------------------- +; Cleanup + do gg = 0,dimsizes(outfiles)-1 + if (isfilepresent2("obs_"+outfiles(gg))) then + system("rm obs_"+outfiles(gg)) + end if + end do + + delete([/ofiles,outfiles,outdir,obs,scale_timeseries,output_data,opt_climo,climo_syear,climo_eyear,\ + png_scale,webpage_title,compute_modes_mon,met_files/]) + + print("Finished: Climate Variability Diagnostics Package ("+systemfunc("date")+")") + diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/namelist b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/namelist new file mode 100644 index 0000000000..a48929a810 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/namelist @@ -0,0 +1,2 @@ +Obs 2 | /project/cas/DSets/Hadley/ | 1979 | 2008 + diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/namelist_obs b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/namelist_obs new file mode 100644 index 0000000000..9d7e743684 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/namelist_obs @@ -0,0 +1,5 @@ +TS | ERSSTv3b | /project/cas/DSets/ersstv3b.185401-201102.nc | 1979 | 2008 +PSL | 20thC_ReanV2 | /project/cas/20thC_ReanV2/prmsl.mon.mean.187101-201112.nc | 1979 | 2008 +TREFHT | MLOST | /project/cas/DSets/mlost.v3.5.2.188001-201212.nc | 1979 | 2008 +PRECT | GPCC | /project/cas/DSets/GPCC/full_data_v6_precip_10.190101-201012.nc | 1979 | 2008 +SNOWDP diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/obs2_directory_contents b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/obs2_directory_contents new file mode 100644 index 0000000000..41bb1206e1 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/multiple_obs/obs2_directory_contents @@ -0,0 +1,12 @@ +Soft links are used to point to the original file names. Note that variable names pr, slp, ts, and tas are inserted +into the soft link names to help the CVDP better identify the files. 
+ +-------------------------------------------------------------------------------- +-rw-r--r-- 1 root cgdcas 447907648 Feb 7 14:42 hadisst.187001-201312.nc +-rw-r--r-- 1 root cgdcas 20724308 Feb 7 14:43 hadslp2r.185001-201112.nc +-rw-r--r-- 1 root cgdcas 17176260 Feb 7 14:43 gpcp.mon.mean.197901-201306.nc +-rw------- 1 root cgdcas 20422396 Feb 7 14:44 HadCRUT.4.2.0.0.temps.185001-201312.nc +lrwxrwxrwx 1 root cgdcas 30 Feb 7 14:45 gpcp.mon.mean.pr.197901-201306.nc -> gpcp.mon.mean.197901-201306.nc +lrwxrwxrwx 1 root cgdcas 25 Feb 7 14:46 hadslp2r.slp.185001-201112.nc -> hadslp2r.185001-201112.nc +lrwxrwxrwx 1 root cgdcas 24 Feb 7 14:51 hadisst.ts.187001-201312.nc -> hadisst.187001-201312.nc +lrwxrwxrwx 1 root cgdcas 38 Feb 7 14:56 HadCRUT.4.2.0.0.tas.185001-201312.nc -> HadCRUT.4.2.0.0.temps.185001-201312.nc diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.CESM1-LENS b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.CESM1-LENS new file mode 100644 index 0000000000..fdcec5e019 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.CESM1-LENS @@ -0,0 +1,40 @@ +CESM1-CAM5-BGC-LE #1 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.001.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #2 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.002.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #3 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.003.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #4 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.004.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #5 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.005.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #6 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.006.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #7 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.007.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #8 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.008.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #9 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.009.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #10 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.010.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #11 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.011.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #12 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.012.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #13 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.013.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #14 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.014.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #15 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.015.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #16 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.016.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #17 | 
/glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.017.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #18 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.018.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #19 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.019.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #20 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.020.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #21 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.021.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #22 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.022.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #23 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.023.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #24 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.024.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #25 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.025.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #26 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.026.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #27 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.027.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #28 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.028.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #29 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.029.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #30 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.030.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #31 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.031.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #32 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.032.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #33 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.033.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #34 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.034.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #35 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.035.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #101 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.101.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #102 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.102.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #103 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.103.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #104 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.104.*.nc | 1920 | 2012 +CESM1-CAM5-BGC-LE #105 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.105.*.nc | 1920 | 2012 diff --git 
a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.CESMcomparison b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.CESMcomparison new file mode 100644 index 0000000000..f9a6432097 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.CESMcomparison @@ -0,0 +1,6 @@ +CESM1 (CAM5.2) LE Control | /project/yampa01/asphilli/b.e11.B1850C5CN.f09_g16.005/ | 700 | 1099 +CESM1 (CAM5.1) Control | /project/yampa01/asphilli/b40_1850_1d_b08c5cn_138j/ | 120 | 319 +CCSM4 Control | /project/yampa01/asphilli/b40.1850.track1.1deg.006/ | 800 | 1199 +CCSM3 Control | /project/cas/asphilli/CCSM3/b30.020.ES01/ | 300 | 799 +CCSM2 Control | /project/cas/asphilli/CCSM2/b20.007/ | 571 | 720 +CSM1 Control | /project/cas/asphilli/CSM1/b003/ | 0 | 299 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip3_20c3m b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip3_20c3m new file mode 100644 index 0000000000..56716c2b7a --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip3_20c3m @@ -0,0 +1,2 @@ +CCSM4 Control | /project/yampa01/asphilli/b40.1850.track1.1deg.006/ | 800 | 1199 +CCSM3 Control | /project/cas/asphilli/CCSM3/b30.020.ES01/ | 300 | 799 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip3_historical b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip3_historical new file mode 100644 index 0000000000..6a9c3aa2a4 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip3_historical @@ -0,0 +1,78 @@ +ncar_pcm1_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_pcm1/run1/ | 1900 | 1999 +ncar_pcm1_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_pcm1/run2/ | 1900 | 1999 +ncar_pcm1_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_pcm1/run3/ | 1900 | 1999 +ncar_pcm1_3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_pcm1/run4/ | 1900 | 1999 +bccr_bcm2_0_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/bccr_bcm2_0/run1/ | 1900 | 1999 +cccma_cgcm3_1_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/cccma_cgcm3_1/run1/ | 1900 | 1999 +cccma_cgcm3_1_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/cccma_cgcm3_1/run2/ | 1900 | 1999 +cccma_cgcm3_1_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/cccma_cgcm3_1/run3/ | 1900 | 1999 +cccma_cgcm3_1_3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/cccma_cgcm3_1/run4/ | 1900 | 1999 +cccma_cgcm3_1_4 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/cccma_cgcm3_1/run5/ | 1900 | 1999 +cccma_cgcm3_1_t63_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/cccma_cgcm3_1_t63/run1/ | 1900 | 1999 +cnrm_cm3_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/cnrm_cm3/run1/ | 1900 | 1999 +csiro_mk3_0_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/csiro_mk3_0/run1/ | 1900 | 1999 +csiro_mk3_0_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/csiro_mk3_0/run2/ | 1900 | 1999 +csiro_mk3_0_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/csiro_mk3_0/run3/ | 1900 | 1999 +csiro_mk3_5_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/csiro_mk3_5/run1/ | 1900 | 1999 +csiro_mk3_5_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/csiro_mk3_5/run2/ | 1900 | 1999 +csiro_mk3_5_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/csiro_mk3_5/run3/ | 1900 | 1999 +gfdl_cm2_0_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/gfdl_cm2_0/run1/ | 1900 | 1999 +gfdl_cm2_0_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/gfdl_cm2_0/run2/ | 1900 | 1999 +gfdl_cm2_0_2 | 
/home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/gfdl_cm2_0/run3/ | 1900 | 1999 +gfdl_cm2_1_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/gfdl_cm2_1/run1/ | 1900 | 1999 +gfdl_cm2_1_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/gfdl_cm2_1/run2/ | 1900 | 1999 +gfdl_cm2_1_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/gfdl_cm2_1/run3/ | 1900 | 1999 +giss_aom_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_aom/run1/ | 1900 | 1999 +giss_aom_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_aom/run2/ | 1900 | 1999 +giss_model_e_h_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_h/run1/ | 1900 | 1999 +giss_model_e_h_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_h/run2/ | 1900 | 1999 +giss_model_e_h_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_h/run3/ | 1900 | 1999 +giss_model_e_h_3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_h/run4/ | 1900 | 1999 +giss_model_e_h_4 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_h/run5/ | 1900 | 1999 +giss_model_e_r_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run1/ | 1900 | 1999 +giss_model_e_r_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run2/ | 1900 | 1999 +giss_model_e_r_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run3/ | 1900 | 1999 +giss_model_e_r_3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run4/ | 1900 | 1999 +giss_model_e_r_4 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run5/ | 1900 | 1999 +giss_model_e_r_5 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run6/ | 1900 | 1999 +giss_model_e_r_6 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run7/ | 1900 | 1999 +giss_model_e_r_7 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run8/ | 1900 | 1999 +giss_model_e_r_8 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/giss_model_e_r/run9/ | 1900 | 1999 +iap_fgoals1_0_g_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/iap_fgoals1_0_g/run1/ | 1900 | 1999 +iap_fgoals1_0_g_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/iap_fgoals1_0_g/run2/ | 1900 | 1999 +iap_fgoals1_0_g_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/iap_fgoals1_0_g/run3/ | 1900 | 1999 +ingv_echam4_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ingv_echam4/run1/ | 1900 | 1999 +mpi_echam5_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mpi_echam5/run1/ | 1900 | 1999 +mpi_echam5_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mpi_echam5/run2/ | 1900 | 1999 +mpi_echam5_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mpi_echam5/run3/ | 1900 | 1999 +mpi_echam5_3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mpi_echam5/run4/ | 1900 | 1999 +inmcm3_0_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/inmcm3_0/run1/ | 1900 | 1999 +ipsl_cm4_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ipsl_cm4/run1/ | 1900 | 1999 +ipsl_cm4_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ipsl_cm4/run2/ | 1961 | 1999 +miroc3_2_hires_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miroc3_2_hires/run1/ | 1900 | 1999 +miroc3_2_medres_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miroc3_2_medres/run1/ | 1900 | 1999 +miroc3_2_medres_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miroc3_2_medres/run2/ | 1900 | 1999 +miroc3_2_medres_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miroc3_2_medres/run3/ | 1900 | 1999 +miub_echo_g_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miub_echo_g/run1/ 
| 1900 | 1999 +miub_echo_g_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miub_echo_g/run2/ | 1900 | 1999 +miub_echo_g_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miub_echo_g/run3/ | 1900 | 1999 +miub_echo_g_3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miub_echo_g/run4/ | 1900 | 1999 +miub_echo_g_4 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/miub_echo_g/run5/ | 1900 | 1999 +mri_cgcm2_3_2a_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mri_cgcm2_3_2a/run1/ | 1900 | 1999 +mri_cgcm2_3_2a_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mri_cgcm2_3_2a/run2/ | 1900 | 1999 +mri_cgcm2_3_2a_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mri_cgcm2_3_2a/run3/ | 1900 | 1999 +mri_cgcm2_3_2a_3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mri_cgcm2_3_2a/run4/ | 1900 | 1999 +mri_cgcm2_3_2a_4 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/mri_cgcm2_3_2a/run5/ | 1900 | 1999 +ncar_ccsm3_0_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run1/ | 1900 | 1999 +ncar_ccsm3_0_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run2/ | 1900 | 1999 +ncar_ccsm3_0_2 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run3/ | 1900 | 1999 +ncar_ccsm3_0_3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run4/ | 1900 | 1999 +ncar_ccsm3_0_4 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run5/ | 1900 | 1999 +ncar_ccsm3_0_5 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run6/ | 1900 | 1999 +ncar_ccsm3_0_6 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run7/ | 1900 | 1999 +ncar_ccsm3_0_7 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run9/ | 1900 | 1999 +ukmo_hadcm3_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ukmo_hadcm3/run1/ | 1900 | 1999 +ukmo_hadcm3_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ukmo_hadcm3/run2/ | 1900 | 1944 +ukmo_hadgem1_0 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ukmo_hadgem1/run1/ | 1900 | 1999 +ukmo_hadgem1_1 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ukmo_hadgem1/run2/ | 1903 | 1999 + diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip5_historical b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip5_historical new file mode 100644 index 0000000000..a58bac9f6d --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip5_historical @@ -0,0 +1,41 @@ +ACCESS1-0 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/ACCESS1-0/r1i1p1/ | 1900 | 2005 +ACCESS1-3 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/ACCESS1-3/r1i1p1/ | 1900 | 2005 +bcc-csm1-1-m | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/bcc-csm1-1-m/r1i1p1/ | 1900 | 2005 +bcc-csm1-1 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/bcc-csm1-1/r1i1p1/ | 1900 | 2005 +BNU-ESM | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/BNU-ESM/r1i1p1/ | 1900 | 2005 +CanESM2 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CanESM2/r1i1p1/ | 1900 | 2005 +CCSM4 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CCSM4/r1i1p1/ | 1900 | 2005 +CESM1-BGC | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CESM1-BGC/r1i1p1/ | 1900 | 2005 +CESM1-CAM5-1-FV2 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CESM1-CAM5-1-FV2/r1i1p1/ | 1900 | 2005 +CESM1-CAM5 | 
/project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CESM1-CAM5/r1i1p1/ | 1900 | 2005 +CESM1-FASTCHEM | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CESM1-FASTCHEM/r1i1p1/ | 1900 | 2005 +CESM1-WACCM | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CESM1-WACCM/r1i1p1/ | 1900 | 2005 +CMCC-CM | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CMCC-CM/r1i1p1/ | 1900 | 2005 +CMCC-CMS | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CMCC-CMS/r1i1p1/ | 1900 | 2005 +CNRM-CM5 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CNRM-CM5/r1i1p1/ | 1900 | 2005 +CSIRO-Mk3-6-0 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/CSIRO-Mk3-6-0/r1i1p1/ | 1900 | 2005 +FGOALS-g2 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/FGOALS-g2/r1i1p1/ | 1900 | 2005 +FIO-ESM | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/FIO-ESM/r1i1p1/ | 1900 | 2005 +GFDL-CM2p1 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/GFDL-CM2p1/r1i1p1/ | 1900 | 2005 +GFDL-ESM2G | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/GFDL-ESM2G/r1i1p1/ | 1900 | 2005 +GFDL-ESM2M | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/GFDL-ESM2M/r1i1p1/ | 1900 | 2005 +GISS-E2-H | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/GISS-E2-H/r1i1p1/ | 1900 | 2005 +GISS-E2-H-CC | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/GISS-E2-H-CC/r1i1p1/ | 1900 | 2005 +GISS-E2-R | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/GISS-E2-R/r1i1p1/ | 1900 | 2005 +GISS-E2-R-CC | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/GISS-E2-R-CC/r1i1p1/ | 1900 | 2005 +HadCM3 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/HadCM3/r1i1p1/ | 1900 | 2005 +HadGEM2-AO | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/HadGEM2-AO/r1i1p1/ | 1900 | 2005 +HadGEM2-CC | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/HadGEM2-CC/r1i1p1/ | 1900 | 2004 +HadGEM2-ES | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/HadGEM2-ES/r1i1p1/ | 1900 | 2004 +inmcm4 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/inmcm4/r1i1p1/ | 1900 | 2005 +IPSL-CM5A-LR | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/IPSL-CM5A-LR/r1i1p1/ | 1900 | 2005 +IPSL-CM5A-MR | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/IPSL-CM5A-MR/r1i1p1/ | 1900 | 2005 +IPSL-CM5B-LR | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/IPSL-CM5B-LR/r1i1p1/ | 1900 | 2005 +MIROC5 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/MIROC5/r1i1p1/ | 1900 | 2005 +MIROC-ESM | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/MIROC-ESM/r1i1p1/ | 1900 | 2005 +MIROC-ESM-CHEM | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/MIROC-ESM-CHEM/r1i1p1/ | 1900 | 2005 +MPI-ESM-LR | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/MPI-ESM-LR/r1i1p1/ | 1900 | 2005 +MRI-CGCM3 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/MRI-CGCM3/r1i1p1/ | 1900 | 2005 +MRI-ESM1 | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/MRI-ESM1/r1i1p1/ | 1900 | 2005 +NorESM1-M | 
/project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/NorESM1-M/r1i1p1/ | 1900 | 2005 +NorESM1-ME | /project/cmip5/ETH/cmip5/historical/{Amon/*,Omon/*,LImon/*,OImon/sic}/NorESM1-ME/r1i1p1/ | 1900 | 2005 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip5_historical_1900-2005 b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip5_historical_1900-2005 new file mode 100644 index 0000000000..139ce9e58a --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.cmip5_historical_1900-2005 @@ -0,0 +1,41 @@ +ACCESS1-0 | /project/cmip5/ETH/cmip5/historical/Amon/*/ACCESS1-0/r1i1p1/ | 1900 | 2005 +ACCESS1-3 | /project/cmip5/ETH/cmip5/historical/Amon/*/ACCESS1-3/r1i1p1/ | 1900 | 2005 +bcc-csm1-1-m | /project/cmip5/ETH/cmip5/historical/Amon/*/bcc-csm1-1-m/r1i1p1/ | 1900 | 2005 +bcc-csm1-1 | /project/cmip5/ETH/cmip5/historical/Amon/*/bcc-csm1-1/r1i1p1/ | 1900 | 2005 +BNU-ESM | /project/cmip5/ETH/cmip5/historical/Amon/*/BNU-ESM/r1i1p1/ | 1900 | 2005 +CanESM2 | /project/cmip5/ETH/cmip5/historical/Amon/*/CanESM2/r1i1p1/ | 1900 | 2005 +CCSM4 | /project/cmip5/ETH/cmip5/historical/Amon/*/CCSM4/r1i1p1/ | 1900 | 2005 +CESM1-BGC | /project/cmip5/ETH/cmip5/historical/Amon/*/CESM1-BGC/r1i1p1/ | 1900 | 2005 +CESM1-CAM5-1-FV2 | /project/cmip5/ETH/cmip5/historical/Amon/*/CESM1-CAM5-1-FV2/r1i1p1/ | 1900 | 2005 +CESM1-CAM5 | /project/cmip5/ETH/cmip5/historical/Amon/*/CESM1-CAM5/r1i1p1/ | 1900 | 2005 +CESM1-FASTCHEM | /project/cmip5/ETH/cmip5/historical/Amon/*/CESM1-FASTCHEM/r1i1p1/ | 1900 | 2005 +CESM1-WACCM | /project/cmip5/ETH/cmip5/historical/Amon/*/CESM1-WACCM/r1i1p1/ | 1900 | 2005 +CMCC-CM | /project/cmip5/ETH/cmip5/historical/Amon/*/CMCC-CM/r1i1p1/ | 1900 | 2005 +CMCC-CMS | /project/cmip5/ETH/cmip5/historical/Amon/*/CMCC-CMS/r1i1p1/ | 1900 | 2005 +CNRM-CM5 | /project/cmip5/ETH/cmip5/historical/Amon/*/CNRM-CM5/r1i1p1/ | 1900 | 2005 +CSIRO-Mk3-6-0 | /project/cmip5/ETH/cmip5/historical/Amon/*/CSIRO-Mk3-6-0/r1i1p1/ | 1900 | 2005 +FGOALS-g2 | /project/cmip5/ETH/cmip5/historical/Amon/*/FGOALS-g2/r1i1p1/ | 1900 | 2005 +FIO-ESM | /project/cmip5/ETH/cmip5/historical/Amon/*/FIO-ESM/r1i1p1/ | 1900 | 2005 +GFDL-CM2p1 | /project/cmip5/ETH/cmip5/historical/Amon/*/GFDL-CM2p1/r1i1p1/ | 1900 | 2005 +GFDL-ESM2G | /project/cmip5/ETH/cmip5/historical/Amon/*/GFDL-ESM2G/r1i1p1/ | 1900 | 2005 +GFDL-ESM2M | /project/cmip5/ETH/cmip5/historical/Amon/*/GFDL-ESM2M/r1i1p1/ | 1900 | 2005 +GISS-E2-H | /project/cmip5/ETH/cmip5/historical/Amon/*/GISS-E2-H/r1i1p1/ | 1900 | 2005 +GISS-E2-H-CC | /project/cmip5/ETH/cmip5/historical/Amon/*/GISS-E2-H-CC/r1i1p1/ | 1900 | 2005 +GISS-E2-R | /project/cmip5/ETH/cmip5/historical/Amon/*/GISS-E2-R/r1i1p1/ | 1900 | 2005 +GISS-E2-R-CC | /project/cmip5/ETH/cmip5/historical/Amon/*/GISS-E2-R-CC/r1i1p1/ | 1900 | 2005 +HadCM3 | /project/cmip5/ETH/cmip5/historical/Amon/*/HadCM3/r1i1p1/ | 1900 | 2005 +HadGEM2-AO | /project/cmip5/ETH/cmip5/historical/Amon/*/HadGEM2-AO/r1i1p1/ | 1900 | 2005 +HadGEM2-CC | /project/cmip5/ETH/cmip5/historical/Amon/*/HadGEM2-CC/r1i1p1/ | 1900 | 2004 +HadGEM2-ES | /project/cmip5/ETH/cmip5/historical/Amon/*/HadGEM2-ES/r1i1p1/ | 1900 | 2004 +inmcm4 | /project/cmip5/ETH/cmip5/historical/Amon/*/inmcm4/r1i1p1/ | 1900 | 2005 +IPSL-CM5A-LR | /project/cmip5/ETH/cmip5/historical/Amon/*/IPSL-CM5A-LR/r1i1p1/ | 1900 | 2005 +IPSL-CM5A-MR | /project/cmip5/ETH/cmip5/historical/Amon/*/IPSL-CM5A-MR/r1i1p1/ | 1900 | 2005 +IPSL-CM5B-LR | /project/cmip5/ETH/cmip5/historical/Amon/*/IPSL-CM5B-LR/r1i1p1/ | 1900 | 2005 +MIROC5 | 
/project/cmip5/ETH/cmip5/historical/Amon/*/MIROC5/r1i1p1/ | 1900 | 2005 +MIROC-ESM | /project/cmip5/ETH/cmip5/historical/Amon/*/MIROC-ESM/r1i1p1/ | 1900 | 2005 +MIROC-ESM-CHEM | /project/cmip5/ETH/cmip5/historical/Amon/*/MIROC-ESM-CHEM/r1i1p1/ | 1900 | 2005 +MPI-ESM-LR | /project/cmip5/ETH/cmip5/historical/Amon/*/MPI-ESM-LR/r1i1p1/ | 1900 | 2005 +MRI-CGCM3 | /project/cmip5/ETH/cmip5/historical/Amon/*/MRI-CGCM3/r1i1p1/ | 1900 | 2005 +MRI-ESM1 | /project/cmip5/ETH/cmip5/historical/Amon/*/MRI-ESM1/r1i1p1/ | 1900 | 2005 +NorESM1-M | /project/cmip5/ETH/cmip5/historical/Amon/*/NorESM1-M/r1i1p1/ | 1900 | 2005 +NorESM1-ME | /project/cmip5/ETH/cmip5/historical/Amon/*/NorESM1-ME/r1i1p1/ | 1900 | 2005 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.lgens b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.lgens new file mode 100644 index 0000000000..8fd9c29e91 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.lgens @@ -0,0 +1,30 @@ +CESM1-CAM5-BGC-LE 001 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.001* | 1979 | 2012 +CESM1-CAM5-BGC-LE 002 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.002* | 1979 | 2012 +CESM1-CAM5-BGC-LE 003 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.003* | 1979 | 2012 +CESM1-CAM5-BGC-LE 004 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.004* | 1979 | 2012 +CESM1-CAM5-BGC-LE 005 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.005* | 1979 | 2012 +CESM1-CAM5-BGC-LE 006 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.006* | 1979 | 2012 +CESM1-CAM5-BGC-LE 007 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.007* | 1979 | 2012 +CESM1-CAM5-BGC-LE 008 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.008* | 1979 | 2012 +CESM1-CAM5-BGC-LE 009 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.009* | 1979 | 2012 +CESM1-CAM5-BGC-LE 010 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.010* | 1979 | 2012 +CESM1-CAM5-BGC-LE 011 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.011* | 1979 | 2012 +CESM1-CAM5-BGC-LE 012 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.012* | 1979 | 2012 +CESM1-CAM5-BGC-LE 013 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.013* | 1979 | 2012 +CESM1-CAM5-BGC-LE 014 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.014* | 1979 | 2012 +CESM1-CAM5-BGC-LE 015 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.015* | 1979 | 2012 +CESM1-CAM5-BGC-LE 016 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.016* | 1979 | 2012 +CESM1-CAM5-BGC-LE 017 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.017* | 1979 | 2012 +CESM1-CAM5-BGC-LE 018 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.018* | 1979 | 2012 +CESM1-CAM5-BGC-LE 019 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.019* | 1979 | 2012 +CESM1-CAM5-BGC-LE 020 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.020* | 1979 | 2012 +CESM1-CAM5-BGC-LE 021 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.021* | 1979 | 2012 +CESM1-CAM5-BGC-LE 022 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.022* | 1979 | 2012 +CESM1-CAM5-BGC-LE 023 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.023* | 1979 | 2012 +CESM1-CAM5-BGC-LE 024 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.024* | 1979 | 2012 +CESM1-CAM5-BGC-LE 025 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.025* | 1979 | 2012 +CESM1-CAM5-BGC-LE 026 | 
/project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.026* | 1979 | 2012 +CESM1-CAM5-BGC-LE 027 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.027* | 1979 | 2012 +CESM1-CAM5-BGC-LE 028 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.028* | 1979 | 2012 +CESM1-CAM5-BGC-LE 029 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.029* | 1979 | 2012 +CESM1-CAM5-BGC-LE 030 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.030* | 1979 | 2012 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.lgens.add2ndobs b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.lgens.add2ndobs new file mode 100644 index 0000000000..d0dda73a4f --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.lgens.add2ndobs @@ -0,0 +1,31 @@ +Observations #2 | /project/cas/asphilli/CVDP/Obs/ | 1979 | 2011 +CESM1-CAM5-BGC-LE 001 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.001* | 1979 | 2012 +CESM1-CAM5-BGC-LE 002 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.002* | 1979 | 2012 +CESM1-CAM5-BGC-LE 003 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.003* | 1979 | 2012 +CESM1-CAM5-BGC-LE 004 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.004* | 1979 | 2012 +CESM1-CAM5-BGC-LE 005 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.005* | 1979 | 2012 +CESM1-CAM5-BGC-LE 006 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.006* | 1979 | 2012 +CESM1-CAM5-BGC-LE 007 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.007* | 1979 | 2012 +CESM1-CAM5-BGC-LE 008 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.008* | 1979 | 2012 +CESM1-CAM5-BGC-LE 009 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.009* | 1979 | 2012 +CESM1-CAM5-BGC-LE 010 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.010* | 1979 | 2012 +CESM1-CAM5-BGC-LE 011 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.011* | 1979 | 2012 +CESM1-CAM5-BGC-LE 012 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.012* | 1979 | 2012 +CESM1-CAM5-BGC-LE 013 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.013* | 1979 | 2012 +CESM1-CAM5-BGC-LE 014 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.014* | 1979 | 2012 +CESM1-CAM5-BGC-LE 015 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.015* | 1979 | 2012 +CESM1-CAM5-BGC-LE 016 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.016* | 1979 | 2012 +CESM1-CAM5-BGC-LE 017 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.017* | 1979 | 2012 +CESM1-CAM5-BGC-LE 018 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.018* | 1979 | 2012 +CESM1-CAM5-BGC-LE 019 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.019* | 1979 | 2012 +CESM1-CAM5-BGC-LE 020 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.020* | 1979 | 2012 +CESM1-CAM5-BGC-LE 021 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.021* | 1979 | 2012 +CESM1-CAM5-BGC-LE 022 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.022* | 1979 | 2012 +CESM1-CAM5-BGC-LE 023 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.023* | 1979 | 2012 +CESM1-CAM5-BGC-LE 024 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.024* | 1979 | 2012 +CESM1-CAM5-BGC-LE 025 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.025* | 1979 | 2012 +CESM1-CAM5-BGC-LE 026 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.026* | 1979 | 2012 +CESM1-CAM5-BGC-LE 027 | 
/project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.027* | 1979 | 2012 +CESM1-CAM5-BGC-LE 028 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.028* | 1979 | 2012 +CESM1-CAM5-BGC-LE 029 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.029* | 1979 | 2012 +CESM1-CAM5-BGC-LE 030 | /project/yampa01/asphilli/CESM1.1_lgens/b.e11.B*.f09_g16.030* | 1979 | 2012 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.multobs b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.multobs new file mode 100644 index 0000000000..a691ab8700 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.multobs @@ -0,0 +1,3 @@ +OBS2 | /project/cas/asphilli/CVDP/Obs_Data/ | 1900 | 2013 +CCSM3 | /home/fasullo/CMIP3_links/20c3m/{atm,land}/mo/*/ncar_ccsm3_0/run1/ | 1900 | 1999 +CESM1-BGC | /project/cmip5/ETH/cmip5/{historical,rcp85}/Amon/*/CESM1-BGC/r1i1p1/ | 1900 | 2005 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.obs1 b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.obs1 new file mode 100644 index 0000000000..c6a2e1aefa --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist.obs1 @@ -0,0 +1,5 @@ +TS | HadISST | /project/cas/asphilli/DSets/hadisst.187001-201312.nc | 1920 | 2011 +PSL | 20thC_ReanV2 | /project/cas/asphilli/20thC_ReanV2/prmsl.mon.mean.187101-201112.nc | 1920 | 2011 +TREFHT | MLOST | /project/cas/asphilli/DSets/mlost.v3.5.3.188001-201312.nc | 1920 | 2011 +PRECT | GPCC | /project/cas/asphilli/DSets/GPCC/full_data_v6_precip_10.190101-201012.nc | 1920 | 2010 +SNOWDP diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist_obs_4sets b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist_obs_4sets new file mode 100644 index 0000000000..ab39dbc3d4 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/example_namelists/namelist_obs_4sets @@ -0,0 +1,15 @@ +PSL | 20thC_ReanV2 | /project/20thC_ReanV2/prmsl.mon.mean.187101-201212.nc | 1920 | 2012 +PSL | HadSLP2r | /project/DSets/hadslp2r.185001-201312.nc | 1920 | 2012 +PSL | ERA20C | /project/ERA20C/msl.mon.mean.190001-201012.nc | 1920 | 2010 +PSL | 20thC_ReanV2 | /project/20thC_ReanV2/prmsl.mon.mean.187101-201212.nc | 1979 | 2012 +TS | ERSST v4 | /project/DSets/ersstv4.185401-201512.nc | 1920 | 2012 +TS | HadISST | /project/DSets/hadisst.187001-201312.nc | 1920 | 2012 +TS | ERSST v3b | /project/DSets/ersstv3b.185401-201401.nc | 1920 | 2012 +TS | HadSST 3.1 | /project/DSets/HadSST.3.1.0.0.median.185001-201309.nc | 1920 | 2012 +TREFHT | GISTEMP | /project/DSets/gistemp.tas.188001-201512.nc | 1920 | 2012 +TREFHT | MLOST | /project/DSets/mlost.v3.5.3.188001-201312.nc | 1920 | 2012 +TREFHT | HadCRUT 4.2 | /project/DSets/HadCRUT.4.2.0.0.temps.185001-201312.nc | 1920 | 2012 +TREFHT | GISTEMP | /project/DSets/gistemp.tas.188001-201512.nc | 1979 | 2015 +PRECT | GPCC | /project/DSets/GPCC/full_data_v6_precip_10.190101-201012.nc | 1920 | 2010 +PRECT | GPCP | /project/DSets/gpcp.mon.mean.197901-201411.nc | 1979 | 2012 +PRECT | GPCP | /project/DSets/gpcp.mon.mean.197901-201411.nc | 1979 | 2013 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/namelist b/esmvaltool/diag_scripts/cvdp/cvdp/namelist new file mode 100644 index 0000000000..6ecac7ae4f --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/namelist @@ -0,0 +1,40 @@ +CESM1-CAM5-BGC-LE #1 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.001.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #2 | 
/glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.002.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #3 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.003.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #4 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.004.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #5 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.005.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #6 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.006.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #7 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.007.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #8 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.008.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #9 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.009.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #10 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.010.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #11 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.011.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #12 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.012.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #13 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.013.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #14 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.014.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #15 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.015.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #16 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.016.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #17 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.017.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #18 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.018.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #19 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.019.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #20 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.020.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #21 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.021.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #22 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.022.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #23 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.023.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #24 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.024.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #25 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.025.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #26 | 
/glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.026.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #27 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.027.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #28 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.028.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #29 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.029.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #30 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.030.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #31 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.031.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #32 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.032.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #33 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.033.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #34 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.034.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #35 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.035.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #101 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.101.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #102 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.102.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #103 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.103.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #104 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.104.*.nc | 1920 | 2017 +CESM1-CAM5-BGC-LE #105 | /glade/p/cesmLE/CESM-CAM5-BGC-LE/{atm,ice,lnd,ocn}/proc/tseries/monthly/*/b.e11.B*C5CNBDRD.f09_g16.105.*.nc | 1920 | 2017 diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/namelist_obs b/esmvaltool/diag_scripts/cvdp/cvdp/namelist_obs new file mode 100644 index 0000000000..5eb1744321 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/namelist_obs @@ -0,0 +1,32 @@ +TS | ERSST v5 | /glade/p/cgd/cas/asphilli/CVDP-OBS/ersstv5.185401-201812.nc | 1920 | 2017 +PSL | ERA20C_ERAI | /glade/p/cgd/cas/asphilli/CVDP-OBS/era20c_erai.mon.mean.msl.190001-201802.nc | 1920 | 2017 +TREFHT | BEST | /glade/p/cgd/cas/asphilli/CVDP-OBS/best.tas.185001-201801.nc | 1920 | 2017 +PRECT | GPCC | /glade/p/cgd/cas/asphilli/CVDP-OBS/gpcc.precip.10.comb_v7v5mon.190101-201802.nc | 1920 | 2017 +aice_nh | Walsh and Chapman | /glade/p/cgd/cas/asphilli/CVDP-OBS/walsh_chapman.NH.seaice.187001-201112.nc | 1953 | 2011 +aice_sh | NASA Bootstrap SH | /glade/p/cgd/cas/asphilli/CVDP-OBS/seaice_conc_monthly_sh_NASA_Bootstrap.nsidc.v03r01.197811-201702.nc | 1979 | 2016 +MOC | CESM1 Forced Ocean Simulation | /glade/p/cgd/cas/asphilli/CVDP-OBS/g.e11_LENS.GECOIAF.T62_g16.009.pop.h.MOC.194801-201512.nc | 1948 | 2015 + +TS | HadISST | /glade/p/cgd/cas/asphilli/CVDP-OBS/hadisst.187001-201812.nc | 1920 | 2017 +PSL | CERA20C_ERAI | /glade/p/cgd/cas/asphilli/CVDP-OBS/cera20c_erai.mon.mean.msl.190101-201802.nc | 1920 | 2017 +TREFHT | GISTEMP | /glade/p/cgd/cas/asphilli/CVDP-OBS/gistemp.tas.188001-201812.nc | 1920 | 2017 
+PRECT | GPCC | /glade/p/cgd/cas/asphilli/CVDP-OBS/gpcc.precip.10.comb_v7v5mon.190101-201802.nc | 1920 | 2017 +aice_nh | Walsh and Chapman | /glade/p/cgd/cas/asphilli/CVDP-OBS/walsh_chapman.NH.seaice.187001-201112.nc | 1953 | 2011 +aice_sh | NASA Bootstrap SH | /glade/p/cgd/cas/asphilli/CVDP-OBS/seaice_conc_monthly_sh_NASA_Bootstrap.nsidc.v03r01.197811-201702.nc | 1979 | 2016 +MOC | CESM1 Forced Ocean Simulation | /glade/p/cgd/cas/asphilli/CVDP-OBS/g.e11_LENS.GECOIAF.T62_g16.009.pop.h.MOC.194801-201512.nc | 1948 | 2015 + +TS | ERSST v5 | /glade/p/cgd/cas/asphilli/CVDP-OBS/ersstv5.185401-201812.nc | 1979 | 2017 +PSL | ERAI | /glade/p/cgd/cas/asphilli/CVDP-OBS/erai.mon.mean.msl.197901-201802.nc | 1979 | 2017 +TREFHT | BEST | /glade/p/cgd/cas/asphilli/CVDP-OBS/best.tas.185001-201801.nc | 1979 | 2017 +PRECT | GPCP | /glade/p/cgd/cas/asphilli/CVDP-OBS/gpcp.mon.mean.197901-201801.nc | 1979 | 2017 +aice_nh | NASA CDR NH | /glade/p/cgd/cas/asphilli/CVDP-OBS/seaice_conc_monthly_nh_NOAA_NSIDC_CDR.v03r01.197811-201702.nc | 1979 | 2016 +aice_sh | NASA CDR SH | /glade/p/cgd/cas/asphilli/CVDP-OBS/seaice_conc_monthly_sh_NOAA_NSIDC_CDR.v03r01.197811-201702.nc | 1979 | 2016 +MOC | CESM1 Forced Ocean Simulation | /glade/p/cgd/cas/asphilli/CVDP-OBS/g.e11_LENS.GECOIAF.T62_g16.009.pop.h.MOC.194801-201512.nc | 1979 | 2015 + +TS | HadISST | /glade/p/cgd/cas/asphilli/CVDP-OBS/hadisst.187001-201812.nc | 1980 | 2017 +PSL | MERRA2 | /glade/p/cgd/cas/asphilli/CVDP-OBS/merra2.mon.SLP.198001-201803.nc | 1980 | 2017 +TREFHT | GISTEMP | /glade/p/cgd/cas/asphilli/CVDP-OBS/gistemp.tas.188001-201812.nc | 1980 | 2017 +PRECT | GPCP | /glade/p/cgd/cas/asphilli/CVDP-OBS/gpcp.mon.mean.197901-201801.nc | 1980 | 2017 +aice_nh | NASA Bootstrap NH | /glade/p/cgd/cas/asphilli/CVDP-OBS/seaice_conc_monthly_nh_NASA_Bootstrap.nsidc.v03r01.197811-201702.nc | 1980 | 2016 +aice_sh | NASA Bootstrap SH | /glade/p/cgd/cas/asphilli/CVDP-OBS/seaice_conc_monthly_sh_NASA_Bootstrap.nsidc.v03r01.197811-201702.nc | 1980 | 2016 +MOC | CESM1 Forced Ocean Simulation | /glade/p/cgd/cas/asphilli/CVDP-OBS/g.e11_LENS.GECOIAF.T62_g16.009.pop.h.MOC.194801-201512.nc | 1979 | 2015 + diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/aice.mean_stddev.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/aice.mean_stddev.ncl new file mode 100644 index 0000000000..52a03ff83b --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/aice.mean_stddev.ncl @@ -0,0 +1,696 @@ +; Calculates SIC hemispheric means and standard deviations +; +; Variables used: sic +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: aice.mean_stddev.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_aice_nh") + na = asciiread("namelist_byvar/namelist_aice_nh",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = 
str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + nsim_sh = numAsciiRow("namelist_byvar/namelist_aice_sh") + na_sh = asciiread("namelist_byvar/namelist_aice_sh",(/nsim/),"string") + names_sh = new(nsim,"string") + paths_sh = new(nsim,"string") + syear_sh = new(nsim,"integer",-999) + eyear_sh = new(nsim,"integer",-999) + do gg = 0,nsim-1 + names_sh(gg) = str_strip(str_get_field(na_sh(gg),1,delim)) + paths_sh(gg) = str_strip(str_get_field(na_sh(gg),2,delim)) + syear_sh(gg) = stringtointeger(str_strip(str_get_field(na_sh(gg),3,delim))) + eyear_sh(gg) = stringtointeger(str_strip(str_get_field(na_sh(gg),4,delim))) + end do + nyr_sh = eyear_sh-syear_sh+1 + nyr_max_sh = max(nyr_sh) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_stddev_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.stddev.djf") + wks_stddev_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.stddev.mam") + wks_stddev_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.stddev.jja") + wks_stddev_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.stddev.son") + wks_stddev_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.stddev.ann") + wks_mean_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.mean.djf") + wks_mean_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.mean.mam") + wks_mean_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.mean.jja") + wks_mean_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.mean.son") + wks_mean_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.mean.ann") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_stddev_djf,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_stddev_mam,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_stddev_jja,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_stddev_son,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_stddev_ann,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_mean_djf,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_mean_mam,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_mean_jja,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_mean_son,"BkBlAqGrYeOrReViWh200") + gsn_define_colormap(wks_mean_ann,"BkBlAqGrYeOrReViWh200") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_stddev_djf,"cb_rainbow") + gsn_define_colormap(wks_stddev_mam,"cb_rainbow") + gsn_define_colormap(wks_stddev_jja,"cb_rainbow") + gsn_define_colormap(wks_stddev_son,"cb_rainbow") + gsn_define_colormap(wks_stddev_ann,"cb_rainbow") + gsn_define_colormap(wks_mean_djf,"cb_rainbow") + gsn_define_colormap(wks_mean_mam,"cb_rainbow") + gsn_define_colormap(wks_mean_jja,"cb_rainbow") + gsn_define_colormap(wks_mean_son,"cb_rainbow") + gsn_define_colormap(wks_mean_ann,"cb_rainbow") + end if + + + plot_mean_nh_djf = new(nsim,"graphic") + plot_mean_nh_mam = new(nsim,"graphic") + plot_mean_nh_jja = new(nsim,"graphic") + plot_mean_nh_son = new(nsim,"graphic") + plot_mean_nh_ann = new(nsim,"graphic") + plot_stddev_nh_djf = new(nsim,"graphic") + plot_stddev_nh_mam = new(nsim,"graphic") + plot_stddev_nh_jja = new(nsim,"graphic") + plot_stddev_nh_son = new(nsim,"graphic") + plot_stddev_nh_ann = new(nsim,"graphic") + + plot_mean_sh_djf = new(nsim,"graphic") + plot_mean_sh_mam = new(nsim,"graphic") + plot_mean_sh_jja = new(nsim,"graphic") + plot_mean_sh_son = new(nsim,"graphic") + plot_mean_sh_ann 
= new(nsim,"graphic") + plot_stddev_sh_djf = new(nsim,"graphic") + plot_stddev_sh_mam = new(nsim,"graphic") + plot_stddev_sh_jja = new(nsim,"graphic") + plot_stddev_sh_son = new(nsim,"graphic") + plot_stddev_sh_ann = new(nsim,"graphic") + + do ee = 0,nsim-1 + aice_nh_flag = 0 + aice_nh = data_read_in_ice(paths(ee),"aice_nh",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(aice_nh,"is_all_missing")) then + delete(aice_nh) + aice_nh_flag = 1 + end if + + if (aice_nh_flag.eq.0) then + do ff = 0,1 + aice_nhT = aice_nh + if (ff.eq.1) then + if (OPT_CLIMO.eq."Full") then + aice_nhT = rmMonAnnCycTLL(aice_nhT) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = aice_nhT + delete(temp_arr&time) + temp_arr&time = cd_calendar(aice_nhT&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + aice_nhT = calcMonAnomTLL(aice_nhT,climo) + delete(climo) + end if + end if + aice_nh_seas = runave_n_Wrap(aice_nhT,3,0,0) + aice_nh_seas(0,:,:) = (/ dim_avg_n(aice_nhT(:1,:,:),0) /) + aice_nh_seas(dimsizes(aice_nhT&time)-1,:,:) = (/ dim_avg_n(aice_nhT(dimsizes(aice_nhT&time)-2:,:,:),0) /) + aice_nh_ann = runave_n_Wrap(aice_nhT,12,0,0) + delete(aice_nhT) + + if (ff.eq.0) then + aice_nh_mean_djf = dim_avg_n_Wrap(aice_nh_seas(0::12,:,:),0) + aice_nh_mean_mam = dim_avg_n_Wrap(aice_nh_seas(3::12,:,:),0) + aice_nh_mean_jja = dim_avg_n_Wrap(aice_nh_seas(6::12,:,:),0) + aice_nh_mean_son = dim_avg_n_Wrap(aice_nh_seas(9::12,:,:),0) + aice_nh_mean_ann = dim_avg_n_Wrap(aice_nh_ann(5::12,:,:),0) + end if + if (ff.eq.1) then + aice_nh_sd_djf = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),aice_nh_seas(0::12,:,:),False,False,0),0) + aice_nh_sd_mam = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),aice_nh_seas(3::12,:,:),False,False,0),0) + aice_nh_sd_jja = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),aice_nh_seas(6::12,:,:),False,False,0),0) + aice_nh_sd_son = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),aice_nh_seas(9::12,:,:),False,False,0),0) + aice_nh_sd_ann = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),aice_nh_ann(5::12,:,:),False,False,0),0) + end if + delete([/aice_nh_seas,aice_nh_ann/]) + end do + delete(aice_nh) + copy_VarMeta(aice_nh_mean_djf,aice_nh_sd_djf) + copy_VarMeta(aice_nh_mean_mam,aice_nh_sd_mam) + copy_VarMeta(aice_nh_mean_jja,aice_nh_sd_jja) + copy_VarMeta(aice_nh_mean_son,aice_nh_sd_son) + copy_VarMeta(aice_nh_mean_ann,aice_nh_sd_ann) + end if + + aice_sh_flag = 0 + aice_sh = data_read_in_ice(paths_sh(ee),"aice_sh",syear_sh(ee),eyear_sh(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(aice_sh,"is_all_missing")) then + delete(aice_sh) + aice_sh_flag = 1 + end if + if (aice_sh_flag.eq.0) then + do ff = 0,1 + aice_shX = aice_sh + if (ff.eq.1) then + if (OPT_CLIMO.eq."Full") then + aice_shX = rmMonAnnCycTLL(aice_shX) + else + check_custom_climo(names_sh(ee),syear_sh(ee),eyear_sh(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = aice_shX + delete(temp_arr&time) + temp_arr&time = cd_calendar(aice_shX&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + aice_shX = 
calcMonAnomTLL(aice_shX,climo) + delete(climo) + end if + end if + aice_sh_seas = runave_n_Wrap(aice_shX,3,0,0) + aice_sh_seas(0,:,:) = (/ dim_avg_n(aice_shX(:1,:,:),0) /) + aice_sh_seas(dimsizes(aice_shX&time)-1,:,:) = (/ dim_avg_n(aice_shX(dimsizes(aice_shX&time)-2:,:,:),0) /) + aice_sh_ann = runave_n_Wrap(aice_shX,12,0,0) + delete(aice_shX) + + if (ff.eq.0) then + aice_sh_mean_djf = dim_avg_n_Wrap(aice_sh_seas(0::12,:,:),0) + aice_sh_mean_mam = dim_avg_n_Wrap(aice_sh_seas(3::12,:,:),0) + aice_sh_mean_jja = dim_avg_n_Wrap(aice_sh_seas(6::12,:,:),0) + aice_sh_mean_son = dim_avg_n_Wrap(aice_sh_seas(9::12,:,:),0) + aice_sh_mean_ann = dim_avg_n_Wrap(aice_sh_ann(5::12,:,:),0) + end if + if (ff.eq.1) then + aice_sh_sd_djf = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr_sh(ee)-1,1),aice_sh_seas(0::12,:,:),False,False,0),0) + aice_sh_sd_mam = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr_sh(ee)-1,1),aice_sh_seas(3::12,:,:),False,False,0),0) + aice_sh_sd_jja = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr_sh(ee)-1,1),aice_sh_seas(6::12,:,:),False,False,0),0) + aice_sh_sd_son = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr_sh(ee)-1,1),aice_sh_seas(9::12,:,:),False,False,0),0) + aice_sh_sd_ann = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr_sh(ee)-1,1),aice_sh_ann(5::12,:,:),False,False,0),0) + end if + delete([/aice_sh_seas,aice_sh_ann/]) + end do + delete(aice_sh) + copy_VarMeta(aice_sh_mean_djf,aice_sh_sd_djf) + copy_VarMeta(aice_sh_mean_mam,aice_sh_sd_mam) + copy_VarMeta(aice_sh_mean_jja,aice_sh_sd_jja) + copy_VarMeta(aice_sh_mean_son,aice_sh_sd_son) + copy_VarMeta(aice_sh_mean_ann,aice_sh_sd_ann) + end if + + if (OUTPUT_DATA.eq."True".and.aice_nh_flag.eq.0) then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.aice.mean_stddev.nh."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + nh_mean_djf = aice_nh_mean_djf + if (isatt(nh_mean_djf,"lat2d")) then ; if there is a lat2d there will be a lon2d + delete(nh_mean_djf@lat2d) + delete(nh_mean_djf@lon2d) + LAT2D = aice_nh_mean_djf@lat2d + copy_VarCoords(nh_mean_djf,LAT2D) + LON2D = aice_nh_mean_djf@lon2d + copy_VarCoords(nh_mean_djf,LON2D) + z->lat2d_ice_nh = set_varAtts(LAT2D,"Northern Hemisphere ice grid 2-dimensional latitudes","","") + z->lon2d_ice_nh = set_varAtts(LON2D,"Northern Hemisphere ice grid 2-dimensional longitudes","","") + delete([/LAT2D,LON2D/]) + nh_mean_djf@coordinates ="lat2d_ice_nh lon2d_ice_nh" + end if + if (isatt(nh_mean_djf,"area")) then + delete(nh_mean_djf@area) + end if + nh_mean_djf@long_name = nh_mean_djf@long_name+" mean" + nh_mean_mam = (/ aice_nh_mean_mam /) + copy_VarMeta(nh_mean_djf,nh_mean_mam) + nh_mean_jja = (/ aice_nh_mean_jja /) + 
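+         ; Illustrative aside (not part of CVDP): a minimal sketch of the
+         ; metadata idiom used throughout this block. Assigning with (/ ... /)
+         ; copies values only, so coordinates and attributes are re-attached
+         ; explicitly with copy_VarMeta. All *_demo names are hypothetical.
+         base_demo = (/1.,2.,3./)
+         base_demo@units = "10^12 m2"
+         vals_demo = (/ base_demo /)         ; values only; units attribute is gone
+         copy_VarMeta(base_demo,vals_demo)   ; metadata restored onto the copy
+         delete([/base_demo,vals_demo/])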
copy_VarMeta(nh_mean_djf,nh_mean_jja)
+      nh_mean_son = (/ aice_nh_mean_son /)
+      copy_VarMeta(nh_mean_djf,nh_mean_son)
+      nh_mean_ann = (/ aice_nh_mean_ann /)
+      copy_VarMeta(nh_mean_djf,nh_mean_ann)
+      nh_sd_djf = aice_nh_sd_djf
+      if (isatt(nh_sd_djf,"lat2d")) then
+         delete(nh_sd_djf@lat2d)
+         delete(nh_sd_djf@lon2d)
+         nh_sd_djf@coordinates ="lat2d_ice_nh lon2d_ice_nh"
+      end if
+      if (isatt(nh_sd_djf,"area")) then
+         delete(nh_sd_djf@area)
+      end if
+      nh_sd_djf@long_name = nh_sd_djf@long_name+" standard deviation"
+      nh_sd_mam = (/ aice_nh_sd_mam /)
+      copy_VarMeta(nh_sd_djf,nh_sd_mam)
+      nh_sd_jja = (/ aice_nh_sd_jja /)
+      copy_VarMeta(nh_sd_djf,nh_sd_jja)
+      nh_sd_son = (/ aice_nh_sd_son /)
+      copy_VarMeta(nh_sd_djf,nh_sd_son)
+      nh_sd_ann = (/ aice_nh_sd_ann /)
+      copy_VarMeta(nh_sd_djf,nh_sd_ann)
+      z->sic_nh_spatialmean_djf = set_varAtts(nh_mean_djf,"Northern Hemisphere sic mean (DJF)","","")
+      z->sic_nh_spatialmean_mam = set_varAtts(nh_mean_mam,"Northern Hemisphere sic mean (MAM)","","")
+      z->sic_nh_spatialmean_jja = set_varAtts(nh_mean_jja,"Northern Hemisphere sic mean (JJA)","","")
+      z->sic_nh_spatialmean_son = set_varAtts(nh_mean_son,"Northern Hemisphere sic mean (SON)","","")
+      z->sic_nh_spatialmean_ann = set_varAtts(nh_mean_ann,"Northern Hemisphere sic mean (annual)","","")
+
+      z->sic_nh_spatialstddev_djf = set_varAtts(nh_sd_djf,"Northern Hemisphere sic standard deviation (DJF)","","")
+      z->sic_nh_spatialstddev_mam = set_varAtts(nh_sd_mam,"Northern Hemisphere sic standard deviation (MAM)","","")
+      z->sic_nh_spatialstddev_jja = set_varAtts(nh_sd_jja,"Northern Hemisphere sic standard deviation (JJA)","","")
+      z->sic_nh_spatialstddev_son = set_varAtts(nh_sd_son,"Northern Hemisphere sic standard deviation (SON)","","")
+      z->sic_nh_spatialstddev_ann = set_varAtts(nh_sd_ann,"Northern Hemisphere sic standard deviation (annual)","","")
+      delete([/nh_mean_djf,nh_mean_mam,nh_mean_jja,nh_mean_son,nh_mean_ann/])
+      delete([/nh_sd_djf,nh_sd_mam,nh_sd_jja,nh_sd_son,nh_sd_ann/])
+      delete(z)
+   end if
+   if (OUTPUT_DATA.eq."True".and.aice_sh_flag.eq.0) then
+      modname = str_sub_str(names_sh(ee)," ","_")
+      bc = (/"/","'","(",")"/)
+      do gg = 0,dimsizes(bc)-1
+         modname = str_sub_str(modname,bc(gg),"_")
+      end do
+      fn = getenv("OUTDIR")+modname+".cvdp_data.aice.mean_stddev.sh."+syear_sh(ee)+"-"+eyear_sh(ee)+".nc"
+      if (.not.isfilepresent2(fn)) then
+         z = addfile(fn,"c")
+         z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+         z@notes = "Data from "+names_sh(ee)+" from "+syear_sh(ee)+"-"+eyear_sh(ee)
+         if (OPT_CLIMO.eq."Full") then
+            z@climatology = syear_sh(ee)+"-"+eyear_sh(ee)+" climatology removed prior to all calculations (other than means)"
+         else
+            if (CLIMO_SYEAR.lt.0) then
+               z@climatology = (eyear_sh(ee)+CLIMO_SYEAR)+"-"+(eyear_sh(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+            else
+               z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+            end if
+         end if
+         z@Conventions = "CF-1.6"
+      else
+         z = addfile(fn,"w")
+      end if
+      sh_mean_djf = aice_sh_mean_djf
+      if (isatt(sh_mean_djf,"lat2d")) then   ; if there is a lat2d there will be a lon2d
+         delete(sh_mean_djf@lat2d)
+         delete(sh_mean_djf@lon2d)
+         LAT2D = aice_sh_mean_djf@lat2d
+         copy_VarCoords(sh_mean_djf,LAT2D)
+         LON2D = aice_sh_mean_djf@lon2d
+         copy_VarCoords(sh_mean_djf,LON2D)
+         z->lat2d_ice_sh = set_varAtts(LAT2D,"Southern Hemisphere ice grid 2-dimensional latitudes","","")
+         z->lon2d_ice_sh = set_varAtts(LON2D,"Southern Hemisphere ice grid 2-dimensional longitudes","","")
+         delete([/LAT2D,LON2D/])
+         sh_mean_djf@coordinates ="lat2d_ice_sh lon2d_ice_sh"
+      end if
+      if (isatt(sh_mean_djf,"area")) then
+         delete(sh_mean_djf@area)
+      end if
+      sh_mean_djf@long_name = sh_mean_djf@long_name+" mean"
+      sh_mean_djf!0 = "j2"
+      sh_mean_djf!1 = "i2"
+      sh_mean_mam = (/ aice_sh_mean_mam /)
+      copy_VarMeta(sh_mean_djf,sh_mean_mam)
+      sh_mean_jja = (/ aice_sh_mean_jja /)
+      copy_VarMeta(sh_mean_djf,sh_mean_jja)
+      sh_mean_son = (/ aice_sh_mean_son /)
+      copy_VarMeta(sh_mean_djf,sh_mean_son)
+      sh_mean_ann = (/ aice_sh_mean_ann /)
+      copy_VarMeta(sh_mean_djf,sh_mean_ann)
+      sh_sd_djf = aice_sh_sd_djf
+      if (isatt(sh_sd_djf,"lat2d")) then
+         delete(sh_sd_djf@lat2d)
+         delete(sh_sd_djf@lon2d)
+         sh_sd_djf@coordinates ="lat2d_ice_sh lon2d_ice_sh"
+      end if
+      if (isatt(sh_sd_djf,"area")) then
+         delete(sh_sd_djf@area)
+      end if
+      sh_sd_djf@long_name = sh_sd_djf@long_name+" standard deviation"
+      sh_sd_djf!0 = "j2"
+      sh_sd_djf!1 = "i2"
+      sh_sd_mam = (/ aice_sh_sd_mam /)
+      copy_VarMeta(sh_sd_djf,sh_sd_mam)
+      sh_sd_jja = (/ aice_sh_sd_jja /)
+      copy_VarMeta(sh_sd_djf,sh_sd_jja)
+      sh_sd_son = (/ aice_sh_sd_son /)
+      copy_VarMeta(sh_sd_djf,sh_sd_son)
+      sh_sd_ann = (/ aice_sh_sd_ann /)
+      copy_VarMeta(sh_sd_djf,sh_sd_ann)
+      z->sic_sh_spatialmean_djf = set_varAtts(sh_mean_djf,"Southern Hemisphere sic mean (DJF)","","")
+      z->sic_sh_spatialmean_mam = set_varAtts(sh_mean_mam,"Southern Hemisphere sic mean (MAM)","","")
+      z->sic_sh_spatialmean_jja = set_varAtts(sh_mean_jja,"Southern Hemisphere sic mean (JJA)","","")
+      z->sic_sh_spatialmean_son = set_varAtts(sh_mean_son,"Southern Hemisphere sic mean (SON)","","")
+      z->sic_sh_spatialmean_ann = set_varAtts(sh_mean_ann,"Southern Hemisphere sic mean (annual)","","")
+
+      z->sic_sh_spatialstddev_djf = set_varAtts(sh_sd_djf,"Southern Hemisphere sic standard deviation (DJF)","","")
+      z->sic_sh_spatialstddev_mam = set_varAtts(sh_sd_mam,"Southern Hemisphere sic standard deviation (MAM)","","")
+      z->sic_sh_spatialstddev_jja = set_varAtts(sh_sd_jja,"Southern Hemisphere sic standard deviation (JJA)","","")
+      z->sic_sh_spatialstddev_son = set_varAtts(sh_sd_son,"Southern Hemisphere sic standard deviation (SON)","","")
+      z->sic_sh_spatialstddev_ann = set_varAtts(sh_sd_ann,"Southern Hemisphere sic standard deviation (annual)","","")
+      delete([/sh_mean_djf,sh_mean_mam,sh_mean_jja,sh_mean_son,sh_mean_ann/])
+      delete([/sh_sd_djf,sh_sd_mam,sh_sd_jja,sh_sd_son,sh_sd_ann/])
+      delete(z)
+   end if
+   if (aice_nh_flag.eq.0) then
+      aice_nh_mean_djf = where(aice_nh_mean_djf.lt.1,aice_nh_mean_djf@_FillValue,aice_nh_mean_djf)
+      aice_nh_mean_mam = where(aice_nh_mean_mam.lt.1,aice_nh_mean_mam@_FillValue,aice_nh_mean_mam)
+      aice_nh_mean_jja = where(aice_nh_mean_jja.lt.1,aice_nh_mean_jja@_FillValue,aice_nh_mean_jja)
+      aice_nh_mean_son = where(aice_nh_mean_son.lt.1,aice_nh_mean_son@_FillValue,aice_nh_mean_son)
+      aice_nh_mean_ann = where(aice_nh_mean_ann.lt.1,aice_nh_mean_ann@_FillValue,aice_nh_mean_ann)
+      aice_nh_sd_djf = where(aice_nh_sd_djf.eq.0,aice_nh_sd_djf@_FillValue,aice_nh_sd_djf)
+      aice_nh_sd_mam = where(aice_nh_sd_mam.eq.0,aice_nh_sd_mam@_FillValue,aice_nh_sd_mam)
+      aice_nh_sd_jja = where(aice_nh_sd_jja.eq.0,aice_nh_sd_jja@_FillValue,aice_nh_sd_jja)
+      aice_nh_sd_son = where(aice_nh_sd_son.eq.0,aice_nh_sd_son@_FillValue,aice_nh_sd_son)
+      aice_nh_sd_ann = where(aice_nh_sd_ann.eq.0,aice_nh_sd_ann@_FillValue,aice_nh_sd_ann)
+   end if
+   if (aice_sh_flag.eq.0) then
+      aice_sh_mean_djf =
where(aice_sh_mean_djf.lt.1,aice_sh_mean_djf@_FillValue,aice_sh_mean_djf) + aice_sh_mean_mam = where(aice_sh_mean_mam.lt.1,aice_sh_mean_mam@_FillValue,aice_sh_mean_mam) + aice_sh_mean_jja = where(aice_sh_mean_jja.lt.1,aice_sh_mean_jja@_FillValue,aice_sh_mean_jja) + aice_sh_mean_son = where(aice_sh_mean_son.lt.1,aice_sh_mean_son@_FillValue,aice_sh_mean_son) + aice_sh_mean_ann = where(aice_sh_mean_ann.lt.1,aice_sh_mean_ann@_FillValue,aice_sh_mean_ann) + aice_sh_sd_djf = where(aice_sh_sd_djf.eq.0,aice_sh_sd_djf@_FillValue,aice_sh_sd_djf) + aice_sh_sd_mam = where(aice_sh_sd_mam.eq.0,aice_sh_sd_mam@_FillValue,aice_sh_sd_mam) + aice_sh_sd_jja = where(aice_sh_sd_jja.eq.0,aice_sh_sd_jja@_FillValue,aice_sh_sd_jja) + aice_sh_sd_son = where(aice_sh_sd_son.eq.0,aice_sh_sd_son@_FillValue,aice_sh_sd_son) + aice_sh_sd_ann = where(aice_sh_sd_ann.eq.0,aice_sh_sd_ann@_FillValue,aice_sh_sd_ann) + end if +;========================================================================================== + res = True + res@mpGeophysicalLineColor = "gray42" + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. + end if + res@mpGridAndLimbOn = False + res@mpLandFillColor = "gray75" + res@mpFillDrawOrder = "PostDraw" + res@mpPerimDrawOrder = "PostDraw" + + res@mpOutlineOn = True + res@mpMinLatF = 40. + res@mpCenterLonF = 0. + res@gsnPolar = "NH" + res@gsnDraw = False + res@gsnFrame = False + res@gsnAddCyclic = True + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@trGridType = "TriangularMesh" +; res@cnFillMode = "RasterFill" + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.03 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.03 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + if (nsim.le.5) then + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@gsnRightStringFontHeightF = 0.018 + else + res@gsnLeftStringFontHeightF = 0.024 + res@gsnCenterStringFontHeightF = 0.028 + res@gsnRightStringFontHeightF = 0.024 + end if + + + res@cnLevelSelectionMode = "ExplicitLevels" + + sres = res + res@cnLevels = (/4,8,12,16,20,24,28,32,36/) + sres@cnLevels = (/5,10,15,20,30,40,50,60,70,80,85,90,95,99/) + contour_means = sres@cnLevels ; for use in paneling section + contour_sd = res@cnLevels + if (COLORMAP.eq.0) then + res@cnFillColors = (/42,29,80,95,105,120,140,161,170,193/) ; radar: (/5,6,7,8,9,11,12,13,14,15/) + sres@cnFillColors = (/52,42,34,24,65,80,95,105,120,140,155,161,170,184,193/) + end if + if (COLORMAP.eq.1) then + res@cnFillColors = (/2,18,34,50,66,82,98,114,137,162/) + sres@cnFillColors = (/8,26,38,50,62,74,86,98,110,122,134,146,158,170,182/) + end if + + if (aice_nh_flag.eq.0) then + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnRightString = aice_nh_sd_djf@units + res@gsnCenterString = names(ee) + plot_stddev_nh_djf(ee) = gsn_csm_contour_map(wks_stddev_djf,aice_nh_sd_djf,res) + plot_stddev_nh_mam(ee) = gsn_csm_contour_map(wks_stddev_mam,aice_nh_sd_mam,res) + plot_stddev_nh_jja(ee) = gsn_csm_contour_map(wks_stddev_jja,aice_nh_sd_jja,res) + plot_stddev_nh_son(ee) = gsn_csm_contour_map(wks_stddev_son,aice_nh_sd_son,res) + plot_stddev_nh_ann(ee) = gsn_csm_contour_map(wks_stddev_ann,aice_nh_sd_ann,res) + + sres@gsnLeftString = syear(ee)+"-"+eyear(ee) + sres@gsnRightString = aice_nh_mean_djf@units + sres@gsnCenterString = names(ee) + plot_mean_nh_djf(ee) = 
gsn_csm_contour_map(wks_mean_djf,aice_nh_mean_djf,sres) + plot_mean_nh_mam(ee) = gsn_csm_contour_map(wks_mean_mam,aice_nh_mean_mam,sres) + plot_mean_nh_jja(ee) = gsn_csm_contour_map(wks_mean_jja,aice_nh_mean_jja,sres) + plot_mean_nh_son(ee) = gsn_csm_contour_map(wks_mean_son,aice_nh_mean_son,sres) + plot_mean_nh_ann(ee) = gsn_csm_contour_map(wks_mean_ann,aice_nh_mean_ann,sres) + delete([/aice_nh_sd_djf,aice_nh_sd_mam,aice_nh_sd_jja,aice_nh_sd_son,aice_nh_sd_ann/]) + delete([/aice_nh_mean_djf,aice_nh_mean_mam,aice_nh_mean_jja,aice_nh_mean_son,aice_nh_mean_ann/]) + end if + + delete(res@mpMinLatF) + delete(sres@mpMinLatF) + res@mpMaxLatF = -45. + res@gsnPolar = "SH" + sres@mpMaxLatF = -45. + sres@gsnPolar = "SH" + + if (aice_sh_flag.eq.0) then + res@gsnLeftString = syear_sh(ee)+"-"+eyear_sh(ee) + res@gsnRightString = aice_sh_sd_djf@units + res@gsnCenterString = names_sh(ee) + plot_stddev_sh_djf(ee) = gsn_csm_contour_map(wks_stddev_djf,aice_sh_sd_djf,res) + plot_stddev_sh_mam(ee) = gsn_csm_contour_map(wks_stddev_mam,aice_sh_sd_mam,res) + plot_stddev_sh_jja(ee) = gsn_csm_contour_map(wks_stddev_jja,aice_sh_sd_jja,res) + plot_stddev_sh_son(ee) = gsn_csm_contour_map(wks_stddev_son,aice_sh_sd_son,res) + plot_stddev_sh_ann(ee) = gsn_csm_contour_map(wks_stddev_ann,aice_sh_sd_ann,res) + + sres@gsnLeftString = syear_sh(ee)+"-"+eyear_sh(ee) + sres@gsnRightString = aice_sh_mean_djf@units + sres@gsnCenterString = names_sh(ee) + plot_mean_sh_djf(ee) = gsn_csm_contour_map(wks_mean_djf,aice_sh_mean_djf,sres) + plot_mean_sh_mam(ee) = gsn_csm_contour_map(wks_mean_mam,aice_sh_mean_mam,sres) + plot_mean_sh_jja(ee) = gsn_csm_contour_map(wks_mean_jja,aice_sh_mean_jja,sres) + plot_mean_sh_son(ee) = gsn_csm_contour_map(wks_mean_son,aice_sh_mean_son,sres) + plot_mean_sh_ann(ee) = gsn_csm_contour_map(wks_mean_ann,aice_sh_mean_ann,sres) + delete([/aice_sh_sd_djf,aice_sh_sd_mam,aice_sh_sd_jja,aice_sh_sd_son,aice_sh_sd_ann/]) + delete([/aice_sh_mean_djf,aice_sh_mean_mam,aice_sh_mean_jja,aice_sh_mean_son,aice_sh_mean_ann/]) + end if + delete([/res,sres/]) + end do + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelFontHeightF = 0.013 + panres@lbLabelStride = 1 + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + c_sd = (/4,8,12,16,20,24,28,32,36/) + if (dimsizes(c_sd).eq.dimsizes(contour_sd).and.all((c_sd - contour_sd).eq.0)) then ; needed to make sure contour intervals + panres@lbLabelAlignment = "ExternalEdges" ; set above match the labels set here + panres@lbLabelStrings = (/"0","4","8","12","16","20","24","28","32","36"," "/) + end if + delete([/c_sd,contour_sd/]) + + panres@txString = "SIC Standard Deviations (DJF)" + gsn_panel2(wks_stddev_djf,plot_stddev_nh_djf,(/nrow,ncol/),panres) + gsn_panel2(wks_stddev_djf,plot_stddev_sh_djf,(/nrow,ncol/),panres) + delete(wks_stddev_djf) + + panres@txString = "SIC Standard Deviations (MAM)" + gsn_panel2(wks_stddev_mam,plot_stddev_nh_mam,(/nrow,ncol/),panres) + gsn_panel2(wks_stddev_mam,plot_stddev_sh_mam,(/nrow,ncol/),panres) + 
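+  ; Illustrative aside (not part of CVDP): the ncol/nrow heuristic above picks
+  ; a near-square panel grid. Worked example with a hypothetical nsim of 5:
+  ; ncol = floattointeger(sqrt(5)) = 2 and nrow = 5/2 + mod(5,2) = 3 (integer
+  ; division), so the five panels are laid out on a 3-row by 2-column grid.
+  nsim_demo = 5
+  ncol_demo = floattointeger(sqrt(nsim_demo))
+  nrow_demo = (nsim_demo/ncol_demo)+mod(nsim_demo,ncol_demo)
+  print("demo panel grid: "+nrow_demo+" rows x "+ncol_demo+" columns")
+  delete([/nsim_demo,ncol_demo,nrow_demo/])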
delete(wks_stddev_mam) + + panres@txString = "SIC Standard Deviations (JJA)" + gsn_panel2(wks_stddev_jja,plot_stddev_nh_jja,(/nrow,ncol/),panres) + gsn_panel2(wks_stddev_jja,plot_stddev_sh_jja,(/nrow,ncol/),panres) + delete(wks_stddev_jja) + + panres@txString = "SIC Standard Deviations (SON)" + gsn_panel2(wks_stddev_son,plot_stddev_nh_son,(/nrow,ncol/),panres) + gsn_panel2(wks_stddev_son,plot_stddev_sh_son,(/nrow,ncol/),panres) + delete(wks_stddev_son) + + panres@txString = "SIC Standard Deviations (Annual)" + gsn_panel2(wks_stddev_ann,plot_stddev_nh_ann,(/nrow,ncol/),panres) + gsn_panel2(wks_stddev_ann,plot_stddev_sh_ann,(/nrow,ncol/),panres) + delete(wks_stddev_ann) + + if (isatt(panres,"lbLabelAlignment")) then + delete(panres@lbLabelAlignment) + delete(panres@lbLabelStrings) + end if + + c_me = (/5,10,15,20,30,40,50,60,70,80,85,90,95,99/) + if (dimsizes(c_me).eq.dimsizes(contour_means).and.all((c_me - contour_means).eq.0)) then ; needed to make sure contour intervals + panres@lbLabelAlignment = "ExternalEdges" ; set above match the labels set here + panres@lbLabelStrings = (/"1","5","10","15","20","30","40","50","60","70","80","85","90","95","99"," "/) + end if + delete([/c_me,contour_means/]) + + panres@txString = "SIC Means (DJF)" + gsn_panel2(wks_mean_djf,plot_mean_nh_djf,(/nrow,ncol/),panres) + gsn_panel2(wks_mean_djf,plot_mean_sh_djf,(/nrow,ncol/),panres) + delete(wks_mean_djf) + + panres@txString = "SIC Means (MAM)" + gsn_panel2(wks_mean_mam,plot_mean_nh_mam,(/nrow,ncol/),panres) + gsn_panel2(wks_mean_mam,plot_mean_sh_mam,(/nrow,ncol/),panres) + delete(wks_mean_mam) + + panres@txString = "SIC Means (JJA)" + gsn_panel2(wks_mean_jja,plot_mean_nh_jja,(/nrow,ncol/),panres) + gsn_panel2(wks_mean_jja,plot_mean_sh_jja,(/nrow,ncol/),panres) + delete(wks_mean_jja) + + panres@txString = "SIC Means (SON)" + gsn_panel2(wks_mean_son,plot_mean_nh_son,(/nrow,ncol/),panres) + gsn_panel2(wks_mean_son,plot_mean_sh_son,(/nrow,ncol/),panres) + delete(wks_mean_son) + + panres@txString = "SIC Means (Annual)" + gsn_panel2(wks_mean_ann,plot_mean_nh_ann,(/nrow,ncol/),panres) + gsn_panel2(wks_mean_ann,plot_mean_sh_ann,(/nrow,ncol/),panres) + delete(wks_mean_ann) + delete(panres) +;-------------------------------------------------------------------------------- + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + system("mv "+OUTDIR+"aice.stddev.djf.000001.png "+OUTDIR+"aice.stddev.nh.djf.png") + system("mv "+OUTDIR+"aice.stddev.djf.000002.png "+OUTDIR+"aice.stddev.sh.djf.png") + system("mv "+OUTDIR+"aice.stddev.mam.000001.png "+OUTDIR+"aice.stddev.nh.mam.png") + system("mv "+OUTDIR+"aice.stddev.mam.000002.png "+OUTDIR+"aice.stddev.sh.mam.png") + system("mv "+OUTDIR+"aice.stddev.jja.000001.png "+OUTDIR+"aice.stddev.nh.jja.png") + system("mv "+OUTDIR+"aice.stddev.jja.000002.png "+OUTDIR+"aice.stddev.sh.jja.png") + system("mv "+OUTDIR+"aice.stddev.son.000001.png "+OUTDIR+"aice.stddev.nh.son.png") + system("mv "+OUTDIR+"aice.stddev.son.000002.png "+OUTDIR+"aice.stddev.sh.son.png") + system("mv "+OUTDIR+"aice.stddev.ann.000001.png "+OUTDIR+"aice.stddev.nh.ann.png") + system("mv "+OUTDIR+"aice.stddev.ann.000002.png "+OUTDIR+"aice.stddev.sh.ann.png") + + system("mv "+OUTDIR+"aice.mean.djf.000001.png "+OUTDIR+"aice.mean.nh.djf.png") + system("mv "+OUTDIR+"aice.mean.djf.000002.png "+OUTDIR+"aice.mean.sh.djf.png") + system("mv "+OUTDIR+"aice.mean.mam.000001.png "+OUTDIR+"aice.mean.nh.mam.png") + system("mv "+OUTDIR+"aice.mean.mam.000002.png "+OUTDIR+"aice.mean.sh.mam.png") + system("mv 
"+OUTDIR+"aice.mean.jja.000001.png "+OUTDIR+"aice.mean.nh.jja.png") + system("mv "+OUTDIR+"aice.mean.jja.000002.png "+OUTDIR+"aice.mean.sh.jja.png") + system("mv "+OUTDIR+"aice.mean.son.000001.png "+OUTDIR+"aice.mean.nh.son.png") + system("mv "+OUTDIR+"aice.mean.son.000002.png "+OUTDIR+"aice.mean.sh.son.png") + system("mv "+OUTDIR+"aice.mean.ann.000001.png "+OUTDIR+"aice.mean.nh.ann.png") + system("mv "+OUTDIR+"aice.mean.ann.000002.png "+OUTDIR+"aice.mean.sh.ann.png") + else + system("psplit "+OUTDIR+"aice.stddev.djf.ps "+OUTDIR+"aice_sd") + system("mv "+OUTDIR+"aice_sd0001.ps "+OUTDIR+"aice.stddev.nh.djf.ps") + system("mv "+OUTDIR+"aice_sd0002.ps "+OUTDIR+"aice.stddev.sh.djf.ps") + system("psplit "+OUTDIR+"aice.stddev.mam.ps "+OUTDIR+"aice_sd") + system("mv "+OUTDIR+"aice_sd0001.ps "+OUTDIR+"aice.stddev.nh.mam.ps") + system("mv "+OUTDIR+"aice_sd0002.ps "+OUTDIR+"aice.stddev.sh.mam.ps") + system("psplit "+OUTDIR+"aice.stddev.jja.ps "+OUTDIR+"aice_sd") + system("mv "+OUTDIR+"aice_sd0001.ps "+OUTDIR+"aice.stddev.nh.jja.ps") + system("mv "+OUTDIR+"aice_sd0002.ps "+OUTDIR+"aice.stddev.sh.jja.ps") + system("psplit "+OUTDIR+"aice.stddev.son.ps "+OUTDIR+"aice_sd") + system("mv "+OUTDIR+"aice_sd0001.ps "+OUTDIR+"aice.stddev.nh.son.ps") + system("mv "+OUTDIR+"aice_sd0002.ps "+OUTDIR+"aice.stddev.sh.son.ps") + system("psplit "+OUTDIR+"aice.stddev.ann.ps "+OUTDIR+"aice_sd") + system("mv "+OUTDIR+"aice_sd0001.ps "+OUTDIR+"aice.stddev.nh.ann.ps") + system("mv "+OUTDIR+"aice_sd0002.ps "+OUTDIR+"aice.stddev.sh.ann.ps") + system("rm "+OUTDIR+"aice.stddev.???.ps") + + system("psplit "+OUTDIR+"aice.mean.djf.ps "+OUTDIR+"aice_m") + system("mv "+OUTDIR+"aice_m0001.ps "+OUTDIR+"aice.mean.nh.djf.ps") + system("mv "+OUTDIR+"aice_m0002.ps "+OUTDIR+"aice.mean.sh.djf.ps") + system("psplit "+OUTDIR+"aice.mean.mam.ps "+OUTDIR+"aice_m") + system("mv "+OUTDIR+"aice_m0001.ps "+OUTDIR+"aice.mean.nh.mam.ps") + system("mv "+OUTDIR+"aice_m0002.ps "+OUTDIR+"aice.mean.sh.mam.ps") + system("psplit "+OUTDIR+"aice.mean.jja.ps "+OUTDIR+"aice_m") + system("mv "+OUTDIR+"aice_m0001.ps "+OUTDIR+"aice.mean.nh.jja.ps") + system("mv "+OUTDIR+"aice_m0002.ps "+OUTDIR+"aice.mean.sh.jja.ps") + system("psplit "+OUTDIR+"aice.mean.son.ps "+OUTDIR+"aice_m") + system("mv "+OUTDIR+"aice_m0001.ps "+OUTDIR+"aice.mean.nh.son.ps") + system("mv "+OUTDIR+"aice_m0002.ps "+OUTDIR+"aice.mean.sh.son.ps") + system("psplit "+OUTDIR+"aice.mean.ann.ps "+OUTDIR+"aice_m") + system("mv "+OUTDIR+"aice_m0001.ps "+OUTDIR+"aice.mean.nh.ann.ps") + system("mv "+OUTDIR+"aice_m0002.ps "+OUTDIR+"aice.mean.sh.ann.ps") + system("rm "+OUTDIR+"aice.mean.???.ps") + end if + print("Finished: aice.mean_stddev.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/aice.trends_timeseries.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/aice.trends_timeseries.ncl new file mode 100644 index 0000000000..2feb9611b8 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/aice.trends_timeseries.ncl @@ -0,0 +1,1544 @@ +; Calculates SIC hemispheric trends and extent +; +; Variables used: sic +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: aice.trends_timeseries.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = 
getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_aice_nh") + na = asciiread("namelist_byvar/namelist_aice_nh",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + nsim_sh = numAsciiRow("namelist_byvar/namelist_aice_sh") + na_sh = asciiread("namelist_byvar/namelist_aice_sh",(/nsim/),"string") + names_sh = new(nsim,"string") + paths_sh = new(nsim,"string") + syear_sh = new(nsim,"integer",-999) + eyear_sh = new(nsim,"integer",-999) + do gg = 0,nsim-1 + names_sh(gg) = str_strip(str_get_field(na_sh(gg),1,delim)) + paths_sh(gg) = str_strip(str_get_field(na_sh(gg),2,delim)) + syear_sh(gg) = stringtointeger(str_strip(str_get_field(na_sh(gg),3,delim))) + eyear_sh(gg) = stringtointeger(str_strip(str_get_field(na_sh(gg),4,delim))) + end do + nyr_sh = eyear_sh-syear_sh+1 + nyr_max_sh = max(nyr_sh) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_trends_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.trends.djf") + wks_trends_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.trends.mam") + wks_trends_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.trends.jja") + wks_trends_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.trends.son") + wks_trends_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.trends.ann") + wks_trends_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.trends.mon") + + wks_iceext_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.extent.djf") + wks_iceext_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.extent.mam") + wks_iceext_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.extent.jja") + wks_iceext_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.extent.son") + wks_iceext_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.extent.ann") + wks_iceext_febmar = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.extent.febmar") + wks_iceext_sep = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.extent.sep") + wks_iceext_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"aice.extent.mon") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_trends_djf,"ncl_default") + gsn_define_colormap(wks_trends_mam,"ncl_default") + gsn_define_colormap(wks_trends_jja,"ncl_default") + gsn_define_colormap(wks_trends_son,"ncl_default") + gsn_define_colormap(wks_trends_ann,"ncl_default") + gsn_define_colormap(wks_trends_mon,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_trends_djf,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mam,"BlueDarkRed18") + gsn_define_colormap(wks_trends_jja,"BlueDarkRed18") + gsn_define_colormap(wks_trends_son,"BlueDarkRed18") + gsn_define_colormap(wks_trends_ann,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mon,"BlueDarkRed18") + end if + + plot_trends_nh_djf = new(nsim,"graphic") + plot_trends_nh_mam = new(nsim,"graphic") + plot_trends_nh_jja = new(nsim,"graphic") + plot_trends_nh_son = new(nsim,"graphic") + plot_trends_nh_ann = 
new(nsim,"graphic") + plot_trends_nh_mon = new(nsim,"graphic") + plot_iceext_nh_djf = new(nsim,"graphic") + plot_iceext_nh_mam = new(nsim,"graphic") + plot_iceext_nh_jja = new(nsim,"graphic") + plot_iceext_nh_son = new(nsim,"graphic") + plot_iceext_nh_ann = new(nsim,"graphic") + plot_iceext_nh_mon = new(nsim,"graphic") + plot_iceext_nh_mon_anom = new(nsim,"graphic") + plot_iceext_nh_feb = new(nsim,"graphic") + plot_iceext_nh_mar = new(nsim,"graphic") + plot_iceext_nh_sep = new(nsim,"graphic") + plot_iceext_nh_climo = new(nsim,"graphic") + + plot_trends_sh_djf = new(nsim,"graphic") + plot_trends_sh_mam = new(nsim,"graphic") + plot_trends_sh_jja = new(nsim,"graphic") + plot_trends_sh_son = new(nsim,"graphic") + plot_trends_sh_ann = new(nsim,"graphic") + plot_trends_sh_mon = new(nsim,"graphic") + plot_iceext_sh_djf = new(nsim,"graphic") + plot_iceext_sh_mam = new(nsim,"graphic") + plot_iceext_sh_jja = new(nsim,"graphic") + plot_iceext_sh_son = new(nsim,"graphic") + plot_iceext_sh_ann = new(nsim,"graphic") + plot_iceext_sh_mon = new(nsim,"graphic") + plot_iceext_sh_mon_anom = new(nsim,"graphic") + plot_iceext_sh_feb = new(nsim,"graphic") + plot_iceext_sh_mar = new(nsim,"graphic") + plot_iceext_sh_sep = new(nsim,"graphic") + plot_iceext_sh_climo = new(nsim,"graphic") + + if (isfilepresent2("obs_aice_nh")) then + plot_iceext_nh_obs = new((/6,nsim/),"graphic") + end if + if (isfilepresent2("obs_aice_sh")) then + plot_iceext_sh_obs = new((/6,nsim/),"graphic") + end if + + time_mon2 = ispan(0,11,1) + time_mon2@units = "months since 0000-01-01 00:00:00" + time_mon2@long_name = "Time" + time_mon2@standard_name = "time" + time_mon2@calendar = "standard" + time_mon2!0 = "time_mon2" + time_mon2&time_mon2 = time_mon2 + + do ee = 0,nsim-1 + aice_nh_flag = 0 + aice_nh = data_read_in_ice(paths(ee),"aice_nh",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(aice_nh,"is_all_missing")) then + delete(aice_nh) + aice_nh_flag = 1 + end if + + ph_s = "" ; flag that will be used to denote if pole hole area filled in via pole_hole_area attribute + if (aice_nh_flag.eq.0) then + if (isatt(aice_nh,"area")) then + area3d = aice_nh@area + area3d_c = conform(aice_nh,area3d,(/1,2/)) + aice_nh_sum = aice_nh +; aice_nh_sum = (/ (where((aice_nh/100.).ge.0.15,(aice_nh/100.),aice_nh@_FillValue))*area3d_c /) ; ice area calculation (all cells > 15% kept) + aice_nh_sum = (/ where(aice_nh.ge.15,1.,aice_nh@_FillValue) /) ; ice extent calculation (all cells greater than 15% treated as 100% covered) + aice_nh_sum = aice_nh_sum*area3d_c + wgts = aice_nh_sum(0,:,:) + wgts = 1. + aice_nh_sum_mon = aice_nh_sum(:,0,0) ; preallocate array to retain metadata + aice_nh_sum_mon = (/ wgt_areasum2(aice_nh_sum,wgts,0) /) +; do gg = 0,dimsizes(aice_nh&time)-1 +; aice_nh_sum_mon(gg) = (/ sum(aice_nh_sum(gg,:,:)) /) +; end do + + if (isatt(aice_nh,"pole_hole_area")) then ; special attribute set up to account for pole hole in grids. NSIDC assumes hole is 100% ice covered as of Jan 2016 + ph_area = todouble(aice_nh_sum_mon@pole_hole_area) ; format: start YYYYMM, end YYYYMM, area, start YYYYMM, end YYYYMM, area, etc. 
+ dimZ_ph = dimsizes(ph_area)/3 ; only used for Northern Hemisphere + temp_area_arr = aice_nh_sum_mon + temp_area_arr&time = cd_calendar(aice_nh_sum_mon&time,1) +; printVarSummary(temp_area_arr) +; print(ph_area) + do gg = 0,dimZ_ph-1 + temp_area_arr({ph_area(gg*3):ph_area(gg*3+1)}) = temp_area_arr({ph_area(gg*3):ph_area(gg*3+1)}) + tofloat(ph_area(gg*3+2)) +; print(ph_area(gg*3)+" "+ph_area(gg*3+1)+" "+tofloat(ph_area(gg*3+2))) + end do + aice_nh_sum_mon = (/ temp_area_arr /) + delete([/ph_area,dimZ_ph,temp_area_arr/]) + ph_s = "*" + end if + + aice_nh_sum_mon = aice_nh_sum_mon/1.e12 + aice_nh_sum_mon@units = "10^12 m2" + aice_nh_sum_mon@long_name = "sea_ice_extent" + if (isatt(aice_nh_sum_mon,"coordinates")) then + delete(aice_nh_sum_mon@coordinates) + end if + delete([/aice_nh_sum,area3d,area3d_c,wgts/]) + + taice = new((/dimsizes(aice_nh_sum_mon),1,1/),typeof(aice_nh_sum_mon)) + taice!0 = "time" + taice!1 = "lat" + taice!2 = "lon" + taice(:,0,0) = (/ aice_nh_sum_mon /) ; convert to 3D array so we can use clmMonTLL and calcMonAnomTLL + if (OPT_CLIMO.eq."Full") then + taice = rmMonAnnCycTLL(taice) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = taice + temp_arr&time = cd_calendar(aice_nh_sum_mon&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + taice = calcMonAnomTLL(taice,climo) + delete(climo) + end if + aice_nh_sum_mon_anom = aice_nh_sum_mon + aice_nh_sum_mon_anom = (/ taice(:,0,0) /) + aice_nh_sum_mon_anom@long_name = "sea_ice_extent_anomaly" + delete(taice) + + aice_nh_sum_feb = aice_nh_sum_mon(1::12) + delete(aice_nh_sum_feb&time) + aice_nh_sum_feb@long_name = "February sea_ice_extent" + aice_nh_sum_feb!0 = "TIME" + aice_nh_sum_feb&TIME = ispan(syear(ee),eyear(ee),1) + aice_nh_sum_feb&TIME@units = "YYYY" + aice_nh_sum_feb&TIME@long_name = "time" + aice_nh_sum_mar = aice_nh_sum_mon(2::12) + aice_nh_sum_mar@long_name = "March sea_ice_extent" + copy_VarCoords(aice_nh_sum_feb,aice_nh_sum_mar) + aice_nh_sum_sep = aice_nh_sum_mon(8::12) + aice_nh_sum_sep@long_name = "September sea_ice_extent" + copy_VarCoords(aice_nh_sum_feb,aice_nh_sum_sep) + + aice_nh_sum_climo = new(12,typeof(aice_nh_sum_mon)) + copy_VarAtts(aice_nh_sum_mon,aice_nh_sum_climo) + aice_nh_sum_climo@long_name = "climatological_sea_ice_extent" + aice_nh_sum_climo!0 = "time_mon2" + aice_nh_sum_climo&time_mon2 = time_mon2 + do gg = 0,11 + aice_nh_sum_climo(gg) = (/ avg(aice_nh_sum_mon(gg::12)) /) + end do + + temp = runave_Wrap(aice_nh_sum_mon,3,0) + aice_nh_sum_djf = temp(0::12) + aice_nh_sum_mam = temp(3::12) + aice_nh_sum_jja = temp(6::12) + aice_nh_sum_son = temp(9::12) + delete(temp) + temp = runave_Wrap(aice_nh_sum_mon,12,0) + aice_nh_sum_ann = temp(5::12) + delete(temp) + + delete(aice_nh_sum_djf&time) + aice_nh_sum_djf!0 = "TIME" + aice_nh_sum_djf&TIME = ispan(syear(ee),eyear(ee),1) + aice_nh_sum_djf&TIME@units = "YYYY" + aice_nh_sum_djf&TIME@long_name = "time" + copy_VarMeta(aice_nh_sum_djf,aice_nh_sum_mam) + copy_VarMeta(aice_nh_sum_djf,aice_nh_sum_jja) + copy_VarMeta(aice_nh_sum_djf,aice_nh_sum_son) + copy_VarMeta(aice_nh_sum_djf,aice_nh_sum_ann) + end if + + if (OPT_CLIMO.eq."Full") then + aice_nh = rmMonAnnCycTLL(aice_nh) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = aice_nh + delete(temp_arr&time) + temp_arr&time = 
cd_calendar(aice_nh&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + aice_nh = calcMonAnomTLL(aice_nh,climo) + delete(climo) + end if + + dimZ = dimsizes(aice_nh) + dim_j = dimZ(1) + dim_i = dimZ(2) + tttt = dtrend_msg_n(ispan(0,dimsizes(aice_nh&time)-1,1),aice_nh,False,True,0) + aice_nh_trends_mon = aice_nh(0,:,:) + aice_nh_trends_mon = (/ onedtond(tttt@slope, (/dim_j,dim_i/) ) /) + aice_nh_trends_mon = aice_nh_trends_mon*dimsizes(aice_nh&time) + aice_nh_trends_mon@units = aice_nh@units+" "+nyr(ee)+"yr~S~-1~N~" + delete(tttt) + + aice_nh_seas = runave_n_Wrap(aice_nh,3,0,0) + aice_nh_seas(0,:,:) = (/ dim_avg_n(aice_nh(:1,:,:),0) /) + aice_nh_seas(dimsizes(aice_nh&time)-1,:,:) = (/ dim_avg_n(aice_nh(dimsizes(aice_nh&time)-2:,:,:),0) /) + aice_nh_ann = runave_n_Wrap(aice_nh,12,0,0) + delete(aice_nh) + + aice_nh_trends_seas = aice_nh_seas(:3,:,:) + aice_nh_trends_seas = aice_nh_trends_seas@_FillValue + aice_nh_trends_ann = aice_nh_trends_seas(0,:,:) + do ff = 0,4 + if (ff.le.3) then + tarr = aice_nh_seas(ff*3::12,:,:) + end if + if (ff.eq.4) then + tarr = aice_nh_ann(5::12,:,:) + end if + tttt = dtrend_msg_n(ispan(0,dimsizes(tarr&time)-1,1),tarr,False,True,0) + if (ff.le.3) then + aice_nh_trends_seas(ff,:,:) = (/ onedtond(tttt@slope, (/dim_j,dim_i/) ) /) + end if + if (ff.eq.4) then + aice_nh_trends_ann = (/ onedtond(tttt@slope, (/dim_j,dim_i/) ) /) + end if + delete([/tarr,tttt/]) + end do + aice_nh_trends_seas = aice_nh_trends_seas*nyr(ee) + aice_nh_trends_seas@units = aice_nh_seas@units+" "+nyr(ee)+"yr~S~-1~N~" + aice_nh_trends_ann = aice_nh_trends_ann*nyr(ee) + aice_nh_trends_ann@units = aice_nh_ann@units+" "+nyr(ee)+"yr~S~-1~N~" + delete([/aice_nh_seas,aice_nh_ann,dim_j,dim_i,dimZ/]) + end if + + aice_sh_flag = 0 + aice_sh = data_read_in_ice(paths_sh(ee),"aice_sh",syear_sh(ee),eyear_sh(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(aice_sh,"is_all_missing")) then + delete(aice_sh) + aice_sh_flag = 1 + end if + if (aice_sh_flag.eq.0) then + if (isatt(aice_sh,"area")) then + area3d = aice_sh@area + area3d_c = conform(aice_sh,area3d,(/1,2/)) + aice_sh_sum = aice_sh +; aice_sh_sum = (/ (where((aice_sh/100.).ge.0.15,(aice_sh/100.),aice_sh@_FillValue))*area3d_c /) ; ice area calculation (all cells > 15% kept) + aice_sh_sum = (/ where(aice_sh.ge.15,1.,aice_sh@_FillValue) /) ; ice extent calculation (all cells greater than 15% treated as 100% covered) + aice_sh_sum = aice_sh_sum*area3d_c + wgts = aice_sh_sum(0,:,:) + wgts = 1. 
+ aice_sh_sum_mon = aice_sh_sum(:,0,0) ; preallocate array to retain metadata + aice_sh_sum_mon = (/ wgt_areasum2(aice_sh_sum,wgts,0) /) +; do gg = 0,dimsizes(aice_sh&time)-1 +; aice_sh_sum_mon(gg) = (/ sum(aice_sh_sum(gg,:,:)) /) +; end do + aice_sh_sum_mon = aice_sh_sum_mon/1.e12 + aice_sh_sum_mon@units = "10^12 m2" + aice_sh_sum_mon@long_name = "sea_ice_extent" + if (isatt(aice_sh_sum_mon,"coordinates")) then + delete(aice_sh_sum_mon@coordinates) + end if + delete([/aice_sh_sum,area3d,area3d_c,wgts/]) + + taice = new((/dimsizes(aice_sh_sum_mon),1,1/),typeof(aice_sh_sum_mon)) + taice!0 = "time" + taice!1 = "lat" + taice!2 = "lon" + taice(:,0,0) = (/ aice_sh_sum_mon /) ; convert to 3D array so we can use clmMonTLL and calcMonAnomTLL + if (OPT_CLIMO.eq."Full") then + taice = rmMonAnnCycTLL(taice) + else + check_custom_climo(names_sh(ee),syear_sh(ee),eyear_sh(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = taice + temp_arr&time = cd_calendar(aice_sh_sum_mon&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + taice = calcMonAnomTLL(taice,climo) + delete(climo) + end if + aice_sh_sum_mon_anom = aice_sh_sum_mon + aice_sh_sum_mon_anom = (/ taice(:,0,0) /) + aice_sh_sum_mon_anom@long_name = "sea_ice_extent_anomaly" + delete(taice) + + aice_sh_sum_feb = aice_sh_sum_mon(1::12) + delete(aice_sh_sum_feb&time) + aice_sh_sum_feb@long_name = "February sea_ice_extent" + aice_sh_sum_feb!0 = "TIME" + aice_sh_sum_feb&TIME = ispan(syear_sh(ee),eyear_sh(ee),1) + aice_sh_sum_feb&TIME@units = "YYYY" + aice_sh_sum_feb&TIME@long_name = "time" + aice_sh_sum_mar = aice_sh_sum_mon(2::12) + aice_sh_sum_mar@long_name = "March sea_ice_extent" + copy_VarCoords(aice_sh_sum_feb,aice_sh_sum_mar) + aice_sh_sum_sep = aice_sh_sum_mon(8::12) + aice_sh_sum_sep@long_name = "September sea_ice_extent" + copy_VarCoords(aice_sh_sum_feb,aice_sh_sum_sep) + + aice_sh_sum_climo = new(12,typeof(aice_sh_sum_mon)) + copy_VarAtts(aice_sh_sum_mon,aice_sh_sum_climo) + aice_sh_sum_climo@long_name = "climatological_sea_ice_extent" + aice_sh_sum_climo!0 = "time_mon2" + aice_sh_sum_climo&time_mon2 = time_mon2 + do gg = 0,11 + aice_sh_sum_climo(gg) = (/ avg(aice_sh_sum_mon(gg::12)) /) + end do + + temp = runave_Wrap(aice_sh_sum_mon,3,0) + aice_sh_sum_djf = temp(0::12) + aice_sh_sum_mam = temp(3::12) + aice_sh_sum_jja = temp(6::12) + aice_sh_sum_son = temp(9::12) + delete(temp) + temp = runave_Wrap(aice_sh_sum_mon,12,0) + aice_sh_sum_ann = temp(5::12) + delete(temp) + + delete(aice_sh_sum_djf&time) + aice_sh_sum_djf!0 = "TIME" + aice_sh_sum_djf&TIME = ispan(syear_sh(ee),eyear_sh(ee),1) + aice_sh_sum_djf&TIME@units = "YYYY" + aice_sh_sum_djf&TIME@long_name = "time" + copy_VarMeta(aice_sh_sum_djf,aice_sh_sum_mam) + copy_VarMeta(aice_sh_sum_djf,aice_sh_sum_jja) + copy_VarMeta(aice_sh_sum_djf,aice_sh_sum_son) + copy_VarMeta(aice_sh_sum_djf,aice_sh_sum_ann) + end if + + if (OPT_CLIMO.eq."Full") then + aice_sh = rmMonAnnCycTLL(aice_sh) + else + check_custom_climo(names_sh(ee),syear_sh(ee),eyear_sh(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = aice_sh + delete(temp_arr&time) + temp_arr&time = cd_calendar(aice_sh&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + aice_sh = 
calcMonAnomTLL(aice_sh,climo) + delete(climo) + end if + + dimZ = dimsizes(aice_sh) + dim_j = dimZ(1) + dim_i = dimZ(2) + tttt = dtrend_msg_n(ispan(0,dimsizes(aice_sh&time)-1,1),aice_sh,False,True,0) + aice_sh_trends_mon = aice_sh(0,:,:) + aice_sh_trends_mon = (/ onedtond(tttt@slope, (/dim_j,dim_i/) ) /) + aice_sh_trends_mon = aice_sh_trends_mon*dimsizes(aice_sh&time) + aice_sh_trends_mon@units = aice_sh@units+" "+nyr_sh(ee)+"yr~S~-1~N~" + delete(tttt) + + aice_sh_seas = runave_n_Wrap(aice_sh,3,0,0) + aice_sh_seas(0,:,:) = (/ dim_avg_n(aice_sh(:1,:,:),0) /) + aice_sh_seas(dimsizes(aice_sh&time)-1,:,:) = (/ dim_avg_n(aice_sh(dimsizes(aice_sh&time)-2:,:,:),0) /) + aice_sh_ann = runave_n_Wrap(aice_sh,12,0,0) + delete(aice_sh) + + aice_sh_trends_seas = aice_sh_seas(:3,:,:) + aice_sh_trends_seas = aice_sh_trends_seas@_FillValue + aice_sh_trends_ann = aice_sh_trends_seas(0,:,:) + do ff = 0,4 + if (ff.le.3) then + tarr = aice_sh_seas(ff*3::12,:,:) + end if + if (ff.eq.4) then + tarr = aice_sh_ann(5::12,:,:) + end if + tttt = dtrend_msg_n(ispan(0,dimsizes(tarr&time)-1,1),tarr,False,True,0) + if (ff.le.3) then + aice_sh_trends_seas(ff,:,:) = (/ onedtond(tttt@slope, (/dim_j,dim_i/) ) /) + end if + if (ff.eq.4) then + aice_sh_trends_ann = (/ onedtond(tttt@slope, (/dim_j,dim_i/) ) /) + end if + delete([/tarr,tttt/]) + end do + aice_sh_trends_seas = aice_sh_trends_seas*nyr_sh(ee) + aice_sh_trends_seas@units = aice_sh_seas@units+" "+nyr_sh(ee)+"yr~S~-1~N~" + aice_sh_trends_ann = aice_sh_trends_ann*nyr_sh(ee) + aice_sh_trends_ann@units = aice_sh_ann@units+" "+nyr_sh(ee)+"yr~S~-1~N~" + delete([/aice_sh_seas,aice_sh_ann,dim_j,dim_i,dimZ/]) + end if + + if (OUTPUT_DATA.eq."True".and.aice_nh_flag.eq.0) then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.aice.trends_timeseries.nh."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + nh_trends_seas = aice_nh_trends_seas + if (isatt(nh_trends_seas,"lat2d")) then ; if there is a lat2d there will be a lon2d + LAT2D = nh_trends_seas@lat2d + LON2D = nh_trends_seas@lon2d + delete(nh_trends_seas@lat2d) + delete(nh_trends_seas@lon2d) + copy_VarCoords(nh_trends_seas(0,:,:),LAT2D) + copy_VarCoords(nh_trends_seas(0,:,:),LON2D) + z->lat2d_ice_nh = set_varAtts(LAT2D,"Northern Hemisphere ice grid 2-dimensional latitudes","","") + z->lon2d_ice_nh = set_varAtts(LON2D,"Northern Hemisphere ice grid 2-dimensional longitudes","","") + delete([/LAT2D,LON2D/]) + nh_trends_seas@coordinates ="lat2d_ice_nh lon2d_ice_nh" + end if + if (isatt(nh_trends_seas,"area")) then + delete(nh_trends_seas@area) + end if + nh_trends_ann = (/ aice_nh_trends_ann /) + copy_VarMeta(nh_trends_seas(0,:,:),nh_trends_ann) + nh_trends_mon = (/ 
aice_nh_trends_mon /) + copy_VarMeta(nh_trends_seas(0,:,:),nh_trends_mon) + z->sic_nh_trends_djf = set_varAtts(nh_trends_seas(0,:,:),"Northern Hemisphere sic trends (DJF)","","") + z->sic_nh_trends_mam = set_varAtts(nh_trends_seas(1,:,:),"Northern Hemisphere sic trends (MAM)","","") + z->sic_nh_trends_jja = set_varAtts(nh_trends_seas(2,:,:),"Northern Hemisphere sic trends (JJA)","","") + z->sic_nh_trends_son = set_varAtts(nh_trends_seas(3,:,:),"Northern Hemisphere sic trends (SON)","","") + z->sic_nh_trends_ann = set_varAtts(nh_trends_ann,"Northern Hemisphere sic trends (annual)","","") + z->sic_nh_trends_mon = set_varAtts(nh_trends_mon,"Northern Hemisphere sic trends (monthly)","","") + + if (isvar("aice_nh_sum_djf")) then + nh_sum_climo = (/ aice_nh_sum_climo /) + copy_VarAtts(aice_nh_sum_djf,nh_sum_climo) + nh_sum_climo!0 = "time_mon2" + nh_sum_climo&time_mon2 = time_mon2 + if (isatt(nh_sum_climo,"lat2d")) then + delete(nh_sum_climo@lat2d) + delete(nh_sum_climo@lon2d) + end if + if (isatt(nh_sum_climo,"area")) then + delete(nh_sum_climo@area) + end if + z->sic_nh_extent_climo = set_varAtts(nh_sum_climo,"Northern Hemisphere sic extent climatology","","") + nh_sum_djf = aice_nh_sum_djf + if (isatt(nh_sum_djf,"lat2d")) then + delete(nh_sum_djf@lat2d) + delete(nh_sum_djf@lon2d) + end if + if (isatt(nh_sum_djf,"area")) then + delete(nh_sum_djf@area) + end if + nh_sum_mam = (/ aice_nh_sum_mam /) + copy_VarMeta(nh_sum_djf,nh_sum_mam) + nh_sum_jja = (/ aice_nh_sum_jja /) + copy_VarMeta(nh_sum_djf,nh_sum_jja) + nh_sum_son = (/ aice_nh_sum_son /) + copy_VarMeta(nh_sum_djf,nh_sum_son) + nh_sum_ann = (/ aice_nh_sum_ann /) + copy_VarMeta(nh_sum_djf,nh_sum_ann) + nh_sum_feb = (/ aice_nh_sum_feb /) + copy_VarMeta(nh_sum_djf,nh_sum_feb) + nh_sum_mar = (/ aice_nh_sum_mar /) + copy_VarMeta(nh_sum_djf,nh_sum_mar) + nh_sum_sep = (/ aice_nh_sum_sep /) + copy_VarMeta(nh_sum_djf,nh_sum_sep) + nh_sum_mon = aice_nh_sum_mon + nh_sum_mon_anom = aice_nh_sum_mon_anom + if (isatt(nh_sum_mon,"lat2d")) then + delete(nh_sum_mon@lat2d) + delete(nh_sum_mon@lon2d) + delete(nh_sum_mon_anom@lat2d) + delete(nh_sum_mon_anom@lon2d) + end if + if (isatt(nh_sum_mon,"area")) then + delete(nh_sum_mon@area) + delete(nh_sum_mon_anom@area) + end if + z->sic_nh_extent_djf = set_varAtts(nh_sum_djf,"Northern Hemisphere sic extent timeseries (DJF)","","") + z->sic_nh_extent_mam = set_varAtts(nh_sum_mam,"Northern Hemisphere sic extent timeseries (MAM)","","") + z->sic_nh_extent_jja = set_varAtts(nh_sum_jja,"Northern Hemisphere sic extent timeseries (JJA)","","") + z->sic_nh_extent_son = set_varAtts(nh_sum_son,"Northern Hemisphere sic extent timeseries (SON)","","") + z->sic_nh_extent_ann = set_varAtts(nh_sum_ann,"Northern Hemisphere sic extent timeseries (annual)","","") + z->sic_nh_extent_mon = set_varAtts(nh_sum_mon,"Northern Hemisphere sic extent timeseries (monthly)","","") + z->sic_nh_extent_mon_anom = set_varAtts(nh_sum_mon_anom,"Northern Hemisphere sic extent anomaly timeseries (monthly)","","") + z->sic_nh_extent_feb = set_varAtts(nh_sum_feb,"Northern Hemisphere sic extent timeseries (February)","","") + z->sic_nh_extent_mar = set_varAtts(nh_sum_mar,"Northern Hemisphere sic extent timeseries (March)","","") + z->sic_nh_extent_sep = set_varAtts(nh_sum_sep,"Northern Hemisphere sic extent timeseries (September)","","") + delete([/nh_sum_djf,nh_sum_mam,nh_sum_jja,nh_sum_son,nh_sum_ann,nh_sum_feb,nh_sum_mar,nh_sum_sep,nh_sum_mon,nh_sum_mon_anom/]) + end if + delete([/nh_trends_seas,nh_trends_ann,nh_trends_mon/]) + delete(z) + end if + 
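+   ; Illustrative aside (not part of CVDP): the create-or-append pattern used
+   ; for the per-model netCDF files above, shown on a hypothetical scratch
+   ; file. The real code uses the CVDP helper isfilepresent2; the built-in
+   ; isfilepresent keeps this sketch self-contained.
+   fn_demo = "scratch_demo.nc"
+   if (.not.isfilepresent(fn_demo)) then
+      z_demo = addfile(fn_demo,"c")    ; first writer creates the file
+   else
+      z_demo = addfile(fn_demo,"w")    ; later writers append to it
+   end if
+   delete(z_demo)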
if (OUTPUT_DATA.eq."True".and.aice_sh_flag.eq.0) then
+      modname = str_sub_str(names_sh(ee)," ","_")
+      bc = (/"/","'","(",")"/)
+      do gg = 0,dimsizes(bc)-1
+         modname = str_sub_str(modname,bc(gg),"_")
+      end do
+      fn = getenv("OUTDIR")+modname+".cvdp_data.aice.trends_timeseries.sh."+syear_sh(ee)+"-"+eyear_sh(ee)+".nc"
+      if (.not.isfilepresent2(fn)) then
+         z = addfile(fn,"c")
+         z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+         z@notes = "Data from "+names_sh(ee)+" from "+syear_sh(ee)+"-"+eyear_sh(ee)
+         if (OPT_CLIMO.eq."Full") then
+            z@climatology = syear_sh(ee)+"-"+eyear_sh(ee)+" climatology removed prior to all calculations (other than means)"
+         else
+            if (CLIMO_SYEAR.lt.0) then
+               z@climatology = (eyear_sh(ee)+CLIMO_SYEAR)+"-"+(eyear_sh(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+            else
+               z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+            end if
+         end if
+         z@Conventions = "CF-1.6"
+      else
+         z = addfile(fn,"w")
+      end if
+      sh_trends_seas = aice_sh_trends_seas
+      if (isatt(sh_trends_seas,"lat2d")) then   ; if there is a lat2d there will be a lon2d
+         LAT2D = sh_trends_seas@lat2d
+         LON2D = sh_trends_seas@lon2d
+         delete(sh_trends_seas@lat2d)
+         delete(sh_trends_seas@lon2d)
+         copy_VarCoords(sh_trends_seas(0,:,:),LAT2D)
+         copy_VarCoords(sh_trends_seas(0,:,:),LON2D)
+         z->lat2d_ice_sh = set_varAtts(LAT2D,"Southern Hemisphere ice grid 2-dimensional latitudes","","")
+         z->lon2d_ice_sh = set_varAtts(LON2D,"Southern Hemisphere ice grid 2-dimensional longitudes","","")
+         delete([/LAT2D,LON2D/])
+         sh_trends_seas@coordinates ="lat2d_ice_sh lon2d_ice_sh"
+      end if
+      if (isatt(sh_trends_seas,"area")) then
+         delete(sh_trends_seas@area)
+      end if
+      sh_trends_seas!1 = "j2"
+      sh_trends_seas!2 = "i2"
+      sh_trends_seas@long_name = sh_trends_seas@long_name+" trends"
+      sh_trends_ann = (/ aice_sh_trends_ann /)
+      copy_VarMeta(sh_trends_seas(0,:,:),sh_trends_ann)
+      sh_trends_mon = (/ aice_sh_trends_mon /)
+      copy_VarMeta(sh_trends_seas(0,:,:),sh_trends_mon)
+      z->sic_sh_trends_djf = set_varAtts(sh_trends_seas(0,:,:),"Southern Hemisphere sic trends (DJF)","","")
+      z->sic_sh_trends_mam = set_varAtts(sh_trends_seas(1,:,:),"Southern Hemisphere sic trends (MAM)","","")
+      z->sic_sh_trends_jja = set_varAtts(sh_trends_seas(2,:,:),"Southern Hemisphere sic trends (JJA)","","")
+      z->sic_sh_trends_son = set_varAtts(sh_trends_seas(3,:,:),"Southern Hemisphere sic trends (SON)","","")
+      z->sic_sh_trends_ann = set_varAtts(sh_trends_ann,"Southern Hemisphere sic trends (annual)","","")
+      z->sic_sh_trends_mon = set_varAtts(sh_trends_mon,"Southern Hemisphere sic trends (monthly)","","")
+
+      if (isvar("aice_sh_sum_djf")) then
+         sh_sum_climo = (/ aice_sh_sum_climo /)
+         copy_VarAtts(aice_sh_sum_djf,sh_sum_climo)
+         sh_sum_climo!0 = "time_mon2"
+         sh_sum_climo&time_mon2 = time_mon2
+         if (isatt(sh_sum_climo,"lat2d")) then
+            delete(sh_sum_climo@lat2d)
+            delete(sh_sum_climo@lon2d)
+         end if
+         if (isatt(sh_sum_climo,"area")) then
+            delete(sh_sum_climo@area)
+         end if
+         z->sic_sh_extent_climo = set_varAtts(sh_sum_climo,"Southern Hemisphere sic extent climatology","","")
+         sh_sum_djf = aice_sh_sum_djf
+         if (isatt(sh_sum_djf,"lat2d")) then
+            delete(sh_sum_djf@lat2d)
+            delete(sh_sum_djf@lon2d)
+         end if
+         if (isatt(sh_sum_djf,"area")) then
+            delete(sh_sum_djf@area)
+         end if
+         sh_sum_mam = (/ aice_sh_sum_mam /)
+         copy_VarMeta(sh_sum_djf,sh_sum_mam)
+         sh_sum_jja = (/ aice_sh_sum_jja /)
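+         ; Illustrative aside (not part of CVDP): as with sh_trends_seas above,
+         ; SH arrays are given their own dimension names ("j2","i2") so their
+         ; grid never collides with the NH dimensions downstream. Hypothetical:
+         v_demo = new((/2,3/),float)
+         v_demo!0 = "j2"
+         v_demo!1 = "i2"
+         print("demo dims: "+str_join(getvardims(v_demo),", "))
+         delete(v_demo)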
copy_VarMeta(sh_sum_djf,sh_sum_jja) + sh_sum_son = (/ aice_sh_sum_son /) + copy_VarMeta(sh_sum_djf,sh_sum_son) + sh_sum_ann = (/ aice_sh_sum_ann /) + copy_VarMeta(sh_sum_djf,sh_sum_ann) + sh_sum_feb = (/ aice_sh_sum_feb /) + copy_VarMeta(sh_sum_djf,sh_sum_feb) + sh_sum_mar = (/ aice_sh_sum_mar /) + copy_VarMeta(sh_sum_djf,sh_sum_mar) + sh_sum_sep = (/ aice_sh_sum_sep /) + copy_VarMeta(sh_sum_djf,sh_sum_sep) + sh_sum_mon = aice_sh_sum_mon + sh_sum_mon_anom = aice_sh_sum_mon_anom + if (isatt(sh_sum_mon,"lat2d")) then + delete(sh_sum_mon@lat2d) + delete(sh_sum_mon@lon2d) + delete(sh_sum_mon_anom@lat2d) + delete(sh_sum_mon_anom@lon2d) + end if + if (isatt(sh_sum_mon,"area")) then + delete(sh_sum_mon@area) + delete(sh_sum_mon_anom@area) + end if + z->sic_sh_extent_djf = set_varAtts(sh_sum_djf,"Southern Hemisphere sic extent timeseries (DJF)","","") + z->sic_sh_extent_mam = set_varAtts(sh_sum_mam,"Southern Hemisphere sic extent timeseries (MAM)","","") + z->sic_sh_extent_jja = set_varAtts(sh_sum_jja,"Southern Hemisphere sic extent timeseries (JJA)","","") + z->sic_sh_extent_son = set_varAtts(sh_sum_son,"Southern Hemisphere sic extent timeseries (SON)","","") + z->sic_sh_extent_ann = set_varAtts(sh_sum_ann,"Southern Hemisphere sic extent timeseries (annual)","","") + z->sic_sh_extent_mon = set_varAtts(sh_sum_mon,"Southern Hemisphere sic extent timeseries (monthly)","","") + z->sic_sh_extent_mon_anom = set_varAtts(sh_sum_mon_anom,"Southern Hemisphere sic extent anomaly timeseries (monthly)","","") + z->sic_sh_extent_feb = set_varAtts(sh_sum_feb,"Southern Hemisphere sic extent timeseries (February)","","") + z->sic_sh_extent_mar = set_varAtts(sh_sum_mar,"Southern Hemisphere sic extent timeseries (March)","","") + z->sic_sh_extent_sep = set_varAtts(sh_sum_sep,"Southern Hemisphere sic extent timeseries (September)","","") + delete([/sh_sum_djf,sh_sum_mam,sh_sum_jja,sh_sum_son,sh_sum_ann,sh_sum_feb,sh_sum_mar,sh_sum_sep,sh_sum_mon,sh_sum_mon_anom/]) + end if + delete([/sh_trends_seas,sh_trends_ann,sh_trends_mon/]) + delete(z) + end if + if (aice_nh_flag.eq.0) then + aice_nh_trends_seas = where(aice_nh_trends_seas.eq.0,aice_nh_trends_seas@_FillValue,aice_nh_trends_seas) + aice_nh_trends_ann = where(aice_nh_trends_ann.eq.0,aice_nh_trends_ann@_FillValue,aice_nh_trends_ann) + aice_nh_trends_mon = where(aice_nh_trends_mon.eq.0,aice_nh_trends_mon@_FillValue,aice_nh_trends_mon) + end if + if (aice_sh_flag.eq.0) then + aice_sh_trends_seas = where(aice_sh_trends_seas.eq.0,aice_sh_trends_seas@_FillValue,aice_sh_trends_seas) + aice_sh_trends_ann = where(aice_sh_trends_ann.eq.0,aice_sh_trends_ann@_FillValue,aice_sh_trends_ann) + aice_sh_trends_mon = where(aice_sh_trends_mon.eq.0,aice_sh_trends_mon@_FillValue,aice_sh_trends_mon) + end if +;========================================================================================== + res = True + res@gsnPolar = "NH" + res@mpMinLatF = 40. + res@mpCenterLonF = 0. + res@mpGeophysicalLineColor = "gray42" + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. 
+ end if + res@mpGridAndLimbOn = False + res@mpLandFillColor = "gray75" + res@mpFillDrawOrder = "PostDraw" + res@mpPerimDrawOrder = "PostDraw" + + res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + res@gsnAddCyclic = True + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@trGridType = "TriangularMesh" +; res@cnFillMode = "RasterFill" + res@lbLabelBarOn = False + + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = (/-90,-70,-50,-45,-40,-35,-30,-25,-20,-15,-10,-5,0,5,10,15,20,25,30,35,40,45,50,70,90/) + cmap = gsn_retrieve_colormap(wks_trends_djf) + dimc = dimsizes(cmap) + cmap2 = cmap(::-1,:) + res@cnFillPalette = cmap2(:dimc(0)-3,:) + delete([/cmap,cmap2,dimc/]) + + res@gsnLeftStringOrthogonalPosF = -0.03 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.03 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + if (nsim.le.5) then + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@gsnRightStringFontHeightF = 0.018 + else + res@gsnLeftStringFontHeightF = 0.024 + res@gsnCenterStringFontHeightF = 0.028 + res@gsnRightStringFontHeightF = 0.024 + end if + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray18" + if (wks_type.eq."png") then + xyres@xyLineThicknessF = 4. + else + xyres@xyLineThicknessF = 2. + end if + if (isfilepresent2("obs_aice_nh").and.ee.eq.0) then + xyres@xyLineColor = "black" + else + xyres@xyLineColor = "royalblue" + end if + xyres@tiYAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnLeftStringFontHeightF = 0.0155 + xyres@gsnCenterStringFontHeightF = 0.0155 + xyres@gsnRightStringFontHeightF = 0.012 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnLeftStringFontHeightF = 0.024 + xyres@gsnCenterStringFontHeightF = 0.024 + xyres@gsnRightStringFontHeightF = 0.020 + end if + xyres@gsnLeftStringOrthogonalPosF = 0.025 + xyres@gsnRightStringOrthogonalPosF = xyres@gsnLeftStringOrthogonalPosF + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@xyMonoDashPattern = True + xyres@gsnLeftString = "" + xyres@gsnCenterString = "" + xyres@gsnRightString = "" + + xyres_c = xyres + xyres_c@trXMinF = 1 + xyres_c@trXMaxF = 12 + xyres_c@vpWidthF = 0.65 + xyres_c@vpHeightF = 0.45 + if (isfilepresent2("obs_aice_nh").and.ee.eq.0) then + xyres_c@xyLineColor = "black" + else + xyres_c@xyLineColors = (/"royalblue","gray60"/) + end if + xyres_c@tmXBMode = "Explicit" ; explicit labels + xyres_c@tmXBValues = ispan(1,12,1) + xyres_c@tmXBLabels = (/"Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"/) + xyres_c@tmXTOn = False + xyres_c@gsnLeftStringOrthogonalPosF = 0.025 + xyres_c@gsnCenterStringOrthogonalPosF = xyres_c@gsnLeftStringOrthogonalPosF + xyres_c@gsnRightStringOrthogonalPosF = xyres_c@gsnLeftStringOrthogonalPosF + + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+0.5 + + xyres2 = xyres + xyres2@xyLineColor = "gray60" + xyres2@xyCurveDrawOrder = "PreDraw" + + if (aice_nh_flag.eq.0) then + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnRightString = aice_nh_trends_seas@units + res@gsnCenterString = names(ee) + + plot_trends_nh_djf(ee) = 
gsn_csm_contour_map(wks_trends_djf,aice_nh_trends_seas(0,:,:),res) + plot_trends_nh_mam(ee) = gsn_csm_contour_map(wks_trends_mam,aice_nh_trends_seas(1,:,:),res) + plot_trends_nh_jja(ee) = gsn_csm_contour_map(wks_trends_jja,aice_nh_trends_seas(2,:,:),res) + plot_trends_nh_son(ee) = gsn_csm_contour_map(wks_trends_son,aice_nh_trends_seas(3,:,:),res) + plot_trends_nh_ann(ee) = gsn_csm_contour_map(wks_trends_ann,aice_nh_trends_ann,res) + plot_trends_nh_mon(ee) = gsn_csm_contour_map(wks_trends_mon,aice_nh_trends_mon,res) + delete([/aice_nh_trends_seas,aice_nh_trends_ann,aice_nh_trends_mon/]) + + if (isvar("aice_nh_sum_djf")) then + xyres@gsnLeftString = names(ee)+ph_s + if (isfilepresent2("obs_aice_nh").and.ee.eq.0) then + aice_nh_sum_djf_obs_min = min(aice_nh_sum_djf) + aice_nh_sum_djf_obs_max = max(aice_nh_sum_djf) + aice_nh_sum_mam_obs_min = min(aice_nh_sum_mam) + aice_nh_sum_mam_obs_max = max(aice_nh_sum_mam) + aice_nh_sum_jja_obs_min = min(aice_nh_sum_jja) + aice_nh_sum_jja_obs_max = max(aice_nh_sum_jja) + aice_nh_sum_son_obs_min = min(aice_nh_sum_son) + aice_nh_sum_son_obs_max = max(aice_nh_sum_son) + aice_nh_sum_ann_obs_min = min(aice_nh_sum_ann) + aice_nh_sum_ann_obs_max = max(aice_nh_sum_ann) + aice_nh_sum_mon_obs_min = min(aice_nh_sum_mon) + aice_nh_sum_mon_obs_max = max(aice_nh_sum_mon) + aice_nh_sum_mon_anom_obs_min = min(aice_nh_sum_mon_anom) + aice_nh_sum_mon_anom_obs_max = max(aice_nh_sum_mon_anom) + aice_nh_sum_feb_obs_min = min(aice_nh_sum_feb) + aice_nh_sum_feb_obs_max = max(aice_nh_sum_feb) + aice_nh_sum_mar_obs_min = min(aice_nh_sum_mar) + aice_nh_sum_mar_obs_max = max(aice_nh_sum_mar) + aice_nh_sum_sep_obs_min = min(aice_nh_sum_sep) + aice_nh_sum_sep_obs_max = max(aice_nh_sum_sep) + aice_nh_obs_djf = aice_nh_sum_djf + aice_nh_obs_mam = aice_nh_sum_mam + aice_nh_obs_jja = aice_nh_sum_jja + aice_nh_obs_son = aice_nh_sum_son + aice_nh_obs_ann = aice_nh_sum_ann + aice_nh_obs_mon = aice_nh_sum_mon + aice_nh_obs_mon_anom = aice_nh_sum_mon_anom + aice_nh_obs_feb = aice_nh_sum_feb + aice_nh_obs_mar = aice_nh_sum_mar + aice_nh_obs_sep = aice_nh_sum_sep + aice_nh_obs_climo = aice_nh_sum_climo + end if + + xyres_c@gsnLeftString = syear(ee)+"-"+eyear(ee) + xyres_c@gsnCenterString = names(ee)+ph_s ; ph_s is used to denote extent timeseries that have had pole hole areas added in + xyres_c@gsnRightString = "("+str_sub_str(str_sub_str(aice_nh_sum_climo@units,"m2","m~S~2~N~"),"10^12","10~S~12~N~")+")" + if (ee.eq.0) then + plot_iceext_nh_climo(ee) = gsn_csm_xy(wks_iceext_mon,ispan(1,12,1),aice_nh_sum_climo,xyres_c) + end if + if (ee.ge.1.) 
then + if (isvar("aice_nh_obs_climo")) then + tarr = new((/2,12/),float) + tarr(0,:) = (/ tofloat(aice_nh_sum_climo) /) + tarr(1,:) = (/ tofloat(aice_nh_obs_climo) /) + plot_iceext_nh_climo(ee) = gsn_csm_xy(wks_iceext_mon,ispan(1,12,1),tarr,xyres_c) + delete(tarr) + else + plot_iceext_nh_climo(ee) = gsn_csm_xy(wks_iceext_mon,ispan(1,12,1),aice_nh_sum_climo,xyres_c) + end if + end if + + if (ee.ge.1.and.isvar("aice_nh_sum_djf_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_djf),aice_nh_sum_djf_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_djf),aice_nh_sum_djf_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_djf)-1,1),aice_nh_sum_djf,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_djf@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_djf(ee) = gsn_csm_xy(wks_iceext_djf,ispan(syear(ee),eyear(ee),1),aice_nh_sum_djf,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_djf")) then + plot_iceext_nh_obs_djf = gsn_csm_xy(wks_iceext_djf,ispan(syear(0),eyear(0),1),aice_nh_obs_djf,xyres2) + overlay(plot_iceext_nh_djf(ee),plot_iceext_nh_obs_djf) + end if + + if (ee.ge.1.and.isvar("aice_nh_sum_mam_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_mam),aice_nh_sum_mam_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_mam),aice_nh_sum_mam_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_mam)-1,1),aice_nh_sum_mam,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_mam@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_mam(ee) = gsn_csm_xy(wks_iceext_mam,ispan(syear(ee),eyear(ee),1),aice_nh_sum_mam,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_mam")) then + plot_iceext_nh_obs_mam = gsn_csm_xy(wks_iceext_mam,ispan(syear(0),eyear(0),1),aice_nh_obs_mam,xyres2) + overlay(plot_iceext_nh_mam(ee),plot_iceext_nh_obs_mam) + end if + + if (ee.ge.1.and.isvar("aice_nh_sum_jja_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_jja),aice_nh_sum_jja_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_jja),aice_nh_sum_jja_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_jja)-1,1),aice_nh_sum_jja,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_jja@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_jja(ee) = gsn_csm_xy(wks_iceext_jja,ispan(syear(ee),eyear(ee),1),aice_nh_sum_jja,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_jja")) then + plot_iceext_nh_obs_jja = gsn_csm_xy(wks_iceext_jja,ispan(syear(0),eyear(0),1),aice_nh_obs_jja,xyres2) + overlay(plot_iceext_nh_jja(ee),plot_iceext_nh_obs_jja) + end if + + if (ee.ge.1.and.isvar("aice_nh_sum_son_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_son),aice_nh_sum_son_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_son),aice_nh_sum_son_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_son)-1,1),aice_nh_sum_son,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_son@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_son(ee) = gsn_csm_xy(wks_iceext_son,ispan(syear(ee),eyear(ee),1),aice_nh_sum_son,xyres) + if 
(ee.ge.1.and.isvar("aice_nh_obs_son")) then + plot_iceext_nh_obs_son = gsn_csm_xy(wks_iceext_son,ispan(syear(0),eyear(0),1),aice_nh_obs_son,xyres2) + overlay(plot_iceext_nh_son(ee),plot_iceext_nh_obs_son) + end if + + if (ee.ge.1.and.isvar("aice_nh_sum_ann_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_ann),aice_nh_sum_ann_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_ann),aice_nh_sum_ann_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_ann)-1,1),aice_nh_sum_ann,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_ann@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_ann(ee) = gsn_csm_xy(wks_iceext_ann,ispan(syear(ee),eyear(ee),1),aice_nh_sum_ann,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_ann")) then + plot_iceext_nh_obs_ann = gsn_csm_xy(wks_iceext_ann,ispan(syear(0),eyear(0),1),aice_nh_obs_ann,xyres2) + overlay(plot_iceext_nh_ann(ee),plot_iceext_nh_obs_ann) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_nh_sum_feb_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_feb),aice_nh_sum_feb_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_feb),aice_nh_sum_feb_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_feb)-1,1),aice_nh_sum_feb,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_feb@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_feb(ee) = gsn_csm_xy(wks_iceext_febmar,ispan(syear(ee),eyear(ee),1),aice_nh_sum_feb,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_feb")) then + plot_iceext_nh_obs_feb = gsn_csm_xy(wks_iceext_febmar,ispan(syear(0),eyear(0),1),aice_nh_obs_feb,xyres2) + overlay(plot_iceext_nh_feb(ee),plot_iceext_nh_obs_feb) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_nh_sum_mar_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_mar),aice_nh_sum_mar_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_mar),aice_nh_sum_mar_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_mar)-1,1),aice_nh_sum_mar,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_mar@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_mar(ee) = gsn_csm_xy(wks_iceext_febmar,ispan(syear(ee),eyear(ee),1),aice_nh_sum_mar,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_mar")) then + plot_iceext_nh_obs_mar = gsn_csm_xy(wks_iceext_febmar,ispan(syear(0),eyear(0),1),aice_nh_obs_mar,xyres2) + overlay(plot_iceext_nh_mar(ee),plot_iceext_nh_obs_mar) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_nh_sum_sep_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_sep),aice_nh_sum_sep_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_sep),aice_nh_sum_sep_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_sep)-1,1),aice_nh_sum_sep,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_sep@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_sep(ee) = gsn_csm_xy(wks_iceext_sep,ispan(syear(ee),eyear(ee),1),aice_nh_sum_sep,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_sep")) then + plot_iceext_nh_obs_sep = 
gsn_csm_xy(wks_iceext_sep,ispan(syear(0),eyear(0),1),aice_nh_obs_sep,xyres2) + overlay(plot_iceext_nh_sep(ee),plot_iceext_nh_obs_sep) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_nh_sum_mon_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_mon),aice_nh_sum_mon_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_mon),aice_nh_sum_mon_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_mon)-1,1),aice_nh_sum_mon,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_mon@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_mon(ee) = gsn_csm_xy(wks_iceext_mon,fspan(syear(ee),eyear(ee)+.91667,dimsizes(aice_nh_sum_mon)),aice_nh_sum_mon,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_mon")) then + plot_iceext_nh_obs_mon = gsn_csm_xy(wks_iceext_mon,fspan(syear(0),eyear(0)+.91667,dimsizes(aice_nh_obs_mon)),aice_nh_obs_mon,xyres2) + overlay(plot_iceext_nh_mon(ee),plot_iceext_nh_obs_mon) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_nh_sum_mon_anom_obs_min")) then + xyres@trYMinF = min((/min(aice_nh_sum_mon_anom),aice_nh_sum_mon_anom_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_nh_sum_mon_anom),aice_nh_sum_mon_anom_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_nh_sum_mon_anom)-1,1),aice_nh_sum_mon_anom,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr(ee),2,True)+aice_nh_sum_mon_anom@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_nh_mon_anom(ee) = gsn_csm_xy(wks_iceext_mon,fspan(syear(ee),eyear(ee)+.91667,dimsizes(aice_nh_sum_mon_anom)),aice_nh_sum_mon_anom,xyres) + if (ee.ge.1.and.isvar("aice_nh_obs_mon_anom")) then + plot_iceext_nh_obs_mon_anom = gsn_csm_xy(wks_iceext_mon,fspan(syear(0),eyear(0)+.91667,dimsizes(aice_nh_obs_mon_anom)),aice_nh_obs_mon_anom,xyres2) + overlay(plot_iceext_nh_mon_anom(ee),plot_iceext_nh_obs_mon_anom) + end if + delete([/aice_nh_sum_djf,aice_nh_sum_mam,aice_nh_sum_jja,aice_nh_sum_son,aice_nh_sum_ann,aice_nh_sum_feb,aice_nh_sum_mar,aice_nh_sum_sep,aice_nh_sum_mon,aice_nh_sum_mon_anom,tttt/]) + end if + end if + + delete(res@mpMinLatF) + res@mpMaxLatF = -45. 
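+ ; The gsnRightString trend labels above (and in the SH blocks below) are
+ ; all built the same way: dtrend_msg returns the least-squares slope per
+ ; time step in its "slope" attribute, the label quotes slope*nyr(ee),
+ ; and the nested str_sub_str calls rewrite "m2" and "10^12" into NCL
+ ; text function codes so the units draw as superscripts. A minimal
+ ; sketch with illustrative names ("series" stands for any extent series):
+ ;   tttt  = dtrend_msg(ispan(0,dimsizes(series)-1,1),series,False,True)
+ ;   label = decimalPlaces(tttt@slope*nyr(ee),2,True)+series@units
+ ;   label = str_sub_str(str_sub_str(label,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~")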
+ res@gsnPolar = "SH" + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr_sh(ee)*1.)/nyr_max_sh) + else + xyres@vpWidthF = 0.9 + end if + xyres2@vpWidthF = xyres@vpWidthF + xyres@trXMinF = syear_sh(ee)-.5 + xyres@trXMaxF = eyear_sh(ee)+0.5 + if (aice_sh_flag.eq.0) then + res@gsnLeftString = syear_sh(ee)+"-"+eyear_sh(ee) + res@gsnRightString = aice_sh_trends_seas@units + res@gsnCenterString = names_sh(ee) + plot_trends_sh_djf(ee) = gsn_csm_contour_map(wks_trends_djf,aice_sh_trends_seas(0,:,:),res) + plot_trends_sh_mam(ee) = gsn_csm_contour_map(wks_trends_mam,aice_sh_trends_seas(1,:,:),res) + plot_trends_sh_jja(ee) = gsn_csm_contour_map(wks_trends_jja,aice_sh_trends_seas(2,:,:),res) + plot_trends_sh_son(ee) = gsn_csm_contour_map(wks_trends_son,aice_sh_trends_seas(3,:,:),res) + plot_trends_sh_ann(ee) = gsn_csm_contour_map(wks_trends_ann,aice_sh_trends_ann,res) + plot_trends_sh_mon(ee) = gsn_csm_contour_map(wks_trends_mon,aice_sh_trends_mon,res) + delete([/aice_sh_trends_seas,aice_sh_trends_ann,aice_sh_trends_mon/]) + + if (isvar("aice_sh_sum_djf")) then + xyres@gsnLeftString = names_sh(ee) + if (isfilepresent2("obs_aice_sh").and.ee.eq.0) then + aice_sh_sum_djf_obs_min = min(aice_sh_sum_djf) + aice_sh_sum_djf_obs_max = max(aice_sh_sum_djf) + aice_sh_sum_mam_obs_min = min(aice_sh_sum_mam) + aice_sh_sum_mam_obs_max = max(aice_sh_sum_mam) + aice_sh_sum_jja_obs_min = min(aice_sh_sum_jja) + aice_sh_sum_jja_obs_max = max(aice_sh_sum_jja) + aice_sh_sum_son_obs_min = min(aice_sh_sum_son) + aice_sh_sum_son_obs_max = max(aice_sh_sum_son) + aice_sh_sum_ann_obs_min = min(aice_sh_sum_ann) + aice_sh_sum_ann_obs_max = max(aice_sh_sum_ann) + aice_sh_sum_mon_obs_min = min(aice_sh_sum_mon) + aice_sh_sum_mon_obs_max = max(aice_sh_sum_mon) + aice_sh_sum_mon_anom_obs_min = min(aice_sh_sum_mon_anom) + aice_sh_sum_mon_anom_obs_max = max(aice_sh_sum_mon_anom) + aice_sh_sum_feb_obs_min = min(aice_sh_sum_feb) + aice_sh_sum_feb_obs_max = max(aice_sh_sum_feb) + aice_sh_sum_mar_obs_min = min(aice_sh_sum_mar) + aice_sh_sum_mar_obs_max = max(aice_sh_sum_mar) + aice_sh_sum_sep_obs_min = min(aice_sh_sum_sep) + aice_sh_sum_sep_obs_max = max(aice_sh_sum_sep) + aice_sh_obs_djf = aice_sh_sum_djf + aice_sh_obs_mam = aice_sh_sum_mam + aice_sh_obs_jja = aice_sh_sum_jja + aice_sh_obs_son = aice_sh_sum_son + aice_sh_obs_ann = aice_sh_sum_ann + aice_sh_obs_mon = aice_sh_sum_mon + aice_sh_obs_mon_anom = aice_sh_sum_mon_anom + aice_sh_obs_feb = aice_sh_sum_feb + aice_sh_obs_mar = aice_sh_sum_mar + aice_sh_obs_sep = aice_sh_sum_sep + aice_sh_obs_climo = aice_sh_sum_climo + end if + + xyres_c@gsnLeftString = syear_sh(ee)+"-"+eyear_sh(ee) + xyres_c@gsnCenterString = names_sh(ee) + xyres_c@gsnRightString = "("+str_sub_str(str_sub_str(aice_sh_sum_climo@units,"m2","m~S~2~N~"),"10^12","10~S~12~N~")+")" + if (ee.eq.0) then + plot_iceext_sh_climo(ee) = gsn_csm_xy(wks_iceext_mon,ispan(1,12,1),aice_sh_sum_climo,xyres_c) + end if + if (ee.ge.1.) 
then + if (isvar("aice_sh_obs_climo")) then + tarr = new((/2,12/),float) + tarr(0,:) = (/ tofloat(aice_sh_sum_climo) /) + tarr(1,:) = (/ tofloat(aice_sh_obs_climo) /) + plot_iceext_sh_climo(ee) = gsn_csm_xy(wks_iceext_mon,ispan(1,12,1),tarr,xyres_c) + delete(tarr) + else + plot_iceext_sh_climo(ee) = gsn_csm_xy(wks_iceext_mon,ispan(1,12,1),aice_sh_sum_climo,xyres_c) + end if + end if + + if (ee.ge.1.and.isvar("aice_sh_sum_djf_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_djf),aice_sh_sum_djf_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_djf),aice_sh_sum_djf_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_djf)-1,1),aice_sh_sum_djf,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_djf@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_djf(ee) = gsn_csm_xy(wks_iceext_djf,ispan(syear_sh(ee),eyear_sh(ee),1),aice_sh_sum_djf,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_djf")) then + plot_iceext_sh_obs_djf = gsn_csm_xy(wks_iceext_djf,ispan(syear_sh(0),eyear_sh(0),1),aice_sh_obs_djf,xyres2) + overlay(plot_iceext_sh_djf(ee),plot_iceext_sh_obs_djf) + end if + + if (ee.ge.1.and.isvar("aice_sh_sum_mam_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_mam),aice_sh_sum_mam_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_mam),aice_sh_sum_mam_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_mam)-1,1),aice_sh_sum_mam,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_mam@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_mam(ee) = gsn_csm_xy(wks_iceext_mam,ispan(syear_sh(ee),eyear_sh(ee),1),aice_sh_sum_mam,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_mam")) then + plot_iceext_sh_obs_mam = gsn_csm_xy(wks_iceext_mam,ispan(syear_sh(0),eyear_sh(0),1),aice_sh_obs_mam,xyres2) + overlay(plot_iceext_sh_mam(ee),plot_iceext_sh_obs_mam) + end if + + if (ee.ge.1.and.isvar("aice_sh_sum_jja_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_jja),aice_sh_sum_jja_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_jja),aice_sh_sum_jja_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_jja)-1,1),aice_sh_sum_jja,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_jja@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_jja(ee) = gsn_csm_xy(wks_iceext_jja,ispan(syear_sh(ee),eyear_sh(ee),1),aice_sh_sum_jja,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_jja")) then + plot_iceext_sh_obs_jja = gsn_csm_xy(wks_iceext_jja,ispan(syear_sh(0),eyear_sh(0),1),aice_sh_obs_jja,xyres2) + overlay(plot_iceext_sh_jja(ee),plot_iceext_sh_obs_jja) + end if + + if (ee.ge.1.and.isvar("aice_sh_sum_son_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_son),aice_sh_sum_son_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_son),aice_sh_sum_son_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_son)-1,1),aice_sh_sum_son,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_son@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_son(ee) = 
gsn_csm_xy(wks_iceext_son,ispan(syear_sh(ee),eyear_sh(ee),1),aice_sh_sum_son,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_son")) then + plot_iceext_sh_obs_son = gsn_csm_xy(wks_iceext_son,ispan(syear_sh(0),eyear_sh(0),1),aice_sh_obs_son,xyres2) + overlay(plot_iceext_sh_son(ee),plot_iceext_sh_obs_son) + end if + + if (ee.ge.1.and.isvar("aice_sh_sum_ann_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_ann),aice_sh_sum_ann_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_ann),aice_sh_sum_ann_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_ann)-1,1),aice_sh_sum_ann,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_ann@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_ann(ee) = gsn_csm_xy(wks_iceext_ann,ispan(syear_sh(ee),eyear_sh(ee),1),aice_sh_sum_ann,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_ann")) then + plot_iceext_sh_obs_ann = gsn_csm_xy(wks_iceext_ann,ispan(syear_sh(0),eyear_sh(0),1),aice_sh_obs_ann,xyres2) + overlay(plot_iceext_sh_ann(ee),plot_iceext_sh_obs_ann) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_sh_sum_feb_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_feb),aice_sh_sum_feb_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_feb),aice_sh_sum_feb_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_feb)-1,1),aice_sh_sum_feb,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_feb@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_feb(ee) = gsn_csm_xy(wks_iceext_febmar,ispan(syear_sh(ee),eyear_sh(ee),1),aice_sh_sum_feb,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_feb")) then + plot_iceext_sh_obs_feb = gsn_csm_xy(wks_iceext_febmar,ispan(syear_sh(0),eyear_sh(0),1),aice_sh_obs_feb,xyres2) + overlay(plot_iceext_sh_feb(ee),plot_iceext_sh_obs_feb) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_sh_sum_mar_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_mar),aice_sh_sum_mar_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_mar),aice_sh_sum_mar_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_mar)-1,1),aice_sh_sum_mar,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_mar@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_mar(ee) = gsn_csm_xy(wks_iceext_febmar,ispan(syear_sh(ee),eyear_sh(ee),1),aice_sh_sum_mar,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_mar")) then + plot_iceext_sh_obs_mar = gsn_csm_xy(wks_iceext_febmar,ispan(syear_sh(0),eyear_sh(0),1),aice_sh_obs_mar,xyres2) + overlay(plot_iceext_sh_mar(ee),plot_iceext_sh_obs_mar) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_sh_sum_sep_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_sep),aice_sh_sum_sep_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_sep),aice_sh_sum_sep_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_sep)-1,1),aice_sh_sum_sep,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_sep@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_sep(ee) = 
gsn_csm_xy(wks_iceext_sep,ispan(syear_sh(ee),eyear_sh(ee),1),aice_sh_sum_sep,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_sep")) then + plot_iceext_sh_obs_sep = gsn_csm_xy(wks_iceext_sep,ispan(syear_sh(0),eyear_sh(0),1),aice_sh_obs_sep,xyres2) + overlay(plot_iceext_sh_sep(ee),plot_iceext_sh_obs_sep) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_sh_sum_mon_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_mon),aice_sh_sum_mon_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_mon),aice_sh_sum_mon_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_mon)-1,1),aice_sh_sum_mon,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_mon@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_mon(ee) = gsn_csm_xy(wks_iceext_mon,fspan(syear_sh(ee),eyear_sh(ee)+.91667,dimsizes(aice_sh_sum_mon)),aice_sh_sum_mon,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_mon")) then + plot_iceext_sh_obs_mon = gsn_csm_xy(wks_iceext_mon,fspan(syear_sh(0),eyear_sh(0)+.91667,dimsizes(aice_sh_obs_mon)),aice_sh_obs_mon,xyres2) + overlay(plot_iceext_sh_mon(ee),plot_iceext_sh_obs_mon) + end if + delete(tttt) + + if (ee.ge.1.and.isvar("aice_sh_sum_mon_anom_obs_min")) then + xyres@trYMinF = min((/min(aice_sh_sum_mon_anom),aice_sh_sum_mon_anom_obs_min/))-1 + xyres@trYMaxF = max((/max(aice_sh_sum_mon_anom),aice_sh_sum_mon_anom_obs_max/))+1 + xyres2@trYMinF = xyres@trYMinF + xyres2@trYMaxF = xyres@trYMaxF + end if + tttt = dtrend_msg(ispan(0,dimsizes(aice_sh_sum_mon_anom)-1,1),aice_sh_sum_mon_anom,False,True) + xyres@gsnRightString = str_sub_str(str_sub_str(decimalPlaces(tttt@slope*nyr_sh(ee),2,True)+aice_sh_sum_mon_anom@units,"m2","m~S~2~N~"),"10^12"," 10~S~12~N~") + plot_iceext_sh_mon_anom(ee) = gsn_csm_xy(wks_iceext_mon,fspan(syear_sh(ee),eyear_sh(ee)+.91667,dimsizes(aice_sh_sum_mon_anom)),aice_sh_sum_mon_anom,xyres) + if (ee.ge.1.and.isvar("aice_sh_obs_mon_anom")) then + plot_iceext_sh_obs_mon_anom = gsn_csm_xy(wks_iceext_mon,fspan(syear_sh(0),eyear_sh(0)+.91667,dimsizes(aice_sh_obs_mon_anom)),aice_sh_obs_mon_anom,xyres2) + overlay(plot_iceext_sh_mon_anom(ee),plot_iceext_sh_obs_mon_anom) + end if + + delete([/aice_sh_sum_djf,aice_sh_sum_mam,aice_sh_sum_jja,aice_sh_sum_son,aice_sh_sum_ann,aice_sh_sum_feb,aice_sh_sum_mar,aice_sh_sum_sep,aice_sh_sum_mon,aice_sh_sum_mon_anom,tttt/]) + end if + end if + delete([/res,xyres,xyres2,xyres_c,ph_s/]) +; print("Done with ee = "+ee) + end do + delete(time_mon2) + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + panres@lbLabelFontHeightF = 0.013 + panres@lbLabelStride = 1 + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.0185 + panres@gsnPanelBottom = 0.50 + panres@pmLabelBarWidthF = 0.5 + panres@pmLabelBarHeightF = 0.04 + panres@lbLabelFontHeightF = 0.01 + else + panres@txFontHeightF = 0.0125 + panres@gsnPanelBottom = 0.50 + panres@pmLabelBarWidthF = 0.5 + panres@pmLabelBarHeightF = 0.04 + panres@lbLabelFontHeightF = 0.01 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + ncol_sh = floattointeger(sqrt(nsim_sh)) + nrow_sh = 
(nsim_sh/ncol_sh)+mod(nsim_sh,ncol_sh) + + panres@txString = "SIC Trends (DJF)" + gsn_panel2(wks_trends_djf,plot_trends_nh_djf,(/nrow,ncol/),panres) + gsn_panel2(wks_trends_djf,plot_trends_sh_djf,(/nrow_sh,ncol_sh/),panres) + delete(wks_trends_djf) + + panres@txString = "SIC Trends (MAM)" + gsn_panel2(wks_trends_mam,plot_trends_nh_mam,(/nrow,ncol/),panres) + gsn_panel2(wks_trends_mam,plot_trends_sh_mam,(/nrow_sh,ncol_sh/),panres) + delete(wks_trends_mam) + + panres@txString = "SIC Trends (JJA)" + gsn_panel2(wks_trends_jja,plot_trends_nh_jja,(/nrow,ncol/),panres) + gsn_panel2(wks_trends_jja,plot_trends_sh_jja,(/nrow_sh,ncol_sh/),panres) + delete(wks_trends_jja) + + panres@txString = "SIC Trends (SON)" + gsn_panel2(wks_trends_son,plot_trends_nh_son,(/nrow,ncol/),panres) + gsn_panel2(wks_trends_son,plot_trends_sh_son,(/nrow_sh,ncol_sh/),panres) + delete(wks_trends_son) + + panres@txString = "SIC Trends (Annual)" + gsn_panel2(wks_trends_ann,plot_trends_nh_ann,(/nrow,ncol/),panres) + gsn_panel2(wks_trends_ann,plot_trends_sh_ann,(/nrow_sh,ncol_sh/),panres) + delete(wks_trends_ann) + + panres@txString = "SIC Trends (Monthly)" + gsn_panel2(wks_trends_mon,plot_trends_nh_mon,(/nrow,ncol/),panres) + gsn_panel2(wks_trends_mon,plot_trends_sh_mon,(/nrow_sh,ncol_sh/),panres) + delete(wks_trends_mon) + delete(panres) + + panres2 = True + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + panres2@gsnPanelYWhiteSpacePercent = 3.0 + if (nsim.le.5) then + panres2@txFontHeightF = 0.024 + else + panres2@txFontHeightF = 0.016 + end if + panres3 = panres2 + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres2@gsnPanelScalePlotIndex = tt(0) + delete(tt) + + tt = ind(nyr_sh.eq.nyr_max_sh) + panres3@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + + if (nsim.le.12) then + lp = (/nsim,1/) + lp_sh = (/nsim_sh,1/) + else + lp = (/nrow,ncol/) ;(/nsim/2+1,nsim/8+1/) + lp_sh = (/nrow_sh,ncol_sh/) + end if + + panres2@txString = "SIC NH Extent (DJF)" + gsn_panel2(wks_iceext_djf,plot_iceext_nh_djf,lp,panres2) + panres3@txString = "SIC SH Extent (DJF)" + gsn_panel2(wks_iceext_djf,plot_iceext_sh_djf,lp_sh,panres3) + delete(wks_iceext_djf) + + panres2@txString = "SIC NH Extent (MAM)" + gsn_panel2(wks_iceext_mam,plot_iceext_nh_mam,lp,panres2) + panres3@txString = "SIC SH Extent (MAM)" + gsn_panel2(wks_iceext_mam,plot_iceext_sh_mam,lp_sh,panres3) + delete(wks_iceext_mam) + + panres2@txString = "SIC NH Extent (JJA)" + gsn_panel2(wks_iceext_jja,plot_iceext_nh_jja,lp,panres2) + panres3@txString = "SIC SH Extent (JJA)" + gsn_panel2(wks_iceext_jja,plot_iceext_sh_jja,lp_sh,panres3) + delete(wks_iceext_jja) + + panres2@txString = "SIC NH Extent (SON)" + gsn_panel2(wks_iceext_son,plot_iceext_nh_son,lp,panres2) + panres3@txString = "SIC SH Extent (SON)" + gsn_panel2(wks_iceext_son,plot_iceext_sh_son,lp_sh,panres3) + delete(wks_iceext_son) + + panres2@txString = "SIC NH Extent (Annual)" + gsn_panel2(wks_iceext_ann,plot_iceext_nh_ann,lp,panres2) + panres3@txString = "SIC SH Extent (Annual)" + gsn_panel2(wks_iceext_ann,plot_iceext_sh_ann,lp_sh,panres3) + delete(wks_iceext_ann) + + panres2@txString = "SIC NH Extent (February)" + gsn_panel2(wks_iceext_febmar,plot_iceext_nh_feb,lp,panres2) + panres3@txString = "SIC SH Extent (February)" + gsn_panel2(wks_iceext_febmar,plot_iceext_sh_feb,lp_sh,panres3) + + panres2@txString = "SIC NH Extent (March)" + gsn_panel2(wks_iceext_febmar,plot_iceext_nh_mar,lp,panres2) + panres3@txString = "SIC SH Extent (March)"
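+ ; Frame order matters on wks_iceext_febmar: the four panels drawn here
+ ; (NH Feb, SH Feb, NH Mar, SH Mar) become frames 000001-000004, and the
+ ; rename/psplit block at the end of this script maps those frames to the
+ ; .feb/.mar output files in exactly that order.
+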
gsn_panel2(wks_iceext_febmar,plot_iceext_sh_mar,lp_sh,panres3) + delete(wks_iceext_febmar) + + panres2@txString = "SIC NH Extent (September)" + gsn_panel2(wks_iceext_sep,plot_iceext_nh_sep,lp,panres2) + panres3@txString = "SIC SH Extent (September)" + gsn_panel2(wks_iceext_sep,plot_iceext_sh_sep,lp_sh,panres3) + delete(wks_iceext_sep) + + panres2@txString = "SIC NH Extent (Monthly)" + gsn_panel2(wks_iceext_mon,plot_iceext_nh_mon,lp,panres2) + panres3@txString = "SIC SH Extent (Monthly)" + gsn_panel2(wks_iceext_mon,plot_iceext_sh_mon,lp_sh,panres3) + panres2@txString = "SIC NH Extent (Monthly Anomalies)" + gsn_panel2(wks_iceext_mon,plot_iceext_nh_mon_anom,lp,panres2) + panres3@txString = "SIC SH Extent (Monthly Anomalies)" + gsn_panel2(wks_iceext_mon,plot_iceext_sh_mon_anom,lp_sh,panres3) + + if (nsim.le.5) then + panres2@txFontHeightF = 0.017 + else + panres2@txFontHeightF = 0.014 + end if + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + ncol_sh = floattointeger(sqrt(nsim_sh)) + nrow_sh = (nsim_sh/ncol_sh)+mod(nsim_sh,ncol_sh) + + panres2@txString = "SIC NH Extent Climatology" + gsn_panel2(wks_iceext_mon,plot_iceext_nh_climo,(/nrow,ncol/),panres2) + panres2@txString = "SIC SH Extent Climatology" + gsn_panel2(wks_iceext_mon,plot_iceext_sh_climo,(/nrow_sh,ncol_sh/),panres2) + delete(wks_iceext_mon) +;-------------------------------------------------------------------------------- + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + system("mv "+OUTDIR+"aice.trends.djf.000001.png "+OUTDIR+"aice.trends.nh.djf.png") + system("mv "+OUTDIR+"aice.trends.djf.000002.png "+OUTDIR+"aice.trends.sh.djf.png") + system("mv "+OUTDIR+"aice.trends.mam.000001.png "+OUTDIR+"aice.trends.nh.mam.png") + system("mv "+OUTDIR+"aice.trends.mam.000002.png "+OUTDIR+"aice.trends.sh.mam.png") + system("mv "+OUTDIR+"aice.trends.jja.000001.png "+OUTDIR+"aice.trends.nh.jja.png") + system("mv "+OUTDIR+"aice.trends.jja.000002.png "+OUTDIR+"aice.trends.sh.jja.png") + system("mv "+OUTDIR+"aice.trends.son.000001.png "+OUTDIR+"aice.trends.nh.son.png") + system("mv "+OUTDIR+"aice.trends.son.000002.png "+OUTDIR+"aice.trends.sh.son.png") + system("mv "+OUTDIR+"aice.trends.ann.000001.png "+OUTDIR+"aice.trends.nh.ann.png") + system("mv "+OUTDIR+"aice.trends.ann.000002.png "+OUTDIR+"aice.trends.sh.ann.png") + system("mv "+OUTDIR+"aice.trends.mon.000001.png "+OUTDIR+"aice.trends.nh.mon.png") + system("mv "+OUTDIR+"aice.trends.mon.000002.png "+OUTDIR+"aice.trends.sh.mon.png") + + if (isfilepresent2(OUTDIR+"aice.extent.djf.000001.png")) then + system("mv "+OUTDIR+"aice.extent.djf.000001.png "+OUTDIR+"aice.extent.nh.djf.png") + system("mv "+OUTDIR+"aice.extent.djf.000002.png "+OUTDIR+"aice.extent.sh.djf.png") + system("mv "+OUTDIR+"aice.extent.mam.000001.png "+OUTDIR+"aice.extent.nh.mam.png") + system("mv "+OUTDIR+"aice.extent.mam.000002.png "+OUTDIR+"aice.extent.sh.mam.png") + system("mv "+OUTDIR+"aice.extent.jja.000001.png "+OUTDIR+"aice.extent.nh.jja.png") + system("mv "+OUTDIR+"aice.extent.jja.000002.png "+OUTDIR+"aice.extent.sh.jja.png") + system("mv "+OUTDIR+"aice.extent.son.000001.png "+OUTDIR+"aice.extent.nh.son.png") + system("mv "+OUTDIR+"aice.extent.son.000002.png "+OUTDIR+"aice.extent.sh.son.png") + system("mv "+OUTDIR+"aice.extent.ann.000001.png "+OUTDIR+"aice.extent.nh.ann.png") + system("mv "+OUTDIR+"aice.extent.ann.000002.png "+OUTDIR+"aice.extent.sh.ann.png") + system("mv "+OUTDIR+"aice.extent.febmar.000001.png "+OUTDIR+"aice.extent.nh.feb.png") + system("mv 
"+OUTDIR+"aice.extent.febmar.000002.png "+OUTDIR+"aice.extent.sh.feb.png") + system("mv "+OUTDIR+"aice.extent.febmar.000003.png "+OUTDIR+"aice.extent.nh.mar.png") + system("mv "+OUTDIR+"aice.extent.febmar.000004.png "+OUTDIR+"aice.extent.sh.mar.png") + system("mv "+OUTDIR+"aice.extent.sep.000001.png "+OUTDIR+"aice.extent.nh.sep.png") + system("mv "+OUTDIR+"aice.extent.sep.000002.png "+OUTDIR+"aice.extent.sh.sep.png") + system("mv "+OUTDIR+"aice.extent.mon.000001.png "+OUTDIR+"aice.extent.nh.mon.png") + system("mv "+OUTDIR+"aice.extent.mon.000002.png "+OUTDIR+"aice.extent.sh.mon.png") + system("mv "+OUTDIR+"aice.extent.mon.000003.png "+OUTDIR+"aice.extent.anom.nh.mon.png") + system("mv "+OUTDIR+"aice.extent.mon.000004.png "+OUTDIR+"aice.extent.anom.sh.mon.png") + system("mv "+OUTDIR+"aice.extent.mon.000005.png "+OUTDIR+"aice.extent.nh.climo.png") + system("mv "+OUTDIR+"aice.extent.mon.000006.png "+OUTDIR+"aice.extent.sh.climo.png") + end if + else + system("psplit "+OUTDIR+"aice.trends.djf.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.trends.nh.djf.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.trends.sh.djf.ps") + system("psplit "+OUTDIR+"aice.trends.mam.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.trends.nh.mam.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.trends.sh.mam.ps") + system("psplit "+OUTDIR+"aice.trends.jja.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.trends.nh.jja.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.trends.sh.jja.ps") + system("psplit "+OUTDIR+"aice.trends.son.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.trends.nh.son.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.trends.sh.son.ps") + system("psplit "+OUTDIR+"aice.trends.ann.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.trends.nh.ann.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.trends.sh.ann.ps") + system("psplit "+OUTDIR+"aice.trends.mon.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.trends.nh.mon.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.trends.sh.mon.ps") + system("rm "+OUTDIR+"aice.trends.???.ps") + + if (isfilepresent2(OUTDIR+"aice.extent.djf.ps")) then + system("psplit "+OUTDIR+"aice.extent.djf.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.extent.nh.djf.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.extent.sh.djf.ps") + system("psplit "+OUTDIR+"aice.extent.mam.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.extent.nh.mam.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.extent.sh.mam.ps") + system("psplit "+OUTDIR+"aice.extent.jja.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.extent.nh.jja.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.extent.sh.jja.ps") + system("psplit "+OUTDIR+"aice.extent.son.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.extent.nh.son.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.extent.sh.son.ps") + system("psplit "+OUTDIR+"aice.extent.ann.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.extent.nh.ann.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.extent.sh.ann.ps") + system("psplit "+OUTDIR+"aice.extent.febmar.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.extent.nh.feb.ps") + system("mv "+OUTDIR+"aice_tr0002.ps 
"+OUTDIR+"aice.extent.sh.feb.ps") + system("mv "+OUTDIR+"aice_tr0003.ps "+OUTDIR+"aice.extent.nh.mar.ps") + system("mv "+OUTDIR+"aice_tr0004.ps "+OUTDIR+"aice.extent.sh.mar.ps") + system("psplit "+OUTDIR+"aice.extent.sep.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.extent.nh.sep.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.extent.sh.sep.ps") + system("psplit "+OUTDIR+"aice.extent.mon.ps "+OUTDIR+"aice_tr") + system("mv "+OUTDIR+"aice_tr0001.ps "+OUTDIR+"aice.extent.nh.mon.ps") + system("mv "+OUTDIR+"aice_tr0002.ps "+OUTDIR+"aice.extent.sh.mon.ps") + system("mv "+OUTDIR+"aice_tr0003.ps "+OUTDIR+"aice.extent.anom.nh.mon.ps") + system("mv "+OUTDIR+"aice_tr0004.ps "+OUTDIR+"aice.extent.anom.sh.mon.ps") + system("mv "+OUTDIR+"aice_tr0005.ps "+OUTDIR+"aice.extent.nh.climo.ps") + system("mv "+OUTDIR+"aice_tr0006.ps "+OUTDIR+"aice.extent.sh.climo.ps") + system("rm "+OUTDIR+"aice.extent.???.ps "+OUTDIR+"aice.extent.febmar.ps") + end if + end if + print("Finished: aice.trends_timeseries.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/amo.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/amo.ncl new file mode 100644 index 0000000000..9242196219 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/amo.ncl @@ -0,0 +1,799 @@ +; Calculates the AMO pattern, timeseries, and spectra. +; +; Variables used: ts +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: amo.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_ts") + na = asciiread("namelist_byvar/namelist_ts",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) 
+;---------TAS Regressions coding------------------------------------------------- + nsim_tas = numAsciiRow("namelist_byvar/namelist_trefht") + na_tas = asciiread("namelist_byvar/namelist_trefht",(/nsim_tas/),"string") + names_tas = new(nsim_tas,"string") + paths_tas = new(nsim_tas,"string") + syear_tas = new(nsim_tas,"integer",-999) + eyear_tas = new(nsim_tas,"integer",-999) + + do gg = 0,nsim_tas-1 + names_tas(gg) = str_strip(str_get_field(na_tas(gg),1,delim)) + paths_tas(gg) = str_strip(str_get_field(na_tas(gg),2,delim)) + syear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),3,delim))) + eyear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),4,delim))) + end do + delete(na_tas) + nyr_tas = eyear_tas-syear_tas+1 +;---------PR Regressions coding------------------------------------------------- + nsim_pr = numAsciiRow("namelist_byvar/namelist_prect") + na_pr = asciiread("namelist_byvar/namelist_prect",(/nsim_pr/),"string") + names_pr = new(nsim_pr,"string") + paths_pr = new(nsim_pr,"string") + syear_pr = new(nsim_pr,"integer",-999) + eyear_pr = new(nsim_pr,"integer",-999) + + do gg = 0,nsim_pr-1 + names_pr(gg) = str_strip(str_get_field(na_pr(gg),1,delim)) + paths_pr(gg) = str_strip(str_get_field(na_pr(gg),2,delim)) + syear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),3,delim))) + eyear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),4,delim))) + end do + delete(na_pr) + nyr_pr = eyear_pr-syear_pr+1 +;------------------------------------------------------------------------------------------------- + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks = gsn_open_wks(wks_type,getenv("OUTDIR")+"amo") + wks4 = gsn_open_wks(wks_type,getenv("OUTDIR")+"amo.prreg") + wks2 = gsn_open_wks(wks_type,getenv("OUTDIR")+"amo.powspec") + wks3 = gsn_open_wks(wks_type,getenv("OUTDIR")+"amo.timeseries") + + if (COLORMAP.eq."0") then + gsn_define_colormap(wks,"ncl_default") + gsn_define_colormap(wks2,"cb_9step") + gsn_define_colormap(wks3,"ncl_default") + gsn_define_colormap(wks4,"MPL_BrBG") + end if + if (COLORMAP.eq."1") then + gsn_define_colormap(wks,"BlueDarkRed18") + gsn_define_colormap(wks2,"cb_9step") + gsn_define_colormap(wks3,"ncl_default") + gsn_define_colormap(wks4,"BrownBlue12") + end if + + map = new(nsim,"graphic") + map_sst = new(nsim,"graphic") + map_tasreg = new(nsim,"graphic") + map_prreg = new(nsim,"graphic") + mapLP = new(nsim,"graphic") + mapLP_sst = new(nsim,"graphic") + mapLP_tasreg = new(nsim,"graphic") + mapLP_prreg = new(nsim,"graphic") + pspec = new(nsim,"graphic") + xyplot = new(nsim,"graphic") + xyplot2 = new(nsim,"graphic") + if (isfilepresent2("obs_ts")) then + pspec_obs = new(nsim,"graphic") + end if + + tasreg_frame = 1 ; *reg_frame = flag to create regressions .ps/.png files. 
Created/used instead of *reg_plot_flag + ; so that the .ps/.png files are still created even if the {tas,pr} regressions are not created for the last simulation listed + prreg_frame = 1 + + do ee = 0,nsim-1 + sstT = data_read_in(paths(ee),"TS",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(sstT,"is_all_missing").or.nyr(ee).lt.15) then + delete(sstT) + continue + end if + sstT = where(sstT.le.-1.8,-1.8,sstT) ; set all values below -1.8 to -1.8 + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") ; mask out land (this is redundant for data that is already masked) + basemap = d->LSMASK + lsm = landsea_mask(basemap,sstT&lat,sstT&lon) + sstT = mask(sstT,conform(sstT,lsm,(/1,2/)).ge.1,False) + delete([/lsm,basemap/]) + delete(d) + + sstT = lonFlip(sstT) ; orient longitudes from -180:180 (set to 0:360 in data_read_in function) + if (OPT_CLIMO.eq."Full") then + sstT = rmMonAnnCycTLL(sstT) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sstT + delete(temp_arr&time) + temp_arr&time = cd_calendar(sstT&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + sstT = calcMonAnomTLL(sstT,climo) + delete(climo) + end if + coswgt=cos(rad*sstT&lat) + coswgt!0 = "lat" + coswgt&lat= sstT&lat + natl_aa = wgt_areaave(sstT(:,{0:60},{-80:0}),coswgt({0.:60.}),1.0,0) + global_aa = wgt_areaave(sstT(:,{-60:60},:),coswgt({-60.:60.}),1.0,0) + + finarr = new((/2,dimsizes(natl_aa)/),typeof(natl_aa),-999.) ; timeseries plot + finarr!1 = "time" + finarr&time = sstT&time + finarr(0,:) = (/ natl_aa - global_aa /) + finarr(1,:) = (/ runave(finarr(0,:),121,0) /) ; originally 61 + delete(natl_aa) + delete(global_aa) + + finreg = sstT(0,:,:) + finreg = (/ regCoef(finarr(0,:),sstT(lat|:,lon|:,time|:)) /) + finregLP = sstT(0,:,:) + finregLP = (/ regCoef(finarr(1,:),runave(sstT(lat|:,lon|:,time|:),121,0)) /) + delete([/sstT/]) + + do gg = 0,2 + finreg = (/ smth9(finreg,0.5,0.25,True) /) + end do + delete([/coswgt/]) + finreg@syear = syear(ee) + finreg@eyear = eyear(ee) + finregLP@syear = syear(ee) + finregLP@eyear = eyear(ee) + +;---------TAS Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_tas(ee),eyear(ee),eyear_tas(ee)/)))) then + tasreg_plot_flag = 1 + else + if (syear(ee).eq.syear_tas(ee)) then ; check that the start and end years match for ts and tas + if (eyear(ee).eq.eyear_tas(ee)) then + tasreg_plot_flag = 0 + else + tasreg_plot_flag = 1 + end if + else + tasreg_plot_flag = 1 + end if + end if + + if (tasreg_plot_flag.eq.0) then + tas = data_read_in(paths_tas(ee),"TREFHT",syear_tas(ee),eyear_tas(ee)) + if (isatt(tas,"is_all_missing")) then + tasreg_plot_flag = 1 + delete(tas) + end if + + if (tasreg_plot_flag.eq.0) then ; only continue if both TAS/SST fields are present + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") + basemap = d->LSMASK + lsm = landsea_mask(basemap,tas&lat,tas&lon) + tas = mask(tas,conform(tas,lsm,(/1,2/)).eq.0,False) + delete(lsm) + + if (OPT_CLIMO.eq."Full") then + tas = rmMonAnnCycTLL(tas) + else + check_custom_climo(names_tas(ee),syear_tas(ee),eyear_tas(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = tas + delete(temp_arr&time) + temp_arr&time = cd_calendar(tas&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = 
clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + tas = calcMonAnomTLL(tas,climo) + delete(climo) + end if + finreg_tas = tas(0,:,:) + finreg_tas = (/ regCoef(finarr(0,:),tas(lat|:,lon|:,time|:)) /) + finregLP_tas = tas(0,:,:) + finregLP_tas = (/ regCoef(finarr(1,:),runave(tas(lat|:,lon|:,time|:),121,0)) /) + delete(tas) + end if + end if +;---------PR Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_pr(ee),eyear(ee),eyear_pr(ee)/)))) then + prreg_plot_flag = 1 + else + if (syear(ee).eq.syear_pr(ee)) then ; check that the start and end years match for ts and pr + if (eyear(ee).eq.eyear_pr(ee)) then + prreg_plot_flag = 0 + else + prreg_plot_flag = 1 + end if + else + prreg_plot_flag = 1 + end if + end if + + if (prreg_plot_flag.eq.0) then + pr = data_read_in(paths_pr(ee),"PRECT",syear_pr(ee),eyear_pr(ee)) + if (isatt(pr,"is_all_missing")) then + prreg_plot_flag = 1 + delete(pr) + end if + + if (prreg_plot_flag.eq.0) then ; only continue if both SST/PR fields are present + if (OPT_CLIMO.eq."Full") then + pr = rmMonAnnCycTLL(pr) + else + check_custom_climo(names_pr(ee),syear_pr(ee),eyear_pr(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = pr + delete(temp_arr&time) + temp_arr&time = cd_calendar(pr&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + pr = calcMonAnomTLL(pr,climo) + delete(climo) + end if + finreg_pr = pr(0,:,:) + finreg_pr = (/ regCoef(finarr(0,:),pr(lat|:,lon|:,time|:)) /) + finregLP_pr = pr(0,:,:) + finregLP_pr = (/ regCoef(finarr(1,:),runave(pr(lat|:,lon|:,time|:),121,0)) /) + delete(pr) + end if + end if +;--------------------------------------------------------------------------------------------- + if (tasreg_frame.eq.1.and.tasreg_plot_flag.eq.0) then ; tasreg_frame = flag to create regressions .ps/.png files + tasreg_frame = 0 + end if + if (prreg_frame.eq.1.and.prreg_plot_flag.eq.0) then ; prreg_frame = flag to create regressions .ps/.png files + prreg_frame = 0 + end if +;--------------------------------------------------------------------------------------------- + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.amo."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->amo_pattern_mon = set_varAtts(lonFlip(finreg),"AMO regression pattern (monthly)","","") ; flip longitudes back to 
running from 0:360. + z->amo_pattern_lowpass_mon = set_varAtts(lonFlip(finregLP),"AMO low-pass regression pattern (monthly)","","") ; flip longitudes back to running from 0:360. + amo_ts = finarr(0,:) + amo_ts@units = "C" + z->amo_timeseries_mon = set_varAtts(amo_ts,"AMO timeseries (monthly)","","") + delete([/modname,fn,amo_ts/]) + + if (tasreg_plot_flag.eq.0) then + modname = str_sub_str(names_tas(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.amo.tas."+syear_tas(ee)+"-"+eyear_tas(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z_tas = addfile(fn,"c") + z_tas@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z_tas@notes = "Data from "+names_tas(ee)+" from "+syear_tas(ee)+"-"+eyear_tas(ee) + if (OPT_CLIMO.eq."Full") then + z_tas@climatology = syear_tas(ee)+"-"+eyear_tas(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z_tas@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z_tas@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z_tas@Conventions = "CF-1.6" + else + z_tas = addfile(fn,"w") + end if + z_tas->amo_tas_regression_mon = set_varAtts(finreg_tas,"tas regression onto AMO timeseries (monthly)","","") + z_tas->amo_tas_regression_lowpass_mon = set_varAtts(finregLP_tas,"tas low-pass regression onto AMO timeseries (monthly)","","") + delete([/modname,fn,z_tas/]) + end if + if (prreg_plot_flag.eq.0) then + modname = str_sub_str(names_pr(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.amo.pr."+syear_pr(ee)+"-"+eyear_pr(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z_pr = addfile(fn,"c") + z_pr@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z_pr@notes = "Data from "+names_pr(ee)+" from "+syear_pr(ee)+"-"+eyear_pr(ee) + if (OPT_CLIMO.eq."Full") then + z_pr@climatology = syear_pr(ee)+"-"+eyear_pr(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z_pr@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z_pr@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z_pr@Conventions = "CF-1.6" + else + z_pr = addfile(fn,"w") + end if + z_pr->amo_pr_regression_mon = set_varAtts(finreg_pr,"pr regression onto AMO timeseries (monthly)","","") + z_pr->amo_pr_regression_lowpass_mon = set_varAtts(finregLP_pr,"pr low-pass regression onto AMO timeseries (monthly)","","") + delete([/modname,fn,z_pr/]) + end if + end if +;------------------------------------------------------------------------ + iopt = 0 + jave = (7*nyr(ee))/100 + val1 = .95 + val2 = .99 + if (jave.eq.0) then + jave = 1 + end if + pct = 0.1 + spectra_mvf = False ; missing value flag + if (any(ismissing(finarr(0,:)))) then ; check for missing data + print("Missing data detected for "+names(ee)+", not creating AMO spectra") + spectra_mvf = True + if (isfilepresent2("obs_ts").and.ee.eq.0) then + spectra_mvf_obs = True ; missing value flag + end if + 
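+ ; (spectra_mvf_obs is remembered from the ee=0 observational pass so the
+ ; model spectra panels later in the loop know whether an observational
+ ; spectrum exists to overlay)
+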
else + if (isfilepresent2("obs_ts").and.ee.eq.0) then + spectra_mvf_obs = False ; missing value flag + end if + sdof = specx_anal(dim_standardize(finarr(0,:),0),iopt,jave,pct) + splt1 = specx_ci(sdof,val1,val2) + if (OUTPUT_DATA.eq."True") then + splt1!0 = "ncurves" + splt1&ncurves = ispan(0,3,1) + splt1&ncurves@long_name = "power spectra curves" + splt1&ncurves@units = "1" + splt1!1 = "frequency" + splt1&frequency = sdof@frq + splt1&frequency@units = "1" + splt1@units_info = "df refers to frequency interval; data are standardized so there are no physical units" + splt1@units = "1/df" + splt1@info = "(0,:)=spectrum,(1,:)=Markov red noise spectrum, (2,:)="+val1+"% confidence bound for Markov, (3,:)="+val2+"% confidence bound for Markov" + z->amo_spectra = set_varAtts(splt1,"AMO (monthly) power spectra, Markov spectrum and confidence curves","","") + end if + if (isfilepresent2("obs_ts").and.ee.eq.0) then + sdof_obs = sdof + end if + delete([/iopt,jave,pct/]) + end if + if (isvar("z")) then + delete(z) + end if +;========================================================================= + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 0. + res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + res@vpYF = 0.95 + res@vpHeightF = 0.3 + res@vpXF = 0.2 + res@vpWidthF = 0.6 + +; res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + if (COLORMAP.eq."0") then + res@cnLevels = fspan(-4.,4.,21) + end if + if (COLORMAP.eq."1") then + res@cnLevels = fspan(-3.2,3.2,17) + end if + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = 0.005 + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnCenterString = names(ee) + res@gsnRightString = "" + + res4 = res ; res4 = pr regression resources + delete(res4@cnLevels) + if (COLORMAP.eq.0) then + res4@cnLevels = (/-5,-4,-3,-2,-1,-.75,-.5,-.25,-.1,0,.1,.25,.5,.75,1,2,3,4,5/) + else + res4@cnLevels = (/-3,-2,-1,-.5,-.1,0,.1,.5,1,2,3/) + end if + + + res2 = True ; res2 = tas regression resources + res2@gsnDraw = False + res2@gsnFrame = False + res2@cnLevelSelectionMode = "ExplicitLevels" + res2@cnLevels = res@cnLevels + + res2@cnLineLabelsOn = False + res2@cnFillOn = True + res2@cnLinesOn = False + res2@cnFillMode = "AreaFill" + res2@lbLabelBarOn = False + res2@cnInfoLabelOn = False + res2@gsnRightString = "" + res2@gsnLeftString = "" + res2@gsnCenterString = "" + res2@gsnAddCyclic = True + + + if (isfilepresent2("obs_ts").and.ee.eq.0) then ; for pattern correlation table + patcor = new((/nsim,dimsizes(finreg&lat),dimsizes(finreg&lon)/),typeof(finreg)) + patcor!1 = "lat" + patcor&lat = finreg&lat + patcor!2 = "lon" + patcor&lon = finreg&lon + patcor(ee,:,:) = (/ finreg /) + end if + if (isfilepresent2("obs_ts").and.ee.ge.1.and.isvar("patcor")) then + patcor(ee,:,:) = (/ 
totype(linint2(finreg&lon,finreg&lat,finreg,True,patcor&lon,patcor&lat,0),typeof(patcor)) /) + end if + + map(ee) = gsn_csm_contour_map(wks,finreg,res) + mapLP(ee) = gsn_csm_contour_map(wks,finregLP,res) + + if (tasreg_plot_flag.eq.0) then + if (names(ee).eq.names_tas(ee)) then + res@gsnCenterString = names(ee) + else + res@gsnCenterString = names(ee)+" / "+names_tas(ee) + end if + map_sst(ee) = gsn_csm_contour_map(wks,finreg,res) + map_tasreg(ee) = gsn_csm_contour(wks,finreg_tas,res2) + overlay(map_sst(ee),map_tasreg(ee)) + delete(finreg_tas) + + mapLP_sst(ee) = gsn_csm_contour_map(wks,finregLP,res) + mapLP_tasreg(ee) = gsn_csm_contour(wks,finregLP_tas,res2) + overlay(mapLP_sst(ee),mapLP_tasreg(ee)) + delete(finregLP_tas) + end if + delete([/finreg,finregLP/]) + + if (prreg_plot_flag.eq.0) then + res4@gsnCenterString = names_pr(ee) + map_prreg(ee) = gsn_csm_contour_map(wks4,finreg_pr,res4) + delete(finreg_pr) + mapLP_prreg(ee) = gsn_csm_contour_map(wks4,finregLP_pr,res4) + delete(finregLP_pr) + end if + + pres = True + pres@vpXF = 0.07 + pres@trYMinF = 0. + pres@trXMinF = 0.0 +; pres@trYMaxF = 82. + pres@trXMaxF = 0.0832 + pres@tiYAxisString = "Power" ; yaxis + pres@xyLineColor = "black" + pres@gsnFrame = False + pres@gsnDraw = False + + pres@tmXBLabelDeltaF = -.8 + pres@tmXTLabelDeltaF = -.8 + pres@pmLegendDisplayMode = "Never" + pres@xyLineThicknesses = (/3.5,2.,1.,1./) + pres@xyDashPatterns = (/0,0,0,0/) + pres@xyLineColors = (/"foreground","red","blue","green"/) + pres@xyLabelMode = "custom" + pres@xyLineLabelFontColors = pres@xyLineColors + pres@xyExplicitLabels = (/"","",val1*100+"%",val2*100+"%"/) + pres@tmXTOn = True + pres@tmYROn = False + pres@tmXTLabelsOn = True + pres@tmXUseBottom = False + pres@tmXTMode = "Explicit" + pres@tmXBMode = "Explicit" + pres@tmXTValues = (/".00167",".00833",".01667",".02778",".0416",".0556",".0832"/) + pres@tmXTLabels = (/"50","10","5","3","2","1.5","1"/) + pres@tmXBValues = (/".0",".01",".02",".03",".042",".056",".083"/) + pres@tmXBLabels = pres@tmXBValues + pres@tmXTLabelFontHeightF = 0.018 + pres@tmXBLabelFontHeightF = 0.018 + pres@tmYLLabelFontHeightF = 0.018 + pres@tiYAxisString = "Variance" ;"Power (~S~o~N~C~S~2~N~ / cycles mo~S~-1~N~)" ; yaxis + pres@tiXAxisString = "Frequency (cycles mo~S~-1~N~)" + pres@tiMainString = "" + pres@txFontHeightF = 0.015 + pres@xyLineLabelFontHeightF = 0.022 + pres@tiXAxisFontHeightF = 0.025 + pres@tiYAxisFontHeightF = 0.025 + pres@tiMainFontHeightF = 0.03 + pres@gsnRightStringOrthogonalPosF = -0.115 + + pres@tiMainOn = False + pres@gsnCenterString = "Period (years)" + pres@gsnCenterStringFontHeightF = pres@tiYAxisFontHeightF + pres@gsnRightStringFontHeightF = pres@tiYAxisFontHeightF - 0.005 + pres@gsnRightString = syear(ee)+"-"+eyear(ee)+" " + pres@gsnLeftString = "" + if (wks_type.eq."png") then + pres@xyLineThicknessF = 3.5 + res@mpGeophysicalLineThicknessF = 2. + else + pres@xyLineThicknessF = 1.5 + res@mpGeophysicalLineThicknessF = 1. 
+ end if + pres@gsnCenterString = names(ee) + if (spectra_mvf.eq.False) then + pspec(ee) = gsn_csm_xy(wks2,sdof@frq,splt1,pres) + if (isfilepresent2("obs_ts").and.ee.ge.1.and.spectra_mvf_obs.eq.False) then + pres@xyLineColors = (/"gray70","black","black","black"/) + pres@xyCurveDrawOrder = "PreDraw" + pres@gsnCenterString = "" + pres@gsnRightString = "" + pspec_obs(ee) = gsn_csm_xy(wks2,sdof_obs@frq,sdof_obs@spcx,pres) + overlay(pspec(ee),pspec_obs(ee)) + delete(pres@xyCurveDrawOrder) + end if + delete([/sdof,splt1/]) + end if + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnRightString = "" + xyres@gsnLeftString = "" + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + xyres@gsnXYBarChart = False + xyres@gsnAboveYRefLineColor = 185 + xyres@gsnBelowYRefLineColor = 35 + xyres@xyLineThicknessF = 0.1 + xyres@xyLineColor = "gray70" +; xyres@xyLineColors = (/ xyres@gsnAboveYRefLineColor, xyres@gsnBelowYRefLineColor/) + xyres@tiYAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnStringFontHeightF = 0.017 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnStringFontHeightF = 0.024 + end if + xyres@gsnCenterStringOrthogonalPosF = 0.025 + + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnCenterString = "" + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+1.5 + + xyres2 = xyres + delete(xyres2@gsnXYBarChart) + delete(xyres2@gsnAboveYRefLineColor) + delete(xyres2@gsnBelowYRefLineColor) +; delete(xyres2@xyLineColors) + xyres2@xyLineColor = "black" + if (wks_type.eq."png") then + xyres2@xyLineThicknessF = 3.5 + else + xyres2@xyLineThicknessF = 2.5 + end if + + xyres@gsnCenterString = names(ee) + xyplot(ee) = gsn_csm_xy(wks3,fspan(syear(ee),eyear(ee)+.91667,dimsizes(finarr&time)),finarr(0,:),xyres) ; use standardized timeseries + xyplot2(ee) = gsn_csm_xy(wks3,fspan(syear(ee),eyear(ee)+.91667,dimsizes(finarr&time)),finarr(1,:),xyres2) + overlay(xyplot(ee),xyplot2(ee)) + delete([/val1,val2,finarr,res,pres,xyres,xyres2/]) + end do + + if (isvar("patcor")) then ; for pattern correlation table + clat = cos(0.01745329*patcor&lat) +; finpaco = "AMO (Monthly) " ; Must be 18 characters long +; finrms = finpaco + finpr = "AMO (Monthly) " ; Must be 18 characters long + line3 = " " ; Must be 18 characters long + line4 = line3 + header = (/"","Pattern Correlations/RMS Differences Observations vs. 
Model(s)",""/)
+   do hh = 1,nsim-1
+      dimY = dimsizes(tochar(names(hh)))
+      nchar = dimY
+      nchar = where(nchar.le.10,10,nchar)
+      if (dimY.lt.10) then
+         ntb = ""
+         do ii = 0,10-dimY-1
+            ntb = ntb+" "
+         end do
+         ntb = ntb+names(hh)
+      else
+         ntb = names(hh)
+      end if
+
+      ntc = ""
+      do ii = 0,nchar-1
+         ntc = ntc+"-"
+      end do
+      format2 = "%"+(nchar-5+1)+".2f"
+      format3 = "%4.2f"
+      line3 = line3+" "+ntb
+      line4 = line4+" "+ntc
+      if (all(ismissing(patcor(hh,:,:)))) then
+         finpr = finpr+sprintf(format2,9.99)+"/"+sprintf(format3,9.99)
+      else
+         finpr = finpr+sprintf(format2,(pattern_cor(patcor(0,:,:),patcor(hh,:,:),clat,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor(0,:,:))),ndtooned(NewCosWeight(patcor(hh,:,:))))))
+      end if
+   end do
+   if (dimsizes(tochar(line4)).ge.8190) then   ; system or fortran compiler limit
+      print("Metrics table warning: Not creating metrics table as size of comparison results in an invalid ascii row size.")
+   else
+      write_table(getenv("OUTDIR")+"metrics.amo.txt","w",[/header/],"%s")
+      write_table(getenv("OUTDIR")+"metrics.amo.txt","a",[/line3/],"%s")
+      write_table(getenv("OUTDIR")+"metrics.amo.txt","a",[/line4/],"%s")
+      write_table(getenv("OUTDIR")+"metrics.amo.txt","a",[/finpr/],"%s")
+   end if
+   delete([/finpr,line3,line4,format2,format3,nchar,ntc,clat,patcor,ntb,dimY,header/])
+  end if
+
+  panres = True
+  panres@gsnMaximize = True
+  panres@gsnPaperOrientation = "portrait"
+  panres@gsnPanelLabelBar = True
+  panres@gsnPanelYWhiteSpacePercent = 3.0
+  panres@pmLabelBarHeightF = 0.05
+  panres@pmLabelBarWidthF = 0.55
+  panres@lbTitleOn = False
+  panres@lbBoxLineColor = "gray70"
+  panres@lbLabelFontHeightF = 0.013
+  if (nsim.le.4) then
+     if (nsim.eq.1) then
+        panres@txFontHeightF = 0.022
+        panres@gsnPanelBottom = 0.50
+     else
+        panres@txFontHeightF = 0.0145
+        panres@gsnPanelBottom = 0.50
+     end if
+  else
+     panres@txFontHeightF = 0.016
+     panres@gsnPanelBottom = 0.05
+  end if
+
+  panres@txString = "AMO (Monthly)"
+  ncol = floattointeger(sqrt(nsim))
+  nrow = (nsim/ncol)+mod(nsim,ncol)
+  gsn_panel2(wks,map,(/nrow,ncol/),panres)
+  panres@txString = "AMO Low Pass (Monthly)"
+  gsn_panel2(wks,mapLP,(/nrow,ncol/),panres)
+
+  if (tasreg_frame.eq.0) then
+     panres@txString = "AMO SST/TAS Regressions (Monthly)"
+     gsn_panel2(wks,map_sst,(/nrow,ncol/),panres)
+     panres@txString = "AMO SST/TAS Low Pass Regressions (Monthly)"
+     gsn_panel2(wks,mapLP_sst,(/nrow,ncol/),panres)
+  end if
+  delete(wks)
+
+  if (prreg_frame.eq.0) then
+     panres@txString = "AMO PR Regressions (Monthly)"
+     gsn_panel2(wks4,map_prreg,(/nrow,ncol/),panres)
+     panres@txString = "AMO PR Low Pass Regressions (Monthly)"
+     gsn_panel2(wks4,mapLP_prreg,(/nrow,ncol/),panres)
+  end if
+  delete(wks4)
+
+  delete(panres@gsnPanelLabelBar)
+  panres@txString = "AMO (Monthly)"
+  gsn_panel2(wks2,pspec,(/nrow,ncol/),panres)
+  delete(wks2)
+
+  if (SCALE_TIMESERIES.eq."True") then
+     tt = ind(nyr.eq.nyr_max)
+     panres@gsnPanelScalePlotIndex = tt(0)
+     delete(tt)
+  end if
+  panres@txString = "AMO (Monthly)"
+  if (nsim.le.12) then
+     lp = (/nsim,1/)
+  else
+     lp = (/nrow,ncol/)   ;(/nsim/2+1,nsim/8+1/)
+  end if
+  gsn_panel2(wks3,xyplot,lp,panres)
+  delete(wks3)
+  delete([/map,pspec,syear,eyear,nyr,nyr_max,SCALE_TIMESERIES,lp/])
+;--------------------------------------------------------------------------------------------------
+  OUTDIR = getenv("OUTDIR")
+  if (wks_type.eq."png") then
+     if (tasreg_frame.eq.0) then
+        system("mv "+OUTDIR+"amo.000001.png "+OUTDIR+"amo.png")
+        system("mv "+OUTDIR+"amo.000002.png "+OUTDIR+"amo.lp.png")
+        system("mv 
"+OUTDIR+"amo.000003.png "+OUTDIR+"amo.tasreg.png") + system("mv "+OUTDIR+"amo.000004.png "+OUTDIR+"amo.lp.tasreg.png") + else + system("mv "+OUTDIR+"amo.000001.png "+OUTDIR+"amo.png") + system("mv "+OUTDIR+"amo.000002.png "+OUTDIR+"amo.lp.png") + end if + if (prreg_frame.eq.0) then + system("mv "+OUTDIR+"amo.prreg.000001.png "+OUTDIR+"amo.prreg.png") + system("mv "+OUTDIR+"amo.prreg.000002.png "+OUTDIR+"amo.lp.prreg.png") + end if + else + system("psplit "+OUTDIR+"amo.ps "+OUTDIR+"amo_nn") + if (tasreg_frame.eq.0) then + system("mv "+OUTDIR+"amo_nn0001.ps "+OUTDIR+"amo.ps") + system("mv "+OUTDIR+"amo_nn0002.ps "+OUTDIR+"amo.lp.ps") + system("mv "+OUTDIR+"amo_nn0003.ps "+OUTDIR+"amo.tasreg.ps") + system("mv "+OUTDIR+"amo_nn0004.ps "+OUTDIR+"amo.lp.tasreg.ps") + else + system("mv "+OUTDIR+"amo_nn0001.ps "+OUTDIR+"amo.ps") + system("mv "+OUTDIR+"amo_nn0002.ps "+OUTDIR+"amo.lp.ps") + end if + if (prreg_frame.eq.0) then + system("psplit "+OUTDIR+"amo.prreg.ps "+OUTDIR+"amo_oo") + system("mv "+OUTDIR+"amo_oo0001.ps "+OUTDIR+"amo.prreg.ps") + system("mv "+OUTDIR+"amo_oo0002.ps "+OUTDIR+"amo.lp.prreg.ps") + end if + end if + print("Finished: amo.ncl") +end + diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/amoc.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/amoc.ncl new file mode 100644 index 0000000000..11ec3aacc9 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/amoc.ncl @@ -0,0 +1,933 @@ +; Calculates MOC means/standard deviations, AMOC EOF1/PC1, +; regressions onto AMOC PC1, and lag correlations vs. AMO +; +; Variables used: moc, ts, tas +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: amoc.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_moc") + na = asciiread("namelist_byvar/namelist_moc",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + +;-----------TS/TAS read in for AMOC regressions------------------------------------------ + + nsim_trefht = numAsciiRow("namelist_byvar/namelist_trefht") + na_trefht = asciiread("namelist_byvar/namelist_trefht",(/nsim_trefht/),"string") + names_trefht = new(nsim_trefht,"string") + paths_trefht = new(nsim_trefht,"string") + syear_trefht = new(nsim_trefht,"integer",-999) + eyear_trefht = new(nsim_trefht,"integer",-999) + + do gg = 0,nsim_trefht-1 + names_trefht(gg) = str_strip(str_get_field(na_trefht(gg),1,delim)) + paths_trefht(gg) = str_strip(str_get_field(na_trefht(gg),2,delim)) + syear_trefht(gg) = stringtointeger(str_strip(str_get_field(na_trefht(gg),3,delim))) + eyear_trefht(gg) = 
stringtointeger(str_strip(str_get_field(na_trefht(gg),4,delim))) + end do + delete(na_trefht) + nyr_trefht = eyear_trefht-syear_trefht+1 + + nsim_ts = numAsciiRow("namelist_byvar/namelist_ts") + na_ts = asciiread("namelist_byvar/namelist_ts",(/nsim_ts/),"string") + names_ts = new(nsim_ts,"string") + paths_ts = new(nsim_ts,"string") + syear_ts = new(nsim_ts,"integer",-999) + eyear_ts = new(nsim_ts,"integer",-999) + + do gg = 0,nsim_ts-1 + names_ts(gg) = str_strip(str_get_field(na_ts(gg),1,delim)) + paths_ts(gg) = str_strip(str_get_field(na_ts(gg),2,delim)) + syear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),3,delim))) + eyear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),4,delim))) + end do + delete(na_ts) + nyr_ts = eyear_ts-syear_ts+1 + + pi=4.*atan(1.0) + rad=(pi/180.) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_mean = gsn_open_wks(wks_type,getenv("OUTDIR")+"amoc.mean.ann") + wks_stddev = gsn_open_wks(wks_type,getenv("OUTDIR")+"amoc.stddev.ann") + wks_amoc = gsn_open_wks(wks_type,getenv("OUTDIR")+"amoc.ann") + wks_amoc_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"amoc.timeseries.ann") + wks_amoc_powspec = gsn_open_wks(wks_type,getenv("OUTDIR")+"amoc.powspec.ann") + wks_amoc_sstreg = gsn_open_wks(wks_type,getenv("OUTDIR")+"amoc.sstreg.ann") + wks_amoc_tasreg = gsn_open_wks(wks_type,getenv("OUTDIR")+"amoc.tasreg.ann") + wks_amoc_amo = gsn_open_wks(wks_type,getenv("OUTDIR")+"amoc_amo.leadlag.ann") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_mean,"rainbow+white") + gsn_define_colormap(wks_stddev,"rainbow+white") + gsn_define_colormap(wks_amoc,"ncl_default") + gsn_define_colormap(wks_amoc_ts,"ncl_default") + gsn_define_colormap(wks_amoc_powspec,"ncl_default") + gsn_define_colormap(wks_amoc_sstreg,"ncl_default") + gsn_define_colormap(wks_amoc_tasreg,"ncl_default") + gsn_define_colormap(wks_amoc_amo,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_mean,"BlueDarkRed18") + gsn_define_colormap(wks_stddev,"cb_rainbow") + gsn_define_colormap(wks_amoc,"BlueDarkRed18") + gsn_define_colormap(wks_amoc_ts,"ncl_default") + gsn_define_colormap(wks_amoc_powspec,"cb_9step") + gsn_define_colormap(wks_amoc_sstreg,"BlueDarkRed18") + gsn_define_colormap(wks_amoc_tasreg,"BlueDarkRed18") + gsn_define_colormap(wks_amoc_amo,"ncl_default") + end if + plot_mean_ann = new(nsim,"graphic") + plot_stddev_ann = new(nsim,"graphic") + plot_amoc_ann = new(nsim,"graphic") + plot_amoc_ts_ann = new(nsim,"graphic") + plot_amoc_powspec_ann = new(nsim,"graphic") + plot_amoc_sstreg_ann = new(nsim,"graphic") + plot_amoc_tasreg_ann = new(nsim,"graphic") + plot_amoc_amo_ann = new(nsim,"graphic") + if (isfilepresent2("obs_moc")) then + pspec_obs = new(nsim,"graphic") + end if + + do ee = 0,nsim-1 + mocT = data_read_in_ocean_MOC(paths(ee),"MOC",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(mocT,"is_all_missing")) then + delete(mocT) + continue + end if + lat = tofloat(mocT&lat) + lev = tofloat(mocT&lev) + ny = dimsizes(lat) + nz = dimsizes(lev) +;----------------------------------------------------------------------------------- +; compute annual means and standard deviations +;----------------------------------------------------------------------------------- + moc_ann = runave_n_Wrap(mocT,12,0,0) + moc_mean_ann = dim_avg_n_Wrap(moc_ann(5::12,:,:),0) + delete(moc_ann) + + if (OPT_CLIMO.eq."Full") then + mocT = 
rmMonAnnCycTLL(mocT)
+   else
+      check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+      temp_arr = mocT
+      delete(temp_arr&time)
+      temp_arr&time = cd_calendar(mocT&time,-1)
+      if (CLIMO_SYEAR.lt.0) then
+         climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+      else
+         climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+      end if
+      delete(temp_arr)
+      mocT = calcMonAnomTLL(mocT,climo)
+      delete(climo)
+   end if
+   moc_ann2 = runave_n_Wrap(mocT,12,0,0)
+   moc_sd_ann = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),moc_ann2(5::12,:,:),False,False,0),0)
+   copy_VarMeta(moc_mean_ann,moc_sd_ann)
+   delete(moc_ann2)
+
+   moc_sd_ann = where(moc_sd_ann.lt.0.001,moc_sd_ann@_FillValue,moc_sd_ann)
+   moc_mean_ann = where(ismissing(moc_sd_ann),moc_mean_ann@_FillValue,moc_mean_ann)
+;-----------------------------------------------------------------------------------
+;-----calculate AMOC EOF1
+;-----------------------------------------------------------------------------------
+   temp = runave_n_Wrap(mocT,12,0,0)   ; form annual averages
+   amoc = temp(5::12,:,:)
+   delete([/temp,mocT/])
+
+   atl_begin = ind_nearest_coord (-33.0, lat, 0)   ; set missing values based on variance, and mask Atlantic south of 33S
+   vareps = 1.e-6
+   amocvar = conform(amoc,dim_variance_n_Wrap(amoc,0),(/1,2/))
+   amoc@_FillValue = 1.e20
+   amoc(:,:,0:atl_begin-1) = amoc@_FillValue
+   amoc = where(amocvar.lt.vareps,amoc@_FillValue,amoc)
+   delete(amocvar)
+
+   dlat = lat   ; Compute latitudinal weights (proportional to grid-cell width; only relative weights matter here)
+   rEarth = 6.37122e8   ; Earth radius in cm
+   do iy=0,ny-1
+      if (iy.gt.0.and.iy.lt.ny-1) then
+         dy0 = (lat(iy)-lat(iy-1))/2.0
+         dy1 = (lat(iy+1)-lat(iy))/2.0
+         dlat(iy) = (dy0+dy1)*rEarth
+         delete(dy0)
+         delete(dy1)
+      end if
+      if (iy.eq.0) then
+         dy1 = (lat(iy+1)-lat(iy))/2.0
+         dlat(iy) = (2.*dy1)*rEarth
+         delete(dy1)
+      end if
+      if (iy.eq.ny-1) then
+         dy0 = (lat(iy)-lat(iy-1))/2.0
+         dlat(iy) = (2.*dy0)*rEarth
+         delete(dy0)
+      end if
+   end do
+
+   dz = lev   ; compute vertical weights (in meters)
+   do iz=0,nz-1
+      if (iz.gt.0.and.iz.lt.nz-1) then
+         dz(iz) = (lev(iz)-lev(iz-1))/2.0 + (lev(iz+1)-lev(iz))/2.0
+      end if
+      if (iz.eq.0) then
+         dz(iz) = (lev(iz+1)-lev(iz))/2.0
+      end if
+      if (iz.eq.nz-1) then
+         dz(iz) = (lev(iz)-lev(iz-1))/2.0
+      end if
+   end do
+
+   test = dlat(0)*dz(0)
+   wgt = new((/nz,ny/),typeof(test))   ; weight the data
+   delete(test)
+   do iz=0,nz-1
+      do iy=0,ny-1
+         wgt(iz,iy) = dlat(iy)*dz(iz)
+      end do
+   end do
+   amocW = amoc*conform(amoc, wgt, (/1,2/))   ; same units as "amoc"
+   delete(wgt)
+   copy_VarMeta(amoc,amocW)
+   amocW@long_name = "area weighted "+amoc@long_name
+
+   workeof = eofunc_Wrap(amocW(lev|:,lat|:,time|:), 3, 75)
+   workeof_ts = eofunc_ts_Wrap (amocW(lev|:,lat|:,time|:), workeof, False)
+   delete(amocW)
+   amoc_pc_ann = dim_standardize(workeof_ts(0,:),0)
+   moc_reg_ann = amoc(0,:,:)
+   moc_reg_ann = (/ regCoef(amoc_pc_ann,amoc(lev|:,lat|:,time|:)) /)
+   sig_pcv = eofunc_north2(workeof@pcvar,dimsizes(amoc_pc_ann),False)
+   if (sig_pcv(0)) then   ; if True then significant
+      moc_reg_ann@pcvar = tofloat(sprintf("%4.1f", workeof@pcvar(0)))+"%*"
+   else
+      moc_reg_ann@pcvar = tofloat(sprintf("%4.1f", workeof@pcvar(0)))+"%"
+   end if
+   delete(sig_pcv)
+
+   delete([/atl_begin,lat,lev,dz,dlat,amoc,workeof,workeof_ts,ny,nz/])
+   if (max(moc_reg_ann&lev).ge.2000) then
+      if (.not.ismissing(moc_reg_ann({2000.},{38}))) then
+         if (moc_reg_ann({2000.},{38}).lt.0) then   ; arbitrary attempt to make all plots have the same sign..
+            moc_reg_ann = moc_reg_ann*-1.
+            amoc_pc_ann = amoc_pc_ann*-1.
+         end if
+      end if
+   end if
+;----------------------------------------------------------------------------------------
+   iopt = 0   ; calculate spectra of AMOC PC1
+   jave = (1*nyr(ee))/100
+   if (jave.eq.0) then
+     jave = 1
+   end if
+   val1 = .95
+   val2 = .99
+   pct = 0.1
+   spectra_mvf = False       ; missing value flag for amoc
+   spectra_mvf_obs = True    ; missing value flag for obs amoc
+   if (any(ismissing(amoc_pc_ann))) then
+      print("Missing data detected for "+names(ee)+", not creating AMOC spectra")
+      spectra_mvf = True
+   else
+      if (isfilepresent2("obs_moc").and.ee.eq.0) then
+         spectra_mvf_obs = False   ; missing value flag for obs amoc
+      end if
+      sdof = specx_anal(amoc_pc_ann,iopt,jave,pct)
+      splt1 = specx_ci(sdof,val1,val2)
+      splt1!0 = "ncurves"
+      splt1&ncurves = ispan(0,3,1)
+      splt1&ncurves@long_name = "power spectra curves"
+      splt1&ncurves@units = "1"
+      splt1!1 = "frequency2"
+      splt1&frequency2 = sdof@frq
+      splt1&frequency2@long_name = "power spectra frequency"
+      splt1&frequency2@units = "1"
+      splt1@units_info = "df refers to frequency interval; data are standardized so there are no physical units"
+      splt1@units = "1/df"
+      splt1@info = "(0,:)=spectrum,(1,:)=Markov red noise spectrum, (2,:)="+val1+"% confidence bound for Markov, (3,:)="+val2+"% confidence bound for Markov"
+      if (isfilepresent2("obs_moc").and.ee.eq.0) then
+         sdof_obs = sdof
+      end if
+      delete([/iopt,jave,pct/])
+   end if
+;-------------------Read in TS and TAS for regressions onto PC1 and for AMO calculation----------------------------------------
+   if (syear(ee).eq.syear_ts(ee)) then
+      if (eyear(ee).eq.eyear_ts(ee)) then
+;         print("Years match")
+         sstreg_plot_flag = 0
+      else
+;         print("End years do not match, skipping SST regressions in amoc.ncl.")
+         sstreg_plot_flag = 1
+      end if
+   else
+;      print("Start years do not match, skipping SST regressions in amoc.ncl.")
+      sstreg_plot_flag = 1
+   end if
+
+   if (sstreg_plot_flag.eq.0) then
+      sst = data_read_in(paths_ts(ee),"TS",syear_ts(ee),eyear_ts(ee))
+
+      sst = where(sst.le.-1.8,-1.8,sst)
+      d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r")
+      basemap = d->LSMASK
+      lsm = landsea_mask(basemap,sst&lat,sst&lon)
+      sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False)
+      delete([/lsm,basemap/])
+      delete(d)
+
+      if (OPT_CLIMO.eq."Full") then
+         sst = rmMonAnnCycTLL(sst)
+      else
+         check_custom_climo(names_ts(ee),syear_ts(ee),eyear_ts(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+         temp_arr = sst
+         delete(temp_arr&time)
+         temp_arr&time = cd_calendar(sst&time,-1)
+         if (CLIMO_SYEAR.lt.0) then
+            climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+         else
+            climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+         end if
+         delete(temp_arr)
+         sst = calcMonAnomTLL(sst,climo)
+         delete(climo)
+      end if
+      temp = runave_n_Wrap(sst,12,0,0)   ; form annual averages
+      sst_ann = temp(5::12,:,:)
+      delete([/temp,sst/])
+
+      sst_reg_ann = sst_ann(0,:,:)   ; SST regression onto AMOC PC1
+      sst_reg_ann = (/ regCoef(amoc_pc_ann,sst_ann(lat|:,lon|:,time|:)) /)
+
+;     Compute AMO timeseries
+      sst_ann = lonFlip(sst_ann)   ; orient longitudes from -180:180 (set to 0:360 in data_read_in function) for AMO calculation
+      coswgt = cos(rad*sst_ann&lat)
+      coswgt!0 = "lat"
+      coswgt&lat = sst_ann&lat
+      natl_aa = wgt_areaave(sst_ann(:,{0:60},{-80:0}),coswgt({0.:60.}),1.0,0)
+      global_aa = wgt_areaave(sst_ann(:,{-60:60},:),coswgt({-60.:60.}),1.0,0)
+      AMO = new((/dimsizes(natl_aa)/),"float",-999.)   ; timeseries plot
+      AMO!0 = "time"
+      AMO&time = sst_ann&time
+      AMO = (/ natl_aa - global_aa /)
+      delete([/coswgt,natl_aa,global_aa,sst_ann/])
+   end if
+;-------------------------------------------------------------------------------------------------------------
+   if (syear(ee).eq.syear_trefht(ee)) then
+      if (eyear(ee).eq.eyear_trefht(ee)) then
+;         print("Years match")
+         tasreg_plot_flag = 0
+      else
+;         print("End years do not match, skipping TAS regressions in amoc.ncl.")
+         tasreg_plot_flag = 1
+      end if
+   else
+;      print("Start years do not match, skipping TAS regressions in amoc.ncl.")
+      tasreg_plot_flag = 1
+   end if
+
+   if (tasreg_plot_flag.eq.0) then
+      tas = data_read_in(paths_trefht(ee),"TREFHT",syear_trefht(ee),eyear_trefht(ee))
+      if (OPT_CLIMO.eq."Full") then
+         tas = rmMonAnnCycTLL(tas)
+      else
+         check_custom_climo(names_trefht(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+         temp_arr = tas
+         delete(temp_arr&time)
+         temp_arr&time = cd_calendar(tas&time,-1)
+         if (CLIMO_SYEAR.lt.0) then
+            climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+         else
+            climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+         end if
+         delete(temp_arr)
+         tas = calcMonAnomTLL(tas,climo)
+         delete(climo)
+      end if
+
+      temp = runave_n_Wrap(tas,12,0,0)   ; form annual averages
+      tas_ann = temp(5::12,:,:)
+      delete([/temp,tas/])
+
+      tas_reg_ann = tas_ann(0,:,:)   ; TAS regression onto AMOC PC1
+      tas_reg_ann = (/ regCoef(amoc_pc_ann,tas_ann(lat|:,lon|:,time|:)) /)
+      delete(tas_ann)
+   end if
+;---------------compute AMOC/AMO lead/lags-----------------------------------------------------------------------------
+   if (nyr(ee).ge.90.and.isvar("AMO")) then   ; need a minimum number of years to compute lead/lag correlations
+      nwt = 51
+      pda = 15       ; longest period
+      pdb = 1        ; shortest period
+      fca = 1./pda   ; ==> lowest allowed frequency
+      fcb = 1./pdb   ; ==> highest allowed frequency
+      ihp = 0        ; 0 ==> low pass filter, fcb ignored
+      nsigma = 1.
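+;     Note: the Lanczos weights applied below implement a low-pass filter;
+;     with ihp=0 only the low-frequency cutoff fca matters (fcb is ignored),
+;     so variability with periods shorter than pda (15 years on these annual
+;     series) is damped before the lead/lag correlations are computed.
+;     A minimal, hypothetical standalone use of the same calls (the series
+;     name x and all values are illustrative only, not part of this script):
+;        wgt = filwgts_lanczos(51, 0, 1./15., -999., 1.)   ; 51 low-pass weights
+;        xlp = wgt_runave_Wrap(x, wgt, 0)                  ; x: any 1D time series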
+ twgt = filwgts_lanczos (nwt, ihp, fca, fcb, nsigma) + AMO_wgt = wgt_runave_Wrap(AMO,twgt,0) + amoc_pc_ann_wgt = wgt_runave_Wrap(amoc_pc_ann,twgt,0) + + mxlag = 15 + x_Lead_y = esccr(amoc_pc_ann_wgt,AMO_wgt,mxlag) + y_Lead_x = esccr(AMO_wgt,amoc_pc_ann_wgt,mxlag) ; switch the order of the series + + ccr = new ( 2*mxlag+1, float) + ccr(0:mxlag-1) = y_Lead_x(1:mxlag:-1) ; "negative lag", -1 reverses order + ccr(mxlag:) = x_Lead_y(0:mxlag) ; "positive lag" + delete([/x_Lead_y,y_Lead_x,AMO_wgt,amoc_pc_ann_wgt/]) + end if + if (sstreg_plot_flag.eq.0) then + delete(AMO) + end if +;--------------------------------------------------------------------------------------------- + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.amoc."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + + if (spectra_mvf.eq.False) then + z->amoc_spectra_ann = set_varAtts(splt1,"AMOC (monthly) power spectra, Markov spectrum and confidence curves","","") + end if + if (nyr(ee).ge.90.and.isvar("ccr")) then + time_lag_cor = ispan(mxlag*-1,mxlag,1) + time_lag_cor@units = "months since 0000-01-01 00:00:00" + time_lag_cor@long_name = "Time" + time_lag_cor@standard_name = "time" + time_lag_cor@calendar = "standard" + time_lag_cor!0 = "time_lag_cor" + time_lag_cor&time_lag_cor = time_lag_cor + ccr!0 = "time_lag_cor" + ccr&time_lag_cor = time_lag_cor + ccr@long_name = "AMOC AMO lead lag correlation" + z->amoc_amo_lag_cor = set_varAtts(ccr,"","1","") + delete(time_lag_cor) + end if + TIME = ispan(0,dimsizes(amoc_pc_ann)-1,1) + TIME@units = "years since "+syear(ee)+"-01-15 00:00:00" + TIME@long_name = "Time" + TIME@standard_name = "time" + TIME@calendar = "standard" + TIME!0 = "TIME" + TIME&TIME = TIME + amoc_pc_ann!0 = "TIME" + amoc_pc_ann&TIME = TIME + amoc_pc_ann@long_name = "AMOC pc1 timeseries (annual)" + z->amoc_timeseries_ann = set_varAtts(amoc_pc_ann,"","1","") + lat_amoc = moc_reg_ann&lat + lat_amoc!0 = "lat_amoc" + lat_amoc&lat_amoc = lat_amoc + delete(moc_reg_ann&lat) + moc_reg_ann!1 = "lat_amoc" + moc_reg_ann&lat_amoc = lat_amoc + delete(moc_mean_ann&lat) + moc_mean_ann!1 = "lat_amoc" + moc_mean_ann&lat_amoc = lat_amoc + delete(moc_sd_ann&lat) + moc_sd_ann!1 = "lat_amoc" + moc_sd_ann&lat_amoc = lat_amoc + z->amoc_mean_ann = set_varAtts(moc_mean_ann,"AMOC mean (annual)","","") + z->amoc_stddev_ann = set_varAtts(moc_sd_ann,"AMOC standard deviation (annual)","","") + z->amoc_pattern_ann = set_varAtts(moc_reg_ann,"AMOC regression onto AMOC principal component timeseries (annual)","","") + delete([/modname,fn,TIME,lat_amoc/]) + delete(z) + + if (sstreg_plot_flag.eq.0) then + modname = str_sub_str(names_ts(ee)," ","_") + bc = 
(/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.amoc.ts."+syear_ts(ee)+"-"+eyear_ts(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names_ts(ee)+" from "+syear_ts(ee)+"-"+eyear_ts(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear_ts(ee)+"-"+eyear_ts(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + else + z = addfile(fn,"w") + end if + z->amoc_sst_regression_ann = set_varAtts(sst_reg_ann,"sst regression onto AMOC principal component timeseries (annual)","","") + delete(z) + end if + if (tasreg_plot_flag.eq.0) then + modname = str_sub_str(names_trefht(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.amoc.tas."+syear_trefht(ee)+"-"+eyear_trefht(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names_trefht(ee)+" from "+syear_trefht(ee)+"-"+eyear_trefht(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear_trefht(ee)+"-"+eyear_trefht(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + else + z = addfile(fn,"w") + end if + z->amoc_tas_regression_ann = set_varAtts(tas_reg_ann,"tas regression onto AMOC PC1 (annual)","","") + delete(z) + end if + end if + + moc_mean_ann&lev = moc_mean_ann&lev/1000. + moc_mean_ann&lev@units = "km" + moc_sd_ann&lev = moc_sd_ann&lev/1000. + moc_sd_ann&lev@units = "km" + moc_reg_ann&lev = moc_reg_ann&lev/1000. 
+ moc_reg_ann&lev@units = "km" +;======================================================================================================================= + mocres = True ; plot mods desired + mocres@gsnDraw = False + mocres@gsnFrame = False + mocres@cnFillOn = True ; turn on color fill + mocres@cnMissingValFillColor = "gray50" + mocres@cnMissingValFillPattern = 0 + mocres@cnLinesOn = True + mocres@cnLineLabelsOn = False +; mocres@cnLineLabelFontColor = "white" +; mocres@cnLineLabelBackgroundColor = -1 + mocres@lbLabelBarOn = False + + mocres@cnInfoLabelOn = False ; Turn off informational label + mocres@cnLevelSelectionMode = "ExplicitLevels" ; manually set the contour levels + if (nsim.le.7) then + mocres@tmXBLabelFontHeightF = 0.01 + mocres@tmYLLabelFontHeightF = 0.01 + mocres@gsnLeftStringFontHeightF = 0.0125 + mocres@gsnCenterStringFontHeightF = 0.0125 + mocres@gsnRightStringFontHeightF = 0.011 + else + mocres@tmXBLabelFontHeightF = 0.014 + mocres@tmYLLabelFontHeightF = 0.014 + mocres@gsnLeftStringFontHeightF = 0.017 + mocres@gsnCenterStringFontHeightF = 0.017 + mocres@gsnRightStringFontHeightF = 0.0155 + end if + mocres@tiYAxisFontHeightF = mocres@tmXBLabelFontHeightF + mocres@gsnCenterStringOrthogonalPosF = -0.96 + mocres@gsnCenterStringParallelPosF = 0.80 + + mocres@tmXBLabelsOn = True + mocres@tmXTLabelsOn = False + mocres@tmXTOn = False + mocres@tmYRLabelsOn = False + mocres@tmYROn = False + mocres@cnMonoLineThickness = False + mocres@cnMonoLineDashPattern = False + mocres@vpWidthF = 0.375 + mocres@vpHeightF = 0.28 + + mocres@trYReverse = True ; reverses y-axis + mocres@gsnYAxisIrregular2Linear = True + mocres@gsnXAxisIrregular2Linear = True + mocres@tiYAxisString= "depth (km)" + mocres@tiXAxisString= "" + mocres@gsnCenterString = "" + mocres@trXMinF = 0. + mocres@trXMaxF = 90. 
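+;   Note: the resources above and below draw each MOC cross-section with
+;   depth increasing downward (trYReverse) and with the irregularly spaced
+;   depth and latitude coordinates linearized (gsnY/XAxisIrregular2Linear);
+;   the x-axis covers the Atlantic from the equator to 90N, with explicit
+;   degree-north tick labels set below.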
+ mocres@tmXBMode = "Explicit" + mocres@tmXBValues = (/0.,30.,60.,90./) + mocres@tmXBLabels = (/"0~S~o~N~N","30~S~o~N~N","60~S~o~N~N","90~S~o~N~N"/) + + mocres@gsnCenterString = syear(ee)+"-"+eyear(ee) + mocres@gsnRightString = moc_mean_ann@units + mocres@gsnLeftString = names(ee) + + + mocres@cnLevels = ispan(-4,28,2) + mocres@cnLineThicknesses = (/1,1,2,1,1,1,1,2,1,1,1,1,2,1,1,1,1/) + mocres@cnLineDashPatterns = (/1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0/) + plot_mean_ann(ee) = gsn_csm_contour(wks_mean,moc_mean_ann,mocres) + delete(moc_mean_ann) + delete(mocres@cnLevels) + delete(mocres@cnLineThicknesses) + delete(mocres@cnLineDashPatterns) + + mocres@cnLevels = (/.1,.3,.5,1.0,1.5,2.0,2.5,3.0,4.0,5.0/) + if (COLORMAP.eq.0) then + mocres@cnFillColors = (/20,38,54,80,95,125,175,185,195,205,236/) + end if + if (COLORMAP.eq.1) then + mocres@cnFillColors = (/14,23,35,47,63,79,95,111,124,155,175/) + end if + plot_stddev_ann(ee) = gsn_csm_contour(wks_stddev,moc_sd_ann,mocres) + delete(moc_sd_ann) + delete(mocres@cnLevels) + delete(mocres@cnFillColors) + + mocres@cnLevels = fspan(-2,2,41) + mocres@cnMonoLineThickness = True + mocres@cnMonoLineDashPattern = True + mocres@cnLineDashPattern = 0 + mocres@gsnCenterString = syear(ee)+"-"+eyear(ee) + mocres@gsnRightString = moc_reg_ann@pcvar + mocres@gsnLeftString = names(ee) + plot_amoc_ann(ee) = gsn_csm_contour(wks_amoc,moc_reg_ann,mocres) + delete(moc_reg_ann) + delete(mocres) + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnXYBarChart = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + xyres@gsnAboveYRefLineColor = 185 + xyres@gsnBelowYRefLineColor = 35 + if (wks_type.eq."png") then + xyres@xyLineThicknessF = .5 + else + xyres@xyLineThicknessF = .2 + end if + xyres@xyLineColor = "gray52" + xyres@tiYAxisString = "" + xyres@tiXAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnStringFontHeightF = 0.017 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnStringFontHeightF = 0.024 + end if + xyres@gsnCenterStringOrthogonalPosF = 0.025 + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnLeftString = "" + xyres@gsnRightString = "" + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+1.5 + + xyres@gsnCenterString = names(ee) + plot_amoc_ts_ann(ee) = gsn_csm_xy(wks_amoc_ts,ispan(syear(ee),eyear(ee),1),amoc_pc_ann,xyres) ; use standardized timeseries + delete(amoc_pc_ann) + delete(xyres) + + pres = True + pres@vpXF = 0.07 + pres@trYMinF = 0. 
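+;   Note: for an annual-mean PC the spectrum is resolvable out to the
+;   Nyquist frequency of 0.5 cycles per year, so the frequency axis below
+;   spans 0 to 0.5 and the top axis converts frequency f to period 1/f in
+;   years (illustrative example: f = 0.2 cycles per year <-> 5 yr period).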
+   pres@trXMinF = 0.0
+   pres@trXMaxF = 0.5
+   pres@tiYAxisString = "Power"   ; yaxis
+   pres@xyLineColor = "black"
+   pres@gsnFrame = False
+   pres@gsnDraw = False
+
+   pres@tmXBLabelDeltaF = -.8
+   pres@tmXTLabelDeltaF = -.8
+   pres@pmLegendDisplayMode = "Never"
+   pres@xyLineThicknesses = (/3.5,2.,1.,1./)
+   pres@xyDashPatterns = (/0,0,0,0/)
+   pres@xyLineColors = (/"foreground","red","blue","green"/)
+   pres@xyLabelMode = "custom"
+   pres@xyLineLabelFontColors = pres@xyLineColors
+   pres@xyExplicitLabels = (/"","",val1*100+"%",val2*100+"%"/)
+   pres@tmXTOn = True
+   pres@tmYROn = False
+   pres@tmXTLabelsOn = True
+   pres@tmXUseBottom = False
+   pres@tmXTMode = "Explicit"
+   pres@tmXTValues = (/".02",".10",".20",".3333",".50"/)
+   pres@tmXTLabels = (/"50","10","5","3","2"/)
+
+   pres@tmXTLabelFontHeightF = 0.018
+   pres@tmXBLabelFontHeightF = 0.018
+   pres@tmYLLabelFontHeightF = 0.018
+   pres@tiYAxisString = "Variance"   ;"Power (~S~o~N~C~S~2~N~ / cycles mo~S~-1~N~)"   ; yaxis
+   pres@tiXAxisString = "Frequency (cycles yr~S~-1~N~)"
+   pres@tiMainString = ""
+   pres@txFontHeightF = 0.015
+   pres@xyLineLabelFontHeightF = 0.022
+   pres@tiXAxisFontHeightF = 0.025
+   pres@tiYAxisFontHeightF = 0.025
+   pres@tiMainFontHeightF = 0.03
+   pres@gsnRightStringOrthogonalPosF = -0.115
+
+   pres@tiMainOn = False
+   pres@gsnCenterString = "Period (years)"
+   pres@gsnCenterStringFontHeightF = pres@tiYAxisFontHeightF
+   pres@gsnRightStringFontHeightF = pres@tiYAxisFontHeightF - 0.005
+   pres@gsnRightString = ""
+   pres@gsnLeftString = ""
+   pres@gsnCenterString = names(ee)
+   pres@gsnRightString = syear(ee)+"-"+eyear(ee)+" "
+   if (spectra_mvf.eq.False) then
+      plot_amoc_powspec_ann(ee) = gsn_csm_xy(wks_amoc_powspec,sdof@frq,splt1,pres)
+      if (isfilepresent2("obs_moc").and.ee.ge.1.and.spectra_mvf_obs.eq.False) then
+         pres@xyLineColors = (/"gray70","black","black","black"/)
+         pres@xyCurveDrawOrder = "PreDraw"
+         pres@gsnCenterString = ""
+         pres@gsnRightString = ""
+         pspec_obs(ee) = gsn_csm_xy(wks_amoc_powspec,sdof_obs@frq,sdof_obs@spcx,pres)
+         overlay(plot_amoc_powspec_ann(ee),pspec_obs(ee))
+         delete(pres@xyCurveDrawOrder)
+      end if
+      delete([/splt1,sdof/])
+   end if
+   delete(pres)
+
+   res = True
+   res@mpProjection = "WinkelTripel"
+   res@mpGeophysicalLineColor = "gray42"
+   res@mpPerimOn = False
+   res@mpGridLatSpacingF = 90    ; change latitude line spacing
+   res@mpGridLonSpacingF = 180.  ; change longitude line spacing
+   res@mpGridLineColor = "transparent"   ; trick ncl into drawing perimeter
+   res@mpGridAndLimbOn = True    ; turn on lat/lon lines
+   res@mpFillOn = False
+   res@mpCenterLonF = 0.
+   res@mpOutlineOn = True
+   if (wks_type.eq."png") then
+      res@mpGeophysicalLineThicknessF = 2.
+   else
+      res@mpGeophysicalLineThicknessF = 1. 
+ end if + res@gsnDraw = False + res@gsnFrame = False + res@vpYF = 0.95 + res@vpHeightF = 0.3 + res@vpXF = 0.2 + res@vpWidthF = 0.6 + +; res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + if (COLORMAP.eq."0") then + res@cnLevels = fspan(-.5,.5,21) + end if + if (COLORMAP.eq."1") then + res@cnLevels = fspan(-.4,.4,17) + end if + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = 0.005 + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnCenterString = names(ee) + res@gsnRightString = "" + + if (sstreg_plot_flag.eq.0) then + plot_amoc_sstreg_ann(ee) = gsn_csm_contour_map(wks_amoc_sstreg,sst_reg_ann,res) + delete(sst_reg_ann) + end if + if (tasreg_plot_flag.eq.0) then + plot_amoc_tasreg_ann(ee) = gsn_csm_contour_map(wks_amoc_tasreg,tas_reg_ann,res) + delete(tas_reg_ann) + end if + delete(res) + + res2 = True + res2@gsnDraw = False + res2@gsnFrame = False + res2@trYMinF = -1. ; min((/-0.4,min(ccr)/)) + res2@trYMaxF = 1. ; max((/0.6,max(ccr)/)) + res2@vpWidthF = 0.6 + res2@vpHeightF = 0.4 + res2@gsnYRefLine = 0.0 + res2@gsnYRefLineColor = "gray42" + res2@gsnXRefLine = 0.0 + res2@gsnXRefLineColor = "gray42" + res2@xyLineColor = "royalblue" + if (wks_type.eq."png") then + res2@xyLineThicknessF = 3.5 + else + res2@xyLineThicknessF = 1.75 + end if + if (nsim.le.5) then + res2@tmXBLabelFontHeightF = 0.0125 + res2@tmYLLabelFontHeightF = 0.0125 + res2@gsnLeftStringFontHeightF = 0.013 + res2@gsnCenterStringFontHeightF = 0.017 + res2@gsnRightStringFontHeightF = 0.013 + else + res2@tmXBLabelFontHeightF = 0.018 + res2@tmYLLabelFontHeightF = 0.018 + res2@gsnLeftStringFontHeightF = 0.020 + res2@gsnCenterStringFontHeightF = 0.024 + res2@gsnRightStringFontHeightF = 0.020 + end if + res2@gsnLeftStringOrthogonalPosF = -1.01 + res2@gsnRightStringOrthogonalPosF = -1.01 + res2@gsnLeftStringParallelPosF = 0.01 + res2@gsnRightStringParallelPosF = 0.99 + + res2@gsnLeftString = "AMO leads" + res2@gsnCenterString = names(ee) + res2@gsnRightString = "AMOC PC1 leads" + if (nyr(ee).ge.90.and.isvar("ccr")) then ; need a minimum number of years to compute lead/lag correlations + res2@trXMinF = mxlag*-1 + res2@trXMaxF = mxlag + plot_amoc_amo_ann(ee) = gsn_csm_xy(wks_amoc_amo,ispan(mxlag*-1,mxlag,1),ccr,res2) + delete([/mxlag,ccr/]) + end if + delete(res2) + end do + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelFontHeightF = 0.013 + panres@lbLabelStride = 1 + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "AMOC Means (Annual)" + gsn_panel2(wks_mean,plot_mean_ann,(/nrow,ncol/),panres) + delete(wks_mean) + + panres@txString = "AMOC Standard Deviations (Annual)" + 
gsn_panel2(wks_stddev,plot_stddev_ann,(/nrow,ncol/),panres) + delete(wks_stddev) + + panres@txString = "AMOC EOF1 (Annual)" + gsn_panel2(wks_amoc,plot_amoc_ann,(/nrow,ncol/),panres) + delete(wks_amoc) + + panres@txString = "AMOC TAS Regressions (Annual)" + gsn_panel2(wks_amoc_tasreg,plot_amoc_tasreg_ann,(/nrow,ncol/),panres) + delete(wks_amoc_tasreg) + + panres@txString = "AMOC SST Regressions (Annual)" + gsn_panel2(wks_amoc_sstreg,plot_amoc_sstreg_ann,(/nrow,ncol/),panres) + delete(wks_amoc_sstreg) + delete(panres) + + panres2 = True + if (nsim.le.5) then + panres2@txFontHeightF = 0.024 + else + panres2@txFontHeightF = 0.016 + end if + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) + end if + panres2@gsnPanelYWhiteSpacePercent = 3.0 + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres2@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + panres2@txString = "AMOC PC1 (Annual)" + gsn_panel2(wks_amoc_ts,plot_amoc_ts_ann,(/nrow,ncol/),panres2) + delete(wks_amoc_ts) + + if (isatt(panres2,"gsnPanelScalePlotIndex")) then + delete(panres2@gsnPanelScalePlotIndex) + end if + panres2@txString = "AMOC PC1 (Annual, detrended)" + gsn_panel2(wks_amoc_powspec,plot_amoc_powspec_ann,(/nrow,ncol/),panres2) + delete(wks_amoc_powspec) + + + panres2@txString = "AMO / AMOC PC1 Lag Correlation (Annual)" + gsn_panel2(wks_amoc_amo,plot_amoc_amo_ann,(/nrow,ncol/),panres2) + delete(wks_amoc_amo) + delete(panres2) + print("Finished: amoc.ncl") +end + + diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/cas-cvdp.png b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/cas-cvdp.png new file mode 100644 index 0000000000..79fab57d63 Binary files /dev/null and b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/cas-cvdp.png differ diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/functions.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/functions.ncl new file mode 100644 index 0000000000..4a66ce2682 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/functions.ncl @@ -0,0 +1,1179 @@ +;================================================================================================= +; create blank array for use when something may be/is wrong. 
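+; (Note: the function below returns an all-missing array carrying the
+; attribute is_all_missing = True, so downstream scripts can detect and skip
+; it. Hypothetical usage, values illustrative only:
+;    blank = create_empty_array(1970,1999,1,12,"time_lat_lon")
+; returns a 360 x 90 x 180 (time,lat,lon) float array of _FillValue.)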
+; +undef("create_empty_array") +function create_empty_array(yS:numeric,yE:numeric,mS:numeric,mE:numeric,opttype:string) +local yS,yE,mS,mE,opttype +begin + if (ismissing(yS).or.ismissing(yE)) then + yS = 1 + yE = 50 + end if + timeT = yyyymm_time(yS, yE, "integer") + time = timeT({yS*100+mS:yE*100+mE}) + if (opttype.eq."time_lat_lon") then + blankarr = new((/dimsizes(time),90,180/),"float",1.e20) + blankarr!0 = "time" ; time coordinate variable assigned below + blankarr&time = time + blankarr!1 = "lat" + blankarr&lat = fspan(-89,89,90) + blankarr!2 = "lon" + blankarr&lon = fspan(0,358,180) + blankarr&lat@units = "degrees_north" + blankarr&lon@units = "degrees_east" + end if + if (opttype.eq."time_lev_lat") then + blankarr = new((/dimsizes(time),41,90/),"float",1.e20) + blankarr!0 = "time" ; time coordinate variable assigned below + blankarr&time = time + blankarr!1 = "lev" + blankarr&lev =fspan(0,5000,41) + blankarr!2 = "lat" + blankarr&lat = fspan(-89,89,90) + blankarr&lat@units = "degrees_north" + blankarr&lev@units = "m" + blankarr&lev@positive = "down" + end if + blankarr@units = "" + blankarr@is_all_missing = True + return(blankarr) + delete([/yS,yE,mS,mE,opttype,blankarr,timeT,time/]) +end +;=================================================================================================== +; read in atmospheric/land data from selected files +; assign time coordinate variables, check for issues with the array, assign _FillValue (if needed) +; assign dimension names (for ease-of-use), check and modify units +; +; vname settings at top of this script can be modified if a different variable name is +; encountered. For instance, if a TS data file has the TS array named as "sfc_t", one +; could add "sfc_t" to the vname TS coding as follows: +; if (vn.eq."TS") then +; vname = (/"TS","ts","sst","sfc_t"/) +; end if +; +undef("data_read_in") +function data_read_in(zpath:string,vn:string,yearS:integer,yearE:integer) +; path for TS file(s), variable name, start year, and end year are read in. +local zpath,vn,cpath0,ta,tfiles,c,arr,farr,yearS,yearE,mocheck,fils_precc,fils_precl +begin + if (vn.eq."TS") then + vname = (/"TS","ts","sst","t_surf","skt"/) + end if + if (vn.eq."PSL") then + vname = (/"PSL","psl","slp","SLP","prmsl","msl","slp_dyn"/) + end if + if (vn.eq."TREFHT") then + vname = (/"TREFHT","tas","temp","air","temperature_anomaly","temperature","t2m","t_ref","T2","tempanomaly"/) + end if + if (vn.eq."PRECT") then + vname = (/"PRECC","PRECL","PRECT","pr","PPT","ppt","p","P","precip","PRECIP","tp","prcp","prate"/) + end if + if (vn.eq."SNOWDP") then + vname = (/"SNOWDP","snd"/) + end if + + if (ismissing(zpath) ) then + print("File missing, creating blank array of data. View "+vn+" namelist for details.") + arr = create_empty_array(yearS,yearE,1,12,"time_lat_lon") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. 
+ smdata = 1 + emdata = 12 + else + cpath0 = stringtochar(zpath) + ta = stringtochar("*") + if (any(cpath0.eq.ta(0)).or.any(cpath0.eq."{")) then ; check for "*" and "{" denoting multiple files + tfiles = systemfunc("ls "+zpath+" 2> /dev/null") ; /dev/null suppresses all standard error output + if (vn.eq."PRECT") then ; special section for precip, as might need to do PRECC+PRECL + b = addfile(tfiles(0),"r") ; check for PRECC + if (isfilevar(b,"PRECC").or.isfilevar(b,"PRECL")) then ; PRECC/PRECL section + fils_precc = str_match(tfiles,"PRECC") + fils_precl = str_match(tfiles,"PRECL") + if (any(ismissing(fils_precc)).or.any(ismissing(fils_precl))) then + print("Fatal: Need both PRECC and PRECL file(s), creating blank array") + print(fils_precc) + print(fils_precl) + arr = create_empty_array(yearS,yearE,1,12,"time_lat_lon") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. + smdata = 1 + emdata = 12 + break + end if + c = addfiles(fils_precc,"r") + arr = c[:]->PRECC + c2 = addfiles(fils_precl,"r") + arr = (/ arr+c2[:]->PRECL /) + arr@long_name = "Large-scale (stable) + convective precipitation rate (liq + ice)" + delete([/c2,fils_precc,fils_precl/]) + else ; pr, ppt, PPT, PRECT multiple/single file read-in here.. + c = addfiles(tfiles,"r") + do ii=0,dimsizes(vname)-1 + if (isfilevar(c[0],vname(ii))) then + arr = c[:]->$vname(ii)$ + break + end if + end do + end if + delete(b) + else + c = addfiles(tfiles,"r") + do ii=0,dimsizes(vname)-1 + if (isfilevar(c[0],vname(ii))) then + arr = c[:]->$vname(ii)$ + break + end if + end do + end if + nfil = dimsizes(tfiles) + cpathS = stringtochar(tfiles(0)) ; this section will work for PRECC/PRECL, as it will read the first + cpathE = stringtochar(tfiles(nfil-1)) ; PRECC file and the last PRECL file. + ncharS = dimsizes(cpathS) + ncharE = dimsizes(cpathE) + sydata = stringtointeger(charactertostring(cpathS(ncharS-17:ncharS-14))) + smdata = stringtointeger(charactertostring(cpathS(ncharS-13:ncharS-12))) + eydata = stringtointeger(charactertostring(cpathE(ncharE-10:ncharE-7))) + emdata = stringtointeger(charactertostring(cpathE(ncharE-6:ncharE-5))) + delete([/cpathS,cpathE,ncharS,ncharE,nfil/]) +; delete(c) + else + c = addfile(zpath,"r") + do i=0,dimsizes(vname)-1 + if (isfilevar(c,vname(i))) then + arr = c->$vname(i)$ + break + end if + end do + cpath = stringtochar(zpath) + nchar = dimsizes(cpath) + sydata = stringtointeger(charactertostring(cpath(nchar-17:nchar-14))) + smdata = stringtointeger(charactertostring(cpath(nchar-13:nchar-12))) + eydata = stringtointeger(charactertostring(cpath(nchar-10:nchar-7))) + emdata = stringtointeger(charactertostring(cpath(nchar-6:nchar-5))) + delete([/cpath,nchar/]) +; delete(c) + end if + delete([/ta,cpath0/]) + end if + + if (isvar("arr").eq.False) then + print("Variable ("+vn+") not found. Examine input file "+zpath+". 
Creating empty array and continuing") + arr = create_empty_array(yearS,yearE,1,12,"time_lat_lon") + end if + + if (isshort(arr)) then + arrT = short2flt(arr) + delete(arr) + arr = arrT + delete(arrT) + end if + + if (.not.isatt(arr,"_FillValue")) then ; assign _FillValue if one is not present + if (isatt(arr,"missing_value")) then + arr@_FillValue = arr@missing_value + else + arr@_FillValue = default_fillvalue(typeof(arr)) + end if + end if + + dimz = dimsizes(arr) + if (any(dimz.eq.1)) then + arrT = rm_single_dims(arr) + delete(arr) + arr = arrT + delete(arrT) + end if + if (dimsizes(dimz).le.2) then + print("Possible curvilinear (or unstructured) grid detected. The CVDP cannot analyze curvilinear data. Please regrid to a rectilinear grid for inclusion in CVDP comparisons.") + print("Input file: "+zpath) + print("Setting array to all missing") + delete(arr) + arr = create_empty_array(yearS,yearE,smdata,emdata,"time_lat_lon") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. + smdata = 1 + emdata = 12 + end if + delete(dimz) + + arr!0 = "time" + arr!1 = "lat" + arr!2 = "lon" + + if (isatt(arr,"valid_range")) then ; check to make sure data is in valid range. Reset to stay within the valid range if needed. +; print("Data outside valid_range in "+zpath+", resetting data to be within valid_range") + arr = where(arr.lt.arr@valid_range(0),arr@valid_range(0),arr) + arr = where(arr.gt.arr@valid_range(1),arr@valid_range(1),arr) + end if + + if (any(abs(arr).ge.1.e20)) then ; check for inf values or values way out of range, reset to _FillValue. + print("Values greater than 1.e20 or less than -1.e20 detected in "+zpath+", resetting to _FillValue") + arr = where(abs(arr).ge.1.e20,arr@_FillValue,arr) + end if + +; if (.not.iscoord(arr,"lat")) then +; print("The data might have 2D latitudes. The functions.ncl coding is not set up to handle this, exiting from data_read_in.ncl") +; print("zpath") +; exit +; end if +; if (.not.iscoord(arr,"lon")) then +; print("The data might have 2D longitudes. The functions.ncl coding is not set up to handle this, exiting from data_read_in.ncl") +; print("zpath") +; exit +; end if + + if (yearS.lt.sydata.or.yearE.gt.eydata) then + print("Requested "+yearS+"-"+yearE+" time span is outside the input file "+zpath+" time span of "+sydata+"-"+eydata+"") + print("Setting array to all missing") + delete(arr) + arr = create_empty_array(yearS,yearE,smdata,emdata,"time_lat_lon") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. + smdata = 1 + emdata = 12 + else + timeT = yyyymm_time(sydata, eydata, "integer") + time = timeT({sydata*100+smdata:eydata*100+emdata}) + if (iscoord(arr,"time")) then + delete(arr&time) + end if + dimz = dimsizes(arr) + if (dimz(0).eq.dimsizes(time)) then + arr&time = time + else + print("Possible mismatch detected between time specified in file name and file variables, setting array to missing") + print("File = "+zpath) + print("Read from file name: "+min(time)+"-"+max(time)) + delete(arr) + arr = create_empty_array(yearS,yearE,smdata,emdata,"time_lat_lon") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. 
+ smdata = 1 + emdata = 12 + end if + delete(dimz) + delete([/time,timeT/]) + end if + delete([/sydata,smdata,eydata,emdata/]) + +; printVarSummary(arr) +; printVarSummary(arr({sy*100+1:ey*100+12},:,:)) + if (arr&lat(0).ge.0) then + farr = arr({yearS*100+1:yearE*100+12},::-1,:) ; flip the latitudes + else + farr = arr({yearS*100+1:yearE*100+12},:,:) + end if +; printVarSummary(farr) + delete(arr) + + mocheck = (/(yearS*100+1)-min(farr&time),(yearE*100+12) - max(farr&time)/) + if (any(mocheck.ne.0)) then ; previously: if (mod(dimsizes(farr&time),12).ne.0) then + if (mocheck(0).ne.0) then + print("First requested year is incomplete") + end if + if (mocheck(1).ne.0) then + print("Last requested year is incomplete") + end if + print("Incomplete data year(s) requested for file "+zpath+", printing out time and creating blank array") + print("Time requested: "+yearS+"-"+yearE) + print(farr&time) + delete(farr) + farr = create_empty_array(yearS,yearE,1,12,"time_lat_lon") + end if + delete(mocheck) + + if (farr&lon(0).lt.0) then + farr = lonFlip(farr) ; lon flip + end if + if (min(farr&lon).lt.0.or.max(farr&lon).gt.360) then + print(farr&lon) + print("path = "+zpath) + print("Fatal: Longitudes not in expected 0-360E range, creating blank array") + delete(farr) + farr = create_empty_array(yearS,yearE,1,12,"time_lat_lon") + end if + + if (vn.eq."TREFHT".or.vn.eq."TS") then ; units check + if (farr@units.eq."K".or.farr@units.eq."Kelvin".or.farr@units.eq."deg_k".or.farr@units.eq."deg_K") then + if (max(farr).ge.100) then ; data sets can be anomalies with units of K, so check for range before subtracting + farr = farr-273.15 + end if + farr@units = "C" + end if + if (farr@units.eq."degrees_C".or.farr@units.eq."degrees C".or.farr@units.eq."degree_C".or.farr@units.eq."degree C") then + farr@units = "C" + end if + end if + if (vn.eq."PSL") then + if (farr@units.eq."Pa".or.farr@units.eq."Pascals".or.farr@units.eq."Pascal") then + farr = farr/100. + farr@units = "hPa" + end if + end if + if (vn.eq."PRECT") then ; convert (if necessary) to mm/day + if (farr@units.eq."m/s".or.farr@units.eq."m s-1") then + farr = farr*86400000. + end if + if (farr@units.eq."kg m-2 s-1".or.farr@units.eq."kg/m2/s".or.farr@units.eq."kg/m^2/s".or.farr@units.eq."kg/(s*m2)".or.farr@units.eq."mm/s") then + farr = farr*86400. + end if + if (farr@units.eq."m".or.farr@units.eq."m/month".or.farr@units.eq."cm".or.farr@units.eq."cm/month".or.farr@units.eq."mm".or.farr@units.eq."mm/month") then + yr = toint(farr&time)/100 + mo = toint(farr&time - (yr*100)) + days = days_in_month(yr,mo) + do gg = 0,dimsizes(farr&time)-1 + farr(gg,:,:) = (/ farr(gg,:,:) / days(gg) /) + end do + if (farr@units.eq."cm".or.farr@units.eq."cm/month") then + farr = farr*10. ; convert from cm/day to mm/day + end if + if (farr@units.eq."m".or.farr@units.eq."m/month") then + farr = farr*1000. ; convert from m/day to mm/day + end if + end if + if (farr@units.eq."m/day".or.farr@units.eq."m day-1") then + farr = farr*1000. + end if + farr@units = "mm/day" + end if + if (vn.eq."SNOWDP") then + if (.not.isatt(farr,"is_all_missing")) then + if (farr@units.ne."m".and.farr@units.ne."meters") then + print("Warning: SNOWDP/snd units may not be in meters. 
listed units = "+farr@units) + end if + end if + end if + + date = farr&time ; switch time to be CF-conforming + delete(farr&time) + yyyy = date/100 + mm = date-(yyyy*100) + days = (days_in_month(yyyy,mm))/2 + hms = days + hms = 0 ; hours, minutes, seconds all the same (=0) + time = cd_inv_calendar(yyyy,mm,days,hms,hms,hms,"months since "+min(yyyy)+"-01-15 00:00:00",0) + time@long_name = "Time" + time@standard_name = "time" + time@actual_range = (/min(time),max(time)/) + time!0 = "time" + time&time = time + farr&time = time + delete([/time,yyyy,mm,days,hms,date/]) + return(farr) +end +;================================================================================================= +; read in MOC ocean data from given files +; +; assign time coordinate variables, check for issues with the array, assign _FillValue (if needed) +; assign dimension names (for ease-of-use), check and modify units +; +undef("data_read_in_ocean_MOC") +function data_read_in_ocean_MOC(zpath:string,vn:string,yearS:integer,yearE:integer) +; path for MOC file(s), variable name, start year, and end year are read in. +local zpath,vn,cpath0,ta,tfiles,c,arr,farr,yearS,yearE,mocheck,dimC,lev +begin + if (vn.eq."MOC") then + vname = (/"MOC","msftmyz","stfmmc"/) + end if + + if (ismissing(zpath) ) then + print("File missing, creating blank array of data. View "+vn+" namelist for details.") + arr = create_empty_array(yearS,yearE,1,12,"time_lev_lat") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. + smdata = 1 + emdata = 12 + else + cpath0 = stringtochar(zpath) + + ta = stringtochar("*") + if (any(cpath0.eq.ta(0)).or.any(cpath0.eq."{")) then ; check for "*" and "{" denoting multiple files + tfiles = systemfunc("ls "+zpath+" 2> /dev/null") ; /dev/null suppresses all standard error output + c = addfiles(tfiles,"r") + do ii=0,dimsizes(vname)-1 + if (isfilevar(c[0],vname(ii))) then + dimC = filevardimsizes(c[0],"MOC") + if (vname(ii).eq."MOC") then ; CCSM/CESM file + if (dimC(2).ge.2) then + arr = dim_sum_n_Wrap(c[:]->$vname(ii)$(:,1,:,:,:),1) ; select Atl+Med+Labrador+GIN sea+Arctic+Hudson Bay transport region and sum over moc_comp + else + arr = c[:]->$vname(ii)$(:,1,0,:,:) ; select Atl+Med+Labrador+GIN sea+Arctic+Hudson Bay transport region and the only moc_comp dimension + end if + else ; CMIP file + arr = c[:]->$vname(ii)$(:,0,:,:) ; CMIP file: 0th basin/region = atlantic_ocean (CMIP3) or atlantic_arctic_ocean (CMIP5) + end if + delete(dimC) + break + end if + end do + nfil = dimsizes(tfiles) + cpathS = stringtochar(tfiles(0)) + cpathE = stringtochar(tfiles(nfil-1)) + ncharS = dimsizes(cpathS) + ncharE = dimsizes(cpathE) + sydata = stringtointeger(charactertostring(cpathS(ncharS-17:ncharS-14))) + smdata = stringtointeger(charactertostring(cpathS(ncharS-13:ncharS-12))) + eydata = stringtointeger(charactertostring(cpathE(ncharE-10:ncharE-7))) + emdata = stringtointeger(charactertostring(cpathE(ncharE-6:ncharE-5))) + delete([/cpathS,cpathE,ncharS,ncharE,nfil/]) +; delete(c) + else + c = addfile(zpath,"r") + do i=0,dimsizes(vname)-1 + if (isfilevar(c,vname(i))) then + dimC = filevardimsizes(c,"MOC") + if (vname(i).eq."MOC") then ; CCSM/CESM file + if (dimC(2).ge.2) then + arr = dim_sum_n_Wrap(c->$vname(i)$(:,1,:,:,:),1) ; select Atl+Med+Labrador+GIN sea+Arctic+Hudson Bay transport region and sum over moc_comp + else + arr = c->$vname(i)$(:,1,0,:,:) ; select Atl+Med+Labrador+GIN sea+Arctic+Hudson Bay transport region + end if 
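+;            Note: CESM-style MOC output is dimensioned
+;            (time, transport_reg, moc_comp, lev, lat); transport_reg
+;            index 1 is the Atlantic+Mediterranean+Labrador+GIN+Arctic+
+;            Hudson Bay region, and moc_comp is summed over when more than
+;            one transport component is present. The CMIP branch below
+;            instead selects basin index 0.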
+ else ; CMIP file + arr = c->$vname(i)$(:,0,:,:) ; CMIP file: 0th basin/region = atlantic_ocean (CMIP3) or atlantic_arctic_ocean (CMIP5) + end if + delete(dimC) + break + end if + end do + cpath = stringtochar(zpath) + nchar = dimsizes(cpath) + sydata = stringtointeger(charactertostring(cpath(nchar-17:nchar-14))) + smdata = stringtointeger(charactertostring(cpath(nchar-13:nchar-12))) + eydata = stringtointeger(charactertostring(cpath(nchar-10:nchar-7))) + emdata = stringtointeger(charactertostring(cpath(nchar-6:nchar-5))) + delete([/cpath,nchar/]) +; delete(c) + end if + delete([/ta,cpath0/]) + end if + + if (isvar("arr").eq.False) then + print("Variable ("+vn+") not found. Examine input file "+zpath+". Creating empty array and continuing") + arr = create_empty_array(yearS,yearE,1,12,"time_lev_lat") + end if + if (isshort(arr)) then + arrT = short2flt(arr) + delete(arr) + arr = arrT + delete(arrT) + end if + if (.not.isatt(arr,"_FillValue")) then ; assign _FillValue if one is not present + if (isatt(arr,"missing_value")) then + arr@_FillValue = arr@missing_value + else + arr@_FillValue = default_fillvalue(typeof(arr)) + end if + end if + arr!0 = "time" + arr!1 = "lev" + arr!2 = "lat" + + if (isatt(arr,"coordinates")) then + delete(arr@coordinates) + end if + + if (arr&lev@units.eq."centimeters".or.arr&lev@units.eq."cm") then + lev = arr&lev + lev@units = "m" + lev = lev/100. + lev&lev = lev + delete(arr&lev) + arr&lev = lev +; print("Level converted to m from cm") +; printVarSummary(lev) +; print(lev) + delete(lev) + end if + + if (arr&lev(2).lt.0) then ; check for negative levels + lev = arr&lev + lev = lev*-1. + if (any(lev.lt.0)) then + print("Error detected in MOC level sign conversion") + print(lev) + end if + lev@positive = "down" + lev&lev = lev + delete(arr&lev) + arr&lev = lev +; print("Levels converted from negative downwards to positive downwards") +; printVarSummary(lev) +; print(lev) + delete(lev) + end if + + + if (isatt(arr,"valid_range")) then ; check to make sure data is in valid range. Reset to stay within the valid range if needed. +; print("Data outside valid_range in "+zpath+", resetting data to be within valid_range") + arr = where(arr.lt.arr@valid_range(0),arr@valid_range(0),arr) + arr = where(arr.gt.arr@valid_range(1),arr@valid_range(1),arr) + end if + + if (any(abs(arr).ge.1.e20)) then ; check for inf values or values way out of range, reset to _FillValue. + print("Values greater than 1.e20 or less than -1.e20 detected in "+zpath+", resetting to _FillValue") + arr = where(abs(arr).ge.1.e20,arr@_FillValue,arr) + end if + + if (yearS.lt.sydata.or.yearE.gt.eydata) then + print("Requested "+yearS+"-"+yearE+" time span is outside the input file "+zpath+" time span of "+sydata+"-"+eydata+"") + print("Setting array to all missing") + delete(arr) + arr = create_empty_array(yearS,yearE,smdata,emdata,"time_lev_lat") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. 
+ smdata = 1 + emdata = 12 + else + timeT = yyyymm_time(sydata, eydata, "integer") + time = timeT({sydata*100+smdata:eydata*100+emdata}) + if (iscoord(arr,"time")) then + delete(arr&time) + end if + dimz = dimsizes(arr) + if (dimz(0).eq.dimsizes(time)) then + arr&time = time + else + print("Possible mismatch detected between time specified in file name and file variables, setting array to missing") + print("File = "+zpath) + print("Read from file name: "+min(time)+"-"+max(time)) + delete(arr) + arr = create_empty_array(yearS,yearE,smdata,emdata,"time_lev_lat") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. + smdata = 1 + emdata = 12 + end if + delete(dimz) + delete([/time,timeT/]) + end if + delete([/sydata,smdata,eydata,emdata/]) + +; printVarSummary(arr) +; printVarSummary(arr({sy*100+1:ey*100+12},:,:)) + if (arr&lat(0).ge.0) then + farr = arr({yearS*100+1:yearE*100+12},:,::-1) ; flip the latitudes + else + farr = arr({yearS*100+1:yearE*100+12},:,:) + end if +; printVarSummary(farr) + delete(arr) + + mocheck = (/(yearS*100+1)-min(farr&time),(yearE*100+12) - max(farr&time)/) + if (any(mocheck.ne.0)) then ; previously: if (mod(dimsizes(farr&time),12).ne.0) then + if (mocheck(0).ne.0) then + print("First requested year is incomplete") + end if + if (mocheck(1).ne.0) then + print("Last requested year is incomplete") + end if + print("Incomplete data year(s) requested for file "+zpath+", printing out time and creating blank array") + print("Time requested: "+yearS+"-"+yearE) + print(farr&time) + delete(farr) + farr = create_empty_array(yearS,yearE,1,12,"time_lev_lat") + end if + delete(mocheck) + + ; check units for MOC array. CMIP5 = "kg s-1" CMIP3 = "m3 s-1" CCSM3 = "Sverdrups" CCSM4 = "Sverdrups" + + if (farr@units.eq."Sverdrups") then + farr@units = "Sv" + end if + if (farr@units.eq."kg s-1".or.farr@units.eq."KG S-1".or.farr@units.eq."kg/s".or.farr@units.eq."KG/S") then ; 1 Sv = 1.e9 kg/s + farr = (/ farr/1.e9 /) + farr@units = "Sv" + end if + if (farr@units.eq."m3 s-1".or.farr@units.eq."M3 S-1".or.farr@units.eq."m3/s".or.farr@units.eq."M3/S") then ; 1 Sv = 1.e6 m3/s + farr = (/ farr/1.e6 /) + farr@units = "Sv" + end if + +; printVarSummary(farr) + + date = farr&time ; switch time to be CF-conforming + delete(farr&time) + yyyy = date/100 + mm = date-(yyyy*100) + days = (days_in_month(yyyy,mm))/2 + hms = days + hms = 0 ; hours, minutes, seconds all the same (=0) + time = cd_inv_calendar(yyyy,mm,days,hms,hms,hms,"months since "+min(yyyy)+"-01-15 00:00:00",0) + time@long_name = "Time" + time@standard_name = "time" + time!0 = "time" + time&time = time + farr&time = time + delete([/time,yyyy,mm,days,hms,date/]) + return(farr) +end +;================================================================================================= +; read in ice data from given files +; +; assign time coordinate variables, check for issues with the array, assign _FillValue (if needed) +; assign dimension names (for ease-of-use), check and modify units +; +undef("data_read_in_ice") +function data_read_in_ice(zpath:string,vn:string,yearS:integer,yearE:integer) +; path for ice file(s), variable name, start year, and end year are read in. 
+local zpath,vn,cpath0,ta,tfiles,c,arr,farr,yearS,yearE,mocheck,dimC,lev +begin + if (vn.eq."aice_nh") then + vname = (/"aice_nh","aice","sic","SIC","CN","ice","icec"/) + end if + if (vn.eq."aice_sh") then + vname = (/"aice_sh","aice","sic","SIC","CN","ice","icec"/) + end if + + if (ismissing(zpath) ) then + print("File missing, creating blank array of data. View "+vn+" namelist for details.") + arr = create_empty_array(yearS,yearE,1,12,"time_lat_lon") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. + smdata = 1 + emdata = 12 + else + cpath0 = stringtochar(zpath) + + ta = stringtochar("*") + tfiles = systemfunc("ls "+zpath+" 2> /dev/null") ; /dev/null suppresses all standard error output + c = addfiles(tfiles,"r") + do ii=0,dimsizes(vname)-1 + if (isfilevar(c[0],vname(ii))) then + if (vname(ii).eq."aice_nh".or.vname(ii).eq."aice_sh".or.vname(ii).eq."aice") then ; CCSM/CESM file + arr = c[:]->$vname(ii)$ + if (isatt(arr,"coordinates")) then + strarr = str_split(arr@coordinates," ") + if (any(strarr.eq."TLON")) then ; CESM longitude 2D coordinate + dimZ = dimsizes(c[0]->TLON) + if (dimsizes(dimZ).eq.3) then + arr@lon2d = c[0]->TLON(0,:,:) + else + arr@lon2d = c[0]->TLON + end if + delete(dimZ) + end if + if (any(strarr.eq."TLAT")) then ; CESM latitude 2D coordinate + dimZ = dimsizes(c[0]->TLAT) + if (dimsizes(dimZ).eq.3) then + arr@lat2d = c[0]->TLAT(0,:,:) + else + arr@lat2d = c[0]->TLAT + end if + delete(dimZ) + end if + delete(strarr) +; else +; print("2D coordinates for ice data are not detected") + end if + if (isatt(arr,"cell_measures").and.isfilevar(c[0],"tarea")) then ; if an attribute named cell_measures exists, and tarea is on file(0) + if (arr@cell_measures.eq."area: tarea") then + arr@area = totype(c[0]->tarea,typeof(arr)) ; in units of m^2 + end if + end if + else ; CMIP or other file + if (vname(ii).eq."CN") then ; GFDL file + arrT = c[:]->$vname(ii)$ + arr = dim_sum_n_Wrap(arrT,1) + delete(arrT) + arr = where(arr.ge.1,1,arr) ; optional + else + arr = c[:]->$vname(ii)$ + end if + if (isatt(arr,"coordinates")) then + strarr = str_split(arr@coordinates," ") + if (any(strarr.eq."lon")) then ; IPCC longitude 2D coordinate + arr@lon2d = c[0]->lon + end if + if (any(strarr.eq."lat")) then ; IPCC latitude 2D coordinate + arr@lat2d = c[0]->lat + end if + if (any(strarr.eq."longitude")) then ; NSIDC longitude 2D coordinate + arr@lon2d = c[0]->longitude + end if + if (any(strarr.eq."latitude")) then ; NSIDC latitude 2D coordinate + arr@lat2d = c[0]->latitude + end if + delete(strarr) +; else +; print("2D coordinates for ice data are not detected") + end if + dir_name = str_split(tfiles(0),"/") + if (dimsizes(dir_name).ge.8) then + dir_name_new = "/"+str_join(dir_name(:4),"/")+"/fx/areacello/"+dir_name(7)+"/r0i0p0/*.nc" + ufile = systemfunc("ls "+dir_name_new+" 2> /dev/null") ; /dev/null suppresses all standard error output + delete(dir_name_new) + else + ufile = new(1,string) + end if + if (.not.ismissing(ufile)) then + d = addfile(ufile,"r") + arr@area = totype(d->areacello,typeof(arr)) + dimQ = dimsizes(arr) + if (dimsizes(ndtooned(arr@area)).ne.(dimQ(1)*dimQ(2))) then ; the dimension sizes of areacello + delete(arr@area) ; do not match sizes of area j,i dimensions + end if + delete(dimQ) + end if + + if (isfilevar(c[0],"AREA")) then ; check to see if there is an AREA array present and if so use it + areaT = c[0]->AREA + if (areaT@units.eq."km^2") then + area_unit_km2_to_m2 = True 
+ areaT = areaT*1000000. + areaT@units = "m^2" + end if + areaT@_FillValue = 1.e20 + arr@area = totype(areaT,typeof(arr)) + if (isatt(areaT,"pole_hole_area")) then ; format of ystart, yend, value, ystart, yend, value + if (isvar("area_unit_km2_to_m2")) then + extra_area = tofloat(areaT@pole_hole_area) + extra_area(2::3) = extra_area(2::3)*1000000. ; convert pole hole area from km^2->m^2 + arr@pole_hole_area = totype(extra_area,typeof(arr)) + delete(extra_area) + else + arr@pole_hole_area = totype(areaT@pole_hole_area,typeof(arr)) + end if + end if + delete(areaT) + end if + delete([/dir_name,ufile/]) + end if + break + end if + end do + nfil = dimsizes(tfiles) + cpathS = stringtochar(tfiles(0)) + cpathE = stringtochar(tfiles(nfil-1)) + ncharS = dimsizes(cpathS) + ncharE = dimsizes(cpathE) + sydata = stringtointeger(charactertostring(cpathS(ncharS-17:ncharS-14))) + smdata = stringtointeger(charactertostring(cpathS(ncharS-13:ncharS-12))) + eydata = stringtointeger(charactertostring(cpathE(ncharE-10:ncharE-7))) + emdata = stringtointeger(charactertostring(cpathE(ncharE-6:ncharE-5))) + delete([/cpathS,cpathE,ncharS,ncharE,nfil,ta,cpath0/]) + end if + + if (isvar("arr").eq.False) then + print("Variable ("+vn+") not found. Examine input file "+zpath+". Creating empty array and continuing") + arr = create_empty_array(yearS,yearE,1,12,"time_lat_lon") + end if + + if (.not.isatt(arr,"area")) then ; calculate grid cell areas manually (not implemented) +; print("Grid cell areas not found.") + end if + + if (isshort(arr)) then + arrT = short2flt(arr) + delete(arr) + arr = arrT + delete(arrT) + end if + if (.not.isatt(arr,"_FillValue")) then ; assign _FillValue if one is not present + if (isatt(arr,"missing_value")) then + arr@_FillValue = arr@missing_value + else + arr@_FillValue = default_fillvalue(typeof(arr)) + end if + end if + arr!0 = "time" + arr!1 = "j" + arr!2 = "i" + + if (.not.isatt(arr,"lat2d")) then ; if latitudes are 1D, make sure latitudes run from south to north + + if (arr&j(0).ge.0) then ; calculate area of 1D lat/lon arrays + tarr = arr(:,::-1,:) + delete(arr) + arr = tarr + delete(tarr) + end if + + if (min(arr&i).ge.0.and.max(arr&i).le.360) then + fctr = 111120 ; how many meters per degree of latitude (approximate) + pi=4.*atan(1.0) + rad=(pi/180.) + lat = tofloat(arr&j) + dimlat = dimsizes(lat) + latr = new(dimlat,typeof(lat)) + do gg = 0,dimlat-1 + if (gg.eq.0) then + latr(gg) = abs(-90-(lat(1)+lat(0))/2.) + end if + if (gg.ge.1.and.gg.lt.dimlat-1) then + latr(gg) = abs((lat(gg-1)+lat(gg))/2. - (lat(gg)+lat(gg+1))/2.) + end if + if (gg.eq.dimlat-1) then + latr(gg) = abs(90 - (lat(dimlat-2)+lat(dimlat-1))/2.) + end if + end do + lon = tofloat(arr&i) + dimlon = dimsizes(lon) + lonr = new(dimlon,typeof(lon)) + do gg = 0,dimlon-1 + if (gg.eq.0) then + lonr(gg) = abs( (lon(1)+lon(0))/2. - (((lon(dimlon-1)+(lon(0)+360))/2.)-360) ) + end if + if (gg.ge.1.and.gg.lt.dimlon-1) then + lonr(gg) = abs((lon(gg)+lon(gg+1))/2. - (lon(gg-1)+lon(gg))/2.) + end if + if (gg.eq.dimlon-1) then + lonr(gg) = abs(((lon(dimlon-1)+(lon(0)+360))/2.) - (lon(gg-1)+lon(gg))/2.) 
+ end if + end do + area = tofloat(arr(0,:,:)) + area = area@_FillValue + area@long_name = "Area of grid box" + area@units = "m2" + +; printVarSummary(area) + do ff = 0,dimlat-1 + do gg = 0,dimlon-1 + area(ff,gg) = (/ (fctr*latr(ff))*(cos(rad*lat(ff))*lonr(gg)*fctr) /) ; cosine weighting + end do + end do +; print("Total area = "+sum(area)) + arr@area = totype(area,typeof(arr)) + delete([/lat,lon,latr,lonr,area,fctr,pi,rad,dimlat,dimlon/]) + end if + end if + + if (.not.isatt(arr,"is_all_missing")) then ; erase data in hemisphere not specified via vn + if (isatt(arr,"lat2d")) then + tlat2 = conform(arr,arr@lat2d,(/1,2/)) + tlon2 = conform(arr,arr@lon2d,(/1,2/)) + if (vn.eq."aice_nh") then + arr = where(tlat2.ge.0,arr,arr@_FillValue) + end if + if (vn.eq."aice_sh") then + arr = where(tlat2.lt.0,arr,arr@_FillValue) + end if + delete([/tlat2,tlon2/]) + else + if (vn.eq."aice_nh") then + arr(:,{:-1.},:) = arr@_FillValue + end if + if (vn.eq."aice_sh") then + arr(:,{0:},:) = arr@_FillValue + end if + end if + end if + + if (yearS.lt.sydata.or.yearE.gt.eydata) then + print("Requested "+yearS+"-"+yearE+" time span is outside the input file "+zpath+" time span of "+sydata+"-"+eydata+"") + print("Setting array to all missing") + delete(arr) + arr = create_empty_array(yearS,yearE,smdata,emdata,"time_lat_lon") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. + smdata = 1 + emdata = 12 + else + timeT = yyyymm_time(sydata, eydata, "integer") + time = timeT({sydata*100+smdata:eydata*100+emdata}) + if (iscoord(arr,"time")) then + delete(arr&time) + end if + dimz = dimsizes(arr) + if (dimz(0).eq.dimsizes(time)) then + arr&time = time + else + print("Possible mismatch detected between time specified in file name and file variables, setting array to missing") + print("File = "+zpath) + print("Read from file name: "+min(time)+"-"+max(time)) + delete(arr) + arr = create_empty_array(yearS,yearE,smdata,emdata,"time_lat_lon") + sydata = yearS ; assign these variables based on yearS/yearE provided in namelist. Doesn't matter + eydata = yearE ; as data array is totally missing.. + smdata = 1 + emdata = 12 + end if + delete(dimz) + delete([/time,timeT/]) + end if + delete([/sydata,smdata,eydata,emdata/]) + farr = arr({yearS*100+1:yearE*100+12},:,:) +; printVarSummary(farr) + delete(arr) + + mocheck = (/(yearS*100+1)-min(farr&time),(yearE*100+12) - max(farr&time)/) + if (any(mocheck.ne.0)) then ; previously: if (mod(dimsizes(farr&time),12).ne.0) then + if (mocheck(0).ne.0) then + print("First requested year is incomplete") + end if + if (mocheck(1).ne.0) then + print("Last requested year is incomplete") + end if + print("Incomplete data year(s) requested for file "+zpath+", printing out time and creating blank array") + print("Time requested: "+yearS+"-"+yearE) + print("From file: Times present from "+min(farr&time)+"-"+max(farr&time)) + delete(farr) + farr = create_empty_array(yearS,yearE,1,12,"time_lat_lon") + end if + delete(mocheck) + + if (farr@units.eq."0-1".or.farr@units.eq."1") then ; GFDL units, NSIDC units + farr = (/ farr*100. 
/)
+      farr@units = "%"
+   end if
+
+   date = farr&time   ; switch time to be CF-conforming
+   delete(farr&time)
+   yyyy = date/100
+   mm = date-(yyyy*100)
+   days = (days_in_month(yyyy,mm))/2
+   hms = days
+   hms = 0   ; hours, minutes, seconds all the same (=0)
+   time = cd_inv_calendar(yyyy,mm,days,hms,hms,hms,"months since "+min(yyyy)+"-01-15 00:00:00",0)
+   time@long_name = "Time"
+   time@standard_name = "time"
+   time!0 = "time"
+   time&time = time
+   farr&time = time
+   delete([/time,yyyy,mm,days,hms,date/])
+   return(farr)
+end
+;=================================================================================================
+; alters the formatting of the Y-axis
+;
+; not currently used
+;
+undef("y_axis_check")
+function y_axis_check(temparr:numeric,tempres:logical)
+local temparr,tempres,minval,maxval
+begin
+   minval = min(temparr)
+   maxval = max(temparr)
+   if (minval.gt.-1.and.minval.lt.0.and.maxval.lt.1.and.maxval.gt.0) then
+      tempres@tmYLFormat = "0@;*.2f"
+   else
+      tempres@tmYLFormat = "0@*+^sg"
+   end if
+   return(tempres)
+   delete([/tempres,temparr,minval,maxval/])
+end
+;=================================================================================================
+; Check that the user-specified climatological period is within the time range of the data
+;
+undef("check_custom_climo")
+procedure check_custom_climo(mn:string,startyear:numeric,endyear:numeric,climo_startyear:numeric,climo_endyear:numeric)
+local startyear,endyear,climo_startyear,climo_endyear,mn
+begin
+   do gg = 0,dimsizes(startyear)-1
+      if (climo_startyear.ge.0) then   ; exact years specified for climatological period
+         if (climo_startyear.ge.startyear(gg).and.climo_endyear.le.endyear(gg)) then
+         else
+            print("check_custom_climo: Warning! Beginning and/or ending of climatological period is outside time range of data.")
+            print("Dataset: "+mn+", years = "+startyear(gg)+":"+endyear(gg)+", set climatological period = "+climo_startyear+":"+climo_endyear)
+            print("The diagnostics package will proceed, but one or more dataset(s) will not have the full climatological period removed and/or the package may fail with the following message: fatal:NclOneDValGetRangeIndex: start coordinate index out of range.")
+         end if
+      else   ; relative years specified for climatological period
+         if ((endyear(gg)-startyear(gg)+1).lt.(climo_endyear-climo_startyear+1)) then
+            print("check_custom_climo: Warning! Beginning and/or ending of climatological period is outside time range of data.")
+            print("Dataset: "+mn+", years = "+startyear(gg)+":"+endyear(gg)+", set climatological period = "+(endyear(gg)+climo_startyear)+":"+(endyear(gg)+climo_endyear))
+            print("The diagnostics package will proceed, but one or more dataset(s) will not have the full climatological period removed and/or the package may fail with the following message: fatal:NclOneDValGetRangeIndex: start coordinate index out of range.")
+         end if
+         if (abs(climo_startyear).ge.(endyear(gg)-startyear(gg)+1)) then
+            print("check_custom_climo: Warning! Dataset: "+mn+", climatology start year "+(endyear(gg)+climo_startyear)+" is outside of analysis time period ("+startyear(gg)+":"+endyear(gg)+"), exiting script.")
+            exit
+         end if
+      end if
+   end do
+end
+;==================================================================================================
+; In version 6.2.1 the behavior of isfilepresent switched, where only files readable by NCL return
+; True. Previously, if a file (or directory) simply existed, isfilepresent would return True. A new
+; function was added in v6.2.1, fileexists, that acts like the previous version of isfilepresent
+; did. To compensate for this, check the NCL version number, and use isfilepresent/fileexists as
+; appropriate.
+;
+undef("isfilepresent2")
+function isfilepresent2(fdpres:string)
+local nclver, num0, num1, num2, ra
+begin
+   nclver = stringtochar(get_ncl_version())
+
+   num0 = toint(tostring(nclver(0)))
+   num1 = toint(tostring(nclver(2)))
+   num2 = toint(tostring(nclver(4)))
+   if (num0.le.5) then
+      ra = isfilepresent(fdpres)
+   end if
+   if (num0.eq.6) then
+      if (num1.le.1) then
+         ra = isfilepresent(fdpres)
+      end if
+      if (num1.eq.2) then
+         if (num2.eq.0) then
+            ra = isfilepresent(fdpres)
+         else
+            ra = fileexists(fdpres)
+         end if
+      end if
+      if (num1.ge.3) then
+         ra = fileexists(fdpres)
+      end if
+   end if
+   if (num0.ge.7) then
+      ra = fileexists(fdpres)
+   end if
+   return(ra)
+   delete([/nclver,num0,num1,num2,ra/])
+end
+;=================================================================================================
+;
+undef("table_link_setup")
+function table_link_setup(ipath:string,iname:string,ltxt:string)
+; image path and name, along with link text
+local ipath, iname, ltxt, otxt, quote
+begin
+   quote = str_get_dq()
+   if (isfilepresent2(ipath+iname)) then
+      otxt = "<a href="+quote+iname+quote+">"+ltxt+"</a>"
+   else
+      otxt = ltxt
+   end if
+   return(otxt)
+   delete([/ipath,iname,ltxt,otxt,quote/])
+end
+;=================================================================================================
+undef ("gsn_panel2")
+procedure gsn_panel2(wksp:graphic,plotp:graphic,lpl:numeric,panelres:logical)
+; checks to make sure at least one image is present in plot before paneling,
+; thereby eliminating this error message:
+; Error: gsn_panel: all of the plots passed to gsn_panel appear to be invalid
+
+local wksp, plotp, lpl, panelres
+begin
+   if (.not.all(ismissing(plotp))) then
+      gsn_panel(wksp,plotp,lpl,panelres)
+   end if
+end
+;=================================================================================================
+undef ("eofunc_north2")
+function eofunc_north2(eval[*]:numeric, N[1]:integer, prinfo[1]:logical)
+;
+; North, G.R. et al (1982): Sampling Errors in the Estimation of Empirical Orthogonal Functions.
+; Mon. Wea. Rev., 110, 699–706.
+; doi: http://dx.doi.org/10.1175/1520-0493(1982)110<0699:SEITEO>2.0.CO;2
+;
+; Usage after 'eofunc'. Here ntim is the number of time samples used:
+;    prinfo = True
+;    sig = eofunc_north2(eof@eval, ntim, prinfo)
+;
+; Copied directly from v6.3.0 contributed.ncl for use in the package regardless of NCL version.
+;
+local neval, dlam, low, high, sig, n
+begin
+   neval = dimsizes(eval)
+   if (neval.eq.1) then
+      print("eofunc_north: neval=1, no testing can be performed")
+      sig = True
+      sig@long_name = "EOF separation is not testable N=1"
+      sig@N = N
+      return(sig)
+   end if
+
+   dlam = eval * sqrt(2.0/N)   ; eq 24
+   low = eval-dlam
+   high = eval+dlam
+
+   sig = new(dimsizes(eval), logical)
+   sig = False   ; default is not significantly separated
+
+; first and last eigenvalues are special cases
+
+   if (eval(0).gt.high(1)) then
+      sig(0) = True
+   end if
+   if (eval(neval-1).lt.low(neval-2)) then
+      sig(neval-1) = True
+   end if
+
+; loop over other eigenvalues
+
+   if (N.gt.2) then
+      do n=1,neval-2
+         if (eval(n).lt.low(n-1) .and. 
eval(n).gt.high(n+1)) then + sig(n) = True + end if + end do + end if + + if (prinfo) then + print(dlam+" "+low+" "+eval+" "+high+" "+sig) + end if + + sig@long_name = "EOF separation" + sig@N = N + return(sig) +end +;================================================================================================= +; Standardize and set attributes for array. Remove NCL-added and superfluous attributes, +; set missing_value equal to _FillValue, provide options to set long_name, units, and comment_cvdp +; attributes. For last three inputs "" means leave as set and "delete" means delete the attribute. +; This function will be used immediately prior to an array being written to a netCDF file. +; +undef("set_varAtts") +function set_varAtts(zarr:numeric,loname:string,uni:string,com_cvdp:string) + +local zarr,loname,uni,com_cvdp +begin + if (isatt(zarr,"anomaly_op_ncl")) then + delete(zarr@anomaly_op_ncl) + end if + if (isatt(zarr,"average_op_ncl")) then + delete(zarr@average_op_ncl) + end if + if (isatt(zarr,"cell_measures")) then + delete(zarr@cell_measures) + end if + if (isatt(zarr,"cell_methods")) then + delete(zarr@cell_methods) + end if + if (isatt(zarr,"lonFlip")) then + delete(zarr@lonFlip) + end if + if (isatt(zarr,"runave_op_ncl")) then + delete(zarr@runave_op_ncl) + end if + if (isatt(zarr,"stddev_op_ncl")) then + delete(zarr@stddev_op_ncl) + end if + if (isatt(zarr,"sum_op_ncl")) then + delete(zarr@sum_op_ncl) + end if + if (isatt(zarr,"time")) then + delete(zarr@time) + end if + if (isatt(zarr,"wgt_areaave_op_ncl")) then + delete(zarr@wgt_areaave_op_ncl) + end if + + if (isatt(zarr,"_FillValue")) then ; set missing_value = _FillValue + zarr@missing_value = zarr@_FillValue + end if + + if (loname.eq."delete") then + delete(zarr@long_name) + else + if (loname.ne."") then ; "" = leave as is + zarr@long_name = loname + end if + end if + if (uni.eq."delete") then + delete(zarr@units) + else + if (uni.ne."") then ; "" = leave as is + zarr@units = uni + end if + end if + if (com_cvdp.eq."delete") then + delete(zarr@comment_cvdp) + else + if (com_cvdp.ne."") then ; "" = leave as is + zarr@comment_cvdp = com_cvdp + end if + end if + + return(zarr) +end + diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/ipo.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/ipo.ncl new file mode 100644 index 0000000000..f22e911c58 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/ipo.ncl @@ -0,0 +1,716 @@ +; Calculates the IPO pattern, timeseries, and spectra. 
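+;
+; Method (a summary of the code below, for orientation): monthly SST anomalies
+; are low-pass filtered with a 217-weight Lanczos filter (~13-year cutoff), the
+; 60S-70N area mean is removed at each timestep, and the leading EOF computed
+; over 40S-60N / 110E-290E defines the IPO pattern; tas and pr anomalies are
+; then regressed onto the normalized principal component timeseries.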
+; +; Variables used: ts +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: ipo.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_ts") + na = asciiread("namelist_byvar/namelist_ts",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) +;---------TAS Regressions coding------------------------------------------------- + nsim_tas = numAsciiRow("namelist_byvar/namelist_trefht") + na_tas = asciiread("namelist_byvar/namelist_trefht",(/nsim_tas/),"string") + names_tas = new(nsim_tas,"string") + paths_tas = new(nsim_tas,"string") + syear_tas = new(nsim_tas,"integer",-999) + eyear_tas = new(nsim_tas,"integer",-999) + + do gg = 0,nsim_tas-1 + names_tas(gg) = str_strip(str_get_field(na_tas(gg),1,delim)) + paths_tas(gg) = str_strip(str_get_field(na_tas(gg),2,delim)) + syear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),3,delim))) + eyear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),4,delim))) + end do + delete(na_tas) + nyr_tas = eyear_tas-syear_tas+1 +;---------PR Regressions coding------------------------------------------------- + nsim_pr = numAsciiRow("namelist_byvar/namelist_prect") + na_pr = asciiread("namelist_byvar/namelist_prect",(/nsim_pr/),"string") + names_pr = new(nsim_pr,"string") + paths_pr = new(nsim_pr,"string") + syear_pr = new(nsim_pr,"integer",-999) + eyear_pr = new(nsim_pr,"integer",-999) + + do gg = 0,nsim_pr-1 + names_pr(gg) = str_strip(str_get_field(na_pr(gg),1,delim)) + paths_pr(gg) = str_strip(str_get_field(na_pr(gg),2,delim)) + syear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),3,delim))) + eyear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),4,delim))) + end do + delete(na_pr) + nyr_pr = eyear_pr-syear_pr+1 +;------------------------------------------------------------------------------------------------- + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks = gsn_open_wks(wks_type,getenv("OUTDIR")+"ipo") + wks4 = gsn_open_wks(wks_type,getenv("OUTDIR")+"ipo.prreg") + wks2 = gsn_open_wks(wks_type,getenv("OUTDIR")+"ipo.powspec") + wks3 = gsn_open_wks(wks_type,getenv("OUTDIR")+"ipo.timeseries") + + if (COLORMAP.eq."0") then + gsn_define_colormap(wks,"ncl_default") + gsn_define_colormap(wks2,"cb_9step") + gsn_define_colormap(wks3,"ncl_default") + gsn_define_colormap(wks4,"MPL_BrBG") + end if + if (COLORMAP.eq."1") then + gsn_define_colormap(wks,"BlueDarkRed18") + gsn_define_colormap(wks2,"cb_9step") + 
gsn_define_colormap(wks3,"ncl_default")
+      gsn_define_colormap(wks4,"BrownBlue12")
+   end if
+   map = new(nsim,"graphic")
+   map_sst = new(nsim,"graphic")
+   map_tasreg = new(nsim,"graphic")
+   map_prreg = new(nsim,"graphic")
+   pspec = new(nsim,"graphic")
+   xyplot = new(nsim,"graphic")
+   xyplot2 = new(nsim,"graphic")
+   if (isfilepresent2("obs_ts")) then
+      pspec_obs = new(nsim,"graphic")
+   end if
+
+   fca = 1./157.
+   fcb = 0.
+   ihp = 0
+   nsigma = 1
+   nwgt = 217
+
+   wgt = new(nwgt,float)
+   wgt = filwgts_lanczos(nwgt,ihp,fca,fcb,nsigma)   ; create low pass filter
+
+
+   tasreg_frame = 1   ; *reg_frame = flag to create regressions .ps/.png files. Created/used instead of *reg_plot_flag
+                      ; so that .ps/.png files are still created even when {tas,pr} regressions are not created for the last simulation listed
+   prreg_frame = 1
+   do ee = 0,nsim-1
+      sst = data_read_in(paths(ee),"TS",syear(ee),eyear(ee))   ; read in data, orient lats/lons correctly, set time coordinate variable up
+      if (isatt(sst,"is_all_missing").or.nyr(ee).lt.40) then
+         delete(sst)
+         continue
+      end if
+      sst = where(sst.le.-1.8,-1.8,sst)   ; set all values below -1.8 to -1.8
+      d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r")   ; mask out land (this is redundant for data that is already masked)
+      basemap = d->LSMASK
+      lsm = landsea_mask(basemap,sst&lat,sst&lon)
+      sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False)
+      delete([/lsm,basemap/])
+      delete(d)
+
+      if (OPT_CLIMO.eq."Full") then
+         sst = rmMonAnnCycTLL(sst)
+      else
+         check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+         temp_arr = sst
+         delete(temp_arr&time)
+         temp_arr&time = cd_calendar(sst&time,-1)
+         if (CLIMO_SYEAR.lt.0) then
+            climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+         else
+            climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+         end if
+         delete(temp_arr)
+         sst = calcMonAnomTLL(sst,climo)
+         delete(climo)
+      end if
+
+      sst = wgt_runave_n_Wrap(sst,wgt,1,0)   ; apply low pass filter
+
+      coswgt=cos(rad*sst&lat)
+      coswgt!0 = "lat"
+      coswgt&lat= sst&lat
+      do ff = 0,dimsizes(sst&time)-1
+         sst(ff,:,:) = (/ sst(ff,:,:) - wgt_areaave(sst(ff,{-60:70},:),coswgt({-60.:70.}),1.0,0) /)
+      end do
+      delete(coswgt)
+
+      sst_CW = sst
+      sst_CW = SqrtCosWeight(sst)
+      evecv = eofunc(sst_CW({lat|-40:60},{lon|110:290},time|:),2,75)
+      delete(sst_CW)
+      pcts = eofunc_ts(sst({lat|-40:60},{lon|110:290},time|:),evecv,False)
+      pctsS = dim_standardize(pcts(0,:),0)
+      delete([/pcts/])
+
+      ipo = sst(0,:,:)
+      ipo = ipo@_FillValue
+      ipo = (/ regCoef(pctsS,sst(lat|:,lon|:,time|:)) /)
+      ipo@syear = syear(ee)
+      ipo@eyear = eyear(ee)
+
+      pc1 = pctsS
+      pc1!0 = "time"
+      pc1&time = sst&time
+      pc1@units = "1"
+      sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(pc1),False)
+      if (sig_pcv(0)) then   ; if True then significant
+         ipo@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+      else
+         ipo@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+      end if
+      delete([/sig_pcv,evecv/])
+
+;      if (.not.ismissing(ipo({35},{160}))) then
+;         if (ipo({35},{160}).ge.0) then   ; arbitrary attempt to make all plots have the same sign
+      if (.not.ismissing(ipo({40},{165}))) then
+         if (ipo({40},{165}).ge.0) then   ; arbitrary attempt to make all plots have the same sign
+            ipo = ipo*-1.
+            pc1 = pc1*-1.
+         end if
+      end if
+      delete([/sst,pctsS/])
+;---------TAS Regressions coding-------------------------------------------------
+      if (any(ismissing((/syear(ee),syear_tas(ee),eyear(ee),eyear_tas(ee)/)))) then
+         tasreg_plot_flag = 1
+      else
+         if (syear(ee).eq.syear_tas(ee)) then   ; check that the start and end years match for ts and tas
+            if (eyear(ee).eq.eyear_tas(ee)) then
+               tasreg_plot_flag = 0
+            else
+               tasreg_plot_flag = 1
+            end if
+         else
+            tasreg_plot_flag = 1
+         end if
+      end if
+
+      if (tasreg_plot_flag.eq.0) then
+         tas = data_read_in(paths_tas(ee),"TREFHT",syear_tas(ee),eyear_tas(ee))
+         if (isatt(tas,"is_all_missing")) then
+            tasreg_plot_flag = 1
+            delete(tas)
+         end if
+
+         if (tasreg_plot_flag.eq.0) then   ; only continue if both TAS/SST fields are present
+            d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r")
+            basemap = d->LSMASK
+            lsm = landsea_mask(basemap,tas&lat,tas&lon)
+            tas = mask(tas,conform(tas,lsm,(/1,2/)).eq.0,False)
+            delete(lsm)
+
+            if (OPT_CLIMO.eq."Full") then
+               tas = rmMonAnnCycTLL(tas)
+            else
+               check_custom_climo(names_tas(ee),syear_tas(ee),eyear_tas(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+               temp_arr = tas
+               delete(temp_arr&time)
+               temp_arr&time = cd_calendar(tas&time,1)
+               if (CLIMO_SYEAR.lt.0) then
+                  climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+               else
+                  climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+               end if
+               delete(temp_arr)
+               tas = calcMonAnomTLL(tas,climo)
+               delete(climo)
+            end if
+            finreg_tas = tas(0,:,:)
+            finreg_tas = (/ regCoef(pc1,tas(lat|:,lon|:,time|:)) /)
+            delete(tas)
+         end if
+      end if
+;---------PR Regressions coding-------------------------------------------------
+      if (any(ismissing((/syear(ee),syear_pr(ee),eyear(ee),eyear_pr(ee)/)))) then
+         prreg_plot_flag = 1
+      else
+         if (syear(ee).eq.syear_pr(ee)) then   ; check that the start and end years match for ts and pr
+            if (eyear(ee).eq.eyear_pr(ee)) then
+               prreg_plot_flag = 0
+            else
+               prreg_plot_flag = 1
+            end if
+         else
+            prreg_plot_flag = 1
+         end if
+      end if
+
+      if (prreg_plot_flag.eq.0) then
+         pr = data_read_in(paths_pr(ee),"PRECT",syear_pr(ee),eyear_pr(ee))
+         if (isatt(pr,"is_all_missing")) then
+            prreg_plot_flag = 1
+            delete(pr)
+         end if
+
+         if (prreg_plot_flag.eq.0) then   ; only continue if both SST/PR fields are present
+            if (OPT_CLIMO.eq."Full") then
+               pr = rmMonAnnCycTLL(pr)
+            else
+               check_custom_climo(names_pr(ee),syear_pr(ee),eyear_pr(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+               temp_arr = pr
+               delete(temp_arr&time)
+               temp_arr&time = cd_calendar(pr&time,1)
+               if (CLIMO_SYEAR.lt.0) then
+                  climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+               else
+                  climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+               end if
+               delete(temp_arr)
+               pr = calcMonAnomTLL(pr,climo)
+               delete(climo)
+            end if
+            finreg_pr = pr(0,:,:)
+            finreg_pr = (/ regCoef(pc1,pr(lat|:,lon|:,time|:)) /)
+            delete(pr)
+         end if
+      end if
+;---------------------------------------------------------------------------------------------
+      if (tasreg_frame.eq.1.and.tasreg_plot_flag.eq.0) then   ; tasreg_frame = flag to create regressions .ps/.png files
+         tasreg_frame = 0
+      end if
+      if (prreg_frame.eq.1.and.prreg_plot_flag.eq.0) then   ; prreg_frame = flag to create regressions .ps/.png files
+         prreg_frame = 0
+      end if
+;---------------------------------------------------------------------------------------------
+      if (OUTPUT_DATA.eq."True") then
+         modname = str_sub_str(names(ee)," ","_")
+         bc = (/"/","'","(",")"/)
+         do gg = 
0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.ipo."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->ipo_pattern_mon = set_varAtts(ipo,"IPO spatial pattern (monthly)","","") + z->ipo_timeseries_mon = set_varAtts(pc1,"IPO normalized principal component timeseries (monthly)","1","") + delete([/modname,fn/]) + if (tasreg_plot_flag.eq.0) then + modname = str_sub_str(names_tas(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.ipo.tas."+syear_tas(ee)+"-"+eyear_tas(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z_tas = addfile(fn,"c") + z_tas@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z_tas@notes = "Data from "+names_tas(ee)+" from "+syear_tas(ee)+"-"+eyear_tas(ee) + if (OPT_CLIMO.eq."Full") then + z_tas@climatology = syear_tas(ee)+"-"+eyear_tas(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z_tas@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z_tas@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z_tas@Conventions = "CF-1.6" + else + z_tas = addfile(fn,"w") + end if + z_tas->ipo_tas_regression_mon = set_varAtts(finreg_tas,"tas regression onto IPO timeseries (monthly)","","") + delete([/modname,fn,z_tas/]) + end if + if (prreg_plot_flag.eq.0) then + modname = str_sub_str(names_pr(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.ipo.pr."+syear_pr(ee)+"-"+eyear_pr(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z_pr = addfile(fn,"c") + z_pr@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z_pr@notes = "Data from "+names_pr(ee)+" from "+syear_pr(ee)+"-"+eyear_pr(ee) + if (OPT_CLIMO.eq."Full") then + z_pr@climatology = syear_pr(ee)+"-"+eyear_pr(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z_pr@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z_pr@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z_pr@Conventions = "CF-1.6" + else + z_pr = addfile(fn,"w") + end if + z_pr->ipo_pr_regression_mon = set_varAtts(finreg_pr,"pr regression onto IPO timeseries (monthly)","","") + delete([/modname,fn,z_pr/]) 
+         end if
+      end if
+;------------------------------------------------------------------------
+      iopt = 0
+      jave = (3*nyr(ee))/100
+      val1 = .95
+      val2 = .99
+      if (jave.eq.0) then
+         jave = 1
+      end if
+      pct = 0.1
+      spectra_mvf = False       ; missing value flag
+      spectra_mvf_obs = True    ; missing value flag
+      if (any(ismissing(pc1))) then   ; check for missing data
+         print("Missing data detected for "+names(ee)+", not creating IPO spectra")
+         spectra_mvf = True
+      else
+         if (isfilepresent2("obs_ts").and.ee.eq.0) then
+            spectra_mvf_obs = False   ; missing value flag
+         end if
+         sdof = specx_anal(pc1,iopt,jave,pct)   ; pc1 is standardized
+         splt1 = specx_ci(sdof,val1,val2)
+
+         if (OUTPUT_DATA.eq."True") then
+            splt1!0 = "ncurves"
+            splt1&ncurves = ispan(0,3,1)
+            splt1&ncurves@long_name = "power spectra curves"
+            splt1&ncurves@units = "1"
+            splt1!1 = "frequency"
+            splt1&frequency = sdof@frq
+            splt1&frequency@units = "1"
+            splt1@units_info = "df refers to frequency interval; data are standardized so there are no physical units"
+            splt1@units = "1/df"
+            splt1@info = "(0,:)=spectrum,(1,:)=Markov red noise spectrum, (2,:)="+val1+"% confidence bound for Markov, (3,:)="+val2+"% confidence bound for Markov"
+            z->ipo_spectra = set_varAtts(splt1,"IPO (monthly) power spectra, Markov spectrum and confidence curves","","")
+         end if
+         if (isfilepresent2("obs_ts").and.ee.eq.0) then
+            sdof_obs = sdof
+         end if
+         delete([/iopt,jave,pct/])
+      end if
+      if (isvar("z")) then
+         delete(z)
+      end if
+;========================================================================
+      res = True
+      res@mpProjection = "WinkelTripel"
+      res@mpGeophysicalLineColor = "gray42"
+      res@mpPerimOn = False
+      res@mpGridLatSpacingF = 90     ; change latitude line spacing
+      res@mpGridLonSpacingF = 180.   ; change longitude line spacing
+      res@mpGridLineColor = "transparent"   ; trick ncl into drawing perimeter
+      res@mpGridAndLimbOn = True     ; turn on lat/lon lines
+      res@mpFillOn = False
+      res@mpCenterLonF = 210.
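+      ; Note: centering the map on 210E keeps the Pacific-centered IPO pattern
+      ; from being split at the dateline.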
+ res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + res@vpYF = 0.95 + res@vpHeightF = 0.3 + res@vpXF = 0.2 + res@vpWidthF = 0.6 + +; res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + + if (COLORMAP.eq."0") then + res@cnLevels = fspan(-.65,.65,27) + end if + if (COLORMAP.eq."1") then + res@cnLevels = fspan(-.8,.8,17) + end if + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnCenterString = names(ee) + res@gsnRightString = ipo@pcvar + + res4 = res ; res4 = pr regression resources + delete(res4@cnLevels) + if (COLORMAP.eq.0) then + res4@cnLevels = (/-5,-4,-3,-2,-1,-.75,-.5,-.25,-.1,0,.1,.25,.5,.75,1,2,3,4,5/) + else + res4@cnLevels = (/-3,-2,-1,-.5,-.1,0,.1,.5,1,2,3/) + end if + + res2 = True ; res2 = tas regression resources + res2@gsnDraw = False + res2@gsnFrame = False + res2@cnLevelSelectionMode = "ExplicitLevels" + res2@cnLevels = res@cnLevels + + res2@cnLineLabelsOn = False + res2@cnFillOn = True + res2@cnLinesOn = False + res2@cnFillMode = "AreaFill" + res2@lbLabelBarOn = False + res2@cnInfoLabelOn = False + res2@gsnRightString = "" + res2@gsnLeftString = "" + res2@gsnCenterString = "" + res2@gsnAddCyclic = True + + map(ee) = gsn_csm_contour_map(wks,ipo,res) + + if (tasreg_plot_flag.eq.0) then + if (names(ee).eq.names_tas(ee)) then + res@gsnCenterString = names(ee) + else + res@gsnCenterString = names(ee)+" / "+names_tas(ee) + end if + map_sst(ee) = gsn_csm_contour_map(wks,ipo,res) + map_tasreg(ee) = gsn_csm_contour(wks,finreg_tas,res2) + overlay(map_sst(ee),map_tasreg(ee)) + delete([/finreg_tas/]) + end if + delete([/ipo/]) + if (prreg_plot_flag.eq.0) then + res4@gsnCenterString = names_pr(ee) + map_prreg(ee) = gsn_csm_contour_map(wks4,finreg_pr,res4) + delete(finreg_pr) + end if + + pres = True + pres@vpXF = 0.07 + pres@trYMinF = 0. 
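+      ; The x axis spans 0-0.0832 cycles/month, i.e. periods of ~1 year
+      ; (1/0.0832 ~ 12 months) and longer; the top axis (set further below)
+      ; relabels the same frequencies as periods in years.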
+ pres@trXMinF = 0.0 + pres@trXMaxF = 0.0832 + pres@tiYAxisString = "Power" ; yaxis + pres@xyLineColor = "black" + pres@gsnFrame = False + pres@gsnDraw = False + + pres@tmXBLabelDeltaF = -.8 + pres@tmXTLabelDeltaF = -.8 + pres@pmLegendDisplayMode = "Never" + pres@xyLineThicknesses = (/3.5,2.,1.,1./) + pres@xyDashPatterns = (/0,0,0,0/) + pres@xyLineColors = (/"foreground","red","blue","green"/) + pres@xyLabelMode = "custom" + pres@xyLineLabelFontColors = pres@xyLineColors + pres@xyExplicitLabels = (/"","",val1*100+"%",val2*100+"%"/) + pres@tmXTOn = True + pres@tmYROn = False + pres@tmXTLabelsOn = True + pres@tmXUseBottom = False + pres@tmXTMode = "Explicit" + pres@tmXBMode = "Explicit" + pres@tmXTValues = (/".00167",".00833",".01667",".02778",".0416",".0556",".0832"/) + pres@tmXTLabels = (/"50","10","5","3","2","1.5","1"/) + pres@tmXBValues = (/".0",".01",".02",".03",".042",".056",".083"/) + pres@tmXBLabels = pres@tmXBValues + pres@tmXTLabelFontHeightF = 0.018 + pres@tmXBLabelFontHeightF = 0.018 + pres@tmYLLabelFontHeightF = 0.018 + pres@tiYAxisString = "Variance" ;"Power (~S~o~N~C~S~2~N~ / cycles mo~S~-1~N~)" ; yaxis + pres@tiXAxisString = "Frequency (cycles mo~S~-1~N~)" + pres@tiMainString = "" + pres@txFontHeightF = 0.015 + pres@xyLineLabelFontHeightF = 0.022 + pres@tiXAxisFontHeightF = 0.025 + pres@tiYAxisFontHeightF = 0.025 + pres@tiMainFontHeightF = 0.03 + pres@gsnRightStringOrthogonalPosF = -0.115 + + pres@tiMainOn = False + pres@gsnCenterString = "Period (years)" + pres@gsnCenterStringFontHeightF = pres@tiYAxisFontHeightF + pres@gsnRightStringFontHeightF = pres@tiYAxisFontHeightF - 0.005 + pres@gsnRightString = syear(ee)+"-"+eyear(ee)+" " + pres@gsnLeftString = "" + if (wks_type.eq."png") then + pres@xyLineThicknessF = 3.5 + res@mpGeophysicalLineThicknessF = 2. + else + pres@xyLineThicknessF = 1.5 + res@mpGeophysicalLineThicknessF = 1. 
+ end if + pres@gsnCenterString = names(ee) + if (spectra_mvf.eq.False) then + pres@trYMaxF = max(splt1(0,:))*1.1 + pspec(ee) = gsn_csm_xy(wks2,sdof@frq,splt1,pres) + if (isfilepresent2("obs_ts").and.ee.ge.1.and.spectra_mvf_obs.eq.False) then + pres@xyLineColors = (/"gray70","black","black","black"/) + pres@xyCurveDrawOrder = "PreDraw" + pres@gsnCenterString = "" + pres@gsnRightString = "" + pspec_obs(ee) = gsn_csm_xy(wks2,sdof_obs@frq,sdof_obs@spcx,pres) + overlay(pspec(ee),pspec_obs(ee)) + delete(pres@xyCurveDrawOrder) + end if + delete([/sdof,splt1/]) + end if + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnRightString = "" + xyres@gsnLeftString = "" + xyres@gsnFrame = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + xyres@gsnXYBarChart = False + xyres@gsnAboveYRefLineColor = 185 + xyres@gsnBelowYRefLineColor = 35 + xyres@xyLineThicknessF = 0.1 + xyres@xyLineColor = "gray70" + xyres@tiYAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnStringFontHeightF = 0.017 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnStringFontHeightF = 0.024 + end if + xyres@gsnCenterStringOrthogonalPosF = 0.025 + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnCenterString = "" + + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+1.5 + +; xyres2 = xyres +; delete(xyres2@gsnXYBarChart) +; delete(xyres2@gsnAboveYRefLineColor) +; delete(xyres2@gsnBelowYRefLineColor) +; xyres2@xyLineColor = "black" +; if (wks_type.eq."png") then +; xyres2@xyLineThicknessF = 3.5 +; else +; xyres2@xyLineThicknessF = 2.5 +; end if + + xyres@gsnCenterString = names(ee) + xyplot(ee) = gsn_csm_xy(wks3,fspan(syear(ee),eyear(ee)+.91667,dimsizes(pc1)),pc1,xyres) ; use standardized timeseries +; xyplot2(ee) = gsn_csm_xy(wks3,fspan(syear(ee),eyear(ee)+.91667,dimsizes(pc1)),runave(pc1,61,0),xyres2) +; overlay(xyplot(ee),xyplot2(ee)) + + delete([/val1,val2,pc1,res,pres,xyres/]) + end do + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.55 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + panres@lbLabelFontHeightF = 0.013 + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@txString = "IPO (Monthly)" + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + gsn_panel2(wks,map,(/nrow,ncol/),panres) + + if (tasreg_frame.eq.0) then + panres@txString = "IPO SST/TAS Regressions (Monthly)" + gsn_panel2(wks,map_sst,(/nrow,ncol/),panres) + end if + delete(wks) + + if (prreg_frame.eq.0) then + panres@txString = "IPO PR Regressions (Monthly)" + gsn_panel2(wks4,map_prreg,(/nrow,ncol/),panres) + end if + delete(wks4) + + delete(panres@gsnPanelLabelBar) + panres@txString = "IPO (Monthly)" + gsn_panel2(wks2,pspec,(/nrow,ncol/),panres) + delete(wks2) + + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = 
(/nrow,ncol/)   ;(/nsim/2+1,nsim/8+1/)
+   end if
+   panres@txString = "IPO (Monthly)"
+   gsn_panel2(wks3,xyplot,lp,panres)
+   delete(wks3)
+   delete([/map,pspec,syear,eyear,nyr,nyr_max,lp/])
+;--------------------------------------------------------------------------------------------------
+   OUTDIR = getenv("OUTDIR")
+
+   if (wks_type.eq."png") then
+      if (tasreg_frame.eq.0) then
+         system("mv "+OUTDIR+"ipo.000001.png "+OUTDIR+"ipo.png")
+         system("mv "+OUTDIR+"ipo.000002.png "+OUTDIR+"ipo.tasreg.png")
+      end if
+   else
+      if (tasreg_frame.eq.0) then
+         system("psplit "+OUTDIR+"ipo.ps "+OUTDIR+"ipo_nn")
+         system("mv "+OUTDIR+"ipo_nn0001.ps "+OUTDIR+"ipo.ps")
+         system("mv "+OUTDIR+"ipo_nn0002.ps "+OUTDIR+"ipo.tasreg.ps")
+      end if
+   end if
+   print("Finished: ipo.ncl")
+end
+
diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/metrics.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/metrics.ncl
new file mode 100644
index 0000000000..8a0b83320c
--- /dev/null
+++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/metrics.ncl
@@ -0,0 +1,556 @@
+; This script takes all the metrics created by the various scripts and placed
+; in metrics_orig.txt, calculates the total scores, reorganizes the data,
+; and writes out a new metrics.txt file.
+
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
+
+undef("add_labelbar")
+procedure add_labelbar(wks,plot,colors,labels)
+local vph, vpw, nboxes, lbres, lbid, amres, annoid
+begin
+  getvalues plot          ; Get plot size for use in
+    "vpHeightF" : vph     ; creating labelbar.
+    "vpWidthF"  : vpw
+  end getvalues
+
+  nboxes = dimsizes(colors)
+
+  lbres = True                    ; labelbar only resources
+  lbres@lbAutoManage = False      ; Necessary to control sizes
+  lbres@vpWidthF = 0.15 * vpw     ; labelbar width
+  if (vph.eq..175) then           ; minimum height
+     lbres@vpHeightF = 0.155
+     lbres@lbLabelFontHeightF = 0.008
+  end if
+  if (vph.gt..175.and.vph.lt..8601) then
+     lbres@vpHeightF = 0.85 * vph
+     lbres@lbLabelFontHeightF = 0.008+(((vph-.17)/.7101)*.0075)
+  end if
+  if (vph.ge..8601) then
+     lbres@vpHeightF = 0.7 * vph
+     lbres@lbLabelFontHeightF = 0.0105-(((vph-.8601)*10)*2)
+  end if
+  lbres@lbFillColors = colors       ; labelbar colors
+  lbres@lbMonoFillPattern = True    ; Solid fill pattern
+
+  lbres@lbLabelAlignment = "InteriorEdges"   ; labels at the interior edges of the boxes
+  lbres@lbOrientation = "Vertical"
+  lbres@lbPerimOn = False
+  lbres@lbFillOpacityF = 0.5
+
+  lbid = gsn_create_labelbar(wks,nboxes,labels,lbres)
+  amres = True
+  amres@amJust = "CenterLeft"
+  amres@amParallelPosF = 0.52
+  amres@amOrthogonalPosF = 0.0
+  plot@annoid = gsn_add_annotation(plot,lbid,amres)
+end
+
+
+begin
+  print("Starting: metrics.ncl")
+
+  nclver = stringtochar(get_ncl_version())   ; check NCL version to turn off error messages
+  num0 = toint(tostring(nclver(0)))
+  num1 = toint(tostring(nclver(2)))
+  errmsg = True
+  if (num0.le.5) then
+     errmsg = False
+  end if
+  if (num0.eq.6) then
+     if (num1.le.4) then
+        errmsg = False
+     else
+        errmsg = True
+     end if
+  end if
+  if (num0.ge.7) then
+     errmsg = True
+  end if
+  delete([/num0,num1/])
+
+  OUTDIR = getenv("OUTDIR")
+
+  nsim = numAsciiRow("namelist_byvar/namelist_trefht")   ; retrieve total number of observational datasets + models (all namelist_byvar/namelist files have the same # of rows)
+  na = asciiread("namelist_byvar/namelist_trefht",(/nsim/),"string")   ; (The names are not taken from metrics.txt as there might be a space in them.)
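+  ; Each namelist_byvar row is pipe-delimited, as parsed by the diagnostic
+  ; scripts (e.g. ipo.ncl above):  name | path | start year | end year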
+  blankrow = ind(na.eq."")
+  if (.not.any(ismissing(blankrow))) then
+     goodrows = ind(na.ne."")
+     na2 = na(goodrows)
+     delete(na)
+     na = na2
+     delete(na2)
+     nsim = dimsizes(na)
+  end if
+  nsim = nsim - 1   ; the first listed dataset is what all others are compared to, thus the output metrics table has nsim-1 columns
+
+  files = (/"sst.indices.1","sst.indices.2","amo","pdo","psl.nam_nao","psl.sam_psa",\
+            "sst.mean_stddev","psl.mean_stddev","pr.mean_stddev"/)
+  files = "metrics."+files
+  files = files+".txt"
+
+  do gg = 0,dimsizes(files)-1
+     if (.not.fileexists(OUTDIR+files(gg))) then
+        print("1 or more metrics files missing, exiting metrics.ncl")
+        exit
+     end if
+  end do
+
+  ch = new((/nsim,dimsizes(files)/),"string")   ; hold obs/simulation names
+  patcor_rms = new((/nsim,11/),"string")        ; 11 metrics
+  cntr = 0
+
+  do gg = 0,dimsizes(files)-1
+     nrow = numAsciiRow(OUTDIR+files(gg))
+     a = asciiread(OUTDIR+files(gg),(/-1/),"string")
+
+     t0 = tochar(a(3))
+     sti0 = str_index_of_substr(a(4), " -",0)   ; read in individual column headers from each metrics file
+     do hh = 0,dimsizes(sti0)-1
+        if (hh.eq.(dimsizes(sti0)-1)) then
+           ch(hh,gg) = str_strip(tostring(t0(sti0(hh):)))
+        else
+           ch(hh,gg) = str_strip(tostring(t0(sti0(hh):sti0(hh+1))))
+        end if
+     end do
+     delete([/sti0,t0/])
+
+     test = tochar(a(5:))
+     if (dimsizes(dimsizes(test)).eq.2) then
+        patcor_rms(:,cntr) = str_split(tostring(test(0,18:))," ")
+        patcor_rms(:,cntr+1) = str_split(tostring(test(1,18:))," ")
+        cntr = cntr+2
+     else
+        patcor_rms(:,cntr) = str_split(tostring(test(18:))," ")
+        cntr = cntr+1
+     end if
+     delete(a)
+     delete([/test/])
+  end do
+  delete(cntr)
+
+  do gg = 0,dimsizes(files)-1   ; remove individual metrics files
+     system("rm "+OUTDIR+files(gg))
+  end do
+
+  names = ch(:,0)
+  do gg = 0,nsim-1   ; check to see if the data is observations or models by seeing if every name matches
+     if (all(ch(gg,0).eq.ch(gg,1:))) then
+        names(gg) = ch(gg,0)
+     else
+        names(gg) = "OBS "+(gg+2)
+     end if
+  end do
+  delete(ch)
+
+  names_nchar = max(dimsizes(tochar(names)))
+  spacer = ""
+  do gg = 0,names_nchar
+     spacer = spacer+" "
+  end do
+  delete(names_nchar)
+  pc_score = new(nsim,"string")
+  rms_score = new(nsim,"string")
+  do gg = 0,nsim-1   ; strip out the pattern correlations and calculate the score for each model
+     pc = new(11,float,9.99)
+     rms = pc
+     do hh = 0,10   ; 11 metrics
+        n1 = str_split(patcor_rms(gg,hh),"/")
+;        print(n1)
+        pc(hh) = tofloat(n1(0))    ; strip out pattern correlations. 9.99 = missing.
+        rms(hh) = tofloat(n1(1))   ; strip out rms. 9.99 = missing.
+        delete(n1)
+     end do
+     if (any(ismissing(rms))) then
+        rms_score(gg) = "----"
+     else
+        rms_score(gg) = sprintf("%4.2f",avg(rms))
+     end if
+     delete(rms)
+
+;     total_score(gg) = ""+avg(pc)
+;     print("Simple average = "+avg(pc))
+
+     pc_z = pc
+     pc_z = pc_z@_FillValue
+     if (any(ismissing(pc))) then
+;        print("Missing Values detected")
+;        print(pc)
+        pc_score(gg) = "----"
+     else
+        do ii = 0,10   ; use Fisher's z-transformation to translate r->z
+           if (pc(ii).eq.1.0) then
+              pc_z(ii) = 0.5*(log( (1+1.001) / (1-1.001) ))   ; needed when pattern correlation = 1.0
+           else
+              pc_z(ii) = 0.5*(log( (1+pc(ii)) / (1-pc(ii)) ))
+           end if
+        end do
+        zavg = avg(pc_z)   ; compute average of z
+        delete(pc_z)
+
+        pc_score(gg) = sprintf("%4.2f",((2.71828^(2*zavg))-1)/ ((2.71828^(2*zavg))+1))   ; reverse the process and convert z-avg -> r:  r = (e^2z - 1)/(e^2z + 1), e = 2.71828
+;                                                                                          e.g. r = (0.80,0.95) -> z = (1.099,1.832), zavg = 1.465 -> r_avg = 0.90 (vs. 0.875 for a simple average)
+;        print("average of Z-transformed correlations = "+pc_score(gg))
+        delete(zavg)
+     end if
+     delete(pc)
+  end do
+  pc_score = where(pc_score.eq." nan","----",pc_score)   ; needed for when NaNs come out of the z-transform (likely due to numerous pattern correlations = 1)
+
+  header = (/"","Pattern Correlations/RMS Differences Observations vs. Model(s)",""/)
+  write_table(OUTDIR+"metrics.txt","w",[/header/],"%s")
+  column_header1 = spacer+" ENSO TAS ENSO PSL El Nino La Nina AMO PDO NAM SAM SST sigma PSL sigma PR sigma Mean "
+  column_header2 = spacer+" (DJF+1) (DJF+1) Hov Hov (Monthly) (Monthly) (DJF) (DJF) (Ann) (Ann) (Ann) Score "
+  column_header3 = spacer+" --------- --------- --------- --------- --------- --------- --------- --------- --------- --------- --------- ---------"
+  write_table(OUTDIR+"metrics.txt","a",[/column_header1/],"%s")
+  write_table(OUTDIR+"metrics.txt","a",[/column_header2/],"%s")
+  write_table(OUTDIR+"metrics.txt","a",[/column_header3/],"%s")
+
+  patcor_rms = where(patcor_rms.eq."9.99/9.99","----/----",patcor_rms)
+  spacer_char = tochar(spacer)
+  do gg = 0,nsim-1
+     spacer_char1 = spacer_char
+     mname_char = tochar(names(gg))
+     dimC = dimsizes(mname_char)
+     spacer_char1(:dimC-1) = mname_char
+     srow = tostring(spacer_char1)
+;     print(srow)
+     do hh = 0,10
+        n1 = str_split(patcor_rms(gg,hh),"/")
+;        print("n1 = "+n1)
+        if (n1(0).eq."----") then
+           srow = srow+" "+patcor_rms(gg,hh)
+        else
+           if (tofloat(n1(0)).ge.0) then
+              srow = srow+" "+patcor_rms(gg,hh)
+           else
+              srow = srow+" "+patcor_rms(gg,hh)
+           end if
+        end if
+        delete(n1)
+     end do
+     srow = srow+" "+pc_score(gg)+"/"+rms_score(gg)
+     write_table(OUTDIR+"metrics.txt","a",[/srow/],"%s")
+     delete([/spacer_char1,dimC,mname_char,srow/])
+  end do
+  delete([/spacer_char/])
+
+; Create tables that are colored by value and sorted by value
+; if there are fewer than 256 simulations+(number of observational datasets-1).
+; (NCL can only create 255 tickmarks on one plot and each tickmark equals a
+; model/obs below.)
+
+  if (nsim.le.255) then
+     names!0 = "sim"
+     names&sim = ispan(0,nsim-1,1)
+     patcor = new((/nsim,12/),typeof(patcor_rms))
+     rms = patcor
+     do gg = 0,nsim-1
+        do hh = 0,11
+           if (hh.le.10) then
+              n1 = str_split(patcor_rms(gg,hh),"/")
+              patcor(gg,hh) = n1(0)
+              rms(gg,hh) = n1(1)
+           else
+              patcor(gg,hh) = pc_score(gg)
+              rms(gg,hh) = rms_score(gg)
+           end if
+        end do
+     end do
+     delete([/pc_score,rms_score/])
+     patcor!0 = "sim"
+     patcor&sim = ispan(0,nsim-1,1)
+     copy_VarCoords(patcor,rms)
+
+     ncols = 12
+     nrows = nsim
+     col_width = 1./ncols
+     row_width = 1./nrows
+     col_width2 = col_width/2.
+     row_width2 = row_width/2.
+ + fcolors = new(dimsizes(patcor),"integer") + colors = (/7,12,17,22,27,33,37,42,47,53,59,65/) + cnLevels = (/0.5,0.55,0.60,0.65,0.7,0.75,0.8,0.85,0.9,0.95,0.99/) + do gg = 0,dimsizes(cnLevels) + if (gg.eq.0) then + fcolors = where(patcor.lt.cnLevels(0),colors(0),fcolors) + end if + if (gg.ge.1.and.gg.lt.dimsizes(cnLevels)) then + fcolors = where(patcor.lt.cnLevels(gg).and.patcor.ge.cnLevels(gg-1),colors(gg),fcolors) + end if + if (gg.eq.dimsizes(cnLevels)) then + fcolors = where(patcor.ge.cnLevels(gg-1),colors(gg),fcolors) + end if + end do + fcolors = where(patcor.eq."----",75,fcolors) + + fcolorsR = new(dimsizes(rms),"integer") + colorsR = (/65,59,53,47,42,37,33,27,22,17,12,7/) + cnLevelsR = (/.05,.1,.2,.3,.4,.5,.6,.7,.8,.9,1./) + do gg = 0,dimsizes(cnLevelsR) + if (gg.eq.0) then + fcolorsR = where(rms.lt.cnLevelsR(0),colorsR(0),fcolorsR) + end if + if (gg.ge.1.and.gg.lt.dimsizes(cnLevelsR)) then + fcolorsR = where(rms.lt.cnLevelsR(gg).and.rms.ge.cnLevelsR(gg-1),colorsR(gg),fcolorsR) + end if + if (gg.eq.dimsizes(cnLevelsR)) then + fcolorsR = where(rms.ge.cnLevelsR(gg-1),colorsR(gg),fcolorsR) + end if + end do + fcolorsR = where(rms.eq."----",75,fcolorsR) +;-------------------------------------------------------------------------------------------- + wks_type = "png" ; output png + wks_type@wkWidth = 1500 + wks_type@wkHeight = 1500 + if (nsim.ge.80.and.nsim.lt.179) then + wks_type@wkWidth = 2500 + wks_type@wkHeight = 2500 + end if + if (nsim.ge.180) then + wks_type@wkWidth = 4000 + wks_type@wkHeight = 4000 + end if + wks = gsn_open_wks(wks_type,OUTDIR+"table") ; send graphics to PNG file + gsn_merge_colormaps(wks,"cmp_b2r","gsltod") + + resb = True ; resource list for blank plot that gsn_table will be overlaid on + resb@gsnDraw = False + resb@gsnFrame = False + resb@vpXF = 0.3 + title_loc = (/.185,0.075,.185,0.05/) ; default x/y ndc values for location of plot title and subtitle + b_int = 0.0 + if (nsim.le.32) then + resb@vpWidthF = 0.59 + resb@vpYF = 0.825 + resb@vpHeightF = nsim*0.025 + if (resb@vpHeightF.lt..175) then ; set a minimum height + resb@vpHeightF = .175 + end if + resb@tmXTLabelFontHeightF = 0.0125 + resb@tmXTMajorLengthF = 0.009 + end if + if (nsim.ge.33.and.nsim.lt.80) then + resb@vpWidthF = 0.59 + resb@vpYF = 0.865 + resb@vpHeightF = 0.8601 + resb@tmXTLabelFontHeightF = 0.0085 + resb@tmXTMajorLengthF = 0.009 + end if + if (nsim.ge.80.and.nsim.lt.109) then + resb@vpWidthF = 0.59 + resb@vpYF = 0.865 + resb@vpHeightF = 0.8602 + resb@tmXTLabelFontHeightF = 0.0065 + resb@tmXTMajorLengthF = 0.009 + b_int = .00185 + end if + if (nsim.ge.109.and.nsim.lt.150) then + resb@vpWidthF = 0.425 + resb@vpYF = 0.865 + resb@vpHeightF = 0.8603 + resb@tmXTLabelFontHeightF = 0.0045 + resb@tmXTMajorLengthF = 0.0065 + title_loc = (/.085,0.035,.085,0.025/) + b_int = .002 + end if + if (nsim.ge.150) then + resb@vpWidthF = 0.25 + resb@vpYF = 0.865 + resb@vpHeightF = 0.8604 + resb@tmXTLabelFontHeightF = 0.0025 + resb@tmXTMajorLengthF = 0.0045 + title_loc = (/.07,0.02,.07,0.0125/) + b_int = .002 + end if + resb@tmYLMajorLengthF = resb@tmXTMajorLengthF + resb@tmXTMajorOutwardLengthF = resb@tmXTMajorLengthF + resb@tmYLMajorOutwardLengthF = resb@tmXTMajorLengthF + resb@tmXTMajorLineColor = "gray55" + resb@tmYLMajorLineColor = resb@tmXTMajorLineColor + resb@tmXTLabelFont = 21 + resb@tmXTMode = "Explicit" ; Explicitly label X axis. The blank plot goes from 0 to 1, by default. 
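The two loops above bin each table value into a colormap index by thresholding against cnLevels/cnLevelsR with where(); note they deliberately iterate to dimsizes(cnLevels), one past the last threshold, so the final pass can fill the open-ended top bin. The same pattern in isolation (toy values, thresholds, and color indices):

  vals   = (/0.42, 0.71, 0.97/)              ; hypothetical table values
  levels = (/0.5, 0.9/)                      ; two thresholds -> three bins
  colors = (/7, 33, 65/)                     ; one colormap index per bin
  fc = new(dimsizes(vals),"integer")
  fc = where(vals.lt.levels(0), colors(0), fc)
  fc = where(vals.ge.levels(0).and.vals.lt.levels(1), colors(1), fc)
  fc = where(vals.ge.levels(1), colors(2), fc)
  print(vals+" -> color index "+fc)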
+ resb@tmXTValues = fspan(col_width2,1.-col_width2,ncols) + ncol_labels = (/"ENSO TAS (DJF~S~+1~N~)","ENSO PSL (DJF~S~+1~N~)","El Nin~H-13V2F35~D~FV-2H3F21~o Hovmo~H-14V2F35~H~FV-2H3~ller","La Nin~H-13V2F35~D~FV-2H3F21~a Hovmo~H-14V2F35~H~FV-2H3~ller","AMO","PDO", \ + "NAM (DJF)","SAM (DJF)","SST std dev (Ann)","PSL std dev (Ann)","PR std dev (Ann)","Mean Score"/) + resb@tmXTLabels = ncol_labels + resb@tmXTOn = True + resb@tmXUseBottom = False + resb@tmXTLabelsOn = True + resb@tmXBOn = False + resb@tmXTLabelAngleF = 70. + resb@tmXTLabelJust = "CenterLeft" + + resb@tmYLMode = "Explicit" + if (nsim.gt.1) then + resb@tmYLValues = fspan(row_width2,1.-row_width2,nrows) + else + resb@tmYLValues = row_width2 + end if + resb@tmYLLabelFontHeightF = resb@tmXTLabelFontHeightF + resb@tmYROn = False + resb@tiMainOn = False + + resT = True + resT@gsLineThicknessF = 2.0 + resT@gsLineColor = resb@tmXTMajorLineColor + resT@txFontHeightF = resb@tmXTLabelFontHeightF + resT@gsFillOpacityF = 0.5 + resT@tfPolyDrawOrder = "PreDraw" + + polyres = True + polyres@gsLineColor = "gray25" + polyres@gsLineThicknessF = 8.0 + polyres@gsLineDashPattern = 0 + polyres@tfPolyDrawOrder = "PostDraw" + + tres = True + tres@txFontHeightF = resb@tmYLLabelFontHeightF*1.2 + tres@txJust = "CenterLeft" + + tres2 = tres + tres2@txFontHeightF = resb@tmYLLabelFontHeightF*0.8 + do gg = 0,13 + namesF = names + patcorF = patcor + if (gg.eq.0) then + int_s = namesF&sim + s_txt = "" + end if + if (gg.eq.1) then ; sort names + namesF = str_upper(namesF) ; make all model names uppercase so sqsort sorts like this: A,b,C instead of this: A,C,b + sqsort(namesF) + int_s = namesF&sim + namesF = names(int_s) + s_txt = "Namelist (Alphab.)" + end if + if (gg.ge.2) then + patcorT = patcorF(:,gg-2) + sqsort(patcorT) + int_s = patcorT&sim(::-1) + namesF = names(int_s) + delete(patcorT) + s_txt = ncol_labels(gg-2) + end if + resb@tmYLLabels = namesF(::-1) ; this resource takes labels in reverse order as gsn_table + blank = gsn_csm_blank_plot(wks,resb) + add_labelbar(wks,blank,colors,""+decimalPlaces(cnLevels,2,True)) ; Attach labelbar + if (gg.eq.2) then + dum = gsn_add_polyline(wks,blank,(/.002,.083333,.083333,.002,.002/),(/.002-b_int,.002-b_int,.998+b_int,.998+b_int,.002-b_int/),polyres) + end if + if (gg.ge.3.and.gg.le.12) then + dum = gsn_add_polyline(wks,blank,(/(gg-2)*.083333,(gg-1)*.083333,(gg-1)*.083333,(gg-2)*.083333,(gg-2)*.083333/),(/.002-b_int,.002-b_int,.998+b_int,.998+b_int,.002-b_int/),polyres) + end if + if (gg.eq.13) then + dum = gsn_add_polyline(wks,blank,(/(gg-2)*.083333,.998,.998,(gg-2)*.083333,(gg-2)*.083333/),(/.002-b_int,.002-b_int,.998+b_int,.998+b_int,.002-b_int/),polyres) + end if + + getvalues blank + "vpXF" : vpx + "vpYF" : vpy ; Get position and size of the blank plot so we can + "vpWidthF" : vpw ; be sure to draw the table in same location. 
+ "vpHeightF" : vph + end getvalues + x = (/vpx,vpx+vpw/) + y = (/vpy-vph,vpy/) + + resT@gsFillColor = fcolors(int_s,:) + + if (.not.errmsg) then ; turn off error messages output from gsn_table if NCL v6.4.0 or older + err = NhlGetErrorObjectId() + setvalues err + "errPrint" : "False" + end setvalues + end if + gsn_table(wks,dimsizes(patcorF),x,y,patcorF(int_s,:),resT) + if (.not.errmsg) then + setvalues err + "errPrint" : "True" + end setvalues + end if + gsn_text_ndc(wks,"Pattern Correlations",resb@vpXF-title_loc(0),resb@vpYF+title_loc(1),tres) + if (s_txt.ne."") then + gsn_text_ndc(wks,"Sorted by: "+s_txt,resb@vpXF-title_loc(2),resb@vpYF+title_loc(3),tres2) + end if + draw(blank) + frame(wks) + delete([/namesF,patcorF,int_s/]) + end do + do gg = 0,13 + namesF = names + rmsF = rms + if (gg.eq.0) then + int_s = namesF&sim + s_txt = "" + end if + if (gg.eq.1) then ; sort names + namesF = str_upper(namesF) ; make all model names uppercase so sqsort sorts like this: A,b,C instead of this: A,C,b + sqsort(namesF) + int_s = namesF&sim + namesF = names(int_s) + s_txt = "Name" + end if + if (gg.ge.2) then + rmsT = rmsF(:,gg-2) + rmsT = where(rmsT.eq."----","1000",rmsT) ; make sure values of ---- get put to bottom of sorted list + sqsort(rmsT) + rmsT = where(rmsT.eq."1000","----",rmsT) ; make sure values of ---- get put to bottom of sorted list + int_s = rmsT&sim + namesF = names(int_s) + delete(rmsT) + s_txt = ncol_labels(gg-2) + end if + resb@tmYLLabels = namesF(::-1) ; this resource takes labels in reverse order as gsn_table + blank = gsn_csm_blank_plot(wks,resb) + add_labelbar(wks,blank,colorsR,""+decimalPlaces(cnLevelsR,2,True)) ; Attach labelbar + if (gg.eq.2) then + dum = gsn_add_polyline(wks,blank,(/.002,.083333,.083333,.002,.002/),(/.002-b_int,.002-b_int,.998+b_int,.998+b_int,.002-b_int/),polyres) + end if + if (gg.ge.3.and.gg.le.12) then + dum = gsn_add_polyline(wks,blank,(/(gg-2)*.083333,(gg-1)*.083333,(gg-1)*.083333,(gg-2)*.083333,(gg-2)*.083333/),(/.002-b_int,.002-b_int,.998+b_int,.998+b_int,.002-b_int/),polyres) + end if + if (gg.eq.13) then + dum = gsn_add_polyline(wks,blank,(/(gg-2)*.083333,.998,.998,(gg-2)*.083333,(gg-2)*.083333/),(/.002-b_int,.002-b_int,.998+b_int,.998+b_int,.002-b_int/),polyres) + end if + + getvalues blank + "vpXF" : vpx + "vpYF" : vpy ; Get position and size of the blank plot so we can + "vpWidthF" : vpw ; be sure to draw the table in same location. 
+ "vpHeightF" : vph + end getvalues + x = (/vpx,vpx+vpw/) + y = (/vpy-vph,vpy/) + resT@gsFillColor = fcolorsR(int_s,:) + if (.not.errmsg) then ; turn off error messages output from gsn_table if NCL v6.4.0 or older + err = NhlGetErrorObjectId() + setvalues err + "errPrint" : "False" + end setvalues + end if + gsn_table(wks,dimsizes(rmsF),x,y,rmsF(int_s,:),resT) + if (.not.errmsg) then + setvalues err + "errPrint" : "True" + end setvalues + end if + gsn_text_ndc(wks,"RMS Differences",resb@vpXF-title_loc(0),resb@vpYF+title_loc(1),tres) + if (s_txt.ne."") then + gsn_text_ndc(wks,"Sorted by: "+s_txt,resb@vpXF-title_loc(2),resb@vpYF+title_loc(3),tres2) + end if + draw(blank) + frame(wks) + delete([/namesF,rmsF,int_s/]) + end do + delete(wks) + fils = systemfunc("ls "+OUTDIR+"table*.png") + do gg = 0,dimsizes(fils)-1 + system("convert -density 144 -trim +repage -bordercolor white -border 20 -transparent white "+fils(gg)+" "+OUTDIR+"metrics.table_"+gg+".gif") + end do + system("rm "+OUTDIR+"table*.png") + end if + delete([/patcor_rms,names,nsim/]) + print("Finished: metrics.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/namelist.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/namelist.ncl new file mode 100644 index 0000000000..a9b2069b56 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/namelist.ncl @@ -0,0 +1,469 @@ +; use the user specified namelist / namelist_obs files to locate +; the files to be used, and write those file paths to namelist_byvar/namelist_* +; for use by other CVDP scripts. +; +; Note: ".nc" is removed from the paths given in namelist. +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: namelist.ncl") + o = getenv("OBS") + case_sens = getenv("MACHINE") + + if (o.eq."True") then + obsflag = True + else + obsflag = False + end if + + if (.not.isfilepresent2("namelist_byvar/")) then + system("mkdir namelist_byvar") + end if + + nsim = numAsciiRow("namelist") + na = asciiread("namelist",(/nsim/),"string") + + blankrow = ind(na.eq."") + if (.not.any(ismissing(blankrow))) then + goodrows = ind(na.ne."") + na2 = na(goodrows) + delete(na) + na = na2 + delete(na2) + nsim = dimsizes(na) + end if + + system(" export NSIM="+nsim) + + nentry = numAsciiCol("namelist") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + + delim = "|" + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + + delete([/na,delim/]) + + + do gg = 0,nsim-1 ; if path ends in .nc remove it. +; print(paths(gg)) ; (It will get appended to the end of the path automatically when searching below.) + paths(gg) = str_sub_str(paths(gg),".nc","") +; print(paths(gg)) + end do + +;----- Read in namelist_obs, and check number of supplied Observational datasets ------ + + maxnumobs = 0 ; set maximum number of obs datasets per variable. if(obsflag).eq.True, this will likely get altered. 
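Each namelist row is pipe-delimited; the loop above pulls the four fields apart with str_get_field (fields are 1-indexed) and strips the surrounding blanks with str_strip. In isolation, with a hypothetical row:

  row   = "CESM1 | /data/cesm1/ | 1850 | 2005"   ; hypothetical namelist row
  delim = "|"
  name  = str_strip(str_get_field(row,1,delim))
  path  = str_strip(str_get_field(row,2,delim))
  sy    = stringtointeger(str_strip(str_get_field(row,3,delim)))
  ey    = stringtointeger(str_strip(str_get_field(row,4,delim)))
  print(name+": "+path+" ("+sy+"-"+ey+")")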
+ + if (obsflag) then + nobs = numAsciiRow("namelist_obs") + nentryB = numAsciiCol("namelist_obs") + vnamesB = new(nobs,"string") + namesB = new(nobs,"string") + pathsB = new(nobs,"string") + syearBT = new(nobs,"string") + eyearBT = new(nobs,"string") + + na = asciiread("namelist_obs",(/nobs/),"string") +; print(na) + + delim = "|" + + do gg = 0,nobs-1 + vnamesB(gg) = str_strip(str_get_field(na(gg),1,delim)) + namesB(gg) = str_strip(str_get_field(na(gg),2,delim)) + pathsB(gg) = str_strip(str_get_field(na(gg),3,delim)) + syearBT(gg) = str_strip(str_get_field(na(gg),4,delim)) + eyearBT(gg) = str_strip(str_get_field(na(gg),5,delim)) + end do + namesB = where(namesB.eq."",namesB@_FillValue,namesB) + pathsB = where(pathsB.eq."",pathsB@_FillValue,pathsB) + syearBT = where(syearBT.eq."",syearBT@_FillValue,syearBT) + eyearBT = where(eyearBT.eq."",eyearBT@_FillValue,eyearBT) + + maxnumobs = max((/dimsizes(ind(vnamesB.eq."TS")),dimsizes(ind(vnamesB.eq."PSL")),dimsizes(ind(vnamesB.eq."TREFHT")), \ + dimsizes(ind(vnamesB.eq."PRECT")),dimsizes(ind(vnamesB.eq."MOC")),dimsizes(ind(vnamesB.eq."SNOWDP")), \ + dimsizes(ind(vnamesB.eq."aice_nh")),dimsizes(ind(vnamesB.eq."aice_sh"))/)) + syearB = stringtointeger(syearBT) + eyearB = stringtointeger(eyearBT) + + + do gg = 0,nobs-1 ; check to see if any names are duplicated. If they are, add a "_2", "_3" to the name + dupn = ind(namesB.eq.namesB(gg)) ; this is needed so that each output .nc file has a different name + if (dimsizes(dupn).ge.2) then + do hh = 1,dimsizes(dupn)-1 + namesB(dupn(hh)) = namesB(dupn(hh))+"_"+hh + end do + end if + delete(dupn) + end do + delete([/eyearBT,syearBT/]) + delete([/na,delim,nentryB,nobs/]) + asciiwrite("obs_maxnum",maxnumobs) + end if +; print(vnamesB+" "+namesB+" "+pathsB+" "+syearB+" "+eyearB) +;exit +;----- TS section--------------- + namelist_ts = new(nsim+maxnumobs,string) + if (obsflag) then + ts_i = ind(vnamesB.eq."TS") + if (.not.ismissing(ts_i(0))) then + incr = dimsizes(ts_i) + do gg = 0,incr-1 + namelist_ts(gg) = namesB(ts_i(gg))+" | "+pathsB(ts_i(gg))+" | "+syearB(ts_i(gg))+" | "+eyearB(ts_i(gg)) + end do + if (incr.lt.maxnumobs) then ; fill in the missing obs rows with the first obs file, altering the name slightly for .nc write-outs + if (.not.ismissing(namelist_ts(0))) then + nmiss = ind(ismissing(namelist_ts(:maxnumobs-1))) + do hh = nmiss(0),nmiss(dimsizes(nmiss)-1) + namelist_ts(hh) = namesB(ts_i(0))+"_"+hh+" | "+pathsB(ts_i(0))+" | "+syearB(ts_i(0))+" | "+eyearB(ts_i(0)) + end do + delete(nmiss) + end if + end if + asciiwrite("obs_ts",namelist_ts(0)) + delete(incr) + end if + delete(ts_i) + end if + + if (case_sens.eq."True") then + tstring0 = "TS_,.TS.,ts_,.ts.,t_surf_,t_surf.,sst.,sst_" + tstring1 = "TS,ts,t_surf,sst" + else + tstring0 = "TS_,.TS.,t_surf_,t_surf.,sst.,sst_" + tstring1 = "TS,t_surf,sst" + end if + + do gg = 0,nsim-1 + fsst = systemfunc("bash -c 'ls "+paths(gg)+"*{"+tstring0+"}*.nc 2> /dev/null'") ; /dev/null suppresses all standard error output + if (dimsizes(fsst).eq.1) then + namelist_ts(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; grab first file + else + tpath = str_sub_str(paths(gg),"/*/","/{"+tstring1+"}/") ; explicitly specify TS,ts in directory structure to eliminate "/tsmin/" being used + namelist_ts(gg+maxnumobs) = names(gg)+" | "+tpath+"*{"+tstring0+"}*.nc | "+syear(gg)+" | "+eyear(gg) + delete(tpath) + end if + delete(fsst) + end do + delete([/tstring0,tstring1/]) + asciiwrite("namelist_byvar/namelist_ts",namelist_ts) +;------- PSL 
section---------------------------- + namelist_psl = new(nsim+maxnumobs,string) + if (obsflag) then + psl_i = ind(vnamesB.eq."PSL") + if (.not.ismissing(psl_i(0))) then + incr = dimsizes(psl_i) + do gg = 0,incr-1 + namelist_psl(gg) = namesB(psl_i(gg))+" | "+pathsB(psl_i(gg))+" | "+syearB(psl_i(gg))+" | "+eyearB(psl_i(gg)) + end do + if (incr.lt.maxnumobs) then ; fill in the missing obs rows with the first obs file, altering the name slightly for .nc write-outs + if (.not.ismissing(namelist_psl(0))) then + nmiss = ind(ismissing(namelist_psl(:maxnumobs-1))) + do hh = nmiss(0),nmiss(dimsizes(nmiss)-1) + namelist_psl(hh) = namesB(psl_i(0))+"_"+hh+" | "+pathsB(psl_i(0))+" | "+syearB(psl_i(0))+" | "+eyearB(psl_i(0)) + end do + delete(nmiss) + end if + end if + asciiwrite("obs_psl",namelist_psl(0)) + delete(incr) + end if + delete(psl_i) + end if + + if (case_sens.eq."True") then + tstring0 = "PSL_,PSL.,psl_,psl.,slp.,slp_" + tstring1 = "PSL,psl,SLP,slp" + else + tstring0 = "PSL_,PSL.,slp.,slp_" + tstring1 = "PSL,slp" + end if + do gg = 0,nsim-1 + fsst = systemfunc("bash -c 'ls "+paths(gg)+"*{"+tstring0+"}*.nc 2> /dev/null'") ; /dev/null suppresses all standard error output + if (dimsizes(fsst).eq.1) then + namelist_psl(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; grab first file + else + tpath = str_sub_str(paths(gg),"/*/","/{"+tstring1+"}/") + namelist_psl(gg+maxnumobs) = names(gg)+" | "+tpath+"*{"+tstring0+"}*.nc | "+syear(gg)+" | "+eyear(gg) + delete(tpath) + end if + delete(fsst) + end do + delete([/tstring0,tstring1/]) + asciiwrite("namelist_byvar/namelist_psl",namelist_psl) +;------- TREFHT section---------------------------- + namelist_trefht = new(nsim+maxnumobs,string) + if (obsflag) then + trefht_i = ind(vnamesB.eq."TREFHT") + if (.not.ismissing(trefht_i(0))) then + incr = dimsizes(trefht_i) + do gg = 0,incr-1 + namelist_trefht(gg) = namesB(trefht_i(gg))+" | "+pathsB(trefht_i(gg))+" | "+syearB(trefht_i(gg))+" | "+eyearB(trefht_i(gg)) + end do + if (incr.lt.maxnumobs) then ; fill in the missing obs rows with the first obs file, altering the name slightly for .nc write-outs + if (.not.ismissing(namelist_trefht(0))) then + nmiss = ind(ismissing(namelist_trefht(:maxnumobs-1))) + do hh = nmiss(0),nmiss(dimsizes(nmiss)-1) + namelist_trefht(hh) = namesB(trefht_i(0))+"_"+hh+" | "+pathsB(trefht_i(0))+" | "+syearB(trefht_i(0))+" | "+eyearB(trefht_i(0)) + end do + delete(nmiss) + end if + end if + asciiwrite("obs_trefht",namelist_trefht(0)) + delete(incr) + end if + delete(trefht_i) + end if + + if (case_sens.eq."True") then + tstring0 = "TREFHT_,TREFHT.,tas.,tas_,t_ref.,t_ref_,T2.,T2_" + tstring1 = "TREFHT,tas,t_ref,T2" + else + tstring0 = "TREFHT_,TREFHT.,tas.,tas_,t_ref.,t_ref_,T2.,T2_" + tstring1 = "TREFHT,tas,t_ref,T2" + end if + do gg = 0,nsim-1 + fsst = systemfunc("bash -c 'ls "+paths(gg)+"*{"+tstring0+"}*.nc 2> /dev/null'") ; /dev/null suppresses all standard error output + if (dimsizes(fsst).eq.1) then + namelist_trefht(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; grab first file + else + tpath = str_sub_str(paths(gg),"/*/","/{"+tstring1+"}/") + namelist_trefht(gg+maxnumobs) = names(gg)+" | "+tpath+"*{"+tstring0+"}*.nc | "+syear(gg)+" | "+eyear(gg) + delete(tpath) + end if + delete(fsst) + end do + asciiwrite("namelist_byvar/namelist_trefht",namelist_trefht) +;------- PRECT section--(more complicated due to PRECC+PRECL, + pr being a common 2 letter combination)------ + namelist_prect = new(nsim+maxnumobs,string) + if (obsflag) 
then
+ prect_i = ind(vnamesB.eq."PRECT")
+ if (.not.ismissing(prect_i(0))) then
+ incr = dimsizes(prect_i)
+ do gg = 0,incr-1
+ namelist_prect(gg) = namesB(prect_i(gg))+" | "+pathsB(prect_i(gg))+" | "+syearB(prect_i(gg))+" | "+eyearB(prect_i(gg))
+ end do
+ if (incr.lt.maxnumobs) then ; fill in the missing obs rows with the first obs file, altering the name slightly for .nc write-outs
+ if (.not.ismissing(namelist_prect(0))) then
+ nmiss = ind(ismissing(namelist_prect(:maxnumobs-1)))
+ do hh = nmiss(0),nmiss(dimsizes(nmiss)-1)
+ namelist_prect(hh) = namesB(prect_i(0))+"_"+hh+" | "+pathsB(prect_i(0))+" | "+syearB(prect_i(0))+" | "+eyearB(prect_i(0))
+ end do
+ delete(nmiss)
+ end if
+ end if
+ asciiwrite("obs_prect",namelist_prect(0))
+ delete(incr)
+ end if
+ delete(prect_i)
+ end if
+ do gg = 0,nsim-1
+ pstring = (/"pr_*","pr.*","_pr_*",".pr.*","PRECT.*","PRECT_*","PRECC.*","PRECC_*","precip_*","precip.*","prcp_*","prcp.*","prate_*","prate.*"/)
+ do hh = 0,dimsizes(pstring)-1
+ if (isvar((/"fsst"/))) then
+ delete(fsst)
+ end if
+ fsst = systemfunc("bash -c 'ls "+paths(gg)+"*"+pstring(hh)+".nc 2> /dev/null'") ; /dev/null suppresses all standard error output
+ if (dimsizes(fsst).eq.1) then
+ if (.not.ismissing(fsst)) then
+ if (pstring(hh).eq."PRECC.*".or.pstring(hh).eq."PRECC_*") then
+ tpath = str_sub_str(paths(gg),"/*/","/{PRECC,PRECL}/")
+ namelist_prect(gg+maxnumobs) = names(gg)+" | "+tpath+"*{PRECC,PRECL}*.nc | "+syear(gg)+" | "+eyear(gg)
+ delete(tpath)
+ else
+ namelist_prect(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; grab first file
+ end if
+ break
+ else
+ namelist_prect(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; file is missing
+ end if
+ else
+ if (pstring(hh).eq."PRECC.*".or.pstring(hh).eq."PRECC_*") then
+ tpath = str_sub_str(paths(gg),"/*/","/{PRECC,PRECL}/")
+ namelist_prect(gg+maxnumobs) = names(gg)+" | "+tpath+"*{PRECC,PRECL}*.nc | "+syear(gg)+" | "+eyear(gg)
+ else
+ if (pstring(hh).eq."pr_*".or.pstring(hh).eq."pr.*".or.pstring(hh).eq."_pr_*".or.pstring(hh).eq.".pr.*") then
+ tpath = str_sub_str(paths(gg),"/*/","/pr/")
+ end if
+ if (pstring(hh).eq."PRECT.*".or.pstring(hh).eq."PRECT_*") then
+ tpath = str_sub_str(paths(gg),"/*/","/PRECC/")
+ end if
+ if (isvar((/"tpath"/))) then
+ namelist_prect(gg+maxnumobs) = names(gg)+" | "+tpath+"*"+pstring(hh)+"*.nc | "+syear(gg)+" | "+eyear(gg)
+ delete(tpath)
+ else
+ namelist_prect(gg+maxnumobs) = names(gg)+" | missing"
+ end if
+ end if
+ break
+ end if
+ end do
+ end do
+ delete(pstring)
+ if (isvar((/"fsst"/))) then
+ delete(fsst)
+ end if
+ asciiwrite("namelist_byvar/namelist_prect",namelist_prect)
+;----- SNOWDP section---------------
+ namelist_snowdp = new(nsim+maxnumobs,string)
+ if (obsflag) then
+ snowdp_i = ind(vnamesB.eq."SNOWDP")
+ if (.not.ismissing(snowdp_i(0))) then
+ incr = dimsizes(snowdp_i)
+ do gg = 0,incr-1
+ namelist_snowdp(gg) = namesB(snowdp_i(gg))+" | "+pathsB(snowdp_i(gg))+" | "+syearB(snowdp_i(gg))+" | "+eyearB(snowdp_i(gg))
+ end do
+ if (incr.lt.maxnumobs) then ; fill in the missing obs rows with the first obs file, altering the name slightly for .nc write-outs
+ if (.not.ismissing(namelist_snowdp(0))) then
+ nmiss = ind(ismissing(namelist_snowdp(:maxnumobs-1)))
+ do hh = nmiss(0),nmiss(dimsizes(nmiss)-1)
+ namelist_snowdp(hh) = namesB(snowdp_i(0))+"_"+hh+" | "+pathsB(snowdp_i(0))+" | "+syearB(snowdp_i(0))+" | "+eyearB(snowdp_i(0))
+ end do
+ delete(nmiss)
+ end if
+ end if
+ asciiwrite("obs_snowdp",namelist_snowdp(0))
+
delete(incr) + end if + delete(snowdp_i) + end if + sn_string = "SNOWDP_,SNOWDP.,snd_,snd." + do gg = 0,nsim-1 + fsst = systemfunc("bash -c 'ls "+paths(gg)+"*{"+sn_string+"}*.nc 2> /dev/null'") ; /dev/null suppresses all standard error output + if (dimsizes(fsst).eq.1) then + namelist_snowdp(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; grab first file + else + tpath = str_sub_str(paths(gg),"/*/","/{SNOWDP,snd}/") ; explicitly specify SNOWDP/snd in directory structure to eliminate "/sndmin/" being used + namelist_snowdp(gg+maxnumobs) = names(gg)+" | "+tpath+"*{SNOWDP_,SNOWDP.,snd_,snd.}*.nc | "+syear(gg)+" | "+eyear(gg) + delete(tpath) + end if + delete(fsst) + end do + asciiwrite("namelist_byvar/namelist_snowdp",namelist_snowdp) +;------- MOC section---------------------------- + namelist_moc = new(nsim+maxnumobs,string) + if (obsflag) then + moc_i = ind(vnamesB.eq."MOC") + if (.not.ismissing(moc_i(0))) then + incr = dimsizes(moc_i) + do gg = 0,incr-1 + namelist_moc(gg) = namesB(moc_i(gg))+" | "+pathsB(moc_i(gg))+" | "+syearB(moc_i(gg))+" | "+eyearB(moc_i(gg)) + end do + if (incr.lt.maxnumobs) then ; fill in the missing obs rows with the first obs file, altering the name slightly for .nc write-outs + if (.not.ismissing(namelist_moc(0))) then + nmiss = ind(ismissing(namelist_moc(:maxnumobs-1))) + do hh = nmiss(0),nmiss(dimsizes(nmiss)-1) + namelist_moc(hh) = namesB(moc_i(0))+"_"+hh+" | "+pathsB(moc_i(0))+" | "+syearB(moc_i(0))+" | "+eyearB(moc_i(0)) + end do + delete(nmiss) + end if + end if + asciiwrite("obs_moc",namelist_moc(0)) + delete(incr) + end if + delete(moc_i) + end if + + do gg = 0,nsim-1 + fsst = systemfunc("bash -c 'ls "+paths(gg)+"*{MOC_,MOC.,msftmyz.,msftmyz_,stfmmc.,stfmmc_}*.nc 2> /dev/null'") ; /dev/null suppresses all standard error output + if (dimsizes(fsst).eq.1) then + namelist_moc(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; grab first file + else + tpath = str_sub_str(paths(gg),"/*/","/{MOC,msftmyz,stfmmc}/") + namelist_moc(gg+maxnumobs) = names(gg)+" | "+tpath+"*{MOC_,MOC.,msftmyz.,msftmyz_,stfmmc.,stfmmc_}*.nc | "+syear(gg)+" | "+eyear(gg) + delete(tpath) + end if + delete(fsst) + end do + asciiwrite("namelist_byvar/namelist_moc",namelist_moc) + +;------- aice_nh section---------------------------- + namelist_aice_nh = new(nsim+maxnumobs,string) + if (obsflag) then + aice_nh_i = ind(vnamesB.eq."aice_nh".or.vnamesB.eq."AICE_NH") + if (.not.ismissing(aice_nh_i(0))) then + incr = dimsizes(aice_nh_i) + do gg = 0,incr-1 + namelist_aice_nh(gg) = namesB(aice_nh_i(gg))+" | "+pathsB(aice_nh_i(gg))+" | "+syearB(aice_nh_i(gg))+" | "+eyearB(aice_nh_i(gg)) + end do + if (incr.lt.maxnumobs) then ; fill in the missing obs rows with the first obs file, altering the name slightly for .nc write-outs + if (.not.ismissing(namelist_aice_nh(0))) then + nmiss = ind(ismissing(namelist_aice_nh(:maxnumobs-1))) + do hh = nmiss(0),nmiss(dimsizes(nmiss)-1) + namelist_aice_nh(hh) = namesB(aice_nh_i(0))+"_"+hh+" | "+pathsB(aice_nh_i(0))+" | "+syearB(aice_nh_i(0))+" | "+eyearB(aice_nh_i(0)) + end do + delete(nmiss) + end if + end if + asciiwrite("obs_aice_nh",namelist_aice_nh(0)) + delete(incr) + end if + delete(aice_nh_i) + end if + + do gg = 0,nsim-1 + fsst = systemfunc("bash -c 'ls "+paths(gg)+"*{aice_nh.,aice.,sic_,sic.,.CN.,_CN_}*.nc 2> /dev/null'") ; /dev/null suppresses all standard error output + if (dimsizes(fsst).eq.1) then + namelist_aice_nh(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; grab first 
file + else + tpath = str_sub_str(paths(gg),"/*/","/{aice,sic,aice_nh,CN}/") + namelist_aice_nh(gg+maxnumobs) = names(gg)+" | "+tpath+"*{aice_nh.,aice.,sic_,sic.,.CN.,_CN_}*.nc | "+syear(gg)+" | "+eyear(gg) + delete(tpath) + end if + delete(fsst) + end do + asciiwrite("namelist_byvar/namelist_aice_nh",namelist_aice_nh) +;------- aice_sh section---------------------------- + namelist_aice_sh = new(nsim+maxnumobs,string) + if (obsflag) then + aice_sh_i = ind(vnamesB.eq."aice_sh".or.vnamesB.eq."AICE_SH") + if (.not.ismissing(aice_sh_i(0))) then + incr = dimsizes(aice_sh_i) + do gg = 0,incr-1 + namelist_aice_sh(gg) = namesB(aice_sh_i(gg))+" | "+pathsB(aice_sh_i(gg))+" | "+syearB(aice_sh_i(gg))+" | "+eyearB(aice_sh_i(gg)) + end do + if (incr.lt.maxnumobs) then ; fill in the missing obs rows with the first obs file, altering the name slightly for .nc write-outs + if (.not.ismissing(namelist_aice_sh(0))) then + nmiss = ind(ismissing(namelist_aice_sh(:maxnumobs-1))) + do hh = nmiss(0),nmiss(dimsizes(nmiss)-1) + namelist_aice_sh(hh) = namesB(aice_sh_i(0))+"_"+hh+" | "+pathsB(aice_sh_i(0))+" | "+syearB(aice_sh_i(0))+" | "+eyearB(aice_sh_i(0)) + end do + delete(nmiss) + end if + end if + asciiwrite("obs_aice_sh",namelist_aice_sh(0)) + delete(incr) + end if + delete(aice_sh_i) + end if + + do gg = 0,nsim-1 + fsst = systemfunc("bash -c 'ls "+paths(gg)+"*{aice_sh.,aice.,sic_,sic.,.CN.,_CN_}*.nc 2> /dev/null'") ; /dev/null suppresses all standard error output + if (dimsizes(fsst).eq.1) then + namelist_aice_sh(gg+maxnumobs) = names(gg)+" | "+fsst(0)+" | "+syear(gg)+" | "+eyear(gg) ; grab first file + else + tpath = str_sub_str(paths(gg),"/*/","/{aice,sic,aice_sh,CN}/") + namelist_aice_sh(gg+maxnumobs) = names(gg)+" | "+tpath+"*{aice_sh.,aice.,sic_,sic.,.CN.,_CN_}*.nc | "+syear(gg)+" | "+eyear(gg) + delete(tpath) + end if + delete(fsst) + end do + asciiwrite("namelist_byvar/namelist_aice_sh",namelist_aice_sh) +;---------------------------------------------------------------------------- + if (obsflag) then + delete([/vnamesB,namesB,pathsB,syearB,eyearB/]) + end if + print("Finished: namelist.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/ncfiles.append.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/ncfiles.append.ncl new file mode 100644 index 0000000000..4ec09fa021 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/ncfiles.append.ncl @@ -0,0 +1,142 @@ +; Concatenate all .nc files from same model/observational dataset +; into a single .nc file. 
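As the header above says, this script folds the per-diagnostic NetCDF files for one dataset into a single file; the heavy lifting is NCO's ncks, called through system(). Reduced to its essence (filenames hypothetical):

  f1     = "model.cvdp_data.pdo.1900-2000.nc"   ; per-diagnostic files
  f2     = "model.cvdp_data.amo.1900-2000.nc"
  master = "model.cvdp_data.1900-2000.nc"
  system("ncks -A -h "+f2+" "+f1)      ; -A appends variables, -h leaves the history attribute alone
  system("mv "+f1+" "+master)
  system("ncks -O "+master+" "+master) ; rewrite in place; ncks writes variables alphabetized

The final in-place ncks -O rewrite is the same trick the script uses below to alphabetize the variables in the finished file.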
+ +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +;load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: ncfiles.append.ncl") + + OUTDIR = getenv("OUTDIR") + o = getenv("OBS") +; + if (o.eq."True") then + obsflag = True + else + obsflag = False + end if + + nsim = numAsciiRow("namelist") + na = asciiread("namelist",(/nsim/),"string") + + blankrow = ind(na.eq."") + if (.not.any(ismissing(blankrow))) then + goodrows = ind(na.ne."") + na2 = na(goodrows) + delete(na) + na = na2 + delete(na2) + nsim = dimsizes(na) + end if + + nentry = numAsciiCol("namelist") + names = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + + delim = "|" + do gg = 0,nsim-1 + names(gg) = str_sub_str(str_sub_str(str_sub_str(str_sub_str(str_sub_str(str_strip(str_get_field(na(gg),1,delim))," ","_"),"/","_"),"'","_"),"(","_"),")","_") + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + + do gg = 0,nsim-1 + fils = systemfunc("ls "+OUTDIR+names(gg)+".*.nc 2> /dev/null") + if (.not.ismissing(fils(0))) then + dimf = dimsizes(fils) + ofile = OUTDIR+names(gg)+".cvdp_data."+syear(gg)+"-"+eyear(gg)+".nc" + if (dimf.eq.1) then + system("mv "+fils(0)+" "+ofile) + else + if (isfilepresent(ofile)) then ; if file master is present append individual data files to file master. + do hh = 0,dimf-1 + if (fils(hh).ne.ofile) then + system("ncks -A -h "+fils(hh)+" "+ofile) + end if + end do + else ; if file master is not present, append individual data files to last file in list, + do hh = 0,dimf-2 ; and when done move the last file to be the master file + system("ncks -A -h "+fils(hh)+" "+fils(dimf-1)) + end do + system("mv "+fils(dimf-1)+" "+ofile) + end if + if (dimsizes(fils(:dimf-2)).ge.2) then + system("rm "+str_sub_str(str_join(fils(:dimf-2)," "),ofile,"")) ; remove each script's file, but do not remove the master file (if present) + end if + end if + system("ncks -O "+ofile+" "+ofile) ; done to alphabetize output variable + delete([/dimf,ofile/]) + else +; print("NetCDF files not found for "+names+", skipping appending") + end if + delete(fils) + end do + delete([/nsim,na,blankrow,nentry,names,syear,eyear/]) +;------------------------------------------------ + if (obsflag) then + maxnumobs = asciiread("obs_maxnum",(/1/),"integer") + + namelist_files = (/"psl","prect","trefht","ts","snowdp","moc","aice_nh","aice_sh"/) + delim = "|" + cntr = 0 + namesB = new(maxnumobs*dimsizes(namelist_files),string) + do gg = 0,dimsizes(namelist_files)-1 ; grab all observational dataset names from namelist_$var files + na = asciiread("namelist_byvar/namelist_"+namelist_files(gg),(/maxnumobs/),"string") + namesB(cntr:cntr+maxnumobs-1) = str_sub_str(str_sub_str(str_sub_str(str_sub_str(str_sub_str(str_strip(str_get_field(na,1,delim))," ","_"),"/","_"),"'","_"),"(","_"),")","_") + cntr = cntr+maxnumobs + delete(na) + end do + + namesB = where(namesB.eq."",namesB@_FillValue,namesB) ; for blank names set them to _FillValue + if (any(namesB.eq."missing")) then + namesB(str_match_ind(namesB,"missing")) = namesB@_FillValue ; check for any names containing "missing", set to _FillValue + end if + delete([/delim,cntr,namelist_files/]) + + do gg = 0,dimsizes(namesB)-1 + if (.not.ismissing(namesB(gg))) then + fils = systemfunc("ls 
"+OUTDIR+namesB(gg)+".cvdp_data.*.nc 2> /dev/null") + if (.not.ismissing(fils(0))) then + dimf = dimsizes(fils) + fil0 = tochar(fils(0)) + suffix = tostring(fil0(dimsizes(fil0)-12:dimsizes(fil0)-1)) + delete(fil0) + ofi = OUTDIR+namesB(gg)+".cvdp_data."+suffix + if (dimf.ge.2) then + if (isfilepresent(ofi)) then ; if file master is present append individual data files to file master. + do hh = 0,dimf-1 + if (fils(hh).ne.ofi) then + system("ncks -A -h "+fils(hh)+" "+ofi) + end if + end do + else ; if file master is not present, append individual data files to last file in list, + do hh = 0,dimf-2 ; and when done move the last file to be the master file + system("ncks -A -h "+fils(hh)+" "+fils(dimf-1)) + end do + system("mv "+fils(dimf-1)+" "+ofi) + end if + + if (dimsizes(fils(:dimf-2)).ge.2) then + system("rm "+str_sub_str(str_join(fils(:dimf-2)," "),ofi,"")) ; remove each script's file, but do not remove the master file (if present) + end if + else + if (fils(0).ne.ofi) then + system("mv "+fils(0)+" "+ofi) + end if + end if + system("ncks -O "+ofi+" "+ofi) ; done to alphabetize output variable + delete([/dimf,ofi/]) + else +; print("NetCDF files not found for "+namesB(gg)+", skipping appending") + end if + delete(fils) + end if + end do + delete([/namesB/]) + end if + print("Finished: ncfiles.append.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pdo.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pdo.ncl new file mode 100644 index 0000000000..c6c6d3f35a --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pdo.ncl @@ -0,0 +1,774 @@ +; Calculates the PDO pattern, timeseries, and spectra. +; +; Variables used: ts +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: pdo.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_ts") + na = asciiread("namelist_byvar/namelist_ts",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) 
+;---------TAS Regressions coding------------------------------------------------- + nsim_tas = numAsciiRow("namelist_byvar/namelist_trefht") + na_tas = asciiread("namelist_byvar/namelist_trefht",(/nsim_tas/),"string") + names_tas = new(nsim_tas,"string") + paths_tas = new(nsim_tas,"string") + syear_tas = new(nsim_tas,"integer",-999) + eyear_tas = new(nsim_tas,"integer",-999) + + do gg = 0,nsim_tas-1 + names_tas(gg) = str_strip(str_get_field(na_tas(gg),1,delim)) + paths_tas(gg) = str_strip(str_get_field(na_tas(gg),2,delim)) + syear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),3,delim))) + eyear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),4,delim))) + end do + delete(na_tas) + nyr_tas = eyear_tas-syear_tas+1 +;---------PR Regressions coding------------------------------------------------- + nsim_pr = numAsciiRow("namelist_byvar/namelist_prect") + na_pr = asciiread("namelist_byvar/namelist_prect",(/nsim_pr/),"string") + names_pr = new(nsim_pr,"string") + paths_pr = new(nsim_pr,"string") + syear_pr = new(nsim_pr,"integer",-999) + eyear_pr = new(nsim_pr,"integer",-999) + + do gg = 0,nsim_pr-1 + names_pr(gg) = str_strip(str_get_field(na_pr(gg),1,delim)) + paths_pr(gg) = str_strip(str_get_field(na_pr(gg),2,delim)) + syear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),3,delim))) + eyear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),4,delim))) + end do + delete(na_pr) + nyr_pr = eyear_pr-syear_pr+1 +;------------------------------------------------------------------------------------------------- + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks = gsn_open_wks(wks_type,getenv("OUTDIR")+"pdo") + wks4 = gsn_open_wks(wks_type,getenv("OUTDIR")+"pdo.prreg") + wks2 = gsn_open_wks(wks_type,getenv("OUTDIR")+"pdo.powspec") + wks3 = gsn_open_wks(wks_type,getenv("OUTDIR")+"pdo.timeseries") + + if (COLORMAP.eq."0") then + gsn_define_colormap(wks,"ncl_default") + gsn_define_colormap(wks2,"cb_9step") + gsn_define_colormap(wks3,"ncl_default") + gsn_define_colormap(wks4,"MPL_BrBG") + end if + if (COLORMAP.eq."1") then + gsn_define_colormap(wks,"BlueDarkRed18") + gsn_define_colormap(wks2,"cb_9step") + gsn_define_colormap(wks3,"ncl_default") + gsn_define_colormap(wks4,"BrownBlue12") + end if + map = new(nsim,"graphic") + map_sst = new(nsim,"graphic") + map_tasreg = new(nsim,"graphic") + map_prreg = new(nsim,"graphic") + pspec = new(nsim,"graphic") + xyplot = new(nsim,"graphic") + xyplot2 = new(nsim,"graphic") + if (isfilepresent2("obs_ts")) then + pspec_obs = new(nsim,"graphic") + end if + + tasreg_frame = 1 ; *reg_frame = flag to create regressions .ps/.png files. 
Created/used instead of *reg_plot_flag + ; so that if {tas,pr} regressions are not created for the last simulation listed that .ps/png files are created + prreg_frame = 1 + do ee = 0,nsim-1 + sst = data_read_in(paths(ee),"TS",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(sst,"is_all_missing")) then + delete(sst) + continue + end if + sst = where(sst.le.-1.8,-1.8,sst) ; set all values below -1.8 to -1.8 + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") ; mask out land (this is redundant for data that is already masked) + basemap = d->LSMASK + lsm = landsea_mask(basemap,sst&lat,sst&lon) + sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False) + delete([/lsm,basemap/]) + delete(d) + + if (OPT_CLIMO.eq."Full") then + sst = rmMonAnnCycTLL(sst) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sst + delete(temp_arr&time) + temp_arr&time = cd_calendar(sst&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + sst = calcMonAnomTLL(sst,climo) + delete(climo) + end if + + coswgt=cos(rad*sst&lat) + coswgt!0 = "lat" + coswgt&lat= sst&lat + + do ff = 0,dimsizes(sst&time)-1 + sst(ff,:,:) = (/ sst(ff,:,:) - wgt_areaave(sst(ff,{-60:70},:),coswgt({-60.:70.}),1.0,0) /) + end do + delete(coswgt) + sst2 = sst(lat|:,lon|:,time|:) + delete(sst) + sst_CW= SqrtCosWeight(sst2(time|:,lat|:,lon|:)) + + evecv = eofunc(sst_CW({lat|20:70},{lon|110:260},time|:),2,75) + delete(sst_CW) + pcts = eofunc_ts(sst2({20:70},{110:260},:),evecv,False) + pctsS = dim_standardize(pcts(0,:),0) + delete([/pcts/]) + finarr = sst2(:,:,0) + finarr = finarr@_FillValue + + finarr = (/ regCoef(pctsS,sst2) /) + finarr@syear = syear(ee) + finarr@eyear = eyear(ee) + + pdo = finarr + pc1 = pctsS + pc1!0 = "time" + pc1&time = sst2&time + pc1@units = "1" + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(pc1),False) + if (sig_pcv(0)) then ; if True then significant + pdo@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + pdo@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete([/sig_pcv,evecv/]) + + if (.not.ismissing(pdo({37},{200}))) then + if (pdo({37},{200}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pdo = pdo*-1. + pc1 = pc1*-1. 
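The PDO fields above come from eofunc on sqrt(cos(lat))-weighted anomalies over 20-70N, 110-260E, with eofunc_ts projecting the unweighted data onto the EOFs and dim_standardize normalizing PC1. The same sequence on synthetic data (random values, shapes only; assumes NCL 6.2+ where contributed.ncl functions such as copy_VarCoords auto-load):

  x = random_normal(0.,1.,(/120,26,76/))           ; synthetic (time,lat,lon) anomalies
  x!0 = "time"
  x!1 = "lat"
  x!2 = "lon"
  x&lat = fspan(20.,70.,26)
  x&lon = fspan(110.,260.,76)
  w  = sqrt(cos(0.0174533*x&lat))
  xw = x*conform(x,w,1)                            ; sqrt(cos(lat)) weighting
  copy_VarCoords(x,xw)
  ev  = eofunc(xw(lat|:,lon|:,time|:),2,False)     ; leading two EOFs (time must be rightmost)
  pcs = eofunc_ts(x(lat|:,lon|:,time|:),ev,False)  ; project the data onto the EOFs
  pc1 = dim_standardize(pcs(0,:),0)                ; unit-variance PC1
  print("EOF1 explains "+sprintf("%4.1f",ev@pcvar(0))+"% of the variance")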
+ end if
+ end if
+ delete([/sst2,pctsS,finarr/])
+;---------TAS Regressions coding-------------------------------------------------
+ if (any(ismissing((/syear(ee),syear_tas(ee),eyear(ee),eyear_tas(ee)/)))) then
+ tasreg_plot_flag = 1
+ else
+ if (syear(ee).eq.syear_tas(ee)) then ; check that the ts and tas start and end years match
+ if (eyear(ee).eq.eyear_tas(ee)) then
+ tasreg_plot_flag = 0
+ else
+ tasreg_plot_flag = 1
+ end if
+ else
+ tasreg_plot_flag = 1
+ end if
+ end if
+
+ if (tasreg_plot_flag.eq.0) then
+ tas = data_read_in(paths_tas(ee),"TREFHT",syear_tas(ee),eyear_tas(ee))
+ if (isatt(tas,"is_all_missing")) then
+ tasreg_plot_flag = 1
+ delete(tas)
+ end if
+
+ if (tasreg_plot_flag.eq.0) then ; only continue if both TAS/SST fields are present
+ d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r")
+ basemap = d->LSMASK
+ lsm = landsea_mask(basemap,tas&lat,tas&lon)
+ tas = mask(tas,conform(tas,lsm,(/1,2/)).eq.0,False)
+ delete(lsm)
+
+ if (OPT_CLIMO.eq."Full") then
+ tas = rmMonAnnCycTLL(tas)
+ else
+ check_custom_climo(names_tas(ee),syear_tas(ee),eyear_tas(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+ temp_arr = tas
+ delete(temp_arr&time)
+ temp_arr&time = cd_calendar(tas&time,1)
+ if (CLIMO_SYEAR.lt.0) then
+ climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+ else
+ climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+ end if
+ delete(temp_arr)
+ tas = calcMonAnomTLL(tas,climo)
+ delete(climo)
+ end if
+ finreg_tas = tas(0,:,:)
+ finreg_tas = (/ regCoef(pc1,tas(lat|:,lon|:,time|:)) /)
+ delete(tas)
+ end if
+ end if
+;---------PR Regressions coding-------------------------------------------------
+ if (any(ismissing((/syear(ee),syear_pr(ee),eyear(ee),eyear_pr(ee)/)))) then
+ prreg_plot_flag = 1
+ else
+ if (syear(ee).eq.syear_pr(ee)) then ; check that the ts and pr start and end years match
+ if (eyear(ee).eq.eyear_pr(ee)) then
+ prreg_plot_flag = 0
+ else
+ prreg_plot_flag = 1
+ end if
+ else
+ prreg_plot_flag = 1
+ end if
+ end if
+
+ if (prreg_plot_flag.eq.0) then
+ pr = data_read_in(paths_pr(ee),"PRECT",syear_pr(ee),eyear_pr(ee))
+ if (isatt(pr,"is_all_missing")) then
+ prreg_plot_flag = 1
+ delete(pr)
+ end if
+
+ if (prreg_plot_flag.eq.0) then ; only continue if both SST/PR fields are present
+ if (OPT_CLIMO.eq."Full") then
+ pr = rmMonAnnCycTLL(pr)
+ else
+ check_custom_climo(names_pr(ee),syear_pr(ee),eyear_pr(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+ temp_arr = pr
+ delete(temp_arr&time)
+ temp_arr&time = cd_calendar(pr&time,1)
+ if (CLIMO_SYEAR.lt.0) then
+ climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+ else
+ climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+ end if
+ delete(temp_arr)
+ pr = calcMonAnomTLL(pr,climo)
+ delete(climo)
+ end if
+ finreg_pr = pr(0,:,:)
+ finreg_pr = (/ regCoef(pc1,pr(lat|:,lon|:,time|:)) /)
+ delete(pr)
+ end if
+ end if
+;---------------------------------------------------------------------------------------------
+ if (tasreg_frame.eq.1.and.tasreg_plot_flag.eq.0) then ; tasreg_frame = flag to create regressions .ps/.png files
+ tasreg_frame = 0
+ end if
+ if (prreg_frame.eq.1.and.prreg_plot_flag.eq.0) then ; prreg_frame = flag to create regressions .ps/.png files
+ prreg_frame = 0
+ end if
+;---------------------------------------------------------------------------------------------
+ if (OUTPUT_DATA.eq."True") then
+ modname = str_sub_str(names(ee)," ","_")
+ bc = (/"/","'","(",")"/)
+ do gg = 
0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.pdo."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->pdo_pattern_mon = set_varAtts(pdo,"PDO spatial pattern (monthly)","","") + z->pdo_timeseries_mon = set_varAtts(pc1,"PDO normalized principal component timeseries (monthly)","1","") + delete([/modname,fn/]) + if (tasreg_plot_flag.eq.0) then + modname = str_sub_str(names_tas(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.pdo.tas."+syear_tas(ee)+"-"+eyear_tas(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z_tas = addfile(fn,"c") + z_tas@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z_tas@notes = "Data from "+names_tas(ee)+" from "+syear_tas(ee)+"-"+eyear_tas(ee) + if (OPT_CLIMO.eq."Full") then + z_tas@climatology = syear_tas(ee)+"-"+eyear_tas(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z_tas@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z_tas@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z_tas@Conventions = "CF-1.6" + else + z_tas = addfile(fn,"w") + end if + z_tas->pdo_tas_regression_mon = set_varAtts(finreg_tas,"tas regression onto PDO timeseries (monthly)","","") + delete([/modname,fn,z_tas/]) + end if + if (prreg_plot_flag.eq.0) then + modname = str_sub_str(names_pr(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.pdo.pr."+syear_pr(ee)+"-"+eyear_pr(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z_pr = addfile(fn,"c") + z_pr@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z_pr@notes = "Data from "+names_pr(ee)+" from "+syear_pr(ee)+"-"+eyear_pr(ee) + if (OPT_CLIMO.eq."Full") then + z_pr@climatology = syear_pr(ee)+"-"+eyear_pr(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z_pr@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z_pr@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z_pr@Conventions = "CF-1.6" + else + z_pr = addfile(fn,"w") + end if + z_pr->pdo_pr_regression_mon = set_varAtts(finreg_pr,"pr regression onto PDO timeseries (monthly)","","") + delete([/modname,fn,z_pr/]) 
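Each diagnostic opens its output file with addfile(...,"c") on first touch and "w" thereafter, which is what lets several scripts write into one per-model file; global attributes are set only at creation. The bare idiom (path hypothetical; the scripts use their own isfilepresent2 wrapper where this sketch uses NCL's isfilepresent):

  fn = "example.cvdp_data.1900-2000.nc"   ; hypothetical output file
  if (.not.isfilepresent(fn)) then
    z = addfile(fn,"c")                   ; create on first use
    z@notes = "example attribute"         ; global attributes at creation time
  else
    z = addfile(fn,"w")                   ; reopen for further writes
  end if
  x = ispan(1,12,1)
  x!0 = "time"
  z->example_var = x                      ; write or overwrite a variable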
+ end if
+ end if
+;------------------------------------------------------------------------
+ iopt = 0
+ jave = (7*nyr(ee))/100
+ val1 = .95
+ val2 = .99
+ if (jave.eq.0) then
+ jave = 1
+ end if
+ pct = 0.1
+ spectra_mvf = False ; missing value flag
+ if (any(ismissing(pc1))) then ; check for missing data
+ print("Missing data detected for "+names(ee)+", not creating PDO spectra")
+ spectra_mvf = True
+ if (isfilepresent2("obs_ts").and.ee.eq.0) then
+ spectra_mvf_obs = True ; missing value flag
+ end if
+ else
+ if (isfilepresent2("obs_ts").and.ee.eq.0) then
+ spectra_mvf_obs = False ; missing value flag
+ end if
+ sdof = specx_anal(pc1,iopt,jave,pct) ; pc1 is standardized
+ splt1 = specx_ci(sdof,val1,val2)
+ if (OUTPUT_DATA.eq."True") then
+ splt1!0 = "ncurves"
+ splt1&ncurves = ispan(0,3,1)
+ splt1&ncurves@long_name = "power spectra curves"
+ splt1&ncurves@units = "1"
+ splt1!1 = "frequency"
+ splt1&frequency = sdof@frq
+ splt1&frequency@units = "1"
+ splt1@units_info = "df refers to frequency interval; data are standardized so there are no physical units"
+ splt1@units = "1/df"
+ splt1@info = "(0,:)=spectrum,(1,:)=Markov red noise spectrum, (2,:)="+val1+"% confidence bound for Markov, (3,:)="+val2+"% confidence bound for Markov"
+ z->pdo_spectra = set_varAtts(splt1,"PDO (monthly) power spectra, Markov spectrum and confidence curves","","")
+ end if
+ if (isfilepresent2("obs_ts").and.ee.eq.0) then
+ sdof_obs = sdof
+ end if
+ delete([/iopt,jave,pct/])
+ end if
+ if (isvar("z")) then
+ delete(z)
+ end if
+;========================================================================
+ res = True
+ res@mpProjection = "WinkelTripel"
+ res@mpGeophysicalLineColor = "gray42"
+ res@mpPerimOn = False
+ res@mpGridLatSpacingF = 90 ; change latitude line spacing
+ res@mpGridLonSpacingF = 180. ; change longitude line spacing
+ res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter
+ res@mpGridAndLimbOn = True ; turn on lat/lon lines
+ res@mpFillOn = False
+ res@mpCenterLonF = 210.
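The spectra block above hands the standardized PC to specx_anal and wraps the result with specx_ci, which returns four curves: the spectrum, the fitted Markov red-noise spectrum, and its 95% and 99% confidence bounds. On a synthetic series the same calls look like (values random):

  ts   = random_normal(0.,1.,1200)        ; synthetic monthly time series
  sdof = specx_anal(ts,0,7,0.10)          ; iopt=0 (remove mean), jave=7 smoothing, 10% taper
  splt = specx_ci(sdof,0.95,0.99)         ; (4,nfreq) array of curves
  print("peak at frequency "+sdof@frq(maxind(sdof@spcx)))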
+ res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + res@vpYF = 0.95 + res@vpHeightF = 0.3 + res@vpXF = 0.2 + res@vpWidthF = 0.6 + +; res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + + if (COLORMAP.eq."0") then + res@cnLevels = fspan(-.65,.65,27) + end if + if (COLORMAP.eq."1") then + res@cnLevels = fspan(-.8,.8,17) + end if + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnRightString = pdo@pcvar + res@gsnCenterString = names(ee) + + res4 = res ; res4 = pr regression resources + delete(res4@cnLevels) + if (COLORMAP.eq.0) then + res4@cnLevels = (/-5,-4,-3,-2,-1,-.75,-.5,-.25,-.1,0,.1,.25,.5,.75,1,2,3,4,5/) + else + res4@cnLevels = (/-3,-2,-1,-.5,-.1,0,.1,.5,1,2,3/) + end if + + res2 = True ; res2 = tas regression resources + res2@gsnDraw = False + res2@gsnFrame = False + res2@cnLevelSelectionMode = "ExplicitLevels" + res2@cnLevels = res@cnLevels + + res2@cnLineLabelsOn = False + res2@cnFillOn = True + res2@cnLinesOn = False + res2@cnFillMode = "AreaFill" + res2@lbLabelBarOn = False + res2@cnInfoLabelOn = False + res2@gsnRightString = "" + res2@gsnLeftString = "" + res2@gsnCenterString = "" + res2@gsnAddCyclic = True + + if (isfilepresent2("obs_ts").and.ee.eq.0) then ; for pattern correlation table + patcor = new((/nsim,dimsizes(pdo&lat),dimsizes(pdo&lon)/),typeof(pdo)) + patcor!1 = "lat" + patcor&lat = pdo&lat + patcor!2 = "lon" + patcor&lon = pdo&lon + patcor(ee,:,:) = (/ pdo /) + end if + if (isfilepresent2("obs_ts").and.ee.ge.1.and.isvar("patcor")) then + patcor(ee,:,:) = (/ totype(linint2(pdo&lon,pdo&lat,pdo,True,patcor&lon,patcor&lat,0),typeof(patcor)) /) + end if + + map(ee) = gsn_csm_contour_map(wks,pdo,res) + + if (tasreg_plot_flag.eq.0) then + if (names(ee).eq.names_tas(ee)) then + res@gsnCenterString = names(ee) + else + res@gsnCenterString = names(ee)+" / "+names_tas(ee) + end if + map_sst(ee) = gsn_csm_contour_map(wks,pdo,res) + map_tasreg(ee) = gsn_csm_contour(wks,finreg_tas,res2) + overlay(map_sst(ee),map_tasreg(ee)) + delete([/finreg_tas/]) + end if + delete([/pdo/]) + if (prreg_plot_flag.eq.0) then + res4@gsnCenterString = names_pr(ee) + map_prreg(ee) = gsn_csm_contour_map(wks4,finreg_pr,res4) + delete(finreg_pr) + end if + + pres = True + pres@vpXF = 0.07 + pres@trYMinF = 0. + pres@trXMinF = 0.0 +; pres@trYMaxF = 82. 
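For the pattern-correlation table above, models on other grids are first bilinearly regridded onto the reference grid with linint2, then compared via pattern_cor with cos(lat) weights. A self-checking sketch with a zonally uniform field (toy grids; the interpolated pattern correlates at ~1.0 by construction):

  lat_s = fspan(-87.5,87.5,36)
  lon_s = fspan(0.,350.,36)
  fld   = conform_dims((/36,36/),cos(0.0174533*lat_s),0)  ; source field = cos(lat)
  lat_r = fspan(-85.,85.,69)                              ; reference grid inside the source domain
  lon_r = fspan(0.,355.,72)
  ref   = conform_dims((/69,72/),cos(0.0174533*lat_r),0)  ; same field on the reference grid
  fldr  = linint2(lon_s,lat_s,fld,True,lon_r,lat_r,0)     ; bilinear, cyclic in longitude
  w     = cos(0.0174533*lat_r)
  print("pattern correlation = "+pattern_cor(ref,fldr,w,0))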
+ pres@trXMaxF = 0.0832 + pres@tiYAxisString = "Power" ; yaxis + pres@xyLineColor = "black" + pres@gsnFrame = False + pres@gsnDraw = False + + pres@tmXBLabelDeltaF = -.8 + pres@tmXTLabelDeltaF = -.8 + pres@pmLegendDisplayMode = "Never" + pres@xyLineThicknesses = (/3.5,2.,1.,1./) + pres@xyDashPatterns = (/0,0,0,0/) + pres@xyLineColors = (/"foreground","red","blue","green"/) + pres@xyLabelMode = "custom" + pres@xyLineLabelFontColors = pres@xyLineColors + pres@xyExplicitLabels = (/"","",val1*100+"%",val2*100+"%"/) + pres@tmXTOn = True + pres@tmYROn = False + pres@tmXTLabelsOn = True + pres@tmXUseBottom = False + pres@tmXTMode = "Explicit" + pres@tmXBMode = "Explicit" + pres@tmXTValues = (/".00167",".00833",".01667",".02778",".0416",".0556",".0832"/) + pres@tmXTLabels = (/"50","10","5","3","2","1.5","1"/) + pres@tmXBValues = (/".0",".01",".02",".03",".042",".056",".083"/) + pres@tmXBLabels = pres@tmXBValues + pres@tmXTLabelFontHeightF = 0.018 + pres@tmXBLabelFontHeightF = 0.018 + pres@tmYLLabelFontHeightF = 0.018 + pres@tiYAxisString = "Variance" ;"Power (~S~o~N~C~S~2~N~ / cycles mo~S~-1~N~)" ; yaxis + pres@tiXAxisString = "Frequency (cycles mo~S~-1~N~)" + pres@tiMainString = "" + pres@txFontHeightF = 0.015 + pres@xyLineLabelFontHeightF = 0.022 + pres@tiXAxisFontHeightF = 0.025 + pres@tiYAxisFontHeightF = 0.025 + pres@tiMainFontHeightF = 0.03 + pres@gsnRightStringOrthogonalPosF = -0.115 + + pres@tiMainOn = False + pres@gsnCenterString = "Period (years)" + pres@gsnCenterStringFontHeightF = pres@tiYAxisFontHeightF + pres@gsnRightStringFontHeightF = pres@tiYAxisFontHeightF - 0.005 + pres@gsnRightString = syear(ee)+"-"+eyear(ee)+" " + pres@gsnLeftString = "" + if (wks_type.eq."png") then + pres@xyLineThicknessF = 3.5 + res@mpGeophysicalLineThicknessF = 2. + else + pres@xyLineThicknessF = 1.5 + res@mpGeophysicalLineThicknessF = 1. 
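The doubled x-axis above labels the bottom in frequency (cycles per month) and the top in period (years); the two are related by period_years = 1/(12*frequency). Checking a few of the tick pairs used above:

  freq = (/0.00167, 0.00833, 0.0832/)     ; cycles per month (tmXTValues subset)
  print("period (years) = "+sprintf("%5.1f",1./(12.*freq)))

which recovers the 50-, 10-, and 1-year labels assigned to tmXTLabels.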
+ end if + pres@gsnCenterString = names(ee) + if (spectra_mvf.eq.False) then + pspec(ee) = gsn_csm_xy(wks2,sdof@frq,splt1,pres) + + if (isfilepresent2("obs_ts").and.ee.ge.1.and.spectra_mvf_obs.eq.False) then + pres@xyLineColors = (/"gray70","black","black","black"/) + pres@xyCurveDrawOrder = "PreDraw" + pres@gsnCenterString = "" + pres@gsnRightString = "" + pspec_obs(ee) = gsn_csm_xy(wks2,sdof_obs@frq,sdof_obs@spcx,pres) + overlay(pspec(ee),pspec_obs(ee)) + delete(pres@xyCurveDrawOrder) + end if + delete([/sdof,splt1/]) + end if + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False +; xyres@trYMinF = -.65 +; xyres@trYMaxF = .65 +; xyres@tmYLFormat = "f" +; xyres@tmYLMode = "Explicit" +; xyres@tmYLValues = (/-0.5,-0.25,0,0.25,0.5/) +; xyres@tmYLLabels = (/"-0.5","-0.25","0","0.25","0.5"/) +; xyres@tmYLMinorValues = fspan(-1,1,41) + xyres@gsnRightString = "" + xyres@gsnLeftString = "" + xyres@gsnFrame = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + xyres@gsnXYBarChart = False + xyres@gsnAboveYRefLineColor = 185 + xyres@gsnBelowYRefLineColor = 35 + xyres@xyLineThicknessF = 0.1 +; xyres@xyLineColors = (/ xyres@gsnAboveYRefLineColor, xyres@gsnBelowYRefLineColor/) + xyres@xyLineColor = "gray70" + xyres@tiYAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnStringFontHeightF = 0.017 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnStringFontHeightF = 0.024 + end if + xyres@gsnCenterStringOrthogonalPosF = 0.025 + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnCenterString = "" + + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+1.5 + + + xyres2 = xyres + delete(xyres2@gsnXYBarChart) + delete(xyres2@gsnAboveYRefLineColor) + delete(xyres2@gsnBelowYRefLineColor) +; delete(xyres2@xyLineColors) + xyres2@xyLineColor = "black" + if (wks_type.eq."png") then + xyres2@xyLineThicknessF = 3.5 + else + xyres2@xyLineThicknessF = 2.5 + end if + + xyres@gsnCenterString = names(ee) + xyplot(ee) = gsn_csm_xy(wks3,fspan(syear(ee),eyear(ee)+.91667,dimsizes(pc1)),pc1,xyres) ; use standardized timeseries + xyplot2(ee) = gsn_csm_xy(wks3,fspan(syear(ee),eyear(ee)+.91667,dimsizes(pc1)),runave(pc1,61,0),xyres2) + overlay(xyplot(ee),xyplot2(ee)) + + delete([/val1,val2,pc1,res,pres,xyres,xyres2/]) + end do + + if (isvar("patcor")) then ; for pattern correlation table + clat = cos(0.01745329*patcor&lat) + finpr = "PDO (Monthly) " ; Must be 18 characters long + line3 = " " ; Must be 18 characters long + line4 = line3 + header = (/"","Pattern Correlations/RMS Differences Observations vs. 
Model(s)",""/) + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor(hh,:,:)))) then + finpr = finpr+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr = finpr+sprintf(format2,(pattern_cor(patcor(0,:,:),patcor(hh,:,:),clat,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor(0,:,:))),ndtooned(NewCosWeight(patcor(hh,:,:)))))) + end if + end do + if (dimsizes(tochar(line4)).ge.8190) then ; system or fortran compiler limit + print("Metrics table warning: Not creating metrics table as size of comparison results in a invalid ascii row size.") + else + write_table(getenv("OUTDIR")+"metrics.pdo.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics.pdo.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics.pdo.txt","a",[/line4/],"%s") + write_table(getenv("OUTDIR")+"metrics.pdo.txt","a",[/finpr/],"%s") + end if + delete([/finpr,line3,line4,format2,format3,nchar,ntc,clat,patcor,dimY,ntb,header/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.55 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + panres@lbLabelFontHeightF = 0.013 + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@txString = "PDO (Monthly)" + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + gsn_panel2(wks,map,(/nrow,ncol/),panres) + + if (tasreg_frame.eq.0) then + panres@txString = "PDO SST/TAS Regressions (Monthly)" + gsn_panel2(wks,map_sst,(/nrow,ncol/),panres) + end if + delete(wks) + + if (prreg_frame.eq.0) then + panres@txString = "PDO PR Regressions (Monthly)" + gsn_panel2(wks4,map_prreg,(/nrow,ncol/),panres) + end if + delete(wks4) + + + delete(panres@gsnPanelLabelBar) + panres@txString = "PDO (Monthly)" + gsn_panel2(wks2,pspec,(/nrow,ncol/),panres) + delete(wks2) + + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) ;(/nsim/2+1,nsim/8+1/) + end if + panres@txString = "PDO (Monthly)" + gsn_panel2(wks3,xyplot,lp,panres) + delete(wks3) + delete([/map,pspec,syear,eyear,nyr,nyr_max,lp/]) +;-------------------------------------------------------------------------------------------------- + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + if (tasreg_frame.eq.0) then + system("mv "+OUTDIR+"pdo.000001.png "+OUTDIR+"pdo.png") + system("mv "+OUTDIR+"pdo.000002.png "+OUTDIR+"pdo.tasreg.png") + end if + else + if (tasreg_frame.eq.0) then + system("psplit "+OUTDIR+"pdo.ps "+OUTDIR+"pdo_nn") + system("mv "+OUTDIR+"pdo_nn0001.ps "+OUTDIR+"pdo.ps") + system("mv "+OUTDIR+"pdo_nn0002.ps "+OUTDIR+"pdo.tasreg.ps") + end if + end if + print("Finished: pdo.ncl") +end + diff --git 
a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pr.mean_stddev.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pr.mean_stddev.ncl new file mode 100644 index 0000000000..7dee4fec1d --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pr.mean_stddev.ncl @@ -0,0 +1,573 @@ +; Calculates precipitation global means, zonal means, and standard deviations +; +; Variables used: pr +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: pr.mean_stddev.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_prect") + na = asciiread("namelist_byvar/namelist_prect",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_stddev_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.stddev.djf") + wks_stddev_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.stddev.mam") + wks_stddev_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.stddev.jja") + wks_stddev_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.stddev.son") + wks_stddev_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.stddev.ann") + wks_mean = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.mean") + wks_za_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.za.djf") + wks_za_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.za.mam") + wks_za_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.za.jja") + wks_za_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.za.son") + wks_za_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.za.ann") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_stddev_djf,"precip3_16lev") + gsn_define_colormap(wks_stddev_mam,"precip3_16lev") + gsn_define_colormap(wks_stddev_jja,"precip3_16lev") + gsn_define_colormap(wks_stddev_son,"precip3_16lev") + gsn_define_colormap(wks_stddev_ann,"precip3_16lev") + gsn_define_colormap(wks_mean,"precip3_16lev") + gsn_define_colormap(wks_za_djf,"cb_9step") + gsn_define_colormap(wks_za_mam,"cb_9step") + gsn_define_colormap(wks_za_jja,"cb_9step") + gsn_define_colormap(wks_za_son,"cb_9step") + gsn_define_colormap(wks_za_ann,"cb_9step") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_stddev_djf,"cb_rainbow") + gsn_define_colormap(wks_stddev_mam,"cb_rainbow") + gsn_define_colormap(wks_stddev_jja,"cb_rainbow") + gsn_define_colormap(wks_stddev_son,"cb_rainbow") + gsn_define_colormap(wks_stddev_ann,"cb_rainbow") + gsn_define_colormap(wks_mean,"BlueDarkRed18") + gsn_define_colormap(wks_za_djf,"cb_9step") + gsn_define_colormap(wks_za_mam,"cb_9step") + 
gsn_define_colormap(wks_za_jja,"cb_9step") + gsn_define_colormap(wks_za_son,"cb_9step") + gsn_define_colormap(wks_za_ann,"cb_9step") + end if + + plot_mean_djf = new(nsim,"graphic") + plot_mean_mam = new(nsim,"graphic") + plot_mean_jja = new(nsim,"graphic") + plot_mean_son = new(nsim,"graphic") + plot_mean_ann = new(nsim,"graphic") + plot_stddev_djf = new(nsim,"graphic") + plot_stddev_mam = new(nsim,"graphic") + plot_stddev_jja = new(nsim,"graphic") + plot_stddev_son = new(nsim,"graphic") + plot_stddev_ann = new(nsim,"graphic") + + plot_za_djf = new(nsim,"graphic") + plot_za_mam = new(nsim,"graphic") + plot_za_jja = new(nsim,"graphic") + plot_za_son = new(nsim,"graphic") + plot_za_ann = new(nsim,"graphic") + + if (isfilepresent2("obs_pr")) then + c1 = 1 + else + c1 = 76 + end if +; color = (/c1,2,6,11,5,3,7,15,23,31,39,47,55,63,71,79,c1,2,6,11,5,3,7,15,23,31,39,47,55,63,71,79,c1,2,6,11,5,3,7,15,23,31,39,47,55,63,71,79,c1,2/) +; dash = (/0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,3,3/) + + if (nsim.le.15) then + color = (/c1,2,6,11,5,3,7,15,23,31,39,47,55,63,71,79/) + dash = (/0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0/) + else + zt = (nsim/16)+1 + color = new((/zt*16/),integer) + dash = color + eind = 0 + do dd = 0,zt-1 + color(eind:eind+15) = (/c1,2,6,11,5,3,7,15,23,31,39,47,55,63,71,79/) + if (dd.le.16) then + dash(eind:eind+15) = dd + else + dash(eind:eind+15) = mod(dd,16) + end if + eind = eind+16 + end do + delete([/zt,eind/]) + end if + + do ee = 0,nsim-1 + ppt = data_read_in(paths(ee),"PRECT",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(ppt,"is_all_missing")) then + delete(ppt) + continue + end if + do ff = 0,1 + pptT = ppt + if (ff.eq.1) then + if (OPT_CLIMO.eq."Full") then + pptT = rmMonAnnCycTLL(pptT) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = pptT + delete(temp_arr&time) + temp_arr&time = cd_calendar(pptT&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + pptT = calcMonAnomTLL(pptT,climo) + delete(climo) + end if + end if + ppt_seas = runave_n_Wrap(pptT,3,0,0) + ppt_seas(0,:,:) = (/ dim_avg_n(pptT(:1,:,:),0) /) + ppt_seas(dimsizes(pptT&time)-1,:,:) = (/ dim_avg_n(pptT(dimsizes(pptT&time)-2:,:,:),0) /) + ppt_ann = runave_n_Wrap(pptT,12,0,0) + delete(pptT) + + if (ff.eq.0) then + ppt_mean_djf = dim_avg_n_Wrap(ppt_seas(0::12,:,:),0) + ppt_mean_mam = dim_avg_n_Wrap(ppt_seas(3::12,:,:),0) + ppt_mean_jja = dim_avg_n_Wrap(ppt_seas(6::12,:,:),0) + ppt_mean_son = dim_avg_n_Wrap(ppt_seas(9::12,:,:),0) + ppt_mean_ann = dim_avg_n_Wrap(ppt_ann(5::12,:,:),0) + + ppt_zamean_djf = dim_avg_n_Wrap(ppt_mean_djf,1) + ppt_zamean_mam = dim_avg_n_Wrap(ppt_mean_mam,1) + ppt_zamean_jja = dim_avg_n_Wrap(ppt_mean_jja,1) + ppt_zamean_son = dim_avg_n_Wrap(ppt_mean_son,1) + ppt_zamean_ann = dim_avg_n_Wrap(ppt_mean_ann,1) + end if + if (ff.eq.1) then + ppt_sd_djf = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),ppt_seas(0::12,:,:),False,False,0),0) + ppt_sd_mam = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),ppt_seas(3::12,:,:),False,False,0),0) + ppt_sd_jja = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),ppt_seas(6::12,:,:),False,False,0),0) + ppt_sd_son = 
dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),ppt_seas(9::12,:,:),False,False,0),0) + ppt_sd_ann = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),ppt_ann(5::12,:,:),False,False,0),0) + end if + delete([/ppt_seas,ppt_ann/]) + end do + delete(ppt) + copy_VarMeta(ppt_mean_djf,ppt_sd_djf) + copy_VarMeta(ppt_mean_mam,ppt_sd_mam) + copy_VarMeta(ppt_mean_jja,ppt_sd_jja) + copy_VarMeta(ppt_mean_son,ppt_sd_son) + copy_VarMeta(ppt_mean_ann,ppt_sd_ann) + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.pr.mean_stddev."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + + z->pr_spatialmean_djf = set_varAtts(ppt_mean_djf,"pr mean (DJF)","","") + z->pr_spatialmean_mam = set_varAtts(ppt_mean_mam,"pr mean (MAM)","","") + z->pr_spatialmean_jja = set_varAtts(ppt_mean_jja,"pr mean (JJA)","","") + z->pr_spatialmean_son = set_varAtts(ppt_mean_son,"pr mean (SON)","","") + z->pr_spatialmean_ann = set_varAtts(ppt_mean_ann,"pr mean (annual)","","") + + z->pr_spatialstddev_djf = set_varAtts(ppt_sd_djf,"pr standard deviation (DJF)","","") + z->pr_spatialstddev_mam = set_varAtts(ppt_sd_mam,"pr standard deviation (MAM)","","") + z->pr_spatialstddev_jja = set_varAtts(ppt_sd_jja,"pr standard deviation (JJA)","","") + z->pr_spatialstddev_son = set_varAtts(ppt_sd_son,"pr standard deviation (SON)","","") + z->pr_spatialstddev_ann = set_varAtts(ppt_sd_ann,"pr standard deviation (annual)","","") + delete(z) + delete([/modname,fn/]) + end if +;========================================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 210. + res@mpOutlineOn = True + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. 
+ end if + res@gsnDraw = False + res@gsnFrame = False + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@cnLevelSelectionMode = "ExplicitLevels" + + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + + sres = res + sres@cnLevels = (/0.5,1,2,3,4,5,6,7,8,9,10,12,14,16,18/) + res@cnLevels = (/.2,.4,.6,1.0,1.5,2.0,2.5,3.5/) + if (COLORMAP.eq.0) then + res@cnFillColors = (/2,4,6,8,10,12,14,16,18/) + end if + if (COLORMAP.eq.1) then + res@cnFillColors = (/35,47,63,79,95,111,124,155,175/) + end if + + + + if (isfilepresent2("obs_prect").and.ee.eq.0) then ; for pattern correlation table + patcor = new((/nsim,dimsizes(ppt_sd_ann&lat),dimsizes(ppt_sd_ann&lon)/),typeof(ppt_sd_ann)) + patcor!1 = "lat" + patcor&lat = ppt_sd_ann&lat + patcor!2 = "lon" + patcor&lon = ppt_sd_ann&lon + patcor(ee,:,:) = (/ ppt_sd_ann /) + end if + if (isfilepresent2("obs_prect").and.ee.ge.1.and.isvar("patcor")) then + patcor(ee,:,:) = (/ totype(linint2(ppt_sd_ann&lon,ppt_sd_ann&lat,ppt_sd_ann,True,patcor&lon,patcor&lat,0),typeof(patcor)) /) + end if + + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnRightString = ppt_mean_djf@units + res@gsnCenterString = names(ee) + plot_stddev_djf(ee) = gsn_csm_contour_map(wks_stddev_djf,ppt_sd_djf,res) + plot_stddev_mam(ee) = gsn_csm_contour_map(wks_stddev_mam,ppt_sd_mam,res) + plot_stddev_jja(ee) = gsn_csm_contour_map(wks_stddev_jja,ppt_sd_jja,res) + plot_stddev_son(ee) = gsn_csm_contour_map(wks_stddev_son,ppt_sd_son,res) + plot_stddev_ann(ee) = gsn_csm_contour_map(wks_stddev_ann,ppt_sd_ann,res) + + sres@gsnLeftString = syear(ee)+"-"+eyear(ee) + sres@gsnRightString = ppt_mean_djf@units + sres@gsnCenterString = names(ee) + plot_mean_djf(ee) = gsn_csm_contour_map(wks_mean,ppt_mean_djf,sres) + plot_mean_mam(ee) = gsn_csm_contour_map(wks_mean,ppt_mean_mam,sres) + plot_mean_jja(ee) = gsn_csm_contour_map(wks_mean,ppt_mean_jja,sres) + plot_mean_son(ee) = gsn_csm_contour_map(wks_mean,ppt_mean_son,sres) + plot_mean_ann(ee) = gsn_csm_contour_map(wks_mean,ppt_mean_ann,sres) + delete([/ppt_sd_djf,ppt_sd_mam,ppt_sd_jja,ppt_sd_son,ppt_sd_ann,ppt_mean_djf,ppt_mean_mam,ppt_mean_jja,ppt_mean_son,ppt_mean_ann,res,sres/]) + + zres = True + zres@vpYF = 0.8 + zres@vpXF = 0.14 + zres@vpWidthF = 0.55 + zres@vpHeightF = 0.55 + zres@trYMinF = 0. + zres@trYMaxF = 11.0 + zres@gsnDraw = False + zres@gsnFrame = False + + zres@tmXTLabelFontHeightF = 0.018 + zres@tmXBLabelFontHeightF = 0.018 + zres@tmYLLabelFontHeightF = 0.018 + zres@tiMainString = "" + zres@txFontHeightF = 0.015 + zres@xyLineLabelFontHeightF = 0.016 + zres@tiXAxisFontHeightF = 0.019 + zres@tiYAxisFontHeightF = 0.019 + zres@tiMainFontHeightF = 0.03 + + zres@pmLegendDisplayMode = "Never" + zres@tiYAxisString = "mm day~S~-1~N~" + + zres@xyLineColor = "black" + zres@xyDashPattern = 0 + if (wks_type.eq."png") then + zres@xyLineThicknessF = 3.5 + if (isfilepresent2("obs_prect").and.ee.eq.0) then + zres@xyLineThicknessF = 7. + end if + else + zres@xyLineThicknessF = 2. + if (isfilepresent2("obs_prect").and.ee.eq.0) then + zres@xyLineThicknessF = 4. 
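+ ; The reference/observational dataset (ee=0) is drawn about twice as
+ ; thick as the model curves so it stands out once the zonal-average
+ ; curves from every simulation are overlaid on one panel.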
+ end if + end if + + zres@xyDashPattern = dash(ee) ;dash(mod(ee,50)) + zres@xyLineColor = color(ee) ;color(mod(ee,50)) + zres@tiMainFont = "helvetica" + + polyres = True + polyres@gsLineColor = color(mod(ee,50)) + polyres@gsLineThicknessF = zres@xyLineThicknessF + polyres@gsLineDashPattern = dash(mod(ee,50)) + + txres = True + if (nsim.le.15) then + txres@txFontHeightF = 0.012 + yeval = .02 + end if + if (nsim.ge.16.and.nsim.le.45) then + txres@txFontHeightF = 0.009 + yeval = .0175 + end if + if (nsim.ge.46.and.nsim.le.72) then + txres@txFontHeightF = 0.006 + yeval = .011 + end if + if (nsim.ge.73.and.nsim.le.106) then + txres@txFontHeightF = 0.004 + yeval = .0075 + end if + if (nsim.ge.107.and.nsim.le.228) then + txres@txFontHeightF = 0.002 + yeval = .0035 + end if + if (nsim.ge.229) then + txres@txFontHeightF = 0.001 + yeval = .002 + end if + + txres@txJust = "CenterLeft" + + zres@tiMainString = "PR Zonal Average (DJF)" + zres@gsnRightString = "mm/day" + + plot_za_djf(ee) = gsn_csm_xy(wks_za_djf,ppt_zamean_djf&lat,ppt_zamean_djf,zres) + if (ee.ne.0) then + overlay(plot_za_djf(0),plot_za_djf(ee)) + end if + gsn_text_ndc(wks_za_djf,names(ee),0.765,0.8-(ee*yeval),txres) + gsn_polyline_ndc(wks_za_djf,(/0.72,.75/),(/0.8-(ee*yeval),0.8-(ee*yeval)/),polyres) + + zres@tiMainString = "PR Zonal Average (MAM)" + plot_za_mam(ee) = gsn_csm_xy(wks_za_mam,ppt_zamean_mam&lat,ppt_zamean_mam,zres) + if (ee.ne.0) then + overlay(plot_za_mam(0),plot_za_mam(ee)) + end if + gsn_text_ndc(wks_za_mam,names(ee),0.765,0.8-(ee*yeval),txres) + gsn_polyline_ndc(wks_za_mam,(/0.72,.75/),(/0.8-(ee*yeval),0.8-(ee*yeval)/),polyres) + + zres@tiMainString = "PR Zonal Average (JJA)" + plot_za_jja(ee) = gsn_csm_xy(wks_za_jja,ppt_zamean_jja&lat,ppt_zamean_jja,zres) + if (ee.ne.0) then + overlay(plot_za_jja(0),plot_za_jja(ee)) + end if + gsn_text_ndc(wks_za_jja,names(ee),0.765,0.8-(ee*yeval),txres) + gsn_polyline_ndc(wks_za_jja,(/0.72,.75/),(/0.8-(ee*yeval),0.8-(ee*yeval)/),polyres) + + zres@tiMainString = "PR Zonal Average (SON)" + plot_za_son(ee) = gsn_csm_xy(wks_za_son,ppt_zamean_son&lat,ppt_zamean_son,zres) + if (ee.ne.0) then + overlay(plot_za_son(0),plot_za_son(ee)) + end if + gsn_text_ndc(wks_za_son,names(ee),0.765,0.8-(ee*yeval),txres) + gsn_polyline_ndc(wks_za_son,(/0.72,.75/),(/0.8-(ee*yeval),0.8-(ee*yeval)/),polyres) + + zres@tiMainString = "PR Zonal Average (ANN)" + plot_za_ann(ee) = gsn_csm_xy(wks_za_ann,ppt_zamean_ann&lat,ppt_zamean_ann,zres) + if (ee.ne.0) then + overlay(plot_za_ann(0),plot_za_ann(ee)) + end if + gsn_text_ndc(wks_za_ann,names(ee),0.765,0.8-(ee*yeval),txres) + gsn_polyline_ndc(wks_za_ann,(/0.72,.75/),(/0.8-(ee*yeval),0.8-(ee*yeval)/),polyres) + delete([/zres,polyres,txres,ppt_zamean_djf,ppt_zamean_mam,ppt_zamean_jja,ppt_zamean_son,ppt_zamean_ann/]) + end do + + if (isvar("patcor")) then ; for pattern correlation table + clat = cos(0.01745329*patcor&lat) + finpr = "pr Std Dev (Ann) " ; Must be 18 characters long + line3 = " " ; Must be 18 characters long + line4 = line3 + header = (/"","Pattern Correlations/RMS Differences Observations vs. 
Model(s)",""/) + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor(hh,:,:)))) then + finpr = finpr+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr = finpr+sprintf(format2,(pattern_cor(patcor(0,:,:),patcor(hh,:,:),clat,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor(0,:,:))),ndtooned(NewCosWeight(patcor(hh,:,:)))))) + end if + end do + if (dimsizes(tochar(line4)).ge.8190) then ; system or fortran compiler limit + print("Metrics table warning: Not creating metrics table as size of comparison results in a invalid ascii row size.") + else + write_table(getenv("OUTDIR")+"metrics.pr.mean_stddev.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics.pr.mean_stddev.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics.pr.mean_stddev.txt","a",[/line4/],"%s") + write_table(getenv("OUTDIR")+"metrics.pr.mean_stddev.txt","a",[/finpr/],"%s") + end if + delete([/finpr,line3,line4,format2,format3,nchar,ntc,clat,patcor,dimY,ntb,header/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelFontHeightF = 0.013 + panres@lbLabelStride = 1 + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "PR Standard Deviations (DJF)" + gsn_panel2(wks_stddev_djf,plot_stddev_djf,(/nrow,ncol/),panres) + delete(wks_stddev_djf) + + panres@txString = "PR Standard Deviations (MAM)" + gsn_panel2(wks_stddev_mam,plot_stddev_mam,(/nrow,ncol/),panres) + delete(wks_stddev_mam) + + panres@txString = "PR Standard Deviations (JJA)" + gsn_panel2(wks_stddev_jja,plot_stddev_jja,(/nrow,ncol/),panres) + delete(wks_stddev_jja) + + panres@txString = "PR Standard Deviations (SON)" + gsn_panel2(wks_stddev_son,plot_stddev_son,(/nrow,ncol/),panres) + delete(wks_stddev_son) + + panres@txString = "PR Standard Deviations (Annual)" + gsn_panel2(wks_stddev_ann,plot_stddev_ann,(/nrow,ncol/),panres) + delete(wks_stddev_ann) + + panres@txString = "PR Means (DJF)" + gsn_panel2(wks_mean,plot_mean_djf,(/nrow,ncol/),panres) + + panres@txString = "PR Means (MAM)" + gsn_panel2(wks_mean,plot_mean_mam,(/nrow,ncol/),panres) + + panres@txString = "PR Means (JJA)" + gsn_panel2(wks_mean,plot_mean_jja,(/nrow,ncol/),panres) + + panres@txString = "PR Means (SON)" + gsn_panel2(wks_mean,plot_mean_son,(/nrow,ncol/),panres) + + panres@txString = "PR Means (Annual)" + gsn_panel2(wks_mean,plot_mean_ann,(/nrow,ncol/),panres) + delete(wks_mean) + delete(panres) + + draw(plot_za_djf(0)) + frame(wks_za_djf) + delete(wks_za_djf) + + draw(plot_za_mam(0)) + frame(wks_za_mam) + delete(wks_za_mam) + + draw(plot_za_jja(0)) + frame(wks_za_jja) + delete(wks_za_jja) + + draw(plot_za_son(0)) + 
frame(wks_za_son) + delete(wks_za_son) + + draw(plot_za_ann(0)) + frame(wks_za_ann) + delete(wks_za_ann) +;-------------------------------------------------------------------------------------------------------------------------------------------- + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + system("mv "+OUTDIR+"pr.mean.000001.png "+OUTDIR+"pr.mean.djf.png") + system("mv "+OUTDIR+"pr.mean.000002.png "+OUTDIR+"pr.mean.mam.png") + system("mv "+OUTDIR+"pr.mean.000003.png "+OUTDIR+"pr.mean.jja.png") + system("mv "+OUTDIR+"pr.mean.000004.png "+OUTDIR+"pr.mean.son.png") + system("mv "+OUTDIR+"pr.mean.000005.png "+OUTDIR+"pr.mean.ann.png") + else + system("psplit "+OUTDIR+"pr.mean.ps "+OUTDIR+"pr_m") + system("mv "+OUTDIR+"pr_m0001.ps "+OUTDIR+"pr.mean.djf.ps") + system("mv "+OUTDIR+"pr_m0002.ps "+OUTDIR+"pr.mean.mam.ps") + system("mv "+OUTDIR+"pr_m0003.ps "+OUTDIR+"pr.mean.jja.ps") + system("mv "+OUTDIR+"pr_m0004.ps "+OUTDIR+"pr.mean.son.ps") + system("mv "+OUTDIR+"pr_m0005.ps "+OUTDIR+"pr.mean.ann.ps") + system("rm "+OUTDIR+"pr.mean.ps") + end if + print("Finished: pr.mean_stddev.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pr.trends_timeseries.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pr.trends_timeseries.ncl new file mode 100644 index 0000000000..d537d4286d --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/pr.trends_timeseries.ncl @@ -0,0 +1,550 @@ +; Calculates precipitation global trends and timeseries +; +; Variables used: pr +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: pr.trends_timeseries.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_prect") + na = asciiread("namelist_byvar/namelist_prect",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) 
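+; rad converts degrees to radians for the cosine-of-latitude weights used
+; in the area averages below. A schematic of the calls made later in the
+; loop (gavg is an illustrative name; the script assigns ppt_aa_* instead):
+;   coswgt = cos(rad*ppt&lat)                    ; weight ~ grid-cell area
+;   gavg   = wgt_areaave_Wrap(ppt,coswgt,1.0,0)  ; area-weighted global mean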
+ + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_trends_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.trends.djf") + wks_trends_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.trends.mam") + wks_trends_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.trends.jja") + wks_trends_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.trends.son") + wks_trends_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.trends.ann") + wks_trends_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.trends.mon") + + wks_aa_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.timeseries.djf") + wks_aa_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.timeseries.mam") + wks_aa_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.timeseries.jja") + wks_aa_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.timeseries.son") + wks_aa_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.timeseries.ann") + wks_aa_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.timeseries.mon") + + wks_rt_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"pr.runtrend.mon") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_trends_djf,"precip_diff_12lev") + gsn_define_colormap(wks_trends_mam,"precip_diff_12lev") + gsn_define_colormap(wks_trends_jja,"precip_diff_12lev") + gsn_define_colormap(wks_trends_son,"precip_diff_12lev") + gsn_define_colormap(wks_trends_ann,"precip_diff_12lev") + gsn_define_colormap(wks_trends_mon,"precip_diff_12lev") + gsn_define_colormap(wks_aa_djf,"ncl_default") + gsn_define_colormap(wks_aa_mam,"ncl_default") + gsn_define_colormap(wks_aa_jja,"ncl_default") + gsn_define_colormap(wks_aa_son,"ncl_default") + gsn_define_colormap(wks_aa_ann,"ncl_default") + gsn_define_colormap(wks_aa_mon,"ncl_default") + gsn_define_colormap(wks_rt_mon,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_trends_djf,"BrownBlue12") + gsn_define_colormap(wks_trends_mam,"BrownBlue12") + gsn_define_colormap(wks_trends_jja,"BrownBlue12") + gsn_define_colormap(wks_trends_son,"BrownBlue12") + gsn_define_colormap(wks_trends_ann,"BrownBlue12") + gsn_define_colormap(wks_trends_mon,"BrownBlue12") + gsn_define_colormap(wks_aa_djf,"ncl_default") + gsn_define_colormap(wks_aa_mam,"ncl_default") + gsn_define_colormap(wks_aa_jja,"ncl_default") + gsn_define_colormap(wks_aa_son,"ncl_default") + gsn_define_colormap(wks_aa_ann,"ncl_default") + gsn_define_colormap(wks_aa_mon,"ncl_default") + gsn_define_colormap(wks_rt_mon,"ncl_default") + end if + + map_djf = new(nsim,"graphic") + map_mam = new(nsim,"graphic") + map_jja = new(nsim,"graphic") + map_son = new(nsim,"graphic") + map_ann = new(nsim,"graphic") + map_mon = new(nsim,"graphic") + xy_djf = new(nsim,"graphic") + xy_mam = new(nsim,"graphic") + xy_jja = new(nsim,"graphic") + xy_son = new(nsim,"graphic") + xy_ann = new(nsim,"graphic") + xy_mon = new(nsim,"graphic") + + xy_rt_mon_8 = new(nsim,"graphic") + xy_rt_mon_10 = new(nsim,"graphic") + xy_rt_mon_12 = new(nsim,"graphic") + xy_rt_mon_14 = new(nsim,"graphic") + xy_rt_mon_16 = new(nsim,"graphic") + + if (isfilepresent2("obs_prect")) then + xy_obs_djf = new(nsim,"graphic") + xy_obs_mam = new(nsim,"graphic") + xy_obs_jja = new(nsim,"graphic") + xy_obs_son = new(nsim,"graphic") + xy_obs_ann = new(nsim,"graphic") + xy_obs_mon = new(nsim,"graphic") + end if + do ee = 0,nsim-1 + ppt = data_read_in(paths(ee),"PRECT",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(ppt,"is_all_missing")) then + 
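; data_read_in() attaches an is_all_missing attribute when a dataset
+ ; contains no usable data; such simulations are skipped entirely.
+ 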
delete(ppt) + continue + end if + + if (OPT_CLIMO.eq."Full") then + ppt = rmMonAnnCycTLL(ppt) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = ppt + delete(temp_arr&time) + temp_arr&time = cd_calendar(ppt&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + ppt = calcMonAnomTLL(ppt,climo) + delete(climo) + end if + + coswgt=cos(rad*ppt&lat) + coswgt!0 = "lat" + coswgt&lat= ppt&lat + + ppt_aa_mon = wgt_areaave_Wrap(ppt,coswgt,1.0,0) + tttt = dtrend_msg_n(ispan(0,dimsizes(ppt&time)-1,1),ppt,False,True,0) + ppt_trends_mon = ppt(0,:,:) + ppt_trends_mon = (/ onedtond(tttt@slope, (/dimsizes(ppt&lat),dimsizes(ppt&lon)/) ) /) + ppt_trends_mon = ppt_trends_mon*dimsizes(ppt&time) + ppt_trends_mon@units = ppt@units+" "+nyr(ee)+"yr~S~-1~N~" + delete(tttt) + + ppt_seas = runave_n_Wrap(ppt,3,0,0) + ppt_seas(0,:,:) = (/ dim_avg_n(ppt(:1,:,:),0) /) + ppt_seas(dimsizes(ppt&time)-1,:,:) = (/ dim_avg_n(ppt(dimsizes(ppt&time)-2:,:,:),0) /) + ppt_ann = runave_n_Wrap(ppt,12,0,0) + delete(ppt) + + ppt_trends_seas = ppt_seas(:3,:,:) + ppt_trends_seas = ppt_trends_seas@_FillValue + ppt_trends_ann = ppt_trends_seas(0,:,:) + ppt_aa_seas = new((/4,nyr(ee)/),typeof(ppt_seas)) + ppt_aa_seas!1 = "time" + ppt_aa_seas&time = ispan(syear(ee),eyear(ee),1) + ppt_aa_seas&time@units = "YYYY" + ppt_aa_seas&time@long_name = "time" + ppt_aa_ann = ppt_aa_seas(0,:) + do ff = 0,4 + if (ff.le.3) then + tarr = ppt_seas(ff*3::12,:,:) + end if + if (ff.eq.4) then + tarr = ppt_ann(5::12,:,:) + end if + tttt = dtrend_msg_n(ispan(0,dimsizes(tarr&time)-1,1),tarr,False,True,0) + if (ff.le.3) then + ppt_trends_seas(ff,:,:) = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + ppt_aa_seas(ff,:) = (/ wgt_areaave(tarr,coswgt,1.0,0) /) + end if + if (ff.eq.4) then + ppt_trends_ann = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + ppt_aa_ann = (/ wgt_areaave(tarr,coswgt,1.0,0) /) + end if + delete([/tarr,tttt/]) + end do + ppt_trends_seas = ppt_trends_seas*nyr(ee) + ppt_trends_seas@units = ppt_seas@units+" "+nyr(ee)+"yr~S~-1~N~" + ppt_trends_ann = ppt_trends_ann*nyr(ee) + ppt_trends_ann@units = ppt_ann@units+" "+nyr(ee)+"yr~S~-1~N~" + ppt_aa_seas@units = ppt_seas@units + ppt_aa_ann@units = ppt_ann@units + delete([/ppt_seas,ppt_ann,coswgt/]) + + if (isfilepresent2("obs_prect").and.ee.eq.0) then + ppt_aa_seas@syear = syear(ee) + ppt_aa_seas@eyear = eyear(ee) + ppt_aa_mon@syear = syear(ee) + ppt_aa_mon@eyear = eyear(ee) + ppt_aa_ann@syear = syear(ee) + ppt_aa_ann@eyear = eyear(ee) + ppt_aa_seas_obs = ppt_aa_seas + ppt_aa_mon_obs = ppt_aa_mon + ppt_aa_ann_obs = ppt_aa_ann + end if + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.pr.trends_timeseries."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + 
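; A negative CLIMO_SYEAR is an offset from the end of the record, so the
+ ; climatology window recorded here runs from eyear(ee)+CLIMO_SYEAR
+ ; through eyear(ee)+CLIMO_EYEAR.
+ 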
z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + ppt_aa_seas2 = ppt_aa_seas + ppt_aa_seas2!1 = "TIME" + ppt_aa_seas2&TIME = ispan(syear(ee),eyear(ee),1) + ppt_aa_seas2&TIME@units = "YYYY" + ppt_aa_seas2&TIME@long_name = "time" + ppt_aa_ann2 = ppt_aa_ann + ppt_aa_ann2!0 = "TIME" + ppt_aa_ann2&TIME = ispan(syear(ee),eyear(ee),1) + ppt_aa_ann2&TIME@units = "YYYY" + ppt_aa_ann2&TIME@long_name = "time" + z->pr_global_avg_mon = set_varAtts(ppt_aa_mon,"pr global area-average (monthly)","","") + z->pr_global_avg_djf = set_varAtts(ppt_aa_seas2(0,:),"pr global area-average (DJF)","","") + z->pr_global_avg_mam = set_varAtts(ppt_aa_seas2(1,:),"pr global area-average (MAM)","","") + z->pr_global_avg_jja = set_varAtts(ppt_aa_seas2(2,:),"pr global area-average (JJA)","","") + z->pr_global_avg_son = set_varAtts(ppt_aa_seas2(3,:),"pr global area-average (SON)","","") + z->pr_global_avg_ann = set_varAtts(ppt_aa_ann2,"pr global area-average (annual)","","") + z->pr_trends_djf = set_varAtts(ppt_trends_seas(0,:,:),"pr linear trends (DJF)","","") + z->pr_trends_mam = set_varAtts(ppt_trends_seas(1,:,:),"pr linear trends (MAM)","","") + z->pr_trends_jja = set_varAtts(ppt_trends_seas(2,:,:),"pr linear trends (JJA)","","") + z->pr_trends_son = set_varAtts(ppt_trends_seas(3,:,:),"pr linear trends (SON)","","") + z->pr_trends_ann = set_varAtts(ppt_trends_ann,"pr linear trends (annual)","","") + z->pr_trends_mon = set_varAtts(ppt_trends_mon,"pr linear trends (monthly)","","") + delete(z) + delete(ppt_aa_seas2) + delete(ppt_aa_ann2) + delete([/modname,fn/]) + end if +;======================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. + end if + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 210. 
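+ ; Maps are drawn in the Winkel Tripel projection centered on 210E so
+ ; the Pacific basin is shown unbroken.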
+ res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + + res@cnLevelSelectionMode = "ExplicitLevels" + if (COLORMAP.eq.0) then + res@cnLevels = (/-6,-4,-2,-1,-0.5,-0.2,0,0.2,0.5,1,2,4,6/) + res@cnFillColors = (/2,3,4,5,6,7,8,8,9,10,11,12,13,14/) + end if + if (COLORMAP.eq.1) then + res@cnLevels = (/-4,-2,-1,-0.5,-0.2,0,0.2,0.5,1,2,4/) + end if + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.975 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + + res@gsnRightString = ppt_trends_seas@units + res@gsnCenterString = names(ee) + map_djf(ee) = gsn_csm_contour_map(wks_trends_djf,ppt_trends_seas(0,:,:),res) + map_mam(ee) = gsn_csm_contour_map(wks_trends_mam,ppt_trends_seas(1,:,:),res) + map_jja(ee) = gsn_csm_contour_map(wks_trends_jja,ppt_trends_seas(2,:,:),res) + map_son(ee) = gsn_csm_contour_map(wks_trends_son,ppt_trends_seas(3,:,:),res) + map_ann(ee) = gsn_csm_contour_map(wks_trends_ann,ppt_trends_ann,res) + map_mon(ee) = gsn_csm_contour_map(wks_trends_mon,ppt_trends_mon,res) + + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnFrame = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + + if (wks_type.eq."png") then + xyres@xyLineThicknessF = 4. + else + xyres@xyLineThicknessF = 2.0 + end if + if (isfilepresent2("obs_prect").and.ee.eq.0) then + xyres@xyLineColor = "black" + else + xyres@xyLineColor = "royalblue" + end if + xyres@tiYAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnLeftStringFontHeightF = 0.017 + xyres@gsnRightStringFontHeightF = 0.013 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnLeftStringFontHeightF = 0.024 + xyres@gsnRightStringFontHeightF = 0.020 + end if + xyres@gsnLeftStringOrthogonalPosF = 0.025 + xyres@gsnRightStringOrthogonalPosF = xyres@gsnLeftStringOrthogonalPosF + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnLeftString = "" + xyres@gsnCenterString = "" + xyres@gsnRightString = "" + + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+0.5 + + xyres2 = xyres + xyres2@xyLineColor = "gray60" + xyres2@xyCurveDrawOrder = "PreDraw" + + xyres@gsnLeftString = names(ee) + tttt = dtrend_msg(ispan(0,dimsizes(ppt_aa_seas&time)-1,1),ppt_aa_seas(0,:),False,True) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xyres@trYMinF = min((/min(ppt_aa_seas(0,:)),min(ppt_aa_seas_obs(0,:))/))-.005 + xyres@trYMaxF = max((/max(ppt_aa_seas(0,:)),max(ppt_aa_seas_obs(0,:))/))+.005 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+ppt_trends_seas@units + xy_djf(ee) = gsn_csm_xy(wks_aa_djf,ispan(syear(ee),eyear(ee),1),ppt_aa_seas(0,:),xyres) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xy_obs_djf(ee) = gsn_csm_xy(wks_aa_djf,ispan(ppt_aa_seas_obs@syear,ppt_aa_seas_obs@eyear,1),ppt_aa_seas_obs(0,:),xyres2) + overlay(xy_djf(ee),xy_obs_djf(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(ppt_aa_seas&time)-1,1),ppt_aa_seas(1,:),False,True) + if 
(isfilepresent2("obs_prect").and.ee.ge.1) then + xyres@trYMinF = min((/min(ppt_aa_seas(1,:)),min(ppt_aa_seas_obs(1,:))/))-.005 + xyres@trYMaxF = max((/max(ppt_aa_seas(1,:)),max(ppt_aa_seas_obs(1,:))/))+.005 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+ppt_trends_seas@units + xy_mam(ee) = gsn_csm_xy(wks_aa_mam,ispan(syear(ee),eyear(ee),1),ppt_aa_seas(1,:),xyres) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xy_obs_mam(ee) = gsn_csm_xy(wks_aa_mam,ispan(ppt_aa_seas_obs@syear,ppt_aa_seas_obs@eyear,1),ppt_aa_seas_obs(1,:),xyres2) + overlay(xy_mam(ee),xy_obs_mam(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(ppt_aa_seas&time)-1,1),ppt_aa_seas(2,:),False,True) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xyres@trYMinF = min((/min(ppt_aa_seas(2,:)),min(ppt_aa_seas_obs(2,:))/))-.005 + xyres@trYMaxF = max((/max(ppt_aa_seas(2,:)),max(ppt_aa_seas_obs(2,:))/))+.005 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+ppt_trends_seas@units + xy_jja(ee) = gsn_csm_xy(wks_aa_jja,ispan(syear(ee),eyear(ee),1),ppt_aa_seas(2,:),xyres) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xy_obs_jja(ee) = gsn_csm_xy(wks_aa_jja,ispan(ppt_aa_seas_obs@syear,ppt_aa_seas_obs@eyear,1),ppt_aa_seas_obs(2,:),xyres2) + overlay(xy_jja(ee),xy_obs_jja(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(ppt_aa_seas&time)-1,1),ppt_aa_seas(3,:),False,True) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xyres@trYMinF = min((/min(ppt_aa_seas(3,:)),min(ppt_aa_seas_obs(3,:))/))-.005 + xyres@trYMaxF = max((/max(ppt_aa_seas(3,:)),max(ppt_aa_seas_obs(3,:))/))+.005 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+ppt_trends_seas@units + xy_son(ee) = gsn_csm_xy(wks_aa_son,ispan(syear(ee),eyear(ee),1),ppt_aa_seas(3,:),xyres) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xy_obs_son(ee) = gsn_csm_xy(wks_aa_son,ispan(ppt_aa_seas_obs@syear,ppt_aa_seas_obs@eyear,1),ppt_aa_seas_obs(3,:),xyres2) + overlay(xy_son(ee),xy_obs_son(ee)) + end if + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(ppt_aa_ann&time)-1,1),ppt_aa_ann,False,True) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xyres@trYMinF = min((/min(ppt_aa_ann),min(ppt_aa_ann_obs)/))-.005 + xyres@trYMaxF = max((/max(ppt_aa_ann),max(ppt_aa_ann_obs)/))+.005 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+ppt_trends_ann@units + xy_ann(ee) = gsn_csm_xy(wks_aa_ann,ispan(syear(ee),eyear(ee),1),ppt_aa_ann,xyres) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xy_obs_ann(ee) = gsn_csm_xy(wks_aa_ann,ispan(ppt_aa_seas_obs@syear,ppt_aa_seas_obs@eyear,1),ppt_aa_ann_obs,xyres2) + overlay(xy_ann(ee),xy_obs_ann(ee)) + delete(xyres@trYMinF) + delete(xyres@trYMaxF) + end if + delete(tttt) + + xyres@trXMaxF = eyear(ee)+1.5 + xyres2@trXMaxF = eyear(ee)+1.5 + tttt = dtrend_msg(ispan(0,dimsizes(ppt_aa_mon&time)-1,1),ppt_aa_mon,False,True) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xyres@trYMinF = min((/min(ppt_aa_mon),min(ppt_aa_mon_obs)/))-.005 + xyres@trYMaxF = max((/max(ppt_aa_mon),max(ppt_aa_mon_obs)/))+.005 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(ppt_aa_mon&time),2,True)+ppt_trends_mon@units + xy_mon(ee) = gsn_csm_xy(wks_aa_mon,fspan(syear(ee),eyear(ee)+.91667,dimsizes(ppt_aa_mon)),ppt_aa_mon,xyres) + if (isfilepresent2("obs_prect").and.ee.ge.1) then + xy_obs_mon(ee) = gsn_csm_xy(wks_aa_mon,fspan(ppt_aa_seas_obs@syear,ppt_aa_seas_obs@eyear+.91667,dimsizes(ppt_aa_mon_obs)),ppt_aa_mon_obs,xyres2) + 
overlay(xy_mon(ee),xy_obs_mon(ee)) + end if + + delete([/ppt_trends_seas,ppt_trends_ann,ppt_trends_mon/]) + delete([/ppt_aa_seas,ppt_aa_mon,ppt_aa_ann,xyres,xyres2,res,tttt/]) + end do + if (isfilepresent2("obs_prect")) then + delete([/ppt_aa_seas_obs,ppt_aa_mon_obs,ppt_aa_ann_obs/]) + end if + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + panres@lbLabelFontHeightF = 0.013 + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelStride = 1 + + + panres@txString = "PPT Trends (DJF)" + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + gsn_panel2(wks_trends_djf,map_djf,(/nrow,ncol/),panres) + delete(wks_trends_djf) + + panres@txString = "PPT Trends (MAM)" + gsn_panel2(wks_trends_mam,map_mam,(/nrow,ncol/),panres) + delete(wks_trends_mam) + + panres@txString = "PPT Trends (JJA)" + gsn_panel2(wks_trends_jja,map_jja,(/nrow,ncol/),panres) + delete(wks_trends_jja) + + panres@txString = "PPT Trends (SON)" + gsn_panel2(wks_trends_son,map_son,(/nrow,ncol/),panres) + delete(wks_trends_son) + + panres@txString = "PPT Trends (Annual)" + gsn_panel2(wks_trends_ann,map_ann,(/nrow,ncol/),panres) + delete(wks_trends_ann) + + panres@txString = "PPT Trends (Monthly)" + gsn_panel2(wks_trends_mon,map_mon,(/nrow,ncol/),panres) + delete(wks_trends_mon) + + panres2 = True + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + panres2@gsnPanelYWhiteSpacePercent = 3.0 + if (nsim.le.5) then + panres2@txFontHeightF = 0.024 + else + panres2@txFontHeightF = 0.016 + end if + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres2@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) ;(/nsim/2+1,nsim/8+1/) + end if + panres2@txString = "PR Global Average (DJF)" + gsn_panel2(wks_aa_djf,xy_djf,lp,panres2) + delete(wks_aa_djf) + + panres2@txString = "PR Global Average (MAM)" + gsn_panel2(wks_aa_mam,xy_mam,lp,panres2) + delete(wks_aa_mam) + + panres2@txString = "PR Global Average (JJA)" + gsn_panel2(wks_aa_jja,xy_jja,lp,panres2) + delete(wks_aa_jja) + + panres2@txString = "PR Global Average (SON)" + gsn_panel2(wks_aa_son,xy_son,lp,panres2) + delete(wks_aa_son) + + panres2@txString = "PR Global Average (Annual)" + gsn_panel2(wks_aa_ann,xy_ann,lp,panres2) + delete(wks_aa_ann) + + panres2@txString = "PR Global Average (Monthly)" + gsn_panel2(wks_aa_mon,xy_mon,lp,panres2) + delete(wks_aa_mon) + + delete([/nrow,ncol,lp,map_djf,map_mam,map_jja,map_son,map_ann,map_mon,xy_djf,xy_mam,xy_jja,xy_son,xy_ann,xy_mon/]) + delete(panres2) + if (isfilepresent2("obs_prect")) then + delete([/xy_obs_djf,xy_obs_mam,xy_obs_jja,xy_obs_son,xy_obs_ann,xy_obs_mon/]) + end if + print("Finished: pr.trends_timeseries.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.mean_stddev.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.mean_stddev.ncl new file mode 100644 index 0000000000..aa5645bbd3 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.mean_stddev.ncl @@ -0,0 +1,382 @@ +; Calculates PSL global means and standard 
deviations +; +; Variables used: psl +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: psl.mean_stddev.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_psl") + na = asciiread("namelist_byvar/namelist_psl",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_stddev_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.stddev.djf") + wks_stddev_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.stddev.mam") + wks_stddev_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.stddev.jja") + wks_stddev_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.stddev.son") + wks_stddev_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.stddev.ann") + wks_mean_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.mean.djf") + wks_mean_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.mean.mam") + wks_mean_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.mean.jja") + wks_mean_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.mean.son") + wks_mean_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.mean.ann") + + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_stddev_djf,"precip3_16lev") + gsn_define_colormap(wks_stddev_mam,"precip3_16lev") + gsn_define_colormap(wks_stddev_jja,"precip3_16lev") + gsn_define_colormap(wks_stddev_son,"precip3_16lev") + gsn_define_colormap(wks_stddev_ann,"precip3_16lev") + gsn_define_colormap(wks_mean_djf,"ncl_default") + gsn_define_colormap(wks_mean_mam,"ncl_default") + gsn_define_colormap(wks_mean_jja,"ncl_default") + gsn_define_colormap(wks_mean_son,"ncl_default") + gsn_define_colormap(wks_mean_ann,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_stddev_djf,"cb_rainbow") + gsn_define_colormap(wks_stddev_mam,"cb_rainbow") + gsn_define_colormap(wks_stddev_jja,"cb_rainbow") + gsn_define_colormap(wks_stddev_son,"cb_rainbow") + gsn_define_colormap(wks_stddev_ann,"cb_rainbow") + gsn_define_colormap(wks_mean_djf,"BlueDarkRed18") + gsn_define_colormap(wks_mean_mam,"BlueDarkRed18") + gsn_define_colormap(wks_mean_jja,"BlueDarkRed18") + gsn_define_colormap(wks_mean_son,"BlueDarkRed18") + gsn_define_colormap(wks_mean_ann,"BlueDarkRed18") + end if + + plot_mean_djf = new(nsim,"graphic") + plot_mean_mam = new(nsim,"graphic") + plot_mean_jja = new(nsim,"graphic") + plot_mean_son = new(nsim,"graphic") + plot_mean_ann = new(nsim,"graphic") + plot_stddev_djf = new(nsim,"graphic") + plot_stddev_mam = new(nsim,"graphic") + plot_stddev_jja = 
new(nsim,"graphic") + plot_stddev_son = new(nsim,"graphic") + plot_stddev_ann = new(nsim,"graphic") + do ee = 0,nsim-1 + psl = data_read_in(paths(ee),"PSL",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(psl,"is_all_missing")) then + delete(psl) + continue + end if + do ff = 0,1 + pslT = psl + if (ff.eq.1) then + if (OPT_CLIMO.eq."Full") then + pslT = rmMonAnnCycTLL(pslT) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = pslT + delete(temp_arr&time) + temp_arr&time = cd_calendar(pslT&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + pslT = calcMonAnomTLL(pslT,climo) + delete(climo) + end if + end if + psl_seas = runave_n_Wrap(pslT,3,0,0) + psl_seas(0,:,:) = (/ dim_avg_n(pslT(:1,:,:),0) /) + psl_seas(dimsizes(pslT&time)-1,:,:) = (/ dim_avg_n(pslT(dimsizes(pslT&time)-2:,:,:),0) /) + psl_ann = runave_n_Wrap(pslT,12,0,0) + delete(pslT) + + if (ff.eq.0) then + psl_mean_djf = dim_avg_n_Wrap(psl_seas(0::12,:,:),0) + psl_mean_mam = dim_avg_n_Wrap(psl_seas(3::12,:,:),0) + psl_mean_jja = dim_avg_n_Wrap(psl_seas(6::12,:,:),0) + psl_mean_son = dim_avg_n_Wrap(psl_seas(9::12,:,:),0) + psl_mean_ann = dim_avg_n_Wrap(psl_ann(5::12,:,:),0) + end if + if (ff.eq.1) then + psl_sd_djf = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),psl_seas(0::12,:,:),False,False,0),0) + psl_sd_mam = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),psl_seas(3::12,:,:),False,False,0),0) + psl_sd_jja = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),psl_seas(6::12,:,:),False,False,0),0) + psl_sd_son = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),psl_seas(9::12,:,:),False,False,0),0) + psl_sd_ann = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),psl_ann(5::12,:,:),False,False,0),0) + end if + delete([/psl_seas,psl_ann/]) + end do + delete(psl) + copy_VarMeta(psl_mean_djf,psl_sd_djf) + copy_VarMeta(psl_mean_mam,psl_sd_mam) + copy_VarMeta(psl_mean_jja,psl_sd_jja) + copy_VarMeta(psl_mean_son,psl_sd_son) + copy_VarMeta(psl_mean_ann,psl_sd_ann) + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.mean_stddev."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + + z->psl_spatialmean_djf = set_varAtts(psl_mean_djf,"psl mean (DJF)","","") + z->psl_spatialmean_mam = set_varAtts(psl_mean_djf,"psl mean (MAM)","","") + z->psl_spatialmean_jja = set_varAtts(psl_mean_djf,"psl mean (JJA)","","") + z->psl_spatialmean_son = 
set_varAtts(psl_mean_djf,"psl mean (SON)","","") + z->psl_spatialmean_ann = set_varAtts(psl_mean_djf,"psl mean (annual)","","") + + z->psl_spatialstddev_djf = set_varAtts(psl_sd_djf,"psl standard deviation (DJF)","","") + z->psl_spatialstddev_mam = set_varAtts(psl_sd_mam,"psl standard deviation (MAM)","","") + z->psl_spatialstddev_jja = set_varAtts(psl_sd_jja,"psl standard deviation (JJA)","","") + z->psl_spatialstddev_son = set_varAtts(psl_sd_son,"psl standard deviation (SON)","","") + z->psl_spatialstddev_ann = set_varAtts(psl_sd_ann,"psl standard deviation (annual)","","") + delete(z) + delete([/modname,fn/]) + end if +;========================================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 210. + res@mpOutlineOn = True + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. + end if + res@gsnDraw = False + res@gsnFrame = False + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@cnLevelSelectionMode = "ExplicitLevels" + + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + + sres = res + + res@cnLevels = fspan(.4,6.0,8) + if (COLORMAP.eq.0) then + res@cnFillColors = (/2,4,6,8,10,12,14,16,18/) + sres@cnLevels = ispan(972,1044,4) + end if + if (COLORMAP.eq.1) then + res@cnFillColors = (/35,47,63,79,95,111,124,155,175/) + sres@cnLevels = ispan(980,1036,4) + end if + + + + if (isfilepresent2("obs_psl").and.ee.eq.0) then ; for pattern correlation table + patcor = new((/nsim,dimsizes(psl_sd_ann&lat),dimsizes(psl_sd_ann&lon)/),typeof(psl_sd_ann)) + patcor!1 = "lat" + patcor&lat = psl_sd_ann&lat + patcor!2 = "lon" + patcor&lon = psl_sd_ann&lon + patcor(ee,:,:) = (/ psl_sd_ann /) + end if + if (isfilepresent2("obs_psl").and.ee.ge.1.and.isvar("patcor")) then + patcor(ee,:,:) = (/ totype(linint2(psl_sd_ann&lon,psl_sd_ann&lat,psl_sd_ann,True,patcor&lon,patcor&lat,0),typeof(patcor)) /) + end if + + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnRightString = psl_mean_djf@units + res@gsnCenterString = names(ee) + plot_stddev_djf(ee) = gsn_csm_contour_map(wks_stddev_djf,psl_sd_djf,res) + plot_stddev_mam(ee) = gsn_csm_contour_map(wks_stddev_mam,psl_sd_mam,res) + plot_stddev_jja(ee) = gsn_csm_contour_map(wks_stddev_jja,psl_sd_jja,res) + plot_stddev_son(ee) = gsn_csm_contour_map(wks_stddev_son,psl_sd_son,res) + plot_stddev_ann(ee) = gsn_csm_contour_map(wks_stddev_ann,psl_sd_ann,res) + + sres@gsnLeftString = syear(ee)+"-"+eyear(ee) + sres@gsnRightString = psl_mean_djf@units + sres@gsnCenterString = names(ee) + plot_mean_djf(ee) = gsn_csm_contour_map(wks_mean_djf,psl_mean_djf,sres) + plot_mean_mam(ee) = gsn_csm_contour_map(wks_mean_mam,psl_mean_mam,sres) + plot_mean_jja(ee) = gsn_csm_contour_map(wks_mean_jja,psl_mean_jja,sres) + plot_mean_son(ee) = 
gsn_csm_contour_map(wks_mean_son,psl_mean_son,sres) + plot_mean_ann(ee) = gsn_csm_contour_map(wks_mean_ann,psl_mean_ann,sres) + delete([/psl_sd_djf,psl_sd_mam,psl_sd_jja,psl_sd_son,psl_sd_ann,psl_mean_djf,psl_mean_mam,psl_mean_jja,psl_mean_son,psl_mean_ann,res,sres/]) + end do + + if (isvar("patcor")) then ; for pattern correlation table + clat = cos(0.01745329*patcor&lat) + finpr = "psl Std Dev (Ann) " ; Must be 18 characters long + line3 = "                  " ; Must be 18 characters long + line4 = line3 + header = (/"","Pattern Correlations/RMS Differences Observations vs. Model(s)",""/) + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor(hh,:,:)))) then + finpr = finpr+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr = finpr+sprintf(format2,(pattern_cor(patcor(0,:,:),patcor(hh,:,:),clat,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor(0,:,:))),ndtooned(NewCosWeight(patcor(hh,:,:)))))) + end if + end do + if (dimsizes(tochar(line4)).ge.8190) then ; system or Fortran compiler limit + print("Metrics table warning: Not creating metrics table as the size of the comparison results in an invalid ASCII row size.") + else + write_table(getenv("OUTDIR")+"metrics.psl.mean_stddev.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.mean_stddev.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.mean_stddev.txt","a",[/line4/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.mean_stddev.txt","a",[/finpr/],"%s") + end if + delete([/finpr,line3,line4,format2,format3,nchar,ntc,clat,patcor,dimY,ntb,header/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelFontHeightF = 0.013 + panres@lbLabelStride = 1 + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "PSL Standard Deviations (DJF)" + gsn_panel2(wks_stddev_djf,plot_stddev_djf,(/nrow,ncol/),panres) + delete(wks_stddev_djf) + + panres@txString = "PSL Standard Deviations (MAM)" + gsn_panel2(wks_stddev_mam,plot_stddev_mam,(/nrow,ncol/),panres) + delete(wks_stddev_mam) + + panres@txString = "PSL Standard Deviations (JJA)" + gsn_panel2(wks_stddev_jja,plot_stddev_jja,(/nrow,ncol/),panres) + delete(wks_stddev_jja) + + panres@txString = "PSL Standard Deviations (SON)" + gsn_panel2(wks_stddev_son,plot_stddev_son,(/nrow,ncol/),panres) + delete(wks_stddev_son) + + panres@txString = "PSL Standard Deviations (Annual)" + gsn_panel2(wks_stddev_ann,plot_stddev_ann,(/nrow,ncol/),panres) + delete(wks_stddev_ann) + + panres@txString = "PSL Means (DJF)" + gsn_panel2(wks_mean_djf,plot_mean_djf,(/nrow,ncol/),panres) + delete(wks_mean_djf) + + panres@txString = "PSL Means (MAM)" + 
gsn_panel2(wks_mean_mam,plot_mean_mam,(/nrow,ncol/),panres) + delete(wks_mean_mam) + + panres@txString = "PSL Means (JJA)" + gsn_panel2(wks_mean_jja,plot_mean_jja,(/nrow,ncol/),panres) + delete(wks_mean_jja) + + panres@txString = "PSL Means (SON)" + gsn_panel2(wks_mean_son,plot_mean_son,(/nrow,ncol/),panres) + delete(wks_mean_son) + + panres@txString = "PSL Means (Annual)" + gsn_panel2(wks_mean_ann,plot_mean_ann,(/nrow,ncol/),panres) + delete(wks_mean_ann) + delete(panres) + print("Finished: psl.mean_stddev.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.modes_indices.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.modes_indices.ncl new file mode 100644 index 0000000000..0a72ae82a8 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.modes_indices.ncl @@ -0,0 +1,3000 @@ +; Calculates the coupled and atmospheric modes of variability (patterns and timeseries), +; as well as regressions of those PC timeseries onto the land-masked TS field. +; +; Variables used: PSL and TS +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: psl.modes_indices.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COMPUTE_MODES_MON = getenv("COMPUTE_MODES_MON") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_psl") + na = asciiread("namelist_byvar/namelist_psl",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) 
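+; Illustrative note (not part of the CVDP source; the entry below is a
+; hypothetical example): each line of namelist_byvar/namelist_psl is expected
+; to hold four "|"-delimited fields, e.g.
+;
+;   CCSM4 | /path/to/CCSM4/psl/*.nc | 1850 | 2005
+;
+; str_get_field uses 1-based field indices in NCL, so field 1 is the
+; simulation name, field 2 the file path, and fields 3/4 the start/end years;
+; str_strip then removes the padding blanks around each field.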
+ +;---------SST Regressions coding------------------------------------------------- + nsim_ts = numAsciiRow("namelist_byvar/namelist_ts") + na_ts = asciiread("namelist_byvar/namelist_ts",(/nsim_ts/),"string") + names_ts = new(nsim_ts,"string") + paths_ts = new(nsim_ts,"string") + syear_ts = new(nsim_ts,"integer",-999) + eyear_ts = new(nsim_ts,"integer",-999) + + do gg = 0,nsim_ts-1 + names_ts(gg) = str_strip(str_get_field(na_ts(gg),1,delim)) + paths_ts(gg) = str_strip(str_get_field(na_ts(gg),2,delim)) + syear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),3,delim))) + eyear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),4,delim))) + end do + delete(na_ts) + nyr_ts = eyear_ts-syear_ts+1 + + if (isfilepresent2("obs_psl")) then + if (nsim.eq.nsim_ts) then + ; print("Total number of simulations/obs are equal between psl/ts namelists") + sstreg_obs = 0 + else + print("Total number of simulations/obs between psl/ts does not match, skipping observational SST regressions in psl.modes_indices.ncl.") + sstreg_obs = 1 + ; delete([/nsim_ts,na_ts,names_ts,paths_ts,syear_ts,eyear_ts,nyr_ts/]) + end if + else + sstreg_obs = 1 + end if + + if (sstreg_obs.eq.0) then ; further check for obs + if (syear(0).eq.syear_ts(0)) then + if (eyear(0).eq.eyear_ts(0)) then +; print("Obs years match") + sstreg_obs = 0 + else + print("Obs end years do not match, skipping observed SST regressions in psl.modes_indices.ncl.") + sstreg_obs = 1 + end if + else + print("Obs start years do not match, skipping observed SST regressions in psl.modes_indices.ncl.") + sstreg_obs = 1 + end if + end if +;------------------------------------------------------------------------------------------------- + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_nam = gsn_open_wks(wks_type,getenv("OUTDIR")+"nam") + wks_nam_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"nam.timeseries") + + wks_sam = gsn_open_wks(wks_type,getenv("OUTDIR")+"sam") + wks_sam_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"sam.timeseries") + + wks_nao = gsn_open_wks(wks_type,getenv("OUTDIR")+"nao") + wks_nao_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"nao.timeseries") + + wks_pna = gsn_open_wks(wks_type,getenv("OUTDIR")+"pna") + wks_pna_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"pna.timeseries") + + wks_npo = gsn_open_wks(wks_type,getenv("OUTDIR")+"npo") + wks_npo_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"npo.timeseries") + + wks_psa1 = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa1") + wks_psa1_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa1.timeseries") + + wks_psa2 = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa2") + wks_psa2_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa2.timeseries") + +; Note: Currently near maximum number of open workstations: using 14 out of 15. 
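+; A minimal sketch (not from the CVDP source; "field"/"field_CW" are
+; placeholder names) of the EOF/PC/regression pattern repeated for every
+; mode and season below: the leading EOFs of the sqrt(cos(lat))-weighted
+; anomalies yield PC timeseries, which are standardized and regressed back
+; onto the unweighted field, and the sign is then fixed at a mode-specific
+; reference gridpoint; (85N,5E) shown here is the NAM reference point:
+;
+;   evecv = eofunc(field_CW({lat|20:},lon|:,time|:),1,75)
+;   pcts  = eofunc_ts(field_CW({lat|20:},lon|:,time|:),evecv,False)
+;   pc    = dim_standardize(pcts(0,:),0)                 ; unit-variance PC
+;   pat   = field(0,:,:)                                 ; (lat,lon) template
+;   pat   = (/ regCoef(pc,field(lat|:,lon|:,time|:)) /)  ; regression map
+;   if (.not.ismissing(pat({85},{5}))) then
+;     if (pat({85},{5}).ge.0) then                       ; enforce sign convention
+;       pat = pat*-1.
+;       pc  = pc*-1.
+;     end if
+;   end if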
+ + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_nam,"ncl_default") + gsn_define_colormap(wks_nam_ts,"ncl_default") + gsn_define_colormap(wks_sam,"ncl_default") + gsn_define_colormap(wks_sam_ts,"ncl_default") + gsn_define_colormap(wks_nao,"ncl_default") + gsn_define_colormap(wks_nao_ts,"ncl_default") + gsn_define_colormap(wks_pna,"ncl_default") + gsn_define_colormap(wks_pna_ts,"ncl_default") + gsn_define_colormap(wks_npo,"ncl_default") + gsn_define_colormap(wks_npo_ts,"ncl_default") + gsn_define_colormap(wks_psa1,"ncl_default") + gsn_define_colormap(wks_psa1_ts,"ncl_default") + gsn_define_colormap(wks_psa2,"ncl_default") + gsn_define_colormap(wks_psa2_ts,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_nam,"BlueDarkRed18") + gsn_define_colormap(wks_nam_ts,"ncl_default") + gsn_define_colormap(wks_sam,"BlueDarkRed18") + gsn_define_colormap(wks_sam_ts,"ncl_default") + gsn_define_colormap(wks_nao,"BlueDarkRed18") + gsn_define_colormap(wks_nao_ts,"ncl_default") + gsn_define_colormap(wks_pna,"BlueDarkRed18") + gsn_define_colormap(wks_pna_ts,"ncl_default") + gsn_define_colormap(wks_npo,"BlueDarkRed18") + gsn_define_colormap(wks_npo_ts,"ncl_default") + gsn_define_colormap(wks_psa1,"BlueDarkRed18") + gsn_define_colormap(wks_psa1_ts,"ncl_default") + gsn_define_colormap(wks_psa2,"BlueDarkRed18") + gsn_define_colormap(wks_psa2_ts,"ncl_default") + end if + + + + map_nam_djf = new(nsim,"graphic") + map_nam_mam = new(nsim,"graphic") + map_nam_jja = new(nsim,"graphic") + map_nam_son = new(nsim,"graphic") + map_nam_ann = new(nsim,"graphic") + map_nam_mon = new(nsim,"graphic") + xy_nam_djf = new(nsim,"graphic") + xy_nam_mam = new(nsim,"graphic") + xy_nam_jja = new(nsim,"graphic") + xy_nam_son = new(nsim,"graphic") + xy_nam_ann = new(nsim,"graphic") + xy_nam_mon = new(nsim,"graphic") + reg_nam_djf = new(nsim,"graphic") + reg_nam_mam = new(nsim,"graphic") + reg_nam_jja = new(nsim,"graphic") + reg_nam_son = new(nsim,"graphic") + reg_nam_ann = new(nsim,"graphic") + reg_nam_mon = new(nsim,"graphic") + + map_sam_djf = new(nsim,"graphic") + map_sam_mam = new(nsim,"graphic") + map_sam_jja = new(nsim,"graphic") + map_sam_son = new(nsim,"graphic") + map_sam_ann = new(nsim,"graphic") + map_sam_mon = new(nsim,"graphic") + xy_sam_djf = new(nsim,"graphic") + xy_sam_mam = new(nsim,"graphic") + xy_sam_jja = new(nsim,"graphic") + xy_sam_son = new(nsim,"graphic") + xy_sam_ann = new(nsim,"graphic") + xy_sam_mon = new(nsim,"graphic") + reg_sam_djf = new(nsim,"graphic") + reg_sam_mam = new(nsim,"graphic") + reg_sam_jja = new(nsim,"graphic") + reg_sam_son = new(nsim,"graphic") + reg_sam_ann = new(nsim,"graphic") + reg_sam_mon = new(nsim,"graphic") + + map_nao_djf = new(nsim,"graphic") + map_nao_mam = new(nsim,"graphic") + map_nao_jja = new(nsim,"graphic") + map_nao_son = new(nsim,"graphic") + map_nao_ann = new(nsim,"graphic") + map_nao_mon = new(nsim,"graphic") + xy_nao_djf = new(nsim,"graphic") + xy_nao_mam = new(nsim,"graphic") + xy_nao_jja = new(nsim,"graphic") + xy_nao_son = new(nsim,"graphic") + xy_nao_ann = new(nsim,"graphic") + xy_nao_mon = new(nsim,"graphic") + reg_nao_djf = new(nsim,"graphic") + reg_nao_mam = new(nsim,"graphic") + reg_nao_jja = new(nsim,"graphic") + reg_nao_son = new(nsim,"graphic") + reg_nao_ann = new(nsim,"graphic") + reg_nao_mon = new(nsim,"graphic") + + map_pna_djf = new(nsim,"graphic") + map_pna_mam = new(nsim,"graphic") + map_pna_jja = new(nsim,"graphic") + map_pna_son = new(nsim,"graphic") + map_pna_ann = new(nsim,"graphic") + map_pna_mon = new(nsim,"graphic") + 
xy_pna_djf = new(nsim,"graphic") + xy_pna_mam = new(nsim,"graphic") + xy_pna_jja = new(nsim,"graphic") + xy_pna_son = new(nsim,"graphic") + xy_pna_ann = new(nsim,"graphic") + xy_pna_mon = new(nsim,"graphic") + reg_pna_djf = new(nsim,"graphic") + reg_pna_mam = new(nsim,"graphic") + reg_pna_jja = new(nsim,"graphic") + reg_pna_son = new(nsim,"graphic") + reg_pna_ann = new(nsim,"graphic") + reg_pna_mon = new(nsim,"graphic") + + map_npo_djf = new(nsim,"graphic") + map_npo_mam = new(nsim,"graphic") + map_npo_jja = new(nsim,"graphic") + map_npo_son = new(nsim,"graphic") + map_npo_ann = new(nsim,"graphic") + map_npo_mon = new(nsim,"graphic") + xy_npo_djf = new(nsim,"graphic") + xy_npo_mam = new(nsim,"graphic") + xy_npo_jja = new(nsim,"graphic") + xy_npo_son = new(nsim,"graphic") + xy_npo_ann = new(nsim,"graphic") + xy_npo_mon = new(nsim,"graphic") + reg_npo_djf = new(nsim,"graphic") + reg_npo_mam = new(nsim,"graphic") + reg_npo_jja = new(nsim,"graphic") + reg_npo_son = new(nsim,"graphic") + reg_npo_ann = new(nsim,"graphic") + reg_npo_mon = new(nsim,"graphic") + + map_psa1_djf = new(nsim,"graphic") + map_psa1_mam = new(nsim,"graphic") + map_psa1_jja = new(nsim,"graphic") + map_psa1_son = new(nsim,"graphic") + map_psa1_ann = new(nsim,"graphic") + map_psa1_mon = new(nsim,"graphic") + xy_psa1_djf = new(nsim,"graphic") + xy_psa1_mam = new(nsim,"graphic") + xy_psa1_jja = new(nsim,"graphic") + xy_psa1_son = new(nsim,"graphic") + xy_psa1_ann = new(nsim,"graphic") + xy_psa1_mon = new(nsim,"graphic") + reg_psa1_djf = new(nsim,"graphic") + reg_psa1_mam = new(nsim,"graphic") + reg_psa1_jja = new(nsim,"graphic") + reg_psa1_son = new(nsim,"graphic") + reg_psa1_ann = new(nsim,"graphic") + reg_psa1_mon = new(nsim,"graphic") + + map_psa2_djf = new(nsim,"graphic") + map_psa2_mam = new(nsim,"graphic") + map_psa2_jja = new(nsim,"graphic") + map_psa2_son = new(nsim,"graphic") + map_psa2_ann = new(nsim,"graphic") + map_psa2_mon = new(nsim,"graphic") + xy_psa2_djf = new(nsim,"graphic") + xy_psa2_mam = new(nsim,"graphic") + xy_psa2_jja = new(nsim,"graphic") + xy_psa2_son = new(nsim,"graphic") + xy_psa2_ann = new(nsim,"graphic") + xy_psa2_mon = new(nsim,"graphic") + reg_psa2_djf = new(nsim,"graphic") + reg_psa2_mam = new(nsim,"graphic") + reg_psa2_jja = new(nsim,"graphic") + reg_psa2_son = new(nsim,"graphic") + reg_psa2_ann = new(nsim,"graphic") + reg_psa2_mon = new(nsim,"graphic") + + xy_npi = new(nsim,"graphic") + sstreg_frame = 1 ; sstreg_frame = flag to create regressions .ps/.png files. 
Created/used instead of sstreg_plot_flag + ; so that .ps/.png files are still created even if SST regressions are not created for the last simulation listed + do ee = 0,nsim-1 +; print(paths(ee)+" "+syear(ee)+" "+eyear(ee)) + arr = data_read_in(paths(ee),"PSL",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(arr,"is_all_missing")) then + delete(arr) + continue + end if + + if (OPT_CLIMO.eq."Full") then + arr = rmMonAnnCycTLL(arr) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = arr + delete(temp_arr&time) + temp_arr&time = cd_calendar(arr&time,-1) + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + delete(temp_arr) + arr = calcMonAnomTLL(arr,climo) + delete(climo) + end if + + arrT = runave_n_Wrap(arr,3,0,0) ; form DJF averages + arrT(0,:,:) = (/ dim_avg_n(arr(:1,:,:),0) /) + arr_djf = arrT(0::12,:,:) + arr_mam = arrT(3::12,:,:) + arr_jja = arrT(6::12,:,:) ; form JJA averages + arr_son = arrT(9::12,:,:) + delete(arrT) + + arrU = runave_n_Wrap(arr,5,0,0) + arrU(0,:,:) = (/ dim_avg_n(arr(:2,:,:),0) /) + arr_ndjfm = arrU(0::12,:,:) + delete(arrU) + + arrV = runave_n_Wrap(arr,12,0,0) + arr_ann = arrV(5::12,:,:) + delete(arrV) +; +; arr_djf = (/ dtrend_msg_n(ispan(0,dimsizes(arr_djf&time)-1,1),arr_djf,True,False,0) /) +; arr_mam = (/ dtrend_msg_n(ispan(0,dimsizes(arr_mam&time)-1,1),arr_mam,True,False,0) /) +; arr_jja = (/ dtrend_msg_n(ispan(0,dimsizes(arr_jja&time)-1,1),arr_jja,True,False,0) /) +; arr_son = (/ dtrend_msg_n(ispan(0,dimsizes(arr_son&time)-1,1),arr_son,True,False,0) /) +; +; arr_ann = (/ dtrend_msg_n(ispan(0,dimsizes(arr_ann&time)-1,1),arr_ann,True,False,0) /) +; +; arr_ndjfm = (/ dtrend_msg_n(ispan(0,dimsizes(arr_ndjfm&time)-1,1),arr_ndjfm,True,False,0) /) +; +; arr = (/ dtrend_msg_n(ispan(0,dimsizes(arr&time)-1,1),arr,True,False,0) /) +;---------SST Regressions coding------------------------------------------------- + sstreg_plot_flag = 0 + if (ee.eq.0.and.sstreg_obs.eq.1) then ; first time through, and obs = False + sstreg_plot_flag = 1 ; do not plot + if (.not.isfilepresent2("obs_psl")) then ; however, if there are no PSL obs whatsoever, the 1st iteration of the + sstreg_plot_flag = 0 ; ee do loop will be the first model, so turn it on. + end if + end if + + if (sstreg_plot_flag.eq.0) then + ts_offset = 0 ; offsets necessary if OBS is present for one variable, but not another. 
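+; Gloss of the ts_offset convention established by the checks that follow
+; (values taken directly from that logic):
+;    0 : the PSL and TS namelists are aligned (obs present for both, or neither)
+;   -1 : obs PSL present but obs TS absent, so the matching TS entry sits one
+;        index earlier than the PSL entry
+;   +1 : obs TS present but obs PSL absent, so the matching TS entry sits one
+;        index later than the PSL entry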
+ if (ee.eq.0.and.sstreg_obs.eq.0) then + else + if (isfilepresent2("obs_psl")) then + if (.not.isfilepresent2("obs_ts")) then + ts_offset = -1 + end if + else + if (isfilepresent2("obs_ts")) then + ts_offset = 1 + end if + end if + end if + + ; print("Data to be read in: "+paths_ts(ee+ts_offset)+" from "+syear_ts(ee+ts_offset)+":"+eyear_ts(ee+ts_offset)) + sst = data_read_in(paths_ts(ee+ts_offset),"TS",syear_ts(ee+ts_offset),eyear_ts(ee+ts_offset)) + if (isatt(sst,"is_all_missing")) then + sstreg_plot_flag = 1 + delete(sst) + end if + + if (sstreg_plot_flag.eq.0) then ; only continue if both PSL/TS fields are present + sst = where(sst.le.-1.8,-1.8,sst) + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") + basemap = d->LSMASK + lsm = landsea_mask(basemap,sst&lat,sst&lon) + sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False) + delete(lsm) + + if (OPT_CLIMO.eq."Full") then + sst = rmMonAnnCycTLL(sst) + else + check_custom_climo(names_ts(ee+ts_offset),syear_ts(ee+ts_offset),eyear_ts(ee+ts_offset),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sst + delete(temp_arr&time) + temp_arr&time = cd_calendar(sst&time,1) + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + delete(temp_arr) + sst = calcMonAnomTLL(sst,climo) + delete(climo) + end if +; sst = (/ dtrend_msg_n(ispan(0,dimsizes(sst&time)-1,1),sst,False,False,0) /) + + sstT = runave_n_Wrap(sst,3,0,0) ; form DJF averages + sstT(0,:,:) = (/ dim_avg_n(sst(:1,:,:),0) /) + sst_djf = sstT(0::12,:,:) + sst_mam = sstT(3::12,:,:) + sst_jja = sstT(6::12,:,:) ; form JJA averages + sst_son = sstT(9::12,:,:) + delete(sstT) + + sstV = runave_n_Wrap(sst,12,0,0) + sst_ann = sstV(5::12,:,:) + delete(sstV) + end if + end if +;----------------NPI calculation----------------------------------- + coswgt=cos(rad*arr_djf&lat) + coswgt!0 = "lat" + coswgt&lat = arr_djf&lat + npi_ndjfm = wgt_areaave(arr_ndjfm(:,{30:65},{160:220}),coswgt({30.:65.}),1.0,0) + npi_ndjfm!0 = "TIME" + npi_ndjfm&TIME = ispan(syear(ee),eyear(ee),1) + npi_ndjfm&TIME@units = "YYYY" + npi_ndjfm&TIME@long_name = "time" + + npi_ndjfm@area = "30:65N, 160:220E" + npi_ndjfm@units = arr_ndjfm@units + npi_ndjfm@long_name = "North Pacific Index" + delete([/coswgt,arr_ndjfm/]) +;---------NAM calculations---------------------------------------------------------- + arr_djf_CW = SqrtCosWeight(arr_djf) + arr_mam_CW = SqrtCosWeight(arr_mam) + arr_jja_CW = SqrtCosWeight(arr_jja) + arr_son_CW = SqrtCosWeight(arr_son) + arr_ann_CW = SqrtCosWeight(arr_ann) + if (COMPUTE_MODES_MON.eq."True") then + arr_mon_CW = SqrtCosWeight(arr) + else + delete(arr) + if (sstreg_plot_flag.eq.0) then + delete(sst) + end if + end if + + evecv = eofunc(arr_djf_CW({lat|20:},lon|:,time|:),1,75) + pcts = eofunc_ts(arr_djf_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_djf = dim_standardize(pcts(0,:),0) + nam_djf = arr_djf(0,:,:) + nam_djf = (/ regCoef(nam_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_djf = sst_djf(0,:,:) + nam_sst_djf = (/ regCoef(nam_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_djf({85},{5}))) then + if (nam_djf({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_djf = nam_djf*-1. + nam_pc_djf = nam_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_djf = nam_sst_djf*-1. 
+ end if + end if + end if + nam_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nam_pc_djf) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_mam_CW({lat|20:},lon|:,time|:),1,75) + pcts = eofunc_ts(arr_mam_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_mam = dim_standardize(pcts(0,:),0) + nam_mam = arr_mam(0,:,:) + nam_mam = (/ regCoef(nam_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_mam = sst_mam(0,:,:) + nam_sst_mam = (/ regCoef(nam_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_mam({85},{5}))) then + if (nam_mam({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_mam = nam_mam*-1. + nam_pc_mam = nam_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_mam = nam_sst_mam*-1. + end if + end if + end if + nam_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nam_pc_mam) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_jja_CW({lat|20:},lon|:,time|:),1,75) + pcts = eofunc_ts(arr_jja_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_jja = dim_standardize(pcts(0,:),0) + nam_jja = arr_jja(0,:,:) + nam_jja = (/ regCoef(nam_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_jja = sst_jja(0,:,:) + nam_sst_jja = (/ regCoef(nam_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_jja({85},{5}))) then + if (nam_jja({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_jja = nam_jja*-1. + nam_pc_jja = nam_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_jja = nam_sst_jja*-1. + end if + end if + end if + nam_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nam_pc_jja) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_son_CW({lat|20:},lon|:,time|:),1,75) + pcts = eofunc_ts(arr_son_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_son = dim_standardize(pcts(0,:),0) + nam_son = arr_son(0,:,:) + nam_son = (/ regCoef(nam_pc_son,arr_son(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_son = sst_son(0,:,:) + nam_sst_son = (/ regCoef(nam_pc_son,sst_son(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_son({85},{5}))) then + if (nam_son({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_son = nam_son*-1. + nam_pc_son = nam_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_son = nam_sst_son*-1. + end if + end if + end if + nam_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nam_pc_son) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_ann_CW({lat|20:},lon|:,time|:),1,75) + pcts = eofunc_ts(arr_ann_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_ann = dim_standardize(pcts(0,:),0) + nam_ann = arr_ann(0,:,:) + nam_ann = (/ regCoef(nam_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_ann = sst_ann(0,:,:) + nam_sst_ann = (/ regCoef(nam_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_ann({85},{5}))) then + if (nam_ann({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_ann = nam_ann*-1. + nam_pc_ann = nam_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_ann = nam_sst_ann*-1. 
+ end if + end if + end if + nam_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nam_pc_ann) + delete([/evecv,pcts/]) + + if (COMPUTE_MODES_MON.eq."True") then + evecv = eofunc(arr_mon_CW({lat|20:},lon|:,time|:),1,75) + pcts = eofunc_ts(arr_mon_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_mon = dim_standardize(pcts(0,:),0) + nam_mon = arr(0,:,:) + nam_mon = (/ regCoef(nam_pc_mon,arr(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_mon = sst(0,:,:) + nam_sst_mon = (/ regCoef(nam_pc_mon,sst(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_mon({85},{5}))) then + if (nam_mon({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_mon = nam_mon*-1. + nam_pc_mon = nam_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_mon = nam_sst_mon*-1. + end if + end if + end if + nam_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + nam_pc_mon!0 = "time" + nam_pc_mon&time = arr&time + delete([/evecv,pcts/]) + end if +;----------SAM/PSA1/PSA2 calculations---------------------------------------------------------------------- + evecv = eofunc(arr_djf_CW({lat|:-20},lon|:,time|:),3,75) + pcts = eofunc_ts(arr_djf_CW({lat|:-20},lon|:,time|:),evecv,False) + sam_pc_djf = dim_standardize(pcts(0,:),0) + psa1_pc_djf = dim_standardize(pcts(1,:),0) + psa2_pc_djf = dim_standardize(pcts(2,:),0) + sam_djf = arr_djf(0,:,:) + sam_djf = (/ regCoef(sam_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + psa1_djf = arr_djf(0,:,:) + psa1_djf = (/ regCoef(psa1_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + psa2_djf = arr_djf(0,:,:) + psa2_djf = (/ regCoef(psa2_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + sam_sst_djf = sst_djf(0,:,:) + sam_sst_djf = (/ regCoef(sam_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + psa1_sst_djf = sst_djf(0,:,:) + psa1_sst_djf = (/ regCoef(psa1_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + psa2_sst_djf = sst_djf(0,:,:) + psa2_sst_djf = (/ regCoef(psa2_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + end if + + if (.not.ismissing(sam_djf({-85},{5}))) then + if (sam_djf({-85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + sam_djf = sam_djf*-1. + sam_pc_djf = sam_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + sam_sst_djf = sam_sst_djf*-1. + end if + end if + end if + if (.not.ismissing(psa1_djf({-62},{270}))) then + if (psa1_djf({-62},{270}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa1_djf = psa1_djf*-1. + psa1_pc_djf = psa1_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + psa1_sst_djf = psa1_sst_djf*-1. + end if + end if + end if + if (.not.ismissing(psa2_djf({-52},{150}))) then + if (psa2_djf({-52},{150}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa2_djf = psa2_djf*-1. + psa2_pc_djf = psa2_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + psa2_sst_djf = psa2_sst_djf*-1. 
+ end if + end if + end if + sam_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + psa1_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + psa2_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2))) + copy_VarCoords(npi_ndjfm,sam_pc_djf) + copy_VarCoords(npi_ndjfm,psa1_pc_djf) + copy_VarCoords(npi_ndjfm,psa2_pc_djf) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_mam_CW({lat|:-20},lon|:,time|:),3,75) + pcts = eofunc_ts(arr_mam_CW({lat|:-20},lon|:,time|:),evecv,False) + sam_pc_mam = dim_standardize(pcts(0,:),0) + psa1_pc_mam = dim_standardize(pcts(1,:),0) + psa2_pc_mam = dim_standardize(pcts(2,:),0) + sam_mam = arr_mam(0,:,:) + sam_mam = (/ regCoef(sam_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + psa1_mam = arr_mam(0,:,:) + psa1_mam = (/ regCoef(psa1_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + psa2_mam = arr_mam(0,:,:) + psa2_mam = (/ regCoef(psa2_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + sam_sst_mam = sst_mam(0,:,:) + sam_sst_mam = (/ regCoef(sam_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + psa1_sst_mam = sst_mam(0,:,:) + psa1_sst_mam = (/ regCoef(psa1_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + psa2_sst_mam = sst_mam(0,:,:) + psa2_sst_mam = (/ regCoef(psa2_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + end if + + if (.not.ismissing(sam_mam({-85},{5}))) then + if (sam_mam({-85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + sam_mam = sam_mam*-1. + sam_pc_mam = sam_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + sam_sst_mam = sam_sst_mam*-1. + end if + end if + end if + if (.not.ismissing(psa1_mam({-62},{270}))) then + if (psa1_mam({-62},{270}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa1_mam = psa1_mam*-1. + psa1_pc_mam = psa1_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + psa1_sst_mam = psa1_sst_mam*-1. + end if + end if + end if + if (.not.ismissing(psa2_mam({-52},{150}))) then + if (psa2_mam({-52},{150}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa2_mam = psa2_mam*-1. + psa2_pc_mam = psa2_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + psa2_sst_mam = psa2_sst_mam*-1. + end if + end if + end if + sam_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + psa1_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + psa2_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2))) + copy_VarCoords(npi_ndjfm,sam_pc_mam) + copy_VarCoords(npi_ndjfm,psa1_pc_mam) + copy_VarCoords(npi_ndjfm,psa2_pc_mam) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_jja_CW({lat|:-20},lon|:,time|:),3,75) + pcts = eofunc_ts(arr_jja_CW({lat|:-20},lon|:,time|:),evecv,False) + sam_pc_jja = dim_standardize(pcts(0,:),0) + psa1_pc_jja = dim_standardize(pcts(1,:),0) + psa2_pc_jja = dim_standardize(pcts(2,:),0) + sam_jja = arr_jja(0,:,:) + sam_jja = (/ regCoef(sam_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + psa1_jja = arr_jja(0,:,:) + psa1_jja = (/ regCoef(psa1_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + psa2_jja = arr_jja(0,:,:) + psa2_jja = (/ regCoef(psa2_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + sam_sst_jja = sst_jja(0,:,:) + sam_sst_jja = (/ regCoef(sam_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + psa1_sst_jja = sst_jja(0,:,:) + psa1_sst_jja = (/ regCoef(psa1_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + psa2_sst_jja = sst_jja(0,:,:) + psa2_sst_jja = (/ regCoef(psa2_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(sam_jja({-85},{5}))) then + if (sam_jja({-85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + sam_jja = sam_jja*-1. 
+ sam_pc_jja = sam_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + sam_sst_jja = sam_sst_jja*-1. + end if + end if + end if + if (.not.ismissing(psa1_jja({-62},{270}))) then + if (psa1_jja({-62},{270}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa1_jja = psa1_jja*-1. + psa1_pc_jja = psa1_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + psa1_sst_jja = psa1_sst_jja*-1. + end if + end if + end if + if (.not.ismissing(psa2_jja({-52},{150}))) then + if (psa2_jja({-52},{150}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa2_jja = psa2_jja*-1. + psa2_pc_jja = psa2_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + psa2_sst_jja = psa2_sst_jja*-1. + end if + end if + end if + sam_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + psa1_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + psa2_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2))) + copy_VarCoords(npi_ndjfm,sam_pc_jja) + copy_VarCoords(npi_ndjfm,psa1_pc_jja) + copy_VarCoords(npi_ndjfm,psa2_pc_jja) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_son_CW({lat|:-20},lon|:,time|:),3,75) + pcts = eofunc_ts(arr_son_CW({lat|:-20},lon|:,time|:),evecv,False) + sam_pc_son = dim_standardize(pcts(0,:),0) + psa1_pc_son = dim_standardize(pcts(1,:),0) + psa2_pc_son = dim_standardize(pcts(2,:),0) + sam_son = arr_son(0,:,:) + sam_son = (/ regCoef(sam_pc_son,arr_son(lat|:,lon|:,time|:)) /) + psa1_son = arr_son(0,:,:) + psa1_son = (/ regCoef(psa1_pc_son,arr_son(lat|:,lon|:,time|:)) /) + psa2_son = arr_son(0,:,:) + psa2_son = (/ regCoef(psa2_pc_son,arr_son(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + sam_sst_son = sst_son(0,:,:) + sam_sst_son = (/ regCoef(sam_pc_son,sst_son(lat|:,lon|:,time|:)) /) + psa1_sst_son = sst_son(0,:,:) + psa1_sst_son = (/ regCoef(psa1_pc_son,sst_son(lat|:,lon|:,time|:)) /) + psa2_sst_son = sst_son(0,:,:) + psa2_sst_son = (/ regCoef(psa2_pc_son,sst_son(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(sam_son({-85},{5}))) then + if (sam_son({-85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + sam_son = sam_son*-1. + sam_pc_son = sam_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + sam_sst_son = sam_sst_son*-1. + end if + end if + end if + if (.not.ismissing(psa1_son({-62},{270}))) then + if (psa1_son({-62},{270}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa1_son = psa1_son*-1. + psa1_pc_son = psa1_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + psa1_sst_son = psa1_sst_son*-1. + end if + end if + end if + if (.not.ismissing(psa2_son({-52},{150}))) then + if (psa2_son({-52},{150}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa2_son = psa2_son*-1. + psa2_pc_son = psa2_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + psa2_sst_son = psa2_sst_son*-1. 
+ end if + end if + end if + sam_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + psa1_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + psa2_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2))) + copy_VarCoords(npi_ndjfm,sam_pc_son) + copy_VarCoords(npi_ndjfm,psa1_pc_son) + copy_VarCoords(npi_ndjfm,psa2_pc_son) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_ann_CW({lat|:-20},lon|:,time|:),3,75) + pcts = eofunc_ts(arr_ann_CW({lat|:-20},lon|:,time|:),evecv,False) + sam_pc_ann = dim_standardize(pcts(0,:),0) + psa1_pc_ann = dim_standardize(pcts(1,:),0) + psa2_pc_ann = dim_standardize(pcts(2,:),0) + sam_ann = arr_ann(0,:,:) + sam_ann = (/ regCoef(sam_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + psa1_ann = arr_ann(0,:,:) + psa1_ann = (/ regCoef(psa1_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + psa2_ann = arr_ann(0,:,:) + psa2_ann = (/ regCoef(psa2_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + sam_sst_ann = sst_ann(0,:,:) + sam_sst_ann = (/ regCoef(sam_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + psa1_sst_ann = sst_ann(0,:,:) + psa1_sst_ann = (/ regCoef(psa1_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + psa2_sst_ann = sst_ann(0,:,:) + psa2_sst_ann = (/ regCoef(psa2_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(sam_ann({-85},{5}))) then + if (sam_ann({-85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + sam_ann = sam_ann*-1. + sam_pc_ann = sam_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + sam_sst_ann = sam_sst_ann*-1. + end if + end if + end if + if (.not.ismissing(psa1_ann({-62},{270}))) then + if (psa1_ann({-62},{270}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa1_ann = psa1_ann*-1. + psa1_pc_ann = psa1_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + psa1_sst_ann = psa1_sst_ann*-1. + end if + end if + end if + if (.not.ismissing(psa2_ann({-52},{150}))) then + if (psa2_ann({-52},{150}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa2_ann = psa2_ann*-1. + psa2_pc_ann = psa2_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + psa2_sst_ann = psa2_sst_ann*-1. + end if + end if + end if + sam_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + psa1_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + psa2_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2))) + copy_VarCoords(npi_ndjfm,sam_pc_ann) + copy_VarCoords(npi_ndjfm,psa1_pc_ann) + copy_VarCoords(npi_ndjfm,psa2_pc_ann) + delete([/evecv,pcts/]) + + if (COMPUTE_MODES_MON.eq."True") then + evecv = eofunc(arr_mon_CW({lat|:-20},lon|:,time|:),3,75) + pcts = eofunc_ts(arr_mon_CW({lat|:-20},lon|:,time|:),evecv,False) + sam_pc_mon = dim_standardize(pcts(0,:),0) + psa1_pc_mon = dim_standardize(pcts(1,:),0) + psa2_pc_mon = dim_standardize(pcts(2,:),0) + sam_mon = arr(0,:,:) + sam_mon = (/ regCoef(sam_pc_mon,arr(lat|:,lon|:,time|:)) /) + psa1_mon = arr(0,:,:) + psa1_mon = (/ regCoef(psa1_pc_mon,arr(lat|:,lon|:,time|:)) /) + psa2_mon = arr(0,:,:) + psa2_mon = (/ regCoef(psa2_pc_mon,arr(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + sam_sst_mon = sst(0,:,:) + sam_sst_mon = (/ regCoef(sam_pc_mon,sst(lat|:,lon|:,time|:)) /) + psa1_sst_mon = sst(0,:,:) + psa1_sst_mon = (/ regCoef(psa1_pc_mon,sst(lat|:,lon|:,time|:)) /) + psa2_sst_mon = sst(0,:,:) + psa2_sst_mon = (/ regCoef(psa2_pc_mon,sst(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(sam_mon({-85},{5}))) then + if (sam_mon({-85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + sam_mon = sam_mon*-1. 
+ sam_pc_mon = sam_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + sam_sst_mon = sam_sst_mon*-1. + end if + end if + end if + if (.not.ismissing(psa1_mon({-62},{270}))) then + if (psa1_mon({-62},{270}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa1_mon = psa1_mon*-1. + psa1_pc_mon = psa1_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + psa1_sst_mon = psa1_sst_mon*-1. + end if + end if + end if + if (.not.ismissing(psa2_mon({-52},{150}))) then + if (psa2_mon({-52},{150}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + psa2_mon = psa2_mon*-1. + psa2_pc_mon = psa2_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + psa2_sst_mon = psa2_sst_mon*-1. + end if + end if + end if + sam_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + psa1_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + psa2_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2))) + sam_pc_mon!0 = "time" + sam_pc_mon&time = arr&time + psa1_pc_mon!0 = "time" + psa1_pc_mon&time = arr&time + psa2_pc_mon!0 = "time" + psa2_pc_mon&time = arr&time + delete([/evecv,pcts/]) + end if +;----------PNA/NPO calculations (EOF1/2 of NP PSL)---------------------------------------------------------- + evecv = eofunc(arr_djf_CW({lat|20:85},{lon|120:240},time|:),2,75) + pcts = eofunc_ts(arr_djf_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_djf = dim_standardize(pcts(0,:),0) + npo_pc_djf = dim_standardize(pcts(1,:),0) + pna_djf = arr_djf(0,:,:) + pna_djf = (/ regCoef(pna_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + npo_djf = arr_djf(0,:,:) + npo_djf = (/ regCoef(npo_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_djf = sst_djf(0,:,:) + pna_sst_djf = (/ regCoef(pna_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + npo_sst_djf = sst_djf(0,:,:) + npo_sst_djf = (/ regCoef(npo_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(pna_djf({50},{185}))) then + if (pna_djf({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_djf = pna_djf*-1. + pna_pc_djf = pna_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + pna_sst_djf = pna_sst_djf*-1. + end if + end if + end if + if (.not.ismissing(npo_djf({65},{185}))) then + if (npo_djf({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_djf = npo_djf*-1. + npo_pc_djf = npo_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_djf = npo_sst_djf*-1. + end if + end if + end if + pna_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + npo_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + copy_VarCoords(npi_ndjfm,pna_pc_djf) + copy_VarCoords(npi_ndjfm,npo_pc_djf) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_mam_CW({lat|20:85},{lon|120:240},time|:),2,75) + pcts = eofunc_ts(arr_mam_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_mam = dim_standardize(pcts(0,:),0) + npo_pc_mam = dim_standardize(pcts(1,:),0) + pna_mam = arr_mam(0,:,:) + pna_mam = (/ regCoef(pna_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + npo_mam = arr_mam(0,:,:) + npo_mam = (/ regCoef(npo_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_mam = sst_mam(0,:,:) + pna_sst_mam = (/ regCoef(pna_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + npo_sst_mam = sst_mam(0,:,:) + npo_sst_mam = (/ regCoef(npo_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(pna_mam({50},{185}))) then + if (pna_mam({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_mam = pna_mam*-1. + pna_pc_mam = pna_pc_mam*-1. 
+ if (sstreg_plot_flag.eq.0) then + pna_sst_mam = pna_sst_mam*-1. + end if + end if + end if + if (.not.ismissing(npo_mam({65},{185}))) then + if (npo_mam({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_mam = npo_mam*-1. + npo_pc_mam = npo_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_mam = npo_sst_mam*-1. + end if + end if + end if + pna_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + npo_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + copy_VarCoords(npi_ndjfm,pna_pc_mam) + copy_VarCoords(npi_ndjfm,npo_pc_mam) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_jja_CW({lat|20:85},{lon|120:240},time|:),2,75) + pcts = eofunc_ts(arr_jja_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_jja = dim_standardize(pcts(0,:),0) + npo_pc_jja = dim_standardize(pcts(1,:),0) + pna_jja = arr_jja(0,:,:) + pna_jja = (/ regCoef(pna_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + npo_jja = arr_jja(0,:,:) + npo_jja = (/ regCoef(npo_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_jja = sst_jja(0,:,:) + pna_sst_jja = (/ regCoef(pna_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + npo_sst_jja = sst_jja(0,:,:) + npo_sst_jja = (/ regCoef(npo_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(pna_jja({50},{185}))) then + if (pna_jja({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_jja = pna_jja*-1. + pna_pc_jja = pna_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + pna_sst_jja = pna_sst_jja*-1. + end if + end if + end if + if (.not.ismissing(npo_jja({65},{185}))) then + if (npo_jja({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_jja = npo_jja*-1. + npo_pc_jja = npo_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_jja = npo_sst_jja*-1. + end if + end if + end if + pna_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + npo_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + copy_VarCoords(npi_ndjfm,pna_pc_jja) + copy_VarCoords(npi_ndjfm,npo_pc_jja) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_son_CW({lat|20:85},{lon|120:240},time|:),2,75) + pcts = eofunc_ts(arr_son_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_son = dim_standardize(pcts(0,:),0) + npo_pc_son = dim_standardize(pcts(1,:),0) + pna_son = arr_son(0,:,:) + pna_son = (/ regCoef(pna_pc_son,arr_son(lat|:,lon|:,time|:)) /) + npo_son = arr_son(0,:,:) + npo_son = (/ regCoef(npo_pc_son,arr_son(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_son = sst_son(0,:,:) + pna_sst_son = (/ regCoef(pna_pc_son,sst_son(lat|:,lon|:,time|:)) /) + npo_sst_son = sst_son(0,:,:) + npo_sst_son = (/ regCoef(npo_pc_son,sst_son(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(pna_son({50},{185}))) then + if (pna_son({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_son = pna_son*-1. + pna_pc_son = pna_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + pna_sst_son = pna_sst_son*-1. + end if + end if + end if + if (.not.ismissing(npo_son({65},{185}))) then + if (npo_son({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_son = npo_son*-1. + npo_pc_son = npo_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_son = npo_sst_son*-1. 
+ end if + end if + end if + pna_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + npo_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + copy_VarCoords(npi_ndjfm,pna_pc_son) + copy_VarCoords(npi_ndjfm,npo_pc_son) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_ann_CW({lat|20:85},{lon|120:240},time|:),2,75) + pcts = eofunc_ts(arr_ann_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_ann = dim_standardize(pcts(0,:),0) + npo_pc_ann = dim_standardize(pcts(1,:),0) + pna_ann = arr_ann(0,:,:) + pna_ann = (/ regCoef(pna_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + npo_ann = arr_ann(0,:,:) + npo_ann = (/ regCoef(npo_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_ann = sst_ann(0,:,:) + pna_sst_ann = (/ regCoef(pna_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + npo_sst_ann = sst_ann(0,:,:) + npo_sst_ann = (/ regCoef(npo_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(pna_ann({50},{185}))) then + if (pna_ann({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_ann = pna_ann*-1. + pna_pc_ann = pna_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + pna_sst_ann = pna_sst_ann*-1. + end if + end if + end if + if (.not.ismissing(npo_ann({65},{185}))) then + if (npo_ann({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_ann = npo_ann*-1. + npo_pc_ann = npo_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_ann = npo_sst_ann*-1. + end if + end if + end if + pna_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + npo_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + copy_VarCoords(npi_ndjfm,pna_pc_ann) + copy_VarCoords(npi_ndjfm,npo_pc_ann) + delete([/evecv,pcts/]) + + if (COMPUTE_MODES_MON.eq."True") then + evecv = eofunc(arr_mon_CW({lat|20:85},{lon|120:240},time|:),2,75) + pcts = eofunc_ts(arr_mon_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_mon = dim_standardize(pcts(0,:),0) + npo_pc_mon = dim_standardize(pcts(1,:),0) + pna_mon = arr(0,:,:) + pna_mon = (/ regCoef(pna_pc_mon,arr(lat|:,lon|:,time|:)) /) + npo_mon = arr(0,:,:) + npo_mon = (/ regCoef(npo_pc_mon,arr(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_mon = sst(0,:,:) + pna_sst_mon = (/ regCoef(pna_pc_mon,sst(lat|:,lon|:,time|:)) /) + npo_sst_mon = sst(0,:,:) + npo_sst_mon = (/ regCoef(npo_pc_mon,sst(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(pna_mon({50},{185}))) then + if (pna_mon({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_mon = pna_mon*-1. + pna_pc_mon = pna_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + pna_sst_mon = pna_sst_mon*-1. + end if + end if + end if + if (.not.ismissing(npo_mon({65},{185}))) then + if (npo_mon({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_mon = npo_mon*-1. + npo_pc_mon = npo_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_mon = npo_sst_mon*-1. 
+ end if + end if + end if + pna_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + npo_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1))) + pna_pc_mon!0 = "time" + pna_pc_mon&time = arr&time + npo_pc_mon!0 = "time" + npo_pc_mon&time = arr&time + delete([/evecv,pcts/]) + end if +;----------NAO calculations-------------------------------------------------------------------------------- + arr_djf_CW_LF = lonFlip(arr_djf_CW) + arr_mam_CW_LF = lonFlip(arr_mam_CW) + arr_jja_CW_LF = lonFlip(arr_jja_CW) + arr_son_CW_LF = lonFlip(arr_son_CW) + arr_ann_CW_LF = lonFlip(arr_ann_CW) + if (COMPUTE_MODES_MON.eq."True") then + arr_mon_CW_LF = lonFlip(arr_mon_CW) + delete(arr_mon_CW) + end if + delete([/arr_djf_CW,arr_mam_CW,arr_jja_CW,arr_son_CW,arr_ann_CW/]) + + evecv = eofunc(arr_djf_CW_LF({lat|20:80},{lon|-90.:40},time|:),1,75) + pcts = eofunc_ts(arr_djf_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_djf = dim_standardize(pcts(0,:),0) + nao_djf = arr_djf(0,:,:) + nao_djf = (/ regCoef(nao_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_djf = sst_djf(0,:,:) + nao_sst_djf = (/ regCoef(nao_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + delete(sst_djf) + end if + if (.not.ismissing(nao_djf({70},{350}))) then + if (nao_djf({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_djf = nao_djf*-1. + nao_pc_djf = nao_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_djf = nao_sst_djf*-1. + end if + end if + end if + nao_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nao_pc_djf) + delete([/evecv,pcts,arr_djf,arr_djf_CW_LF/]) + + evecv = eofunc(arr_mam_CW_LF({lat|20:80},{lon|-90.:40},time|:),1,75) + pcts = eofunc_ts(arr_mam_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_mam = dim_standardize(pcts(0,:),0) + nao_mam = arr_mam(0,:,:) + nao_mam = (/ regCoef(nao_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_mam = sst_mam(0,:,:) + nao_sst_mam = (/ regCoef(nao_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + delete(sst_mam) + end if + if (.not.ismissing(nao_mam({70},{350}))) then + if (nao_mam({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_mam = nao_mam*-1. + nao_pc_mam = nao_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_mam = nao_sst_mam*-1. + end if + end if + end if + nao_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nao_pc_mam) + delete([/evecv,pcts,arr_mam,arr_mam_CW_LF/]) + + evecv = eofunc(arr_jja_CW_LF({lat|20:80},{lon|-90.:40},time|:),1,75) + pcts = eofunc_ts(arr_jja_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_jja = dim_standardize(pcts(0,:),0) + nao_jja = arr_jja(0,:,:) + nao_jja = (/ regCoef(nao_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_jja = sst_jja(0,:,:) + nao_sst_jja = (/ regCoef(nao_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + delete(sst_jja) + end if + if (.not.ismissing(nao_jja({70},{350}))) then + if (nao_jja({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_jja = nao_jja*-1. + nao_pc_jja = nao_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_jja = nao_sst_jja*-1. 
+ end if + end if + end if + nao_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nao_pc_jja) + delete([/evecv,pcts,arr_jja,arr_jja_CW_LF/]) + + evecv = eofunc(arr_son_CW_LF({lat|20:80},{lon|-90.:40},time|:),1,75) + pcts = eofunc_ts(arr_son_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_son = dim_standardize(pcts(0,:),0) + nao_son = arr_son(0,:,:) + nao_son = (/ regCoef(nao_pc_son,arr_son(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_son = sst_son(0,:,:) + nao_sst_son = (/ regCoef(nao_pc_son,sst_son(lat|:,lon|:,time|:)) /) + delete(sst_son) + end if + if (.not.ismissing(nao_son({70},{350}))) then + if (nao_son({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_son = nao_son*-1. + nao_pc_son = nao_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_son = nao_sst_son*-1. + end if + end if + end if + nao_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nao_pc_son) + delete([/evecv,pcts,arr_son,arr_son_CW_LF/]) + + evecv = eofunc(arr_ann_CW_LF({lat|20:80},{lon|-90.:40},time|:),1,75) + pcts = eofunc_ts(arr_ann_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_ann = dim_standardize(pcts(0,:),0) + nao_ann = arr_ann(0,:,:) + nao_ann = (/ regCoef(nao_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_ann = sst_ann(0,:,:) + nao_sst_ann = (/ regCoef(nao_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + delete(sst_ann) + end if + if (.not.ismissing(nao_ann({70},{350}))) then + if (nao_ann({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_ann = nao_ann*-1. + nao_pc_ann = nao_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_ann = nao_sst_ann*-1. + end if + end if + end if + nao_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + copy_VarCoords(npi_ndjfm,nao_pc_ann) + delete([/evecv,pcts,arr_ann,arr_ann_CW_LF/]) + + if (COMPUTE_MODES_MON.eq."True") then + evecv = eofunc(arr_mon_CW_LF({lat|20:80},{lon|-90.:40},time|:),1,75) + pcts = eofunc_ts(arr_mon_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_mon = dim_standardize(pcts(0,:),0) + nao_mon = arr(0,:,:) + nao_mon = (/ regCoef(nao_pc_mon,arr(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_mon = sst(0,:,:) + nao_sst_mon = (/ regCoef(nao_pc_mon,sst(lat|:,lon|:,time|:)) /) + delete(sst) + end if + if (.not.ismissing(nao_mon({70},{350}))) then + if (nao_mon({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_mon = nao_mon*-1. + nao_pc_mon = nao_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_mon = nao_sst_mon*-1. 
+ end if + end if + end if + nao_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0))) + nao_pc_mon!0 = "time" + nao_pc_mon&time = arr&time + delete([/evecv,pcts,arr,arr_mon_CW_LF/]) + end if +;------------------------------------------------------------------------------------------------------ + if (sstreg_frame.eq.1.and.sstreg_plot_flag.eq.0) then ; sstreg_frame = flag to create regressions .ps/.png files + sstreg_frame = 0 + end if +;------------------------------------------------------------------------------------------------------ + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + fn = getenv("OUTDIR")+modname+".cvdp_data."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + else + z = addfile(fn,"w") + end if + z->npi_ndjfm = npi_ndjfm + + z->nao_pc_djf = nao_pc_djf + z->nao_pc_mam = nao_pc_mam + z->nao_pc_jja = nao_pc_jja + z->nao_pc_son = nao_pc_son + z->nao_pc_ann = nao_pc_ann + + z->sam_pc_djf = sam_pc_djf + z->sam_pc_mam = sam_pc_mam + z->sam_pc_jja = sam_pc_jja + z->sam_pc_son = sam_pc_son + z->sam_pc_ann = sam_pc_ann + + z->psa1_pc_djf = psa1_pc_djf + z->psa1_pc_mam = psa1_pc_mam + z->psa1_pc_jja = psa1_pc_jja + z->psa1_pc_son = psa1_pc_son + z->psa1_pc_ann = psa1_pc_ann + + z->psa2_pc_djf = psa2_pc_djf + z->psa2_pc_mam = psa2_pc_mam + z->psa2_pc_jja = psa2_pc_jja + z->psa2_pc_son = psa2_pc_son + z->psa2_pc_ann = psa2_pc_ann + + z->nam_pc_djf = nam_pc_djf + z->nam_pc_mam = nam_pc_mam + z->nam_pc_jja = nam_pc_jja + z->nam_pc_son = nam_pc_son + z->nam_pc_ann = nam_pc_ann + + z->pna_pc_djf = pna_pc_djf + z->pna_pc_mam = pna_pc_mam + z->pna_pc_jja = pna_pc_jja + z->pna_pc_son = pna_pc_son + z->pna_pc_ann = pna_pc_ann + + z->npo_pc_djf = npo_pc_djf + z->npo_pc_mam = npo_pc_mam + z->npo_pc_jja = npo_pc_jja + z->npo_pc_son = npo_pc_son + z->npo_pc_ann = npo_pc_ann + + z->nao_djf = nao_djf + z->nao_mam = nao_mam + z->nao_jja = nao_jja + z->nao_son = nao_son + z->nao_ann = nao_ann + + z->sam_djf = sam_djf + z->sam_mam = sam_mam + z->sam_jja = sam_jja + z->sam_son = sam_son + z->sam_ann = sam_ann + + z->psa1_djf = psa1_djf + z->psa1_mam = psa1_mam + z->psa1_jja = psa1_jja + z->psa1_son = psa1_son + z->psa1_ann = psa1_ann + + z->psa2_djf = psa2_djf + z->psa2_mam = psa2_mam + z->psa2_jja = psa2_jja + z->psa2_son = psa2_son + z->psa2_ann = psa2_ann + + z->nam_djf = nam_djf + z->nam_mam = nam_mam + z->nam_jja = nam_jja + z->nam_son = nam_son + z->nam_ann = nam_ann + + z->pna_djf = pna_djf + z->pna_mam = pna_mam + z->pna_jja = pna_jja + z->pna_son = pna_son + z->pna_ann = pna_ann + + z->npo_djf = npo_djf + z->npo_mam = npo_mam + z->npo_jja = npo_jja + z->npo_son = npo_son + z->npo_ann = npo_ann + + if (isfilepresent2("obs_psl").and.ee.eq.0) then ; will not write out SST regressions to OBS files due to different + else ; variable going into PSL OBS file.. 
+ if (sstreg_plot_flag.eq.0) then + z->nao_sst_regression_djf = nao_sst_djf + z->nao_sst_regression_mam = nao_sst_mam + z->nao_sst_regression_jja = nao_sst_jja + z->nao_sst_regression_son = nao_sst_son + z->nao_sst_regression_ann = nao_sst_ann + + z->sam_sst_regression_djf = sam_sst_djf + z->sam_sst_regression_mam = sam_sst_mam + z->sam_sst_regression_jja = sam_sst_jja + z->sam_sst_regression_son = sam_sst_son + z->sam_sst_regression_ann = sam_sst_ann + + z->psa1_sst_regression_djf = psa1_sst_djf + z->psa1_sst_regression_mam = psa1_sst_mam + z->psa1_sst_regression_jja = psa1_sst_jja + z->psa1_sst_regression_son = psa1_sst_son + z->psa1_sst_regression_ann = psa1_sst_ann + + z->psa2_sst_regression_djf = psa2_sst_djf + z->psa2_sst_regression_mam = psa2_sst_mam + z->psa2_sst_regression_jja = psa2_sst_jja + z->psa2_sst_regression_son = psa2_sst_son + z->psa2_sst_regression_ann = psa2_sst_ann + + z->nam_sst_regression_djf = nam_sst_djf + z->nam_sst_regression_mam = nam_sst_mam + z->nam_sst_regression_jja = nam_sst_jja + z->nam_sst_regression_son = nam_sst_son + z->nam_sst_regression_ann = nam_sst_ann + + z->pna_sst_regression_djf = pna_sst_djf + z->pna_sst_regression_mam = pna_sst_mam + z->pna_sst_regression_jja = pna_sst_jja + z->pna_sst_regression_son = pna_sst_son + z->pna_sst_regression_ann = pna_sst_ann + + z->npo_sst_regression_djf = npo_sst_djf + z->npo_sst_regression_mam = npo_sst_mam + z->npo_sst_regression_jja = npo_sst_jja + z->npo_sst_regression_son = npo_sst_son + z->npo_sst_regression_ann = npo_sst_ann + end if + end if + + if (COMPUTE_MODES_MON.eq."True") then + z->nao_pc_mon = nao_pc_mon + z->sam_pc_mon = sam_pc_mon + z->psa1_pc_mon = psa1_pc_mon + z->psa2_pc_mon = psa2_pc_mon + z->nam_pc_mon = nam_pc_mon + z->pna_pc_mon = pna_pc_mon + z->npo_pc_mon = npo_pc_mon + z->nao_mon = nao_mon + z->sam_mon = sam_mon + z->psa1_mon = psa1_mon + z->psa2_mon = psa2_mon + z->nam_mon = nam_mon + z->pna_mon = pna_mon + z->npo_mon = npo_mon + if (isfilepresent2("obs_psl").and.ee.eq.0) then ; will not write out SST regressions to OBS files due to different + else ; variable going into PSL OBS file.. + if (sstreg_plot_flag.eq.0) then + z->nao_sst_regression_mon = nao_sst_mon + z->sam_sst_regression_mon = sam_sst_mon + z->psa1_sst_regression_mon = psa1_sst_mon + z->psa2_sst_regression_mon = psa2_sst_mon + z->nam_sst_regression_mon = nam_sst_mon + z->pna_sst_regression_mon = pna_sst_mon + z->npo_sst_regression_mon = npo_sst_mon + end if + end if + end if + delete(z) + end if + +;======================================================================== + res = True + res@mpGeophysicalLineColor = "gray42" + res@mpGeophysicalLineThicknessF = 2. + res@mpGridAndLimbOn = False + res@mpFillOn = False + res@mpOutlineOn = True + res@mpDataSetName = "Earth..4" + res@gsnDraw = False + res@gsnFrame = False + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.03 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.03 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + if (nsim.le.5) then + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@gsnRightStringFontHeightF = 0.018 + else + res@gsnLeftStringFontHeightF = 0.024 + res@gsnCenterStringFontHeightF = 0.028 + res@gsnRightStringFontHeightF = 0.024 + end if + res@gsnPolar = "NH" + res@mpMinLatF = 20. + res@mpCenterLonF = 0. 
+ + res@cnLevels = (/-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7./) + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnCenterString = names(ee) + + if (isfilepresent2("obs_psl").and.ee.eq.0) then ; for pattern correlation table. Save entire lat/lon array + patcor_nam_djf = new((/nsim,dimsizes(nam_djf&lat),dimsizes(nam_djf&lon)/),typeof(nam_djf)) + patcor_nam_djf!1 = "lat" + patcor_nam_djf&lat = nam_djf&lat + patcor_nam_djf!2 = "lon" + patcor_nam_djf&lon = nam_djf&lon + patcor_nam_jja = patcor_nam_djf + patcor_nam_ann = patcor_nam_djf + patcor_sam_djf = patcor_nam_djf + patcor_sam_jja = patcor_nam_djf + patcor_sam_ann = patcor_nam_djf + patcor_pna_djf = patcor_nam_djf + patcor_pna_jja = patcor_nam_djf + patcor_pna_ann = patcor_nam_djf + patcor_npo_djf = patcor_nam_djf + patcor_npo_jja = patcor_nam_djf + patcor_npo_ann = patcor_nam_djf + patcor_nao_djf = patcor_nam_djf + patcor_nao_jja = patcor_nam_djf + patcor_nao_ann = patcor_nam_djf + patcor_psa1_djf = patcor_nam_djf + patcor_psa1_jja = patcor_nam_djf + patcor_psa1_ann = patcor_nam_djf + patcor_psa2_djf = patcor_nam_djf + patcor_psa2_jja = patcor_nam_djf + patcor_psa2_ann = patcor_nam_djf + patcor_nam_djf(ee,:,:) = (/ nam_djf /) + patcor_nam_jja(ee,:,:) = (/ nam_jja /) + patcor_nam_ann(ee,:,:) = (/ nam_ann /) + patcor_sam_djf(ee,:,:) = (/ sam_djf /) + patcor_sam_jja(ee,:,:) = (/ sam_jja /) + patcor_sam_ann(ee,:,:) = (/ sam_ann /) + patcor_nao_djf(ee,:,:) = (/ nao_djf /) + patcor_nao_jja(ee,:,:) = (/ nao_jja /) + patcor_nao_ann(ee,:,:) = (/ nao_ann /) + patcor_pna_djf(ee,:,:) = (/ pna_djf /) + patcor_pna_jja(ee,:,:) = (/ pna_jja /) + patcor_pna_ann(ee,:,:) = (/ pna_ann /) + patcor_npo_djf(ee,:,:) = (/ npo_djf /) + patcor_npo_jja(ee,:,:) = (/ npo_jja /) + patcor_npo_ann(ee,:,:) = (/ npo_ann /) + patcor_psa1_djf(ee,:,:) = (/ psa1_djf /) + patcor_psa1_jja(ee,:,:) = (/ psa1_jja /) + patcor_psa1_ann(ee,:,:) = (/ psa1_ann /) + patcor_psa2_djf(ee,:,:) = (/ psa2_djf /) + patcor_psa2_jja(ee,:,:) = (/ psa2_jja /) + patcor_psa2_ann(ee,:,:) = (/ psa2_ann /) + end if + if (isfilepresent2("obs_psl").and.ee.ge.1) then + patcor_nam_djf(ee,:,:) = (/ linint2(nam_djf&lon,nam_djf&lat,nam_djf,True,patcor_nam_djf&lon,patcor_nam_djf&lat,0) /) + patcor_nam_jja(ee,:,:) = (/ linint2(nam_jja&lon,nam_jja&lat,nam_jja,True,patcor_nam_jja&lon,patcor_nam_jja&lat,0) /) + patcor_nam_ann(ee,:,:) = (/ linint2(nam_ann&lon,nam_ann&lat,nam_ann,True,patcor_nam_ann&lon,patcor_nam_ann&lat,0) /) + + patcor_sam_djf(ee,:,:) = (/ linint2(sam_djf&lon,sam_djf&lat,sam_djf,True,patcor_sam_djf&lon,patcor_sam_djf&lat,0) /) + patcor_sam_jja(ee,:,:) = (/ linint2(sam_jja&lon,sam_jja&lat,sam_jja,True,patcor_sam_jja&lon,patcor_sam_jja&lat,0) /) + patcor_sam_ann(ee,:,:) = (/ linint2(sam_ann&lon,sam_ann&lat,sam_ann,True,patcor_sam_ann&lon,patcor_sam_ann&lat,0) /) + + patcor_nao_djf(ee,:,:) = (/ linint2(nao_djf&lon,nao_djf&lat,nao_djf,True,patcor_nao_djf&lon,patcor_nao_djf&lat,0) /) + patcor_nao_jja(ee,:,:) = (/ linint2(nao_jja&lon,nao_jja&lat,nao_jja,True,patcor_nao_jja&lon,patcor_nao_jja&lat,0) /) + patcor_nao_ann(ee,:,:) = (/ linint2(nao_ann&lon,nao_ann&lat,nao_ann,True,patcor_nao_ann&lon,patcor_nao_ann&lat,0) /) + + patcor_pna_djf(ee,:,:) = (/ linint2(pna_djf&lon,pna_djf&lat,pna_djf,True,patcor_pna_djf&lon,patcor_pna_djf&lat,0) /) + patcor_pna_jja(ee,:,:) = (/ linint2(pna_jja&lon,pna_jja&lat,pna_jja,True,patcor_pna_jja&lon,patcor_pna_jja&lat,0) /) + patcor_pna_ann(ee,:,:) = (/ linint2(pna_ann&lon,pna_ann&lat,pna_ann,True,patcor_pna_ann&lon,patcor_pna_ann&lat,0) /) + + patcor_npo_djf(ee,:,:) = (/ 
linint2(npo_djf&lon,npo_djf&lat,npo_djf,True,patcor_npo_djf&lon,patcor_npo_djf&lat,0) /) + patcor_npo_jja(ee,:,:) = (/ linint2(npo_jja&lon,npo_jja&lat,npo_jja,True,patcor_npo_jja&lon,patcor_npo_jja&lat,0) /) + patcor_npo_ann(ee,:,:) = (/ linint2(npo_ann&lon,npo_ann&lat,npo_ann,True,patcor_npo_ann&lon,patcor_npo_ann&lat,0) /) + + patcor_psa1_djf(ee,:,:) = (/ linint2(psa1_djf&lon,psa1_djf&lat,psa1_djf,True,patcor_psa1_djf&lon,patcor_psa1_djf&lat,0) /) + patcor_psa1_jja(ee,:,:) = (/ linint2(psa1_jja&lon,psa1_jja&lat,psa1_jja,True,patcor_psa1_jja&lon,patcor_psa1_jja&lat,0) /) + patcor_psa1_ann(ee,:,:) = (/ linint2(psa1_ann&lon,psa1_ann&lat,psa1_ann,True,patcor_psa1_ann&lon,patcor_psa1_ann&lat,0) /) + + patcor_psa2_djf(ee,:,:) = (/ linint2(psa2_djf&lon,psa2_djf&lat,psa2_djf,True,patcor_psa2_djf&lon,patcor_psa2_djf&lat,0) /) + patcor_psa2_jja(ee,:,:) = (/ linint2(psa2_jja&lon,psa2_jja&lat,psa2_jja,True,patcor_psa2_jja&lon,patcor_psa2_jja&lat,0) /) + patcor_psa2_ann(ee,:,:) = (/ linint2(psa2_ann&lon,psa2_ann&lat,psa2_ann,True,patcor_psa2_ann&lon,patcor_psa2_ann&lat,0) /) + end if + + res@gsnRightString = nam_djf@pcvar+"%" + map_nam_djf(ee) = gsn_csm_contour_map_polar(wks_nam,nam_djf,res) + res@gsnRightString = nam_mam@pcvar+"%" + map_nam_mam(ee) = gsn_csm_contour_map_polar(wks_nam,nam_mam,res) + res@gsnRightString = nam_jja@pcvar+"%" + map_nam_jja(ee) = gsn_csm_contour_map_polar(wks_nam,nam_jja,res) + res@gsnRightString = nam_son@pcvar+"%" + map_nam_son(ee) = gsn_csm_contour_map_polar(wks_nam,nam_son,res) + res@gsnRightString = nam_ann@pcvar+"%" + map_nam_ann(ee) = gsn_csm_contour_map_polar(wks_nam,nam_ann,res) + delete([/nam_djf,nam_mam,nam_jja,nam_son,nam_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = nam_mon@pcvar+"%" + map_nam_mon(ee) = gsn_csm_contour_map_polar(wks_nam,nam_mon,res) + delete([/nam_mon/]) + end if + + res@gsnRightString = nao_djf@pcvar+"%" + map_nao_djf(ee) = gsn_csm_contour_map_polar(wks_nao,nao_djf,res) + res@gsnRightString = nao_mam@pcvar+"%" + map_nao_mam(ee) = gsn_csm_contour_map_polar(wks_nao,nao_mam,res) + res@gsnRightString = nao_jja@pcvar+"%" + map_nao_jja(ee) = gsn_csm_contour_map_polar(wks_nao,nao_jja,res) + res@gsnRightString = nao_son@pcvar+"%" + map_nao_son(ee) = gsn_csm_contour_map_polar(wks_nao,nao_son,res) + res@gsnRightString = nao_ann@pcvar+"%" + map_nao_ann(ee) = gsn_csm_contour_map_polar(wks_nao,nao_ann,res) + delete([/nao_djf,nao_mam,nao_jja,nao_son,nao_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = nao_mon@pcvar+"%" + map_nao_mon(ee) = gsn_csm_contour_map_polar(wks_nao,nao_mon,res) + delete([/nao_mon/]) + end if + + res@gsnRightString = pna_djf@pcvar+"%" + map_pna_djf(ee) = gsn_csm_contour_map_polar(wks_pna,pna_djf,res) + res@gsnRightString = pna_mam@pcvar+"%" + map_pna_mam(ee) = gsn_csm_contour_map_polar(wks_pna,pna_mam,res) + res@gsnRightString = pna_jja@pcvar+"%" + map_pna_jja(ee) = gsn_csm_contour_map_polar(wks_pna,pna_jja,res) + res@gsnRightString = pna_son@pcvar+"%" + map_pna_son(ee) = gsn_csm_contour_map_polar(wks_pna,pna_son,res) + res@gsnRightString = pna_ann@pcvar+"%" + map_pna_ann(ee) = gsn_csm_contour_map_polar(wks_pna,pna_ann,res) + delete([/pna_djf,pna_mam,pna_jja,pna_son,pna_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = pna_mon@pcvar+"%" + map_pna_mon(ee) = gsn_csm_contour_map_polar(wks_pna,pna_mon,res) + delete([/pna_mon/]) + end if + + res@cnLevels = (/-4,-3,-2.5,-2,-1.5,-1,-0.5,0,0.5,1,1.5,2,2.5,3,4/) + res@gsnRightString = npo_djf@pcvar+"%" + map_npo_djf(ee) = 
gsn_csm_contour_map_polar(wks_npo,npo_djf,res) + res@gsnRightString = npo_mam@pcvar+"%" + map_npo_mam(ee) = gsn_csm_contour_map_polar(wks_npo,npo_mam,res) + res@gsnRightString = npo_jja@pcvar+"%" + map_npo_jja(ee) = gsn_csm_contour_map_polar(wks_npo,npo_jja,res) + res@gsnRightString = npo_son@pcvar+"%" + map_npo_son(ee) = gsn_csm_contour_map_polar(wks_npo,npo_son,res) + res@gsnRightString = npo_ann@pcvar+"%" + map_npo_ann(ee) = gsn_csm_contour_map_polar(wks_npo,npo_ann,res) + delete([/npo_djf,npo_mam,npo_jja,npo_son,npo_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = npo_mon@pcvar+"%" + map_npo_mon(ee) = gsn_csm_contour_map_polar(wks_npo,npo_mon,res) + delete([/npo_mon/]) + end if + + if (sstreg_plot_flag.eq.0) then + res@cnLevels = fspan(-1.05,1.05,15) + res@gsnRightString = "" + reg_nam_djf(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_djf,res) + reg_nam_mam(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_mam,res) + reg_nam_jja(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_jja,res) + reg_nam_son(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_son,res) + reg_nam_ann(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_ann,res) + delete([/nam_sst_djf,nam_sst_mam,nam_sst_jja,nam_sst_son,nam_sst_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_nam_mon(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_mon,res) + delete([/nam_sst_mon/]) + end if + + reg_nao_djf(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_djf,res) + reg_nao_mam(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_mam,res) + reg_nao_jja(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_jja,res) + reg_nao_son(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_son,res) + reg_nao_ann(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_ann,res) + delete([/nao_sst_djf,nao_sst_mam,nao_sst_jja,nao_sst_son,nao_sst_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_nao_mon(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_mon,res) + delete([/nao_sst_mon/]) + end if + + reg_pna_djf(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_djf,res) + reg_pna_mam(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_mam,res) + reg_pna_jja(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_jja,res) + reg_pna_son(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_son,res) + reg_pna_ann(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_ann,res) + delete([/pna_sst_djf,pna_sst_mam,pna_sst_jja,pna_sst_son,pna_sst_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_pna_mon(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_mon,res) + delete([/pna_sst_mon/]) + end if + + reg_npo_djf(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_djf,res) + reg_npo_mam(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_mam,res) + reg_npo_jja(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_jja,res) + reg_npo_son(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_son,res) + reg_npo_ann(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_ann,res) + delete([/npo_sst_djf,npo_sst_mam,npo_sst_jja,npo_sst_son,npo_sst_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_npo_mon(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_mon,res) + delete([/npo_sst_mon/]) + end if + end if + + res@cnLevels = (/-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7./) + res@gsnPolar = "SH" + delete(res@mpMinLatF) + res@mpMaxLatF = -20. + res@mpCenterLonF = 0. 
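+ ; Switch the projection to the Southern Hemisphere (20S poleward) for the SAM, PSA1 and PSA2 patterns.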
+ res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnCenterString = names(ee) + res@gsnRightString = sam_djf@pcvar+"%" + map_sam_djf(ee) = gsn_csm_contour_map_polar(wks_sam,sam_djf,res) + res@gsnRightString = sam_mam@pcvar+"%" + map_sam_mam(ee) = gsn_csm_contour_map_polar(wks_sam,sam_mam,res) + res@gsnRightString = sam_jja@pcvar+"%" + map_sam_jja(ee) = gsn_csm_contour_map_polar(wks_sam,sam_jja,res) + res@gsnRightString = sam_son@pcvar+"%" + map_sam_son(ee) = gsn_csm_contour_map_polar(wks_sam,sam_son,res) + res@gsnRightString = sam_ann@pcvar+"%" + map_sam_ann(ee) = gsn_csm_contour_map_polar(wks_sam,sam_ann,res) + delete([/sam_djf,sam_mam,sam_jja,sam_son,sam_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = sam_mon@pcvar+"%" + map_sam_mon(ee) = gsn_csm_contour_map_polar(wks_sam,sam_mon,res) + delete([/sam_mon/]) + end if + + res@cnLevels = (/-4,-3,-2.5,-2,-1.5,-1,-0.5,0,0.5,1,1.5,2,2.5,3,4/) + res@gsnRightString = psa1_djf@pcvar+"%" + map_psa1_djf(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_djf,res) + res@gsnRightString = psa1_mam@pcvar+"%" + map_psa1_mam(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_mam,res) + res@gsnRightString = psa1_jja@pcvar+"%" + map_psa1_jja(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_jja,res) + res@gsnRightString = psa1_son@pcvar+"%" + map_psa1_son(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_son,res) + res@gsnRightString = psa1_ann@pcvar+"%" + map_psa1_ann(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_ann,res) + delete([/psa1_djf,psa1_mam,psa1_jja,psa1_son,psa1_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = psa1_mon@pcvar+"%" + map_psa1_mon(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_mon,res) + delete([/psa1_mon/]) + end if + + res@gsnRightString = psa2_djf@pcvar+"%" + map_psa2_djf(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_djf,res) + res@gsnRightString = psa2_mam@pcvar+"%" + map_psa2_mam(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_mam,res) + res@gsnRightString = psa2_jja@pcvar+"%" + map_psa2_jja(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_jja,res) + res@gsnRightString = psa2_son@pcvar+"%" + map_psa2_son(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_son,res) + res@gsnRightString = psa2_ann@pcvar+"%" + map_psa2_ann(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_ann,res) + delete([/psa2_djf,psa2_mam,psa2_jja,psa2_son,psa2_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = psa2_mon@pcvar+"%" + map_psa2_mon(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_mon,res) + delete([/psa2_mon/]) + end if + + if (sstreg_plot_flag.eq.0) then + res@cnLevels = fspan(-1.05,1.05,15) + res@gsnRightString = "" + reg_sam_djf(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_djf,res) + reg_sam_mam(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_mam,res) + reg_sam_jja(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_jja,res) + reg_sam_son(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_son,res) + reg_sam_ann(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_ann,res) + delete([/sam_sst_djf,sam_sst_mam,sam_sst_jja,sam_sst_son,sam_sst_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_sam_mon(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_mon,res) + delete([/sam_sst_mon/]) + end if + + reg_psa1_djf(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_djf,res) + reg_psa1_mam(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_mam,res) + reg_psa1_jja(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_jja,res) + reg_psa1_son(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_son,res) + reg_psa1_ann(ee) = 
gsn_csm_contour_map_polar(wks_psa1,psa1_sst_ann,res) + delete([/psa1_sst_djf,psa1_sst_mam,psa1_sst_jja,psa1_sst_son,psa1_sst_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_psa1_mon(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_mon,res) + delete([/psa1_sst_mon/]) + end if + + reg_psa2_djf(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_djf,res) + reg_psa2_mam(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_mam,res) + reg_psa2_jja(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_jja,res) + reg_psa2_son(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_son,res) + reg_psa2_ann(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_ann,res) + delete([/psa2_sst_djf,psa2_sst_mam,psa2_sst_jja,psa2_sst_son,psa2_sst_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_psa2_mon(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_mon,res) + delete([/psa2_sst_mon/]) + end if + end if + delete(res@mpMaxLatF) + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnXYBarChart = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + xyres@gsnAboveYRefLineColor = 185 + xyres@gsnBelowYRefLineColor = 35 + if (wks_type.eq."png") then + xyres@xyLineThicknessF = .5 + else + xyres@xyLineThicknessF = .2 + end if + xyres@xyLineColor = "gray52" + xyres@tiYAxisString = "" + xyres@tiXAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnStringFontHeightF = 0.017 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnStringFontHeightF = 0.024 + end if + + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnLeftString = "" + xyres@gsnRightString = "" + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+1.5 + + xyres@gsnCenterString = names(ee) + + xyresmon = xyres + xyresmon@gsnXYBarChart = False + xyresmon@xyLineThicknessF = .1 + + xy_nam_djf(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_djf)),nam_pc_djf,xyres) ; use standardized timeseries + xy_nam_mam(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_mam)),nam_pc_mam,xyres) ; use standardized timeseries + xy_nam_jja(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_jja)),nam_pc_jja,xyres) ; use standardized timeseries + xy_nam_son(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_son)),nam_pc_son,xyres) ; use standardized timeseries + xy_nam_ann(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_ann)),nam_pc_ann,xyres) ; use standardized timeseries + delete([/nam_pc_djf,nam_pc_mam,nam_pc_jja,nam_pc_son,nam_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_nam_mon(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_mon)),nam_pc_mon,xyresmon) ; use standardized timeseries + delete([/nam_pc_mon/]) + end if + + xy_nao_djf(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_djf)),nao_pc_djf,xyres) ; use standardized timeseries + xy_nao_mam(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_mam)),nao_pc_mam,xyres) ; use standardized timeseries + xy_nao_jja(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_jja)),nao_pc_jja,xyres) ; use standardized timeseries + xy_nao_son(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_son)),nao_pc_son,xyres) ; use standardized timeseries + xy_nao_ann(ee) = 
gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_ann)),nao_pc_ann,xyres) ; use standardized timeseries + delete([/nao_pc_djf,nao_pc_mam,nao_pc_jja,nao_pc_son,nao_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_nao_mon(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_mon)),nao_pc_mon,xyresmon) ; use standardized timeseries + delete([/nao_pc_mon/]) + end if + + xy_pna_djf(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_djf)),pna_pc_djf,xyres) ; use standardized timeseries + xy_pna_mam(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_mam)),pna_pc_mam,xyres) ; use standardized timeseries + xy_pna_jja(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_jja)),pna_pc_jja,xyres) ; use standardized timeseries + xy_pna_son(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_son)),pna_pc_son,xyres) ; use standardized timeseries + xy_pna_ann(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_ann)),pna_pc_ann,xyres) ; use standardized timeseries + delete([/pna_pc_djf,pna_pc_mam,pna_pc_jja,pna_pc_son,pna_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_pna_mon(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_mon)),pna_pc_mon,xyresmon) ; use standardized timeseries + delete([/pna_pc_mon/]) + end if + + xy_npo_djf(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_djf)),npo_pc_djf,xyres) ; use standardized timeseries + xy_npo_mam(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_mam)),npo_pc_mam,xyres) ; use standardized timeseries + xy_npo_jja(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_jja)),npo_pc_jja,xyres) ; use standardized timeseries + xy_npo_son(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_son)),npo_pc_son,xyres) ; use standardized timeseries + xy_npo_ann(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_ann)),npo_pc_ann,xyres) ; use standardized timeseries + delete([/npo_pc_djf,npo_pc_mam,npo_pc_jja,npo_pc_son,npo_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_npo_mon(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_mon)),npo_pc_mon,xyresmon) ; use standardized timeseries + delete([/npo_pc_mon/]) + end if + + xy_sam_djf(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_djf)),sam_pc_djf,xyres) ; use standardized timeseries + xy_sam_mam(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_mam)),sam_pc_mam,xyres) ; use standardized timeseries + xy_sam_jja(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_jja)),sam_pc_jja,xyres) ; use standardized timeseries + xy_sam_son(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_son)),sam_pc_son,xyres) ; use standardized timeseries + xy_sam_ann(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_ann)),sam_pc_ann,xyres) ; use standardized timeseries + delete([/sam_pc_djf,sam_pc_mam,sam_pc_jja,sam_pc_son,sam_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_sam_mon(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_mon)),sam_pc_mon,xyresmon) ; use standardized timeseries + delete([/sam_pc_mon/]) + end if + + xy_psa1_djf(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_djf)),psa1_pc_djf,xyres) ; use standardized timeseries + xy_psa1_mam(ee) = 
gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_mam)),psa1_pc_mam,xyres) ; use standardized timeseries + xy_psa1_jja(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_jja)),psa1_pc_jja,xyres) ; use standardized timeseries + xy_psa1_son(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_son)),psa1_pc_son,xyres) ; use standardized timeseries + xy_psa1_ann(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_ann)),psa1_pc_ann,xyres) ; use standardized timeseries + delete([/psa1_pc_djf,psa1_pc_mam,psa1_pc_jja,psa1_pc_son,psa1_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_psa1_mon(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_mon)),psa1_pc_mon,xyresmon) ; use standardized timeseries + delete([/psa1_pc_mon/]) + end if + + xy_psa2_djf(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_djf)),psa2_pc_djf,xyres) ; use standardized timeseries + xy_psa2_mam(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_mam)),psa2_pc_mam,xyres) ; use standardized timeseries + xy_psa2_jja(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_jja)),psa2_pc_jja,xyres) ; use standardized timeseries + xy_psa2_son(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_son)),psa2_pc_son,xyres) ; use standardized timeseries + xy_psa2_ann(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_ann)),psa2_pc_ann,xyres) ; use standardized timeseries + delete([/psa2_pc_djf,psa2_pc_mam,psa2_pc_jja,psa2_pc_son,psa2_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_psa2_mon(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_mon)),psa2_pc_mon,xyresmon) ; use standardized timeseries + delete([/psa2_pc_mon/]) + end if + + xy_npi(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(npi_ndjfm)),npi_ndjfm,xyres) ; throw NPI into wks_psa2_ts workstation + delete(npi_ndjfm) + delete(sstreg_plot_flag) + end do + + if (isvar("clim_syear")) then + delete(clim_syear) + end if + if (isvar("clim_eyear")) then + delete(clim_eyear) + end if + + if (isfilepresent2("obs_psl")) then ; for pattern correlation table + clat = cos(0.01745329*patcor_nam_djf&lat) + clat!0 = "lat" + clat&lat = patcor_nam_djf&lat + line3 = " " ; Must be 18 characters long + line4 = line3 + + finpr_nam_djf = "NAM (DJF) " ; Must be 18 characters long + finpr_sam_djf = "SAM (DJF) " + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor_nam_djf(hh,{20:},:)))) then + finpr_nam_djf = finpr_nam_djf+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr_nam_djf = finpr_nam_djf+sprintf(format2,(pattern_cor(patcor_nam_djf(0,{20:},:),patcor_nam_djf(hh,{20:},:),clat({20:}),0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor_nam_djf(0,{20:},:))),ndtooned(NewCosWeight(patcor_nam_djf(hh,{20:},:)))))) + end if + if (all(ismissing(patcor_sam_djf(hh,{:-20},:)))) then + finpr_sam_djf = finpr_sam_djf+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr_sam_djf = 
finpr_sam_djf+sprintf(format2,(pattern_cor(patcor_sam_djf(0,{:-20},:),patcor_sam_djf(hh,{:-20},:),clat({:-20}),0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor_sam_djf(0,{:-20},:))),ndtooned(NewCosWeight(patcor_sam_djf(hh,{:-20},:)))))) + end if + end do +; + if (dimsizes(tochar(line4)).ge.8190) then ; system or Fortran compiler limit + print("Metrics table warning: Not creating metrics table as the size of the comparison results in an invalid ASCII row size.") + else + if (.not.isfilepresent2(getenv("OUTDIR")+"metrics_orig.txt")) then + header = (/"","Pattern Correlations Observations vs. Model(s)",""/) + write_table(getenv("OUTDIR")+"metrics_orig.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics_orig.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics_orig.txt","a",[/line4/],"%s") + delete(header) + end if + write_table(getenv("OUTDIR")+"metrics_orig.txt","a",[/finpr_nam_djf/],"%s") + write_table(getenv("OUTDIR")+"metrics_orig.txt","a",[/finpr_sam_djf/],"%s") + end if + delete([/line3,line4,format2,format3,nchar,ntc,clat,patcor_nam_djf,patcor_nam_jja,patcor_nam_ann,patcor_sam_djf,patcor_sam_jja,patcor_sam_ann/]) + delete([/patcor_nao_djf,patcor_nao_jja,patcor_nao_ann,patcor_npo_djf,patcor_npo_jja,patcor_npo_ann,patcor_pna_djf,patcor_pna_jja,patcor_pna_ann/]) + delete([/patcor_psa1_djf,patcor_psa1_jja,patcor_psa1_ann,patcor_psa2_djf,patcor_psa2_jja,patcor_psa2_ann,dimY,ntb/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@gsnPanelBottom = 0.04 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.55 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.5) then + panres@txFontHeightF = 0.024 + else + panres@txFontHeightF = 0.016 + end if + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "NAM (DJF)" + gsn_panel(wks_nam,map_nam_djf,(/nrow,ncol/),panres) + delete(map_nam_djf) + panres@txString = "NAM (MAM)" + gsn_panel(wks_nam,map_nam_mam,(/nrow,ncol/),panres) + delete(map_nam_mam) + panres@txString = "NAM (JJA)" + gsn_panel(wks_nam,map_nam_jja,(/nrow,ncol/),panres) + delete(map_nam_jja) + panres@txString = "NAM (SON)" + gsn_panel(wks_nam,map_nam_son,(/nrow,ncol/),panres) + delete(map_nam_son) + panres@txString = "NAM (Annual)" + gsn_panel(wks_nam,map_nam_ann,(/nrow,ncol/),panres) + delete(map_nam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAM (Monthly)" + gsn_panel(wks_nam,map_nam_mon,(/nrow,ncol/),panres) + delete(map_nam_mon) + end if + + if (sstreg_frame.eq.0) then + panres@txString = "NAM SST Regressions (DJF)" + gsn_panel(wks_nam,reg_nam_djf,(/nrow,ncol/),panres) + delete(reg_nam_djf) + panres@txString = "NAM SST Regressions (MAM)" + gsn_panel(wks_nam,reg_nam_mam,(/nrow,ncol/),panres) + delete(reg_nam_mam) + panres@txString = "NAM SST Regressions (JJA)" + gsn_panel(wks_nam,reg_nam_jja,(/nrow,ncol/),panres) + delete(reg_nam_jja) + panres@txString = "NAM SST Regressions (SON)" + gsn_panel(wks_nam,reg_nam_son,(/nrow,ncol/),panres) + delete(reg_nam_son) + panres@txString = "NAM SST Regressions (Annual)" + gsn_panel(wks_nam,reg_nam_ann,(/nrow,ncol/),panres) + delete(reg_nam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAM SST Regressions (Monthly)" + gsn_panel(wks_nam,reg_nam_mon,(/nrow,ncol/),panres) + delete(reg_nam_mon) + end if + delete(wks_nam) + end if + + panres@txString = "NAO (DJF)" + 
gsn_panel(wks_nao,map_nao_djf,(/nrow,ncol/),panres) + delete(map_nao_djf) + panres@txString = "NAO (MAM)" + gsn_panel(wks_nao,map_nao_mam,(/nrow,ncol/),panres) + delete(map_nao_mam) + panres@txString = "NAO (JJA)" + gsn_panel(wks_nao,map_nao_jja,(/nrow,ncol/),panres) + delete(map_nao_jja) + panres@txString = "NAO (SON)" + gsn_panel(wks_nao,map_nao_son,(/nrow,ncol/),panres) + delete(map_nao_son) + panres@txString = "NAO (Annual)" + gsn_panel(wks_nao,map_nao_ann,(/nrow,ncol/),panres) + delete(map_nao_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAO (Monthly)" + gsn_panel(wks_nao,map_nao_mon,(/nrow,ncol/),panres) + delete(map_nao_mon) + end if + + if (sstreg_frame.eq.0) then + panres@txString = "NAO SST Regressions (DJF)" + gsn_panel(wks_nao,reg_nao_djf,(/nrow,ncol/),panres) + delete(reg_nao_djf) + panres@txString = "NAO SST Regressions (MAM)" + gsn_panel(wks_nao,reg_nao_mam,(/nrow,ncol/),panres) + delete(reg_nao_mam) + panres@txString = "NAO SST Regressions (JJA)" + gsn_panel(wks_nao,reg_nao_jja,(/nrow,ncol/),panres) + delete(reg_nao_jja) + panres@txString = "NAO SST Regressions (SON)" + gsn_panel(wks_nao,reg_nao_son,(/nrow,ncol/),panres) + delete(reg_nao_son) + panres@txString = "NAO SST Regressions (Annual)" + gsn_panel(wks_nao,reg_nao_ann,(/nrow,ncol/),panres) + delete(reg_nao_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAO SST Regressions (Monthly)" + gsn_panel(wks_nao,reg_nao_mon,(/nrow,ncol/),panres) + delete(reg_nao_mon) + end if + delete(wks_nao) + end if + + panres@txString = "PNA (DJF)" + gsn_panel(wks_pna,map_pna_djf,(/nrow,ncol/),panres) + delete(map_pna_djf) + panres@txString = "PNA (MAM)" + gsn_panel(wks_pna,map_pna_mam,(/nrow,ncol/),panres) + delete(map_pna_mam) + panres@txString = "PNA (JJA)" + gsn_panel(wks_pna,map_pna_jja,(/nrow,ncol/),panres) + delete(map_pna_jja) + panres@txString = "PNA (SON)" + gsn_panel(wks_pna,map_pna_son,(/nrow,ncol/),panres) + delete(map_pna_son) + panres@txString = "PNA (Annual)" + gsn_panel(wks_pna,map_pna_ann,(/nrow,ncol/),panres) + delete(map_pna_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PNA (Monthly)" + gsn_panel(wks_pna,map_pna_mon,(/nrow,ncol/),panres) + delete(map_pna_mon) + end if + + if (sstreg_frame.eq.0) then + panres@txString = "PNA SST Regressions (DJF)" + gsn_panel(wks_pna,reg_pna_djf,(/nrow,ncol/),panres) + delete(reg_pna_djf) + panres@txString = "PNA SST Regressions (MAM)" + gsn_panel(wks_pna,reg_pna_mam,(/nrow,ncol/),panres) + delete(reg_pna_mam) + panres@txString = "PNA SST Regressions (JJA)" + gsn_panel(wks_pna,reg_pna_jja,(/nrow,ncol/),panres) + delete(reg_pna_jja) + panres@txString = "PNA SST Regressions (SON)" + gsn_panel(wks_pna,reg_pna_son,(/nrow,ncol/),panres) + delete(reg_pna_son) + panres@txString = "PNA SST Regressions (Annual)" + gsn_panel(wks_pna,reg_pna_ann,(/nrow,ncol/),panres) + delete(reg_pna_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PNA SST Regressions (Monthly)" + gsn_panel(wks_pna,reg_pna_mon,(/nrow,ncol/),panres) + delete(reg_pna_mon) + end if + delete(wks_pna) + end if + + panres@txString = "NPO (DJF)" + gsn_panel(wks_npo,map_npo_djf,(/nrow,ncol/),panres) + delete(map_npo_djf) + panres@txString = "NPO (MAM)" + gsn_panel(wks_npo,map_npo_mam,(/nrow,ncol/),panres) + delete(map_npo_mam) + panres@txString = "NPO (JJA)" + gsn_panel(wks_npo,map_npo_jja,(/nrow,ncol/),panres) + delete(map_npo_jja) + panres@txString = "NPO (SON)" + gsn_panel(wks_npo,map_npo_son,(/nrow,ncol/),panres) + delete(map_npo_son) + panres@txString = "NPO 
(Annual)" + gsn_panel(wks_npo,map_npo_ann,(/nrow,ncol/),panres) + delete(map_npo_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NPO (Monthly)" + gsn_panel(wks_npo,map_npo_mon,(/nrow,ncol/),panres) + delete(map_npo_mon) + end if + + if (sstreg_frame.eq.0) then + panres@txString = "NPO SST Regressions (DJF)" + gsn_panel(wks_npo,reg_npo_djf,(/nrow,ncol/),panres) + delete(reg_npo_djf) + panres@txString = "NPO SST Regressions (MAM)" + gsn_panel(wks_npo,reg_npo_mam,(/nrow,ncol/),panres) + delete(reg_npo_mam) + panres@txString = "NPO SST Regressions (JJA)" + gsn_panel(wks_npo,reg_npo_jja,(/nrow,ncol/),panres) + delete(reg_npo_jja) + panres@txString = "NPO SST Regressions (SON)" + gsn_panel(wks_npo,reg_npo_son,(/nrow,ncol/),panres) + delete(reg_npo_son) + panres@txString = "NPO SST Regressions (Annual)" + gsn_panel(wks_npo,reg_npo_ann,(/nrow,ncol/),panres) + delete(reg_npo_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NPO SST Regressions (Monthly)" + gsn_panel(wks_npo,reg_npo_mon,(/nrow,ncol/),panres) + delete(reg_npo_mon) + end if + delete(wks_npo) + end if + + panres@txString = "SAM (DJF)" + gsn_panel(wks_sam,map_sam_djf,(/nrow,ncol/),panres) + delete(map_sam_djf) + panres@txString = "SAM (MAM)" + gsn_panel(wks_sam,map_sam_mam,(/nrow,ncol/),panres) + delete(map_sam_mam) + panres@txString = "SAM (JJA)" + gsn_panel(wks_sam,map_sam_jja,(/nrow,ncol/),panres) + delete(map_sam_jja) + panres@txString = "SAM (SON)" + gsn_panel(wks_sam,map_sam_son,(/nrow,ncol/),panres) + delete(map_sam_son) + panres@txString = "SAM (Annual)" + gsn_panel(wks_sam,map_sam_ann,(/nrow,ncol/),panres) + delete(map_sam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "SAM (Monthly)" + gsn_panel(wks_sam,map_sam_mon,(/nrow,ncol/),panres) + delete(map_sam_mon) + end if + + if (sstreg_frame.eq.0) then + panres@txString = "SAM SST Regressions (DJF)" + gsn_panel(wks_sam,reg_sam_djf,(/nrow,ncol/),panres) + delete(reg_sam_djf) + panres@txString = "SAM SST Regressions (MAM)" + gsn_panel(wks_sam,reg_sam_mam,(/nrow,ncol/),panres) + delete(reg_sam_mam) + panres@txString = "SAM SST Regressions (JJA)" + gsn_panel(wks_sam,reg_sam_jja,(/nrow,ncol/),panres) + delete(reg_sam_jja) + panres@txString = "SAM SST Regressions (SON)" + gsn_panel(wks_sam,reg_sam_son,(/nrow,ncol/),panres) + delete(reg_sam_son) + panres@txString = "SAM SST Regressions (Annual)" + gsn_panel(wks_sam,reg_sam_ann,(/nrow,ncol/),panres) + delete(reg_sam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "SAM SST Regressions (Monthly)" + gsn_panel(wks_sam,reg_sam_mon,(/nrow,ncol/),panres) + delete(reg_sam_mon) + end if + delete(wks_sam) + end if + + panres@txString = "PSA1 (DJF)" + gsn_panel(wks_psa1,map_psa1_djf,(/nrow,ncol/),panres) + delete(map_psa1_djf) + panres@txString = "PSA1 (MAM)" + gsn_panel(wks_psa1,map_psa1_mam,(/nrow,ncol/),panres) + delete(map_psa1_mam) + panres@txString = "PSA1 (JJA)" + gsn_panel(wks_psa1,map_psa1_jja,(/nrow,ncol/),panres) + delete(map_psa1_jja) + panres@txString = "PSA1 (SON)" + gsn_panel(wks_psa1,map_psa1_son,(/nrow,ncol/),panres) + delete(map_psa1_son) + panres@txString = "PSA1 (Annual)" + gsn_panel(wks_psa1,map_psa1_ann,(/nrow,ncol/),panres) + delete(map_psa1_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA1 (Monthly)" + gsn_panel(wks_psa1,map_psa1_mon,(/nrow,ncol/),panres) + delete(map_psa1_mon) + end if + + if (sstreg_frame.eq.0) then + panres@txString = "PSA1 SST Regressions (DJF)" + gsn_panel(wks_psa1,reg_psa1_djf,(/nrow,ncol/),panres) + 
delete(reg_psa1_djf) + panres@txString = "PSA1 SST Regressions (MAM)" + gsn_panel(wks_psa1,reg_psa1_mam,(/nrow,ncol/),panres) + delete(reg_psa1_mam) + panres@txString = "PSA1 SST Regressions (JJA)" + gsn_panel(wks_psa1,reg_psa1_jja,(/nrow,ncol/),panres) + delete(reg_psa1_jja) + panres@txString = "PSA1 SST Regressions (SON)" + gsn_panel(wks_psa1,reg_psa1_son,(/nrow,ncol/),panres) + delete(reg_psa1_son) + panres@txString = "PSA1 SST Regressions (Annual)" + gsn_panel(wks_psa1,reg_psa1_ann,(/nrow,ncol/),panres) + delete(reg_psa1_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA1 SST Regressions (Monthly)" + gsn_panel(wks_psa1,reg_psa1_mon,(/nrow,ncol/),panres) + delete(reg_psa1_mon) + end if + delete(wks_psa1) + end if + + panres@txString = "PSA2 (DJF)" + gsn_panel(wks_psa2,map_psa2_djf,(/nrow,ncol/),panres) + delete(map_psa2_djf) + panres@txString = "PSA2 (MAM)" + gsn_panel(wks_psa2,map_psa2_mam,(/nrow,ncol/),panres) + delete(map_psa2_mam) + panres@txString = "PSA2 (JJA)" + gsn_panel(wks_psa2,map_psa2_jja,(/nrow,ncol/),panres) + delete(map_psa2_jja) + panres@txString = "PSA2 (SON)" + gsn_panel(wks_psa2,map_psa2_son,(/nrow,ncol/),panres) + delete(map_psa2_son) + panres@txString = "PSA2 (Annual)" + gsn_panel(wks_psa2,map_psa2_ann,(/nrow,ncol/),panres) + delete(map_psa2_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA2 (Monthly)" + gsn_panel(wks_psa2,map_psa2_mon,(/nrow,ncol/),panres) + delete(map_psa2_mon) + end if + + if (sstreg_frame.eq.0) then + panres@txString = "PSA2 SST Regressions (DJF)" + gsn_panel(wks_psa2,reg_psa2_djf,(/nrow,ncol/),panres) + delete(reg_psa2_djf) + panres@txString = "PSA2 SST Regressions (MAM)" + gsn_panel(wks_psa2,reg_psa2_mam,(/nrow,ncol/),panres) + delete(reg_psa2_mam) + panres@txString = "PSA2 SST Regressions (JJA)" + gsn_panel(wks_psa2,reg_psa2_jja,(/nrow,ncol/),panres) + delete(reg_psa2_jja) + panres@txString = "PSA2 SST Regressions (SON)" + gsn_panel(wks_psa2,reg_psa2_son,(/nrow,ncol/),panres) + delete(reg_psa2_son) + panres@txString = "PSA2 SST Regressions (Annual)" + gsn_panel(wks_psa2,reg_psa2_ann,(/nrow,ncol/),panres) + delete(reg_psa2_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA2 SST Regressions (Monthly)" + gsn_panel(wks_psa2,reg_psa2_mon,(/nrow,ncol/),panres) + delete(reg_psa2_mon) + end if + delete(wks_psa2) + end if + + panres2 = True + if (nsim.le.5) then + panres2@txFontHeightF = 0.024 + else + panres2@txFontHeightF = 0.016 + end if + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres2@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) + end if + + panres2@txString = "NAM (DJF)" + gsn_panel(wks_nam_ts,xy_nam_djf,lp,panres2) + delete(xy_nam_djf) + panres2@txString = "NAM (MAM)" + gsn_panel(wks_nam_ts,xy_nam_mam,lp,panres2) + delete(xy_nam_mam) + panres2@txString = "NAM (JJA)" + gsn_panel(wks_nam_ts,xy_nam_jja,lp,panres2) + delete(xy_nam_jja) + panres2@txString = "NAM (SON)" + gsn_panel(wks_nam_ts,xy_nam_son,lp,panres2) + delete(xy_nam_son) + panres2@txString = "NAM (Annual)" + gsn_panel(wks_nam_ts,xy_nam_ann,lp,panres2) + delete(xy_nam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "NAM (Monthly)" + gsn_panel(wks_nam_ts,xy_nam_mon,lp,panres2) + delete(xy_nam_mon) + end if + delete(wks_nam_ts) + + panres2@txString = "NAO (DJF)" + gsn_panel(wks_nao_ts,xy_nao_djf,lp,panres2) + delete(xy_nao_djf) + 
panres2@txString = "NAO (MAM)" + gsn_panel(wks_nao_ts,xy_nao_mam,lp,panres2) + delete(xy_nao_mam) + panres2@txString = "NAO (JJA)" + gsn_panel(wks_nao_ts,xy_nao_jja,lp,panres2) + delete(xy_nao_jja) + panres2@txString = "NAO (SON)" + gsn_panel(wks_nao_ts,xy_nao_son,lp,panres2) + delete(xy_nao_son) + panres2@txString = "NAO (Annual)" + gsn_panel(wks_nao_ts,xy_nao_ann,lp,panres2) + delete(xy_nao_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "NAO (Monthly)" + gsn_panel(wks_nao_ts,xy_nao_mon,lp,panres2) + delete(xy_nao_mon) + end if + delete(wks_nao_ts) + + panres2@txString = "PNA (DJF)" + gsn_panel(wks_pna_ts,xy_pna_djf,lp,panres2) + delete(xy_pna_djf) + panres2@txString = "PNA (MAM)" + gsn_panel(wks_pna_ts,xy_pna_mam,lp,panres2) + delete(xy_pna_mam) + panres2@txString = "PNA (JJA)" + gsn_panel(wks_pna_ts,xy_pna_jja,lp,panres2) + delete(xy_pna_jja) + panres2@txString = "PNA (SON)" + gsn_panel(wks_pna_ts,xy_pna_son,lp,panres2) + delete(xy_pna_son) + panres2@txString = "PNA (Annual)" + gsn_panel(wks_pna_ts,xy_pna_ann,lp,panres2) + delete(xy_pna_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "PNA (Monthly)" + gsn_panel(wks_pna_ts,xy_pna_mon,lp,panres2) + delete(xy_pna_mon) + end if + delete(wks_pna_ts) + + panres2@txString = "NPO (DJF)" + gsn_panel(wks_npo_ts,xy_npo_djf,lp,panres2) + delete(xy_npo_djf) + panres2@txString = "NPO (MAM)" + gsn_panel(wks_npo_ts,xy_npo_mam,lp,panres2) + delete(xy_npo_mam) + panres2@txString = "NPO (JJA)" + gsn_panel(wks_npo_ts,xy_npo_jja,lp,panres2) + delete(xy_npo_jja) + panres2@txString = "NPO (SON)" + gsn_panel(wks_npo_ts,xy_npo_son,lp,panres2) + delete(xy_npo_son) + panres2@txString = "NPO (Annual)" + gsn_panel(wks_npo_ts,xy_npo_ann,lp,panres2) + delete(xy_npo_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "NPO (Monthly)" + gsn_panel(wks_npo_ts,xy_npo_mon,lp,panres2) + delete(xy_npo_mon) + end if + delete(wks_npo_ts) + + panres2@txString = "SAM (DJF)" + gsn_panel(wks_sam_ts,xy_sam_djf,lp,panres2) + delete(xy_sam_djf) + panres2@txString = "SAM (MAM)" + gsn_panel(wks_sam_ts,xy_sam_mam,lp,panres2) + delete(xy_sam_mam) + panres2@txString = "SAM (JJA)" + gsn_panel(wks_sam_ts,xy_sam_jja,lp,panres2) + delete(xy_sam_jja) + panres2@txString = "SAM (SON)" + gsn_panel(wks_sam_ts,xy_sam_son,lp,panres2) + delete(xy_sam_son) + panres2@txString = "SAM (Annual)" + gsn_panel(wks_sam_ts,xy_sam_ann,lp,panres2) + delete(xy_sam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "SAM (Monthly)" + gsn_panel(wks_sam_ts,xy_sam_mon,lp,panres2) + delete(xy_sam_mon) + end if + delete(wks_sam_ts) + + panres2@txString = "PSA1 (DJF)" + gsn_panel(wks_psa1_ts,xy_psa1_djf,lp,panres2) + delete(xy_psa1_djf) + panres2@txString = "PSA1 (MAM)" + gsn_panel(wks_psa1_ts,xy_psa1_mam,lp,panres2) + delete(xy_psa1_mam) + panres2@txString = "PSA1 (JJA)" + gsn_panel(wks_psa1_ts,xy_psa1_jja,lp,panres2) + delete(xy_psa1_jja) + panres2@txString = "PSA1 (SON)" + gsn_panel(wks_psa1_ts,xy_psa1_son,lp,panres2) + delete(xy_psa1_son) + panres2@txString = "PSA1 (Annual)" + gsn_panel(wks_psa1_ts,xy_psa1_ann,lp,panres2) + delete(xy_psa1_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "PSA1 (Monthly)" + gsn_panel(wks_psa1_ts,xy_psa1_mon,lp,panres2) + delete(xy_psa1_mon) + end if + delete(wks_psa1_ts) + + panres2@txString = "PSA2 (DJF)" + gsn_panel(wks_psa2_ts,xy_psa2_djf,lp,panres2) + delete(xy_psa2_djf) + panres2@txString = "PSA2 (MAM)" + gsn_panel(wks_psa2_ts,xy_psa2_mam,lp,panres2) + delete(xy_psa2_mam) + panres2@txString = "PSA2 
(JJA)" + gsn_panel(wks_psa2_ts,xy_psa2_jja,lp,panres2) + delete(xy_psa2_jja) + panres2@txString = "PSA2 (SON)" + gsn_panel(wks_psa2_ts,xy_psa2_son,lp,panres2) + delete(xy_psa2_son) + panres2@txString = "PSA2 (Annual)" + gsn_panel(wks_psa2_ts,xy_psa2_ann,lp,panres2) + delete(xy_psa2_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "PSA2 (Monthly)" + gsn_panel(wks_psa2_ts,xy_psa2_mon,lp,panres2) + delete(xy_psa2_mon) + end if + + panres2@txString = "NPI (NDJFM)" + gsn_panel(wks_psa2_ts,xy_npi,lp,panres2) + delete(xy_npi) + delete(wks_psa2_ts) +;-------------------------------------------------------------------------------------------------- + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + system("mv "+OUTDIR+"nam.000001.png "+OUTDIR+"nam.djf.png") + system("mv "+OUTDIR+"nam.000002.png "+OUTDIR+"nam.mam.png") + system("mv "+OUTDIR+"nam.000003.png "+OUTDIR+"nam.jja.png") + system("mv "+OUTDIR+"nam.000004.png "+OUTDIR+"nam.son.png") + system("mv "+OUTDIR+"nam.000005.png "+OUTDIR+"nam.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nam.000006.png "+OUTDIR+"nam.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"nam.000007.png "+OUTDIR+"nam.sstreg.djf.png") + system("mv "+OUTDIR+"nam.000008.png "+OUTDIR+"nam.sstreg.mam.png") + system("mv "+OUTDIR+"nam.000009.png "+OUTDIR+"nam.sstreg.jja.png") + system("mv "+OUTDIR+"nam.000010.png "+OUTDIR+"nam.sstreg.son.png") + system("mv "+OUTDIR+"nam.000011.png "+OUTDIR+"nam.sstreg.ann.png") + system("mv "+OUTDIR+"nam.000012.png "+OUTDIR+"nam.sstreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"nam.000006.png "+OUTDIR+"nam.sstreg.djf.png") + system("mv "+OUTDIR+"nam.000007.png "+OUTDIR+"nam.sstreg.mam.png") + system("mv "+OUTDIR+"nam.000008.png "+OUTDIR+"nam.sstreg.jja.png") + system("mv "+OUTDIR+"nam.000009.png "+OUTDIR+"nam.sstreg.son.png") + system("mv "+OUTDIR+"nam.000010.png "+OUTDIR+"nam.sstreg.ann.png") + end if + end if + + system("mv "+OUTDIR+"nao.000001.png "+OUTDIR+"nao.djf.png") + system("mv "+OUTDIR+"nao.000002.png "+OUTDIR+"nao.mam.png") + system("mv "+OUTDIR+"nao.000003.png "+OUTDIR+"nao.jja.png") + system("mv "+OUTDIR+"nao.000004.png "+OUTDIR+"nao.son.png") + system("mv "+OUTDIR+"nao.000005.png "+OUTDIR+"nao.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nao.000006.png "+OUTDIR+"nao.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"nao.000007.png "+OUTDIR+"nao.sstreg.djf.png") + system("mv "+OUTDIR+"nao.000008.png "+OUTDIR+"nao.sstreg.mam.png") + system("mv "+OUTDIR+"nao.000009.png "+OUTDIR+"nao.sstreg.jja.png") + system("mv "+OUTDIR+"nao.000010.png "+OUTDIR+"nao.sstreg.son.png") + system("mv "+OUTDIR+"nao.000011.png "+OUTDIR+"nao.sstreg.ann.png") + system("mv "+OUTDIR+"nao.000012.png "+OUTDIR+"nao.sstreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"nao.000006.png "+OUTDIR+"nao.sstreg.djf.png") + system("mv "+OUTDIR+"nao.000007.png "+OUTDIR+"nao.sstreg.mam.png") + system("mv "+OUTDIR+"nao.000008.png "+OUTDIR+"nao.sstreg.jja.png") + system("mv "+OUTDIR+"nao.000009.png "+OUTDIR+"nao.sstreg.son.png") + system("mv "+OUTDIR+"nao.000010.png "+OUTDIR+"nao.sstreg.ann.png") + end if + end if + + system("mv "+OUTDIR+"pna.000001.png "+OUTDIR+"pna.djf.png") + system("mv "+OUTDIR+"pna.000002.png "+OUTDIR+"pna.mam.png") + system("mv "+OUTDIR+"pna.000003.png "+OUTDIR+"pna.jja.png") + system("mv "+OUTDIR+"pna.000004.png "+OUTDIR+"pna.son.png") + system("mv "+OUTDIR+"pna.000005.png 
"+OUTDIR+"pna.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pna.000006.png "+OUTDIR+"pna.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pna.000007.png "+OUTDIR+"pna.sstreg.djf.png") + system("mv "+OUTDIR+"pna.000008.png "+OUTDIR+"pna.sstreg.mam.png") + system("mv "+OUTDIR+"pna.000009.png "+OUTDIR+"pna.sstreg.jja.png") + system("mv "+OUTDIR+"pna.000010.png "+OUTDIR+"pna.sstreg.son.png") + system("mv "+OUTDIR+"pna.000011.png "+OUTDIR+"pna.sstreg.ann.png") + system("mv "+OUTDIR+"pna.000012.png "+OUTDIR+"pna.sstreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pna.000006.png "+OUTDIR+"pna.sstreg.djf.png") + system("mv "+OUTDIR+"pna.000007.png "+OUTDIR+"pna.sstreg.mam.png") + system("mv "+OUTDIR+"pna.000008.png "+OUTDIR+"pna.sstreg.jja.png") + system("mv "+OUTDIR+"pna.000009.png "+OUTDIR+"pna.sstreg.son.png") + system("mv "+OUTDIR+"pna.000010.png "+OUTDIR+"pna.sstreg.ann.png") + end if + end if + + system("mv "+OUTDIR+"npo.000001.png "+OUTDIR+"npo.djf.png") + system("mv "+OUTDIR+"npo.000002.png "+OUTDIR+"npo.mam.png") + system("mv "+OUTDIR+"npo.000003.png "+OUTDIR+"npo.jja.png") + system("mv "+OUTDIR+"npo.000004.png "+OUTDIR+"npo.son.png") + system("mv "+OUTDIR+"npo.000005.png "+OUTDIR+"npo.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"npo.000006.png "+OUTDIR+"npo.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"npo.000007.png "+OUTDIR+"npo.sstreg.djf.png") + system("mv "+OUTDIR+"npo.000008.png "+OUTDIR+"npo.sstreg.mam.png") + system("mv "+OUTDIR+"npo.000009.png "+OUTDIR+"npo.sstreg.jja.png") + system("mv "+OUTDIR+"npo.000010.png "+OUTDIR+"npo.sstreg.son.png") + system("mv "+OUTDIR+"npo.000011.png "+OUTDIR+"npo.sstreg.ann.png") + system("mv "+OUTDIR+"npo.000012.png "+OUTDIR+"npo.sstreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"npo.000006.png "+OUTDIR+"npo.sstreg.djf.png") + system("mv "+OUTDIR+"npo.000007.png "+OUTDIR+"npo.sstreg.mam.png") + system("mv "+OUTDIR+"npo.000008.png "+OUTDIR+"npo.sstreg.jja.png") + system("mv "+OUTDIR+"npo.000009.png "+OUTDIR+"npo.sstreg.son.png") + system("mv "+OUTDIR+"npo.000010.png "+OUTDIR+"npo.sstreg.ann.png") + end if + end if + + system("mv "+OUTDIR+"sam.000001.png "+OUTDIR+"sam.djf.png") + system("mv "+OUTDIR+"sam.000002.png "+OUTDIR+"sam.mam.png") + system("mv "+OUTDIR+"sam.000003.png "+OUTDIR+"sam.jja.png") + system("mv "+OUTDIR+"sam.000004.png "+OUTDIR+"sam.son.png") + system("mv "+OUTDIR+"sam.000005.png "+OUTDIR+"sam.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"sam.000006.png "+OUTDIR+"sam.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"sam.000007.png "+OUTDIR+"sam.sstreg.djf.png") + system("mv "+OUTDIR+"sam.000008.png "+OUTDIR+"sam.sstreg.mam.png") + system("mv "+OUTDIR+"sam.000009.png "+OUTDIR+"sam.sstreg.jja.png") + system("mv "+OUTDIR+"sam.000010.png "+OUTDIR+"sam.sstreg.son.png") + system("mv "+OUTDIR+"sam.000011.png "+OUTDIR+"sam.sstreg.ann.png") + system("mv "+OUTDIR+"sam.000012.png "+OUTDIR+"sam.sstreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"sam.000006.png "+OUTDIR+"sam.sstreg.djf.png") + system("mv "+OUTDIR+"sam.000007.png "+OUTDIR+"sam.sstreg.mam.png") + system("mv "+OUTDIR+"sam.000008.png "+OUTDIR+"sam.sstreg.jja.png") + system("mv "+OUTDIR+"sam.000009.png "+OUTDIR+"sam.sstreg.son.png") + system("mv "+OUTDIR+"sam.000010.png "+OUTDIR+"sam.sstreg.ann.png") + end if + end if + + system("mv "+OUTDIR+"psa1.000001.png 
"+OUTDIR+"psa1.djf.png") + system("mv "+OUTDIR+"psa1.000002.png "+OUTDIR+"psa1.mam.png") + system("mv "+OUTDIR+"psa1.000003.png "+OUTDIR+"psa1.jja.png") + system("mv "+OUTDIR+"psa1.000004.png "+OUTDIR+"psa1.son.png") + system("mv "+OUTDIR+"psa1.000005.png "+OUTDIR+"psa1.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa1.000006.png "+OUTDIR+"psa1.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psa1.000007.png "+OUTDIR+"psa1.sstreg.djf.png") + system("mv "+OUTDIR+"psa1.000008.png "+OUTDIR+"psa1.sstreg.mam.png") + system("mv "+OUTDIR+"psa1.000009.png "+OUTDIR+"psa1.sstreg.jja.png") + system("mv "+OUTDIR+"psa1.000010.png "+OUTDIR+"psa1.sstreg.son.png") + system("mv "+OUTDIR+"psa1.000011.png "+OUTDIR+"psa1.sstreg.ann.png") + system("mv "+OUTDIR+"psa1.000012.png "+OUTDIR+"psa1.sstreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psa1.000006.png "+OUTDIR+"psa1.sstreg.djf.png") + system("mv "+OUTDIR+"psa1.000007.png "+OUTDIR+"psa1.sstreg.mam.png") + system("mv "+OUTDIR+"psa1.000008.png "+OUTDIR+"psa1.sstreg.jja.png") + system("mv "+OUTDIR+"psa1.000009.png "+OUTDIR+"psa1.sstreg.son.png") + system("mv "+OUTDIR+"psa1.000010.png "+OUTDIR+"psa1.sstreg.ann.png") + end if + end if + + system("mv "+OUTDIR+"psa2.000001.png "+OUTDIR+"psa2.djf.png") + system("mv "+OUTDIR+"psa2.000002.png "+OUTDIR+"psa2.mam.png") + system("mv "+OUTDIR+"psa2.000003.png "+OUTDIR+"psa2.jja.png") + system("mv "+OUTDIR+"psa2.000004.png "+OUTDIR+"psa2.son.png") + system("mv "+OUTDIR+"psa2.000005.png "+OUTDIR+"psa2.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa2.000006.png "+OUTDIR+"psa2.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psa2.000007.png "+OUTDIR+"psa2.sstreg.djf.png") + system("mv "+OUTDIR+"psa2.000008.png "+OUTDIR+"psa2.sstreg.mam.png") + system("mv "+OUTDIR+"psa2.000009.png "+OUTDIR+"psa2.sstreg.jja.png") + system("mv "+OUTDIR+"psa2.000010.png "+OUTDIR+"psa2.sstreg.son.png") + system("mv "+OUTDIR+"psa2.000011.png "+OUTDIR+"psa2.sstreg.ann.png") + system("mv "+OUTDIR+"psa2.000012.png "+OUTDIR+"psa2.sstreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psa2.000006.png "+OUTDIR+"psa2.sstreg.djf.png") + system("mv "+OUTDIR+"psa2.000007.png "+OUTDIR+"psa2.sstreg.mam.png") + system("mv "+OUTDIR+"psa2.000008.png "+OUTDIR+"psa2.sstreg.jja.png") + system("mv "+OUTDIR+"psa2.000009.png "+OUTDIR+"psa2.sstreg.son.png") + system("mv "+OUTDIR+"psa2.000010.png "+OUTDIR+"psa2.sstreg.ann.png") + end if + end if + + system("mv "+OUTDIR+"nam.timeseries.000001.png "+OUTDIR+"nam.timeseries.djf.png") + system("mv "+OUTDIR+"nam.timeseries.000002.png "+OUTDIR+"nam.timeseries.mam.png") + system("mv "+OUTDIR+"nam.timeseries.000003.png "+OUTDIR+"nam.timeseries.jja.png") + system("mv "+OUTDIR+"nam.timeseries.000004.png "+OUTDIR+"nam.timeseries.son.png") + system("mv "+OUTDIR+"nam.timeseries.000005.png "+OUTDIR+"nam.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nam.timeseries.000006.png "+OUTDIR+"nam.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"nao.timeseries.000001.png "+OUTDIR+"nao.timeseries.djf.png") + system("mv "+OUTDIR+"nao.timeseries.000002.png "+OUTDIR+"nao.timeseries.mam.png") + system("mv "+OUTDIR+"nao.timeseries.000003.png "+OUTDIR+"nao.timeseries.jja.png") + system("mv "+OUTDIR+"nao.timeseries.000004.png "+OUTDIR+"nao.timeseries.son.png") + system("mv "+OUTDIR+"nao.timeseries.000005.png "+OUTDIR+"nao.timeseries.ann.png") + if 
(COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nao.timeseries.000006.png "+OUTDIR+"nao.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"pna.timeseries.000001.png "+OUTDIR+"pna.timeseries.djf.png") + system("mv "+OUTDIR+"pna.timeseries.000002.png "+OUTDIR+"pna.timeseries.mam.png") + system("mv "+OUTDIR+"pna.timeseries.000003.png "+OUTDIR+"pna.timeseries.jja.png") + system("mv "+OUTDIR+"pna.timeseries.000004.png "+OUTDIR+"pna.timeseries.son.png") + system("mv "+OUTDIR+"pna.timeseries.000005.png "+OUTDIR+"pna.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pna.timeseries.000006.png "+OUTDIR+"pna.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"npo.timeseries.000001.png "+OUTDIR+"npo.timeseries.djf.png") + system("mv "+OUTDIR+"npo.timeseries.000002.png "+OUTDIR+"npo.timeseries.mam.png") + system("mv "+OUTDIR+"npo.timeseries.000003.png "+OUTDIR+"npo.timeseries.jja.png") + system("mv "+OUTDIR+"npo.timeseries.000004.png "+OUTDIR+"npo.timeseries.son.png") + system("mv "+OUTDIR+"npo.timeseries.000005.png "+OUTDIR+"npo.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"npo.timeseries.000006.png "+OUTDIR+"npo.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"sam.timeseries.000001.png "+OUTDIR+"sam.timeseries.djf.png") + system("mv "+OUTDIR+"sam.timeseries.000002.png "+OUTDIR+"sam.timeseries.mam.png") + system("mv "+OUTDIR+"sam.timeseries.000003.png "+OUTDIR+"sam.timeseries.jja.png") + system("mv "+OUTDIR+"sam.timeseries.000004.png "+OUTDIR+"sam.timeseries.son.png") + system("mv "+OUTDIR+"sam.timeseries.000005.png "+OUTDIR+"sam.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"sam.timeseries.000006.png "+OUTDIR+"sam.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"psa1.timeseries.000001.png "+OUTDIR+"psa1.timeseries.djf.png") + system("mv "+OUTDIR+"psa1.timeseries.000002.png "+OUTDIR+"psa1.timeseries.mam.png") + system("mv "+OUTDIR+"psa1.timeseries.000003.png "+OUTDIR+"psa1.timeseries.jja.png") + system("mv "+OUTDIR+"psa1.timeseries.000004.png "+OUTDIR+"psa1.timeseries.son.png") + system("mv "+OUTDIR+"psa1.timeseries.000005.png "+OUTDIR+"psa1.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa1.timeseries.000006.png "+OUTDIR+"psa1.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"psa2.timeseries.000001.png "+OUTDIR+"psa2.timeseries.djf.png") + system("mv "+OUTDIR+"psa2.timeseries.000002.png "+OUTDIR+"psa2.timeseries.mam.png") + system("mv "+OUTDIR+"psa2.timeseries.000003.png "+OUTDIR+"psa2.timeseries.jja.png") + system("mv "+OUTDIR+"psa2.timeseries.000004.png "+OUTDIR+"psa2.timeseries.son.png") + system("mv "+OUTDIR+"psa2.timeseries.000005.png "+OUTDIR+"psa2.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa2.timeseries.000006.png "+OUTDIR+"psa2.timeseries.mon.png") + system("mv "+OUTDIR+"psa2.timeseries.000007.png "+OUTDIR+"npi.timeseries.ndjfm.png") + else + system("mv "+OUTDIR+"psa2.timeseries.000006.png "+OUTDIR+"npi.timeseries.ndjfm.png") + end if + else + system("psplit "+OUTDIR+"nam.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"nam.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"nam.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"nam.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"nam.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"nam.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"nam.mon.ps") 
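+ ; The SST regression frames (when drawn) follow the monthly pattern frame (pict0006), so their numbering starts at pict0007 in this branch.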
+ if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"nam.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"nam.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"nam.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"nam.sstreg.son.ps") + system("mv "+OUTDIR+"pict0011.ps "+OUTDIR+"nam.sstreg.ann.ps") + system("mv "+OUTDIR+"pict0012.ps "+OUTDIR+"nam.sstreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"nam.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"nam.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"nam.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"nam.sstreg.son.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"nam.sstreg.ann.ps") + end if + end if + + system("psplit "+OUTDIR+"nao.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"nao.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"nao.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"nao.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"nao.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"nao.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"nao.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"nao.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"nao.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"nao.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"nao.sstreg.son.ps") + system("mv "+OUTDIR+"pict0011.ps "+OUTDIR+"nao.sstreg.ann.ps") + system("mv "+OUTDIR+"pict0012.ps "+OUTDIR+"nao.sstreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"nao.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"nao.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"nao.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"nao.sstreg.son.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"nao.sstreg.ann.ps") + end if + end if + + system("psplit "+OUTDIR+"pna.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"pna.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"pna.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"pna.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"pna.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"pna.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"pna.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"pna.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"pna.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"pna.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"pna.sstreg.son.ps") + system("mv "+OUTDIR+"pict0011.ps "+OUTDIR+"pna.sstreg.ann.ps") + system("mv "+OUTDIR+"pict0012.ps "+OUTDIR+"pna.sstreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"pna.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"pna.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"pna.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"pna.sstreg.son.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"pna.sstreg.ann.ps") + end if + end if + + system("psplit "+OUTDIR+"npo.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"npo.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"npo.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"npo.jja.ps") + system("mv 
"+OUTDIR+"pict0004.ps "+OUTDIR+"npo.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"npo.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"npo.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"npo.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"npo.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"npo.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"npo.sstreg.son.ps") + system("mv "+OUTDIR+"pict0011.ps "+OUTDIR+"npo.sstreg.ann.ps") + system("mv "+OUTDIR+"pict0012.ps "+OUTDIR+"npo.sstreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"npo.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"npo.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"npo.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"npo.sstreg.son.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"npo.sstreg.ann.ps") + end if + end if + + system("psplit "+OUTDIR+"sam.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"sam.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"sam.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"sam.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"sam.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"sam.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"sam.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"sam.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"sam.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"sam.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"sam.sstreg.son.ps") + system("mv "+OUTDIR+"pict0011.ps "+OUTDIR+"sam.sstreg.ann.ps") + system("mv "+OUTDIR+"pict0012.ps "+OUTDIR+"sam.sstreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"sam.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"sam.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"sam.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"sam.sstreg.son.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"sam.sstreg.ann.ps") + end if + end if + + system("psplit "+OUTDIR+"psa1.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"psa1.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"psa1.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"psa1.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"psa1.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"psa1.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"psa1.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"psa1.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"psa1.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"psa1.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"psa1.sstreg.son.ps") + system("mv "+OUTDIR+"pict0011.ps "+OUTDIR+"psa1.sstreg.ann.ps") + system("mv "+OUTDIR+"pict0012.ps "+OUTDIR+"psa1.sstreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"psa1.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"psa1.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"psa1.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"psa1.sstreg.son.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"psa1.sstreg.ann.ps") + end if + end if + + system("psplit 
"+OUTDIR+"psa2.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"psa2.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"psa2.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"psa2.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"psa2.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"psa2.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"psa2.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"psa2.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"psa2.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"psa2.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"psa2.sstreg.son.ps") + system("mv "+OUTDIR+"pict0011.ps "+OUTDIR+"psa2.sstreg.ann.ps") + system("mv "+OUTDIR+"pict0012.ps "+OUTDIR+"psa2.sstreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"psa2.sstreg.djf.ps") + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"psa2.sstreg.mam.ps") + system("mv "+OUTDIR+"pict0008.ps "+OUTDIR+"psa2.sstreg.jja.ps") + system("mv "+OUTDIR+"pict0009.ps "+OUTDIR+"psa2.sstreg.son.ps") + system("mv "+OUTDIR+"pict0010.ps "+OUTDIR+"psa2.sstreg.ann.ps") + end if + end if + + system("psplit "+OUTDIR+"nam.timeseries.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"nam.timeseries.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"nam.timeseries.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"nam.timeseries.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"nam.timeseries.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"nam.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"nam.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"nao.timeseries.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"nao.timeseries.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"nao.timeseries.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"nao.timeseries.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"nao.timeseries.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"nao.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"nao.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"pna.timeseries.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"pna.timeseries.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"pna.timeseries.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"pna.timeseries.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"pna.timeseries.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"pna.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"pna.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"npo.timeseries.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"npo.timeseries.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"npo.timeseries.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"npo.timeseries.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"npo.timeseries.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"npo.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"npo.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"sam.timeseries.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"sam.timeseries.djf.ps") + system("mv "+OUTDIR+"pict0002.ps 
"+OUTDIR+"sam.timeseries.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"sam.timeseries.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"sam.timeseries.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"sam.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"sam.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"psa1.timeseries.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"psa1.timeseries.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"psa1.timeseries.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"psa1.timeseries.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"psa1.timeseries.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"psa1.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"psa1.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"psa2.timeseries.ps "+OUTDIR+"pict") + system("mv "+OUTDIR+"pict0001.ps "+OUTDIR+"psa2.timeseries.djf.ps") + system("mv "+OUTDIR+"pict0002.ps "+OUTDIR+"psa2.timeseries.mam.ps") + system("mv "+OUTDIR+"pict0003.ps "+OUTDIR+"psa2.timeseries.jja.ps") + system("mv "+OUTDIR+"pict0004.ps "+OUTDIR+"psa2.timeseries.son.ps") + system("mv "+OUTDIR+"pict0005.ps "+OUTDIR+"psa2.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"psa2.timeseries.mon.ps") + system("mv "+OUTDIR+"pict0007.ps "+OUTDIR+"npi.timeseries.ndjfm.ps") + else + system("mv "+OUTDIR+"pict0006.ps "+OUTDIR+"npi.timeseries.ndjfm.ps") + end if + end if + print("Finished: psl.modes_indices.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.nam_nao.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.nam_nao.ncl new file mode 100644 index 0000000000..9226d9ac66 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.nam_nao.ncl @@ -0,0 +1,1942 @@ +; Calculates NAM and NAO (patterns and PC timeseries), as well as +; regressions of those PC timeseries onto ts, tas, and pr. +; +; Variables used: psl, ts, tas, and pr +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: psl.nam_nao.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COMPUTE_MODES_MON = getenv("COMPUTE_MODES_MON") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_psl") + na = asciiread("namelist_byvar/namelist_psl",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) 
+ +;---------SST Regressions coding------------------------------------------------- + nsim_ts = numAsciiRow("namelist_byvar/namelist_ts") + na_ts = asciiread("namelist_byvar/namelist_ts",(/nsim_ts/),"string") + names_ts = new(nsim_ts,"string") + paths_ts = new(nsim_ts,"string") + syear_ts = new(nsim_ts,"integer",-999) + eyear_ts = new(nsim_ts,"integer",-999) + + do gg = 0,nsim_ts-1 + names_ts(gg) = str_strip(str_get_field(na_ts(gg),1,delim)) + paths_ts(gg) = str_strip(str_get_field(na_ts(gg),2,delim)) + syear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),3,delim))) + eyear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),4,delim))) + end do + delete(na_ts) + nyr_ts = eyear_ts-syear_ts+1 +;---------TAS Regressions coding------------------------------------------------- + nsim_tas = numAsciiRow("namelist_byvar/namelist_trefht") + na_tas = asciiread("namelist_byvar/namelist_trefht",(/nsim_tas/),"string") + names_tas = new(nsim_tas,"string") + paths_tas = new(nsim_tas,"string") + syear_tas = new(nsim_tas,"integer",-999) + eyear_tas = new(nsim_tas,"integer",-999) + + do gg = 0,nsim_tas-1 + names_tas(gg) = str_strip(str_get_field(na_tas(gg),1,delim)) + paths_tas(gg) = str_strip(str_get_field(na_tas(gg),2,delim)) + syear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),3,delim))) + eyear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),4,delim))) + end do + delete(na_tas) + nyr_tas = eyear_tas-syear_tas+1 +;---------PR Regressions coding------------------------------------------------- + nsim_pr = numAsciiRow("namelist_byvar/namelist_prect") + na_pr = asciiread("namelist_byvar/namelist_prect",(/nsim_pr/),"string") + names_pr = new(nsim_pr,"string") + paths_pr = new(nsim_pr,"string") + syear_pr = new(nsim_pr,"integer",-999) + eyear_pr = new(nsim_pr,"integer",-999) + + do gg = 0,nsim_pr-1 + names_pr(gg) = str_strip(str_get_field(na_pr(gg),1,delim)) + paths_pr(gg) = str_strip(str_get_field(na_pr(gg),2,delim)) + syear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),3,delim))) + eyear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),4,delim))) + end do + delete(na_pr) + nyr_pr = eyear_pr-syear_pr+1 +;------------------------------------------------------------------------------------------------- + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_nam = gsn_open_wks(wks_type,getenv("OUTDIR")+"nam") + wks_nam_pr = gsn_open_wks(wks_type,getenv("OUTDIR")+"nam.prreg") + wks_nam_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"nam.timeseries") + + wks_nao = gsn_open_wks(wks_type,getenv("OUTDIR")+"nao") + wks_nao_pr = gsn_open_wks(wks_type,getenv("OUTDIR")+"nao.prreg") + wks_nao_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"nao.timeseries") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_nam,"ncl_default") + gsn_define_colormap(wks_nam_ts,"ncl_default") + gsn_define_colormap(wks_nao,"ncl_default") + gsn_define_colormap(wks_nao_ts,"ncl_default") + gsn_define_colormap(wks_nam_pr,"MPL_BrBG") + gsn_define_colormap(wks_nao_pr,"MPL_BrBG") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_nam,"BlueDarkRed18") + gsn_define_colormap(wks_nam_ts,"ncl_default") + gsn_define_colormap(wks_nao,"BlueDarkRed18") + gsn_define_colormap(wks_nao_ts,"ncl_default") + gsn_define_colormap(wks_nam_pr,"BrownBlue12") + gsn_define_colormap(wks_nao_pr,"BrownBlue12") + end if + + map_nam_djf = new(nsim,"graphic") + map_nam_mam = new(nsim,"graphic") + map_nam_jja = 
new(nsim,"graphic") + map_nam_son = new(nsim,"graphic") + map_nam_ann = new(nsim,"graphic") + map_nam_mon = new(nsim,"graphic") + xy_nam_djf = new(nsim,"graphic") + xy_nam_mam = new(nsim,"graphic") + xy_nam_jja = new(nsim,"graphic") + xy_nam_son = new(nsim,"graphic") + xy_nam_ann = new(nsim,"graphic") + xy_nam_mon = new(nsim,"graphic") + reg_nam_djf = new(nsim,"graphic") + reg_nam_mam = new(nsim,"graphic") + reg_nam_jja = new(nsim,"graphic") + reg_nam_son = new(nsim,"graphic") + reg_nam_ann = new(nsim,"graphic") + reg_nam_mon = new(nsim,"graphic") + reg_nam_pr_djf = new(nsim,"graphic") + reg_nam_pr_mam = new(nsim,"graphic") + reg_nam_pr_jja = new(nsim,"graphic") + reg_nam_pr_son = new(nsim,"graphic") + reg_nam_pr_ann = new(nsim,"graphic") + reg_nam_pr_mon = new(nsim,"graphic") + + map_nao_djf = new(nsim,"graphic") + map_nao_mam = new(nsim,"graphic") + map_nao_jja = new(nsim,"graphic") + map_nao_son = new(nsim,"graphic") + map_nao_ann = new(nsim,"graphic") + map_nao_mon = new(nsim,"graphic") + xy_nao_djf = new(nsim,"graphic") + xy_nao_mam = new(nsim,"graphic") + xy_nao_jja = new(nsim,"graphic") + xy_nao_son = new(nsim,"graphic") + xy_nao_ann = new(nsim,"graphic") + xy_nao_mon = new(nsim,"graphic") + reg_nao_djf = new(nsim,"graphic") + reg_nao_mam = new(nsim,"graphic") + reg_nao_jja = new(nsim,"graphic") + reg_nao_son = new(nsim,"graphic") + reg_nao_ann = new(nsim,"graphic") + reg_nao_mon = new(nsim,"graphic") + reg_nao_pr_djf = new(nsim,"graphic") + reg_nao_pr_mam = new(nsim,"graphic") + reg_nao_pr_jja = new(nsim,"graphic") + reg_nao_pr_son = new(nsim,"graphic") + reg_nao_pr_ann = new(nsim,"graphic") + reg_nao_pr_mon = new(nsim,"graphic") + + xy_npi = new(nsim,"graphic") + sstreg_frame = 1 ; *reg_frame = flag to create regressions .ps/.png files. Created/used instead of *reg_plot_flag + ; so that if {sst,tas,pr} regressions are not created for the last simulation listed that .ps/png files are created + tasreg_frame = 1 + prreg_frame = 1 + + do ee = 0,nsim-1 +; print(paths(ee)+" "+syear(ee)+" "+eyear(ee)) + arr = data_read_in(paths(ee),"PSL",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(arr,"is_all_missing")) then + delete(arr) + continue + end if + + if (OPT_CLIMO.eq."Full") then + arr = rmMonAnnCycTLL(arr) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = arr + delete(temp_arr&time) + temp_arr&time = cd_calendar(arr&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + arr = calcMonAnomTLL(arr,climo) + delete(climo) + end if + + arrT = runave_n_Wrap(arr,3,0,0) ; form DJF averages + arrT(0,:,:) = (/ dim_avg_n(arr(:1,:,:),0) /) + arr_djf = arrT(0::12,:,:) + arr_mam = arrT(3::12,:,:) + arr_jja = arrT(6::12,:,:) ; form JJA averages + arr_son = arrT(9::12,:,:) + delete(arrT) + + arrV = runave_n_Wrap(arr,12,0,0) + arr_ann = arrV(5::12,:,:) + delete(arrV) +; +; arr_djf = (/ dtrend_msg_n(ispan(0,dimsizes(arr_djf&time)-1,1),arr_djf,True,False,0) /) +; arr_mam = (/ dtrend_msg_n(ispan(0,dimsizes(arr_mam&time)-1,1),arr_mam,True,False,0) /) +; arr_jja = (/ dtrend_msg_n(ispan(0,dimsizes(arr_jja&time)-1,1),arr_jja,True,False,0) /) +; arr_son = (/ dtrend_msg_n(ispan(0,dimsizes(arr_son&time)-1,1),arr_son,True,False,0) /) +; +; arr_ann = (/ dtrend_msg_n(ispan(0,dimsizes(arr_ann&time)-1,1),arr_ann,True,False,0) 
/) +; +; arr_ndjfm = (/ dtrend_msg_n(ispan(0,dimsizes(arr_ndjfm&time)-1,1),arr_ndjfm,True,False,0) /) +; +; arr = (/ dtrend_msg_n(ispan(0,dimsizes(arr&time)-1,1),arr,True,False,0) /) +;---------SST Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_ts(ee),eyear(ee),eyear_ts(ee)/)))) then + sstreg_plot_flag = 1 + else + if (syear(ee).eq.syear_ts(ee)) then ; check that the start and end years match for ts, tas, and psl + if (eyear(ee).eq.eyear_ts(ee)) then + sstreg_plot_flag = 0 + else + sstreg_plot_flag = 1 + end if + else + sstreg_plot_flag = 1 + end if + end if + + if (sstreg_plot_flag.eq.0) then + ; print("Data to be read in: "+paths_ts(ee)+" from "+syear_ts(ee)+":"+eyear_ts(ee)) + sst = data_read_in(paths_ts(ee),"TS",syear_ts(ee),eyear_ts(ee)) + if (isatt(sst,"is_all_missing")) then + sstreg_plot_flag = 1 + delete(sst) + end if + + if (sstreg_plot_flag.eq.0) then ; only continue if both PSL/TS fields are present + sst = where(sst.le.-1.8,-1.8,sst) + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") + basemap = d->LSMASK + lsm = landsea_mask(basemap,sst&lat,sst&lon) + sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False) + delete(lsm) + + if (OPT_CLIMO.eq."Full") then + sst = rmMonAnnCycTLL(sst) + else + check_custom_climo(names_ts(ee),syear_ts(ee),eyear_ts(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sst + delete(temp_arr&time) + temp_arr&time = cd_calendar(sst&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + sst = calcMonAnomTLL(sst,climo) + delete(climo) + end if +; sst = (/ dtrend_msg_n(ispan(0,dimsizes(sst&time)-1,1),sst,False,False,0) /) + + sstT = runave_n_Wrap(sst,3,0,0) ; form DJF averages + sstT(0,:,:) = (/ dim_avg_n(sst(:1,:,:),0) /) + sst_djf = sstT(0::12,:,:) + sst_mam = sstT(3::12,:,:) + sst_jja = sstT(6::12,:,:) ; form JJA averages + sst_son = sstT(9::12,:,:) + delete(sstT) + + sstV = runave_n_Wrap(sst,12,0,0) + sst_ann = sstV(5::12,:,:) + delete([/sstV/]) + end if + end if +;---------TAS Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_tas(ee),eyear(ee),eyear_tas(ee)/)))) then + tasreg_plot_flag = 1 + else + if (syear(ee).eq.syear_tas(ee)) then ; check that the start and end years match for ts, tas, and psl + if (eyear(ee).eq.eyear_tas(ee)) then + tasreg_plot_flag = 0 + else + tasreg_plot_flag = 1 + end if + else + tasreg_plot_flag = 1 + end if + if (sstreg_plot_flag.eq.1) then ; if the ts dataset is missing but the tas is not, do not + tasreg_plot_flag = 1 ; run through the tas calculations, as both are currently required + end if + end if + + if (tasreg_plot_flag.eq.0) then + tas = data_read_in(paths_tas(ee),"TREFHT",syear_tas(ee),eyear_tas(ee)) + if (isatt(tas,"is_all_missing")) then + tasreg_plot_flag = 1 + delete(tas) + end if + + if (tasreg_plot_flag.eq.0) then ; only continue if both PSL/TAS fields are present + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") + basemap = d->LSMASK + lsm = landsea_mask(basemap,tas&lat,tas&lon) + tas = mask(tas,conform(tas,lsm,(/1,2/)).eq.0,False) + delete(lsm) + + if (OPT_CLIMO.eq."Full") then + tas = rmMonAnnCycTLL(tas) + else + check_custom_climo(names_tas(ee),syear_tas(ee),eyear_tas(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = tas + delete(temp_arr&time) + temp_arr&time = cd_calendar(tas&time,1) + if
(CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + tas = calcMonAnomTLL(tas,climo) + delete(climo) + end if +; tas = (/ dtrend_msg_n(ispan(0,dimsizes(tas&time)-1,1),tas,False,False,0) /) + + tasT = runave_n_Wrap(tas,3,0,0) ; form DJF averages + tasT(0,:,:) = (/ dim_avg_n(tas(:1,:,:),0) /) + tas_djf = tasT(0::12,:,:) + tas_mam = tasT(3::12,:,:) + tas_jja = tasT(6::12,:,:) ; form JJA averages + tas_son = tasT(9::12,:,:) + delete(tasT) + + tasV = runave_n_Wrap(tas,12,0,0) + tas_ann = tasV(5::12,:,:) + delete([/tasV/]) + end if + end if +;---------PR Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_pr(ee),eyear(ee),eyear_pr(ee)/)))) then + prreg_plot_flag = 1 + else + if (syear(ee).eq.syear_pr(ee)) then ; check that the start and end years match for pr and psl + if (eyear(ee).eq.eyear_pr(ee)) then + prreg_plot_flag = 0 + else + prreg_plot_flag = 1 + end if + else + prreg_plot_flag = 1 + end if + end if + + if (prreg_plot_flag.eq.0) then + pr = data_read_in(paths_pr(ee),"PRECT",syear_pr(ee),eyear_pr(ee)) + if (isatt(pr,"is_all_missing")) then + prreg_plot_flag = 1 + delete(pr) + end if + + if (prreg_plot_flag.eq.0) then ; only continue if both PSL/PR fields are present + if (OPT_CLIMO.eq."Full") then + pr = rmMonAnnCycTLL(pr) + else + check_custom_climo(names_pr(ee),syear_pr(ee),eyear_pr(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = pr + delete(temp_arr&time) + temp_arr&time = cd_calendar(pr&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + pr = calcMonAnomTLL(pr,climo) + delete(climo) + end if +; pr = (/ dtrend_msg_n(ispan(0,dimsizes(pr&time)-1,1),pr,False,False,0) /) + + prT = runave_n_Wrap(pr,3,0,0) ; form DJF averages + prT(0,:,:) = (/ dim_avg_n(pr(:1,:,:),0) /) + pr_djf = prT(0::12,:,:) + pr_mam = prT(3::12,:,:) + pr_jja = prT(6::12,:,:) ; form JJA averages + pr_son = prT(9::12,:,:) + delete(prT) + + prV = runave_n_Wrap(pr,12,0,0) + pr_ann = prV(5::12,:,:) + delete([/prV/]) + end if + end if + +;------------------------------------------------------------------- + arr_djf_CW = SqrtCosWeight(arr_djf) + arr_mam_CW = SqrtCosWeight(arr_mam) + arr_jja_CW = SqrtCosWeight(arr_jja) + arr_son_CW = SqrtCosWeight(arr_son) + arr_ann_CW = SqrtCosWeight(arr_ann) + if (COMPUTE_MODES_MON.eq."True") then + arr_mon_CW = SqrtCosWeight(arr) + else + if (isvar("arr")) then + delete(arr) + end if + if (isvar("sst")) then + delete(sst) + end if + if (isvar("tas")) then + delete(tas) + end if + if (isvar("pr")) then + delete(pr) + end if + end if +;---------NAM calculations---------------------------------------------------------- + evecv = eofunc(arr_djf_CW({lat|20:},lon|:,time|:),2,75) + pcts = eofunc_ts(arr_djf_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_djf = dim_standardize(pcts(0,:),0) + nam_djf = arr_djf(0,:,:) + nam_djf = (/ regCoef(nam_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_djf = sst_djf(0,:,:) + nam_sst_djf = (/ regCoef(nam_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_djf = tas_djf(0,:,:) + nam_tas_djf = (/ regCoef(nam_pc_djf,tas_djf(lat|:,lon|:,time|:)) /) + end if + if 
(prreg_plot_flag.eq.0) then + nam_pr_djf = pr_djf(0,:,:) + nam_pr_djf = (/ regCoef(nam_pc_djf,pr_djf(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_djf({85},{5}))) then + if (nam_djf({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_djf = nam_djf*-1. + nam_pc_djf = nam_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_djf = nam_sst_djf*-1. + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_djf = nam_tas_djf*-1. + end if + if (prreg_plot_flag.eq.0) then + nam_pr_djf = nam_pr_djf*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nam_pc_djf),False) + if (sig_pcv(0)) then ; if True then significant + nam_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nam_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + nam_pc_djf!0 = "TIME" + nam_pc_djf&TIME = ispan(syear(ee),eyear(ee),1) + nam_pc_djf&TIME@units = "YYYY" + nam_pc_djf&TIME@long_name = "time" + delete([/evecv,pcts/]) + + evecv = eofunc(arr_mam_CW({lat|20:},lon|:,time|:),2,75) + pcts = eofunc_ts(arr_mam_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_mam = dim_standardize(pcts(0,:),0) + nam_mam = arr_mam(0,:,:) + nam_mam = (/ regCoef(nam_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_mam = sst_mam(0,:,:) + nam_sst_mam = (/ regCoef(nam_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_mam = tas_mam(0,:,:) + nam_tas_mam = (/ regCoef(nam_pc_mam,tas_mam(lat|:,lon|:,time|:)) /) + end if + if (prreg_plot_flag.eq.0) then + nam_pr_mam = pr_mam(0,:,:) + nam_pr_mam = (/ regCoef(nam_pc_mam,pr_mam(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_mam({85},{5}))) then + if (nam_mam({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_mam = nam_mam*-1. + nam_pc_mam = nam_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_mam = nam_sst_mam*-1. + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_mam = nam_tas_mam*-1. + end if + if (prreg_plot_flag.eq.0) then + nam_pr_mam = nam_pr_mam*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nam_pc_mam),False) + if (sig_pcv(0)) then ; if True then significant + nam_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nam_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nam_pc_mam) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_jja_CW({lat|20:},lon|:,time|:),2,75) + pcts = eofunc_ts(arr_jja_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_jja = dim_standardize(pcts(0,:),0) + nam_jja = arr_jja(0,:,:) + nam_jja = (/ regCoef(nam_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_jja = sst_jja(0,:,:) + nam_sst_jja = (/ regCoef(nam_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_jja = tas_jja(0,:,:) + nam_tas_jja = (/ regCoef(nam_pc_jja,tas_jja(lat|:,lon|:,time|:)) /) + end if + if (prreg_plot_flag.eq.0) then + nam_pr_jja = pr_jja(0,:,:) + nam_pr_jja = (/ regCoef(nam_pc_jja,pr_jja(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_jja({85},{5}))) then + if (nam_jja({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_jja = nam_jja*-1. + nam_pc_jja = nam_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_jja = nam_sst_jja*-1. + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_jja = nam_tas_jja*-1. 
+ end if + if (prreg_plot_flag.eq.0) then + nam_pr_jja = nam_pr_jja*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nam_pc_jja),False) + if (sig_pcv(0)) then ; if True then significant + nam_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nam_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nam_pc_jja) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_son_CW({lat|20:},lon|:,time|:),2,75) + pcts = eofunc_ts(arr_son_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_son = dim_standardize(pcts(0,:),0) + nam_son = arr_son(0,:,:) + nam_son = (/ regCoef(nam_pc_son,arr_son(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_son = sst_son(0,:,:) + nam_sst_son = (/ regCoef(nam_pc_son,sst_son(lat|:,lon|:,time|:)) /) + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_son = tas_son(0,:,:) + nam_tas_son = (/ regCoef(nam_pc_son,tas_son(lat|:,lon|:,time|:)) /) + end if + if (prreg_plot_flag.eq.0) then + nam_pr_son = pr_son(0,:,:) + nam_pr_son = (/ regCoef(nam_pc_son,pr_son(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_son({85},{5}))) then + if (nam_son({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_son = nam_son*-1. + nam_pc_son = nam_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_son = nam_sst_son*-1. + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_son = nam_tas_son*-1. + end if + if (prreg_plot_flag.eq.0) then + nam_pr_son = nam_pr_son*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nam_pc_son),False) + if (sig_pcv(0)) then ; if True then significant + nam_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nam_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nam_pc_son) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_ann_CW({lat|20:},lon|:,time|:),2,75) + pcts = eofunc_ts(arr_ann_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_ann = dim_standardize(pcts(0,:),0) + nam_ann = arr_ann(0,:,:) + nam_ann = (/ regCoef(nam_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_ann = sst_ann(0,:,:) + nam_sst_ann = (/ regCoef(nam_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_ann = tas_ann(0,:,:) + nam_tas_ann = (/ regCoef(nam_pc_ann,tas_ann(lat|:,lon|:,time|:)) /) + end if + if (prreg_plot_flag.eq.0) then + nam_pr_ann = pr_ann(0,:,:) + nam_pr_ann = (/ regCoef(nam_pc_ann,pr_ann(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_ann({85},{5}))) then + if (nam_ann({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_ann = nam_ann*-1. + nam_pc_ann = nam_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_ann = nam_sst_ann*-1. + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_ann = nam_tas_ann*-1. + end if + if (prreg_plot_flag.eq.0) then + nam_pr_ann = nam_pr_ann*-1. 
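; [Editor's note - annotation only, not part of the original CVDP code]
; The point checks at (85N,5E) in these blocks flip each EOF pattern and its
; PC time series together; the sign of an EOF is arbitrary, so this only
; enforces a consistent plotting convention across datasets. The
; sig_pcv = eofunc_north2(...) calls that follow (a CVDP helper loaded from
; functions.ncl, presumably analogous to NCL's eofunc_north) apply the
; North et al. (1982) rule of thumb: when the leading eigenvalue is
; separated from the next within its sampling error, the mode is flagged
; significant and a "*" is appended to the explained-variance label.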
+ end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nam_pc_ann),False) + if (sig_pcv(0)) then ; if True then significant + nam_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nam_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nam_pc_ann) + delete([/evecv,pcts/]) + + if (COMPUTE_MODES_MON.eq."True") then + evecv = eofunc(arr_mon_CW({lat|20:},lon|:,time|:),2,75) + pcts = eofunc_ts(arr_mon_CW({lat|20:},lon|:,time|:),evecv,False) + nam_pc_mon = dim_standardize(pcts(0,:),0) + nam_mon = arr(0,:,:) + nam_mon = (/ regCoef(nam_pc_mon,arr(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nam_sst_mon = sst(0,:,:) + nam_sst_mon = (/ regCoef(nam_pc_mon,sst(lat|:,lon|:,time|:)) /) + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_mon = tas(0,:,:) + nam_tas_mon = (/ regCoef(nam_pc_mon,tas(lat|:,lon|:,time|:)) /) + end if + if (prreg_plot_flag.eq.0) then + nam_pr_mon = pr(0,:,:) + nam_pr_mon = (/ regCoef(nam_pc_mon,pr(lat|:,lon|:,time|:)) /) + end if + if (.not.ismissing(nam_mon({85},{5}))) then + if (nam_mon({85},{5}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nam_mon = nam_mon*-1. + nam_pc_mon = nam_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + nam_sst_mon = nam_sst_mon*-1. + end if + if (tasreg_plot_flag.eq.0) then + nam_tas_mon = nam_tas_mon*-1. + end if + if (prreg_plot_flag.eq.0) then + nam_pr_mon = nam_pr_mon*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nam_pc_mon),False) + if (sig_pcv(0)) then ; if True then significant + nam_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nam_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + nam_pc_mon!0 = "time" + nam_pc_mon&time = arr&time + delete([/evecv,pcts/]) + end if +;----------NAO calculations-------------------------------------------------------------------------------- + arr_djf_CW_LF = lonFlip(arr_djf_CW) + arr_mam_CW_LF = lonFlip(arr_mam_CW) + arr_jja_CW_LF = lonFlip(arr_jja_CW) + arr_son_CW_LF = lonFlip(arr_son_CW) + arr_ann_CW_LF = lonFlip(arr_ann_CW) + if (COMPUTE_MODES_MON.eq."True") then + arr_mon_CW_LF = lonFlip(arr_mon_CW) + delete(arr_mon_CW) + end if + delete([/arr_djf_CW,arr_mam_CW,arr_jja_CW,arr_son_CW,arr_ann_CW/]) + + evecv = eofunc(arr_djf_CW_LF({lat|20:80},{lon|-90.:40},time|:),2,75) + pcts = eofunc_ts(arr_djf_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_djf = dim_standardize(pcts(0,:),0) + nao_djf = arr_djf(0,:,:) + nao_djf = (/ regCoef(nao_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_djf = sst_djf(0,:,:) + nao_sst_djf = (/ regCoef(nao_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + delete(sst_djf) + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_djf = tas_djf(0,:,:) + nao_tas_djf = (/ regCoef(nao_pc_djf,tas_djf(lat|:,lon|:,time|:)) /) + delete(tas_djf) + end if + if (prreg_plot_flag.eq.0) then + nao_pr_djf = pr_djf(0,:,:) + nao_pr_djf = (/ regCoef(nao_pc_djf,pr_djf(lat|:,lon|:,time|:)) /) + delete(pr_djf) + end if + if (.not.ismissing(nao_djf({70},{350}))) then + if (nao_djf({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_djf = nao_djf*-1. + nao_pc_djf = nao_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_djf = nao_sst_djf*-1. + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_djf = nao_tas_djf*-1. + end if + if (prreg_plot_flag.eq.0) then + nao_pr_djf = nao_pr_djf*-1. 
+ end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nao_pc_djf),False) + if (sig_pcv(0)) then ; if True then significant + nao_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nao_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nao_pc_djf) + delete([/evecv,pcts,arr_djf,arr_djf_CW_LF/]) + + evecv = eofunc(arr_mam_CW_LF({lat|20:80},{lon|-90.:40},time|:),2,75) + pcts = eofunc_ts(arr_mam_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_mam = dim_standardize(pcts(0,:),0) + nao_mam = arr_mam(0,:,:) + nao_mam = (/ regCoef(nao_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_mam = sst_mam(0,:,:) + nao_sst_mam = (/ regCoef(nao_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + delete(sst_mam) + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_mam = tas_mam(0,:,:) + nao_tas_mam = (/ regCoef(nao_pc_mam,tas_mam(lat|:,lon|:,time|:)) /) + delete(tas_mam) + end if + if (prreg_plot_flag.eq.0) then + nao_pr_mam = pr_mam(0,:,:) + nao_pr_mam = (/ regCoef(nao_pc_mam,pr_mam(lat|:,lon|:,time|:)) /) + delete(pr_mam) + end if + if (.not.ismissing(nao_mam({70},{350}))) then + if (nao_mam({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_mam = nao_mam*-1. + nao_pc_mam = nao_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_mam = nao_sst_mam*-1. + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_mam = nao_tas_mam*-1. + end if + if (prreg_plot_flag.eq.0) then + nao_pr_mam = nao_pr_mam*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nao_pc_mam),False) + if (sig_pcv(0)) then ; if True then significant + nao_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nao_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nao_pc_mam) + delete([/evecv,pcts,arr_mam,arr_mam_CW_LF/]) + + evecv = eofunc(arr_jja_CW_LF({lat|20:80},{lon|-90.:40},time|:),2,75) + pcts = eofunc_ts(arr_jja_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_jja = dim_standardize(pcts(0,:),0) + nao_jja = arr_jja(0,:,:) + nao_jja = (/ regCoef(nao_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_jja = sst_jja(0,:,:) + nao_sst_jja = (/ regCoef(nao_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + delete(sst_jja) + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_jja = tas_jja(0,:,:) + nao_tas_jja = (/ regCoef(nao_pc_jja,tas_jja(lat|:,lon|:,time|:)) /) + delete(tas_jja) + end if + if (prreg_plot_flag.eq.0) then + nao_pr_jja = pr_jja(0,:,:) + nao_pr_jja = (/ regCoef(nao_pc_jja,pr_jja(lat|:,lon|:,time|:)) /) + delete(pr_jja) + end if + if (.not.ismissing(nao_jja({70},{350}))) then + if (nao_jja({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_jja = nao_jja*-1. + nao_pc_jja = nao_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_jja = nao_sst_jja*-1. + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_jja = nao_tas_jja*-1. + end if + if (prreg_plot_flag.eq.0) then + nao_pr_jja = nao_pr_jja*-1. 
+ end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nao_pc_jja),False) + if (sig_pcv(0)) then ; if True then significant + nao_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nao_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nao_pc_jja) + delete([/evecv,pcts,arr_jja,arr_jja_CW_LF/]) + + evecv = eofunc(arr_son_CW_LF({lat|20:80},{lon|-90.:40},time|:),2,75) + pcts = eofunc_ts(arr_son_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_son = dim_standardize(pcts(0,:),0) + nao_son = arr_son(0,:,:) + nao_son = (/ regCoef(nao_pc_son,arr_son(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_son = sst_son(0,:,:) + nao_sst_son = (/ regCoef(nao_pc_son,sst_son(lat|:,lon|:,time|:)) /) + delete(sst_son) + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_son = tas_son(0,:,:) + nao_tas_son = (/ regCoef(nao_pc_son,tas_son(lat|:,lon|:,time|:)) /) + delete(tas_son) + end if + if (prreg_plot_flag.eq.0) then + nao_pr_son = pr_son(0,:,:) + nao_pr_son = (/ regCoef(nao_pc_son,pr_son(lat|:,lon|:,time|:)) /) + delete(pr_son) + end if + if (.not.ismissing(nao_son({70},{350}))) then + if (nao_son({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_son = nao_son*-1. + nao_pc_son = nao_pc_son*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_son = nao_sst_son*-1. + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_son = nao_tas_son*-1. + end if + if (prreg_plot_flag.eq.0) then + nao_pr_son = nao_pr_son*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nao_pc_son),False) + if (sig_pcv(0)) then ; if True then significant + nao_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nao_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nao_pc_son) + delete([/evecv,pcts,arr_son,arr_son_CW_LF/]) + + evecv = eofunc(arr_ann_CW_LF({lat|20:80},{lon|-90.:40},time|:),2,75) + pcts = eofunc_ts(arr_ann_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_ann = dim_standardize(pcts(0,:),0) + nao_ann = arr_ann(0,:,:) + nao_ann = (/ regCoef(nao_pc_ann,arr_ann(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_ann = sst_ann(0,:,:) + nao_sst_ann = (/ regCoef(nao_pc_ann,sst_ann(lat|:,lon|:,time|:)) /) + delete(sst_ann) + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_ann = tas_ann(0,:,:) + nao_tas_ann = (/ regCoef(nao_pc_ann,tas_ann(lat|:,lon|:,time|:)) /) + delete(tas_ann) + end if + if (prreg_plot_flag.eq.0) then + nao_pr_ann = pr_ann(0,:,:) + nao_pr_ann = (/ regCoef(nao_pc_ann,pr_ann(lat|:,lon|:,time|:)) /) + delete(pr_ann) + end if + if (.not.ismissing(nao_ann({70},{350}))) then + if (nao_ann({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_ann = nao_ann*-1. + nao_pc_ann = nao_pc_ann*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_ann = nao_sst_ann*-1. + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_ann = nao_tas_ann*-1. + end if + if (prreg_plot_flag.eq.0) then + nao_pr_ann = nao_pr_ann*-1. 
+ end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nao_pc_ann),False) + if (sig_pcv(0)) then ; if True then significant + nao_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nao_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(nam_pc_djf,nao_pc_ann) + delete([/evecv,pcts,arr_ann,arr_ann_CW_LF/]) + + if (COMPUTE_MODES_MON.eq."True") then + evecv = eofunc(arr_mon_CW_LF({lat|20:80},{lon|-90.:40},time|:),2,75) + pcts = eofunc_ts(arr_mon_CW_LF({lat|20:80},{lon|-90.:40},time|:),evecv,False) + nao_pc_mon = dim_standardize(pcts(0,:),0) + nao_mon = arr(0,:,:) + nao_mon = (/ regCoef(nao_pc_mon,arr(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + nao_sst_mon = sst(0,:,:) + nao_sst_mon = (/ regCoef(nao_pc_mon,sst(lat|:,lon|:,time|:)) /) + delete(sst) + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_mon = tas(0,:,:) + nao_tas_mon = (/ regCoef(nao_pc_mon,tas(lat|:,lon|:,time|:)) /) + delete(tas) + end if + if (prreg_plot_flag.eq.0) then + nao_pr_mon = pr(0,:,:) + nao_pr_mon = (/ regCoef(nao_pc_mon,pr(lat|:,lon|:,time|:)) /) + delete(pr) + end if + if (.not.ismissing(nao_mon({70},{350}))) then + if (nao_mon({70},{350}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + nao_mon = nao_mon*-1. + nao_pc_mon = nao_pc_mon*-1. + if (sstreg_plot_flag.eq.0) then + nao_sst_mon = nao_sst_mon*-1. + end if + if (tasreg_plot_flag.eq.0) then + nao_tas_mon = nao_tas_mon*-1. + end if + if (prreg_plot_flag.eq.0) then + nao_pr_mon = nao_pr_mon*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(nao_pc_mon),False) + if (sig_pcv(0)) then ; if True then significant + nao_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + nao_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + delete(sig_pcv) + nao_pc_mon!0 = "time" + nao_pc_mon&time = arr&time + delete([/evecv,pcts,arr,arr_mon_CW_LF/]) + end if +;------------------------------------------------------------------------------------------------------ + if (sstreg_frame.eq.1.and.sstreg_plot_flag.eq.0) then ; sstreg_frame = flag to create regressions .ps/.png files + sstreg_frame = 0 + end if + if (tasreg_frame.eq.1.and.tasreg_plot_flag.eq.0) then ; tasreg_frame = flag to create regressions .ps/.png files + tasreg_frame = 0 + end if + if (prreg_frame.eq.1.and.prreg_plot_flag.eq.0) then ; prreg_frame = flag to create regressions .ps/.png files + prreg_frame = 0 + end if +;------------------------------------------------------------------------------------------------------ + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.nam_nao."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than 
means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + + z->nao_timeseries_djf = set_varAtts(nao_pc_djf,"NAO normalized principal component timeseries (DJF)","1","") + z->nao_timeseries_mam = set_varAtts(nao_pc_mam,"NAO normalized principal component timeseries (MAM)","1","") + z->nao_timeseries_jja = set_varAtts(nao_pc_jja,"NAO normalized principal component timeseries (JJA)","1","") + z->nao_timeseries_son = set_varAtts(nao_pc_son,"NAO normalized principal component timeseries (SON)","1","") + z->nao_timeseries_ann = set_varAtts(nao_pc_ann,"NAO normalized principal component timeseries (ANN)","1","") + + z->nam_timeseries_djf = set_varAtts(nam_pc_djf,"NAM normalized principal component timeseries (DJF)","1","") + z->nam_timeseries_mam = set_varAtts(nam_pc_mam,"NAM normalized principal component timeseries (MAM)","1","") + z->nam_timeseries_jja = set_varAtts(nam_pc_jja,"NAM normalized principal component timeseries (JJA)","1","") + z->nam_timeseries_son = set_varAtts(nam_pc_son,"NAM normalized principal component timeseries (SON)","1","") + z->nam_timeseries_ann = set_varAtts(nam_pc_ann,"NAM normalized principal component timeseries (ANN)","1","") + + z->nao_pattern_djf = set_varAtts(nao_djf,"NAO spatial pattern (DJF)","","") + z->nao_pattern_mam = set_varAtts(nao_mam,"NAO spatial pattern (MAM)","","") + z->nao_pattern_jja = set_varAtts(nao_jja,"NAO spatial pattern (JJA)","","") + z->nao_pattern_son = set_varAtts(nao_son,"NAO spatial pattern (SON)","","") + z->nao_pattern_ann = set_varAtts(nao_ann,"NAO spatial pattern (annual)","","") + + z->nam_pattern_djf = set_varAtts(nam_djf,"NAM spatial pattern (DJF)","","") + z->nam_pattern_mam = set_varAtts(nam_mam,"NAM spatial pattern (MAM)","","") + z->nam_pattern_jja = set_varAtts(nam_jja,"NAM spatial pattern (JJA)","","") + z->nam_pattern_son = set_varAtts(nam_son,"NAM spatial pattern (SON)","","") + z->nam_pattern_ann = set_varAtts(nam_ann,"NAM spatial pattern (annual)","","") + + if (COMPUTE_MODES_MON.eq."True") then + z->nao_timeseries_mon = set_varAtts(nao_pc_mon,"NAO principal component timeseries (monthly)","","") + z->nam_timeseries_mon = set_varAtts(nam_pc_mon,"NAM principal component timeseries (monthly)","","") + z->nao_pattern_mon = set_varAtts(nao_mon,"NAO spatial pattern (monthly)","","") + z->nam_pattern_mon = set_varAtts(nam_mon,"NAM spatial pattern (monthly)","","") + end if + delete(z) + delete([/modname,fn/]) + + if (sstreg_plot_flag.eq.0) then + modname = str_sub_str(names_ts(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.nam_nao.ts."+syear_ts(ee)+"-"+eyear_ts(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names_ts(ee)+" from "+syear_ts(ee)+"-"+eyear_ts(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear_ts(ee)+"-"+eyear_ts(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->nao_sst_regression_djf = 
set_varAtts(nao_sst_djf,"sst regression onto NAO principal component timeseries (DJF)","","") + z->nao_sst_regression_mam = set_varAtts(nao_sst_mam,"sst regression onto NAO principal component timeseries (MAM)","","") + z->nao_sst_regression_jja = set_varAtts(nao_sst_jja,"sst regression onto NAO principal component timeseries (JJA)","","") + z->nao_sst_regression_son = set_varAtts(nao_sst_son,"sst regression onto NAO principal component timeseries (SON)","","") + z->nao_sst_regression_ann = set_varAtts(nao_sst_ann,"sst regression onto NAO principal component timeseries (annual)","","") + + z->nam_sst_regression_djf = set_varAtts(nam_sst_djf,"sst regression onto NAM principal component timeseries (DJF)","","") + z->nam_sst_regression_mam = set_varAtts(nam_sst_mam,"sst regression onto NAM principal component timeseries (MAM)","","") + z->nam_sst_regression_jja = set_varAtts(nam_sst_jja,"sst regression onto NAM principal component timeseries (JJA)","","") + z->nam_sst_regression_son = set_varAtts(nam_sst_son,"sst regression onto NAM principal component timeseries (SON)","","") + z->nam_sst_regression_ann = set_varAtts(nam_sst_ann,"sst regression onto NAM principal component timeseries (annual)","","") + if (COMPUTE_MODES_MON.eq."True") then + z->nao_sst_regression_mon = set_varAtts(nao_sst_mon,"sst regression onto NAO principal component timeseries (monthly)","","") + z->nam_sst_regression_mon = set_varAtts(nam_sst_mon,"sst regression onto NAM principal component timeseries (monthly)","","") + end if + delete(z) + delete([/modname,fn/]) + end if + if (tasreg_plot_flag.eq.0) then + modname = str_sub_str(names_tas(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.nam_nao.tas."+syear_tas(ee)+"-"+eyear_tas(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names_tas(ee)+" from "+syear_tas(ee)+"-"+eyear_tas(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear_tas(ee)+"-"+eyear_tas(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->nao_tas_regression_djf = set_varAtts(nao_tas_djf,"tas regression onto NAO principal component timeseries (DJF)","","") + z->nao_tas_regression_mam = set_varAtts(nao_tas_mam,"tas regression onto NAO principal component timeseries (MAM)","","") + z->nao_tas_regression_jja = set_varAtts(nao_tas_jja,"tas regression onto NAO principal component timeseries (JJA)","","") + z->nao_tas_regression_son = set_varAtts(nao_tas_son,"tas regression onto NAO principal component timeseries (SON)","","") + z->nao_tas_regression_ann = set_varAtts(nao_tas_ann,"tas regression onto NAO principal component timeseries (annual)","","") + + z->nam_tas_regression_djf = set_varAtts(nam_tas_djf,"tas regression onto NAM principal component timeseries (DJF)","","") + z->nam_tas_regression_mam = set_varAtts(nam_tas_mam,"tas regression onto NAM principal component timeseries (MAM)","","") + z->nam_tas_regression_jja = 
set_varAtts(nam_tas_jja,"tas regression onto NAM principal component timeseries (JJA)","","") + z->nam_tas_regression_son = set_varAtts(nam_tas_son,"tas regression onto NAM principal component timeseries (SON)","","") + z->nam_tas_regression_ann = set_varAtts(nam_tas_ann,"tas regression onto NAM principal component timeseries (annual)","","") + if (COMPUTE_MODES_MON.eq."True") then + z->nao_tas_regression_mon = set_varAtts(nao_tas_mon,"tas regression onto NAO principal component timeseries (monthly)","","") + z->nam_tas_regression_mon = set_varAtts(nam_tas_mon,"tas regression onto NAM principal component timeseries (monthly)","","") + end if + delete(z) + delete([/modname,fn/]) + end if + if (prreg_plot_flag.eq.0) then + modname = str_sub_str(names_pr(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.nam_nao.pr."+syear_pr(ee)+"-"+eyear_pr(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names_pr(ee)+" from "+syear_pr(ee)+"-"+eyear_pr(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear_pr(ee)+"-"+eyear_pr(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->nao_pr_regression_djf = set_varAtts(nao_pr_djf,"pr regression onto NAO principal component timeseries (DJF)","","") + z->nao_pr_regression_mam = set_varAtts(nao_pr_mam,"pr regression onto NAO principal component timeseries (MAM)","","") + z->nao_pr_regression_jja = set_varAtts(nao_pr_jja,"pr regression onto NAO principal component timeseries (JJA)","","") + z->nao_pr_regression_son = set_varAtts(nao_pr_son,"pr regression onto NAO principal component timeseries (SON)","","") + z->nao_pr_regression_ann = set_varAtts(nao_pr_ann,"pr regression onto NAO principal component timeseries (annual)","","") + + z->nam_pr_regression_djf = set_varAtts(nam_pr_djf,"pr regression onto NAM principal component timeseries (DJF)","","") + z->nam_pr_regression_mam = set_varAtts(nam_pr_mam,"pr regression onto NAM principal component timeseries (MAM)","","") + z->nam_pr_regression_jja = set_varAtts(nam_pr_jja,"pr regression onto NAM principal component timeseries (JJA)","","") + z->nam_pr_regression_son = set_varAtts(nam_pr_son,"pr regression onto NAM principal component timeseries (SON)","","") + z->nam_pr_regression_ann = set_varAtts(nam_pr_ann,"pr regression onto NAM principal component timeseries (annual)","","") + if (COMPUTE_MODES_MON.eq."True") then + z->nao_pr_regression_mon = set_varAtts(nao_pr_mon,"pr regression onto NAO principal component timeseries (monthly)","","") + z->nam_pr_regression_mon = set_varAtts(nam_pr_mon,"pr regression onto NAM principal component timeseries (monthly)","","") + end if + delete(z) + delete([/modname,fn/]) + end if + end if +;======================================================================== + res = True + res@mpGeophysicalLineColor = "gray42" + res@mpGeophysicalLineThicknessF = 2. 
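; [Editor's note - annotation only, not part of the original CVDP code]
; Three resource lists are configured in this block: "res" drives the polar
; NAM/NAO pattern and SST-regression maps, "res4" (copied from res with new
; contour levels) handles the precipitation regressions, and "res2" draws
; the tas contour overlays that are placed on the SST maps with overlay()
; further down. gsnDraw/gsnFrame stay False throughout because the returned
; plots are stored in graphic arrays, presumably for paneling later in the
; script.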
+ res@mpGridAndLimbOn = False + res@mpFillOn = False + res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.03 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.03 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + if (nsim.le.5) then + res@gsnLeftStringFontHeightF = 0.018 + res@gsnCenterStringFontHeightF = 0.022 + res@gsnRightStringFontHeightF = 0.018 + else + res@gsnLeftStringFontHeightF = 0.024 + res@gsnCenterStringFontHeightF = 0.028 + res@gsnRightStringFontHeightF = 0.024 + end if + res@gsnPolar = "NH" + res@mpMinLatF = 20. + res@mpCenterLonF = 0. + res@cnLevels = (/-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7./) + + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnCenterString = names(ee) + + res4 = res ; res4 = pr regression resources + if (COLORMAP.eq.0) then + res4@cnLevels := fspan(-.7,.7,15) + else + res4@cnLevels := fspan(-.5,.5,11) + end if + + res2 = True + res2@gsnDraw = False + res2@gsnFrame = False + res2@cnLevelSelectionMode = "ExplicitLevels" + res2@cnLevels = res@cnLevels + + res2@cnLineLabelsOn = False + res2@cnFillOn = True + res2@cnLinesOn = False + res2@cnFillMode = "AreaFill" + res2@lbLabelBarOn = False + res2@cnInfoLabelOn = False + res2@gsnRightString = "" + res2@gsnLeftString = "" + res2@gsnCenterString = "" + res2@gsnAddCyclic = True + + + if (isfilepresent2("obs_psl").and.ee.eq.0) then ; for pattern correlation table. Save entire lat/lon array + patcor_nam_djf = new((/nsim,dimsizes(nam_djf&lat),dimsizes(nam_djf&lon)/),typeof(nam_djf)) + patcor_nam_djf!1 = "lat" + patcor_nam_djf&lat = nam_djf&lat + patcor_nam_djf!2 = "lon" + patcor_nam_djf&lon = nam_djf&lon + patcor_nam_jja = patcor_nam_djf + patcor_nam_ann = patcor_nam_djf + patcor_nao_djf = patcor_nam_djf + patcor_nao_jja = patcor_nam_djf + patcor_nao_ann = patcor_nam_djf + patcor_nam_djf(ee,:,:) = (/ nam_djf /) + patcor_nam_jja(ee,:,:) = (/ nam_jja /) + patcor_nam_ann(ee,:,:) = (/ nam_ann /) + patcor_nao_djf(ee,:,:) = (/ nao_djf /) + patcor_nao_jja(ee,:,:) = (/ nao_jja /) + patcor_nao_ann(ee,:,:) = (/ nao_ann /) + end if + if (isfilepresent2("obs_psl").and.ee.ge.1.and.isvar("patcor_nam_djf")) then + patcor_nam_djf(ee,:,:) = (/ totype(linint2(nam_djf&lon,nam_djf&lat,nam_djf,True,patcor_nam_djf&lon,patcor_nam_djf&lat,0),typeof(patcor_nam_djf)) /) + patcor_nam_jja(ee,:,:) = (/ totype(linint2(nam_jja&lon,nam_jja&lat,nam_jja,True,patcor_nam_jja&lon,patcor_nam_jja&lat,0),typeof(patcor_nam_jja)) /) + patcor_nam_ann(ee,:,:) = (/ totype(linint2(nam_ann&lon,nam_ann&lat,nam_ann,True,patcor_nam_ann&lon,patcor_nam_ann&lat,0),typeof(patcor_nam_ann)) /) + + patcor_nao_djf(ee,:,:) = (/ totype(linint2(nao_djf&lon,nao_djf&lat,nao_djf,True,patcor_nao_djf&lon,patcor_nao_djf&lat,0),typeof(patcor_nao_djf)) /) + patcor_nao_jja(ee,:,:) = (/ totype(linint2(nao_jja&lon,nao_jja&lat,nao_jja,True,patcor_nao_jja&lon,patcor_nao_jja&lat,0),typeof(patcor_nao_jja)) /) + patcor_nao_ann(ee,:,:) = (/ totype(linint2(nao_ann&lon,nao_ann&lat,nao_ann,True,patcor_nao_ann&lon,patcor_nao_ann&lat,0),typeof(patcor_nao_ann)) /) + end if + + res@gsnRightString = nam_djf@pcvar + map_nam_djf(ee) = gsn_csm_contour_map_polar(wks_nam,nam_djf,res) + res@gsnRightString = nam_mam@pcvar + map_nam_mam(ee) = gsn_csm_contour_map_polar(wks_nam,nam_mam,res) + res@gsnRightString = nam_jja@pcvar + 
map_nam_jja(ee) = gsn_csm_contour_map_polar(wks_nam,nam_jja,res) + res@gsnRightString = nam_son@pcvar + map_nam_son(ee) = gsn_csm_contour_map_polar(wks_nam,nam_son,res) + res@gsnRightString = nam_ann@pcvar + map_nam_ann(ee) = gsn_csm_contour_map_polar(wks_nam,nam_ann,res) + delete([/nam_djf,nam_mam,nam_jja,nam_son,nam_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = nam_mon@pcvar + map_nam_mon(ee) = gsn_csm_contour_map_polar(wks_nam,nam_mon,res) + delete([/nam_mon/]) + end if + + res@gsnRightString = nao_djf@pcvar + map_nao_djf(ee) = gsn_csm_contour_map_polar(wks_nao,nao_djf,res) + res@gsnRightString = nao_mam@pcvar + map_nao_mam(ee) = gsn_csm_contour_map_polar(wks_nao,nao_mam,res) + res@gsnRightString = nao_jja@pcvar + map_nao_jja(ee) = gsn_csm_contour_map_polar(wks_nao,nao_jja,res) + res@gsnRightString = nao_son@pcvar + map_nao_son(ee) = gsn_csm_contour_map_polar(wks_nao,nao_son,res) + res@gsnRightString = nao_ann@pcvar + map_nao_ann(ee) = gsn_csm_contour_map_polar(wks_nao,nao_ann,res) + delete([/nao_djf,nao_mam,nao_jja,nao_son,nao_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + res@gsnRightString = nao_mon@pcvar + map_nao_mon(ee) = gsn_csm_contour_map_polar(wks_nao,nao_mon,res) + delete([/nao_mon/]) + end if + + if (sstreg_plot_flag.eq.0) then ; SSTs have to be present for regressions. TASs are optional + res@cnLevels := fspan(-.7,.7,15) + if (tasreg_plot_flag.eq.0) then + if (names_ts(ee).eq.names_tas(ee)) then + res@gsnCenterString = names_ts(ee) + else + res@gsnCenterString = names_ts(ee)+" / "+names_tas(ee) + end if + else + res@gsnCenterString = names_ts(ee) + end if + res@gsnRightString = "" + reg_nam_djf(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_djf,res) + reg_nam_mam(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_mam,res) + reg_nam_jja(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_jja,res) + reg_nam_son(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_son,res) + reg_nam_ann(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_ann,res) + delete([/nam_sst_djf,nam_sst_mam,nam_sst_jja,nam_sst_son,nam_sst_ann/]) + if (tasreg_plot_flag.eq.0) then + o_djf = gsn_csm_contour(wks_nam,nam_tas_djf,res2) + o_mam = gsn_csm_contour(wks_nam,nam_tas_mam,res2) + o_jja = gsn_csm_contour(wks_nam,nam_tas_jja,res2) + o_son = gsn_csm_contour(wks_nam,nam_tas_son,res2) + o_ann = gsn_csm_contour(wks_nam,nam_tas_ann,res2) + delete([/nam_tas_djf,nam_tas_mam,nam_tas_jja,nam_tas_son,nam_tas_ann/]) + overlay(reg_nam_djf(ee),o_djf) + overlay(reg_nam_mam(ee),o_mam) + overlay(reg_nam_jja(ee),o_jja) + overlay(reg_nam_son(ee),o_son) + overlay(reg_nam_ann(ee),o_ann) + delete([/o_djf,o_mam,o_jja,o_son,o_ann/]) + end if + if (COMPUTE_MODES_MON.eq."True") then + reg_nam_mon(ee) = gsn_csm_contour_map_polar(wks_nam,nam_sst_mon,res) + delete([/nam_sst_mon/]) + if (tasreg_plot_flag.eq.0) then + o_mon = gsn_csm_contour(wks_nam,nam_tas_mon,res2) + overlay(reg_nam_mon(ee),o_mon) + delete([/o_mon,nam_tas_mon/]) + end if + end if + + reg_nao_djf(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_djf,res) + reg_nao_mam(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_mam,res) + reg_nao_jja(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_jja,res) + reg_nao_son(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_son,res) + reg_nao_ann(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_ann,res) + delete([/nao_sst_djf,nao_sst_mam,nao_sst_jja,nao_sst_son,nao_sst_ann/]) + if (tasreg_plot_flag.eq.0) then + o_djf = gsn_csm_contour(wks_nao,nao_tas_djf,res2) + o_mam = gsn_csm_contour(wks_nao,nao_tas_mam,res2) + o_jja = 
gsn_csm_contour(wks_nao,nao_tas_jja,res2) + o_son = gsn_csm_contour(wks_nao,nao_tas_son,res2) + o_ann = gsn_csm_contour(wks_nao,nao_tas_ann,res2) + delete([/nao_tas_djf,nao_tas_mam,nao_tas_jja,nao_tas_son,nao_tas_ann/]) + overlay(reg_nao_djf(ee),o_djf) + overlay(reg_nao_mam(ee),o_mam) + overlay(reg_nao_jja(ee),o_jja) + overlay(reg_nao_son(ee),o_son) + overlay(reg_nao_ann(ee),o_ann) + delete([/o_djf,o_mam,o_jja,o_son,o_ann/]) + end if + if (COMPUTE_MODES_MON.eq."True") then + reg_nao_mon(ee) = gsn_csm_contour_map_polar(wks_nao,nao_sst_mon,res) + delete([/nao_sst_mon/]) + if (tasreg_plot_flag.eq.0) then + o_mon = gsn_csm_contour(wks_nao,nao_tas_mon,res2) + overlay(reg_nao_mon(ee),o_mon) + delete([/o_mon,nao_tas_mon/]) + end if + end if + end if + + if (prreg_plot_flag.eq.0) then ; PR regressions + res4@gsnRightString = "" + res4@gsnCenterString = names_pr(ee) + reg_nam_pr_djf(ee) = gsn_csm_contour_map_polar(wks_nam_pr,nam_pr_djf,res4) + reg_nam_pr_mam(ee) = gsn_csm_contour_map_polar(wks_nam_pr,nam_pr_mam,res4) + reg_nam_pr_jja(ee) = gsn_csm_contour_map_polar(wks_nam_pr,nam_pr_jja,res4) + reg_nam_pr_son(ee) = gsn_csm_contour_map_polar(wks_nam_pr,nam_pr_son,res4) + reg_nam_pr_ann(ee) = gsn_csm_contour_map_polar(wks_nam_pr,nam_pr_ann,res4) + delete([/nam_pr_djf,nam_pr_mam,nam_pr_jja,nam_pr_son,nam_pr_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_nam_pr_mon(ee) = gsn_csm_contour_map_polar(wks_nam_pr,nam_pr_mon,res4) + delete([/nam_pr_mon/]) + end if + + reg_nao_pr_djf(ee) = gsn_csm_contour_map_polar(wks_nao_pr,nao_pr_djf,res4) + reg_nao_pr_mam(ee) = gsn_csm_contour_map_polar(wks_nao_pr,nao_pr_mam,res4) + reg_nao_pr_jja(ee) = gsn_csm_contour_map_polar(wks_nao_pr,nao_pr_jja,res4) + reg_nao_pr_son(ee) = gsn_csm_contour_map_polar(wks_nao_pr,nao_pr_son,res4) + reg_nao_pr_ann(ee) = gsn_csm_contour_map_polar(wks_nao_pr,nao_pr_ann,res4) + delete([/nao_pr_djf,nao_pr_mam,nao_pr_jja,nao_pr_son,nao_pr_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + reg_nao_pr_mon(ee) = gsn_csm_contour_map_polar(wks_nao_pr,nao_pr_mon,res4) + delete([/nao_pr_mon/]) + end if + end if + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnXYBarChart = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + xyres@gsnAboveYRefLineColor = 185 + xyres@gsnBelowYRefLineColor = 35 + if (wks_type.eq."png") then + xyres@xyLineThicknessF = .5 + else + xyres@xyLineThicknessF = .2 + end if + xyres@xyLineColor = "gray52" + xyres@tiYAxisString = "" + xyres@tiXAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnStringFontHeightF = 0.017 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnStringFontHeightF = 0.024 + end if + xyres@gsnCenterStringOrthogonalPosF = 0.025 + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnLeftString = "" + xyres@gsnRightString = "" + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+1.5 + + xyres@gsnCenterString = names(ee) + + xyresmon = xyres + xyresmon@gsnXYBarChart = False + xyresmon@xyLineThicknessF = .1 + + xy_nam_djf(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_djf)),nam_pc_djf,xyres) ; use standardized timeseries + xy_nam_mam(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_mam)),nam_pc_mam,xyres) ; use standardized timeseries + xy_nam_jja(ee) = 
gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_jja)),nam_pc_jja,xyres) ; use standardized timeseries + xy_nam_son(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_son)),nam_pc_son,xyres) ; use standardized timeseries + xy_nam_ann(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee),dimsizes(nam_pc_ann)),nam_pc_ann,xyres) ; use standardized timeseries + delete([/nam_pc_djf,nam_pc_mam,nam_pc_jja,nam_pc_son,nam_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_nam_mon(ee) = gsn_csm_xy(wks_nam_ts,fspan(syear(ee),eyear(ee)+.91667,dimsizes(nam_pc_mon)),nam_pc_mon,xyresmon) ; use standardized timeseries + delete([/nam_pc_mon/]) + end if + + xy_nao_djf(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_djf)),nao_pc_djf,xyres) ; use standardized timeseries + xy_nao_mam(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_mam)),nao_pc_mam,xyres) ; use standardized timeseries + xy_nao_jja(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_jja)),nao_pc_jja,xyres) ; use standardized timeseries + xy_nao_son(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_son)),nao_pc_son,xyres) ; use standardized timeseries + xy_nao_ann(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee),dimsizes(nao_pc_ann)),nao_pc_ann,xyres) ; use standardized timeseries + delete([/nao_pc_djf,nao_pc_mam,nao_pc_jja,nao_pc_son,nao_pc_ann/]) + if (COMPUTE_MODES_MON.eq."True") then + xy_nao_mon(ee) = gsn_csm_xy(wks_nao_ts,fspan(syear(ee),eyear(ee)+.91667,dimsizes(nao_pc_mon)),nao_pc_mon,xyresmon) ; use standardized timeseries + delete([/nao_pc_mon/]) + end if + delete(sstreg_plot_flag) + end do + + if (isvar("clim_syear")) then + delete(clim_syear) + end if + if (isvar("clim_eyear")) then + delete(clim_eyear) + end if + + if (isvar("patcor_nam_djf")) then ; for pattern correlation table + clat = cos(0.01745329*patcor_nam_djf&lat) + clat!0 = "lat" + clat&lat = patcor_nam_djf&lat + line3 = " " ; Must be 18 characters long + line4 = line3 + header = (/"","Pattern Correlations Observations vs. 
Model(s)",""/) + finpr_nam_djf = "NAM (DJF) " ; Must be 18 characters long + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor_nam_djf(hh,{20:},:)))) then + finpr_nam_djf = finpr_nam_djf+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr_nam_djf = finpr_nam_djf+sprintf(format2,(pattern_cor(patcor_nam_djf(0,{20:},:),patcor_nam_djf(hh,{20:},:),clat({20:}),0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor_nam_djf(0,{20:},:))),ndtooned(NewCosWeight(patcor_nam_djf(hh,{20:},:)))))) + end if + end do +; + if (dimsizes(tochar(line4)).ge.8190) then ; system or fortran compiler limit + print("Metrics table warning: Not creating metrics table as size of comparison results in a invalid ascii row size.") + else + write_table(getenv("OUTDIR")+"metrics.psl.nam_nao.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.nam_nao.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.nam_nao.txt","a",[/line4/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.nam_nao.txt","a",[/finpr_nam_djf/],"%s") + end if + delete([/line3,line4,format2,format3,nchar,ntc,clat,patcor_nam_djf,patcor_nam_jja,patcor_nam_ann/]) + delete([/patcor_nao_djf,patcor_nao_jja,patcor_nao_ann/]) + delete([/dimY,ntb,header/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.55 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "NAM (DJF)" + gsn_panel2(wks_nam,map_nam_djf,(/nrow,ncol/),panres) + delete(map_nam_djf) + panres@txString = "NAM (MAM)" + gsn_panel2(wks_nam,map_nam_mam,(/nrow,ncol/),panres) + delete(map_nam_mam) + panres@txString = "NAM (JJA)" + gsn_panel2(wks_nam,map_nam_jja,(/nrow,ncol/),panres) + delete(map_nam_jja) + panres@txString = "NAM (SON)" + gsn_panel2(wks_nam,map_nam_son,(/nrow,ncol/),panres) + delete(map_nam_son) + panres@txString = "NAM (Annual)" + gsn_panel2(wks_nam,map_nam_ann,(/nrow,ncol/),panres) + delete(map_nam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAM (Monthly)" + gsn_panel2(wks_nam,map_nam_mon,(/nrow,ncol/),panres) + delete(map_nam_mon) + end if + + if (sstreg_frame.eq.0) then + if (tasreg_frame.eq.0) then + txt0 = "SST/TAS" + else + txt0 = "SST" + end if + panres@txString = "NAM "+txt0+" Regressions (DJF)" + gsn_panel2(wks_nam,reg_nam_djf,(/nrow,ncol/),panres) + delete(reg_nam_djf) + panres@txString = "NAM "+txt0+" Regressions (MAM)" + gsn_panel2(wks_nam,reg_nam_mam,(/nrow,ncol/),panres) + delete(reg_nam_mam) + panres@txString = "NAM "+txt0+" Regressions (JJA)" + gsn_panel2(wks_nam,reg_nam_jja,(/nrow,ncol/),panres) + delete(reg_nam_jja) + panres@txString = "NAM "+txt0+" Regressions (SON)" + 
gsn_panel2(wks_nam,reg_nam_son,(/nrow,ncol/),panres) + delete(reg_nam_son) + panres@txString = "NAM "+txt0+" Regressions (Annual)" + gsn_panel2(wks_nam,reg_nam_ann,(/nrow,ncol/),panres) + delete(reg_nam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAM "+txt0+" Regressions (Monthly)" + gsn_panel2(wks_nam,reg_nam_mon,(/nrow,ncol/),panres) + delete(reg_nam_mon) + end if + delete(wks_nam) + end if + if (prreg_frame.eq.0) then + panres@txString = "NAM PR Regressions (DJF)" + gsn_panel2(wks_nam_pr,reg_nam_pr_djf,(/nrow,ncol/),panres) + delete(reg_nam_pr_djf) + panres@txString = "NAM PR Regressions (MAM)" + gsn_panel2(wks_nam_pr,reg_nam_pr_mam,(/nrow,ncol/),panres) + delete(reg_nam_pr_mam) + panres@txString = "NAM PR Regressions (JJA)" + gsn_panel2(wks_nam_pr,reg_nam_pr_jja,(/nrow,ncol/),panres) + delete(reg_nam_pr_jja) + panres@txString = "NAM PR Regressions (SON)" + gsn_panel2(wks_nam_pr,reg_nam_pr_son,(/nrow,ncol/),panres) + delete(reg_nam_pr_son) + panres@txString = "NAM PR Regressions (Annual)" + gsn_panel2(wks_nam_pr,reg_nam_pr_ann,(/nrow,ncol/),panres) + delete(reg_nam_pr_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAM PR Regressions (Monthly)" + gsn_panel2(wks_nam_pr,reg_nam_pr_mon,(/nrow,ncol/),panres) + delete(reg_nam_pr_mon) + end if + delete(wks_nam_pr) + end if + + panres@txString = "NAO (DJF)" + gsn_panel2(wks_nao,map_nao_djf,(/nrow,ncol/),panres) + delete(map_nao_djf) + panres@txString = "NAO (MAM)" + gsn_panel2(wks_nao,map_nao_mam,(/nrow,ncol/),panres) + delete(map_nao_mam) + panres@txString = "NAO (JJA)" + gsn_panel2(wks_nao,map_nao_jja,(/nrow,ncol/),panres) + delete(map_nao_jja) + panres@txString = "NAO (SON)" + gsn_panel2(wks_nao,map_nao_son,(/nrow,ncol/),panres) + delete(map_nao_son) + panres@txString = "NAO (Annual)" + gsn_panel2(wks_nao,map_nao_ann,(/nrow,ncol/),panres) + delete(map_nao_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAO (Monthly)" + gsn_panel2(wks_nao,map_nao_mon,(/nrow,ncol/),panres) + delete(map_nao_mon) + end if + + if (sstreg_frame.eq.0) then + if (tasreg_frame.eq.0) then + txt0 = "SST/TAS" + else + txt0 = "SST" + end if + panres@txString = "NAO "+txt0+" Regressions (DJF)" + gsn_panel2(wks_nao,reg_nao_djf,(/nrow,ncol/),panres) + delete(reg_nao_djf) + panres@txString = "NAO "+txt0+" Regressions (MAM)" + gsn_panel2(wks_nao,reg_nao_mam,(/nrow,ncol/),panres) + delete(reg_nao_mam) + panres@txString = "NAO "+txt0+" Regressions (JJA)" + gsn_panel2(wks_nao,reg_nao_jja,(/nrow,ncol/),panres) + delete(reg_nao_jja) + panres@txString = "NAO "+txt0+" Regressions (SON)" + gsn_panel2(wks_nao,reg_nao_son,(/nrow,ncol/),panres) + delete(reg_nao_son) + panres@txString = "NAO "+txt0+" Regressions (Annual)" + gsn_panel2(wks_nao,reg_nao_ann,(/nrow,ncol/),panres) + delete(reg_nao_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAO "+txt0+" Regressions (Monthly)" + gsn_panel2(wks_nao,reg_nao_mon,(/nrow,ncol/),panres) + delete(reg_nao_mon) + end if + delete(wks_nao) + end if + if (prreg_frame.eq.0) then + panres@txString = "NAO PR Regressions (DJF)" + gsn_panel2(wks_nao_pr,reg_nao_pr_djf,(/nrow,ncol/),panres) + delete(reg_nao_pr_djf) + panres@txString = "NAO PR Regressions (MAM)" + gsn_panel2(wks_nao_pr,reg_nao_pr_mam,(/nrow,ncol/),panres) + delete(reg_nao_pr_mam) + panres@txString = "NAO PR Regressions (JJA)" + gsn_panel2(wks_nao_pr,reg_nao_pr_jja,(/nrow,ncol/),panres) + delete(reg_nao_pr_jja) + panres@txString = "NAO PR Regressions (SON)" + gsn_panel2(wks_nao_pr,reg_nao_pr_son,(/nrow,ncol/),panres) 
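+ ; Layout note (worked example): ncol = floattointeger(sqrt(nsim)) and
+ ; nrow = (nsim/ncol)+mod(nsim,ncol) both truncate, so nsim = 5 gives
+ ; ncol = 2 and nrow = 2+1 = 3, i.e. a 3x2 panel grid with one empty slot;
+ ; nrow*ncol is always >= nsim, so every simulation gets a panel.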
+ delete(reg_nao_pr_son) + panres@txString = "NAO PR Regressions (Annual)" + gsn_panel2(wks_nao_pr,reg_nao_pr_ann,(/nrow,ncol/),panres) + delete(reg_nao_pr_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "NAO PR Regressions (Monthly)" + gsn_panel2(wks_nao_pr,reg_nao_pr_mon,(/nrow,ncol/),panres) + delete(reg_nao_pr_mon) + end if + delete(wks_nao_pr) + end if + + panres2 = True + if (nsim.le.5) then + panres2@txFontHeightF = 0.024 + else + panres2@txFontHeightF = 0.016 + end if + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres2@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) + end if + + panres2@txString = "NAM (DJF)" + gsn_panel2(wks_nam_ts,xy_nam_djf,lp,panres2) + delete(xy_nam_djf) + panres2@txString = "NAM (MAM)" + gsn_panel2(wks_nam_ts,xy_nam_mam,lp,panres2) + delete(xy_nam_mam) + panres2@txString = "NAM (JJA)" + gsn_panel2(wks_nam_ts,xy_nam_jja,lp,panres2) + delete(xy_nam_jja) + panres2@txString = "NAM (SON)" + gsn_panel2(wks_nam_ts,xy_nam_son,lp,panres2) + delete(xy_nam_son) + panres2@txString = "NAM (Annual)" + gsn_panel2(wks_nam_ts,xy_nam_ann,lp,panres2) + delete(xy_nam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "NAM (Monthly)" + gsn_panel2(wks_nam_ts,xy_nam_mon,lp,panres2) + delete(xy_nam_mon) + end if + delete(wks_nam_ts) + + panres2@txString = "NAO (DJF)" + gsn_panel2(wks_nao_ts,xy_nao_djf,lp,panres2) + delete(xy_nao_djf) + panres2@txString = "NAO (MAM)" + gsn_panel2(wks_nao_ts,xy_nao_mam,lp,panres2) + delete(xy_nao_mam) + panres2@txString = "NAO (JJA)" + gsn_panel2(wks_nao_ts,xy_nao_jja,lp,panres2) + delete(xy_nao_jja) + panres2@txString = "NAO (SON)" + gsn_panel2(wks_nao_ts,xy_nao_son,lp,panres2) + delete(xy_nao_son) + panres2@txString = "NAO (Annual)" + gsn_panel2(wks_nao_ts,xy_nao_ann,lp,panres2) + delete(xy_nao_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "NAO (Monthly)" + gsn_panel2(wks_nao_ts,xy_nao_mon,lp,panres2) + delete(xy_nao_mon) + end if + delete(wks_nao_ts) +;-------------------------------------------------------------------------------------------------- + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + system("mv "+OUTDIR+"nam.000001.png "+OUTDIR+"nam.djf.png") + system("mv "+OUTDIR+"nam.000002.png "+OUTDIR+"nam.mam.png") + system("mv "+OUTDIR+"nam.000003.png "+OUTDIR+"nam.jja.png") + system("mv "+OUTDIR+"nam.000004.png "+OUTDIR+"nam.son.png") + system("mv "+OUTDIR+"nam.000005.png "+OUTDIR+"nam.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nam.000006.png "+OUTDIR+"nam.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"nam.000007.png "+OUTDIR+"nam.tempreg.djf.png") + system("mv "+OUTDIR+"nam.000008.png "+OUTDIR+"nam.tempreg.mam.png") + system("mv "+OUTDIR+"nam.000009.png "+OUTDIR+"nam.tempreg.jja.png") + system("mv "+OUTDIR+"nam.000010.png "+OUTDIR+"nam.tempreg.son.png") + system("mv "+OUTDIR+"nam.000011.png "+OUTDIR+"nam.tempreg.ann.png") + system("mv "+OUTDIR+"nam.000012.png "+OUTDIR+"nam.tempreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"nam.000006.png "+OUTDIR+"nam.tempreg.djf.png") + system("mv "+OUTDIR+"nam.000007.png "+OUTDIR+"nam.tempreg.mam.png") + system("mv "+OUTDIR+"nam.000008.png "+OUTDIR+"nam.tempreg.jja.png") + system("mv "+OUTDIR+"nam.000009.png "+OUTDIR+"nam.tempreg.son.png") + system("mv "+OUTDIR+"nam.000010.png 
"+OUTDIR+"nam.tempreg.ann.png") + end if + end if + + if (prreg_frame.eq.0) then + system("mv "+OUTDIR+"nam.prreg.000001.png "+OUTDIR+"nam.prreg.djf.png") + system("mv "+OUTDIR+"nam.prreg.000002.png "+OUTDIR+"nam.prreg.mam.png") + system("mv "+OUTDIR+"nam.prreg.000003.png "+OUTDIR+"nam.prreg.jja.png") + system("mv "+OUTDIR+"nam.prreg.000004.png "+OUTDIR+"nam.prreg.son.png") + system("mv "+OUTDIR+"nam.prreg.000005.png "+OUTDIR+"nam.prreg.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nam.prreg.000006.png "+OUTDIR+"nam.prreg.mon.png") + end if + end if + + system("mv "+OUTDIR+"nao.000001.png "+OUTDIR+"nao.djf.png") + system("mv "+OUTDIR+"nao.000002.png "+OUTDIR+"nao.mam.png") + system("mv "+OUTDIR+"nao.000003.png "+OUTDIR+"nao.jja.png") + system("mv "+OUTDIR+"nao.000004.png "+OUTDIR+"nao.son.png") + system("mv "+OUTDIR+"nao.000005.png "+OUTDIR+"nao.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nao.000006.png "+OUTDIR+"nao.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"nao.000007.png "+OUTDIR+"nao.tempreg.djf.png") + system("mv "+OUTDIR+"nao.000008.png "+OUTDIR+"nao.tempreg.mam.png") + system("mv "+OUTDIR+"nao.000009.png "+OUTDIR+"nao.tempreg.jja.png") + system("mv "+OUTDIR+"nao.000010.png "+OUTDIR+"nao.tempreg.son.png") + system("mv "+OUTDIR+"nao.000011.png "+OUTDIR+"nao.tempreg.ann.png") + system("mv "+OUTDIR+"nao.000012.png "+OUTDIR+"nao.tempreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"nao.000006.png "+OUTDIR+"nao.tempreg.djf.png") + system("mv "+OUTDIR+"nao.000007.png "+OUTDIR+"nao.tempreg.mam.png") + system("mv "+OUTDIR+"nao.000008.png "+OUTDIR+"nao.tempreg.jja.png") + system("mv "+OUTDIR+"nao.000009.png "+OUTDIR+"nao.tempreg.son.png") + system("mv "+OUTDIR+"nao.000010.png "+OUTDIR+"nao.tempreg.ann.png") + end if + end if + + if (prreg_frame.eq.0) then + system("mv "+OUTDIR+"nao.prreg.000001.png "+OUTDIR+"nao.prreg.djf.png") + system("mv "+OUTDIR+"nao.prreg.000002.png "+OUTDIR+"nao.prreg.mam.png") + system("mv "+OUTDIR+"nao.prreg.000003.png "+OUTDIR+"nao.prreg.jja.png") + system("mv "+OUTDIR+"nao.prreg.000004.png "+OUTDIR+"nao.prreg.son.png") + system("mv "+OUTDIR+"nao.prreg.000005.png "+OUTDIR+"nao.prreg.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nao.prreg.000006.png "+OUTDIR+"nao.prreg.mon.png") + end if + end if + + system("mv "+OUTDIR+"nam.timeseries.000001.png "+OUTDIR+"nam.timeseries.djf.png") + system("mv "+OUTDIR+"nam.timeseries.000002.png "+OUTDIR+"nam.timeseries.mam.png") + system("mv "+OUTDIR+"nam.timeseries.000003.png "+OUTDIR+"nam.timeseries.jja.png") + system("mv "+OUTDIR+"nam.timeseries.000004.png "+OUTDIR+"nam.timeseries.son.png") + system("mv "+OUTDIR+"nam.timeseries.000005.png "+OUTDIR+"nam.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nam.timeseries.000006.png "+OUTDIR+"nam.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"nao.timeseries.000001.png "+OUTDIR+"nao.timeseries.djf.png") + system("mv "+OUTDIR+"nao.timeseries.000002.png "+OUTDIR+"nao.timeseries.mam.png") + system("mv "+OUTDIR+"nao.timeseries.000003.png "+OUTDIR+"nao.timeseries.jja.png") + system("mv "+OUTDIR+"nao.timeseries.000004.png "+OUTDIR+"nao.timeseries.son.png") + system("mv "+OUTDIR+"nao.timeseries.000005.png "+OUTDIR+"nao.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"nao.timeseries.000006.png "+OUTDIR+"nao.timeseries.mon.png") + end if + + + else + system("psplit "+OUTDIR+"nam.ps 
"+OUTDIR+"psl_nn") + system("mv "+OUTDIR+"psl_nn0001.ps "+OUTDIR+"nam.djf.ps") + system("mv "+OUTDIR+"psl_nn0002.ps "+OUTDIR+"nam.mam.ps") + system("mv "+OUTDIR+"psl_nn0003.ps "+OUTDIR+"nam.jja.ps") + system("mv "+OUTDIR+"psl_nn0004.ps "+OUTDIR+"nam.son.ps") + system("mv "+OUTDIR+"psl_nn0005.ps "+OUTDIR+"nam.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_nn0006.ps "+OUTDIR+"nam.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_nn0007.ps "+OUTDIR+"nam.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_nn0008.ps "+OUTDIR+"nam.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_nn0009.ps "+OUTDIR+"nam.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_nn0010.ps "+OUTDIR+"nam.tempreg.son.ps") + system("mv "+OUTDIR+"psl_nn0011.ps "+OUTDIR+"nam.tempreg.ann.ps") + system("mv "+OUTDIR+"psl_nn0012.ps "+OUTDIR+"nam.tempreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_nn0006.ps "+OUTDIR+"nam.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_nn0007.ps "+OUTDIR+"nam.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_nn0008.ps "+OUTDIR+"nam.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_nn0009.ps "+OUTDIR+"nam.tempreg.son.ps") + system("mv "+OUTDIR+"psl_nn0010.ps "+OUTDIR+"nam.tempreg.ann.ps") + end if + end if + + if (prreg_frame.eq.0) then + system("psplit "+OUTDIR+"nam.prreg.ps "+OUTDIR+"pr_nn") + system("mv "+OUTDIR+"pr_nn0001.ps "+OUTDIR+"nam.prreg.djf.ps") + system("mv "+OUTDIR+"pr_nn0002.ps "+OUTDIR+"nam.prreg.mam.ps") + system("mv "+OUTDIR+"pr_nn0003.ps "+OUTDIR+"nam.prreg.jja.ps") + system("mv "+OUTDIR+"pr_nn0004.ps "+OUTDIR+"nam.prreg.son.ps") + system("mv "+OUTDIR+"pr_nn0005.ps "+OUTDIR+"nam.prreg.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pr_nn0006.ps "+OUTDIR+"nam.prreg.mon.ps") + end if + end if + + system("psplit "+OUTDIR+"nao.ps "+OUTDIR+"psl_nn") + system("mv "+OUTDIR+"psl_nn0001.ps "+OUTDIR+"nao.djf.ps") + system("mv "+OUTDIR+"psl_nn0002.ps "+OUTDIR+"nao.mam.ps") + system("mv "+OUTDIR+"psl_nn0003.ps "+OUTDIR+"nao.jja.ps") + system("mv "+OUTDIR+"psl_nn0004.ps "+OUTDIR+"nao.son.ps") + system("mv "+OUTDIR+"psl_nn0005.ps "+OUTDIR+"nao.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_nn0006.ps "+OUTDIR+"nao.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_nn0007.ps "+OUTDIR+"nao.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_nn0008.ps "+OUTDIR+"nao.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_nn0009.ps "+OUTDIR+"nao.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_nn0010.ps "+OUTDIR+"nao.tempreg.son.ps") + system("mv "+OUTDIR+"psl_nn0011.ps "+OUTDIR+"nao.tempreg.ann.ps") + system("mv "+OUTDIR+"psl_nn0012.ps "+OUTDIR+"nao.tempreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_nn0006.ps "+OUTDIR+"nao.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_nn0007.ps "+OUTDIR+"nao.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_nn0008.ps "+OUTDIR+"nao.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_nn0009.ps "+OUTDIR+"nao.tempreg.son.ps") + system("mv "+OUTDIR+"psl_nn0010.ps "+OUTDIR+"nao.tempreg.ann.ps") + end if + end if + + if (prreg_frame.eq.0) then + system("psplit "+OUTDIR+"nao.prreg.ps "+OUTDIR+"pr_nn") + system("mv "+OUTDIR+"pr_nn0001.ps "+OUTDIR+"nao.prreg.djf.ps") + system("mv "+OUTDIR+"pr_nn0002.ps "+OUTDIR+"nao.prreg.mam.ps") + system("mv "+OUTDIR+"pr_nn0003.ps "+OUTDIR+"nao.prreg.jja.ps") + system("mv "+OUTDIR+"pr_nn0004.ps "+OUTDIR+"nao.prreg.son.ps") + system("mv "+OUTDIR+"pr_nn0005.ps "+OUTDIR+"nao.prreg.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + 
system("mv "+OUTDIR+"pr_nn0006.ps "+OUTDIR+"nao.prreg.mon.ps") + end if + system("rm "+OUTDIR+"nam.prreg.ps "+OUTDIR+"nao.prreg.ps") + end if + + system("psplit "+OUTDIR+"nam.timeseries.ps "+OUTDIR+"psl_nn") + system("mv "+OUTDIR+"psl_nn0001.ps "+OUTDIR+"nam.timeseries.djf.ps") + system("mv "+OUTDIR+"psl_nn0002.ps "+OUTDIR+"nam.timeseries.mam.ps") + system("mv "+OUTDIR+"psl_nn0003.ps "+OUTDIR+"nam.timeseries.jja.ps") + system("mv "+OUTDIR+"psl_nn0004.ps "+OUTDIR+"nam.timeseries.son.ps") + system("mv "+OUTDIR+"psl_nn0005.ps "+OUTDIR+"nam.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_nn0006.ps "+OUTDIR+"nam.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"nao.timeseries.ps "+OUTDIR+"psl_nn") + system("mv "+OUTDIR+"psl_nn0001.ps "+OUTDIR+"nao.timeseries.djf.ps") + system("mv "+OUTDIR+"psl_nn0002.ps "+OUTDIR+"nao.timeseries.mam.ps") + system("mv "+OUTDIR+"psl_nn0003.ps "+OUTDIR+"nao.timeseries.jja.ps") + system("mv "+OUTDIR+"psl_nn0004.ps "+OUTDIR+"nao.timeseries.son.ps") + system("mv "+OUTDIR+"psl_nn0005.ps "+OUTDIR+"nao.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_nn0006.ps "+OUTDIR+"nao.timeseries.mon.ps") + end if + system("rm "+OUTDIR+"nao.timeseries.ps "+OUTDIR+"nam.timeseries.ps "+OUTDIR+"nao.ps "+OUTDIR+"nam.ps") + end if + print("Finished: psl.nam_nao.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.pna_npo.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.pna_npo.ncl new file mode 100644 index 0000000000..c945ef6d68 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.pna_npo.ncl @@ -0,0 +1,1812 @@ +; Calculates PNA and NPO (patterns and PC timeseries), as well as regressions +; of those PC timeseries onto ts, tas, and pr. Also calculates the NPI. +; +; Variables used: psl, ts, tas, and pr +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: psl.pna_npo.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COMPUTE_MODES_MON = getenv("COMPUTE_MODES_MON") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_psl") + na = asciiread("namelist_byvar/namelist_psl",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) 
+ +;---------SST Regressions coding------------------------------------------------- + nsim_ts = numAsciiRow("namelist_byvar/namelist_ts") + na_ts = asciiread("namelist_byvar/namelist_ts",(/nsim_ts/),"string") + names_ts = new(nsim_ts,"string") + paths_ts = new(nsim_ts,"string") + syear_ts = new(nsim_ts,"integer",-999) + eyear_ts = new(nsim_ts,"integer",-999) + + do gg = 0,nsim_ts-1 + names_ts(gg) = str_strip(str_get_field(na_ts(gg),1,delim)) + paths_ts(gg) = str_strip(str_get_field(na_ts(gg),2,delim)) + syear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),3,delim))) + eyear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),4,delim))) + end do + delete(na_ts) + nyr_ts = eyear_ts-syear_ts+1 +;---------TAS Regressions coding------------------------------------------------- + nsim_tas = numAsciiRow("namelist_byvar/namelist_trefht") + na_tas = asciiread("namelist_byvar/namelist_trefht",(/nsim_tas/),"string") + names_tas = new(nsim_tas,"string") + paths_tas = new(nsim_tas,"string") + syear_tas = new(nsim_tas,"integer",-999) + eyear_tas = new(nsim_tas,"integer",-999) + + do gg = 0,nsim_tas-1 + names_tas(gg) = str_strip(str_get_field(na_tas(gg),1,delim)) + paths_tas(gg) = str_strip(str_get_field(na_tas(gg),2,delim)) + syear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),3,delim))) + eyear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),4,delim))) + end do + delete(na_tas) + nyr_tas = eyear_tas-syear_tas+1 +;---------PR Regressions coding------------------------------------------------- + nsim_pr = numAsciiRow("namelist_byvar/namelist_prect") + na_pr = asciiread("namelist_byvar/namelist_prect",(/nsim_pr/),"string") + names_pr = new(nsim_pr,"string") + paths_pr = new(nsim_pr,"string") + syear_pr = new(nsim_pr,"integer",-999) + eyear_pr = new(nsim_pr,"integer",-999) + + do gg = 0,nsim_pr-1 + names_pr(gg) = str_strip(str_get_field(na_pr(gg),1,delim)) + paths_pr(gg) = str_strip(str_get_field(na_pr(gg),2,delim)) + syear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),3,delim))) + eyear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),4,delim))) + end do + delete(na_pr) + nyr_pr = eyear_pr-syear_pr+1 +;------------------------------------------------------------------------------------------------- + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + + wks_pna = gsn_open_wks(wks_type,getenv("OUTDIR")+"pna") + wks_pna_pr = gsn_open_wks(wks_type,getenv("OUTDIR")+"pna.prreg") + wks_pna_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"pna.timeseries") + + wks_npo = gsn_open_wks(wks_type,getenv("OUTDIR")+"npo") + wks_npo_pr = gsn_open_wks(wks_type,getenv("OUTDIR")+"npo.prreg") + wks_npo_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"npo.timeseries") + + wks_npi_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"npi.timeseries.ndjfm") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_pna,"ncl_default") + gsn_define_colormap(wks_pna_ts,"ncl_default") + gsn_define_colormap(wks_npo,"ncl_default") + gsn_define_colormap(wks_npo_ts,"ncl_default") + gsn_define_colormap(wks_npi_ts,"ncl_default") + gsn_define_colormap(wks_pna_pr,"MPL_BrBG") + gsn_define_colormap(wks_npo_pr,"MPL_BrBG") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_pna,"BlueDarkRed18") + gsn_define_colormap(wks_pna_ts,"ncl_default") + gsn_define_colormap(wks_npo,"BlueDarkRed18") + gsn_define_colormap(wks_npo_ts,"ncl_default") + gsn_define_colormap(wks_npi_ts,"ncl_default") + 
gsn_define_colormap(wks_pna_pr,"MPL_BrBG") + gsn_define_colormap(wks_npo_pr,"MPL_BrBG") + end if + + map_pna_djf = new(nsim,"graphic") + map_pna_mam = new(nsim,"graphic") + map_pna_jja = new(nsim,"graphic") + map_pna_son = new(nsim,"graphic") + map_pna_ann = new(nsim,"graphic") + map_pna_mon = new(nsim,"graphic") + xy_pna_djf = new(nsim,"graphic") + xy_pna_mam = new(nsim,"graphic") + xy_pna_jja = new(nsim,"graphic") + xy_pna_son = new(nsim,"graphic") + xy_pna_ann = new(nsim,"graphic") + xy_pna_mon = new(nsim,"graphic") + reg_pna_djf = new(nsim,"graphic") + reg_pna_mam = new(nsim,"graphic") + reg_pna_jja = new(nsim,"graphic") + reg_pna_son = new(nsim,"graphic") + reg_pna_ann = new(nsim,"graphic") + reg_pna_mon = new(nsim,"graphic") + reg_pna_pr_djf = new(nsim,"graphic") + reg_pna_pr_mam = new(nsim,"graphic") + reg_pna_pr_jja = new(nsim,"graphic") + reg_pna_pr_son = new(nsim,"graphic") + reg_pna_pr_ann = new(nsim,"graphic") + reg_pna_pr_mon = new(nsim,"graphic") + + map_npo_djf = new(nsim,"graphic") + map_npo_mam = new(nsim,"graphic") + map_npo_jja = new(nsim,"graphic") + map_npo_son = new(nsim,"graphic") + map_npo_ann = new(nsim,"graphic") + map_npo_mon = new(nsim,"graphic") + xy_npo_djf = new(nsim,"graphic") + xy_npo_mam = new(nsim,"graphic") + xy_npo_jja = new(nsim,"graphic") + xy_npo_son = new(nsim,"graphic") + xy_npo_ann = new(nsim,"graphic") + xy_npo_mon = new(nsim,"graphic") + reg_npo_djf = new(nsim,"graphic") + reg_npo_mam = new(nsim,"graphic") + reg_npo_jja = new(nsim,"graphic") + reg_npo_son = new(nsim,"graphic") + reg_npo_ann = new(nsim,"graphic") + reg_npo_mon = new(nsim,"graphic") + reg_npo_pr_djf = new(nsim,"graphic") + reg_npo_pr_mam = new(nsim,"graphic") + reg_npo_pr_jja = new(nsim,"graphic") + reg_npo_pr_son = new(nsim,"graphic") + reg_npo_pr_ann = new(nsim,"graphic") + reg_npo_pr_mon = new(nsim,"graphic") + + xy_npi = new(nsim,"graphic") + + sstreg_frame = 1 ; sstreg_frame = flag to create regressions .ps/.png files. 
Created/used instead of sstreg_plot_flag + ; so that if sst regressions are not created for the last simulation listed that .ps/png files are created + tasreg_frame = 1 + prreg_frame = 1 + + do ee = 0,nsim-1 +; print(paths(ee)+" "+syear(ee)+" "+eyear(ee)) + arr = data_read_in(paths(ee),"PSL",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(arr,"is_all_missing")) then + delete(arr) + continue + end if + + if (OPT_CLIMO.eq."Full") then + arr = rmMonAnnCycTLL(arr) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = arr + delete(temp_arr&time) + temp_arr&time = cd_calendar(arr&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + arr = calcMonAnomTLL(arr,climo) + delete(climo) + end if + + arrT = runave_n_Wrap(arr,3,0,0) ; form DJF averages + arrT(0,:,:) = (/ dim_avg_n(arr(:1,:,:),0) /) + arr_djf = arrT(0::12,:,:) + arr_mam = arrT(3::12,:,:) + arr_jja = arrT(6::12,:,:) ; form JJA averages + arr_son = arrT(9::12,:,:) + delete(arrT) + + arrU = runave_n_Wrap(arr,5,0,0) + arrU(0,:,:) = (/ dim_avg_n(arr(:2,:,:),0) /) + arr_ndjfm = arrU(0::12,:,:) + delete(arrU) + + arrV = runave_n_Wrap(arr,12,0,0) + arr_ann = arrV(5::12,:,:) + delete(arrV) +; +; arr_djf = (/ dtrend_msg_n(ispan(0,dimsizes(arr_djf&time)-1,1),arr_djf,True,False,0) /) +; arr_mam = (/ dtrend_msg_n(ispan(0,dimsizes(arr_mam&time)-1,1),arr_mam,True,False,0) /) +; arr_jja = (/ dtrend_msg_n(ispan(0,dimsizes(arr_jja&time)-1,1),arr_jja,True,False,0) /) +; arr_son = (/ dtrend_msg_n(ispan(0,dimsizes(arr_son&time)-1,1),arr_son,True,False,0) /) +; +; arr_ann = (/ dtrend_msg_n(ispan(0,dimsizes(arr_ann&time)-1,1),arr_ann,True,False,0) /) +; +; arr_ndjfm = (/ dtrend_msg_n(ispan(0,dimsizes(arr_ndjfm&time)-1,1),arr_ndjfm,True,False,0) /) +; +; arr = (/ dtrend_msg_n(ispan(0,dimsizes(arr&time)-1,1),arr,True,False,0) /) +;---------SST Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_ts(ee),eyear(ee),eyear_ts(ee)/)))) then + sstreg_plot_flag = 1 + else + if (syear(ee).eq.syear_ts(ee)) then ; check that the start and end years match for ts, trefht, and psl + if (eyear(ee).eq.eyear_ts(ee)) then + sstreg_plot_flag = 0 + else + sstreg_plot_flag = 1 + end if + else + sstreg_plot_flag = 1 + end if + end if + + if (sstreg_plot_flag.eq.0) then + ; print("Data to be read in: "+paths_ts(ee)+" from "+syear_ts(ee)+":"+eyear_ts(ee)) + sst = data_read_in(paths_ts(ee),"TS",syear_ts(ee),eyear_ts(ee)) + if (isatt(sst,"is_all_missing")) then + sstreg_plot_flag = 1 + delete(sst) + end if + + if (sstreg_plot_flag.eq.0) then ; only continue if both PSL/TS fields are present + sst = where(sst.le.-1.8,-1.8,sst) + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") + basemap = d->LSMASK + lsm = landsea_mask(basemap,sst&lat,sst&lon) + sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False) + delete(lsm) + + if (OPT_CLIMO.eq."Full") then + sst = rmMonAnnCycTLL(sst) + else + check_custom_climo(names_ts(ee),syear_ts(ee),eyear_ts(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sst + delete(temp_arr&time) + temp_arr&time = cd_calendar(sst&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = 
clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + sst = calcMonAnomTLL(sst,climo) + delete(climo) + end if +; sst = (/ dtrend_msg_n(ispan(0,dimsizes(sst&time)-1,1),sst,False,False,0) /) + + sstT = runave_n_Wrap(sst,3,0,0) ; form DJF averages + sstT(0,:,:) = (/ dim_avg_n(sst(:1,:,:),0) /) + sst_djf = sstT(0::12,:,:) + sst_mam = sstT(3::12,:,:) + sst_jja = sstT(6::12,:,:) ; form JJA averages + sst_son = sstT(9::12,:,:) + delete(sstT) + + sstV = runave_n_Wrap(sst,12,0,0) + sst_ann = sstV(5::12,:,:) + delete(sstV) + end if + end if +;---------TAS Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_tas(ee),eyear(ee),eyear_tas(ee)/)))) then + tasreg_plot_flag = 1 + else + if (syear(ee).eq.syear_tas(ee)) then ; check that the start and end years match for ts, tas, and psl + if (eyear(ee).eq.eyear_tas(ee)) then + tasreg_plot_flag = 0 + else + tasreg_plot_flag = 1 + end if + else + tasreg_plot_flag = 1 + end if + if (sstreg_plot_flag.eq.1) then ; if the ts dataset is missing but the tas is not, do not + tasreg_plot_flag = 1 ; run through the tas calculations as both are currently required + end if + end if + + if (tasreg_plot_flag.eq.0) then + tas = data_read_in(paths_tas(ee),"TREFHT",syear_tas(ee),eyear_tas(ee)) + if (isatt(tas,"is_all_missing")) then + tasreg_plot_flag = 1 + delete(tas) + end if + + if (tasreg_plot_flag.eq.0) then ; only continue if both PSL/TAS fields are present + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") + basemap = d->LSMASK + lsm = landsea_mask(basemap,tas&lat,tas&lon) + tas = mask(tas,conform(tas,lsm,(/1,2/)).eq.0,False) + delete(lsm) + + if (OPT_CLIMO.eq."Full") then + tas = rmMonAnnCycTLL(tas) + else + check_custom_climo(names_tas(ee),syear_tas(ee),eyear_tas(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = tas + delete(temp_arr&time) + temp_arr&time = cd_calendar(tas&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + tas = calcMonAnomTLL(tas,climo) + delete(climo) + end if +; tas = (/ dtrend_msg_n(ispan(0,dimsizes(tas&time)-1,1),tas,False,False,0) /) + + tasT = runave_n_Wrap(tas,3,0,0) ; form DJF averages + tasT(0,:,:) = (/ dim_avg_n(tas(:1,:,:),0) /) + tas_djf = tasT(0::12,:,:) + tas_mam = tasT(3::12,:,:) + tas_jja = tasT(6::12,:,:) ; form JJA averages + tas_son = tasT(9::12,:,:) + delete(tasT) + + tasV = runave_n_Wrap(tas,12,0,0) + tas_ann = tasV(5::12,:,:) + delete([/tasV/]) + end if + end if +;---------PR Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_pr(ee),eyear(ee),eyear_pr(ee)/)))) then + prreg_plot_flag = 1 + else + if (syear(ee).eq.syear_pr(ee)) then ; check that the start and end years match for pr and psl + if (eyear(ee).eq.eyear_pr(ee)) then + prreg_plot_flag = 0 + else + prreg_plot_flag = 1 + end if + else + prreg_plot_flag = 1 + end if + end if + + if (prreg_plot_flag.eq.0) then + pr = data_read_in(paths_pr(ee),"PRECT",syear_pr(ee),eyear_pr(ee)) + if (isatt(pr,"is_all_missing")) then + prreg_plot_flag = 1 + delete(pr) + end if + + if (prreg_plot_flag.eq.0) then ; only continue if both PSL/PR fields are present + if (OPT_CLIMO.eq."Full") then + pr = rmMonAnnCycTLL(pr) + else + check_custom_climo(names_pr(ee),syear_pr(ee),eyear_pr(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = pr + delete(temp_arr&time) +
temp_arr&time = cd_calendar(pr&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + pr = calcMonAnomTLL(pr,climo) + delete(climo) + end if +; pr = (/ dtrend_msg_n(ispan(0,dimsizes(pr&time)-1,1),pr,False,False,0) /) + + prT = runave_n_Wrap(pr,3,0,0) ; form DJF averages + prT(0,:,:) = (/ dim_avg_n(pr(:1,:,:),0) /) + pr_djf = prT(0::12,:,:) + pr_mam = prT(3::12,:,:) + pr_jja = prT(6::12,:,:) ; form JJA averages + pr_son = prT(9::12,:,:) + delete(prT) + + prV = runave_n_Wrap(pr,12,0,0) + pr_ann = prV(5::12,:,:) + delete([/prV/]) + end if + end if +;----------------NPI calculation----------------------------------- + coswgt=cos(rad*arr_djf&lat) + coswgt!0 = "lat" + coswgt&lat = arr_djf&lat + npi_ndjfm = wgt_areaave(arr_ndjfm(:,{30:65},{160:220}),coswgt({30.:65.}),1.0,0) + npi_ndjfm!0 = "TIME" + npi_ndjfm&TIME = ispan(syear(ee),eyear(ee),1) + npi_ndjfm&TIME@units = "YYYY" + npi_ndjfm&TIME@long_name = "time" + + npi_ndjfm@area = "30:65N, 160:220E" + npi_ndjfm@units = arr_ndjfm@units + npi_ndjfm@long_name = "North Pacific Index" + delete([/coswgt,arr_ndjfm/]) + +;------------------------------------------------------------------ + arr_djf_CW = SqrtCosWeight(arr_djf) + arr_mam_CW = SqrtCosWeight(arr_mam) + arr_jja_CW = SqrtCosWeight(arr_jja) + arr_son_CW = SqrtCosWeight(arr_son) + arr_ann_CW = SqrtCosWeight(arr_ann) + if (COMPUTE_MODES_MON.eq."True") then + arr_mon_CW = SqrtCosWeight(arr) + else + if (isvar("arr")) then + delete(arr) + end if + if (isvar("sst")) then + delete(sst) + end if + if (isvar("tas")) then + delete(tas) + end if + if (isvar("pr")) then + delete(pr) + end if + end if +;----------PNA/NPO calculations (EOF1/2 of NP PSL)---------------------------------------------------------- + evecv = eofunc(arr_djf_CW({lat|20:85},{lon|120:240},time|:),3,75) + pcts = eofunc_ts(arr_djf_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_djf = dim_standardize(pcts(0,:),0) + npo_pc_djf = dim_standardize(pcts(1,:),0) + pna_djf = arr_djf(0,:,:) + pna_djf = (/ regCoef(pna_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + npo_djf = arr_djf(0,:,:) + npo_djf = (/ regCoef(npo_pc_djf,arr_djf(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_djf = sst_djf(0,:,:) + pna_sst_djf = (/ regCoef(pna_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + npo_sst_djf = sst_djf(0,:,:) + npo_sst_djf = (/ regCoef(npo_pc_djf,sst_djf(lat|:,lon|:,time|:)) /) + delete(sst_djf) + end if + if (tasreg_plot_flag.eq.0) then + pna_tas_djf = tas_djf(0,:,:) + pna_tas_djf = (/ regCoef(pna_pc_djf,tas_djf(lat|:,lon|:,time|:)) /) + npo_tas_djf = tas_djf(0,:,:) + npo_tas_djf = (/ regCoef(npo_pc_djf,tas_djf(lat|:,lon|:,time|:)) /) + delete(tas_djf) + end if + if (prreg_plot_flag.eq.0) then + pna_pr_djf = pr_djf(0,:,:) + pna_pr_djf = (/ regCoef(pna_pc_djf,pr_djf(lat|:,lon|:,time|:)) /) + npo_pr_djf = pr_djf(0,:,:) + npo_pr_djf = (/ regCoef(npo_pc_djf,pr_djf(lat|:,lon|:,time|:)) /) + delete(pr_djf) + end if + + if (.not.ismissing(pna_djf({50},{185}))) then + if (pna_djf({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_djf = pna_djf*-1. + pna_pc_djf = pna_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + pna_sst_djf = pna_sst_djf*-1. + end if + if (tasreg_plot_flag.eq.0) then + pna_tas_djf = pna_tas_djf*-1. + end if + if (prreg_plot_flag.eq.0) then + pna_pr_djf = pna_pr_djf*-1. 
+ end if + end if + end if + if (.not.ismissing(npo_djf({65},{185}))) then + if (npo_djf({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_djf = npo_djf*-1. + npo_pc_djf = npo_pc_djf*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_djf = npo_sst_djf*-1. + end if + if (tasreg_plot_flag.eq.0) then + npo_tas_djf = npo_tas_djf*-1. + end if + if (prreg_plot_flag.eq.0) then + npo_pr_djf = npo_pr_djf*-1. + end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(pna_pc_djf),False) + if (sig_pcv(0)) then ; if True then significant + pna_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + pna_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + if (sig_pcv(1)) then ; if True then significant + npo_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*" + else + npo_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(npi_ndjfm,pna_pc_djf) + copy_VarCoords(npi_ndjfm,npo_pc_djf) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_mam_CW({lat|20:85},{lon|120:240},time|:),3,75) + pcts = eofunc_ts(arr_mam_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_mam = dim_standardize(pcts(0,:),0) + npo_pc_mam = dim_standardize(pcts(1,:),0) + pna_mam = arr_mam(0,:,:) + pna_mam = (/ regCoef(pna_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + npo_mam = arr_mam(0,:,:) + npo_mam = (/ regCoef(npo_pc_mam,arr_mam(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_mam = sst_mam(0,:,:) + pna_sst_mam = (/ regCoef(pna_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + npo_sst_mam = sst_mam(0,:,:) + npo_sst_mam = (/ regCoef(npo_pc_mam,sst_mam(lat|:,lon|:,time|:)) /) + delete(sst_mam) + end if + if (tasreg_plot_flag.eq.0) then + pna_tas_mam = tas_mam(0,:,:) + pna_tas_mam = (/ regCoef(pna_pc_mam,tas_mam(lat|:,lon|:,time|:)) /) + npo_tas_mam = tas_mam(0,:,:) + npo_tas_mam = (/ regCoef(npo_pc_mam,tas_mam(lat|:,lon|:,time|:)) /) + delete(tas_mam) + end if + if (prreg_plot_flag.eq.0) then + pna_pr_mam = pr_mam(0,:,:) + pna_pr_mam = (/ regCoef(pna_pc_mam,pr_mam(lat|:,lon|:,time|:)) /) + npo_pr_mam = pr_mam(0,:,:) + npo_pr_mam = (/ regCoef(npo_pc_mam,pr_mam(lat|:,lon|:,time|:)) /) + delete(pr_mam) + end if + + if (.not.ismissing(pna_mam({50},{185}))) then + if (pna_mam({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_mam = pna_mam*-1. + pna_pc_mam = pna_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + pna_sst_mam = pna_sst_mam*-1. + end if + if (tasreg_plot_flag.eq.0) then + pna_tas_mam = pna_tas_mam*-1. + end if + if (prreg_plot_flag.eq.0) then + pna_pr_mam = pna_pr_mam*-1. + end if + end if + end if + if (.not.ismissing(npo_mam({65},{185}))) then + if (npo_mam({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_mam = npo_mam*-1. + npo_pc_mam = npo_pc_mam*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_mam = npo_sst_mam*-1. + end if + if (tasreg_plot_flag.eq.0) then + npo_tas_mam = npo_tas_mam*-1. + end if + if (prreg_plot_flag.eq.0) then + npo_pr_mam = npo_pr_mam*-1. 
+ end if + end if + end if + sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(pna_pc_mam),False) + if (sig_pcv(0)) then ; if True then significant + pna_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*" + else + pna_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%" + end if + if (sig_pcv(1)) then ; if True then significant + npo_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*" + else + npo_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%" + end if + delete(sig_pcv) + copy_VarCoords(npi_ndjfm,pna_pc_mam) + copy_VarCoords(npi_ndjfm,npo_pc_mam) + delete([/evecv,pcts/]) + + evecv = eofunc(arr_jja_CW({lat|20:85},{lon|120:240},time|:),3,75) + pcts = eofunc_ts(arr_jja_CW({lat|20:85},{lon|120:240},time|:),evecv,False) + pna_pc_jja = dim_standardize(pcts(0,:),0) + npo_pc_jja = dim_standardize(pcts(1,:),0) + pna_jja = arr_jja(0,:,:) + pna_jja = (/ regCoef(pna_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + npo_jja = arr_jja(0,:,:) + npo_jja = (/ regCoef(npo_pc_jja,arr_jja(lat|:,lon|:,time|:)) /) + if (sstreg_plot_flag.eq.0) then + pna_sst_jja = sst_jja(0,:,:) + pna_sst_jja = (/ regCoef(pna_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + npo_sst_jja = sst_jja(0,:,:) + npo_sst_jja = (/ regCoef(npo_pc_jja,sst_jja(lat|:,lon|:,time|:)) /) + delete(sst_jja) + end if + if (tasreg_plot_flag.eq.0) then + pna_tas_jja = tas_jja(0,:,:) + pna_tas_jja = (/ regCoef(pna_pc_jja,tas_jja(lat|:,lon|:,time|:)) /) + npo_tas_jja = tas_jja(0,:,:) + npo_tas_jja = (/ regCoef(npo_pc_jja,tas_jja(lat|:,lon|:,time|:)) /) + delete(tas_jja) + end if + if (prreg_plot_flag.eq.0) then + pna_pr_jja = pr_jja(0,:,:) + pna_pr_jja = (/ regCoef(pna_pc_jja,pr_jja(lat|:,lon|:,time|:)) /) + npo_pr_jja = pr_jja(0,:,:) + npo_pr_jja = (/ regCoef(npo_pc_jja,pr_jja(lat|:,lon|:,time|:)) /) + delete(pr_jja) + end if + + if (.not.ismissing(pna_jja({50},{185}))) then + if (pna_jja({50},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + pna_jja = pna_jja*-1. + pna_pc_jja = pna_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + pna_sst_jja = pna_sst_jja*-1. + end if + if (tasreg_plot_flag.eq.0) then + pna_tas_jja = pna_tas_jja*-1. + end if + if (prreg_plot_flag.eq.0) then + pna_pr_jja = pna_pr_jja*-1. + end if + end if + end if + if (.not.ismissing(npo_jja({65},{185}))) then + if (npo_jja({65},{185}).ge.0) then ; arbitrary attempt to make all plots have the same sign.. + npo_jja = npo_jja*-1. + npo_pc_jja = npo_pc_jja*-1. + if (sstreg_plot_flag.eq.0) then + npo_sst_jja = npo_sst_jja*-1. + end if + if (tasreg_plot_flag.eq.0) then + npo_tas_jja = npo_tas_jja*-1. + end if + if (prreg_plot_flag.eq.0) then + npo_pr_jja = npo_pr_jja*-1. 
+        end if
+      end if
+    end if
+    sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(pna_pc_jja),False)
+    if (sig_pcv(0)) then  ; if True then significant
+      pna_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+    else
+      pna_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+    end if
+    if (sig_pcv(1)) then  ; if True then significant
+      npo_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+    else
+      npo_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+    end if
+    delete(sig_pcv)
+    copy_VarCoords(npi_ndjfm,pna_pc_jja)
+    copy_VarCoords(npi_ndjfm,npo_pc_jja)
+    delete([/evecv,pcts/])
+
+    evecv = eofunc(arr_son_CW({lat|20:85},{lon|120:240},time|:),3,75)
+    pcts = eofunc_ts(arr_son_CW({lat|20:85},{lon|120:240},time|:),evecv,False)
+    pna_pc_son = dim_standardize(pcts(0,:),0)
+    npo_pc_son = dim_standardize(pcts(1,:),0)
+    pna_son = arr_son(0,:,:)
+    pna_son = (/ regCoef(pna_pc_son,arr_son(lat|:,lon|:,time|:)) /)
+    npo_son = arr_son(0,:,:)
+    npo_son = (/ regCoef(npo_pc_son,arr_son(lat|:,lon|:,time|:)) /)
+    if (sstreg_plot_flag.eq.0) then
+      pna_sst_son = sst_son(0,:,:)
+      pna_sst_son = (/ regCoef(pna_pc_son,sst_son(lat|:,lon|:,time|:)) /)
+      npo_sst_son = sst_son(0,:,:)
+      npo_sst_son = (/ regCoef(npo_pc_son,sst_son(lat|:,lon|:,time|:)) /)
+      delete(sst_son)
+    end if
+    if (tasreg_plot_flag.eq.0) then
+      pna_tas_son = tas_son(0,:,:)
+      pna_tas_son = (/ regCoef(pna_pc_son,tas_son(lat|:,lon|:,time|:)) /)
+      npo_tas_son = tas_son(0,:,:)
+      npo_tas_son = (/ regCoef(npo_pc_son,tas_son(lat|:,lon|:,time|:)) /)
+      delete(tas_son)
+    end if
+    if (prreg_plot_flag.eq.0) then
+      pna_pr_son = pr_son(0,:,:)
+      pna_pr_son = (/ regCoef(pna_pc_son,pr_son(lat|:,lon|:,time|:)) /)
+      npo_pr_son = pr_son(0,:,:)
+      npo_pr_son = (/ regCoef(npo_pc_son,pr_son(lat|:,lon|:,time|:)) /)
+      delete(pr_son)
+    end if
+
+    if (.not.ismissing(pna_son({50},{185}))) then
+      if (pna_son({50},{185}).ge.0) then  ; arbitrary attempt to make all plots have the same sign..
+        pna_son = pna_son*-1.
+        pna_pc_son = pna_pc_son*-1.
+        if (sstreg_plot_flag.eq.0) then
+          pna_sst_son = pna_sst_son*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          pna_tas_son = pna_tas_son*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          pna_pr_son = pna_pr_son*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(npo_son({65},{185}))) then
+      if (npo_son({65},{185}).ge.0) then  ; arbitrary attempt to make all plots have the same sign..
+        npo_son = npo_son*-1.
+        npo_pc_son = npo_pc_son*-1.
+        if (sstreg_plot_flag.eq.0) then
+          npo_sst_son = npo_sst_son*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          npo_tas_son = npo_tas_son*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          npo_pr_son = npo_pr_son*-1.
+        end if
+      end if
+    end if
+    sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(pna_pc_son),False)
+    if (sig_pcv(0)) then  ; if True then significant
+      pna_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+    else
+      pna_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+    end if
+    if (sig_pcv(1)) then  ; if True then significant
+      npo_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+    else
+      npo_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+    end if
+    delete(sig_pcv)
+    copy_VarCoords(npi_ndjfm,pna_pc_son)
+    copy_VarCoords(npi_ndjfm,npo_pc_son)
+    delete([/evecv,pcts/])
+
+    evecv = eofunc(arr_ann_CW({lat|20:85},{lon|120:240},time|:),3,75)
+    pcts = eofunc_ts(arr_ann_CW({lat|20:85},{lon|120:240},time|:),evecv,False)
+    pna_pc_ann = dim_standardize(pcts(0,:),0)
+    npo_pc_ann = dim_standardize(pcts(1,:),0)
+    pna_ann = arr_ann(0,:,:)
+    pna_ann = (/ regCoef(pna_pc_ann,arr_ann(lat|:,lon|:,time|:)) /)
+    npo_ann = arr_ann(0,:,:)
+    npo_ann = (/ regCoef(npo_pc_ann,arr_ann(lat|:,lon|:,time|:)) /)
+    if (sstreg_plot_flag.eq.0) then
+      pna_sst_ann = sst_ann(0,:,:)
+      pna_sst_ann = (/ regCoef(pna_pc_ann,sst_ann(lat|:,lon|:,time|:)) /)
+      npo_sst_ann = sst_ann(0,:,:)
+      npo_sst_ann = (/ regCoef(npo_pc_ann,sst_ann(lat|:,lon|:,time|:)) /)
+      delete(sst_ann)
+    end if
+    if (tasreg_plot_flag.eq.0) then
+      pna_tas_ann = tas_ann(0,:,:)
+      pna_tas_ann = (/ regCoef(pna_pc_ann,tas_ann(lat|:,lon|:,time|:)) /)
+      npo_tas_ann = tas_ann(0,:,:)
+      npo_tas_ann = (/ regCoef(npo_pc_ann,tas_ann(lat|:,lon|:,time|:)) /)
+      delete(tas_ann)
+    end if
+    if (prreg_plot_flag.eq.0) then
+      pna_pr_ann = pr_ann(0,:,:)
+      pna_pr_ann = (/ regCoef(pna_pc_ann,pr_ann(lat|:,lon|:,time|:)) /)
+      npo_pr_ann = pr_ann(0,:,:)
+      npo_pr_ann = (/ regCoef(npo_pc_ann,pr_ann(lat|:,lon|:,time|:)) /)
+      delete(pr_ann)
+    end if
+
+    if (.not.ismissing(pna_ann({50},{185}))) then
+      if (pna_ann({50},{185}).ge.0) then  ; arbitrary attempt to make all plots have the same sign..
+        pna_ann = pna_ann*-1.
+        pna_pc_ann = pna_pc_ann*-1.
+        if (sstreg_plot_flag.eq.0) then
+          pna_sst_ann = pna_sst_ann*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          pna_tas_ann = pna_tas_ann*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          pna_pr_ann = pna_pr_ann*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(npo_ann({65},{185}))) then
+      if (npo_ann({65},{185}).ge.0) then  ; arbitrary attempt to make all plots have the same sign..
+        npo_ann = npo_ann*-1.
+        npo_pc_ann = npo_pc_ann*-1.
+        if (sstreg_plot_flag.eq.0) then
+          npo_sst_ann = npo_sst_ann*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          npo_tas_ann = npo_tas_ann*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          npo_pr_ann = npo_pr_ann*-1.
+        end if
+      end if
+    end if
+    sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(pna_pc_ann),False)
+    if (sig_pcv(0)) then  ; if True then significant
+      pna_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+    else
+      pna_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+    end if
+    if (sig_pcv(1)) then  ; if True then significant
+      npo_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+    else
+      npo_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+    end if
+    delete(sig_pcv)
+    copy_VarCoords(npi_ndjfm,pna_pc_ann)
+    copy_VarCoords(npi_ndjfm,npo_pc_ann)
+    delete([/evecv,pcts/])
+
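+    ; Optionally repeat the EOF analysis on the unaveraged monthly anomalies;
+    ; the monthly PCs are given a true time coordinate below so that they can
+    ; be written out alongside the seasonal series.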
+    if (COMPUTE_MODES_MON.eq."True") then
+      evecv = eofunc(arr_mon_CW({lat|20:85},{lon|120:240},time|:),3,75)
+      pcts = eofunc_ts(arr_mon_CW({lat|20:85},{lon|120:240},time|:),evecv,False)
+      pna_pc_mon = dim_standardize(pcts(0,:),0)
+      npo_pc_mon = dim_standardize(pcts(1,:),0)
+      pna_mon = arr(0,:,:)
+      pna_mon = (/ regCoef(pna_pc_mon,arr(lat|:,lon|:,time|:)) /)
+      npo_mon = arr(0,:,:)
+      npo_mon = (/ regCoef(npo_pc_mon,arr(lat|:,lon|:,time|:)) /)
+      if (sstreg_plot_flag.eq.0) then
+        pna_sst_mon = sst(0,:,:)
+        pna_sst_mon = (/ regCoef(pna_pc_mon,sst(lat|:,lon|:,time|:)) /)
+        npo_sst_mon = sst(0,:,:)
+        npo_sst_mon = (/ regCoef(npo_pc_mon,sst(lat|:,lon|:,time|:)) /)
+        delete(sst)
+      end if
+      if (tasreg_plot_flag.eq.0) then
+        pna_tas_mon = tas(0,:,:)
+        pna_tas_mon = (/ regCoef(pna_pc_mon,tas(lat|:,lon|:,time|:)) /)
+        npo_tas_mon = tas(0,:,:)
+        npo_tas_mon = (/ regCoef(npo_pc_mon,tas(lat|:,lon|:,time|:)) /)
+        delete(tas)
+      end if
+      if (prreg_plot_flag.eq.0) then
+        pna_pr_mon = pr(0,:,:)
+        pna_pr_mon = (/ regCoef(pna_pc_mon,pr(lat|:,lon|:,time|:)) /)
+        npo_pr_mon = pr(0,:,:)
+        npo_pr_mon = (/ regCoef(npo_pc_mon,pr(lat|:,lon|:,time|:)) /)
+        delete(pr)
+      end if
+      if (.not.ismissing(pna_mon({50},{185}))) then
+        if (pna_mon({50},{185}).ge.0) then  ; arbitrary attempt to make all plots have the same sign..
+          pna_mon = pna_mon*-1.
+          pna_pc_mon = pna_pc_mon*-1.
+          if (sstreg_plot_flag.eq.0) then
+            pna_sst_mon = pna_sst_mon*-1.
+          end if
+          if (tasreg_plot_flag.eq.0) then
+            pna_tas_mon = pna_tas_mon*-1.
+          end if
+          if (prreg_plot_flag.eq.0) then
+            pna_pr_mon = pna_pr_mon*-1.
+          end if
+        end if
+      end if
+      if (.not.ismissing(npo_mon({65},{185}))) then
+        if (npo_mon({65},{185}).ge.0) then  ; arbitrary attempt to make all plots have the same sign..
+          npo_mon = npo_mon*-1.
+          npo_pc_mon = npo_pc_mon*-1.
+          if (sstreg_plot_flag.eq.0) then
+            npo_sst_mon = npo_sst_mon*-1.
+          end if
+          if (tasreg_plot_flag.eq.0) then
+            npo_tas_mon = npo_tas_mon*-1.
+          end if
+          if (prreg_plot_flag.eq.0) then
+            npo_pr_mon = npo_pr_mon*-1.
+          end if
+        end if
+      end if
+      sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(pna_pc_mon),False)
+      if (sig_pcv(0)) then  ; if True then significant
+        pna_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+      else
+        pna_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+      end if
+      if (sig_pcv(1)) then  ; if True then significant
+        npo_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+      else
+        npo_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+      end if
+      delete(sig_pcv)
+      pna_pc_mon!0 = "time"
+      pna_pc_mon&time = arr&time
+      npo_pc_mon!0 = "time"
+      npo_pc_mon&time = arr&time
+      delete([/evecv,pcts,arr,arr_mon_CW/])
+    end if
+    delete([/arr_djf_CW,arr_mam_CW,arr_jja_CW,arr_son_CW,arr_ann_CW/])
+    delete([/arr_djf,arr_mam,arr_jja,arr_son,arr_ann/])
+;------------------------------------------------------------------------------------------------------
+    if (sstreg_frame.eq.1.and.sstreg_plot_flag.eq.0) then  ; sstreg_frame = flag to create regressions .ps/.png files
+      sstreg_frame = 0
+    end if
+    if (tasreg_frame.eq.1.and.tasreg_plot_flag.eq.0) then  ; tasreg_frame = flag to create regressions .ps/.png files
+      tasreg_frame = 0
+    end if
+    if (prreg_frame.eq.1.and.prreg_plot_flag.eq.0) then  ; prreg_frame = flag to create regressions .ps/.png files
+      prreg_frame = 0
+    end if
+;------------------------------------------------------------------------------------------------------
+    if (OUTPUT_DATA.eq."True") then
+      modname = str_sub_str(names(ee)," ","_")
+      bc = (/"/","'","(",")"/)
+      do gg = 0,dimsizes(bc)-1
+        modname = str_sub_str(modname,bc(gg),"_")
+      end do
+      fn = getenv("OUTDIR")+modname+".cvdp_data.psl.pna_npo."+syear(ee)+"-"+eyear(ee)+".nc"
+      if (.not.isfilepresent2(fn)) then
+        z = addfile(fn,"c")
+        z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+        z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee)
+        if (OPT_CLIMO.eq."Full") then
+          z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)"
+        else
+          if (CLIMO_SYEAR.lt.0) then
+            z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+          else
+            z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+          end if
+        end if
+        z@Conventions = "CF-1.6"
+      else
+        z = addfile(fn,"w")
+      end if
+      z->npi_ndjfm = npi_ndjfm
+
+      z->pna_timeseries_djf = set_varAtts(pna_pc_djf,"PNA normalized principal component timeseries (DJF)","1","")
+      z->pna_timeseries_mam = set_varAtts(pna_pc_mam,"PNA normalized principal component timeseries (MAM)","1","")
+      z->pna_timeseries_jja = set_varAtts(pna_pc_jja,"PNA normalized principal component timeseries (JJA)","1","")
+      z->pna_timeseries_son = set_varAtts(pna_pc_son,"PNA normalized principal component timeseries (SON)","1","")
+      z->pna_timeseries_ann = set_varAtts(pna_pc_ann,"PNA normalized principal component timeseries (annual)","1","")
+
+      z->npo_timeseries_djf = set_varAtts(npo_pc_djf,"NPO normalized principal component timeseries (DJF)","1","")
+      z->npo_timeseries_mam = set_varAtts(npo_pc_mam,"NPO normalized principal component timeseries (MAM)","1","")
+      z->npo_timeseries_jja = set_varAtts(npo_pc_jja,"NPO normalized principal component timeseries (JJA)","1","")
+      z->npo_timeseries_son = set_varAtts(npo_pc_son,"NPO normalized principal component timeseries (SON)","1","")
+      z->npo_timeseries_ann = set_varAtts(npo_pc_ann,"NPO normalized principal component timeseries (annual)","1","")
+
+      z->pna_pattern_djf = set_varAtts(pna_djf,"PNA spatial pattern (DJF)","","")
+      z->pna_pattern_mam = set_varAtts(pna_mam,"PNA spatial pattern (MAM)","","")
+      z->pna_pattern_jja = set_varAtts(pna_jja,"PNA spatial pattern (JJA)","","")
+      z->pna_pattern_son = set_varAtts(pna_son,"PNA spatial pattern (SON)","","")
+      z->pna_pattern_ann = set_varAtts(pna_ann,"PNA spatial pattern (annual)","","")
+
+      z->npo_pattern_djf = set_varAtts(npo_djf,"NPO spatial pattern (DJF)","","")
+      z->npo_pattern_mam = set_varAtts(npo_mam,"NPO spatial pattern (MAM)","","")
+      z->npo_pattern_jja = set_varAtts(npo_jja,"NPO spatial pattern (JJA)","","")
+      z->npo_pattern_son = set_varAtts(npo_son,"NPO spatial pattern (SON)","","")
+      z->npo_pattern_ann = set_varAtts(npo_ann,"NPO spatial pattern (annual)","","")
+
+      if (COMPUTE_MODES_MON.eq."True") then
+        z->pna_timeseries_mon = set_varAtts(pna_pc_mon,"PNA principal component timeseries (monthly)","","")
+        z->npo_timeseries_mon = set_varAtts(npo_pc_mon,"NPO principal component timeseries (monthly)","","")
+        z->pna_pattern_mon = set_varAtts(pna_mon,"PNA spatial pattern (monthly)","","")
+        z->npo_pattern_mon = set_varAtts(npo_mon,"NPO spatial pattern (monthly)","","")
+      end if
+      delete(z)
+      delete([/modname,fn/])
+
+      if (sstreg_plot_flag.eq.0) then
+        modname = str_sub_str(names_ts(ee)," ","_")
+        bc = (/"/","'","(",")"/)
+        do gg = 0,dimsizes(bc)-1
+          modname = str_sub_str(modname,bc(gg),"_")
+        end do
+        fn = getenv("OUTDIR")+modname+".cvdp_data.psl.pna_npo.ts."+syear_ts(ee)+"-"+eyear_ts(ee)+".nc"
+        if (.not.isfilepresent2(fn)) then
+          z = addfile(fn,"c")
+          z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+          z@notes = "Data from "+names_ts(ee)+" from "+syear_ts(ee)+"-"+eyear_ts(ee)
+          if (OPT_CLIMO.eq."Full") then
+            z@climatology = syear_ts(ee)+"-"+eyear_ts(ee)+" climatology removed prior to all calculations (other than means)"
+          else
+            if (CLIMO_SYEAR.lt.0) then
+              z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+            else
+              z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+            end if
+          end if
+          z@Conventions = "CF-1.6"
+        else
+          z = addfile(fn,"w")
+        end if
+        z->pna_sst_regression_djf = set_varAtts(pna_sst_djf,"sst regression onto PNA principal component timeseries (DJF)","","")
+        z->pna_sst_regression_mam = set_varAtts(pna_sst_mam,"sst regression onto PNA principal component timeseries (MAM)","","")
+        z->pna_sst_regression_jja = set_varAtts(pna_sst_jja,"sst regression onto PNA principal component timeseries (JJA)","","")
+        z->pna_sst_regression_son = set_varAtts(pna_sst_son,"sst regression onto PNA principal component timeseries (SON)","","")
+        z->pna_sst_regression_ann = set_varAtts(pna_sst_ann,"sst regression onto PNA principal component timeseries (annual)","","")
+
+        z->npo_sst_regression_djf = set_varAtts(npo_sst_djf,"sst regression onto NPO principal component timeseries (DJF)","","")
+        z->npo_sst_regression_mam = set_varAtts(npo_sst_mam,"sst regression onto NPO principal component timeseries (MAM)","","")
+        z->npo_sst_regression_jja = set_varAtts(npo_sst_jja,"sst regression onto NPO principal component timeseries (JJA)","","")
+        z->npo_sst_regression_son = set_varAtts(npo_sst_son,"sst regression onto NPO principal component timeseries (SON)","","")
+        z->npo_sst_regression_ann = set_varAtts(npo_sst_ann,"sst regression onto NPO principal component timeseries (annual)","","")
+        if (COMPUTE_MODES_MON.eq."True") then
+          z->pna_sst_regression_mon = set_varAtts(pna_sst_mon,"sst regression onto PNA principal component timeseries (monthly)","","")
+          z->npo_sst_regression_mon = set_varAtts(npo_sst_mon,"sst regression onto NPO principal component timeseries (monthly)","","")
+        end if
+        delete(z)
+        delete([/modname,fn/])
+      end if
+      if (tasreg_plot_flag.eq.0) then
+        modname = str_sub_str(names_tas(ee)," ","_")
+        bc = (/"/","'","(",")"/)
+        do gg = 0,dimsizes(bc)-1
+          modname = str_sub_str(modname,bc(gg),"_")
+        end do
+        fn = getenv("OUTDIR")+modname+".cvdp_data.psl.pna_npo.tas."+syear_tas(ee)+"-"+eyear_tas(ee)+".nc"
+        if (.not.isfilepresent2(fn)) then
+          z = addfile(fn,"c")
+          z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+          z@notes = "Data from "+names_tas(ee)+" from "+syear_tas(ee)+"-"+eyear_tas(ee)
+          if (OPT_CLIMO.eq."Full") then
+            z@climatology = syear_tas(ee)+"-"+eyear_tas(ee)+" climatology removed prior to all calculations (other than means)"
+          else
+            if (CLIMO_SYEAR.lt.0) then
+              z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+            else
+              z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+            end if
+          end if
+          z@Conventions = "CF-1.6"
+        else
+          z = addfile(fn,"w")
+        end if
+        z->pna_tas_regression_djf = set_varAtts(pna_tas_djf,"tas regression onto PNA principal component timeseries (DJF)","","")
+        z->pna_tas_regression_mam = set_varAtts(pna_tas_mam,"tas regression onto PNA principal component timeseries (MAM)","","")
+        z->pna_tas_regression_jja = set_varAtts(pna_tas_jja,"tas regression onto PNA principal component timeseries (JJA)","","")
+        z->pna_tas_regression_son = set_varAtts(pna_tas_son,"tas regression onto PNA principal component timeseries (SON)","","")
+        z->pna_tas_regression_ann = set_varAtts(pna_tas_ann,"tas regression onto PNA principal component timeseries (annual)","","")
+
+        z->npo_tas_regression_djf = set_varAtts(npo_tas_djf,"tas regression onto NPO principal component timeseries (DJF)","","")
+        z->npo_tas_regression_mam = set_varAtts(npo_tas_mam,"tas regression onto NPO principal component timeseries (MAM)","","")
+        z->npo_tas_regression_jja = set_varAtts(npo_tas_jja,"tas regression onto NPO principal component timeseries (JJA)","","")
+        z->npo_tas_regression_son = set_varAtts(npo_tas_son,"tas regression onto NPO principal component timeseries (SON)","","")
+        z->npo_tas_regression_ann = set_varAtts(npo_tas_ann,"tas regression onto NPO principal component timeseries (annual)","","")
+        if (COMPUTE_MODES_MON.eq."True") then
+          z->pna_tas_regression_mon = set_varAtts(pna_tas_mon,"tas regression onto PNA principal component timeseries (monthly)","","")
+          z->npo_tas_regression_mon = set_varAtts(npo_tas_mon,"tas regression onto NPO principal component timeseries (monthly)","","")
+        end if
+        delete(z)
+        delete([/modname,fn/])
+      end if
+      if (prreg_plot_flag.eq.0) then
+        modname = str_sub_str(names_pr(ee)," ","_")
+        bc = (/"/","'","(",")"/)
+        do gg = 0,dimsizes(bc)-1
+          modname = str_sub_str(modname,bc(gg),"_")
+        end do
+        fn = getenv("OUTDIR")+modname+".cvdp_data.psl.pna_npo.pr."+syear_pr(ee)+"-"+eyear_pr(ee)+".nc"
+        if (.not.isfilepresent2(fn)) then
+          z = addfile(fn,"c")
+          z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+          z@notes = "Data from "+names_pr(ee)+" from "+syear_pr(ee)+"-"+eyear_pr(ee)
+          if (OPT_CLIMO.eq."Full") then
+            z@climatology = syear_pr(ee)+"-"+eyear_pr(ee)+" climatology removed prior to all calculations (other than means)"
+          else
+            if (CLIMO_SYEAR.lt.0) then  ; handle negative offsets as in the psl/ts/tas files above
+              z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+            else
+              z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+            end if
+          end if
+          z@Conventions = "CF-1.6"
+        else
+          z = addfile(fn,"w")
+        end if
+        z->pna_pr_regression_djf = set_varAtts(pna_pr_djf,"pr regression onto PNA principal component timeseries (DJF)","","")
+        z->pna_pr_regression_mam = set_varAtts(pna_pr_mam,"pr regression onto PNA principal component timeseries (MAM)","","")
+        z->pna_pr_regression_jja = set_varAtts(pna_pr_jja,"pr regression onto PNA principal component timeseries (JJA)","","")
+        z->pna_pr_regression_son = set_varAtts(pna_pr_son,"pr regression onto PNA principal component timeseries (SON)","","")
+        z->pna_pr_regression_ann = set_varAtts(pna_pr_ann,"pr regression onto PNA principal component timeseries (annual)","","")
+
+        z->npo_pr_regression_djf = set_varAtts(npo_pr_djf,"pr regression onto NPO principal component timeseries (DJF)","","")
+        z->npo_pr_regression_mam = set_varAtts(npo_pr_mam,"pr regression onto NPO principal component timeseries (MAM)","","")
+        z->npo_pr_regression_jja = set_varAtts(npo_pr_jja,"pr regression onto NPO principal component timeseries (JJA)","","")
+        z->npo_pr_regression_son = set_varAtts(npo_pr_son,"pr regression onto NPO principal component timeseries (SON)","","")
+        z->npo_pr_regression_ann = set_varAtts(npo_pr_ann,"pr regression onto NPO principal component timeseries (annual)","","")
+        if (COMPUTE_MODES_MON.eq."True") then
+          z->pna_pr_regression_mon = set_varAtts(pna_pr_mon,"pr regression onto PNA principal component timeseries (monthly)","","")
+          z->npo_pr_regression_mon = set_varAtts(npo_pr_mon,"pr regression onto NPO principal component timeseries (monthly)","","")
+        end if
+        delete(z)
+        delete([/modname,fn/])
+      end if
+    end if
+;========================================================================
+    res = True
+    res@mpGeophysicalLineColor = "gray42"
+    res@mpGeophysicalLineThicknessF = 2.
+    res@mpGridAndLimbOn = False
+    res@mpFillOn = False
+    res@mpOutlineOn = True
+    res@gsnDraw = False
+    res@gsnFrame = False
+    res@cnLevelSelectionMode = "ExplicitLevels"
+    res@cnLineLabelsOn = False
+    res@cnFillOn = True
+    res@cnLinesOn = False
+    res@lbLabelBarOn = False
+
+    res@gsnLeftStringOrthogonalPosF = -0.03
+    res@gsnLeftStringParallelPosF = .005
+    res@gsnRightStringOrthogonalPosF = -0.03
+    res@gsnRightStringParallelPosF = 0.96
+    res@gsnRightString = ""
+    res@gsnLeftString = ""
+    if (nsim.le.5) then
+      res@gsnLeftStringFontHeightF = 0.018
+      res@gsnCenterStringFontHeightF = 0.022
+      res@gsnRightStringFontHeightF = 0.018
+    else
+      res@gsnLeftStringFontHeightF = 0.024
+      res@gsnCenterStringFontHeightF = 0.028
+      res@gsnRightStringFontHeightF = 0.024
+    end if
+    res@gsnPolar = "NH"
+    res@mpMinLatF = 20.
+    res@mpCenterLonF = 0.
+
+    res@cnLevels = (/-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7./)
+    res@gsnLeftString = syear(ee)+"-"+eyear(ee)
+    res@gsnCenterString = names(ee)
+
+    res4 = res  ; res4 = pr regression resources
+    if (COLORMAP.eq.0) then
+      res4@cnLevels := fspan(-.7,.7,15)
+    else
+      res4@cnLevels := fspan(-.5,.5,11)
+    end if
+
+    res2 = True
+    res2@gsnDraw = False
+    res2@gsnFrame = False
+    res2@cnLevelSelectionMode = "ExplicitLevels"
+    res2@cnLevels = res@cnLevels
+
+    res2@cnLineLabelsOn = False
+    res2@cnFillOn = True
+    res2@cnLinesOn = False
+    res2@cnFillMode = "AreaFill"
+    res2@lbLabelBarOn = False
+    res2@cnInfoLabelOn = False
+    res2@gsnRightString = ""
+    res2@gsnLeftString = ""
+    res2@gsnCenterString = ""
+    res2@gsnAddCyclic = True
+
+    res@gsnRightString = pna_djf@pcvar
+    map_pna_djf(ee) = gsn_csm_contour_map_polar(wks_pna,pna_djf,res)
+    res@gsnRightString = pna_mam@pcvar
+    map_pna_mam(ee) = gsn_csm_contour_map_polar(wks_pna,pna_mam,res)
+    res@gsnRightString = pna_jja@pcvar
+    map_pna_jja(ee) = gsn_csm_contour_map_polar(wks_pna,pna_jja,res)
+    res@gsnRightString = pna_son@pcvar
+    map_pna_son(ee) = gsn_csm_contour_map_polar(wks_pna,pna_son,res)
+    res@gsnRightString = pna_ann@pcvar
+    map_pna_ann(ee) = gsn_csm_contour_map_polar(wks_pna,pna_ann,res)
+    delete([/pna_djf,pna_mam,pna_jja,pna_son,pna_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      res@gsnRightString = pna_mon@pcvar
+      map_pna_mon(ee) = gsn_csm_contour_map_polar(wks_pna,pna_mon,res)
+      delete([/pna_mon/])
+    end if
+
+    res@cnLevels = (/-4,-3,-2.5,-2,-1.5,-1,-0.5,0,0.5,1,1.5,2,2.5,3,4/)
+    res@gsnRightString = npo_djf@pcvar
+    map_npo_djf(ee) = gsn_csm_contour_map_polar(wks_npo,npo_djf,res)
+    res@gsnRightString = npo_mam@pcvar
+    map_npo_mam(ee) = gsn_csm_contour_map_polar(wks_npo,npo_mam,res)
+    res@gsnRightString = npo_jja@pcvar
+    map_npo_jja(ee) = gsn_csm_contour_map_polar(wks_npo,npo_jja,res)
+    res@gsnRightString = npo_son@pcvar
+    map_npo_son(ee) = gsn_csm_contour_map_polar(wks_npo,npo_son,res)
+    res@gsnRightString = npo_ann@pcvar
+    map_npo_ann(ee) = gsn_csm_contour_map_polar(wks_npo,npo_ann,res)
+    delete([/npo_djf,npo_mam,npo_jja,npo_son,npo_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      res@gsnRightString = npo_mon@pcvar
+      map_npo_mon(ee) = gsn_csm_contour_map_polar(wks_npo,npo_mon,res)
+      delete([/npo_mon/])
+    end if
+
+    if (sstreg_plot_flag.eq.0) then
+      res@cnLevels := fspan(-.7,.7,15)
+      if (tasreg_plot_flag.eq.0) then
+        if (names_ts(ee).eq.names_tas(ee)) then
+          res@gsnCenterString = names_ts(ee)
+        else
+          res@gsnCenterString = names_ts(ee)+" / "+names_tas(ee)
+        end if
+      else
+        res@gsnCenterString = names_ts(ee)
+      end if
+      res@gsnRightString = ""
+      reg_pna_djf(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_djf,res)
+      reg_pna_mam(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_mam,res)
+      reg_pna_jja(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_jja,res)
+      reg_pna_son(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_son,res)
+      reg_pna_ann(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_ann,res)
+      delete([/pna_sst_djf,pna_sst_mam,pna_sst_jja,pna_sst_son,pna_sst_ann/])
+      if (tasreg_plot_flag.eq.0) then
+        o_djf = gsn_csm_contour(wks_pna,pna_tas_djf,res2)
+        o_mam = gsn_csm_contour(wks_pna,pna_tas_mam,res2)
+        o_jja = gsn_csm_contour(wks_pna,pna_tas_jja,res2)
+        o_son = gsn_csm_contour(wks_pna,pna_tas_son,res2)
+        o_ann = gsn_csm_contour(wks_pna,pna_tas_ann,res2)
+        delete([/pna_tas_djf,pna_tas_mam,pna_tas_jja,pna_tas_son,pna_tas_ann/])
+        overlay(reg_pna_djf(ee),o_djf)
+        overlay(reg_pna_mam(ee),o_mam)
+        overlay(reg_pna_jja(ee),o_jja)
+        overlay(reg_pna_son(ee),o_son)
+        overlay(reg_pna_ann(ee),o_ann)
+        delete([/o_djf,o_mam,o_jja,o_son,o_ann/])
+      end if
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_pna_mon(ee) = gsn_csm_contour_map_polar(wks_pna,pna_sst_mon,res)
+        delete([/pna_sst_mon/])
+        if (tasreg_plot_flag.eq.0) then
+          o_mon = gsn_csm_contour(wks_pna,pna_tas_mon,res2)
+          overlay(reg_pna_mon(ee),o_mon)
+          delete([/o_mon,pna_tas_mon/])
+        end if
+      end if
+
+      reg_npo_djf(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_djf,res)
+      reg_npo_mam(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_mam,res)
+      reg_npo_jja(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_jja,res)
+      reg_npo_son(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_son,res)
+      reg_npo_ann(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_ann,res)
+      delete([/npo_sst_djf,npo_sst_mam,npo_sst_jja,npo_sst_son,npo_sst_ann/])
+      if (tasreg_plot_flag.eq.0) then
+        o_djf = gsn_csm_contour(wks_npo,npo_tas_djf,res2)
+        o_mam = gsn_csm_contour(wks_npo,npo_tas_mam,res2)
+        o_jja = gsn_csm_contour(wks_npo,npo_tas_jja,res2)
+        o_son = gsn_csm_contour(wks_npo,npo_tas_son,res2)
+        o_ann = gsn_csm_contour(wks_npo,npo_tas_ann,res2)
+        delete([/npo_tas_djf,npo_tas_mam,npo_tas_jja,npo_tas_son,npo_tas_ann/])
+        overlay(reg_npo_djf(ee),o_djf)
+        overlay(reg_npo_mam(ee),o_mam)
+        overlay(reg_npo_jja(ee),o_jja)
+        overlay(reg_npo_son(ee),o_son)
+        overlay(reg_npo_ann(ee),o_ann)
+        delete([/o_djf,o_mam,o_jja,o_son,o_ann/])
+      end if
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_npo_mon(ee) = gsn_csm_contour_map_polar(wks_npo,npo_sst_mon,res)
+        delete([/npo_sst_mon/])
+        if (tasreg_plot_flag.eq.0) then
+          o_mon = gsn_csm_contour(wks_npo,npo_tas_mon,res2)
+          overlay(reg_npo_mon(ee),o_mon)
+          delete([/o_mon,npo_tas_mon/])
+        end if
+      end if
+    end if
+
+    if (prreg_plot_flag.eq.0) then  ; PR regressions
+      res4@gsnRightString = ""
+      res4@gsnCenterString = names_pr(ee)
+      reg_pna_pr_djf(ee) = gsn_csm_contour_map_polar(wks_pna_pr,pna_pr_djf,res4)
+      reg_pna_pr_mam(ee) = gsn_csm_contour_map_polar(wks_pna_pr,pna_pr_mam,res4)
+      reg_pna_pr_jja(ee) = gsn_csm_contour_map_polar(wks_pna_pr,pna_pr_jja,res4)
+      reg_pna_pr_son(ee) = gsn_csm_contour_map_polar(wks_pna_pr,pna_pr_son,res4)
+      reg_pna_pr_ann(ee) = gsn_csm_contour_map_polar(wks_pna_pr,pna_pr_ann,res4)
+      delete([/pna_pr_djf,pna_pr_mam,pna_pr_jja,pna_pr_son,pna_pr_ann/])
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_pna_pr_mon(ee) = gsn_csm_contour_map_polar(wks_pna_pr,pna_pr_mon,res4)
+        delete([/pna_pr_mon/])
+      end if
+
+      reg_npo_pr_djf(ee) = gsn_csm_contour_map_polar(wks_npo_pr,npo_pr_djf,res4)
+      reg_npo_pr_mam(ee) = gsn_csm_contour_map_polar(wks_npo_pr,npo_pr_mam,res4)
+      reg_npo_pr_jja(ee) = gsn_csm_contour_map_polar(wks_npo_pr,npo_pr_jja,res4)
+      reg_npo_pr_son(ee) = gsn_csm_contour_map_polar(wks_npo_pr,npo_pr_son,res4)
+      reg_npo_pr_ann(ee) = gsn_csm_contour_map_polar(wks_npo_pr,npo_pr_ann,res4)
+      delete([/npo_pr_djf,npo_pr_mam,npo_pr_jja,npo_pr_son,npo_pr_ann/])
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_npo_pr_mon(ee) = gsn_csm_contour_map_polar(wks_npo_pr,npo_pr_mon,res4)
+        delete([/npo_pr_mon/])
+      end if
+    end if
+
+    xyres = True
+    xyres@gsnDraw = False
+    xyres@gsnFrame = False
+    xyres@gsnXYBarChart = False
+    xyres@gsnYRefLine = 0.0
+    xyres@gsnYRefLineColor = "gray42"
+    xyres@gsnAboveYRefLineColor = 185
+    xyres@gsnBelowYRefLineColor = 35
+    if (wks_type.eq."png") then
+      xyres@xyLineThicknessF = .5
+    else
+      xyres@xyLineThicknessF = .2
+    end if
+    xyres@xyLineColor = "gray52"
+    xyres@tiYAxisString = ""
+    xyres@tiXAxisString = ""
+    if (nsim.le.5) then
+      xyres@tmXBLabelFontHeightF = 0.0125
+      xyres@tmYLLabelFontHeightF = 0.0125
+      xyres@gsnStringFontHeightF = 0.017
+    else
+      xyres@tmXBLabelFontHeightF = 0.018
+      xyres@tmYLLabelFontHeightF = 0.018
+      xyres@gsnStringFontHeightF = 0.024
+    end if
+    xyres@gsnCenterStringOrthogonalPosF = 0.025
+    xyres@vpXF = 0.05
+    xyres@vpHeightF = 0.15
+    if (SCALE_TIMESERIES.eq."True") then
+      xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max)
+    else
+      xyres@vpWidthF = 0.9
+    end if
+    xyres@gsnLeftString = ""
+    xyres@gsnRightString = ""
+    xyres@trXMinF = syear(ee)-.5
+    xyres@trXMaxF = eyear(ee)+1.5
+
+    xyres@gsnCenterString = names(ee)
+
+    xyresmon = xyres
+    xyresmon@gsnXYBarChart = False
+    xyresmon@xyLineThicknessF = .1
+
+    xy_pna_djf(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_djf)),pna_pc_djf,xyres)  ; use standardized timeseries
+    xy_pna_mam(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_mam)),pna_pc_mam,xyres)  ; use standardized timeseries
+    xy_pna_jja(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_jja)),pna_pc_jja,xyres)  ; use standardized timeseries
+    xy_pna_son(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_son)),pna_pc_son,xyres)  ; use standardized timeseries
+    xy_pna_ann(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee),dimsizes(pna_pc_ann)),pna_pc_ann,xyres)  ; use standardized timeseries
+    delete([/pna_pc_djf,pna_pc_mam,pna_pc_jja,pna_pc_son,pna_pc_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      xy_pna_mon(ee) = gsn_csm_xy(wks_pna_ts,fspan(syear(ee),eyear(ee)+.91667,dimsizes(pna_pc_mon)),pna_pc_mon,xyresmon)  ; use standardized timeseries
+      delete([/pna_pc_mon/])
+    end if
+
+    xy_npo_djf(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_djf)),npo_pc_djf,xyres)  ; use standardized timeseries
+    xy_npo_mam(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_mam)),npo_pc_mam,xyres)  ; use standardized timeseries
+    xy_npo_jja(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_jja)),npo_pc_jja,xyres)  ; use standardized timeseries
+    xy_npo_son(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_son)),npo_pc_son,xyres)  ; use standardized timeseries
+    xy_npo_ann(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee),dimsizes(npo_pc_ann)),npo_pc_ann,xyres)  ; use standardized timeseries
+    delete([/npo_pc_djf,npo_pc_mam,npo_pc_jja,npo_pc_son,npo_pc_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      xy_npo_mon(ee) = gsn_csm_xy(wks_npo_ts,fspan(syear(ee),eyear(ee)+.91667,dimsizes(npo_pc_mon)),npo_pc_mon,xyresmon)  ; use standardized timeseries
+      delete([/npo_pc_mon/])
+    end if
+
+    xy_npi(ee) = gsn_csm_xy(wks_npi_ts,fspan(syear(ee),eyear(ee),dimsizes(npi_ndjfm)),npi_ndjfm,xyres)  ; NPI goes into its own wks_npi_ts workstation
+    delete(npi_ndjfm)
+    delete(sstreg_plot_flag)
+  end do
+
+  if (isvar("clim_syear")) then
+    delete(clim_syear)
+  end if
+  if (isvar("clim_eyear")) then
+    delete(clim_eyear)
+  end if
+
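+  ; All simulations processed: assemble the stored plots into multi-panel
+  ; pages, one per index, season, and regression field.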
+  panres = True
+  panres@gsnMaximize = True
+  panres@gsnPaperOrientation = "portrait"
+  panres@gsnPanelLabelBar = True
+  panres@gsnPanelYWhiteSpacePercent = 3.0
+  panres@pmLabelBarHeightF = 0.05
+  panres@pmLabelBarWidthF = 0.55
+  panres@lbTitleOn = False
+  panres@lbBoxLineColor = "gray70"
+  if (nsim.le.4) then
+    if (nsim.eq.1) then
+      panres@txFontHeightF = 0.022
+      panres@gsnPanelBottom = 0.50
+    else
+      panres@txFontHeightF = 0.0145
+      panres@gsnPanelBottom = 0.50
+    end if
+  else
+    panres@txFontHeightF = 0.016
+    panres@gsnPanelBottom = 0.05
+  end if
+  ncol = floattointeger(sqrt(nsim))
+  nrow = (nsim/ncol)+mod(nsim,ncol)
+
+  panres@txString = "PNA (DJF)"
+  gsn_panel2(wks_pna,map_pna_djf,(/nrow,ncol/),panres)
+  delete(map_pna_djf)
+  panres@txString = "PNA (MAM)"
+  gsn_panel2(wks_pna,map_pna_mam,(/nrow,ncol/),panres)
+  delete(map_pna_mam)
+  panres@txString = "PNA (JJA)"
+  gsn_panel2(wks_pna,map_pna_jja,(/nrow,ncol/),panres)
+  delete(map_pna_jja)
+  panres@txString = "PNA (SON)"
+  gsn_panel2(wks_pna,map_pna_son,(/nrow,ncol/),panres)
+  delete(map_pna_son)
+  panres@txString = "PNA (Annual)"
+  gsn_panel2(wks_pna,map_pna_ann,(/nrow,ncol/),panres)
+  delete(map_pna_ann)
+  if (COMPUTE_MODES_MON.eq."True") then
+    panres@txString = "PNA (Monthly)"
+    gsn_panel2(wks_pna,map_pna_mon,(/nrow,ncol/),panres)
+    delete(map_pna_mon)
+  end if
+
+  if (sstreg_frame.eq.0) then
+    if (tasreg_frame.eq.0) then
+      txt0 = "SST/TAS"
+    else
+      txt0 = "SST"
+    end if
+    panres@txString = "PNA "+txt0+" Regressions (DJF)"
+    gsn_panel2(wks_pna,reg_pna_djf,(/nrow,ncol/),panres)
+    delete(reg_pna_djf)
+    panres@txString = "PNA "+txt0+" Regressions (MAM)"
+    gsn_panel2(wks_pna,reg_pna_mam,(/nrow,ncol/),panres)
+    delete(reg_pna_mam)
+    panres@txString = "PNA "+txt0+" Regressions (JJA)"
+    gsn_panel2(wks_pna,reg_pna_jja,(/nrow,ncol/),panres)
+    delete(reg_pna_jja)
+    panres@txString = "PNA "+txt0+" Regressions (SON)"
+    gsn_panel2(wks_pna,reg_pna_son,(/nrow,ncol/),panres)
+    delete(reg_pna_son)
+    panres@txString = "PNA "+txt0+" Regressions (Annual)"
+    gsn_panel2(wks_pna,reg_pna_ann,(/nrow,ncol/),panres)
+    delete(reg_pna_ann)
+    if (COMPUTE_MODES_MON.eq."True") then
+      panres@txString = "PNA "+txt0+" Regressions (Monthly)"
+      gsn_panel2(wks_pna,reg_pna_mon,(/nrow,ncol/),panres)
+      delete(reg_pna_mon)
+    end if
+    delete(wks_pna)
+  end if
+  if (prreg_frame.eq.0) then
+    panres@txString = "PNA PR Regressions (DJF)"
+    gsn_panel2(wks_pna_pr,reg_pna_pr_djf,(/nrow,ncol/),panres)
+    delete(reg_pna_pr_djf)
+    panres@txString = "PNA PR Regressions (MAM)"
+    gsn_panel2(wks_pna_pr,reg_pna_pr_mam,(/nrow,ncol/),panres)
+    delete(reg_pna_pr_mam)
+    panres@txString = "PNA PR Regressions (JJA)"
+    gsn_panel2(wks_pna_pr,reg_pna_pr_jja,(/nrow,ncol/),panres)
+    delete(reg_pna_pr_jja)
+    panres@txString = "PNA PR Regressions (SON)"
+    gsn_panel2(wks_pna_pr,reg_pna_pr_son,(/nrow,ncol/),panres)
+    delete(reg_pna_pr_son)
+    panres@txString = "PNA PR Regressions (Annual)"
+    gsn_panel2(wks_pna_pr,reg_pna_pr_ann,(/nrow,ncol/),panres)
+    delete(reg_pna_pr_ann)
+    if (COMPUTE_MODES_MON.eq."True") then
+      panres@txString = "PNA PR Regressions (Monthly)"
+      gsn_panel2(wks_pna_pr,reg_pna_pr_mon,(/nrow,ncol/),panres)
+      delete(reg_pna_pr_mon)
+    end if
+    delete(wks_pna_pr)
+  end if
+
+  panres@txString = "NPO (DJF)"
+  gsn_panel2(wks_npo,map_npo_djf,(/nrow,ncol/),panres)
+  delete(map_npo_djf)
+  panres@txString = "NPO (MAM)"
+  gsn_panel2(wks_npo,map_npo_mam,(/nrow,ncol/),panres)
+  delete(map_npo_mam)
+  panres@txString = "NPO (JJA)"
+  gsn_panel2(wks_npo,map_npo_jja,(/nrow,ncol/),panres)
+  delete(map_npo_jja)
+  panres@txString = "NPO (SON)"
+  gsn_panel2(wks_npo,map_npo_son,(/nrow,ncol/),panres)
+  delete(map_npo_son)
+  panres@txString = "NPO (Annual)"
+  gsn_panel2(wks_npo,map_npo_ann,(/nrow,ncol/),panres)
+  delete(map_npo_ann)
+  if (COMPUTE_MODES_MON.eq."True") then
+    panres@txString = "NPO (Monthly)"
+    gsn_panel2(wks_npo,map_npo_mon,(/nrow,ncol/),panres)
+    delete(map_npo_mon)
+  end if
+
+  if (sstreg_frame.eq.0) then
+    if (tasreg_frame.eq.0) then
+      txt0 = "SST/TAS"
+    else
+      txt0 = "SST"
+    end if
+    panres@txString = "NPO "+txt0+" Regressions (DJF)"
+    gsn_panel2(wks_npo,reg_npo_djf,(/nrow,ncol/),panres)
+    delete(reg_npo_djf)
+    panres@txString = "NPO "+txt0+" Regressions (MAM)"
+    gsn_panel2(wks_npo,reg_npo_mam,(/nrow,ncol/),panres)
+    delete(reg_npo_mam)
+    panres@txString = "NPO "+txt0+" Regressions (JJA)"
+    gsn_panel2(wks_npo,reg_npo_jja,(/nrow,ncol/),panres)
+    delete(reg_npo_jja)
+    panres@txString = "NPO "+txt0+" Regressions (SON)"
+    gsn_panel2(wks_npo,reg_npo_son,(/nrow,ncol/),panres)
+    delete(reg_npo_son)
+    panres@txString = "NPO "+txt0+" Regressions (Annual)"
+    gsn_panel2(wks_npo,reg_npo_ann,(/nrow,ncol/),panres)
+    delete(reg_npo_ann)
+    if (COMPUTE_MODES_MON.eq."True") then
+      panres@txString = "NPO "+txt0+" Regressions (Monthly)"
+      gsn_panel2(wks_npo,reg_npo_mon,(/nrow,ncol/),panres)
+      delete(reg_npo_mon)
+    end if
+    delete(wks_npo)
+  end if
+  if (prreg_frame.eq.0) then
+    panres@txString = "NPO PR Regressions (DJF)"
+    gsn_panel2(wks_npo_pr,reg_npo_pr_djf,(/nrow,ncol/),panres)
+    delete(reg_npo_pr_djf)
+    panres@txString = "NPO PR Regressions (MAM)"
+    gsn_panel2(wks_npo_pr,reg_npo_pr_mam,(/nrow,ncol/),panres)
+    delete(reg_npo_pr_mam)
+    panres@txString = "NPO PR Regressions (JJA)"
+    gsn_panel2(wks_npo_pr,reg_npo_pr_jja,(/nrow,ncol/),panres)
+    delete(reg_npo_pr_jja)
+    panres@txString = "NPO PR Regressions (SON)"
+    gsn_panel2(wks_npo_pr,reg_npo_pr_son,(/nrow,ncol/),panres)
+    delete(reg_npo_pr_son)
+    panres@txString = "NPO PR Regressions (Annual)"
+    gsn_panel2(wks_npo_pr,reg_npo_pr_ann,(/nrow,ncol/),panres)
+    delete(reg_npo_pr_ann)
+    if (COMPUTE_MODES_MON.eq."True") then
+      panres@txString = "NPO PR Regressions (Monthly)"
+      gsn_panel2(wks_npo_pr,reg_npo_pr_mon,(/nrow,ncol/),panres)
+      delete(reg_npo_pr_mon)
+    end if
+    delete(wks_npo_pr)
+  end if
+
+  panres2 = True
+  if (nsim.le.5) then
+    panres2@txFontHeightF = 0.024
+  else
+    panres2@txFontHeightF = 0.016
+  end if
+  panres2@gsnMaximize = True
+  panres2@gsnPaperOrientation = "portrait"
+  if (SCALE_TIMESERIES.eq."True") then
+    tt = ind(nyr.eq.nyr_max)
+    panres2@gsnPanelScalePlotIndex = tt(0)
+    delete(tt)
+  end if
+  if (nsim.le.12) then
+    lp = (/nsim,1/)
+  else
+    lp = (/nrow,ncol/)
+  end if
+
+  panres2@txString = "PNA (DJF)"
+  gsn_panel2(wks_pna_ts,xy_pna_djf,lp,panres2)
+  delete(xy_pna_djf)
+  panres2@txString = "PNA (MAM)"
+  gsn_panel2(wks_pna_ts,xy_pna_mam,lp,panres2)
+  delete(xy_pna_mam)
+  panres2@txString = "PNA (JJA)"
+  gsn_panel2(wks_pna_ts,xy_pna_jja,lp,panres2)
+  delete(xy_pna_jja)
+  panres2@txString = "PNA (SON)"
+  gsn_panel2(wks_pna_ts,xy_pna_son,lp,panres2)
+  delete(xy_pna_son)
+  panres2@txString = "PNA (Annual)"
+  gsn_panel2(wks_pna_ts,xy_pna_ann,lp,panres2)
+  delete(xy_pna_ann)
+  if (COMPUTE_MODES_MON.eq."True") then
+    panres2@txString = "PNA (Monthly)"
+    gsn_panel2(wks_pna_ts,xy_pna_mon,lp,panres2)
+    delete(xy_pna_mon)
+  end if
+  delete(wks_pna_ts)
+
+  panres2@txString = "NPO (DJF)"
+  gsn_panel2(wks_npo_ts,xy_npo_djf,lp,panres2)
+  delete(xy_npo_djf)
+  panres2@txString = "NPO (MAM)"
+  gsn_panel2(wks_npo_ts,xy_npo_mam,lp,panres2)
+  delete(xy_npo_mam)
+  panres2@txString = "NPO (JJA)"
+  gsn_panel2(wks_npo_ts,xy_npo_jja,lp,panres2)
+  delete(xy_npo_jja)
+  panres2@txString = "NPO (SON)"
+  gsn_panel2(wks_npo_ts,xy_npo_son,lp,panres2)
+  delete(xy_npo_son)
+  panres2@txString = "NPO (Annual)"
+  gsn_panel2(wks_npo_ts,xy_npo_ann,lp,panres2)
+  delete(xy_npo_ann)
+  if (COMPUTE_MODES_MON.eq."True") then
+    panres2@txString = "NPO (Monthly)"
+    gsn_panel2(wks_npo_ts,xy_npo_mon,lp,panres2)
+    delete(xy_npo_mon)
+  end if
+  delete(wks_npo_ts)
+
(NDJFM)" + gsn_panel2(wks_npi_ts,xy_npi,lp,panres2) + delete(xy_npi) + delete(wks_npi_ts) +;-------------------------------------------------------------------------------------------------- + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + system("mv "+OUTDIR+"pna.000001.png "+OUTDIR+"pna.djf.png") + system("mv "+OUTDIR+"pna.000002.png "+OUTDIR+"pna.mam.png") + system("mv "+OUTDIR+"pna.000003.png "+OUTDIR+"pna.jja.png") + system("mv "+OUTDIR+"pna.000004.png "+OUTDIR+"pna.son.png") + system("mv "+OUTDIR+"pna.000005.png "+OUTDIR+"pna.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pna.000006.png "+OUTDIR+"pna.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pna.000007.png "+OUTDIR+"pna.tempreg.djf.png") + system("mv "+OUTDIR+"pna.000008.png "+OUTDIR+"pna.tempreg.mam.png") + system("mv "+OUTDIR+"pna.000009.png "+OUTDIR+"pna.tempreg.jja.png") + system("mv "+OUTDIR+"pna.000010.png "+OUTDIR+"pna.tempreg.son.png") + system("mv "+OUTDIR+"pna.000011.png "+OUTDIR+"pna.tempreg.ann.png") + system("mv "+OUTDIR+"pna.000012.png "+OUTDIR+"pna.tempreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"pna.000006.png "+OUTDIR+"pna.tempreg.djf.png") + system("mv "+OUTDIR+"pna.000007.png "+OUTDIR+"pna.tempreg.mam.png") + system("mv "+OUTDIR+"pna.000008.png "+OUTDIR+"pna.tempreg.jja.png") + system("mv "+OUTDIR+"pna.000009.png "+OUTDIR+"pna.tempreg.son.png") + system("mv "+OUTDIR+"pna.000010.png "+OUTDIR+"pna.tempreg.ann.png") + end if + end if + + if (prreg_frame.eq.0) then + system("mv "+OUTDIR+"pna.prreg.000001.png "+OUTDIR+"pna.prreg.djf.png") + system("mv "+OUTDIR+"pna.prreg.000002.png "+OUTDIR+"pna.prreg.mam.png") + system("mv "+OUTDIR+"pna.prreg.000003.png "+OUTDIR+"pna.prreg.jja.png") + system("mv "+OUTDIR+"pna.prreg.000004.png "+OUTDIR+"pna.prreg.son.png") + system("mv "+OUTDIR+"pna.prreg.000005.png "+OUTDIR+"pna.prreg.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pna.prreg.000006.png "+OUTDIR+"pna.prreg.mon.png") + end if + end if + + system("mv "+OUTDIR+"npo.000001.png "+OUTDIR+"npo.djf.png") + system("mv "+OUTDIR+"npo.000002.png "+OUTDIR+"npo.mam.png") + system("mv "+OUTDIR+"npo.000003.png "+OUTDIR+"npo.jja.png") + system("mv "+OUTDIR+"npo.000004.png "+OUTDIR+"npo.son.png") + system("mv "+OUTDIR+"npo.000005.png "+OUTDIR+"npo.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"npo.000006.png "+OUTDIR+"npo.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"npo.000007.png "+OUTDIR+"npo.tempreg.djf.png") + system("mv "+OUTDIR+"npo.000008.png "+OUTDIR+"npo.tempreg.mam.png") + system("mv "+OUTDIR+"npo.000009.png "+OUTDIR+"npo.tempreg.jja.png") + system("mv "+OUTDIR+"npo.000010.png "+OUTDIR+"npo.tempreg.son.png") + system("mv "+OUTDIR+"npo.000011.png "+OUTDIR+"npo.tempreg.ann.png") + system("mv "+OUTDIR+"npo.000012.png "+OUTDIR+"npo.tempreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"npo.000006.png "+OUTDIR+"npo.tempreg.djf.png") + system("mv "+OUTDIR+"npo.000007.png "+OUTDIR+"npo.tempreg.mam.png") + system("mv "+OUTDIR+"npo.000008.png "+OUTDIR+"npo.tempreg.jja.png") + system("mv "+OUTDIR+"npo.000009.png "+OUTDIR+"npo.tempreg.son.png") + system("mv "+OUTDIR+"npo.000010.png "+OUTDIR+"npo.tempreg.ann.png") + end if + end if + + if (prreg_frame.eq.0) then + system("mv "+OUTDIR+"npo.prreg.000001.png "+OUTDIR+"npo.prreg.djf.png") + system("mv "+OUTDIR+"npo.prreg.000002.png "+OUTDIR+"npo.prreg.mam.png") + system("mv 
"+OUTDIR+"npo.prreg.000003.png "+OUTDIR+"npo.prreg.jja.png") + system("mv "+OUTDIR+"npo.prreg.000004.png "+OUTDIR+"npo.prreg.son.png") + system("mv "+OUTDIR+"npo.prreg.000005.png "+OUTDIR+"npo.prreg.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"npo.prreg.000006.png "+OUTDIR+"npo.prreg.mon.png") + end if + end if + + system("mv "+OUTDIR+"pna.timeseries.000001.png "+OUTDIR+"pna.timeseries.djf.png") + system("mv "+OUTDIR+"pna.timeseries.000002.png "+OUTDIR+"pna.timeseries.mam.png") + system("mv "+OUTDIR+"pna.timeseries.000003.png "+OUTDIR+"pna.timeseries.jja.png") + system("mv "+OUTDIR+"pna.timeseries.000004.png "+OUTDIR+"pna.timeseries.son.png") + system("mv "+OUTDIR+"pna.timeseries.000005.png "+OUTDIR+"pna.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pna.timeseries.000006.png "+OUTDIR+"pna.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"npo.timeseries.000001.png "+OUTDIR+"npo.timeseries.djf.png") + system("mv "+OUTDIR+"npo.timeseries.000002.png "+OUTDIR+"npo.timeseries.mam.png") + system("mv "+OUTDIR+"npo.timeseries.000003.png "+OUTDIR+"npo.timeseries.jja.png") + system("mv "+OUTDIR+"npo.timeseries.000004.png "+OUTDIR+"npo.timeseries.son.png") + system("mv "+OUTDIR+"npo.timeseries.000005.png "+OUTDIR+"npo.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"npo.timeseries.000006.png "+OUTDIR+"npo.timeseries.mon.png") + end if + else + system("psplit "+OUTDIR+"pna.ps "+OUTDIR+"psl_pn") + system("mv "+OUTDIR+"psl_pn0001.ps "+OUTDIR+"pna.djf.ps") + system("mv "+OUTDIR+"psl_pn0002.ps "+OUTDIR+"pna.mam.ps") + system("mv "+OUTDIR+"psl_pn0003.ps "+OUTDIR+"pna.jja.ps") + system("mv "+OUTDIR+"psl_pn0004.ps "+OUTDIR+"pna.son.ps") + system("mv "+OUTDIR+"psl_pn0005.ps "+OUTDIR+"pna.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_pn0006.ps "+OUTDIR+"pna.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_pn0007.ps "+OUTDIR+"pna.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_pn0008.ps "+OUTDIR+"pna.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_pn0009.ps "+OUTDIR+"pna.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_pn0010.ps "+OUTDIR+"pna.tempreg.son.ps") + system("mv "+OUTDIR+"psl_pn0011.ps "+OUTDIR+"pna.tempreg.ann.ps") + system("mv "+OUTDIR+"psl_pn0012.ps "+OUTDIR+"pna.tempreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_pn0006.ps "+OUTDIR+"pna.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_pn0007.ps "+OUTDIR+"pna.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_pn0008.ps "+OUTDIR+"pna.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_pn0009.ps "+OUTDIR+"pna.tempreg.son.ps") + system("mv "+OUTDIR+"psl_pn0010.ps "+OUTDIR+"pna.tempreg.ann.ps") + end if + end if + + if (prreg_frame.eq.0) then + system("psplit "+OUTDIR+"pna.prreg.ps "+OUTDIR+"pr_nn") + system("mv "+OUTDIR+"pr_nn0001.ps "+OUTDIR+"pna.prreg.djf.ps") + system("mv "+OUTDIR+"pr_nn0002.ps "+OUTDIR+"pna.prreg.mam.ps") + system("mv "+OUTDIR+"pr_nn0003.ps "+OUTDIR+"pna.prreg.jja.ps") + system("mv "+OUTDIR+"pr_nn0004.ps "+OUTDIR+"pna.prreg.son.ps") + system("mv "+OUTDIR+"pr_nn0005.ps "+OUTDIR+"pna.prreg.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pr_nn0006.ps "+OUTDIR+"pna.prreg.mon.ps") + end if + end if + + system("psplit "+OUTDIR+"npo.ps "+OUTDIR+"psl_pn") + system("mv "+OUTDIR+"psl_pn0001.ps "+OUTDIR+"npo.djf.ps") + system("mv "+OUTDIR+"psl_pn0002.ps "+OUTDIR+"npo.mam.ps") + system("mv "+OUTDIR+"psl_pn0003.ps "+OUTDIR+"npo.jja.ps") + system("mv 
"+OUTDIR+"psl_pn0004.ps "+OUTDIR+"npo.son.ps") + system("mv "+OUTDIR+"psl_pn0005.ps "+OUTDIR+"npo.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_pn0006.ps "+OUTDIR+"npo.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_pn0007.ps "+OUTDIR+"npo.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_pn0008.ps "+OUTDIR+"npo.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_pn0009.ps "+OUTDIR+"npo.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_pn0010.ps "+OUTDIR+"npo.tempreg.son.ps") + system("mv "+OUTDIR+"psl_pn0011.ps "+OUTDIR+"npo.tempreg.ann.ps") + system("mv "+OUTDIR+"psl_pn0012.ps "+OUTDIR+"npo.tempreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_pn0006.ps "+OUTDIR+"npo.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_pn0007.ps "+OUTDIR+"npo.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_pn0008.ps "+OUTDIR+"npo.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_pn0009.ps "+OUTDIR+"npo.tempreg.son.ps") + system("mv "+OUTDIR+"psl_pn0010.ps "+OUTDIR+"npo.tempreg.ann.ps") + end if + end if + if (prreg_frame.eq.0) then + system("psplit "+OUTDIR+"npo.prreg.ps "+OUTDIR+"pr_nn") + system("mv "+OUTDIR+"pr_nn0001.ps "+OUTDIR+"npo.prreg.djf.ps") + system("mv "+OUTDIR+"pr_nn0002.ps "+OUTDIR+"npo.prreg.mam.ps") + system("mv "+OUTDIR+"pr_nn0003.ps "+OUTDIR+"npo.prreg.jja.ps") + system("mv "+OUTDIR+"pr_nn0004.ps "+OUTDIR+"npo.prreg.son.ps") + system("mv "+OUTDIR+"pr_nn0005.ps "+OUTDIR+"npo.prreg.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pr_nn0006.ps "+OUTDIR+"npo.prreg.mon.ps") + end if + system("rm "+OUTDIR+"pna.prreg.ps "+OUTDIR+"npo.prreg.ps") + end if + + system("psplit "+OUTDIR+"pna.timeseries.ps "+OUTDIR+"psl_pn") + system("mv "+OUTDIR+"psl_pn0001.ps "+OUTDIR+"pna.timeseries.djf.ps") + system("mv "+OUTDIR+"psl_pn0002.ps "+OUTDIR+"pna.timeseries.mam.ps") + system("mv "+OUTDIR+"psl_pn0003.ps "+OUTDIR+"pna.timeseries.jja.ps") + system("mv "+OUTDIR+"psl_pn0004.ps "+OUTDIR+"pna.timeseries.son.ps") + system("mv "+OUTDIR+"psl_pn0005.ps "+OUTDIR+"pna.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_pn0006.ps "+OUTDIR+"pna.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"npo.timeseries.ps "+OUTDIR+"psl_pn") + system("mv "+OUTDIR+"psl_pn0001.ps "+OUTDIR+"npo.timeseries.djf.ps") + system("mv "+OUTDIR+"psl_pn0002.ps "+OUTDIR+"npo.timeseries.mam.ps") + system("mv "+OUTDIR+"psl_pn0003.ps "+OUTDIR+"npo.timeseries.jja.ps") + system("mv "+OUTDIR+"psl_pn0004.ps "+OUTDIR+"npo.timeseries.son.ps") + system("mv "+OUTDIR+"psl_pn0005.ps "+OUTDIR+"npo.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_pn0006.ps "+OUTDIR+"npo.timeseries.mon.ps") + end if + system("rm "+OUTDIR+"npo.timeseries.ps "+OUTDIR+"pna.timeseries.ps "+OUTDIR+"npo.ps "+OUTDIR+"pna.ps") + end if + print("Finished: psl.pna_npo.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.sam_psa.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.sam_psa.ncl new file mode 100644 index 0000000000..be53678933 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.sam_psa.ncl @@ -0,0 +1,2383 @@ +; Calculates SAM, PSA1 and PSA2 (patterns and PC timeseries), as well as +; regressions of those PC timeseries onto ts, tas, and pr. 
+;
+; Variables used: psl, ts, tas, and pr
+;
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl"
+load "$CVDP_SCRIPTS/functions.ncl"
+
+begin
+  print("Starting: psl.sam_psa.ncl")
+
+  SCALE_TIMESERIES = getenv("SCALE_TIMESERIES")
+  OUTPUT_DATA = getenv("OUTPUT_DATA")
+  PNG_SCALE = tofloat(getenv("PNG_SCALE"))
+  OPT_CLIMO = getenv("OPT_CLIMO")
+  CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR"))
+  CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR"))
+  OUTPUT_TYPE = getenv("OUTPUT_TYPE")
+  COMPUTE_MODES_MON = getenv("COMPUTE_MODES_MON")
+  COLORMAP = getenv("COLORMAP")
+
+  nsim = numAsciiRow("namelist_byvar/namelist_psl")
+  na = asciiread("namelist_byvar/namelist_psl",(/nsim/),"string")
+  names = new(nsim,"string")
+  paths = new(nsim,"string")
+  syear = new(nsim,"integer",-999)
+  eyear = new(nsim,"integer",-999)
+  delim = "|"
+
+  do gg = 0,nsim-1
+    names(gg) = str_strip(str_get_field(na(gg),1,delim))
+    paths(gg) = str_strip(str_get_field(na(gg),2,delim))
+    syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim)))
+    eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim)))
+  end do
+  nyr = eyear-syear+1
+  nyr_max = max(nyr)
+
+  pi=4.*atan(1.0)
+  rad=(pi/180.)
+
+;---------SST Regressions coding-------------------------------------------------
+  nsim_ts = numAsciiRow("namelist_byvar/namelist_ts")
+  na_ts = asciiread("namelist_byvar/namelist_ts",(/nsim_ts/),"string")
+  names_ts = new(nsim_ts,"string")
+  paths_ts = new(nsim_ts,"string")
+  syear_ts = new(nsim_ts,"integer",-999)
+  eyear_ts = new(nsim_ts,"integer",-999)
+
+  do gg = 0,nsim_ts-1
+    names_ts(gg) = str_strip(str_get_field(na_ts(gg),1,delim))
+    paths_ts(gg) = str_strip(str_get_field(na_ts(gg),2,delim))
+    syear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),3,delim)))
+    eyear_ts(gg) = stringtointeger(str_strip(str_get_field(na_ts(gg),4,delim)))
+  end do
+  delete(na_ts)
+  nyr_ts = eyear_ts-syear_ts+1
+;---------TAS Regressions coding-------------------------------------------------
+  nsim_tas = numAsciiRow("namelist_byvar/namelist_trefht")
+  na_tas = asciiread("namelist_byvar/namelist_trefht",(/nsim_tas/),"string")
+  names_tas = new(nsim_tas,"string")
+  paths_tas = new(nsim_tas,"string")
+  syear_tas = new(nsim_tas,"integer",-999)
+  eyear_tas = new(nsim_tas,"integer",-999)
+
+  do gg = 0,nsim_tas-1
+    names_tas(gg) = str_strip(str_get_field(na_tas(gg),1,delim))
+    paths_tas(gg) = str_strip(str_get_field(na_tas(gg),2,delim))
+    syear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),3,delim)))
+    eyear_tas(gg) = stringtointeger(str_strip(str_get_field(na_tas(gg),4,delim)))
+  end do
+  delete(na_tas)
+  nyr_tas = eyear_tas-syear_tas+1
+;---------PR Regressions coding-------------------------------------------------
+  nsim_pr = numAsciiRow("namelist_byvar/namelist_prect")
+  na_pr = asciiread("namelist_byvar/namelist_prect",(/nsim_pr/),"string")
+  names_pr = new(nsim_pr,"string")
+  paths_pr = new(nsim_pr,"string")
+  syear_pr = new(nsim_pr,"integer",-999)
+  eyear_pr = new(nsim_pr,"integer",-999)
+
+  do gg = 0,nsim_pr-1
+    names_pr(gg) = str_strip(str_get_field(na_pr(gg),1,delim))
+    paths_pr(gg) = str_strip(str_get_field(na_pr(gg),2,delim))
+    syear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),3,delim)))
+    eyear_pr(gg) = stringtointeger(str_strip(str_get_field(na_pr(gg),4,delim)))
+  end do
+  delete(na_pr)
+  nyr_pr = eyear_pr-syear_pr+1
+;-------------------------------------------------------------------------------------------------
+
+  wks_type = OUTPUT_TYPE
+  if (wks_type.eq."png") then
+    wks_type@wkWidth = 1500*PNG_SCALE
+    wks_type@wkHeight = 1500*PNG_SCALE
+  end if
+
+  wks_sam = gsn_open_wks(wks_type,getenv("OUTDIR")+"sam")
+  wks_sam_pr = gsn_open_wks(wks_type,getenv("OUTDIR")+"sam.prreg")
+  wks_sam_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"sam.timeseries")
+
+  wks_psa1 = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa1")
+  wks_psa1_pr = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa1.prreg")
+  wks_psa1_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa1.timeseries")
+
+  wks_psa2 = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa2")
+  wks_psa2_pr = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa2.prreg")
+  wks_psa2_ts = gsn_open_wks(wks_type,getenv("OUTDIR")+"psa2.timeseries")
+
+  if (COLORMAP.eq.0) then
+    gsn_define_colormap(wks_sam,"ncl_default")
+    gsn_define_colormap(wks_sam_ts,"ncl_default")
+    gsn_define_colormap(wks_psa1,"ncl_default")
+    gsn_define_colormap(wks_psa1_ts,"ncl_default")
+    gsn_define_colormap(wks_psa2,"ncl_default")
+    gsn_define_colormap(wks_psa2_ts,"ncl_default")
+    gsn_define_colormap(wks_sam_pr,"MPL_BrBG")
+    gsn_define_colormap(wks_psa1_pr,"MPL_BrBG")
+    gsn_define_colormap(wks_psa2_pr,"MPL_BrBG")
+  end if
+  if (COLORMAP.eq.1) then
+    gsn_define_colormap(wks_sam,"BlueDarkRed18")
+    gsn_define_colormap(wks_sam_ts,"ncl_default")
+    gsn_define_colormap(wks_psa1,"BlueDarkRed18")
+    gsn_define_colormap(wks_psa1_ts,"ncl_default")
+    gsn_define_colormap(wks_psa2,"BlueDarkRed18")
+    gsn_define_colormap(wks_psa2_ts,"ncl_default")
+    gsn_define_colormap(wks_sam_pr,"MPL_BrBG")
+    gsn_define_colormap(wks_psa1_pr,"MPL_BrBG")
+    gsn_define_colormap(wks_psa2_pr,"MPL_BrBG")
+  end if
+
+  map_sam_djf = new(nsim,"graphic")
+  map_sam_mam = new(nsim,"graphic")
+  map_sam_jja = new(nsim,"graphic")
+  map_sam_son = new(nsim,"graphic")
+  map_sam_ann = new(nsim,"graphic")
+  map_sam_mon = new(nsim,"graphic")
+  xy_sam_djf = new(nsim,"graphic")
+  xy_sam_mam = new(nsim,"graphic")
+  xy_sam_jja = new(nsim,"graphic")
+  xy_sam_son = new(nsim,"graphic")
+  xy_sam_ann = new(nsim,"graphic")
+  xy_sam_mon = new(nsim,"graphic")
+  reg_sam_djf = new(nsim,"graphic")
+  reg_sam_mam = new(nsim,"graphic")
+  reg_sam_jja = new(nsim,"graphic")
+  reg_sam_son = new(nsim,"graphic")
+  reg_sam_ann = new(nsim,"graphic")
+  reg_sam_mon = new(nsim,"graphic")
+  reg_sam_pr_djf = new(nsim,"graphic")
+  reg_sam_pr_mam = new(nsim,"graphic")
+  reg_sam_pr_jja = new(nsim,"graphic")
+  reg_sam_pr_son = new(nsim,"graphic")
+  reg_sam_pr_ann = new(nsim,"graphic")
+  reg_sam_pr_mon = new(nsim,"graphic")
+
+  map_psa1_djf = new(nsim,"graphic")
+  map_psa1_mam = new(nsim,"graphic")
+  map_psa1_jja = new(nsim,"graphic")
+  map_psa1_son = new(nsim,"graphic")
+  map_psa1_ann = new(nsim,"graphic")
+  map_psa1_mon = new(nsim,"graphic")
+  xy_psa1_djf = new(nsim,"graphic")
+  xy_psa1_mam = new(nsim,"graphic")
+  xy_psa1_jja = new(nsim,"graphic")
+  xy_psa1_son = new(nsim,"graphic")
+  xy_psa1_ann = new(nsim,"graphic")
+  xy_psa1_mon = new(nsim,"graphic")
+  reg_psa1_djf = new(nsim,"graphic")
+  reg_psa1_mam = new(nsim,"graphic")
+  reg_psa1_jja = new(nsim,"graphic")
+  reg_psa1_son = new(nsim,"graphic")
+  reg_psa1_ann = new(nsim,"graphic")
+  reg_psa1_mon = new(nsim,"graphic")
+  reg_psa1_pr_djf = new(nsim,"graphic")
+  reg_psa1_pr_mam = new(nsim,"graphic")
+  reg_psa1_pr_jja = new(nsim,"graphic")
+  reg_psa1_pr_son = new(nsim,"graphic")
+  reg_psa1_pr_ann = new(nsim,"graphic")
+  reg_psa1_pr_mon = new(nsim,"graphic")
+
+  map_psa2_djf = new(nsim,"graphic")
+  map_psa2_mam = new(nsim,"graphic")
+  map_psa2_jja = new(nsim,"graphic")
+  map_psa2_son = new(nsim,"graphic")
+  map_psa2_ann = new(nsim,"graphic")
+  map_psa2_mon = new(nsim,"graphic")
+  xy_psa2_djf = new(nsim,"graphic")
+  xy_psa2_mam = new(nsim,"graphic")
+  xy_psa2_jja = new(nsim,"graphic")
+  xy_psa2_son = new(nsim,"graphic")
+  xy_psa2_ann = new(nsim,"graphic")
+  xy_psa2_mon = new(nsim,"graphic")
+  reg_psa2_djf = new(nsim,"graphic")
+  reg_psa2_mam = new(nsim,"graphic")
+  reg_psa2_jja = new(nsim,"graphic")
+  reg_psa2_son = new(nsim,"graphic")
+  reg_psa2_ann = new(nsim,"graphic")
+  reg_psa2_mon = new(nsim,"graphic")
+  reg_psa2_pr_djf = new(nsim,"graphic")
+  reg_psa2_pr_mam = new(nsim,"graphic")
+  reg_psa2_pr_jja = new(nsim,"graphic")
+  reg_psa2_pr_son = new(nsim,"graphic")
+  reg_psa2_pr_ann = new(nsim,"graphic")
+  reg_psa2_pr_mon = new(nsim,"graphic")
+
+  sstreg_frame = 1  ; flag to create the regression .ps/.png files; used instead of sstreg_plot_flag
+                    ; so that the .ps/.png files are still created even when the SST regressions
+                    ; are not computed for the last simulation listed
+  tasreg_frame = 1
+  prreg_frame = 1
+
+  do ee = 0,nsim-1
+;   print(paths(ee)+" "+syear(ee)+" "+eyear(ee))
+    arr = data_read_in(paths(ee),"PSL",syear(ee),eyear(ee))  ; read in data, orient lats/lons correctly, set time coordinate variable up
+    if (isatt(arr,"is_all_missing")) then
+      delete(arr)
+      continue
+    end if
+
+    if (OPT_CLIMO.eq."Full") then
+      arr = rmMonAnnCycTLL(arr)
+    else
+      check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+      temp_arr = arr
+      delete(temp_arr&time)
+      temp_arr&time = cd_calendar(arr&time,-1)
+      if (CLIMO_SYEAR.lt.0) then
+        climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+      else
+        climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+      end if
+      delete(temp_arr)
+      arr = calcMonAnomTLL(arr,climo)
+      delete(climo)
+    end if
+
+    arrT = runave_n_Wrap(arr,3,0,0)  ; form DJF averages
+    arrT(0,:,:) = (/ dim_avg_n(arr(:1,:,:),0) /)
+    arr_djf = arrT(0::12,:,:)
+    arr_mam = arrT(3::12,:,:)
+    arr_jja = arrT(6::12,:,:)  ; form JJA averages
+    arr_son = arrT(9::12,:,:)
+    delete(arrT)
+
+    arrV = runave_n_Wrap(arr,12,0,0)
+    arr_ann = arrV(5::12,:,:)
+    delete(arrV)
+;
+;   arr_djf = (/ dtrend_msg_n(ispan(0,dimsizes(arr_djf&time)-1,1),arr_djf,True,False,0) /)
+;   arr_mam = (/ dtrend_msg_n(ispan(0,dimsizes(arr_mam&time)-1,1),arr_mam,True,False,0) /)
+;   arr_jja = (/ dtrend_msg_n(ispan(0,dimsizes(arr_jja&time)-1,1),arr_jja,True,False,0) /)
+;   arr_son = (/ dtrend_msg_n(ispan(0,dimsizes(arr_son&time)-1,1),arr_son,True,False,0) /)
+;
+;   arr_ann = (/ dtrend_msg_n(ispan(0,dimsizes(arr_ann&time)-1,1),arr_ann,True,False,0) /)
+;
+;   arr_ndjfm = (/ dtrend_msg_n(ispan(0,dimsizes(arr_ndjfm&time)-1,1),arr_ndjfm,True,False,0) /)
+;
+;   arr = (/ dtrend_msg_n(ispan(0,dimsizes(arr&time)-1,1),arr,True,False,0) /)
+;---------SST Regressions coding-------------------------------------------------
+    if (any(ismissing((/syear(ee),syear_ts(ee),eyear(ee),eyear_ts(ee)/)))) then
+      sstreg_plot_flag = 1
+    else
+      if (syear(ee).eq.syear_ts(ee)) then  ; check that the start and end years match for ts and psl
+        if (eyear(ee).eq.eyear_ts(ee)) then
+          sstreg_plot_flag = 0
+        else
+          sstreg_plot_flag = 1
+        end if
+      else
+        sstreg_plot_flag = 1
+      end if
+    end if
+
data_read_in(paths_ts(ee),"TS",syear_ts(ee),eyear_ts(ee)) + if (isatt(sst,"is_all_missing")) then + sstreg_plot_flag = 1 + delete(sst) + end if + + if (sstreg_plot_flag.eq.0) then ; only continue if both PSL/TS fields are present + sst = where(sst.le.-1.8,-1.8,sst) + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") + basemap = d->LSMASK + lsm = landsea_mask(basemap,sst&lat,sst&lon) + sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False) + delete(lsm) + + if (OPT_CLIMO.eq."Full") then + sst = rmMonAnnCycTLL(sst) + else + check_custom_climo(names_ts(ee),syear_ts(ee),eyear_ts(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sst + delete(temp_arr&time) + temp_arr&time = cd_calendar(sst&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + sst = calcMonAnomTLL(sst,climo) + delete(climo) + end if +; sst = (/ dtrend_msg_n(ispan(0,dimsizes(sst&time)-1,1),sst,False,False,0) /) + + sstT = runave_n_Wrap(sst,3,0,0) ; form DJF averages + sstT(0,:,:) = (/ dim_avg_n(sst(:1,:,:),0) /) + sst_djf = sstT(0::12,:,:) + sst_mam = sstT(3::12,:,:) + sst_jja = sstT(6::12,:,:) ; form JJA averages + sst_son = sstT(9::12,:,:) + delete(sstT) + + sstV = runave_n_Wrap(sst,12,0,0) + sst_ann = sstV(5::12,:,:) + delete(sstV) + end if + end if +;---------TAS Regressions coding------------------------------------------------- + if (any(ismissing((/syear(ee),syear_tas(ee),eyear(ee),eyear_tas(ee)/)))) then + tasreg_plot_flag = 1 + else + if (syear(ee).eq.syear_tas(ee)) then ; check that the start and end years match for ts, tas, and psl + if (eyear(ee).eq.eyear_tas(ee)) then + tasreg_plot_flag = 0 + else + tasreg_plot_flag = 1 + end if + else + tasreg_plot_flag = 1 + end if + if (sstreg_plot_flag.eq.1) then ; if the ts dataset is missing but the tas is not, do not + tasreg_plot_flag = 1 ; run through the tas calculations as both currently required + end if + end if + + if (tasreg_plot_flag.eq.0) then + tas = data_read_in(paths_tas(ee),"TREFHT",syear_tas(ee),eyear_tas(ee)) + if (isatt(tas,"is_all_missing")) then + tasreg_plot_flag = 1 + delete(tas) + end if + + if (tasreg_plot_flag.eq.0) then ; only continue if both PSL/TS fields are present + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") + basemap = d->LSMASK + lsm = landsea_mask(basemap,tas&lat,tas&lon) + tas = mask(tas,conform(tas,lsm,(/1,2/)).eq.0,False) + delete(lsm) + + if (OPT_CLIMO.eq."Full") then + tas = rmMonAnnCycTLL(tas) + else + check_custom_climo(names_tas(ee),syear_tas(ee),eyear_tas(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = tas + delete(temp_arr&time) + temp_arr&time = cd_calendar(tas&time,1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + tas = calcMonAnomTLL(tas,climo) + delete(climo) + end if +; tas = (/ dtrend_msg_n(ispan(0,dimsizes(tas&time)-1,1),tas,False,False,0) /) + + tasT = runave_n_Wrap(tas,3,0,0) ; form DJF averages + tasT(0,:,:) = (/ dim_avg_n(tas(:1,:,:),0) /) + tas_djf = tasT(0::12,:,:) + tas_mam = tasT(3::12,:,:) + tas_jja = tasT(6::12,:,:) ; form JJA averages + tas_son = tasT(9::12,:,:) + delete(tasT) + + tasV = runave_n_Wrap(tas,12,0,0) + tas_ann = tasV(5::12,:,:) + delete([/tasV/]) + end if + end if +;---------PR Regressions 
+;---------TAS Regressions coding-------------------------------------------------
+    if (any(ismissing((/syear(ee),syear_tas(ee),eyear(ee),eyear_tas(ee)/)))) then
+      tasreg_plot_flag = 1
+    else
+      if (syear(ee).eq.syear_tas(ee)) then    ; check that the start and end years match for ts, tas, and psl
+        if (eyear(ee).eq.eyear_tas(ee)) then
+          tasreg_plot_flag = 0
+        else
+          tasreg_plot_flag = 1
+        end if
+      else
+        tasreg_plot_flag = 1
+      end if
+      if (sstreg_plot_flag.eq.1) then    ; if the ts dataset is missing but the tas dataset is not, do not
+        tasreg_plot_flag = 1             ; run through the tas calculations, as both are currently required
+      end if
+    end if
+
+    if (tasreg_plot_flag.eq.0) then
+      tas = data_read_in(paths_tas(ee),"TREFHT",syear_tas(ee),eyear_tas(ee))
+      if (isatt(tas,"is_all_missing")) then
+        tasreg_plot_flag = 1
+        delete(tas)
+      end if
+
+      if (tasreg_plot_flag.eq.0) then    ; only continue if both PSL/TAS fields are present
+        d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r")
+        basemap = d->LSMASK
+        lsm = landsea_mask(basemap,tas&lat,tas&lon)
+        tas = mask(tas,conform(tas,lsm,(/1,2/)).eq.0,False)
+        delete(lsm)
+
+        if (OPT_CLIMO.eq."Full") then
+          tas = rmMonAnnCycTLL(tas)
+        else
+          check_custom_climo(names_tas(ee),syear_tas(ee),eyear_tas(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+          temp_arr = tas
+          delete(temp_arr&time)
+          temp_arr&time = cd_calendar(tas&time,1)
+          if (CLIMO_SYEAR.lt.0) then
+            climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+          else
+            climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+          end if
+          delete(temp_arr)
+          tas = calcMonAnomTLL(tas,climo)
+          delete(climo)
+        end if
+;        tas = (/ dtrend_msg_n(ispan(0,dimsizes(tas&time)-1,1),tas,False,False,0) /)
+
+        tasT = runave_n_Wrap(tas,3,0,0)    ; form DJF averages
+        tasT(0,:,:) = (/ dim_avg_n(tas(:1,:,:),0) /)
+        tas_djf = tasT(0::12,:,:)
+        tas_mam = tasT(3::12,:,:)
+        tas_jja = tasT(6::12,:,:)          ; form JJA averages
+        tas_son = tasT(9::12,:,:)
+        delete(tasT)
+
+        tasV = runave_n_Wrap(tas,12,0,0)
+        tas_ann = tasV(5::12,:,:)
+        delete([/tasV/])
+      end if
+    end if
+;---------PR Regressions coding-------------------------------------------------
+    if (any(ismissing((/syear(ee),syear_pr(ee),eyear(ee),eyear_pr(ee)/)))) then
+      prreg_plot_flag = 1
+    else
+      if (syear(ee).eq.syear_pr(ee)) then    ; check that the start and end years match for pr and psl
+        if (eyear(ee).eq.eyear_pr(ee)) then
+          prreg_plot_flag = 0
+        else
+          prreg_plot_flag = 1
+        end if
+      else
+        prreg_plot_flag = 1
+      end if
+    end if
+
+    if (prreg_plot_flag.eq.0) then
+      pr = data_read_in(paths_pr(ee),"PRECT",syear_pr(ee),eyear_pr(ee))
+      if (isatt(pr,"is_all_missing")) then
+        prreg_plot_flag = 1
+        delete(pr)
+      end if
+
+      if (prreg_plot_flag.eq.0) then    ; only continue if both PSL/PR fields are present
+        if (OPT_CLIMO.eq."Full") then
+          pr = rmMonAnnCycTLL(pr)
+        else
+          check_custom_climo(names_pr(ee),syear_pr(ee),eyear_pr(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+          temp_arr = pr
+          delete(temp_arr&time)
+          temp_arr&time = cd_calendar(pr&time,1)
+          if (CLIMO_SYEAR.lt.0) then
+            climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+          else
+            climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+          end if
+          delete(temp_arr)
+          pr = calcMonAnomTLL(pr,climo)
+          delete(climo)
+        end if
+;        pr = (/ dtrend_msg_n(ispan(0,dimsizes(pr&time)-1,1),pr,False,False,0) /)
+
+        prT = runave_n_Wrap(pr,3,0,0)    ; form DJF averages
+        prT(0,:,:) = (/ dim_avg_n(pr(:1,:,:),0) /)
+        pr_djf = prT(0::12,:,:)
+        pr_mam = prT(3::12,:,:)
+        pr_jja = prT(6::12,:,:)            ; form JJA averages
+        pr_son = prT(9::12,:,:)
+        delete(prT)
+
+        prV = runave_n_Wrap(pr,12,0,0)
+        pr_ann = prV(5::12,:,:)
+        delete([/prV/])
+      end if
+    end if
+;------------------------------------------------------------------
+    arr_djf_CW = SqrtCosWeight(arr_djf)
+    arr_mam_CW = SqrtCosWeight(arr_mam)
+    arr_jja_CW = SqrtCosWeight(arr_jja)
+    arr_son_CW = SqrtCosWeight(arr_son)
+    arr_ann_CW = SqrtCosWeight(arr_ann)
+    if (COMPUTE_MODES_MON.eq."True") then
+      arr_mon_CW = SqrtCosWeight(arr)
+    else
+      if (isvar("arr")) then
+        delete(arr)
+      end if
+      if (isvar("sst")) then
+        delete(sst)
+      end if
+      if (isvar("tas")) then
+        delete(tas)
+      end if
+      if (isvar("pr")) then
+        delete(pr)
+      end if
+    end if
+;----------SAM/PSA1/PSA2 calculations----------------------------------------------------------------------
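+;    The modes are computed from the sqrt(cos(lat))-weighted PSL anomalies south of
+;    20S; the three leading EOFs are taken as SAM, PSA1 and PSA2. Each principal
+;    component timeseries is standardized, so the regressions that follow yield
+;    patterns in the units of the regressed field per standard deviation of the PC.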
+    evecv = eofunc(arr_djf_CW({lat|:-20},lon|:,time|:),4,75)
+    pcts = eofunc_ts(arr_djf_CW({lat|:-20},lon|:,time|:),evecv,False)
+    sam_pc_djf = dim_standardize(pcts(0,:),0)
+    psa1_pc_djf = dim_standardize(pcts(1,:),0)
+    psa2_pc_djf = dim_standardize(pcts(2,:),0)
+    sam_djf = arr_djf(0,:,:)
+    sam_djf = (/ regCoef(sam_pc_djf,arr_djf(lat|:,lon|:,time|:)) /)
+    psa1_djf = arr_djf(0,:,:)
+    psa1_djf = (/ regCoef(psa1_pc_djf,arr_djf(lat|:,lon|:,time|:)) /)
+    psa2_djf = arr_djf(0,:,:)
+    psa2_djf = (/ regCoef(psa2_pc_djf,arr_djf(lat|:,lon|:,time|:)) /)
+
+    if (sstreg_plot_flag.eq.0) then
+      sam_sst_djf = sst_djf(0,:,:)
+      sam_sst_djf = (/ regCoef(sam_pc_djf,sst_djf(lat|:,lon|:,time|:)) /)
+      psa1_sst_djf = sst_djf(0,:,:)
+      psa1_sst_djf = (/ regCoef(psa1_pc_djf,sst_djf(lat|:,lon|:,time|:)) /)
+      psa2_sst_djf = sst_djf(0,:,:)
+      psa2_sst_djf = (/ regCoef(psa2_pc_djf,sst_djf(lat|:,lon|:,time|:)) /)
+      delete(sst_djf)
+    end if
+    if (tasreg_plot_flag.eq.0) then
+      sam_tas_djf = tas_djf(0,:,:)
+      sam_tas_djf = (/ regCoef(sam_pc_djf,tas_djf(lat|:,lon|:,time|:)) /)
+      psa1_tas_djf = tas_djf(0,:,:)
+      psa1_tas_djf = (/ regCoef(psa1_pc_djf,tas_djf(lat|:,lon|:,time|:)) /)
+      psa2_tas_djf = tas_djf(0,:,:)
+      psa2_tas_djf = (/ regCoef(psa2_pc_djf,tas_djf(lat|:,lon|:,time|:)) /)
+      delete(tas_djf)
+    end if
+    if (prreg_plot_flag.eq.0) then
+      sam_pr_djf = pr_djf(0,:,:)
+      sam_pr_djf = (/ regCoef(sam_pc_djf,pr_djf(lat|:,lon|:,time|:)) /)
+      psa1_pr_djf = pr_djf(0,:,:)
+      psa1_pr_djf = (/ regCoef(psa1_pc_djf,pr_djf(lat|:,lon|:,time|:)) /)
+      psa2_pr_djf = pr_djf(0,:,:)
+      psa2_pr_djf = (/ regCoef(psa2_pc_djf,pr_djf(lat|:,lon|:,time|:)) /)
+      delete(pr_djf)
+    end if
+
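+;    An EOF/PC pair is only defined up to sign. The blocks below enforce a common
+;    polarity across simulations by checking each pattern at a fixed reference point
+;    (85S/5E for SAM, 62S/150W for PSA1, 60S/80W for PSA2) and, if needed, flipping
+;    the pattern, its PC and all associated regressions together.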
+    if (.not.ismissing(sam_djf({-85},{5}))) then
+      if (sam_djf({-85},{5}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        sam_djf = sam_djf*-1.
+        sam_pc_djf = sam_pc_djf*-1.
+        if (sstreg_plot_flag.eq.0) then
+          sam_sst_djf = sam_sst_djf*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          sam_tas_djf = sam_tas_djf*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          sam_pr_djf = sam_pr_djf*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa1_djf({-62},{210}))) then
+      if (psa1_djf({-62},{210}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa1_djf = psa1_djf*-1.
+        psa1_pc_djf = psa1_pc_djf*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa1_sst_djf = psa1_sst_djf*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa1_tas_djf = psa1_tas_djf*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa1_pr_djf = psa1_pr_djf*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa2_djf({-60},{280}))) then
+      if (psa2_djf({-60},{280}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa2_djf = psa2_djf*-1.
+        psa2_pc_djf = psa2_pc_djf*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa2_sst_djf = psa2_sst_djf*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa2_tas_djf = psa2_tas_djf*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa2_pr_djf = psa2_pr_djf*-1.
+        end if
+      end if
+    end if
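+;    eofunc_north2 (a helper presumably defined in the CVDP function library) applies
+;    the North et al. (1982) rule of thumb to test whether each mode's explained
+;    variance is well separated from its neighbors given the sample size; significant
+;    percentages are flagged with a trailing "*" in the pcvar attribute used to label
+;    the plots.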
+    sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(sam_pc_djf),False)
+    if (sig_pcv(0)) then    ; if True then significant
+      sam_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+    else
+      sam_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+    end if
+    if (sig_pcv(1)) then    ; if True then significant
+      psa1_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+    else
+      psa1_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+    end if
+    if (sig_pcv(2)) then    ; if True then significant
+      psa2_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%*"
+    else
+      psa2_djf@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%"
+    end if
+    delete(sig_pcv)
+    sam_pc_djf!0 = "TIME"
+    sam_pc_djf&TIME = ispan(syear(ee),eyear(ee),1)
+    sam_pc_djf&TIME@units = "YYYY"
+    sam_pc_djf&TIME@long_name = "time"
+
+    copy_VarCoords(sam_pc_djf,psa1_pc_djf)
+    copy_VarCoords(sam_pc_djf,psa2_pc_djf)
+    delete([/evecv,pcts/])
+
+    evecv = eofunc(arr_mam_CW({lat|:-20},lon|:,time|:),4,75)
+    pcts = eofunc_ts(arr_mam_CW({lat|:-20},lon|:,time|:),evecv,False)
+    sam_pc_mam = dim_standardize(pcts(0,:),0)
+    psa1_pc_mam = dim_standardize(pcts(1,:),0)
+    psa2_pc_mam = dim_standardize(pcts(2,:),0)
+    sam_mam = arr_mam(0,:,:)
+    sam_mam = (/ regCoef(sam_pc_mam,arr_mam(lat|:,lon|:,time|:)) /)
+    psa1_mam = arr_mam(0,:,:)
+    psa1_mam = (/ regCoef(psa1_pc_mam,arr_mam(lat|:,lon|:,time|:)) /)
+    psa2_mam = arr_mam(0,:,:)
+    psa2_mam = (/ regCoef(psa2_pc_mam,arr_mam(lat|:,lon|:,time|:)) /)
+    if (sstreg_plot_flag.eq.0) then
+      sam_sst_mam = sst_mam(0,:,:)
+      sam_sst_mam = (/ regCoef(sam_pc_mam,sst_mam(lat|:,lon|:,time|:)) /)
+      psa1_sst_mam = sst_mam(0,:,:)
+      psa1_sst_mam = (/ regCoef(psa1_pc_mam,sst_mam(lat|:,lon|:,time|:)) /)
+      psa2_sst_mam = sst_mam(0,:,:)
+      psa2_sst_mam = (/ regCoef(psa2_pc_mam,sst_mam(lat|:,lon|:,time|:)) /)
+      delete(sst_mam)
+    end if
+    if (tasreg_plot_flag.eq.0) then
+      sam_tas_mam = tas_mam(0,:,:)
+      sam_tas_mam = (/ regCoef(sam_pc_mam,tas_mam(lat|:,lon|:,time|:)) /)
+      psa1_tas_mam = tas_mam(0,:,:)
+      psa1_tas_mam = (/ regCoef(psa1_pc_mam,tas_mam(lat|:,lon|:,time|:)) /)
+      psa2_tas_mam = tas_mam(0,:,:)
+      psa2_tas_mam = (/ regCoef(psa2_pc_mam,tas_mam(lat|:,lon|:,time|:)) /)
+      delete(tas_mam)
+    end if
+    if (prreg_plot_flag.eq.0) then
+      sam_pr_mam = pr_mam(0,:,:)
+      sam_pr_mam = (/ regCoef(sam_pc_mam,pr_mam(lat|:,lon|:,time|:)) /)
+      psa1_pr_mam = pr_mam(0,:,:)
+      psa1_pr_mam = (/ regCoef(psa1_pc_mam,pr_mam(lat|:,lon|:,time|:)) /)
+      psa2_pr_mam = pr_mam(0,:,:)
+      psa2_pr_mam = (/ regCoef(psa2_pc_mam,pr_mam(lat|:,lon|:,time|:)) /)
+      delete(pr_mam)
+    end if
+
+    if (.not.ismissing(sam_mam({-85},{5}))) then
+      if (sam_mam({-85},{5}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        sam_mam = sam_mam*-1.
+        sam_pc_mam = sam_pc_mam*-1.
+        if (sstreg_plot_flag.eq.0) then
+          sam_sst_mam = sam_sst_mam*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          sam_tas_mam = sam_tas_mam*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          sam_pr_mam = sam_pr_mam*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa1_mam({-62},{210}))) then
+      if (psa1_mam({-62},{210}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa1_mam = psa1_mam*-1.
+        psa1_pc_mam = psa1_pc_mam*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa1_sst_mam = psa1_sst_mam*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa1_tas_mam = psa1_tas_mam*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa1_pr_mam = psa1_pr_mam*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa2_mam({-60},{280}))) then
+      if (psa2_mam({-60},{280}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa2_mam = psa2_mam*-1.
+        psa2_pc_mam = psa2_pc_mam*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa2_sst_mam = psa2_sst_mam*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa2_tas_mam = psa2_tas_mam*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa2_pr_mam = psa2_pr_mam*-1.
+        end if
+      end if
+    end if
+    sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(sam_pc_mam),False)
+    if (sig_pcv(0)) then    ; if True then significant
+      sam_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+    else
+      sam_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+    end if
+    if (sig_pcv(1)) then    ; if True then significant
+      psa1_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+    else
+      psa1_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+    end if
+    if (sig_pcv(2)) then    ; if True then significant
+      psa2_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%*"
+    else
+      psa2_mam@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%"
+    end if
+    delete(sig_pcv)
+    copy_VarCoords(sam_pc_djf,sam_pc_mam)
+    copy_VarCoords(sam_pc_djf,psa1_pc_mam)
+    copy_VarCoords(sam_pc_djf,psa2_pc_mam)
+    delete([/evecv,pcts/])
+
+    evecv = eofunc(arr_jja_CW({lat|:-20},lon|:,time|:),4,75)
+    pcts = eofunc_ts(arr_jja_CW({lat|:-20},lon|:,time|:),evecv,False)
+    sam_pc_jja = dim_standardize(pcts(0,:),0)
+    psa1_pc_jja = dim_standardize(pcts(1,:),0)
+    psa2_pc_jja = dim_standardize(pcts(2,:),0)
+    sam_jja = arr_jja(0,:,:)
+    sam_jja = (/ regCoef(sam_pc_jja,arr_jja(lat|:,lon|:,time|:)) /)
+    psa1_jja = arr_jja(0,:,:)
+    psa1_jja = (/ regCoef(psa1_pc_jja,arr_jja(lat|:,lon|:,time|:)) /)
+    psa2_jja = arr_jja(0,:,:)
+    psa2_jja = (/ regCoef(psa2_pc_jja,arr_jja(lat|:,lon|:,time|:)) /)
+    if (sstreg_plot_flag.eq.0) then
+      sam_sst_jja = sst_jja(0,:,:)
+      sam_sst_jja = (/ regCoef(sam_pc_jja,sst_jja(lat|:,lon|:,time|:)) /)
+      psa1_sst_jja = sst_jja(0,:,:)
+      psa1_sst_jja = (/ regCoef(psa1_pc_jja,sst_jja(lat|:,lon|:,time|:)) /)
+      psa2_sst_jja = sst_jja(0,:,:)
+      psa2_sst_jja = (/ regCoef(psa2_pc_jja,sst_jja(lat|:,lon|:,time|:)) /)
+      delete(sst_jja)
+    end if
+    if (tasreg_plot_flag.eq.0) then
+      sam_tas_jja = tas_jja(0,:,:)
+      sam_tas_jja = (/ regCoef(sam_pc_jja,tas_jja(lat|:,lon|:,time|:)) /)
+      psa1_tas_jja = tas_jja(0,:,:)
+      psa1_tas_jja = (/ regCoef(psa1_pc_jja,tas_jja(lat|:,lon|:,time|:)) /)
+      psa2_tas_jja = tas_jja(0,:,:)
+      psa2_tas_jja = (/ regCoef(psa2_pc_jja,tas_jja(lat|:,lon|:,time|:)) /)
+      delete(tas_jja)
+    end if
+    if (prreg_plot_flag.eq.0) then
+      sam_pr_jja = pr_jja(0,:,:)
+      sam_pr_jja = (/ regCoef(sam_pc_jja,pr_jja(lat|:,lon|:,time|:)) /)
+      psa1_pr_jja = pr_jja(0,:,:)
+      psa1_pr_jja = (/ regCoef(psa1_pc_jja,pr_jja(lat|:,lon|:,time|:)) /)
+      psa2_pr_jja = pr_jja(0,:,:)
+      psa2_pr_jja = (/ regCoef(psa2_pc_jja,pr_jja(lat|:,lon|:,time|:)) /)
+      delete(pr_jja)
+    end if
+
+    if (.not.ismissing(sam_jja({-85},{5}))) then
+      if (sam_jja({-85},{5}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        sam_jja = sam_jja*-1.
+        sam_pc_jja = sam_pc_jja*-1.
+        if (sstreg_plot_flag.eq.0) then
+          sam_sst_jja = sam_sst_jja*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          sam_tas_jja = sam_tas_jja*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          sam_pr_jja = sam_pr_jja*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa1_jja({-62},{210}))) then
+      if (psa1_jja({-62},{210}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa1_jja = psa1_jja*-1.
+        psa1_pc_jja = psa1_pc_jja*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa1_sst_jja = psa1_sst_jja*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa1_tas_jja = psa1_tas_jja*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa1_pr_jja = psa1_pr_jja*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa2_jja({-60},{280}))) then
+      if (psa2_jja({-60},{280}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa2_jja = psa2_jja*-1.
+        psa2_pc_jja = psa2_pc_jja*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa2_sst_jja = psa2_sst_jja*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa2_tas_jja = psa2_tas_jja*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa2_pr_jja = psa2_pr_jja*-1.
+        end if
+      end if
+    end if
+    sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(sam_pc_jja),False)
+    if (sig_pcv(0)) then    ; if True then significant
+      sam_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+    else
+      sam_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+    end if
+    if (sig_pcv(1)) then    ; if True then significant
+      psa1_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+    else
+      psa1_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+    end if
+    if (sig_pcv(2)) then    ; if True then significant
+      psa2_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%*"
+    else
+      psa2_jja@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%"
+    end if
+    delete(sig_pcv)
+    copy_VarCoords(sam_pc_djf,sam_pc_jja)
+    copy_VarCoords(sam_pc_djf,psa1_pc_jja)
+    copy_VarCoords(sam_pc_djf,psa2_pc_jja)
+    delete([/evecv,pcts/])
+
+    evecv = eofunc(arr_son_CW({lat|:-20},lon|:,time|:),4,75)
+    pcts = eofunc_ts(arr_son_CW({lat|:-20},lon|:,time|:),evecv,False)
+    sam_pc_son = dim_standardize(pcts(0,:),0)
+    psa1_pc_son = dim_standardize(pcts(1,:),0)
+    psa2_pc_son = dim_standardize(pcts(2,:),0)
+    sam_son = arr_son(0,:,:)
+    sam_son = (/ regCoef(sam_pc_son,arr_son(lat|:,lon|:,time|:)) /)
+    psa1_son = arr_son(0,:,:)
+    psa1_son = (/ regCoef(psa1_pc_son,arr_son(lat|:,lon|:,time|:)) /)
+    psa2_son = arr_son(0,:,:)
+    psa2_son = (/ regCoef(psa2_pc_son,arr_son(lat|:,lon|:,time|:)) /)
+    if (sstreg_plot_flag.eq.0) then
+      sam_sst_son = sst_son(0,:,:)
+      sam_sst_son = (/ regCoef(sam_pc_son,sst_son(lat|:,lon|:,time|:)) /)
+      psa1_sst_son = sst_son(0,:,:)
+      psa1_sst_son = (/ regCoef(psa1_pc_son,sst_son(lat|:,lon|:,time|:)) /)
+      psa2_sst_son = sst_son(0,:,:)
+      psa2_sst_son = (/ regCoef(psa2_pc_son,sst_son(lat|:,lon|:,time|:)) /)
+      delete(sst_son)
+    end if
+    if (tasreg_plot_flag.eq.0) then
+      sam_tas_son = tas_son(0,:,:)
+      sam_tas_son = (/ regCoef(sam_pc_son,tas_son(lat|:,lon|:,time|:)) /)
+      psa1_tas_son = tas_son(0,:,:)
+      psa1_tas_son = (/ regCoef(psa1_pc_son,tas_son(lat|:,lon|:,time|:)) /)
+      psa2_tas_son = tas_son(0,:,:)
+      psa2_tas_son = (/ regCoef(psa2_pc_son,tas_son(lat|:,lon|:,time|:)) /)
+      delete(tas_son)
+    end if
+    if (prreg_plot_flag.eq.0) then
+      sam_pr_son = pr_son(0,:,:)
+      sam_pr_son = (/ regCoef(sam_pc_son,pr_son(lat|:,lon|:,time|:)) /)
+      psa1_pr_son = pr_son(0,:,:)
+      psa1_pr_son = (/ regCoef(psa1_pc_son,pr_son(lat|:,lon|:,time|:)) /)
+      psa2_pr_son = pr_son(0,:,:)
+      psa2_pr_son = (/ regCoef(psa2_pc_son,pr_son(lat|:,lon|:,time|:)) /)
+      delete(pr_son)
+    end if
+
+    if (.not.ismissing(sam_son({-85},{5}))) then
+      if (sam_son({-85},{5}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        sam_son = sam_son*-1.
+        sam_pc_son = sam_pc_son*-1.
+        if (sstreg_plot_flag.eq.0) then
+          sam_sst_son = sam_sst_son*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          sam_tas_son = sam_tas_son*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          sam_pr_son = sam_pr_son*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa1_son({-62},{210}))) then
+      if (psa1_son({-62},{210}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa1_son = psa1_son*-1.
+        psa1_pc_son = psa1_pc_son*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa1_sst_son = psa1_sst_son*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa1_tas_son = psa1_tas_son*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa1_pr_son = psa1_pr_son*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa2_son({-60},{280}))) then
+      if (psa2_son({-60},{280}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa2_son = psa2_son*-1.
+        psa2_pc_son = psa2_pc_son*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa2_sst_son = psa2_sst_son*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa2_tas_son = psa2_tas_son*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa2_pr_son = psa2_pr_son*-1.
+        end if
+      end if
+    end if
+    sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(sam_pc_son),False)
+    if (sig_pcv(0)) then    ; if True then significant
+      sam_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+    else
+      sam_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+    end if
+    if (sig_pcv(1)) then    ; if True then significant
+      psa1_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+    else
+      psa1_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+    end if
+    if (sig_pcv(2)) then    ; if True then significant
+      psa2_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%*"
+    else
+      psa2_son@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%"
+    end if
+    delete(sig_pcv)
+    copy_VarCoords(sam_pc_djf,sam_pc_son)
+    copy_VarCoords(sam_pc_djf,psa1_pc_son)
+    copy_VarCoords(sam_pc_djf,psa2_pc_son)
+    delete([/evecv,pcts/])
+
+    evecv = eofunc(arr_ann_CW({lat|:-20},lon|:,time|:),4,75)
+    pcts = eofunc_ts(arr_ann_CW({lat|:-20},lon|:,time|:),evecv,False)
+    sam_pc_ann = dim_standardize(pcts(0,:),0)
+    psa1_pc_ann = dim_standardize(pcts(1,:),0)
+    psa2_pc_ann = dim_standardize(pcts(2,:),0)
+    sam_ann = arr_ann(0,:,:)
+    sam_ann = (/ regCoef(sam_pc_ann,arr_ann(lat|:,lon|:,time|:)) /)
+    psa1_ann = arr_ann(0,:,:)
+    psa1_ann = (/ regCoef(psa1_pc_ann,arr_ann(lat|:,lon|:,time|:)) /)
+    psa2_ann = arr_ann(0,:,:)
+    psa2_ann = (/ regCoef(psa2_pc_ann,arr_ann(lat|:,lon|:,time|:)) /)
+    if (sstreg_plot_flag.eq.0) then
+      sam_sst_ann = sst_ann(0,:,:)
+      sam_sst_ann = (/ regCoef(sam_pc_ann,sst_ann(lat|:,lon|:,time|:)) /)
+      psa1_sst_ann = sst_ann(0,:,:)
+      psa1_sst_ann = (/ regCoef(psa1_pc_ann,sst_ann(lat|:,lon|:,time|:)) /)
+      psa2_sst_ann = sst_ann(0,:,:)
+      psa2_sst_ann = (/ regCoef(psa2_pc_ann,sst_ann(lat|:,lon|:,time|:)) /)
+      delete(sst_ann)
+    end if
+    if (tasreg_plot_flag.eq.0) then
+      sam_tas_ann = tas_ann(0,:,:)
+      sam_tas_ann = (/ regCoef(sam_pc_ann,tas_ann(lat|:,lon|:,time|:)) /)
+      psa1_tas_ann = tas_ann(0,:,:)
+      psa1_tas_ann = (/ regCoef(psa1_pc_ann,tas_ann(lat|:,lon|:,time|:)) /)
+      psa2_tas_ann = tas_ann(0,:,:)
+      psa2_tas_ann = (/ regCoef(psa2_pc_ann,tas_ann(lat|:,lon|:,time|:)) /)
+      delete(tas_ann)
+    end if
+    if (prreg_plot_flag.eq.0) then
+      sam_pr_ann = pr_ann(0,:,:)
+      sam_pr_ann = (/ regCoef(sam_pc_ann,pr_ann(lat|:,lon|:,time|:)) /)
+      psa1_pr_ann = pr_ann(0,:,:)
+      psa1_pr_ann = (/ regCoef(psa1_pc_ann,pr_ann(lat|:,lon|:,time|:)) /)
+      psa2_pr_ann = pr_ann(0,:,:)
+      psa2_pr_ann = (/ regCoef(psa2_pc_ann,pr_ann(lat|:,lon|:,time|:)) /)
+      delete(pr_ann)
+    end if
+
+    if (.not.ismissing(sam_ann({-85},{5}))) then
+      if (sam_ann({-85},{5}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        sam_ann = sam_ann*-1.
+        sam_pc_ann = sam_pc_ann*-1.
+        if (sstreg_plot_flag.eq.0) then
+          sam_sst_ann = sam_sst_ann*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          sam_tas_ann = sam_tas_ann*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          sam_pr_ann = sam_pr_ann*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa1_ann({-62},{210}))) then
+      if (psa1_ann({-62},{210}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa1_ann = psa1_ann*-1.
+        psa1_pc_ann = psa1_pc_ann*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa1_sst_ann = psa1_sst_ann*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa1_tas_ann = psa1_tas_ann*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa1_pr_ann = psa1_pr_ann*-1.
+        end if
+      end if
+    end if
+    if (.not.ismissing(psa2_ann({-60},{280}))) then
+      if (psa2_ann({-60},{280}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+        psa2_ann = psa2_ann*-1.
+        psa2_pc_ann = psa2_pc_ann*-1.
+        if (sstreg_plot_flag.eq.0) then
+          psa2_sst_ann = psa2_sst_ann*-1.
+        end if
+        if (tasreg_plot_flag.eq.0) then
+          psa2_tas_ann = psa2_tas_ann*-1.
+        end if
+        if (prreg_plot_flag.eq.0) then
+          psa2_pr_ann = psa2_pr_ann*-1.
+        end if
+      end if
+    end if
+    sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(sam_pc_ann),False)
+    if (sig_pcv(0)) then    ; if True then significant
+      sam_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+    else
+      sam_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+    end if
+    if (sig_pcv(1)) then    ; if True then significant
+      psa1_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+    else
+      psa1_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+    end if
+    if (sig_pcv(2)) then    ; if True then significant
+      psa2_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%*"
+    else
+      psa2_ann@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%"
+    end if
+    delete(sig_pcv)
+    copy_VarCoords(sam_pc_djf,sam_pc_ann)
+    copy_VarCoords(sam_pc_djf,psa1_pc_ann)
+    copy_VarCoords(sam_pc_djf,psa2_pc_ann)
+    delete([/evecv,pcts/])
+
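+;    When COMPUTE_MODES_MON is set, the same EOF/regression analysis is repeated on
+;    the full monthly anomalies, with the monthly PCs carrying the original time
+;    coordinate rather than the yearly TIME coordinate used for the seasonal PCs.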
+    if (COMPUTE_MODES_MON.eq."True") then
+      evecv = eofunc(arr_mon_CW({lat|:-20},lon|:,time|:),4,75)
+      pcts = eofunc_ts(arr_mon_CW({lat|:-20},lon|:,time|:),evecv,False)
+      sam_pc_mon = dim_standardize(pcts(0,:),0)
+      psa1_pc_mon = dim_standardize(pcts(1,:),0)
+      psa2_pc_mon = dim_standardize(pcts(2,:),0)
+      sam_mon = arr(0,:,:)
+      sam_mon = (/ regCoef(sam_pc_mon,arr(lat|:,lon|:,time|:)) /)
+      psa1_mon = arr(0,:,:)
+      psa1_mon = (/ regCoef(psa1_pc_mon,arr(lat|:,lon|:,time|:)) /)
+      psa2_mon = arr(0,:,:)
+      psa2_mon = (/ regCoef(psa2_pc_mon,arr(lat|:,lon|:,time|:)) /)
+      if (sstreg_plot_flag.eq.0) then
+        sam_sst_mon = sst(0,:,:)
+        sam_sst_mon = (/ regCoef(sam_pc_mon,sst(lat|:,lon|:,time|:)) /)
+        psa1_sst_mon = sst(0,:,:)
+        psa1_sst_mon = (/ regCoef(psa1_pc_mon,sst(lat|:,lon|:,time|:)) /)
+        psa2_sst_mon = sst(0,:,:)
+        psa2_sst_mon = (/ regCoef(psa2_pc_mon,sst(lat|:,lon|:,time|:)) /)
+        delete(sst)
+      end if
+      if (tasreg_plot_flag.eq.0) then
+        sam_tas_mon = tas(0,:,:)
+        sam_tas_mon = (/ regCoef(sam_pc_mon,tas(lat|:,lon|:,time|:)) /)
+        psa1_tas_mon = tas(0,:,:)
+        psa1_tas_mon = (/ regCoef(psa1_pc_mon,tas(lat|:,lon|:,time|:)) /)
+        psa2_tas_mon = tas(0,:,:)
+        psa2_tas_mon = (/ regCoef(psa2_pc_mon,tas(lat|:,lon|:,time|:)) /)
+        delete(tas)
+      end if
+      if (prreg_plot_flag.eq.0) then
+        sam_pr_mon = pr(0,:,:)
+        sam_pr_mon = (/ regCoef(sam_pc_mon,pr(lat|:,lon|:,time|:)) /)
+        psa1_pr_mon = pr(0,:,:)
+        psa1_pr_mon = (/ regCoef(psa1_pc_mon,pr(lat|:,lon|:,time|:)) /)
+        psa2_pr_mon = pr(0,:,:)
+        psa2_pr_mon = (/ regCoef(psa2_pc_mon,pr(lat|:,lon|:,time|:)) /)
+        delete(pr)
+      end if
+
+      if (.not.ismissing(sam_mon({-85},{5}))) then
+        if (sam_mon({-85},{5}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+          sam_mon = sam_mon*-1.
+          sam_pc_mon = sam_pc_mon*-1.
+          if (sstreg_plot_flag.eq.0) then
+            sam_sst_mon = sam_sst_mon*-1.
+          end if
+          if (tasreg_plot_flag.eq.0) then
+            sam_tas_mon = sam_tas_mon*-1.
+          end if
+          if (prreg_plot_flag.eq.0) then
+            sam_pr_mon = sam_pr_mon*-1.
+          end if
+        end if
+      end if
+      if (.not.ismissing(psa1_mon({-62},{210}))) then
+        if (psa1_mon({-62},{210}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+          psa1_mon = psa1_mon*-1.
+          psa1_pc_mon = psa1_pc_mon*-1.
+          if (sstreg_plot_flag.eq.0) then
+            psa1_sst_mon = psa1_sst_mon*-1.
+          end if
+          if (tasreg_plot_flag.eq.0) then
+            psa1_tas_mon = psa1_tas_mon*-1.
+          end if
+          if (prreg_plot_flag.eq.0) then
+            psa1_pr_mon = psa1_pr_mon*-1.
+          end if
+        end if
+      end if
+      if (.not.ismissing(psa2_mon({-60},{280}))) then
+        if (psa2_mon({-60},{280}).ge.0) then    ; arbitrary attempt to make all plots have the same sign..
+          psa2_mon = psa2_mon*-1.
+          psa2_pc_mon = psa2_pc_mon*-1.
+          if (sstreg_plot_flag.eq.0) then
+            psa2_sst_mon = psa2_sst_mon*-1.
+          end if
+          if (tasreg_plot_flag.eq.0) then
+            psa2_tas_mon = psa2_tas_mon*-1.
+          end if
+          if (prreg_plot_flag.eq.0) then
+            psa2_pr_mon = psa2_pr_mon*-1.
+          end if
+        end if
+      end if
+      sig_pcv = eofunc_north2(evecv@pcvar,dimsizes(sam_pc_mon),False)
+      if (sig_pcv(0)) then    ; if True then significant
+        sam_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%*"
+      else
+        sam_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(0)))+"%"
+      end if
+      if (sig_pcv(1)) then    ; if True then significant
+        psa1_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%*"
+      else
+        psa1_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(1)))+"%"
+      end if
+      if (sig_pcv(2)) then    ; if True then significant
+        psa2_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%*"
+      else
+        psa2_mon@pcvar = tofloat(sprintf("%4.1f", evecv@pcvar(2)))+"%"
+      end if
+      delete(sig_pcv)
+      sam_pc_mon!0 = "time"
+      sam_pc_mon&time = arr&time
+      psa1_pc_mon!0 = "time"
+      psa1_pc_mon&time = arr&time
+      psa2_pc_mon!0 = "time"
+      psa2_pc_mon&time = arr&time
+      delete([/evecv,pcts,arr_mon_CW,arr/])
+    end if
+    delete([/arr_djf_CW,arr_mam_CW,arr_jja_CW,arr_son_CW,arr_ann_CW/])
+    delete([/arr_djf,arr_mam,arr_jja,arr_son,arr_ann/])
+;------------------------------------------------------------------------------------------------------
+    if (sstreg_frame.eq.1.and.sstreg_plot_flag.eq.0) then    ; sstreg_frame = flag to create regressions .ps/.png files
+      sstreg_frame = 0
+    end if
+    if (tasreg_frame.eq.1.and.tasreg_plot_flag.eq.0) then    ; tasreg_frame = flag to create regressions .ps/.png files
+      tasreg_frame = 0
+    end if
+    if (prreg_frame.eq.1.and.prreg_plot_flag.eq.0) then    ; prreg_frame = flag to create regressions .ps/.png files
+      prreg_frame = 0
+    end if
+;------------------------------------------------------------------------------------------------------
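+;    When OUTPUT_DATA is set, the PC timeseries and spatial patterns are written to
+;    one netCDF file per dataset (the dataset name is sanitized for use in a file
+;    name); an existing file is opened for appending, otherwise it is created with
+;    provenance and CF-1.6 global attributes. The TS, TAS and PR regressions go to
+;    separate companion files below.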
+    if (OUTPUT_DATA.eq."True") then
+      modname = str_sub_str(names(ee)," ","_")
+      bc = (/"/","'","(",")"/)
+      do gg = 0,dimsizes(bc)-1
+        modname = str_sub_str(modname,bc(gg),"_")
+      end do
+      fn = getenv("OUTDIR")+modname+".cvdp_data.psl.sam_psa."+syear(ee)+"-"+eyear(ee)+".nc"
+      if (.not.isfilepresent2(fn)) then
+        z = addfile(fn,"c")
+        z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+        z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee)
+        if (OPT_CLIMO.eq."Full") then
+          z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)"
+        else
+          if (CLIMO_SYEAR.lt.0) then
+            z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+          else
+            z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+          end if
+        end if
+        z@Conventions = "CF-1.6"
+      else
+        z = addfile(fn,"w")
+      end if
+      z->sam_timeseries_djf = set_varAtts(sam_pc_djf,"SAM normalized principal component timeseries (DJF)","1","")
+      z->sam_timeseries_mam = set_varAtts(sam_pc_mam,"SAM normalized principal component timeseries (MAM)","1","")
+      z->sam_timeseries_jja = set_varAtts(sam_pc_jja,"SAM normalized principal component timeseries (JJA)","1","")
+      z->sam_timeseries_son = set_varAtts(sam_pc_son,"SAM normalized principal component timeseries (SON)","1","")
+      z->sam_timeseries_ann = set_varAtts(sam_pc_ann,"SAM normalized principal component timeseries (annual)","1","")
+
+      z->psa1_timeseries_djf = set_varAtts(psa1_pc_djf,"PSA1 normalized principal component timeseries (DJF)","1","")
+      z->psa1_timeseries_mam = set_varAtts(psa1_pc_mam,"PSA1 normalized principal component timeseries (MAM)","1","")
+      z->psa1_timeseries_jja = set_varAtts(psa1_pc_jja,"PSA1 normalized principal component timeseries (JJA)","1","")
+      z->psa1_timeseries_son = set_varAtts(psa1_pc_son,"PSA1 normalized principal component timeseries (SON)","1","")
+      z->psa1_timeseries_ann = set_varAtts(psa1_pc_ann,"PSA1 normalized principal component timeseries (annual)","1","")
+
+      z->psa2_timeseries_djf = set_varAtts(psa2_pc_djf,"PSA2 normalized principal component timeseries (DJF)","1","")
+      z->psa2_timeseries_mam = set_varAtts(psa2_pc_mam,"PSA2 normalized principal component timeseries (MAM)","1","")
+      z->psa2_timeseries_jja = set_varAtts(psa2_pc_jja,"PSA2 normalized principal component timeseries (JJA)","1","")
+      z->psa2_timeseries_son = set_varAtts(psa2_pc_son,"PSA2 normalized principal component timeseries (SON)","1","")
+      z->psa2_timeseries_ann = set_varAtts(psa2_pc_ann,"PSA2 normalized principal component timeseries (annual)","1","")
+
+      z->sam_djf = set_varAtts(sam_djf,"SAM spatial pattern (DJF)","","")
+      z->sam_mam = set_varAtts(sam_mam,"SAM spatial pattern (MAM)","","")
+      z->sam_jja = set_varAtts(sam_jja,"SAM spatial pattern (JJA)","","")
+      z->sam_son = set_varAtts(sam_son,"SAM spatial pattern (SON)","","")
+      z->sam_ann = set_varAtts(sam_ann,"SAM spatial pattern (annual)","","")
+
+      z->psa1_djf = set_varAtts(psa1_djf,"PSA1 spatial pattern (DJF)","","")
+      z->psa1_mam = set_varAtts(psa1_mam,"PSA1 spatial pattern (MAM)","","")
+      z->psa1_jja = set_varAtts(psa1_jja,"PSA1 spatial pattern (JJA)","","")
+      z->psa1_son = set_varAtts(psa1_son,"PSA1 spatial pattern (SON)","","")
+      z->psa1_ann = set_varAtts(psa1_ann,"PSA1 spatial pattern (annual)","","")
+
+      z->psa2_djf = set_varAtts(psa2_djf,"PSA2 spatial pattern (DJF)","","")
+      z->psa2_mam = set_varAtts(psa2_mam,"PSA2 spatial pattern (MAM)","","")
+      z->psa2_jja = set_varAtts(psa2_jja,"PSA2 spatial pattern (JJA)","","")
+      z->psa2_son = set_varAtts(psa2_son,"PSA2 spatial pattern (SON)","","")
+      z->psa2_ann = set_varAtts(psa2_ann,"PSA2 spatial pattern (annual)","","")
+
+      if (COMPUTE_MODES_MON.eq."True") then
+        z->sam_timeseries_mon = set_varAtts(sam_pc_mon,"SAM principal component timeseries (monthly)","","")
+        z->psa1_timeseries_mon = set_varAtts(psa1_pc_mon,"PSA1 principal component timeseries (monthly)","","")
+        z->psa2_timeseries_mon = set_varAtts(psa2_pc_mon,"PSA2 principal component timeseries (monthly)","","")
+        z->sam_mon = set_varAtts(sam_mon,"SAM spatial pattern (monthly)","","")
+        z->psa1_mon = set_varAtts(psa1_mon,"PSA1 spatial pattern (monthly)","","")
+        z->psa2_mon = set_varAtts(psa2_mon,"PSA2 spatial pattern (monthly)","","")
+      end if
+      delete(z)
+      delete([/modname,fn/])
+
(monthly)","","") + z->psa2_mon = set_varAtts(psa2_mon,"PSA2 spatial pattern (monthly)","","") + end if + delete(z) + delete([/modname,fn/]) + + if (sstreg_plot_flag.eq.0) then + modname = str_sub_str(names_ts(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.sam_psa.ts."+syear_ts(ee)+"-"+eyear_ts(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names_ts(ee)+" from "+syear_ts(ee)+"-"+eyear_ts(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear_ts(ee)+"-"+eyear_ts(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->sam_sst_regression_djf = set_varAtts(sam_sst_djf,"sst regression onto SAM principal component timeseries (DJF)","","") + z->sam_sst_regression_mam = set_varAtts(sam_sst_mam,"sst regression onto SAM principal component timeseries (MAM)","","") + z->sam_sst_regression_jja = set_varAtts(sam_sst_jja,"sst regression onto SAM principal component timeseries (JJA)","","") + z->sam_sst_regression_son = set_varAtts(sam_sst_son,"sst regression onto SAM principal component timeseries (SON)","","") + z->sam_sst_regression_ann = set_varAtts(sam_sst_ann,"sst regression onto SAM principal component timeseries (annual)","","") + + z->psa1_sst_regression_djf = set_varAtts(psa1_sst_djf,"sst regression onto PSA1 principal component timeseries (DJF)","","") + z->psa1_sst_regression_mam = set_varAtts(psa1_sst_mam,"sst regression onto PSA1 principal component timeseries (MAM)","","") + z->psa1_sst_regression_jja = set_varAtts(psa1_sst_jja,"sst regression onto PSA1 principal component timeseries (JJA)","","") + z->psa1_sst_regression_son = set_varAtts(psa1_sst_son,"sst regression onto PSA1 principal component timeseries (SON)","","") + z->psa1_sst_regression_ann = set_varAtts(psa1_sst_ann,"sst regression onto PSA1 principal component timeseries (annual)","","") + + z->psa2_sst_regression_djf = set_varAtts(psa2_sst_djf,"sst regression onto PSA2 principal component timeseries (DJF)","","") + z->psa2_sst_regression_mam = set_varAtts(psa2_sst_mam,"sst regression onto PSA2 principal component timeseries (MAM)","","") + z->psa2_sst_regression_jja = set_varAtts(psa2_sst_jja,"sst regression onto PSA2 principal component timeseries (JJA)","","") + z->psa2_sst_regression_son = set_varAtts(psa2_sst_son,"sst regression onto PSA2 principal component timeseries (SON)","","") + z->psa2_sst_regression_ann = set_varAtts(psa2_sst_ann,"sst regression onto PSA2 principal component timeseries (annual)","","") + if (COMPUTE_MODES_MON.eq."True") then + z->sam_sst_regression_mon = set_varAtts(sam_sst_mon,"sst regression onto SAM principal component timeseries (monthly)","","") + z->psa1_sst_regression_mon = set_varAtts(psa1_sst_mon,"sst regression onto PSA1 principal component timeseries (monthly)","","") + z->psa2_sst_regression_mon = set_varAtts(psa2_sst_mon,"sst regression onto PSA2 principal component timeseries (monthly)","","") + end if + delete(z) + 
+      if (tasreg_plot_flag.eq.0) then
+        modname = str_sub_str(names_tas(ee)," ","_")
+        bc = (/"/","'","(",")"/)
+        do gg = 0,dimsizes(bc)-1
+          modname = str_sub_str(modname,bc(gg),"_")
+        end do
+        fn = getenv("OUTDIR")+modname+".cvdp_data.psl.sam_psa.tas."+syear_tas(ee)+"-"+eyear_tas(ee)+".nc"
+        if (.not.isfilepresent2(fn)) then
+          z = addfile(fn,"c")
+          z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+          z@notes = "Data from "+names_tas(ee)+" from "+syear_tas(ee)+"-"+eyear_tas(ee)
+          if (OPT_CLIMO.eq."Full") then
+            z@climatology = syear_tas(ee)+"-"+eyear_tas(ee)+" climatology removed prior to all calculations (other than means)"
+          else
+            if (CLIMO_SYEAR.lt.0) then
+              z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+            else
+              z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+            end if
+          end if
+          z@Conventions = "CF-1.6"
+        else
+          z = addfile(fn,"w")
+        end if
+        z->sam_tas_regression_djf = set_varAtts(sam_tas_djf,"tas regression onto SAM principal component timeseries (DJF)","","")
+        z->sam_tas_regression_mam = set_varAtts(sam_tas_mam,"tas regression onto SAM principal component timeseries (MAM)","","")
+        z->sam_tas_regression_jja = set_varAtts(sam_tas_jja,"tas regression onto SAM principal component timeseries (JJA)","","")
+        z->sam_tas_regression_son = set_varAtts(sam_tas_son,"tas regression onto SAM principal component timeseries (SON)","","")
+        z->sam_tas_regression_ann = set_varAtts(sam_tas_ann,"tas regression onto SAM principal component timeseries (annual)","","")
+
+        z->psa1_tas_regression_djf = set_varAtts(psa1_tas_djf,"tas regression onto PSA1 principal component timeseries (DJF)","","")
+        z->psa1_tas_regression_mam = set_varAtts(psa1_tas_mam,"tas regression onto PSA1 principal component timeseries (MAM)","","")
+        z->psa1_tas_regression_jja = set_varAtts(psa1_tas_jja,"tas regression onto PSA1 principal component timeseries (JJA)","","")
+        z->psa1_tas_regression_son = set_varAtts(psa1_tas_son,"tas regression onto PSA1 principal component timeseries (SON)","","")
+        z->psa1_tas_regression_ann = set_varAtts(psa1_tas_ann,"tas regression onto PSA1 principal component timeseries (annual)","","")
+
+        z->psa2_tas_regression_djf = set_varAtts(psa2_tas_djf,"tas regression onto PSA2 principal component timeseries (DJF)","","")
+        z->psa2_tas_regression_mam = set_varAtts(psa2_tas_mam,"tas regression onto PSA2 principal component timeseries (MAM)","","")
+        z->psa2_tas_regression_jja = set_varAtts(psa2_tas_jja,"tas regression onto PSA2 principal component timeseries (JJA)","","")
+        z->psa2_tas_regression_son = set_varAtts(psa2_tas_son,"tas regression onto PSA2 principal component timeseries (SON)","","")
+        z->psa2_tas_regression_ann = set_varAtts(psa2_tas_ann,"tas regression onto PSA2 principal component timeseries (annual)","","")
+        if (COMPUTE_MODES_MON.eq."True") then
+          z->sam_tas_regression_mon = set_varAtts(sam_tas_mon,"tas regression onto SAM principal component timeseries (monthly)","","")
+          z->psa1_tas_regression_mon = set_varAtts(psa1_tas_mon,"tas regression onto PSA1 principal component timeseries (monthly)","","")
+          z->psa2_tas_regression_mon = set_varAtts(psa2_tas_mon,"tas regression onto PSA2 principal component timeseries (monthly)","","")
+        end if
+        delete(z)
+        delete([/modname,fn/])
+      end if
","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.sam_psa.pr."+syear_pr(ee)+"-"+eyear_pr(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names_pr(ee)+" from "+syear_pr(ee)+"-"+eyear_pr(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear_pr(ee)+"-"+eyear_pr(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->sam_pr_regression_djf = set_varAtts(sam_pr_djf,"pr regression onto SAM principal component timeseries (DJF)","","") + z->sam_pr_regression_mam = set_varAtts(sam_pr_mam,"pr regression onto SAM principal component timeseries (MAM)","","") + z->sam_pr_regression_jja = set_varAtts(sam_pr_jja,"pr regression onto SAM principal component timeseries (JJA)","","") + z->sam_pr_regression_son = set_varAtts(sam_pr_son,"pr regression onto SAM principal component timeseries (SON)","","") + z->sam_pr_regression_ann = set_varAtts(sam_pr_ann,"pr regression onto SAM principal component timeseries (annual)","","") + + z->psa1_pr_regression_djf = set_varAtts(psa1_pr_djf,"pr regression onto PSA1 principal component timeseries (DJF)","","") + z->psa1_pr_regression_mam = set_varAtts(psa1_pr_mam,"pr regression onto PSA1 principal component timeseries (MAM)","","") + z->psa1_pr_regression_jja = set_varAtts(psa1_pr_jja,"pr regression onto PSA1 principal component timeseries (JJA)","","") + z->psa1_pr_regression_son = set_varAtts(psa1_pr_son,"pr regression onto PSA1 principal component timeseries (SON)","","") + z->psa1_pr_regression_ann = set_varAtts(psa1_pr_ann,"pr regression onto PSA1 principal component timeseries (annual)","","") + + z->psa2_pr_regression_djf = set_varAtts(psa2_pr_djf,"pr regression onto PSA2 principal component timeseries (DJF)","","") + z->psa2_pr_regression_mam = set_varAtts(psa2_pr_mam,"pr regression onto PSA2 principal component timeseries (MAM)","","") + z->psa2_pr_regression_jja = set_varAtts(psa2_pr_jja,"pr regression onto PSA2 principal component timeseries (JJA)","","") + z->psa2_pr_regression_son = set_varAtts(psa2_pr_son,"pr regression onto PSA2 principal component timeseries (SON)","","") + z->psa2_pr_regression_ann = set_varAtts(psa2_pr_ann,"pr regression onto PSA2 principal component timeseries (annual)","","") + if (COMPUTE_MODES_MON.eq."True") then + z->sam_pr_regression_mon = set_varAtts(sam_pr_mon,"pr regression onto SAM principal component timeseries (monthly)","","") + z->psa1_pr_regression_mon = set_varAtts(psa1_pr_mon,"pr regression onto PSA1 principal component timeseries (monthly)","","") + z->psa2_pr_regression_mon = set_varAtts(psa2_pr_mon,"pr regression onto PSA2 principal component timeseries (monthly)","","") + end if + delete(z) + delete([/modname,fn/]) + end if + end if +;======================================================================== + res = True + res@mpGeophysicalLineColor = "gray42" + res@mpGeophysicalLineThicknessF = 2. 
+    res = True
+    res@mpGeophysicalLineColor = "gray42"
+    res@mpGeophysicalLineThicknessF = 2.
+    res@mpGridAndLimbOn = False
+    res@mpFillOn = False
+    res@mpOutlineOn = True
+    res@gsnDraw = False
+    res@gsnFrame = False
+    res@cnLevelSelectionMode = "ExplicitLevels"
+    res@cnLineLabelsOn = False
+    res@cnFillOn = True
+    res@cnLinesOn = False
+    res@lbLabelBarOn = False
+
+    res@gsnLeftStringOrthogonalPosF = -0.03
+    res@gsnLeftStringParallelPosF = .005
+    res@gsnRightStringOrthogonalPosF = -0.03
+    res@gsnRightStringParallelPosF = 0.96
+    res@gsnRightString = ""
+    res@gsnLeftString = ""
+    if (nsim.le.5) then
+      res@gsnLeftStringFontHeightF = 0.018
+      res@gsnCenterStringFontHeightF = 0.022
+      res@gsnRightStringFontHeightF = 0.018
+    else
+      res@gsnLeftStringFontHeightF = 0.024
+      res@gsnCenterStringFontHeightF = 0.028
+      res@gsnRightStringFontHeightF = 0.024
+    end if
+    res@gsnPolar = "SH"
+    res@mpMaxLatF = -20.
+    res@mpCenterLonF = 0.
+
+    res@cnLevels = (/-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7./)
+    res@gsnLeftString = syear(ee)+"-"+eyear(ee)
+    res@gsnCenterString = names(ee)
+
+    res4 = res    ; res4 = pr regression resources
+    if (COLORMAP.eq.0) then
+      res4@cnLevels := fspan(-.7,.7,15)
+    else
+      res4@cnLevels := fspan(-.5,.5,11)
+    end if
+
+    res2 = True
+    res2@gsnDraw = False
+    res2@gsnFrame = False
+    res2@cnLevelSelectionMode = "ExplicitLevels"
+    res2@cnLevels = res@cnLevels
+
+    res2@cnLineLabelsOn = False
+    res2@cnFillOn = True
+    res2@cnLinesOn = False
+    res2@cnFillMode = "AreaFill"
+    res2@lbLabelBarOn = False
+    res2@cnInfoLabelOn = False
+    res2@gsnRightString = ""
+    res2@gsnLeftString = ""
+    res2@gsnCenterString = ""
+    res2@gsnAddCyclic = True
+
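+;    For the pattern correlation table the full SAM/PSA patterns are stored on the
+;    grid of the first dataset (the observations, when obs_psl is present); later
+;    datasets are bilinearly interpolated to that grid with linint2.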
+    if (isfilepresent2("obs_psl").and.ee.eq.0) then    ; for pattern correlation table. Save entire lat/lon array
+      patcor_sam_djf = new((/nsim,dimsizes(sam_djf&lat),dimsizes(sam_djf&lon)/),typeof(sam_djf))
+      patcor_sam_djf!1 = "lat"
+      patcor_sam_djf&lat = sam_djf&lat
+      patcor_sam_djf!2 = "lon"
+      patcor_sam_djf&lon = sam_djf&lon
+      patcor_sam_jja = patcor_sam_djf
+      patcor_sam_ann = patcor_sam_djf
+      patcor_psa1_djf = patcor_sam_djf
+      patcor_psa1_jja = patcor_sam_djf
+      patcor_psa1_ann = patcor_sam_djf
+      patcor_psa2_djf = patcor_sam_djf
+      patcor_psa2_jja = patcor_sam_djf
+      patcor_psa2_ann = patcor_sam_djf
+      patcor_sam_djf(ee,:,:) = (/ sam_djf /)
+      patcor_sam_jja(ee,:,:) = (/ sam_jja /)
+      patcor_sam_ann(ee,:,:) = (/ sam_ann /)
+      patcor_psa1_djf(ee,:,:) = (/ psa1_djf /)
+      patcor_psa1_jja(ee,:,:) = (/ psa1_jja /)
+      patcor_psa1_ann(ee,:,:) = (/ psa1_ann /)
+      patcor_psa2_djf(ee,:,:) = (/ psa2_djf /)
+      patcor_psa2_jja(ee,:,:) = (/ psa2_jja /)
+      patcor_psa2_ann(ee,:,:) = (/ psa2_ann /)
+    end if
+    if (isfilepresent2("obs_psl").and.ee.ge.1.and.isvar("patcor_sam_djf")) then
+      patcor_sam_djf(ee,:,:) = (/ totype(linint2(sam_djf&lon,sam_djf&lat,sam_djf,True,patcor_sam_djf&lon,patcor_sam_djf&lat,0),typeof(patcor_sam_djf)) /)
+      patcor_sam_jja(ee,:,:) = (/ totype(linint2(sam_jja&lon,sam_jja&lat,sam_jja,True,patcor_sam_jja&lon,patcor_sam_jja&lat,0),typeof(patcor_sam_jja)) /)
+      patcor_sam_ann(ee,:,:) = (/ totype(linint2(sam_ann&lon,sam_ann&lat,sam_ann,True,patcor_sam_ann&lon,patcor_sam_ann&lat,0),typeof(patcor_sam_ann)) /)
+
+      patcor_psa1_djf(ee,:,:) = (/ totype(linint2(psa1_djf&lon,psa1_djf&lat,psa1_djf,True,patcor_psa1_djf&lon,patcor_psa1_djf&lat,0),typeof(patcor_psa1_djf)) /)
+      patcor_psa1_jja(ee,:,:) = (/ totype(linint2(psa1_jja&lon,psa1_jja&lat,psa1_jja,True,patcor_psa1_jja&lon,patcor_psa1_jja&lat,0),typeof(patcor_psa1_jja)) /)
+      patcor_psa1_ann(ee,:,:) = (/ totype(linint2(psa1_ann&lon,psa1_ann&lat,psa1_ann,True,patcor_psa1_ann&lon,patcor_psa1_ann&lat,0),typeof(patcor_psa1_ann)) /)
+
+      patcor_psa2_djf(ee,:,:) = (/ totype(linint2(psa2_djf&lon,psa2_djf&lat,psa2_djf,True,patcor_psa2_djf&lon,patcor_psa2_djf&lat,0),typeof(patcor_psa2_djf)) /)
+      patcor_psa2_jja(ee,:,:) = (/ totype(linint2(psa2_jja&lon,psa2_jja&lat,psa2_jja,True,patcor_psa2_jja&lon,patcor_psa2_jja&lat,0),typeof(patcor_psa2_jja)) /)
+      patcor_psa2_ann(ee,:,:) = (/ totype(linint2(psa2_ann&lon,psa2_ann&lat,psa2_ann,True,patcor_psa2_ann&lon,patcor_psa2_ann&lat,0),typeof(patcor_psa2_ann)) /)
+    end if
+
+    res@gsnLeftString = syear(ee)+"-"+eyear(ee)
+    res@gsnCenterString = names(ee)
+    res@gsnRightString = sam_djf@pcvar
+    map_sam_djf(ee) = gsn_csm_contour_map_polar(wks_sam,sam_djf,res)
+    res@gsnRightString = sam_mam@pcvar
+    map_sam_mam(ee) = gsn_csm_contour_map_polar(wks_sam,sam_mam,res)
+    res@gsnRightString = sam_jja@pcvar
+    map_sam_jja(ee) = gsn_csm_contour_map_polar(wks_sam,sam_jja,res)
+    res@gsnRightString = sam_son@pcvar
+    map_sam_son(ee) = gsn_csm_contour_map_polar(wks_sam,sam_son,res)
+    res@gsnRightString = sam_ann@pcvar
+    map_sam_ann(ee) = gsn_csm_contour_map_polar(wks_sam,sam_ann,res)
+    delete([/sam_djf,sam_mam,sam_jja,sam_son,sam_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      res@gsnRightString = sam_mon@pcvar
+      map_sam_mon(ee) = gsn_csm_contour_map_polar(wks_sam,sam_mon,res)
+      delete([/sam_mon/])
+    end if
+
+    res@cnLevels = (/-4,-3,-2.5,-2,-1.5,-1,-0.5,0,0.5,1,1.5,2,2.5,3,4/)
+    res@gsnRightString = psa1_djf@pcvar
+    map_psa1_djf(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_djf,res)
+    res@gsnRightString = psa1_mam@pcvar
+    map_psa1_mam(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_mam,res)
+    res@gsnRightString = psa1_jja@pcvar
+    map_psa1_jja(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_jja,res)
+    res@gsnRightString = psa1_son@pcvar
+    map_psa1_son(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_son,res)
+    res@gsnRightString = psa1_ann@pcvar
+    map_psa1_ann(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_ann,res)
+    delete([/psa1_djf,psa1_mam,psa1_jja,psa1_son,psa1_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      res@gsnRightString = psa1_mon@pcvar
+      map_psa1_mon(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_mon,res)
+      delete([/psa1_mon/])
+    end if
+
+    res@gsnRightString = psa2_djf@pcvar
+    map_psa2_djf(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_djf,res)
+    res@gsnRightString = psa2_mam@pcvar
+    map_psa2_mam(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_mam,res)
+    res@gsnRightString = psa2_jja@pcvar
+    map_psa2_jja(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_jja,res)
+    res@gsnRightString = psa2_son@pcvar
+    map_psa2_son(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_son,res)
+    res@gsnRightString = psa2_ann@pcvar
+    map_psa2_ann(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_ann,res)
+    delete([/psa2_djf,psa2_mam,psa2_jja,psa2_son,psa2_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      res@gsnRightString = psa2_mon@pcvar
+      map_psa2_mon(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_mon,res)
+      delete([/psa2_mon/])
+    end if
+
+    if (sstreg_plot_flag.eq.0) then
+      res@cnLevels := fspan(-.7,.7,15)
+      if (tasreg_plot_flag.eq.0) then
+        if (names_ts(ee).eq.names_tas(ee)) then
+          res@gsnCenterString = names_ts(ee)
+        else
+          res@gsnCenterString = names_ts(ee)+" / "+names_tas(ee)
+        end if
+      else
+        res@gsnCenterString = names_ts(ee)
+      end if
+      res@gsnRightString = ""
+      reg_sam_djf(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_djf,res)
+      reg_sam_mam(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_mam,res)
+      reg_sam_jja(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_jja,res)
+      reg_sam_son(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_son,res)
+      reg_sam_ann(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_ann,res)
+      delete([/sam_sst_djf,sam_sst_mam,sam_sst_jja,sam_sst_son,sam_sst_ann/])
+      if (tasreg_plot_flag.eq.0) then
+        o_djf = gsn_csm_contour(wks_sam,sam_tas_djf,res2)
+        o_mam = gsn_csm_contour(wks_sam,sam_tas_mam,res2)
+        o_jja = gsn_csm_contour(wks_sam,sam_tas_jja,res2)
+        o_son = gsn_csm_contour(wks_sam,sam_tas_son,res2)
+        o_ann = gsn_csm_contour(wks_sam,sam_tas_ann,res2)
+        delete([/sam_tas_djf,sam_tas_mam,sam_tas_jja,sam_tas_son,sam_tas_ann/])
+        overlay(reg_sam_djf(ee),o_djf)
+        overlay(reg_sam_mam(ee),o_mam)
+        overlay(reg_sam_jja(ee),o_jja)
+        overlay(reg_sam_son(ee),o_son)
+        overlay(reg_sam_ann(ee),o_ann)
+        delete([/o_djf,o_mam,o_jja,o_son,o_ann/])
+      end if
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_sam_mon(ee) = gsn_csm_contour_map_polar(wks_sam,sam_sst_mon,res)
+        delete([/sam_sst_mon/])
+        if (tasreg_plot_flag.eq.0) then
+          o_mon = gsn_csm_contour(wks_sam,sam_tas_mon,res2)
+          overlay(reg_sam_mon(ee),o_mon)
+          delete([/o_mon,sam_tas_mon/])
+        end if
+      end if
+
+      reg_psa1_djf(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_djf,res)
+      reg_psa1_mam(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_mam,res)
+      reg_psa1_jja(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_jja,res)
+      reg_psa1_son(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_son,res)
+      reg_psa1_ann(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_ann,res)
+      delete([/psa1_sst_djf,psa1_sst_mam,psa1_sst_jja,psa1_sst_son,psa1_sst_ann/])
+      if (tasreg_plot_flag.eq.0) then
+        o_djf = gsn_csm_contour(wks_psa1,psa1_tas_djf,res2)
+        o_mam = gsn_csm_contour(wks_psa1,psa1_tas_mam,res2)
+        o_jja = gsn_csm_contour(wks_psa1,psa1_tas_jja,res2)
+        o_son = gsn_csm_contour(wks_psa1,psa1_tas_son,res2)
+        o_ann = gsn_csm_contour(wks_psa1,psa1_tas_ann,res2)
+        delete([/psa1_tas_djf,psa1_tas_mam,psa1_tas_jja,psa1_tas_son,psa1_tas_ann/])
+        overlay(reg_psa1_djf(ee),o_djf)
+        overlay(reg_psa1_mam(ee),o_mam)
+        overlay(reg_psa1_jja(ee),o_jja)
+        overlay(reg_psa1_son(ee),o_son)
+        overlay(reg_psa1_ann(ee),o_ann)
+        delete([/o_djf,o_mam,o_jja,o_son,o_ann/])
+      end if
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_psa1_mon(ee) = gsn_csm_contour_map_polar(wks_psa1,psa1_sst_mon,res)
+        delete([/psa1_sst_mon/])
+        if (tasreg_plot_flag.eq.0) then
+          o_mon = gsn_csm_contour(wks_psa1,psa1_tas_mon,res2)
+          overlay(reg_psa1_mon(ee),o_mon)
+          delete([/o_mon,psa1_tas_mon/])
+        end if
+      end if
+
+      reg_psa2_djf(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_djf,res)
+      reg_psa2_mam(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_mam,res)
+      reg_psa2_jja(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_jja,res)
+      reg_psa2_son(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_son,res)
+      reg_psa2_ann(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_ann,res)
+      delete([/psa2_sst_djf,psa2_sst_mam,psa2_sst_jja,psa2_sst_son,psa2_sst_ann/])
+      if (tasreg_plot_flag.eq.0) then
+        o_djf = gsn_csm_contour(wks_psa2,psa2_tas_djf,res2)
+        o_mam = gsn_csm_contour(wks_psa2,psa2_tas_mam,res2)
+        o_jja = gsn_csm_contour(wks_psa2,psa2_tas_jja,res2)
+        o_son = gsn_csm_contour(wks_psa2,psa2_tas_son,res2)
+        o_ann = gsn_csm_contour(wks_psa2,psa2_tas_ann,res2)
+        delete([/psa2_tas_djf,psa2_tas_mam,psa2_tas_jja,psa2_tas_son,psa2_tas_ann/])
+        overlay(reg_psa2_djf(ee),o_djf)
+        overlay(reg_psa2_mam(ee),o_mam)
+        overlay(reg_psa2_jja(ee),o_jja)
+        overlay(reg_psa2_son(ee),o_son)
+        overlay(reg_psa2_ann(ee),o_ann)
+        delete([/o_djf,o_mam,o_jja,o_son,o_ann/])
+      end if
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_psa2_mon(ee) = gsn_csm_contour_map_polar(wks_psa2,psa2_sst_mon,res)
+        delete([/psa2_sst_mon/])
+        if (tasreg_plot_flag.eq.0) then
+          o_mon = gsn_csm_contour(wks_psa2,psa2_tas_mon,res2)
+          overlay(reg_psa2_mon(ee),o_mon)
+          delete([/o_mon,psa2_tas_mon/])
+        end if
+      end if
+    end if
+
+    if (prreg_plot_flag.eq.0) then    ; PR regressions
+      res4@gsnRightString = ""
+      res4@gsnCenterString = names_pr(ee)
+      reg_sam_pr_djf(ee) = gsn_csm_contour_map_polar(wks_sam_pr,sam_pr_djf,res4)
+      reg_sam_pr_mam(ee) = gsn_csm_contour_map_polar(wks_sam_pr,sam_pr_mam,res4)
+      reg_sam_pr_jja(ee) = gsn_csm_contour_map_polar(wks_sam_pr,sam_pr_jja,res4)
+      reg_sam_pr_son(ee) = gsn_csm_contour_map_polar(wks_sam_pr,sam_pr_son,res4)
+      reg_sam_pr_ann(ee) = gsn_csm_contour_map_polar(wks_sam_pr,sam_pr_ann,res4)
+      delete([/sam_pr_djf,sam_pr_mam,sam_pr_jja,sam_pr_son,sam_pr_ann/])
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_sam_pr_mon(ee) = gsn_csm_contour_map_polar(wks_sam_pr,sam_pr_mon,res4)
+        delete([/sam_pr_mon/])
+      end if
+
+      reg_psa1_pr_djf(ee) = gsn_csm_contour_map_polar(wks_psa1_pr,psa1_pr_djf,res4)
+      reg_psa1_pr_mam(ee) = gsn_csm_contour_map_polar(wks_psa1_pr,psa1_pr_mam,res4)
+      reg_psa1_pr_jja(ee) = gsn_csm_contour_map_polar(wks_psa1_pr,psa1_pr_jja,res4)
+      reg_psa1_pr_son(ee) = gsn_csm_contour_map_polar(wks_psa1_pr,psa1_pr_son,res4)
+      reg_psa1_pr_ann(ee) = gsn_csm_contour_map_polar(wks_psa1_pr,psa1_pr_ann,res4)
+      delete([/psa1_pr_djf,psa1_pr_mam,psa1_pr_jja,psa1_pr_son,psa1_pr_ann/])
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_psa1_pr_mon(ee) = gsn_csm_contour_map_polar(wks_psa1_pr,psa1_pr_mon,res4)
+        delete([/psa1_pr_mon/])
+      end if
+
+      reg_psa2_pr_djf(ee) = gsn_csm_contour_map_polar(wks_psa2_pr,psa2_pr_djf,res4)
+      reg_psa2_pr_mam(ee) = gsn_csm_contour_map_polar(wks_psa2_pr,psa2_pr_mam,res4)
+      reg_psa2_pr_jja(ee) = gsn_csm_contour_map_polar(wks_psa2_pr,psa2_pr_jja,res4)
+      reg_psa2_pr_son(ee) = gsn_csm_contour_map_polar(wks_psa2_pr,psa2_pr_son,res4)
+      reg_psa2_pr_ann(ee) = gsn_csm_contour_map_polar(wks_psa2_pr,psa2_pr_ann,res4)
+      delete([/psa2_pr_djf,psa2_pr_mam,psa2_pr_jja,psa2_pr_son,psa2_pr_ann/])
+      if (COMPUTE_MODES_MON.eq."True") then
+        reg_psa2_pr_mon(ee) = gsn_csm_contour_map_polar(wks_psa2_pr,psa2_pr_mon,res4)
+        delete([/psa2_pr_mon/])
+      end if
+    end if
+
+    xyres = True
+    xyres@gsnDraw = False
+    xyres@gsnFrame = False
+    xyres@gsnXYBarChart = False
+    xyres@gsnYRefLine = 0.0
+    xyres@gsnYRefLineColor = "gray42"
+    xyres@gsnAboveYRefLineColor = 185
+    xyres@gsnBelowYRefLineColor = 35
+    if (wks_type.eq."png") then
+      xyres@xyLineThicknessF = .5
+    else
+      xyres@xyLineThicknessF = .2
+    end if
+    xyres@xyLineColor = "gray52"
+    xyres@tiYAxisString = ""
+    xyres@tiXAxisString = ""
+    if (nsim.le.5) then
+      xyres@tmXBLabelFontHeightF = 0.0125
+      xyres@tmYLLabelFontHeightF = 0.0125
+      xyres@gsnStringFontHeightF = 0.017
+    else
+      xyres@tmXBLabelFontHeightF = 0.018
+      xyres@tmYLLabelFontHeightF = 0.018
+      xyres@gsnStringFontHeightF = 0.024
+    end if
+    xyres@gsnCenterStringOrthogonalPosF = 0.025
+    xyres@vpXF = 0.05
+    xyres@vpHeightF = 0.15
+    if (SCALE_TIMESERIES.eq."True") then
+      xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max)
+    else
+      xyres@vpWidthF = 0.9
+    end if
+    xyres@gsnLeftString = ""
+    xyres@gsnRightString = ""
+    xyres@trXMinF = syear(ee)-.5
+    xyres@trXMaxF = eyear(ee)+1.5
+
+    xyres@gsnCenterString = names(ee)
+
+    xyresmon = xyres
+    xyresmon@gsnXYBarChart = False
+    xyresmon@xyLineThicknessF = .1
+
+    xy_sam_djf(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_djf)),sam_pc_djf,xyres)    ; use standardized timeseries
+    xy_sam_mam(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_mam)),sam_pc_mam,xyres)    ; use standardized timeseries
+    xy_sam_jja(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_jja)),sam_pc_jja,xyres)    ; use standardized timeseries
+    xy_sam_son(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_son)),sam_pc_son,xyres)    ; use standardized timeseries
+    xy_sam_ann(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee),dimsizes(sam_pc_ann)),sam_pc_ann,xyres)    ; use standardized timeseries
+    delete([/sam_pc_djf,sam_pc_mam,sam_pc_jja,sam_pc_son,sam_pc_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      xy_sam_mon(ee) = gsn_csm_xy(wks_sam_ts,fspan(syear(ee),eyear(ee)+.91667,dimsizes(sam_pc_mon)),sam_pc_mon,xyresmon)    ; use standardized timeseries
+      delete([/sam_pc_mon/])
+    end if
+
+    xy_psa1_djf(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_djf)),psa1_pc_djf,xyres)    ; use standardized timeseries
+    xy_psa1_mam(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_mam)),psa1_pc_mam,xyres)    ; use standardized timeseries
+    xy_psa1_jja(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_jja)),psa1_pc_jja,xyres)    ; use standardized timeseries
+    xy_psa1_son(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_son)),psa1_pc_son,xyres)    ; use standardized timeseries
+    xy_psa1_ann(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee),dimsizes(psa1_pc_ann)),psa1_pc_ann,xyres)    ; use standardized timeseries
+    delete([/psa1_pc_djf,psa1_pc_mam,psa1_pc_jja,psa1_pc_son,psa1_pc_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      xy_psa1_mon(ee) = gsn_csm_xy(wks_psa1_ts,fspan(syear(ee),eyear(ee)+.91667,dimsizes(psa1_pc_mon)),psa1_pc_mon,xyresmon)    ; use standardized timeseries
+      delete([/psa1_pc_mon/])
+    end if
+
+    xy_psa2_djf(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_djf)),psa2_pc_djf,xyres)    ; use standardized timeseries
+    xy_psa2_mam(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_mam)),psa2_pc_mam,xyres)    ; use standardized timeseries
+    xy_psa2_jja(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_jja)),psa2_pc_jja,xyres)    ; use standardized timeseries
+    xy_psa2_son(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_son)),psa2_pc_son,xyres)    ; use standardized timeseries
+    xy_psa2_ann(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee),dimsizes(psa2_pc_ann)),psa2_pc_ann,xyres)    ; use standardized timeseries
+    delete([/psa2_pc_djf,psa2_pc_mam,psa2_pc_jja,psa2_pc_son,psa2_pc_ann/])
+    if (COMPUTE_MODES_MON.eq."True") then
+      xy_psa2_mon(ee) = gsn_csm_xy(wks_psa2_ts,fspan(syear(ee),eyear(ee)+.91667,dimsizes(psa2_pc_mon)),psa2_pc_mon,xyresmon)    ; use standardized timeseries
+      delete([/psa2_pc_mon/])
+    end if
+
+    delete(sstreg_plot_flag)
+  end do
+
+  if (isvar("clim_syear")) then
+    delete(clim_syear)
+  end if
+  if (isvar("clim_eyear")) then
+    delete(clim_eyear)
+  end if
+
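+;    Metrics table: each model's DJF SAM pattern south of 20S is compared against
+;    observations using a centered, cos(lat)-weighted pattern correlation and a
+;    cos(lat)-weighted spatial RMSD, written as "r/rmsd" columns; 9.99 marks an
+;    all-missing field.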
Model(s)",""/) + finpr_sam_djf = "SAM (DJF) " + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor_sam_djf(hh,{:-20},:)))) then + finpr_sam_djf = finpr_sam_djf+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr_sam_djf = finpr_sam_djf+sprintf(format2,(pattern_cor(patcor_sam_djf(0,{:-20},:),patcor_sam_djf(hh,{:-20},:),clat({:-20}),0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor_sam_djf(0,{:-20},:))),ndtooned(NewCosWeight(patcor_sam_djf(hh,{:-20},:)))))) + end if + end do +; + if (dimsizes(tochar(line4)).ge.8190) then ; system or fortran compiler limit + print("Metrics table warning: Not creating metrics table as size of comparison results in a invalid ascii row size.") + else + write_table(getenv("OUTDIR")+"metrics.psl.sam_psa.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.sam_psa.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.sam_psa.txt","a",[/line4/],"%s") + write_table(getenv("OUTDIR")+"metrics.psl.sam_psa.txt","a",[/finpr_sam_djf/],"%s") + end if + delete([/line3,line4,format2,format3,nchar,ntc,clat,patcor_sam_djf,patcor_sam_jja,patcor_sam_ann/]) + delete([/patcor_psa1_djf,patcor_psa1_jja,patcor_psa1_ann,patcor_psa2_djf,patcor_psa2_jja,patcor_psa2_ann,dimY,ntb,header/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.55 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "SAM (DJF)" + gsn_panel2(wks_sam,map_sam_djf,(/nrow,ncol/),panres) + delete(map_sam_djf) + panres@txString = "SAM (MAM)" + gsn_panel2(wks_sam,map_sam_mam,(/nrow,ncol/),panres) + delete(map_sam_mam) + panres@txString = "SAM (JJA)" + gsn_panel2(wks_sam,map_sam_jja,(/nrow,ncol/),panres) + delete(map_sam_jja) + panres@txString = "SAM (SON)" + gsn_panel2(wks_sam,map_sam_son,(/nrow,ncol/),panres) + delete(map_sam_son) + panres@txString = "SAM (Annual)" + gsn_panel2(wks_sam,map_sam_ann,(/nrow,ncol/),panres) + delete(map_sam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "SAM (Monthly)" + gsn_panel2(wks_sam,map_sam_mon,(/nrow,ncol/),panres) + delete(map_sam_mon) + end if + + if (sstreg_frame.eq.0) then + if (tasreg_frame.eq.0) then + txt0 = "SST/TAS" + else + txt0 = "SST" + end if + panres@txString = "SAM "+txt0+" Regressions (DJF)" + gsn_panel2(wks_sam,reg_sam_djf,(/nrow,ncol/),panres) + delete(reg_sam_djf) + panres@txString = "SAM "+txt0+" Regressions (MAM)" + gsn_panel2(wks_sam,reg_sam_mam,(/nrow,ncol/),panres) + delete(reg_sam_mam) + panres@txString = "SAM "+txt0+" Regressions (JJA)" + gsn_panel2(wks_sam,reg_sam_jja,(/nrow,ncol/),panres) + delete(reg_sam_jja) + panres@txString = "SAM "+txt0+" Regressions (SON)" + 
gsn_panel2(wks_sam,reg_sam_son,(/nrow,ncol/),panres) + delete(reg_sam_son) + panres@txString = "SAM "+txt0+" Regressions (Annual)" + gsn_panel2(wks_sam,reg_sam_ann,(/nrow,ncol/),panres) + delete(reg_sam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "SAM "+txt0+" Regressions (Monthly)" + gsn_panel2(wks_sam,reg_sam_mon,(/nrow,ncol/),panres) + delete(reg_sam_mon) + end if + delete(wks_sam) + end if + if (prreg_frame.eq.0) then + panres@txString = "SAM PR Regressions (DJF)" + gsn_panel2(wks_sam_pr,reg_sam_pr_djf,(/nrow,ncol/),panres) + delete(reg_sam_pr_djf) + panres@txString = "SAM PR Regressions (MAM)" + gsn_panel2(wks_sam_pr,reg_sam_pr_mam,(/nrow,ncol/),panres) + delete(reg_sam_pr_mam) + panres@txString = "SAM PR Regressions (JJA)" + gsn_panel2(wks_sam_pr,reg_sam_pr_jja,(/nrow,ncol/),panres) + delete(reg_sam_pr_jja) + panres@txString = "SAM PR Regressions (SON)" + gsn_panel2(wks_sam_pr,reg_sam_pr_son,(/nrow,ncol/),panres) + delete(reg_sam_pr_son) + panres@txString = "SAM PR Regressions (Annual)" + gsn_panel2(wks_sam_pr,reg_sam_pr_ann,(/nrow,ncol/),panres) + delete(reg_sam_pr_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "SAM PR Regressions (Monthly)" + gsn_panel2(wks_sam_pr,reg_sam_pr_mon,(/nrow,ncol/),panres) + delete(reg_sam_pr_mon) + end if + delete(wks_sam_pr) + end if + + panres@txString = "PSA1 (DJF)" + gsn_panel2(wks_psa1,map_psa1_djf,(/nrow,ncol/),panres) + delete(map_psa1_djf) + panres@txString = "PSA1 (MAM)" + gsn_panel2(wks_psa1,map_psa1_mam,(/nrow,ncol/),panres) + delete(map_psa1_mam) + panres@txString = "PSA1 (JJA)" + gsn_panel2(wks_psa1,map_psa1_jja,(/nrow,ncol/),panres) + delete(map_psa1_jja) + panres@txString = "PSA1 (SON)" + gsn_panel2(wks_psa1,map_psa1_son,(/nrow,ncol/),panres) + delete(map_psa1_son) + panres@txString = "PSA1 (Annual)" + gsn_panel2(wks_psa1,map_psa1_ann,(/nrow,ncol/),panres) + delete(map_psa1_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA1 (Monthly)" + gsn_panel2(wks_psa1,map_psa1_mon,(/nrow,ncol/),panres) + delete(map_psa1_mon) + end if + + if (sstreg_frame.eq.0) then + if (tasreg_frame.eq.0) then + txt0 = "SST/TAS" + else + txt0 = "SST" + end if + panres@txString = "PSA1 "+txt0+" Regressions (DJF)" + gsn_panel2(wks_psa1,reg_psa1_djf,(/nrow,ncol/),panres) + delete(reg_psa1_djf) + panres@txString = "PSA1 "+txt0+" Regressions (MAM)" + gsn_panel2(wks_psa1,reg_psa1_mam,(/nrow,ncol/),panres) + delete(reg_psa1_mam) + panres@txString = "PSA1 "+txt0+" Regressions (JJA)" + gsn_panel2(wks_psa1,reg_psa1_jja,(/nrow,ncol/),panres) + delete(reg_psa1_jja) + panres@txString = "PSA1 "+txt0+" Regressions (SON)" + gsn_panel2(wks_psa1,reg_psa1_son,(/nrow,ncol/),panres) + delete(reg_psa1_son) + panres@txString = "PSA1 "+txt0+" Regressions (Annual)" + gsn_panel2(wks_psa1,reg_psa1_ann,(/nrow,ncol/),panres) + delete(reg_psa1_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA1 "+txt0+" Regressions (Monthly)" + gsn_panel2(wks_psa1,reg_psa1_mon,(/nrow,ncol/),panres) + delete(reg_psa1_mon) + end if + delete(wks_psa1) + end if + if (prreg_frame.eq.0) then + panres@txString = "PSA1 PR Regressions (DJF)" + gsn_panel2(wks_psa1_pr,reg_psa1_pr_djf,(/nrow,ncol/),panres) + delete(reg_psa1_pr_djf) + panres@txString = "PSA1 PR Regressions (MAM)" + gsn_panel2(wks_psa1_pr,reg_psa1_pr_mam,(/nrow,ncol/),panres) + delete(reg_psa1_pr_mam) + panres@txString = "PSA1 PR Regressions (JJA)" + gsn_panel2(wks_psa1_pr,reg_psa1_pr_jja,(/nrow,ncol/),panres) + delete(reg_psa1_pr_jja) + panres@txString = "PSA1 PR Regressions (SON)" 
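+ ; reviewer note (comment added in review, not part of the upstream CVDP
+ ; code): panres@txString supplies the page title consumed by the next
+ ; gsn_panel2 call, so each title assignment must immediately precede the
+ ; panel call it labels, as in the DJF/MAM/JJA blocks above.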
+ gsn_panel2(wks_psa1_pr,reg_psa1_pr_son,(/nrow,ncol/),panres) + delete(reg_psa1_pr_son) + panres@txString = "PSA1 PR Regressions (Annual)" + gsn_panel2(wks_psa1_pr,reg_psa1_pr_ann,(/nrow,ncol/),panres) + delete(reg_psa1_pr_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA1 PR Regressions (Monthly)" + gsn_panel2(wks_psa1_pr,reg_psa1_pr_mon,(/nrow,ncol/),panres) + delete(reg_psa1_pr_mon) + end if + delete(wks_psa1_pr) + end if + + panres@txString = "PSA2 (DJF)" + gsn_panel2(wks_psa2,map_psa2_djf,(/nrow,ncol/),panres) + delete(map_psa2_djf) + panres@txString = "PSA2 (MAM)" + gsn_panel2(wks_psa2,map_psa2_mam,(/nrow,ncol/),panres) + delete(map_psa2_mam) + panres@txString = "PSA2 (JJA)" + gsn_panel2(wks_psa2,map_psa2_jja,(/nrow,ncol/),panres) + delete(map_psa2_jja) + panres@txString = "PSA2 (SON)" + gsn_panel2(wks_psa2,map_psa2_son,(/nrow,ncol/),panres) + delete(map_psa2_son) + panres@txString = "PSA2 (Annual)" + gsn_panel2(wks_psa2,map_psa2_ann,(/nrow,ncol/),panres) + delete(map_psa2_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA2 (Monthly)" + gsn_panel2(wks_psa2,map_psa2_mon,(/nrow,ncol/),panres) + delete(map_psa2_mon) + end if + + if (sstreg_frame.eq.0) then + if (tasreg_frame.eq.0) then + txt0 = "SST/TAS" + else + txt0 = "SST" + end if + panres@txString = "PSA2 "+txt0+" Regressions (DJF)" + gsn_panel2(wks_psa2,reg_psa2_djf,(/nrow,ncol/),panres) + delete(reg_psa2_djf) + panres@txString = "PSA2 "+txt0+" Regressions (MAM)" + gsn_panel2(wks_psa2,reg_psa2_mam,(/nrow,ncol/),panres) + delete(reg_psa2_mam) + panres@txString = "PSA2 "+txt0+" Regressions (JJA)" + gsn_panel2(wks_psa2,reg_psa2_jja,(/nrow,ncol/),panres) + delete(reg_psa2_jja) + panres@txString = "PSA2 "+txt0+" Regressions (SON)" + gsn_panel2(wks_psa2,reg_psa2_son,(/nrow,ncol/),panres) + delete(reg_psa2_son) + panres@txString = "PSA2 "+txt0+" Regressions (Annual)" + gsn_panel2(wks_psa2,reg_psa2_ann,(/nrow,ncol/),panres) + delete(reg_psa2_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA2 "+txt0+" Regressions (Monthly)" + gsn_panel2(wks_psa2,reg_psa2_mon,(/nrow,ncol/),panres) + delete(reg_psa2_mon) + end if + delete(wks_psa2) + end if + if (prreg_frame.eq.0) then + panres@txString = "PSA2 PR Regressions (DJF)" + gsn_panel2(wks_psa2_pr,reg_psa2_pr_djf,(/nrow,ncol/),panres) + delete(reg_psa2_pr_djf) + panres@txString = "PSA2 PR Regressions (MAM)" + gsn_panel2(wks_psa2_pr,reg_psa2_pr_mam,(/nrow,ncol/),panres) + delete(reg_psa2_pr_mam) + panres@txString = "PSA2 PR Regressions (JJA)" + gsn_panel2(wks_psa2_pr,reg_psa2_pr_jja,(/nrow,ncol/),panres) + delete(reg_psa2_pr_jja) + panres@txString = "PSA2 PR Regressions (SON)" + gsn_panel2(wks_psa2_pr,reg_psa2_pr_son,(/nrow,ncol/),panres) + delete(reg_psa2_pr_son) + panres@txString = "PSA2 PR Regressions (Annual)" + gsn_panel2(wks_psa2_pr,reg_psa2_pr_ann,(/nrow,ncol/),panres) + delete(reg_psa2_pr_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres@txString = "PSA2 PR Regressions (Monthly)" + gsn_panel2(wks_psa2_pr,reg_psa2_pr_mon,(/nrow,ncol/),panres) + delete(reg_psa2_pr_mon) + end if + delete(wks_psa2_pr) + end if + + panres2 = True + if (nsim.le.5) then + panres2@txFontHeightF = 0.024 + else + panres2@txFontHeightF = 0.016 + end if + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres2@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) + end if + panres2@txString = "SAM 
(DJF)" + gsn_panel2(wks_sam_ts,xy_sam_djf,lp,panres2) + delete(xy_sam_djf) + panres2@txString = "SAM (MAM)" + gsn_panel2(wks_sam_ts,xy_sam_mam,lp,panres2) + delete(xy_sam_mam) + panres2@txString = "SAM (JJA)" + gsn_panel2(wks_sam_ts,xy_sam_jja,lp,panres2) + delete(xy_sam_jja) + panres2@txString = "SAM (SON)" + gsn_panel2(wks_sam_ts,xy_sam_son,lp,panres2) + delete(xy_sam_son) + panres2@txString = "SAM (Annual)" + gsn_panel2(wks_sam_ts,xy_sam_ann,lp,panres2) + delete(xy_sam_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "SAM (Monthly)" + gsn_panel2(wks_sam_ts,xy_sam_mon,lp,panres2) + delete(xy_sam_mon) + end if + delete(wks_sam_ts) + + panres2@txString = "PSA1 (DJF)" + gsn_panel2(wks_psa1_ts,xy_psa1_djf,lp,panres2) + delete(xy_psa1_djf) + panres2@txString = "PSA1 (MAM)" + gsn_panel2(wks_psa1_ts,xy_psa1_mam,lp,panres2) + delete(xy_psa1_mam) + panres2@txString = "PSA1 (JJA)" + gsn_panel2(wks_psa1_ts,xy_psa1_jja,lp,panres2) + delete(xy_psa1_jja) + panres2@txString = "PSA1 (SON)" + gsn_panel2(wks_psa1_ts,xy_psa1_son,lp,panres2) + delete(xy_psa1_son) + panres2@txString = "PSA1 (Annual)" + gsn_panel2(wks_psa1_ts,xy_psa1_ann,lp,panres2) + delete(xy_psa1_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "PSA1 (Monthly)" + gsn_panel2(wks_psa1_ts,xy_psa1_mon,lp,panres2) + delete(xy_psa1_mon) + end if + delete(wks_psa1_ts) + + panres2@txString = "PSA2 (DJF)" + gsn_panel2(wks_psa2_ts,xy_psa2_djf,lp,panres2) + delete(xy_psa2_djf) + panres2@txString = "PSA2 (MAM)" + gsn_panel2(wks_psa2_ts,xy_psa2_mam,lp,panres2) + delete(xy_psa2_mam) + panres2@txString = "PSA2 (JJA)" + gsn_panel2(wks_psa2_ts,xy_psa2_jja,lp,panres2) + delete(xy_psa2_jja) + panres2@txString = "PSA2 (SON)" + gsn_panel2(wks_psa2_ts,xy_psa2_son,lp,panres2) + delete(xy_psa2_son) + panres2@txString = "PSA2 (Annual)" + gsn_panel2(wks_psa2_ts,xy_psa2_ann,lp,panres2) + delete(xy_psa2_ann) + if (COMPUTE_MODES_MON.eq."True") then + panres2@txString = "PSA2 (Monthly)" + gsn_panel2(wks_psa2_ts,xy_psa2_mon,lp,panres2) + delete(xy_psa2_mon) + end if + delete(wks_psa2_ts) +;-------------------------------------------------------------------------------------------------- + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + system("mv "+OUTDIR+"sam.000001.png "+OUTDIR+"sam.djf.png") + system("mv "+OUTDIR+"sam.000002.png "+OUTDIR+"sam.mam.png") + system("mv "+OUTDIR+"sam.000003.png "+OUTDIR+"sam.jja.png") + system("mv "+OUTDIR+"sam.000004.png "+OUTDIR+"sam.son.png") + system("mv "+OUTDIR+"sam.000005.png "+OUTDIR+"sam.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"sam.000006.png "+OUTDIR+"sam.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"sam.000007.png "+OUTDIR+"sam.tempreg.djf.png") + system("mv "+OUTDIR+"sam.000008.png "+OUTDIR+"sam.tempreg.mam.png") + system("mv "+OUTDIR+"sam.000009.png "+OUTDIR+"sam.tempreg.jja.png") + system("mv "+OUTDIR+"sam.000010.png "+OUTDIR+"sam.tempreg.son.png") + system("mv "+OUTDIR+"sam.000011.png "+OUTDIR+"sam.tempreg.ann.png") + system("mv "+OUTDIR+"sam.000012.png "+OUTDIR+"sam.tempreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"sam.000006.png "+OUTDIR+"sam.tempreg.djf.png") + system("mv "+OUTDIR+"sam.000007.png "+OUTDIR+"sam.tempreg.mam.png") + system("mv "+OUTDIR+"sam.000008.png "+OUTDIR+"sam.tempreg.jja.png") + system("mv "+OUTDIR+"sam.000009.png "+OUTDIR+"sam.tempreg.son.png") + system("mv "+OUTDIR+"sam.000010.png "+OUTDIR+"sam.tempreg.ann.png") + end if + end if + + if (prreg_frame.eq.0) then + 
system("mv "+OUTDIR+"sam.prreg.000001.png "+OUTDIR+"sam.prreg.djf.png") + system("mv "+OUTDIR+"sam.prreg.000002.png "+OUTDIR+"sam.prreg.mam.png") + system("mv "+OUTDIR+"sam.prreg.000003.png "+OUTDIR+"sam.prreg.jja.png") + system("mv "+OUTDIR+"sam.prreg.000004.png "+OUTDIR+"sam.prreg.son.png") + system("mv "+OUTDIR+"sam.prreg.000005.png "+OUTDIR+"sam.prreg.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"sam.prreg.000006.png "+OUTDIR+"sam.prreg.mon.png") + end if + end if + + system("mv "+OUTDIR+"psa1.000001.png "+OUTDIR+"psa1.djf.png") + system("mv "+OUTDIR+"psa1.000002.png "+OUTDIR+"psa1.mam.png") + system("mv "+OUTDIR+"psa1.000003.png "+OUTDIR+"psa1.jja.png") + system("mv "+OUTDIR+"psa1.000004.png "+OUTDIR+"psa1.son.png") + system("mv "+OUTDIR+"psa1.000005.png "+OUTDIR+"psa1.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa1.000006.png "+OUTDIR+"psa1.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psa1.000007.png "+OUTDIR+"psa1.tempreg.djf.png") + system("mv "+OUTDIR+"psa1.000008.png "+OUTDIR+"psa1.tempreg.mam.png") + system("mv "+OUTDIR+"psa1.000009.png "+OUTDIR+"psa1.tempreg.jja.png") + system("mv "+OUTDIR+"psa1.000010.png "+OUTDIR+"psa1.tempreg.son.png") + system("mv "+OUTDIR+"psa1.000011.png "+OUTDIR+"psa1.tempreg.ann.png") + system("mv "+OUTDIR+"psa1.000012.png "+OUTDIR+"psa1.tempreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psa1.000006.png "+OUTDIR+"psa1.tempreg.djf.png") + system("mv "+OUTDIR+"psa1.000007.png "+OUTDIR+"psa1.tempreg.mam.png") + system("mv "+OUTDIR+"psa1.000008.png "+OUTDIR+"psa1.tempreg.jja.png") + system("mv "+OUTDIR+"psa1.000009.png "+OUTDIR+"psa1.tempreg.son.png") + system("mv "+OUTDIR+"psa1.000010.png "+OUTDIR+"psa1.tempreg.ann.png") + end if + end if + + if (prreg_frame.eq.0) then + system("mv "+OUTDIR+"psa1.prreg.000001.png "+OUTDIR+"psa1.prreg.djf.png") + system("mv "+OUTDIR+"psa1.prreg.000002.png "+OUTDIR+"psa1.prreg.mam.png") + system("mv "+OUTDIR+"psa1.prreg.000003.png "+OUTDIR+"psa1.prreg.jja.png") + system("mv "+OUTDIR+"psa1.prreg.000004.png "+OUTDIR+"psa1.prreg.son.png") + system("mv "+OUTDIR+"psa1.prreg.000005.png "+OUTDIR+"psa1.prreg.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa1.prreg.000006.png "+OUTDIR+"psa1.prreg.mon.png") + end if + end if + + system("mv "+OUTDIR+"psa2.000001.png "+OUTDIR+"psa2.djf.png") + system("mv "+OUTDIR+"psa2.000002.png "+OUTDIR+"psa2.mam.png") + system("mv "+OUTDIR+"psa2.000003.png "+OUTDIR+"psa2.jja.png") + system("mv "+OUTDIR+"psa2.000004.png "+OUTDIR+"psa2.son.png") + system("mv "+OUTDIR+"psa2.000005.png "+OUTDIR+"psa2.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa2.000006.png "+OUTDIR+"psa2.mon.png") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psa2.000007.png "+OUTDIR+"psa2.tempreg.djf.png") + system("mv "+OUTDIR+"psa2.000008.png "+OUTDIR+"psa2.tempreg.mam.png") + system("mv "+OUTDIR+"psa2.000009.png "+OUTDIR+"psa2.tempreg.jja.png") + system("mv "+OUTDIR+"psa2.000010.png "+OUTDIR+"psa2.tempreg.son.png") + system("mv "+OUTDIR+"psa2.000011.png "+OUTDIR+"psa2.tempreg.ann.png") + system("mv "+OUTDIR+"psa2.000012.png "+OUTDIR+"psa2.tempreg.mon.png") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psa2.000006.png "+OUTDIR+"psa2.tempreg.djf.png") + system("mv "+OUTDIR+"psa2.000007.png "+OUTDIR+"psa2.tempreg.mam.png") + system("mv "+OUTDIR+"psa2.000008.png "+OUTDIR+"psa2.tempreg.jja.png") + system("mv "+OUTDIR+"psa2.000009.png 
"+OUTDIR+"psa2.tempreg.son.png") + system("mv "+OUTDIR+"psa2.000010.png "+OUTDIR+"psa2.tempreg.ann.png") + end if + end if + + if (prreg_frame.eq.0) then + system("mv "+OUTDIR+"psa2.prreg.000001.png "+OUTDIR+"psa2.prreg.djf.png") + system("mv "+OUTDIR+"psa2.prreg.000002.png "+OUTDIR+"psa2.prreg.mam.png") + system("mv "+OUTDIR+"psa2.prreg.000003.png "+OUTDIR+"psa2.prreg.jja.png") + system("mv "+OUTDIR+"psa2.prreg.000004.png "+OUTDIR+"psa2.prreg.son.png") + system("mv "+OUTDIR+"psa2.prreg.000005.png "+OUTDIR+"psa2.prreg.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa2.prreg.000006.png "+OUTDIR+"psa2.prreg.mon.png") + end if + end if + + system("mv "+OUTDIR+"sam.timeseries.000001.png "+OUTDIR+"sam.timeseries.djf.png") + system("mv "+OUTDIR+"sam.timeseries.000002.png "+OUTDIR+"sam.timeseries.mam.png") + system("mv "+OUTDIR+"sam.timeseries.000003.png "+OUTDIR+"sam.timeseries.jja.png") + system("mv "+OUTDIR+"sam.timeseries.000004.png "+OUTDIR+"sam.timeseries.son.png") + system("mv "+OUTDIR+"sam.timeseries.000005.png "+OUTDIR+"sam.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"sam.timeseries.000006.png "+OUTDIR+"sam.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"psa1.timeseries.000001.png "+OUTDIR+"psa1.timeseries.djf.png") + system("mv "+OUTDIR+"psa1.timeseries.000002.png "+OUTDIR+"psa1.timeseries.mam.png") + system("mv "+OUTDIR+"psa1.timeseries.000003.png "+OUTDIR+"psa1.timeseries.jja.png") + system("mv "+OUTDIR+"psa1.timeseries.000004.png "+OUTDIR+"psa1.timeseries.son.png") + system("mv "+OUTDIR+"psa1.timeseries.000005.png "+OUTDIR+"psa1.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa1.timeseries.000006.png "+OUTDIR+"psa1.timeseries.mon.png") + end if + + system("mv "+OUTDIR+"psa2.timeseries.000001.png "+OUTDIR+"psa2.timeseries.djf.png") + system("mv "+OUTDIR+"psa2.timeseries.000002.png "+OUTDIR+"psa2.timeseries.mam.png") + system("mv "+OUTDIR+"psa2.timeseries.000003.png "+OUTDIR+"psa2.timeseries.jja.png") + system("mv "+OUTDIR+"psa2.timeseries.000004.png "+OUTDIR+"psa2.timeseries.son.png") + system("mv "+OUTDIR+"psa2.timeseries.000005.png "+OUTDIR+"psa2.timeseries.ann.png") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psa2.timeseries.000006.png "+OUTDIR+"psa2.timeseries.mon.png") + end if + else + system("psplit "+OUTDIR+"sam.ps "+OUTDIR+"psl_sp") + system("mv "+OUTDIR+"psl_sp0001.ps "+OUTDIR+"sam.djf.ps") + system("mv "+OUTDIR+"psl_sp0002.ps "+OUTDIR+"sam.mam.ps") + system("mv "+OUTDIR+"psl_sp0003.ps "+OUTDIR+"sam.jja.ps") + system("mv "+OUTDIR+"psl_sp0004.ps "+OUTDIR+"sam.son.ps") + system("mv "+OUTDIR+"psl_sp0005.ps "+OUTDIR+"sam.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"sam.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_sp0007.ps "+OUTDIR+"sam.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_sp0008.ps "+OUTDIR+"sam.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_sp0009.ps "+OUTDIR+"sam.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_sp0010.ps "+OUTDIR+"sam.tempreg.son.ps") + system("mv "+OUTDIR+"psl_sp0011.ps "+OUTDIR+"sam.tempreg.ann.ps") + system("mv "+OUTDIR+"psl_sp0012.ps "+OUTDIR+"sam.tempreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"sam.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_sp0007.ps "+OUTDIR+"sam.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_sp0008.ps "+OUTDIR+"sam.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_sp0009.ps 
"+OUTDIR+"sam.tempreg.son.ps") + system("mv "+OUTDIR+"psl_sp0010.ps "+OUTDIR+"sam.tempreg.ann.ps") + end if + end if + + if (prreg_frame.eq.0) then + system("psplit "+OUTDIR+"sam.prreg.ps "+OUTDIR+"pr_nn") + system("mv "+OUTDIR+"pr_nn0001.ps "+OUTDIR+"sam.prreg.djf.ps") + system("mv "+OUTDIR+"pr_nn0002.ps "+OUTDIR+"sam.prreg.mam.ps") + system("mv "+OUTDIR+"pr_nn0003.ps "+OUTDIR+"sam.prreg.jja.ps") + system("mv "+OUTDIR+"pr_nn0004.ps "+OUTDIR+"sam.prreg.son.ps") + system("mv "+OUTDIR+"pr_nn0005.ps "+OUTDIR+"sam.prreg.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pr_nn0006.ps "+OUTDIR+"sam.prreg.mon.ps") + end if + end if + + system("psplit "+OUTDIR+"psa1.ps "+OUTDIR+"psl_sp") + system("mv "+OUTDIR+"psl_sp0001.ps "+OUTDIR+"psa1.djf.ps") + system("mv "+OUTDIR+"psl_sp0002.ps "+OUTDIR+"psa1.mam.ps") + system("mv "+OUTDIR+"psl_sp0003.ps "+OUTDIR+"psa1.jja.ps") + system("mv "+OUTDIR+"psl_sp0004.ps "+OUTDIR+"psa1.son.ps") + system("mv "+OUTDIR+"psl_sp0005.ps "+OUTDIR+"psa1.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"psa1.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_sp0007.ps "+OUTDIR+"psa1.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_sp0008.ps "+OUTDIR+"psa1.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_sp0009.ps "+OUTDIR+"psa1.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_sp0010.ps "+OUTDIR+"psa1.tempreg.son.ps") + system("mv "+OUTDIR+"psl_sp0011.ps "+OUTDIR+"psa1.tempreg.ann.ps") + system("mv "+OUTDIR+"psl_sp0012.ps "+OUTDIR+"psa1.tempreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"psa1.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_sp0007.ps "+OUTDIR+"psa1.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_sp0008.ps "+OUTDIR+"psa1.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_sp0009.ps "+OUTDIR+"psa1.tempreg.son.ps") + system("mv "+OUTDIR+"psl_sp0010.ps "+OUTDIR+"psa1.tempreg.ann.ps") + end if + end if + + if (prreg_frame.eq.0) then + system("psplit "+OUTDIR+"psa1.prreg.ps "+OUTDIR+"pr_nn") + system("mv "+OUTDIR+"pr_nn0001.ps "+OUTDIR+"psa1.prreg.djf.ps") + system("mv "+OUTDIR+"pr_nn0002.ps "+OUTDIR+"psa1.prreg.mam.ps") + system("mv "+OUTDIR+"pr_nn0003.ps "+OUTDIR+"psa1.prreg.jja.ps") + system("mv "+OUTDIR+"pr_nn0004.ps "+OUTDIR+"psa1.prreg.son.ps") + system("mv "+OUTDIR+"pr_nn0005.ps "+OUTDIR+"psa1.prreg.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pr_nn0006.ps "+OUTDIR+"psa1.prreg.mon.ps") + end if + end if + + system("psplit "+OUTDIR+"psa2.ps "+OUTDIR+"psl_sp") + system("mv "+OUTDIR+"psl_sp0001.ps "+OUTDIR+"psa2.djf.ps") + system("mv "+OUTDIR+"psl_sp0002.ps "+OUTDIR+"psa2.mam.ps") + system("mv "+OUTDIR+"psl_sp0003.ps "+OUTDIR+"psa2.jja.ps") + system("mv "+OUTDIR+"psl_sp0004.ps "+OUTDIR+"psa2.son.ps") + system("mv "+OUTDIR+"psl_sp0005.ps "+OUTDIR+"psa2.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"psa2.mon.ps") + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_sp0007.ps "+OUTDIR+"psa2.tempreg.djf.ps") + system("mv "+OUTDIR+"psl_sp0008.ps "+OUTDIR+"psa2.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_sp0009.ps "+OUTDIR+"psa2.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_sp0010.ps "+OUTDIR+"psa2.tempreg.son.ps") + system("mv "+OUTDIR+"psl_sp0011.ps "+OUTDIR+"psa2.tempreg.ann.ps") + system("mv "+OUTDIR+"psl_sp0012.ps "+OUTDIR+"psa2.tempreg.mon.ps") + end if + else + if (sstreg_frame.eq.0) then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"psa2.tempreg.djf.ps") + system("mv 
"+OUTDIR+"psl_sp0007.ps "+OUTDIR+"psa2.tempreg.mam.ps") + system("mv "+OUTDIR+"psl_sp0008.ps "+OUTDIR+"psa2.tempreg.jja.ps") + system("mv "+OUTDIR+"psl_sp0009.ps "+OUTDIR+"psa2.tempreg.son.ps") + system("mv "+OUTDIR+"psl_sp0010.ps "+OUTDIR+"psa2.tempreg.ann.ps") + end if + end if + + if (prreg_frame.eq.0) then + system("psplit "+OUTDIR+"psa2.prreg.ps "+OUTDIR+"pr_nn") + system("mv "+OUTDIR+"pr_nn0001.ps "+OUTDIR+"psa2.prreg.djf.ps") + system("mv "+OUTDIR+"pr_nn0002.ps "+OUTDIR+"psa2.prreg.mam.ps") + system("mv "+OUTDIR+"pr_nn0003.ps "+OUTDIR+"psa2.prreg.jja.ps") + system("mv "+OUTDIR+"pr_nn0004.ps "+OUTDIR+"psa2.prreg.son.ps") + system("mv "+OUTDIR+"pr_nn0005.ps "+OUTDIR+"psa2.prreg.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"pr_nn0006.ps "+OUTDIR+"psa2.prreg.mon.ps") + end if + system("rm "+OUTDIR+"sam.prreg.ps "+OUTDIR+"psa1.prreg.ps "+OUTDIR+"psa2.prreg.ps") + end if + + system("psplit "+OUTDIR+"sam.timeseries.ps "+OUTDIR+"psl_sp") + system("mv "+OUTDIR+"psl_sp0001.ps "+OUTDIR+"sam.timeseries.djf.ps") + system("mv "+OUTDIR+"psl_sp0002.ps "+OUTDIR+"sam.timeseries.mam.ps") + system("mv "+OUTDIR+"psl_sp0003.ps "+OUTDIR+"sam.timeseries.jja.ps") + system("mv "+OUTDIR+"psl_sp0004.ps "+OUTDIR+"sam.timeseries.son.ps") + system("mv "+OUTDIR+"psl_sp0005.ps "+OUTDIR+"sam.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"sam.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"psa1.timeseries.ps "+OUTDIR+"psl_sp") + system("mv "+OUTDIR+"psl_sp0001.ps "+OUTDIR+"psa1.timeseries.djf.ps") + system("mv "+OUTDIR+"psl_sp0002.ps "+OUTDIR+"psa1.timeseries.mam.ps") + system("mv "+OUTDIR+"psl_sp0003.ps "+OUTDIR+"psa1.timeseries.jja.ps") + system("mv "+OUTDIR+"psl_sp0004.ps "+OUTDIR+"psa1.timeseries.son.ps") + system("mv "+OUTDIR+"psl_sp0005.ps "+OUTDIR+"psa1.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"psa1.timeseries.mon.ps") + end if + + system("psplit "+OUTDIR+"psa2.timeseries.ps "+OUTDIR+"psl_sp") + system("mv "+OUTDIR+"psl_sp0001.ps "+OUTDIR+"psa2.timeseries.djf.ps") + system("mv "+OUTDIR+"psl_sp0002.ps "+OUTDIR+"psa2.timeseries.mam.ps") + system("mv "+OUTDIR+"psl_sp0003.ps "+OUTDIR+"psa2.timeseries.jja.ps") + system("mv "+OUTDIR+"psl_sp0004.ps "+OUTDIR+"psa2.timeseries.son.ps") + system("mv "+OUTDIR+"psl_sp0005.ps "+OUTDIR+"psa2.timeseries.ann.ps") + if (COMPUTE_MODES_MON.eq."True") then + system("mv "+OUTDIR+"psl_sp0006.ps "+OUTDIR+"psa2.timeseries.mon.ps") + end if + system("rm "+OUTDIR+"psa2.timeseries.ps "+OUTDIR+"psa1.timeseries.ps "+OUTDIR+"sam.timeseries.ps "+OUTDIR+"psa2.ps "+OUTDIR+"psa1.ps "+OUTDIR+"sam.ps") + end if + print("Finished: psl.sam_psa.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.trends.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.trends.ncl new file mode 100644 index 0000000000..7a412ce55f --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/psl.trends.ncl @@ -0,0 +1,271 @@ +; Calculates PSL global trends +; +; Variables used: psl +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: psl.trends.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + 
OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_psl") + na = asciiread("namelist_byvar/namelist_psl",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_trends_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.trends.djf") + wks_trends_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.trends.mam") + wks_trends_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.trends.jja") + wks_trends_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.trends.son") + wks_trends_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.trends.ann") + wks_trends_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"psl.trends.mon") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_trends_djf,"ncl_default") + gsn_define_colormap(wks_trends_mam,"ncl_default") + gsn_define_colormap(wks_trends_jja,"ncl_default") + gsn_define_colormap(wks_trends_son,"ncl_default") + gsn_define_colormap(wks_trends_ann,"ncl_default") + gsn_define_colormap(wks_trends_mon,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_trends_djf,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mam,"BlueDarkRed18") + gsn_define_colormap(wks_trends_jja,"BlueDarkRed18") + gsn_define_colormap(wks_trends_son,"BlueDarkRed18") + gsn_define_colormap(wks_trends_ann,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mon,"BlueDarkRed18") + end if + + map_djf = new(nsim,"graphic") + map_mam = new(nsim,"graphic") + map_jja = new(nsim,"graphic") + map_son = new(nsim,"graphic") + map_ann = new(nsim,"graphic") + map_mon = new(nsim,"graphic") + + do ee = 0,nsim-1 + psl = data_read_in(paths(ee),"PSL",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(psl,"is_all_missing")) then + delete(psl) + continue + end if + if (OPT_CLIMO.eq."Full") then + psl = rmMonAnnCycTLL(psl) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = psl + delete(temp_arr&time) + temp_arr&time = cd_calendar(psl&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + psl = calcMonAnomTLL(psl,climo) + delete(climo) + end if + + tttt = dtrend_msg_n(ispan(0,dimsizes(psl&time)-1,1),psl,False,True,0) + psl_trends_mon = psl(0,:,:) + psl_trends_mon = (/ onedtond(tttt@slope, (/dimsizes(psl&lat),dimsizes(psl&lon)/) ) /) + psl_trends_mon = psl_trends_mon*dimsizes(psl&time) + psl_trends_mon@units = psl@units+" "+nyr(ee)+"yr~S~-1~N~" + delete(tttt) + + psl_seas = runave_n_Wrap(psl,3,0,0) + psl_seas(0,:,:) = (/ dim_avg_n(psl(:1,:,:),0) /) + psl_seas(dimsizes(psl&time)-1,:,:) = (/ 
dim_avg_n(psl(dimsizes(psl&time)-2:,:,:),0) /) + psl_ann = runave_n_Wrap(psl,12,0,0) + delete(psl) + + psl_trends_seas = psl_seas(:3,:,:) + psl_trends_seas = psl_trends_seas@_FillValue + psl_trends_ann = psl_trends_seas(0,:,:) + do ff = 0,4 + if (ff.le.3) then + tarr = psl_seas(ff*3::12,:,:) + end if + if (ff.eq.4) then + tarr = psl_ann(5::12,:,:) + end if + tttt = dtrend_msg_n(ispan(0,dimsizes(tarr&time)-1,1),tarr,False,True,0) + if (ff.le.3) then + psl_trends_seas(ff,:,:) = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + end if + if (ff.eq.4) then + psl_trends_ann = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + end if + delete([/tarr,tttt/]) + end do + psl_trends_seas = psl_trends_seas*nyr(ee) + psl_trends_seas@units = psl_seas@units+" "+nyr(ee)+"yr~S~-1~N~" + psl_trends_ann = psl_trends_ann*nyr(ee) + psl_trends_ann@units = psl_ann@units+" "+nyr(ee)+"yr~S~-1~N~" + delete([/psl_seas,psl_ann/]) + + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.trends."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + z->psl_trends_djf = set_varAtts(psl_trends_seas(0,:,:),"psl linear trends (DJF)","","") + z->psl_trends_mam = set_varAtts(psl_trends_seas(1,:,:),"psl linear trends (MAM)","","") + z->psl_trends_jja = set_varAtts(psl_trends_seas(2,:,:),"psl linear trends (JJA)","","") + z->psl_trends_son = set_varAtts(psl_trends_seas(3,:,:),"psl linear trends (SON)","","") + z->psl_trends_ann = set_varAtts(psl_trends_ann,"psl linear trends (annual)","","") + z->psl_trends_mon = set_varAtts(psl_trends_mon,"psl linear trends (monthly)","","") + delete(z) + delete([/modname,fn/]) + end if + +;======================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + res@mpGeophysicalLineThicknessF = 2. + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 210. 
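+ ; reviewer note (comment added in review, not part of the upstream CVDP
+ ; code): centering the Winkel Tripel projection at 210E puts the Pacific
+ ; mid-plot, presumably so trend features straddling the dateline are not
+ ; split at the map edge.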
+ res@mpOutlineOn = True
+ res@gsnDraw = False
+ res@gsnFrame = False
+
+ res@cnLevelSelectionMode = "ExplicitLevels"
+ res@cnLevels = ispan(-8,8,1)
+ res@cnLineLabelsOn = False
+ res@cnFillOn = True
+ res@cnLinesOn = False
+ res@lbLabelBarOn = False
+
+ res@gsnLeftStringOrthogonalPosF = -0.05
+ res@gsnLeftStringParallelPosF = .005
+ res@gsnRightStringOrthogonalPosF = -0.05
+ res@gsnRightStringParallelPosF = 0.96
+ res@gsnRightString = ""
+ res@gsnLeftString = ""
+ res@gsnLeftStringFontHeightF = 0.014
+ res@gsnCenterStringFontHeightF = 0.018
+ res@gsnRightStringFontHeightF = 0.014
+ res@gsnLeftString = syear(ee)+"-"+eyear(ee)
+
+ res@gsnRightString = psl_trends_seas@units
+ res@gsnCenterString = names(ee)
+ map_djf(ee) = gsn_csm_contour_map(wks_trends_djf,psl_trends_seas(0,:,:),res)
+ map_mam(ee) = gsn_csm_contour_map(wks_trends_mam,psl_trends_seas(1,:,:),res)
+ map_jja(ee) = gsn_csm_contour_map(wks_trends_jja,psl_trends_seas(2,:,:),res)
+ map_son(ee) = gsn_csm_contour_map(wks_trends_son,psl_trends_seas(3,:,:),res)
+ map_ann(ee) = gsn_csm_contour_map(wks_trends_ann,psl_trends_ann,res)
+ map_mon(ee) = gsn_csm_contour_map(wks_trends_mon,psl_trends_mon,res)
+
+ delete([/psl_trends_seas,psl_trends_ann,psl_trends_mon,res/])
+ end do
+ panres = True
+ panres@gsnMaximize = True
+ panres@gsnPaperOrientation = "portrait"
+ panres@gsnPanelLabelBar = True
+ panres@gsnPanelYWhiteSpacePercent = 3.0
+ panres@pmLabelBarHeightF = 0.05
+ panres@pmLabelBarWidthF = 0.65
+ panres@lbTitleOn = False
+ panres@lbBoxLineColor = "gray70"
+ panres@lbLabelFontHeightF = 0.013
+ if (nsim.le.4) then
+ if (nsim.eq.1) then
+ panres@txFontHeightF = 0.022
+ panres@gsnPanelBottom = 0.50
+ else
+ panres@txFontHeightF = 0.0145
+ panres@gsnPanelBottom = 0.50
+ end if
+ else
+ panres@txFontHeightF = 0.016
+ panres@gsnPanelBottom = 0.05
+ end if
+ panres@lbLabelStride = 1
+
+ panres@txString = "PSL Trends (DJF)"
+ ncol = floattointeger(sqrt(nsim))
+ nrow = (nsim/ncol)+mod(nsim,ncol)
+ gsn_panel2(wks_trends_djf,map_djf,(/nrow,ncol/),panres)
+ delete(wks_trends_djf)
+
+ panres@txString = "PSL Trends (MAM)"
+ gsn_panel2(wks_trends_mam,map_mam,(/nrow,ncol/),panres)
+ delete(wks_trends_mam)
+
+ panres@txString = "PSL Trends (JJA)"
+ gsn_panel2(wks_trends_jja,map_jja,(/nrow,ncol/),panres)
+ delete(wks_trends_jja)
+
+ panres@txString = "PSL Trends (SON)"
+ gsn_panel2(wks_trends_son,map_son,(/nrow,ncol/),panres)
+ delete(wks_trends_son)
+
+ panres@txString = "PSL Trends (Annual)"
+ gsn_panel2(wks_trends_ann,map_ann,(/nrow,ncol/),panres)
+ delete(wks_trends_ann)
+
+ panres@txString = "PSL Trends (Monthly)"
+ gsn_panel2(wks_trends_mon,map_mon,(/nrow,ncol/),panres)
+ delete(wks_trends_mon)
+ delete([/nrow,ncol,map_djf,map_mam,map_jja,map_son,map_ann,map_mon,panres/])
+ print("Finished: psl.trends.ncl")
+end
diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/snd.mean_stddev.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/snd.mean_stddev.ncl
new file mode 100644
index 0000000000..c22c8abdc0
--- /dev/null
+++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/snd.mean_stddev.ncl
@@ -0,0 +1,366 @@
+; Calculates snow depth global means and standard deviations
+;
+; Variables used: snd
+;
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl"
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl"
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl"
+load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl"
+load "$CVDP_SCRIPTS/functions.ncl"
+
+begin
+ print("Starting: snd.mean_stddev.ncl")
+
+ SCALE_TIMESERIES =
getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_snowdp") + na = asciiread("namelist_byvar/namelist_snowdp",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_stddev_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.stddev.djf") + wks_stddev_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.stddev.mam") + wks_stddev_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.stddev.jja") + wks_stddev_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.stddev.son") + wks_stddev_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.stddev.ann") + wks_mean_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.mean.djf") + wks_mean_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.mean.mam") + wks_mean_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.mean.jja") + wks_mean_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.mean.son") + wks_mean_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.mean.ann") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_stddev_djf,"rainbow+white") + gsn_define_colormap(wks_stddev_mam,"rainbow+white") + gsn_define_colormap(wks_stddev_jja,"rainbow+white") + gsn_define_colormap(wks_stddev_son,"rainbow+white") + gsn_define_colormap(wks_stddev_ann,"rainbow+white") + gsn_define_colormap(wks_mean_djf,"ncl_default") + gsn_define_colormap(wks_mean_mam,"ncl_default") + gsn_define_colormap(wks_mean_jja,"ncl_default") + gsn_define_colormap(wks_mean_son,"ncl_default") + gsn_define_colormap(wks_mean_ann,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_stddev_djf,"cb_rainbow") + gsn_define_colormap(wks_stddev_mam,"cb_rainbow") + gsn_define_colormap(wks_stddev_jja,"cb_rainbow") + gsn_define_colormap(wks_stddev_son,"cb_rainbow") + gsn_define_colormap(wks_stddev_ann,"cb_rainbow") + gsn_define_colormap(wks_mean_djf,"BlueDarkRed18") + gsn_define_colormap(wks_mean_mam,"BlueDarkRed18") + gsn_define_colormap(wks_mean_jja,"BlueDarkRed18") + gsn_define_colormap(wks_mean_son,"BlueDarkRed18") + gsn_define_colormap(wks_mean_ann,"BlueDarkRed18") + end if + + plot_mean_djf = new(nsim,"graphic") + plot_mean_mam = new(nsim,"graphic") + plot_mean_jja = new(nsim,"graphic") + plot_mean_son = new(nsim,"graphic") + plot_mean_ann = new(nsim,"graphic") + plot_stddev_djf = new(nsim,"graphic") + plot_stddev_mam = new(nsim,"graphic") + plot_stddev_jja = new(nsim,"graphic") + plot_stddev_son = new(nsim,"graphic") + plot_stddev_ann = new(nsim,"graphic") + do ee = 0,nsim-1 + snd = data_read_in(paths(ee),"SNOWDP",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(snd&lat,"_FillValue")) then ; required in v6.2.0-beta to reset _FillValue to avoid error message + 
snd&lat@_FillValue = 1.e20 + snd&lat@missing_value = snd&lat@_FillValue + end if + if (isatt(snd&lon,"_FillValue")) then + snd&lon@_FillValue = 1.e20 + snd&lon@missing_value = snd&lon@_FillValue + end if + + if (isatt(snd,"is_all_missing")) then + delete(snd) + continue + end if + do ff = 0,1 + sndT = snd + if (ff.eq.1) then + if (OPT_CLIMO.eq."Full") then + sndT = rmMonAnnCycTLL(sndT) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sndT + delete(temp_arr&time) + temp_arr&time = cd_calendar(sndT&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + sndT = calcMonAnomTLL(sndT,climo) + delete(climo) + end if + end if + snd_seas = runave_n_Wrap(sndT,3,0,0) + snd_seas(0,:,:) = (/ dim_avg_n(sndT(:1,:,:),0) /) + snd_seas(dimsizes(sndT&time)-1,:,:) = (/ dim_avg_n(sndT(dimsizes(sndT&time)-2:,:,:),0) /) + snd_ann = runave_n_Wrap(sndT,12,0,0) + delete(sndT) + + if (ff.eq.0) then + snd_mean_djf = dim_avg_n_Wrap(snd_seas(0::12,:,:),0) + snd_mean_mam = dim_avg_n_Wrap(snd_seas(3::12,:,:),0) + snd_mean_jja = dim_avg_n_Wrap(snd_seas(6::12,:,:),0) + snd_mean_son = dim_avg_n_Wrap(snd_seas(9::12,:,:),0) + snd_mean_ann = dim_avg_n_Wrap(snd_ann(5::12,:,:),0) + end if + if (ff.eq.1) then + snd_sd_djf = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),snd_seas(0::12,:,:),False,False,0),0) + snd_sd_mam = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),snd_seas(3::12,:,:),False,False,0),0) + snd_sd_jja = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),snd_seas(6::12,:,:),False,False,0),0) + snd_sd_son = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),snd_seas(9::12,:,:),False,False,0),0) + snd_sd_ann = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),snd_ann(5::12,:,:),False,False,0),0) + end if + delete([/snd_seas,snd_ann/]) + end do + delete(snd) + copy_VarMeta(snd_mean_djf,snd_sd_djf) + copy_VarMeta(snd_mean_mam,snd_sd_mam) + copy_VarMeta(snd_mean_jja,snd_sd_jja) + copy_VarMeta(snd_mean_son,snd_sd_son) + copy_VarMeta(snd_mean_ann,snd_sd_ann) + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.snd.mean_stddev."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + + mean_djf = (/ snd_mean_djf /) + mean_djf!0 = "LAT" + mean_djf&LAT = snd_mean_djf&lat + mean_djf!1 = "LON" + mean_djf&LON = snd_mean_djf&lon + copy_VarAtts(snd_mean_djf,mean_djf) + mean_mam = (/ snd_mean_mam /) + copy_VarMeta(mean_djf,mean_mam) + mean_jja = (/ snd_mean_jja /) + 
copy_VarMeta(mean_djf,mean_jja) + mean_son = (/ snd_mean_son /) + copy_VarMeta(mean_djf,mean_son) + mean_ann = (/ snd_mean_ann /) + copy_VarMeta(mean_djf,mean_ann) + + sd_djf = (/ snd_sd_djf /) + sd_djf!0 = "LAT" + sd_djf&LAT = snd_sd_djf&lat + sd_djf!1 = "LON" + sd_djf&LON = snd_sd_djf&lon + copy_VarAtts(snd_sd_djf,sd_djf) + sd_mam = (/ snd_sd_mam /) + copy_VarMeta(sd_djf,sd_mam) + sd_jja = (/ snd_sd_jja /) + copy_VarMeta(sd_djf,sd_jja) + sd_son = (/ snd_sd_son /) + copy_VarMeta(sd_djf,sd_son) + sd_ann = (/ snd_sd_ann /) + copy_VarMeta(sd_djf,sd_ann) + + z->snd_spatialmean_djf = set_varAtts(mean_djf,"snd mean (DJF)","","") + z->snd_spatialmean_mam = set_varAtts(mean_mam,"snd mean (MAM)","","") + z->snd_spatialmean_jja = set_varAtts(mean_jja,"snd mean (JJA)","","") + z->snd_spatialmean_son = set_varAtts(mean_son,"snd mean (SON)","","") + z->snd_spatialmean_ann = set_varAtts(mean_ann,"snd mean (annual)","","") + + z->snd_spatialstddev_djf = set_varAtts(sd_djf,"snd standard deviation (DJF)","","") + z->snd_spatialstddev_mam = set_varAtts(sd_mam,"snd standard deviation (MAM)","","") + z->snd_spatialstddev_jja = set_varAtts(sd_jja,"snd standard deviation (JJA)","","") + z->snd_spatialstddev_son = set_varAtts(sd_son,"snd standard deviation (SON)","","") + z->snd_spatialstddev_ann = set_varAtts(sd_ann,"snd standard deviation (annual)","","") + delete(z) + delete(modname) + delete([/mean_djf,mean_mam,mean_jja,mean_son,mean_ann,sd_djf,sd_mam,sd_jja,sd_son,sd_ann/]) + end if +;========================================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 210. + res@mpOutlineOn = True + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. 
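+ ; reviewer note (comment added in review, not part of the upstream CVDP
+ ; code): the png branch above doubles the coastline width because raster
+ ; pages are rendered at 1500*PNG_SCALE pixels (set where the workstations
+ ; are opened) and 1-pt lines reproduce faintly at that size; vector ps
+ ; output keeps the default weight.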
+ end if + res@gsnDraw = False + res@gsnFrame = False + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + + sres = res + + res@cnLevels = fspan(.05,.45,9) + if (COLORMAP.eq.0) then + res@cnFillColors = (/0,54,80,95,125,175,185,195,205,236/) + sres@cnLevels = fspan(0.05,1.5,30) + sres@cnFillColors = ispan(8,248,8) + sres@cnFillColors(0) = 0 + end if + if (COLORMAP.eq.1) then + res@cnFillColors = (/0,35,47,63,79,95,111,124,155,175/) + sres@cnLevels = fspan(0.05,1.45,15) + sres@cnFillColors = (/0,4,5,6,7,8,9,10, 11,12,13,14,15,16,17,18/) + end if + + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnRightString = snd_sd_djf@units + res@gsnCenterString = names(ee) + + plot_stddev_djf(ee) = gsn_csm_contour_map(wks_stddev_djf,snd_sd_djf,res) + plot_stddev_mam(ee) = gsn_csm_contour_map(wks_stddev_mam,snd_sd_mam,res) + plot_stddev_jja(ee) = gsn_csm_contour_map(wks_stddev_jja,snd_sd_jja,res) + plot_stddev_son(ee) = gsn_csm_contour_map(wks_stddev_son,snd_sd_son,res) + plot_stddev_ann(ee) = gsn_csm_contour_map(wks_stddev_ann,snd_sd_ann,res) + + sres@gsnLeftString = syear(ee)+"-"+eyear(ee) + sres@gsnRightString = snd_mean_djf@units + sres@gsnCenterString = names(ee) + plot_mean_djf(ee) = gsn_csm_contour_map(wks_mean_djf,snd_mean_djf,sres) + plot_mean_mam(ee) = gsn_csm_contour_map(wks_mean_mam,snd_mean_mam,sres) + plot_mean_jja(ee) = gsn_csm_contour_map(wks_mean_jja,snd_mean_jja,sres) + plot_mean_son(ee) = gsn_csm_contour_map(wks_mean_son,snd_mean_son,sres) + plot_mean_ann(ee) = gsn_csm_contour_map(wks_mean_ann,snd_mean_ann,sres) + delete([/snd_sd_djf,snd_sd_mam,snd_sd_jja,snd_sd_son,snd_sd_ann,snd_mean_djf,snd_mean_mam,snd_mean_jja,snd_mean_son,snd_mean_ann,res,sres/]) + end do + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelFontHeightF = 0.013 + panres@lbLabelStride = 1 + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "SND Standard Deviations (DJF)" + gsn_panel2(wks_stddev_djf,plot_stddev_djf,(/nrow,ncol/),panres) + delete(wks_stddev_djf) + + panres@txString = "SND Standard Deviations (MAM)" + gsn_panel2(wks_stddev_mam,plot_stddev_mam,(/nrow,ncol/),panres) + delete(wks_stddev_mam) + + panres@txString = "SND Standard Deviations (JJA)" + gsn_panel2(wks_stddev_jja,plot_stddev_jja,(/nrow,ncol/),panres) + delete(wks_stddev_jja) + + panres@txString = "SND Standard Deviations (SON)" + gsn_panel2(wks_stddev_son,plot_stddev_son,(/nrow,ncol/),panres) + delete(wks_stddev_son) + + panres@txString = "SND Standard Deviations (Annual)" + 
gsn_panel2(wks_stddev_ann,plot_stddev_ann,(/nrow,ncol/),panres) + delete(wks_stddev_ann) + + panres@txString = "SND Means (DJF)" + gsn_panel2(wks_mean_djf,plot_mean_djf,(/nrow,ncol/),panres) + delete(wks_mean_djf) + + panres@txString = "SND Means (MAM)" + gsn_panel2(wks_mean_mam,plot_mean_mam,(/nrow,ncol/),panres) + delete(wks_mean_mam) + + panres@txString = "SND Means (JJA)" + gsn_panel2(wks_mean_jja,plot_mean_jja,(/nrow,ncol/),panres) + delete(wks_mean_jja) + + panres@txString = "SND Means (SON)" + gsn_panel2(wks_mean_son,plot_mean_son,(/nrow,ncol/),panres) + delete(wks_mean_son) + + panres@txString = "SND Means (Annual)" + gsn_panel2(wks_mean_ann,plot_mean_ann,(/nrow,ncol/),panres) + delete(wks_mean_ann) + delete(panres) + print("Finished: snd.mean_stddev.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/snd.trends.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/snd.trends.ncl new file mode 100644 index 0000000000..76607b69b3 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/snd.trends.ncl @@ -0,0 +1,320 @@ +; Calculates snow depth global trends +; +; Variables used: snd +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: snd.trends.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_snowdp") + na = asciiread("namelist_byvar/namelist_snowdp",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) 
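+ ; reviewer sketch (comment added in review, not part of the upstream CVDP
+ ; code; x, nt, nlat and nlon are placeholder names): the trend maps below
+ ; follow the same convention as psl.trends.ncl. dtrend_msg_n returns the
+ ; least-squares slope per time step in its @slope attribute, which is
+ ; reshaped to the grid and rescaled to a total change over the record:
+ ;   tttt  = dtrend_msg_n(ispan(0,nt-1,1),x,False,True,0)
+ ;   trend = onedtond(tttt@slope,(/nlat,nlon/))*nt   ; per-step slope * nt steps
+ ; so the "<units> <nyr>yr~S~-1~N~" strings label a change over the full
+ ; analysis period, not a per-year rate.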
+ + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_trends_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.trends.djf") + wks_trends_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.trends.mam") + wks_trends_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.trends.jja") + wks_trends_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.trends.son") + wks_trends_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.trends.ann") + wks_trends_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"snd.trends.mon") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_trends_djf,"ncl_default") + gsn_define_colormap(wks_trends_mam,"ncl_default") + gsn_define_colormap(wks_trends_jja,"ncl_default") + gsn_define_colormap(wks_trends_son,"ncl_default") + gsn_define_colormap(wks_trends_ann,"ncl_default") + gsn_define_colormap(wks_trends_mon,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_trends_djf,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mam,"BlueDarkRed18") + gsn_define_colormap(wks_trends_jja,"BlueDarkRed18") + gsn_define_colormap(wks_trends_son,"BlueDarkRed18") + gsn_define_colormap(wks_trends_ann,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mon,"BlueDarkRed18") + end if + cmap = gsn_retrieve_colormap(wks_trends_djf) + + map_djf = new(nsim,"graphic") + map_mam = new(nsim,"graphic") + map_jja = new(nsim,"graphic") + map_son = new(nsim,"graphic") + map_ann = new(nsim,"graphic") + map_mon = new(nsim,"graphic") + + do ee = 0,nsim-1 + snd = data_read_in(paths(ee),"SNOWDP",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(snd&lat,"_FillValue")) then ; required in v6.2.0-beta to reset _FillValue to avoid error message + snd&lat@_FillValue = 1.e20 + snd&lat@missing_value = snd&lat@_FillValue + end if + if (isatt(snd&lon,"_FillValue")) then + snd&lon@_FillValue = 1.e20 + snd&lon@missing_value = snd&lon@_FillValue + end if + + if (isatt(snd,"is_all_missing")) then + delete(snd) + continue + end if + if (OPT_CLIMO.eq."Full") then + snd = rmMonAnnCycTLL(snd) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = snd + delete(temp_arr&time) + temp_arr&time = cd_calendar(snd&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + snd = calcMonAnomTLL(snd,climo) + delete(climo) + end if + + tttt = dtrend_msg_n(ispan(0,dimsizes(snd&time)-1,1),snd,False,True,0) + snd_trends_mon = snd(0,:,:) + snd_trends_mon = (/ onedtond(tttt@slope, (/dimsizes(snd&lat),dimsizes(snd&lon)/) ) /) + snd_trends_mon = snd_trends_mon*dimsizes(snd&time) + snd_trends_mon@units = snd@units+" "+nyr(ee)+"yr~S~-1~N~" + delete(tttt) + + snd_seas = runave_n_Wrap(snd,3,0,0) + snd_seas(0,:,:) = (/ dim_avg_n(snd(:1,:,:),0) /) + snd_seas(dimsizes(snd&time)-1,:,:) = (/ dim_avg_n(snd(dimsizes(snd&time)-2:,:,:),0) /) + snd_ann = runave_n_Wrap(snd,12,0,0) + delete(snd) + + snd_trends_seas = snd_seas(:3,:,:) + snd_trends_seas = snd_trends_seas@_FillValue + snd_trends_ann = snd_trends_seas(0,:,:) + do ff = 0,4 + if (ff.le.3) then + tarr = snd_seas(ff*3::12,:,:) + end if + if (ff.eq.4) then + tarr = snd_ann(5::12,:,:) + end if + tttt = dtrend_msg_n(ispan(0,dimsizes(tarr&time)-1,1),tarr,False,True,0) + if (ff.le.3) then + snd_trends_seas(ff,:,:) = (/ 
onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + end if + if (ff.eq.4) then + snd_trends_ann = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + end if + delete([/tarr,tttt/]) + end do + snd_trends_seas = snd_trends_seas*nyr(ee) + snd_trends_seas@units = snd_seas@units+" "+nyr(ee)+"yr~S~-1~N~" + snd_trends_ann = snd_trends_ann*nyr(ee) + snd_trends_ann@units = snd_ann@units+" "+nyr(ee)+"yr~S~-1~N~" + delete([/snd_seas,snd_ann/]) + + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.snd.trends."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + snd_seas = (/ snd_trends_seas /) + snd_seas!1 = "LAT" + snd_seas&LAT = snd_trends_seas&lat + snd_seas!2 = "LON" + snd_seas&LON = snd_trends_seas&lon + copy_VarAtts(snd_trends_seas,snd_seas) + + snd_ann = (/ snd_trends_ann /) + snd_ann!0 = "LAT" + snd_ann&LAT = snd_trends_ann&lat + snd_ann!1 = "LON" + snd_ann&LON = snd_trends_ann&lon + copy_VarAtts(snd_trends_ann,snd_ann) + + snd_mon = (/ snd_trends_mon /) + snd_mon!0 = "LAT" + snd_mon&LAT = snd_trends_mon&lat + snd_mon!1 = "LON" + snd_mon&LON = snd_trends_mon&lon + copy_VarAtts(snd_trends_mon,snd_mon) + + z->snd_trends_djf = set_varAtts(snd_seas(0,:,:),"snd linear trends (DJF)","","") + z->snd_trends_mam = set_varAtts(snd_seas(1,:,:),"snd linear trends (MAM)","","") + z->snd_trends_jja = set_varAtts(snd_seas(2,:,:),"snd linear trends (JJA)","","") + z->snd_trends_son = set_varAtts(snd_seas(3,:,:),"snd linear trends (SON)","","") + z->snd_trends_ann = set_varAtts(snd_ann,"snd linear trends (annual)","","") + z->snd_trends_mon = set_varAtts(snd_mon,"snd linear trends (monthly)","","") + delete(z) + delete([/snd_seas,snd_ann,snd_mon/]) + end if + + snd_trends_seas = where(abs(snd_trends_seas).le..005,snd_trends_seas@_FillValue,snd_trends_seas) ; .005m = arbitrary # to white out + snd_trends_ann = where(abs(snd_trends_ann).le..005,snd_trends_ann@_FillValue,snd_trends_ann) ; areas w/very very small trends.. + snd_trends_mon = where(abs(snd_trends_mon).le..005,snd_trends_mon@_FillValue,snd_trends_mon) +;======================================================================== +; cmap = read_colormap_file("ncl_default") + + + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. + end if + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. 
; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 0. + res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + + res@cnFillPalette = cmap(2::-1,:) + res@cnFillMode = "RasterFill" + res@cnLevelSelectionMode = "ExplicitLevels" + if (COLORMAP.eq.0) then + res@cnLevels = fspan(-.5,.5,21) + end if + if (COLORMAP.eq.1) then + res@cnLevels = fspan(-.8,.8,17) + end if + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + + res@gsnRightString = snd_trends_seas@units + res@gsnCenterString = names(ee) + map_djf(ee) = gsn_csm_contour_map(wks_trends_djf,snd_trends_seas(0,:,:),res) + map_mam(ee) = gsn_csm_contour_map(wks_trends_mam,snd_trends_seas(1,:,:),res) + map_jja(ee) = gsn_csm_contour_map(wks_trends_jja,snd_trends_seas(2,:,:),res) + map_son(ee) = gsn_csm_contour_map(wks_trends_son,snd_trends_seas(3,:,:),res) + map_ann(ee) = gsn_csm_contour_map(wks_trends_ann,snd_trends_ann,res) + map_mon(ee) = gsn_csm_contour_map(wks_trends_mon,snd_trends_mon,res) + + delete([/snd_trends_seas,snd_trends_ann,snd_trends_mon/]) + delete(res) + end do + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + panres@lbLabelFontHeightF = 0.013 + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelStride = 1 + + panres@txString = "SND Trends (DJF)" + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + gsn_panel2(wks_trends_djf,map_djf,(/nrow,ncol/),panres) + delete(wks_trends_djf) + + panres@txString = "SND Trends (MAM)" + gsn_panel2(wks_trends_mam,map_mam,(/nrow,ncol/),panres) + delete(wks_trends_mam) + + panres@txString = "SND Trends (JJA)" + gsn_panel2(wks_trends_jja,map_jja,(/nrow,ncol/),panres) + delete(wks_trends_jja) + + panres@txString = "SND Trends (SON)" + gsn_panel2(wks_trends_son,map_son,(/nrow,ncol/),panres) + delete(wks_trends_son) + + panres@txString = "SND Trends (Annual)" + gsn_panel2(wks_trends_ann,map_ann,(/nrow,ncol/),panres) + delete(wks_trends_ann) + + panres@txString = "SND Trends (Monthly)" + gsn_panel2(wks_trends_mon,map_mon,(/nrow,ncol/),panres) + delete(wks_trends_mon) + delete([/nrow,ncol,map_djf,map_mam,map_jja,map_son,map_ann,map_mon,panres,cmap/]) + print("Finished: snd.trends.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.indices.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.indices.ncl new file mode 100644 index 0000000000..75ba3a1df2 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.indices.ncl @@ -0,0 +1,2063 @@ +; Calculates a variety of oceanic indices, as well as 
hovmollers, spectra, +; monthly standard deviations, running standard deviations, and spatial +; composites based on the nino3.4 index. +; +; Variables used: ts, psl, and tas +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: sst.indices.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_ts") + na = asciiread("namelist_byvar/namelist_ts",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + delete(na) + nyr = eyear-syear+1 + nyr_max = max(nyr) +;----------- nino3.4 spatial composite coding------------- + nsim_psl = numAsciiRow("namelist_byvar/namelist_psl") + na_psl = asciiread("namelist_byvar/namelist_psl",(/nsim_psl/),"string") + names_psl = new(nsim_psl,"string") + paths_psl = new(nsim_psl,"string") + syear_psl = new(nsim_psl,"integer",-999) + eyear_psl = new(nsim_psl,"integer",-999) + + do gg = 0,nsim_psl-1 + names_psl(gg) = str_strip(str_get_field(na_psl(gg),1,delim)) + paths_psl(gg) = str_strip(str_get_field(na_psl(gg),2,delim)) + syear_psl(gg) = stringtointeger(str_strip(str_get_field(na_psl(gg),3,delim))) + eyear_psl(gg) = stringtointeger(str_strip(str_get_field(na_psl(gg),4,delim))) + end do + delete(na_psl) + nyr_psl = eyear_psl-syear_psl+1 + + nsim_trefht = numAsciiRow("namelist_byvar/namelist_trefht") + na_trefht = asciiread("namelist_byvar/namelist_trefht",(/nsim_trefht/),"string") + names_trefht = new(nsim_trefht,"string") + paths_trefht = new(nsim_trefht,"string") + syear_trefht = new(nsim_trefht,"integer",-999) + eyear_trefht = new(nsim_trefht,"integer",-999) + + do gg = 0,nsim_trefht-1 + names_trefht(gg) = str_strip(str_get_field(na_trefht(gg),1,delim)) + paths_trefht(gg) = str_strip(str_get_field(na_trefht(gg),2,delim)) + syear_trefht(gg) = stringtointeger(str_strip(str_get_field(na_trefht(gg),3,delim))) + eyear_trefht(gg) = stringtointeger(str_strip(str_get_field(na_trefht(gg),4,delim))) + end do + delete(na_trefht) + nyr_trefht = eyear_trefht-syear_trefht+1 + + nsim_prect = numAsciiRow("namelist_byvar/namelist_prect") + na_prect = asciiread("namelist_byvar/namelist_prect",(/nsim_prect/),"string") + names_prect = new(nsim_prect,"string") + paths_prect = new(nsim_prect,"string") + syear_prect = new(nsim_prect,"integer",-999) + eyear_prect = new(nsim_prect,"integer",-999) + + do gg = 0,nsim_prect-1 + names_prect(gg) = str_strip(str_get_field(na_prect(gg),1,delim)) + paths_prect(gg) = str_strip(str_get_field(na_prect(gg),2,delim)) + syear_prect(gg) = stringtointeger(str_strip(str_get_field(na_prect(gg),3,delim))) + eyear_prect(gg) = stringtointeger(str_strip(str_get_field(na_prect(gg),4,delim))) + end do + 
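+  ; Note: the psl, trefht and prect namelists are read in addition to ts
+  ; because the nino3.4 spatial composites below require the fields to
+  ; cover identical syear/eyear ranges; cases where the periods differ are
+  ; skipped via the *_plot_flag checks further down.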
delete(na_prect) + nyr_prect = eyear_prect-syear_prect+1 +;------------------------------------------------------------------------------------------------- + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_n34 = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino34.timeseries") + wks_n4 = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino4.timeseries") + wks_n3 = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino3.timeseries") + wks_n12 = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino12.timeseries") + wks_tna = gsn_open_wks(wks_type,getenv("OUTDIR")+"tna.timeseries") + wks_tsa = gsn_open_wks(wks_type,getenv("OUTDIR")+"tsa.timeseries") + wks_tio = gsn_open_wks(wks_type,getenv("OUTDIR")+"tio.timeseries") + + wks_n34_tlon_hi = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino34.hov.elnino") + wks_n34_tlon_lo = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino34.hov.lanina") + + wks_n34_p = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino34.powspec") + + wks_n34_rst = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino34.runstddev") + + wks_n34_mst = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino34.monstddev") + + wks_n34sc = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino34.spatialcomp") + + wks_n34sc_ppt = gsn_open_wks(wks_type,getenv("OUTDIR")+"nino34.spatialcomp.ppt") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_n34,"ncl_default") + gsn_define_colormap(wks_n4,"ncl_default") + gsn_define_colormap(wks_n3,"ncl_default") + gsn_define_colormap(wks_n12,"ncl_default") + gsn_define_colormap(wks_tna,"ncl_default") + gsn_define_colormap(wks_tsa,"ncl_default") + gsn_define_colormap(wks_tio,"ncl_default") + gsn_merge_colormaps(wks_n34_tlon_hi,"BlueDarkRed18",(/"gray30","gray50","gray70"/)) + gsn_merge_colormaps(wks_n34_tlon_lo,"BlueDarkRed18",(/"gray30","gray50","gray70"/)) + gsn_define_colormap(wks_n34_p,"cb_9step") + gsn_define_colormap(wks_n34_rst,"ncl_default") + gsn_define_colormap(wks_n34_mst,"ncl_default") + gsn_define_colormap(wks_n34sc,"ncl_default") + gsn_define_colormap(wks_n34sc_ppt,"MPL_BrBG") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_n34,"ncl_default") + gsn_define_colormap(wks_n4,"ncl_default") + gsn_define_colormap(wks_n3,"ncl_default") + gsn_define_colormap(wks_n12,"ncl_default") + gsn_define_colormap(wks_tna,"ncl_default") + gsn_define_colormap(wks_tsa,"ncl_default") + gsn_define_colormap(wks_tio,"ncl_default") + gsn_merge_colormaps(wks_n34_tlon_hi,"BlueDarkRed18",(/"gray30","gray50","gray70"/)) + gsn_merge_colormaps(wks_n34_tlon_lo,"BlueDarkRed18",(/"gray30","gray50","gray70"/)) + gsn_define_colormap(wks_n34_p,"cb_9step") + gsn_define_colormap(wks_n34_rst,"ncl_default") + gsn_define_colormap(wks_n34_mst,"ncl_default") + gsn_define_colormap(wks_n34sc,"BlueDarkRed18") + gsn_define_colormap(wks_n34sc_ppt,"BrownBlue12") + end if + + xyn34 = new(nsim,"graphic") + xyn4 = new(nsim,"graphic") + xyn3 = new(nsim,"graphic") + xyn12 = new(nsim,"graphic") + xytna = new(nsim,"graphic") + xytsa = new(nsim,"graphic") + xytio = new(nsim,"graphic") + xyiod = new(nsim,"graphic") + xysocn = new(nsim,"graphic") + xyamm = new(nsim,"graphic") + xyatl3 = new(nsim,"graphic") + + plot_n34hi = new(nsim,"graphic") + plot_n34lo = new(nsim,"graphic") + + map_n34sc_jja0 = new(nsim,"graphic") + map_n34sc_son0 = new(nsim,"graphic") + map_n34sc_djf1 = new(nsim,"graphic") + map_n34sc_mam1 = new(nsim,"graphic") + + map_n34sc_ppt_jja0 = new(nsim,"graphic") + map_n34sc_ppt_son0 = new(nsim,"graphic") + map_n34sc_ppt_djf1 = new(nsim,"graphic") + 
map_n34sc_ppt_mam1 = new(nsim,"graphic")
+
+   xyn34_rst = new(nsim,"graphic")
+   xyn34_mst = new(nsim,"graphic")
+   xyn34_ac = new(nsim,"graphic")
+   plot_wave34 = new(nsim,"graphic")
+
+   pspec = new(nsim,"graphic")
+   if (isfilepresent2("obs_ts")) then
+      pspec_obs = new(nsim,"graphic")
+      xyn34_ac_obs = new(nsim,"graphic")
+   end if
+
+   wgt = (/1.,2.,1./)
+   wgt = wgt/sum(wgt)
+   pi=4.*atan(1.0)
+   rad=(pi/180.)
+
+   do ee = 0,nsim-1
+      sst = data_read_in(paths(ee),"TS",syear(ee),eyear(ee))   ; read in data, orient lats/lons correctly, set time coordinate variable up
+
+      if (isatt(sst,"is_all_missing")) then
+         delete(sst)
+         continue
+      end if
+
+      sst = where(sst.le.-1.8,-1.8,sst)   ; set all values below -1.8 to -1.8
+      d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r")   ; mask out land (this is redundant for data that is already masked)
+      basemap = d->LSMASK
+      lsm = landsea_mask(basemap,sst&lat,sst&lon)
+      sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False)
+      delete([/lsm,basemap/])
+      delete(d)
+
+      if (OPT_CLIMO.eq."Full") then
+         sst = rmMonAnnCycTLL(sst)
+      else
+         check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR)
+         temp_arr = sst
+         delete(temp_arr&time)
+         temp_arr&time = cd_calendar(sst&time,-1)
+         if (CLIMO_SYEAR.lt.0) then
+            climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:))
+         else
+            climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:))
+         end if
+         delete(temp_arr)
+         sst = calcMonAnomTLL(sst,climo)
+         delete(climo)
+      end if
+
+      coswgt=cos(rad*sst&lat)
+      coswgt!0 = "lat"
+      coswgt&lat= sst&lat
+      llats = -5.   ; nino3.4
+      llatn = 5.
+      llonw = 190.
+      llone = 240.
+      nino34 = wgt_areaave_Wrap(sst(:,{llats:llatn},{llonw:llone}),coswgt({llats:llatn}),1.0,0)
+      nino34@comment_cvdp = "area average domain ("+llats+":"+llatn+"N, "+llonw+":"+llone+"E)"
+      nino34@units = sst@units
+      nino34@long_name = "nino3.4 timeseries (monthly)"
+
+      llats = -5.   ; nino3
+      llatn = 5.
+      llonw = 210.
+      llone = 270.
+      nino3 = wgt_areaave(sst(:,{llats:llatn},{llonw:llone}),coswgt({llats:llatn}),1.0,0)
+      nino3@comment_cvdp = "area average domain ("+llats+":"+llatn+"N, "+llonw+":"+llone+"E)"
+      copy_VarCoords(nino34,nino3)
+      nino3@units = sst@units
+      nino3@long_name = "nino3 timeseries (monthly)"
+
+      llats = -5.   ; nino4
+      llatn = 5.
+      llonw = 160.
+      llone = 210.
+      nino4 = wgt_areaave(sst(:,{llats:llatn},{llonw:llone}),coswgt({llats:llatn}),1.0,0)
+      nino4@comment_cvdp = "area average domain ("+llats+":"+llatn+"N, "+llonw+":"+llone+"E)"
+      copy_VarCoords(nino34,nino4)
+      nino4@units = sst@units
+      nino4@long_name = "nino4 timeseries (monthly)"
+
+      llats = -10.   ; nino1+2
+      llatn = 0.
+      llonw = 270.
+      llone = 280.
+      nino12 = wgt_areaave(sst(:,{llats:llatn},{llonw:llone}),coswgt({llats:llatn}),1.0,0)
+      nino12@comment_cvdp = "area average domain ("+llats+":"+llatn+"N, "+llonw+":"+llone+"E)"
+      copy_VarCoords(nino34,nino12)
+      nino12@units = sst@units
+      nino12@long_name = "nino1+2 timeseries (monthly)"
+
+      ssttemp = lonFlip(sst)
+      amm_n = wgt_areaave(ssttemp(:,{5.:15.},{-50.:-20.}),coswgt({5.:15.}),1.0,0)   ; Atlantic Meridional Mode
+      amm_s = wgt_areaave(ssttemp(:,{-15.:-5.},{-20.:10.}),coswgt({-15.:-5.}),1.0,0)
+      amm = amm_n
+      amm = (/ amm_n - amm_s /)
+      delete([/amm_n,amm_s/])
+      amm@comment_cvdp = "area average domain (5:15N, -50:-20E) - (-15:-5N, -20:10E)"
+      copy_VarCoords(nino34,amm)
+      amm@units = sst@units
+      amm@long_name = "Atlantic Meridional Mode Index (monthly)"
+
+      llats = -3.   ; Atlantic Nino (ATL3)
+      llatn = 3.
+      llonw = -20.
+      llone = 0.
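+      ; ssttemp was lonFlip'd above so that its longitudes run -180:180
+      ; rather than 0:360, which lets the Atlantic boxes here be selected
+      ; with negative (degrees-west) coordinate subscripts such as {-20.:0.}.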
+ atl3 = wgt_areaave(ssttemp(:,{llats:llatn},{llonw:llone}),coswgt({llats:llatn}),1.0,0) + atl3@comment_cvdp = "area average domain ("+llats+":"+llatn+"N, "+llonw+":"+llone+"E)" + copy_VarCoords(nino34,atl3) + atl3@units = sst@units + atl3@long_name = "Atlantic Nino Index (monthly)" + + llats = -20. ; Tropical Southern Atlantic Index + llatn = 0. + llonw = -30. + llone = 10. + tsa = wgt_areaave(ssttemp(:,{llats:llatn},{llonw:llone}),coswgt({llats:llatn}),1.0,0) + tsa@comment_cvdp = "area average domain ("+llats+":"+llatn+"N, "+llonw+":"+llone+"E)" + copy_VarCoords(nino34,tsa) + tsa@units = sst@units + tsa@long_name = "Tropical Southern Atlantic SST timeseries (monthly)" + delete(ssttemp) + + llats = 5.5 ; Tropical Northern Atlantic Index + llatn = 23.5 + llonw = 302.5 + llone = 345. + tna = wgt_areaave(sst(:,{llats:llatn},{llonw:llone}),coswgt({llats:llatn}),1.0,0) + tna@comment_cvdp = "area average domain ("+llats+":"+llatn+"N, "+llonw+":"+llone+"E)" + copy_VarCoords(nino34,tna) + tna@units = sst@units + tna@long_name = "Tropical Northern Atlantic SST timeseries (monthly)" + + llats = -15. ; Indian Ocean SST index + llatn = 15. + llonw = 40. + llone = 110. + tio = wgt_areaave(sst(:,{llats:llatn},{llonw:llone}),coswgt({llats:llatn}),1.0,0) + tio@comment_cvdp = "area average domain ("+llats+":"+llatn+"N, "+llonw+":"+llone+"E)" + copy_VarCoords(nino34,tio) + tio@units = sst@units + tio@long_name = "Tropical Indian Ocean SST timeseries (monthly)" + + ; Indian Ocean Dipole Index http://www.bom.gov.au/climate/IOD/about_IOD.shtml + iod = wgt_areaave(sst(:,{-10.:10.},{50.:70.}),coswgt({-10.:10.}),1.0,0) - wgt_areaave(sst(:,{-10.:0.},{90.:110.}),coswgt({-10.:0.}),1.0,0) + iod@comment_cvdp = "area average domain (-10:10N, 50:70E) - (-10:0N, 90:110E)" + copy_VarCoords(nino34,iod) + iod@units = sst@units + iod@long_name = "Indian Ocean Dipole Index (monthly)" + + socn = wgt_areaave(sst(:,{-70.:-50.},:),coswgt({-70.:-50.}),1.0,0) + socn@comment_cvdp = "area average domain (-70:-50N, 0:360E)" + copy_VarCoords(nino34,socn) + socn@units = sst@units + socn@long_name = "Southern Ocean Index (monthly)" +;--------------------------------------------------------------------------------------------- + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.sst.indices."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + date = cd_calendar(nino34&time,-1) + date@long_name = "current date (YYYYMM)" + delete(date@calendar) + date!0 = "time" + date&time = nino34&time + date@units = "1" + z->date = date + delete(date) + else + z = addfile(fn,"w") + end if + z->nino34 = set_varAtts(nino34,"","","") + z->nino12 = set_varAtts(nino12,"","","") + z->nino3 = 
set_varAtts(nino3,"","","")
+      z->nino4 = set_varAtts(nino4,"","","")
+      z->north_tropical_atlantic = set_varAtts(tna,"","","")
+      z->south_tropical_atlantic = set_varAtts(tsa,"","","")
+      z->tropical_indian_ocean = set_varAtts(tio,"","","")
+      z->indian_ocean_dipole = set_varAtts(iod,"","","")
+      z->southern_ocean = set_varAtts(socn,"","","")
+      z->atlantic_meridional_mode = set_varAtts(amm,"","","")
+      z->atlantic_nino = set_varAtts(atl3,"","","")
+      delete([/modname,fn/])
+   end if
+;---------------------------------------------------------------------------------------------
+   nino34T = wgt_runave(nino34,wgt,1)   ; for use in ENSO composites / hovmuellers / running standard deviations
+   nino34_ndj = nino34T(11:dimsizes(nino34T)-13:12)   ; cannot count last 1yr as spatial composite uses +1yrs data beyond NDJ..
+   nino34_ndj!0 = "time"
+   nino34_ndj&time = ispan(syear(ee),eyear(ee)-1,1)
+   nino34_ndj = dtrend_msg(ispan(0,dimsizes(nino34_ndj&time)-1,1),nino34_ndj,True,False)
+   nino34_ndj = dim_standardize(nino34_ndj,0)
+
+   sst = (/ dtrend_msg_n(ispan(0,nyr(ee)*12-1,1),sst,False,False,0) /)   ; detrend the sst array
+
+   sstr = sst(:,{-3:3},{120:280})   ; ENSO hovmuellers based on NDJ nino34
+  ; delete(sst)
+   finsst_hi = sstr(:60,0,:)   ; for Jan-2 -> Jan+3
+   finsst_hi!0 = "time"
+   finsst_hi&time = ispan(0,60,1)
+   finsst_hi = 0.
+   finsst_lo = finsst_hi
+   finsst_mid = finsst_hi
+   cntr_hi = 0
+   cntr_lo = 0
+   cntr_mid = 0
+   cntr_lo@_FillValue = default_fillvalue(typeof(cntr_lo))
+   cntr_mid@_FillValue = default_fillvalue(typeof(cntr_mid))
+   cntr_hi@_FillValue = default_fillvalue(typeof(cntr_hi))
+
+   mocntr = 24   ; note: if this is set at 24 gg should start at 2
+   do gg = 2,dimsizes(nino34_ndj)-3   ; remember that Dec is month 11. End @ -3 because we need to grab + 3 yrs and 1 month from there (nino34_ndj already ends at eyear-1)
+      if (.not.ismissing(nino34_ndj(gg))) then   ; note that finsst_* indices 24:52 (Jan+0 -> May +2) are all that is shown in the hovmoller plots
+         if (nino34_ndj(gg).ge.1.) then
+            finsst_hi = (/ finsst_hi+dim_avg_n(sstr(mocntr-24:mocntr+36,:,:),1) /)   ; nino34_ndj value is at sstr index mocntr+11
+            cntr_hi = cntr_hi+1
+         end if
+         if (nino34_ndj(gg).ge.-0.5.and.nino34_ndj(gg).le.0.5) then
+            finsst_mid = (/ finsst_mid+dim_avg_n(sstr(mocntr-24:mocntr+36,:,:),1) /)
+            cntr_mid = cntr_mid+1
+         end if
+         if (nino34_ndj(gg).le.-1.)
then + finsst_lo = (/ finsst_lo+dim_avg_n(sstr(mocntr-24:mocntr+36,:,:),1) /) + cntr_lo = cntr_lo+1 + end if + end if + mocntr = mocntr+12 + end do + delete([/sstr,mocntr/]) + + cntr_hi = where(cntr_hi.eq.0, cntr_hi@_FillValue, cntr_hi) + cntr_mid = where(cntr_mid.eq.0,cntr_mid@_FillValue,cntr_mid) + cntr_lo = where(cntr_lo.eq.0, cntr_lo@_FillValue, cntr_lo) + finsst_hi = (/ finsst_hi/cntr_hi /) + finsst_mid = (/ finsst_mid/cntr_mid /) + finsst_lo = (/ finsst_lo/cntr_lo /) + delete([/coswgt/]) + + if (OUTPUT_DATA.eq."True") then + hov_hi = (/ finsst_hi(24:52,:) /) ; 24:52 runs from Jan+0->May+2 and matches range shown in plot + time_mon1 = ispan(0,28,1) + time_mon1@units = "months since 0000-01-01 00:00:00" + time_mon1@long_name = "Time" + time_mon1@standard_name = "time" + time_mon1@calendar = "standard" + time_mon1!0 = "time_mon1" + time_mon1&time_mon1 = time_mon1 + hov_hi!0 = "time_mon1" + hov_hi&time_mon1 = time_mon1 + longitude = finsst_hi&lon + longitude@standard_name = "longitude" + hov_hi!1 = "longitude" + hov_hi&longitude = longitude + delete([/time_mon1,longitude/]) + hov_lo = (/ finsst_lo(24:52,:) /) ; 24:52 runs from Jan+0->May+2 and matches range shown in plot + copy_VarCoords(hov_hi,hov_lo) + hov_hi@number_of_events = cntr_hi + hov_lo@number_of_events = cntr_lo + hov_hi@units = "C" + hov_lo@units = "C" + z->nino34_hov_elnino = set_varAtts(hov_hi,"nino3.4 El Nino Hovmoller sst composite","","") + z->nino34_hov_lanina = set_varAtts(hov_lo,"nino3.4 La Nina Hovmoller sst composite","","") + delete([/hov_hi,hov_lo/]) + end if +;- - - - - -nino3.4 spatial composite section- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (any(ismissing((/syear(ee),syear_trefht(ee),syear_psl(ee),eyear(ee),eyear_trefht(ee),eyear_psl(ee)/)))) then + taspslreg_plot_flag = 1 + else + if (syear(ee).eq.syear_trefht(ee).and.syear(ee).eq.syear_psl(ee)) then ; check that the start and end years match for ts, trefht, and psl + if (eyear(ee).eq.eyear_trefht(ee).and.eyear(ee).eq.eyear_psl(ee)) then + taspslreg_plot_flag = 0 + else + taspslreg_plot_flag = 1 + end if + else + taspslreg_plot_flag = 1 + end if + end if + + if (taspslreg_plot_flag.eq.0) then + tas = data_read_in(paths_trefht(ee),"TREFHT",syear_trefht(ee),eyear_trefht(ee)) + psl = data_read_in(paths_psl(ee),"PSL",syear_psl(ee),eyear_psl(ee)) + + TIME = sst&time + yyyymm = cd_calendar(sst&time,-1) ; convert tas, ts, and sst from CF-conforming time to YYYYMM for coding below + delete(sst&time) + sst&time = yyyymm + delete(yyyymm) + + yyyymm = cd_calendar(tas&time,-1) + delete(tas&time) + tas&time = yyyymm + delete(yyyymm) + + yyyymm = cd_calendar(psl&time,-1) + delete(psl&time) + psl&time = yyyymm + delete(yyyymm) + + if (isatt(tas,"is_all_missing").or.isatt(psl,"is_all_missing")) then + taspslreg_plot_flag = 1 + delete([/tas,psl/]) + end if + + if (nyr(ee).lt.15) then ; 15+ years needed for composites + taspslreg_plot_flag = 1 + end if + + if (taspslreg_plot_flag.eq.0) then ; only continue if all 3 fields are present + if (OPT_CLIMO.eq."Full") then + tas = rmMonAnnCycTLL(tas) + psl = rmMonAnnCycTLL(psl) + else + check_custom_climo(names_trefht(ee),syear_trefht(ee),eyear_trefht(ee),CLIMO_SYEAR,CLIMO_EYEAR) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(tas({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(tas({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + tas = calcMonAnomTLL(tas,climo) + delete(climo) + + 
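+            ; note: a negative CLIMO_SYEAR is interpreted relative to the end
+            ; of the record, i.e. the climatology window becomes
+            ; (eyear+CLIMO_SYEAR) through (eyear+CLIMO_EYEAR); the YYYY*100+month
+            ; subscripts select that range from the monthly time coordinate.
+            ; The same convention is applied to psl just below.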
check_custom_climo(names_psl(ee),syear_psl(ee),eyear_psl(ee),CLIMO_SYEAR,CLIMO_EYEAR) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(psl({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(psl({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + psl = calcMonAnomTLL(psl,climo) + delete(climo) + end if + tas = (/ dtrend_msg_n(ispan(0,dimsizes(tas&time)-1,1),tas,False,False,0) /) ; sst detrended up above + psl = (/ dtrend_msg_n(ispan(0,dimsizes(psl&time)-1,1),psl,False,False,0) /) + + ta = dim_avg_n(sst(:1,:,:),0) + sst = runave_n_Wrap(sst,3,0,0) + sst(0,:,:) = (/ ta /) + delete(ta) + + ta = dim_avg_n(psl(:1,:,:),0) + psl = runave_n_Wrap(psl,3,0,0) + psl(0,:,:) = (/ ta /) + delete(ta) + + ta = dim_avg_n(tas(:1,:,:),0) + tas = runave_n_Wrap(tas,3,0,0) + tas(0,:,:) = (/ ta /) + delete(ta) + + hicntr = 0 + locntr = 0 + hiyr = new(dimsizes(nino34_ndj&time),integer) + loyr = hiyr + + do hh = 0,dimsizes(nino34_ndj)-1 + if (.not.ismissing(nino34_ndj(hh))) then + if (nino34_ndj(hh).ge.1) then + hiyr(hicntr) = nino34_ndj&time(hh) + hicntr = hicntr+1 + end if + if (nino34_ndj(hh).le.-1) then + loyr(locntr) = nino34_ndj&time(hh) + locntr = locntr+1 + end if + end if + end do + + if (hicntr.eq.0) then ; for simulations with climatological SSTs + highyr = hiyr(0) + else + highyr = hiyr(:hicntr-1) + end if + delete([/hiyr,hicntr/]) + if (locntr.eq.0) then + lowyr = loyr(0) + else + lowyr = loyr(:locntr-1) + end if + delete([/loyr,locntr/]) + + dimS = dimsizes(psl&time) ; change time from YYYYMM->YYYY.frac + tmin = psl&time(0)/100 + tmax = psl&time(dimS-1)/100 + delete(psl&time) + psl&time = fspan(tmin*1.,(tmax*1.)+(11/12.),dimS) + dimS = dimsizes(tas&time) + tmin = tas&time(0)/100 + tmax = tas&time(dimS-1)/100 + delete(tas&time) + tas&time = fspan(tmin*1.,(tmax*1.)+(11/12.),dimS) + dimS = dimsizes(sst&time) + tmin = sst&time(0)/100 + tmax = sst&time(dimS-1)/100 + delete(sst&time) + sst&time = fspan(tmin*1.,(tmax*1.)+(11/12.),dimS) + delete([/dimS,tmin,tmax/]) + ; print(sst&time) + + sc_tas_hi = tas(:23,:,:) + sc_tas_lo = tas(:23,:,:) + sc_sst_hi = sst(:23,:,:) + sc_sst_lo = sst(:23,:,:) + sc_psl_hi = psl(:23,:,:) + sc_psl_lo = psl(:23,:,:) + + sc_tas_hi = sc_tas_hi@_FillValue + sc_tas_lo = sc_tas_lo@_FillValue + sc_sst_hi = sc_sst_hi@_FillValue + sc_sst_lo = sc_sst_lo@_FillValue + sc_psl_hi = sc_psl_hi@_FillValue + sc_psl_lo = sc_psl_lo@_FillValue + + if (dimsizes(highyr).le.1) then + print("For "+names(ee)+", 1 or less (normalized) nino3.4 value greater than one standard deviation found, setting nino3.4 spatial composites to missing") ; sc_*_hi arrays left to _FillValue + else + do gg = 0,23 + tt = gg/12. + sc_psl_hi(gg,:,:) = (/ dim_avg_n(psl({highyr+tt},:,:),0) /) + sc_sst_hi(gg,:,:) = (/ dim_avg_n(sst({highyr+tt},:,:),0) /) + sc_tas_hi(gg,:,:) = (/ dim_avg_n(tas({highyr+tt},:,:),0) /) + end do + delete(tt) + end if + delete(highyr) + if (dimsizes(lowyr).le.1) then + print("For "+names(ee)+", 1 or less (normalized) nino3.4 value less than -1 standard deviation found, setting nino3.4 spatial composites to missing") ; sc_*_lo arrays left to _FillValue + else + do gg = 0,23 + tt = gg/12. 
+ sc_psl_lo(gg,:,:) = (/ dim_avg_n(psl({lowyr+tt},:,:),0) /) + sc_sst_lo(gg,:,:) = (/ dim_avg_n(sst({lowyr+tt},:,:),0) /) + sc_tas_lo(gg,:,:) = (/ dim_avg_n(tas({lowyr+tt},:,:),0) /) + end do + delete(tt) + end if + delete(lowyr) + + n34sc_psl = sc_psl_hi + n34sc_psl = (/ sc_psl_hi - sc_psl_lo /) + n34sc_sst = sc_sst_hi + n34sc_sst = (/ sc_sst_hi - sc_sst_lo /) + n34sc_tas = sc_tas_hi + n34sc_tas = (/ sc_tas_hi - sc_tas_lo /) + delete([/sc_psl_hi,sc_psl_lo,sc_sst_hi,sc_sst_lo,sc_tas_hi,sc_tas_lo/]) + delete(sst&time) + sst&time = TIME + delete(TIME) + + if (OUTPUT_DATA.eq."True") then + n34sc_sst&lat@standard_name = "latitude" + n34sc_sst&lon@standard_name = "longitude" + z->nino34_spacomp_sst_jja0 = set_varAtts(n34sc_sst(6,:,:),"nino3.4 sst spatial composite (JJA+0)","","") + z->nino34_spacomp_sst_son0 = set_varAtts(n34sc_sst(9,:,:),"nino3.4 sst spatial composite (SON+0)","","") + z->nino34_spacomp_sst_djf1 = set_varAtts(n34sc_sst(12,:,:),"nino3.4 sst spatial composite (DJF+1)","","") + z->nino34_spacomp_sst_mam1 = set_varAtts(n34sc_sst(15,:,:),"nino3.4 sst spatial composite (MAM+1)","","") + + modname = str_sub_str(names_trefht(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.psl.sst.indices.tas."+syear_trefht(ee)+"-"+eyear_trefht(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z_tas = addfile(fn,"c") + z_tas@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z_tas@notes = "Data from "+names_trefht(ee)+" from "+syear_trefht(ee)+"-"+eyear_trefht(ee) + if (OPT_CLIMO.eq."Full") then + z_tas@climatology = syear_trefht(ee)+"-"+eyear_trefht(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z_tas@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z_tas@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + else + z_tas = addfile(fn,"w") + end if + z_tas->nino34_spacomp_tas_jja0 = set_varAtts(n34sc_tas(6,:,:),"nino3.4 tas spatial composite (JJA+0)","","") + z_tas->nino34_spacomp_tas_son0 = set_varAtts(n34sc_tas(9,:,:),"nino3.4 tas spatial composite (SON+0)","","") + z_tas->nino34_spacomp_tas_djf1 = set_varAtts(n34sc_tas(12,:,:),"nino3.4 tas spatial composite (DJF+1)","","") + z_tas->nino34_spacomp_tas_mam1 = set_varAtts(n34sc_tas(15,:,:),"nino3.4 tas spatial composite (MAM+1)","","") + delete(z_tas) + delete(modname) + + modname = str_sub_str(names_psl(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.sst.indices.psl."+syear_trefht(ee)+"-"+eyear_trefht(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z_psl = addfile(fn,"c") + z_psl@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z_psl@notes = "Data from "+names_trefht(ee)+" from "+syear_trefht(ee)+"-"+eyear_trefht(ee) + if (OPT_CLIMO.eq."Full") then + z_psl@climatology = syear_trefht(ee)+"-"+eyear_trefht(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z_psl@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z_psl@climatology = 
CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z_psl = addfile(fn,"w") + end if + z_psl->nino34_spacomp_psl_jja0 = set_varAtts(n34sc_psl(6,:,:),"nino3.4 psl spatial composite (JJA+0)","","") + z_psl->nino34_spacomp_psl_son0 = set_varAtts(n34sc_psl(9,:,:),"nino3.4 psl spatial composite (SON+0)","","") + z_psl->nino34_spacomp_psl_djf1 = set_varAtts(n34sc_psl(12,:,:),"nino3.4 psl spatial composite (DJF+1)","","") + z_psl->nino34_spacomp_psl_mam1 = set_varAtts(n34sc_psl(15,:,:),"nino3.4 psl spatial composite (MAM+1)","","") + delete(z_psl) + delete(modname) + end if + end if + end if + if (isvar("TIME")) then + delete(TIME) + end if + if (isvar("psl")) then + delete(psl) + end if + if (isvar("tas")) then + delete(tas) + end if +;-------------nino3.4 composite (precipitation)----------------------------------------------------- + if (any(ismissing((/syear(ee),syear_prect(ee),eyear(ee),eyear_prect(ee)/)))) then + pptreg_plot_flag = 1 + else + if (syear(ee).eq.syear_prect(ee)) then ; check that the start and end years match for ts, trefht, and psl + if (eyear(ee).eq.eyear_prect(ee)) then + pptreg_plot_flag = 0 + else + pptreg_plot_flag = 1 + end if + else + pptreg_plot_flag = 1 + end if + end if + + if (pptreg_plot_flag.eq.0) then + ppt = data_read_in(paths_prect(ee),"PRECT",syear_prect(ee),eyear_prect(ee)) + + yyyymm = cd_calendar(ppt&time,-1) ; convert ppt from CF-conforming time to YYYYMM for coding below + delete(ppt&time) + ppt&time = yyyymm + delete(yyyymm) + + if (isatt(ppt,"is_all_missing")) then + pptreg_plot_flag = 1 + delete(ppt) + end if + + if (nyr(ee).lt.15) then ; 15+ years needed for composites + pptreg_plot_flag = 1 + end if + + if (pptreg_plot_flag.eq.0) then ; only continue if all 3 fields are present + ; d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") ; mask ocean for TAS array + ; basemap = d->LSMASK ; This is now done right before plotting. + ; lsm = landsea_mask(basemap,tas&lat,tas&lon) ; so that the entire TAS array is used + ; tas = mask(tas,conform(tas,lsm,(/1,2/)).eq.0,False) ; in the nino3.4 pattern correlations + ; delete([/lsm,basemap/]) ; (Even if the land portion of TAS is the + ; delete(d) ; only portion plotted as SST shown over oceans.) 
+ + if (OPT_CLIMO.eq."Full") then + ppt = rmMonAnnCycTLL(ppt) + else + check_custom_climo(names_prect(ee),syear_prect(ee),eyear_prect(ee),CLIMO_SYEAR,CLIMO_EYEAR) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(ppt({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(ppt({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + ppt = calcMonAnomTLL(ppt,climo) + delete(climo) + end if + ppt = (/ dtrend_msg_n(ispan(0,dimsizes(ppt&time)-1,1),ppt,False,False,0) /) + + ta = dim_avg_n(ppt(:1,:,:),0) + ppt = runave_n_Wrap(ppt,3,0,0) + ppt(0,:,:) = (/ ta /) + delete(ta) + + hicntr = 0 + locntr = 0 + hiyr = new(dimsizes(nino34_ndj&time),integer) + loyr = hiyr + + do hh = 0,dimsizes(nino34_ndj)-1 + if (.not.ismissing(nino34_ndj(hh))) then + if (nino34_ndj(hh).ge.1) then + hiyr(hicntr) = nino34_ndj&time(hh) + hicntr = hicntr+1 + end if + if (nino34_ndj(hh).le.-1) then + loyr(locntr) = nino34_ndj&time(hh) + locntr = locntr+1 + end if + end if + end do + + if (hicntr.eq.0) then ; for simulations with climatological SSTs + highyr = hiyr(0) + else + highyr = hiyr(:hicntr-1) + end if + delete([/hiyr,hicntr/]) + if (locntr.eq.0) then + lowyr = loyr(0) + else + lowyr = loyr(:locntr-1) + end if + delete([/loyr,locntr/]) + + dimS = dimsizes(ppt&time) ; change time from YYYYMM->YYYY.frac + tmin = ppt&time(0)/100 + tmax = ppt&time(dimS-1)/100 + delete(ppt&time) + ppt&time = fspan(tmin*1.,(tmax*1.)+(11/12.),dimS) + delete([/dimS,tmin,tmax/]) + + sc_ppt_hi = ppt(:23,:,:) + sc_ppt_lo = ppt(:23,:,:) + + sc_ppt_hi = sc_ppt_hi@_FillValue + sc_ppt_lo = sc_ppt_lo@_FillValue + + if (dimsizes(highyr).le.1) then + print("For "+names(ee)+", 1 or less (normalized) nino3.4 value greater than one standard deviation found, setting nino3.4 spatial composites to missing") ; sc_*_hi arrays left to _FillValue + else + do gg = 0,23 + tt = gg/12. + sc_ppt_hi(gg,:,:) = (/ dim_avg_n(ppt({highyr+tt},:,:),0) /) + end do + delete(tt) + end if + delete(highyr) + if (dimsizes(lowyr).le.1) then + print("For "+names(ee)+", 1 or less (normalized) nino3.4 value less than -1 standard deviation found, setting nino3.4 spatial composites to missing") ; sc_*_lo arrays left to _FillValue + else + do gg = 0,23 + tt = gg/12. 
+            sc_ppt_lo(gg,:,:) = (/ dim_avg_n(ppt({lowyr+tt},:,:),0) /)
+         end do
+         delete(tt)
+      end if
+      delete(lowyr)
+
+      n34sc_ppt = sc_ppt_hi
+      n34sc_ppt = (/ sc_ppt_hi - sc_ppt_lo /)
+      delete([/sc_ppt_hi,sc_ppt_lo/])
+
+      if (OUTPUT_DATA.eq."True") then
+         modname = str_sub_str(names_prect(ee)," ","_")
+         bc = (/"/","'","(",")"/)
+         do gg = 0,dimsizes(bc)-1
+            modname = str_sub_str(modname,bc(gg),"_")
+         end do
+         fn = getenv("OUTDIR")+modname+".cvdp_data.sst.indices.ppt."+syear_trefht(ee)+"-"+eyear_trefht(ee)+".nc"
+         if (.not.isfilepresent2(fn)) then
+            z_ppt = addfile(fn,"c")
+            z_ppt@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION")
+            z_ppt@notes = "Data from "+names_trefht(ee)+" from "+syear_trefht(ee)+"-"+eyear_trefht(ee)
+            if (OPT_CLIMO.eq."Full") then
+               z_ppt@climatology = syear_trefht(ee)+"-"+eyear_trefht(ee)+" climatology removed prior to all calculations (other than means)"
+            else
+               if (CLIMO_SYEAR.lt.0) then
+                  z_ppt@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)"
+               else
+                  z_ppt@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)"
+               end if
+            end if
+            z_ppt@Conventions = "CF-1.6"
+         else
+            z_ppt = addfile(fn,"w")
+         end if
+         z_ppt->nino34_spacomp_pr_jja0 = set_varAtts(n34sc_ppt(6,:,:),"nino3.4 pr spatial composite (JJA+0)","","")
+         z_ppt->nino34_spacomp_pr_son0 = set_varAtts(n34sc_ppt(9,:,:),"nino3.4 pr spatial composite (SON+0)","","")
+         z_ppt->nino34_spacomp_pr_djf1 = set_varAtts(n34sc_ppt(12,:,:),"nino3.4 pr spatial composite (DJF+1)","","")
+         z_ppt->nino34_spacomp_pr_mam1 = set_varAtts(n34sc_ppt(15,:,:),"nino3.4 pr spatial composite (MAM+1)","","")
+         delete(z_ppt)
+         delete(modname)
+      end if
+     end if
+   end if
+   if (isvar("TIME")) then
+      delete(TIME)
+   end if
+   if (isvar("ppt")) then
+      delete(ppt)
+   end if
+   delete([/sst,nino34_ndj/])
+;-----------------------------------------------------------------------------------------
+   if (nyr(ee).ge.35) then   ; need a minimum number of years to compute running nino3.4 standard deviations
+      nino34T = dtrend_msg(ispan(0,dimsizes(nino34T)-1,1),nino34T,True,False)
+      nino34T!0 = "time"
+      nino34T&time = nino34&time
+      sd_run = nino34T
+      sd_run = sd_run@_FillValue
+      sd_run@units = nino34@units
+      sd_run@long_name = "nino3.4 30yr running standard deviation"
+      do gg = 180,dimsizes(nino34T)-180
+         sd_run(gg) = (/ dim_stddev(nino34T(gg-180:gg+179)) /)
+      end do
+      if (OUTPUT_DATA.eq."True") then
+         z->nino34_runstddev = set_varAtts(sd_run,"","","")
+      end if
+   end if
+   delete(nino34T)
+;-----------------------------------------------------------------------------------------
+   iopt = 0   ; nino3.4 power spectra
+   jave = (7*nyr(ee))/100
+   val1 = .95
+   val2 = .99
+   pct = 0.1
+   spectra_mvf = False   ; missing value flag for nino3.4
+   if (any(ismissing(nino34))) then   ; check for missing data
+      print("Missing data detected for "+names(ee)+", not creating spectra in sst.indices.ncl")
+      spectra_mvf = True
+      if (isfilepresent2("obs_ts").and.ee.eq.0) then
+         spectra_mvf_obs = True
+      end if
+   else
+      if (isfilepresent2("obs_ts").and.ee.eq.0) then
+         spectra_mvf_obs = False   ; missing value flag for obs nino3.4
+      end if
+      nino34_dt = dtrend_msg(ispan(0,dimsizes(nino34)-1,1),nino34,True,False)
+
+      sdof = specx_anal(nino34_dt,iopt,jave,pct)
+      mval = sum(1/(1.+((sdof@xlag1)^2)-((2*sdof@xlag1)*cos(6.28318*sdof@frq))))
+      if (mval.eq.0) then   ; check for cyclic data that results in sum of Markov elements = 0.
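+         ; mval sums the theoretical AR(1) ("Markov") red-noise spectral shape
+         ; 1/(1+r^2-2r*cos(2*pi*f)) over all frequencies, with r the lag-1
+         ; autocorrelation returned by specx_anal; the sum is only zero for
+         ; degenerate (e.g. perfectly cyclic) input, in which case the spectra
+         ; are flagged as missing rather than plotted.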
+         spectra_mvf = True
+         if (isfilepresent2("obs_ts").and.ee.eq.0) then
+            spectra_mvf_obs = True   ; missing value flag for obs nino3.4
+         end if
+      else
+         splt1 = specx_ci(sdof,val1,val2)
+         if (OUTPUT_DATA.eq."True") then
+            splt1!0 = "ncurves"
+            splt1&ncurves = ispan(0,3,1)
+            splt1&ncurves@long_name = "power spectra curves"
+            splt1&ncurves@units = "1"
+            splt1!1 = "frequency"
+            splt1&frequency = sdof@frq
+            splt1&frequency@long_name = "power spectra frequency"
+            splt1&frequency@units = "1"
+            splt1@units_info = "df refers to frequency interval"
+            splt1@units = "C^2/df"
+            splt1@comment_cvdp = "(0,:)=spectrum,(1,:)=Markov red noise spectrum, (2,:)="+val1+"% confidence bound for Markov, (3,:)="+val2+"% confidence bound for Markov"
+            z->nino34_spectra = set_varAtts(splt1,"nino3.4 power spectra","","")
+         end if
+         if (isfilepresent2("obs_ts").and.ee.eq.0) then
+            sdof_obs = sdof
+         end if
+      end if
+      delete([/nino34_dt,iopt,jave,pct,mval/])
+   end if
+;------------------------------------------------------------------------------------------
+   nino34_dt = dtrend_msg(ispan(0,dimsizes(nino34&time)-1,1),nino34,True,False)
+   nino34_mon_sd = new(12,typeof(nino34))
+
+   do hh = 0,11
+      nino34_mon_sd(hh) = (/ dim_stddev(nino34_dt(hh::12)) /)
+   end do
+   nino34_mon_sd@units = "C"
+   delete(nino34_dt)
+   if (OUTPUT_DATA.eq."True") then
+      time_mon2 = ispan(0,11,1)
+      time_mon2@units = "months since 0000-01-01 00:00:00"
+      time_mon2@long_name = "Time"
+      time_mon2@standard_name = "time"
+      time_mon2@calendar = "standard"
+      time_mon2!0 = "time_mon2"
+      time_mon2&time_mon2 = time_mon2
+      nino34_mon_sd!0 = "time_mon2"
+      nino34_mon_sd&time_mon2 = time_mon2
+      z->nino34_monthly_stddev = set_varAtts(nino34_mon_sd,"nino3.4 monthly standard deviation","","")
+      delete(time_mon2)
+   end if
+;------------------------------------------------------------------------------------------
+; nino3.4 wavelet analysis, autocorrelation
+
+   if (spectra_mvf.eq.False) then
+      N = dimsizes(nino34)
+      mother = 0
+      param = 6.0
+      dt = 1./12.
+      s0 = dt
+      dj = 1./12.
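+      ; Wavelet setup follows the Torrence & Compo conventions used by NCL's
+      ; wavelet(): mother=0 selects the Morlet wavelet with nondimensional
+      ; frequency param=6; dt is one month in years, s0 the smallest resolvable
+      ; scale, and dj=1/12 gives 12 sub-octaves per octave, so jtot below
+      ; spans log2(N*dt/s0) octaves in total.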
+ jtot = 1+floattointeger(((log10(N*dt/s0))/dj)/log10(2.)) + npad = N + nadof = 0 + noise = 1 + siglvl = .05 + isigtest= 0 + wave34 = wavelet(nino34,mother,dt,param,s0,dj,jtot,npad,noise,isigtest,siglvl,nadof) + + power34 = onedtond(wave34@power,(/jtot,N/)) + power34!0 = "period" + power34&period = wave34@period + power34&period@long_name = "wavelet period" + power34&period@units = "1" + power34!1 = "time" + power34&time = nino34&time + power34@units = nino34@units+"^2" + + sig34 = power34 + sig34 = power34/conform (power34,wave34@signif,0) + sig34@long_name = "wavelet significance" + sig34@units = "" + delete([/N,mother,param,dt,s0,dj,jtot,npad,nadof,noise,siglvl,isigtest/]) + + ac34 = esacr(nino34,48) + time_mon3 = ispan(0,48,1) + time_mon3@units = "months since 0000-01-01 00:00:00" + time_mon3@long_name = "Time" + time_mon3@standard_name = "time" + time_mon3@calendar = "standard" + time_mon3!0 = "time_mon3" + time_mon3&time_mon3 = time_mon3 + ac34!0 = "time_mon3" + ac34&time_mon3 = time_mon3 + ac34@units = "1" + if (OUTPUT_DATA.eq."True") then + z->nino34_wavelet_power = set_varAtts(power34,"nino3.4 wavelet power","","") + z->nino34_wavelet_significance = set_varAtts(sig34,"nino3.4 wavelet significance","","") + z->nino34_autocorrelation = set_varAtts(ac34,"nino3.4 autocorrelation","","") + end if + if (isfilepresent2("obs_ts").and.ee.eq.0) then + ac34_obs = ac34 + end if + end if + if (isvar("z")) then + delete(z) + end if +;========================================================================================== + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnRightString = "" + xyres@gsnLeftString = "" + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + xyres@xyLineColor = "gray62" + if (wks_type.eq."png") then + xyres@xyLineThicknessF = .75 + else + xyres@xyLineThicknessF = .5 + end if + xyres@tiYAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnLeftStringFontHeightF = 0.017 + xyres@gsnCenterStringFontHeightF = 0.017 + xyres@gsnRightStringFontHeightF = 0.013 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnLeftStringFontHeightF = 0.024 + xyres@gsnCenterStringFontHeightF = 0.024 + xyres@gsnRightStringFontHeightF = 0.020 + end if +; xyres@vpXF = 0.05 + xyres@vpHeightF = 0.3 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnCenterString = "" + + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+1.5 + xyres@tiMainOn = False + xyres@gsnLeftStringOrthogonalPosF = 0.025 + xyres@gsnCenterStringOrthogonalPosF = xyres@gsnLeftStringOrthogonalPosF + xyres@gsnRightStringOrthogonalPosF = xyres@gsnLeftStringOrthogonalPosF + + xyres2 = xyres + xyres2@vpHeightF = 0.15 + xyres2@gsnXYBarChart = False + + xyres2@xyLineColor = "royalblue" + xyres2@trYMinF = 0.3 ; hard wire YMinF and YMaxF for running stddev plots + xyres2@trYMaxF = 1.8 + if (wks_type.eq."png") then + xyres2@xyLineThicknessF = 3.5 + else + xyres2@xyLineThicknessF = 1.75 + end if + delete(xyres2@gsnYRefLine) + xyres2@gsnYRefLine = (/.6,0.9,1.2,1.5/) + xyres2@gsnYRefLineColor = "gray85" + + xyres3 = xyres ; resource list for monthly nino3.4 standard deviations + xyres3@trXMinF = 0.5 + xyres3@trXMaxF = 12.5 + xyres3@vpWidthF = 0.65 + xyres3@vpHeightF = 0.35 + xyres3@trYMinF = 0.2 + xyres3@trYMaxF = 2.0 + xyres3@gsnAboveYRefLineColor = "gray50" + xyres3@xyLineColor = "black" + if 
(wks_type.eq."png") then + xyres3@xyLineThicknessF = 3.5 + else + xyres3@xyLineThicknessF = 1.75 + end if + xyres3@gsnXYBarChart = True + xyres3@gsnXYBarChartBarWidth = 0.75 + xyres3@tmXBMode = "Explicit" ; explicit labels + xyres3@tmXBValues = ispan(1,12,1) + xyres3@tmXBLabels = (/"Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"/) + xyres3@tmXTOn = False + + xyres4 = xyres ; resource list for nino3.4 autocorrelations + xyres4@trXMinF = 0.0 + xyres4@trXMaxF = 48.0 + xyres4@trYMinF = -1.05 + xyres4@trYMaxF = 1.05 + xyres4@vpHeightF = 0.3 + xyres4@vpWidthF = 0.3 + if (wks_type.eq."png") then + xyres4@xyLineThicknessF = 3.5 + else + xyres4@xyLineThicknessF = 1.75 + end if + xyres4@xyLineColor = "black" + xyres4@gsnAboveYRefLineColor = "firebrick2" + xyres4@gsnBelowYRefLineColor = "dodgerblue3" + xyres4@tmYLMode = "Explicit" + xyres4@tmYLValues = (/-1,0,1/) + xyres4@tmYLLabels = (/"-1","0","1"/) + xyres4@tmYLMinorValues = fspan(-1,1,9) + xyres4@tmXBMode = "Explicit" + xyres4@tmXBValues = (/0,12,24,36,48/) + xyres4@tmXBLabels = (/"0","12","24","36","48"/) + if (nsim.le.5) then + xyres4@tmXBLabelFontHeightF = 0.0105 + xyres4@tmYLLabelFontHeightF = 0.0105 + xyres4@gsnLeftStringFontHeightF = 0.015 + xyres4@gsnCenterStringFontHeightF = 0.015 + xyres4@gsnRightStringFontHeightF = 0.012 + else + xyres4@tmXBLabelFontHeightF = 0.015 + xyres4@tmYLLabelFontHeightF = 0.015 + xyres4@gsnLeftStringFontHeightF = 0.021 + xyres4@gsnCenterStringFontHeightF = 0.021 + xyres4@gsnRightStringFontHeightF = 0.016 + end if + xyres4@gsnRightStringOrthogonalPosF = -0.115 + xyres4@gsnRightStringParallelPosF = 0.96 + xyres4@gsnCenterStringOrthogonalPosF = 0.025 + + + xyres@gsnXYAboveFillColors = "red" + xyres@gsnXYBelowFillColors = "blue" + xyres@gsnLeftString = names(ee) + arr = new((/2,dimsizes(nino34)/),typeof(nino34)) + + tttt = dtrend_msg(ispan(0,dimsizes(nino34)-1,1),nino34,False,True) + arr(0,:) = (/ nino34 /) + arr(1,:) = (/ (ispan(0,dimsizes(nino34)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(nino34),2,True)+nino34@units+" "+nyr(ee)+"yr~S~-1~N~" + xyn34(ee) = gsn_csm_xy(wks_n34,fspan(syear(ee),eyear(ee)+.91667,dimsizes(nino34)),arr,xyres) + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(nino3)-1,1),nino3,False,True) + arr(0,:) = (/ nino3 /) + arr(1,:) = (/ (ispan(0,dimsizes(nino3)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(nino3),2,True)+nino3@units+" "+nyr(ee)+"yr~S~-1~N~" + xyn3(ee) = gsn_csm_xy(wks_n3,fspan(syear(ee),eyear(ee)+.91667,dimsizes(nino3)),arr,xyres) + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(nino4)-1,1),nino4,False,True) + arr(0,:) = (/ nino4 /) + arr(1,:) = (/ (ispan(0,dimsizes(nino4)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(nino4),2,True)+nino4@units+" "+nyr(ee)+"yr~S~-1~N~" + xyn4(ee) = gsn_csm_xy(wks_n4,fspan(syear(ee),eyear(ee)+.91667,dimsizes(nino4)),arr,xyres) + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(nino12)-1,1),nino12,False,True) + arr(0,:) = (/ nino12 /) + arr(1,:) = (/ (ispan(0,dimsizes(nino12)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(nino12),2,True)+nino12@units+" "+nyr(ee)+"yr~S~-1~N~" + xyn12(ee) = gsn_csm_xy(wks_n12,fspan(syear(ee),eyear(ee)+.91667,dimsizes(nino12)),arr,xyres) + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(tna)-1,1),tna,False,True) + arr(0,:) = (/ tna /) + arr(1,:) = (/ 
(ispan(0,dimsizes(tna)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(tna),2,True)+tna@units+" "+nyr(ee)+"yr~S~-1~N~" + xytna(ee) = gsn_csm_xy(wks_tna,fspan(syear(ee),eyear(ee)+.91667,dimsizes(tna)),arr,xyres) + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(tsa)-1,1),tsa,False,True) + arr(0,:) = (/ tsa /) + arr(1,:) = (/ (ispan(0,dimsizes(tsa)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(tsa),2,True)+tsa@units+" "+nyr(ee)+"yr~S~-1~N~" + xytsa(ee) = gsn_csm_xy(wks_tsa,fspan(syear(ee),eyear(ee)+.91667,dimsizes(tsa)),arr,xyres) + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(tio)-1,1),tio,False,True) + arr(0,:) = (/ tio /) + arr(1,:) = (/ (ispan(0,dimsizes(tio)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(tio),2,True)+tio@units+" "+nyr(ee)+"yr~S~-1~N~" + xytio(ee) = gsn_csm_xy(wks_tio,fspan(syear(ee),eyear(ee)+.91667,dimsizes(tio)),arr,xyres) + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(iod)-1,1),iod,False,True) + arr(0,:) = (/ iod /) + arr(1,:) = (/ (ispan(0,dimsizes(iod)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(iod),2,True)+iod@units+" "+nyr(ee)+"yr~S~-1~N~" + xyiod(ee) = gsn_csm_xy(wks_tio,fspan(syear(ee),eyear(ee)+.91667,dimsizes(iod)),arr,xyres) + delete([/tttt/]) + + tttt = dtrend_msg(ispan(0,dimsizes(socn)-1,1),socn,False,True) + arr(0,:) = (/ socn /) + arr(1,:) = (/ (ispan(0,dimsizes(socn)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(socn),2,True)+socn@units+" "+nyr(ee)+"yr~S~-1~N~" + xysocn(ee) = gsn_csm_xy(wks_tio,fspan(syear(ee),eyear(ee)+.91667,dimsizes(socn)),arr,xyres) + delete([/tttt/]) + + tttt = dtrend_msg(ispan(0,dimsizes(amm)-1,1),amm,False,True) + arr(0,:) = (/ amm /) + arr(1,:) = (/ (ispan(0,dimsizes(amm)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(amm),2,True)+amm@units+" "+nyr(ee)+"yr~S~-1~N~" + xyamm(ee) = gsn_csm_xy(wks_tio,fspan(syear(ee),eyear(ee)+.91667,dimsizes(amm)),arr,xyres) + delete([/tttt/]) + + tttt = dtrend_msg(ispan(0,dimsizes(atl3)-1,1),atl3,False,True) + arr(0,:) = (/ atl3 /) + arr(1,:) = (/ (ispan(0,dimsizes(atl3)-1,1)*tttt@slope)+tttt@y_intercept /) + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(atl3),2,True)+atl3@units+" "+nyr(ee)+"yr~S~-1~N~" + xyatl3(ee) = gsn_csm_xy(wks_tio,fspan(syear(ee),eyear(ee)+.91667,dimsizes(atl3)),arr,xyres) + delete([/arr,tttt/]) + + xyres2@gsnLeftString = names(ee) + if (nyr(ee).ge.35) then + xyres2@gsnRightString = sprintf("%4.2f", min(sd_run))+" / "+sprintf("%4.2f", avg(sd_run))+" / "+sprintf("%4.2f", max(sd_run))+sd_run@units + xyn34_rst(ee) = gsn_csm_xy(wks_n34_rst,fspan(syear(ee),eyear(ee)+.91667,dimsizes(sd_run)),sd_run,xyres2) + end if + + xyres3@gsnRightStringFontHeightF = xyres3@gsnCenterStringFontHeightF + xyres3@gsnLeftString = syear(ee)+"-"+eyear(ee) + xyres3@gsnCenterString = names(ee) + xyres3@gsnRightString = "C" + if (max(nino34_mon_sd).gt.xyres3@gsnYRefLine) then + xyn34_mst(ee) = gsn_csm_xy(wks_n34_mst,ispan(1,12,1),nino34_mon_sd,xyres3) + end if + title_n34 = nino34@comment_cvdp + title_n4 = nino4@comment_cvdp + title_n3 = nino3@comment_cvdp + title_n12 = nino12@comment_cvdp + + title_tna = tna@comment_cvdp + title_tsa = tsa@comment_cvdp + + title_tio = tio@comment_cvdp + title_iod = iod@comment_cvdp + title_socn = socn@comment_cvdp + title_amm = amm@comment_cvdp + 
title_atl3 = atl3@comment_cvdp + delete([/nino34,nino3,nino4,nino12,tsa,tna,tio,iod,socn,amm,atl3/]) + + if (spectra_mvf.eq.False) then + xyres4@gsnCenterString = names(ee) + xyres4@gsnRightString = syear(ee)+"-"+eyear(ee) + xyn34_ac(ee) = gsn_csm_xy(wks_n34_p,ispan(0,48,1),ac34,xyres4) + if (ee.ge.1.and.isvar("ac34_obs")) then + delete([/xyres4@gsnAboveYRefLineColor,xyres4@gsnBelowYRefLineColor/]) + xyres4@xyLineColor = "gray62" + xyres4@xyCurveDrawOrder = "PreDraw" + xyres4@gsnCenterString = "" + xyres4@gsnRightString = "" + xyn34_ac_obs(ee) = gsn_csm_xy(wks_n34_p,ispan(0,48,1),ac34_obs,xyres4) + overlay(xyn34_ac(ee),xyn34_ac_obs(ee)) + delete(xyres4@xyCurveDrawOrder) + delete(ac34) + end if + end if + delete([/xyres,xyres2,xyres3,xyres4,nino34_mon_sd/]) +;- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + res = True + res@vpHeightF = 0.45 + res@vpWidthF = 0.35 + res@gsnFrame = False + res@gsnDraw = False + + + res@tmYLMode = "Explicit" +; res@tmYLValues = ispan(0,72,6) +; res@tmYLLabels = (/"Jan~S~-2~N~","Jul~S~-2~N~","Jan~S~-1~N~","Jul~S~-1~N~", \ +; "Jan~S~0~N~","Jul~S~0~N~","Jan~S~+1~N~","Jul~S~+1~N~", \ +; "Jan~S~+2~N~","Jul~S~+2~N~","Jan~S~+3~N~","Jul~S~+3~N~","Jan~S~+4~N~"/) + res@trYMinF = 24 + res@trYMaxF = 52 + res@tmYLValues = ispan(24,52,4) + res@tmYLLabels = (/"Jan~S~0~N~","May~S~0~N~","Sep~S~0~N~","Jan~S~+1~N~", \ + "May~S~+1~N~","Sep~S~+1~N~","Jan~S~+2~N~","May~S~+2~N~"/) + res@tmYLMinorValues = ispan(24,52,2) + res@tmYLLabelJust = "CenterCenter" + res@tmYLLabelDeltaF = 1.3 ;0.05 + res@cnFillOn = True + res@gsnSpreadColors = True + res@gsnSpreadColorEnd = 19 + + res@lbLabelBarOn = False + + res@tiMainOn = False + res@cnInfoLabelOn = False + res@cnLinesOn = True + res@cnLevelSelectionMode = "ExplicitLevels" + res@cnLevels = (/-3,-2.5,-2,-1.5,-1,-.75,-.5,-.25,0,.25,.5,.75,1,1.5,2,2.5,3/) ;fspan(-2.,2.,17) + carr = new(dimsizes(res@cnLevels),"string") + carr = "transparent" + carr(8) = "gray50" + res@cnMonoLineColor = False + res@cnLineColors = carr + res@cnLineLabelsOn = False + res@tmYLLabelFontHeightF = 0.014 + res@tmXBLabelFontHeightF = 0.014 + res@gsnMajorLonSpacing = 30. + res@gsnMinorLonSpacing = 10. + res@tiYAxisOn = False + + if (wks_type.eq."png") then + res@cnLineThicknessF = 2. + else + res@cnLineThicknessF = 1. + end if + res@gsnCenterStringOrthogonalPosF = 0.025 + res@gsnRightStringOrthogonalPosF = res@gsnCenterStringOrthogonalPosF + res@gsnCenterStringFontHeightF = 0.017 + res@gsnLeftStringFontHeightF = 0.017 + res@gsnRightStringFontHeightF = 0.017 + + res@gsnLeftString = "" + res@gsnCenterString= "" + res@gsnRightString = "" + + if (isfilepresent2("obs_ts").and.ee.eq.0) then ; for metrics table + patcor_hov_hi = new((/nsim,dimsizes(finsst_hi&time),dimsizes(finsst_hi&lon)/),typeof(finsst_hi)) + patcor_hov_hi!1 = "time" + patcor_hov_hi&time = finsst_hi&time + patcor_hov_hi!2 = "lon" + patcor_hov_hi&lon = finsst_hi&lon + + patcor_hov_lo = patcor_hov_hi + + patcor_hov_hi(ee,:,:) = (/ finsst_hi /) + patcor_hov_lo(ee,:,:) = (/ finsst_lo /) + end if + if (isfilepresent2("obs_ts").and.ee.ge.1.and.isvar("patcor_hov_hi")) then + dimT = dimsizes(finsst_hi&time) + do hh = 0,dimT-1 ; need to loop over each timestep, using linint1 to interpolate to set longitudes. 
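+         ; linint1 regrids each simulation's composite onto the longitude grid
+         ; of case 0 (the observations) so that the metrics-table pattern
+         ; correlations compare fields on a common grid; totype casts the
+         ; interpolated values back to the type of the preallocated array.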
+ patcor_hov_hi(ee,hh,:) = (/ totype(linint1(finsst_hi&lon,finsst_hi(hh,:),False,patcor_hov_hi&lon,0),typeof(patcor_hov_hi)) /) + patcor_hov_lo(ee,hh,:) = (/ totype(linint1(finsst_lo&lon,finsst_lo(hh,:),False,patcor_hov_lo&lon,0),typeof(patcor_hov_lo)) /) + end do + end if + + res@gsnCenterString = names(ee) ;"El Nin~H-13V2F35~D~FV-2H3F21~o" + res@gsnRightString = cntr_hi + plot_n34hi(ee) = gsn_csm_hov(wks_n34_tlon_hi,finsst_hi,res) + + res@gsnRightString = cntr_lo + plot_n34lo(ee) = gsn_csm_hov(wks_n34_tlon_lo,finsst_lo,res) + delete([/finsst_hi,finsst_lo,finsst_mid/]) + delete(res) +;- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + wres = True + wres@gsnDraw = False + wres@gsnFrame = False + wres@vpWidthF = 0.7 + wres@vpHeightF = 0.3 + wres@cnFillOn = True + wres@cnLinesOn = False + wres@cnLineLabelsOn = False + wres@cnInfoLabelOn = False + wres@trYReverse = True + wres@trYMinF = 1.0 + wres@trYMaxF = (nyr(ee)/2) - (nyr(ee)*0.05) + wres@tmYLOn = True + wres@tmYLMode = "Explicit" + if (nyr(ee).lt.200) then + wres@tmYLValues = (/1,2,3,5,10,20,50,100,150/) + else + wres@tmYLValues = (/1,5,10,50,100,200,500,1000,2000,5000,10000/) + end if + wres@tmYLLabels = wres@tmYLValues + wres@cnLevelSelectionMode = "ExplicitLevels" + wres@cnLevels = ispan(0,70,5) + if (COLORMAP.eq.0) then + wres@cnFillPalette = "precip3_16lev" + else + wres@cnFillPalette = "cb_rainbow" + end if + wres@tmXTLabelFontHeightF = 0.018 + wres@tmXBLabelFontHeightF = 0.018 + wres@tmYLLabelFontHeightF = 0.018 + wres@tiYAxisString = "Period (years)" + wres@tiXAxisOn = False + wres@lbLabelBarOn = False + wres@gsnLeftString = "" + wres@gsnCenterString = "" + wres@gsnRightString = "" + wres@gsnCenterStringOrthogonalPosF = 0.025 + + wsres = True ; wsres = significance (probability) plot resources + wsres@trYReverse = True + wsres@tmYLMode = "Explicit" + wsres@tmYLValues = wres@tmYLValues + wsres@tmYLLabels = wres@tmYLLabels + wsres@gsnDraw = False ; Do not draw plot + wsres@gsnFrame = False ; Do not advance frame + wsres@cnLevelSelectionMode = "ManualLevels" ; set manual contour levels + wsres@cnMinLevelValF = 0.00 ; set min contour level + wsres@cnMaxLevelValF = 2.00 ; set max contour level + wsres@cnLevelSpacingF = 1.00 ; set contour spacing + wsres@cnInfoLabelOn = False + wsres@cnLinesOn = False ; do not draw contour lines + wsres@cnLineLabelsOn = False ; do not draw contour labels + wsres@cnFillScaleF = 0.5 ; add extra density + wsres@cnFillDotSizeF = .0015 + wsres@gsnLeftString = "" + wsres@gsnCenterString = "" + wsres@gsnRightString = "" + + wavecoi = True + wavecoi@gsEdgeColor = "gray40" + wavecoi@gsFillColor = wavecoi@gsEdgeColor +; wavecoi@gsFillOpacityF = 0.15 + if (wks_type.eq."png") then + wavecoi@gsFillLineThicknessF = 2.0 + wavecoi@gsEdgeThicknessF = 2.0 + else + wavecoi@gsFillLineThicknessF = 1.25 + wavecoi@gsEdgeThicknessF = 1.25 + end if + wavecoi@gsFillIndex = 3 + wavecoi@gsFillScaleF = .65 + + + if (spectra_mvf.eq.False) then + wres@gsnLeftString = "" + wres@gsnCenterString = names(ee) + delete(power34&time) + power34&time = fspan(syear(ee),eyear(ee)+.91667,nyr(ee)*12) + delete(sig34&time) + sig34&time = power34&time + plot_wave34(ee) = gsn_csm_contour(wks_n34_p,power34,wres) + plot_wave34(ee) = ShadeCOI(wks_n34_p,plot_wave34(ee),wave34,power34&time,wavecoi) + o0 = gsn_csm_contour(wks_n34_p,sig34,wsres) + opt = True + opt@gsnShadeFillType = "pattern" + opt@gsnShadeHigh = 17 + o0 = gsn_contour_shade(o0,0, 0.8, opt) + overlay(plot_wave34(ee),o0) + delete([/o0,opt,power34,sig34,wave34/]) + end if 
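+ ; (plot order above: the base field is wavelet power, the cone of influence is hatched via the CVDP helper ShadeCOI, and gsn_contour_shade stipples regions where sig34 exceeds 0.8 using fill pattern 17)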
+ delete([/wres,wsres,wavecoi/]) +;- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + pres = True + pres@vpXF = 0.07 + pres@trYMinF = 0. + pres@trXMinF = 0.0 +; pres@trYMaxF = 82. + pres@trXMaxF = 0.0832 + pres@tiYAxisString = "Power" ; yaxis + pres@xyLineColor = "black" + pres@gsnFrame = False + pres@gsnDraw = False + + pres@tmXBLabelDeltaF = -.8 + pres@tmXTLabelDeltaF = -.8 + pres@pmLegendDisplayMode = "Never" + if (wks_type.eq."png") then + pres@xyLineThicknesses = (/3.5,2.,1.,1./) + else + pres@xyLineThicknesses = (/2.5,1.5,1.,1./) + end if + pres@xyDashPatterns = (/0,0,0,0/) + pres@xyLineColors = (/"foreground","red","blue","green"/) + pres@xyLabelMode = "custom" + pres@xyLineLabelFontColors = pres@xyLineColors + pres@xyExplicitLabels = (/"","",val1*100+"%",val2*100+"%"/) + pres@tmXTOn = True + pres@tmYROn = False + pres@tmXTLabelsOn = True + pres@tmXUseBottom = False + pres@tmXTMode = "Explicit" + pres@tmXBMode = "Explicit" + pres@tmXTValues = (/".00167",".00833",".01667",".02778",".0416",".0556",".0832"/) + pres@tmXTLabels = (/"50","10","5","3","2","1.5","1"/) + pres@tmXBValues = (/".0",".01",".02",".03",".042",".056",".083"/) + pres@tmXBLabels = pres@tmXBValues + pres@tmXTLabelFontHeightF = 0.018 + pres@tmXBLabelFontHeightF = 0.018 + pres@tmYLLabelFontHeightF = 0.018 + pres@tiYAxisString = "Power (~S~o~N~C~S~2~N~ / cycles mo~S~-1~N~)" ; yaxis + pres@tiXAxisString = "Frequency (cycles mo~S~-1~N~)" + pres@tiMainString = "" + pres@txFontHeightF = 0.015 + pres@xyLineLabelFontHeightF = 0.022 + pres@tiXAxisFontHeightF = 0.025 + pres@tiYAxisFontHeightF = 0.025 + pres@tiMainFontHeightF = 0.03 + pres@gsnRightStringOrthogonalPosF = -0.115 + + if (spectra_mvf.eq.False) then + if (isfilepresent2("obs_ts").and.ee.ge.1.and.spectra_mvf_obs.eq.False) then + val = new(2,typeof(sdof_obs@spcx)) + val(0) = max(sdof_obs@spcx) + val(1) = totype(max(splt1(0,:)),typeof(sdof_obs@spcx)) + mval = max(val) + delete(val) + else + mval = max(splt1(0,:)) + end if + if (mval.lt.70) then + pres@trYMaxF = 75. + pres@tmYLMode = "Explicit" + pres@tmYLValues = (/0,25,50,75/) + pres@tmYLLabels = pres@tmYLValues + pres@tmYLMinorValues = ispan(5,70,5) + end if + if (mval.ge.70.and.mval.lt.145) then + pres@trYMaxF = 150. + pres@tmYLMode = "Explicit" + pres@tmYLValues = (/0,50,100,150/) + pres@tmYLLabels = pres@tmYLValues + pres@tmYLMinorValues = ispan(10,140,10) + end if + if (mval.ge.145) then + pres@trYMaxF = mval+15. 
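+ ; for spectra peaking at or above 145 no explicit y tick values are set; the axis simply extends slightly beyond the spectral maximum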
+ end if + delete(mval) + end if + + pres@tiMainOn = False + pres@gsnCenterString = "Period (years)" + pres@gsnCenterStringFontHeightF = pres@tiYAxisFontHeightF + pres@gsnRightStringFontHeightF = pres@tiYAxisFontHeightF - 0.005 + pres@gsnRightString = syear(ee)+"-"+eyear(ee)+" " + pres@gsnLeftString = "" + pres@gsnCenterString = names(ee) + if (spectra_mvf.eq.False) then + pspec(ee) = gsn_csm_xy(wks_n34_p,sdof@frq,splt1,pres) + if (isfilepresent2("obs_ts").and.ee.ge.1.and.spectra_mvf_obs.eq.False) then + pres@xyLineColors = (/"gray70","black","black","black"/) + pres@xyCurveDrawOrder = "PreDraw" + pres@gsnCenterString = "" + pres@gsnRightString = "" + pspec_obs(ee) = gsn_csm_xy(wks_n34_p,sdof_obs@frq,sdof_obs@spcx,pres) + overlay(pspec(ee),pspec_obs(ee)) + delete(pres@xyCurveDrawOrder) + end if + delete([/sdof,splt1/]) + end if + delete([/val1,val2,pres/]) +;- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (nyr(ee).ge.35) then + delete(sd_run) + end if + + scres = True ; scres = spatial composite res + scres@mpProjection = "WinkelTripel" + scres@mpGeophysicalLineColor = "gray42" + + scres@mpPerimOn = False + scres@mpGridLatSpacingF = 90 ; change latitude line spacing + scres@mpGridLonSpacingF = 180. ; change longitude line spacing + scres@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + scres@mpGridAndLimbOn = True ; turn on lat/lon lines + scres@mpFillOn = False + scres@mpCenterLonF = 210. + scres@mpOutlineOn = True + scres@gsnDraw = False + scres@gsnFrame = False + + scres@cnLevelSelectionMode = "ExplicitLevels" + scres@cnLevels = (/-4,-3,-2,-1.5,-1,-.5,-.25,0,.25,.5,1,1.5,2,3.,4/) + + scres@cnLineLabelsOn = False + scres@cnFillOn = True + scres@cnLinesOn = False +; scres@mpOutlineDrawOrder = "PostDraw" +; scres@cnFillMode = "RasterFill" + scres@mpOutlineDrawOrder = "PostDraw" + scres@cnFillMode = "AreaFill" + scres@lbLabelBarOn = False + scres@cnInfoLabelOn = False + scres@gsnAddCyclic = True + + + scres@gsnLeftStringOrthogonalPosF = -0.05 + scres@gsnLeftStringParallelPosF = .005 + scres@gsnRightStringOrthogonalPosF = -0.05 + scres@gsnRightStringParallelPosF = 0.96 + scres@gsnRightString = cntr_hi+"/"+cntr_lo ; list number of El Nino / La Nina events that formed composites + scres@gsnLeftString = "" + scres@gsnLeftStringFontHeightF = 0.014 + scres@gsnCenterStringFontHeightF = 0.018 + scres@gsnRightStringFontHeightF = 0.014 + + delete([/cntr_hi,cntr_lo,cntr_mid/]) + + scres4 = scres ; scres4 = ppt composite resources + delete(scres4@cnLevels) + if (COLORMAP.eq.0) then + scres4@cnLevels = (/-10,-8,-6,-4,-3,-2,-1,-.5,-.25,0,.25,.5,1,2,3,4,6,8,10/) + else + scres4@cnLevels = (/-5,-3,-2,-1,-.5,0,.5,1,2,3,5/) + end if + + scres2 = True + scres2@gsnDraw = False + scres2@gsnFrame = False + scres2@cnLevelSelectionMode = "ExplicitLevels" + scres2@cnLevels = scres@cnLevels + + scres2@cnLineLabelsOn = False + scres2@cnFillOn = True + scres2@cnLinesOn = False + scres2@cnFillMode = "AreaFill" + scres2@lbLabelBarOn = False + scres2@cnInfoLabelOn = False + scres2@gsnRightString = "" + scres2@gsnLeftString = "" + scres2@gsnCenterString = "" + scres2@gsnAddCyclic = True + + + scres3 = True ; PSL resources + scres3@cnLineColor = "black" + scres3@cnLineLabelsOn = False + scres3@cnLevelSelectionMode = "ExplicitLevels" + scres3@cnInfoLabelOn = False + scres3@tiMainOn = False + new_index = NhlNewDashPattern(wks_n34sc,"$_$_$_$_$_$_$_$_$_") + scres3@gsnContourNegLineDashPattern = new_index + scres3@cnLineDashSegLenF = 0.08 + scres3@gsnDraw = 
False + scres3@gsnFrame = False + scres3@gsnLeftString = "" + scres3@gsnRightString = "" + scres3@gsnCenterString = "" + scres3@cnLevels = ispan(-16,16,2) + + scres4@gsnLeftString = syear_prect(ee)+"-"+eyear_prect(ee) + scres4@gsnCenterString = names_prect(ee) + + scres@gsnLeftString = syear(ee)+"-"+eyear(ee) + if (names(ee).eq.names_trefht(ee).and.names(ee).eq.names_psl(ee)) then + scres@gsnCenterString = names(ee) + else + scres@gsnCenterString = names(ee)+" / "+names_trefht(ee)+" / "+names_psl(ee) + end if + + if (wks_type.eq."png") then + scres3@cnLineThicknessF = 3. + scres@mpGeophysicalLineThicknessF = 2. + scres4@mpGeophysicalLineThicknessF = 2. + else + scres3@cnLineThicknessF = 1.25 + scres@mpGeophysicalLineThicknessF = 1. + scres4@mpGeophysicalLineThicknessF = 1. + end if + + if (taspslreg_plot_flag.eq.0) then + if (isvar("patcor_tas")) then ; for metrics table + patcor_tas(ee,:,:) = (/ totype(linint2(n34sc_tas&lon,n34sc_tas&lat,n34sc_tas(12,:,:),True,patcor_tas&lon,patcor_tas&lat,0),typeof(patcor_tas)) /) + patcor_psl(ee,:,:) = (/ totype(linint2(n34sc_psl&lon,n34sc_psl&lat,n34sc_psl(12,:,:),True,patcor_psl&lon,patcor_psl&lat,0),typeof(patcor_psl)) /) + else + if (isfilepresent2("obs_trefht")) then + patcor_tas = new((/nsim,dimsizes(n34sc_tas&lat),dimsizes(n34sc_tas&lon)/),typeof(n34sc_tas)) + patcor_tas!1 = "lat" + patcor_tas&lat = n34sc_tas&lat + patcor_tas!2 = "lon" + patcor_tas&lon = n34sc_tas&lon + patcor_psl = new((/nsim,dimsizes(n34sc_psl&lat),dimsizes(n34sc_psl&lon)/),typeof(n34sc_psl)) + patcor_psl!1 = "lat" + patcor_psl&lat = n34sc_psl&lat + patcor_psl!2 = "lon" + patcor_psl&lon = n34sc_psl&lon + patcor_tas(ee,:,:) = (/ n34sc_tas(12,:,:) /) + patcor_psl(ee,:,:) = (/ n34sc_psl(12,:,:) /) + end if + end if + + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") ; mask ocean for TAS array + basemap = d->LSMASK + lsm = landsea_mask(basemap,n34sc_tas&lat,n34sc_tas&lon) + n34sc_tas = mask(n34sc_tas,conform(n34sc_tas,lsm,(/1,2/)).eq.0,False) + delete([/lsm,basemap/]) + delete(d) + + map_n34sc_jja0(ee) = gsn_csm_contour_map(wks_n34sc,n34sc_sst(6,:,:),scres) ; 6 = JJA 0 + o1 = gsn_csm_contour(wks_n34sc,n34sc_tas(6,:,:),scres2) + o2 = gsn_csm_contour(wks_n34sc,n34sc_psl(6,:,:),scres3) + overlay(map_n34sc_jja0(ee),o1) + overlay(map_n34sc_jja0(ee),o2) + delete([/o1,o2/]) + + map_n34sc_son0(ee) = gsn_csm_contour_map(wks_n34sc,n34sc_sst(9,:,:),scres) ; 9 = SON 0 + o3 = gsn_csm_contour(wks_n34sc,n34sc_tas(9,:,:),scres2) + o4 = gsn_csm_contour(wks_n34sc,n34sc_psl(9,:,:),scres3) + overlay(map_n34sc_son0(ee),o3) + overlay(map_n34sc_son0(ee),o4) + delete([/o3,o4/]) + + + map_n34sc_djf1(ee) = gsn_csm_contour_map(wks_n34sc,n34sc_sst(12,:,:),scres) ; 12 = DJF+1 + o5 = gsn_csm_contour(wks_n34sc,n34sc_tas(12,:,:),scres2) + o6 = gsn_csm_contour(wks_n34sc,n34sc_psl(12,:,:),scres3) + overlay(map_n34sc_djf1(ee),o5) + overlay(map_n34sc_djf1(ee),o6) + delete([/o5,o6/]) + + map_n34sc_mam1(ee) = gsn_csm_contour_map(wks_n34sc,n34sc_sst(15,:,:),scres) ; 15 = MAM+1 + o7 = gsn_csm_contour(wks_n34sc,n34sc_tas(15,:,:),scres2) + o8 = gsn_csm_contour(wks_n34sc,n34sc_psl(15,:,:),scres3) + overlay(map_n34sc_mam1(ee),o7) + overlay(map_n34sc_mam1(ee),o8) + delete([/o7,o8/]) + delete([/n34sc_sst,n34sc_tas,n34sc_psl/]) + end if + if (pptreg_plot_flag.eq.0) then + map_n34sc_ppt_jja0(ee) = gsn_csm_contour_map(wks_n34sc_ppt,n34sc_ppt(6,:,:),scres4) ; 6 = JJA 0 + map_n34sc_ppt_son0(ee) = gsn_csm_contour_map(wks_n34sc_ppt,n34sc_ppt(9,:,:),scres4) ; 9 = SON 0 + map_n34sc_ppt_djf1(ee) = 
gsn_csm_contour_map(wks_n34sc_ppt,n34sc_ppt(12,:,:),scres4) ; 12 = DJF+1 + map_n34sc_ppt_mam1(ee) = gsn_csm_contour_map(wks_n34sc_ppt,n34sc_ppt(15,:,:),scres4) ; 15 = MAM+1 + delete([/n34sc_ppt/]) + end if + end do + + if (isvar("patcor_tas")) then ; for pattern correlation table + clat_sst = cos(0.01745329*patcor_tas&lat) + clat_psl = cos(0.01745329*patcor_psl&lat) + finpr_sst = "ENSO TAS (DJF+1) " ; Must be 18 characters long + finpr_psl = "ENSO PSL (DJF+1) " + line3 = " " ; Must be 18 characters long + line4 = line3 + header = (/"","Pattern Correlations/RMS Differences Observations vs. Model(s)",""/) + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor_tas(hh,:,:)))) then + finpr_sst = finpr_sst+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr_sst = finpr_sst+sprintf(format2,(pattern_cor(patcor_tas(0,:,:),patcor_tas(hh,:,:),clat_sst,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor_tas(0,:,:))),ndtooned(NewCosWeight(patcor_tas(hh,:,:)))))) + end if + if (all(ismissing(patcor_psl(hh,:,:)))) then + finpr_psl = finpr_psl+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr_psl = finpr_psl+sprintf(format2,(pattern_cor(patcor_psl(0,:,:),patcor_psl(hh,:,:),clat_psl,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor_psl(0,:,:))),ndtooned(NewCosWeight(patcor_psl(hh,:,:)))))) + end if + end do + if (dimsizes(tochar(line4)).ge.8190) then ; system or fortran compiler limit + print("Metrics table warning: Not creating metrics table as size of comparison results in an invalid ascii row size.") + else + write_table(getenv("OUTDIR")+"metrics.sst.indices.1.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.indices.1.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.indices.1.txt","a",[/line4/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.indices.1.txt","a",[/finpr_sst/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.indices.1.txt","a",[/finpr_psl/],"%s") + end if + delete([/finpr_sst,finpr_psl,line3,line4,format2,format3,nchar,ntc,clat_sst,clat_psl,patcor_tas,patcor_psl,dimY,ntb,header/]) + end if + + if (isvar("patcor_hov_hi")) then ; for pattern correlation table + finpr_hi = "El Nino Hovmoller " ; Must be 18 characters long + finpr_lo = "La Nina Hovmoller " + line3 = " " ; Must be 18 characters long + line4 = line3 + header = (/"","Pattern Correlations/RMS Differences Observations vs. Model(s)",""/) + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor_hov_hi(hh,24:52,:)))) then ; 24:52 refers to Jan+0->May+2, which is the range shown in the hovmoller plots. 
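+ ; an all-missing comparison is entered in the metrics table as a 9.99/9.99 placeholder rather than being skipped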
+ finpr_hi = finpr_hi+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr_hi = finpr_hi+sprintf(format2,(pattern_cor(patcor_hov_hi(0,24:52,:),patcor_hov_hi(hh,24:52,:),1.0,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(patcor_hov_hi(0,24:52,:)),ndtooned(patcor_hov_hi(hh,24:52,:))))) + end if + if (all(ismissing(patcor_hov_lo(hh,24:52,:)))) then + finpr_lo = finpr_lo+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr_lo = finpr_lo+sprintf(format2,(pattern_cor(patcor_hov_lo(0,24:52,:),patcor_hov_lo(hh,24:52,:),1.0,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(patcor_hov_lo(0,24:52,:)),ndtooned(patcor_hov_lo(hh,24:52,:))))) + end if + end do + if (dimsizes(tochar(line4)).ge.8190) then ; system or fortran compiler limit + print("Metrics table warning: Not creating metrics table as size of comparison results in an invalid ascii row size.") + else + write_table(getenv("OUTDIR")+"metrics.sst.indices.2.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.indices.2.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.indices.2.txt","a",[/line4/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.indices.2.txt","a",[/finpr_hi/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.indices.2.txt","a",[/finpr_lo/],"%s") + end if + delete([/finpr_hi,finpr_lo,line3,line4,format2,format3,nchar,ntc,patcor_hov_hi,patcor_hov_lo,dimY,ntb,header/]) + end if + + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelYWhiteSpacePercent = 3.0 + if (nsim.le.10) then + panres@txFontHeightF = 0.016 + else + panres@txFontHeightF = 0.012 + end if + + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) ;(/nsim/2+1,nsim/8+1/) + end if + + if (isvar("title_n34")) then + panres@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 (Monthly, "+title_n34+")" + gsn_panel2(wks_n34,xyn34,lp,panres) + + panres@txString = "Nin~H-13V2F35~D~FV-2H3F21~o4 (Monthly, "+title_n4+")" + gsn_panel2(wks_n4,xyn4,lp,panres) + + panres@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3 (Monthly, "+title_n3+")" + gsn_panel2(wks_n3,xyn3,lp,panres) + + panres@txString = "Nin~H-13V2F35~D~FV-2H3F21~o1+2 (Monthly, "+title_n12+")" + gsn_panel2(wks_n12,xyn12,lp,panres) + + panres@txString = "Tropical North Atlantic (Monthly, "+title_tna+")" + gsn_panel2(wks_tna,xytna,lp,panres) + + panres@txString = "Tropical South Atlantic (Monthly, "+title_tsa+")" + gsn_panel2(wks_tsa,xytsa,lp,panres) + + panres@txString = "Tropical Indian Ocean (Monthly, "+title_tio+")" + gsn_panel2(wks_tio,xytio,lp,panres) + + panres@txString = "Indian Ocean Dipole (Monthly, "+title_iod+")" + gsn_panel2(wks_tio,xyiod,lp,panres) + + panres@txString = "Southern Ocean (Monthly, "+title_socn+")" + gsn_panel2(wks_tio,xysocn,lp,panres) + + panres@txString = "Atlantic Meridional Mode (Monthly, "+title_amm+")" + gsn_panel2(wks_tio,xyamm,lp,panres) + + panres@txString = "Atlantic Nin~H-13V2F35~D~FV-2H3F21~o3 (Monthly, "+title_atl3+")" + gsn_panel2(wks_tio,xyatl3,lp,panres) + end if + delete(wks_tio) + + if (all(ismissing(xyn34_rst))) then +; print("No valid running standard deviation plots, skipping") + else + panres@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 30yr running standard deviation" + gsn_panel2(wks_n34_rst,xyn34_rst,lp,panres) + delete(xyn34_rst) + end if + panres@gsnPanelYWhiteSpacePercent = 0.5 
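+ ; layout note: ncol/nrow (set above) give a near-square panel grid (e.g. nsim = 9 -> 3x3), while lp keeps a single column whenever nsim <= 12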
+ panres@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 standard deviation (Monthly)" + gsn_panel2(wks_n34_mst,xyn34_mst,(/nrow,ncol/),panres) + + panres2 = True + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + panres2@gsnPanelLabelBar = True + panres2@lbLabelStride = 1 + panres2@pmLabelBarWidthF = 0.4 + panres2@pmLabelBarHeightF = 0.06 + panres2@lbLabelFontHeightF = 0.013 + panres2@txString = "" + + if (nsim.le.4) then + if (nsim.eq.1) then + panres2@txFontHeightF = 0.022 + panres2@gsnPanelBottom = 0.50 + else + panres2@txFontHeightF = 0.0145 + panres2@gsnPanelBottom = 0.50 + end if + else + panres2@txFontHeightF = 0.016 + panres2@gsnPanelBottom = 0.05 + end if + panres2@lbTitleOn = True + panres2@lbTitlePosition = "Bottom" + panres2@lbTitleFontHeightF = panres2@lbLabelFontHeightF - 0.002 + panres2@lbTitleString = "C" + + panres2@txString = "El Nin~H-13V2F35~D~FV-2H3F21~o Composite (3~S~o~N~S:3~S~o~N~N)" + gsn_panel2(wks_n34_tlon_hi,plot_n34hi,(/nrow,ncol/),panres2) + panres2@txString = "La Nin~H-13V2F35~D~FV-2H3F21~a Composite (3~S~o~N~S:3~S~o~N~N)" + gsn_panel2(wks_n34_tlon_lo,plot_n34lo,(/nrow,ncol/),panres2) + delete([/panres2@lbTitleOn,panres2@lbTitlePosition,panres2@lbTitleFontHeightF,panres2@lbTitleString/]) + + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 (Monthly, detrended)" + gsn_panel2(wks_n34_p,pspec,(/nrow,ncol/),panres) + panres@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 Autocorrelation (Monthly)" + gsn_panel2(wks_n34_p,xyn34_ac,(/nrow,ncol/),panres) + panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 Wavelet (Monthly)" + panres2@gsnPanelYWhiteSpacePercent = 3.0 + panres2@gsnPanelXWhiteSpacePercent = 4.0 + gsn_panel2(wks_n34_p,plot_wave34,(/nrow,ncol/),panres2) + delete(wks_n34_p) + + delete(panres2@gsnPanelYWhiteSpacePercent) + panres2@pmLabelBarWidthF = 0.8 + panres2@lbLabelAutoStride = False + panres2@gsnPanelXWhiteSpacePercent = 8.5 + if (any(.not.ismissing(map_n34sc_jja0))) then + panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 SST,TAS,PSL Spatial Composite (JJA~S~0~N~)" + gsn_panel2(wks_n34sc,map_n34sc_jja0,(/nrow,ncol/),panres2) + + panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 SST,TAS,PSL Spatial Composite (SON~S~0~N~)" + gsn_panel2(wks_n34sc,map_n34sc_son0,(/nrow,ncol/),panres2) + + panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 SST,TAS,PSL Spatial Composite (DJF~S~+1~N~)" + gsn_panel2(wks_n34sc,map_n34sc_djf1,(/nrow,ncol/),panres2) + + panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 SST,TAS,PSL Spatial Composite (MAM~S~+1~N~)" + gsn_panel2(wks_n34sc,map_n34sc_mam1,(/nrow,ncol/),panres2) + delete(wks_n34sc) + + delete([/map_n34sc_djf1,map_n34sc_jja0,map_n34sc_son0,map_n34sc_mam1/]) + end if + if (any(.not.ismissing(map_n34sc_ppt_jja0))) then + panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 PR Spatial Composite (JJA~S~0~N~)" + gsn_panel2(wks_n34sc_ppt,map_n34sc_ppt_jja0,(/nrow,ncol/),panres2) + + panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 PR Spatial Composite (SON~S~0~N~)" + gsn_panel2(wks_n34sc_ppt,map_n34sc_ppt_son0,(/nrow,ncol/),panres2) + + panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 PR Spatial Composite (DJF~S~+1~N~)" + gsn_panel2(wks_n34sc_ppt,map_n34sc_ppt_djf1,(/nrow,ncol/),panres2) + + 
panres2@txString = "Nin~H-13V2F35~D~FV-2H3F21~o3.4 PR Spatial Composite (MAM~S~+1~N~)" + gsn_panel2(wks_n34sc_ppt,map_n34sc_ppt_mam1,(/nrow,ncol/),panres2) + delete(wks_n34sc_ppt) + + delete([/map_n34sc_ppt_djf1,map_n34sc_ppt_jja0,map_n34sc_ppt_son0,map_n34sc_ppt_mam1/]) + end if + + delete([/xyn34,xyn4,xyn3,xyn12,xytna,xytsa,xytio,xyiod,plot_n34hi,plot_n34lo,pspec,pi,rad,wgt,lp/]) + + OUTDIR = getenv("OUTDIR") + + if (wks_type.eq."png") then + if (isfilepresent2(OUTDIR+"nino34.spatialcomp.000001.png")) then + system("mv "+OUTDIR+"nino34.spatialcomp.000001.png "+OUTDIR+"nino34.spatialcomp.jja0.png") + system("mv "+OUTDIR+"nino34.spatialcomp.000002.png "+OUTDIR+"nino34.spatialcomp.son0.png") + system("mv "+OUTDIR+"nino34.spatialcomp.000003.png "+OUTDIR+"nino34.spatialcomp.djf1.png") + system("mv "+OUTDIR+"nino34.spatialcomp.000004.png "+OUTDIR+"nino34.spatialcomp.mam1.png") + end if + if (isfilepresent2(OUTDIR+"nino34.spatialcomp.ppt.000001.png")) then + system("mv "+OUTDIR+"nino34.spatialcomp.ppt.000001.png "+OUTDIR+"nino34.spatialcomp.pr.jja0.png") + system("mv "+OUTDIR+"nino34.spatialcomp.ppt.000002.png "+OUTDIR+"nino34.spatialcomp.pr.son0.png") + system("mv "+OUTDIR+"nino34.spatialcomp.ppt.000003.png "+OUTDIR+"nino34.spatialcomp.pr.djf1.png") + system("mv "+OUTDIR+"nino34.spatialcomp.ppt.000004.png "+OUTDIR+"nino34.spatialcomp.pr.mam1.png") + end if + if (isfilepresent2(OUTDIR+"tio.timeseries.000001.png")) then + system("mv "+OUTDIR+"tio.timeseries.000001.png "+OUTDIR+"tio.timeseries.png") + system("mv "+OUTDIR+"tio.timeseries.000002.png "+OUTDIR+"iod.timeseries.png") + system("mv "+OUTDIR+"tio.timeseries.000003.png "+OUTDIR+"socn.timeseries.png") + system("mv "+OUTDIR+"tio.timeseries.000004.png "+OUTDIR+"amm.timeseries.png") + system("mv "+OUTDIR+"tio.timeseries.000005.png "+OUTDIR+"atl3.timeseries.png") + end if + if (isfilepresent2(OUTDIR+"nino34.powspec.000001.png")) then + system("mv "+OUTDIR+"nino34.powspec.000001.png "+OUTDIR+"nino34.powspec.png") + system("mv "+OUTDIR+"nino34.powspec.000002.png "+OUTDIR+"nino34.autocor.png") + system("mv "+OUTDIR+"nino34.powspec.000003.png "+OUTDIR+"nino34.wavelet.png") + end if + else + if (isfilepresent2(OUTDIR+"nino34.spatialcomp.ps")) then + system("psplit "+OUTDIR+"nino34.spatialcomp.ps "+OUTDIR+"sst_ind") + system("mv "+OUTDIR+"sst_ind0001.ps "+OUTDIR+"nino34.spatialcomp.jja0.ps") + system("mv "+OUTDIR+"sst_ind0002.ps "+OUTDIR+"nino34.spatialcomp.son0.ps") + system("mv "+OUTDIR+"sst_ind0003.ps "+OUTDIR+"nino34.spatialcomp.djf1.ps") + system("mv "+OUTDIR+"sst_ind0004.ps "+OUTDIR+"nino34.spatialcomp.mam1.ps") + system("rm "+OUTDIR+"nino34.spatialcomp.ps") + end if + if (isfilepresent2(OUTDIR+"nino34.spatialcomp.ppt.ps")) then + system("psplit "+OUTDIR+"nino34.spatialcomp.ppt.ps "+OUTDIR+"sst_ind") + system("mv "+OUTDIR+"sst_ind0001.ps "+OUTDIR+"nino34.spatialcomp.pr.jja0.ps") + system("mv "+OUTDIR+"sst_ind0002.ps "+OUTDIR+"nino34.spatialcomp.pr.son0.ps") + system("mv "+OUTDIR+"sst_ind0003.ps "+OUTDIR+"nino34.spatialcomp.pr.djf1.ps") + system("mv "+OUTDIR+"sst_ind0004.ps "+OUTDIR+"nino34.spatialcomp.pr.mam1.ps") + system("rm "+OUTDIR+"nino34.spatialcomp.ppt.ps") + end if + if (isfilepresent2(OUTDIR+"tio.timeseries.ps")) then + system("psplit "+OUTDIR+"tio.timeseries.ps "+OUTDIR+"sst_ind") + system("mv "+OUTDIR+"sst_ind0001.ps "+OUTDIR+"tio.timeseries.ps") + system("mv "+OUTDIR+"sst_ind0002.ps "+OUTDIR+"iod.timeseries.ps") + system("mv "+OUTDIR+"sst_ind0003.ps "+OUTDIR+"socn.timeseries.ps") + system("mv "+OUTDIR+"sst_ind0004.ps 
"+OUTDIR+"amm.timeseries.ps") + system("mv "+OUTDIR+"sst_ind0005.ps "+OUTDIR+"atl3.timeseries.ps") + end if + if (isfilepresent2(OUTDIR+"nino34.powspec.ps")) then + system("psplit "+OUTDIR+"nino34.powspec.ps "+OUTDIR+"n34p") + system("mv "+OUTDIR+"n34p0001.ps "+OUTDIR+"nino34.powspec.ps") + if (isfilepresent2(OUTDIR+"n34p0002.ps")) then + system("mv "+OUTDIR+"n34p0002.ps "+OUTDIR+"nino34.autocor.ps") + system("mv "+OUTDIR+"n34p0003.ps "+OUTDIR+"nino34.wavelet.ps") + end if + end if + end if + print("Finished: sst.indices.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.mean_stddev.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.mean_stddev.ncl new file mode 100644 index 0000000000..445699f36d --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.mean_stddev.ncl @@ -0,0 +1,386 @@ +; Calculates SST global means and standard deviations +; +; Variables used: ts +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: sst.mean_stddev.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_ts") + na = asciiread("namelist_byvar/namelist_ts",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_stddev_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.stddev.djf") + wks_stddev_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.stddev.mam") + wks_stddev_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.stddev.jja") + wks_stddev_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.stddev.son") + wks_stddev_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.stddev.ann") + wks_mean_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.mean.djf") + wks_mean_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.mean.mam") + wks_mean_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.mean.jja") + wks_mean_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.mean.son") + wks_mean_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.mean.ann") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_stddev_djf,"precip3_16lev") + gsn_define_colormap(wks_stddev_mam,"precip3_16lev") + gsn_define_colormap(wks_stddev_jja,"precip3_16lev") + gsn_define_colormap(wks_stddev_son,"precip3_16lev") + gsn_define_colormap(wks_stddev_ann,"precip3_16lev") + gsn_define_colormap(wks_mean_djf,"ncl_default") + gsn_define_colormap(wks_mean_mam,"ncl_default") + gsn_define_colormap(wks_mean_jja,"ncl_default") + gsn_define_colormap(wks_mean_son,"ncl_default") + 
gsn_define_colormap(wks_mean_ann,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_stddev_djf,"cb_rainbow") + gsn_define_colormap(wks_stddev_mam,"cb_rainbow") + gsn_define_colormap(wks_stddev_jja,"cb_rainbow") + gsn_define_colormap(wks_stddev_son,"cb_rainbow") + gsn_define_colormap(wks_stddev_ann,"cb_rainbow") + gsn_define_colormap(wks_mean_djf,"BlueDarkRed18") + gsn_define_colormap(wks_mean_mam,"BlueDarkRed18") + gsn_define_colormap(wks_mean_jja,"BlueDarkRed18") + gsn_define_colormap(wks_mean_son,"BlueDarkRed18") + gsn_define_colormap(wks_mean_ann,"BlueDarkRed18") + end if + + plot_mean_djf = new(nsim,"graphic") + plot_mean_mam = new(nsim,"graphic") + plot_mean_jja = new(nsim,"graphic") + plot_mean_son = new(nsim,"graphic") + plot_mean_ann = new(nsim,"graphic") + plot_stddev_djf = new(nsim,"graphic") + plot_stddev_mam = new(nsim,"graphic") + plot_stddev_jja = new(nsim,"graphic") + plot_stddev_son = new(nsim,"graphic") + plot_stddev_ann = new(nsim,"graphic") + do ee = 0,nsim-1 + sst = data_read_in(paths(ee),"TS",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(sst,"is_all_missing")) then + delete(sst) + continue + end if + sst = where(sst.le.-1.8,-1.8,sst) ; set all values below -1.8 to -1.8 + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") ; mask out land (this is redundant for data that is already masked) + basemap = d->LSMASK + lsm = landsea_mask(basemap,sst&lat,sst&lon) + sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False) + delete([/lsm,basemap/]) + delete(d) + + do ff = 0,1 + sstT = sst + if (ff.eq.1) then + if (OPT_CLIMO.eq."Full") then + sstT = rmMonAnnCycTLL(sstT) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sstT + delete(temp_arr&time) + temp_arr&time = cd_calendar(sstT&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + sstT = calcMonAnomTLL(sstT,climo) + delete(climo) + end if + end if + sst_seas = runave_n_Wrap(sstT,3,0,0) + sst_seas(0,:,:) = (/ dim_avg_n(sstT(:1,:,:),0) /) + sst_seas(dimsizes(sstT&time)-1,:,:) = (/ dim_avg_n(sstT(dimsizes(sstT&time)-2:,:,:),0) /) + sst_ann = runave_n_Wrap(sstT,12,0,0) + delete(sstT) + + if (ff.eq.0) then + sst_mean_djf = dim_avg_n_Wrap(sst_seas(0::12,:,:),0) + sst_mean_mam = dim_avg_n_Wrap(sst_seas(3::12,:,:),0) + sst_mean_jja = dim_avg_n_Wrap(sst_seas(6::12,:,:),0) + sst_mean_son = dim_avg_n_Wrap(sst_seas(9::12,:,:),0) + sst_mean_ann = dim_avg_n_Wrap(sst_ann(5::12,:,:),0) + end if + if (ff.eq.1) then + sst_sd_djf = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),sst_seas(0::12,:,:),False,False,0),0) + sst_sd_mam = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),sst_seas(3::12,:,:),False,False,0),0) + sst_sd_jja = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),sst_seas(6::12,:,:),False,False,0),0) + sst_sd_son = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),sst_seas(9::12,:,:),False,False,0),0) + sst_sd_ann = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),sst_ann(5::12,:,:),False,False,0),0) + end if + delete([/sst_seas,sst_ann/]) + end do + delete(sst) + copy_VarMeta(sst_mean_djf,sst_sd_djf) + copy_VarMeta(sst_mean_mam,sst_sd_mam) + copy_VarMeta(sst_mean_jja,sst_sd_jja) + copy_VarMeta(sst_mean_son,sst_sd_son) + copy_VarMeta(sst_mean_ann,sst_sd_ann) + + if (OUTPUT_DATA.eq."True") 
then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.sst.mean_stddev."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + + z->sst_spatialmean_djf = set_varAtts(sst_mean_djf,"sst mean (DJF)","","") + z->sst_spatialmean_mam = set_varAtts(sst_mean_mam,"sst mean (MAM)","","") + z->sst_spatialmean_jja = set_varAtts(sst_mean_jja,"sst mean (JJA)","","") + z->sst_spatialmean_son = set_varAtts(sst_mean_son,"sst mean (SON)","","") + z->sst_spatialmean_ann = set_varAtts(sst_mean_ann,"sst mean (annual)","","") + + z->sst_spatialstddev_djf = set_varAtts(sst_sd_djf,"sst standard deviation (DJF)","","") + z->sst_spatialstddev_mam = set_varAtts(sst_sd_mam,"sst standard deviation (MAM)","","") + z->sst_spatialstddev_jja = set_varAtts(sst_sd_jja,"sst standard deviation (JJA)","","") + z->sst_spatialstddev_son = set_varAtts(sst_sd_son,"sst standard deviation (SON)","","") + z->sst_spatialstddev_ann = set_varAtts(sst_sd_ann,"sst standard deviation (annual)","","") + delete(z) + end if +;========================================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + + res@mpCenterLonF = 210. + res@mpOutlineOn = True + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. 
+ end if + res@gsnDraw = False + res@gsnFrame = False + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@cnLevelSelectionMode = "ExplicitLevels" + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + + sres = res + + res@cnLevels = fspan(.2,1.6,8) + if (COLORMAP.eq.0) then + res@cnFillColors = (/2,4,6,8,10,12,14,16,18/) + res@mpLandFillColor = "gray75" + sres@cnLevels = ispan(0,36,2) + end if + if (COLORMAP.eq.1) then + res@cnFillColors = (/35,47,63,79,95,111,124,155,175/) + res@mpLandFillColor = "gray30" + sres@cnLevels = ispan(4,32,2) + end if + + if (isfilepresent2("obs_ts").and.ee.eq.0) then ; for pattern correlation table + patcor = new((/nsim,dimsizes(sst_sd_ann&lat),dimsizes(sst_sd_ann&lon)/),typeof(sst_sd_ann)) + patcor!1 = "lat" + patcor&lat = sst_sd_ann&lat + patcor!2 = "lon" + patcor&lon = sst_sd_ann&lon + patcor(ee,:,:) = (/ sst_sd_ann /) + end if + if (isfilepresent2("obs_ts").and.ee.ge.1.and.isvar("patcor")) then + patcor(ee,:,:) = (/ totype(linint2(sst_sd_ann&lon,sst_sd_ann&lat,sst_sd_ann,True,patcor&lon,patcor&lat,0),typeof(patcor)) /) + end if + + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnRightString = sst_mean_djf@units + res@gsnCenterString = names(ee) + plot_stddev_djf(ee) = gsn_csm_contour_map(wks_stddev_djf,sst_sd_djf,res) + plot_stddev_mam(ee) = gsn_csm_contour_map(wks_stddev_mam,sst_sd_mam,res) + plot_stddev_jja(ee) = gsn_csm_contour_map(wks_stddev_jja,sst_sd_jja,res) + plot_stddev_son(ee) = gsn_csm_contour_map(wks_stddev_son,sst_sd_son,res) + plot_stddev_ann(ee) = gsn_csm_contour_map(wks_stddev_ann,sst_sd_ann,res) + + sres@gsnLeftString = syear(ee)+"-"+eyear(ee) + sres@gsnRightString = sst_mean_djf@units + sres@gsnCenterString = names(ee) + plot_mean_djf(ee) = gsn_csm_contour_map(wks_mean_djf,sst_mean_djf,sres) + plot_mean_mam(ee) = gsn_csm_contour_map(wks_mean_mam,sst_mean_mam,sres) + plot_mean_jja(ee) = gsn_csm_contour_map(wks_mean_jja,sst_mean_jja,sres) + plot_mean_son(ee) = gsn_csm_contour_map(wks_mean_son,sst_mean_son,sres) + plot_mean_ann(ee) = gsn_csm_contour_map(wks_mean_ann,sst_mean_ann,sres) + delete([/sst_sd_djf,sst_sd_mam,sst_sd_jja,sst_sd_son,sst_sd_ann,sst_mean_djf,sst_mean_mam,sst_mean_jja,sst_mean_son,sst_mean_ann,res,sres/]) + end do + + if (isvar("patcor")) then ; for pattern correlation table + clat = cos(0.01745329*patcor&lat) + finpr = "SST Std Dev (Ann) " ; Must be 18 characters long + line3 = " " ; Must be 18 characters long + line4 = line3 + header = (/"","Pattern Correlations/RMS Differences Observations vs. 
Model(s)",""/) + do hh = 1,nsim-1 + dimY = dimsizes(tochar(names(hh))) + nchar = dimY + nchar = where(nchar.le.10,10,nchar) + if (dimY.lt.10) then + ntb = "" + do ii = 0,10-dimY-1 + ntb = ntb+" " + end do + ntb = ntb+names(hh) + else + ntb = names(hh) + end if + + ntc = "" + do ii = 0,nchar-1 + ntc = ntc+"-" + end do + format2 = "%"+(nchar-5+1)+".2f" + format3 = "%4.2f" + line3 = line3+" "+ntb + line4 = line4+" "+ntc + if (all(ismissing(patcor(hh,:,:)))) then + finpr = finpr+sprintf(format2,9.99)+"/"+sprintf(format3,9.99) + else + finpr = finpr+sprintf(format2,(pattern_cor(patcor(0,:,:),patcor(hh,:,:),clat,0)))+"/"+sprintf(format3,(dim_rmsd(ndtooned(NewCosWeight(patcor(0,:,:))),ndtooned(NewCosWeight(patcor(hh,:,:)))))) + end if + end do + if (dimsizes(tochar(line4)).ge.8190) then ; system or fortran compiler limit + print("Metrics table warning: Not creating metrics table as size of comparison results in an invalid ascii row size.") + else + write_table(getenv("OUTDIR")+"metrics.sst.mean_stddev.txt","w",[/header/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.mean_stddev.txt","a",[/line3/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.mean_stddev.txt","a",[/line4/],"%s") + write_table(getenv("OUTDIR")+"metrics.sst.mean_stddev.txt","a",[/finpr/],"%s") + end if + delete([/finpr,line3,line4,format2,format3,nchar,ntc,clat,patcor,dimY,ntb,header/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelFontHeightF = 0.013 + panres@lbLabelStride = 1 + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "SST Standard Deviations (DJF)" + gsn_panel2(wks_stddev_djf,plot_stddev_djf,(/nrow,ncol/),panres) + delete(wks_stddev_djf) + + panres@txString = "SST Standard Deviations (MAM)" + gsn_panel2(wks_stddev_mam,plot_stddev_mam,(/nrow,ncol/),panres) + delete(wks_stddev_mam) + + panres@txString = "SST Standard Deviations (JJA)" + gsn_panel2(wks_stddev_jja,plot_stddev_jja,(/nrow,ncol/),panres) + delete(wks_stddev_jja) + + panres@txString = "SST Standard Deviations (SON)" + gsn_panel2(wks_stddev_son,plot_stddev_son,(/nrow,ncol/),panres) + delete(wks_stddev_son) + + panres@txString = "SST Standard Deviations (Annual)" + gsn_panel2(wks_stddev_ann,plot_stddev_ann,(/nrow,ncol/),panres) + delete(wks_stddev_ann) + + panres@txString = "SST Means (DJF)" + gsn_panel2(wks_mean_djf,plot_mean_djf,(/nrow,ncol/),panres) + delete(wks_mean_djf) + + panres@txString = "SST Means (MAM)" + gsn_panel2(wks_mean_mam,plot_mean_mam,(/nrow,ncol/),panres) + delete(wks_mean_mam) + + panres@txString = "SST Means (JJA)" + gsn_panel2(wks_mean_jja,plot_mean_jja,(/nrow,ncol/),panres) + delete(wks_mean_jja) + + panres@txString = "SST Means (SON)" + gsn_panel2(wks_mean_son,plot_mean_son,(/nrow,ncol/),panres) + delete(wks_mean_son) + + panres@txString = "SST Means (Annual)" + gsn_panel2(wks_mean_ann,plot_mean_ann,(/nrow,ncol/),panres) + delete(wks_mean_ann) + delete(panres) + print("Finished: sst.mean_stddev.ncl") +end diff --git 
a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.trends_timeseries.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.trends_timeseries.ncl new file mode 100644 index 0000000000..371eb953d2 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/sst.trends_timeseries.ncl @@ -0,0 +1,610 @@ +; Calculates SST global trends, running global trends and timeseries +; +; Variables used: ts +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: sst.trends_timeseries.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_ts") + na = asciiread("namelist_byvar/namelist_ts",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_trends_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.trends.djf") + wks_trends_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.trends.mam") + wks_trends_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.trends.jja") + wks_trends_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.trends.son") + wks_trends_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.trends.ann") + wks_trends_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.trends.mon") + + wks_aa_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.timeseries.djf") + wks_aa_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.timeseries.mam") + wks_aa_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.timeseries.jja") + wks_aa_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.timeseries.son") + wks_aa_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.timeseries.ann") + wks_aa_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.timeseries.mon") + + wks_rt_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"sst.runtrend.mon") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_trends_djf,"ncl_default") + gsn_define_colormap(wks_trends_mam,"ncl_default") + gsn_define_colormap(wks_trends_jja,"ncl_default") + gsn_define_colormap(wks_trends_son,"ncl_default") + gsn_define_colormap(wks_trends_ann,"ncl_default") + gsn_define_colormap(wks_trends_mon,"ncl_default") + gsn_define_colormap(wks_aa_djf,"ncl_default") + gsn_define_colormap(wks_aa_mam,"ncl_default") + gsn_define_colormap(wks_aa_jja,"ncl_default") + gsn_define_colormap(wks_aa_son,"ncl_default") + gsn_define_colormap(wks_aa_ann,"ncl_default") + gsn_define_colormap(wks_aa_mon,"ncl_default") + gsn_define_colormap(wks_rt_mon,"ncl_default") + end if + if (COLORMAP.eq.1) then + 
gsn_define_colormap(wks_trends_djf,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mam,"BlueDarkRed18") + gsn_define_colormap(wks_trends_jja,"BlueDarkRed18") + gsn_define_colormap(wks_trends_son,"BlueDarkRed18") + gsn_define_colormap(wks_trends_ann,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mon,"BlueDarkRed18") + gsn_define_colormap(wks_aa_djf,"ncl_default") + gsn_define_colormap(wks_aa_mam,"ncl_default") + gsn_define_colormap(wks_aa_jja,"ncl_default") + gsn_define_colormap(wks_aa_son,"ncl_default") + gsn_define_colormap(wks_aa_ann,"ncl_default") + gsn_define_colormap(wks_aa_mon,"ncl_default") + gsn_define_colormap(wks_rt_mon,"ncl_default") + end if + map_djf = new(nsim,"graphic") + map_mam = new(nsim,"graphic") + map_jja = new(nsim,"graphic") + map_son = new(nsim,"graphic") + map_ann = new(nsim,"graphic") + map_mon = new(nsim,"graphic") + xy_djf = new(nsim,"graphic") + xy_mam = new(nsim,"graphic") + xy_jja = new(nsim,"graphic") + xy_son = new(nsim,"graphic") + xy_ann = new(nsim,"graphic") + xy_mon = new(nsim,"graphic") + + xy_rt_mon = new((/5,nsim/),"graphic") + + if (isfilepresent2("obs_ts")) then + xy_obs_djf = new(nsim,"graphic") + xy_obs_mam = new(nsim,"graphic") + xy_obs_jja = new(nsim,"graphic") + xy_obs_son = new(nsim,"graphic") + xy_obs_ann = new(nsim,"graphic") + xy_obs_mon = new(nsim,"graphic") + end if + do ee = 0,nsim-1 + sst = data_read_in(paths(ee),"TS",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(sst,"is_all_missing")) then + delete(sst) + continue + end if + sst = where(sst.le.-1.8,-1.8,sst) ; set all values below -1.8 to -1.8 + d = addfile("$NCARG_ROOT/lib/ncarg/data/cdf/landsea.nc","r") ; mask out land (this is redundant for data that is already masked) + basemap = d->LSMASK + lsm = landsea_mask(basemap,sst&lat,sst&lon) + sst = mask(sst,conform(sst,lsm,(/1,2/)).ge.1,False) + delete([/lsm,basemap/]) + delete(d) + + if (OPT_CLIMO.eq."Full") then + sst = rmMonAnnCycTLL(sst) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = sst + delete(temp_arr&time) + temp_arr&time = cd_calendar(sst&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + sst = calcMonAnomTLL(sst,climo) + delete(climo) + end if + + coswgt=cos(rad*sst&lat) + coswgt!0 = "lat" + coswgt&lat= sst&lat + + sst_aa_mon = wgt_areaave_Wrap(sst,coswgt,1.0,0) + tttt = dtrend_msg_n(ispan(0,dimsizes(sst&time)-1,1),sst,False,True,0) + sst_trends_mon = sst(0,:,:) + sst_trends_mon = (/ onedtond(tttt@slope, (/dimsizes(sst&lat),dimsizes(sst&lon)/) ) /) + sst_trends_mon = sst_trends_mon*dimsizes(sst&time) + sst_trends_mon@units = sst@units+" "+nyr(ee)+"yr~S~-1~N~" + delete(tttt) + + sst_seas = runave_n_Wrap(sst,3,0,0) + sst_seas(0,:,:) = (/ dim_avg_n(sst(:1,:,:),0) /) + sst_seas(dimsizes(sst&time)-1,:,:) = (/ dim_avg_n(sst(dimsizes(sst&time)-2:,:,:),0) /) + sst_ann = runave_n_Wrap(sst,12,0,0) + delete(sst) + + sst_trends_seas = sst_seas(:3,:,:) + sst_trends_seas = sst_trends_seas@_FillValue + sst_trends_ann = sst_trends_seas(0,:,:) + sst_aa_seas = new((/4,nyr(ee)/),typeof(sst_seas)) + sst_aa_seas!1 = "time" + sst_aa_seas&time = ispan(syear(ee),eyear(ee),1) + sst_aa_seas&time@units = "YYYY" + sst_aa_seas&time@long_name = "time" + sst_aa_ann = sst_aa_seas(0,:) + do ff = 0,4 + if (ff.le.3) then + tarr = sst_seas(ff*3::12,:,:) 
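+ ; ff = 0,1,2,3 sample the 3-month running mean at Jan/Apr/Jul/Oct, i.e. the DJF/MAM/JJA/SON seasonal means; ff = 4 below samples the 12-month running mean each June as the annual-mean series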
+ end if + if (ff.eq.4) then + tarr = sst_ann(5::12,:,:) + end if + tttt = dtrend_msg_n(ispan(0,dimsizes(tarr&time)-1,1),tarr,False,True,0) + if (ff.le.3) then + sst_trends_seas(ff,:,:) = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + sst_aa_seas(ff,:) = (/ wgt_areaave(tarr,coswgt,1.0,0) /) + end if + if (ff.eq.4) then + sst_trends_ann = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + sst_aa_ann = (/ wgt_areaave(tarr,coswgt,1.0,0) /) + end if + delete([/tarr,tttt/]) + end do + sst_trends_seas = sst_trends_seas*nyr(ee) + sst_trends_seas@units = sst_seas@units+" "+nyr(ee)+"yr~S~-1~N~" + sst_trends_ann = sst_trends_ann*nyr(ee) + sst_trends_ann@units = sst_ann@units+" "+nyr(ee)+"yr~S~-1~N~" + delete([/sst_seas,sst_ann,coswgt/]) + + if (isfilepresent2("obs_ts").and.ee.eq.0) then + sst_aa_seas@syear = syear(ee) + sst_aa_seas@eyear = eyear(ee) + sst_aa_mon@syear = syear(ee) + sst_aa_mon@eyear = eyear(ee) + sst_aa_ann@syear = syear(ee) + sst_aa_ann@eyear = eyear(ee) + sst_aa_seas_obs = sst_aa_seas + sst_aa_mon_obs = sst_aa_mon + sst_aa_ann_obs = sst_aa_ann + end if + + dimT = dimsizes(sst_aa_mon) ; calculate running trends from the monthly data + sst_rt_mon = new((/5,dimT/),typeof(sst_aa_mon)) + sst_rt_mon!1 = "time" + sst_rt_mon&time = sst_aa_mon&time + copy_VarAtts(sst_aa_mon,sst_rt_mon) + sst_rt_mon@long_name = sst_rt_mon@long_name+" global average running trend" + rt_nyr = (/8,10,12,14,16/) + do ff = 0,dimsizes(rt_nyr)-1 + incr = rt_nyr(ff)*12 + do gg = 0,dimT-incr-1 + tttt = dtrend_msg(ispan(0,incr-1,1),sst_aa_mon(gg:gg+incr-1),False,True) + sst_rt_mon(ff,gg) = (/ tttt@slope*incr /) + delete(tttt) + end do + end do + delete([/dimT,incr/]) + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.sst.trends_timeseries."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + sst_aa_seas2 = sst_aa_seas + sst_aa_seas2!1 = "TIME" + sst_aa_seas2&TIME = ispan(syear(ee),eyear(ee),1) + sst_aa_seas2&TIME@units = "YYYY" + sst_aa_seas2&TIME@long_name = "time" + sst_aa_ann2 = sst_aa_ann + sst_aa_ann2!0 = "TIME" + sst_aa_ann2&TIME = ispan(syear(ee),eyear(ee),1) + sst_aa_ann2&TIME@units = "YYYY" + sst_aa_ann2&TIME@long_name = "time" + z->sst_global_avg_mon = set_varAtts(sst_aa_mon,"sst global area-average (monthly)","C","") + z->sst_global_avg_djf = set_varAtts(sst_aa_seas2(0,:),"sst global area-average (DJF)","C","") + z->sst_global_avg_mam = set_varAtts(sst_aa_seas2(1,:),"sst global area-average (MAM)","C","") + z->sst_global_avg_jja = set_varAtts(sst_aa_seas2(2,:),"sst global area-average (JJA)","C","") + z->sst_global_avg_son = set_varAtts(sst_aa_seas2(3,:),"sst global 
area-average (SON)","C","") + z->sst_global_avg_ann = set_varAtts(sst_aa_ann2,"sst global area-average (annual)","C","") + z->$("sst_global_avg_runtrend_"+rt_nyr(0)+"yr")$ = set_varAtts(sst_rt_mon(0,:),"sst global area-average "+rt_nyr(0)+"yr running trend","","") + z->$("sst_global_avg_runtrend_"+rt_nyr(1)+"yr")$ = set_varAtts(sst_rt_mon(1,:),"sst global area-average "+rt_nyr(1)+"yr running trend","","") + z->$("sst_global_avg_runtrend_"+rt_nyr(2)+"yr")$ = set_varAtts(sst_rt_mon(2,:),"sst global area-average "+rt_nyr(2)+"yr running trend","","") + z->$("sst_global_avg_runtrend_"+rt_nyr(3)+"yr")$ = set_varAtts(sst_rt_mon(3,:),"sst global area-average "+rt_nyr(3)+"yr running trend","","") + z->$("sst_global_avg_runtrend_"+rt_nyr(4)+"yr")$ = set_varAtts(sst_rt_mon(4,:),"sst global area-average "+rt_nyr(4)+"yr running trend","","") + z->sst_trends_djf = set_varAtts(sst_trends_seas(0,:,:),"sst linear trends (DJF)","","") + z->sst_trends_mam = set_varAtts(sst_trends_seas(1,:,:),"sst linear trends (MAM)","","") + z->sst_trends_jja = set_varAtts(sst_trends_seas(2,:,:),"sst linear trends (JJA)","","") + z->sst_trends_son = set_varAtts(sst_trends_seas(3,:,:),"sst linear trends (SON)","","") + z->sst_trends_ann = set_varAtts(sst_trends_ann,"sst linear trends (annual)","","") + z->sst_trends_mon = set_varAtts(sst_trends_mon,"sst linear trends (monthly)","","") + delete(z) + delete([/sst_aa_seas2,sst_aa_ann2/]) + end if +;======================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. + end if + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 210. 
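+ ; centering the Winkel Tripel maps on 210E keeps the Pacific basin unbroken in the middle of the trend maps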
+ res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + + res@cnLevelSelectionMode = "ExplicitLevels" + if (COLORMAP.eq.0) then + res@cnLevels = (/-8,-6,-5,-4,-3,-2,-1,-0.5,-0.25,0,0.25,0.5,1,2,3,4,5,6,8/) + end if + if (COLORMAP.eq.1) then + res@cnLevels = (/-6,-4,-3,-2,-1,-0.5,-0.25,0,0.25,0.5,1,2,3,4,6/) + end if + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + + res@gsnRightString = sst_trends_seas@units + res@gsnCenterString = names(ee) + map_djf(ee) = gsn_csm_contour_map(wks_trends_djf,sst_trends_seas(0,:,:),res) + map_mam(ee) = gsn_csm_contour_map(wks_trends_mam,sst_trends_seas(1,:,:),res) + map_jja(ee) = gsn_csm_contour_map(wks_trends_jja,sst_trends_seas(2,:,:),res) + map_son(ee) = gsn_csm_contour_map(wks_trends_son,sst_trends_seas(3,:,:),res) + map_ann(ee) = gsn_csm_contour_map(wks_trends_ann,sst_trends_ann,res) + map_mon(ee) = gsn_csm_contour_map(wks_trends_mon,sst_trends_mon,res) + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + + if (wks_type.eq."png") then + xyres@xyLineThicknessF = 4. + else + xyres@xyLineThicknessF = 2. + end if + if (isfilepresent2("obs_ts").and.ee.eq.0) then + xyres@xyLineColor = "black" + else + xyres@xyLineColor = "royalblue" + end if + xyres@tiYAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnLeftStringFontHeightF = 0.017 + xyres@gsnRightStringFontHeightF = 0.013 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnLeftStringFontHeightF = 0.024 + xyres@gsnRightStringFontHeightF = 0.020 + end if + xyres@gsnLeftStringOrthogonalPosF = 0.025 + xyres@gsnRightStringOrthogonalPosF = xyres@gsnLeftStringOrthogonalPosF + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnLeftString = "" + xyres@gsnCenterString = "" + xyres@gsnRightString = "" + + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+0.5 + + xyres2 = xyres + xyres2@xyLineColor = "gray60" + xyres2@xyCurveDrawOrder = "PreDraw" + + xyres@gsnLeftString = names(ee) + tttt = dtrend_msg(ispan(0,dimsizes(sst_aa_seas&time)-1,1),sst_aa_seas(0,:),False,True) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xyres@trYMinF = min((/min(sst_aa_seas(0,:)),min(sst_aa_seas_obs(0,:))/))-.01 + xyres@trYMaxF = max((/max(sst_aa_seas(0,:)),max(sst_aa_seas_obs(0,:))/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+sst_trends_seas@units + xy_djf(ee) = gsn_csm_xy(wks_aa_djf,ispan(syear(ee),eyear(ee),1),sst_aa_seas(0,:),xyres) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xy_obs_djf(ee) = gsn_csm_xy(wks_aa_djf,ispan(sst_aa_seas_obs@syear,sst_aa_seas_obs@eyear,1),sst_aa_seas_obs(0,:),xyres2) + overlay(xy_djf(ee),xy_obs_djf(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(sst_aa_seas&time)-1,1),sst_aa_seas(1,:),False,True) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xyres@trYMinF = 
min((/min(sst_aa_seas(1,:)),min(sst_aa_seas_obs(1,:))/))-.01 + xyres@trYMaxF = max((/max(sst_aa_seas(1,:)),max(sst_aa_seas_obs(1,:))/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+sst_trends_seas@units + xy_mam(ee) = gsn_csm_xy(wks_aa_mam,ispan(syear(ee),eyear(ee),1),sst_aa_seas(1,:),xyres) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xy_obs_mam(ee) = gsn_csm_xy(wks_aa_mam,ispan(sst_aa_seas_obs@syear,sst_aa_seas_obs@eyear,1),sst_aa_seas_obs(1,:),xyres2) + overlay(xy_mam(ee),xy_obs_mam(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(sst_aa_seas&time)-1,1),sst_aa_seas(2,:),False,True) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xyres@trYMinF = min((/min(sst_aa_seas(2,:)),min(sst_aa_seas_obs(2,:))/))-.01 + xyres@trYMaxF = max((/max(sst_aa_seas(2,:)),max(sst_aa_seas_obs(2,:))/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+sst_trends_seas@units + xy_jja(ee) = gsn_csm_xy(wks_aa_jja,ispan(syear(ee),eyear(ee),1),sst_aa_seas(2,:),xyres) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xy_obs_jja(ee) = gsn_csm_xy(wks_aa_jja,ispan(sst_aa_seas_obs@syear,sst_aa_seas_obs@eyear,1),sst_aa_seas_obs(2,:),xyres2) + overlay(xy_jja(ee),xy_obs_jja(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(sst_aa_seas&time)-1,1),sst_aa_seas(3,:),False,True) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xyres@trYMinF = min((/min(sst_aa_seas(3,:)),min(sst_aa_seas_obs(3,:))/))-.01 + xyres@trYMaxF = max((/max(sst_aa_seas(3,:)),max(sst_aa_seas_obs(3,:))/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+sst_trends_seas@units + xy_son(ee) = gsn_csm_xy(wks_aa_son,ispan(syear(ee),eyear(ee),1),sst_aa_seas(3,:),xyres) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xy_obs_son(ee) = gsn_csm_xy(wks_aa_son,ispan(sst_aa_seas_obs@syear,sst_aa_seas_obs@eyear,1),sst_aa_seas_obs(3,:),xyres2) + overlay(xy_son(ee),xy_obs_son(ee)) + end if + delete(tttt) + + tttt = dtrend_msg(ispan(0,dimsizes(sst_aa_ann&time)-1,1),sst_aa_ann,False,True) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xyres@trYMinF = min((/min(sst_aa_ann),min(sst_aa_ann_obs)/))-.01 + xyres@trYMaxF = max((/max(sst_aa_ann),max(sst_aa_ann_obs)/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+sst_trends_ann@units + xy_ann(ee) = gsn_csm_xy(wks_aa_ann,ispan(syear(ee),eyear(ee),1),sst_aa_ann,xyres) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xy_obs_ann(ee) = gsn_csm_xy(wks_aa_ann,ispan(sst_aa_seas_obs@syear,sst_aa_seas_obs@eyear,1),sst_aa_ann_obs,xyres2) + overlay(xy_ann(ee),xy_obs_ann(ee)) + delete(xyres@trYMinF) + delete(xyres@trYMaxF) + end if + delete(tttt) + + xyres@trXMaxF = eyear(ee)+1.5 + xyres2@trXMaxF = eyear(ee)+1.5 + tttt = dtrend_msg(ispan(0,dimsizes(sst_aa_mon&time)-1,1),sst_aa_mon,False,True) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xyres@trYMinF = min((/min(sst_aa_mon),min(sst_aa_mon_obs)/))-.01 + xyres@trYMaxF = max((/max(sst_aa_mon),max(sst_aa_mon_obs)/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(sst_aa_mon&time),2,True)+sst_trends_mon@units + xy_mon(ee) = gsn_csm_xy(wks_aa_mon,fspan(syear(ee),eyear(ee)+.91667,dimsizes(sst_aa_mon)),sst_aa_mon,xyres) + if (isfilepresent2("obs_ts").and.ee.ge.1) then + xy_obs_mon(ee) = gsn_csm_xy(wks_aa_mon,fspan(sst_aa_seas_obs@syear,sst_aa_seas_obs@eyear+.91667,dimsizes(sst_aa_mon_obs)),sst_aa_mon_obs,xyres2) + overlay(xy_mon(ee),xy_obs_mon(ee)) + end if + + xyres@gsnRightString = "" + do ff = 0,4 + if 
(.not.all(ismissing(sst_rt_mon(ff,:)))) then + xyres@gsnRightString = sst_rt_mon@units + xy_rt_mon(ff,ee) = gsn_csm_xy(wks_rt_mon,fspan(syear(ee),eyear(ee)+.91667,dimsizes(sst_aa_mon&time)),sst_rt_mon(ff,:),xyres) + end if + end do + delete([/sst_trends_seas,sst_trends_ann,sst_trends_mon/]) + delete([/sst_aa_seas,sst_aa_mon,sst_aa_ann,xyres,xyres2,res,tttt,sst_rt_mon/]) + end do + if (isfilepresent2("obs_ts")) then + delete([/sst_aa_seas_obs,sst_aa_mon_obs,sst_aa_ann_obs/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + panres@lbLabelFontHeightF = 0.013 + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelStride = 1 + + panres@txString = "TS Trends (DJF)" + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + gsn_panel2(wks_trends_djf,map_djf,(/nrow,ncol/),panres) + delete(wks_trends_djf) + + panres@txString = "TS Trends (MAM)" + gsn_panel2(wks_trends_mam,map_mam,(/nrow,ncol/),panres) + delete(wks_trends_mam) + + panres@txString = "TS Trends (JJA)" + gsn_panel2(wks_trends_jja,map_jja,(/nrow,ncol/),panres) + delete(wks_trends_jja) + + panres@txString = "TS Trends (SON)" + gsn_panel2(wks_trends_son,map_son,(/nrow,ncol/),panres) + delete(wks_trends_son) + + panres@txString = "TS Trends (Annual)" + gsn_panel2(wks_trends_ann,map_ann,(/nrow,ncol/),panres) + delete(wks_trends_ann) + + panres@txString = "TS Trends (Monthly)" + gsn_panel2(wks_trends_mon,map_mon,(/nrow,ncol/),panres) + delete(wks_trends_mon) + delete(panres) + + panres2 = True + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + panres2@gsnPanelYWhiteSpacePercent = 3.0 + if (nsim.le.4) then + panres2@txFontHeightF = 0.024 + else + panres2@txFontHeightF = 0.016 + end if + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres2@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) ;(/nsim/2+1,nsim/8+1/) + end if + panres2@txString = "TS Global Average (DJF)" + gsn_panel2(wks_aa_djf,xy_djf,lp,panres2) + delete(wks_aa_djf) + + panres2@txString = "TS Global Average (MAM)" + gsn_panel2(wks_aa_mam,xy_mam,lp,panres2) + delete(wks_aa_mam) + + panres2@txString = "TS Global Average (JJA)" + gsn_panel2(wks_aa_jja,xy_jja,lp,panres2) + delete(wks_aa_jja) + + panres2@txString = "TS Global Average (SON)" + gsn_panel2(wks_aa_son,xy_son,lp,panres2) + delete(wks_aa_son) + + panres2@txString = "TS Global Average (Annual)" + gsn_panel2(wks_aa_ann,xy_ann,lp,panres2) + delete(wks_aa_ann) + + panres2@txString = "TS Global Average (Monthly)" + gsn_panel2(wks_aa_mon,xy_mon,lp,panres2) + delete(wks_aa_mon) + + panres2@txString = "TS Running 8yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(0,:),lp,panres2) + + panres2@txString = "TS Running 10yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(1,:),lp,panres2) + + panres2@txString = "TS Running 12yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(2,:),lp,panres2) + + panres2@txString = "TS Running 14yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(3,:),lp,panres2) + + panres2@txString = "TS Running 16yr 
Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(4,:),lp,panres2) + delete(wks_rt_mon) + + delete([/nrow,ncol,lp,map_djf,map_mam,map_jja,map_son,map_ann,map_mon,xy_djf,xy_mam,xy_jja,xy_son,xy_ann,xy_mon/]) + delete([/xy_rt_mon/]) + delete(panres2) + if (isfilepresent2("obs_ts")) then + delete([/xy_obs_djf,xy_obs_mam,xy_obs_jja,xy_obs_son,xy_obs_ann,xy_obs_mon/]) + end if + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + do gg = 1,5 + if (isfilepresent2(OUTDIR+"sst.runtrend.mon.00000"+gg+".png")) then + system("mv "+OUTDIR+"sst.runtrend.mon.00000"+gg+".png "+OUTDIR+"sst."+rt_nyr(gg-1)+"yr_runtrend.mon.png") + end if + end do + else + if (isfilepresent2(OUTDIR+"sst.runtrend.mon.ps")) then + system("psplit "+OUTDIR+"sst.runtrend.mon.ps "+OUTDIR+"pict") + do gg = 1,5 + if (isfilepresent2(OUTDIR+"pict000"+gg+".ps")) then + system("mv "+OUTDIR+"pict000"+gg+".ps "+OUTDIR+"sst."+rt_nyr(gg-1)+"yr_runtrend.mon.ps") + end if + end do + end if + end if + delete(OUTDIR) + print("Finished: sst.trends_timeseries.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/tas.mean_stddev.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/tas.mean_stddev.ncl new file mode 100644 index 0000000000..84eaa1100e --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/tas.mean_stddev.ncl @@ -0,0 +1,322 @@ +; Calculates 2m air temperature global means and standard deviations +; +; Variables used: tas +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: tas.mean_stddev.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_trefht") + na = asciiread("namelist_byvar/namelist_trefht",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_stddev_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.stddev.djf") + wks_stddev_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.stddev.mam") + wks_stddev_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.stddev.jja") + wks_stddev_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.stddev.son") + wks_stddev_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.stddev.ann") + wks_mean_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.mean.djf") + wks_mean_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.mean.mam") + wks_mean_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.mean.jja") + wks_mean_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.mean.son") + wks_mean_ann = 
gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.mean.ann") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_stddev_djf,"precip3_16lev") + gsn_define_colormap(wks_stddev_mam,"precip3_16lev") + gsn_define_colormap(wks_stddev_jja,"precip3_16lev") + gsn_define_colormap(wks_stddev_son,"precip3_16lev") + gsn_define_colormap(wks_stddev_ann,"precip3_16lev") + gsn_define_colormap(wks_mean_djf,"ncl_default") + gsn_define_colormap(wks_mean_mam,"ncl_default") + gsn_define_colormap(wks_mean_jja,"ncl_default") + gsn_define_colormap(wks_mean_son,"ncl_default") + gsn_define_colormap(wks_mean_ann,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_stddev_djf,"cb_rainbow") + gsn_define_colormap(wks_stddev_mam,"cb_rainbow") + gsn_define_colormap(wks_stddev_jja,"cb_rainbow") + gsn_define_colormap(wks_stddev_son,"cb_rainbow") + gsn_define_colormap(wks_stddev_ann,"cb_rainbow") + gsn_define_colormap(wks_mean_djf,"BlueDarkRed18") + gsn_define_colormap(wks_mean_mam,"BlueDarkRed18") + gsn_define_colormap(wks_mean_jja,"BlueDarkRed18") + gsn_define_colormap(wks_mean_son,"BlueDarkRed18") + gsn_define_colormap(wks_mean_ann,"BlueDarkRed18") + end if + + plot_mean_djf = new(nsim,"graphic") + plot_mean_mam = new(nsim,"graphic") + plot_mean_jja = new(nsim,"graphic") + plot_mean_son = new(nsim,"graphic") + plot_mean_ann = new(nsim,"graphic") + plot_stddev_djf = new(nsim,"graphic") + plot_stddev_mam = new(nsim,"graphic") + plot_stddev_jja = new(nsim,"graphic") + plot_stddev_son = new(nsim,"graphic") + plot_stddev_ann = new(nsim,"graphic") + do ee = 0,nsim-1 + tas = data_read_in(paths(ee),"TREFHT",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(tas,"is_all_missing")) then + delete(tas) + continue + end if + do ff = 0,1 + tasT = tas + if (ff.eq.1) then + if (OPT_CLIMO.eq."Full") then + tasT = rmMonAnnCycTLL(tasT) + else + check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = tasT + delete(temp_arr&time) + temp_arr&time = cd_calendar(tasT&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + tasT = calcMonAnomTLL(tasT,climo) + delete(climo) + end if + end if + tas_seas = runave_n_Wrap(tasT,3,0,0) + tas_seas(0,:,:) = (/ dim_avg_n(tasT(:1,:,:),0) /) + tas_seas(dimsizes(tasT&time)-1,:,:) = (/ dim_avg_n(tasT(dimsizes(tasT&time)-2:,:,:),0) /) + tas_ann = runave_n_Wrap(tasT,12,0,0) + delete(tasT) + + if (ff.eq.0) then + tas_mean_djf = dim_avg_n_Wrap(tas_seas(0::12,:,:),0) + tas_mean_mam = dim_avg_n_Wrap(tas_seas(3::12,:,:),0) + tas_mean_jja = dim_avg_n_Wrap(tas_seas(6::12,:,:),0) + tas_mean_son = dim_avg_n_Wrap(tas_seas(9::12,:,:),0) + tas_mean_ann = dim_avg_n_Wrap(tas_ann(5::12,:,:),0) + end if + if (ff.eq.1) then + tas_sd_djf = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),tas_seas(0::12,:,:),False,False,0),0) + tas_sd_mam = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),tas_seas(3::12,:,:),False,False,0),0) + tas_sd_jja = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),tas_seas(6::12,:,:),False,False,0),0) + tas_sd_son = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),tas_seas(9::12,:,:),False,False,0),0) + tas_sd_ann = dim_stddev_n_Wrap(dtrend_msg_n(ispan(0,nyr(ee)-1,1),tas_ann(5::12,:,:),False,False,0),0) + end if + delete([/tas_seas,tas_ann/]) + end do + delete(tas) + 
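; ---------------------------------------------------------------------------
; Editor's note -- illustrative sketch, not part of this patch. The loop
; above builds seasonal and annual means with an NCL stride-indexing idiom:
; runave_n_Wrap(x,3,0,0) centers a 3-month mean on every timestep, so
; sampling every 12th value picks out one season per year (the script patches
; the two end points separately, since the running mean is missing there),
; and a centered 12-month mean sampled once per year approximates the
; calendar-year mean. Minimal sketch, assuming a monthly x(time,lat,lon)
; starting in January (x, x_seas, x_ann, djf, jja, ann are hypothetical
; names):
  x_seas = runave_n_Wrap(x,3,0,0)    ; centered 3-month running means
  djf    = x_seas(0::12,:,:)         ; Jan-centered: Dec/Jan/Feb of each year
  jja    = x_seas(6::12,:,:)         ; Jul-centered: Jun/Jul/Aug of each year
  x_ann  = runave_n_Wrap(x,12,0,0)   ; centered 12-month running means
  ann    = x_ann(5::12,:,:)          ; one mid-year (~calendar-year) mean/yr
; ---------------------------------------------------------------------------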
copy_VarMeta(tas_mean_djf,tas_sd_djf) + copy_VarMeta(tas_mean_mam,tas_sd_mam) + copy_VarMeta(tas_mean_jja,tas_sd_jja) + copy_VarMeta(tas_mean_son,tas_sd_son) + copy_VarMeta(tas_mean_ann,tas_sd_ann) + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.tas.mean_stddev."+syear(ee)+"-"+eyear(ee)+".nc" + if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + + z->tas_spatialmean_djf = set_varAtts(tas_mean_djf,"tas mean (DJF)","","") + z->tas_spatialmean_mam = set_varAtts(tas_mean_mam,"tas mean (MAM)","","") + z->tas_spatialmean_jja = set_varAtts(tas_mean_jja,"tas mean (JJA)","","") + z->tas_spatialmean_son = set_varAtts(tas_mean_son,"tas mean (SON)","","") + z->tas_spatialmean_ann = set_varAtts(tas_mean_ann,"tas mean (annual)","","") + + z->tas_spatialstddev_djf = set_varAtts(tas_sd_djf,"tas standard deviation (DJF)","","") + z->tas_spatialstddev_mam = set_varAtts(tas_sd_mam,"tas standard deviation (MAM)","","") + z->tas_spatialstddev_jja = set_varAtts(tas_sd_jja,"tas standard deviation (JJA)","","") + z->tas_spatialstddev_son = set_varAtts(tas_sd_son,"tas standard deviation (SON)","","") + z->tas_spatialstddev_ann = set_varAtts(tas_sd_ann,"tas standard deviation (annual)","","") + delete(z) + end if + +;========================================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. ; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 210. + res@mpOutlineOn = True + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. 
+ end if + res@gsnDraw = False + res@gsnFrame = False + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@cnLevelSelectionMode = "ExplicitLevels" + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + + sres = res + + res@cnLevels = fspan(.4,3.2,8) + if (COLORMAP.eq.0) then + res@cnFillColors = (/2,4,6,8,10,12,14,16,18/) + sres@cnLevels = ispan(-40,40,2) + end if + if (COLORMAP.eq.1) then + res@cnFillColors = (/35,47,63,79,95,111,124,155,175/) + sres@cnLevels = ispan(-20,40,4) + end if + + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + res@gsnRightString = tas_sd_djf@units + res@gsnCenterString = names(ee) + plot_stddev_djf(ee) = gsn_csm_contour_map(wks_stddev_djf,tas_sd_djf,res) + plot_stddev_mam(ee) = gsn_csm_contour_map(wks_stddev_mam,tas_sd_mam,res) + plot_stddev_jja(ee) = gsn_csm_contour_map(wks_stddev_jja,tas_sd_jja,res) + plot_stddev_son(ee) = gsn_csm_contour_map(wks_stddev_son,tas_sd_son,res) + plot_stddev_ann(ee) = gsn_csm_contour_map(wks_stddev_ann,tas_sd_ann,res) + + sres@gsnLeftString = syear(ee)+"-"+eyear(ee) + sres@gsnRightString = tas_mean_djf@units + sres@gsnCenterString = names(ee) + plot_mean_djf(ee) = gsn_csm_contour_map(wks_mean_djf,tas_mean_djf,sres) + plot_mean_mam(ee) = gsn_csm_contour_map(wks_mean_mam,tas_mean_mam,sres) + plot_mean_jja(ee) = gsn_csm_contour_map(wks_mean_jja,tas_mean_jja,sres) + plot_mean_son(ee) = gsn_csm_contour_map(wks_mean_son,tas_mean_son,sres) + plot_mean_ann(ee) = gsn_csm_contour_map(wks_mean_ann,tas_mean_ann,sres) + delete([/tas_sd_djf,tas_sd_mam,tas_sd_jja,tas_sd_son,tas_sd_ann,tas_mean_djf,tas_mean_mam,tas_mean_jja,tas_mean_son,tas_mean_ann,res,sres/]) + end do + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelFontHeightF = 0.013 + panres@lbLabelStride = 1 + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + + panres@txString = "TAS Standard Deviations (DJF)" + gsn_panel2(wks_stddev_djf,plot_stddev_djf,(/nrow,ncol/),panres) + delete(wks_stddev_djf) + + panres@txString = "TAS Standard Deviations (MAM)" + gsn_panel2(wks_stddev_mam,plot_stddev_mam,(/nrow,ncol/),panres) + delete(wks_stddev_mam) + + panres@txString = "TAS Standard Deviations (JJA)" + gsn_panel2(wks_stddev_jja,plot_stddev_jja,(/nrow,ncol/),panres) + delete(wks_stddev_jja) + + panres@txString = "TAS Standard Deviations (SON)" + gsn_panel2(wks_stddev_son,plot_stddev_son,(/nrow,ncol/),panres) + delete(wks_stddev_son) + + panres@txString = "TAS Standard Deviations (Annual)" + gsn_panel2(wks_stddev_ann,plot_stddev_ann,(/nrow,ncol/),panres) + delete(wks_stddev_ann) + + panres@txString = "TAS Means (DJF)" + gsn_panel2(wks_mean_djf,plot_mean_djf,(/nrow,ncol/),panres) + delete(wks_mean_djf) + + panres@txString = "TAS 
Means (MAM)" + gsn_panel2(wks_mean_mam,plot_mean_mam,(/nrow,ncol/),panres) + delete(wks_mean_mam) + + panres@txString = "TAS Means (JJA)" + gsn_panel2(wks_mean_jja,plot_mean_jja,(/nrow,ncol/),panres) + delete(wks_mean_jja) + + panres@txString = "TAS Means (SON)" + gsn_panel2(wks_mean_son,plot_mean_son,(/nrow,ncol/),panres) + delete(wks_mean_son) + + panres@txString = "TAS Means (Annual)" + gsn_panel2(wks_mean_ann,plot_mean_ann,(/nrow,ncol/),panres) + delete(wks_mean_ann) + delete(panres) + print("Finished: tas.mean_stddev.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/tas.trends_timeseries.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/tas.trends_timeseries.ncl new file mode 100644 index 0000000000..f0494ded6d --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/tas.trends_timeseries.ncl @@ -0,0 +1,606 @@ +; Calculates 2m air temperature global trends, running global trends and timeseries +; +; Variables used: tas +; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/shea_util.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: tas.trends_timeseries.ncl") + + SCALE_TIMESERIES = getenv("SCALE_TIMESERIES") + OUTPUT_DATA = getenv("OUTPUT_DATA") + PNG_SCALE = tofloat(getenv("PNG_SCALE")) + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = toint(getenv("CLIMO_SYEAR")) + CLIMO_EYEAR = toint(getenv("CLIMO_EYEAR")) + OUTPUT_TYPE = getenv("OUTPUT_TYPE") + COLORMAP = getenv("COLORMAP") + + nsim = numAsciiRow("namelist_byvar/namelist_trefht") + na = asciiread("namelist_byvar/namelist_trefht",(/nsim/),"string") + names = new(nsim,"string") + paths = new(nsim,"string") + syear = new(nsim,"integer",-999) + eyear = new(nsim,"integer",-999) + delim = "|" + + do gg = 0,nsim-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + paths(gg) = str_strip(str_get_field(na(gg),2,delim)) + syear(gg) = stringtointeger(str_strip(str_get_field(na(gg),3,delim))) + eyear(gg) = stringtointeger(str_strip(str_get_field(na(gg),4,delim))) + end do + nyr = eyear-syear+1 + nyr_max = max(nyr) + + pi=4.*atan(1.0) + rad=(pi/180.) 
+ + wks_type = OUTPUT_TYPE + if (wks_type.eq."png") then + wks_type@wkWidth = 1500*PNG_SCALE + wks_type@wkHeight = 1500*PNG_SCALE + end if + wks_trends_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.trends.djf") + wks_trends_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.trends.mam") + wks_trends_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.trends.jja") + wks_trends_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.trends.son") + wks_trends_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.trends.ann") + wks_trends_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.trends.mon") + + wks_aa_djf = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.timeseries.djf") + wks_aa_mam = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.timeseries.mam") + wks_aa_jja = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.timeseries.jja") + wks_aa_son = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.timeseries.son") + wks_aa_ann = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.timeseries.ann") + wks_aa_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.timeseries.mon") + + wks_rt_mon = gsn_open_wks(wks_type,getenv("OUTDIR")+"tas.runtrend.mon") + + if (COLORMAP.eq.0) then + gsn_define_colormap(wks_trends_djf,"ncl_default") + gsn_define_colormap(wks_trends_mam,"ncl_default") + gsn_define_colormap(wks_trends_jja,"ncl_default") + gsn_define_colormap(wks_trends_son,"ncl_default") + gsn_define_colormap(wks_trends_ann,"ncl_default") + gsn_define_colormap(wks_trends_mon,"ncl_default") + gsn_define_colormap(wks_aa_djf,"ncl_default") + gsn_define_colormap(wks_aa_mam,"ncl_default") + gsn_define_colormap(wks_aa_jja,"ncl_default") + gsn_define_colormap(wks_aa_son,"ncl_default") + gsn_define_colormap(wks_aa_ann,"ncl_default") + gsn_define_colormap(wks_aa_mon,"ncl_default") + gsn_define_colormap(wks_rt_mon,"ncl_default") + end if + if (COLORMAP.eq.1) then + gsn_define_colormap(wks_trends_djf,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mam,"BlueDarkRed18") + gsn_define_colormap(wks_trends_jja,"BlueDarkRed18") + gsn_define_colormap(wks_trends_son,"BlueDarkRed18") + gsn_define_colormap(wks_trends_ann,"BlueDarkRed18") + gsn_define_colormap(wks_trends_mon,"BlueDarkRed18") + gsn_define_colormap(wks_aa_djf,"ncl_default") + gsn_define_colormap(wks_aa_mam,"ncl_default") + gsn_define_colormap(wks_aa_jja,"ncl_default") + gsn_define_colormap(wks_aa_son,"ncl_default") + gsn_define_colormap(wks_aa_ann,"ncl_default") + gsn_define_colormap(wks_aa_mon,"ncl_default") + gsn_define_colormap(wks_rt_mon,"ncl_default") + end if + map_djf = new(nsim,"graphic") + map_mam = new(nsim,"graphic") + map_jja = new(nsim,"graphic") + map_son = new(nsim,"graphic") + map_ann = new(nsim,"graphic") + map_mon = new(nsim,"graphic") + xy_djf = new(nsim,"graphic") + xy_mam = new(nsim,"graphic") + xy_jja = new(nsim,"graphic") + xy_son = new(nsim,"graphic") + xy_ann = new(nsim,"graphic") + xy_mon = new(nsim,"graphic") + + xy_rt_mon = new((/5,nsim/),"graphic") + + if (isfilepresent2("obs_trefht")) then + xy_obs_djf = new(nsim,"graphic") + xy_obs_mam = new(nsim,"graphic") + xy_obs_jja = new(nsim,"graphic") + xy_obs_son = new(nsim,"graphic") + xy_obs_ann = new(nsim,"graphic") + xy_obs_mon = new(nsim,"graphic") + end if + do ee = 0,nsim-1 + tas = data_read_in(paths(ee),"TREFHT",syear(ee),eyear(ee)) ; read in data, orient lats/lons correctly, set time coordinate variable up + if (isatt(tas,"is_all_missing")) then + delete(tas) + continue + end if + if (OPT_CLIMO.eq."Full") then + tas = rmMonAnnCycTLL(tas) + else + 
check_custom_climo(names(ee),syear(ee),eyear(ee),CLIMO_SYEAR,CLIMO_EYEAR) + temp_arr = tas + delete(temp_arr&time) + temp_arr&time = cd_calendar(tas&time,-1) + if (CLIMO_SYEAR.lt.0) then + climo = clmMonTLL(temp_arr({(eyear(ee)+CLIMO_SYEAR)*100+1:(eyear(ee)+CLIMO_EYEAR)*100+12},:,:)) + else + climo = clmMonTLL(temp_arr({CLIMO_SYEAR*100+1:CLIMO_EYEAR*100+12},:,:)) + end if + delete(temp_arr) + tas = calcMonAnomTLL(tas,climo) + delete(climo) + end if + + coswgt=cos(rad*tas&lat) + coswgt!0 = "lat" + coswgt&lat= tas&lat + + tas_aa_mon = wgt_areaave_Wrap(tas,coswgt,1.0,0) + tttt = dtrend_msg_n(ispan(0,dimsizes(tas&time)-1,1),tas,False,True,0) + tas_trends_mon = tas(0,:,:) + tas_trends_mon = (/ onedtond(tttt@slope, (/dimsizes(tas&lat),dimsizes(tas&lon)/) ) /) + tas_trends_mon = tas_trends_mon*dimsizes(tas&time) + tas_trends_mon@units = tas@units+" "+nyr(ee)+"yr~S~-1~N~" + delete(tttt) + + tas_seas = runave_n_Wrap(tas,3,0,0) + tas_seas(0,:,:) = (/ dim_avg_n(tas(:1,:,:),0) /) + tas_seas(dimsizes(tas&time)-1,:,:) = (/ dim_avg_n(tas(dimsizes(tas&time)-2:,:,:),0) /) + tas_ann = runave_n_Wrap(tas,12,0,0) + delete(tas) + + tas_trends_seas = tas_seas(:3,:,:) + tas_trends_seas = tas_trends_seas@_FillValue + tas_trends_ann = tas_trends_seas(0,:,:) + tas_aa_seas = new((/4,nyr(ee)/),typeof(tas_seas)) + tas_aa_seas!1 = "time" + tas_aa_seas&time = ispan(syear(ee),eyear(ee),1) + tas_aa_seas&time@units = "YYYY" + tas_aa_seas&time@long_name = "time" + tas_aa_ann = tas_aa_seas(0,:) + do ff = 0,4 + if (ff.le.3) then + tarr = tas_seas(ff*3::12,:,:) + end if + if (ff.eq.4) then + tarr = tas_ann(5::12,:,:) + end if + tttt = dtrend_msg_n(ispan(0,dimsizes(tarr&time)-1,1),tarr,False,True,0) + if (ff.le.3) then + tas_trends_seas(ff,:,:) = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + tas_aa_seas(ff,:) = (/ wgt_areaave(tarr,coswgt,1.0,0) /) + end if + if (ff.eq.4) then + tas_trends_ann = (/ onedtond(tttt@slope, (/dimsizes(tarr&lat),dimsizes(tarr&lon)/) ) /) + tas_aa_ann = (/ wgt_areaave(tarr,coswgt,1.0,0) /) + end if + delete([/tarr,tttt/]) + end do + tas_trends_seas = tas_trends_seas*nyr(ee) + tas_trends_seas@units = tas_seas@units+" "+nyr(ee)+"yr~S~-1~N~" + tas_trends_ann = tas_trends_ann*nyr(ee) + tas_trends_ann@units = tas_ann@units+" "+nyr(ee)+"yr~S~-1~N~" + delete([/tas_seas,tas_ann,coswgt/]) + + if (isfilepresent2("obs_trefht").and.ee.eq.0) then + tas_aa_seas@syear = syear(ee) + tas_aa_seas@eyear = eyear(ee) + tas_aa_mon@syear = syear(ee) + tas_aa_mon@eyear = eyear(ee) + tas_aa_ann@syear = syear(ee) + tas_aa_ann@eyear = eyear(ee) + tas_aa_seas_obs = tas_aa_seas + tas_aa_mon_obs = tas_aa_mon + tas_aa_ann_obs = tas_aa_ann + end if + + dimT = dimsizes(tas_aa_mon) ; calculate running trends from the monthly data + tas_rt_mon = new((/5,dimT/),typeof(tas_aa_mon)) + tas_rt_mon!1 = "time" + tas_rt_mon&time = tas_aa_mon&time + copy_VarAtts(tas_aa_mon,tas_rt_mon) + tas_rt_mon@long_name = tas_rt_mon@long_name+" global average running trend" + rt_nyr = (/8,10,12,14,16/) + do ff = 0,dimsizes(rt_nyr)-1 + incr = rt_nyr(ff)*12 + do gg = 0,dimT-incr-1 + tttt = dtrend_msg(ispan(0,incr-1,1),tas_aa_mon(gg:gg+incr-1),False,True) + tas_rt_mon(ff,gg) = (/ tttt@slope*incr /) + delete(tttt) + end do + end do + delete([/dimT,incr/]) + + if (OUTPUT_DATA.eq."True") then + modname = str_sub_str(names(ee)," ","_") + bc = (/"/","'","(",")"/) + do gg = 0,dimsizes(bc)-1 + modname = str_sub_str(modname,bc(gg),"_") + end do + fn = getenv("OUTDIR")+modname+".cvdp_data.tas.trends_timeseries."+syear(ee)+"-"+eyear(ee)+".nc" + 
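; ---------------------------------------------------------------------------
; Editor's note -- illustrative sketch, not part of this patch. The running
; trends computed just above slide an N-year window (incr = N*12 months)
; along the monthly global-mean series; dtrend_msg returns the per-timestep
; regression slope as an attribute, and slope*incr converts that to the
; total trend across the window, stored at the window's first month.
; Minimal sketch for a single window length (xm, nwin, ntot and rtrend are
; hypothetical names; xm is a monthly global-mean series):
  nwin   = 10*12                               ; one 10-yr window, in months
  ntot   = dimsizes(xm)
  rtrend = new(ntot,typeof(xm),xm@_FillValue)
  do i = 0,ntot-nwin-1
    tt = dtrend_msg(ispan(0,nwin-1,1),xm(i:i+nwin-1),False,True)
    rtrend(i) = tt@slope*nwin                  ; total trend over the window
    delete(tt)
  end do
; ---------------------------------------------------------------------------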
if (.not.isfilepresent2(fn)) then + z = addfile(fn,"c") + z@source = "NCAR Climate Analysis Section's Climate Variability Diagnostics Package v"+getenv("VERSION") + z@notes = "Data from "+names(ee)+" from "+syear(ee)+"-"+eyear(ee) + if (OPT_CLIMO.eq."Full") then + z@climatology = syear(ee)+"-"+eyear(ee)+" climatology removed prior to all calculations (other than means)" + else + if (CLIMO_SYEAR.lt.0) then + z@climatology = (eyear(ee)+CLIMO_SYEAR)+"-"+(eyear(ee)+CLIMO_EYEAR)+" climatology removed prior to all calculations (other than means)" + else + z@climatology = CLIMO_SYEAR+"-"+CLIMO_EYEAR+" climatology removed prior to all calculations (other than means)" + end if + end if + z@Conventions = "CF-1.6" + else + z = addfile(fn,"w") + end if + tas_aa_seas2 = tas_aa_seas + tas_aa_seas2!1 = "TIME" + tas_aa_seas2&TIME = ispan(syear(ee),eyear(ee),1) + tas_aa_seas2&TIME@units = "YYYY" + tas_aa_seas2&TIME@long_name = "time" + tas_aa_ann2 = tas_aa_ann + tas_aa_ann2!0 = "TIME" + tas_aa_ann2&TIME = ispan(syear(ee),eyear(ee),1) + tas_aa_ann2&TIME@units = "YYYY" + tas_aa_ann2&TIME@long_name = "time" + z->tas_global_avg_mon = set_varAtts(tas_aa_mon,"tas global area-average (monthly)","C","") + z->tas_global_avg_djf = set_varAtts(tas_aa_seas2(0,:),"tas global area-average (DJF)","C","") + z->tas_global_avg_mam = set_varAtts(tas_aa_seas2(1,:),"tas global area-average (MAM)","C","") + z->tas_global_avg_jja = set_varAtts(tas_aa_seas2(2,:),"tas global area-average (JJA)","C","") + z->tas_global_avg_son = set_varAtts(tas_aa_seas2(3,:),"tas global area-average (SON)","C","") + z->tas_global_avg_ann = set_varAtts(tas_aa_ann2,"tas global area-average (annual)","C","") + z->$("tas_global_avg_runtrend_"+rt_nyr(0)+"yr")$ = set_varAtts(tas_rt_mon(0,:),"tas global area-average "+rt_nyr(0)+"yr running trend","","") + z->$("tas_global_avg_runtrend_"+rt_nyr(1)+"yr")$ = set_varAtts(tas_rt_mon(1,:),"tas global area-average "+rt_nyr(1)+"yr running trend","","") + z->$("tas_global_avg_runtrend_"+rt_nyr(2)+"yr")$ = set_varAtts(tas_rt_mon(2,:),"tas global area-average "+rt_nyr(2)+"yr running trend","","") + z->$("tas_global_avg_runtrend_"+rt_nyr(3)+"yr")$ = set_varAtts(tas_rt_mon(3,:),"tas global area-average "+rt_nyr(3)+"yr running trend","","") + z->$("tas_global_avg_runtrend_"+rt_nyr(4)+"yr")$ = set_varAtts(tas_rt_mon(4,:),"tas global area-average "+rt_nyr(4)+"yr running trend","","") + z->tas_trends_djf = set_varAtts(tas_trends_seas(0,:,:),"tas linear trends (DJF)","","") + z->tas_trends_mam = set_varAtts(tas_trends_seas(1,:,:),"tas linear trends (MAM)","","") + z->tas_trends_jja = set_varAtts(tas_trends_seas(2,:,:),"tas linear trends (JJA)","","") + z->tas_trends_son = set_varAtts(tas_trends_seas(3,:,:),"tas linear trends (SON)","","") + z->tas_trends_ann = set_varAtts(tas_trends_ann,"tas linear trends (annual)","","") + z->tas_trends_mon = set_varAtts(tas_trends_mon,"tas linear trends (monthly)","","") + delete(z) + delete([/tas_aa_seas2,tas_aa_ann2/]) + end if +;======================================================================== + res = True + res@mpProjection = "WinkelTripel" + res@mpGeophysicalLineColor = "gray42" + if (wks_type.eq."png") then + res@mpGeophysicalLineThicknessF = 2. + else + res@mpGeophysicalLineThicknessF = 1. + end if + res@mpPerimOn = False + res@mpGridLatSpacingF = 90 ; change latitude line spacing + res@mpGridLonSpacingF = 180. 
; change longitude line spacing + res@mpGridLineColor = "transparent" ; trick ncl into drawing perimeter + res@mpGridAndLimbOn = True ; turn on lat/lon lines + res@mpFillOn = False + res@mpCenterLonF = 210. + res@mpOutlineOn = True + res@gsnDraw = False + res@gsnFrame = False + + res@cnLevelSelectionMode = "ExplicitLevels" + if (COLORMAP.eq.0) then + res@cnLevels = (/-8,-6,-5,-4,-3,-2,-1,-0.5,-0.25,0,0.25,0.5,1,2,3,4,5,6,8/) + end if + if (COLORMAP.eq.1) then + res@cnLevels = (/-6,-4,-3,-2,-1,-0.5,-0.25,0,0.25,0.5,1,2,3,4,6/) + end if + + res@cnLineLabelsOn = False + res@cnFillOn = True + res@cnLinesOn = False + res@lbLabelBarOn = False + + res@gsnLeftStringOrthogonalPosF = -0.05 + res@gsnLeftStringParallelPosF = .005 + res@gsnRightStringOrthogonalPosF = -0.05 + res@gsnRightStringParallelPosF = 0.96 + res@gsnRightString = "" + res@gsnLeftString = "" + res@gsnLeftStringFontHeightF = 0.014 + res@gsnCenterStringFontHeightF = 0.018 + res@gsnRightStringFontHeightF = 0.014 + res@gsnLeftString = syear(ee)+"-"+eyear(ee) + + res@gsnRightString = tas_trends_seas@units + res@gsnCenterString = names(ee) + map_djf(ee) = gsn_csm_contour_map(wks_trends_djf,tas_trends_seas(0,:,:),res) + map_mam(ee) = gsn_csm_contour_map(wks_trends_mam,tas_trends_seas(1,:,:),res) + map_jja(ee) = gsn_csm_contour_map(wks_trends_jja,tas_trends_seas(2,:,:),res) + map_son(ee) = gsn_csm_contour_map(wks_trends_son,tas_trends_seas(3,:,:),res) + map_ann(ee) = gsn_csm_contour_map(wks_trends_ann,tas_trends_ann,res) + map_mon(ee) = gsn_csm_contour_map(wks_trends_mon,tas_trends_mon,res) + + xyres = True + xyres@gsnDraw = False + xyres@gsnFrame = False + xyres@gsnYRefLine = 0.0 + xyres@gsnYRefLineColor = "gray42" + + if (wks_type.eq."png") then + xyres@xyLineThicknessF = 4. + else + xyres@xyLineThicknessF = 2. 
+ end if + if (isfilepresent2("obs_trefht").and.ee.eq.0) then + xyres@xyLineColor = "black" + else + xyres@xyLineColor = "royalblue" + end if + xyres@tiYAxisString = "" + if (nsim.le.5) then + xyres@tmXBLabelFontHeightF = 0.0125 + xyres@tmYLLabelFontHeightF = 0.0125 + xyres@gsnLeftStringFontHeightF = 0.017 + xyres@gsnRightStringFontHeightF = 0.013 + else + xyres@tmXBLabelFontHeightF = 0.018 + xyres@tmYLLabelFontHeightF = 0.018 + xyres@gsnLeftStringFontHeightF = 0.024 + xyres@gsnRightStringFontHeightF = 0.020 + end if + xyres@gsnLeftStringOrthogonalPosF = 0.025 + xyres@gsnRightStringOrthogonalPosF = xyres@gsnLeftStringOrthogonalPosF + xyres@vpXF = 0.05 + xyres@vpHeightF = 0.15 + if (SCALE_TIMESERIES.eq."True") then + xyres@vpWidthF = 0.9*((nyr(ee)*1.)/nyr_max) + else + xyres@vpWidthF = 0.9 + end if + xyres@gsnLeftString = "" + xyres@gsnCenterString = "" + xyres@gsnRightString = "" + + xyres@trXMinF = syear(ee)-.5 + xyres@trXMaxF = eyear(ee)+0.5 + + xyres2 = xyres + xyres2@xyLineColor = "gray60" + xyres2@xyCurveDrawOrder = "PreDraw" + + xyres@gsnLeftString = names(ee) + tttt = dtrend_msg(ispan(0,dimsizes(tas_aa_seas&time)-1,1),tas_aa_seas(0,:),False,True) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xyres@trYMinF = min((/min(tas_aa_seas(0,:)),min(tas_aa_seas_obs(0,:))/))-.01 + xyres@trYMaxF = max((/max(tas_aa_seas(0,:)),max(tas_aa_seas_obs(0,:))/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+tas_trends_seas@units + xy_djf(ee) = gsn_csm_xy(wks_aa_djf,ispan(syear(ee),eyear(ee),1),tas_aa_seas(0,:),xyres) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xy_obs_djf(ee) = gsn_csm_xy(wks_aa_djf,ispan(tas_aa_seas_obs@syear,tas_aa_seas_obs@eyear,1),tas_aa_seas_obs(0,:),xyres2) + overlay(xy_djf(ee),xy_obs_djf(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(tas_aa_seas&time)-1,1),tas_aa_seas(1,:),False,True) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xyres@trYMinF = min((/min(tas_aa_seas(1,:)),min(tas_aa_seas_obs(1,:))/))-.01 + xyres@trYMaxF = max((/max(tas_aa_seas(1,:)),max(tas_aa_seas_obs(1,:))/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+tas_trends_seas@units + xy_mam(ee) = gsn_csm_xy(wks_aa_mam,ispan(syear(ee),eyear(ee),1),tas_aa_seas(1,:),xyres) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xy_obs_mam(ee) = gsn_csm_xy(wks_aa_mam,ispan(tas_aa_seas_obs@syear,tas_aa_seas_obs@eyear,1),tas_aa_seas_obs(1,:),xyres2) + overlay(xy_mam(ee),xy_obs_mam(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(tas_aa_seas&time)-1,1),tas_aa_seas(2,:),False,True) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xyres@trYMinF = min((/min(tas_aa_seas(2,:)),min(tas_aa_seas_obs(2,:))/))-.01 + xyres@trYMaxF = max((/max(tas_aa_seas(2,:)),max(tas_aa_seas_obs(2,:))/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+tas_trends_seas@units + xy_jja(ee) = gsn_csm_xy(wks_aa_jja,ispan(syear(ee),eyear(ee),1),tas_aa_seas(2,:),xyres) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xy_obs_jja(ee) = gsn_csm_xy(wks_aa_jja,ispan(tas_aa_seas_obs@syear,tas_aa_seas_obs@eyear,1),tas_aa_seas_obs(2,:),xyres2) + overlay(xy_jja(ee),xy_obs_jja(ee)) + end if + + tttt = dtrend_msg(ispan(0,dimsizes(tas_aa_seas&time)-1,1),tas_aa_seas(3,:),False,True) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xyres@trYMinF = min((/min(tas_aa_seas(3,:)),min(tas_aa_seas_obs(3,:))/))-.01 + xyres@trYMaxF = max((/max(tas_aa_seas(3,:)),max(tas_aa_seas_obs(3,:))/))+.01 + end if + xyres@gsnRightString = 
decimalPlaces(tttt@slope*nyr(ee),2,True)+tas_trends_seas@units + xy_son(ee) = gsn_csm_xy(wks_aa_son,ispan(syear(ee),eyear(ee),1),tas_aa_seas(3,:),xyres) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xy_obs_son(ee) = gsn_csm_xy(wks_aa_son,ispan(tas_aa_seas_obs@syear,tas_aa_seas_obs@eyear,1),tas_aa_seas_obs(3,:),xyres2) + overlay(xy_son(ee),xy_obs_son(ee)) + end if + delete(tttt) + + + tttt = dtrend_msg(ispan(0,dimsizes(tas_aa_ann&time)-1,1),tas_aa_ann,False,True) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xyres@trYMinF = min((/min(tas_aa_ann),min(tas_aa_ann_obs)/))-.01 + xyres@trYMaxF = max((/max(tas_aa_ann),max(tas_aa_ann_obs)/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*nyr(ee),2,True)+tas_trends_ann@units + xy_ann(ee) = gsn_csm_xy(wks_aa_ann,ispan(syear(ee),eyear(ee),1),tas_aa_ann,xyres) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xy_obs_ann(ee) = gsn_csm_xy(wks_aa_ann,ispan(tas_aa_seas_obs@syear,tas_aa_seas_obs@eyear,1),tas_aa_ann_obs,xyres2) + overlay(xy_ann(ee),xy_obs_ann(ee)) + delete(xyres@trYMinF) + delete(xyres@trYMaxF) + end if + delete(tttt) + + xyres@trXMaxF = eyear(ee)+1.5 + xyres2@trXMaxF = eyear(ee)+1.5 + tttt = dtrend_msg(ispan(0,dimsizes(tas_aa_mon&time)-1,1),tas_aa_mon,False,True) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xyres@trYMinF = min((/min(tas_aa_mon),min(tas_aa_mon_obs)/))-.01 + xyres@trYMaxF = max((/max(tas_aa_mon),max(tas_aa_mon_obs)/))+.01 + end if + xyres@gsnRightString = decimalPlaces(tttt@slope*dimsizes(tas_aa_mon&time),2,True)+tas_trends_mon@units + xy_mon(ee) = gsn_csm_xy(wks_aa_mon,fspan(syear(ee),eyear(ee)+.91667,dimsizes(tas_aa_mon)),tas_aa_mon,xyres) + if (isfilepresent2("obs_trefht").and.ee.ge.1) then + xy_obs_mon(ee) = gsn_csm_xy(wks_aa_mon,fspan(tas_aa_seas_obs@syear,tas_aa_seas_obs@eyear+.91667,dimsizes(tas_aa_mon_obs)),tas_aa_mon_obs,xyres2) + overlay(xy_mon(ee),xy_obs_mon(ee)) + end if + + xyres@gsnRightString = "" + do ff = 0,4 + if (.not.all(ismissing(tas_rt_mon(ff,:)))) then + xyres@gsnRightString = tas_rt_mon@units + xy_rt_mon(ff,ee) = gsn_csm_xy(wks_rt_mon,fspan(syear(ee),eyear(ee)+.91667,dimsizes(tas_aa_mon&time)),tas_rt_mon(ff,:),xyres) + end if + end do + + delete([/tas_trends_seas,tas_trends_ann,tas_trends_mon/]) + delete([/tas_aa_seas,tas_aa_mon,tas_aa_ann,xyres,xyres2,res,tttt,tas_rt_mon/]) + end do + if (isfilepresent2("obs_trefht")) then + delete([/tas_aa_seas_obs,tas_aa_mon_obs,tas_aa_ann_obs/]) + end if + + panres = True + panres@gsnMaximize = True + panres@gsnPaperOrientation = "portrait" + panres@gsnPanelLabelBar = True + panres@gsnPanelYWhiteSpacePercent = 3.0 + panres@pmLabelBarHeightF = 0.05 + panres@pmLabelBarWidthF = 0.65 + panres@lbTitleOn = False + panres@lbBoxLineColor = "gray70" + panres@lbLabelFontHeightF = 0.013 + if (nsim.le.4) then + if (nsim.eq.1) then + panres@txFontHeightF = 0.022 + panres@gsnPanelBottom = 0.50 + else + panres@txFontHeightF = 0.0145 + panres@gsnPanelBottom = 0.50 + end if + else + panres@txFontHeightF = 0.016 + panres@gsnPanelBottom = 0.05 + end if + panres@lbLabelStride = 1 + + panres@txString = "TAS Trends (DJF)" + ncol = floattointeger(sqrt(nsim)) + nrow = (nsim/ncol)+mod(nsim,ncol) + gsn_panel2(wks_trends_djf,map_djf,(/nrow,ncol/),panres) + delete(wks_trends_djf) + + panres@txString = "TAS Trends (MAM)" + gsn_panel2(wks_trends_mam,map_mam,(/nrow,ncol/),panres) + delete(wks_trends_mam) + + panres@txString = "TAS Trends (JJA)" + gsn_panel2(wks_trends_jja,map_jja,(/nrow,ncol/),panres) + delete(wks_trends_jja) + + panres@txString = "TAS 
Trends (SON)" + gsn_panel2(wks_trends_son,map_son,(/nrow,ncol/),panres) + delete(wks_trends_son) + + panres@txString = "TAS Trends (Annual)" + gsn_panel2(wks_trends_ann,map_ann,(/nrow,ncol/),panres) + delete(wks_trends_ann) + + panres@txString = "TAS Trends (Monthly)" + gsn_panel2(wks_trends_mon,map_mon,(/nrow,ncol/),panres) + delete(wks_trends_mon) + delete(panres) + + panres2 = True + panres2@gsnMaximize = True + panres2@gsnPaperOrientation = "portrait" + panres2@gsnPanelYWhiteSpacePercent = 3.0 + if (nsim.le.5) then + panres2@txFontHeightF = 0.024 + else + panres2@txFontHeightF = 0.016 + end if + if (SCALE_TIMESERIES.eq."True") then + tt = ind(nyr.eq.nyr_max) + panres2@gsnPanelScalePlotIndex = tt(0) + delete(tt) + end if + if (nsim.le.12) then + lp = (/nsim,1/) + else + lp = (/nrow,ncol/) ;(/nsim/2+1,nsim/8+1/) + end if + panres2@txString = "TAS Global Average (DJF)" + gsn_panel2(wks_aa_djf,xy_djf,lp,panres2) + delete(wks_aa_djf) + + panres2@txString = "TAS Global Average (MAM)" + gsn_panel2(wks_aa_mam,xy_mam,lp,panres2) + delete(wks_aa_mam) + + panres2@txString = "TAS Global Average (JJA)" + gsn_panel2(wks_aa_jja,xy_jja,lp,panres2) + delete(wks_aa_jja) + + panres2@txString = "TAS Global Average (SON)" + gsn_panel2(wks_aa_son,xy_son,lp,panres2) + delete(wks_aa_son) + + panres2@txString = "TAS Global Average (Annual)" + gsn_panel2(wks_aa_ann,xy_ann,lp,panres2) + delete(wks_aa_ann) + + panres2@txString = "TAS Global Average (Monthly)" + gsn_panel2(wks_aa_mon,xy_mon,lp,panres2) + delete(wks_aa_mon) + + panres2@txString = "TAS Running 8yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(0,:),lp,panres2) + + panres2@txString = "TAS Running 10yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(1,:),lp,panres2) + + panres2@txString = "TAS Running 12yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(2,:),lp,panres2) + + panres2@txString = "TAS Running 14yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(3,:),lp,panres2) + + panres2@txString = "TAS Running 16yr Trend (Monthly)" + gsn_panel2(wks_rt_mon,xy_rt_mon(4,:),lp,panres2) + delete(wks_rt_mon) + + delete([/nrow,ncol,lp,map_djf,map_mam,map_jja,map_son,map_ann,map_mon,xy_djf,xy_mam,xy_jja,xy_son,xy_ann,xy_mon/]) + delete([/xy_rt_mon/]) + delete(panres2) + if (isfilepresent2("obs_trefht")) then + delete([/xy_obs_djf,xy_obs_mam,xy_obs_jja,xy_obs_son,xy_obs_ann,xy_obs_mon/]) + end if + OUTDIR = getenv("OUTDIR") + if (wks_type.eq."png") then + do gg = 1,5 + if (isfilepresent2(OUTDIR+"tas.runtrend.mon.00000"+gg+".png")) then + system("mv "+OUTDIR+"tas.runtrend.mon.00000"+gg+".png "+OUTDIR+"tas."+rt_nyr(gg-1)+"yr_runtrend.mon.png") + end if + end do + else + if (isfilepresent2(OUTDIR+"tas.runtrend.mon.ps")) then + system("psplit "+OUTDIR+"tas.runtrend.mon.ps "+OUTDIR+"tas_rt") + do gg = 1,5 + if (isfilepresent2(OUTDIR+"tas_rt000"+gg+".ps")) then + system("mv "+OUTDIR+"tas_rt000"+gg+".ps "+OUTDIR+"tas."+rt_nyr(gg-1)+"yr_runtrend.mon.ps") + end if + end do + system("rm "+OUTDIR+"tas.runtrend.mon.ps") + end if + end if + delete(OUTDIR) + print("Finished: tas.trends_timeseries.ncl") +end diff --git a/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/webpage.ncl b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/webpage.ncl new file mode 100644 index 0000000000..d9a74333a5 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp/ncl_scripts/webpage.ncl @@ -0,0 +1,604 @@ +; Create the index.html, methodology.html, and metrics.html web pages. 
+; +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_code.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/gsn_csm.ncl" +load "$NCARG_ROOT/lib/ncarg/nclscripts/csm/contributed.ncl" +load "$CVDP_SCRIPTS/functions.ncl" + +begin + print("Starting: webpage.ncl") + + OUTDIR = getenv("OUTDIR") + VERSION = getenv("VERSION") + OT = getenv("OUTPUT_DATA") + OPT_CLIMO = getenv("OPT_CLIMO") + CLIMO_SYEAR = getenv("CLIMO_SYEAR") + CLIMO_EYEAR = getenv("CLIMO_EYEAR") + OBS = getenv("OBS") + + quote = str_get_dq() + + if (OPT_CLIMO.eq."Full") then + subtxt = "Climatological Period Used: Full" + end if + if (OPT_CLIMO.eq."Custom") then + if (toint(CLIMO_SYEAR).lt.0) then + subtxt = "Climo. Period (relative to record end): "+CLIMO_SYEAR+"-"+CLIMO_EYEAR + else + subtxt = "Climatological Period Used: "+CLIMO_SYEAR+"-"+CLIMO_EYEAR + end if + end if + if (OBS.eq."True") then + namelist_txt = "Input Namelists: Observations | Models" + else + namelist_txt = "Input Namelist: Models" + end if + nsim_noobs = numAsciiRow("namelist") + na = asciiread("namelist",(/nsim_noobs/),"string") + names = new(nsim_noobs,"string") + delim = "|" + do gg = 0,nsim_noobs-1 + names(gg) = str_strip(str_get_field(na(gg),1,delim)) + end do +; print(names) + + txt = new(700,"string") + quote = str_get_dq() + + txt(0) = "Climate Variability Diagnostics Package" + txt(12) = "+quote+" + txt(13) = "" + txt(14) = "" + txt(17) = "" + txt(18) = "

Methodology and Definitions" + if (isfilepresent2(OUTDIR+"metrics.txt")) then + if (isfilepresent2(OUTDIR+"metrics.table_1.gif")) then + txt(15) = "
Metrics Tables: Pattern Correlations | RMS" + else + txt(15) = "
Metrics Table" + end if + end if + txt(16) = "
"+namelist_txt+"

"+webtitle+"

Derived
Namelists:
MOC | PR | PSL | SIC NH
SIC SH | SND | TAS | TS
"+subtxt+"
Created: "+systemfunc("date")+"
CVDP Version "+VERSION+"

" + delete([/subtxt,namelist_txt/]) + + txt(26) = "
" + txt(27) = "" + txt(28) = "" + txt(29) = "" + txt(30) = "" + txt(31) = "" + txt(32) = "" + txt(33) = "" + txt(34) = "" + txt(35) = "" + txt(36) = "
Means
SST"+table_link_setup(OUTDIR,"sst.mean.djf.png","DJF")+""+table_link_setup(OUTDIR,"sst.mean.mam.png","MAM")+""+table_link_setup(OUTDIR,"sst.mean.jja.png","JJA")+""+table_link_setup(OUTDIR,"sst.mean.son.png","SON")+""+table_link_setup(OUTDIR,"sst.mean.ann.png","Annual")+"
TAS"+table_link_setup(OUTDIR,"tas.mean.djf.png","DJF")+""+table_link_setup(OUTDIR,"tas.mean.mam.png","MAM")+""+table_link_setup(OUTDIR,"tas.mean.jja.png","JJA")+""+table_link_setup(OUTDIR,"tas.mean.son.png","SON")+""+table_link_setup(OUTDIR,"tas.mean.ann.png","Annual")+"
PSL"+table_link_setup(OUTDIR,"psl.mean.djf.png","DJF")+""+table_link_setup(OUTDIR,"psl.mean.mam.png","MAM")+""+table_link_setup(OUTDIR,"psl.mean.jja.png","JJA")+""+table_link_setup(OUTDIR,"psl.mean.son.png","SON")+""+table_link_setup(OUTDIR,"psl.mean.ann.png","Annual")+"
PR"+table_link_setup(OUTDIR,"pr.mean.djf.png","DJF")+""+table_link_setup(OUTDIR,"pr.mean.mam.png","MAM")+""+table_link_setup(OUTDIR,"pr.mean.jja.png","JJA")+""+table_link_setup(OUTDIR,"pr.mean.son.png","SON")+""+table_link_setup(OUTDIR,"pr.mean.ann.png","Annual")+"
[PR]"+table_link_setup(OUTDIR,"pr.za.djf.png","DJF")+""+table_link_setup(OUTDIR,"pr.za.mam.png","MAM")+""+table_link_setup(OUTDIR,"pr.za.jja.png","JJA")+""+table_link_setup(OUTDIR,"pr.za.son.png","SON")+""+table_link_setup(OUTDIR,"pr.za.ann.png","Annual")+"
SND"+table_link_setup(OUTDIR,"snd.mean.djf.png","DJF")+""+table_link_setup(OUTDIR,"snd.mean.mam.png","MAM")+""+table_link_setup(OUTDIR,"snd.mean.jja.png","JJA")+""+table_link_setup(OUTDIR,"snd.mean.son.png","SON")+""+table_link_setup(OUTDIR,"snd.mean.ann.png","Annual")+"
SIC NH"+table_link_setup(OUTDIR,"aice.mean.nh.djf.png","DJF")+""+table_link_setup(OUTDIR,"aice.mean.nh.mam.png","MAM")+""+table_link_setup(OUTDIR,"aice.mean.nh.jja.png","JJA")+""+table_link_setup(OUTDIR,"aice.mean.nh.son.png","SON")+""+table_link_setup(OUTDIR,"aice.mean.nh.ann.png","Annual")+"
SIC SH"+table_link_setup(OUTDIR,"aice.mean.sh.djf.png","DJF")+""+table_link_setup(OUTDIR,"aice.mean.sh.mam.png","MAM")+""+table_link_setup(OUTDIR,"aice.mean.sh.jja.png","JJA")+""+table_link_setup(OUTDIR,"aice.mean.sh.son.png","SON")+""+table_link_setup(OUTDIR,"aice.mean.sh.ann.png","Annual")+"
" + + txt(39) = "" + txt(40) = "
" + txt(41) = "" + txt(42) = "" + txt(43) = "" + txt(44) = "" + txt(45) = "" + txt(46) = "" + txt(47) = "" + txt(48) = "" + txt(49) = "
Standard Deviations
SST"+table_link_setup(OUTDIR,"sst.stddev.djf.png","DJF")+""+table_link_setup(OUTDIR,"sst.stddev.mam.png","MAM")+""+table_link_setup(OUTDIR,"sst.stddev.jja.png","JJA")+""+table_link_setup(OUTDIR,"sst.stddev.son.png","SON")+""+table_link_setup(OUTDIR,"sst.stddev.ann.png","Annual")+"
TAS"+table_link_setup(OUTDIR,"tas.stddev.djf.png","DJF")+""+table_link_setup(OUTDIR,"tas.stddev.mam.png","MAM")+""+table_link_setup(OUTDIR,"tas.stddev.jja.png","JJA")+""+table_link_setup(OUTDIR,"tas.stddev.son.png","SON")+""+table_link_setup(OUTDIR,"tas.stddev.ann.png","Annual")+"
PSL"+table_link_setup(OUTDIR,"psl.stddev.djf.png","DJF")+""+table_link_setup(OUTDIR,"psl.stddev.mam.png","MAM")+""+table_link_setup(OUTDIR,"psl.stddev.jja.png","JJA")+""+table_link_setup(OUTDIR,"psl.stddev.son.png","SON")+""+table_link_setup(OUTDIR,"psl.stddev.ann.png","Annual")+"
PR"+table_link_setup(OUTDIR,"pr.stddev.djf.png","DJF")+""+table_link_setup(OUTDIR,"pr.stddev.mam.png","MAM")+""+table_link_setup(OUTDIR,"pr.stddev.jja.png","JJA")+""+table_link_setup(OUTDIR,"pr.stddev.son.png","SON")+""+table_link_setup(OUTDIR,"pr.stddev.ann.png","Annual")+"
SND"+table_link_setup(OUTDIR,"snd.stddev.djf.png","DJF")+""+table_link_setup(OUTDIR,"snd.stddev.mam.png","MAM")+""+table_link_setup(OUTDIR,"snd.stddev.jja.png","JJA")+""+table_link_setup(OUTDIR,"snd.stddev.son.png","SON")+""+table_link_setup(OUTDIR,"snd.stddev.ann.png","Annual")+"
SIC NH"+table_link_setup(OUTDIR,"aice.stddev.nh.djf.png","DJF")+""+table_link_setup(OUTDIR,"aice.stddev.nh.mam.png","MAM")+""+table_link_setup(OUTDIR,"aice.stddev.nh.jja.png","JJA")+""+table_link_setup(OUTDIR,"aice.stddev.nh.son.png","SON")+""+table_link_setup(OUTDIR,"aice.stddev.nh.ann.png","Annual")+"
SIC SH"+table_link_setup(OUTDIR,"aice.stddev.sh.djf.png","DJF")+""+table_link_setup(OUTDIR,"aice.stddev.sh.mam.png","MAM")+""+table_link_setup(OUTDIR,"aice.stddev.sh.jja.png","JJA")+""+table_link_setup(OUTDIR,"aice.stddev.sh.son.png","SON")+""+table_link_setup(OUTDIR,"aice.stddev.sh.ann.png","Annual")+"
" + + txt(69) = "" + txt(70) = "
" + txt(71) = "" + txt(72) = "" + txt(73) = "" + txt(74) = "" + txt(75) = "" + txt(76) = "" + txt(77) = "" + txt(78) = "" + + txt(90) = "" + txt(91) = "" + txt(92) = "" + txt(93) = "" + txt(94) = "" + txt(95) = "" + txt(96) = "" + txt(97) = "" + + txt(101) = "" + txt(102) = "" + txt(103) = "" + txt(104) = "" + txt(105) = "" + txt(106) = "" + txt(107) = "" + txt(108) = "" + txt(109) = "" + txt(110) = "" + txt(111) = "" + txt(112) = "" + txt(113) = "" + txt(114) = "
Coupled Modes of Variability
ENSOSpatial CompositesSST/TAS/PSL
"+table_link_setup(OUTDIR,"nino34.spatialcomp.jja0.png","JJA0")+" "+table_link_setup(OUTDIR,"nino34.spatialcomp.son0.png","SON0")+"
"+table_link_setup(OUTDIR,"nino34.spatialcomp.djf1.png","DJF+1")+" "+table_link_setup(OUTDIR,"nino34.spatialcomp.mam1.png","MAM+1")+"
PR
"+table_link_setup(OUTDIR,"nino34.spatialcomp.pr.jja0.png","JJA0")+" "+table_link_setup(OUTDIR,"nino34.spatialcomp.pr.son0.png","SON0")+"
"+table_link_setup(OUTDIR,"nino34.spatialcomp.pr.djf1.png","DJF+1")+" "+table_link_setup(OUTDIR,"nino34.spatialcomp.pr.mam1.png","MAM+1")+"
"+table_link_setup(OUTDIR,"nino34.hov.elnino.png","El Niño Hovmöller")+""+table_link_setup(OUTDIR,"nino34.hov.lanina.png","La Niña Hovmöller")+"
Niño3.4"+table_link_setup(OUTDIR,"nino34.timeseries.png","Timeseries")+""+table_link_setup(OUTDIR,"nino34.powspec.png","Power Spectra")+"
"+table_link_setup(OUTDIR,"nino34.monstddev.png","Monthly Std. Dev.")+""+table_link_setup(OUTDIR,"nino34.runstddev.png","Running Std. Dev.")+"
"+table_link_setup(OUTDIR,"nino34.autocor.png","Autocorrelation")+""+table_link_setup(OUTDIR,"nino34.wavelet.png","Wavelet Analysis")+"
PDORegr:  "+table_link_setup(OUTDIR,"pdo.png","SST")+" "+table_link_setup(OUTDIR,"pdo.tasreg.png","TAS")+" "+table_link_setup(OUTDIR,"pdo.prreg.png","PR")+""+table_link_setup(OUTDIR,"pdo.timeseries.png","Timeseries")+""+table_link_setup(OUTDIR,"pdo.powspec.png","Power Spectra")+"
IPORegr:  "+table_link_setup(OUTDIR,"ipo.png","SST")+" "+table_link_setup(OUTDIR,"ipo.tasreg.png","TAS")+" "+table_link_setup(OUTDIR,"ipo.prreg.png","PR")+""+table_link_setup(OUTDIR,"ipo.timeseries.png","Timeseries")+""+table_link_setup(OUTDIR,"ipo.powspec.png","Power Spectra")+"
AMORegr:  "+table_link_setup(OUTDIR,"amo.png","SST")+" "+table_link_setup(OUTDIR,"amo.tasreg.png","TAS")+" "+table_link_setup(OUTDIR,"amo.prreg.png","PR")+""+table_link_setup(OUTDIR,"amo.timeseries.png","Timeseries")+""+table_link_setup(OUTDIR,"amo.powspec.png","Power Spectra")+"
Regr LP:  "+table_link_setup(OUTDIR,"amo.lp.png","SST")+" "+table_link_setup(OUTDIR,"amo.lp.tasreg.png","TAS")+" "+table_link_setup(OUTDIR,"amo.lp.prreg.png","PR")+"
AMOC"+table_link_setup(OUTDIR,"amoc.mean.ann.png","Means")+""+table_link_setup(OUTDIR,"amoc.stddev.ann.png","Standard Deviations")+""+table_link_setup(OUTDIR,"amoc.ann.png","Patterns")+"
"+table_link_setup(OUTDIR,"amoc.timeseries.ann.png","Timeseries")+""+table_link_setup(OUTDIR,"amoc.sstreg.ann.png","SST Regressions")+""+table_link_setup(OUTDIR,"amoc.tasreg.ann.png","TAS Regressions")+"
"+table_link_setup(OUTDIR,"amoc.powspec.ann.png","Spectra")+""+table_link_setup(OUTDIR,"amoc_amo.leadlag.ann.png","AMO/AMOC Lag Correlations")+"
" + + + + txt(130) = "
" + txt(131) = "" + txt(132) = "" + txt(133) = "" + txt(134) = "" + txt(135) = "" + txt(136) = "" + txt(137) = "" + txt(138) = "" + txt(139) = "" + txt(140) = "" + txt(141) = "" + txt(142) = "" + txt(143) = "" + + txt(159) = "" + txt(160) = "" + txt(161) = "" + txt(162) = "" + txt(163) = "" + txt(164) = "" + txt(165) = "" + txt(166) = "" + txt(167) = "" + txt(168) = "" + txt(169) = "" + txt(170) = "" + txt(171) = "" + txt(172) = "" + + txt(189) = "" + txt(190) = "" + txt(191) = "" + txt(192) = "" + txt(193) = "" + txt(194) = "" + txt(195) = "" + txt(196) = "" + txt(197) = "" + txt(198) = "" + txt(199) = "" + txt(200) = "" + txt(201) = "" + txt(202) = "" + + txt(219) = "" + txt(220) = "" + txt(221) = "" + txt(222) = "" + txt(223) = "" + txt(224) = "" + txt(225) = "" + txt(226) = "" + txt(227) = "" + txt(228) = "" + txt(229) = "" + txt(230) = "" + txt(231) = "" + txt(232) = "" + + txt(249) = "" + txt(250) = "" + txt(251) = "" + txt(252) = "" + txt(253) = "" + txt(254) = "" + txt(255) = "" + txt(256) = "" + txt(257) = "" + txt(258) = "" + txt(259) = "" + txt(260) = "" + txt(261) = "" + txt(262) = "" + + txt(279) = "" + txt(280) = "" + txt(281) = "" + txt(282) = "" + txt(283) = "" + txt(284) = "" + txt(285) = "" + txt(286) = "" + txt(287) = "" + txt(288) = "" + txt(289) = "" + txt(290) = "" + txt(291) = "" + txt(292) = "" + + txt(309) = "" + txt(310) = "" + txt(311) = "" + txt(312) = "" + txt(313) = "" + txt(314) = "" + txt(315) = "" + txt(316) = "" + txt(317) = "" + txt(318) = "" + txt(319) = "" + txt(320) = "" + txt(321) = "" + txt(322) = "" + txt(323) = "
Atmospheric Modes of Variability
NAMPatterns"+table_link_setup(OUTDIR,"nam.djf.png","DJF")+""+table_link_setup(OUTDIR,"nam.mam.png","MAM")+""+table_link_setup(OUTDIR,"nam.jja.png","JJA")+""+table_link_setup(OUTDIR,"nam.son.png","SON")+""+table_link_setup(OUTDIR,"nam.ann.png","Annual")+""+table_link_setup(OUTDIR,"nam.mon.png","Monthly")+"
Timeseries"+table_link_setup(OUTDIR,"nam.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"nam.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"nam.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"nam.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"nam.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"nam.timeseries.mon.png","Monthly")+"
SST/TAS Regressions"+table_link_setup(OUTDIR,"nam.tempreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"nam.tempreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"nam.tempreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"nam.tempreg.son.png","SON")+""+table_link_setup(OUTDIR,"nam.tempreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"nam.tempreg.mon.png","Monthly")+"
PR Regressions"+table_link_setup(OUTDIR,"nam.prreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"nam.prreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"nam.prreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"nam.prreg.son.png","SON")+""+table_link_setup(OUTDIR,"nam.prreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"nam.prreg.mon.png","Monthly")+"
SAMPatterns"+table_link_setup(OUTDIR,"sam.djf.png","DJF")+""+table_link_setup(OUTDIR,"sam.mam.png","MAM")+""+table_link_setup(OUTDIR,"sam.jja.png","JJA")+""+table_link_setup(OUTDIR,"sam.son.png","SON")+""+table_link_setup(OUTDIR,"sam.ann.png","Annual")+""+table_link_setup(OUTDIR,"sam.mon.png","Monthly")+"
Timeseries"+table_link_setup(OUTDIR,"sam.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"sam.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"sam.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"sam.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"sam.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"sam.timeseries.mon.png","Monthly")+"
SST/TAS Regressions"+table_link_setup(OUTDIR,"sam.tempreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"sam.tempreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"sam.tempreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"sam.tempreg.son.png","SON")+""+table_link_setup(OUTDIR,"sam.tempreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"sam.tempreg.mon.png","Monthly")+"
PR Regressions"+table_link_setup(OUTDIR,"sam.prreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"sam.prreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"sam.prreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"sam.prreg.son.png","SON")+""+table_link_setup(OUTDIR,"sam.prreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"sam.prreg.mon.png","Monthly")+"
NAOPatterns"+table_link_setup(OUTDIR,"nao.djf.png","DJF")+""+table_link_setup(OUTDIR,"nao.mam.png","MAM")+""+table_link_setup(OUTDIR,"nao.jja.png","JJA")+""+table_link_setup(OUTDIR,"nao.son.png","SON")+""+table_link_setup(OUTDIR,"nao.ann.png","Annual")+""+table_link_setup(OUTDIR,"nao.mon.png","Monthly")+"
Timeseries"+table_link_setup(OUTDIR,"nao.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"nao.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"nao.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"nao.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"nao.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"nao.timeseries.mon.png","Monthly")+"
SST/TAS Regressions"+table_link_setup(OUTDIR,"nao.tempreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"nao.tempreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"nao.tempreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"nao.tempreg.son.png","SON")+""+table_link_setup(OUTDIR,"nao.tempreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"nao.tempreg.mon.png","Monthly")+"
PR Regressions"+table_link_setup(OUTDIR,"nao.prreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"nao.prreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"nao.prreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"nao.prreg.son.png","SON")+""+table_link_setup(OUTDIR,"nao.prreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"nao.prreg.mon.png","Monthly")+"
PNAPatterns"+table_link_setup(OUTDIR,"pna.djf.png","DJF")+""+table_link_setup(OUTDIR,"pna.mam.png","MAM")+""+table_link_setup(OUTDIR,"pna.jja.png","JJA")+""+table_link_setup(OUTDIR,"pna.son.png","SON")+""+table_link_setup(OUTDIR,"pna.ann.png","Annual")+""+table_link_setup(OUTDIR,"pna.mon.png","Monthly")+"
Timeseries"+table_link_setup(OUTDIR,"pna.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"pna.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"pna.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"pna.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"pna.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"pna.timeseries.mon.png","Monthly")+"
SST/TAS Regressions"+table_link_setup(OUTDIR,"pna.tempreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"pna.tempreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"pna.tempreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"pna.tempreg.son.png","SON")+""+table_link_setup(OUTDIR,"pna.tempreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"pna.tempreg.mon.png","Monthly")+"
PR Regressions"+table_link_setup(OUTDIR,"pna.prreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"pna.prreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"pna.prreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"pna.prreg.son.png","SON")+""+table_link_setup(OUTDIR,"pna.prreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"pna.prreg.mon.png","Monthly")+"
NPOPatterns"+table_link_setup(OUTDIR,"npo.djf.png","DJF")+""+table_link_setup(OUTDIR,"npo.mam.png","MAM")+""+table_link_setup(OUTDIR,"npo.jja.png","JJA")+""+table_link_setup(OUTDIR,"npo.son.png","SON")+""+table_link_setup(OUTDIR,"npo.ann.png","Annual")+""+table_link_setup(OUTDIR,"npo.mon.png","Monthly")+"
Timeseries"+table_link_setup(OUTDIR,"npo.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"npo.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"npo.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"npo.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"npo.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"npo.timeseries.mon.png","Monthly")+"
SST/TAS Regressions"+table_link_setup(OUTDIR,"npo.tempreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"npo.tempreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"npo.tempreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"npo.tempreg.son.png","SON")+""+table_link_setup(OUTDIR,"npo.tempreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"npo.tempreg.mon.png","Monthly")+"
PR Regressions"+table_link_setup(OUTDIR,"npo.prreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"npo.prreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"npo.prreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"npo.prreg.son.png","SON")+""+table_link_setup(OUTDIR,"npo.prreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"npo.prreg.mon.png","Monthly")+"
PSA1Patterns"+table_link_setup(OUTDIR,"psa1.djf.png","DJF")+""+table_link_setup(OUTDIR,"psa1.mam.png","MAM")+""+table_link_setup(OUTDIR,"psa1.jja.png","JJA")+""+table_link_setup(OUTDIR,"psa1.son.png","SON")+""+table_link_setup(OUTDIR,"psa1.ann.png","Annual")+""+table_link_setup(OUTDIR,"psa1.mon.png","Monthly")+"
Timeseries"+table_link_setup(OUTDIR,"psa1.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"psa1.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"psa1.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"psa1.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"psa1.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"psa1.timeseries.mon.png","Monthly")+"
SST/TAS Regressions"+table_link_setup(OUTDIR,"psa1.tempreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"psa1.tempreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"psa1.tempreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"psa1.tempreg.son.png","SON")+""+table_link_setup(OUTDIR,"psa1.tempreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"psa1.tempreg.mon.png","Monthly")+"
PR Regressions"+table_link_setup(OUTDIR,"psa1.prreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"psa1.prreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"psa1.prreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"psa1.prreg.son.png","SON")+""+table_link_setup(OUTDIR,"psa1.prreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"psa1.prreg.mon.png","Monthly")+"
PSA2Patterns"+table_link_setup(OUTDIR,"psa2.djf.png","DJF")+""+table_link_setup(OUTDIR,"psa2.mam.png","MAM")+""+table_link_setup(OUTDIR,"psa2.jja.png","JJA")+""+table_link_setup(OUTDIR,"psa2.son.png","SON")+""+table_link_setup(OUTDIR,"psa2.ann.png","Annual")+""+table_link_setup(OUTDIR,"psa2.mon.png","Monthly")+"
Timeseries"+table_link_setup(OUTDIR,"psa2.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"psa2.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"psa2.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"psa2.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"psa2.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"psa2.timeseries.mon.png","Monthly")+"
SST/TAS Regressions"+table_link_setup(OUTDIR,"psa2.tempreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"psa2.tempreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"psa2.tempreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"psa2.tempreg.son.png","SON")+""+table_link_setup(OUTDIR,"psa2.tempreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"psa2.tempreg.mon.png","Monthly")+"
PR Regressions"+table_link_setup(OUTDIR,"psa2.prreg.djf.png","DJF")+""+table_link_setup(OUTDIR,"psa2.prreg.mam.png","MAM")+""+table_link_setup(OUTDIR,"psa2.prreg.jja.png","JJA")+""+table_link_setup(OUTDIR,"psa2.prreg.son.png","SON")+""+table_link_setup(OUTDIR,"psa2.prreg.ann.png","Annual")+""+table_link_setup(OUTDIR,"psa2.prreg.mon.png","Monthly")+"
" + + txt(330) = "" + txt(331) = "
" + txt(332) = "" + txt(333) = "" + txt(334) = "" + txt(335) = "" + txt(336) = "" + txt(337) = "" + txt(338) = "" + txt(339) = "" + txt(341) = "" + txt(342) = "" + txt(343) = "" + txt(344) = "" + txt(345) = "" + txt(346) = "" + txt(347) = "" + txt(348) = "" + txt(349) = "" + txt(351) = "" + txt(352) = "
Ice Extent Timeseries
SIC NH"+table_link_setup(OUTDIR,"aice.extent.nh.djf.png","DJF")+""+table_link_setup(OUTDIR,"aice.extent.nh.mam.png","MAM")+""+table_link_setup(OUTDIR,"aice.extent.nh.jja.png","JJA")+""+table_link_setup(OUTDIR,"aice.extent.nh.son.png","SON")+""+table_link_setup(OUTDIR,"aice.extent.nh.ann.png","Annual")+""+table_link_setup(OUTDIR,"aice.extent.nh.mon.png","Monthly")+"
"+table_link_setup(OUTDIR,"aice.extent.nh.feb.png","Feb")+"  "+table_link_setup(OUTDIR,"aice.extent.nh.mar.png","Mar")+"  "+table_link_setup(OUTDIR,"aice.extent.nh.sep.png","Sep")+""+table_link_setup(OUTDIR,"aice.extent.anom.nh.mon.png","Monthly Anomalies")+""+table_link_setup(OUTDIR,"aice.extent.nh.climo.png","Climatology")+"
SIC SH"+table_link_setup(OUTDIR,"aice.extent.sh.djf.png","DJF")+""+table_link_setup(OUTDIR,"aice.extent.sh.mam.png","MAM")+""+table_link_setup(OUTDIR,"aice.extent.sh.jja.png","JJA")+""+table_link_setup(OUTDIR,"aice.extent.sh.son.png","SON")+""+table_link_setup(OUTDIR,"aice.extent.sh.ann.png","Annual")+""+table_link_setup(OUTDIR,"aice.extent.sh.mon.png","Monthly")+"
"+table_link_setup(OUTDIR,"aice.extent.sh.feb.png","Feb")+"  "+table_link_setup(OUTDIR,"aice.extent.sh.mar.png","Mar")+"  "+table_link_setup(OUTDIR,"aice.extent.sh.sep.png","Sep")+""+table_link_setup(OUTDIR,"aice.extent.anom.sh.mon.png","Monthly Anomalies")+""+table_link_setup(OUTDIR,"aice.extent.sh.climo.png","Climatology")+"
" + + + txt(361) = "" + txt(362) = "
" + txt(363) = "" + txt(364) = "" + txt(365) = "" + txt(366) = "" + txt(367) = "" + txt(368) = "" + txt(369) = "" + txt(370) = "" + txt(371) = "" + txt(372) = "" + txt(373) = "" + txt(374) = "" + txt(375) = "" + txt(376) = "" + txt(377) = "" + txt(378) = "" + txt(379) = "" + txt(380) = "" + txt(381) = "" + txt(382) = "" + txt(383) = "" + txt(384) = "" + txt(385) = "" + txt(386) = "" + txt(387) = "" + txt(388) = "" + txt(389) = "" + txt(390) = "" + txt(391) = "" + txt(392) = "" + txt(393) = "" + txt(394) = "" + txt(395) = "" + txt(396) = "" + txt(397) = "" + txt(398) = "" + txt(399) = "" + txt(400) = "" + txt(401) = "" + txt(402) = "" + txt(403) = "" + txt(404) = "" + txt(405) = "
Global Trend Maps
SST"+table_link_setup(OUTDIR,"sst.trends.djf.png","DJF")+""+table_link_setup(OUTDIR,"sst.trends.mam.png","MAM")+""+table_link_setup(OUTDIR,"sst.trends.jja.png","JJA")+""+table_link_setup(OUTDIR,"sst.trends.son.png","SON")+""+table_link_setup(OUTDIR,"sst.trends.ann.png","Annual")+""+table_link_setup(OUTDIR,"sst.trends.mon.png","Monthly")+"
TAS"+table_link_setup(OUTDIR,"tas.trends.djf.png","DJF")+""+table_link_setup(OUTDIR,"tas.trends.mam.png","MAM")+""+table_link_setup(OUTDIR,"tas.trends.jja.png","JJA")+""+table_link_setup(OUTDIR,"tas.trends.son.png","SON")+""+table_link_setup(OUTDIR,"tas.trends.ann.png","Annual")+""+table_link_setup(OUTDIR,"tas.trends.mon.png","Monthly")+"
PSL"+table_link_setup(OUTDIR,"psl.trends.djf.png","DJF")+""+table_link_setup(OUTDIR,"psl.trends.mam.png","MAM")+""+table_link_setup(OUTDIR,"psl.trends.jja.png","JJA")+""+table_link_setup(OUTDIR,"psl.trends.son.png","SON")+""+table_link_setup(OUTDIR,"psl.trends.ann.png","Annual")+""+table_link_setup(OUTDIR,"psl.trends.mon.png","Monthly")+"
PR"+table_link_setup(OUTDIR,"pr.trends.djf.png","DJF")+""+table_link_setup(OUTDIR,"pr.trends.mam.png","MAM")+""+table_link_setup(OUTDIR,"pr.trends.jja.png","JJA")+""+table_link_setup(OUTDIR,"pr.trends.son.png","SON")+""+table_link_setup(OUTDIR,"pr.trends.ann.png","Annual")+""+table_link_setup(OUTDIR,"pr.trends.mon.png","Monthly")+"
SND"+table_link_setup(OUTDIR,"snd.trends.djf.png","DJF")+""+table_link_setup(OUTDIR,"snd.trends.mam.png","MAM")+""+table_link_setup(OUTDIR,"snd.trends.jja.png","JJA")+""+table_link_setup(OUTDIR,"snd.trends.son.png","SON")+""+table_link_setup(OUTDIR,"snd.trends.ann.png","Annual")+""+table_link_setup(OUTDIR,"snd.trends.mon.png","Monthly")+"
SIC NH"+table_link_setup(OUTDIR,"aice.trends.nh.djf.png","DJF")+""+table_link_setup(OUTDIR,"aice.trends.nh.mam.png","MAM")+""+table_link_setup(OUTDIR,"aice.trends.nh.jja.png","JJA")+""+table_link_setup(OUTDIR,"aice.trends.nh.son.png","SON")+""+table_link_setup(OUTDIR,"aice.trends.nh.ann.png","Annual")+""+table_link_setup(OUTDIR,"aice.trends.nh.mon.png","Monthly")+"
SIC SH"+table_link_setup(OUTDIR,"aice.trends.sh.djf.png","DJF")+""+table_link_setup(OUTDIR,"aice.trends.sh.mam.png","MAM")+""+table_link_setup(OUTDIR,"aice.trends.sh.jja.png","JJA")+""+table_link_setup(OUTDIR,"aice.trends.sh.son.png","SON")+""+table_link_setup(OUTDIR,"aice.trends.sh.ann.png","Annual")+""+table_link_setup(OUTDIR,"aice.trends.sh.mon.png","Monthly")+"
" + + txt(429) = "" + txt(430) = "
" + txt(431) = "" + txt(432) = "" + txt(433) = "" + txt(434) = "" + txt(435) = "" + txt(436) = "" + txt(437) = "" + txt(438) = "" + txt(439) = "" + txt(440) = "" + txt(441) = "" + txt(442) = "" + txt(443) = "" + txt(444) = "" + txt(445) = "" + txt(446) = "" + txt(447) = "" + txt(448) = "" + txt(449) = "" + txt(450) = "
Globally-Averaged Timeseries
SST"+table_link_setup(OUTDIR,"sst.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"sst.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"sst.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"sst.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"sst.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"sst.timeseries.mon.png","Monthly")+"
TAS"+table_link_setup(OUTDIR,"tas.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"tas.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"tas.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"tas.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"tas.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"tas.timeseries.mon.png","Monthly")+"
PR"+table_link_setup(OUTDIR,"pr.timeseries.djf.png","DJF")+""+table_link_setup(OUTDIR,"pr.timeseries.mam.png","MAM")+""+table_link_setup(OUTDIR,"pr.timeseries.jja.png","JJA")+""+table_link_setup(OUTDIR,"pr.timeseries.son.png","SON")+""+table_link_setup(OUTDIR,"pr.timeseries.ann.png","Annual")+""+table_link_setup(OUTDIR,"pr.timeseries.mon.png","Monthly")+"
" + + txt(574) = "" + txt(575) = "
" + txt(576) = "" + txt(577) = "" + txt(578) = "" + txt(579) = "" + txt(580) = "" + txt(581) = "" + txt(582) = "" + txt(583) = "" + txt(584) = "" + txt(585) = "" + txt(586) = "" + txt(587) = "
Running Trends of Globally-Averaged Monthly Timeseries
SST"+table_link_setup(OUTDIR,"sst.8yr_runtrend.mon.png","8yr")+""+table_link_setup(OUTDIR,"sst.10yr_runtrend.mon.png","10yr")+""+table_link_setup(OUTDIR,"sst.12yr_runtrend.mon.png","12yr")+""+table_link_setup(OUTDIR,"sst.14yr_runtrend.mon.png","14yr")+""+table_link_setup(OUTDIR,"sst.16yr_runtrend.mon.png","16yr")+"
TAS"+table_link_setup(OUTDIR,"tas.8yr_runtrend.mon.png","8yr")+""+table_link_setup(OUTDIR,"tas.10yr_runtrend.mon.png","10yr")+""+table_link_setup(OUTDIR,"tas.12yr_runtrend.mon.png","12yr")+""+table_link_setup(OUTDIR,"tas.14yr_runtrend.mon.png","14yr")+""+table_link_setup(OUTDIR,"tas.16yr_runtrend.mon.png","16yr")+"
" + + txt(609) = "" + txt(610) = "
" + txt(611) = "" + txt(612) = "" + txt(613) = "" + txt(614) = "" + txt(615) = "" + txt(616) = "" + txt(617) = "" + txt(618) = "" + txt(619) = "" + txt(620) = "" + txt(621) = "" + txt(622) = "" + txt(623) = "
Additional Indices
"+table_link_setup(OUTDIR,"amm.timeseries.png","Atlantic Meridional Mode")+""+table_link_setup(OUTDIR,"atl3.timeseries.png","Atlantic Niño (ATL3)")+""+table_link_setup(OUTDIR,"tna.timeseries.png","Tropical North Atlantic SST")+""+table_link_setup(OUTDIR,"tsa.timeseries.png","Tropical South Atlantic SST")+"
"+table_link_setup(OUTDIR,"nino12.timeseries.png","niño1+2")+""+table_link_setup(OUTDIR,"nino3.timeseries.png","niño3")+""+table_link_setup(OUTDIR,"nino4.timeseries.png","niño4")+""+table_link_setup(OUTDIR,"npi.timeseries.ndjfm.png","North Pacific PSL")+"
"+table_link_setup(OUTDIR,"iod.timeseries.png","Indian Ocean SST Dipole")+""+table_link_setup(OUTDIR,"tio.timeseries.png","Tropical Indian Ocean SST")+""+table_link_setup(OUTDIR,"socn.timeseries.png","Southern Ocean SST")+"
" + +; if (OT.eq."True") then +; txt(265) = "

Output timeseries written to "+OUTDIR+"

" +; else + txt(635) = "
" +; end if + txt(636) = "" + + tt = ind(.not.ismissing(txt)) + txt2 = txt(:tt(dimsizes(tt)-1)+1) + tt2 = ind(.not.ismissing(txt2)) + txt3 = txt2(tt2) + asciiwrite(OUTDIR+"index.html",txt3) + delete([/txt,tt,tt2,txt2,txt3,quote,names,na,nsim_noobs,OT/]) +;---------------------------------------------------------------------------- +;-- Create calculation description webpage +;---------------------------------------------------------------------------- + txt = new(200,"string") + quote = str_get_dq() + + txt(0) = "Climate Variability Diagnostics Package" + txt(11) = "+quote+" + txt(12) = "" + txt(13) = "
Back to Diagnostics Plots

Methodology and Definitions


" + + txt(15) = "

General Notes

" + txt(16) = "" + txt(17) = "
  • TS is surface ("+quote+"skin"+quote+") temperature and is used in lieu of sea surface temperatures (SSTs), TAS is 2m air temperature (CESM equivalent = TREFHT)." + txt(18) = "
  • PR is total precipitation (CESM equivalent = PRECC+PRECL), PSL is sea level pressure, SND is snow depth (SNOWDP), and SIC is ice concentration (aice)." + txt(19) = "
  • The annual cycle is removed prior to every calculation by subtracting the long-term monthly means. Exception: The annual cycle is not removed for mean spatial maps." + txt(20) = "
  • Area-averages are always based on cosine of latitude weighting." + txt(21) = "
  • The following calculations use linearly detrended data: standard deviations, nino3.4 spectra, and ENSO spatial composites." + txt(22) = "
  • For visual clarity, the Y-axis may differ amongst individual panels on a particular plot." + txt(23) = "
  • Climatological Zonal Averages: Climatological means are zonally averaged over the globe." + txt(24) = "
  • Power Spectra: The best-fit first-order Markov red noise spectrum (red curve) and its 95% (blue curve) and 99% (green curve) confidence bounds are shown on each panel. Top X-axis shows the period (in years), and the bottom X-axis shows the frequency (cycles/mo). If calculated, the observational spectrum is overlaid in gray on each model spectrum. The spectra are displayed in variance-conserving form." + txt(25) = "
  • Wavelet Analysis: A wavelet transform is computed using a Morlet wavelet with a wavenumber of 6. Areas significant at the 95% level based on a chi-square test are stippled and the “cone of influence” is hatched. See Torrence, C. and G. P. Compo, 1998: A Practical Guide to Wavelet Analysis. Bull. Amer. Meteor. Soc., 79, 61-78. doi: http://dx.doi.org/10.1175/1520-0477(1998)079<0061:APGTWA>2.0.CO;2." + txt(26) = "
  • Running Trends: N-year running trends are computed by calculating the linear trend over the N-year interval beginning at each successive timestep. For instance, for a global timeseries that runs from 1970-2012, the 8yr running trend value for January 1970 is the linear trend during January 1970 - December 1977, and the value for January 2005 is the linear trend during January 2005 - December 2012." + txt(27) = "
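
A minimal sketch of the running-trend definition above (illustrative only, not CVDP code; numpy and the variable names are assumptions):

```python
# Minimal sketch: N-year running trends of a monthly timeseries.
import numpy as np

def running_trends(series, nyears):
    window = nyears * 12
    trends = np.full(series.size, np.nan)
    for start in range(series.size - window + 1):
        chunk = series[start:start + window]
        slope = np.polyfit(np.arange(window), chunk, 1)[0]  # trend per month
        trends[start] = slope * window  # total change over the N-year window
    return trends

series = np.random.randn(43 * 12)   # hypothetical monthly values for 1970-2012
trend8 = running_trends(series, 8)  # value at index 0 = Jan 1970 - Dec 1977 trend
```
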
  • Running Standard Deviations: 30-year running standard deviations are computed by calculating the standard deviation centered over the 30 year interval at each successive timestep, with a minimum of 35 years of data required. For example, for a timeseries that runs from 1920-2010, the 30yr running standard deviation value for January 1935 is calculated over the period January 1920-December 1949. The right subtitle shows the minimum / mean / maximum of the computed timeseries." + txt(28) = "
  • Metrics Tables: Area-weighted pattern correlations and rms differences are calculated between observations and each model simulation (regridded to match the observational grid) for 11 climate metrics. The Total Score column shows the average of the 11 pattern correlations (Z-transformed) and rms differences. The following domains are used to compute the pattern correlations and rms differences: global for means, standard deviations, ENSO, AMO, and PDO; the entire longitude/temporal range shown for El Niño and La Niña Hovmöllers; and the entire domain shown for NAM (20:90°N) and SAM (20:90°S)." + txt(29) = "
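
A minimal sketch of an area-weighted pattern correlation and rms difference as described above (illustrative Python under assumed variable names; the CVDP itself computes these in NCL):

```python
# Minimal sketch: cos(lat)-weighted pattern correlation and rms difference
# between a model field and observations on a shared lat/lon grid.
import numpy as np

def pattern_stats(model, obs, lats):
    w = np.cos(np.deg2rad(lats))[:, None] * np.ones_like(obs)  # area weights
    w = w / w.sum()
    mm, om = (model * w).sum(), (obs * w).sum()                # weighted means
    cov = (w * (model - mm) * (obs - om)).sum()
    corr = cov / np.sqrt((w * (model - mm)**2).sum() *
                         (w * (obs - om)**2).sum())
    rms = np.sqrt((w * (model - obs)**2).sum())
    return corr, rms

lats = np.linspace(-90, 90, 73)
obs = np.random.randn(73, 144)                 # hypothetical fields
model = obs + 0.3 * np.random.randn(73, 144)
print(pattern_stats(model, obs, lats))
```
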
  • EOF significance: If an eigenvalue is significantly separated from neighboring values, a star is appended to the percent variance explained on the plot. Significance is calculated following North et al. (MWR, 1982)." + txt(30) = "
  • Ice Extent: Any grid cell defined as having a value at or above 15% is assumed to be 100% ice covered. The area of these grid cells is summed to create ice extent. For data sets with a northern hemisphere pole hole the CVDP looks for an attribute named pole_hole_area that is attached to the AREA variable in the netCDF file. The format of pole_hole_area should be start month (YYYYMM), end month (YYYYMM), area value, (repeat as necessary). If pole_hole_area is detected the CVDP will add the area value to the calculated northern hemisphere ice extent timeseries from the specified start month to the specified end month, and a * is placed after the dataset name in the ice extent plots." + txt(31) = "
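
The pole_hole_area convention described above can be sketched as follows (hypothetical attribute values and helper name; not CVDP code):

```python
# Sketch: pole_hole_area is a flat sequence of
# (start YYYYMM, end YYYYMM, area) triplets attached to the AREA variable.
pole_hole_area = [187901, 198712, 1.19e6, 198801, 201312, 0.31e6]

def pole_hole_correction(yyyymm):
    """Return the area to add to the NH ice extent for a given month."""
    for start, end, area in zip(*[iter(pole_hole_area)] * 3):
        if start <= yyyymm <= end:
            return area
    return 0.0

print(pole_hole_correction(199005))  # 0.31e6, falls in the second interval
```
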
  • For more information on observational datasets and climate indices, see the Climate Data Guide.
" + txt(32) = "
" + + txt(40) = "

Modes of Variability

" + txt(41) = "" + txt(42) = "" + txt(43) = "" + txt(44) = "" + txt(45) = "" + + txt(46) = "" + txt(47) = "" + txt(48) = "" + txt(49) = "" + + txt(50) = "" + txt(51) = "" + txt(52) = "" + txt(53) = "" + + txt(55) = "" + txt(56) = "" + txt(57) = "" + txt(58) = "" + txt(59) = "" + txt(60) = "" + txt(61) = "" + txt(62) = "" + txt(63) = "" + txt(64) = "" + txt(65) = "" + txt(66) = "" + txt(67) = "" + txt(68) = "" + txt(69) = "
ENSO Spatial CompositesThe normalized December nino3.4 timeseries is used to composite all years greater than 1 standard deviation (El Niño) and all years less than -1 standard deviation (La Niña). The number of El Niño/La Niña events composited is shown in the right subtitle. The December nino3.4 timeseries is based on the December values of the monthly nino3.4 time series smoothed with a 3-point binomial filter. TS/TAS/PSL composites: Temperatures are color shaded and in units of Celsius. Sea level pressure is contoured from -16 to 16hPa by 2hPa; negative contours are dashed. PR composites: Precipitation is color shaded and is in units of mm/day. See Deser, C., A. S. Phillips, R. A. Tomas, Y. Okumura, M. A. Alexander, A. Capotondi, J. D. Scott, Y. -O. Kwon, and M. Ohba, 2012: ENSO and Pacific Decadal Variability in Community Climate System Model Version 4. J. Climate, 25, 2622-2651, doi: 10.1175/JCLI-D-11-00301.1.
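
The event selection in this definition amounts to a one-standard-deviation threshold on the normalized December index; a minimal sketch (hypothetical variable names, binomial smoothing and the field compositing omitted):

```python
# Sketch: select El Niño / La Niña years from a December nino3.4 series.
import numpy as np

dec_nino34 = np.random.randn(50)          # hypothetical December values, one per year
index = (dec_nino34 - dec_nino34.mean()) / dec_nino34.std()
elnino_years = np.where(index > 1.0)[0]   # years composited as El Niño
lanina_years = np.where(index < -1.0)[0]  # years composited as La Niña
```
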
ENSO HovmöllersA 1-2-1 running average is applied to the monthly nino3.4 timeseries, and then December values are selected and normalized. Meridional averages are calculated by averaging from 3°S:3°N, and spatial patterns are formed by compositing -1yr->+2yrs where the nino3.4 (1-2-1 weighted) December index is greater than 1 (El Niño) and those years where the index is less than -1 (La Niña). See Deser, C., A. S. Phillips, R. A. Tomas, Y. Okumura, M. A. Alexander, A. Capotondi, J. D. Scott, Y. -O. Kwon, and M. Ohba, 2012: ENSO and Pacific Decadal Variability in Community Climate System Model Version 4. J. Climate, 25, 2622-2651, doi: 10.1175/JCLI-D-11-00301.1.
PDO (Pacific Decadal Oscillation)Monthly index timeseries defined as the leading principal component (PC) of North Pacific (20:70°N, 110°E:100°W) area-weighted SST* anomalies, where SST* denotes that the global mean SST anomaly has been removed at each timestep. Pattern created by regressing SST anomalies (in Celsius) at each grid box onto the normalized PC timeseries. Low pass-filtered timeseries (black curve) is based on a 61-month running mean. See Deser, C., M. A. Alexander, S. -P. Xie, and A. S. Phillips, 2010: Sea surface temperature variability: patterns and mechanisms. Ann. Rev. Mar. Sci., 2010.2, 115-143, doi:10.1146/annurev-marine-120408-151453. Also see Mantua, N. J., S. R. Hare, Y. Zhang, J. M. Wallace, and R. Francis, 1997: A Pacific interdecadal climate oscillation with impacts on salmon production. Bull. Amer. Met. Soc., 1069-1079. For more information on the PDO see the Climate Data Guide.
IPO (Interdecadal Pacific Oscillation)Monthly index timeseries defined as the leading principal component (PC) of 13yr low pass filtered Pacific (40°S:60°N, 110°E:70°W) area-weighted SST* anomalies, where SST* denotes that the global mean SST anomaly has been removed at each timestep. Pattern created by regressing SST anomalies (in Celsius) at each grid box onto the normalized PC timeseries. At least 40 years of data are required for the IPO to be calculated. See Meehl, G.A. and A. Hu, 2007: Megadroughts in the Indian Monsoon Region and Southwest North America and a Mechanism for Associated Multidecadal Pacific Sea Surface Temperature Anomalies, J. Clim, 19, 1605-1623, doi: 10.1175/JCLI3675.1.
AMO (Atlantic Multidecadal Oscillation)Monthly index timeseries defined as area-weighted SST* anomalies averaged over the North Atlantic (0:60°N, 80°W:0°E), where SST* denotes that the global (60°S:60°N) mean SST anomaly has been removed at each timestep. Pattern created by regressing SST* anomalies onto the index timeseries and smoothing with a 9-point spatial filter. Low pass-filtered timeseries (black curve superimposed on the monthly timeseries) is based on a 61-month running mean. Based on Trenberth, K. E., and D. J. Shea, 2006: Atlantic hurricanes and natural variability in 2005, Geophys. Res. Lett., 33, L12704, doi:10.1029/2006GL026894. Low-pass filtered regression maps ("+quote+"Regr LP"+quote+") use a 10-year running mean on both the index timeseries and the field being regressed. For more information on the AMO see the Climate Data Guide.
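
A minimal sketch of the AMO index arithmetic (North Atlantic mean SST anomaly minus the global 60°S:60°N mean, cos(lat)-weighted). The grid, slices, and `sst_anom` array are assumptions for a hypothetical 1° grid, not CVDP code:

```python
# Sketch: AMO index as a difference of area-weighted SST anomaly averages.
import numpy as np

def area_mean(field, lats, lat_slice, lon_slice):
    w = np.cos(np.deg2rad(lats))[lat_slice]
    sub = field[:, lat_slice, lon_slice]          # (time, lat, lon)
    return (sub * w[None, :, None]).sum(axis=(1, 2)) / (w.sum() * sub.shape[2])

lats = np.linspace(-89.5, 89.5, 180)
sst_anom = np.random.randn(1200, 180, 360)        # hypothetical monthly anomalies
natl = area_mean(sst_anom, lats, slice(90, 150), slice(280, 360))  # 0:60N, 80W:0E
glob = area_mean(sst_anom, lats, slice(30, 150), slice(0, 360))    # 60S:60N
amo = natl - glob
```
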
AMOCThe Atlantic Meridional Overturning Circulation (AMOC) is defined as the oceanic meridional mass transport (Sv) in the Atlantic sector. To compute AMOC, we follow the methods of Danabasoglu et al. (2012). Here we use annual averages of the AMOC, weighted by the cosine of the latitude and vertical extent of each model layer. Areas in which AMOC variance is low (standard deviation < 1e-6 Sv) are set to missing values for clarity. The leading EOF and associated principal component (PC) timeseries are computed over the Atlantic basin from 33°S to 90°N. The AMOC patterns are created by regressing the AMOC anomalies (in Sv) onto the normalized PC timeseries. The SST/TAS patterns associated with AMOC variations are created by regressing TAS/SST anomalies (in Celsius) at each grid box over the globe onto the normalized AMOC PC timeseries. A 15-point low-pass Lanczos filter is applied to the AMOC PC (and AMO) timeseries prior to computing lead/lag correlations, with a minimum of 90 years of data required. The data are not detrended (unlike Danabasoglu et al., 2012). See Danabasoglu, G., S. G. Yeager, Y. -O. Kwon, J. J. Tribbia, A. S. Phillips, and J. W. Hurrell, 2012. Variability of the Atlantic Meridional Overturning Circulation in CCSM4. J. Climate, 25, 5153-5172, doi: 10.1175/JCLI-D-11-00463.1.

For CCSM4 and CESM1, the MOC variable is read in, the Eulerian Mean, Eddy-Induced and Submeso components are summed, and the Atlantic Ocean + Mediterranean Sea + Labrador Sea + GIN Sea + Arctic Ocean + Hudson Bay transport region is selected. For CCSM2 and CCSM3 the same transport region is selected but only the Eulerian Mean component is used as that is all that is available. For CMIP5 data the msftmyz variable is read in and the atlantic_arctic_ocean basin is used. For CMIP3 data, the stfmmc variable is read in and the atlantic_ocean geo_region is used.
NAM (Northern Annular Mode)Seasonal/annual PSL averages are formed, square root of the cosine of the latitude weighting is applied, and then the leading EOF and associated principal component (PC) timeseries are computed over 20:90°N, 0:360°E. Pattern created by regressing global PSL anomalies (in hPa) onto normalized PC timeseries. Based on Hurrell, J. W., and C. Deser, 2009: North Atlantic climate variability: The role of the North Atlantic Oscillation. J. Mar. Syst., 78, 28-41, doi:10.1016/j.jmarsys.2008.11.026. Also see Thompson, D. W. J., and J. M. Wallace, 2000: Annular modes in the extratropical circulation. Part I: Month-to-month variability. J. Climate, 13, 1000-1016.
NAO (North Atlantic Oscillation)Seasonal/annual PSL averages are formed, square root of the cosine of latitude weighting is applied, and then the leading EOF and associated principal component (PC) timeseries are computed over 20:80°N, 90°W:40°E. Pattern created by regressing global PSL anomalies (in hPa) onto normalized PC timeseries. Based on Hurrell, J. W. and C. Deser, 2009: North Atlantic climate variability: The role of the North Atlantic Oscillation. J. Mar. Syst., 78, 28-41, doi:10.1016/j.jmarsys.2008.11.026. For more information on the NAO see the Climate Data Guide.
SAM/PSA1/PSA2 (Southern Annular Mode, Pacific South American Patterns 1/2)Seasonal/annual PSL averages are formed, square root of the cosine of latitude weighting is applied, and then the 1st (SAM), 2nd (PSA1) and 3rd (PSA2) EOFs and associated principal component (PC) timeseries are computed over 20:90°S, 0:360°E. Patterns created by regressing global PSL anomalies (in hPa) onto normalized PC timeseries. SAM calculation based on Thompson, D. W. J. and J.M. Wallace, 2000: Annular modes in the extratropical circulation. Part I: Month-to-month variability. J. Climate, 13, 1000-1016.
PNA/NPO (Pacific North American Pattern, North Pacific Oscillation)Seasonal/annual PSL averages are formed, the square root of the cosine of the latitude weighting is applied, and then the 1st (PNA) and 2nd (NPO) EOFs and associated principal component (PC) timeseries are computed over 20:85°N, 120°E:120°W. Patterns created by regressing global PSL anomalies (in hPa) onto normalized PC timeseries.
SST RegressionsSST anomalies (in Celsius) at each grid box are regressed upon the normalized atmospheric mode timeseries.
TAS RegressionsTAS anomalies (in Celsius) at each grid box are regressed upon the normalized atmospheric mode timeseries.
PR RegressionsPR anomalies (in mm/day) at each grid box are regressed upon the normalized atmospheric mode timeseries.
" + + txt(80) = "

Climate Indices

" + txt(81) = "" + txt(82) = "" + txt(83) = "" + txt(84) = "" + txt(85) = "" + txt(86) = "" + txt(87) = "" + txt(88) = "" + txt(89) = "" + txt(90) = "" + txt(91) = "" + txt(92) = "" + txt(93) = "" + txt(94) = "" + txt(95) = "" + txt(96) = "" + txt(97) = "" + txt(98) = "" + txt(99) = "" + + txt(100) = "" + txt(101) = "" + txt(102) = "" + txt(103) = "" + txt(104)= "" + txt(105)= "" + txt(106) = "
Atlantic Meridional ModeDefined as the difference between area-averaged SST anomalies computed over 5:15°N, 20:50°W and area-averaged SST anomalies computed over 5:15°S, 20°W:10°E. Red/blue shading denotes positive/negative departures from the best-fit linear trend line. See Doi, T., T. Tozuka and T. Yamagata (2009), Interannual variability of the Guinea Dome and its possible link with the Atlantic Meridional Mode. Climate Dynamics, 33, 985-998, doi:10.1007/s00382-009-0574-z.
Atlantic Niño (ATL3)Area-averaged SST anomalies computed over 3°S:3°N, 20°W:0°E. Red/blue shading denotes positive/negative departures from the best-fit linear trend line. See Zebiak, S. E., (1993): Air–sea interaction in the equatorial Atlantic region. Journal of Climate, 6, 1567–1586.
Tropical North Atlantic SSTArea-averaged SST anomalies computed over 5.5:23.5°N, 15:57.5°W. Red/blue shading denotes positive/negative departures from the best-fit linear trend line. See Enfield, D.B., A.M. Mestas, D.A. Mayer, and L. Cid-Serrano (1999), How ubiquitous is the dipole relationship in tropical Atlantic sea surface temperatures?, JGR-O, 104, 7841-7848.
Tropical South Atlantic SSTArea-averaged SST anomalies computed over 0:20°S, 30°W:10°E. Red/blue shading denotes positive/negative departures from the best-fit linear trend line. See Enfield, D.B., A.M. Mestas, D.A. Mayer, and L. Cid-Serrano (1999), How ubiquitous is the dipole relationship in tropical Atlantic sea surface temperatures?, JGR-O, 104, 7841-7848.
niño1+2Area-averaged SST anomalies computed over 0:10°S, 80:90°W. Red/blue shading denotes positive/negative departures from the best-fit linear trend line.
niño3Area-averaged SST anomalies computed over 5°S:5°N, 90:150°W. Red/blue shading denotes positive/negative departures from the best-fit linear trend line. See Trenberth, K. E. (1997) The Definition of El Niño. Bulletin of the American Meteorological Society, 78, 2771-2777.
niño3.4Area-averaged SST anomalies computed over 5°S:5°N, 120:170°W. Red/blue shading denotes positive/negative departures from the best-fit linear trend line. See Trenberth, K. E. (1997) The Definition of El Niño. Bulletin of the American Meteorological Society, 78, 2771-2777.
niño4Area-averaged SST anomalies computed over 5°S:5°N, 160°E:150°W. Red/blue shading denotes positive/negative departures from the best-fit linear trend line.
NPI (North Pacific PSL Index)Winter (December-March) average PSL anomalies (in hPa) area-averaged over 30°:65°N, 160°E:140°W. Based on Trenberth, K. E. and J. W. Hurrell, 1994: Decadal atmosphere-ocean variations in the Pacific, Climate Dynamics, 9, 303-319.
Indian Ocean SST DipoleDefined as the difference between area-averaged SST anomalies computed over 10°S:10°N, 50:70°E and area-averaged SST anomalies computed over 0:10°S, 90:110°E. Red/blue shading denotes positive/negative departures from the best-fit linear trend line. See: Saji N.H., Goswami B.N., Vinayachandran P.N., Yamagata T., 1999: A dipole mode in the tropical Indian Ocean, Nature, 401, 360-363.
Tropical Indian Ocean SSTArea-averaged SST anomalies computed over 15°S:15°N, 40:110°E. Red/blue shading denotes positive/negative departures from the best-fit linear trend line.
Southern Ocean SSTArea-averaged SST anomalies computed over 50°:70°S, 0:360°E. Red/blue shading denotes positive/negative departures from the best-fit linear trend line.
" + txt(107) = "" + + txt(112) = "
Created "+systemfunc("date") + txt(113) = "

CVDP Version "+VERSION+"

" + txt(114) = "" + + tt = ind(.not.ismissing(txt)) + txt2 = txt(:tt(dimsizes(tt)-1)+1) + txt2 = where(ismissing(txt2),"",txt2) + asciiwrite(OUTDIR+"methodology.html",txt2) + delete([/tt,txt,txt2/]) +;---------------------------------------------------------------------------- +;-- Create metrics webpage +;---------------------------------------------------------------------------- + if (isfilepresent2(OUTDIR+"metrics.txt")) then + txt = new(500,"string") + quote = str_get_dq() + + txt(0) = "Climate Variability Diagnostics Package" + txt(11) = "+quote+" + txt(12)= "" + txt(13) = "" + txt(14) = "

Back to Diagnostics Plots

"+webtitle+"


Metrics Table

" + + z = asciiread(OUTDIR+"metrics.txt",(/-1/),"string") + nlines = dimsizes(z) + txt(24) = "
"
+     do gg = 0,nlines-1
+        txt(25+gg) = z(gg)
+     end do
+     txt(25+nlines) = "
" + txt(25+nlines+1) = "

Observations Used

" + txt(25+nlines+2) = "Created "+systemfunc("date") + txt(25+nlines+3) = "

CVDP Version "+VERSION+"

" + + tt = ind(.not.ismissing(txt)) + txt2 = txt(:tt(dimsizes(tt)-1)+1) + txt2 = where(ismissing(txt2),"",txt2) + asciiwrite(OUTDIR+"metrics.html",txt2) + delete([/txt,txt2,tt/]) + end if + if (isfilepresent2(OUTDIR+"metrics.table_1.gif")) then ; create sorted metrics table pages + txt = new(500,"string") + quote = str_get_dq() + + txt(0) = "Climate Variability Diagnostics Package" + txt(12) = "+quote+" + txt(13)= "" + + + sort_txt = (/"Namelist (default) | ","Namelist (Alphabetically) | ","ENSO TAS | ","ENSO PSL | ", \ + "El Niño Hovmöller
","La Niña Hovmöller | ","AMO | ","PDO | ", \ + "NAM | ","SAM | ","SST std dev | ","PSL std dev | ", \ + "PR std dev | ","Mean Score"/) + sort_txt2 = (/"Namelist (default)","Namelist (Alphabetically)","ENSO TAS","ENSO PSL","El Niño Hovmöller","La Niña Hovmöller","AMO","PDO","NAM","SAM","SST std dev","PSL std dev","PR std dev","Mean Score"/) + + sort_txtA = (/"Namelist (default) | ","Namelist (Alphabetically) | ","ENSO TAS | ","ENSO PSL | ", \ + "El Niño Hovmöller
","La Niña Hovmöller | ","AMO | ","PDO | ", \ + "NAM | ","SAM | ","SST std dev | ","PSL std dev | ", \ + "PR std dev | ","Mean Score"/) + + strarr = new(14,string) + strarr = " | " + strarr(13) = " " + do gg = 0,13 + txt(14) = "" + txt(15) = "

Go to:

Methodology and Definitions
Diagnostics Plots
RMS Metric Tables

"+webtitle+"


" + sort_txtT = sort_txt + sort_txtT(gg) = ""+sort_txt2(gg)+""+strarr(gg) + txt(24) = "

Pattern Correlation Metrics Tables

" + txt(25) = "
Sort By:"+str_concat(sort_txtT)+"
" + txt(26) = "" + txt(27) = "

Observations Used

" + txt(28) = "Created "+systemfunc("date") + txt(29) = "

CVDP Version "+VERSION+"

" + tt = ind(.not.ismissing(txt)) + txt2 = txt(:tt(dimsizes(tt)-1)+1) + txt2 = where(ismissing(txt2),"",txt2) + asciiwrite(OUTDIR+"metrics.table_"+gg+".html",txt2) + delete([/txt2,tt,sort_txtT/]) + end do + + + do gg = 0,13 + txt(14) = "

Go to:

Methodology and Definitions
Diagnostics Plots
Pattern Correlation Metric Tables"
+        txt(15) = "

"+webtitle+"


" + sort_txtT = sort_txtA + sort_txtT(gg) = ""+sort_txt2(gg)+""+strarr(gg) + txt(24) = "

RMS Metrics Tables

" + txt(25) = "
Sort By:"+str_concat(sort_txtT)+"
" + txt(26) = "" + txt(27) = "

Observations Used

" + txt(28) = "Created "+systemfunc("date") + txt(29) = "

CVDP Version "+VERSION+"

" + tt = ind(.not.ismissing(txt)) + txt2 = txt(:tt(dimsizes(tt)-1)+1) + txt2 = where(ismissing(txt2),"",txt2) + asciiwrite(OUTDIR+"metrics.table_"+(gg+14)+".html",txt2) + delete([/txt2,tt/]) + end do + delete([/txt,strarr/]) + end if + delete(OUTDIR) + print("Finished: webpage.ncl") +end + diff --git a/esmvaltool/diag_scripts/cvdp/cvdp_wrapper.py b/esmvaltool/diag_scripts/cvdp/cvdp_wrapper.py new file mode 100644 index 0000000000..b09c6859d1 --- /dev/null +++ b/esmvaltool/diag_scripts/cvdp/cvdp_wrapper.py @@ -0,0 +1,317 @@ +"""wrapper diagnostic for the NCAR CVDP (p)ackage.""" +import logging +import os +import re +import shutil +import subprocess + +from esmvaltool._task import DiagnosticError +from esmvaltool.diag_scripts.shared import (group_metadata, run_diagnostic, + select_metadata) +from esmvaltool.diag_scripts.shared import ProvenanceLogger + +logger = logging.getLogger(os.path.basename(__file__)) + + +def setup_driver(cfg): + """Write the driver.ncl file of the cvdp package.""" + cvdp_root = os.path.join(os.path.dirname(__file__), 'cvdp') + if not os.path.isdir(cvdp_root): + raise DiagnosticError("CVDP is not available.") + + settings = { + 'outdir': "{0}/".format(cfg['work_dir']), + 'obs': 'False', + 'zp': os.path.join(cvdp_root, "ncl_scripts/"), + 'run_style': 'serial', + 'webpage_title': 'CVDP run via ESMValTool' + } + settings['output_data'] = "True" if _nco_available() else "False" + + def _update_settings(line): + + for key, value in settings.items(): + pattern = r'\s*{0}\s*=.*\n'.format(key) + search_results = re.findall(pattern, line) + if search_results == []: + continue + return re.sub( + r'".+?"', '"{0}"'.format(value), search_results[0], count=1) + + return line + + content = [] + driver = os.path.join(cvdp_root, "driver.ncl") + + with open(driver, 'r') as driver_file: + for line in driver_file: + content.append(_update_settings(line)) + + new_driver = os.path.join(cfg['run_dir'], "driver.ncl") + + with open(new_driver, 'w') as new_driver_file: + new_driver_file.write("".join(content)) + + +def create_link(cfg, inpath): + """Create link for the input file. + + The link matches the naming convention of the cvdp package. + Returns the path to the link. 
+
+def create_link(cfg, inpath):
+    """Create a link to the input file.
+
+    The link matches the naming convention of the cvdp package.
+    Returns the path to the link.
+
+    cfg: configuration dict
+    inpath: path to infile
+    """
+
+    def _create_link_name(inpath):
+        # Rewrite 'YYYY-YYYY' in the filename to the 'YYYY01-YYYY12'
+        # month-resolved form that CVDP expects.
+        tail = os.path.split(inpath)[1]
+        search_result = re.search(r'[0-9]{4}-[0-9]{4}', tail).group(0)
+        return tail.replace(search_result,
+                            "{0}01-{1}12".format(*search_result.split('-')))
+
+    if not os.path.isfile(inpath):
+        raise DiagnosticError("Path {0} does not exist".format(inpath))
+
+    lnk_dir = cfg['lnk_dir']
+
+    if not os.path.isdir(lnk_dir):
+        os.mkdir(lnk_dir)
+
+    link = os.path.join(lnk_dir, _create_link_name(inpath))
+    if not os.path.exists(link):
+        os.symlink(inpath, link)
+
+    return link
+
+
+def setup_namelist(cfg):
+    """Set up the namelist file of the cvdp package."""
+    input_data = cfg['input_data'].values()
+    selection = select_metadata(input_data, project='CMIP5')
+    grouped_selection = group_metadata(selection, 'dataset')
+
+    content = []
+    for key, attributes in grouped_selection.items():
+        for item in attributes:
+            create_link(cfg, item["filename"])
+        ppath = "{0}/".format(cfg['lnk_dir'])
+        content.append("{0} | {1} | {2} | {3}\n".format(
+            key, ppath, attributes[0]["start_year"],
+            attributes[0]["end_year"]))
+
+    namelist = os.path.join(cfg['run_dir'], "namelist")
+
+    with open(namelist, 'w') as namelist_file:
+        # Entries already carry a trailing newline, so join without one.
+        namelist_file.write("".join(content))
+
+
+def log_functions(func):
+    """Decorator that logs the return value of a function."""
+
+    def inner():
+        """Inner function."""
+        ret = func()
+        logger.debug("Function %s returns %s", func.__name__, str(ret))
+        return ret
+
+    return inner
+
+
+@log_functions
+def _nco_available():
+    """Check if nco is available."""
+    try:
+        if shutil.which("ncks") is None:
+            ret = False
+        else:
+            ret = True
+    except OSError:
+        ret = False
+    return ret
+
+
+def _is_png(path):
+    """Check whether path points to a png plot (excluding the CVDP logo)."""
+    exclude = ['cas-cvdp.png']
+    filename = os.path.basename(path)
+    return filename.endswith('.png') and filename not in exclude
+
+
+def _get_caption(filename):
+    """Build a caption from the parts of a CVDP plot filename."""
+    caption = []
+    stat = _get_stat(filename)
+    if stat is not None and stat != "other":
+        caption.append(stat)
+    season = _get_season(filename)
+    if season is not None:
+        caption.append(season)
+    long_name = _get_long_name(filename)
+    if long_name is not None:
+        caption.append(long_name)
+    mode = _get_mode(filename)
+    if mode is not None:
+        caption.append(mode)
+    return " ".join(caption)
+
+
+def _get_plot_type(filename):
+    plot_type = {
+        'timeseries': "times",
+        'mean': "other",
+        'stddev': "other",
+        'trends': "other",
+        'eight_yr_runtrend': "other",
+        'sixteen_yr_runtrend': "other",
+        'fourteen_yr_runtrend': "other",
+        'twelve_yr_runtrend': "other",
+        'ten_yr_runtrend': "other",
+        'powspec': "other",
+        'reg': "other",
+        'hov': "other",
+        'monstddev': "other",
+        'runstddev': "other",
+        'za': "zonal",
+    }
+    ans = _get_info(filename, plot_type)
+    return ans if ans is not None else 'other'
+
+
+def _get_stat(filename):
+    stat = {
+        'timeseries': "other",
+        'mean': "mean",
+        'stddev': "stddev",
+        'trends': "trend",
+        'eight_yr_runtrend': "trend",
+        'sixteen_yr_runtrend': "trend",
+        'fourteen_yr_runtrend': "trend",
+        'twelve_yr_runtrend': "trend",
+        'ten_yr_runtrend': "trend",
+        'powspec': "spectrum",
+        'reg': "other",
+        'hov': "other",
+        'monstddev': "stddev",
+        'runstddev': "stddev",
+        'za': "mean",
+    }
+    ans = _get_info(filename, stat)
+    return ans if ans is not None else 'other'
+
+
+def _get_season(filename):
+    season = {
+        'ann': "Annual",
+        'djf': "DJF",
+        'mam': "MAM",
+        'jja': "JJA",
+        'son': "SON",
+    }
+    return _get_info(filename, season)
+
+
+def _get_long_name(filename):
+    variable = {
+        'pr': "Precipitation",
+        'tas': "Surface temperature",
+        'psl': "Sea level pressure",
+        'sst': "Sea surface temperature",
+    }
+    return _get_info(filename, variable)
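
A hypothetical usage sketch of the caption helpers above (assuming the module is fully defined; the filename is an example of CVDP's dot-separated naming):

```python
# Sketch: how a CVDP plot filename decomposes into caption parts.
filename = 'pdo.timeseries.ann.png'
print(_get_stat(filename))     # 'other'  ('timeseries' maps to "other")
print(_get_season(filename))   # 'Annual' ('ann')
print(_get_caption(filename))  # 'Annual pdo' (stat "other" is skipped)
```
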
temperature", + 'psl': "Sea level pressure", + 'sst': "Sea surface temperature", + } + return _get_info(filename, variable) + + +def _get_mode(filename): + mode = { + 'iod': "iod", + 'ipo': "ipo", + 'nam': "nam", + 'nao': "nao", + 'lanina': "La nina", + 'nino12': "El nino 12", + 'nino3': "El nino 3", + 'nino34': "El nino 34", + 'nino4': "El nino 4", + 'npi': "npi", + 'npo': "npo", + 'pdo': "pdo", + 'pna': "pna", + 'psa1': "psa1", + 'psa2': "psa2", + 'sam': "sam", + 'socn': "socn", + 'tio': "tio", + 'tna': "tna", + 'tsa': "tsa", + } + return _get_info(filename, mode) + + +def _get_info(filename, dictionary): + intersection = list( + set(os.path.basename(filename).split('.')).intersection( + dictionary.keys())) + if len(intersection) != 1: + return None + return dictionary[intersection[0]] + + +def _get_global_ancestors(cfg): + input_data = cfg['input_data'].values() + selection = select_metadata(input_data, project='CMIP5') + grouped_selection = group_metadata(selection, 'dataset') + ancestor = [] + for _, attributes in grouped_selection.items(): + ancestor += [item['filename'] for item in attributes] + return ancestor + + +def set_provenance(cfg): + """Add provenance to all image files that the cvdp package creates.""" + + def _get_provenance_record(filename, ancestors): + return { + 'caption': _get_caption(filename), + 'statistics': [_get_stat(filename)], + 'domain': 'global', + 'plot_type': _get_plot_type(filename), + 'plot_file': filename, + 'authors': [ + 'phil_ad', + ], + 'references': [ + 'acknow_project', + 'phillips14eos', + ], + 'ancestors': ancestors, + } + + ancestors = _get_global_ancestors(cfg) + logger.info("Path to work_dir: %s", cfg['work_dir']) + with ProvenanceLogger(cfg) as provenance_logger: + for root, _, files in os.walk(cfg['work_dir']): + for datei in files: + path = os.path.join(root, datei) + if _is_png(path): + logger.info("Name of file: %s", path) + provenance_record = _get_provenance_record(path, ancestors) + logger.info("Recording provenance of %s:\n%s", path, + provenance_record) + provenance_logger.log(path, provenance_record) + + +def _execute_cvdp(cfg): + subprocess.check_call(["ncl", "driver.ncl"], + cwd=os.path.join(cfg['run_dir'])) + + +def main(cfg): + """Set and execute the cvdp package.""" + cfg['lnk_dir'] = os.path.join(cfg['run_dir'], "links") + setup_driver(cfg) + setup_namelist(cfg) + _execute_cvdp(cfg) + set_provenance(cfg) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/droughtindex/diag_cdd.py b/esmvaltool/diag_scripts/droughtindex/diag_cdd.py new file mode 100644 index 0000000000..40d6edddea --- /dev/null +++ b/esmvaltool/diag_scripts/droughtindex/diag_cdd.py @@ -0,0 +1,93 @@ +"""A diagnostic that calculates consecutive dry days.""" +import logging +import os +from copy import deepcopy + +import iris +import numpy as np + +from esmvaltool.diag_scripts.shared import (ProvenanceLogger, + get_diagnostic_filename, + get_plot_filename, run_diagnostic) +from esmvaltool.diag_scripts.shared.plot import quickplot + +logger = logging.getLogger(os.path.basename(__file__)) + + +def save_results(cfg, cube, basename, ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + basename = basename + '_' + cube.var_name + provenance = { + 'caption': cube.long_name.replace('\n', ' '), + 'statistics': ['other'], + 'domains': ['global'], + 'authors': ['berg_pe'], + 'references': ['acknow_project'], + 'ancestors': ancestor_files, + } + if cfg['write_plots'] 
diff --git a/esmvaltool/diag_scripts/droughtindex/diag_spei.r b/esmvaltool/diag_scripts/droughtindex/diag_spei.r
new file mode 100644
index 0000000000..6e79ef8466
--- /dev/null
+++ b/esmvaltool/diag_scripts/droughtindex/diag_spei.r
@@ -0,0 +1,243 @@
+library(yaml)
+library(ncdf4)
+library(SPEI)
+library(RColorBrewer) # nolint
+
+leap_year <- function(year) {
+  return(ifelse( (year %% 4 == 0 & year %% 100 != 0) |
+                 year %% 400 == 0, TRUE, FALSE))
+}
+
+getnc <- function(yml, m, lat = FALSE) {
+  id <- nc_open(yml[m][[1]]$filename, readunlim = FALSE)
+  if (lat){
+    v <- ncvar_get(id, "lat")
+  }else{
+    v <- ncvar_get(id, yml[m][[1]]$short_name)
+    if (yml[m][[1]]$short_name == "tas") v <- v - 273.15
+    if (yml[m][[1]]$short_name == "pr"){
+      time <- ncvar_get(id, "time")
+      tcal <- ncatt_get(id, "time", attname = "calendar")
+      tunits <- ncatt_get(id, "time", attname = "units")
+      tustr <- strsplit(tunits$value, " ")
+      stdate <- as.Date(time[1], origin = unlist(tustr)[3])
+      nddate <- as.Date(time[length(time)], origin = unlist(tustr)[3])
+      if (tcal$value == "365_day"){
+        # Correct for missing leap years in nddate
+        diff <- as.numeric(nddate - stdate, units = "days")
+        dcorr <- floor( (diff / 365 - diff / 365.25) * 365.25)
+        nddate <- nddate + dcorr
+      }
+      if (tcal$value == "360_day"){
+        v <- v * 30 * 24 * 3600.
+      }else{
+        cnt <- 1
+        monarr <- c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
+        date <- stdate
+        while (date <= nddate){
+          year <- as.numeric(substr(date, 1, 4))
+          lpyear <- leap_year(year)
+          month <- as.numeric(substr(date, 6, 7))
+          mdays <- monarr[month]
+          pdays <- mdays
+          if (month == 2 & lpyear == TRUE){
+            pdays <- 29
+            if (tcal$value != "365_day"){
+              mdays <- 29
+            }else{
+              mdays <- 28
+            }
+          }
+          # Convert precipitation flux (kg m-2 s-1) to monthly totals (mm)
+          v[, , cnt] <- v[, , cnt] * mdays * 24 * 3600.
+          date <- date + pdays
+          cnt <- cnt + 1
+        }
+      }
+    }
+  }
+  nc_close(id)
+  return(v)
+}
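
The unit handling in `getnc` above can be made concrete with a short sketch (illustrative values; a precipitation flux in kg m-2 s-1 equals mm of liquid water per second):

```python
# Sketch: monthly-mean pr flux -> accumulated monthly total in mm.
flux = 3.0e-5               # hypothetical monthly-mean pr in kg m-2 s-1
days_in_month = 31          # e.g. January (360_day calendars always use 30)
total_mm = flux * days_in_month * 24 * 3600
print(round(total_mm, 1))   # 80.4 mm for the month
```
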
+
+ncwritenew <- function(yml, m, hist, wdir, bins){
+  fnam <- strsplit(yml[m][[1]]$filename, "/")[[1]]
+  pcs <- strsplit(fnam[length(fnam)], "_")[[1]]
+  pcs[which(pcs == yml[m][[1]]$short_name)] <- "spei"
+  onam <- paste(pcs, collapse = "_")
+  onam <- paste0(wdir, "/", strsplit(onam, ".nc"), "_hist.nc")
+  ncid_in <- nc_open(yml[m][[1]]$filename)
+  var <- ncid_in$var[[yml[m][[1]]$short_name]]
+  xdim <- ncid_in$dim[["lon"]]
+  ydim <- ncid_in$dim[["lat"]]
+  nc_close(ncid_in)
+  hdim <- ncdim_def("bins", "level", bins[1:(length(bins) - 1)])
+  hdim2 <- ncdim_def("binsup", "level", bins[2:length(bins)])
+  var_hist <- ncvar_def("hist", "counts", list(xdim, ydim, hdim), NA)
+  idw <- nc_create(onam, var_hist)
+  ncvar_put(idw, "hist", hist)
+  nc_close(idw)
+  return(onam)
+}
+
+whfcn <- function(x, ilow, ihigh){
+  return(length(which(x >= ilow & x < ihigh)))
+}
+
+dothornthwaite <- function(v, lat){
+  # Estimate potential evapotranspiration (PET) band by band:
+  # thornthwaite() expects time series in columns, so each latitude
+  # band is transposed in and back out.
+  print("Estimating PET with Thornthwaite method.")
+  dpet <- v * NA
+  d <- dim(dpet)
+  for (i in 1:d[2]){
+    tmp <- v[, i, ]
+    tmp2 <- thornthwaite(t(tmp), rep(lat[i], d[1]), na.rm = TRUE)
+    d2 <- dim(tmp2)
+    tmp2 <- as.numeric(tmp2)
+    dim(tmp2) <- d2
+    dpet[, i, ] <- t(tmp2)
+  }
+  return(dpet)
+}
+
+args <- commandArgs(trailingOnly = TRUE)
+params <- read_yaml(args[1])
+metadata1 <- read_yaml(params$input_files[1])
+metadata2 <- read_yaml(params$input_files[2])
+modfile1 <- names(metadata1)
+modfile2 <- names(metadata2)
+wdir <- params$work_dir
+rundir <- params$run_dir
+dir.create(wdir, recursive = TRUE)
+pdir <- params$plot_dir
+dir.create(pdir, recursive = TRUE)
+var1_input <- read_yaml(params$input_files[1])
+var2_input <- read_yaml(params$input_files[2])
+nmods <- length(names(var1_input))
+
+# Set up provenance file and list
+provenance_file <- paste0(rundir, "/", "diagnostic_provenance.yml")
+provenance <- list()
+
+histbrks <- c(-99999, -2, -1.5, -1, 1, 1.5, 2, 99999)
+histnams <- c("Extremely dry", "Moderately dry", "Dry",
+              "Neutral",
+              "Wet", "Moderately wet", "Extremely wet")
+refnam <- var1_input[1][[1]]$reference_dataset
+n <- 1
+while (n <= nmods){
+  if (var1_input[n][[1]]$dataset == refnam) break
+  n <- n + 1
+}
+nref <- n
+lat <- getnc(var1_input, nref, lat = TRUE)
+if (max(lat) > 90){
+  print(paste0("Latitude must be [-90,90]: min=",
+               min(lat), " max=", max(lat)))
+  stop("Aborting!")
+}
+ref <- getnc(var1_input, nref, lat = FALSE)
+refmsk <- apply(ref, c(1, 2), FUN = mean, na.rm = TRUE)
+refmsk[refmsk > 10000] <- NA
+refmsk[!is.na(refmsk)] <- 1
+
+xprov <- list(ancestors = list(""),
+              authors = list("berg_pe"),
+              references = list("vicente10jclim"),
+              projects = list("c3s-magic"),
+              caption = "",
+              statistics = list("other"),
+              realms = list("atmos"),
+              themes = list("phys"),
+              domains = list("global"))
+
+histarr <- array(NA, c(nmods, length(histnams)))
+for (mod in 1:nmods){
+  lat <- getnc(var1_input, mod, TRUE)
+  v1 <- getnc(var1_input, mod, FALSE)
+  v2 <- getnc(var2_input, mod, FALSE)
+  if (var1_input[1][[1]]$short_name == "pr") prtas <- TRUE else prtas <- FALSE
+  if (prtas){
+    pet <- dothornthwaite(v2, lat)
+    pme <- v1 - pet
+  }else{
+    pet <- dothornthwaite(v1, lat)
+    pme <- v2 - pet
+  }
+  print(var1_input[mod][[1]]$cmor_table)
+  d <- dim(pme)
+  pme_spei <- pme * NA
+  for (i in 1:d[1]){
+    wh <- which(!is.na(refmsk[i, ]))
+    if (length(wh) > 0){
+      tmp <- pme[i, wh, ]
+      pme_spei[i, wh, ] <- t(spei(t(tmp), 1, na.rm = TRUE)$fitted)
+    }
+  }
+  pme_spei[is.infinite(pme_spei)] <- NA
+  pme_spei[pme_spei > 10000] <- NA
+  hist_spei <- array(NA, c(d[1], d[2], length(histbrks) - 1))
+  for (nnh in 1:(length(histbrks) - 1)){
+    hist_spei[, , nnh] <- apply(pme_spei, c(1, 2), FUN = whfcn,
+                                ilow = histbrks[nnh],
+                                ihigh = histbrks[nnh + 1])
+  }
+  filename <- ncwritenew(var1_input, mod, hist_spei, wdir, histbrks)
+  # Set provenance for output files
+  xprov$caption <- "Histogram of SPEI index per grid point."
+  xprov$ancestors <- list(modfile1[mod], modfile2[mod])
+  provenance[[filename]] <- xprov
+  for (t in 1:d[3]){
+    tmp <- pme_spei[, , t]
+    tmp[is.na(refmsk)] <- NA
+    pme_spei[, , t] <- tmp
+  }
+  pme_spei[is.infinite(pme_spei)] <- NA
+  pme_spei[pme_spei > 10000] <- NA
+  # Weight each latitude band by cos(latitude)
+  h <- c(1:length(histnams)) * 0
+  for (j in 1:d[2]){
+    h <- h + hist(pme_spei[j, , ], breaks = histbrks,
+                  plot = FALSE)$counts * cos(lat[j] * pi / 180.)
+  }
+  histarr[mod, ] <- h / sum(h, na.rm = TRUE)
+}
+filehist <- paste0(params$work_dir, "/", "histarr.rsav")
+save(histarr, file = filehist)
+plot_file <- paste0(params$plot_dir, "/", "histplot.png")
+xprov$caption <- "Global latitude-weighted histogram of SPEI index."
+xprov$ancestors <- list(modfile1, modfile2)
+xprov[["plot_file"]] <- plot_file
+provenance[[filehist]] <- xprov
+write_yaml(provenance, provenance_file)
+
+bhistarr <- array(NA, c(nmods - 1, 7))
+marr <- c(1:nmods)[c(1:nmods) != nref]
+cnt <- 1
+for (m in marr){
+  bhistarr[cnt, ] <- histarr[m, ] - histarr[nref, ]
+  cnt <- cnt + 1
+}
+parr <- c(nref, marr)
+
+mnam <- c(1:nmods) * NA
+for (m in 1:nmods) mnam[m] <- var1_input[m][[1]]$dataset
+
+qual_col_pals <- brewer.pal.info[brewer.pal.info$category == "qual", ] # nolint
+col_vector <- unlist(mapply(brewer.pal, qual_col_pals$maxcolors, # nolint
+                            rownames(qual_col_pals)))
+cols <- c("black", sample(col_vector, nmods - 1))
+
+png(plot_file, width = 1000, height = 500)
+  par(mfrow = c(2, 1), oma = c(3, 3, 3, 13), mar = c(2, 1, 1, 1))
+  barplot(histarr[parr, ], beside = 1, names.arg = histnams,
+          col = cols, xaxs = "i")
+  box()
+  mtext("Probability", side = 2, line = 2.1)
+  barplot(bhistarr, beside = 1, names.arg = histnams,
+          col = cols[2:nmods], xaxs = "i")
+  box()
+  mtext("Absolute difference", side = 2, line = 2.1)
+  mtext("Standardized precipitation-evapotranspiration index",
+        outer = TRUE, cex = 2, font = 2)
+  par(fig = c(0.8, .95, 0.1, 0.9), new = T, oma = c(0, 0, 0, 0),
+      mar = c(0, 0, 0, 0))
+  legend("topright", mnam[parr], fill = cols)
+dev.off()
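
Both drought scripts classify index values with the same breaks (-2, -1.5, -1, 1, 1.5, 2). A minimal Python sketch of that binning (illustrative only):

```python
# Sketch: bin SPEI/SPI values into the seven dryness/wetness categories.
import numpy as np

histbrks = [-99999, -2, -1.5, -1, 1, 1.5, 2, 99999]
histnams = ["Extremely dry", "Moderately dry", "Dry", "Neutral",
            "Wet", "Moderately wet", "Extremely wet"]
spei = np.array([-2.3, -0.2, 0.8, 1.7, 0.1, -1.2])  # hypothetical values
counts, _ = np.histogram(spei, bins=histbrks)
print(dict(zip(histnams, counts)))
# {'Extremely dry': 1, 'Moderately dry': 0, 'Dry': 1, 'Neutral': 3,
#  'Wet': 0, 'Moderately wet': 1, 'Extremely wet': 0}
```
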
<- function(yml, m, hist, wdir, bins){ + fnam <- strsplit(yml[m][[1]]$filename, "/")[[1]] + pcs <- strsplit(fnam[length(fnam)], "_")[[1]] + pcs[which(pcs == yml[m][[1]]$short_name)] <- "spi" + onam <- paste(pcs, collapse = "_") + onam <- paste0(wdir, "/", strsplit(onam, ".nc"), "_hist.nc") + ncid_in <- nc_open(yml[m][[1]]$filename) + var <- ncid_in$var[[yml[m][[1]]$short_name]] + xdim <- ncid_in$dim[["lon"]] + ydim <- ncid_in$dim[["lat"]] + hdim <- ncdim_def("bins", "level", bins[1:(length(bins) - 1)]) + hdim2 <- ncdim_def("binsup", "level", bins[2:length(bins)]) + var_hist <- ncvar_def("hist", "counts", list(xdim, ydim, hdim), NA) + idw <- nc_create(onam, var_hist) + ncvar_put(idw, "hist", hist) + nc_close(idw) + return(onam) +} + +whfcn <- function(x, ilow, ihigh){ + return(length(which(x >= ilow & x < ihigh))) +} + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +metadata <- read_yaml(params$input_files) +modfile <- names(metadata) +wdir <- params$work_dir +rundir <- params$run_dir +dir.create(wdir, recursive = TRUE) +pdir <- params$plot_dir +dir.create(pdir, recursive = TRUE) +var1_input <- read_yaml(params$input_files[1]) +nmods <- length(names(var1_input)) + +# setup provenance file and list +provenance_file <- paste0(rundir, "/", "diagnostic_provenance.yml") +provenance <- list() + +histbrks <- c(-99999, -2, -1.5, -1, 1, 1.5, 2, 99999) +histnams <- c("Extremely dry", "Moderately dry", "Dry", + "Neutral", + "Wet", "Moderately wet", "Extremely wet") +refnam <- var1_input[1][[1]]$reference_dataset +n <- 1 +while (n <= nmods){ + if (var1_input[n][[1]]$dataset == refnam) break + n <- n + 1 +} +nref <- n +lat <- getnc(var1_input, nref, lat = TRUE) +if (max(lat) > 90){ + print(paste0("Latitude must be [-90,90]: min=", + min(lat), " max=", max(lat))) + stop("Aborting!") +} +ref <- getnc(var1_input, nref, lat = FALSE) +refmsk <- apply(ref, c(1, 2), FUN = mean, na.rm = TRUE) +refmsk[refmsk > 10000] <- NA +refmsk[!is.na(refmsk)] <- 1 + +xprov <- list(ancestors = list(""), + authors = list("berg_pe"), + references = list("mckee93"), + projects = list("c3s-magic"), + caption = "", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("global")) + +histarr <- array(NA, c(nmods, length(histnams))) +for (mod in 1:nmods){ + v1 <- getnc(var1_input, mod) + print(var1_input[mod][[1]]$cmor_table) + d <- dim(v1) + v1_spi <- v1 * NA + for (i in 1:d[1]){ + wh <- which(!is.na(refmsk[i,])) + if (length(wh) > 0){ + tmp <- v1[i,wh,] + v1_spi[i,wh,] <- t(spi(t(tmp), 1, na.rm = TRUE, + distribution = "PearsonIII")$fitted) + } + } + v1_spi[is.infinite(v1_spi)] <- NA + v1_spi[v1_spi > 10000] <- NA + hist_spi <- array(NA, c(d[1], d[2], length(histbrks) - 1)) + for (nnh in 1:(length(histbrks) - 1)){ + hist_spi[,,nnh] <- apply(v1_spi, c(1, 2), FUN = whfcn, + ilow = histbrks[nnh], + ihigh = histbrks[nnh + 1]) + } + filename <- ncwritenew(var1_input, mod, hist_spi, wdir, histbrks) + # Set provenance for output files + xprov$caption <- "Histogram of SPI index per grid point." + xprov$ancestors <- modfile[mod] + provenance[[filename]] <- xprov + # Weight against latitude + h <- c(1:length(histnams)) * 0 + for (j in 1:d[2]){ + h <- h + hist(v1_spi[j,,], breaks = histbrks, + plot = FALSE)$counts * cos(lat[j] * pi / 180.) 
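+ # cos(latitude) weighting approximates the relative grid-cell area + # on a regular grid, so high-latitude rows do not dominate the counts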
+ } + histarr[mod, ] <- h / sum(h, na.rm = TRUE) +} +filehist <- paste0(params$work_dir, "/", "histarr.rsav") +save(histarr, file = filehist) +plot_file <- paste0(params$plot_dir, "/", "histplot.png") +xprov$caption <- "Global latitude-weighted histogram of SPI index." +xprov$ancestors <- list(modfile) +xprov[["plot_file"]] <- plot_file +provenance[[filehist]] <- xprov +write_yaml(provenance, provenance_file) + +bhistarr <- array(NA, c(nmods - 1, 7)) +marr <- c(1:nmods)[c(1:nmods) != nref] +cnt <- 1 +for (m in marr){ + bhistarr[cnt, ] <- histarr[m, ] - histarr[nref, ] + cnt <- cnt + 1 +} +parr <- c(nref, marr) + +mnam <- c(1:nmods) * NA +for (m in 1:nmods) mnam[m] <- var1_input[m][[1]]$dataset + +qual_col_pals <- brewer.pal.info[brewer.pal.info$category == "qual", ] # nolint +col_vector <- unlist(mapply(brewer.pal, qual_col_pals$maxcolors, # nolint + rownames(qual_col_pals))) +cols <- c("black", sample(col_vector, nmods - 1)) + +png(plot_file, width = 1000, height = 500) + par(mfrow = c(2, 1), oma = c(3, 3, 3, 13), mar = c(2, 1, 1, 1)) + barplot(histarr[parr, ], beside = 1, names.arg = histnams, + col = cols, xaxs = "i") + box() + mtext("Probability", side = 2, line = 2.1) + barplot(bhistarr, beside = 1, names.arg = histnams, + col = cols[2:nmods], xaxs = "i") + box() + mtext("Absolute difference", side = 2, line = 2.1) + mtext("Standardized precipitation index", outer = TRUE, + cex = 2, font = 2) + par(fig = c(0.8, .95, 0.1, 0.9), new = T, oma = c(0, 0, 0, 0), + mar = c(0, 0, 0, 0)) + legend("topright", mnam[parr], fill = cols) +dev.off() diff --git a/esmvaltool/diag_scripts/emergent_constraints/__init__.py b/esmvaltool/diag_scripts/emergent_constraints/__init__.py new file mode 100644 index 0000000000..d9015ad44b --- /dev/null +++ b/esmvaltool/diag_scripts/emergent_constraints/__init__.py @@ -0,0 +1,159 @@ +"""Convenience functions for emergent constraints diagnostics.""" +import logging + +import numpy as np +from scipy import integrate, stats + +logger = logging.getLogger(__name__) + + +def _check_input_arrays(*arrays): + """Check the shapes of multiple arrays.""" + shape = None + for array in arrays: + if shape is None: + shape = array.shape + else: + if array.shape != shape: + raise ValueError("Expected input arrays with identical shapes") + + +def standard_prediction_error(x_data, y_data): + """Return function to calculate standard prediction error. + + The standard prediction error of a linear regression is the error when + predicting a new value which is not in the original data. + + Parameters + ---------- + x_data : numpy.array + x coordinates of the points. + y_data : numpy.array + y coordinates of the points. + + Returns + ------- + callable + Standard prediction error function for new x values. + + """ + _check_input_arrays(x_data, y_data) + reg = stats.linregress(x_data, y_data) + y_estim = reg.slope * x_data + reg.intercept + n_data = x_data.shape[0] + see = np.sqrt(np.sum(np.square(y_data - y_estim)) / (n_data - 2)) + x_mean = np.mean(x_data) + ssx = np.sum(np.square(x_data - x_mean)) + + def spe(x_new): + """Return standard prediction error.""" + return see * np.sqrt(1.0 + 1.0 / n_data + (x_new - x_mean)**2 / ssx) + + return np.vectorize(spe) + + +def regression_line(x_data, y_data, n_points=100): + """Return x and y coordinates of the regression line (mean and error). + + Parameters + ---------- + x_data : numpy.array + x coordinates of the points. + y_data : numpy.array + y coordinates of the points. 
+ n_points : int, optional (default: 100) + Number of points for the regression lines. + + Returns + ------- + dict + `numpy.array`s for the keys `x`, `y_best_estim`, `y_minus_err`, + `y_plus_err`, `rvalue`, `slope` and `intercept`. + + """ + _check_input_arrays(x_data, y_data) + spe = standard_prediction_error(x_data, y_data) + out = {} + reg = stats.linregress(x_data, y_data) + x_range = max(x_data) - min(x_data) + x_lin = np.linspace(min(x_data) - x_range, max(x_data) + x_range, n_points) + out['y_best_estim'] = reg.slope * x_lin + reg.intercept + out['y_minus_err'] = out['y_best_estim'] - spe(x_lin) + out['y_plus_err'] = out['y_best_estim'] + spe(x_lin) + out['x'] = x_lin + out['rvalue'] = reg.rvalue + out['slope'] = reg.slope + out['intercept'] = reg.intercept + return out + + +def gaussian_pdf(x_data, y_data, obs_mean, obs_std, n_points=100): + """Calculate Gaussian probability density function for target variable. + + Parameters + ---------- + x_data : numpy.array + x coordinates of the points. + y_data : numpy.array + y coordinates of the points. + obs_mean : float + Mean of observational data. + obs_std : float + Standard deviation of observational data. + n_points : int, optional (default: 100) + Number of points for the regression lines. + + Returns + ------- + tuple of numpy.array + x and y values for the PDF. + + """ + _check_input_arrays(x_data, y_data) + spe = standard_prediction_error(x_data, y_data) + reg = stats.linregress(x_data, y_data) + + def obs_pdf(x_new): + """Return PDF of observations P(x).""" + norm = np.sqrt(2.0 * np.pi * obs_std**2) + return np.exp(-(x_new - obs_mean)**2 / 2.0 / obs_std**2) / norm + + def cond_pdf(x_new, y_new): + """Return conditional PDF P(y|x).""" + y_estim = reg.slope * x_new + reg.intercept + norm = np.sqrt(2.0 * np.pi * spe(x_new)**2) + return np.exp(-(y_new - y_estim)**2 / 2.0 / spe(x_new)**2) / norm + + def comb_pdf(x_new, y_new): + """Return combined PDF P(y,x).""" + return obs_pdf(x_new) * cond_pdf(x_new, y_new) + + # PDF of target variable P(y) + y_range = max(y_data) - min(y_data) + y_lin = np.linspace(min(y_data) - y_range, max(y_data) + y_range, n_points) + y_pdf = [ + integrate.quad(comb_pdf, -np.inf, +np.inf, args=(y, ))[0] + for y in y_lin + ] + return (y_lin, np.array(y_pdf)) + + +def cdf(data, pdf): + """Calculate cumulative distribution function for a PDF. + + Parameters + ---------- + data : numpy.array + Data points (x axis). + pdf : numpy.array + Corresponding probability density function (PDF). + + Returns + ------- + numpy.array + Corresponding cumulative distribution function (CDF). + + """ + idx_range = range(1, len(data) + 1) + cum_dens = [integrate.simps(pdf[:idx], data[:idx]) for idx in idx_range] + return np.array(cum_dens) diff --git a/esmvaltool/diag_scripts/emergent_constraints/cox18nature.py b/esmvaltool/diag_scripts/emergent_constraints/cox18nature.py new file mode 100644 index 0000000000..48a09ba148 --- /dev/null +++ b/esmvaltool/diag_scripts/emergent_constraints/cox18nature.py @@ -0,0 +1,550 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Diagnostic script to reproduce Cox et al. (2018). + +Description +----------- +Plot effective climate sensitivity ECS vs. temperature variability metric psi +to establish an emergent relationship for ECS. + +Author +------ +Manuel Schlund (DLR, Germany) + +Project +------- +CRESCENDO + +Configuration options in recipe +------------------------------- +confidence_level : float, optional (default: 0.66) + Confidence level for ECS error estimation.
+ +""" + +import logging +import os + +import iris +import matplotlib.lines as mlines +import matplotlib.pyplot as plt +import numpy as np + +import esmvaltool.diag_scripts.emergent_constraints as ec +import esmvaltool.diag_scripts.shared.iris_helpers as ih +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, get_diagnostic_filename, get_plot_filename, + group_metadata, io, plot, run_diagnostic, select_metadata) + +logger = logging.getLogger(os.path.basename(__file__)) +plt.style.use(plot.get_path_to_mpl_style()) + +COLOR_SMALL_LAMBDA = '#800060' +COLOR_LARGE_LAMBDA = '#009900' +(FIG, AXES) = plt.subplots() + +ECS_ATTRS = { + 'short_name': 'ecs', + 'long_name': 'Effective Climate Sensitivity (ECS)', + 'units': 'K', +} +TASA_ATTRS = { + 'short_name': 'tasa', + 'long_name': 'Near-Surface Air Temperature Anomaly', + 'units': 'K', +} +PSI_ATTRS = { + 'short_name': 'psi', + 'long_name': 'Temperature variability metric', + 'units': 'K', +} + + +def _get_ancestor_files(cfg, obs_name, projects=None): + """Get ancestor files for provenance.""" + if projects is None: + projects = _get_project(cfg) + if isinstance(projects, str): + projects = [projects] + datasets = [] + for project in projects: + datasets.extend( + select_metadata(cfg['input_data'].values(), project=project)) + datasets.extend( + select_metadata(cfg['input_data'].values(), dataset=obs_name)) + return [d['filename'] for d in datasets] + + +def _get_model_color(model, lambda_cube): + """Get color of model dependent on climate feedback parameter.""" + clim_sens = lambda_cube.extract(iris.Constraint(dataset=model)).data + if clim_sens < 1.0: + col = COLOR_SMALL_LAMBDA + else: + col = COLOR_LARGE_LAMBDA + return col + + +def _plot_model_point(model, psi_cube, ecs_cube, lambda_cube): + """Plot a single model point for emergent relationship.""" + col = _get_model_color(model, lambda_cube) + style = plot.get_dataset_style(model, 'cox18nature') + AXES.plot( + psi_cube.extract(iris.Constraint(dataset=model)).data, + ecs_cube.extract(iris.Constraint(dataset=model)).data, + linestyle='none', + marker=style['mark'], + markeredgecolor=col, + markerfacecolor=col, + markersize=style['size']) + + +def _get_line_plot_legend(): + """Add legend for line plots.""" + color_obs = plot.get_dataset_style('OBS', 'cox18nature')['color'] + handles = [ + mlines.Line2D([], [], + color=COLOR_SMALL_LAMBDA, + label=r'$\lambda < 1.0$ Wm$^{-2}$K$^{-1}$'), + mlines.Line2D([], [], + color=COLOR_LARGE_LAMBDA, + label=r'$\lambda > 1.0$ Wm$^{-2}$K$^{-1}$'), + mlines.Line2D([], [], + linestyle='none', + marker='o', + markeredgecolor=color_obs, + markerfacecolor=color_obs, + label='Observations'), + ] + return AXES.legend(handles=handles, loc='upper left') + + +def _get_project(cfg): + """Extract project from cfg.""" + input_data = cfg['input_data'].values() + projects = list(group_metadata(input_data, 'project').keys()) + projects = [p for p in projects if 'obs' not in p.lower()] + if len(projects) == 1: + return projects[0] + return projects + + +def _save_fig(cfg, basename, legend=None): + """Save matplotlib figure.""" + path = get_plot_filename(basename, cfg) + if legend is None: + legend = [] + else: + legend = [legend] + FIG.savefig( + path, + additional_artists=legend, + bbox_inches='tight', + orientation='landscape') + logger.info("Wrote %s", path) + AXES.cla() + return path + + +def get_external_cubes(cfg): + """Get external cubes for psi, ECS and lambda.""" + cubes = iris.cube.CubeList() + for filename in ('psi.nc', 'ecs.nc', 'lambda.nc'): + filepath 
= io.get_ancestor_file(cfg, filename) + cube = iris.load_cube(filepath) + cube = cube.extract( + ih.iris_project_constraint(['OBS'], cfg, negate=True)) + cubes.append(cube) + cubes = ih.intersect_dataset_coordinates(cubes) + return (cubes[0], cubes[1], cubes[2]) + + +def get_provenance_record(caption, statistics, plot_type, ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'ancestors': ancestor_files, + 'authors': ['schl_ma'], + 'caption': caption, + 'domains': ['global'], + 'plot_type': plot_type, + 'realms': ['atmos'], + 'references': ['cox18nature'], + 'statistics': statistics, + 'themes': ['EC'], + } + return record + + +def plot_temperature_anomaly(cfg, tas_cubes, lambda_cube, obs_name): + """Plot temperature anomaly versus time.""" + for cube in tas_cubes.values(): + cube.data -= np.mean( + cube.extract( + iris.Constraint(year=lambda cell: 1961 <= cell <= 1990)).data) + + # Save netCDF file and provenance + filename = 'temperature_anomaly_{}'.format(obs_name) + netcdf_path = get_diagnostic_filename(filename, cfg) + io.save_1d_data(tas_cubes, netcdf_path, 'year', TASA_ATTRS) + project = _get_project(cfg) + provenance_record = get_provenance_record( + "Simulated change in global temperature from {} models (coloured " + "lines), compared to the global temperature anomaly from the {} " + "dataset (black dots). The anomalies are relative to a baseline " + "period of 1961-1990.".format(project, obs_name), ['anomaly'], + ['times'], _get_ancestor_files(cfg, obs_name)) + + # Plot + if cfg['write_plots']: + models = lambda_cube.coord('dataset').points + + # Plot lines + for model in models: + cube = tas_cubes[model] + AXES.plot( + cube.coord('year').points, + cube.data, + color=_get_model_color(model, lambda_cube)) + obs_style = plot.get_dataset_style('OBS', 'cox18nature') + obs_cube = tas_cubes[obs_name] + AXES.plot( + obs_cube.coord('year').points, + obs_cube.data, + linestyle='none', + marker='o', + markeredgecolor=obs_style['color'], + markerfacecolor=obs_style['color']) + + # Plot appearance + AXES.set_title('Simulation of global warming record') + AXES.set_xlabel('Year') + AXES.set_ylabel('Temperature anomaly / K') + legend = _get_line_plot_legend() + + # Save plot + provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + + # Write provenance + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + +def plot_psi(cfg, psi_cubes, lambda_cube, obs_name): + """Plot temperature variability metric psi versus time.""" + filename = 'temperature_variability_metric_{}'.format(obs_name) + netcdf_path = get_diagnostic_filename(filename, cfg) + io.save_1d_data(psi_cubes, netcdf_path, 'year', PSI_ATTRS) + project = _get_project(cfg) + provenance_record = get_provenance_record( + "Psi metric of variability versus time, from the {0} models " + "(coloured lines), and the {1} observational data (black circles). " + "The psi values are calculated for windows of width {2} yr, after " + "linear de-trending in each window.
These {2}-yr windows are shown " + "for different end times.".format(project, obs_name, + cfg.get('window_length', 55)), + ['corr', 'var'], ['times'], _get_ancestor_files(cfg, obs_name)) + + # Plot + if cfg['write_plots']: + models = lambda_cube.coord('dataset').points + + # Plot lines + for model in models: + cube = psi_cubes[model] + AXES.plot( + cube.coord('year').points, + cube.data, + color=_get_model_color(model, lambda_cube)) + obs_style = plot.get_dataset_style('OBS', 'cox18nature') + obs_cube = psi_cubes[obs_name] + AXES.plot( + obs_cube.coord('year').points, + obs_cube.data, + linestyle='none', + marker='o', + markeredgecolor=obs_style['color'], + markerfacecolor=obs_style['color']) + + # Plot appearance + AXES.set_title('Metric of variability versus time') + AXES.set_xlabel('Year') + AXES.set_ylabel(r'$\Psi$ / K') + legend = _get_line_plot_legend() + + # Save plot + provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + + # Write provenance + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + +def plot_emergent_relationship(cfg, psi_cube, ecs_cube, lambda_cube, obs_cube): + """Plot emergent relationship.""" + filename = 'emergent_relationship_{}'.format( + obs_cube.attributes['dataset']) + cube = ecs_cube.copy() + cube.add_aux_coord( + iris.coords.AuxCoord(psi_cube.data, **ih.convert_to_iris(PSI_ATTRS)), + 0) + netcdf_path = get_diagnostic_filename(filename, cfg) + io.iris_save(cube, netcdf_path) + provenance_record = get_provenance_record( + "Emergent relationship between ECS and the psi metric. The black dot-" + "dashed line shows the best-fit linear regression across the model " + "ensemble, with the prediction error for the fit given by the black " + "dashed lines. The vertical blue lines show the observational " + "constraint from the {} observations: the mean (dot-dashed line) and " + "the mean plus and minus one standard deviation (dashed lines).". 
+ format(obs_cube.attributes['dataset']), ['mean', 'corr', 'var'], + ['scatter'], _get_ancestor_files(cfg, obs_cube.attributes['dataset'])) + + # Plot + if cfg['write_plots']: + obs_mean = np.mean(obs_cube.data) + obs_std = np.std(obs_cube.data) + + # Calculate regression line + lines = ec.regression_line(psi_cube.data, ecs_cube.data) + logger.info("Found emergent relationship with slope %.2f (r = %.2f)", + lines['slope'], lines['rvalue']) + + # Plot points + for model in psi_cube.coord('dataset').points: + _plot_model_point(model, psi_cube, ecs_cube, lambda_cube) + + # Plot lines + AXES.set_xlim(auto=False) + AXES.set_ylim(auto=False) + AXES.plot( + lines['x'], + lines['y_best_estim'], + color='black', + linestyle='dashdot', + label='Linear regression') + AXES.plot( + lines['x'], + lines['y_minus_err'], + color='black', + linestyle='dashed') + AXES.plot( + lines['x'], lines['y_plus_err'], color='black', linestyle='dashed') + AXES.axvline( + obs_mean, + color='blue', + linestyle='dashdot', + label='Observational constraint') + AXES.axvline(obs_mean - obs_std, color='blue', linestyle='dashed') + AXES.axvline(obs_mean + obs_std, color='blue', linestyle='dashed') + + # Plot appearance + AXES.set_title('Emergent relationship fit') + AXES.set_xlabel(r'$\Psi$ / K') + AXES.set_ylabel('ECS / K') + legend = AXES.legend(loc='upper left') + + # Save plot + provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + + # Write provenance + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + +def plot_pdf(cfg, psi_cube, ecs_cube, obs_cube): + """Plot probability density function of ECS.""" + obs_mean = np.mean(obs_cube.data) + obs_std = np.std(obs_cube.data) + (ecs_lin, ecs_pdf) = ec.gaussian_pdf(psi_cube.data, ecs_cube.data, + obs_mean, obs_std) + + # Provenance + filename = 'pdf_{}'.format(obs_cube.attributes['dataset']) + netcdf_path = get_diagnostic_filename(filename, cfg) + cube = iris.cube.Cube( + ecs_pdf, + var_name='pdf', + long_name='Probability density function', + units='K-1') + cube.add_aux_coord( + iris.coords.AuxCoord(ecs_lin, **ih.convert_to_iris(ECS_ATTRS)), 0) + io.iris_save(cube, netcdf_path) + project = _get_project(cfg) + provenance_record = get_provenance_record( + "The PDF for ECS. The orange histograms show the prior distributions " + "that arise from equal weighting of the {} models in 0.5 K bins.". 
+ format(project), ['mean'], ['other'], + _get_ancestor_files(cfg, obs_cube.attributes['dataset'])) + + # Plot + if cfg['write_plots']: + AXES.plot( + ecs_lin, + ecs_pdf, + color='black', + linewidth=2.0, + label='Emergent constraint') + AXES.hist( + ecs_cube.data, + bins=6, + range=(2.0, 5.0), + density=True, + color='orange', + label='{} models'.format(project)) + + # Plot appearance + AXES.set_title('PDF of emergent constraint') + AXES.set_xlabel('ECS / K') + AXES.set_ylabel('Probability density') + legend = AXES.legend(loc='upper left') + + # Save plot + provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + + # Write provenance + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + +def plot_cdf(cfg, psi_cube, ecs_cube, obs_cube): + """Plot cumulative distribution function of ECS.""" + confidence_level = cfg.get('confidence_level', 0.66) + (ecs_lin, ecs_pdf) = ec.gaussian_pdf(psi_cube.data, ecs_cube.data, + np.mean(obs_cube.data), + np.std(obs_cube.data)) + ecs_cdf = ec.cdf(ecs_lin, ecs_pdf) + + # Provenance + filename = 'cdf_{}'.format(obs_cube.attributes['dataset']) + netcdf_path = get_diagnostic_filename(filename, cfg) + cube = iris.cube.Cube( + ecs_cdf, + var_name='cdf', + long_name='Cumulative distribution function', + units='1') + cube.add_aux_coord( + iris.coords.AuxCoord(ecs_lin, **ih.convert_to_iris(ECS_ATTRS)), 0) + io.iris_save(cube, netcdf_path) + project = _get_project(cfg) + provenance_record = get_provenance_record( + "The CDF for ECS. The horizontal dot-dashed lines show the {}% " + "confidence limits. The orange histograms show the prior " + "distributions that arise from equal weighting of the {} models in " + "0.5 K bins.".format(int(confidence_level * 100), project), ['mean'], + ['other'], _get_ancestor_files(cfg, obs_cube.attributes['dataset'])) + + # Plot + if cfg['write_plots']: + AXES.plot( + ecs_lin, + ecs_cdf, + color='black', + linewidth=2.0, + label='Emergent constraint') + AXES.hist( + ecs_cube.data, + bins=6, + range=(2.0, 5.0), + cumulative=True, + density=True, + color='orange', + label='{} models'.format(project)) + AXES.axhline( + (1.0 - confidence_level) / 2.0, color='black', linestyle='dashdot') + AXES.axhline( + (1.0 + confidence_level) / 2.0, color='black', linestyle='dashdot') + + # Plot appearance + AXES.set_title('CDF of emergent constraint') + AXES.set_xlabel('ECS / K') + AXES.set_ylabel('CDF') + legend = AXES.legend(loc='upper left') + + # Save plot + provenance_record['plot_file'] = _save_fig(cfg, filename, legend) + + # Write provenance + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, provenance_record) + + +def get_ecs_range(cfg, psi_cube, ecs_cube, obs_cube): + """Get constrained ECS range.""" + confidence_level = cfg.get('confidence_level', 0.66) + conf_low = (1.0 - confidence_level) / 2.0 + conf_high = (1.0 + confidence_level) / 2.0 + + # Calculate PDF and CDF + (ecs_lin, ecs_pdf) = ec.gaussian_pdf(psi_cube.data, ecs_cube.data, + np.mean(obs_cube.data), + np.std(obs_cube.data)) + ecs_cdf = ec.cdf(ecs_lin, ecs_pdf) + + # Calculate constrained ECS range + ecs_best = ecs_lin[np.argmax(ecs_pdf)] + ecs_index_range = np.where((ecs_cdf >= conf_low) & + (ecs_cdf <= conf_high))[0] + ecs_range = ecs_lin[ecs_index_range] + ecs_low = min(ecs_range) + ecs_high = max(ecs_range) + return (ecs_best, ecs_low, ecs_high) + + +def main(cfg): + """Run the diagnostic.""" + input_data = ( + select_metadata(cfg['input_data'].values(), short_name='tas') +
+ select_metadata(cfg['input_data'].values(), short_name='tasa')) + if not input_data: + raise ValueError("This diagnostic needs the 'tas' or 'tasa' variable") + + # Get tas data + tas_cubes = {} + tas_obs = [] + for (dataset, [data]) in group_metadata(input_data, 'dataset').items(): + cube = iris.load_cube(data['filename']) + cube = cube.aggregated_by('year', iris.analysis.MEAN) + tas_cubes[dataset] = cube + if data['project'] == 'OBS': + tas_obs.append(dataset) + + # Get time-dependent psi data + psi_cubes = {} + psi_obs = [] + for (dataset, [data]) in group_metadata( + io.netcdf_to_metadata(cfg, pattern='psi_*.nc'), 'dataset').items(): + cube = iris.load_cube(data['filename']) + cube = cube.aggregated_by('year', iris.analysis.MEAN) + psi_cubes[dataset] = cube + if data['project'] == 'OBS': + psi_obs.append(dataset) + + # Get psi, ECS and lambda for models + (psi_cube, ecs_cube, lambda_cube) = get_external_cubes(cfg) + + # Plots + for obs_name in tas_obs: + logger.info("Observation for tas: %s", obs_name) + plot_temperature_anomaly(cfg, tas_cubes, lambda_cube, obs_name) + for obs_name in psi_obs: + logger.info("Observation for psi: %s", obs_name) + plot_psi(cfg, psi_cubes, lambda_cube, obs_name) + obs_cube = psi_cubes[obs_name] + plot_emergent_relationship(cfg, psi_cube, ecs_cube, lambda_cube, + obs_cube) + plot_pdf(cfg, psi_cube, ecs_cube, obs_cube) + plot_cdf(cfg, psi_cube, ecs_cube, obs_cube) + + # Print ECS range + ecs_range = get_ecs_range(cfg, psi_cube, ecs_cube, obs_cube) + logger.info("Observational constraint: Ψ = (%.2f ± %.2f) K", + np.mean(obs_cube.data), np.std(obs_cube.data)) + logger.info( + "Constrained ECS range: (%.2f - %.2f) K with best " + "estimate %.2f K", ecs_range[1], ecs_range[2], ecs_range[0]) + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) + plt.close() diff --git a/esmvaltool/diag_scripts/ensclus/ens_anom.py b/esmvaltool/diag_scripts/ensclus/ens_anom.py new file mode 100644 index 0000000000..a3b664d11a --- /dev/null +++ b/esmvaltool/diag_scripts/ensclus/ens_anom.py @@ -0,0 +1,119 @@ +"""Computation of ensemble anomalies based on a desired value.""" + +import os +import numpy as np +from scipy import stats + +# User-defined packages +from read_netcdf import read_iris, save_n_2d_fields +from sel_season_area import sel_area, sel_season + + +def ens_anom(filenames, dir_output, name_outputs, varname, numens, season, + area, extreme): + """Ensemble anomalies. + + Computation of the ensemble anomalies based on the desired value + from the input variable (it can be the percentile, mean, maximum, standard + deviation or trend) + OUTPUT: NetCDF files of ensemble mean of climatology, selected value and + anomaly maps.
+ """ + print('The name of the output files will be _{0}.txt' + .format(name_outputs)) + print('Number of ensemble members: {0}'.format(numens)) + + outfiles = [] + # Reading the netCDF file of 3Dfield, for all the ensemble members + var_ens = [] + for ens in range(numens): + ifile = filenames[ens] + # print('ENSEMBLE MEMBER %s' %ens) + var, varunits, lat, lon, dates, _ = read_iris(ifile) + + # Convertion from kg m-2 s-1 to mm/day + if varunits == 'kg m-2 s-1': + var = var * 86400 # there are 86400 seconds in a day + varunits = 'mm/day' + + # Selecting a season (DJF,DJFM,NDJFM,JJA) + var_season, _ = sel_season(var, dates, season) + + # Selecting only [latS-latN, lonW-lonE] box region + var_area, lat_area, lon_area = sel_area(lat, lon, var_season, area) + + var_ens.append(var_area) + + if varunits == 'kg m-2 s-1': + print('\nPrecipitation rate units were converted from kg m-2 s-1 ' + 'to mm/day') + + print('The variable is {0} ({1})'.format(varname, varunits)) + print('Original var shape: (time x lat x lon)={0}'.format(var.shape)) + print('var shape after selecting season {0} and area {1}: ' + '(time x lat x lon)={2}'.format(season, area, var_area.shape)) + + if extreme == 'mean': + # Compute the time mean over the entire period, for each ens member + varextreme_ens = [np.nanmean(var_ens[i], axis=0) + for i in range(numens)] + + elif len(extreme.split("_")) == 2: + # Compute the chosen percentile over the period, for each ens member + quant = int(extreme.partition("th")[0]) + varextreme_ens = [np.nanpercentile(var_ens[i], quant, axis=0) + for i in range(numens)] + + elif extreme == 'maximum': + # Compute the maximum value over the period, for each ensemble member + varextreme_ens = [np.nanmax(var_ens[i], axis=0) for i in range(numens)] + + elif extreme == 'std': + # Compute the standard deviation over the period, for each ens member + varextreme_ens = [np.nanstd(var_ens[i], axis=0) for i in range(numens)] + + elif extreme == 'trend': + # Compute the linear trend over the period, for each ensemble member + trendmap = np.empty((var_ens[0].shape[1], var_ens[0].shape[2])) + trendmap_ens = [] + for i in range(numens): + for jla in range(var_ens[0].shape[1]): + for jlo in range(var_ens[0].shape[2]): + slope, _, _, _, _ = \ + stats.linregress(range(var_ens[0].shape[0]), + var_ens[i][:, jla, jlo]) + trendmap[jla, jlo] = slope + trendmap_ens.append(trendmap.copy()) + varextreme_ens = trendmap_ens + + varextreme_ens_np = np.array(varextreme_ens) + print('Anomalies are computed with respect to the {0}'.format(extreme)) + + # Compute and save the anomalies with respect to the ensemble + ens_anomalies = varextreme_ens_np - np.nanmean(varextreme_ens_np, axis=0) + varsave = 'ens_anomalies' + ofile = os.path.join(dir_output, 'ens_anomalies_{0}.nc' + .format(name_outputs)) + # print(ofile) + print('ens_anomalies shape: (numens x lat x lon)={0}' + .format(ens_anomalies.shape)) + save_n_2d_fields(lat_area, lon_area, ens_anomalies, varsave, + varunits, ofile) + outfiles.append(ofile) + # Compute and save the climatology + vartimemean_ens = [np.mean(var_ens[i], axis=0) for i in range(numens)] + ens_climatologies = np.array(vartimemean_ens) + varsave = 'ens_climatologies' + ofile = os.path.join(dir_output, 'ens_climatologies_{0}.nc' + .format(name_outputs)) + save_n_2d_fields(lat_area, lon_area, ens_climatologies, varsave, + varunits, ofile) + outfiles.append(ofile) + ens_extreme = varextreme_ens_np + varsave = 'ens_extreme' + ofile = os.path.join(dir_output, 'ens_extreme_{0}.nc'.format(name_outputs)) + 
save_n_2d_fields(lat_area, lon_area, ens_extreme, varsave, + varunits, ofile) + outfiles.append(ofile) + + return outfiles diff --git a/esmvaltool/diag_scripts/ensclus/ens_eof_kmeans.py b/esmvaltool/diag_scripts/ensclus/ens_eof_kmeans.py new file mode 100644 index 0000000000..2be17195b0 --- /dev/null +++ b/esmvaltool/diag_scripts/ensclus/ens_eof_kmeans.py @@ -0,0 +1,181 @@ +"""Find the most representative ensemble member for each cluster.""" + +import collections +import datetime +import math +import os + +import numpy as np +import pandas as pd +from sklearn.cluster import KMeans + +# User-defined libraries +from eof_tool import eof_computation +from read_netcdf import read_n_2d_fields + + +def ens_eof_kmeans(dir_output, name_outputs, numens, numpcs, perc, numclus): + """Find the most representative ensemble member for each cluster. + + METHODS: + - Empirical Orthogonal Function (EOF) analysis of the input file + - K-means cluster analysis applied to the retained + Principal Components (PCs) + OUTPUT: + Text files with the cluster labels, the most representative + members and the cluster statistics + """ + print('The name of the output files will be _{0}.txt' + .format(name_outputs)) + print('Number of ensemble members: {0}'.format(numens)) + + outfiles = [] + model = name_outputs.split("_")[1] + print('Model: {0}'.format(model)) + # Either perc (cluster analysis is applied on a number of PCs + # such as they explain 'perc' of total variance) or numpcs + # (number of PCs to retain) is set. + # numpcs has priority over perc, ignored if it is set to 0 + if numpcs: + numpcs = int(numpcs) + print('Number of principal components: {0}'.format(numpcs)) + else: + print('Percentage of variance explained: {0}'.format(perc)) + + print('Number of clusters: {0}'.format(numclus)) + + # Reading the netCDF file of N 2Dfields of anomalies, saved by ens_anom.py + ifile = os.path.join(dir_output, 'ens_anomalies_{0}.nc' + .format(name_outputs)) + var, _, lat, _ = read_n_2d_fields(ifile) + print('var dim: (numens x lat x lon)={0}'.format(var.shape)) + + # Compute EOFs (Empirical Orthogonal Functions) + # and PCs (Principal Components) with respect to the ensemble members + print('_________________________________________________________') + print('EOF analysis:') + # -------------------------------------------------------------------- + _, _, _, pcs_unscal0, eofs_unscal0, varfrac = eof_computation(var, lat) + + acc = np.cumsum(varfrac * 100) + if numpcs: + exctperc = acc[numpcs - 1] + else: + # Find how many PCs explain a certain percentage of variance + # (the smallest number of PCs whose cumulative explained + # variance exceeds perc) + numpcs = min(enumerate(acc), key=lambda x: x[1] <= perc)[0] + 1 + print('\nThe number of PCs that explain the percentage closest ' + 'to {0}% of variance (but greater than {0}%) is {1}' + .format(perc, numpcs)) + exctperc = min(enumerate(acc), key=lambda x: x[1] <= perc)[1] + print('(the first {0} PCs explain exactly {1}% of the variance)' + .format(numpcs, "%.2f" % exctperc)) + + # ____________Compute k-means analysis using a subset of PCs + print('_________________________________________________________') + print('k-means analysis using a subset of PCs') + + pcs = pcs_unscal0[:, :numpcs] + + clus = KMeans(n_clusters=numclus, n_init=2000, + init='k-means++', tol=1e-4, + max_iter=1000, random_state=42) + start = datetime.datetime.now() + clus.fit(pcs) + end = datetime.datetime.now() + print('k-means algorithm took %s seconds' % (end - start)) + + centroids = clus.cluster_centers_ # shape---> (numclus,numpcs) + labels = clus.labels_ # shape---> (numens,) + +
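# With n_init=2000, k-means is restarted from 2000 random centroid seeds + # and the lowest-inertia solution is kept; together with the fixed + # random_state this makes the clustering reproducible across runs +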
print('\nClusters are identified for {0} PCs (explained variance {1}%)' + .format(numpcs, "%.2f" % exctperc)) + print('PCs dim: (number of ensemble members, number of PCs)={0}, ' + 'EOF dim: (number of ensemble members, lat, lon)={1}' + .format(pcs_unscal0[:, :numpcs].shape, eofs_unscal0[:numpcs].shape)) + print('Centroid coordinates dim: (number of clusters, number of PCs)={0}, ' + 'labels dim: (number of ensemble members,)={1}\n' + .format(centroids.shape, labels.shape)) + + # ____________Save labels + namef = os.path.join(dir_output, 'labels_{0}.txt'.format(name_outputs)) + np.savetxt(namef, labels, fmt='%d') + outfiles.append(namef) + + # ____________Compute cluster frequencies + clusters = [] + for nclus in range(numclus): + clu = list(np.where(labels == nclus)[0]) + frq = len(clu) * 100 / len(labels) + clusters.append([nclus, frq, clu]) + print('Cluster labels:') + print([clusters[ncl][0] for ncl in range(numclus)]) + print('Cluster frequencies (%):') + print([round(clusters[ncl][1], 3) for ncl in range(numclus)]) + print('Cluster members:') + print([clusters[ncl][2] for ncl in range(numclus)]) + + # ____________Find the most representative ensemble member for each cluster + print('_________________________________________________________') + print('In order to find the most representative ensemble member for each ' + 'cluster\n(which is the closest member to the cluster centroid)') + print('the Euclidean distance between cluster centroids and each ensemble ' + 'member is computed in the PC space') + print('_________________________________________________________') + print('Check: cluster #1 centroid coordinates vector dim {0} should be ' + 'the same as the member #1 PC vector dim {1}\n' + .format(centroids[1, :].shape, pcs[1, :].shape)) + + print('_________________________________________________________') + print('In order to study the spread of each cluster,') + print('the standard deviation of the distances between each member ' + 'in a cluster and the cluster centroid is computed in the PC space') + stat_output = [] + repres = [] + for nclus in range(numclus): + members = clusters[nclus][2] + norm = np.empty([numclus, len(members)]) + for mem, ens in enumerate(members): + normens = centroids[nclus, :] - pcs[ens, :] + norm[nclus, mem] = math.sqrt(sum(normens**2)) + print('the distances between the centroid of cluster {0} and its ' + 'members {1} are:\n{2}' + .format(nclus, members, np.round(norm[nclus], 3))) + print('MINIMUM DISTANCE WITHIN CLUSTER {0} IS {1} --> member #{2}' + .format(nclus, round(norm[nclus].min(), 3), + members[np.where(norm[nclus] == + norm[nclus].min())[0][0]])) + repres.append(members[np.where(norm[nclus] == + norm[nclus].min())[0][0]]) + print('MAXIMUM DISTANCE WITHIN CLUSTER {0} IS {1} --> member #{2}' + .format(nclus, round(norm[nclus].max(), 3), + members[np.where(norm[nclus] == + norm[nclus].max())[0][0]])) + print('INTRA-CLUSTER STANDARD DEVIATION FOR CLUSTER {0} IS {1}\n' + .format(nclus, norm[nclus].std())) + + d_stat = collections.OrderedDict() + d_stat['cluster'] = nclus + d_stat['member'] = members + d_stat['d_to_centroid'] = np.round(norm[nclus], 3) + d_stat['intra-clus_std'] = norm[nclus].std() + d_stat['d_min'] = round(norm[nclus].min(), 3) + d_stat['d_max'] = round(norm[nclus].max(), 3) + d_stat['freq(%)'] = round(clusters[nclus][1], 3) + stat = pd.DataFrame(d_stat) + stat_output.append(stat) + + # ____________Save the most representative ensemble members + namef = os.path.join(dir_output, 'repr_ens_{0}.txt'.format(name_outputs)) +
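# 'repres' holds one integer per cluster: the index of the ensemble + # member closest to that cluster's centroid in PC space +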
np.savetxt(namef, repres, fmt='%i') + outfiles.append(namef) + + stat_output = pd.concat(stat_output, axis=0) + # ____________Save statistics of cluster analysis + namef = os.path.join(dir_output, 'statistics_clustering_{0}.txt' + .format(name_outputs)) + outfiles.append(namef) + with open(namef, 'w') as text_file: + text_file.write(stat_output.__repr__()) + + return outfiles diff --git a/esmvaltool/diag_scripts/ensclus/ens_plots.py b/esmvaltool/diag_scripts/ensclus/ens_plots.py new file mode 100644 index 0000000000..7540875350 --- /dev/null +++ b/esmvaltool/diag_scripts/ensclus/ens_plots.py @@ -0,0 +1,153 @@ +"""Plot the chosen field for each ensemble.""" + +import math +import os + +import matplotlib.pyplot as plt +import numpy as np +import cartopy.crs as ccrs + +# User-defined libraries +from read_netcdf import read_n_2d_fields + + +def ens_plots(dir_output, dir_plot, name_outputs, numclus, + field_to_plot, plot_type, season, area, extreme, numensmax): + """Plot the chosen field for each ensemble.""" + print('Number of clusters: {0}'.format(numclus)) + + varname = name_outputs.split("_")[0] + kind = name_outputs.split("_")[-2] + exp = name_outputs.split("_")[-1] + + # Reading the netCDF file of N 2Dfields of anomalies, saved by ens_anom.py + namef = os.path.join(dir_output, 'ens_anomalies_{0}.nc' + .format(name_outputs)) + vartoplot, varunits, lat, lon = read_n_2d_fields(namef) + print('vartoplot dim: (numens x lat x lon)={0}'.format(vartoplot.shape)) + numens = vartoplot.shape[0] + + # ____________Load labels + namef = os.path.join(dir_output, 'labels_{0}.txt'.format(name_outputs)) + labels = np.loadtxt(namef, dtype=int) + + namef = os.path.join(dir_output, 'repr_ens_{0}.txt'.format(name_outputs)) + reprens = np.loadtxt(namef, dtype=int) + + namef = os.path.join(dir_output, 'legend_{0}.txt'.format(name_outputs)) + legend_info = np.loadtxt(namef, dtype=str) + years = legend_info[:, 6] + legends = legend_info[:, 1] + + vmi = round_down(np.nanpercentile(vartoplot, 0.1)) + vma = round_up(np.nanpercentile(vartoplot, 99.9)) + + if field_to_plot == 'anomalies': + # compute range colorbar for anomalies + if abs(vmi) < abs(vma): + rangecbarmin = -abs(vma) + rangecbarmax = abs(vma) + else: + rangecbarmin = -abs(vmi) + rangecbarmax = abs(vmi) + else: + # compute range colorbar for climatologies + rangecbarmin = vmi + rangecbarmax = vma + + delta = round_down((rangecbarmax - rangecbarmin) / 100) + clevels = np.arange(rangecbarmin, rangecbarmax + delta, delta) + colors = ['b', 'g', 'r', 'c', 'm', 'y', 'DarkOrange', 'grey'] + proj = ccrs.PlateCarree() + if min(lon) < 180. < max(lon): + clon = 180. + else: + clon = 0. + + numens0 = min(numens, numensmax) + nfigs = int(np.ceil(numens / numens0)) + + ypos = int(np.ceil(np.sqrt(numens0 / 2.0))) + xpos = int(np.ceil(numens0 / ypos)) + fsize = int(min(max(4.
/ ypos * 15, 10), 32)) + + namef_list = [] + for ifig in range(nfigs): + fig = plt.figure(figsize=(24, 14)) + for iens in range(ifig * numens0, min((ifig + 1) * numens0, numens)): + axes = plt.subplot( + ypos, xpos, iens + 1 - ifig * numens0, + projection=ccrs.PlateCarree(central_longitude=clon) + ) + axes.set_extent([min(lon), max(lon), min(lat), max(lat)], + crs=ccrs.PlateCarree()) + axes.coastlines("110m") + + # Plot Data + if field_to_plot == 'anomalies': + map_plot = plt.contourf(lon, lat, vartoplot[iens], clevels, + cmap=plt.cm.RdBu_r, + transform=proj, extend='both') + else: + map_plot = plt.contourf(lon, lat, vartoplot[iens], clevels, + transform=proj, extend='both') + + if iens in reprens: + rect = plt.Rectangle((-0.01, -0.01), 1.02, 1.02, fill=False, + transform=axes.transAxes, clip_on=False, + zorder=10) + rect.set_edgecolor(colors[labels[iens]]) + rect.set_linewidth(6.0) + axes.add_artist(rect) + + # Add Title + title_obj = plt.title(iens, fontsize=int(fsize * 1.8), + fontweight='bold', loc='left') + title_obj.set_backgroundcolor(colors[labels[iens]]) + title_obj = plt.title(legends[iens], fontsize=fsize, loc='right') + + cax = plt.axes([0.1, 0.03, 0.8, 0.03]) # horizontal + cbar = plt.colorbar(map_plot, cax=cax, orientation='horizontal') + cbar.ax.tick_params(labelsize=24) + cbar.set_ticks(np.arange(rangecbarmin, + rangecbarmax + delta, delta * 20)) + cbar.ax.set_ylabel(varname + '\n[' + varunits + ']', fontsize=24, + fontweight='bold', rotation='horizontal', + verticalalignment='center') + cbar.ax.yaxis.set_label_position('right') + cbar.ax.yaxis.set_label_coords(1.05, 1.4) + + plt.suptitle(field_to_plot.capitalize() + ' ' + varname + ' ' + + extreme + ' ' + area + ' ' + season + ' ' + + kind + ' ' + exp + ' ' + years[0], + fontsize=40, fontweight='bold') + + top = 0.89 # the top of the subplots of the figure + bottom = 0.12 # the bottom of the subplots of the figure + left = 0.02 # the left side of the subplots of the figure + right = 0.98 # the right side of the subplots of the figure + hspace = 0.36 # height reserved for white space between subplots + wspace = 0.14 # width reserved for blank space between subplots + plt.subplots_adjust(left=left, bottom=bottom, right=right, top=top, + wspace=wspace, hspace=hspace) + + # plot the selected fields + namef = os.path.join(dir_plot, ('{0}_{1}.fig{2}.' + plot_type) + .format(field_to_plot, name_outputs, ifig + 1)) + fig.savefig(namef) # bbox_inches='tight') + print('A ', plot_type, ' figure for the selected fields saved in {0}' + .format(dir_plot)) + namef_list.append(namef) + return namef_list + + +def round_up(x, sig=2): + """Round up to a given number of significant digits.""" + dig = pow(10., sig - int(math.floor(math.log10(abs(x)))) - 1) + return math.ceil(x * dig) / dig + + +def round_down(x, sig=2): + """Round down to a given number of significant digits.""" + dig = pow(10., sig - int(math.floor(math.log10(abs(x)))) - 1) + return math.floor(x * dig) / dig diff --git a/esmvaltool/diag_scripts/ensclus/ensclus.py b/esmvaltool/diag_scripts/ensclus/ensclus.py new file mode 100644 index 0000000000..4ed69e33f4 --- /dev/null +++ b/esmvaltool/diag_scripts/ensclus/ensclus.py @@ -0,0 +1,120 @@ +""" +Ensemble Clustering Diagnostics. 
+ + +Author: Irene Mavilia (ISAC-CNR, Italy) +Copernicus C3S 34a lot 2 (MAGIC) + +Description + Cluster analysis tool based on the k-means algorithm + for ensembles of climate model simulations +Modification history + 20181202-hard_jo: cleanup, style, provenance and finalising + 20181002-arno_en: updating to version2_development (recipe/dataset) + 20170710-mavi_ir: Routines written. +""" + +import os +import logging +import numpy as np +from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic +from esmvaltool.diag_scripts.shared import ProvenanceLogger, sorted_metadata + +# Import user diagnostic routines +from ens_anom import ens_anom +from ens_eof_kmeans import ens_eof_kmeans +from ens_plots import ens_plots + +logger = logging.getLogger(os.path.basename(__file__)) + + +def get_provenance_record(gatt, vatt, ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + caption = ("Ensemble Clustering Diagnostics of extreme {extreme} of " + .format(**gatt) + "variable {long_name} between " + "{start_year} and {end_year} ".format(**vatt)) + print(gatt) + record = { + 'caption': caption, + 'authors': ['hard_jo', 'arno_en', 'mavi_ir'], + 'projects': ['c3s-magic'], + 'references': ['straus07jcli'], + 'plot_types': ['other'], + 'realms': ['atmos'], + 'domains': ['reg'], + 'ancestors': ancestor_files, + } + return record + + +def main(cfg): + """Ensemble Clustering Diagnostics.""" + out_dir = cfg['work_dir'] + write_plots = cfg['write_plots'] + input_data = cfg['input_data'].values() + input_data = sorted_metadata(input_data, sort='recipe_dataset_index') + files_dict = group_metadata(input_data, 'filename', + sort=False) + numens = len(files_dict) + logger.info('numens=%d', numens) + + # Building the name of output files + element = list(files_dict.values())[0][0] + name_outputs = (element['short_name'] + '_' + str(numens) + + 'ens_' + cfg['season'] + '_' + cfg['area'] + + '_' + element['project'] + '_' + element['exp']) + logger.info('The name of the output files will be _%s.txt', + name_outputs) + variable_name = element['short_name'] + max_plot_panels = cfg.get('max_plot_panels', 72) + numpcs = cfg.get('numpcs', 0) + perc = cfg.get('perc', 80) + + filenames_cat = [] + legend_cat = [] + for value in files_dict.values(): + logger.info("Processing file %s", value[0]['filename']) + filenames_cat.append(value[0]['filename']) + leg = (value[0]['project'] + " " + + value[0]['dataset'] + " " + + value[0]['exp'] + " " + + value[0]['mip'] + " " + + value[0]['short_name'] + " " + + value[0]['ensemble'] + " " + + str(value[0]['start_year']) + "-" + + str(value[0]['end_year'])) + legend_cat.append(leg) + logger.info('Processing: %s', leg) + namef = os.path.join(out_dir, 'legend_{0}.txt'.format(name_outputs)) + np.savetxt(namef, legend_cat, fmt='%s') + + # ###################### PRECOMPUTATION ####################### + outfiles = ens_anom(filenames_cat, out_dir, name_outputs, variable_name, + numens, cfg['season'], cfg['area'], cfg['extreme']) + + # ###################### EOF AND K-MEANS ANALYSES ####################### + outfiles2 = ens_eof_kmeans(out_dir, name_outputs, numens, numpcs, + perc, cfg['numclus']) + + outfiles = outfiles + outfiles2 + provenance_record = get_provenance_record( + cfg, list(files_dict.values())[0][0], ancestor_files=filenames_cat) + + # ###################### PLOT AND SAVE FIGURES ########################## + if write_plots: + plotfiles = ens_plots(out_dir, cfg['plot_dir'], name_outputs, + cfg['numclus'], 'anomalies',
cfg['output_file_type'], cfg['season'], + cfg['area'], cfg['extreme'], max_plot_panels) + else: + plotfiles = [] + + for file in outfiles + plotfiles: + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(file, provenance_record) + + logger.info('\n>>>>>>>>>>>> ENDED SUCCESSFULLY!! <<<<<<<<<<<<\n') + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/ensclus/eof_tool.py b/esmvaltool/diag_scripts/ensclus/eof_tool.py new file mode 100644 index 0000000000..068c18dadd --- /dev/null +++ b/esmvaltool/diag_scripts/ensclus/eof_tool.py @@ -0,0 +1,100 @@ +"""Computing EOFs and PCs.""" + +import datetime + +import matplotlib.pyplot as plt +import numpy as np + +import cartopy.crs as ccrs +from eofs.standard import Eof + + +def eof_computation(var, lat): + """Computing the EOFs and PCs. + + EOF analysis of a data array with spatial dimensions that + represent latitude and longitude with weighting. The data array + is dimensioned (ntime, nlat, nlon), and in order for the latitude + weights to be broadcastable to this shape, an extra length-1 + dimension is added to the end. + """ + print('_________________________________________________________') + print('Computing the EOFs and PCs') + weights_array = np.sqrt(np.cos(np.deg2rad(lat)))[:, np.newaxis] + + start = datetime.datetime.now() + solver = Eof(var, weights=weights_array) + end = datetime.datetime.now() + print('EOF computation took %s seconds' % (end - start)) + + # ALL VARIANCE FRACTIONS + varfrac = solver.varianceFraction() + # acc = np.cumsum(varfrac * 100) + + # ---------------------------------------PCs unscaled (case 0 of scaling) + pcs_unscal0 = solver.pcs() + # ---------------------------------------EOFs unscaled (case 0 of scaling) + eofs_unscal0 = solver.eofs() + + # ---------------------------------------PCs scaled (case 1 of scaling) + pcs_scal1 = solver.pcs(pcscaling=1) + + # ---------------------------------------EOFs scaled (case 2 of scaling) + eofs_scal2 = solver.eofs(eofscaling=2) + + return solver, pcs_scal1, eofs_scal2, pcs_unscal0, eofs_unscal0, varfrac + + +def eof_plots(neof, pcs_scal1, eofs_scal2, var, varunits, lat, lon, + tit, numens, varfrac): + """Plot the nth EOF and PC.
+ + Plot the PCs scaled (divided by the square root of their eigenvalues) + in the selected domain. + """ + print('_________________________________________________________') + print('Plotting the EOFs and PCs') + print('Variable: {0} Units: {1}'.format(var, varunits)) + print('Ensemble members: {0}'.format(numens)) + + # ------------------------------------------PCs scaled (case 1 of scaling) + figpc_scal1 = plt.figure(figsize=(24, 14)) + axes = figpc_scal1.gca() + plt.plot(pcs_scal1[:, neof]) + plt.axhline(y=0, color='k', linestyle='--') + tt_pc = '{0} PC{1}: explained variance {2}%\n'\ + .format(tit, neof + 1, "%.2f" % (varfrac[neof] * 100)) + plt.title(tt_pc, fontsize=34, fontweight='bold') + plt.grid(True) + for tickx in axes.xaxis.get_major_ticks(): + tickx.label.set_fontsize(28) + for ticky in axes.yaxis.get_major_ticks(): + ticky.label.set_fontsize(28) + plt.ylabel('PC{0} {1}'.format(neof, varunits), fontsize=28) + plt.xlabel('ensemble members', fontsize=28) + + # Plot the EOF scaled (multiplied by the square-root of their eigenvalues) + # in the selected domain + + # ------------------------------------------EOFs scaled (case 2 of scaling) + + # rangecolorbar=np.arange(-180, 200, 20) + figeof_scal2 = plt.figure(figsize=(14, 14)) + # ax = figeof_scal2.gca() + proj = ccrs.PlateCarree() + axes = plt.axes(projection=proj) + axes.set_global() + axes.coastlines() + axes.gridlines() + fill2 = axes.contourf(lon, lat, eofs_scal2[neof, ...], cmap=plt.cm.RdBu_r, + transform=ccrs.PlateCarree()) + cbar = plt.colorbar(fill2, orientation='horizontal') + # cb.ax.set_position([0.9, 0.1, 0.001, 0.7])#([0.9, 0.1, 0.02, 0.8]) + cbar.set_label(varunits, rotation=0, fontsize=20) + cbar.ax.tick_params(labelsize=20) + tt_eof = '{0}\nEOF{1}: explained variance {2}%\n'\ + .format(tit, neof + 1, "%.2f" % (varfrac[neof] * 100)) + plt.title(tt_eof, fontsize=34, fontweight='bold') + plt.tight_layout() + + return figpc_scal1, figeof_scal2 diff --git a/esmvaltool/diag_scripts/ensclus/read_netcdf.py b/esmvaltool/diag_scripts/ensclus/read_netcdf.py new file mode 100644 index 0000000000..4f8a545549 --- /dev/null +++ b/esmvaltool/diag_scripts/ensclus/read_netcdf.py @@ -0,0 +1,140 @@ +"""Read netCDF file of 3D field.""" + +# Standard packages +import os + +import numpy as np +from netCDF4 import Dataset, num2date + +import iris + + +def read_iris(ifile): + """Read netCDF file of 3D field using iris. + + USAGE: var, var_units, lat, lon, dates, time_units = read_iris(filename) + """ + cube = iris.load_cube(ifile) + variabs = [coord.name() for coord in cube.coords()] + + if 'lat' in variabs: + lat = cube.coord('lat').points + elif 'latitude' in variabs: + lat = cube.coord('latitude').points + if 'lon' in variabs: + lon = cube.coord('lon').points + elif 'longitude' in variabs: + lon = cube.coord('longitude').points + time = cube.coord('time') + time_units = str(cube.coord('time').units) + dates = time.units.num2date(time.points) + var_units = str(cube.units) + var = cube.data + if isinstance(var, np.ma.masked_array): + var = var.filled(fill_value=np.nan) + + return var, var_units, lat, lon, dates, time_units + + +def read_3d_ncfield(ifile): + """Read netCDF file of 3D field.
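+ + The first variable listed in the file is assumed to be the 3D field.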
+ + USAGE: var, var_units, lat, lon, dates, time_units = + read_3d_ncfield(filename) + """ + fileh = Dataset(ifile, mode='r') + variabs = [] + for variab in fileh.variables: + variabs.append(variab) + print('The variables in the nc file are: ', variabs) + + if 'lat' in variabs: + lat = fileh.variables['lat'][:] + elif 'latitude' in variabs: + lat = fileh.variables['latitude'][:] + if 'lon' in variabs: + lon = fileh.variables['lon'][:] + elif 'longitude' in variabs: + lon = fileh.variables['longitude'][:] + time = fileh.variables['time'][:] + time_units = fileh.variables['time'].units + var_units = fileh.variables[variabs[0]].units + var = fileh.variables[variabs[0]][:, :, :] + dates = num2date(time, time_units) + fileh.close() + + return var, var_units, lat, lon, dates, time_units + + +def save_n_2d_fields(lats, lons, variab, varname, varunits, ofile): + """Save var in ofile netCDF file. + + Save a number N of 2D fields [lat x lon]. + """ + try: + os.remove(ofile) # Remove the outputfile + except OSError: + pass + dataset = Dataset(ofile, 'w', format='NETCDF4_CLASSIC') + # print(dataset.file_format) + + num = dataset.createDimension('num', variab.shape[0]) + lat = dataset.createDimension('lat', variab.shape[1]) + lon = dataset.createDimension('lon', variab.shape[2]) + + # Create coordinate variables for 3-dimensions + num = dataset.createVariable('num', np.int32, ('num',)) + lat = dataset.createVariable('lat', np.float32, ('lat',)) + lon = dataset.createVariable('lon', np.float32, ('lon',)) + # Create the actual 3-d variable + var = dataset.createVariable(varname, np.float64, ('num', 'lat', 'lon')) + + # print('variable:', dataset.variables[varname]) + # for varn in dataset.variables.keys(): + # print(varn) + # Variable Attributes + lat.units = 'degree_north' + lon.units = 'degree_east' + var.units = varunits + + num[:] = np.arange(variab.shape[0]) + lat[:] = lats + lon[:] = lons + var[:, :, :] = variab + + dataset.close() + + # ----------------------------------------------------------------------- + print('The {0} 2D fields [num x lat x lon] are saved as \n{1}' + .format(variab.shape[0], ofile)) + print('__________________________________________________________') + # ----------------------------------------------------------------------- + + +def read_n_2d_fields(ifile): + """Read a number N of 2D fields [lat x lon]. + + USAGE: var, var_units, lat, lon = read_n_2d_fields(filename) + """ + fileh = Dataset(ifile, mode='r') + variabs = [] + for variab in fileh.variables: + variabs.append(variab) + # print('The variables in the nc file are: ', variabs) + + # num = fh.variables['num'][:] + if 'lat' in variabs: + lat = fileh.variables['lat'][:] + elif 'latitude' in variabs: + lat = fileh.variables['latitude'][:] + if 'lon' in variabs: + lon = fileh.variables['lon'][:] + elif 'longitude' in variabs: + lon = fileh.variables['longitude'][:] + var = fileh.variables[variabs[3]][:, :, :] + var_units = fileh.variables[variabs[3]].units + # print(fh.variables) + fileh.close() + + return var, var_units, lat, lon diff --git a/esmvaltool/diag_scripts/ensclus/sel_season_area.py b/esmvaltool/diag_scripts/ensclus/sel_season_area.py new file mode 100644 index 0000000000..70cddacece --- /dev/null +++ b/esmvaltool/diag_scripts/ensclus/sel_season_area.py @@ -0,0 +1,128 @@ +"""Selecting a season (DJF,DJFM,NDJFM,JJA).""" + +import numpy as np + + +def sel_season(var, dates, season): + """Selecting a season (DJF,DJFM,NDJFM,JJA).
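+ + For example, with monthly data season='DJF' keeps only December, + January and February time steps, dropping the incomplete first winter + (the leading January and February) and the trailing December.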
+
+ USAGE: var_season, dates_season = sel_season(var, dates, season)
+ """
+ # -----------------------------------------------------------------------
+ # print('Selecting only {0} data'.format(season))
+ dmonth = np.array([date.month for date in dates])
+ if season == 'DJF': # ONLY DEC-JAN-FEB
+ imon = [1, 2]
+ emon = [12]
+ mask = (dmonth == 12) | (dmonth == 1) | (dmonth == 2)
+ elif season == 'DJFM': # ONLY DEC-JAN-FEB-MAR
+ imon = [1, 2, 3]
+ emon = [12]
+ mask = (dmonth == 12) | (dmonth == 1) | (dmonth == 2) | (dmonth == 3)
+ elif season == 'NDJFM': # ONLY NOV-DEC-JAN-FEB-MAR
+ imon = [1, 2, 3]
+ emon = [11, 12]
+ mask = (dmonth == 11) | (dmonth == 12) | (dmonth == 1) |\
+ (dmonth == 2) | (dmonth == 3)
+ elif season == 'JJA': # ONLY JUN-JUL-AUG
+ imon = []
+ emon = []
+ mask = (dmonth == 6) | (dmonth == 7) | (dmonth == 8)
+ else:
+ print('season is not one of the following: DJF, DJFM, NDJFM, JJA')
+ var_season = var[mask, :, :]
+ dates_season = dates[mask]
+
+ dmonth = np.array([date.month for date in dates_season])
+ dyear = np.array([date.year for date in dates_season])
+
+ imask = list(mon not in imon for mon in dmonth) | (dyear != dyear[0])
+ emask = list(mon not in emon for mon in dmonth) | (dyear != dyear[-1])
+
+ var_season = var_season[imask & emask, :, :]
+ dates_season = dates_season[imask & emask]
+
+ return var_season, dates_season
+
+
+# ____________Selecting only [lat_s-lat_n, lon_w-lon_e] box region
+def sel_area(lat, lon, var, area):
+ """Selecting the area of interest.
+
+ USAGE: var_area, lat_area, lon_area = sel_area(lat, lon, var, area)
+ area can be 'EAT', 'PNA', 'NH', 'EU'
+ """
+ if area == 'EAT':
+ # printarea = 'Euro-Atlantic'
+ lat_n = 87.5
+ lat_s = 30.0
+ lon_w = -80.0 # 280
+ lon_e = 40.0 # 40
+ # lat and lon are extracted from the netcdf file, assumed to be 1D
+ # If 0 <= lon <= 360, convert to -180 <= lon <= 180
+ if lon.min() >= 0:
+ lon_new = lon - 180
+ var_roll = np.roll(var, int(len(lon) / 2), axis=2)
+ else:
+ var_roll = var
+ lon_new = lon
+
+ elif area == 'PNA':
+ # printarea = 'Pacific North American'
+ lat_n = 87.5
+ lat_s = 30.0
+ lon_w = 140.0
+ lon_e = 280.0
+ # lat and lon are extracted from the netcdf file, assumed to be 1D
+ # If 0 <= lon <= 360, convert to -180 <= lon <= 180
+ if lon.min() >= 0:
+ lon_new = lon - 180
+ var_roll = np.roll(var, int(len(lon) / 2), axis=2)
+ else:
+ var_roll = var
+ lon_new = lon
+ lon_w = lon_new.min()
+ lon_e = lon_new.max()
+
+ elif area == 'EU':
+ # printarea = 'Europe'
+ lat_n = 72.0
+ lat_s = 27.0
+ lon_w = -22.0
+ lon_e = 45.0
+ # lat and lon are extracted from the netcdf file, assumed to be 1D
+ # If 0 <= lon <= 360, convert to -180 <= lon <= 180
+ if lon.min() >= 0:
+ lon_new = lon - 180
+ var_roll = np.roll(var, int(len(lon) / 2), axis=2)
+ else:
+ var_roll = var
+ lon_new = lon
+
+ # -------------------------------------------------------------------
+ # print('__________________________________________________________')
+ # print('Selecting the area of interest: {0}'.format(printarea))
+ # --------------------------------------------------------------------
+ # -------------------------Selecting only an area
+
+ latidx = (lat >= lat_s) & (lat <= lat_n)
+ lonidx = (lon_new >= lon_w) & (lon_new <= lon_e)
+
+ var_area = var_roll[:, latidx][..., lonidx]
+ # print('Grid dimension of the selected area --->
+ # {0}'.format(var_area[0].shape))
+
+ return var_area, lat[latidx], lon_new[lonidx]
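+
+
+# A hedged end-to-end sketch (variable names are illustrative):
+#   var_djf, dates_djf = sel_season(var, dates, 'DJF')
+#   var_eat, lat_eat, lon_eat = sel_area(lat, lon, var_djf, 'EAT')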
diff --git a/esmvaltool/diag_scripts/examples/correlate.py b/esmvaltool/diag_scripts/examples/correlate.py
new file mode 100644
index 0000000000..fc54375803
--- /dev/null
+++ b/esmvaltool/diag_scripts/examples/correlate.py
@@ -0,0 +1,85 @@
+"""Python example diagnostic."""
+import logging
+import os
+
+import iris
+from iris.analysis import MEAN
+from iris.analysis.stats import pearsonr
+
+from diagnostic import plot_diagnostic
+from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def get_provenance_record(attributes, ancestor_files, plot_type):
+ """Create a provenance record describing the diagnostic data and plot."""
+ caption = ("Correlation of {long_name} between {dataset} and "
+ "{reference_dataset}.".format(**attributes))
+
+ record = {
+ 'caption': caption,
+ 'statistics': ['corr'],
+ 'domains': ['global'],
+ 'plot_type': plot_type,
+ 'authors': [
+ 'ande_bo',
+ ],
+ 'references': [
+ 'acknow_project',
+ ],
+ 'ancestors': ancestor_files,
+ }
+ return record
+
+
+def main(cfg):
+ """Compute the correlation of each input dataset with the reference."""
+ input_data = group_metadata(
+ cfg['input_data'].values(), 'standard_name', sort='dataset')
+
+ for standard_name in input_data:
+ logger.info("Processing variable %s", standard_name)
+ # Load reference dataset
+ for attributes in input_data[standard_name]:
+ if attributes['reference_dataset'] == attributes['dataset']:
+ reference_name = attributes['dataset']
+ logger.info("Using %s as a reference dataset", reference_name)
+ reference_filename = attributes['filename']
+ reference = iris.load_cube(reference_filename)
+ reference = reference.collapsed('time', MEAN)
+ logger.info("Reference cube:\n%s\n%s", reference_filename,
+ reference)
+ break
+ else:
+ raise ValueError("No reference_dataset defined in recipe.")
+
+ # Compute and plot correlation
+ for attributes in input_data[standard_name]:
+ if attributes['dataset'] == reference_name:
+ continue
+ logger.info("Processing dataset %s", attributes['dataset'])
+
+ filename = attributes['filename']
+ dataset = iris.load_cube(filename)
+ kwargs = cfg.get('pearsonr', {})
+ logger.info(
+ "Computing correlation with settings %s between "
+ "reference and cube:\n%s\n%s", kwargs, filename, dataset)
+ dataset = dataset.collapsed('time', MEAN)
+ cube = pearsonr(dataset, reference, **kwargs)
+
+ name = '{}_correlation_with_{}'.format(
+ os.path.splitext(os.path.basename(filename))[0],
+ reference_name)
+ provenance_record = get_provenance_record(
+ attributes,
+ ancestor_files=[reference_filename, filename],
+ plot_type=cfg['plot_type'])
+ plot_diagnostic(cube, name, provenance_record, cfg)
+
+
+if __name__ == '__main__':
+
+ with run_diagnostic() as config:
+ main(config)
diff --git a/esmvaltool/diag_scripts/examples/diagnostic.ncl b/esmvaltool/diag_scripts/examples/diagnostic.ncl
new file mode 100644
index 0000000000..264dd0250d
--- /dev/null
+++ b/esmvaltool/diag_scripts/examples/diagnostic.ncl
@@ -0,0 +1,198 @@
+; #############################################################################
+; EXAMPLE SCRIPT FOR NCL DIAGNOSTICS
+; #############################################################################
+; Author: Mattia Righi (DLR, Germany), Klaus-Dirk Gottschaldt (DLR, Germany)
+; #############################################################################
+;
+; Description
+; Simple script to demonstrate the implementation of an NCL diagnostic.
+; It shows how to select the metadata for specific variable(s) and
+; dataset(s) and how to read the corresponding fields.
+; The input is averaged over time to create a global contour plot.
+; Output to NetCDF files is optional (controlled via diag_script_info;
+; see the example below).
+; Time averaging and plotting is done in separate routines.
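+; For example (illustrative path), adding
+;   ncdf: $work_dir/diagnostic.nc
+; to the script settings in the recipe makes this diagnostic also write
+; the time-averaged field to netCDF.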
+; All information necessary for plotting is added as attributes to the
+; variable containing the data. Attribute names "diag_script", "var"
+; and those starting with "res_" are reserved for that purpose.
+; The data variable (incl. metadata) can be passed to the plot script
+; directly (most efficient), or via a netCDF file (might be useful for
+; non-NCL routines).
+; A workstation ("wks" = file to plot to) will by default be generated
+; in the plotting routine. However, the default will be overridden, if
+; a "graphic" object is passed to the plotting routine. The latter
+; option is intended to be used in special cases, e.g. with
+; res_gsnDraw = False and res_gsnFrame = False.
+;
+; Required diag_script_info attributes
+; projection: map projection to use
+; colormap: color table to use
+; ncdf: full path and name for *.nc output
+;
+; Optional diag_script_info attributes
+; MyDiag_title: string for annotation
+;
+; Caveats
+;
+; Modification history
+; 20181107-A_righ_ma: revised and extended to include new metadata handling
+; 20151027-A_laue_ax: moved call to 'write_references' to the beginning
+; of the code
+; 20150325-A_laue_ax: modified reference tags used for acknowledgements
+; (projects, observations, etc)
+; 20140312-A_righ_ma: adapted to new time_operations structure
+; 20130618-A_gott_kl: adjusted to r738 of
+; https://svn.dlr.de/ESM-Diagnostic/sources/trunk
+; 20121130-A_gott_kl: written
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl" ; load metadata
+
+load "$diag_scripts/shared/statistics.ncl" ; load statistics functions
+
+load "$diag_scripts/shared/plot/style.ncl" ; load plot style functions
+load "$diag_scripts/shared/plot/contour_maps.ncl" ; load plot function
+
+begin
+
+ ; DIAG_SCRIPT is a global variable defined in interface.ncl
+ enter_msg(DIAG_SCRIPT, "")
+
+ ; Shortcuts to current variable and field
+ ; For multiple variables, use different indices of variable_info
+ var0 = variable_info[0]@short_name
+ info0 = select_metadata_by_name(input_file_info, var0)
+
+ ; The number of variables is the size of the variable_info list
+ n_variables = ListCount(variable_info)
+
+end
+
+
+begin
+
+ ; The metadata for this diagnostic are defined in the input_file_info list
+ ; in interface.ncl for all variables and datasets: use the convenience
+ ; function to extract the metadata for a specific variable or dataset
+
+ ; Retrieve metadata items by attribute (short_name and field)
+ atts = True
+ atts@short_name = var0
+ info = select_metadata_by_atts(input_file_info, atts)
+ delete(atts)
+
+ ; Simple version using only the variable's short name (works for most cases)
+ info_simple = select_metadata_by_name(input_file_info, var0)
+
+ ; The number of datasets for var0 is the size of the returned list
+ n_datasets = ListCount(info)
+
+ ; Read data for the first dataset in the list
+ ; Loop over all n_datasets info items to read all datasets (sketch below)
+ dataset = read_data(info[0])
+ printVarSummary(dataset)
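+
+ ; A hedged sketch of reading all datasets in a loop (not executed here):
+ ; do idat = 0, n_datasets - 1
+ ;   data = read_data(info[idat])
+ ; end do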
+
+ ; Read fx-variable for the second dataset in the list
+ ; fx-variables are specified in the recipe, the field is returned as a list
+ ; where also auxiliary coordinates are stored (if available).
+ ; If no fx-variable is found, a missing value is returned
+ fx_var = read_fx_data(info[1], "sftlf")
+ printVarSummary(fx_var)
+
+ ; Retrieve metadata item for the reference dataset
+ atts = True
+ atts@short_name = var0
+ atts@dataset = variable_info[0]@reference_dataset
+ info_ref = select_metadata_by_atts(input_file_info, atts)
+ delete(atts)
+
+ ; Read data for the reference dataset
+ reference = read_data(info_ref[0])
+ printVarSummary(reference)
+
+ ; Calculate time-average of the reference dataset
+ ; A function in esmvaltool/diag_scripts/shared/statistics.ncl is used
+ timeavg = time_operations(reference, -1, -1, "average", "annualclim", True)
+
+ ; Set annotations
+ annots = project_style(info_ref, diag_script_info, "annots")
+
+ ; Plot resources
+ ; Add to timeavg, as attributes with prefix "res_".
+ ; Provide attributes only that differ from the defaults given at
+ ; http://www.ncl.ucar.edu/Document/Graphics/Resources/list_alpha_res.shtml
+ timeavg@res_gsnMaximize = True ; Use full page for the plot
+ timeavg@res_cnFillOn = True ; Color plot desired
+ timeavg@res_cnLineLabelsOn = False ; Contour lines
+ timeavg@res_cnLinesOn = False
+ ; Colors
+ ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml
+ timeavg@res_cnFillPalette = diag_script_info@colormap ; Set colormap
+ ; Annotation
+ timeavg@res_tiMainString = "NCL example diagnostic"
+ timeavg@res_gsnLeftString = annots
+ timeavg@res_gsnRightString = variable_info[0]@long_name + " in " + \
+ variable_info[0]@units
+ ; Specified in ./nml/cfg_MyDiag/cfg_MyDiag.ncl
+ timeavg@res_mpProjection = diag_script_info@projection
+ ; Set explicit contour levels
+ if(isatt(diag_script_info, "explicit_cn_levels")) then
+ timeavg@res_cnLevelSelectionMode = "ExplicitLevels"
+ timeavg@res_cnLevels = diag_script_info@explicit_cn_levels
+ end if
+
+ ; Other Metadata: diag_script, var
+ ; Add to timeavg, as attributes without prefix
+ if(isatt(timeavg, "diag_script")) then ; Add to existing entries
+ temp = timeavg@diag_script
+ delete(timeavg@diag_script)
+ timeavg@diag_script = array_append_record(temp, (/DIAG_SCRIPT/), 0)
+ delete(temp)
+ else ; Add as new attribute
+ timeavg@diag_script = (/DIAG_SCRIPT/)
+ end if
+ timeavg@var = var0 ; Overwrite existing entry
+ if(isatt(variable_info, "long_name")) then
+ timeavg@var_long_name = variable_info[0]@long_name
+ end if
+ if(isatt(variable_info, "units")) then
+ timeavg@var_units = variable_info[0]@units
+ end if
+
+ ; Optional output to netCDF
+ ; (controlled by diag_script_info)
+ if(isatt(diag_script_info, "ncdf")) then
+ timeavg@ncdf = diag_script_info@ncdf
+ ; Function in ~/interface_scripts/auxiliary.ncl
+ ncdf_outfile = ncdf_write(timeavg, diag_script_info@ncdf)
+ end if
+
+ ; Create the plot
+ ; There are two options to pass data to the plotting routine: directly
+ ; or via a netCDF file. All information needed to create a graphics file
+ ; is passed with the data. It is possible to provide "dummy_for_wks" and
+ ; "dummy_for_var" as arguments in the call.
+ ; You may invoke a wks here, and/or provide the variable name (see example
+ ; in next section). The routine "contour_map" is a function in
+ ; ~/diag_scripts/shared/plot/aux_plotting.ncl
+ ;
+ ; Option 1: pass data directly
+ ; -> may only work with NCL (i.e.
ok here) + ; Dummies trigger defaults + map = contour_map("dummy_for_wks", timeavg, "dummy_for_var") + ; Option 2: pass data via netCDF file + ; -> useful for non-NCL routines & metrics + ; map = contour_map("dummy_for_wks", ncdf_outfile,"dummy_for_var") + + ; Call provenance logger + log_provenance(ncdf_outfile, \ + map@outfile, \ + "Mean of variable: " + var0, \ + "mean", \ + "global", \ + "geo", \ + (/"righ_ma", "gott_kl"/), \ + (/"acknow_author"/), \ + metadata_att_as_array(info0, "filename")) + +end diff --git a/esmvaltool/diag_scripts/examples/diagnostic.py b/esmvaltool/diag_scripts/examples/diagnostic.py index a6247d475c..d49939b3df 100644 --- a/esmvaltool/diag_scripts/examples/diagnostic.py +++ b/esmvaltool/diag_scripts/examples/diagnostic.py @@ -7,11 +7,63 @@ from esmvaltool.diag_scripts.shared import (group_metadata, run_diagnostic, select_metadata, sorted_metadata) +from esmvaltool.diag_scripts.shared._base import ( + ProvenanceLogger, get_diagnostic_filename, get_plot_filename) from esmvaltool.diag_scripts.shared.plot import quickplot logger = logging.getLogger(os.path.basename(__file__)) +def get_provenance_record(attributes, ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + caption = ("Average {long_name} between {start_year} and {end_year} " + "according to {dataset}.".format(**attributes)) + + record = { + 'caption': caption, + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_type': 'zonal', + 'authors': [ + 'ande_bo', + 'righ_ma', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': ancestor_files, + } + return record + + +def compute_diagnostic(filename): + """Compute an example diagnostic.""" + logger.debug("Loading %s", filename) + cube = iris.load_cube(filename) + + logger.debug("Running example computation") + return cube.collapsed('time', iris.analysis.MEAN) + + +def plot_diagnostic(cube, basename, provenance_record, cfg): + """Create diagnostic data and plot it.""" + diagnostic_file = get_diagnostic_filename(basename, cfg) + + logger.info("Saving analysis results to %s", diagnostic_file) + iris.save(cube, target=diagnostic_file) + + if cfg['write_plots'] and cfg.get('quickplot'): + plot_file = get_plot_filename(basename, cfg) + logger.info("Plotting analysis results to %s", plot_file) + provenance_record['plot_file'] = plot_file + quickplot(cube, filename=plot_file, **cfg['quickplot']) + + logger.info("Recording provenance of %s:\n%s", diagnostic_file, + pformat(provenance_record)) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(diagnostic_file, provenance_record) + + def main(cfg): """Compute the time average for each input dataset.""" # Get a description of the preprocessed data that we will use as input. 
@@ -28,38 +80,23 @@ def main(cfg): grouped_input_data = group_metadata( input_data, 'standard_name', sort='dataset') - logger.info("Example of how to group and sort input data by standard_name:" - "\n%s", pformat(grouped_input_data)) + logger.info( + "Example of how to group and sort input data by standard_name:" + "\n%s", pformat(grouped_input_data)) # Example of how to loop over variables/datasets in alphabetical order for standard_name in grouped_input_data: logger.info("Processing variable %s", standard_name) for attributes in grouped_input_data[standard_name]: logger.info("Processing dataset %s", attributes['dataset']) - - filename = attributes['filename'] - logger.debug("Loading %s", filename) - cube = iris.load_cube(filename) - - logger.debug("Running example computation") - cube = cube.collapsed('time', iris.analysis.MEAN) - - name = os.path.splitext(os.path.basename(filename))[0] + '_mean' - if cfg['write_netcdf']: - path = os.path.join( - cfg['work_dir'], - name + '.nc', - ) - logger.debug("Saving analysis results to %s", path) - iris.save(cube, target=path) - - if cfg['write_plots'] and cfg.get('quickplot'): - path = os.path.join( - cfg['plot_dir'], - name + '.' + cfg['output_file_type'], - ) - logger.debug("Plotting analysis results to %s", path) - quickplot(cube, filename=path, **cfg['quickplot']) + input_file = attributes['filename'] + cube = compute_diagnostic(input_file) + + output_basename = os.path.splitext( + os.path.basename(input_file))[0] + '_mean' + provenance_record = get_provenance_record( + attributes, ancestor_files=[input_file]) + plot_diagnostic(cube, output_basename, provenance_record, cfg) if __name__ == '__main__': diff --git a/esmvaltool/diag_scripts/examples/diagnostic.r b/esmvaltool/diag_scripts/examples/diagnostic.r new file mode 100644 index 0000000000..ffcc8dd70e --- /dev/null +++ b/esmvaltool/diag_scripts/examples/diagnostic.r @@ -0,0 +1,63 @@ +# ############################################################################# +# diagnostic.r +# Authors: E. 
Arnone (ISAC-CNR, Italy)
+# #############################################################################
+# Description
+# Example of an ESMValTool diagnostic written in R
+#
+# Required
+#
+# Optional
+#
+# Caveats
+#
+# Modification history
+# 20180620-A_arnone_e: written for v2.0
+#
+# ############################################################################
+library(tools)
+library(yaml)
+ # get path to script and source subroutines (if needed)
+diag_scripts_dir <- Sys.getenv("diag_scripts")
+# source paste0(diag_scripts_dir,"/subroutine.r")
+print(file.path("source ", diag_scripts_dir, "subroutine.r"))
+ # read settings and metadata files (assuming one variable only)
+args <- commandArgs(trailingOnly = TRUE)
+settings <- yaml::read_yaml(args[1])
+for (myname in names(settings)) {
+ temp <- get(myname, settings); assign(myname, temp)
+}
+metadata <- yaml::read_yaml(settings$input_files)
+# get the names of the climofiles for the first variable and the list
+# associated with the first climofile
+climofiles <- names(metadata)
+climolist <- get(climofiles[1], metadata)
+ # get diagnostic name from metadata file
+diag_base <- climolist$diagnostic
+print(paste0(diag_base, ": starting routine"))
+ # create work and plot directories if they do not exist
+print(paste0(diag_base, ": creating work and plot directories"))
+dir.create(work_dir, recursive = T, showWarnings = F)
+dir.create(plot_dir, recursive = T, showWarnings = F)
+ # extract metadata
+models_name <- unname(sapply(metadata, "[[", "dataset"))
+reference_model <- unname(sapply(metadata, "[[", "reference_dataset"))[1]
+models_start_year <- unname(sapply(metadata, "[[", "start_year"))
+models_end_year <- unname(sapply(metadata, "[[", "end_year"))
+models_experiment <- unname(sapply(metadata, "[[", "exp"))
+models_ensemble <- unname(sapply(metadata, "[[", "ensemble"))
+ ## Loop through input models
+for (model_idx in c(1:(length(models_name)))) {
+ # Setup parameters and path
+ model <- models_name[model_idx]
+ year1 <- models_start_year[model_idx]
+ year2 <- models_end_year[model_idx]
+ infile <- climofiles[model_idx]
+ model_exp <- models_experiment[model_idx]
+ model_ens <- models_ensemble[model_idx]
+ print(paste0(diag_base, ": working on file ", infile))
+ print(paste0(diag_base, ": calling diagnostic with the following parameters"))
+ print(paste(model, model_exp, model_ens, year1, year2))
+ ## Call actual diagnostic
+ print(paste0(diag_base, ": I am your R diagnostic"))
+}
diff --git a/esmvaltool/diag_scripts/examples/diagnostic_pr.ncl b/esmvaltool/diag_scripts/examples/diagnostic_pr.ncl
deleted file mode 100644
index 8fddaf98b4..0000000000
--- a/esmvaltool/diag_scripts/examples/diagnostic_pr.ncl
+++ /dev/null
@@ -1,224 +0,0 @@
-; #############################################################################
-; SANDBOX FOR IMPLEMENTING NEW DIAGNOSTICS
-; #############################################################################
-; Author: Klaus-Dirk Gottschaldt (DLR, Germany)
-; ESMVal project
-; #############################################################################
-;
-; Description
-; This script is a driver for averaging the input from the first dataset
-; over time and creating a global contour plot.
-; Output to NetCDF files is optional (controlled via diag_script_info)
-; Time averaging and plotting is done in separate routines.
-; All information necessary for plotting is added as attributes to the
-; variable containing the data. Attribute names "diag_script", "var"
-; and those starting with "res_" are reserved for that purpose.
-; The data variable (incl. metadata) can be passed to the plot script -; directly (most efficient), or via a netCDF file (might be useful for -; non-NCL routines). -; A workstation ("wks" = file to plot to) will by default be generated -; in the plotting routine. However, the default will be overridden, if -; a "graphic" object is passed to the plotting routine. The latter -; option is intended to be used in special cases, e.g. with -; res_gsnDraw = False and res_gsnFrame = False. -; The script has been tested with input from ./variable_defs/MyVar.ncl -; -; Required diag_script_info attributes (diagnostics specific) -; projection: map projection to use -; netCDF_file: full path and name for *.nc output -; -; Required variable_info attributes (variable specific) -; MyDiag_title: string for annotation -; -; Caveats -; -; Modification history -; 20151027-A_laue_ax: moved call to 'write_references' to the beginning -; of the code -; 20150325-A_laue_ax: modified reference tags used for acknowledgements -; (projects, observations, etc) -; 20140312-A_righ_ma: adapted to new time_operations structure -; 20130618-A_gott_kl: adjusted to r738 of -; https://svn.dlr.de/ESM-Diagnostic/sources/trunk -; 20121130-A_gott_kl: written -; -; ############################################################################# - -load "interface_scripts/interface.ncl" - -load "./diag_scripts/shared/statistics.ncl" - -load "./diag_scripts/shared/plot/style.ncl" -load "./diag_scripts/shared/plot/contour_maps.ncl" - -begin - - ; Fetch general parameters, set in recipe_example_ncl.yml, passed via - ; environment variables by python code - enter_msg(diag_script, "") - - ; 'diag_script' and 'variables' are fetched from the - ; above 'ncl.interface' file - log_info("++++++++++++++++++++++++++++++++++++++++++") - log_info(diag_script + " (var: " + variables(0) + ")") - log_info("++++++++++++++++++++++++++++++++++++++++++") - - ; 'datasets', 'variables' and 'field_types' are fetched from the - ; above 'ncl.interface' file - dim_MOD = dimsizes(dataset_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - - ; References Information - write_references(diag_script, \ ; script name - "A_gott_kl", \ ; authors - "A_eval_ma", \ ; contributors - "D_0000", \ ; diag_references - "", \ ; obs_references - (/"P_dlrveu", "P_esmval"/)) ; proj_references - -end - -begin - - ; Get data and average time - imod = 0 ; get data from first dataset - log_debug("processing " + dataset_info@dataset(imod)) - - ; See ./interface_scripts/data_handling.ncl - A0 = read_data(imod, var0, field_type0) - - ; Check dimensions - dims = getvardims(A0) - if (dimsizes(dims) .lt. 2) then - error_msg("f", diag_script, "", dimsizes(dims) + " dimensions, " + \ - "need 2 or 3") - end if - idx = ind(dims .eq. "lat") - if (ismissing(idx)) then - error_msg("f", diag_script, "", "no lat dimension") - end if - idx = ind(dims .eq. "lon") - if (ismissing(idx)) then - error_msg("f", diag_script, "", "no lon dimension") - end if - - ; Average over time (function in ./diag_scripts/shared/statistics.ncl) - data10 = time_operations(A0, -1, -1, "average", "annualclim", True) - ; Make data 2D - data1 = data10 - - ; Open file for plot - ; If you really need to do this in the diagnostic sript, please see - ; http://www.ncl.ucar.edu/Document/Graphics/Interfaces/gsn_open_wks.shtml - ; and ~/diag_scripts/shared/plot/contour_maps.ncl (look for "wks") on how - ; to do this. 
However, wks should be defined in the plotting routine - ; whenever possible. - - ; Style dependent annotation - ; Retrieve unique strings describing the data. - ; Function in ./diag_scripts/shared/plot/style.ncl - annots = project_style(diag_script_info, "annots") - - ; Plot resources - ; Add to data1, as attributes with prefix "res_". - ; Provide attributes only that differ from the defaults given at - ; http://www.ncl.ucar.edu/Document/Graphics/Resources/list_alpha_res.shtml - data1@res_gsnMaximize = True ; Use full page for the plot - data1@res_cnFillOn = True ; Color plot desired - data1@res_cnLineLabelsOn = False ; Contour lines - data1@res_cnLinesOn = False - ; Colors - ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml - data1@res_cnFillPalette = diag_script_info@colormap ; Set colormap - ; Annotation - data1@res_tiMainString = "Test diag" ; Main title - data1@res_gsnLeftString = annots(imod) - data1@res_gsnRightString = variable_info@long_name + " in " + \ - variable_info@units - ; Specified in ./nml/cfg_MyDiag/cfg_MyDiag.ncl - data1@res_mpProjection = diag_script_info@projection - ; Set explicit contour levels - if(isatt(diag_script_info, "explicit_cn_levels")) then - data1@res_cnLevelSelectionMode = "ExplicitLevels" - data1@res_cnLevels = diag_script_info@explicit_cn_levels - end if - - ; Other Metadata: diag_script, var - ; Add to data1, as attributes without prefix - if(isatt(data1, "diag_script")) then ; Add to existing entries - temp = data1@diag_script - delete(data1@diag_script) - data1@diag_script = array_append_record(temp, (/diag_script/), 0) - delete(temp) - else ; Add as new attribute - data1@diag_script = (/diag_script/) - end if - data1@var = var0 ; Overwrite existing entry - if(isatt(variable_info, "long_name")) then - data1@var_long_name = variable_info@long_name - end if - if(isatt(variable_info, "units")) then - data1@var_units = variable_info@units - end if - - ; Optional output to netCDF - ; (controlled by diag_script_info) - if(isatt(diag_script_info, "ncdf")) then - data1@ncdf = diag_script_info@ncdf - ; Function in ~/interface_scripts/auxiliary.ncl - ncdf_outfile = ncdf_write(data1, diag_script_info@ncdf) - end if - - ; Create the plot - ; There are two options to pass data to the plotting routine: directly - ; or via a netCDF file. All information needed to create a graphics file - ; are passed with the data. It is possible to provide "dummy_for_wks" and - ; "dummy_for_var" as arguments in the call. - ; You may invoke a wks here, and/or provide the variable name (see example - ; in next section). The routine "contour_map" is a function in - ; ~/diag_scripts/shared/plot/aux_plotting.ncl - ; - ; Option 1: pass data directly - ; -> may only work with NCL (i.e. ok here) - ; Dummies trigger defaults - map = contour_map("dummy_for_wks", data1, "dummy_for_var") - ; Option 2: pass data via netCDF file - ; -> useful for non-NCL routines & metrics - ; map = contour_map("dummy_for_wks", ncdf_outfile,"dummy_for_var") - - ; Separate output to a graphics file, Panelling - ; Plotting routines by default create a graphics output file. - ; The default may be overridden by specifying res_gsnDraw=False (and - ; possibly res_gsnFrame=False). - ; Then file output must be done explicitely here, e.g. with the - ; "panelling" routine (see ./diag_scripts/shared/plot/aux_plotting.ncl). - ; The separation between plotting and output might be useful, when - ; multiple calls to plotting routine(s) (e.g. in a loop) are needed to - ; create a (set of) plot(s). 
- ; This option may (currently) only work within NCL, because the plotting - ; routines return only a pointer within the current wks. Thus plots need - ; to be collected in an existing wks, like pages in a book. Variable - ; "pres" may be used to override panelling defaults. - ; -> NOTE: The "panelling" routine does need a valid wks!!!!! - ; To test this option, please disable everything in the "Create the plot" - ; section and enable the following lines. - ; data1@res_gsnDraw = False ; Do not draw yet - ; data1@res_gsnFrame = False ; Don't advance frame - ; function in aux_plotting.ncl - ; wks = get_wks("dummy_for_wks", diag_script, var0) - ; nmap = 3 - ; maps = new(nmap,graphic) ; collect individual "map"s in a graphic array - ; do i = 0,nmap-1 ; this stupid loop creates 3 times the same plot - ; maps(i) = contour_map(wks, data1,var0) - ; end do - ; pres = True ; needed to override panelling defaults - ; pres@gsnPanelLabelBar = False ; no general label bar desired here - ; number of vertical and horizontal plots per page: nmap, 1 - ; outfile = panelling(wks, maps, nmap, 1, pres) - ; log_info(" Wrote " + outfile) - - leave_msg(diag_script, "") - -end diff --git a/esmvaltool/diag_scripts/examples/diagnostic_ta.ncl b/esmvaltool/diag_scripts/examples/diagnostic_ta.ncl deleted file mode 100644 index b48c6f9fd7..0000000000 --- a/esmvaltool/diag_scripts/examples/diagnostic_ta.ncl +++ /dev/null @@ -1,227 +0,0 @@ -; ############################################################################# -; SANDBOX FOR IMPLEMENTING NEW DIAGNOSTICS -; ############################################################################# -; Author: Klaus-Dirk Gottschaldt (DLR, Germany) -; ESMVal project -; ############################################################################# -; -; Description -; This script is a driver for averaging the input from the first dataset -; over time and creating a global contour plot. -; Output to NetCDF files is optional (controlled via diag_script_info) -; Time averaging and plotting is done in separate routines. -; All information necessary for plotting is added as attributes to the -; variable containing the data. Attribute names "diag_script", "var" -; and those starting with "res_" are reserved for that purpose. -; The data variable (incl. metadata) can be passed to the plot script -; directly (most efficient), or via a netCDF file (might be useful for -; non-NCL routines). -; A workstation ("wks" = file to plot to) will by default be generated -; in the plotting routine. However, the default will be overridden, if -; a "graphic" object is passed to the plotting routine. The latter -; option is intended to be used in special cases, e.g. with -; res_gsnDraw = False and res_gsnFrame = False. 
-; The script has been tested with input from ./variable_defs/MyVar.ncl -; -; Required diag_script_info attributes (diagnostics specific) -; projection: map projection to use -; netCDF_file: full path and name for *.nc output -; -; Required variable_info attributes (variable specific) -; MyDiag_title: string for annotation -; -; Caveats -; -; Modification history -; 20151027-A_laue_ax: moved call to 'write_references' to the beginning -; of the code -; 20150325-A_laue_ax: modified reference tags used for acknowledgements -; (projects, observations, etc) -; 20140312-A_righ_ma: adapted to new time_operations structure -; 20130618-A_gott_kl: adjusted to r738 of -; https://svn.dlr.de/ESM-Diagnostic/sources/trunk -; 20121130-A_gott_kl: written -; -; ############################################################################# - -load "interface_scripts/interface.ncl" - -load "./diag_scripts/shared/statistics.ncl" - -load "./diag_scripts/shared/plot/style.ncl" -load "./diag_scripts/shared/plot/contour_maps.ncl" - - -begin - - ; Fetch general parameters, set in recipe_example_ncl.yml, passed via - ; environment variables by python code - enter_msg(diag_script, "") - - ; 'diag_script' and 'variables' are fetched from the - ; above 'ncl.interface' file - log_info("++++++++++++++++++++++++++++++++++++++++++") - log_info(diag_script + " (var: " + variables(0) + ")") - log_info("++++++++++++++++++++++++++++++++++++++++++") - - ; 'datasets', 'variables' and 'field_types' are fetched from the - ; above 'ncl.interface' file - dim_MOD = dimsizes(dataset_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - - ; References Information - write_references(diag_script, \ ; script name - "A_gott_kl", \ ; authors - "A_eval_ma", \ ; contributors - "D_0000", \ ; diag_references - "", \ ; obs_references - (/"P_dlrveu", "P_esmval"/)) ; proj_references - -end - -begin - - ; Get data and average time - imod = 0 ; get data from first dataset - log_debug("processing " + dataset_info@dataset(imod)) - - ; See ./interface_scripts/data_handling.ncl - A0 = read_data(imod, var0, field_type0) - - ; Get fx variable (land/sea mask, not used, just an example) - AM = read_fx_data(imod, "sftlf") - - ; Check dimensions - dims = getvardims(A0) - if (dimsizes(dims) .lt. 2) then - error_msg("f", diag_script, "", dimsizes(dims) + " dimensions, " + \ - "need 2 or 3") - end if - idx = ind(dims .eq. "lat") - if (ismissing(idx)) then - error_msg("f", diag_script, "", "no lat dimension") - end if - idx = ind(dims .eq. "lon") - if (ismissing(idx)) then - error_msg("f", diag_script, "", "no lon dimension") - end if - - ; Average over time (function in ./diag_scripts/shared/statistics.ncl) - data1 = time_operations(A0, -1, -1, "average", "annualclim", True) - - ; Open file for plot - - ; If you really need to do this in the diagnostic sript, please see - ; http://www.ncl.ucar.edu/Document/Graphics/Interfaces/gsn_open_wks.shtml - ; and ~/diag_scripts/shared/plot/contour_maps.ncl (look for "wks") on how - ; to do this. However, wks should be defined in the plotting routine - ; whenever possible. - - ; Style dependent annotation - ; Retrieve unique strings describing the data. - ; Function in ./diag_scripts/shared/plot/style.ncl - annots = project_style(diag_script_info, "annots") - - ; Plot resources - ; Add to data1, as attributes with prefix "res_". 
- ; Provide attributes only that differ from the defaults given at - ; http://www.ncl.ucar.edu/Document/Graphics/Resources/list_alpha_res.shtml - data1@res_gsnMaximize = True ; Use full page for the plot - data1@res_cnFillOn = True ; Color plot desired - data1@res_cnLineLabelsOn = False ; Contour lines - data1@res_cnLinesOn = False - ; Colors - ; http://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml - data1@res_cnFillPalette = diag_script_info@colormap ; Set colormap - ; Annotation - data1@res_tiMainString = "Test diag" ; Main title - data1@res_gsnLeftString = annots(imod) - data1@res_gsnRightString = variable_info@long_name + " in " + \ - variable_info@units - ; Specified in ./nml/cfg_MyDiag/cfg_MyDiag.ncl - data1@res_mpProjection = diag_script_info@projection - ; Set explicit contour levels - if(isatt(diag_script_info, "explicit_cn_levels")) then - data1@res_cnLevelSelectionMode = "ExplicitLevels" - data1@res_cnLevels = diag_script_info@explicit_cn_levels - end if - - ; Other Metadata: diag_script, var - ; Add to data1, as attributes without prefix - if(isatt(data1, "diag_script")) then ; Add to existing entries - temp = data1@diag_script - delete(data1@diag_script) - data1@diag_script = array_append_record(temp, (/diag_script/), 0) - delete(temp) - else ; Add as new attribute - data1@diag_script = (/diag_script/) - end if - data1@var = var0 ; Overwrite existing entry - if(isatt(variable_info, "long_name")) then - data1@var_long_name = variable_info@long_name - end if - if(isatt(variable_info, "units")) then - data1@var_units = variable_info@units - end if - - ; Optional output to netCDF - ; (controlled by diag_script_info) - if(isatt(diag_script_info, "ncdf")) then - data1@ncdf = diag_script_info@ncdf - ; Function in ~/interface_scripts/auxiliary.ncl - ncdf_outfile = ncdf_write(data1, diag_script_info@ncdf) - end if - - ; Create the plot - ; There are two options to pass data to the plotting routine: directly - ; or via a netCDF file. All information needed to create a graphics file - ; are passed with the data. It is possible to provide "dummy_for_wks" and - ; "dummy_for_var" as arguments in the call. - ; You may invoke a wks here, and/or provide the variable name (see example - ; in next section). The routine "contour_map" is a function in - ; ~/diag_scripts/shared/plot/aux_plotting.ncl - ; - ; Option 1: pass data directly - ; -> may only work with NCL (i.e. ok here) - ; Dummies trigger defaults - map = contour_map("dummy_for_wks", data1, "dummy_for_var") - ; Option 2: pass data via netCDF file - ; -> useful for non-NCL routines & metrics - ; map = contour_map("dummy_for_wks", ncdf_outfile,"dummy_for_var") - - ; Separate output to a graphics file, Panelling - ; Plotting routines by default create a graphics output file. - ; The default may be overridden by specifying res_gsnDraw=False (and - ; possibly res_gsnFrame=False). - ; Then file output must be done explicitely here, e.g. with the - ; "panelling" routine (see ./diag_scripts/shared/plot/aux_plotting.ncl). - ; The separation between plotting and output might be useful, when - ; multiple calls to plotting routine(s) (e.g. in a loop) are needed to - ; create a (set of) plot(s). - ; This option may (currently) only work within NCL, because the plotting - ; routines return only a pointer within the current wks. Thus plots need - ; to be collected in an existing wks, like pages in a book. Variable - ; "pres" may be used to override panelling defaults. - ; -> NOTE: The "panelling" routine does need a valid wks!!!!! 
- ; To test this option, please disable everything in the "Create the plot"
- ; section and enable the following lines.
- ; data1@res_gsnDraw = False ; Do not draw yet
- ; data1@res_gsnFrame = False ; Don't advance frame
- ; function in aux_plotting.ncl
- ; wks = get_wks("dummy_for_wks", diag_script, var0)
- ; nmap = 3
- ; maps = new(nmap,graphic) ; collect individual "map"s in a graphic array
- ; do i = 0,nmap-1 ; this stupid loop creates 3 times the same plot
- ; maps(i) = contour_map(wks, data1,var0)
- ; end do
- ; pres = True ; needed to override panelling defaults
- ; pres@gsnPanelLabelBar = False ; no general label bar desired here
- ; number of vertical and horizontal plots per page: nmap, 1
- ; outfile = panelling(wks, maps, nmap, 1, pres)
- ; log_info(" Wrote " + outfile)
-
- leave_msg(diag_script, "")
-
-end
diff --git a/esmvaltool/diag_scripts/examples/my_little_diagnostic.py b/esmvaltool/diag_scripts/examples/my_little_diagnostic.py
new file mode 100644
index 0000000000..9fdbf372c5
--- /dev/null
+++ b/esmvaltool/diag_scripts/examples/my_little_diagnostic.py
@@ -0,0 +1,136 @@
+"""
+Look at this module for guidance on how to write your own.
+
+Read the README_PERSONAL_DIAGNOSTIC file associated with this example;
+
+Module for personal diagnostics (example).
+Internal imports from esmvaltool work e.g.:
+
+from esmvaltool.preprocessor import regrid
+from esmvaltool.diag_scripts.shared.supermeans import get_supermean
+
+Pipe output through logger;
+
+Please consult the documentation for help with esmvaltool's functionalities
+and best coding practices.
+"""
+# place your module imports here:
+
+# operating system manipulations (e.g. path constructions)
+import os
+
+# to manipulate iris cubes
+import iris
+# to plot the time series
+import matplotlib.pyplot as plt
+
+# import internal esmvaltool modules here
+from esmvaltool.diag_scripts.shared import group_metadata, run_diagnostic
+from esmvaltool.preprocessor import average_region
+
+
+def _plot_time_series(cfg, cube, dataset):
+ """
+ Example of personal diagnostic plotting function.
+
+ Arguments:
+ cfg - nested dictionary of metadata
+ cube - the cube to plot
+ dataset - name of the dataset to plot
+
+ Returns:
+ string; makes some time-series plots
+
+ Note: this function is private; remove the '_'
+ so you can make it public.
+ """
+ # custom local paths for e.g. plots are supported -
+ # here is an example
+ # root_dir = '/group_workspaces/jasmin2/cmip6_prep/'  # edit as per need
+ # out_path = 'esmvaltool_users/valeriu/'  # edit as per need
+ # local_path = os.path.join(root_dir, out_path)
+ # but one can use the already defined esmvaltool output paths
+ local_path = cfg['plot_dir']
+
+ # do the plotting dance
+ plt.plot(cube.data, label=dataset)
+ plt.xlabel('Time (months)')
+ plt.ylabel('Area average')
+ plt.title('Time series at (ground level - first level)')
+ plt.tight_layout()
+ plt.grid()
+ plt.legend()
+ png_name = 'Time_series_' + dataset + '.png'
+ plt.savefig(os.path.join(local_path, png_name))
+ plt.close()
+
+ # no need to brag :)
+ return 'I made some plots!'
+
+
+def run_my_diagnostic(cfg):
+ """
+ Simple example of a diagnostic.
+
+ This is a basic (and rather esoteric) diagnostic that firstly
+ loads the needed model data as iris cubes, performs a difference between
+ values at ground level and first vertical level, then squares the
+ result.
+
+ Before plotting, we grab the squared result (note that all these
+ operations are on cubes) and apply an area average on it.
This is a useful example of how to use + standard esmvaltool-preprocessor functionality within a diagnostic, and + especially after a certain (custom) diagnostic has been run and the user + needs to perform an operation that is already part of the preprocessor + standard library of functions. + + The user will implement their own (custom) diagnostics, but this + example shows that once the preprocessor has finished a whole lot of + user-specific metrics can be computed as part of the diagnostic, + and then plotted in various manners. + + Arguments: + cfg - nested dictionary of metadata + + Returns: + string; runs the user diagnostic + + """ + # assemble the data dictionary keyed by dataset name + # this makes use of the handy group_metadata function that + # orders the data by 'dataset'; the resulting dictionary is + # keyed on datasets e.g. dict = {'MPI-ESM-LR': [var1, var2...]} + # where var1, var2 are dicts holding all needed information per variable + my_files_dict = group_metadata(cfg['input_data'].values(), 'dataset') + + # iterate over key(dataset) and values(list of vars) + for key, value in my_files_dict.items(): + # load the cube from data files only + # using a single variable here so just grab the first (and only) + # list element + cube = iris.load_cube(value[0]['filename']) + + # the first data analysis bit: simple cube difference: + # perform a difference between ground and first levels + diff_cube = cube[:, 0, :, :] - cube[:, 1, :, :] + # square the difference'd cube just for fun + squared_cube = diff_cube ** 2. + + # the second data analysis bit (slightly more advanced): + # compute an area average over the squared cube + # to apply the area average use a preprocessor function + # rather than writing your own function + area_avg_cube = average_region(squared_cube, 'latitude', 'longitude') + + # finalize your analysis by plotting a time series of the + # diffed, squared and area averaged cube; call the plot function: + _plot_time_series(cfg, area_avg_cube, key) + + # that's it, we're done! + return 'I am done with my first ESMValTool diagnostic!' + + +if __name__ == '__main__': + # always use run_diagnostic() to get the config (the preprocessor + # nested dictionary holding all the needed information) + with run_diagnostic() as config: + # list here the functions that need to run + run_my_diagnostic(config) diff --git a/esmvaltool/diag_scripts/extreme_events/cfg_climdex.R b/esmvaltool/diag_scripts/extreme_events/cfg_climdex.R new file mode 100755 index 0000000000..c6b7b6dae9 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/cfg_climdex.R @@ -0,0 +1,94 @@ +# ############################################################################# +# climdex_df.r +# +# Authors: Christian W. Mohr (CICERO, Norway) +# +######################################################################################################### +# Description +# This script provides a data frame of all the possible indices (core and user-defined), +# which can be used with the climdex.pcic.ncdf R-package. The data frame created serves as an index +# to find which indices have been processed, and what title and labels the plots should have. 
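+#
+# A hedged sketch of the intended lookup (the index name is illustrative):
+#   sel <- idx_df[idx_df$idx_etccdi_time == "tn90pETCCDI_yr", ]
+#   sel$name  # "Annual Warm Nights (TN90p)", e.g. for the plot title
+#   sel$unit  # "Exceedance rate, %", e.g. for the y-axis label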
+#
+# Modification history
+# 20180725-A_cwmohr: Created
+#########################################################################################################
+
+idx_df <- data.frame(
+ idx_etccdi = c(
+ "altcdd", "altcsdi", "altcwd", "altwsdi", "cdd", "csdi", "cwd",
+ "dtr", "dtr", "fd", "gsl", "id", "prcptot", "r10mm", "r1mm",
+ "r20mm", "r95p", "r99p", "rx1day", "rx1day", "rx5day", "rx5day",
+ "sdii", "su", "tn10p", "tn10p", "tn90p", "tn90p", "tnn", "tnn",
+ "tnx", "tnx", "tr", "tx10p", "tx10p", "tx90p", "tx90p", "txn",
+ "txn", "txx", "txx", "wsdi"
+ ),
+ time = c(
+ "yr", "yr", "yr", "yr", "yr", "yr", "yr", "mon", "yr", "yr",
+ "yr", "yr", "yr", "yr", "yr", "yr", "yr", "yr", "mon", "yr",
+ "mon", "yr", "yr", "yr", "mon", "yr", "mon", "yr", "mon", "yr",
+ "mon", "yr", "yr", "mon", "yr", "mon", "yr", "mon", "yr", "mon",
+ "yr", "yr"
+ ),
+ unit = c(
+ "days", "days", "days", "days", "days", "days", "days",
+ "deg C", "deg C", "days", "days", "days",
+ "mm", "days", "days", "days", "mm", "mm", "mm", "mm", "mm", "mm", "mm/day",
+ "days", "Exceedance rate, %", "Exceedance rate, %",
+ "Exceedance rate, %", "Exceedance rate, %",
+ "deg C", "deg C", "deg C", "deg C", "days",
+ "Exceedance rate, %", "Exceedance rate, %",
+ "Exceedance rate, %", "Exceedance rate, %",
+ "deg C", "deg C", "deg C", "deg C", "days"
+ ),
+ name = c(
+ "Consecutive Dry Days per Year (altCDD)",
+ "Cold Spell Duration Index Spanning Years (altCSDI)",
+ "Consecutive Wet Days per Year (altCWD)",
+ "Warm Spell Duration Index Spanning Years (altWSDI)",
+ "Consecutive Dry Days (CDD)", "Cold Spell Duration Index (CSDI)",
+ "Consecutive Wet Days (CWD)",
+ "Monthly Diurnal Temperature Range (DTR)",
+ "Annual Diurnal Temperature Range (DTR)", "Frost Days (FD)",
+ "Growing Season Length (GSL)", "Icing Days (ID)",
+ "Annual Total Wet-Day Precipitation (PRCPTOT)",
+ "Heavy Precipitation Days (R10)", "Precipitation Days (R1)",
+ "Very Heavy Precipitation Days (R20)",
+ "Very Wet Days (R95p)", "Extremely Wet Days (R99p)",
+ "Monthly Max 1-day Precipitation (RX1day)",
+ "Annual Max 1-day Precipitation (RX1day)",
+ "Monthly Max 5-day Precipitation (RX5day)",
+ "Annual Max 5-day Precipitation (RX5day)",
+ "Simple Daily Intensity Index (SDII)", "Summer Days (SD)",
+ "Monthly Cold Nights (TN10p)", "Annual Cold Nights (TN10p)",
+ "Monthly Warm Nights (TN90p)", "Annual Warm Nights (TN90p)",
+ "Monthly Minimum Tmin (TNn)", "Annual Minimum Tmin (TNn)",
+ "Monthly Maximum Tmin (TNx)", "Annual Maximum Tmin (TNx)",
+ "Tropical Nights (TR)", "Monthly Cool Days (TX10p)",
+ "Annual Cool Days (TX10p)",
+ "Monthly Warm Days (TX90p)", "Annual Warm Days (TX90p)",
+ "Monthly Minimum Tmax (TXn)", "Annual Minimum Tmax (TXn)",
+ "Monthly Maximum Tmax (TXx)", "Annual Maximum Tmax (TXx)",
+ "Warm Spell Duration Index (WSDI)"
+ ),
+ stringsAsFactors = FALSE
+)
+
+idx_df$idx_etccdi_time <- paste(idx_df$idx_etccdi, "ETCCDI_",
+ idx_df$time, sep = "")
+
+# Unfortunately, expressions cannot be added to data frames.
+# These expressions are required for the time series.
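+# idx_ylab below follows the same row order as idx_df, so a positional
+# lookup works (a sketch; the index name is illustrative):
+#   ylab <- idx_ylab[match("tn90pETCCDI_yr", idx_df$idx_etccdi_time)]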
+idx_ylab <- c(expression( + "days", "days", "days", "days", "days", "days", "days", + paste(degree, "C"), paste(degree, "C"), "days", "days", "days", + "mm", "days", "days", "days", "mm", "mm", "mm", + "mm", "mm", "mm", "mm day^-1", + "days", "Exceedance rate, %", "Exceedance rate, %", + "Exceedance rate, %", "Exceedance rate, %", + paste(degree, "C"), paste(degree, "C"), + paste(degree, "C"), paste(degree, "C"), + "days", "Exceedance rate, %", "Exceedance rate, %", + "Exceedance rate, %", "Exceedance rate, %", + paste(degree, "C"), paste(degree, "C"), + paste(degree, "C"), paste(degree, "C"), "days" +)) diff --git a/esmvaltool/diag_scripts/extreme_events/cfg_extreme.R b/esmvaltool/diag_scripts/extreme_events/cfg_extreme.R new file mode 100644 index 0000000000..03bedd6554 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/cfg_extreme.R @@ -0,0 +1,42 @@ +# These are default values, loaded before the recipe is read +regrid_dataset <- NA +base_range <- NA +analysis_range <- NA +climdex_parallel <- 4 +mip_name <- "cmip" +ts_col_list <- c("dodgerblue2", "darkgreen", + "firebrick2", "darkorchid", "aquamarine3") +ts_png_width <- 640 +ts_png_height <- 480 +ts_png_units <- "px" +ts_png_pointsize <- 12 +ts_png_bg <- "white" +ts_lty_list <- c(1, 4, 2, 3, 5) +ts_lwd_list <- c(2, 2, 2, 2, 2) +ts_data <- TRUE +normalize <- FALSE +timeseries_idx <- c( + "tn10pETCCDI_yr", "tn90pETCCDI_yr", + "tx10pETCCDI_yr", "tx90pETCCDI_yr" +) +gleckler_idx <- c( + "tn10pETCCDI_yr", "tn90pETCCDI_yr", + "tx10pETCCDI_yr", "tx90pETCCDI_yr" +) + +ts_plt <- TRUE +glc_plt <- TRUE +glc_arr <- FALSE +gl_mar_par <- c(7, 4, 3, 11) +gl_png_res <- 480 +gl_png_units <- "px" +gl_png_pointsize <- 14 +gl_png_bg <- "white" +gl_rmsespacer <- 0.01 +gl_scaling_factor <- 1.0 +gl_text_scaling_factor <- 1.0 +gl_xscale_spacer_rmse <- 1.5 +gl_xscale_spacer_rmsestd <- 4.5 +gl_symb_scaling_factor <- 1.5 +gl_symb_yshift <- 2.5 +gl_text_symb_scaling_factor <- 0.6 diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CHANGELOG b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CHANGELOG new file mode 100644 index 0000000000..6ec5d5cee8 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CHANGELOG @@ -0,0 +1,7 @@ +0.5-4: Updated maintainer and license and added users and contributors guides. + +0.5-3: Fix issue with data in rotated pole projection where attempting to run the code produces the error "squared eccentricity < 0". + +0.5-2: Fix problems when only tavg supplied. + +0.5-1: Initial release version. diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CONTRIBUTING.rst b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CONTRIBUTING.rst new file mode 100644 index 0000000000..0aadc75919 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/CONTRIBUTING.rst @@ -0,0 +1,171 @@ +Contributing to the climdex.pcic.ncdf R package +========================================== + +Getting Started +--------------- + +- Create a `Github account`_. +- Fork the repository on Github at https://github.com/pacificclimate/climdex.pcic.ncdf. +- Work on the code (see the `next section`_) +- Send us a `pull request`_. + +.. _Github account: https://github.com/signup/free +.. _pull request: https://help.github.com/articles/using-pull-requests/ +.. _next section: #how-to-set-up-a-development-environment + +How to set up a development environment +--------------------------------------- + +You don't need much to get started for development. 
You'll need to have installed:
+
+- R (ensure that all of the "Depends", "Imports", and "Suggests" packages are also installed)
+- Any C++ build environment supported by the `CRAN package checking`_
+- git
+- your text editor of choice
+
+That's it!
+
+Once you have the required software installed, create a local clone of the repository.
+::
+ $ git clone https://github.com/[your_user]/climdex.pcic.ncdf.git
+
+Build the docs (which builds the auto-generated NAMESPACE file needed to build). See `below <#how-to-build-the-docs>`_.
+
+Then make sure that everything builds out of the box
+::
+ $ R CMD build climdex.pcic.ncdf/
+
+.. _CRAN package checking: http://cran.r-project.org/web/checks/check_flavors.html
+
+How to run the tests
+--------------------
+
+Running the tests can be done with one command:
+::
+ james@basalt ~/code/git $ R CMD check climdex.pcic.ncdf/
+
+You'll see a bunch of package building spew that has nothing to do with the tests. But towards the end, you see something like this:
+::
+ * checking for unstated dependencies in tests ... OK
+ * checking tests ...
+ Running ‘bootstrap.R’
+ Running ‘test_basic_file_funcs.R’
+ Running ‘test_var_meta.R’
+ OK
+
+Bug reports
+-----------
+
+If there are problems with our package or bugs in the code, please let us know! We welcome bug reports. To submit one:
+
+- `Create a new issue`_ on our GitHub page.
+- Tag/label the issue as a bug
+- Leave it unassigned
+
+Then please follow these guidelines for writing your report:
+
+- Please describe in as much detail as possible
+- Include a complete description of:
+
+ - Exactly what you did (i.e. "steps to reproduce")
+ - What you expected to happen
+ - What actually happened
+
+- Include *all* output from the terminal.
+- Run R's ``sessionInfo()`` function and include the full output.
+
+I cannot stress enough how important it is to contrast what you expected to happen with what actually happened. When executing the code does not produce the *advertised* result, there is a bug in the package. When the code does not produce the result that you *wished* it had, this is *not* a bug. We receive far too many reports in the latter category.
+
+.. _Create a new issue: https://github.com/pacificclimate/climdex.pcic.ncdf/issues/new
+
+.. _build-the-docs:
+
+How to build the docs
+---------------------
+
+The package documentation is inline in the code. All of the manual pages are built by using ``roxygen2``. Make sure that you have ``roxygen2`` installed and loaded:
+::
+ james@basalt ~/code/git/climdex.pcic.ncdf $ R
+
+ R version 3.0.3 (2014-03-06) -- "Warm Puppy"
+ Copyright (C) 2014 The R Foundation for Statistical Computing
+ Platform: x86_64-pc-linux-gnu (64-bit)
+
+ R is free software and comes with ABSOLUTELY NO WARRANTY.
+ You are welcome to redistribute it under certain conditions.
+ Type 'license()' or 'licence()' for distribution details.
+
+ Natural language support but running in an English locale
+
+ R is a collaborative project with many contributors.
+ Type 'contributors()' for more information and
+ 'citation()' on how to cite R or R packages in publications.
+
+ Type 'demo()' for some demos, 'help()' for on-line help, or
+ 'help.start()' for an HTML browser interface to help.
+ Type 'q()' to quit R.
+
+ > library(roxygen2)
+
+Then call ``roxygenize()`` to build the docs.
+::
+ > roxygenize()
+ First time using roxygen2 4.0. Upgrading automatically...
+
+ Loading required package: PCICt
+ Loading required package: ncdf4
+ Loading required package: climdex.pcic
+ Loading required package: ncdf4.helpers
+ Loading required package: snow
+ Loading required package: udunits2
+ Loading required package: functional
+ Loading required package: proj4
+ Writing NAMESPACE
+ Writing climdex.pcic.ncdf.Rd
+ Writing create.climdex.cmip5.filenames.Rd
+ Writing get.climdex.variable.list.Rd
+ Writing get.climdex.functions.Rd
+ Writing get.climdex.variable.metadata.Rd
+ Writing create.ncdf.output.files.Rd
+ Writing compute.climdex.indices.Rd
+ Writing flatten.dims.Rd
+ Writing get.data.Rd
+ Writing get.northern.hemisphere.booleans.Rd
+ Writing get.quantiles.object.Rd
+ Writing compute.indices.for.stripe.Rd
+ Writing get.thresholds.chunk.Rd
+ Writing write.climdex.results.Rd
+ Writing get.quantiles.for.stripe.Rd
+ Writing create.thresholds.file.Rd
+ Writing get.var.file.idx.Rd
+ Writing create.file.metadata.Rd
+ Writing get.thresholds.metadata.Rd
+ Writing create.thresholds.from.file.Rd
+ Writing thresholds.open.Rd
+ Writing thresholds.close.Rd
+ Writing create.indices.from.files.Rd
+
+
+Submitting pull requests
+------------------------
+
+We would love help from the greater climate community in developing the package and we welcome contributions to the climdex.pcic.ncdf package.
+
+- Please write tests for any functionality that you may add.
+- Please modify tests for any functionality that you change.
+- In short, please make sure that all of the tests pass.
+
+After you are *positive* that everything is completely tested with a passing test suite, we would love to see your pull request. If you are not familiar with the process, please follow GitHub's help page for submitting a `pull request`_.
+
+Don't code? No problem!
+-----------------------
+
+Even if you don't program for a living there are plenty of ways to help. Not only is the code open and collaborative, but so is the documentation and issue tracking. Anyone can help with these. If you can't program, consider helping with the following:
+
+- If the documentation doesn't answer your questions, it probably doesn't answer many people's questions. Help us all out and write something that does.
+- Take a look through the outstanding `"help wanted" issues`_, and see if you know any of the answers.
+- If there are `open bug reports`_, see if you can reproduce the problem and verify that it exists. Having bug reports validated and/or clarified by multiple parties is extremely valuable.
+- Tell us your story. If ``climdex.pcic.ncdf`` has helped your project to better understand climate extremes, we would love to hear about it. Write a blog post and/or send an e-mail to the `package maintainer`_.
+
+.. _"help wanted" issues: https://github.com/pacificclimate/climdex.pcic.ncdf/labels/help%20wanted
+.. _open bug reports: https://github.com/pacificclimate/climdex.pcic.ncdf/labels/bug
+.. _package maintainer: mailto:hiebert@uvic.ca
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/COPYING b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/COPYING
new file mode 100644
index 0000000000..94a9ed024d
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/COPYING
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+ + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. 
+ + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. 
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/DESCRIPTION b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/DESCRIPTION
new file mode 100644
index 0000000000..85ad0b5fc6
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/DESCRIPTION
@@ -0,0 +1,26 @@
+Package: climdex.pcic.ncdf
+Version: 0.5-4
+Date: 2014-11-03
+Title: Functions to compute CLIMDEX indices over a NetCDF grid
+Author: David Bronaugh for the Pacific Climate Impacts
+    Consortium (PCIC)
+Maintainer: James Hiebert <hiebert@uvic.ca>
+Depends:
+    R (>= 3.0),
+    PCICt (>= 0.5-4)
+Imports:
+    ncdf4 (>= 1.10),
+    climdex.pcic (>= 1.1-1),
+    ncdf4.helpers (>= 0.3-3),
+    snow (>= 0.3-13),
+    udunits2 (>= 0.6),
+    functional (>= 0.4),
+    proj4 (>= 1.0-8)
+Suggests:
+    RUnit
+Description: This package contains functions which can be used to compute
+    CLIMDEX indices using NetCDF input files, writing to NetCDF output files.
+    Code allows for parallel computation of indices using either a SOCK or MPI
+    cluster.
+License: GPL-3
+URL: http://www.r-project.org
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/R/ncdf.R b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/R/ncdf.R
new file mode 100644
index 0000000000..88a5f48433
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/R/ncdf.R
@@ -0,0 +1,1469 @@
+# nolint start
+#' climdex.pcic.ncdf, a package to calculate Climdex indices from NetCDF files.
+#'
+#' This package implements code to facilitate computation of Climdex indices
+#' from NetCDF input files.
+#'
+#' The Climdex climate extremes indices have historically been calculated using
+#' Fortran code. This has a number of problems:\itemize{
+#' \item{Difficult to test}
+#' \item{Difficult to modify (for instance, to add NetCDF file I/O)}
+#' \item{Difficult to parallelize}
+#' }
+#' The \code{climdex.pcic} package provides an easy interface to efficient
+#' computation of Climdex indices. This package is complementary to it, providing
+#' easy access to functions to compute indices in parallel, using NetCDF files as
+#' input and output. It implements chunked processing of input files to keep memory
+#' usage reasonable; it implements parallel computation using the \code{snow}
+#' library; and it includes a test suite to verify correctness of the implementation.
+#' Furthermore, the package has a modular design, allowing for easy adaptation to
+#' changing or custom requirements.
+#'
+#' Users of this package should pay particular attention to the
+#' \code{\link{create.indices.from.files}} function, which computes Climdex indices
+#' given NetCDF input files; and \code{\link{create.thresholds.from.file}}, which
+#' computes thresholds for use with threshold-based indices given NetCDF input files.
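+#'
+#' As a rough sketch of the overall workflow, an invocation might look like the
+#' following. The argument names and values here are illustrative assumptions
+#' only (the file name, author data, and base range are borrowed from examples
+#' elsewhere in this file); see the documentation of
+#' \code{\link{create.indices.from.files}} for the authoritative argument list.
+#' \dontrun{
+#' author.data <- list(institution="Looney Bin", institution_id="LBC")
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc")
+#' create.indices.from.files(input.files, "/foo", input.files[1], author.data,
+#'                           base.range=c(1981, 1990))
+#' }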
+#' Many of the other functions exposed by the package are intended to provide for
+#' extensibility, but are unlikely to be routinely used by users of this package.
+#'
+#' @name climdex.pcic.ncdf
+#' @aliases climdex.pcic.ncdf-package
+#' @docType package
+#' @seealso \code{\link{create.indices.from.files}}, \code{\link{create.thresholds.from.file}}
+#' @references \url{http://etccdi.pacificclimate.org/list_27_indices.shtml}
+#'
+#' Karl, T.R., N. Nicholls, and A. Ghazi, 1999: CLIVAR/GCOS/WMO workshop on
+#' indices and indicators for climate extremes: Workshop summary. Climatic
+#' Change, 42, 3-7.
+#'
+#' Peterson, T.C., and Coauthors: Report on the Activities of the Working Group
+#' on Climate Change Detection and Related Rapporteurs 1998-2001. WMO, Rep.
+#' WCDMP-47, WMO-TD 1071, Geneva, Switzerland, 143pp.
+#'
+#' Zhang, X., 2005: Avoiding inhomogeneity in percentile-based indices of
+#' temperature extremes. Journal of Climate 18.11 (2005):1641-1651.
+#' @keywords climate ts
+#' @importClassesFrom climdex.pcic climdexInput
+#' @import snow PCICt
+NULL
+
+## Parallel lapply across 'x', running remote.func, and filtering with local.filter.func.
+## Processing is incremental, not batch, to improve parallel throughput and reduce memory consumption.
+parLapplyLBFiltered <- function(cl, x, remote.func, ..., local.filter.func=NULL) {
+  snow::checkCluster(cl)
+  cluster.size <- length(cl)
+  num.tasks <- length(x)
+  if(num.tasks == 0)
+    return(list())
+  if(cluster.size == 0)
+    stop("The impossible happened; cluster size = 0")
+
+  data.to.return <- vector("list", num.tasks)
+
+  submit.job <- function(cluster.id, task.id) {
+    snow::sendCall(cl[[cluster.id]], remote.func, args=c(x[task.id], list(...)), tag=task.id)
+  }
+
+  ## Fire off as many initial jobs as there are workers (or tasks, if fewer).
+  for(i in 1:min(cluster.size, num.tasks))
+    submit.job(i, i)
+
+  next.task <- min(cluster.size, num.tasks)
+
+  ## Collect results as they arrive, feeding each finished worker a new job.
+  for(i in 1:num.tasks) {
+    d <- snow::recvOneResult(cl)
+    next.task <- next.task + 1
+
+    ## Feed the finished node another task if we have one.
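+    ## (d comes from snow::recvOneResult(), which returns the result of
+    ## whichever worker finished first as a list with $value, $node and $tag;
+    ## handing the next task to d$node balances load on demand rather than
+    ## pre-partitioning work, and indexing data.to.return by d$tag keeps the
+    ## results in the input order of 'x' regardless of completion order.)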
+ if(next.task <= num.tasks) + submit.job(d$node, next.task) + + if(!is.null(local.filter.func)) + data.to.return[d$tag] <- list(local.filter.func(d$value, x[[d$tag]])) + else + data.to.return[d$tag] <- list(d$value) + + rm(d) + } + + ## Return data when complete + return(data.to.return) +} + +put.history.att <- function(f, v, definemode=FALSE) { + history.string <- paste("Created by climdex.pcic", packageVersion("climdex.pcic"), "on", date()) + ncdf4::ncatt_put(f, v, "history", history.string, definemode=definemode) + invisible(0) +} + +put.ETCCDI.atts <- function(f, freq, orig.title, author.data, definemode=FALSE) { + if("institution" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "ETCCDI_institution", author.data$institution, definemode=definemode) + if("institution_id" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "ETCCDI_institution_id", author.data$institution_id, definemode=definemode) + if("indices_archive" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "ETCCDI_indices_archive", author.data$indices_archive, definemode=definemode) + + ncdf4::ncatt_put(f, 0, "ETCCDI_software", "climdex.pcic", definemode=definemode) + ncdf4::ncatt_put(f, 0, "ETCCDI_software_version", as.character(packageVersion("climdex.pcic")), definemode=definemode) + + if("contact" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "contact", author.data$contact, definemode=definemode) + if("references" %in% names(author.data)) + ncdf4::ncatt_put(f, 0, "references", author.data$references, definemode=definemode) + + ncdf4::ncatt_put(f, 0, "frequency", freq, definemode=definemode) + ncdf4::ncatt_put(f, 0, "creation_date", format(Sys.time(), "%Y-%m-%dT%H:%M:%SZ", tz="GMT"), definemode=definemode) + ncdf4::ncatt_put(f, 0, "title", paste("ETCCDI indices computed on", orig.title), definemode=definemode) + invisible(0) +} + +all.the.same <- function(dat) { + ifelse(length(dat) == 1, TRUE, all(unlist(lapply(dat, identical, dat[[1]])))) +} + +#' Creates a list of CMIP5-compliant filenames reflecting the input data. +#' +#' Creates a list of CMIP5-compliant filenames reflecting the input data. +#' +#' This function takes a split filename (as created by \code{get.split.filename.cmip5}) and a list of variables and creates corresponding filenames for the given variables. +#' +#' @param fn.split A vector containing named components, as created by \code{get.split.filename.cmip5}. +#' @param vars.list A vector containing names of variables, as created by \code{\link{get.climdex.variable.list}}. +#' @return A vector containing filenames corresponding to the variables and filename bits supplied. +#' +#' @examples +#' \dontrun{ +#' library(ncdf4.helpers) +#' ## Split out filename bits for use below... +#' fn <- "pr_day_BCCAQ+ANUSPLIN300+MRI-CGCM3_historical+rcp85_r1i1p1_19500101-21001231.nc" +#' fn.split <- get.split.filename.cmip5(fn) +#' +#' ## Create filenames with time data and variable appropriately replaced. 
+#' filenames <- create.climdex.cmip5.filenames(fn.split, c("rx5dayETCCDI_mon", "tn90pETCCDI_yr")) +#' } +#' +#' @export +create.climdex.cmip5.filenames <- function(fn.split, vars.list) { + time.res <- c("yr", "mon")[grepl("_mon$", vars.list) + 1] + time.range <- substr(fn.split[c('tstart', 'tend')], 1, 4) + + paste(paste(vars.list, fn.split['model'], fn.split['emissions'], fn.split['run'], sapply(time.res, function(x) { paste(time.range, switch(x, yr=c("", ""), mon=c("01", "12")), sep="", collapse="-") }), sep="_"), ".nc", sep="") +} + +#' Returns a list of Climdex variables given constraints +#' +#' Returns a list of Climdex variables given constraints. +#' +#' This function takes a character vector which specifies what source data is present and a time resolution, and generates a list of names consisting of the variable and the time resolution, separated by an underscore. +#' +#' @param source.data.present A vector of strings naming the data that's present; at least one of (tmin, tmax, prec, tavg). +#' @param time.resolution The time resolutions to compute indices at. See \code{\link{create.indices.from.files}}. +#' @param climdex.vars.subset A character vector of lower-case names of Climdex indices to calculate (eg: tr, fd, rx5day). See \code{\link{create.indices.from.files}}. +#' @return A character vector containing variable names with time resolutions appended. +#' +#' @seealso \code{\link{create.indices.from.files}} +#' @examples +#' ## Get all variables which require tmin and/or tmax, for all time resolutions. +#' var.list1 <- get.climdex.variable.list(c("tmax", "tmin")) +#' +#' ## Get all variables which require prec with an annual time resolution. +#' var.list2 <- get.climdex.variable.list("prec", time.resolution="annual") +#' +#' ## Get the intersection of a set list of vars and available data. 
+#' sub.vars <- c("su", "id", "tr", "fd", "gsl", "csdi", "wsdi", "r10mm") +#' var.list3 <- get.climdex.variable.list("tmax", climdex.vars.subset=sub.vars) +#' +#' @export +get.climdex.variable.list <- function(source.data.present, time.resolution=c("all", "annual", "monthly"), climdex.vars.subset=NULL) { + time.res <- match.arg(time.resolution) + annual.only <- c("fdETCCDI", "suETCCDI", "idETCCDI", "trETCCDI", "gslETCCDI", "wsdiETCCDI", "csdiETCCDI", "sdiiETCCDI", "r10mmETCCDI", "r20mmETCCDI", "r1mmETCCDI", "cddETCCDI", "cwdETCCDI", "r95pETCCDI", "r99pETCCDI", "prcptotETCCDI", "altcddETCCDI", "altcwdETCCDI", "altcsdiETCCDI", "altwsdiETCCDI") + vars.by.src.data.reqd <- list(tmax=c("suETCCDI", "idETCCDI", "txxETCCDI", "txnETCCDI", "tx10pETCCDI", "tx90pETCCDI", "wsdiETCCDI", "altwsdiETCCDI"), + tmin=c("fdETCCDI", "trETCCDI", "tnxETCCDI", "tnnETCCDI", "tn10pETCCDI", "tn90pETCCDI", "csdiETCCDI", "altcsdiETCCDI"), + prec=c("rx1dayETCCDI", "rx5dayETCCDI", "sdiiETCCDI", "r10mmETCCDI", "r20mmETCCDI", "r1mmETCCDI", "cddETCCDI", "cwdETCCDI", "r95pETCCDI", "r99pETCCDI", "prcptotETCCDI", "altcddETCCDI", "altcwdETCCDI"), + tavg=c("gslETCCDI", "dtrETCCDI") ) + + if(any(!(source.data.present %in% c("tmin", "tmax", "tavg", "prec")))) + stop("Invalid variable listed in source.data.present.") + + if(all(c("tmax", "tmin") %in% source.data.present) && !("tavg" %in% source.data.present)) + source.data.present <- c(source.data.present, "tavg") + + climdex.vars <- unlist(vars.by.src.data.reqd[source.data.present]) + if(!is.null(climdex.vars.subset)) + climdex.vars <- climdex.vars[climdex.vars %in% paste(climdex.vars.subset, "ETCCDI", sep="")] + + freq.lists <- list(c("mon", "yr"), c("yr")) + dat <- switch(time.res, + all=unlist(lapply(climdex.vars, function(x) { paste(x, freq.lists[[(x %in% annual.only) + 1]], sep="_") })), + annual=paste(climdex.vars, "yr", sep="_"), + monthly=paste(climdex.vars[!(climdex.vars %in% annual.only)], "mon", sep="_")) + + names(dat) <- NULL + + return(dat) +} + +#' Returns a list of Climdex functions, with parameters curried in. +#' +#' Returns a list of Climdex functions, with parameters curried in. +#' +#' This function takes a variable list (as created by \code{\link{get.climdex.variable.list}}) and creates a list of functions corresponding to the specified indices, with parameters such as time resolution curried in. This allows for these functions to be called with just the \code{climdexInput} object as an argument, easing the automation of computing indices. +#' +#' @param vars.list The variable list, as created by \code{\link{get.climdex.variable.list}}. +#' @param fclimdex.compatible Whether to create fclimdex compatible functions. +#' @return A list of functions, named by the variable they compute. +#' +#' @examples +#' ## Get Climdex functions for a variable list with all appropriate params +#' ## curried in, so that all they take is a ClimdexInput object. 
+#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list(c("tmax", "tmin"))) +#' +#' @export +get.climdex.functions <- function(vars.list, fclimdex.compatible=TRUE) { + func.names <- c("climdex.fd", "climdex.su", "climdex.id", "climdex.tr", "climdex.gsl", + "climdex.txx", "climdex.tnx", "climdex.txn", "climdex.tnn", "climdex.tn10p", "climdex.tx10p", "climdex.tn90p", "climdex.tx90p", + "climdex.txx", "climdex.tnx", "climdex.txn", "climdex.tnn", "climdex.tn10p", "climdex.tx10p", "climdex.tn90p", "climdex.tx90p", + "climdex.wsdi", "climdex.csdi", + "climdex.dtr", "climdex.rx1day", "climdex.rx5day", + "climdex.dtr", "climdex.rx1day", "climdex.rx5day", + "climdex.sdii", "climdex.r10mm", "climdex.r20mm", "climdex.rnnmm", "climdex.cdd", "climdex.cwd", "climdex.r95ptot", "climdex.r99ptot", "climdex.prcptot", + "climdex.cdd", "climdex.cwd", "climdex.csdi", "climdex.wsdi") + + el <- list() + af <- list(freq="annual") + mf <- list(freq="monthly") + cwdd.opts <- list(spells.can.span.years=TRUE) + altcwdd.opts <- list(spells.can.span.years=FALSE) + wcsdi.opts <- list(spells.can.span.years=FALSE) + altwcsdi.opts <- list(spells.can.span.years=TRUE) + rx5day.opts <- list(center.mean.on.last.day=fclimdex.compatible) + r1mm.opts <- list(threshold=1) + options <- list(el, el, el, el, el, + mf, mf, mf, mf, mf, mf, mf, mf, + af, af, af, af, af, af, af, af, + wcsdi.opts, wcsdi.opts, + mf, mf, c(mf, rx5day.opts), + af, af, c(af, rx5day.opts), + el, el, el, r1mm.opts, cwdd.opts, cwdd.opts, el, el, el, + altcwdd.opts, altcwdd.opts, altwcsdi.opts, altwcsdi.opts) + + func <- lapply(1:length(func.names), function(n) do.call(functional::Curry, c(list(getFromNamespace(func.names[n], 'climdex.pcic')), options[[n]]))) + names(func) <- c("fdETCCDI_yr", "suETCCDI_yr", "idETCCDI_yr", "trETCCDI_yr", "gslETCCDI_yr", + "txxETCCDI_mon", "tnxETCCDI_mon", "txnETCCDI_mon", "tnnETCCDI_mon", "tn10pETCCDI_mon", "tx10pETCCDI_mon", "tn90pETCCDI_mon", "tx90pETCCDI_mon", + "txxETCCDI_yr", "tnxETCCDI_yr", "txnETCCDI_yr", "tnnETCCDI_yr", "tn10pETCCDI_yr", "tx10pETCCDI_yr", "tn90pETCCDI_yr", "tx90pETCCDI_yr", + "wsdiETCCDI_yr", "csdiETCCDI_yr", + "dtrETCCDI_mon", "rx1dayETCCDI_mon", "rx5dayETCCDI_mon", + "dtrETCCDI_yr", "rx1dayETCCDI_yr", "rx5dayETCCDI_yr", + "sdiiETCCDI_yr", "r10mmETCCDI_yr", "r20mmETCCDI_yr", "r1mmETCCDI_yr", "cddETCCDI_yr", "cwdETCCDI_yr", "r95pETCCDI_yr", "r99pETCCDI_yr", "prcptotETCCDI_yr", + "altcddETCCDI_yr", "altcwdETCCDI_yr", "altcsdiETCCDI_yr", "altwsdiETCCDI_yr") + + return(func[vars.list]) +} + +#' Returns metadata for specified Climdex variables +#' +#' Returns metadata for specified Climdex variables. +#' +#' This function returns metadata suitable for use in NetCDF files for the specified variables. +#' +#' @param vars.list The list of variables, as returned by \code{\link{get.climdex.variable.list}}. +#' @param template.filename The filename template to be used when generating filenames. +#' @return A data frame containing the following: +#' \itemize{ +#' \item{long.name}{Long names for the variable} +#' \item{var.name}{Variable name for use in the file} +#' \item{units}{Units for the variable} +#' \item{annual}{Whether the variable is annual} +#' \item{base.period.attr}{Whether to include a base period attribute} +#' \item{standard.name}{Standard name to use for the variable} +#' \item{filename}{Filename to be written out} +#' } +#' +#' @examples +#' ## Get metadata (including filenames) for specified variables. 
+#' fn <- "pr_day_BCCAQ+ANUSPLIN300+MRI-CGCM3_historical+rcp85_r1i1p1_19500101-21001231.nc" +#' var.list2 <- get.climdex.variable.list("prec", time.resolution="annual") +#' md <- get.climdex.variable.metadata(var.list2, fn) +#' +#' @export +get.climdex.variable.metadata <- function(vars.list, template.filename) { + all.data <- data.frame(long.name=c("Number of Frost Days", "Number of Summer Days", "Number of Icing Days", "Number of Tropical Nights", "Growing Season Length", + "Monthly Maximum of Daily Maximum Temperature", "Monthly Maximum of Daily Minimum Temperature", + "Monthly Minimum of Daily Maximum Temperature", "Monthly Minimum of Daily Minimum Temperature", + "Percentage of Days when Daily Minimum Temperature is Below the 10th Percentile", "Percentage of Days when Daily Maximum Temperature is Below the 10th Percentile", + "Percentage of Days when Daily Minimum Temperature is Above the 90th Percentile", "Percentage of Days when Daily Maximum Temperature is Above the 90th Percentile", + "Annual Maximum of Daily Maximum Temperature", "Annual Maximum of Daily Minimum Temperature", + "Annual Minimum of Daily Maximum Temperature", "Annual Minimum of Daily Minimum Temperature", + "Percentage of Days when Daily Minimum Temperature is Below the 10th Percentile", "Percentage of Days when Daily Maximum Temperature is Below the 10th Percentile", + "Percentage of Days when Daily Minimum Temperature is Above the 90th Percentile", "Percentage of Days when Daily Maximum Temperature is Above the 90th Percentile", + "Warm Spell Duration Index", "Cold Spell Duration Index", + "Mean Diurnal Temperature Range", "Monthly Maximum 1-day Precipitation", "Monthly Maximum Consecutive 5-day Precipitation", + "Mean Diurnal Temperature Range", "Annual Maximum 1-day Precipitation", "Annual Maximum Consecutive 5-day Precipitation", + "Simple Precipitation Intensity Index", "Annual Count of Days with At Least 10mm of Precipitation", + "Annual Count of Days with At Least 20mm of Precipitation", "Annual Count of Days with At Least 1mm of Precipitation", + "Maximum Number of Consecutive Days with Less Than 1mm of Precipitation", "Maximum Number of Consecutive Days with At Least 1mm of Precipitation", + "Annual Total Precipitation when Daily Precipitation Exceeds the 95th Percentile of Wet Day Precipitation", + "Annual Total Precipitation when Daily Precipitation Exceeds the 99th Percentile of Wet Day Precipitation", "Annual Total Precipitation in Wet Days", + "Maximum Number of Consecutive Days Per Year with Less Than 1mm of Precipitation", "Maximum Number of Consecutive Days Per Year with At Least 1mm of Precipitation", + "Cold Spell Duration Index Spanning Years", "Warm Spell Duration Index Spanning Years"), + var.name=c("fdETCCDI", "suETCCDI", "idETCCDI", "trETCCDI", "gslETCCDI", + "txxETCCDI", "tnxETCCDI", "txnETCCDI", "tnnETCCDI", "tn10pETCCDI", "tx10pETCCDI", "tn90pETCCDI", "tx90pETCCDI", + "txxETCCDI", "tnxETCCDI", "txnETCCDI", "tnnETCCDI", "tn10pETCCDI", "tx10pETCCDI", "tn90pETCCDI", "tx90pETCCDI", + "wsdiETCCDI", "csdiETCCDI", + "dtrETCCDI", "rx1dayETCCDI", "rx5dayETCCDI", + "dtrETCCDI", "rx1dayETCCDI", "rx5dayETCCDI", + "sdiiETCCDI", "r10mmETCCDI", "r20mmETCCDI", "r1mmETCCDI", "cddETCCDI", "cwdETCCDI", "r95pETCCDI", "r99pETCCDI", "prcptotETCCDI", + "altcddETCCDI", "altcwdETCCDI", "altcsdiETCCDI", "altwsdiETCCDI"), + units=c("days", "days", "days", "days", "days", + "degrees_C", "degrees_C", "degrees_C", "degrees_C", "%", "%", "%", "%", + "degrees_C", "degrees_C", "degrees_C", "degrees_C", "%", "%", "%", 
"%", + "days", "days", + "degrees_C", "mm", "mm", + "degrees_C", "mm", "mm", + "mm d-1", "days", "days", "days", "days", "days", "mm", "mm", "mm", + "days", "days", "days", "days"), + annual=c(T, T, T, T, T, + F, F, F, F, F, F, F, F, + T, T, T, T, T, T, T, T, + T, T, + F, F, F, + T, T, T, + T, T, T, T, T, T, T, T, T, + T, T, T, T), + base.period.attr=c(F, F, F, F, F, + F, F, F, F, T, T, T, T, + F, F, F, F, T, T, T, T, + T, T, + F, F, F, + F, F, F, + F, F, F, F, F, F, T, T, F, + F, F, T, T), + row.names=c("fdETCCDI_yr", "suETCCDI_yr", "idETCCDI_yr", "trETCCDI_yr", "gslETCCDI_yr", + "txxETCCDI_mon", "tnxETCCDI_mon", "txnETCCDI_mon", "tnnETCCDI_mon", "tn10pETCCDI_mon", "tx10pETCCDI_mon", "tn90pETCCDI_mon", "tx90pETCCDI_mon", + "txxETCCDI_yr", "tnxETCCDI_yr", "txnETCCDI_yr", "tnnETCCDI_yr", "tn10pETCCDI_yr", "tx10pETCCDI_yr", "tn90pETCCDI_yr", "tx90pETCCDI_yr", + "wsdiETCCDI_yr", "csdiETCCDI_yr", + "dtrETCCDI_mon", "rx1dayETCCDI_mon", "rx5dayETCCDI_mon", + "dtrETCCDI_yr", "rx1dayETCCDI_yr", "rx5dayETCCDI_yr", + "sdiiETCCDI_yr", "r10mmETCCDI_yr", "r20mmETCCDI_yr", "r1mmETCCDI_yr", "cddETCCDI_yr", "cwdETCCDI_yr", "r95pETCCDI_yr", "r99pETCCDI_yr", "prcptotETCCDI_yr", + "altcddETCCDI_yr", "altcwdETCCDI_yr", "altcsdiETCCDI_yr", "altwsdiETCCDI_yr"), + stringsAsFactors=FALSE) + + standard.name.lookup <- c(fdETCCDI="number_frost_days", suETCCDI="number_summer_days", idETCCDI="number_icing_days", trETCCDI="number_tropical_nights", gslETCCDI="growing_season_length", + txxETCCDI="maximum_daily_maximum_temperature", tnxETCCDI="maximum_daily_minimum_temperature", txnETCCDI="minimum_daily_maximum_temperature", tnnETCCDI="minimum_daily_minimum_temperature", + tn10pETCCDI="percent_days_when_daily_minimum_temperature_below_10p", tx10pETCCDI="percent_days_when_daily_maximum_temperature_below_10p", + tn90pETCCDI="percent_days_when_daily_minimum_temperature_above_90p", tx90pETCCDI="percent_days_when_daily_maximum_temperature_above_90p", + wsdiETCCDI="warm_spell_duration_index", csdiETCCDI="cold_spell_duration_index", dtrETCCDI="diurnal_temperature_range", + altwsdiETCCDI="warm_spell_duration_index", altcsdiETCCDI="cold_spell_duration_index", + rx1dayETCCDI="maximum_1day_precipitation", rx5dayETCCDI="maximum_5day_precipitation", sdiiETCCDI="simple_precipitation_intensity_index", + r10mmETCCDI="count_days_more_than_10mm_precipitation", r20mmETCCDI="count_days_more_than_20mm_precipitation", r1mmETCCDI="count_days_more_than_1mm_precipitation", + cddETCCDI="maximum_number_consecutive_dry_days", cwdETCCDI="maximum_number_consecutive_wet_days", + altcddETCCDI="maximum_number_consecutive_dry_days", altcwdETCCDI="maximum_number_consecutive_wet_days", + r95pETCCDI="total_precipitation_exceeding_95th_percentile", r99pETCCDI="total_precipitation_exceeding_99th_percentile", prcptotETCCDI="total_wet_day_precipitation") + + all.data$standard.name <- standard.name.lookup[all.data$var.name] + + all.data$filename <- create.climdex.cmip5.filenames(ncdf4.helpers::get.split.filename.cmip5(template.filename), rownames(all.data)) + return(all.data[vars.list,]) +} + +get.output.time.data <- function(ts, time.origin.PCICt, time.units, time.dim.name, time.bnds.name, bnds.dim, res=c("year", "month"), origin="1970-01-01") { + res <- match.arg(res) + time.bounds <- ncdf4.helpers::nc.make.time.bounds(ts, res) + time.series <- PCICt::as.PCICt.numeric((unclass(time.bounds[1,]) + unclass(time.bounds[2,])) / 2, cal=attr(time.bounds, "cal"), origin=origin) + time.bounds.days <- as.numeric(julian(time.bounds, origin=time.origin.PCICt)) + time.days 
<- as.numeric(julian(time.series, origin=time.origin.PCICt)) + time.dim <- ncdf4::ncdim_def(time.dim.name, units=time.units, vals=time.days, unlim=TRUE, longname='') + time.bnds.var <- ncdf4::ncvar_def(time.bnds.name, '', list(bnds.dim, time.dim), longname='', prec="double") + return(list(time.dim=time.dim, time.bnds.var=time.bnds.var, time.bnds.data=time.bounds.days)) +} + +#' Creates output files for Climdex variables. +#' +#' Creates output files for Climdex variables. +#' +#' This function creates a set of output files for the set of variable parameters passed in \code{cdx.dat}, as created by \code{\link{get.climdex.variable.metadata}}. It copies metadata from input files as appropriate and adds new metadata as required. +#' +#' @param cdx.dat The variable description data, as created by \code{\link{get.climdex.variable.metadata}}. +#' @param f The file(s) being used as input. +#' @param v.f.idx A mapping from variables to files, as created by \code{\link{get.var.file.idx}}. +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param ts The associated time data, as created by \code{nc.get.time.series}. +#' @param time.origin The time origin, as specified in the source NetCDF file(s). +#' @param base.range The base range; a vector of two numeric years. +#' @param out.dir The output directory name. +#' @param author.data A vector containing named elements describing the author; see \code{\link{create.indices.from.files}}. +#' @return A list of objects of type \code{ncdf4}. +#' +#' @examples +#' \donttest{ +#' ## Establish basic inputs. +#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' +#' ## Prepare derived inputs. +#' f <- lapply(input.files, ncdf4::nc_open) +#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") +#' f.meta <- create.file.metadata(f, variable.name.map) +#' climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), "all", NULL) +#' cdx.meta <- get.climdex.variable.metadata(climdex.var.list, input.files[1]) +#' +#' ## Create output files +#' cdx.ncfile <- create.ncdf.output.files(cdx.meta, f, f.meta$v.f.idx, variable.name.map, +#' f.meta$ts, get.time.origin(f, f.meta$dim.axes), +#' c(1981,1990), "/foo", author.data) +#' } +#' +#' @export +create.ncdf.output.files <- function(cdx.dat, f, v.f.idx, variable.name.map, ts, time.origin, base.range, out.dir, author.data) { + f.example <- f[[v.f.idx[1]]] + v.example <- variable.name.map[names(v.f.idx)[1]] + time.dim.name <- ncdf4.helpers::nc.get.dim.for.axis(f.example, v.example, "T")$name + old.time.bnds.att <- ncdf4::ncatt_get(f.example, time.dim.name, "bounds") + time.bnds.name <- if(old.time.bnds.att$hasatt) old.time.bnds.att$value else paste(time.dim.name, "bnds", sep="_") + + ## Create new time dimensions + time.origin.PCICt <- PCICt::as.PCICt.default(time.origin, cal=attr(ts, "cal")) + time.units <- paste("days since", time.origin) + + input.bounds <- ncdf4.helpers::nc.get.dim.bounds.var.list(f.example, v.example) + ## FIXME: I'm not sure how solid the assumption about the location of bnds here is. 
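+  ## Sketch of that assumption: ncdf4 lists dimensions fastest-varying
+  ## first, so for a CF-style bounds variable (CDL: lon_bnds(lon, bnds))
+  ## the length-2 bounds dimension is dim[[1]], which the next line
+  ## relies on. A more defensive variant (a sketch, not used here) would
+  ## locate it by length:
+  ##   bnds.dims <- f.example$var[[input.bounds[1]]]$dim
+  ##   bnds <- bnds.dims[[which(sapply(bnds.dims, function(d) d$len) == 2)[1]]]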
+ bnds <- if(length(input.bounds) > 0) f.example$var[[input.bounds[1]]]$dim[[1]] else ncdf4::ncdim_def("bnds", "", 1:2, create_dimvar=FALSE) + time.dat <- list(annual=get.output.time.data(ts, time.origin.PCICt, time.units, time.dim.name, time.bnds.name, bnds, res="year"), + monthly=get.output.time.data(ts, time.origin.PCICt, time.units, time.dim.name, time.bnds.name, bnds, res="month")) + + grid.mapping.att <- ncdf4::ncatt_get(f.example, v.example, "grid_mapping") + vars.to.copy <- c(input.bounds[input.bounds != time.bnds.name], names(ncdf4.helpers::nc.get.coordinate.axes(f.example, v.example)), if(grid.mapping.att$hasatt) grid.mapping.att$value) + vars.to.clone.atts.for <- c(vars.to.copy, ncdf4.helpers::nc.get.dim.names(f.example, v.example)) + vars.ncvars <- sapply(vars.to.copy, function(x) { f.example$var[[x]] }, simplify=FALSE) + vars.data <- lapply(vars.ncvars, function(ncvar) { if(length(ncvar$dim) == 0) NULL else ncdf4::ncvar_get(f.example, ncvar) }) + + return(lapply(1:length(cdx.dat$var.name), function(x) { + annual <- cdx.dat$annual[x] + time.for.file <- time.dat[[c("monthly", "annual")[1 + annual]]] + + ## Establish variables, create file + nc.var.list <- c(vars.ncvars, list(time.for.file$time.bnds.var, ncdf4::ncvar_def(name=cdx.dat$var.name[x], units=cdx.dat$units[x], dim=c(f.example$var[[v.example]]$dim[1:2], list(time.for.file$time.dim)), missval=1e20, longname=cdx.dat$long.name[x]))) + new.file <- ncdf4::nc_create(paste(out.dir, cdx.dat$filename[x], sep="/"), nc.var.list, force_v4=TRUE) + + ## Copy attributes for all variables plus global attributes + att.rename <- c("frequency"="input_frequency", "creation_date"="input_creation_date", "title"="input_title", "tracking_id"="input_tracking_id") + inst.id <- ncdf4::ncatt_get(f.example, 0, "institution_id") + if(inst.id$hasatt) { + att.rename.inst <- c("contact"="contact", "references"="references") + names(att.rename.inst) <- paste(inst.id$value, names(att.rename.inst), sep="_") + att.rename <- c(att.rename, att.rename.inst) + } + + ## Copy attributes with renaming and exclusions. + ncdf4.helpers::nc.copy.atts(f.example, 0, new.file, 0, definemode=TRUE, rename.mapping=att.rename) + ncdf4.helpers::nc.copy.atts(f.example, v.example, new.file, cdx.dat$var.name[x], definemode=TRUE, exception.list=c("units", "long_name", "standard_name", "base_period", "missing_value", "_FillValue", "add_", "valid_min", "valid_max", "valid_range", "scale_factor", "add_offset", "signedness", "history")) + for(v in vars.to.clone.atts.for) { + ncdf4.helpers::nc.copy.atts(f.example, v, new.file, v, definemode=TRUE) + } + ncdf4::ncatt_put(new.file, time.dim.name, "units", time.units, definemode=TRUE) + + ## Put additional attributes. + put.history.att(new.file, cdx.dat$var.name[x], definemode=TRUE) + put.ETCCDI.atts(new.file, c("mon", "yr")[1 + annual], ncdf4::ncatt_get(f.example, 0, "title")$value, author.data, definemode=TRUE) + if(cdx.dat$base.period.attr[x]) + ncdf4::ncatt_put(new.file, cdx.dat$var.name[x], "base_period", paste(base.range, collapse="-"), definemode=TRUE) + ncdf4::nc_enddef(new.file) + + ## Copy data from vars.to.copy and put time bounds. + ncdf4::ncvar_put(new.file, time.bnds.name, time.for.file$time.bnds.data) + for(v in vars.to.copy) + if(!is.null(vars.data[[v]])) + ncdf4::ncvar_put(new.file, v, vars.data[[v]]) + + new.file + })) +} + +## Get dim sizes, with checking to make sure sizes are all the same. 
+get.dim.size <- function(f, v.f.idx, variable.name.map) { + dim.size.list <- lapply(names(v.f.idx), function(i) { f[[v.f.idx[i]]]$var[[variable.name.map[i]]]$varsize }) + stopifnot(all.the.same(dim.size.list)) + dim.size.list[[1]] +} + +## Get dim axes, with checking to make sure they all have same axes. +get.dim.axes <- function(f, v.f.idx, variable.name.map) { + dim.axes.list <- lapply(names(v.f.idx), function(i) { ncdf4.helpers::nc.get.dim.axes(f[[v.f.idx[i]]], variable.name.map[i]) }) + stopifnot(all.the.same(dim.axes.list)) + dim.axes.list[[1]] +} + +## Get timeseries (as PCICt), with error checking to ensure input files have same TS. +## FIXME: This will need to be revised for fixed time dimensions. Should be identified by axis. +get.ts <- function(f) { + ts.list <- lapply(lapply(f, ncdf4.helpers::nc.get.time.series), trunc, "days") + stopifnot(all.the.same(ts.list)) + ts.list[[1]] +} + +## Compute all indices for a single grid box +#' Compute Climdex indices using provided data. +#' +#' Compute Climdex indices using provided data. +#' +#' Given the provided data and functions, compute the Climdex indices defined by the functions. +#' +#' @param in.dat The input data to compute indices on. +#' @param cdx.funcs The functions to be applied to the data, as created by \code{\link{get.climdex.functions}}. +#' @param ts The associated time data, as created by \code{nc.get.time.series}. +#' @param base.range The base range; a vector of two numeric years. +#' @param fclimdex.compatible Whether to make the results identical to those of fclimdex; this affects how the data in the base period is padded. +#' @return A list of data for each index. +#' +#' @examples +#' library(climdex.pcic) +#' +#' ## Prepare input data +#' in.dat <- list(tmax=ec.1018935.tmax$MAX_TEMP) +#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list(names(in.dat))) +#' in.dat$northern.hemisphere <- TRUE +#' ts <- as.PCICt(do.call(paste, ec.1018935.tmax[,c("year", "jday")]), +#' format="%Y %j", cal="gregorian") +#' +#' ## Compute indices +#' res <- compute.climdex.indices(in.dat, cdx.funcs, ts, c(1981, 1990), FALSE) +#' +#' @export +compute.climdex.indices <- function(in.dat, cdx.funcs, ts, base.range, fclimdex.compatible) { + ci <- climdex.pcic::climdexInput.raw( + in.dat$tmax, in.dat$tmin, in.dat$prec, + if(is.null(in.dat$tmax)) NULL else ts, + if(is.null(in.dat$tmin)) NULL else ts, + if(is.null(in.dat$prec)) NULL else ts, + tavg=in.dat$tavg, tavg.dates=if(is.null(in.dat$tavg)) NULL else ts, + base.range=base.range, northern.hemisphere=in.dat$northern.hemisphere, + quantiles=in.dat$quantiles) + + ## NOTE: Names must be stripped here because it increases memory usage on the head by a factor of 8-9x (!) + return(lapply(cdx.funcs, function(f) { d <- f(ci=ci); names(d) <- NULL; d })) +} + +#' Flatten the X and Y dimensions down to a space dimension. +#' +#' Flatten the X and Y dimensions down to a space dimension. +#' +#' This function takes input data, a vector of dimensions to reduce to 1 dimension, and optionally a subset of dimnames to copy. It returns the data with the specified dimensions shrunk down to 1 dimension. +#' +#' @param dat The data to operate on. +#' @param reduce.dims The names or indices of the dimensions to reduce to 1 dimension. +#' @param names.subset Optionally, a subset of dimension names to copy. +#' @return The data with the specified dimensions reduced to 1 dimension. +#' +#' @note The dimensions to reduce must be adjoining dimensions. 
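+#' For example, an array of dimensions \code{c(2, 2, 2)} flattened with
+#' \code{reduce.dims=2:3} becomes an array of dimensions \code{c(2, 4)}.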
+#' +#' @examples +#' ## Take example data and flatten the last two dims down to one. +#' dat <- structure(1:8, .Dim=c(2, 2, 2)) +#' dat.flat <- flatten.dims(dat, 2:3) +#' +#' @export +flatten.dims <- function(dat, reduce.dims, names.subset) { + stopifnot(all(diff(reduce.dims) == 1)) + dat.dim <- dim(dat) + if(!missing(names.subset)) + dat.dimnames <- dimnames(dat) + before.reduce <- 1:length(dat.dim) < min(reduce.dims) + after.reduce <- 1:length(dat.dim) > max(reduce.dims) + new.dims <- c(dat.dim[before.reduce], prod(dat.dim[reduce.dims]), dat.dim[after.reduce]) + dim(dat) <- new.dims + if(!missing(names.subset)) + dimnames(dat) <- dat.dimnames[names.subset] + return(dat) +} + +## FIXME: Handle time-minor data gracefully. +#' Retrieve and convert data to correct units and dimensions. +#' +#' Retrieve and convert data to correct units and dimensions. +#' +#' This function retrieves NetCDF data for the specified subset from the specified file and variable; converts from \code{src.units} to \code{dest.units}, transposes the data to (T, S) dimensionality, and returns the result. +#' +#' @param f The NetCDF file to retrieve data from; an object of class \code{ncdf4}. +#' @param v The variable to retrieve data from. +#' @param subset The subset to retrieve. +#' @param src.units The source units to convert data from. +#' @param dest.units The destination units to convert to. +#' @param dim.axes The dimension axes to be used. +#' @return The retrieved and massaged data. +#' +#' @examples +#' \donttest{get.data(f, "pr", list(Y=3), "kg m-2 s-1", "kg m-2 s-1", c(X="lon",Y="lat",T="time"))} +#' +#' @export +get.data <- function(f, v, subset, src.units, dest.units, dim.axes) { + stopifnot(inherits(f, "ncdf4")) + dat <- if(!missing(src.units) && !missing(dest.units)) + udunits2::ud.convert(ncdf4.helpers::nc.get.var.subset.by.axes(f, v, subset), src.units, dest.units) + else + ncdf4.helpers::nc.get.var.subset.by.axes(f, v, subset) + + reduce.dims <- which(dim.axes %in% c("X", "Y", "Z")) + return(t(flatten.dims(dat, reduce.dims=reduce.dims))) +} + +## Produce slab of northern.hemisphere booleans of the same shape as the data. +#' Determine what portions of a subset are within the northern hemisphere. +#' +#' Determine what portions of a subset are within the northern hemisphere. +#' +#' Given a subset, a file, a variable, and a projection, determine what positions are within the northern hemisphere, returning the result as an array of booleans. +#' +#' @param subset The subset to use. +#' @param f The NetCDF file to use; an object of class \code{ncdf4}. +#' @param v The variable in question. +#' @param projection The proj4 string to use; NULL if the data is not in a projected coordinate space. +#' @return An array of booleans corresponding to the subset containing TRUE if the point is within the northern hemisphere, and FALSE otherwise. +#' +#' @examples +#' \donttest{ +#' ## Open files, etc. 
+#' input.files <- c("tasmax_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' f <- list(nc_open(input.files)) +#' f.v <- lapply(f, ncdf4.helpers::nc.get.variable.list, min.dims=2) +#' bools <- get.northern.hemisphere.booleans(list(X=1:2, Y=1:2), f[[1]], f.v[[1]], NULL) +#' } +#' +#' @export +get.northern.hemisphere.booleans <- function(subset, f, v, projection) { + y.dim <- ncdf4.helpers::nc.get.dim.for.axis(f, v, "Y") + x.dim <- ncdf4.helpers::nc.get.dim.for.axis(f, v, "X") + y.subset.vals <- rep(y.dim$vals[if(is.null(subset$Y)) 1:y.dim$len else subset$Y], + each=(if(is.null(subset$X)) x.dim$len else length(subset$X))) + if(!is.null(projection)) { + x.subset.vals <- rep(x.dim$vals[if(is.null(subset$X)) 1:x.dim$len else subset$X], + (if(is.null(subset$Y)) y.dim$len else length(subset$Y))) + dat <- proj4::project(list(x=x.subset.vals, y=y.subset.vals), projection, inverse=TRUE, ellps.default=NA) + return(dat$y >= 0) + } else + return(y.subset.vals >= 0) +} + +#' Extract a single quantiles object from a set of thresholds. +#' +#' Extract a single quantiles object from a set of thresholds. +#' +#' From a set of thresholds as retrieved from one or more NetCDF files containing thresholds, this function extracts a single point and converts the format to one suitable for passing to \code{climdexInput.raw}. +#' +#' @param thresholds The thresholds, as extracted by \code{\link{get.thresholds.chunk}}. +#' @param idx The index to extract. +#' @return A quantiles object suitable for passing to \code{climdexInput.raw} as the \code{quantiles} argument. +#' +#' @examples +#' \donttest{ +#' ## Define mappings and filenames. +#' thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", +#' tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") +#' thresh.files <- "thresholds.nc" +#' +#' ## Open files, etc. +#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) +#' thresholds.netcdf <- lapply(thresh.files, nc_open) +#' t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, +#' ncdf4.helpers::nc.get.variable.list, min.dims=2)) +#' +#' ## Get thresholds chunk. 
+#' dat <- get.thresholds.chunk(list(Y=1), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) +#' +#' ## Get quantiles object for index 2 +#' q <- get.quantiles.object(dat, 2) +#' } +#' +#' @export +get.quantiles.object <- function(thresholds, idx) { + if(is.null(thresholds)) + return(NULL) + + thresh.path.2d <- list(tx10thresh=c("tmax", "outbase", "q10"), + tx90thresh=c("tmax", "outbase", "q90"), + tn10thresh=c("tmin", "outbase", "q10"), + tn90thresh=c("tmin", "outbase", "q90")) + thresh.path.1d <- list(r95thresh=c("prec", "q95"), + r99thresh=c("prec", "q99")) + result <- list() + + + recursive.append <- function(x, l, data) { + if(length(x) == 0) return(data) + if(is.null(l)) l <- list() + return(c(l[!(names(l) %in% x[1])], structure(list(recursive.append(tail(x, n=-1), l[[x[1]]], data)), .Names=x[1]))) + } + + + for(threshold.var in names(thresh.path.2d)[names(thresh.path.2d) %in% names(thresholds)]) + result <- recursive.append(thresh.path.2d[[threshold.var]], result, thresholds[[threshold.var]][,idx]) + + for(threshold.var in names(thresh.path.1d)[names(thresh.path.1d) %in% names(thresholds)]) { + thresh.path <- thresh.path.1d[[threshold.var]] + result[[thresh.path[1]]] <- c(result[[thresh.path[1]]], structure(thresholds[[threshold.var]][idx], .Names=thresh.path[2])) + } + + return(result) +} + +#' Compute Climdex indices for a subset / stripe +#' +#' Compute Climdex indices for a subset / stripe +#' +#' Given a subset, a set of Climdex functions (as created by \code{\link{get.climdex.functions}}), and ancillary data, load and convert data, create a climdexInput object for each point, run all of the functions in \code{cdx.funcs} on that data, and return the result. +#' +#' @param subset The subset to use. +#' @param cdx.funcs The functions to be applied to the data, as created by \code{\link{get.climdex.functions}}. +#' @param ts The associated time data, as created by \code{nc.get.time.series}. +#' @param base.range The base range; a vector of two numeric years. +#' @param dim.axes The dimension axes for the input data. +#' @param v.f.idx A mapping from variables to files, as created by \code{\link{get.var.file.idx}}. +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param src.units The source units to convert data from. +#' @param dest.units The destination units to convert to. +#' @param t.f.idx A mapping from threshold variables to threshold files, as created by \code{\link{get.var.file.idx}}. +#' @param thresholds.name.map A mapping from standardized names (tx10thresh, tn90thresh, etc) to NetCDF variable names. +#' @param fclimdex.compatible Whether to make the results identical to those of fclimdex; this affects how the data in the base period is padded. +#' @param projection A proj4 string representing the projection the data is in. +#' @param f A list of objects of type \code{ncdf4}, consisting of the open input files. If missing, will be pulled from the global namespace. +#' @param thresholds.netcdf A list of objects of type \code{ncdf4}, consisting of the open threshold files. If missing, will be pulled from the global namespace. +#' +#' @note This function relies on an object named 'f' and containing the opened NetCDF files being part of the global namespace. +#' +#' @examples +#' \donttest{ +#' ## Define mappings and filenames. 
+#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") +#' +#' ## Open files, etc. +#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) +#' f <- lapply(input.files, ncdf4::nc_open) +#' f.meta <- create.file.metadata(f, variable.name.map) +#' climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), "all", NULL) +#' cdx.meta <- get.climdex.variable.metadata(climdex.var.list, input.files[1]) +#' +#' ## Compute indices for stripe +#' cdx <- compute.indices.for.stripe(list(Y=1), cdx.funcs, f.meta$ts, c(1981, 1990), f.meta$dim.axes, +#' f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, +#' t.f.idx, NULL, f=f, thresholds.netcdf=NULL) +#' } +#' +#' @export +compute.indices.for.stripe <- function(subset, cdx.funcs, ts, base.range, dim.axes, v.f.idx, variable.name.map, src.units, dest.units, t.f.idx, thresholds.name.map, fclimdex.compatible=TRUE, projection=NULL, f, thresholds.netcdf) { + f <- if(missing(f)) get("f", .GlobalEnv) else f + thresholds.netcdf <- if(missing(thresholds.netcdf)) get("thresholds.netcdf", .GlobalEnv) else thresholds.netcdf + + ## Dimension order: Time, Space for each Var in list + data.list <- sapply(names(v.f.idx), function(x) { gc(); get.data(f[[v.f.idx[x]]], variable.name.map[x], subset, src.units[x], dest.units[x], dim.axes) }, simplify=FALSE) + gc() + + northern.hemisphere <- get.northern.hemisphere.booleans(subset, f[[v.f.idx[1]]], variable.name.map[names(v.f.idx)[1]], projection) + + thresholds <- if(is.null(thresholds.netcdf)) NULL else get.thresholds.chunk(subset, cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) + return(lapply(1:(dim(data.list[[1]])[2]), function(x) { + dat.list <- sapply(names(data.list), function(name) { data.list[[name]][,x] }, simplify=FALSE) + ## Fast-path the all-NA case. + if(all(sapply(dat.list, function(x) { all(is.na(x)) }))) { + ## We don't need to pad this out to full length; cbind will do that for us. + return(structure(as.list(rep(NA, length(cdx.funcs))), .Names=names(cdx.funcs))) + } else { + indices.input <- c(dat.list, northern.hemisphere=northern.hemisphere[x], list(quantiles=get.quantiles.object(thresholds, x))) + return(compute.climdex.indices(indices.input, cdx.funcs, ts, base.range, fclimdex.compatible)) + } + })) +} + +#' Retrieve thresholds for a subset +#' +#' Retrieve thresholds for a subset +#' +#' Given a subset, a set of Climdex functions (as created by \code{\link{get.climdex.functions}}), and ancillary data, load the thresholds required for the functions being called and return them. +#' +#' @param subset The subset to use. +#' @param cdx.funcs The functions to be applied to the data, as created by \code{\link{get.climdex.functions}}. +#' @param thresholds.netcdf One or more NetCDF files containing thresholds. +#' @param t.f.idx A mapping from threshold variables to threshold files, as created by \code{\link{get.var.file.idx}}. +#' @param thresholds.name.map A mapping from standardized names (tx10thresh, tn90thresh, etc) to NetCDF variable names. +#' +#' @examples +#' \donttest{ +#' ## Define mappings and filenames. +#' thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", +#' tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") +#' thresh.files <- "thresholds.nc" +#' +#' ## Open files, etc. 
+#' cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) +#' thresholds.netcdf <- lapply(thresh.files, nc_open) +#' t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, +#' ncdf4.helpers::nc.get.variable.list, min.dims=2)) +#' +#' ## Get thresholds chunk. +#' dat <- get.thresholds.chunk(list(Y=1), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) +#' } +#' +#' @export +get.thresholds.chunk <- function(subset, cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) { + var.thresh.map <- list(tx10thresh=c("tx10p"), tx90thresh=c("tx90p", "WSDI"), tn10thresh=c("tn10p", "CSDI"), tn90thresh=c("tn90p"), r95thresh=c("r95p"), r99thresh=c("r99p")) + + cdx.names <- names(cdx.funcs) + thresh.var.needed <- names(var.thresh.map)[sapply(var.thresh.map, function(x) { any(unlist(lapply(x, function(substr) { any(grepl(substr, cdx.names)) }))) })] + stopifnot(all(thresh.var.needed %in% names(t.f.idx))) + return(sapply(thresh.var.needed, function(threshold.var) { + dim.axes <- ncdf4.helpers::nc.get.dim.axes(thresholds.netcdf[[t.f.idx[threshold.var]]], thresholds.name.map[threshold.var]); + return(get.data(thresholds.netcdf[[t.f.idx[threshold.var]]], thresholds.name.map[threshold.var], subset, dim.axes=dim.axes)) + }, simplify=FALSE)) +} + +## Write out results for variables computed +#' Write out computed climdex results +#' +#' Write out computed climdex results +#' +#' Given a set of Climdex results, a subset, a set of files, and dimension sizes, write out the data to the appropriate files. +#' +#' @param climdex.results The results to write out. +#' @param chunk.subset The corresponding subset. +#' @param cdx.ncfile The list of NetCDF files to write the results out to. +#' @param dim.size The overall size of the input data. +#' @param cdx.varname The list of NetCDF variable names for the files in \code{cdx.ncfile}. +#' +#' @examples +#' \donttest{ +#' ## Define mappings and filenames. +#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") +#' +#' ## Open files, etc. 
+#' cdx.funcs <- get.climdex.functions("tmax") +#' f <- lapply(input.files, ncdf4::nc_open) +#' f.meta <- create.file.metadata(f, variable.name.map) +#' climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), "all", NULL) +#' cdx.meta <- get.climdex.variable.metadata(climdex.var.list, input.files[1]) +#' +#' ## Create output files +#' cdx.ncfile <- create.ncdf.output.files(cdx.meta, f, f.meta$v.f.idx, variable.name.map, +#' f.meta$ts, get.time.origin(f, f.meta$dim.axes), +#' c(1981,1990), "/foo", author.data) +#' +#' ## Compute indices for stripe +#' cdx <- compute.indices.for.stripe(list(Y=1), cdx.funcs, f.meta$ts, c(1991, 2000), f.meta$dim.axes, +#' f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, +#' t.f.idx, NULL, f=f, thresholds.netcdf=NULL) +#' +#' ## Write out indices +#' write.climdex.results(cdx, list(Y=1), cdx.ncfile, f.meta$dim.size, cdx.meta$varname) +#' } +#' +#' @export +write.climdex.results <- function(climdex.results, chunk.subset, cdx.ncfile, dim.size, cdx.varname) { + xy.dims <- dim.size[1:2] + if(!is.null(chunk.subset$X)) + xy.dims[1] <- length(chunk.subset$X) + if(!is.null(chunk.subset$Y)) + xy.dims[2] <- length(chunk.subset$Y) + + ## Write out results, variable by variable + lapply(1:length(cdx.ncfile), function(v) { + dat <- t(do.call(cbind, lapply(climdex.results, function(cr) { cr[[v]] }))) + t.dim.len <- ncdf4.helpers::nc.get.dim.for.axis(cdx.ncfile[[v]], cdx.varname[v], "T")$len + + ## If data is of length 1, it's an error. + if(length(dat) == 1) + stop(dat) + + ## Special case of an entire slab missing values... repeat such that we have full data. + if(prod(dim(dat)) != prod(c(xy.dims, t.dim.len))) + dat <- rep(dat, t.dim.len) + + dim(dat) <- c(xy.dims, t.dim.len) + ncdf4.helpers::nc.put.var.subset.by.axes(cdx.ncfile[[v]], cdx.varname[v], dat, chunk.subset) + }) + invisible(0) +} + +#' Compute Climdex thresholds for a subset / stripe +#' +#' Compute Climdex thresholds for a subset / stripe +#' +#' Given a subset and ancillary data, load and convert data, get the out-of-base quantiles for the data for each point, and return the result. +#' +#' @param subset The subset to use. +#' @param ts The associated time data, as created by \code{nc.get.time.series}. +#' @param base.range The base range; a vector of two numeric years. +#' @param dim.axes The dimension axes for the input data. +#' @param v.f.idx A mapping from variables to files, as created by \code{\link{get.var.file.idx}}. +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param src.units The source units to convert data from. +#' @param dest.units The destination units to convert to. +#' @param f A list of objects of type \code{ncdf4}, consisting of the open input files. If missing, will be pulled from the global namespace. +#' +#' @note This function relies on an object named 'f' and containing the opened NetCDF files being part of the global namespace. +#' +#' @examples +#' \donttest{ +#' ## Establish basic inputs. +#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' +#' ## Prepare derived inputs. 
+#' f <- lapply(input.files, ncdf4::nc_open)
+#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr")
+#' f.meta <- create.file.metadata(f, variable.name.map)
+#' threshold.dat <- get.thresholds.metadata(names(f.meta$v.f.idx))
+#'
+#' ## Create output file
+#' thresh.file <- create.thresholds.file("thresh.nc", f, f.meta$ts, f.meta$v.f.idx, variable.name.map,
+#'                                       c(1991, 2000), f.meta$dim.size, f.meta$dim.axes,
+#'                                       threshold.dat, author.data)
+#'
+#' ## Compute threshold quantiles for stripe
+#' q <- get.quantiles.for.stripe(list(Y=1), f.meta$ts, c(1991, 2000), f.meta$dim.axes,
+#'                               f.meta$v.f.idx, variable.name.map, f.meta$src.units,
+#'                               f.meta$dest.units, f)
+#' }
+#'
+#' @export
+get.quantiles.for.stripe <- function(subset, ts, base.range, dim.axes, v.f.idx, variable.name.map, src.units, dest.units, f) {
+  f <- if(missing(f)) get("f", .GlobalEnv) else f
+  data.list <- sapply(names(v.f.idx), function(x) { gc(); get.data(f[[v.f.idx[x]]], variable.name.map[x], subset, src.units[x], dest.units[x], dim.axes) }, simplify=FALSE)
+  gc()
+
+  r <- 1:(dim(data.list[[1]])[2])
+  if(!is.null(data.list$tmax)) {
+    if(!is.null(data.list$tmin)) {
+      if(!is.null(data.list$prec)) {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(data.list$tmax[,x], data.list$tmin[,x], data.list$prec[,x], ts, ts, ts, base.range)))
+      } else {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(data.list$tmax[,x], data.list$tmin[,x], NULL, ts, ts, NULL, base.range)))
+      }
+    } else {
+      if(!is.null(data.list$prec)) {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(data.list$tmax[,x], NULL, data.list$prec[,x], ts, NULL, ts, base.range)))
+      } else {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(data.list$tmax[,x], NULL, NULL, ts, NULL, NULL, base.range)))
+      }
+    }
+  } else {
+    if(!is.null(data.list$tmin)) {
+      if(!is.null(data.list$prec)) {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(NULL, data.list$tmin[,x], data.list$prec[,x], NULL, ts, ts, base.range)))
+      } else {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(NULL, data.list$tmin[,x], NULL, NULL, ts, NULL, base.range)))
+      }
+    } else {
+      if(!is.null(data.list$prec)) {
+        return(lapply(r, function(x) climdex.pcic::get.outofbase.quantiles(NULL, NULL, data.list$prec[,x], NULL, NULL, ts, base.range)))
+      } else {
+        stop("No tmax, tmin or prec data found in the input; cannot compute out-of-base quantiles.")
+      }
+    }
+  }
+}
+
+set.up.cluster <- function(parallel, type="SOCK", src) {
+  ## Fire up the cluster...
+  cluster <- NULL
+
+  if(!is.logical(parallel)) {
+    cat(paste("Creating cluster of", parallel, "nodes of type", type, "\n"))
+    cat(paste("SRC:", src, "\n"))
+    cluster <- snow::makeCluster(parallel, type)
+    snow::clusterCall(cluster, function() { source(src) })
+    ##snow::clusterEvalQ(cluster, library(climdex.pcic.ncdf))
+    ##snow::clusterEvalQ(cluster, try(getFromNamespace('nc_set_chunk_cache', 'ncdf4')(1024 * 2048, 1009), silent=TRUE))
+  }
+  cluster
+}
+
+#' Creates Climdex thresholds output file.
+#'
+#' Creates Climdex thresholds output file.
+#'
+#' This function creates a file suitable for outputting thresholds to, with all variables that can be created with the input data present in the file.
+#'
+#' @param thresholds.file The filename to be used for the thresholds file.
+#' @param f The file(s) being used as sources for metadata.
+#' @param ts The associated time data, as created by \code{nc.get.time.series}.
+#' @param v.f.idx A mapping from variables to files, as created by \code{\link{get.var.file.idx}}. +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param base.range The base range; a vector of two numeric years. +#' @param dim.size Dimension sizes for the input. +#' @param dim.axes Dimension axes for the input. +#' @param threshold.dat Threshold metadata, as provided by \code{\link{get.thresholds.metadata}}. +#' @param author.data A vector containing named elements describing the author; see \code{\link{create.indices.from.files}}. +#' @return An object of class \code{ncdf4}. +#' +#' @examples +#' \donttest{ +#' ## Establish basic inputs. +#' author.data <- list(institution="Looney Bin", institution_id="LBC") +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' +#' ## Prepare derived inputs. +#' f <- lapply(input.files, ncdf4::nc_open) +#' variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") +#' f.meta <- create.file.metadata(f, variable.name.map) +#' threshold.dat <- get.thresholds.metadata(names(f.meta$v.f.idx)) +#' +#' ## Create output file +#' thresh.file <- create.thresholds.file("thresh.nc", f, f.meta$ts, f.meta$v.f.idx, variable.name.map, +#' c(1981,1990), f.meta$dim.size, f.meta$dim.axes, +#' threshold.dat, author.data) +#' } +#' +#' @export +create.thresholds.file <- function(thresholds.file, f, ts, v.f.idx, variable.name.map, base.range, dim.size, dim.axes, threshold.dat, author.data) { + exemplar.file <- f[[v.f.idx[1]]] + exemplar.var.name <- variable.name.map[names(v.f.idx)[1]] + exemplar.var <- exemplar.file$var[[exemplar.var.name]] + num.thresholds <- ifelse(is.null(attr(ts, "dpy")), 365, attr(ts, "dpy")) + cal <- attr(ts, "cal") + + ## Get time metadata... 
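+  ## e.g. time units of "days since 1850-01-01" yield the origin
+  ## "1850-01-01"; units of the "<unit> as <format>" style (assumed here,
+  ## e.g. "day as %Y%m%d.%f") carry no origin, so the start of the time
+  ## series is used instead.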
+ old.time.dim <- exemplar.var$dim[[which(dim.axes == "T")]] + time.units <- old.time.dim$units + time.units.split <- strsplit(time.units, " ")[[1]] + time.origin <- if(time.units.split[2] == "as") format(trunc(min(ts), units="days"), "%Y-%m-%d") else time.units.split[3] + time.dim.name <- old.time.dim$name + old.time.bnds.att <- ncdf4::ncatt_get(exemplar.file, time.dim.name, "bounds") + time.bnds.name <- if(old.time.bnds.att$hasatt) old.time.bnds.att$value else "time_bnds" + + ## Set up time variables + out.time <- as.numeric(julian(as.PCICt(paste(floor(mean(base.range)), 1:num.thresholds, sep="-"), attr(ts, "cal"), format="%Y-%j"), as.PCICt(time.origin, cal)), units="days") + out.time.dim <- ncdf4::ncdim_def("time", paste("days since", time.origin), out.time, unlim=TRUE, calendar=cal, longname="time") + + ## Set up bounds + input.bounds <- ncdf4.helpers::nc.get.dim.bounds.var.list(exemplar.file) + input.bounds <- input.bounds[input.bounds != time.bnds.name] + input.dim.names <- ncdf4.helpers::nc.get.dim.names(exemplar.file, exemplar.var.name) + input.varname.list <- c(input.bounds, input.dim.names) + + bnds.dim <- ncdf4::ncdim_def("bnds", "", 1:2, create_dimvar=FALSE) + if(length(input.bounds) > 0) + bnds.dim <- exemplar.file$var[[input.bounds[1]]]$dim[[1]] + out.time.bnds <- as.numeric(julian(as.PCICt(c(paste(base.range[1], 1:num.thresholds, sep="-"), paste(base.range[2], 1:num.thresholds, sep="-")), attr(ts, "cal"), format="%Y-%j"), as.PCICt(time.origin, cal)), units="days") + dim(out.time.bnds) <- c(num.thresholds, 2) + out.time.bnds <- t(out.time.bnds) + out.time.bnds.var <- ncdf4::ncvar_def(time.bnds.name, '', list(bnds.dim, out.time.dim), longname='', prec="double") + + input.bounds.vars <- c(lapply(input.bounds, function(x) { exemplar.file$var[[x]] }), list(out.time.bnds.var)) + input.bounds.data <- c(lapply(input.bounds, function(x) { ncdf4::ncvar_get(exemplar.file, x) }), list(out.time.bnds)) + all.bounds <- c(input.bounds, time.bnds.name) + names(input.bounds.data) <- names(input.bounds.vars) <- all.bounds + + ## Set up 2d and 3d dims + out.dims.3d <- list(exemplar.var$dim[[which(dim.axes == 'X')]], exemplar.var$dim[[which(dim.axes == 'Y')]], out.time.dim) + out.dims.2d <- list(exemplar.var$dim[[which(dim.axes == 'X')]], exemplar.var$dim[[which(dim.axes == 'Y')]]) + out.vars <- sapply(names(threshold.dat), function(n) { + tinfo <- threshold.dat[[n]] + if(tinfo$has.time) + ncdf4::ncvar_def(n, tinfo$units, out.dims.3d, 1e20, tinfo$longname, prec="double") + else + ncdf4::ncvar_def(n, tinfo$units, out.dims.2d, 1e20, tinfo$longname, prec="double") + }, simplify=FALSE) + + ## Tack bounds vars onto var list so they get created... + all.vars <- c(input.bounds.vars, out.vars) + + ## Create file + thresholds.netcdf <- ncdf4::nc_create(thresholds.file, all.vars, force_v4=TRUE) + out.dim.axes <- c("X", "Y", "T") + + ## Copy attributes for all variables plus global attributes + ncdf4::nc_redef(thresholds.netcdf) + ncdf4.helpers::nc.copy.atts(exemplar.file, 0, thresholds.netcdf, 0, definemode=TRUE) + for(v in input.varname.list) { + ncdf4.helpers::nc.copy.atts(exemplar.file, v, thresholds.netcdf, v, definemode=TRUE) + } + + put.ETCCDI.atts(thresholds.netcdf, "monClim", ncdf4::ncatt_get(exemplar.file, 0, "title")$value, author.data, definemode=TRUE) + + ## Attach history data to threshold data. 
+ lapply(out.vars, function(v) { + put.history.att(thresholds.netcdf, v, definemode=TRUE) + ncdf4::ncatt_put(thresholds.netcdf, v, "base_period", paste(base.range, collapse="-"), definemode=TRUE) + }) + ncdf4::nc_enddef(thresholds.netcdf) + + ## Put bounds data. + for(v in all.bounds) { + ncdf4::ncvar_put(thresholds.netcdf, v, input.bounds.data[[v]]) + } + + return(thresholds.netcdf) +} + +#' Create mapping from variables to files. +#' +#' Create mapping from variables to files. +#' +#' Given a variable name map and list of variables in each file, determine a mapping from variables to files. +#' +#' @param variable.name.map A mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param v.list A list containing a vector of variables in each file. +#' @return A vector mapping standardized variable names (tmax, tmin, prec) to indices in the file list. +#' +#' @examples +#' \dontrun{ +#' ## Get mapping for a single file. +#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' f <- lapply(input.files, ncdf4::nc_open) +#' v.list <- lapply(f, ncdf4.helpers::nc.get.variable.list, min.dims=2) +#' v.f.idx <- get.var.file.idx(variable.name.map, v.list) +#' } +#' +#' @export +get.var.file.idx <- function(variable.name.map, v.list) { + v.f.idx <- sapply(variable.name.map, function(v) { which(sapply(v.list, function(vl) { v %in% vl })) }, simplify=FALSE) + v.f.idx <- unlist(v.f.idx[sapply(v.f.idx, length) > 0]) + return(v.f.idx) +} + +#' Retrieve metadata about NetCDF-format files. +#' +#' Retrieve metadata about NetCDF-format files. +#' +#' Given a list of NetCDF files and a mapping from standard variable names (tmax, tmin, prec) to NetCDF variable names, retrieve a set of standardized metadata. +#' +#' @param f The list of NetCDF files. +#' @param variable.name.map A named character vector mapping standard variable names (tmax, tmin, prec) to NetCDF variable names. +#' @return A list containing time data (ts), dimension sizes (dim.size), dimension axes (dim.axes), source units (src.units), destination units (dest.units), a mapping from variables to files (v.f.idx), and a projection, if available. +#' +#' @examples +#' \dontrun{ +#' ## Get metadata about a single input file. 
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc") +#' f <- lapply(input.files, ncdf4::nc_open) +#' f.meta <- create.file.metadata(f, variable.name.map) +#' } +#' +#' @export +create.file.metadata <- function(f, variable.name.map) { + v.list <- lapply(f, ncdf4.helpers::nc.get.variable.list, min.dims=2) + v.f.idx <- get.var.file.idx(variable.name.map, v.list) + + if(any(sapply(v.list, function(vl) { sum(variable.name.map %in% vl) }) == 0)) + stop("At least one input file doesn't contain any of the named variables.") + if(anyDuplicated(unlist(names(v.f.idx)))) + stop("Variable(s) present in more than one input file.") + + ## Get units and specify destination units + dest.units <- c(prec="kg m-2 d-1", tmax="degrees_C", tmin="degrees_C", tavg="degrees_C") + dest.units <- dest.units[names(dest.units) %in% names(v.f.idx)] + + ## Get projection + projection <- ncdf4.helpers::nc.get.proj4.string(f[[1]], v.list[[1]][1]) + stopifnot(!is.null(projection)) + if(projection == "") + projection <- NULL + + return(list(ts=get.ts(f), dim.size=get.dim.size(f, v.f.idx, variable.name.map), dim.axes=get.dim.axes(f, v.f.idx, variable.name.map), + src.units=sapply(names(v.f.idx), function(i) { f[[v.f.idx[i]]]$var[[variable.name.map[i]]]$units }), + dest.units=dest.units, v.f.idx=v.f.idx, projection=projection)) +} + +#' Retrieve threshold metadata +#' +#' Retrieve threshold metadata +#' +#' Returns units, long names, locations within the climdexInput data structure, and whether time data should be included given the variable information available. +#' +#' @param var.names A vector containing names of available variables (tmax, tmin, prec). +#' @return A list containing metadata for each of the six thresholds. +#' +#' @examples +#' thresholds.meta <- get.thresholds.metadata("prec") +#' +#' @export +get.thresholds.metadata <- function(var.names) { + threshold.dat <- list(tx10thresh=list(units="degrees_C", longname="10th_percentile_running_baseline_tasmax", has.time=TRUE, q.path=c("tmax", "outbase", "q10")), + tx90thresh=list(units="degrees_C", longname="90th_percentile_running_baseline_tasmax", has.time=TRUE, q.path=c("tmax", "outbase", "q90")), + tn10thresh=list(units="degrees_C", longname="10th_percentile_running_baseline_tasmin", has.time=TRUE, q.path=c("tmin", "outbase", "q10")), + tn90thresh=list(units="degrees_C", longname="90th_percentile_running_baseline_tasmin", has.time=TRUE, q.path=c("tmin", "outbase", "q90")), + r95thresh=list(units="kg m-2 d-1", longname="95th_percentile_baseline_wet_day_pr", has.time=FALSE, q.path=c("prec", "q95")), + r99thresh=list(units="kg m-2 d-1", longname="99th_percentile_baseline_wet_day_pr", has.time=FALSE, q.path=c("prec", "q99"))) + return(threshold.dat[sapply(threshold.dat, function(x) { x$q.path[1] %in% var.names })]) +} + +unsquash.dims <- function(dat.dim, subset, f, n) { + dim.axes <- ncdf4.helpers::nc.get.dim.axes(f, n) + return(sapply(dim.axes, function(x) { if(any(names(subset) == x)) length(subset[[x]]) else f$dim[[names(dim.axes)[dim.axes == x]]]$len })) +} + +## Run Climdex to generate indices for computing Climdex on future data +#' Create Climdex thresholds used for computing threshold-based indices +#' +#' Create Climdex thresholds used for computing threshold-based indices +#' +#' For many applications, one may want to compute thresholds on one data set, then apply them to another. 
This is usually the case when comparing GCM (Global Climate Model) results for future time periods to either historical reanalysis data or historical / pre-industrial control runs from models. The purpose of this function is to compute these thresholds on the data supplied, saving them to the file specified. These thresholds can then be used with \code{\link{create.indices.from.files}} to compute threshold-based indices.
+#'
+#' @param input.files A list of filenames of NetCDF files to be used as input. A NetCDF file may contain one or more variables.
+#' @param output.file The name of the file to be created.
+#' @param author.data A vector containing named elements describing the author; see \code{\link{create.indices.from.files}}.
+#' @param variable.name.map A character vector mapping from standardized names (tmax, tmin, prec) to NetCDF variable names.
+#' @param axis.to.split.on The axis to split up the data on for parallel / incremental processing.
+#' @param fclimdex.compatible Whether the thresholds should be created to match fclimdex thresholds; affects padding at the ends of the base period.
+#' @param base.range Vector of two numeric years specifying the start and end years.
+#' @param parallel The number of parallel processing threads, or FALSE if no parallel processing is desired.
+#' @param verbose Whether to be chatty.
+#' @param max.vals.millions The number of data values to process at one time (length of time dim * number of values * number of variables).
+#' @param cluster.type The cluster type, as used by the \code{snow} library.
+#' @param src The path of an R source file to be sourced on each cluster worker when running in parallel, used to load these functions on the workers.
+#'
+#' @note NetCDF input files may contain one or more variables, named as per \code{variable.name.map}. The code will search the files for the named variables.
+#'
+#' @examples
+#' \dontrun{
+#' ## Prepare input data and calculate thresholds for a file.
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc")
+#' author.data <- list(institution="Looney Bin", institution_id="LBC")
+#' create.thresholds.from.file(input.files, "thresh.nc", author.data,
+#'                             base.range=c(1991, 2000), parallel=FALSE)
+#' }
+#'
+#' @export
+create.thresholds.from.file <- function(input.files, output.file, author.data, variable.name.map=c(tmax="tasmax", tmin="tasmin", prec="pr", tavg="tas"), axis.to.split.on="Y", fclimdex.compatible=TRUE, base.range=c(1961, 1990), parallel=4, verbose=FALSE, max.vals.millions=10, cluster.type="SOCK", src="ncdf.R") {
+  if(!(is.logical(parallel) || is.numeric(parallel)))
+    stop("'parallel' option must be logical or numeric.")
+
+  if(length(input.files) == 0)
+    stop("Require at least one input file.")
+
+  f <- lapply(input.files, ncdf4::nc_open)
+  f.meta <- create.file.metadata(f, variable.name.map)
+
+  ## Define what the threshold indices will look like...
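+  ## For tasmax input, for instance, threshold.dat$tx10thresh comes back
+  ## from get.thresholds.metadata as:
+  ##   list(units="degrees_C",
+  ##        longname="10th_percentile_running_baseline_tasmax",
+  ##        has.time=TRUE, q.path=c("tmax", "outbase", "q10"))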
+ threshold.dat <- get.thresholds.metadata(names(f.meta$v.f.idx)) + + ## Create the output file + thresholds.netcdf <- create.thresholds.file(output.file, f, f.meta$ts, f.meta$v.f.idx, variable.name.map, base.range, f.meta$dim.size, f.meta$dim.axes, threshold.dat, author.data) + + cluster <- set.up.cluster(parallel, cluster.type, src) + subsets <- ncdf4.helpers::get.cluster.worker.subsets(max.vals.millions * 1000000, f.meta$dim.size, f.meta$dim.axes, axis.to.split.on) + + write.thresholds.data <- function(out.list, out.sub) { + lapply(names(threshold.dat), function(n) { + d <- threshold.dat[[n]] + if(d$has.time) + dat <- t(sapply(out.list, function(y) { return(y[[d$q.path]]) })) + else + dat <- sapply(out.list, function(y) { return(y[[d$q.path[1]]][d$q.path[2]]) }) + dim(dat) <- unsquash.dims(dim(dat), out.sub, thresholds.netcdf, n) + ncdf4.helpers::nc.put.var.subset.by.axes(thresholds.netcdf, n, dat, out.sub) + }) + gc() + } + + if(!is.null(cluster)) { + lapply(f, ncdf4::nc_close) + rm(f) + + snow::clusterExport(cluster, "input.files", environment()) + snow::clusterEvalQ(cluster, f <<- lapply(input.files, ncdf4::nc_open, readunlim=FALSE)) + + ## Compute subsets and fire jobs off; collect and write out chunk-at-a-time + parLapplyLBFiltered(cluster, subsets, get.quantiles.for.stripe, f.meta$ts, base.range, f.meta$dim.axes, f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, local.filter.func=write.thresholds.data) + + snow::stopCluster(cluster) + } else { + ##try(getFromNamespace('nc_set_chunk_cache', 'ncdf4')(1024 * 2048, 1009), silent=TRUE) + + lapply(subsets, function(x) { write.thresholds.data(get.quantiles.for.stripe(x, f.meta$ts, base.range, f.meta$dim.axes, f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, f), x) }) + + lapply(f, ncdf4::nc_close) + } + + ## Close all the files + ncdf4::nc_close(thresholds.netcdf) + + cat("Finished computing thresholds\n") + invisible(0) +} + +#' Open thresholds file(s) +#' +#' Open thresholds file(s) +#' +#' This function opens one or more thresholds files and returns the \code{ncdf4} objects as a list. +#' +#' @param thresholds.files A character vector containing the names of thresholds files. +#' @return A list of objects of class \code{ncdf4}, or NULL if thresholds.files is NULL. +#' +#' @examples +#' \dontrun{ +#' ## Open a single thresholds file +#' thresholds.files <- c("thresh.nc") +#' thresh <- thresholds.open(thresholds.files) +#' } +#' +#' @export +thresholds.open <- function(thresholds.files) { + return(if(is.null(thresholds.files)) NULL else lapply(thresholds.files, ncdf4::nc_open)) +} + +#' Close thresholds file(s) +#' +#' Close thresholds file(s) +#' +#' This function closes one or more thresholds files. +#' +#' @param thresholds.nc A list of objects of class \code{ncdf4}, or NULL +#' +#' @examples +#' \dontrun{ +#' ## Open a single thresholds file, then close it. 
+#' thresholds.files <- c("thresh.nc") +#' thresh <- thresholds.open(thresholds.files) +#' thresholds.close(thresh) +#' } +#' +#' @export +thresholds.close <- function(thresholds.nc) { + if(!is.null(thresholds.nc)) lapply(thresholds.nc, ncdf4::nc_close) + invisible(0) +} + + +get.time.origin <- function(f, dim.axes) { + time.units <- f[[1]]$dim[[names(dim.axes)[which(dim.axes == "T")]]]$units + time.units.split <- strsplit(gsub("[ ]+", " ", time.units), " ")[[1]] + time.origin <- if(time.units.split[2] == "as") format(trunc(min(ts), units="days"), "%Y-%m-%d") else time.units.split[3] + return(time.origin) +} + +get.thresholds.f.idx <- function(thresholds.files, thresholds.name.map) { + if(is.null(thresholds.files)) { + return(NULL) + } else { + thresh <- thresholds.open(thresholds.files) + t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresh, ncdf4.helpers::nc.get.variable.list, min.dims=2)) + thresholds.close(thresh) + return(t.f.idx) + } +} + +## Run Climdex and populate the output files +#' Create Climdex indices from NetCDF input files. +#' +#' Create Climdex indices from NetCDF input files. +#' +#' This function computes Climdex indices from NetCDF input files, writing out one file per variable named like the \code{template.filename}, which must follow the CMIP5 file naming conventions (this is a deficiency which will be corrected in later versions). +#' +#' The indices to be calculated can be specified; if not, they will be determined by data availability. Thresholds can be supplied (via \code{thresholds.files}) or, if there is data within the base period, calculated and used as part of the process. Note that in-base thresholds are separate from out-of-base thresholds; this is covered in more detail in the help for the \code{climdex.pcic} package. +#' +#' @param input.files A list of filenames of NetCDF files to be used as input. A NetCDF file may contain one or more variables. +#' @param out.dir The directory to put the output files in. +#' @param output.filename.template The output filename to be used as a template, which must follow the CMIP5 file naming conventions. +#' @param author.data Data describing the author; a character vector with 0 or more of the following named values:\describe{ +#' \item{institution}{The institution generating the data.} +#' \item{institution_id}{An abbreviation for the institution generating the data.} +#' \item{indices_archive}{The URL the data is published at, if applicable.} +#' \item{contact}{The email address or contact info for the author.} +#' \item{references}{What to reference when citing this work.} +#' } +#' @param climdex.vars.subset A character vector of lower-case names of Climdex indices to calculate (eg: tr, fd, rx5day). See the list of 27 indices in the References section. +#' @param climdex.time.resolution The time resolution to compute indices at; one of "all" (both monthly and annual), "annual" (only annual), or "monthly" (only monthly). +#' @param variable.name.map A character vector mapping from standardized names (tmax, tmin, prec) to NetCDF variable names. +#' @param axis.to.split.on The axis to split up the data on for parallel / incremental processing. +#' @param fclimdex.compatible Whether the thresholds should be created to match fclimdex thresholds; affects padding at the ends of the base period. +#' @param base.range Vector of two numeric years specifying the start and end years. +#' @param parallel The number of parallel processing threads, or FALSE if no parallel processing is desired. 
+#' @param verbose Whether to be chatty.
+#' @param thresholds.files A character vector of files containing thresholds to be used.
+#' @param thresholds.name.map A mapping from threshold names to NetCDF variable names. The following names will be used: \describe{
+#' \item{tx10thresh}{10th percentile for a 5-day running window of baseline daily maximum temperature.}
+#' \item{tn10thresh}{10th percentile for a 5-day running window of baseline daily minimum temperature.}
+#' \item{tx90thresh}{90th percentile for a 5-day running window of baseline daily maximum temperature.}
+#' \item{tn90thresh}{90th percentile for a 5-day running window of baseline daily minimum temperature.}
+#' \item{r95thresh}{95th percentile of daily precipitation in wet days (>=1 mm of rain).}
+#' \item{r99thresh}{99th percentile of daily precipitation in wet days (>=1 mm of rain).}
+#' }
+#' @param max.vals.millions The number of data values to process at one time (length of time dim * number of values * number of variables).
+#' @param cluster.type The cluster type, as used by the \code{snow} library.
+#' @param src The path of an R source file to be sourced on each cluster worker when running in parallel, used to load these functions on the workers.
+#'
+#' @note NetCDF input files may contain one or more variables, named as per \code{variable.name.map}. The code will search the files for the named variables. The same is true of thresholds files; one file may be supplied, or multiple files may be supplied, via the \code{thresholds.files} argument; and the name mapping may be supplied via the \code{thresholds.name.map} argument.
+#'
+#' @references \url{http://etccdi.pacificclimate.org/list_27_indices.shtml}
+#' @examples
+#' \dontrun{
+#' ## Prepare input data and calculate indices for a single file
+#' ## with a single thread (no parallelism).
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc")
+#' author.data <- list(institution="Looney Bin", institution_id="LBC")
+#' create.indices.from.files(input.files, "out_dir/", input.files[1], author.data,
+#'                           base.range=c(1991, 2000), parallel=FALSE)
+#'
+#' ## Prepare input data and calculate indices for two files
+#' ## in parallel given thresholds.
+#' input.files <- c("pr_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc",
+#'                  "tasmax_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc")
+#' author.data <- list(institution="Looney Bin", institution_id="LBC")
+#' create.indices.from.files(input.files, "out_dir/", input.files[1], author.data,
+#'                           base.range=c(1991, 2000), parallel=8, thresholds.files="thresh.nc")
+#' }
+#'
+#' @export
+create.indices.from.files <- function(input.files, out.dir, output.filename.template, author.data, climdex.vars.subset=NULL, climdex.time.resolution=c("all", "annual", "monthly"), variable.name.map=c(tmax="tasmax", tmin="tasmin", prec="pr", tavg="tas"), axis.to.split.on="Y", fclimdex.compatible=TRUE, base.range=c(1961, 1990), parallel=4, verbose=FALSE, thresholds.files=NULL, thresholds.name.map=c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh"), max.vals.millions=10, cluster.type="SOCK", src="ncdf.R") {
+  if(!(is.logical(parallel) || is.numeric(parallel)))
+    stop("'parallel' option must be logical or numeric.")
+
+  if(length(input.files) == 0)
+    stop("Require at least one input file.")
+
+  ## Open files, determine mapping between files and variables.
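+  ## e.g. given one pr file and one tasmax file, f.meta$v.f.idx below
+  ## would be a named vector along the lines of c(tmax=2, prec=1),
+  ## mapping each standardized variable name to the providing file's
+  ## index (a sketch; the exact order follows variable.name.map).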
+ f <- lapply(input.files, ncdf4::nc_open) + f.meta <- create.file.metadata(f, variable.name.map) + + ## Get thresholds variable-file mapping + t.f.idx <- get.thresholds.f.idx(thresholds.files, thresholds.name.map) + + ## Get variable list, subset if necessary + climdex.time.resolution <- match.arg(climdex.time.resolution) + climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), climdex.time.resolution, climdex.vars.subset) + + cdx.meta <- get.climdex.variable.metadata(climdex.var.list, output.filename.template) + cdx.ncfile <- create.ncdf.output.files(cdx.meta, f, f.meta$v.f.idx, variable.name.map, f.meta$ts, get.time.origin(f, f.meta$dim.axes), base.range, out.dir, author.data) + cdx.funcs <- get.climdex.functions(climdex.var.list) + + ## Compute indices, either single process or multi-process using 'parallel' + subsets <- ncdf4.helpers::get.cluster.worker.subsets(max.vals.millions * 1000000, f.meta$dim.size, f.meta$dim.axes, axis.to.split.on) + if(is.numeric(parallel)) { + ## Setup... + lapply(f, ncdf4::nc_close) + rm(f) + cluster <- set.up.cluster(parallel, cluster.type, src) + snow::clusterExport(cluster, list("input.files", "thresholds.files"), environment()) + snow::clusterEvalQ(cluster, f <<- lapply(input.files, ncdf4::nc_open, readunlim=FALSE)) + snow::clusterEvalQ(cluster, thresholds.netcdf <<- thresholds.open(thresholds.files)) + + ## Meat... + parLapplyLBFiltered(cluster, subsets, compute.indices.for.stripe, cdx.funcs, f.meta$ts, base.range, f.meta$dim.axes, f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, t.f.idx, thresholds.name.map, fclimdex.compatible, f.meta$projection, local.filter.func=function(x, x.sub) { + write.climdex.results(x, x.sub, cdx.ncfile, f.meta$dim.size, cdx.meta$var.name) + }) + + ## Clean-up. + snow::stopCluster(cluster) + } else { + ## Setup... + thresholds.netcdf <- thresholds.open(thresholds.files) + ##try(getFromNamespace('nc_set_chunk_cache', 'ncdf4')(1024 * 2048, 1009), silent=TRUE) + + ## Meat... + lapply(subsets, function(x) { write.climdex.results(compute.indices.for.stripe(x, cdx.funcs, f.meta$ts, base.range, f.meta$dim.axes, f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, t.f.idx, thresholds.name.map, fclimdex.compatible, f.meta$projection, f, thresholds.netcdf), x, cdx.ncfile, f.meta$dim.size, cdx.meta$var.name) }) + + ## Clean-up. + thresholds.close(thresholds.netcdf) + lapply(f, ncdf4::nc_close) + } + + ## Close all the output files + lapply(cdx.ncfile, ncdf4::nc_close) + + cat("Finished computing indices\n") + invisible(0) +} +# nolint end diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/README.rst b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/README.rst new file mode 100644 index 0000000000..026ac1ae11 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/README.rst @@ -0,0 +1,66 @@ +What is climdex.pcic.ncdf? +===================== + +* `climdex.pcic.ncdf` is a companion library for `climdex.pcic` which helps in using NetCDF input grids and writing to NetCDF output files when computing the `27 core indices of extreme climate`_. The code allows for parallel computation of indices using either a SOCK or MPI cluster. It was written for the `R statistical programming language`_ by the `Pacific Climate Impacts Consortium`_. + +.. _27 core indices of extreme climate: http://etccdi.pacificclimate.org/list_27_indices.shtml +.. _R statistical programming language: http://www.r-project.org/ +.. 
+
+Getting Help
+============
+
+New to programming or to R?
+---------------------------
+
+* Read the `Software Carpentry`_ `Programming in R`_ lessons
+* Read one of the many `R Manuals`_.
+* Attend an `R Users Group`_ meeting.
+
+.. _Software Carpentry: http://software-carpentry.org/index.html
+.. _Programming in R: http://software-carpentry.org/v5/novice/r/index.html
+.. _R Manuals: http://cran.r-project.org/manuals.html
+.. _R Users Group: http://r-users-group.meetup.com/
+
+Looking for code?
+-----------------
+
+* Get the latest `climdex.pcic.ncdf release from our website`_.
+* Explore the `development repository`_.
+* Install it with devtools ::
+
+    > library(devtools)
+    > install_github('pacificclimate/climdex.pcic.ncdf', ref='release')
+
+.. _climdex.pcic.ncdf release from our website: http://www.pacificclimate.org/sites/default/files/climdex.pcic_.ncdf_0.5-4.tar_.gz
+.. _development repository: https://github.com/pacificclimate/climdex.pcic.ncdf/
+
+Need help using the package?
+----------------------------
+
+* Read the manual ::
+
+    > library(climdex.pcic.ncdf)
+    Loading required package: PCICt
+    > ?climdex.pcic.ncdf
+
+* Create a `new issue`_ on the `package issue tracker`_ and label it "help wanted" [1]_.
+
+.. _new issue: https://github.com/pacificclimate/climdex.pcic.ncdf/issues/new
+
+Want to contribute?
+-------------------
+
+* To report a bug in climdex.pcic.ncdf use the `package issue tracker`_ (after you've read the `bug reporting guide`_).
+* To help with development read through the `contributor's guide`_.
+
+.. _bug reporting guide: https://github.com/pacificclimate/climdex.pcic.ncdf/blob/master/CONTRIBUTING.rst#bug-reports
+.. _package issue tracker: https://github.com/pacificclimate/climdex.pcic.ncdf/issues
+.. _contributor's guide: https://github.com/pacificclimate/climdex.pcic.ncdf/blob/master/CONTRIBUTING.rst
+
+Still need help?
+----------------
+
+* Contact climate@uvic.ca and let us know what we can do.
+
+.. [1] Please know that the pool of people who can provide support for the package is extremely small and time is limited. We don't necessarily have the capacity for long, open-ended user support. If you keep your questions short, specific and direct, there's a greater probability that someone will take on the ticket.
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/bootstrap.R b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/bootstrap.R
new file mode 100644
index 0000000000..2df9e05728
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/bootstrap.R
@@ -0,0 +1,14 @@
+#nolint start
+load("exemplar_data.rda")
+
+if(require("RUnit", quietly=TRUE)) {
+  ## Run all the tests
+  library(climdex.pcic.ncdf)
+  library(ncdf4)
+  wd <- getwd()
+  testsuite <- defineTestSuite("climdex.pcic.ncdf", dirs=wd, testFileRegexp = "^test_.+.R$", testFuncRegexp = "^test.+")
+  climdex.pcic.ncdf.test.result <- runTestSuite(testsuite)
+  printTextProtocol(climdex.pcic.ncdf.test.result)
+  stopifnot(climdex.pcic.ncdf.test.result$climdex.pcic.ncdf$nFail == 0 && climdex.pcic.ncdf.test.result$climdex.pcic.ncdf$nErr == 0)
+}
+#nolint end
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/exemplar_data.rda b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/exemplar_data.rda
new file mode 100644
index 0000000000..bafe972068
Binary files /dev/null and b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/exemplar_data.rda differ
diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_basic_file_funcs.R b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_basic_file_funcs.R
new file mode 100644
index 0000000000..7ac98d7061
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_basic_file_funcs.R
@@ -0,0 +1,225 @@
+#nolint start
+author.data <- list(institution="Pacific Climate Impacts Consortium",
+                    institution_id="PCIC",
+                    indices_archive="Please check http://www.cccma.ec.gc.ca/data/climdex/climdex.shtml for errata or updates.",
+                    contact="bronaugh@uvic.ca",
+                    references="http://www.cccma.ec.gc.ca/data/climdex/"
+                    )
+x.subset <- 33:34
+y.subset <- 17:18
+correct.data.dir <- "correct_output/"
+correct.thresh.file.6190 <- paste(correct.data.dir, "thresholds_monClim_CanESM2_historical_r1i1p1_1961-1990.nc", sep="")
+thresh.omit.list <- c("tx10p", "tn10p", "tn90p", "tx90p", "wsdi", "csdi")
+
+test.get.thresholds.chunk <- function() {
+  ## Define mappings and filenames.
+  thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh",
+                           tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh")
+  thresh.files <- "correct_output/thresholds_monClim_CanESM2_historical_r1i1p1_1961-1990.nc"
+
+  if(all(file.exists(thresh.files))) {
+    ## Open files, etc.
+    cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax"))
+    thresholds.netcdf <- lapply(thresh.files, nc_open)
+    t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf,
+                                ncdf4.helpers::nc.get.variable.list, min.dims=2))
+
+    ## Get thresholds chunk.
+    dat <- get.thresholds.chunk(list(), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map)
+    checkEquals(thresholds.chunk.tmax.only, dat)
+
+    lapply(thresholds.netcdf, nc_close)
+  }
+}
+
+test.compute.indices.for.stripe <- function() {
+  ## Define mappings and filenames.
+  author.data <- list(institution="Looney Bin", institution_id="LBC")
+  input.files <- list.files("test1/", full.names=TRUE)
+  variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr")
+  thresh.files <- "correct_output/thresholds_monClim_CanESM2_historical_r1i1p1_1961-1990.nc"
+
+  if(length(input.files) > 0 && all(file.exists(input.files)) && all(file.exists(thresh.files))) {
+    ## Open files, etc.
+ cdx.funcs <- get.climdex.functions(get.climdex.variable.list("prec")) + f <- lapply(input.files, ncdf4::nc_open) + f.meta <- create.file.metadata(f, variable.name.map) + climdex.var.list <- get.climdex.variable.list(names(f.meta$v.f.idx), "all", NULL) + cdx.meta <- get.climdex.variable.metadata(climdex.var.list, input.files[1]) + + thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", + tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") + thresholds.netcdf <- lapply(thresh.files, nc_open) + t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, + ncdf4.helpers::nc.get.variable.list, min.dims=2)) + thresh.dat <- get.thresholds.chunk(list(), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) + + ## Compute indices for stripe + cdx <- compute.indices.for.stripe(list(X=1:2, Y=1:2), cdx.funcs, f.meta$ts, c(1981, 1990), f.meta$dim.axes, + f.meta$v.f.idx, variable.name.map, f.meta$src.units, f.meta$dest.units, + t.f.idx, thresholds.name.map, f=f, thresholds.netcdf=thresholds.netcdf) + + lapply(thresholds.netcdf, nc_close) + + res <- lapply(names(cdx[[1]]), function(x) { + fn <- list.files("correct_output/", pattern=paste("^", x, sep=""), full.names=TRUE) + f.valid <- nc_open(fn, readunlim=FALSE) + d.input <- ncvar_get(f.valid, strsplit(x, "_")[[1]][1]) + nc_close(f.valid) + d.comparison <- t(do.call(cbind, lapply(cdx, function(cr) { cr[[x]] }))) + dim(d.comparison) <- dim(d.input) + + ## Apparently there are differences at the 3e-6 level between calculated and saved data... who knew? + checkEquals(d.input, d.comparison, tolerance=1e-5) + mean(abs(d.input - d.comparison)) + }) + + lapply(f, nc_close) + } + invisible(0) +} + +test.get.quantiles.for.stripe <- function() { + historical.files <- list.files("historical/", full.names=TRUE) + if(length(historical.files) > 0) { + ## FIXME: This is untestable with the current input data. + + ## Establish basic inputs. + author.data <- list(institution="Looney Bin", institution_id="LBC") + input.files <- list.files("test1/", full.names=TRUE) + + ## Prepare derived inputs. + f <- lapply(input.files, ncdf4::nc_open) + variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") + f.meta <- create.file.metadata(f, variable.name.map) + threshold.dat <- get.thresholds.metadata(names(f.meta$v.f.idx)) + + ## Compute threshold quantiles for stripe + q <- get.quantiles.for.stripe(list(Y=1), f.meta$ts, c(1981, 1990), f.meta$dim.axes, + f.meta$v.f.idx, variable.name.map, f.meta$src.units, + f.meta$dest.units, f) + + ## FIXME: Compare to valid data. + + lapply(f, nc_close) + } +} + +test.get.quantiles.object <- function() { + ## Define mappings and filenames. + thresholds.name.map <- c(tx10thresh="tx10thresh", tn10thresh="tn10thresh", tx90thresh="tx90thresh", + tn90thresh="tn90thresh", r95thresh="r95thresh", r99thresh="r99thresh") + thresh.files <- "correct_output/thresholds_monClim_CanESM2_historical_r1i1p1_1961-1990.nc" + + if(all(file.exists(thresh.files))) { + ## Open files, etc. + cdx.funcs <- get.climdex.functions(get.climdex.variable.list("tmax")) + thresholds.netcdf <- lapply(thresh.files, nc_open) + t.f.idx <- get.var.file.idx(thresholds.name.map, lapply(thresholds.netcdf, + ncdf4.helpers::nc.get.variable.list, min.dims=2)) + ## Get thresholds chunk. 
+ dat <- get.thresholds.chunk(list(Y=1), cdx.funcs, thresholds.netcdf, t.f.idx, thresholds.name.map) + + ## Get quantiles object for index 2 + q <- get.quantiles.object(dat, 2) + + ## FIXME: Compare to a correct object. + + lapply(thresholds.netcdf, nc_close) + } +} + +test.get.northern.hemisphere.booleans <- function() { + test.get.nh <- function(test.dir) { + input.files <- list.files(test.dir, full.names=TRUE) + f <- lapply(input.files, ncdf4::nc_open) + f.v <- lapply(f, ncdf4.helpers::nc.get.variable.list, min.dims=2) + bools <- get.northern.hemisphere.booleans(list(X=1:2, Y=1:2), f[[1]], f.v[[1]], NULL) + lapply(f, ncdf4::nc_close) + return(bools) + } + ## FIXME: Need test data. + browser() + if(file.exists("test3/")) + checkEquals(test.get.nh("test3/"), rep(FALSE, 4)) + ## FIXME: Need test data, and a projected dataset. + ##checkEquals(test.get.nh("test7/"), correct.data) +} + +## FIXME: Needs proper test data. This is just a framework... +test.get.data <- function() { + test.dir <- "test3/" + variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") + if(file.exists(test.dir)) { + input.files <- list.files(test.dir, full.names=TRUE) + f <- lapply(input.files, ncdf4::nc_open) + f.meta <- create.file.metadata(f, variable.name.map) + d <- get.data(f[[f.meta$v.f.idx['prec']]], "pr", list(Y=2), "kg m-2 s-1", "kg m-2 s-1", c(lon="X",lat="Y",time="T")) + lapply(f, ncdf4::nc_close) + } +} + +## FIXME: Needs proper test data. This is just a framework... +test.file.funcs <- function() { + test.dir <- "test3/" + if(file.exists(test.dir)) { + input.files <- list.files(test.dir, full.names=TRUE) + variable.name.map <- c(tmax="tasmax", tmin="tasmin", prec="pr") + f <- lapply(input.files, ncdf4::nc_open) + f.meta <- create.file.metadata(f, variable.name.map) + lapply(f, ncdf4::nc_close) + } +} + +test.thresholds.create.and.indices <- function() { + test.set <- paste("test", 1:6, "/", sep="") + lapply(test.set[file.exists(test.set)], function(test) { + input.file.list <- list.files(test, full.names=TRUE) + print(file.exists(input.file.list)) + print(input.file.list) + thresh.file <- tempfile() + indices.dir.thresh <- tempdir() + indices.dir.nothresh <- tempdir() + create.thresholds.from.file(input.file.list, thresh.file, author.data, parallel=FALSE, base.range=c(2010, 2019)) + create.indices.from.files(input.file.list, indices.dir.thresh, input.file.list[1], author.data, parallel=FALSE, thresholds.files=correct.thresh.file.6190) + + ## Compare to base data. 
+ test.file.list <- list.files(indices.dir.thresh, pattern="ETCCDI") + lapply(test.file.list, function(fn) { + print(fn) + f.test <- nc_open(paste(indices.dir.thresh, fn, sep="/")) + f.correct <- nc_open(paste(correct.data.dir, fn, sep="/")) + + d.test <- ncvar_get(f.test, ncdf4.helpers::nc.get.variable.list(f.test)[1]) + d.correct <- ncvar_get(f.correct, ncdf4.helpers::nc.get.variable.list(f.correct)[1]) + + checkEquals(d.test, d.correct) + + nc_close(f.test) + nc_close(f.correct) + }) + + create.indices.from.files(input.file.list, indices.dir.thresh, input.file.list[1], author.data, parallel=FALSE, thresholds.files=thresh.file) + create.indices.from.files(input.file.list, indices.dir.nothresh, input.file.list[1], author.data, parallel=FALSE, base.range=c(2010, 2019)) + + unlink(paste(indices.dir.nothresh, "*", sep="/")) + unlink(paste(indices.dir.thresh, "*", sep="/")) + gc() + }) +} + +parallel.thresholds.create.and.indices <- function() { + test.set <- paste("test", 1:6, "/", sep="") + lapply(test.set[file.exists(test.set)], function(test) { + input.file.list <- list.files(test, full.names=TRUE) + print(file.exists(input.file.list)) + print(input.file.list) + thresh.file <- tempfile() + indices.dir.thresh <- tempdir() + indices.dir.nothresh <- tempdir() + create.thresholds.from.file(input.file.list, thresh.file, author.data, parallel=4, base.range=c(2010, 2029)) + create.indices.from.files(input.file.list, indices.dir.thresh, input.file.list[1], author.data, parallel=4, thresholds.files=thresh.file) + create.indices.from.files(input.file.list, indices.dir.nothresh, input.file.list[1], author.data, parallel=4, base.range=c(2010, 2029)) + }) +} +#nolint end diff --git a/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_var_meta.R b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_var_meta.R new file mode 100644 index 0000000000..8064494dd1 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/climdex.pcic.ncdf/tests/test_var_meta.R @@ -0,0 +1,51 @@ +#nolint start +test.get.climdex.variable.list <- function() { + checkEquals(climdex.var.list$tavg.all, get.climdex.variable.list(c("tavg"))) + checkEquals(climdex.var.list$tmax.all, get.climdex.variable.list(c("tmax"))) + checkEquals(climdex.var.list$tmax.mon, get.climdex.variable.list(c("tmax"), time.resolution="monthly")) + checkEquals(climdex.var.list$tmax.yr, get.climdex.variable.list(c("tmax"), time.resolution="annual")) + checkEquals(climdex.var.list$tmax.tmin.all, get.climdex.variable.list(c("tmax", "tmin"))) + checkEquals(climdex.var.list$tmax.prec.yr, get.climdex.variable.list(c("tmax", "prec"), time.resolution="annual")) + checkEquals(climdex.var.list$prec.mon, get.climdex.variable.list(c("prec"), time.resolution="monthly")) + checkEquals(climdex.var.list$prec.yr, get.climdex.variable.list(c("prec"), time.resolution="annual")) + checkEquals(climdex.var.list$tmax.tmin.prec.all, get.climdex.variable.list(c("tmax", "tmin", "prec"))) + checkEquals(climdex.var.list$tmax.tmin.prec.sub, get.climdex.variable.list(c("tmax", "tmin", "prec"), climdex.vars.subset=c("su", "tr", "cdd", "gsl"))) + checkEquals(climdex.var.list$prec.sub, get.climdex.variable.list(c("prec"), climdex.vars.subset=c("su", "tr", "cdd", "gsl"))) + checkEquals(NULL, get.climdex.variable.list(c())) +} + +test.get.climdex.variable.metadata <- function() { + fn1 <- "tasmax_NAM44_CanRCM4_ERAINT_r1i1p1_1989-2009.nc" + fn2 <- "pr_day_CanESM2_rcp85_r2i1p1_20060101-21001231.nc" + checkEquals(climdex.var.meta$tavg.all.1, 
+              get.climdex.variable.metadata(climdex.var.list$tavg.all, fn1))
+  checkEquals(climdex.var.meta$prec.yr.2, get.climdex.variable.metadata(climdex.var.list$prec.yr, fn2))
+}
+
+test.get.climdex.functions <- function() {
+  checkEquals(climdex.functions$tmax.yr, get.climdex.functions(climdex.var.list$tmax.yr))
+  checkEquals(climdex.functions$tmax.tmin.prec.all.fclimdex, get.climdex.functions(climdex.var.list$tmax.tmin.prec.all))
+  checkEquals(climdex.functions$tmax.tmin.prec.all.notfclimdex, get.climdex.functions(climdex.var.list$tmax.tmin.prec.all, FALSE))
+}
+
+test.create.climdex.cmip5.filenames <- function() {
+  fn.split <- c(model="CanESM2", emissions="rcp45", run="r1i1p1", tstart="20100101", tend="20991231")
+
+  valid.tmax.mon.fn <- c("txxETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "txnETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc",
+                         "tx10pETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "tx90pETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc")
+  valid.tmax.all.fn <- c("suETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc", "idETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "txxETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "txxETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "txnETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "txnETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "tx10pETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "tx10pETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "tx90pETCCDI_mon_CanESM2_rcp45_r1i1p1_201001-209912.nc", "tx90pETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc",
+                         "wsdiETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc", "altwsdiETCCDI_yr_CanESM2_rcp45_r1i1p1_2010-2099.nc")
+
+  checkEquals(valid.tmax.mon.fn, create.climdex.cmip5.filenames(fn.split, climdex.var.list$tmax.mon))
+  checkEquals(valid.tmax.all.fn, create.climdex.cmip5.filenames(fn.split, climdex.var.list$tmax.all))
+}
+
+test.flatten.dims <- function() {
+  dat <- structure(1:8, .Dim=c(2, 2, 2))
+  valid.flat <- structure(1:8, .Dim = c(2L, 4L))
+  checkEquals(flatten.dims(dat, 2:3), valid.flat)
+}
+#nolint end
diff --git a/esmvaltool/diag_scripts/extreme_events/common_climdex_preprocessing_for_plots.R b/esmvaltool/diag_scripts/extreme_events/common_climdex_preprocessing_for_plots.R
new file mode 100755
index 0000000000..b51cf4c253
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/common_climdex_preprocessing_for_plots.R
@@ -0,0 +1,219 @@
+# #############################################################################
+# common_climdex_preprocessing_for_plots.R
+#
+# Author: Marit Sandstad (CICERO, Norway)
+#       : Christian Wilhelm Mohr (CICERO, Norway)
+#
+#
+# #############################################################################
+# Description
+#    Common code to preprocess climdex files from multiple
+#    sources for plotting. This includes creating a common grid,
+#    cropping files to the same time span, regridding, land-sea
+#    masking and producing time means.
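+#
+#    The helpers below are typically called in this order:
+#    create_grid() -> create_land_sea_mask() -> set_time_for_files_equal()
+#    -> regrid_and_land_sea_mask().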
+#
+# Modification history
+#    2019 0506-hard_jo : Conversion to ESMValTool2
+#    2018 0725-A_cwmohr : Modification of setTimeForFilesEqual() function
+#    2017 0920-A_maritsandstad: Creation
+#
+# #############################################################################
+
+##
+##
+## Method to create an ASCII grid file to use for regridding
+## @param idx_dir path of directory containing
+##        files from which to create the grid
+##
+create_grid <- function(path = idx_dir, loc = "./gridDef") {
+  ## Picking the grid found in the first file to regrid over
+  first_file <- list.files(path,
+    pattern = paste0(".*", regrid_dataset, ".*\\.nc"),
+    full.names = TRUE
+  )[1]
+  cdo("griddes -delvar,time_bnds", input = first_file,
+      stdout = loc, options = "-O")
+}
+
+#
+# Method to create a land-sea mask on a suitable grid
+# @param regrid name w/path of gridfile to use
+#        to put the land-sea mask on
+#
+create_land_sea_mask <- function(regrid = "./gridDef", loc = "./",
+                                 landmask = "./landSeaMask.nc") {
+  # Test if gridfile exists
+  # otherwise call function to generate one
+  if (!file.exists(regrid)) {
+    create_grid(path = loc, loc = regrid)
+  }
+
+  ## Making topographic map
+  topof <- cdo("topo", options = "-O -f nc")
+
+  ## Regridding the topographic map to chosen grid
+  rtopof <- cdo("remapcon", args = regrid, input = topof, options = "-O")
+
+  # Set above sea-level gridpoints to missing
+  rtopomissf <- cdo("setrtomiss",
+    args = "0,9000", input = rtopof, options = "-O")
+
+  # Set above sea-level gridpoints to 1
+  rtopo1posf <- cdo("setmisstoc",
+    args = "1", input = rtopomissf, options = "-O")
+
+  # Set below sea-level gridpoints to missing
+  cdo("setrtomiss",
+    args = "-9000,0", input = rtopo1posf, output = landmask, options = "-O")
+
+  unlink(c(topof, rtopof, rtopomissf, rtopo1posf))
+}
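+
+## Illustrative usage (a sketch; the paths are examples only):
+##   create_grid(path = "work/extreme_events", loc = "./gridDef")
+##   create_land_sea_mask(regrid = "./gridDef", loc = "work/extreme_events",
+##                        landmask = "./landSeaMask.nc")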
+
+##
+## Method to crop all index files for a single index
+## to the same time period.
+## The smallest common time period is chosen
+## @param path gives path to location of index files
+## @param idx lists the index under consideration
+## @param model_list provides the list of selected models for time cropping
+## @param time_cropped is the directory to put the time cropped files
+## @param max_start is an optional crop start
+## @param min_end is an optional crop end
+##
+set_time_for_files_equal <- function(path, idx, model_list,
+                                     time_cropped = "./timeCropped",
+                                     max_start = 0, min_end = 2500) {
+
+  ## Getting a list of all the files for the index
+  models_avail <- basename(Sys.glob(file.path(
+    path,
+    paste(idx, "*.nc", sep = "")
+  )))
+
+  ## Selecting only the files from the model list
+  models <- vector(mode = "character", length = length(model_list))
+  for (i in seq_along(model_list)) {
+    models[i] <- models_avail[grep(
+      pattern = model_list[i],
+      x = models_avail
+    )]
+  }
+
+  print(models)
+
+  ## Checking if the folder exists and making it if not
+  print(time_cropped)
+  if (!file.exists(time_cropped)) {
+    dir.create(time_cropped)
+  }
+
+  ## Arrays to record original start and end years
+  start <- integer(length(models))
+  end <- integer(length(models))
+
+  i <- 1
+  # For-loop to find the minimum time interval
+  # so we can crop all files to this time interval
+  m <- models[1]
+  for (m in models) {
+    start[i] <- strtoi(substr(m, nchar(m) - 11, nchar(m) - 8))
+    end[i] <- strtoi(substr(m, nchar(m) - 6, nchar(m) - 3))
+
+    if (start[i] > max_start) {
+      max_start <- start[i]
+    }
+
+    if (end[i] < min_end) {
+      min_end <- end[i]
+    }
+    i <- i + 1
+  }
+  if (max_start >= min_end) {
+    print("No time overlap for files")
+    print(c(max_start, min_end))
+    for (m in models) {
+      file.copy(paste0(path, "/", m), paste0(time_cropped, "/", m))
+    }
+    return(c(max_start, min_end))
+  }
+
+  i <- 1
+  # For-loop to crop the files
+  for (m in models) {
+    ## If file is already of appropriate length
+    ## Then just copy it over
+    if (start[i] == max_start && end[i] == min_end) {
+      file.copy(paste0(path, "/", m), paste0(time_cropped, "/", m))
+      ## Otherwise do the time cropping
+    } else {
+      beg <- max_start - start[i]
+      sto <- min_end - max_start + beg
+      newname <- paste(substr(m, 1, nchar(m) - 12),
+        max_start, "-", min_end, ".nc",
+        sep = ""
+      )
+      nco("ncks", paste0(
+        "-d time,", beg, ",", sto, " ",
+        path, "/", m, " ",
+        time_cropped, "/", newname
+      ))
+    }
+    i <- i + 1
+  }
+  return(c(max_start, min_end))
+}
+
+##
+##
+## Method that regrids and land-sea masks a file.
+## Time-mean versions are also produced.
+## @param idx_raw gives the full path name of the file
+## @param regrid gives the file of the grid to regrid on
+## @param landmask gives the file that defines the land-sea mask to be used
+##
+##
+regrid_and_land_sea_mask <- function(idx_raw, regrid = "./gridDef",
+                                     landmask = "./landSeaMask.nc",
+                                     regridded = "./Regridded",
+                                     land = "./Land", loc = "./") {
+
+  ## Getting just the raw name of the file
+  idx_name <- basename(idx_raw)
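+
+  ## The processing chain below: select the index variable, conservatively
+  ## remap it onto the common grid, mask out ocean points by dividing by
+  ## the land mask (land = 1, sea = missing), and finally store a time mean.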
+  ## If the landmask does not exist, we create one.
+  if (!file.exists(landmask)) {
+    create_land_sea_mask(regrid = regrid, loc = loc, landmask = landmask)
+  }
+
+  ## Checking if directories are present and creating them if not:
+  if (!dir.exists(regridded)) {
+    dir.create(regridded)
+  }
+  if (!dir.exists(land)) {
+    dir.create(land)
+  }
+
+  ## Regridding file:
+  varname <- strsplit(idx_name, "_")[[1]][1]
+  tmpsel <- cdo("selvar", args = varname, input = idx_raw, options = "-O")
+  cdo("remapcon",
+    args = regrid, input = tmpsel,
+    output = paste0(regridded, "/", idx_name), options = "-O"
+  )
+  unlink(tmpsel)
+
+  ## Applying land-sea mask:
+  cdo("div",
+    input = c(paste0(regridded, "/", idx_name), landmask),
+    output = paste0(land, "/", idx_name), options = "-O"
+  )
+
+  ## Also produce time mean:
+  ## !! check if this should be subject to some reference period or
+  ## time change
+  cdo("timmean",
+    input = paste0(land, "/", idx_name),
+    output = paste0(land, "/tm_", idx_name), options = "-O" # nolint
+  )
+}
diff --git a/esmvaltool/diag_scripts/extreme_events/extreme_events.R b/esmvaltool/diag_scripts/extreme_events/extreme_events.R
new file mode 100644
index 0000000000..4d12517f0b
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/extreme_events.R
@@ -0,0 +1,387 @@
+# #############################################################################
+# extreme_events.R
+#
+# Authors: Björn Brötz (DLR, Germany)
+#          Marit Sandstad (CICERO, Norway)
+#          Christian W. Mohr (CICERO, Norway)
+# #############################################################################
+# Description
+#    Calculate extreme events with plotting functionality
+#
+# Modification history
+#    2019 0506-hard_jo : conversion to ESMValTool2
+#    2018 1006-A_cwmohr : observation read and sorting fixes
+#    2018 1003-A_cwmohr : correcting r.interface output for observation data.
+#    2018 0725-A_cwmohr : modification of timeseries_main() and climdex selection
+#    2018 0615-A_cwmohr : more clean up of code
+#    2018 0131-A_laue_ax: clean-up of code, adaptation to ESMValTool standards
+#                         added tagging support
+#    2017 0920-A_sand_ma: modification to include plotting
+#    2016 0414-A_broe_bj: written
+# ############################################################################
+
+library(tools)
+library(yaml)
+library(ncdf4)
+library(ncdf4.helpers)
+library(scales)
+library(RColorBrewer) # nolint
+
+provenance_record <- function(infile) {
+  xprov <- list(
+    ancestors = as.list(infile),
+    authors = list("broe_bj", "sand_ma", "mohr_cw", "hard_jo"),
+    references = list("zhang-2011"),
+    projects = list("crescendo", "c3s-magic"),
+    caption = "Extreme events indices",
+    statistics = list("other"),
+    realms = list("atmos"),
+    themes = list("phys"),
+    domains = list("global")
+  )
+  return(xprov)
+}
+
+diag_scripts_dir <- Sys.getenv("diag_scripts")
+climdex_src <- paste0(diag_scripts_dir, "/extreme_events/climdex.pcic.ncdf/R/ncdf.R") # nolint
+source(paste0(diag_scripts_dir, "/extreme_events/climdex.pcic.ncdf/R/ncdf.R")) # nolint
+source(paste0(diag_scripts_dir, "/shared/external.R")) # nolint
+source(paste0(diag_scripts_dir, "/extreme_events/cfg_climdex.R")) # nolint
+source(paste0(diag_scripts_dir, "/extreme_events/cfg_extreme.R")) # nolint
+source(paste0(diag_scripts_dir,
+              "/extreme_events/common_climdex_preprocessing_for_plots.R")) # nolint
+source(paste0(diag_scripts_dir,
+              "/extreme_events/make_timeseries_plot.R")) # nolint
+source(paste0(diag_scripts_dir,
+              "/extreme_events/make_glecker_plot.R")) # nolint
+
+# read settings and metadata files
+args <- commandArgs(trailingOnly = TRUE)
+settings <- yaml::read_yaml(args[1])
+for (myname in names(settings)) {
+  temp <- get(myname, settings)
+  assign(myname, temp)
+}
+
+list0 <- yaml::read_yaml(settings$input_files[1])
+# extract metadata
+models_name <- unname(sapply(list0, "[[", "dataset"))
+models_ensemble <- unname(sapply(list0, "[[", "ensemble"))
+models_start_year <- unname(sapply(list0, "[[", "start_year"))
+models_end_year <- unname(sapply(list0, "[[", "end_year"))
+models_experiment <- unname(sapply(list0, "[[", "exp"))
+models_project <- unname(sapply(list0, "[[", "project"))
+diag_base <- unname(sapply(list0, "[[", "diagnostic"))[1]
+#### Correct r.interface output ####
+models_experiment[models_experiment == "No_value"] <- "No-values"
+
+variables <- c()
+climofiles <- c()
+models <- c()
+metadata <- c()
+
+# loop over variables
+for (i in seq_along(settings$input_files)) {
+  metadata <- yaml::read_yaml(settings$input_files[i])
+  models_name <- unname(sapply(metadata, "[[", "dataset"))
+  short_name <- unname(sapply(metadata, "[[", "short_name"))
+  variables <- c(variables, short_name)
+  models <- c(models, models_name)
+  climofiles <- c(climofiles, names(metadata))
+}
+
+# diag_base is associated with the first climofile
+print(paste(diag_base, ": starting routine"))
+
+# create working dirs if they do not exist
+work_dir <- settings$work_dir
+regridding_dir <- settings$run_dir
+plot_dir <- settings$plot_dir
+dir.create(work_dir, recursive = T, showWarnings = F)
+dir.create(regridding_dir, recursive = T, showWarnings = F)
+dir.create(plot_dir, recursive = T, showWarnings = F)
+
+# setup provenance file and list
+provenance_file <- paste0(regridding_dir, "/", "diagnostic_provenance.yml")
+provenance <- list()
+
+if (anyNA(base_range)) {
+  stop("Please choose a base_range!")
+}
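+
+# base_range is the climatological reference period used for the
+# percentile-based thresholds (e.g. c(1961, 1990)); it must lie within
+# every model's data period, which is checked below.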
+model_range <- c(
+  max(strtoi(models_start_year)),
+  min(strtoi(models_end_year))
+)
+if ( (base_range[1] < max(strtoi(models_start_year))) |
+     (base_range[2] > min(strtoi(models_end_year)))) {
+  stop(paste(
+    "Base range", base_range[1], "-", base_range[2],
+    "outside available model data period",
+    model_range[1], "-", model_range[2]
+  ))
+}
+print(paste("Base range:", base_range[1], "-", base_range[2]))
+
+if (anyNA(regrid_dataset)) {
+  regrid_dataset <- reference_datasets[1]
+  print(paste(
+    "Regrid dataset not set, choosing first reference dataset:",
+    regrid_dataset
+  ))
+}
+
+## Find earlier climdex indices in work folder
+climdex_files <- list.files(path = work_dir, pattern = "ETCCDI")
+
+# Fix input files removing bounds
+print("Removing bounds from preprocessed files")
+for (i in seq_along(climofiles)) {
+  tmp <- tempfile()
+  nco("ncks", paste(
+    "-C -O -x -v lat_bnds,lon_bnds,time_bnds",
+    climofiles[i], tmp
+  ))
+  nco("ncatted", paste("-O -a bounds,time,d,,", tmp))
+  nco("ncatted", paste("-O -a bounds,lat,d,,", tmp))
+  nco("ncatted", paste("-O -a bounds,lon,d,,", tmp))
+  nco("ncatted", paste0("-O -a coordinates,", variables[i], ",d,, ", tmp))
+  file.copy(tmp, climofiles[i], overwrite = TRUE)
+  unlink(tmp)
+}
+
+##
+## At this stage the climdex indices are calculated. This process is very
+## time-consuming, so check points are in place to test whether the indices
+## have already been produced; if the climdex files are there, the
+## computation is skipped. Delete the climdex files from the work folder
+## if you wish to have the climdex indices recalculated.
+##
+for (model_idx in seq_along(models_name)) {
+  author.data <- list(institution = "None", institution_id = "None")
+  template <- paste("var_timeres_", models_name[model_idx], "_",
+    models_experiment[model_idx], "_",
+    models_ensemble[model_idx], "_",
+    models_start_year[model_idx],
+    "01-", models_end_year[model_idx],
+    "12.nc",
+    sep = "", collapse = ""
+  )
+  print("")
+  print(paste0(">>>>>>>> Template name: ", template))
+  print("")
+
+  idx_select <- unique(c(timeseries_idx, gleckler_idx))
+
+  ## Check point for existing files
+  climdex_file_check <- c()
+  for (idx in idx_select) {
+    if (grepl("mon", idx)) {
+      climdex_file_check <- c(climdex_file_check,
+        paste0(
+          idx, "_",
+          models_name[model_idx], "_",
+          models_experiment[model_idx], "_",
+          models_ensemble[model_idx], "_",
+          models_start_year[model_idx], "01-",
+          models_end_year[model_idx], "12.nc"
+        )
+      )
+    } else {
+      climdex_file_check <- c(climdex_file_check,
+        paste0(
+          idx, "_",
+          models_name[model_idx], "_",
+          models_experiment[model_idx], "_",
+          models_ensemble[model_idx], "_",
+          models_start_year[model_idx], "-",
+          models_end_year[model_idx], ".nc"
+        )
+      )
+    }
+  }
+  check_control <- vector("logical", length(climdex_file_check))
+  n <- 0
+  for (chck in climdex_file_check) {
+    n <- n + 1
+    tmp <- length(grep(chck, climdex_files))
+    check_control[n] <- (tmp > 0)
+  }
+
+  if (!any(grepl("yr", idx_select))) {
+    timeres <- "mon"
+    write_plots <- FALSE
+  } else if (!any(grepl("mon", idx_select))) {
+    timeres <- "annual"
+  } else {
+    timeres <- "all"
+    write_plots <- FALSE
+  }
+
+  if (!all(check_control)) {
+    print("")
+    print(paste0(">>>>>>> Producing Indices for ", models_name[model_idx]))
+    print(climofiles[models == models_name[model_idx]])
+    print("")
+    infiles <- climofiles[models == models_name[model_idx]]
+    indices <- sub("ETCCDI.*", "", idx_select)
+    # Find best chunk size
+    chunk <- 10
+    if ( !(is.logical(climdex_parallel))) {
+      nc <- nc_open(infiles[1])
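+      # Heuristic: size max.vals.millions so that one stripe holds roughly
+      # (time * lon * lat) / climdex_parallel values (in millions),
+      # clamped to the range [1, 100].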
+      chunk <- floor( (nc$dim$time$len *
+        nc$dim$lon$len * nc$dim$lat$len +
+        1000.0) / (climdex_parallel * 1000000))
+      chunk <- max(min(100, chunk), 1)
+      nc_close(nc)
+      print(paste("Chunk size:", chunk))
+    }
+    create.indices.from.files(infiles, # nolint
+      work_dir, template, author.data,
+      base.range = base_range,
+      parallel = climdex_parallel,
+      verbose = TRUE,
+      climdex.vars.subset = indices,
+      climdex.time.resolution = timeres,
+      max.vals.millions = chunk,
+      src = climdex_src
+    )
+
+    # Set provenance for output files
+    # Get new list of files after computation
+    infiles <- climofiles[models == models_name[model_idx]]
+    print("Computing xprov")
+    xprov <- provenance_record(infiles)
+    climdex_files <- list.files(
+      path = work_dir,
+      pattern = paste0("ETCCDI.*", models_name[model_idx], ".*\\.nc"),
+      full.names = TRUE
+    )
+    for (fname in climdex_files) {
+      print(paste("Provenance for ", fname))
+      provenance[[fname]] <- xprov
+    }
+  }
+}
+
+if (write_plots) {
+  #############################
+  # A climdex processing section is needed here for observation data.
+  # CMORized observation data found in the obs directory
+  # has its climdex indices calculated,
+  # which are then placed in the work/extreme_events directory
+  #############################
+
+  ## Splitting models from observations
+
+  ###################################
+  #### Produce time series plots ####
+  ###################################
+
+  if (anyNA(analysis_range)) {
+    analysis_range[1] <- max(strtoi(models_start_year))
+    analysis_range[2] <- min(strtoi(models_end_year))
+    print(paste(
+      "Analysis range not defined, assigning model range:",
+      analysis_range[1], "-", analysis_range[2]
+    ))
+  }
+  if ( (analysis_range[1] < max(strtoi(models_start_year))) |
+       (analysis_range[2] > min(strtoi(models_end_year)))) {
+    stop(paste(
+      "Analysis range", analysis_range[1], "-", analysis_range[2],
+      "outside available model data period",
+      model_range[1], "-", model_range[2]
+    ))
+  }
+  print(paste("Analysis range:", analysis_range[1], "-", analysis_range[2]))
+
+  # These are forced here for testing
+
+  print("------ Model datasets ------")
+  print(setdiff(models_name, reference_datasets))
+  print("---- Reference datasets ----")
+  print(reference_datasets)
+  print("----------------------------")
+  if (ts_plt) {
+    print("")
+    print(paste0(">>>>>>>> TIME SERIES PROCESSING INITIATION"))
+    plotfiles <- timeseries_main(
+      path = work_dir, idx_list = timeseries_idx,
+      model_list = setdiff(models_name, reference_datasets),
+      obs_list = reference_datasets, plot_dir = plot_dir,
+      normalize = normalize,
+      start_yr = analysis_range[1], end_yr = analysis_range[2]
+    )
+    xprov <- provenance_record(climofiles)
+    for (fname in plotfiles) {
+      provenance[[fname]] <- xprov
+    }
+    # Each timeseries file gets provenance from its reference dataset
+    for (model in reference_datasets) {
+      ncfiles <- list.files(file.path(work_dir, "timeseries"),
+                            pattern = model, full.names = TRUE)
+      xprov <- provenance_record(climofiles[models == model])
+      for (fname in ncfiles) {
+        provenance[[fname]] <- xprov
+      }
+    }
+    # The ensemble timeseries get provenance from all model datasets
+    ncfiles <- list.files(file.path(work_dir, "timeseries"),
+                          pattern = "ETCCDI.*ens", full.names = TRUE)
+
+    ancestors <- sapply(setdiff(models_name, reference_datasets),
+                        grep, climofiles, value = TRUE)
+    xprov <- provenance_record(ancestors)
+    for (fname in ncfiles) {
+      provenance[[fname]] <- xprov
+    }
+  }
+
+  ###############################
+  #### Produce Gleckler plot ####
+  ###############################
+  if (glc_plt) {
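+    # The Gleckler ("portrait") diagram summarises, per index and model,
+    # how well the model climatology matches each reference dataset
+    # (relative RMSE, following Sillmann et al. 2013).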
print("") + print(paste0(">>>>>>>> GLECKLER PROCESSING INITIATION")) + + ## Check if Gleckler Array already exists + nidx <- length(gleckler_idx) # number of indices + nmodel <- length(models_name) # number of models + nobs <- length(reference_datasets) # number of observations + arrayname <- paste0( + "Gleckler-Array_", nidx, "-idx_", + nmodel, "-models_", nobs, "-obs", ".RDS" + ) + arraydirname <- paste0(plot_dir, "/", diag_base, "/", arrayname) + if (glc_arr) { + if (file.exists(arraydirname)) { + file.remove(arraydirname) + } + promptinput <- "y" + } + + if (file.exists(arraydirname)) { + promptinput <- "n" + } else { + promptinput <- "y" + } + + #### Running gleckler_main #### + plotfiles <- gleckler_main( + path = work_dir, idx_list = gleckler_idx, + model_list = setdiff(models_name, reference_datasets), + obs_list = reference_datasets, + plot_dir = plot_dir, promptinput = promptinput, + start_yr = analysis_range[1], end_yr = analysis_range[2] + ) + + xprov <- provenance_record(list(climofiles)) + for (fname in plotfiles) { + provenance[[fname]] <- xprov + } + ncfiles <- list.files(file.path(work_dir, "gleckler/Gleck*")) + xprov <- provenance_record(climofiles) + for (fname in ncfiles) { + provenance[[fname]] <- xprov + } + } +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/extreme_events/make_glecker_plot.R b/esmvaltool/diag_scripts/extreme_events/make_glecker_plot.R new file mode 100755 index 0000000000..ffcb987a62 --- /dev/null +++ b/esmvaltool/diag_scripts/extreme_events/make_glecker_plot.R @@ -0,0 +1,670 @@ +# ######################################################################################################### +# make_glecker_plot.R +# +# Author: Christian W. Mohr (CICERO, Norway) +# Marit Sandstad (CICERO, Norway) +# +# +# ######################################################################################################### +# Description: +# Code to plot Glecker polygon diagram to compare climdex index +# performance between models and reanalysis. 
+#
+# Modification history
+#
+#    2019 0506-hard_jo: conversion to ESMValTool2
+#    20180601-A_cwmohr: re-creation (complete new script incorporating segments from "make_timeseries_plot.r & make_Glecker_plot.r")
+#
+# #########################################################################################################
+
+gleckler_main <- function(path = "./", idx_list, model_list, obs_list,
+                          plot_dir = "../plot/extreme_events/",
+                          promptinput = promptinput,
+                          start_yr = 2000, end_yr = 2009) {
+
+  #### CLIMDEX PREPROCESSING ####
+
+  ## For file structure and files
+  tsgrid <- paste(path, "/tsGridDef", sep = "") # nolint
+  time_cropped <- paste(path, "/timeCropped", sep = "") # nolint
+  landmask <- paste(path, "/landSeaMask.nc", sep = "") # nolint
+  regridded <- paste(path, "/regridded", sep = "") # nolint
+  land <- paste(path, "/Land", sep = "") # nolint
+
+  nmodel <- length(model_list) # number of models
+  nidx <- length(idx_list) # number of indices
+  nobs <- length(obs_list) # number of observations
+
+  if (file.exists(paste0(
+    path, "/gleckler/Gleckler-Array_", # nolint
+    nidx, "-idx_", nmodel, "-models_",
+    nobs, "-obs", ".RDS"
+  ))) {
+    promptinput <- "n"
+  }
+
+  if (promptinput == "y") {
+    # Initial nc-file time crop, regrid, land and plot purge
+    unlink(c(time_cropped, regridded, land,
+             landmask, tsgrid), recursive = TRUE)
+
+    ## Initial grid and landmask creation reset
+    grid_and_landmask <- TRUE
+
+    ## Combine model and observation list
+    modelandobs_list <- unique(c(model_list, obs_list))
+
+    ## Loop over the indices to produce a plot for each index
+    for (idx in idx_list) {
+
+      ## Time crop
+      returnvalue <- set_time_for_files_equal(
+        path = path, idx = idx,
+        model_list = modelandobs_list,
+        time_cropped = time_cropped,
+        max_start = start_yr, min_end = end_yr
+      )
+
+      max_start <- returnvalue[1]
+      min_end <- returnvalue[2]
+
+      ## If there is no overlap in the files the index
+      ## should be skipped
+      if (max_start >= min_end) {
+        print(paste("No time overlap in files for index", idx))
+        break
+      }
+
+      ## Find the new model and observation names (after time cropping)
+      modelsandobs <- basename(Sys.glob(file.path(
+        time_cropped,
+        paste0(idx, "*.nc")
+      )))
+      split_modelsandobs <- strsplit(modelsandobs, split = "_")
+      modelsandobs_index <- unlist(lapply(split_modelsandobs, function(x) {
+        x[3]
+      }))
+
+      ## new models
+      models <- modelsandobs[which(modelsandobs_index %in% model_list)]
+
+      ## new observations
+      obs <- modelsandobs[which(modelsandobs_index %in% obs_list)]
+
+      ## Find the start year (to be used in plotting)
+      # start_yr <- strtoi(substr(models[1], nchar(models[1]) - 11,
+      #                           nchar(models[1]) - 8))
+      # A new grid and land-sea mask for each idx
+      # (or just the first idx set) should be
+      # produced here
+      if (grid_and_landmask) {
+        create_grid(path = path, loc = tsgrid)
+        create_land_sea_mask(regrid = tsgrid, loc = path, landmask = landmask)
+        grid_and_landmask <- FALSE
+      }
+
+      ## Loop over each file so it can be regridded
+      ## and land-sea masked
+      for (mo in modelsandobs) {
+        print(paste(time_cropped, "/", mo, sep = ""))
+        regrid_and_land_sea_mask(
+          idx_raw = paste(time_cropped, "/", mo, sep = ""),
+          regrid = tsgrid, landmask = landmask,
+          regridded = regridded, land = land, loc = path
+        )
+      }
+    }
+
+    #### Gleckler Array Processing ####
+    rmserelarr <- gleckler_array(
+      path = land, idx_list = idx_list,
+      model_list = model_list, obs_list = obs_list
+    )
+
+    ## Save Array
+    glecdir <- paste0(path, "/gleckler") # nolint
+    if (!file.exists(glecdir)) {
+      dir.create(glecdir)
+    }
+    saveRDS(object = rmserelarr, file = paste0(
+      path, "/gleckler/Gleckler-Array_", # nolint
+      nidx, "-idx_",
+      nmodel, "-models_",
+      nobs, "-obs", ".RDS"
+    ))
+    saveRDS(object = returnvalue, file = paste0(
+      path,
+      "/gleckler/Gleckler-years.RDS" # nolint
+    ))
+
+    # Final cleanup
+    unlink(c(time_cropped, regridded, land,
+             landmask, tsgrid), recursive = TRUE)
+  }
+
+  #### Gleckler Plotting ####
+  rmserelarr <- readRDS(file = paste0(
+    path, "/gleckler/Gleckler-Array_", # nolint
+    nidx, "-idx_", nmodel, "-models_",
+    nobs, "-obs", ".RDS"
+  ))
+  year_range <- readRDS(
+    file = paste0(
+      path,
+      "/gleckler/Gleckler-years.RDS" # nolint
+    )
+  )
+
+  plotfile <- gleckler_plotting(
+    arr = rmserelarr, idx_list = idx_list,
+    model_list = model_list, obs_list = obs_list,
+    plot_dir = plot_dir, syear = year_range[1],
+    eyear = year_range[2]
+  )
+  return(plotfile)
+}
+
+#### Computing the RMSEs ####
+
+gleckler_array <- function(path = land, idx_list = gleckler_idx,
+                           model_list = model_list, obs_list = obs_list) {
+  ## Produce an array to hold all the model and reanalysis means
+
+  ## Input data for testing the plotting routine
+  nidx <- length(idx_list) # number of indices
+  nmodel <- length(model_list) # number of models
+  nobs <- length(obs_list) # number of reanalyses
+
+  ## Check point for reanalysis data
+  if (nobs == 0) {
+    stop("No reanalysis datasets provided")
+  }
+
+  ## Function to calculate area mean
+  area.mean <- function(x, lat) {
+
+    nlon <- dim(x)[1]
+    nlat <- dim(x)[2]
+
+    meanlat <- apply(x, 2, function(x) {
+      mean(x, na.rm = TRUE)
+    })
+
+    fi <- lat * 3.14159 / 180
+
+    wgt.prod <- meanlat * cos(fi)
+
+    # At some latitudes there is no land and therefore no data.
+    nan.check <- is.nan(wgt.prod)
+    # The mean of missing data is not a number, and hence results in NaNs.
+    # These NaN must be removed in order to calculate the correct area mean.
+
+    gl <- sum(wgt.prod[!nan.check])
+    sumcos <- sum(cos(fi)[!nan.check])
+    ar.m <- gl / sumcos
+    return(ar.m)
+  }
+
+  ## Function to calculate the RMSE between the model and
+  ## observed climatology (RMSExy)
+  ## Equation 1, from Sillmann et al. 2013
+  RMSE <- function(model = tm_model_idx, obs = tm_obs_idx, lat = model_lat) {
+    RMSE <- sqrt(area.mean( (model - obs) ^ 2, lat))
+    return(RMSE)
+  }
+
+  # Array for the RMSEs; extra spaces in the array are created so that the
+  # RMSEall, ENSmean, ENSmedian and CMIP RMSE can be added
+  rmsearr <- array(NA, dim = c(nidx + 1, nmodel + 3, nobs))
+  rmserelarr <- rmsearr
+  ensmodel_list <- list()
+
+  i <- 2
+  m <- 1
+  o <- 1
+  lat_collect <- TRUE
+
+  for (i in seq_along(idx_list)) {
+    for (m in seq_along(model_list)) {
+      ## Read in model annual climatology
+
+      tm_model <- nc_open(Sys.glob(file.path(path, paste0(
+        "tm_", idx_list[i],
+        "_", model_list[m],
+        "*.nc"
+      ))))
+      idxs <- unlist(strsplit(idx_list[i], split = "_"))[1]
+      tm_model_idx <- ncvar_get(tm_model, idxs)
+
+      # extract latitudes for area mean calculations
+      if (lat_collect) {
+        model_lat <- ncvar_get(tm_model, "lat")
+        lat_collect <- FALSE
+      }
+
+      nc_close(tm_model)
+      ensmodel_list[[m]] <- tm_model_idx
+    }
+    ## Create a new array for adding the time mean model matrices
+    ensarr <- array(NA, dim = c(
+      nrow(tm_model_idx), ncol(tm_model_idx),
+      length(ensmodel_list) + 2
+    ))
+
+    # Copy each matrix from the multimodel list to the array "ensarr".
+    # Notice the "+2" on the 3rd dimension. This is so later the model
+    # ensemble mean and median matrices can be added to the array.
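+    # ensarr layout along the 3rd dimension: [, , 1] = ensemble mean,
+    # [, , 2] = ensemble median, [, , 3...] = the individual models.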
+    for (n in seq_along(ensmodel_list)) {
+      ensarr[, , n + 2] <- ensmodel_list[[n]]
+    }
+
+    ## Calculate the ensemble mean and median of
+    ## all the model time mean matrices
+    ensmean <- apply(ensarr, c(1, 2), function(x) {
+      mean(na.omit(x))
+    })
+    ensmedian <- apply(ensarr, c(1, 2), function(x) {
+      median(na.omit(x))
+    })
+
+    # Place the ensemble model mean and medians into the
+    # first two matrices (3rd dimension) of the array "ensarr"
+    ensarr[, , 1] <- ensmean
+    ensarr[, , 2] <- ensmedian
+
+    j <- 1
+    ## Calculate the RMSE for all the models and the ensemble mean and median
+    for (j in 1:dim(ensarr)[3]) {
+      ## Read in reanalysis annual climatology
+      for (o in seq_along(obs_list)) {
+        tm_obs <- nc_open(Sys.glob(file.path(path, paste0(
+          "tm_", idx_list[i],
+          "_", obs_list[o],
+          "*.nc"
+        ))))
+        tm_obs_idx <- ncvar_get(tm_obs, idxs)
+        nc_close(tm_obs)
+        rmsearr[i + 1, j, o] <- RMSE(
+          model = ensarr[, , j], obs = tm_obs_idx,
+          lat = model_lat
+        ) # Calculate each RMSE and place value in RMSE-array
+
+        ## Calculate the model standard deviation.
+        ## Later used for calculating the rmsemedian,std.
+        ## Denominator in equation 3, from Sillmann et al. 2013
+        rmsearr[i + 1, ncol(rmsearr), o] <-
+          sqrt(area.mean( (tm_obs_idx - area.mean(tm_obs_idx,
+                                                  lat = model_lat)) ^ 2,
+               lat = model_lat))
+      }
+    }
+  }
+
+  ## Calculate the RMSE median for the models
+  tmprmsearr <- rmsearr[, -c(1, 2, ncol(rmsearr)), ]
+  if (length(dim(tmprmsearr)) == 3) {
+    rmsemed <- apply(tmprmsearr, c(1, 3), function(x) {
+      median(x, na.rm = TRUE)
+    })
+  } else {
+    rmsemed <- apply(tmprmsearr, 1, function(x) {
+      median(x, na.rm = TRUE)
+    })
+  }
+
+  ## Function to calculate the relative RMSE (RMSE'xy)
+  ## between the model and observed climatology
+  ## Equation 2, from Sillmann et al. 2013
+  rmserel <- function(rmse, rmsemed) {
+    rmserel <- (rmse - rmsemed) / rmsemed
+    return(rmserel)
+  }
+
+  ## Calculating the relative RMSE (RMSE'xy)
+  m <- 1
+  for (m in 1:(ncol(rmsearr) - 1)) {
+    rmserelarr[, m, ] <- rmserel(rmse = rmsearr[, m, ], rmsemed = rmsemed)
+  }
+
+  ## Calculating the RMSE median,std. Equation 3, from Sillmann et al. 2013
+  rmserelarr[, ncol(rmserelarr), ] <- rmsemed / rmsearr[, ncol(rmsearr), ]
+
+  ## Calculating the RMSE mean
+  tmprmsearr <- rmserelarr[, -ncol(rmserelarr), ]
+  if (length(dim(tmprmsearr)) == 3) {
+    rmserelarr[1, -ncol(rmserelarr), ] <- apply(
+      tmprmsearr, c(2, 3),
+      function(x) {
+        mean(x, na.rm = TRUE)
+      }
+    )
+  } else {
+    rmserelarr[1, -ncol(rmserelarr), ] <- apply(
+      tmprmsearr, c(2),
+      function(x) {
+        mean(x, na.rm = TRUE)
+      }
+    )
+  }
+  print(rmserelarr)
+  return(rmserelarr)
+}
+
+#### Plotting Routine ####
+gleckler_plotting <- function(arr = rmserelarr, idx_list, model_list,
+                              obs_list, plot_dir = "../plots/extreme_events/",
+                              syear = max_start, eyear = min_end) {
+  nidx <- length(idx_list) # number of indices
+  nmodel <- length(model_list) # number of models
+  nobs <- length(obs_list) # number of reanalyses
+
+  ## Numbers for color scale
+  sclseq <- seq(-0.55, 0.55, 0.1)
+
+  ## Colour scale
+  glc <- brewer.pal(length(sclseq) - 2, "RdYlBu") # nolint
+  glc <- c("#662506", glc, "#3f007d")
+  glc <- rev(glc)
+
+  # Numbers for black & white scale
+  sclseq_bw <- seq(0.05, 1.15, 0.1)
+  glbw <- gray(seq(0, 1, length.out = length(sclseq_bw)))
+  glbw <- rev(glbw)
+
+  ## Determining what shapes should be plotted, based on number of observations
+  if (nobs == 1) {
+    # One reanalysis reference
+    x1 <- c(0, 1, 1, 0)
+    y1 <- c(0, 0, 1, 1)
+    xs <- list(x1)
+    ys <- list(y1)
+
+    # text coordinates
+    xtx <- 0.50
+    ytx <- -0.25
+    rotx <- 0 # text rotation in degrees
+  }
+
+  if (nobs == 2) {
+    # Two reanalysis references
+    x1 <- c(0, 1, 1) # lower triangle
+    y1 <- c(0, 0, 1) # lower triangle
+    x2 <- c(0, 1, 0) # upper triangle
+    y2 <- c(0, 1, 1) # upper triangle
+
+    xs <- list(x1, x2)
+    ys <- list(y1, y2)
+
+    # text coordinates
+    xtx <- c(0.75, 0.25)
+    ytx <- c(-0.25, 1.25)
+    rotx <- c(0, 0) # text rotation in degrees
+  }
+
+  if (nobs == 3) {
+    # Three reanalysis references
+    x1 <- c(0, 0.5, 0.5, 0) # bottom left
+    y1 <- c(0, 0, 0.5, 1) # bottom left
+    x2 <- c(0.5, 1, 1, 0.5) # bottom right
+    y2 <- c(0, 0, 1, 0.5) # bottom right
+    x3 <- c(0, 0, 0.5, 1, 1) # top
+    y3 <- c(1, 0.75, 0.5, 0.75, 1) # top
+
+    xs <- list(x1, x2, x3)
+    ys <- list(y1, y2, y3)
+
+    # text coordinates
+    xtx <- c(-0.25, 1.25, 0.5)
+    ytx <- c(0.25, 0.25, 1.25)
+    rotx <- c(90, 90, 0) # text rotation in degrees
+  }
+
+  if (nobs == 4) {
+    # Four reanalysis references
+    x1 <- c(0, 0.5, 1) # bottom triangle
+    y1 <- c(0, 0.5, 0) # bottom triangle
+    x2 <- c(0, 0.5, 0) # left triangle
+    y2 <- c(0, 0.5, 1) # left triangle
+    x3 <- c(0, 0.5, 1) # top triangle
+    y3 <- c(1, 0.5, 1) # top triangle
+    x4 <- c(1, 0.5, 1) # right triangle
+    y4 <- c(1, 0.5, 0) # right triangle
+
+    xs <- list(x1, x2, x3, x4)
+    ys <- list(y1, y2, y3, y4)
+
+    # text coordinates
+    xtx <- c(0.5, -0.25, 0.5, 1.25)
+    ytx <- c(-0.25, 0.5, 1.25, 0.5)
+    rotx <- c(0, 90, 0, 90) # text rotation in degrees
+  }
+
+  if (!(nobs %in% c(1, 2, 3, 4))) {
+    if (nobs == 0) {
+      stop("No reanalysis dataset provided")
+    } else {
+      stop(paste(
+        "Too many reanalysis datasets provided.",
+        "Please choose between 1 and 4 datasets"
+      ))
+    }
+  }
+
+  print("--- Creating Gleckler plot ---")
+  img.adj <- gl_mar_par * 0.05
+  width.fct <- ( (nmodel + 3) / (nidx + 1)) + sum(img.adj[c(2, 4)])
+  height.fct <- 1 + sum(img.adj[c(1, 3)])
+
+  figure_filename <- paste(plot_dir, "/Gleckler_", mip_name, "_", # nolint
+    nmodel, "-models_", nidx, "-idx_", nobs, "-obs_",
+    syear, "-", eyear, ".", output_file_type,
+    sep = ""
+  )
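+  ## e.g. "plots/Gleckler_CMIP5_5-models_8-idx_2-obs_1981-2000.png"
+  ## (mip_name and output_file_type come from the diagnostic configuration;
+  ## the values above are illustrative only).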
+  ## Choose output format for figure
+  if (tolower(output_file_type) == "png") {
+    png(
+      filename = figure_filename,
+      width = gl_png_res * (width.fct / height.fct),
+      height = gl_png_res,
+      units = gl_png_units,
+      pointsize = gl_png_pointsize,
+      bg = gl_png_bg
+    )
+  } else if (tolower(output_file_type) == "pdf") {
+    pdf(file <- figure_filename)
+  } else if (tolower(output_file_type) == "eps") {
+    setEPS()
+    postscript(figure_filename)
+  }
+
+  par(mfrow = c(1, 1), mar = gl_mar_par, xpd = FALSE, oma = rep(0, 4))
+  plot(
+    x = c(0, 1 + gl_rmsespacer), y = c(0, 1), type = "n", ann = FALSE,
+    xaxs = "i", yaxs = "i", bty = "n", xaxt = "n", yaxt = "n"
+  )
+
+  ## Array dimensions
+  xn <- ncol(arr)
+  yn <- nrow(arr)
+
+  ## Testing array plotting
+  xi <- 1 # model
+  yj <- 2 # index
+  zk <- 1 # obs
+
+  ## Plotting RMSE of models, ensemble mean and median and RMSEall
+  for (xi in 1:(xn - 1)) {
+    for (yj in 1:yn) {
+      for (zk in 1:nobs) {
+        polygon(
+          x = (xs[[zk]] / xn) + ( (xi - 1) / xn),
+          y = (ys[[zk]] / yn) + ( (yn - yj) / yn),
+          col = glc[which.min(abs(sclseq - arr[yj, xi, zk]))]
+        )
+      }
+    }
+  }
+
+  ## Plotting RMSE median standard deviation
+  for (yj in 2:yn) {
+    for (zk in 1:nobs) {
+      polygon(
+        x = (xs[[zk]] / xn) + ( (xn - 1) / xn) + gl_rmsespacer,
+        y = (ys[[zk]] / yn) + ( (yn - yj) / yn),
+        col = glbw[which.min(abs(sclseq_bw - arr[yj, xn, zk]))]
+      )
+    }
+  }
+
+  ## Produce the borders for the Gleckler plot
+  par(xpd = TRUE)
+  rect(
+    xleft = 0, ybottom = 0, xright = (1 - 1 / xn), ytop = (1 - 1 / yn),
+    density = NULL, angle = 45,
+    col = NA, border = 1, lty = par("lty"), lwd = 4
+  )
+  rect(
+    xleft = 0, ybottom = (1 - 1 / yn), xright = (1 - 1 / xn), ytop = 1,
+    density = NULL, angle = 45,
+    col = NA, border = 1, lty = par("lty"), lwd = 4
+  )
+
+  ## Scale for Gleckler plot
+  gleckler_scale <- function(sclseq, glc, xn, scaling_factor,
+                             text.scaling_factor, xscale_spacer) {
+    par(xpd = TRUE)
+    ## Square legend
+    sqrxs <- c(0, 1, 1, 0)
+    sqrys <- c(0, 0, 1, 1)
+
+    # up-triangle legend
+    utrixs <- c(0, 1, 0.5)
+    utriys <- c(0, 0, 1)
+
+    # down-triangle legend
+    dtrixs <- c(0.5, 1, 0)
+    dtriys <- c(0, 1, 1)
+
+    # Legend number shifter
+    seq_shift <- mean(diff(sclseq) / 2) # Shifts the legend numbers so that
+    # they represent the border values
+
+    # y-scale spacer
+    yscale_spacer <- (1 - scaling_factor) / 2
+
+    exlen <- length(glc)
+    for (a in 1:exlen) {
+      if (a == 1) {
+        xtmp <- scaling_factor * (dtrixs / xn) + 1 + xscale_spacer / xn
+        ytmp <- (scaling_factor * (dtriys / exlen + (a - 1) / exlen) +
+                 yscale_spacer)
+        polygon(x = xtmp, y = ytmp, col = glc[a])
+        text(
+          x = max(xtmp), y = max(ytmp),
+          round(sclseq[a] + seq_shift, 1),
+          cex = text.scaling_factor, pos = 4
+        )
+      } else if (a == exlen) {
+        xtmp <- scaling_factor * (utrixs / xn) + 1 + xscale_spacer / xn
+        ytmp <- (scaling_factor * (utriys / exlen + (a - 1) / exlen) +
+                 yscale_spacer)
+        polygon(x = xtmp, y = ytmp, col = glc[a])
+      } else {
+        xtmp <- scaling_factor * (sqrxs / xn) + 1 + xscale_spacer / xn
+        ytmp <- (scaling_factor * (sqrys / exlen + (a - 1) / exlen) +
+                 yscale_spacer)
+        polygon(x = xtmp, y = ytmp, col = glc[a])
+        text(
+          x = max(xtmp), y = max(ytmp), round(sclseq[a] + seq_shift, 1),
+          cex = text.scaling_factor, pos = 4
+        )
+      }
+    }
+  }
+
+  ## Plot scales
+  gleckler_scale(sclseq, glc, xn,
+    scaling_factor = gl_scaling_factor,
+    text.scaling_factor = gl_text_scaling_factor,
+    xscale_spacer = gl_xscale_spacer_rmse
+  )
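+
+  ## Two legends are drawn: the colour scale above for the relative RMSE,
+  ## and the grey scale below for RMSE_std, the ratio of the median RMSE
+  ## to the spatial standard deviation of the reference climatology.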
+  gleckler_scale(sclseq_bw, glbw, xn,
+    scaling_factor = gl_scaling_factor,
+    text.scaling_factor = gl_text_scaling_factor,
+    xscale_spacer = gl_xscale_spacer_rmsestd
+  )
+
+  ## Plotting symbol legend
+  exlen <- length(glc)
+  xsym1 <- gl_scaling_factor * (0.5 / xn) + 1 + gl_xscale_spacer_rmse / xn
+  exlen <- length(glbw)
+  xsym2 <- gl_scaling_factor * (0.5 / xn) + 1 + gl_xscale_spacer_rmsestd / xn
+  x.max_adj <- max(gl_symb_scaling_factor * (xs[[zk]] / xn))
+  x.min_adj <- min(gl_symb_scaling_factor * (xs[[zk]] / xn))
+  xmidadj <- (x.max_adj - x.min_adj) / 2
+
+  gl_symb_xshift <- (xsym1 + xsym2) / 2 - xmidadj
+
+  for (zk in 1:nobs) {
+    xsym <- gl_symb_scaling_factor * (xs[[zk]] / xn) + gl_symb_xshift
+    ysym <- (gl_symb_scaling_factor * (ys[[zk]] / xn)
+             - gl_symb_yshift / xn) * width.fct / height.fct
+    print(paste("xs:", xsym))
+    print(paste("ys:", ysym))
+    polygon(x = xsym, y = ysym, col = "white", border = 1)
+
+    xtxsym <- gl_symb_scaling_factor * (xtx[[zk]] / xn) + gl_symb_xshift
+    ytxsym <- (gl_symb_scaling_factor * (ytx[[zk]] / xn)
+               - gl_symb_yshift / xn) * width.fct / height.fct
+
+    text(
+      x = xtxsym, y = ytxsym, labels = obs_list[zk], adj = 0.5,
+      cex = gl_text_symb_scaling_factor, srt = rotx[zk]
+    )
+  }
+
+  ## Label adjusting parameters
+  axlabsize <- 0.8
+  lineadj <- -0.5
+
+  ## Add model labels
+  col_names <- c("ENSMEAN", "ENSMEDIAN", model_list)
+  xtcks1 <- seq( (0.5 / xn), ( (xn - 1) / xn), by = (1 / xn))
+  axis(
+    side = 1, at = xtcks1, labels = col_names, las = 2,
+    cex.axis = axlabsize, tick = FALSE, line = lineadj
+  )
+
+  xtcks2 <- ( (xn - 1) / xn) + gl_rmsespacer + (0.5 / xn)
+  axis(
+    side = 1, at = xtcks2, labels = expression("RMSE"["std"]),
+    las = 2, cex.axis = axlabsize, tick = FALSE, line = lineadj
+  )
+
+  ## Add index labels
+  row_names <- vector(mode = "character", length = length(idx_list))
+  for (i in seq_along(idx_list)) {
+    row_names[i] <- idx_df$idx_etccdi[which(idx_df$idx_etccdi_time
+                                            %in% idx_list[i])]
+  }
+  row_names <- rev(c(expression("RMSE"["all"]), row_names))
+  ytcks1 <- seq( (1 / yn) * 0.5, 1, by = (1 / yn))
+  axis(
+    side = 2, at = ytcks1, labels = row_names, las = 2,
+    cex.axis = axlabsize, tick = FALSE, line = lineadj
+  )
+
+  mtext(
+    text = paste(mip_name, " global land ", syear, "-", eyear, sep = ""),
+    side = 3, line = 1, font = 2, cex = 1.1
+  )
+
+  dev.off()
+  return(figure_filename)
+}
diff --git a/esmvaltool/diag_scripts/extreme_events/make_timeseries_plot.R b/esmvaltool/diag_scripts/extreme_events/make_timeseries_plot.R
new file mode 100755
index 0000000000..1485e21488
--- /dev/null
+++ b/esmvaltool/diag_scripts/extreme_events/make_timeseries_plot.R
@@ -0,0 +1,578 @@
+# #############################################################################
+# make_timeseries_plot.R
+#
+# Author: Marit Sandstad (CICERO, Norway)
+#       : Christian W. Mohr (CICERO, Norway)
+#
+# #############################################################################
+# Description
+#    Code to plot a timeseries plot for a set of climdex indices
+#
+# Modification history
+#    2019 0506-hard_jo : conversion to ESMValTool2
+#    2018 0816-A_cwmohr: adding input procedure and plotting for/of observation data
+#    2018 0725-A_cwmohr: modification of time cropping
+#    2018 0618-A_cwmohr: alpha levels for polygon plotting, second y-axis
+#    2018 0131-A_laue_ax: clean-up of code, adaptation to ESMValTool standards,
+#                         added tagging, bugfixes: time axis, cdo, filenames
+#    2017 0920-A_maritsandstad: Creation
+#
+# #############################################################################
+
+##
+##
+## Method to call all preprocessing, loop through
+## all models and indices, then call the plotting script
+## to produce time series plots for a list of indices
+## @param path is the path to where the original indices
+##        are stored
+## @param idx_list lists the indices to be considered in
+##        this run. Defaults are the indices from the IPCC
+##        report.
+##
+
+###########################
+
+
+timeseries_main <- function(path = "../work/extreme_events",
+                            idx_list, model_list, obs_list,
+                            plot_dir = "./plot",
+                            normalize = FALSE,
+                            start_yr = 2000, end_yr = 2006) {
+
+  ## For file structure and files
+  tsgrid <- paste(path, "/tsGridDef", sep = "") # nolint
+  time_cropped <- paste(path, "/timeCropped", sep = "") # nolint
+  landmask <- paste(path, "/landSeaMask.nc", sep = "") # nolint
+  regridded <- paste(path, "/regridded", sep = "") # nolint
+  land <- paste(path, "/Land", sep = "") # nolint
+
+  # Initial nc-file time crop, regrid, land and plot purge
+  unlink(c(time_cropped, regridded, land, landmask, tsgrid), recursive = TRUE)
+
+  # Initial grid and landmask creation reset
+  gridandlandmask <- TRUE
+
+  ## Loop over the indices to produce a plot for each index
+  plotfiles <- list()
+  idx <- idx_list[1]
+  for (idx in idx_list) {
+
+    ## Combine the list of models and observations
+    modelobs_list <- unique(c(model_list, obs_list))
+
+    ## Find the model files
+    modelandobs <- basename(Sys.glob(file.path(
+      path,
+      paste(idx, "*.nc", sep = "")
+    )))
+
+    if (ts_data) {
+      ## Time crop
+      returnvalue <- set_time_for_files_equal(
+        path = path, idx = idx,
+        model_list = modelobs_list,
+        time_cropped = time_cropped
+      ) # This is a temporary solution
+
+      max_start <- returnvalue[1]
+      min_end <- returnvalue[2]
+
+      ## If there is no overlap in the files the index
+      ## should be skipped
+      if (max_start >= min_end) {
+        print(paste("No time overlap in files for index", idx))
+        break
+      }
+
+      ## Find the new model files after time cropping
+      modelandobs <- basename(Sys.glob(file.path(
+        time_cropped,
+        paste0(idx, "*.nc")
+      )))
+
+      # A new grid and land-sea mask for each idx
+      # (or just the first idx set) should be
+      # produced here
+      if (gridandlandmask) {
+        create_grid(path = path, loc = tsgrid)
+        create_land_sea_mask(regrid = tsgrid, loc = path, landmask = landmask)
+        gridandlandmask <- FALSE
+      }
+
+      ## Loop over each file so it can be regridded
+      ## and land-sea masked
+      for (m in modelandobs) {
+        print(paste(time_cropped, "/", m, sep = ""))
+        regrid_and_land_sea_mask(
+          idx_raw = paste0(time_cropped, "/", m),
+          regrid = tsgrid,
+          landmask = landmask, regridded = regridded,
+          land = land, loc = path
+        )
+      }
+
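+      ## When normalize = TRUE, time_series_preprocessing() standardises
+      ## each grid cell: subtract the time mean, detrend, divide by the
+      ## standard deviation of the detrended series, then take the field
+      ## mean; with normalize = FALSE only field means are computed.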
+
+#
+# Method that preprocesses idx-files for a single index
+# in order to get the data to plot the time series
+# for this index
+# @param land is the path to the land-masked index files
+# @param idx is the index to be processed.
+#
+time_series_preprocessing <- function(land = "./Land", idx = "tnnETCCDI_yr",
+                                      model_list = model_list,
+                                      obs_list = obs_list,
+                                      plot_dir = "./plot",
+                                      work_dir = "./work", normalize = FALSE) {
+
+  tseriesdir <- paste0(work_dir, "/timeseries") # nolint
+  if (!file.exists(tseriesdir)) {
+    dir.create(tseriesdir)
+  }
+
+  ## List of indices which are never normalized:
+  pidx <- c(
+    "tn10pETCCDI_yr", "tx10pETCCDI_yr", "tn90pETCCDI_yr",
+    "tx90pETCCDI_yr", "csdiETCCDI_yr", "wsdiETCCDI_yr",
+    "tn10pETCCDI_mon", "tx10pETCCDI_mon", "tn90pETCCDI_mon",
+    "tx90pETCCDI_mon", "csdiETCCDI_mon", "wsdiETCCDI_mon"
+  )
+
+  # Getting a list of all the files for the index
+  modelsandobs <- basename(Sys.glob(file.path(land, paste0(idx, "*.nc"))))
+  modelsandobssplitlist <- strsplit(modelsandobs, split = "_")
+  modelsandobssplit <- unlist(lapply(modelsandobssplitlist, function(x) {
+    x[3]
+  }))
+
+  # Extracting only the model files
+  models <- modelsandobs[which(modelsandobssplit %in% model_list)]
+  print("These are the models:")
+  print(models)
+
+  ## Extracting only the observation files
+  obs_order <- which(modelsandobssplit %in% obs_list)
+  obs <- modelsandobs[obs_order]
+  print("These are the observations:")
+  print(obs)
+
+  #### NORMALIZE VALUES ####
+  if (normalize) {
+    # File string to be filled with the file names that will feed
+    # the aggregated statistics (ensmean, enspctl)
+    file_string_models <- ""
+    for (m in models) {
+      print(m)
+      if (idx %in% pidx) {
+
+        # Fieldmean results
+        cdo("fldmean",
+          input = paste0(land, "/", m),
+          output = paste0(land, "/", "fldm_", m), options = "-O"
+        )
+
+        ## add the preprocessed file to the filestring
+        file_string_models <- paste(file_string_models, land,
+          "/fldm_", m, " ", sep = "") # nolint
+      } else {
+        # Subtracting timemeans from land files:
+        cdo("sub",
+          input = c(paste0(land, "/", m), paste0(land, "/tm_", m)), # nolint
+          output = paste0(land, "/", "norm_", m), options = "-O"
+        )
+
+        # Detrended results:
+        cdo("detrend",
+          input = paste0(land, "/norm_", m), # nolint
+          output = paste0(land, "/", "detrend_", m), options = "-O"
+        )
+
+        # Timstd of detrend
+        cdo("timstd",
+          input = paste0(land, "/detrend_", m), # nolint
+          output = paste0(land, "/", "detrend_std_", m), options = "-O"
+        )
+
+        # Divide the anomalies by the timstd of the detrended series
+        cdo("div",
+          input = c(
+            paste0(land, "/norm_", m), # nolint
+            paste0(land, "/detrend_std_", m) # nolint
+          ),
+          output = paste0(land, "/detrend_standard_", m), options = "-O" # nolint
+        )
+
+        # Fieldmean results
+        cdo("fldmean",
+          input = paste0(land, "/detrend_standard_", m), # nolint
+          output = paste0(land, "/detrend_std_fldm_", m), options = "-O" # nolint
+        )
+
+        ## add the preprocessed file to the filestring
+        file_string_models <- paste(file_string_models, land,
+          "/detrend_std_fldm_", m, " ", sep = "") # nolint
+      }
+    }
+    # Find model ensemble mean
+    cdo("ensmean",
+      input = file_string_models,
+      output = paste0(tseriesdir, "/", idx, "_ensmean_for_timeseries.nc"),
+      options = "-O"
+    )
+
+    # Find ensemble 25th percentile
+    cdo("enspctl",
+      args = "25", input = file_string_models,
+      output = paste0(tseriesdir, "/", idx, "_25enspctl_for_timeseries.nc"),
+      options = "-O"
+    )
+
+    # Find ensemble 75th percentile
+    cdo("enspctl",
+      args = "75", input = file_string_models,
+      output = paste0(tseriesdir, "/", idx, "_75enspctl_for_timeseries.nc"),
+      options = "-O"
+    )
+
+    n <- 0
+    for (o in obs) {
+      print(o)
+
+      if (idx %in% pidx) {
+        # Fieldmean results
+        cdo("fldmean",
+          input = paste0(land, "/", o),
+          output = paste0(land, "/", "fldm_", o), options = "-O"
+        )
+
+        # Copy obs file to plot
+        n <- n + 1
+        file.copy(
+          paste0(land, "/fldm_", o), # nolint
+          paste0(
+            tseriesdir, "/", idx, "_",
+            modelsandobssplit[obs_order[n]],
+            "_for_timeseries.nc"
+          )
+        )
+      } else {
+        # Subtracting timemeans from land files:
+        cdo("sub",
+          input = c(paste0(land, "/", o), paste0(land, "/tm_", o)), # nolint
+          output = paste0(land, "/norm_", o), options = "-O" # nolint
+        )
+
+        # Detrended results:
+        cdo("detrend",
+          input = paste0(land, "/norm_", o), # nolint
+          output = paste0(land, "/detrend_", o), options = "-O" # nolint
+        )
+
+        # Timstd of detrend
+        cdo("timstd",
+          input = paste0(land, "/detrend_", o), # nolint
+          output = paste0(land, "/detrend_std_", o), options = "-O" # nolint
+        )
+
+        # Divide the anomalies by the timstd of the detrended series
+        cdo("div",
+          input = c(
+            paste0(land, "/norm_", o), # nolint
+            paste0(land, "/detrend_std_", o) # nolint
+          ),
+          output = paste0(land, "/detrend_standard_", o), options = "-O" # nolint
+        )
+
+        # Fieldmean results
+        cdo("fldmean",
+          input = paste0(land, "/detrend_standard_", o), # nolint
+          output = paste0(land, "/detrend_std_fldm_", o), options = "-O" # nolint
+        )
+
+        # Copy obs file to plot
+        n <- n + 1
+        file.copy(
+          paste0(land, "/detrend_std_fldm_", o), # nolint
+          paste0(
+            tseriesdir, "/", idx, "_",
+            modelsandobssplit[obs_order[n]],
+            "_for_timeseries.nc"
+          )
+        )
+      }
+    }
+  }
+
+  # ABSOLUTE VALUES ####
+  # Non-normalized values fieldmeans
+  if (!normalize) {
+    file_string_models <- ""
+    for (m in models) {
+      print(m)
+      # Fieldmean results
+      cdo("fldmean",
+        input = paste0(land, "/", m),
+        output = paste0(land, "/fldm_", m), options = "-O" # nolint
+      )
+
+      ## add the preprocessed file to the filestring
+      file_string_models <- paste(file_string_models, land,
+        "/fldm_", m, " ", sep = "") # nolint
+    }
+    # Find model ensemble mean
+    cdo("ensmean",
+      input = file_string_models,
+      output = paste0(tseriesdir, "/", idx, "_ensmean_for_timeseries.nc"),
+      options = "-O"
+    )
+
+    # Find ensemble 25th percentile
+    cdo("enspctl",
+      args = "25", input = file_string_models,
+      output = paste0(tseriesdir, "/", idx, "_25enspctl_for_timeseries.nc"),
+      options = "-O"
+    )
+
+    # Find ensemble 75th percentile
+    cdo("enspctl",
+      args = "75", input = file_string_models,
+      output = paste0(tseriesdir, "/", idx, "_75enspctl_for_timeseries.nc"),
+      options = "-O"
+    )
+
+    ## Extracting only the observation files
+    obs_order <- which(modelsandobssplit %in% obs_list)
+    obs <- modelsandobs[obs_order]
+
+    print("These are the observations:")
+    print(obs)
+    n <- 0
+    for (o in obs) {
+      print(o)
+      # Fieldmean results
+      cdo("fldmean",
+        input = paste0(land, "/", o),
+        output = paste0(land, "/fldm_", o), options = "-O" # nolint
+      )
+      # Copy obs file to plot
+      n <- n + 1
+      file.copy(
+        paste0(land, "/fldm_", o), # nolint
+        paste0(
+          tseriesdir, "/", idx, "_",
+          modelsandobssplit[obs_order[n]], "_for_timeseries.nc"
+        )
+      )
+    }
+  }
+}
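+
+# The cdo() chain in time_series_preprocessing() computes, per dataset, the
+# field mean of standardized anomalies: sub -> detrend -> timstd -> div ->
+# fldmean. Below is a minimal standalone sketch of that chain for a single
+# file. It is illustrative only and is not called by this diagnostic; the
+# file arguments are hypothetical, and it relies on the same cdo() wrapper
+# used throughout this script.
+normalized_fldmean_sketch <- function(infile, timemean_file, outfile) {
+  anom <- tempfile(fileext = ".nc")
+  detr <- tempfile(fileext = ".nc")
+  dstd <- tempfile(fileext = ".nc")
+  stand <- tempfile(fileext = ".nc")
+  # anomalies with respect to the time mean
+  cdo("sub", input = c(infile, timemean_file), output = anom, options = "-O")
+  # remove the linear trend, then take the temporal standard deviation
+  cdo("detrend", input = anom, output = detr, options = "-O")
+  cdo("timstd", input = detr, output = dstd, options = "-O")
+  # standardize the anomalies and reduce them to a field-mean time series
+  cdo("div", input = c(anom, dstd), output = stand, options = "-O")
+  cdo("fldmean", input = stand, output = outfile, options = "-O")
+  unlink(c(anom, detr, dstd, stand))
+  return(outfile)
+}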
+
+#
+#
+# Method to plot the time series
+# of a single idx from already preprocessed data;
+# yearly data is assumed
+# @param plot_dir - path to the directory for the output figure
+# @param idx name of the index to be processed
+# @param start_yr start year of the data, used to convert
+#        values from a days-after-start-year format to years.
+#
+timeseries_plot <- function(plot_dir = "./plot", idx = "tn10pETCCDI_yr",
+                            obs_list, start_yr = 2006, end_yr = 2010,
+                            normalize = FALSE) {
+  # Drawing parameters
+  leg_names <- c(mip_name, obs_list)
+
+  ## Reading the netcdf data files into R
+  ## First the ensemble mean file
+  ensm <- nc_open(paste(work_dir, "/timeseries/", idx,
+    "_ensmean_for_timeseries.nc",
+    sep = ""
+  ))
+
+  ## Then the 25th percentile file
+  enspctl25 <- nc_open(paste(work_dir, "/timeseries/", idx,
+    "_25enspctl_for_timeseries.nc",
+    sep = ""
+  ))
+  ## Finally the 75th percentile file
+  enspctl75 <- nc_open(paste(work_dir, "/timeseries/", idx,
+    "_75enspctl_for_timeseries.nc",
+    sep = ""
+  ))
+
+  ## Reading in the time variable and converting to years:
+  ts <- nc.get.time.series(ensm) # nolint
+  time_conv <- format(ts, "%Y") # extract years
+
+  ## Stripping off the _yr tail of the index name
+  idx_no <- which(idx_df$idx_etccdi_time == idx)
+  idx_name <- paste(idx_df$idx_etccdi[idx_no], "ETCCDI", sep = "")
+
+  # Reading in the y-variables to be plotted
+  # First the ensemble mean
+  idx_ensm <- ncvar_get(ensm, idx_name)
+  # Then the 25th percentile
+  idx_ens25 <- ncvar_get(enspctl25, idx_name)
+  # Finally the 75th percentile
+  idx_ens75 <- ncvar_get(enspctl75, idx_name)
+
+  # Maximum and minimum x and y values
+  max.x <- end_yr
+  min.x <- start_yr
+  irange <- ( (time_conv >= min.x) & (time_conv <= max.x))
+  max.y <- max(idx_ensm[irange], idx_ens25[irange], idx_ens75[irange])
+  min.y <- min(idx_ensm[irange], idx_ens25[irange], idx_ens75[irange])
+
+  # Reading in the observations via a loop
+  obsdata_list <- list()
+  n <- 0
+  for (o in obs_list) {
+    n <- n + 1
+    nc_obs <- nc_open(paste(work_dir, "/timeseries/", idx,
+      "_", o, "_for_timeseries.nc",
+      sep = ""
+    ))
+    ts_obs <- nc.get.time.series(nc_obs) # nolint
+    time_conv_obs <- format(ts_obs, "%Y") # extract years
+    idx_obs <- ncvar_get(nc_obs, idx_name)
+    nc_close(nc_obs)
+    obsdata_list[[n]] <- list(o, as.numeric(time_conv_obs), idx_obs)
+    irange <- ( (time_conv_obs >= min.x) & (time_conv_obs <= max.x))
+    max.y <- max(max.y, idx_obs[irange])
+    min.y <- min(min.y, idx_obs[irange])
+
+    if (n > length(ts_col_list)) {
+      print(paste(
+        "Error: There are more observations",
+        "than available color plotting parameters."
+      ))
+      print("Update the cfg_ExtremeEvents.r file.")
+      dev.off()
+      break
+    }
+  }
+
+  # Setting the x- and y-range limits for plotting
+  xrng <- as.numeric(c(min.x, max.x))
+  yrng <- c(min.y, max.y)
+  print(xrng)
+  print(yrng)
+
+  ## Making the name string for the plot
+  plotname <- paste(plot_dir, "/", idx, "_", length(obs_list),
+    "-obs_ensmean_timeseriesplot",
+    sep = ""
+  )
+
+  ## Setting the device to write the plot to
+  figure_filename <- paste(plotname, output_file_type, sep = ".")
+
+  ## Choose the output format for the figure
+  if (tolower(output_file_type) == "png") {
+    png(
+      filename = figure_filename,
+      width = ts_png_width,
+      height = ts_png_height,
+      units = ts_png_units,
+      pointsize = ts_png_pointsize,
+      bg = ts_png_bg
+    )
+  } else if (tolower(output_file_type) == "pdf") {
+    pdf(file = figure_filename)
+  } else if (tolower(output_file_type) == "eps") {
+    setEPS()
+    postscript(figure_filename)
+  }
+
+  n <- 1
+  # Parameters for plot
+  par(mfrow = c(1, 1), mar = c(4.5, 4.5, 2, 3))
+  # Plotting first the ensemble mean
+  plot(time_conv, idx_ensm,
+    type = "l", col = ts_col_list[n],
+    lty = ts_lty_list[n], xlim = xrng, ylim = yrng, lwd = ts_lwd_list[n],
+    ann = FALSE, xaxs = "i", yaxt = "n"
+  )
+  # Then making a transparent polygon between the 25th and 75th percentile
+  polygon(c(time_conv, rev(time_conv)), c(idx_ens75, rev(idx_ens25)),
+    col = alpha(ts_col_list[n], 0.1), border = NA
+  )
+
+  # Plotting the observations via a loop
+  n <- 0
+  for (o in obs_list) {
+    n <- n + 1
+    lines(obsdata_list[[n]][[2]], obsdata_list[[n]][[3]],
+      col = ts_col_list[n + 1], lty = ts_lty_list[n + 1],
+      lwd = ts_lwd_list[n + 1]
+    ) # plot observation
+  }
+
+  # Produce a legend
+  legend("top",
+    legend = leg_names, col = ts_col_list, lty = ts_lty_list,
+    lwd = ts_lwd_list, bty = "n", ncol = 3
+  )
+
+  # Produce a first y-axis
+  axis(side = 2, at = pretty(yrng, 5))
+
+  # Produce a second y-axis
+  axis(side = 4, at = pretty(yrng, 5))
+
+  # Producing a title from info in the netcdf file
+  title(main = idx_df$name[idx_no], font.main = 2)
+
+  # Choosing the x-label
+  title(xlab = "Year")
+
+  # Choosing the y-label from the idx_ylab list
+  title(ylab = idx_ylab[idx_no])
+  # Resetting the plotting device to default
+  dev.off()
+
+  # Close ensemble files
+  nc_close(ensm)
+  nc_close(enspctl25)
+  nc_close(enspctl75)
+
+  return(figure_filename)
+}
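+
+# A minimal sketch of the plotting idiom used in timeseries_plot() above: a
+# line for the ensemble mean plus a transparent polygon spanning the
+# 25th-75th percentile band. The data are synthetic, the helper is
+# illustrative only, and it assumes the same alpha() transparency helper
+# (from the scales package) that timeseries_plot() relies on.
+plot_band_sketch <- function() {
+  yrs <- 2000:2010
+  mid <- sin(seq(0, 2, length.out = length(yrs)))
+  lo <- mid - 0.3 # stand-in for the 25th percentile
+  hi <- mid + 0.3 # stand-in for the 75th percentile
+  plot(yrs, mid, type = "l", ylim = range(lo, hi), ann = FALSE)
+  # polygon() walks forward along the upper edge, then back along the lower
+  polygon(c(yrs, rev(yrs)), c(hi, rev(lo)),
+    col = alpha("black", 0.1), border = NA
+  )
+}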
diff --git a/esmvaltool/diag_scripts/hyint/hyint.R b/esmvaltool/diag_scripts/hyint/hyint.R
new file mode 100644
index 0000000000..5658c1cf3d
--- /dev/null
+++ b/esmvaltool/diag_scripts/hyint/hyint.R
@@ -0,0 +1,259 @@
+# #############################################################################
+# hyint.R
+# Authors: E. Arnone (ISAC-CNR, Italy)
+#          J. von Hardenberg (ISAC-CNR, Italy)
+# #############################################################################
+ )) + print("Update cfg_ExtermeEvents.r file.") + dev.off() + break + } + } + + # Setting the x- and y-range limits for plotting + xrng <- as.numeric(c(min.x, max.x)) + yrng <- c(min.y, max.y) + print(xrng) + print(yrng) + + ## Making name string for the plot + plotname <- paste(plot_dir, "/", idx, "_", length(obs_list), + "-obs_ensmean_timeseriesplot", + sep = "" + ) + + ## Setting device to write the plot to + figure_filename <- paste(plotname, output_file_type, sep = ".") + + ## Chose output format for figure + if (tolower(output_file_type) == "png") { + png( + filename = figure_filename, + width = ts_png_width, + height = ts_png_height, + units = ts_png_units, + pointsize = ts_png_pointsize, + bg = ts_png_bg + ) + } else if (tolower(output_file_type) == "pdf") { + pdf(file <- figure_filename) + } else if (tolower(output_file_type) == "eps") { + setEPS() + postscript(figure_filename) + } + + n <- 1 + # Parameters for plot + par(mfrow = c(1, 1), mar = c(4.5, 4.5, 2, 3)) + # Plotting first the ensemblemean + plot(time_conv, idx_ensm, + type = "l", col = ts_col_list[n], + lty = ts_lty_list[n], xlim = xrng, ylim = yrng, lwd = ts_lwd_list[n], + ann = FALSE, xaxs = "i", yaxt = "n" + ) + # Then making a transparent polygon between the 25th and 75 percentile + polygon(c(time_conv, rev(time_conv)), c(idx_ens75, rev(idx_ens25)), + col = alpha(ts_col_list[n], 0.1), border = NA + ) + + # Plotting observations and plotting via a loop + n <- 0 + for (o in obs_list) { + n <- n + 1 + lines(obsdata_list[[n]][[2]], obsdata_list[[n]][[3]], + col = ts_col_list[n + 1], lty = ts_lty_list[n + 1], + lwd = ts_lwd_list[n + 1] + ) # plot observation + } + + # Produce a legend + legend("top", + legend = leg_names, col = ts_col_list, lty = ts_lty_list, + lwd = ts_lwd_list, bty = "n", ncol = 3 + ) + + # Produce a first y-axis + axis(side = 2, at = pretty(yrng, 5)) + axis(side = 2, at = pretty(yrng, 5)) + pretty(yrng, 10) + + axis(side = 2, at = pretty(yrng, 5)) + + # Produce a second y-axis + axis(side = 4, at = pretty(yrng, 5)) + + # Producing a title from info in netcdf file + title(main = idx_df$name[idx_no], font.main = 2) + + # Choosing x-label + title(xlab = "Year") + + # Chosing y-label from idx_ylab list + title(ylab = idx_ylab[idx_no]) + # Resetting plotting device to default + dev.off() + + # Close Ensemble files + nc_close(ensm) + nc_close(enspctl25) + nc_close(enspctl75) + + return(figure_filename) +} diff --git a/esmvaltool/diag_scripts/hyint/hyint.R b/esmvaltool/diag_scripts/hyint/hyint.R new file mode 100644 index 0000000000..5658c1cf3d --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint.R @@ -0,0 +1,259 @@ +# ############################################################################# +# hyint.R +# Authors: E. Arnone (ISAC-CNR, Italy) +# J. von Hardenberg (ISAC-CNR, Italy) +# ############################################################################# +# Description +# HyInt is a tool for calculation of the HY-INT index (Giorgi et al. 2011) +# and additional hydroclimatic indices (Giorgi et al. 2014) +# which allow an estimate of the overall behaviour of the hydroclimatic cycle. +# The tool calculates also timeseries and trends over selected regions and +# produces a variety of types of plots including maps and timeseries. The +# timeseries/trend and plotting modules handle also ETCCDI indices data +# calculated with the climdex library through an ad hoc pre-processing. 
+#
+# Details
+# The following indices are calculated based on input daily precipitation data:
+# PRY = mean annual precipitation
+# INT = mean annual precipitation intensity (intensity during wet days, or
+#       simple precipitation intensity index SDII)
+# WSL = mean annual wet spell length (number of consecutive days
+#       during each wet spell)
+# DSL = mean annual dry spell length (number of consecutive days
+#       during each dry spell)
+# PA  = precipitation area (area over which precipitation occurs
+#       on any given day)
+# R95 = heavy precipitation index (percent of total precipitation above the
+#       95th percentile of the reference distribution)
+# HY-INT = hydroclimatic intensity. HY-INT = normalized(INT) x normalized(DSL).
+#
+# Originally developed for EC-Earth data and then extended to any model and
+# observational data, producing plots of data vs. a reference dataset
+# (e.g. ERA-INTERIM). Indices are normalized over a reference period. Both
+# absolute and normalized values are made available: users can select the
+# indices to be stored and plotted. The tool makes extensive use of the
+# cfg_hyint configuration file for user-selectable options and to ease
+# feeding the needed inputs (e.g. region boundaries for timeseries or value
+# ranges and labels for figures).
+#
+# Required
+# It reads daily precipitation data through ESMValTool. If requested, input
+# precipitation data are pre-processed by interpolating onto a common grid
+# set by the user in the hyint_parameters file.
+# R libraries:"tools","PCICt","ncdf4","maps"
+#
+# Optional
+# Several options can be selected via the configuration file, e.g. provision
+# of external normalization functions for the indices; a reference
+# climatology for the R95 index; type of plots; etc.
+#
+# Caveats
+#
+# Modification history
+#    20181001-A_arno_en: converted to latest v2.0
+#    20180302-A_arno_en: converted to ESMValTool2
+#    20171206-A_arno_en: modularized version accepting climdex indices
+#    20171010-A_arno_en: modularized version
+#    20170901-A_arno_en: 1st github version
+#
+# ############################################################################
+
+library(tools)
+library(yaml)
+library(ncdf4)
+
+# get path to script and source subroutines (if needed)
+args <- commandArgs(trailingOnly = FALSE)
+spath <- paste0(dirname(unlist(strsplit(grep("--file", args,
+  value = TRUE), "="))[2]), "/")
+
+source(paste0(spath, "hyint_functions.R"))
+source(paste0(spath, "hyint_metadata.R"))
+source(paste0(spath, "hyint_preproc.R"))
+source(paste0(spath, "hyint_diagnostic.R"))
+source(paste0(spath, "hyint_etccdi_preproc.R"))
+source(paste0(spath, "hyint_trends.R"))
+source(paste0(spath, "hyint_plot_maps.R"))
+source(paste0(spath, "hyint_plot_trends.R"))
+source(paste0(spath, "hyint_parameters.R"))
+
+diag_script_cfg <- paste0(spath, "hyint_parameters.R")
+
+# Read settings and metadata files
+args <- commandArgs(trailingOnly = TRUE)
+settings_file <- args[1]
+settings <- yaml::read_yaml(settings_file)
+# load data from settings
+for (myname in names(settings)) {
+  temp <- get(myname, settings)
+  assign(myname, temp)
+}
+metadata <- yaml::read_yaml(settings$input_files)
+
+## check required settings
+if (!all(plot_type %in% c(1, 2, 3, 11, 12, 13, 14, 15))) {
+  stop("requested plot_type not available")
+}
+
+# setup provenance file and list
+provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml")
+provenance <- list()
+prov_info <- list()
+
+# get the names of the climofiles and the metadata list
+# associated with the first climofile
+climofiles <- names(metadata)
+climolist0 <- get(climofiles[1], metadata)
+
+# set the variable name; daily precipitation ("pr") is the required input
+var0 <- "pr"
+
+diag_base <- climolist0$diagnostic
+print(paste0(diag_base, ": starting routine"))
+
+# default the ETCCDI input directory to work_dir when not provided
+if (length(etccdi_dir) != 1) {
+  etccdi_dir <- work_dir
+}
+dir.create(plot_dir, recursive = T, showWarnings = F)
+dir.create(work_dir, recursive = T, showWarnings = F)
+
+# Set dir
+setwd(run_dir)
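+
+# The block below extracts one field at a time from the metadata list with
+# repeated unname(sapply(...)) calls. The helper below expresses that pattern
+# in one place; it is an illustrative sketch only and is not used by this
+# script.
+get_meta_column <- function(metadata, field) {
+  # pull the given field out of every dataset entry in the metadata list
+  unname(sapply(metadata, "[[", field))
+}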
+
+# extract metadata
+models_name <- unname(sapply(metadata, "[[", "dataset"))
+reference_model <- unname(sapply(metadata, "[[", "reference_dataset"))[1]
+models_start_year <- unname(sapply(metadata, "[[", "start_year"))
+models_end_year <- unname(sapply(metadata, "[[", "end_year"))
+models_experiment <- unname(sapply(metadata, "[[", "exp"))
+models_ensemble <- unname(sapply(metadata, "[[", "ensemble"))
+
+# select the reference dataset; if not available, use the last of the list
+ref_idx <- which(models_name == reference_model)
+if (length(ref_idx) == 0) {
+  ref_idx <- length(models_name)
+}
+
+# check requested time intervals
+if (!anyNA(match(models_start_year[ref_idx]:models_end_year[ref_idx],
+  norm_years[1]:norm_years[2]))) {
+  stop(paste0("normalization period covering entire dataset: ",
+    "reduce it to calculate meaningful results"))
+}
+if (trend_years != F) {
+  if (anyNA(match(trend_years[1]:trend_years[2],
+    models_start_year[ref_idx]:models_end_year[ref_idx]))) {
+    stop("trend period outside available data")
+  }
+  if (trend_years[2] - trend_years[1] < 2) {
+    stop("set at least a 3 year interval for trend calculation")
+  }
+}
+
+# Select the regions and indices to be adopted and test the selection
+selregions <- match(select_regions, region_codes)
+if (anyNA(selregions)) {
+  stop("requested region not available")
+}
+selfields <- match(select_indices, field_names)
+if (anyNA(selfields)) {
+  stop("requested field not available")
+}
+
+## Run regridding and diagnostic
+if (write_netcdf) {
+
+  # loop through models
+  for (model_idx in c(1:(length(models_name)))) {
+
+    # Setup filenames
+    climofile <- climofiles[model_idx]
+    sgrid <- "noregrid"
+    if (rgrid != F) {
+      sgrid <- rgrid
+    }
+    regfile <- getfilename_regridded(run_dir, sgrid, var0, model_idx)
+
+    # If needed, pre-process the file and add an absolute time axis
+    if (run_regridding) {
+      if (!file.exists(regfile) | force_regridding) {
+        dummy <- hyint_preproc(work_dir, model_idx, ref_idx, climofile,
+          regfile, rgrid)
+      } else {
+        gridfile <- getfilename_indices(work_dir, diag_base, model_idx,
+          grid = T)
+        cdo("griddes", input = regfile, stdout = gridfile)
+        print(paste0(diag_base, ": data file exists: ", regfile))
+        print(paste0(diag_base, ": corresponding grid: ", gridfile))
+      }
+    }
+
+    if (run_diagnostic) {
+      # Loop through seasons and call the diagnostic
+      for (seas in seasons) {
+        prov_info <- hyint_diagnostic(work_dir, regfile, model_idx, seas,
+          prov_info, rewrite = force_diagnostic)
+      }
+    }
+  }
+}
+
+## Preprocess ETCCDI input files and merge them with the HyInt indices
+if (write_netcdf & etccdi_preproc) {
+  for (model_idx in c(1:(length(models_name)))) {
+    gridfile <- getfilename_indices(work_dir, diag_base, model_idx, grid = T)
+    dummy <- hyint_etccdi_preproc(work_dir, etccdi_dir, etccdi_list_import,
+      gridfile, model_idx, "ALL", yrmon = "yr")
+  }
+}
+
+## Calculate timeseries and trends
+if (write_netcdf & run_timeseries) {
+  for (model_idx in c(1:(length(models_name)))) {
+    for (seas in seasons) {
+      prov_info <- hyint_trends(work_dir, model_idx, seas, prov_info)
+    }
+  }
+}
+
+## 
Create figures +if (write_plots) { + plot_type_list <- plot_type + for (plot_type in plot_type_list) { + print(paste0("******** PLOT TYPE: ", plot_type, " *********")) + for (seas in seasons) { + if (plot_type <= 10) { + # Plot maps + prov_info <- hyint_plot_maps( + work_dir, plot_dir, work_dir, ref_idx, seas, prov_info) + } else { + # Plot timeseries and trends + prov_info <- hyint_plot_trends( + work_dir, plot_dir, ref_idx, seas, prov_info) + } + } + } +} + +# Assign provenance information for timeseries&trends figures +for (fname in names(prov_info)) { + xprov <- list(ancestors = climofiles[unlist(prov_info[[fname]]$model_idx)], + authors = list("arno_en", "hard_jo"), + references = list("giorgi11jc", "giorgi14jgr"), + projects = list("c3s-magic"), + caption = prov_info[[fname]]$caption, + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("global")) + provenance[[fname]] <- xprov +} + +# Write provenance to file +write_yaml(provenance, provenance_file) + +# Closing message +print(paste0(diag_base, ": done.")) diff --git a/esmvaltool/diag_scripts/hyint/hyint_diagnostic.R b/esmvaltool/diag_scripts/hyint/hyint_diagnostic.R new file mode 100644 index 0000000000..de45a701f4 --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_diagnostic.R @@ -0,0 +1,391 @@ +###################################################### +#-----Hydroclimatic Intensity (HyInt) diagnostic-----# +#-------------E. Arnone (June 2017)------------------# +###################################################### +hyint_diagnostic <- function(work_dir, infile, model_idx, season, + prov_info, rewrite = FALSE) { + + # setting up path and parameters + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + + outfile <- getfilename_indices(work_dir, diag_base, model_idx, season) + + # If diagnostic output file already exists skip calculation + if (file.exists(outfile) & !rewrite) { + print(paste0(diag_base, ": output file already exists:", outfile)) + print(paste0(diag_base, ": skipping calculation")) + return() + } + + # Test input file exists + print(infile) + if (!file.exists(infile)) { + stop("HyInt: missing regridded input file. Run HyInt pre-processing.") + } + + # setting up time domain + years <- year1:year2 + timeseason <- season2timeseason(season) + + # file opening + pr_list <- ncdf_opener_universal(infile, + namevar = "pr", + tmonths = timeseason, tyears = years, rotate = rotlongitude + ) + + # extract calendar and time unit from the original file + tcal <- attributes(pr_list$time)$cal + tunit <- attributes(pr_list$time)$units + + etime <- power_date_new(pr_list$time) + + # declare and convert variable + pr <- pr_list$field * 86400. # convert (Kg m-2 s-1) to (mm day-1) + + ############################################################# + #--------HyInt calculation (Giorgi et al. 2011/14)----------# + ############################################################# + + # Setup useful arrays and parameters + nyear <- length(years) + pry <- pr[, , 1:nyear] * NA # annual mean precipitation (over all days) + int <- pr[, , 1:nyear] * NA # mean prec. 
intensity (over wet days == SDII) + dsl <- pr[, , 1:nyear] * NA # mean dry spell length (DSL) + wsl <- pr[, , 1:nyear] * NA # mean wet spell length (WSL) + pa <- pr[, , 1:nyear] * NA # precipitation area (PA) + r95 <- pr[, , 1:nyear] * NA # heavy precipitation index (R95) + pry_norm <- pry + int_norm <- int + dsl_norm <- dsl + wsl_norm <- wsl + pa_norm <- pa + r95_norm <- r95 + + # Evaluate r95_threshold over normalization period (or load it if requested) + if (external_r95[1] == F) { + r95_threshold <- apply(pr, c(1, 2), quantile, probs = 0.95, na.rm = T) + } else { + # if required, use HyInt file from historical period + if (external_r95[1] == "HIST") { + external_r95 <- getfilename_indices(work_dir, diag_base, model_idx, + season, + hist = T, hist_years = norm_years + ) + } + r95_idx <- model_idx # assume each model has its r95_threshold file + if (length(external_r95) == 1) { + # if list of files with r95_threshold has only 1 entry, + # use that for all models + r95_idx <- 1 + } + print(paste( + diag_base, ": loading external r95_threshold data from ", + external_r95[r95_idx] + )) + r95_threshold <- ncdf_opener(external_r95[r95_idx], "r95_threshold", + rotate = "no" + ) + } + r95_threshold360 <- replicate(360, r95_threshold) + r95_threshold365 <- replicate(365, r95_threshold) + r95_threshold366 <- replicate(366, r95_threshold) + + # Calculate indices + print(paste0(diag_base, ": calculating indices")) + for (iyear in 1:nyear) { + ret_year <- which(etime$year == years[iyear]) + pr_year <- pr[, , ret_year] + + r95_thresh_year <- r95_threshold365 + if (length(pr_year[1, 1, ]) == 360) { + r95_thresh_year <- r95_threshold360 + } + if (length(pr_year[1, 1, ]) == 366) { + r95_thresh_year <- r95_threshold366 + } + + + # Identify dry and wet days (Salinger and Griffiths 2001) + ret_dry <- (pr_year < 1) # Dry days when pr < 1 mm + ret_wet <- (pr_year >= 1) # Rainy days when pr >= 1 mm + ret_below_r95 <- (pr_year < r95_thresh_year) # Rainy days when pr < + # reference 95% quantile + pr_year_dry <- pr_year * 0. + pr_year_dry[ret_dry] <- 1 # mask with 1 for dry day + pr_year_wet <- pr_year * 0. + pr_year_wet[ret_wet] <- 1 # mask with 1 for rainy day + pr_year_int <- pr_year + pr_year_int[ret_dry] <- NA # actual precipitation but with NA on dry days + pr_year_r95 <- pr_year + pr_year_r95[ret_below_r95] <- NA # actual precipitation but with NA on + # days with pr < reference 95% quantile + + # Mean annual precipitation + pry_year <- apply(pr_year, c(1, 2), mean, na.rm = T) + + # Mean annual precipitation intensity (INT/SDII; intensity during wet days) + int_year <- apply(pr_year_int, c(1, 2), mean, na.rm = T) + + # Mean annual dry spell length (DSL: + # number of consecutive dry days during each dry spell). + dsl_year <- mean_spell_length(pr_year_dry) + + # Mean annual wet spell length (WSL: + # number of consecutive wet days during each wet spell). + wsl_year <- mean_spell_length(pr_year_wet) + + # Precipitation area (PA: number of rainy days * area of grid box) + area_size <- area_size(ics, ipsilon) + pa_year <- (apply(pr_year_wet, c(1, 2), sum, na.rm = T)) * area_size + + # Heavy precipitation Index (R95: percent of total precipitation above the + # 95% percentile of the reference distribution); + r95_year <- apply(pr_year_r95, c(1, 2), sum, na.rm = T) / + apply(pr_year, c(1, 2), sum, na.rm = T) * 100. 
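+    # Worked example (illustrative numbers only): if 200 mm of a 1000 mm
+    # annual total falls on days above the reference 95th percentile,
+    # r95_year = 200 / 1000 * 100 = 20 (percent).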
+ + # Assign in-loop variables to storage array + pry[, , iyear] <- pry_year + dsl[, , iyear] <- dsl_year + wsl[, , iyear] <- wsl_year + int[, , iyear] <- int_year + pa[, , iyear] <- pa_year + r95[, , iyear] <- r95_year + } + + # remove desert areas if required + # (mean annual precipitation <0.5 mm, Giorgi et al. 2014) + if (removedesert) { + retdes <- which(pry < 0.5) + pry[retdes] <- NA + # create mask with NAs for deserts and 1's for not-desert + retdes2D <- apply(pry * 0, c(1, 2), sum) + 1 + retdes3D <- replicate(nyear, retdes2D) + pry <- pry * retdes3D + dsl <- dsl * retdes3D + wsl <- wsl * retdes3D + int <- int * retdes3D + pa <- pa * retdes3D + r95 <- r95 * retdes3D + } + + # Normalize to available data in reference period + # NOTE: take care of normalization by 0: when the normalizing function is 0 + # (e.g. short dataset in reference period), the resulting normalized + # index will be NA. + + # calculate normalization function + if (external_norm[1] == F) { + ret_years <- which(years >= norm_years[1] & years <= norm_years[2]) + if (length(ret_years) == 0) { + stop(paste0(diag_base, ": no data over selected normalization period, + unable to normalize")) + } + pry_mean <- apply(pry[, , ret_years], c(1, 2), mean, na.rm = T) + dsl_mean <- apply(dsl[, , ret_years], c(1, 2), mean, na.rm = T) + wsl_mean <- apply(wsl[, , ret_years], c(1, 2), mean, na.rm = T) + int_mean <- apply(int[, , ret_years], c(1, 2), mean, na.rm = T) + pa_mean <- apply(pa[, , ret_years], c(1, 2), mean, na.rm = T) + r95_mean <- apply(r95[, , ret_years], c(1, 2), mean, na.rm = T) + pry_mean_sd <- apply(pry[, , ret_years], c(1, 2), sd, na.rm = T) + dsl_mean_sd <- apply(dsl[, , ret_years], c(1, 2), sd, na.rm = T) + wsl_mean_sd <- apply(wsl[, , ret_years], c(1, 2), sd, na.rm = T) + int_mean_sd <- apply(int[, , ret_years], c(1, 2), sd, na.rm = T) + pa_mean_sd <- apply(pa[, , ret_years], c(1, 2), sd, na.rm = T) + r95_mean_sd <- apply(r95[, , ret_years], c(1, 2), sd, na.rm = T) + } else { + # load normalization data from file + mean_idx <- model_idx # assume each model has its normalization file + if (external_norm[1] == "HIST") { + # if required, use HyInt file from historical period + external_norm <- getfilename_indices(work_dir, diag_base, model_idx, + season, + hist = T, hist_years = norm_years + ) + } + if (length(external_norm) == 1) { + mean_idx <- 1 + } + # if list of files with normalization functions has only 1 entry, + # use that for all models + print(paste( + diag_base, ": loading external normalization data from ", + external_norm[mean_idx] + )) + pry_mean <- ncdf_opener(external_norm[mean_idx], "pry_mean", rotate = "no") + dsl_mean <- ncdf_opener(external_norm[mean_idx], "dsl_mean", rotate = "no") + wsl_mean <- ncdf_opener(external_norm[mean_idx], "wsl_mean", rotate = "no") + int_mean <- ncdf_opener(external_norm[mean_idx], "int_mean", rotate = "no") + pa_mean <- ncdf_opener(external_norm[mean_idx], "pa_mean", rotate = "no") + r95_mean <- ncdf_opener(external_norm[mean_idx], "r95_mean", rotate = "no") + pry_mean_sd <- ncdf_opener(external_norm[mean_idx], "pry_mean_sd", + rotate = "no" + ) + dsl_mean_sd <- ncdf_opener(external_norm[mean_idx], "dsl_mean_sd", + rotate = "no" + ) + wsl_mean_sd <- ncdf_opener(external_norm[mean_idx], "wsl_mean_sd", + rotate = "no" + ) + int_mean_sd <- ncdf_opener(external_norm[mean_idx], "int_mean_sd", + rotate = "no" + ) + pa_mean_sd <- ncdf_opener(external_norm[mean_idx], "pa_mean_sd", + rotate = "no" + ) + r95_mean_sd <- ncdf_opener(external_norm[mean_idx], "r95_mean_sd", + 
rotate = "no" + ) + } + + # remove 0s from normalizing functions + pry_mean[pry_mean == 0] <- NA + dsl_mean[dsl_mean == 0] <- NA + wsl_mean[wsl_mean == 0] <- NA + int_mean[int_mean == 0] <- NA + pa_mean[pa_mean == 0] <- NA + r95_mean[r95_mean == 0] <- NA + + # perform normalization + for (iyear in 1:nyear) { + pry_norm[, , iyear] <- pry[, , iyear] / pry_mean + dsl_norm[, , iyear] <- dsl[, , iyear] / dsl_mean + wsl_norm[, , iyear] <- wsl[, , iyear] / wsl_mean + int_norm[, , iyear] <- int[, , iyear] / int_mean + pa_norm[, , iyear] <- pa[, , iyear] / pa_mean + r95_norm[, , iyear] <- r95[, , iyear] / r95_mean + } + + # Calculate HY-INT index + hyint <- dsl_norm * int_norm + + + # Calculate mean and mean_sd for hyint + if (external_norm[1] == F) { + # calculate or load hyint_mean from file for consistency with other indice + ret_years <- which(years >= norm_years[1] & years <= norm_years[2]) + hyint_mean <- apply(hyint[, , ret_years], c(1, 2), mean, na.rm = T) + hyint_mean_sd <- apply(hyint[, , ret_years], c(1, 2), sd, na.rm = T) + } else { + # load normalization data from file + mean_idx <- model_idx # assume each model has its normalization file + if (length(external_norm) == 1) { + # if list of files with normalization functions has only 1 entry, + # use that for all models + mean_idx <- 1 + } + hyint_mean <- ncdf_opener(external_norm[mean_idx], "hyint_mean", + rotate = "no" + ) + hyint_mean_sd <- ncdf_opener(external_norm[mean_idx], "hyint_mean_sd", + rotate = "no" + ) + } + + # HyInt list + hyint_list <- list( + pry = pry, dsl = dsl, wsl = wsl, int = int, pa = pa, r95 = r95, + hyint = hyint, pry_mean = pry_mean, dsl_mean = dsl_mean, + wsl_mean = wsl_mean, int_mean = int_mean, pa_mean = pa_mean, + r95_mean = r95_mean, hyint_mean = hyint_mean, pry_mean_sd = pry_mean_sd, + dsl_mean_sd = dsl_mean_sd, wsl_mean_sd = wsl_mean_sd, + int_mean_sd = int_mean_sd, pa_mean_sd = pa_mean_sd, + r95_mean_sd = r95_mean_sd, hyint_mean_sd = hyint_mean_sd, + pry_norm = pry_norm, dsl_norm = dsl_norm, wsl_norm = wsl_norm, + int_norm = int_norm, pa_norm = pa_norm, r95_norm = r95_norm, + r95_threshold = r95_threshold + ) + + print(paste(diag_base, ": calculation done. 
Returning mean precipitation, + sdii, dsl, wsl, pa, r95 (absolute and normalized values) + and hyint indices")) + + + ########################################################## + #------------------------Save to NetCDF------------------# + ########################################################## + + # saving output to netcdf files + print(paste0(diag_base, ": saving data to NetCDF file:")) + + # define fieds to be saved + field_list <- c( + "pry", "dsl", "wsl", "int", "pa", "r95", "hyint", "pry_mean", "dsl_mean", + "wsl_mean", "int_mean", "pa_mean", "r95_mean", "hyint_mean", + "pry_mean_sd", "dsl_mean_sd", "wsl_mean_sd", "int_mean_sd", "pa_mean_sd", + "r95_mean_sd", "hyint_mean_sd", "pry_norm", "dsl_norm", "wsl_norm", + "int_norm", "pa_norm", "r95_norm", "r95_threshold" + ) + + TIME <- paste(tunit, " since ", year1, "-", timeseason[1], "-01 00:00:00", + sep = "" + ) + + # dimensions definition + x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") + y <- ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") + t <- ncdim_def(timedimname, "years", years, + unlim = T, calendar = tcal, + longname = timedimname + ) + # timedim <- ncdim_def( timedimname, + # "years since 1950-01-01 00:00:00", (years-1950),unlim=T) + + # t <- ncdim_def( timedimname, TIME, years, + # unlim=T, calendar=tcal, longname=timedimname) + + for (var in field_list) { + field <- get(var, hyint_list) + field[is.nan(field)] <- NA + metadata <- setmetadata_indices(var) + longvar <- metadata$longvar + unit <- metadata$unit + # variable definitions + var_ncdf <- ncvar_def(var, unit, list(x, y, t), -999, + longname = longvar, + prec = "single", compression = 1 + ) + if ( (var == "pry_mean") | (var == "int_mean") | (var == "dsl_mean") | + (var == "wsl_mean") | (var == "pa_mean") | (var == "r95_mean") | + (var == "hyint_mean") | (var == "pry_mean_sd") | + (var == "int_mean_sd") | (var == "dsl_mean_sd") | + (var == "wsl_mean_sd") | (var == "pa_mean_sd") | + (var == "r95_mean_sd") | (var == "hyint_mean_sd") | + (var == "r95_threshold")) { + var_ncdf <- ncvar_def(var, unit, list(x, y), -999, + longname = longvar, + prec = "single", compression = 1 + ) + } + assign(paste0("var", var), var_ncdf) + assign(paste0("field", var), field) + } + + # Netcdf file creation + print(paste(diag_base, ": saving output to ", outfile)) + namelist <- paste0("var", field_list) + nclist <- mget(namelist) + ncfile <- nc_create(outfile, nclist) + for (var in field_list) { + # put variables into the ncdf file + ndims <- get(paste0("var", var))$ndims + ncvar_put(ncfile, var, get(paste0("field", var)), + start = rep(1, ndims), + count = rep(-1, ndims) + ) + } + nc_close(ncfile) + + # Set provenance for this output file + caption <- paste0("Hyint indices for years ", year1, " to ", year2, + " according to ", models_name[model_idx]) + xprov <- list(ancestors = list(infile), + model_idx = list(model_idx), + caption = caption) + + # Store provenance in main provenance list + prov_info[[outfile]] <- xprov + + print(paste(diag_base, ": diagnostic netCDF files saved")) + return(prov_info) +} diff --git a/esmvaltool/diag_scripts/hyint/hyint_etccdi_preproc.R b/esmvaltool/diag_scripts/hyint/hyint_etccdi_preproc.R new file mode 100644 index 0000000000..85c8422baf --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_etccdi_preproc.R @@ -0,0 +1,41 @@ +###################################################### +#-------------ETCCDI preprocessing for HyInt---------# +#-------------E. 
Arnone (Oct 2017)-------------------# +############################################################################ +# ABOUT: This function pre-process ETCCDI files obtained with the +# CRESCENDO_extremeEvents namelist remapping the data from +# gaussian to lonlat, changing longitude range from 0/360 to -180/180 +# and merging all indices into the HyInt indices file. + +hyint_etccdi_preproc <- function(work_dir, etccdi_dir, etccdi_list_import, + cdo_grid, model_idx, season, yrmon = "yr") { + year1 <- toString(models_start_year[model_idx]) + year2 <- toString(models_end_year[model_idx]) + print(str(c(year1, year2))) + hyint_file <- getfilename_indices(work_dir, diag_base, model_idx, season) + etccdi_files <- getfilename_etccdi(etccdi_dir, etccdi_list_import, model_idx, + yrmon = "yr") + etccdi_files_tmp <- c() + for (sfile in etccdi_files) { + sfile_tmp0 <- cdo("delvar", args = "time_bnds", input = sfile) + if (rgrid != F) { + sfile_tmp <- cdo("setgrid", args = cdo_grid, input = sfile_tmp0) + } else { + sfile_tmp <- cdo("sellonlatbox", args = "-180,180,-90,90", + input = sfile_tmp0) + } + etccdi_files_tmp <- c(etccdi_files_tmp, sfile_tmp) + unlink(sfile_tmp0) + } + hyint_file_tmp <- tempfile() + mv_command <- paste("mv -n ", hyint_file, hyint_file_tmp) + system(mv_command) + print(paste0("HyInt: merging ", length(etccdi_files), " ETCCDI files")) + hyint_file_tmp_sel <- cdo("sellonlatbox", args = "-180,180,-90,90", + input = hyint_file_tmp) + cdo("merge", options = "-O", input = c(hyint_file_tmp_sel, etccdi_files_tmp), + output = hyint_file) + unlink(c(etccdi_files_tmp, hyint_file_tmp, hyint_file_tmp_sel)) + + return(0) +} diff --git a/esmvaltool/diag_scripts/hyint/hyint_functions.R b/esmvaltool/diag_scripts/hyint/hyint_functions.R new file mode 100644 index 0000000000..db087ea67e --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_functions.R @@ -0,0 +1,1364 @@ +# ############################################################################# +# hyint_functions.R +# +# Author: Enrico Arnone (ISAC-CNR, Italy) +# +# ############################################################################# +# Description +# Functions used in HyInt routines +# +# Modification history +# 20170901-A_arno_en: adapted to HyInt and extended +# 20170522-A_davi_pa: Creation for MiLES +# ############################################################################# + +# basis functions + +########################################################## +#------------------------Packages------------------------# +########################################################## + +# loading packages +library("maps") +library("ncdf4") +library("PCICt") + +# check if fast linear fit is operative (after R 3.1): +# 3x faster than lm.fit, 36x faster than lm +if (exists(".lm.fit")) { + lin.fit <- .lm.fit +} else { + lin.fit <- lm.fit +} + + +########################################################## +#----------------Naming functions------------------------# +########################################################## + +getfilename_regridded <- function(spath, rgrid, var0, model_idx) { + exp <- models_name[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + model_exp <- models_experiment[model_idx] + model_ens <- models_ensemble[model_idx] + filename <- paste0( + spath, "/", exp, "_", model_exp, "_", model_ens, + "_", toString(year1), "-", toString(year2), + "_", var0, "_", rgrid, ".nc" + ) + return(filename) +} + +getfilename_indices <- function(spath, label, model_idx, season, hist = F, + 
hist_years = hist_years, grid = F, topo = F) { + exp <- models_name[model_idx] + model_exp <- models_experiment[model_idx] + model_ens <- models_ensemble[model_idx] + if (grid) { + filename <- paste0( + spath, "/", label, "_", exp, "_", + model_exp, "_", model_ens, ".grid" + ) + } else if (topo) { + filename <- paste0( + spath, "/", label, "_", exp, "_", + model_exp, "_", model_ens, "_topo.nc" + ) + } else { + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + if (hist) { + model_exp <- "historical" + year1 <- hist_years[1] + year2 <- hist_years[2] + } + filename <- paste0( + spath, "/", label, "_", exp, "_", model_exp, "_", + model_ens, "_", toString(year1), "_", toString(year2), + "_", season, ".nc" + ) + } + return(filename) +} + +getfilename_etccdi <- function(spath, var, model_idx, yrmon = "yr") { + # Function to get names of files of ETCCDI indices + # If input 'var' is an array of names, 'filename' an array will be as well + + filename <- "" + for (svar in var) { + exp <- models_name[model_idx] + model_exp <- models_experiment[model_idx] + model_ens <- models_ensemble[model_idx] + year1 <- toString(models_start_year[model_idx]) + year2 <- toString(models_end_year[model_idx]) + if (yrmon == "mon") { + year1 <- paste0(year1, "01") + year2 <- paste0(year2, "12") + } + filenametmp <- paste0( + spath, "/", svar, "_", yrmon, "_", model_exp, "_", + exp, "_", model_ens, "_", year1, "-", year2, ".nc" + ) + filename <- c(filename, filenametmp) + } + filename <- filename[2:length(filename)] + return(filename) +} + +getfilename_trends <- function(spath, label, model_idx, season) { + exp <- models_name[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + model_exp <- models_experiment[model_idx] + model_ens <- models_ensemble[model_idx] + filename <- paste0( + spath, "/", diag_base, "_", exp, "_", model_exp, "_", + model_ens, "_", toString(year1), "_", toString(year2), + "_", season, "_tseries_", label, ".nc" + ) + return(filename) +} + +getfilename_figure <- function(spath, var, year1, year2, model_idx, season, + syears, sregion, label, map, output_file_type, + multimodel = F) { + if (nchar(var) > 10) { + var <- substr(var, 1, 10) + } + exp <- models_name[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + model_exp <- models_experiment[model_idx] + model_ens <- models_ensemble[model_idx] + model_tag <- paste(exp, model_exp, model_ens, sep = "_") + if (multimodel) { + model_tag <- "multimodel" + } + figname <- paste0(spath, "/", paste(var, model_tag, + paste(year1, year2, sep = "-"), season, syears, sregion, + map, + sep = "_" + ), ".", output_file_type) + if (!(label == "") & !(label == F)) { + figname <- paste0(spath, "/", paste(var, model_tag, + paste(year1, year2, sep = "-"), season, syears, sregion, + label, map, + sep = "_" + ), ".", output_file_type) + } + return(figname) +} + + +########################################################## +#-----------------Basic functions------------------------# +########################################################## + +# normalize a time series +standardize <- function(timeseries) { + out <- (timeseries - mean(timeseries, na.rm = T)) / sd(timeseries, na.rm = T) + return(out) +} + + +# detect ics ipsilon lat-lon +whicher <- function(axis, number) { + out <- which.min(abs(axis - number)) + return(out) +} + + +# area of longitude/latitude rectangle +area_lonlat <- function(lon1, lon2, lat1, lat2) { + R <- 6378 + return(2 * pi * R ^ 2 * abs(sin(lat1 
/ 180. * pi) - sin(lat2 / 180. * pi))
+    * abs(lon1 - lon2) / 360)
+}
+
+
+# produce a 2d matrix of area size for given longitude/latitude grid points
+area_size <- function(ics, ipsilon, resolution = NA, norm = F) {
+  if (is.na(resolution) & (length(ics) == 1) & (length(ipsilon) == 1)) {
+    stop("Provide either resolution or two adjacent elements")
+  }
+  if (is.na(resolution) & (length(ics) != 1)) {
+    resolution <- ics[2] - ics[1]
+  }
+  field <- array(NA, dim = c(length(ics), length(ipsilon)))
+  for (j in 1:length(ipsilon)) {
+    field[, j] <- area_lonlat(
+      0, resolution, ipsilon[j] - 0.5 * resolution,
+      ipsilon[j] + 0.5 * resolution
+    )
+  }
+  if (norm) {
+    field <- field / sum(field)
+  }
+
+  return(field)
+}
+
+
+
+# produce a 2d matrix of area weights
+area_weight <- function(ics, ipsilon, root = T, norm = F) {
+  field <- array(NA, dim = c(length(ics), length(ipsilon)))
+  if (root == T) {
+    for (j in 1:length(ipsilon)) {
+      field[, j] <- sqrt(cos(pi / 180 * ipsilon[j]))
+    }
+  }
+
+  if (root == F) {
+    for (j in 1:length(ipsilon)) {
+      field[, j] <- cos(pi / 180 * ipsilon[j])
+    }
+  }
+  if (norm) {
+    field <- field / mean(field)
+  }
+  return(field)
+}
+
+# normalize a 2D or 3D field by a 2d matrix of area weights
+area_weight_norm <- function(ics, ipsilon, field, root = T, norm = F) {
+  timedim <- dim(field)[length(dim(field))]
+  weights <- replicate(timedim, area_weight(ics, ipsilon,
+    root = root,
+    norm = norm
+  ))
+  field <- field * weights
+  return(field)
+}
+
+##########################################################
+#--------------Time Based functions----------------------#
+##########################################################
+
+# check the number of days for each month
+number_days_month <- function(datas) {
+
+  # evaluate the number of days in a defined month of a year
+  datas <- as.Date(datas)
+  m <- format(datas, format = "%m")
+  while (format(datas, format = "%m") == m) {
+    datas <- datas + 1
+  }
+  return(as.integer(format(datas - 1, format = "%d")))
+}
+
+
+# convert a season charname to month numbers
+season2timeseason <- function(season) {
+  if (season == "ALL") {
+    timeseason <- 1:12
+  }
+  if (season == "JJA") {
+    timeseason <- 6:8
+  }
+  if (season == "DJF") {
+    timeseason <- c(1, 2, 12)
+  }
+  if (season == "MAM") {
+    timeseason <- 3:5
+  }
+  if (season == "SON") {
+    timeseason <- 9:11
+  }
+  if (!exists("timeseason")) {
+    stop("wrong season selected!")
+  }
+  return(timeseason)
+}
+
+# leap year true/false function
+is_leapyear <- function(year) {
+  return( ( (year %% 4 == 0) & (year %% 100 != 0)) | (year %% 400 == 0))
+}
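+
+# Quick usage sketch for the two helpers above (illustrative, never
+# executed):
+if (FALSE) {
+  season2timeseason("DJF") # returns c(1, 2, 12)
+  is_leapyear(2000) # TRUE: divisible by 400
+  is_leapyear(1900) # FALSE: divisible by 100 but not by 400
+}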
+
+power_date_new <- function(datas) {
+  whichdays <- as.numeric(format(datas, "%m"))
+  # create a "season" for continuous time
+  seas <- whichdays * 1
+  ss <- 1
+  for (i in 1:(length(whichdays) - 1)) {
+    if (diff(whichdays)[i] > 1) {
+      ss <- ss + 1
+    }
+    seas[i + 1] <- ss
+  }
+  etime <- list(
+    day = as.numeric(format(datas, "%d")),
+    month = as.numeric(format(datas, "%m")),
+    year = as.numeric(format(datas, "%Y")),
+    data = datas, season = seas
+  )
+  return(etime)
+}
+
+power_date <- function(season, ANNO1, ANNO2) {
+  # evaluate the number of days to analyze in order
+  # to create arrays of the needed dimensions
+
+  # create a continuous calendar
+  p1 <- as.Date(paste0(ANNO1, "-01-01"))
+  p2 <- as.Date(paste0(ANNO2, "-12-31"))
+  datas <- seq(p1, p2, by = "day")
+
+  # select only days corresponding to the needed season
+  timeseason <- season2timeseason(season)
+  month <- as.numeric(format(datas, "%m"))
+  whichdays <- which(month %in% timeseason)
+
+  # create a "season" for continuous time, used by persistence tracking
+  seas <- whichdays * 1
+  ss <- 1
+  for (i in 1:(length(whichdays) - 1)) {
+    if (diff(whichdays)[i] > 1) {
+      ss <- ss + 1
+    }
+    seas[i + 1] <- ss
+  }
+  # produce a final timeseries of dates
+  datas <- datas[whichdays]
+  dataline <- list(
+    day = as.numeric(format(datas, "%d")),
+    month = as.numeric(format(datas, "%m")),
+    year = as.numeric(format(datas, "%Y")),
+    season = seas, data = datas
+  )
+  print("Time Array Built")
+  print(paste("Length:", length(seas), "days for", season, "season"))
+  print(paste("From", datas[1], "to", datas[length(seas)]))
+
+  return(dataline)
+}
+
+power_date_no_leap <- function(season, ANNO1, ANNO2) {
+  # apply to a power_date object to clean out elements for leap years
+  e <- power_date(season, ANNO1, ANNO2)
+  leap.days <- which(e$month == 2 & e$day == 29)
+  dataline.leap <- list(
+    day = e$day[-leap.days], month = e$month[-leap.days],
+    year = e$year[-leap.days],
+    season = e$season[-leap.days],
+    data = e$data[-leap.days]
+  )
+  print("FIXED FOR NO LEAP CALENDAR: Time Array Built")
+  print(paste(
+    "Length:", length(dataline.leap$season), "days for",
+    season, "season"
+  ))
+  print(paste(
+    "From", dataline.leap$data[1], "to",
+    dataline.leap$data[length(dataline.leap$season)]
+  ))
+  return(dataline.leap)
+}
+
+power_date_30day <- function(season, ANNO1, ANNO2) {
+  # build a simplified power_date object for a 30-day calendar
+  nmonths <- length(season2timeseason(season))
+  nyears <- as.numeric(ANNO2) - as.numeric(ANNO1) + 1
+  dd <- rep(seq(1, 30), nmonths * nyears)
+  mm <- rep(rep(season2timeseason(season), each = 30), nyears)
+  # create a "season" for continuous time, used by persistence tracking
+  seas <- mm * 0 + 1
+  ss <- 1
+  for (i in 1:(length(mm) - 1)) {
+    if (diff(mm)[i] > 1) {
+      ss <- ss + 1
+    }
+    seas[i + 1] <- ss
+  }
+  dataline_30day <- list(day = dd, month = mm, season = seas)
+  print("SIMPLIFIED CALENDAR FOR 30-day CALENDAR: Time Array Built")
+  print(paste(
+    "Length:", length(dataline_30day$season), "days for",
+    season, "season"
+  ))
+  return(dataline_30day)
+}
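+
+# Usage sketch for the calendar builders above (illustrative, never
+# executed):
+if (FALSE) {
+  cal <- power_date("DJF", 1990, 1999)
+  # each winter gets its own "season" counter, so consecutive-day spells
+  # never bridge the gap between one DJF season and the next
+  table(cal$season)
+}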
+
+calc_region_timeseries <- function(x, y, indata, region, calc_sd = F,
+                                   weighted_mean = T, root = F, norm = T,
+                                   ...) {
+  # This function subsets a lon/lat/time array based on an input
+  # region (lon1, lon2, lat1, lat2) and returns its timeseries.
+  # Area weights are applied if requested. The function can also return
+  # the standard deviation of the averaging elements
+  # (currently excluding weights)
+
+  idimtimedata <- length(dim(indata))
+  dimtimedata <- (dim(indata))[idimtimedata]
+  retx <- which(region[1] <= x & x <= region[2])
+  rety <- which(region[3] <= y & y <= region[4])
+  if (!calc_sd) {
+    print(paste(
+      "Calc.region.timeseries: ",
+      length(retx) * length(rety)
+    ))
+  }
+  if (is.na(retx[1]) | is.na(rety[1])) {
+    print("calc.region.timeseries: no data in selected region. Returning NA.")
+    outdata <- array(dim = dimtimedata)
+  } else {
+    retdata <- indata[retx, rety, , drop = F]
+    if (weighted_mean & !calc_sd) {
+      retdata <- area_weight_norm(x[retx], y[rety], retdata,
+        root = root, norm = norm
+      )
+    }
+    outdata <- apply(retdata, idimtimedata, mean, na.rm = T)
+    if (calc_sd) {
+      outdata <- apply(retdata, idimtimedata, sd, na.rm = T)
+    }
+  }
+  return(outdata)
+}
+
+##################
+#--------Data preprocessing
+#################
+
+#
+# Method to create an ASCII grid file to regrid onto
+# @param idx_dir path of the directory containing
+#        files from which to create the grid
+# Adapted from 20170920-A_maritsandstad
+#
+create_grid <- function(ref_file = "./reffile", path = idx_dir,
+                        out_file = "./gridDef") {
+
+  ## Picking the grid found in the reference file to regrid over
+  if (!file.exists(ref_file)) {
+    ## Picking the grid found in the first file to regrid over
+    ref_file <- list.files(path, pattern = "*.nc", full.names = TRUE)[1]
+  }
+  cdo("griddes", input = ref_file, stdout = out_file)
+}
+
+#
+# Method to create a land-sea mask on a suitable grid
+# @param regrid name w/path of the gridfile to use
+#        to put the land-sea mask on
+# Adapted from 20170920-A_maritsandstad
+#
+create_landseamask <- function(regrid = "./gridDef", ref_file = ref_file,
+    loc = "./", regridded_topo = paste0("./", "regridded_topo.nc"),
+    landmask = "./landSeaMask.nc", topo_only = F) {
+
+  # Test if the gridfile exists,
+  # otherwise call the function to generate one
+  if (!file.exists(regrid)) {
+    if (length(ref_file) == 0) {
+      stop("Unable to access grid file")
+    }
+    create_grid(ref_file = ref_file, out_file = regrid)
+  }
+
+  ## Making the topographic map
+  ftopo <- cdo("topo", options = "-f nc")
+
+  ## Regridding the topographic map to the chosen grid
+  cdo("remapcon2", args = paste0("'", regrid, "'"),
+    input = ftopo, output = regridded_topo)
+
+  if (!topo_only) {
+
+    # Set above-sea-level gridpoints to missing
+    ftopomiss1 <- cdo("setrtomiss", args = "0,9000", input = regridded_topo)
+
+    # Set above-sea-level gridpoints to 1
+    ftopo1pos <- cdo("setmisstoc", args = "1", input = ftopomiss1)
+
+    # Set below-sea-level gridpoints to missing
+    cdo("setrtomiss", args = "-9000,0", input = ftopo1pos, output = landmask)
+    unlink(c(ftopomiss1, ftopo1pos))
+  }
+  unlink(ftopo)
+}
+
+##
+## Read the sea-land elevation mask and mask data
+##
+apply_elevation_mask <- function(rfield, relevation, el_threshold,
+                                 reverse = F) {
+  if (!reverse) {
+    if (el_threshold >= 0) {
+      # mountains
+      relevation[relevation < el_threshold] <- NA
+      relevation <- relevation * 0 + 1
+    } else {
+      # oceans
+      relevation[relevation > el_threshold] <- NA
+      relevation <- relevation * 0 + 1
+    }
+  } else {
+    if (el_threshold >= 0) {
+      # mountains
+      relevation[relevation > el_threshold] <- NA
+      relevation <- relevation * 0 + 1
+    } else {
+      # oceans
+      relevation[relevation < el_threshold] <- NA
+      relevation <- relevation * 0 + 1
+    }
+  }
+  itimedim <- dim(rfield)[length(dim(rfield))]
+  myear_relevation <- replicate(itimedim, relevation)
+  if (any(dim(myear_relevation) != dim(rfield))) {
+    stop("STOP - dimension of topography does not match
+dimension of field: remove old topography files if needed")
+  }
+  rfield <- rfield * myear_relevation
+
+  return(rfield)
+}
+
+
+
+##########################################################
+#-------------------Data analysis------------------------#
+##########################################################
+
+
+###################################
+# Function: Annual mean spell length
+#
+# About: This 
function calculates the annual mean spell length of a given +# field (lon x lat x time) reporting 1's for active parameter and +# 0's for non active parameter. In order to reduce memory usage only +# the annual mean spell length is returned. E.g. calculation of dry +# spell length needs input fields with 1 for dry days, 0 for wet ones. +# +# Author: E. Arnone ( ISAC-CNR, Torino) +# Last update: 14 June 2017 + +mean_spell_length <- function(m) { + # Setup useful arrays and parameters + nlon <- dim(m)[1] + nlat <- dim(m)[2] + ntime <- dim(m)[3] + mean_spell_length_year <- m[, , 1] * NA + + # Loop through grid points + for (ilon in 1:nlon) { + for (ilat in 1:nlat) { + spell_point <- (m[ilon, ilat, ]) + # Look for variations along time axis + diff_spell_point <- spell_point[2:ntime] - spell_point[1:ntime - 1] + # select when variation is positive (starting spell) + spell_start <- which(diff_spell_point == 1) + 1 + if (!is.na(spell_point[1])) { + if (spell_point[1] == 1) { + spell_start <- c(1, spell_start) + } + } # if first day is active add it to list + # select when variation is negative (ending spell) + spell_stop <- which(diff_spell_point == -1) + if (!is.na(spell_point[ntime])) { + if (spell_point[ntime] == 1) { + spell_stop <- c(spell_stop, ntime) + } + } # if last day is active add it to list + # difference between stop and start gives spell length + spell_length <- spell_stop - spell_start + 1 + # assign annual mean spell length to output array + mean_spell_length_year[ilon, ilat] <- mean(spell_length, na.rm = T) + } + } + return(mean_spell_length_year) +} + +get_elevation <- function(filename = NULL, elev_range = c(-1000, 10000), + mask = F, elev_plot = F) { + # get elevation data from a high resolution topography file. + + funlink <- F + if (is.null(filename)) { + filename <- cdo("topo", options = "-f nc") + funlink <- T + } + elevation <- ncdf_opener(filename, + namevar = "elevation", + namelon = "longitude", namelat = "latitude", rotate = "no" + ) + lon_el <- ncdf_opener(filename, namevar = "longitude", rotate = "no") + lat_el <- ncdf_opener(filename, namevar = "latitude", rotate = "no") + elevation[which(elevation < elev_range[1] | elevation > elev_range[2])] <- NA + if (mask) { + elevation[which(elevation >= elev_range[1] & + elevation <= elev_range[2])] <- 1 + } + if (elev_plot) { + filled_contour3(lon_el, lat_el, elevation, color.palette = rainbow) + map("world", + regions = ".", interior = F, exact = F, boundary = T, add = T, + col = "gray", lwd = 1.5 + ) + } + el_list <- list(elevation = elevation, lon_el = lon_el, lat_el = lat_el) + if (funlink) unlink(filename) + return(el_list) +} + + +########################################################## +#--------------NetCDF loading function-------------------# +########################################################## + +# universal function to open a single var 3D (x,y,time) ncdf files: it includes +# rotation, y-axis filpping, time selection and CDO-based interpolation +# to replace both ncdf.opener.time and ncdf.opener (deprecated and removed) +# automatically rotate matrix to place greenwich at the center (flag "rotate") +# and flip the latitudes in order to have increasing +# if required (flag "interp2grid") additional interpolation with CDO is used. +# "grid" can be used to specify the target grid name +# time selection based on package PCICt must be specifed with both "tmonths" +# and "tyears" flags. 
It returns a list including its own dimensions +ncdf_opener_universal <- function(namefile, namevar = NULL, namelon = NULL, + namelat = NULL, tmonths = NULL, + tyears = NULL, rotate = "full", + interp2grid = F, grid = "r144x73", + remap_method = "remapcon2", + exportlonlat = TRUE, verbose = F) { + + # load package + require(ncdf4) + + # verbose-only printing function + printv <- function(value) { + if (verbose) { + print(value) + } + } + + # check if timeflag is activated or full file must be loaded + if (is.null(tyears) | is.null(tmonths)) { + timeflag <- FALSE + printv("No time and months specified, loading all the data") + } else { + timeflag <- TRUE + printv("tyears and tmonths are set!") + require(PCICt) + } + + if (rotate == "full") { + rot <- T + move1 <- move2 <- 1 / 2 + } # 180 degrees rotation of longitude + if (rotate == "half") { + rot <- T + move1 <- 1 / 4 + move2 <- 3 / 4 + } # 90 degree rotation (useful for TM90) + if (rotate == "no") { + rot <- F + } # keep as it is + + # interpolation made with CDO: second order conservative remapping + if (interp2grid) { + print(paste("Remapping with CDO on", grid, "grid")) + if (is.null(namevar)) { + namefile <- cdo(remap_method, args = paste0("'", grid, "'"), + input = namefile) + } else { + selectf <- cdo("selvar", args = namevar, input = namefile) + gridf <- tempfile() + cdo("griddes", input = grid, stdout = gridf) + namefile <- cdo(remap_method, args = gridf, input = selectf) + unlink(c(selectf, gridf)) + } + } + + # define rotate function (faster than with apply) + rotation <- function(line) { + vettore <- line + dims <- length(dim(vettore)) + # for longitudes + if (dims == 1) { + ll <- length(line) + line[(ll * move1):ll] <- vettore[1:(ll * move2 + 1)] + line[1:(ll * move1 - 1)] <- vettore[(ll * move2 + 2):ll] - 360 + } + # for x,y data + if (dims == 2) { + ll <- length(line[, 1]) + line[(ll * move1):ll, ] <- vettore[1:(ll * move2 + 1), ] + line[1:(ll * move1 - 1), ] <- vettore[(ll * move2 + 2):ll, ] + } + # for x,y,t data + if (dims == 3) { + ll <- length(line[, 1, 1]) + line[(ll * move1):ll, , ] <- vettore[1:(ll * move2 + 1), , ] + line[1:(ll * move1 - 1), , ] <- vettore[(ll * move2 + 2):ll, , ] + } + return(line) + } + + # define flip function ('cos rev/apply is not working) + flipper <- function(field) { + dims <- length(dim(field)) + if (dims == 2) { + ll <- length(field[1, ]) + field <- field[, ll:1] + } # for x,y data + if (dims == 3) { + ll <- length(field[1, , 1]) + field <- field[, ll:1, ] + } # for x,y,t data + return(field) + } + + # opening file: getting variable (if namevar is given, that variable + # is extracted) + printv(paste("opening file:", namefile)) + a <- nc_open(namefile) + + # if no name provided load the only variable available + if (is.null(namevar)) { + namevar <- names(a$var) + if (length(namevar) > 1) { + print(namevar) + stop("More than one var in the files, please select it +with namevar=yourvar") + } + } + + # load axis: updated version, looking for dimension directly stored + # inside the variable + naxis <- unlist(lapply(a$var[[namevar]]$dim, function(x) x["name"])) + for (axis in naxis) { + assign(axis, ncvar_get(a, axis)) + printv(paste(axis, ":", length(get(axis)), "records")) + } + + if (timeflag) { + printv("selecting years and months") + + # based on preprocessing of CDO time format: get calendar type and + # use PCICt package for irregular data + caldata <- ncatt_get(a, "time", "calendar")$value + timeline <- as.PCICt(as.character(time), format = "%Y%m%d", cal = caldata) + + # break if the 
calendar has not been recognized
+    if (any(is.na(timeline))) {
+      stop("Calendar from NetCDF is unsupported or not present. Stopping!!!")
+    }
+
+    # break if the data requested is not there
+    lastday_base <- paste0(max(tyears), "-", max(tmonths), "-28")
+    maxdays <- number_days_month(lastday_base)
+    if (caldata == "360_day") {
+      maxdays <- 30
+    }
+    # uses number_days_month, which loops to get the month change
+    lastday <- as.PCICt(paste0(
+      max(tyears), "-", max(tmonths), "-",
+      maxdays
+    ),
+    cal = caldata,
+    format = "%Y-%m-%d"
+    )
+    firstday <- as.PCICt(paste0(min(tyears), "-", min(tmonths), "-01"),
+      cal = caldata, format = "%Y-%m-%d"
+    )
+    if (max(timeline) < lastday | min(timeline) > firstday) {
+      stop("You requested a time interval that is not present in the NetCDF")
+    }
+  }
+
+  # time selection and variable loading
+  printv("loading full field...")
+  field <- ncvar_get(a, namevar)
+
+  if (timeflag) {
+
+    # select data we need
+    select <- which(as.numeric(format(timeline, "%Y")) %in% tyears &
+      as.numeric(format(timeline, "%m")) %in% tmonths)
+    field <- field[, , select]
+    time <- timeline[select]
+
+    printv(paste("This is a", caldata, "calendar"))
+    printv(paste(
+      length(time), "days selected from", time[1],
+      "to", time[length(time)]
+    ))
+
+    printv(paste("Months that have been loaded are:"))
+    printv(unique(format(time, "%Y-%m")))
+  }
+
+  # check for dimensions (presence or not of time dimension)
+  dimensions <- length(dim(field))
+
+  # if dimensions are multiple, get longitude, latitude
+  # if needed, rotate and flip the array
+  xlist <- c("lon", "Lon", "longitude", "Longitude")
+  ylist <- c("lat", "Lat", "latitude", "Latitude")
+  if (dimensions > 1) {
+    # assign ics and ipsilon
+    if (is.null(namelon)) {
+      if (any(xlist %in% naxis)) {
+        ics <- get(naxis[naxis %in% xlist], a$dim)$vals
+      } else {
+        print("WARNING: No lon found")
+        ics <- NA
+      }
+    } else {
+      ics <- ncvar_get(a, namelon)
+    }
+    if (is.null(namelat)) {
+      if (any(ylist %in% naxis)) {
+        ipsilon <- get(naxis[naxis %in% ylist], a$dim)$vals
+      } else {
+        print("WARNING: No lat found")
+        ipsilon <- NA
+      }
+    } else {
+      ipsilon <- ncvar_get(a, namelat)
+    }
+
+    # longitude rotation around Greenwich
+    if (rot) {
+      printv("rotating...")
+      ics <- rotation(ics)
+      field <- rotation(field)
+    }
+    if (ipsilon[2] < ipsilon[1] & length(ipsilon) > 1) {
+      if (length(ics) > 1) {
+        print("flipping...")
+        ipsilon <- sort(ipsilon)
+        field <- flipper(field)
+      }
+    }
+
+    # exporting variables to the main program
+    if (exportlonlat) {
+      assign("ics", ics, envir = .GlobalEnv)
+      assign("ipsilon", ipsilon, envir = .GlobalEnv)
+    }
+    assign(naxis[naxis %in% c(xlist, namelon)], ics)
+    assign(naxis[naxis %in% c(ylist, namelat)], ipsilon)
+  }
+
+  if (dimensions > 3) {
+    stop("This file has more than 3 dimensions, which is not supported")
+  }
+
+  # close connection
+  nc_close(a)
+
+  # remove interpolated file
+  if (interp2grid) {
+    unlink(namefile)
+  }
+
+  # showing array properties
+  printv(paste(dim(field)))
+  if (timeflag) {
+    printv(paste("From", time[1], "to", time[length(time)]))
+  }
+
+  # returning file list
+  return(mget(c("field", naxis)))
+}
+
+# ncdf_opener is a simplified wrapper for ncdf_opener_universal which returns
+# only the field, ignoring the rest of the list
+ncdf_opener <- function(namefile, namevar = NULL, namelon = NULL,
+                        namelat = NULL, tmonths = NULL, tyears = NULL,
+                        rotate = "full", interp2grid = F, grid = "r144x73",
+                        remap_method = "remapcon2", exportlonlat = T) {
+  field <- ncdf_opener_universal(namefile, namevar, namelon, namelat, tmonths,
+    tyears, rotate, interp2grid, grid, remap_method,
+    exportlonlat = exportlonlat
+  )
+  return(field$field)
+}
+
+
+# function to open ncdf files (much more refined, with CDO-based interpolation)
+ncdf_opener_time <- function(namefile, namevar = NULL, namelon = NULL,
+                             namelat = NULL, tmonths = NULL, tyears = NULL,
+                             ics = ics, ipsilon = ipsilon, rotate = "full",
+                             interp2grid = F, grid = "r144x73",
+                             remap_method = "remapcon2") {
+  # Function to open NetCDF files with the ncdf4 library.
+  # Performs the time selection of the requested months and years,
+  # automatically rotates the matrix to place Greenwich at the center
+  # (flag "rotate") and flips the latitudes so that they are increasing.
+  # If required (flag "interp2grid"), an additional interpolation with CDO
+  # can be applied; "grid" can be used to specify the target grid name.
+  require(ncdf4)
+  require(PCICt)
+
+  if (is.null(tyears) | is.null(tmonths)) {
+    stop("Please specify both months and years to load")
+  }
+
+  if (rotate == "full") {
+    rot <- T
+    move1 <- move2 <- 1 / 2
+  } # 180 degrees rotation of longitude
+  if (rotate == "half") {
+    rot <- T
+    move1 <- 1 / 4
+    move2 <- 3 / 4
+  } # 90 degree rotation (useful for TM90)
+  if (rotate == "no") {
+    rot <- F
+  } # keep as it is, breaking at Greenwich
+
+  # interpolation made with CDO: second order conservative remapping
+  if (interp2grid) {
+    print(paste("Remapping with CDO on", grid, "grid"))
+    namefile <- cdo(remap_method, args = paste0("'", grid, "'"),
+                    input = namefile)
+  }
+
+  # define rotate function (faster than with apply)
+  rotation <- function(line) {
+    vettore <- line
+    dims <- length(dim(vettore))
+    if (dims == 1) {
+      # for longitudes
+      ll <- length(line)
+      line[(ll * move1):ll] <- vettore[1:(ll * move2 + 1)]
+      line[1:(ll * move1 - 1)] <- vettore[(ll * move2 + 2):ll] - 360
+    }
+    if (dims == 2) {
+      # for x,y data
+      ll <- length(line[, 1])
+      line[(ll * move1):ll, ] <- vettore[1:(ll * move2 + 1), ]
+      line[1:(ll * move1 - 1), ] <- vettore[(ll * move2 + 2):ll, ]
+    }
+    if (dims == 3) {
+      # for x,y,t data
+      ll <- length(line[, 1, 1])
+      line[(ll * move1):ll, , ] <- vettore[1:(ll * move2 + 1), , ]
+      line[1:(ll * move1 - 1), , ] <- vettore[(ll * move2 + 2):ll, , ]
+    }
+    return(line)
+  }
+
+  # define flip function (because rev/apply does not work here)
+  flipper <- function(field) {
+    dims <- length(dim(field))
+    if (dims == 2) {
+      ll <- length(field[1, ])
+      field <- field[, ll:1]
+    } # for x,y data
+    if (dims == 3) {
+      ll <- length(field[1, , 1])
+      field <- field[, ll:1, ]
+    } # for x,y,t data
+    return(field)
+  }
+
+
+  # opening file: getting variable (if namevar is given,
+  # that variable is extracted)
+  print(paste("opening file:", namefile))
+  a <- nc_open(namefile)
+
+  # load axis: old version, loading the variable dimensions with a max of
+  # 4 dimensions. It showed some issues with the time_bnds variable appearing
+  # in some NetCDF file. naxis=names(a$dim)[1:min(c(4,length(a$dim)))]
+  # load axis: updated version, looking for dimension directly stored inside
+  # the variable
+  naxis <- unlist(lapply(a$var[[namevar]]$dim, function(x) x["name"]))
+  for (axis in naxis) {
+    print(axis)
+    assign(axis, ncvar_get(a, axis))
+  }
+  # based on preprocessing of CDO time format: get calendar type and
+  # use PCICt package for irregular data
+  caldata <- ncatt_get(a, "time", "calendar")$value
+  timeline <- as.PCICt(as.character(time), format = "%Y%m%d", cal = caldata)
+  str(timeline)
+
+  # break if the calendar has not been recognized
+  if (any(is.na(timeline))) {
+    stop("Calendar from NetCDF is unsupported or not present. Stopping!!!")
+  }
+
+  # break if the data requested is not there
+  lastday_base <- paste0(max(tyears), "-", max(tmonths), "-28")
+  # uses number_days_month, which loops to get the month change
+  lastday <- as.PCICt(paste0(
+    max(tyears), "-", max(tmonths), "-",
+    number_days_month(lastday_base)
+  ),
+  cal = caldata, format = "%Y-%m-%d"
+  )
+  firstday <- as.PCICt(paste0(min(tyears), "-", min(tmonths), "-01"),
+    cal = caldata, format = "%Y-%m-%d"
+  )
+  if (max(timeline) < lastday | min(timeline) > firstday) {
+    stop("You requested a time interval that is not present in the NetCDF")
+  }
+
+  # time selection and variable loading
+  # if no name provided load the only variable available
+  if (is.null(namevar)) {
+    namevar <- names(a$var)
+  }
+  field <- ncvar_get(a, namevar)
+
+  # select data we need
+  select <- which(as.numeric(format(timeline, "%Y")) %in% tyears &
+    as.numeric(format(timeline, "%m")) %in% tmonths)
+
+  field <- field[, , select]
+  time <- timeline[select]
+
+  # check for dimensions (presence or not of time dimension)
+  dimensions <- length(dim(field))
+
+  # if dimensions are multiple, get longitude, latitude
+  # if needed, rotate and flip the array
+  # (candidate axis names are defined up front so that they are also
+  # available for the assign() calls below when namelon/namelat are given)
+  xlist <- c("lon", "Lon", "longitude", "Longitude")
+  ylist <- c("lat", "Lat", "latitude", "Latitude")
+  if (dimensions > 1) {
+    # assign ics and ipsilon
+    if (is.null(namelon)) {
+      if (any(xlist %in% naxis)) {
+        ics <- get(naxis[(naxis %in% xlist)], a$dim)$vals
+      } else {
+        stop("No lon found")
+      }
+    } else {
+      ics <- ncvar_get(a, namelon)
+    }
+    if (is.null(namelat)) {
+      if (any(ylist %in% naxis)) {
+        ipsilon <- get(naxis[(naxis %in% ylist)], a$dim)$vals
+      } else {
+        stop("No lat found")
+      }
+    } else {
+      ipsilon <- ncvar_get(a, namelat)
+    }
+
+    print("flipping and rotating")
+    # longitude rotation around Greenwich
+    if (rot) {
+      ics <- rotation(ics)
+      field <- rotation(field)
+    }
+    if (ipsilon[2] < ipsilon[1] & length(ipsilon) > 1) {
+      if (length(ics) > 1) {
+        ipsilon <- sort(ipsilon)
+        field <- flipper(field)
+      }
+    }
+
+    # exporting variables to the main program
+    assign("ics", ics, envir = .GlobalEnv)
+    assign("ipsilon", ipsilon, envir = .GlobalEnv)
+    assign(naxis[naxis %in% xlist], ics)
+    assign(naxis[naxis %in% ylist], ipsilon)
+  }
+
+
+  if (dimensions > 3) {
+    stop("This file has more than 3 dimensions, which is not supported")
+  }
+
+  # close connection
+  nc_close(a)
+
+  # remove interpolated file
+  if (interp2grid) {
+    unlink(namefile)
+  }
+
+  # showing array properties
+  print(paste(dim(field)))
+  print(paste("From", time[1], "to", time[length(time)]))
+
+  return(mget(c("field", naxis)))
+}
+
+
+##########################################################
+#--------------Plotting functions------------------------#
+##########################################################
+
+
+# Figure functions
+scale_figure <- function(plot_type, diag_script_cfg,
+                         nfields, npancol, npanrow) {
+  source(diag_script_cfg)
+  if (plot_type == 1 || plot_type == 11) {
+    npancol <- 1
+    npanrow <- 1
+  }
+  if (plot_type == 2) {
+    npancol <- 1
+    npanrow <- 3
+  }
+  if (plot_type == 3) {
+    npancol <- 3
+    npanrow <- nfields
+  }
+  npanels <- npancol * npanrow
+  if (npancol > 1) {
+    png_width <- png_width_multi * npancol
+    pdf_width <- pdf_width_multi * npancol
+    x11_width <- x11_width_multi * npancol
+  }
+  png_width <- png_width * figure_rel_width[plot_type]
+  pdf_width <- pdf_width * figure_rel_width[plot_type]
+  x11_width <- x11_width * figure_rel_width[plot_type]
+
+  figure_aspect_ratio[plot_type] <- (figure_aspect_ratio[plot_type]
+    * npancol / npanrow)
+
+  plot_size <-
c(png_width, png_width / figure_aspect_ratio[plot_type]) + if (tolower(output_file_type) == "pdf") { + plot_size[1] <- pdf_width + plot_size[2] <- pdf_width / figure_aspect_ratio[plot_type] + } else if ( (tolower(output_file_type) == "eps") | + (tolower(output_file_type) == "epsi") | + (tolower(output_file_type) == "ps") ) { + plot_size[1] <- pdf_width + plot_size[2] <- pdf_width / figure_aspect_ratio[plot_type] + } else if (tolower(output_file_type) == "x11") { + plot_size[1] <- x11_width + plot_size[2] <- x11_width / figure_aspect_ratio[plot_type] + } + print(plot_size) + return(plot_size) +} + +graphics_startup <- function(figname, output_file_type, plot_size) { + source(diag_script_cfg) + # choose output format for figure - by JvH + if (tolower(output_file_type) == "png") { + png(filename = figname, width = plot_size[1], height = plot_size[2]) + } else if (tolower(output_file_type) == "pdf") { + pdf(file = figname, width = plot_size[1], + height = plot_size[2], onefile = T) + } else if ( (tolower(output_file_type) == "eps") | + (tolower(output_file_type) == "epsi") | + (tolower(output_file_type) == "ps") ) { + setEPS( + width = plot_size[1], height = plot_size[2], + onefile = T, paper = "special" + ) + postscript(figname) + } else if (tolower(output_file_type) == "x11") { + x11(width = plot_size[1], height = plot_size[2]) + } + return() +} + +graphics_close <- function(figname) { + print(figname) + dev.off() + return() +} + +# extensive filled.contour function +filled_contour3 <- + function(x = seq(0, 1, length.out = nrow(z)), + y = seq(0, 1, length.out = ncol(z)), z, + xlim = range(x, finite = TRUE), + ylim = range(y, finite = TRUE), zlim = range(z, finite = TRUE), + levels = pretty(zlim, nlevels), nlevels = 20, + color.palette = cm.colors, col = color.palette(length(levels) - 1), + extend = TRUE, plot.title, plot.axes, + key.title, key.axes, asp = NA, xaxs = "i", yaxs = "i", las = 1, + axes = TRUE, frame.plot = axes, mar, ...) { + # modification by Ian Taylor of the filled.contour function + # to remove the key and facilitate overplotting with contour() + # further modified by Carey McGilliard and Bridget Ferris + # to allow multiple plots on one page + # modification to allow plot outside boundaries + + if (missing(z)) { + if (!missing(x)) { + if (is.list(x)) { + z <- x$z + y <- x$y + x <- x$x + } + else { + z <- x + x <- seq.int(0, 1, length.out = nrow(z)) + } + } + else { + stop("no 'z' matrix specified") + } + } + else if (is.list(x)) { + y <- x$y + x <- x$x + } + if (any(diff(x) <= 0) || any(diff(y) <= 0)) { + stop("increasing 'x' and 'y' values expected") + } + + # trim extremes for nicer plots + if (extend) { + z[z < min(levels)] <- min(levels) + z[z > max(levels)] <- max(levels) + } + + plot.new() + plot.window(xlim, ylim, "", xaxs = xaxs, yaxs = yaxs, asp = asp) + if (!is.matrix(z) || nrow(z) <= 1 || ncol(z) <= 1) { + stop("no proper 'z' matrix specified") + } + if (!is.double(z)) { + storage.mode(z) <- "double" + } + .filled.contour(as.double(x), as.double(y), z, as.double(levels), + col = col + ) + if (missing(plot.axes)) { + if (axes) { + title(main = "", xlab = "", ylab = "") + Axis(x, side = 1, ...) + Axis(y, side = 2, ...) + } + } + else { + plot.axes + } + if (frame.plot) { + box() + } + if (missing(plot.title)) { + title(...) 
+    } else {
+      plot.title
+    }
+    invisible()
+  }
+
+image_scale3 <- function(z, levels, color.palette = heat.colors,
+                         colorbar.label = "image.scale", extend = T,
+                         line.label = 2, line.colorbar = 0, cex.label = 1,
+                         cex.colorbar = 1, colorbar.width = 1,
+                         new_fig_scale = c(-0.07, -0.03, 0.1, -0.1), ...) {
+
+  # save properties from main plotting region
+  old.par <- par(no.readonly = TRUE)
+  mfg.save <- par()$mfg
+  old.fig <- par()$fig
+
+  # defining plotting region with proper scaling
+  xscal <- (old.fig[2] - old.fig[1])
+  yscal <- (old.fig[4] - old.fig[3])
+  lw <- colorbar.width
+  lp <- line.colorbar / 100
+  new.fig <- c(old.fig[2] + new_fig_scale[1] * xscal * lw - lp,
+               old.fig[2] + new_fig_scale[2] * xscal - lp,
+               old.fig[3] + new_fig_scale[3] * yscal,
+               old.fig[4] + new_fig_scale[4] * yscal)
+
+  if (missing(levels)) {
+    levels <- seq(min(z), max(z), , 12)
+  }
+  # fixing color palette
+  col <- color.palette(length(levels) - 1)
+
+  # starting plot
+  par(mar = c(1, 1, 1, 1), fig = new.fig, new = TRUE)
+
+  # creating polygons for legend
+  poly <- vector(mode = "list", length(col))
+  for (i in seq(poly)) {
+    poly[[i]] <- c(levels[i], levels[i + 1], levels[i + 1], levels[i])
+  }
+
+  xlim <- c(0, 1)
+  if (extend) {
+    longer <- 1.5
+    dl <- diff(levels)[1] * longer
+    ylim <- c(min(levels) - dl, max(levels) + dl)
+  } else {
+    ylim <- range(levels)
+  }
+  plot(1, 1,
+    t = "n", ylim = ylim, xlim = xlim, axes = FALSE, xlab = "",
+    ylab = "", xaxs = "i", yaxs = "i", ...
+  )
+  for (i in seq(poly)) {
+    polygon(c(0, 0, 1, 1), poly[[i]], col = col[i], border = NA)
+  }
+  if (extend) {
+    polygon(c(0, 1, 1 / 2), c(levels[1], levels[1], levels[1] - dl),
+      col = col[1], border = NA
+    )
+    polygon(c(0, 1, 1 / 2), c(
+      levels[length(levels)], levels[length(levels)],
+      levels[length(levels)] + dl
+    ),
+    col = col[length(col)], border = NA
+    )
+    polygon(c(0, 0, 1 / 2, 1, 1, 1 / 2), c(
+      levels[1], levels[length(levels)], levels[length(levels)] + dl,
+      levels[length(levels)], levels[1], levels[1] - dl
+    ), border = "black", lwd = 2)
+    ylim0 <- range(levels)
+    prettyspecial <- pretty(ylim0)
+    prettyspecial <- prettyspecial[prettyspecial <= max(ylim0) &
+      prettyspecial >= min(ylim0)]
+    axis(4,
+      las = 1, cex.axis = cex.colorbar, at = prettyspecial,
+      labels = prettyspecial, ...
+    )
+  } else {
+    box()
+    axis(4, las = 1, cex.axis = cex.colorbar, ...)
+  }
+
+  # box, axis and legend label
+  mtext(colorbar.label, line = line.label, side = 4, cex = cex.label, ...)
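+  # Note for callers: when 'levels' is supplied explicitly, 'z' is only used
+  # as a fallback to derive missing levels, so a dummy matrix (e.g. the
+  # built-in 'volcano') can be passed, as the HyInt map routines do below.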
+ + # resetting properties for starting a new plot (mfrow style) + par(old.par) + par(mfg = mfg.save, new = FALSE) + invisible() +} + +cdo <- function(command, args = "", input = "", options = "", output = "", + stdout = "", noout = F) { + if (args != "") args <- paste0(",", args) + if (stdout != "") { + stdout <- paste0(" > '", stdout, "'") + noout <- T + } + if (input[1] != "") { + for (i in 1:length(input)) { + input[i] <- paste0("'", input[i], "'") + } + input <- paste(input, collapse = " ") + } + output0 <- output + if (output != "") { + output <- paste0("'", output, "'") + } else if ( !noout ) { + output <- tempfile() + output0 <- output + } + argstr <- paste0(options, " ", command, args, " ", input, " ", output, + " ", stdout) + print(paste("cdo", argstr)) + ret <- system2("cdo", args = argstr) + if (ret != 0) { + stop(paste("Failed (", ret, "): cdo", argstr)) + } + return(output0) +} diff --git a/esmvaltool/diag_scripts/hyint/hyint_metadata.R b/esmvaltool/diag_scripts/hyint/hyint_metadata.R new file mode 100644 index 0000000000..4ff258c204 --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_metadata.R @@ -0,0 +1,148 @@ +# HyInt metadata + +########################################################## +#----------------Metadata functions------------------------# +########################################################## + +getmetadata_indices <- function(var, sfile) { + ncfile <- nc_open(sfile) + long_name <- (ncatt_get(ncfile, var, "long_name"))$value + units <- (ncatt_get(ncfile, var, "units"))$value + missval <- (ncatt_get(ncfile, var, "missing_value"))$value + if (units == 0) { + units <- "" + } + nc_close(ncfile) + metadata <- list(long_name = long_name, units = units, missval = missval) + + return(metadata) +} + +setmetadata_indices <- function(var) { + longvar <- "" + unit <- "" + + # name of the var + if (var == "pry") { + longvar <- "Annual mean precipitation" + unit <- "days" + } + if (var == "pry_mean") { + longvar <- "Normalization function: Annual mean precipitation averaged over + available XX century data" + unit <- "days" + } + if (var == "pry_mean_sd") { + longvar <- "Normalization function: Standard deviation of the annual mean + precipitation averaged over available XX century data" + unit <- "days" + } + if (var == "pry_norm") { + longvar <- "Normalized annual mean precipitation" + unit <- "" + } + + if (var == "dsl") { + longvar <- "Annual mean dry spell length" + unit <- "days" + } + if (var == "dsl_mean") { + longvar <- "Normalization function: Annual mean dry spell length averaged + over available XX century data" + unit <- "days" + } + if (var == "dsl_mean_sd") { + longvar <- "Normalization function: Standard deviation of the annual mean + dry spell length averaged over available XX century data" + unit <- "days" + } + if (var == "dsl_norm") { + longvar <- "Normalized annual mean dry spell length" + unit <- "" + } + + + if (var == "dsl_tseries") { + longvar <- "dsl timeseries over selected regions" + unit <- "" + } + if (var == "dsl_tseries_sd") { + longvar <- "standard deviation about the mean dsl timeseries" + unit <- "" + } + if (var == "dsl_trend") { + longvar <- "trend coefficients over selected time period" + unit <- "" + } + if (var == "dsl_trend_stat") { + longvar <- "statistics of trend over selected time period" + unit <- "" + } + + if (var == "wsl") { + longvar <- "Annual mean wet spell length" + unit <- "days" + } + if (var == "wsl_mean") { + longvar <- "Normalization function: Annual mean wet spell length averaged + over available XX century 
data" + unit <- "days" + } + if (var == "wsl_mean_sd") { + longvar <- "Normalization function: Standard deviation of the annual mean + wet spell length averaged over available XX century data" + unit <- "days" + } + if (var == "wsl_norm") { + longvar <- "Normalized annual mean wet spell length" + unit <- "" + } + + if (var == "int") { + longvar <- "Annual mean precipitation intensity" + unit <- "mm day-1" + } + if (var == "int_mean") { + longvar <- "Normalization function: Annual mean precipitation intensity + averaged over available XX century data" + unit <- "mm day-1" + } + if (var == "int_mean_sd") { + longvar <- "Normalization function: Standard deviation of the annual mean + precipitation intensity averaged over available XX century data" + unit <- "mm day-1" + } + if (var == "int_norm") { + longvar <- "Normalized annual mean precipitation intensity" + unit <- "" + } + + if (var == "pa") { + longvar <- "Precipitation area: area over which of any given day + precipitation occurs." + unit <- "mm day-1 km2" + } + if (var == "pa_mean") { + longvar <- "Normalization function: Precipitation ara averaged over + available XX century data" + unit <- "mm day-1" + } + if (var == "pa_mean_sd") { + longvar <- "Normalization function: Standard deviation of the + precipitation area averaged over available XX century data" + unit <- "mm day-1" + } + if (var == "pa_norm") { + longvar <- "Normalized precipitation area" + unit <- "" + } + + if (var == "hyint") { + longvar <- "Hydroclimatic intensity index" + unit <- "" + } + + metadata <- list(longvar = longvar, unit = unit) + + return(metadata) +} diff --git a/esmvaltool/diag_scripts/hyint/hyint_parameters.R b/esmvaltool/diag_scripts/hyint/hyint_parameters.R new file mode 100644 index 0000000000..a1499063fa --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_parameters.R @@ -0,0 +1,317 @@ +##################################################################### +# +# HyInt parameters file +# +# About: fixed parameters for ESMValTool HyInt tool. +# Please refer to hyint_recipe for user-friendly settings. +# Only advanced users should change the settings here below. +# +##################################################################### + +run_regridding <- T +force_regridding <- F +run_diagnostic <- T +force_diagnostic <- F +run_timeseries <- T + +if (!exists("write_ncdf")) { + write_netcdf <- T +} +if (!exists("write_plots")) { + write_plots <- T +} +if (!exists("etccdi_preproc")) { + etccdi_preproc <- F +} +if (!exists("etccdi_dir")) { + etccdi_dir <- "" +} + +# basic settings +seasons <- c("ALL") # seasons to be analysed: "ALL", "DJF", ... 
+
+timedimname <- "time"
+
+rotlongitude <- "full" # a) "full" to convert input arrays from 0/360 to
+                       #    -180/180 longitude grid
+                       # b) "no" to leave input data on its original grid
+
+grid_file <- "grid_file" # suffix for grid file
+topography_file <- "topo" # suffix for topography file (needed for filtering
+                          # ocean/land or land elevation)
+
+# Diagnostic options
+# norm_years set in recipe
+external_norm <- F # a) F=use internal data to normalize
+                   # b) list of names of normalization files
+                   #    (one per input data file or one for all)
+                   # c) "HIST" to automatically generate the name of the
+                   #    historical run associated with the model name
+
+external_r95 <- external_norm # a) F=use internal data for r95 threshold
+                              #    over the norm_years period
+                              # b) list of names of files (one per input
+                              #    data file or one for all)
+                              # c) "HIST" to automatically generate the name
+                              #    of the historical experiment associated
+                              #    with the model name
+
+masksealand <- F # T to mask depending on seaLandElevation threshold
+sealandelevation <- 0 # a) 0 land; b) positive value: land above given
+                      # elevation; c) negative value: sea below given depth.
+                      # The topography/bathymetry file is generated with cdo
+                      # from ETOPO data.
+reverse_masksealand <- F # T to reject the selected areas,
+                         # F to keep them
+topography_highres <- ""
+
+# Plotting options
+# Plot_type set in namelist
+map_continents <- -2 # thickness of continents:
+                     # positive values in white, negative values in gray
+map_continents_regions <- F # plot also regional boundaries
+ryearplot <- 2006 # years to be plotted for experiments
+                  # (maps over individual years):
+                  # a) actual years, b) "FIRST" = first year in dataset
+                  # c) "ALL" = all years in dataset. E.g. c(1998,2000)
+rmultiyear_mean <- T # plot multiyear mean (this overrides ryearplot)
+
+
+
+ryearplot_ref <- c("EXP") # year to be plotted for reference dataset: options
+                          # a) "EXP" == same as experiments,
+                          # b) one year only, e.g.
c(1998) +force_ref <- F # set TRUE to force plotting of reference data + # as any other experiment + +# settings for trend calculation +lm_trend <- T # calculate linear trend + +# user defined extra label for figure file name +label <- "" + +# colorbar +add_colorbar <- T # T to add colorbar + +# timeseries options +highreselevation <- F # a) F: neglect; b) value: threshold of minimum elevation + # to be overplotted with contour lines of elevation +highreselevation_only <- F # T to plot only high resolution elevation contours + +# timeseries and trend plotting options +add_legend <- 4 # a) F=no legend; b) n>0 list disposed in n column; + # c) <0 horizontal legend +xy_legend <- c(0.01, 0.98) # position of legend in fraction of plotting panel +tag_legend <- c(T, F, F) # 1=model name, 2=model experiment, 3=model ensemble + # (select one or more) + +# define fields for timeseries calculation and plotting +hyint_list <- c( + "int_norm", "dsl_norm", "wsl_norm", "hyint", "int", "dsl", + "wsl", "pa_norm", "r95_norm" +) +etccdi_yr_list <- c( + "altcddETCCDI", "altcsdiETCCDI", "altcwdETCCDI", + "altwsdiETCCDI", "cddETCCDI", "csdiETCCDI", "cwdETCCDI", + "dtrETCCDI", "fdETCCDI", "gslETCCDI", "idETCCDI", + "prcptotETCCDI", "r10mmETCCDI", "r1mmETCCDI", "r20mmETCCDI", + "r95pETCCDI", "r99pETCCDI", "rx1dayETCCDI", "rx5dayETCCDI", + "sdiiETCCDI", "suETCCDI", "tn10pETCCDI", "tn90pETCCDI", + "tnnETCCDI", "tnxETCCDI", "trETCCDI", "tx10pETCCDI", + "tx90pETCCDI", "txnETCCDI", "txxETCCDI", "wsdiETCCDI" +) +# Select one or more fields to be plotted (with the required order) +# through the selfields key above +etccdi_list_import <- etccdi_yr_list +field_names <- c(hyint_list, etccdi_yr_list) + +# region box matrix (predefined following Giorgi et al. 2011,2014): +# add here further regions and select those needed through iregion +region_names <- c( + "World", "World60", "Tropics", "South-America", "Africa", + "North-America", "India", "Europe", "East-Asia", "Australia" +) +region_codes <- c( + "GL", "GL60", "TR", "SA", "AF", + "NA", "IN", "EU", "EA", "AU" +) +# Select one or more index values through selregions in the standard +# settings above to define regions to be used. Default c(1) == global. + +regions <- matrix(nrow = length(region_names), ncol = 4) +# c(lon1,lon2,lat1,lat2) NOTE: lon(-180/180) +regions[1, ] <- c(-180, 180, -90, 90) +regions[2, ] <- c(-180, 180, -60, 60) +regions[3, ] <- c(-180, 180, -30, 30) +regions[4, ] <- c(-90, -30, -60, 10) +regions[5, ] <- c(-20, 60, -40, 35) +regions[6, ] <- c(-140, -60, 10, 60) +regions[7, ] <- c(60, 100, 0, 35) +regions[8, ] <- c(-10, 30, 35, 70) +regions[9, ] <- c(100, 150, 20, 50) +regions[10, ] <- c(110, 160, -40, -10) + +# define titles and units +title_unit_m <- matrix(nrow = length(field_names), ncol = 4) +title_unit_m[1, ] <- c( + "SDII", "Norm. annual mean INT", + "Norm. annual mean precipitation intensity", "" +) +title_unit_m[2, ] <- c( + "DSL", "Norm. annual mean DSL", + "Norm. annual mean dry spell length", "" +) +title_unit_m[3, ] <- c( + "WSL", "Norm. annual mean WSL", + "Norm. 
annual mean wet spell length", "" +) +title_unit_m[4, ] <- c("HY-INT", "HY-INT", "Hydroclimatic intensity", "") +title_unit_m[5, ] <- c( + "ABS_INT", "Annual mean INT", + "Annual mean precipitation intensity", "(mm/day)" +) +title_unit_m[6, ] <- c( + "ABS_DSL", "Annual mean DSL", + "Annual mean dry spell length", "(days)" +) +title_unit_m[7, ] <- c( + "ABS_WSL", "Annual mean WSL", + "Annual mean wet spell length", "(days)" +) +title_unit_m[8, ] <- c( + "PA", " Normalized precipitation area", + "Norm. precipitation area", "" +) +title_unit_m[9, ] <- c( + "R95", "Norm. heavy precipitation index", + "Norm. % of total precip. above 95% percentile of reference distribution", "" +) + + +# define levels for contour/yrange for abs. values: +# (minlev,maxlev,minlev_diff,maxlev_diff) and nlev +nlev <- 24 +levels_m <- matrix(nrow = length(field_names), ncol = 4) + +levels_m[1, ] <- c(0.9, 1.1, -1.2, 1.2) +levels_m[1, ] <- c(0.5, 1.3, -1.2, 1.2) +levels_m[2, ] <- c(0.9, 1.1, -1.2, 1.2) +levels_m[2, ] <- c(0.6, 1.4, -1.2, 1.2) +levels_m[3, ] <- c(0.9, 1.1, -1.2, 1.2) +levels_m[3, ] <- c(0.7, 1.3, -1.2, 1.2) +levels_m[4, ] <- c(0.5, 1.5, -1.2, 1.2) +levels_m[5, ] <- c(0, 10, -5, 5) +levels_m[6, ] <- c(0, 20, -5, 5) +levels_m[7, ] <- c(0, 10, -3, 3) +levels_m[8, ] <- c(0.5, 1.5, -1.2, 1.2) +levels_m[9, ] <- c(0.5, 1.5, -2, 2) +levels_m[10, ] <- c(0, 200, -5, 5) +levels_m[11, ] <- c(-5, 15, -5, 5) +levels_m[12, ] <- c(0, 20, -5, 5) +levels_m[13, ] <- c(0, 20, -5, 5) +levels_m[14, ] <- c(0, 200, -5, 5) +levels_m[15, ] <- c(-10, 30, -5, 5) +levels_m[16, ] <- c(0, 80, -5, 5) +levels_m[17, ] <- c(0, 15, -4, 4) +levels_m[18, ] <- c(0, 200, -10, 10) +levels_m[19, ] <- c(0, 400, -10, 10) +levels_m[20, ] <- c(-10, 200, -10, 10) +levels_m[21, ] <- c(0, 3000, -100, 100) +levels_m[22, ] <- c(0, 80, -10, 10) +levels_m[23, ] <- c(0, 300, -10, 10) +levels_m[24, ] <- c(0, 50, -2, 2) +levels_m[25, ] <- c(0, 800, -20, 20) +levels_m[26, ] <- c(0, 300, -10, 10) +levels_m[27, ] <- c(0, 100, -10, 10) +levels_m[28, ] <- c(0, 200, -10, 10) +levels_m[29, ] <- c(0, 15, -5, 5) +levels_m[30, ] <- c(0, 300, -20, 20) +levels_m[31, ] <- c(-5, 25, -5, 5) +levels_m[32, ] <- c(0, 300, -5, 5) +levels_m[33, ] <- c(-40, 40, -5, 5) +levels_m[34, ] <- c(0, 40, -5, 5) +levels_m[35, ] <- c(-20, 300, -5, 5) +levels_m[36, ] <- c(-5, 25, -2, 2) +levels_m[37, ] <- c(-20, 140, -4, 4) +levels_m[38, ] <- c(-30, 30, -5, 5) +levels_m[39, ] <- c(0, 50, -2, 2) +levels_m[40, ] <- c(-20, 320, -2, 2) + +# define levels for contour/yrange for trends (minlev,maxlev) +ntlev <- 24 +tlevels_m <- matrix(nrow = length(field_names), ncol = 2) +tlevels_m[1, ] <- c(-0.05, 0.2) * 0.01 +tlevels_m[2, ] <- c(-0.1, 0.4) * 0.01 +tlevels_m[3, ] <- c(-0.1, 0.1) * 0.01 +tlevels_m[4, ] <- c(0, 0.4) * 0.01 +tlevels_m[5, ] <- c(0, 1.5) * 0.01 +tlevels_m[6, ] <- c(-1, 6) * 0.01 +tlevels_m[7, ] <- c(-0.8, 0.8) * 0.01 +tlevels_m[8, ] <- c(-0.3, 0.5) * 0.01 +tlevels_m[9, ] <- c(0, 0.6) * 0.01 +tlevels_m[10, ] <- c(0, 200) * 0.01 +tlevels_m[11, ] <- c(0, 12) * 0.01 +tlevels_m[12, ] <- c(0, 20) * 0.01 +tlevels_m[13, ] <- c(0, 20) * 0.01 +tlevels_m[14, ] <- c(0, 15) * 0.01 +tlevels_m[15, ] <- c(-70, 0) * 0.01 +tlevels_m[16, ] <- c(-4, 4) * 0.01 +tlevels_m[17, ] <- c(-1, 0) * 0.01 +tlevels_m[18, ] <- c(-70, 10) * 0.01 +tlevels_m[19, ] <- c(-10, 90) * 0.01 +tlevels_m[20, ] <- c(-60, 0) * 0.01 +tlevels_m[21, ] <- c(-20, 120) * 0.01 +tlevels_m[22, ] <- c(0, 10) * 0.01 +tlevels_m[23, ] <- c(-15, 5) * 0.01 +tlevels_m[24, ] <- c(0, 6) * 0.01 +tlevels_m[25, ] <- c(0, 100) * 0.01 +tlevels_m[26, 
] <- c(0, 60) * 0.01 +tlevels_m[27, ] <- c(0, 15) * 0.01 +tlevels_m[28, ] <- c(0, 50) * 0.01 +tlevels_m[29, ] <- c(0, 15) * 0.01 +tlevels_m[30, ] <- c(0, 140) * 0.01 +tlevels_m[31, ] <- c(-30, 0) * 0.01 +tlevels_m[32, ] <- c(0, 100) * 0.01 +tlevels_m[33, ] <- c(0, 8) * 0.01 +tlevels_m[34, ] <- c(0, 8) * 0.01 +tlevels_m[35, ] <- c(0, 150) * 0.01 +tlevels_m[36, ] <- c(-30, 0) * 0.01 +tlevels_m[37, ] <- c(0, 160) * 0.01 +tlevels_m[38, ] <- c(2, 8) * 0.01 +tlevels_m[39, ] <- c(0, 8) * 0.01 +tlevels_m[40, ] <- c(-100, 300) * 0.01 + +# Figure details + +# Aspect ratio (width:height) +# (depending on plot_type) +figure_aspect_ratio <- c(1.6, 1.6, 1, 1.3, 1, 1, 1, 1, 1, 1, + 1.6, 1.8, 1.8, 1.8, 1.8, 1, 1, 1, 1, 1) + +figure_rel_width <- c(0.7, 0.7, 0.7, 1, 1, 1, 1, 1, 1, 1, + 0.7, 1, 1, 1, 1, 1, 1, 1, 1, 1) + +# Figure width +png_width <- 960 +pdf_width <- 10 +x11_width <- 7 + +# Panel width when adopting multi-panel +# (this overrides figure width) +png_width_multi <- 480 +pdf_width_multi <- 5 +x11_width_multi <- 4 + +# color palette to be used +palette1 <- colorRampPalette(c("white", "orange", "darkred")) +palette2 <- colorRampPalette(c("blue", "white", "red")) +palette3 <- colorRampPalette(c("darkblue", "blue", "dodgerblue", + "white", "orange", "red", "darkred")) +palette_giorgi2011 <- colorRampPalette(c( + "white", "khaki1", "darkseagreen2", "mediumseagreen", "lightskyblue1", + "lightskyblue", "deepskyblue2", "dodgerblue2", "dodgerblue3", "royalblue4" +)) + palette_ts <- c("#377EB8", "#4DAF4A", "#984EA3", "#FF7F00", "#A65628", + "#F781BF", "#E41A1C", "#8DD3C7", "#BEBADA", "#FB8072", + "#80B1D3", "#FDB462", "#B3DE69", "#FCCDE5", "#D9D9D9", + "#BC80BD", "#CCEBC5", "#FFED6F") diff --git a/esmvaltool/diag_scripts/hyint/hyint_plot_maps.R b/esmvaltool/diag_scripts/hyint/hyint_plot_maps.R new file mode 100644 index 0000000000..5a79b91675 --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_plot_maps.R @@ -0,0 +1,557 @@ +###################################################### +#---------Maps plotting routine for HyInt------------# +#-------------E. 
Arnone (September 2017)-------------# +###################################################### + +# DECLARING THE FUNCTION: EXECUTION IS AT THE BOTTOM OF THE SCRIPT + +hyint_plot_maps <- function(work_dir, plot_dir, ref_dir, ref_idx, season, + prov_info) { + + # setting up path and parameters + dataset_ref <- models_name[ref_idx] + year1_ref <- models_start_year[ref_idx] + year2_ref <- models_end_year[ref_idx] + years_ref <- year1_ref:year2_ref + + # set main paths + work_dir_exp <- work_dir + plot_dir_exp <- plot_dir + dir.create(plot_dir_exp, recursive = T) + + # Define fields to be used + if (selfields[1] != F) { + field_names <- field_names[selfields, drop = F] + levels_m <- levels_m[selfields, , drop = F] + title_unit_m <- title_unit_m[selfields, , drop = F] + } + nfields <- length(field_names) + + # Define quantity (exp, ref, exp-ref) to be plotted depending on plot_type + # 1=exp_only, 2=ref_only, 3=exp/ref/exp-ref + nquantity <- c(1, 3, 3, 1) + + # Define regions to be used + nregions <- length(selregions) + if (nregions > dim(regions)[1]) { + stop(paste(diag_base, ": requesting regions outside list")) + } + + if (autolevels) { + levels_m[] <- NA + } + + # ------- loading reference data ---------- + # load topography if needed + if (masksealand) { + topofile <- getfilename_indices(work_dir, diag_base, ref_idx, topo = T) + gridfile <- getfilename_indices(work_dir, diag_base, ref_idx, grid = T) + if (!file.exists(topofile)) { + create_landseamask(regrid = gridfile, loc = run_dir, + regridded_topo = topofile, topo_only = T) + } + relevation <- ncdf_opener(topofile, "topo", "lon", "lat", + rotate = "no") + } + if (highreselevation) { + highresel <- get_elevation(elev_range = c(highreselevation, 9000)) + } + + # produce desert areas map if required from reference file + # (mean annual precipitation <0.5 mm, Giorgi et al. 
2014) + if (removedesert) { + # reference model + ref_filename <- getfilename_indices(ref_dir, diag_base, ref_idx, season) + pry <- ncdf_opener(ref_filename, "pry", "lon", "lat", rotate = "no") + retdes <- which(pry < 0.5) + pry[retdes] <- NA + # create mask with NAs for deserts and 1's for non-desert + ref_retdes2D <- apply(pry * 0, c(1, 2), sum) + 1 + ref_retdes3D <- replicate(dim(pry)[length(dim(pry))], ref_retdes2D) + } + + # open reference field + ref_filename <- getfilename_indices(ref_dir, diag_base, ref_idx, season) + print(paste("Reading reference ", ref_filename)) + for (field in field_names) { + field_ref <- ncdf_opener(ref_filename, field, "lon", "lat", rotate = "no") + ics_ref <- ics + ipsilon_ref <- ipsilon + + if (removedesert) { + field_ref <- field_ref * ref_retdes3D + } + if (masksealand) { + field_ref <- apply_elevation_mask(field_ref, relevation, + sealandelevation) + } + # if requested calculate multiyear average and store at time=1 + # in this case skip multi-year plot_type 4 + if (rmultiyear_mean) { + if (plot_type == 4) { + print("skipping multi-year plot_type 4 with multiyear mean") + return(0) + } + # exclude normalization years from multiyear mean + retyears <- 1:length(years_ref) + skipyears <- which(as.logical(match(years_ref, + norm_years[1]:norm_years[2]))) + retyears[skipyears] <- NA + retyears <- retyears[which(is.finite(retyears))] + field_ref[, , 1] <- apply(field_ref[, , retyears], + c(1, 2), mean, na.rm = T) + } + assign(paste(field, "_ref", sep = ""), field_ref) + } + + # Loop over models + for (model_idx in c(1:(length(models_name)))) { + # Do not compare reference with itself + if ( (model_idx == ref_idx) && ( (plot_type == 2) || (plot_type == 3))) { + if (length(models_name) == 1) { + print("skipping comparison plots because + only one dataset was requested") + } + next + } + + # setting up path and parameters + exp <- models_name[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + + # Years to be considered based on namelist and cfg_file + years <- year1:year2 + if (ryearplot[1] == "ALL") { + years <- year1:year2 + } else if (ryearplot[1] == "FIRST") { + years <- year1 + } else { + years <- years[match(ryearplot, years)] + years <- years[!is.na(years)] + } + nyears <- length(years) + + # Remove deserts if required + if (removedesert) { + filename <- getfilename_indices(work_dir_exp, diag_base, model_idx, + season) + if ( (rgrid == F) & ( (plot_type == 2) | (plot_type == 3) ) ) { + # regrid when comparing + pry <- ncdf_opener(filename, "pry", "lon", "lat", rotate = "no", + interp2grid = T, grid = ref_filename) + } else { + pry <- ncdf_opener(filename, "pry", "lon", "lat", rotate = "no") + } + retdes <- which(pry < 0.5) + pry[retdes] <- NA + # create mask with NAs for deserts and 1's for non-desert + exp_retdes2D <- apply(pry * 0, c(1, 2), sum) + 1 + exp_retdes3D <- replicate(dim(pry)[length(dim(pry))], exp_retdes2D) + } + + #-----------------Loading data-----------------------# + # open experiment field + for (field in field_names) { + infile <- getfilename_indices(work_dir_exp, diag_base, model_idx, + season) + print(paste("Reading ", field, " from experiment ", infile)) + if ( (rgrid == F) & ( (plot_type == 2) | (plot_type == 3) ) ) { + # regrid when comparing + field_exp <- ncdf_opener(infile, field, "lon", "lat", rotate = "no", + interp2grid = T, grid = ref_filename) + } else { + field_exp <- ncdf_opener(infile, field, "lon", "lat", rotate = "no") + } + if (removedesert) { + field_exp <- field_exp * 
exp_retdes3D + } + if (masksealand) { + field_exp <- apply_elevation_mask(field_exp, relevation, + sealandelevation) + } + # if requested calculate multiyear average and store it at time=1 + if (rmultiyear_mean) { + years <- year1:year2 + retyears <- 1:length(years) + skipyears <- which(as.logical(match(years, + norm_years[1]:norm_years[2]))) + retyears[skipyears] <- NA + retyears <- retyears[which(is.finite(retyears))] + field_exp[, , 1] <- apply(field_exp[, , retyears], + c(1, 2), mean, na.rm = T) + } + if (highreselevation_only) { + field_exp[] <- NA + } + assign(paste(field, "_exp", sep = ""), field_exp) + } + + #---------------Multiyear mean-----# + if (rmultiyear_mean) { + nyears <- 1 + } + + #-----------------Producing figures------------------------# + + print(paste0(diag_base, ": starting figures")) + + # Set figure dimensions + plot_size <- scale_figure(plot_type, diag_script_cfg, length(selfields), + npancol, npanrow) + if (boxregion != 0) { + # boxregion will plot region boxes over a global map of selected field + nregions <- 1 + } + + # LOOP over selected regions + for (iselregion in 1:nregions) { + iregion <- selregions[iselregion] + print(paste("region: ", region_names[iregion])) + + # Startup graphics for multiple years in one figure + if (plot_type == 4) { + field_label <- "multiindex" + figname <- getfilename_figure( + plot_dir_exp, field_label, year1, year2, model_idx, season, + "multiyear", region_codes[iregion], label, "map", output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + par(mfrow = c(nyears, nfields), cex.main = 1.3, cex.axis = 1.2, + cex.lab = 1.2, mar = c(2, 2, 2, 2), oma = c(1, 1, 1, 1)) + } + # LOOP over years defined in parameter file + for (iyear in c(1:nyears)) { + if (ryearplot_ref[1] == "EXP") { + iyear_ref <- iyear + } else { + iyear_ref <- match(ryearplot_ref, years_ref) + } + time_label <- years[iyear] + time_label_ref <- years[iyear_ref] + time_label_fig <- time_label + if (rmultiyear_mean) { + time_label <- paste(year1, year2, sep = "-") + time_label_ref <- paste(year1_ref, year2_ref, sep = "-") + time_label_fig <- "myearmean" + } + print(paste0(diag_base, ": plotting data for ", region_names[iregion], + "-", time_label)) + + # standard properties + info_exp <- paste(exp, time_label) # ,season) + info_ref <- paste(dataset_ref, time_label_ref) # ,season) + + #  Startup graphics for multiple fields/quantities in one figure + if (plot_type == 3) { + field_label <- "multiindex" + figname <- getfilename_figure( + plot_dir_exp, field_label, year1, year2, model_idx, + season, time_label_fig, region_codes[iregion], label, "map", + output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + par(mfrow = c(nfields, 3), cex.main = 1.3, cex.axis = 1.2, + cex.lab = 1.2, mar = c(2, 2, 2, 6), oma = c(1, 1, 1, 1)) + } + # LOOP over fields + for (field in field_names) { + ifield <- which(field == field_names) + if (anyNA(title_unit_m[ifield, 1:3])) { + title_unit_m[ifield, 1:3] <- field + title_unit_m[ifield, 4] <- "" + } + + # get fields + field_ref <- get(paste(field, "_ref", sep = "")) + field_exp <- get(paste(field, "_exp", sep = "")) + + # MAPS: select required year (if requested, multiyear average + # is stored at iyear=1) + field_ref <- field_ref[, , iyear] + field_exp <- field_exp[, , iyear_ref] + tmp_field <- field_exp + + # define quantity-dependent properties (exp, ref, exp-ref) + tmp.colorbar <- c(F, T, T) + if (plot_type == 1) { + tmp.colorbar <- T + } + tmp.palette <- palette_giorgi2011 + if 
(is.na(levels_m[ifield, 1]) | is.na(levels_m[ifield, 2])) { + print("No value for range: assigning min and max") + tmp.levels <- seq(min(field_ref, na.rm = T), + max(field_ref, na.rm = T), len = nlev) + } else { + tmp.levels <- seq(levels_m[ifield, 1], levels_m[ifield, 2], + len = nlev) + } + if (highreselevation_only) { + title_unit_m[ifield, 1] <- "Elevation" + } + tmp.titles <- paste0(title_unit_m[ifield, 1], ": ", + region_names[iregion], "-", + c(info_exp, info_ref, "Difference")) + if (plot_type == 4) { + tmp.titles <- paste(title_unit_m[ifield, 1], time_label) + } + + # Startup graphics for individual fields and multi + # quantities in each figure + if (plot_type == 2) { + figname <- getfilename_figure( + plot_dir_exp, field, year1, year2, model_idx, + season, time_label_fig, region_codes[iregion], + label, "comp_map", output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + par(mfrow = c(3, 1), cex.main = 2, cex.axis = 1.5, cex.lab = 1.5, + mar = c(5, 5, 4, 8), oma = c(1, 1, 1, 1)) + } + + # --- MAPS ---- + # LOOP over quantity (exp,ref,exp-ref difference) to be plotted + for (iquantity in c(1:nquantity[plot_type])) { + if (iquantity == 2) { + tmp_field <- field_ref + ipsilon <- ipsilon_ref + ics <- ics_ref + } + if (iquantity == 3) { + tmp.palette <- palette2 + tmp_field <- field_exp - field_ref + if (is.na(levels_m[ifield, 3]) | is.na(levels_m[ifield, 4])) { + tmp_field_max <- max(abs(tmp_field), na.rm = T) + tmp.levels <- seq(-tmp_field_max, tmp_field_max, + len = nlev) + } else { + tmp.levels <- seq(levels_m[ifield, 3], levels_m[ifield, 4], + len = nlev) + } + } + # Startup graphics for individual field in each figure + if (plot_type == 1) { + figname <- getfilename_figure( + plot_dir_exp, field, year1, year2, model_idx, + season, time_label_fig, region_codes[iregion], label, + "map", output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + lonlat_aratio <- (max(ics) - min(ics)) / + (max(ipsilon) - min(ipsilon)) + par(mfrow = c(1, 1), cex.main = 2, cex.axis = 1.5, cex.lab = 1.5, + mar = c(5, 5, 4, 8), oma = c(1, 1, 1, 1)) + #mar = c(3, 3, 4, 8), oma = c(1, 1, 1, 1)) + } + + # set active panel + if (plot_type == 3) { + par(mfg = c(ifield, iquantity, nfields, 3)) + } + if (plot_type == 4) { + par(mfg = c(iyear, ifield, nyears, nfields)) + } + + # scale autolevels if required + if (autolevels && (autolevels_scale != 1)) { + autorange <- max(tmp.levels) - min(tmp.levels) + meanrange <- mean(tmp.levels) + tmp.levels <- seq(meanrange - autorange * autolevels_scale, + meanrange + autorange * autolevels_scale, + len = nlev) + } + + cex_main <- 1.4 + if (plot_type == 1) { + cex_main <- 1.3 + } + + # drop data outside region limits + retlon <- which(ics < regions[iregion, 1] + | ics > regions[iregion, 2]) + retlat <- which(ipsilon < regions[iregion, 3] + | ipsilon > regions[iregion, 4]) + mask_field <- tmp_field + mask_field[retlon, ] <- NA + mask_field[, retlat] <- NA + tmp_field <- mask_field + + # contours + filled_contour3(ics, ipsilon, tmp_field, + xlab = "Longitude", ylab = "Latitude", + main = tmp.titles[iquantity], levels = tmp.levels, + color.palette = tmp.palette, + xlim = c(regions[iregion, 1], regions[iregion, 2]), + ylim = c(regions[iregion, 3], regions[iregion, 4]), axes = F, + asp = 1, cex.main = cex_main + ) + # continents + continents_col <- "white" + if (map_continents <= 0) { + continents_col <- "gray30" + } + map("world", regions = ".", interior = map_continents_regions, + exact = F, boundary = T, add = T, col = 
continents_col, + lwd = abs(map_continents)) + #rect(regions[iregion, 1], regions[iregion, 3], + # regions[iregion, 2], regions[iregion, 4], + # border = "grey90", lwd = 3) + # grid points + if (oplot_grid) { + # build up grid if needed + ics2 <- replicate(length(ipsilon), ics) + ipsilon2 <- t(replicate(length(ics), ipsilon)) + points(ics2, ipsilon2, pch = 1, col = "grey40", cex = oplot_grid) + } + # add highres elevation contours + if (highreselevation) { + palette(terrain.colors(10)) + contour(highresel$lon_el, highresel$lat_el, highresel$elevation, + levels = seq(500, 5000, length.out = 10), col = 1:10, add = T + ) + } + # boxes + box(col = "grey60") + if (boxregion != 0) { + box_col <- "white" + if (boxregion <= 0) { + box_col <- "grey30" + } + for (ireg in 2:length(selregions)) { + iselreg <- selregions[ireg] + rect(regions[iselreg, 1], regions[iselreg, 3], + regions[iselreg, 2], regions[iselreg, 4], + border = box_col, lwd = abs(boxregion) + ) + text(regions[iselreg, 1], regions[iselreg, 3], + paste0(" ", region_codes[iselreg] + ), col = box_col, pos = 3, offset = 0.5) + } + } + # axis + if (plot_type <= 2) { + if ( (regions[iregion, 2] - regions[iregion, 1] > 90) + | (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(1, col = "grey40", at = seq(-180, 180, 45)) + axis(2, col = "grey40", at = seq(-90, 90, 30)) + } else { + axis(1, col = "grey40") + axis(2, col = "grey40") + } + } else if (plot_type == 3) { + if (iquantity == 1) { + if ( (regions[iregion, 2] - regions[iregion, 1] > 90) + | (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(2, col = "grey40", at = seq(-90, 90, 30)) + } else { + axis(2, col = "grey40") + } + } + if (ifield == length(field_names)) { + if ( (regions[iregion, 2] - regions[iregion, 1] > 90) + | (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(1, col = "grey40", at = seq(-180, 180, 45)) + } else { + axis(1, col = "grey40") + } + } + } else if (plot_type == 4) { + if (iyear == nyears) { + if ( (regions[iregion, 2] - regions[iregion, 1] > 90) + | (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(1, col = "grey40", at = seq(-180, 180, 45)) + } else { + axis(1, col = "grey40") + } + } + if (field == "int_norm") { + if ( (regions[iregion, 2] - regions[iregion, 1] > 90) + | (regions[iregion, 4] - regions[iregion, 3] > 90)) { + axis(2, col = "grey40", at = seq(-90, 90, 30)) + } else { + axis(2, col = "grey40") + } + } + } + + # colorbar + new_fig_scale <- c(-0.11, -0.04, 0.1, -0.1) + line_label <- 2.7 + cex_label <- 1.2 + cex_colorbar <- 1 + if (plot_type == 2) { + new_fig_scale <- c(-0.07, -0.02, 0.1, -0.1) + line_label <- 2.7 + cex_label <- 1 + cex_colorbar <- 1.5 + } + if (plot_type == 3) { + new_fig_scale <- c(-0.11, -0.03, 0.1, -0.1) + line_label <- 3 + cex_label <- 1 + cex_colorbar <- 1.2 + } + if ( (tmp.colorbar[iquantity]) & add_colorbar) { + image_scale3(volcano, levels = tmp.levels, + new_fig_scale = new_fig_scale, + color.palette = tmp.palette, colorbar.label = + paste(title_unit_m[ifield, 1], + title_unit_m[ifield, 4]), + cex.colorbar = cex_colorbar, cex.label = cex_label, + colorbar.width = 1, line.label = line_label, + line.colorbar = 1.0 + ) + } + } # close loop over quantity + if (plot_type == 1) { + graphics_close(figname) + # Store data for provenance + caption <- paste0("Map for index ", field, " over region ", + region_codes[iregion], " according to ", + models_name[model_idx]) + prov_fig_now <- list(figname = figname, + caption = caption, + model_idx = list(model_idx), + ancestors = list(infile)) + 
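+            # provenance entries are keyed by figure name, so each
+            # single-index map gets its own record in prov_info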
prov_info[[figname]] <- prov_fig_now + } + if (plot_type == 2) { + graphics_close(figname) + # Store data for provenance + caption <- paste0("Map for index ", field, " over region ", + region_codes[iregion], " according to ", + models_name[model_idx], + " in comparison to reference dataset") + prov_fig_now <- list(figname = figname, + caption = caption, + model_idx = list(model_idx, ref_idx), + ancestors = list(infile, ref_filename)) + prov_info[[figname]] <- prov_fig_now + } + } # close loop over field + if (plot_type == 3) { + graphics_close(figname) + # Store data for provenance + caption <- paste0("Comparison maps for multiple indices", + " over region ", region_codes[iregion]) + prov_fig_now <- list(figname = figname, + caption = caption, + model_idx = list(model_idx, ref_idx), + ancestors = list(infile, ref_filename)) + prov_info[[figname]] <- prov_fig_now + } + } # close loop over years + if (plot_type == 4) { + graphics_close(figname) + # Store data for provenance + caption <- paste0("Maps for multiple indices over selected years") + prov_fig_now <- list(figname = figname, + caption = caption, + model_idx = list(model_idx), + ancestors = list(infile)) + prov_info[[figname]] <- prov_fig_now + } + } # close loop over regions + } # close loop over models + return(prov_info) +} # close function diff --git a/esmvaltool/diag_scripts/hyint/hyint_plot_trends.R b/esmvaltool/diag_scripts/hyint/hyint_plot_trends.R new file mode 100644 index 0000000000..fedfbbc04d --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_plot_trends.R @@ -0,0 +1,691 @@ +###################################################### +#--------Trend plotting routine for HyInt------------# +#-------------E. Arnone (September 2017)-------------# +###################################################### + +hyint_plot_trends <- function(work_dir, plot_dir, ref_idx, season, + prov_info) { + + # Define subscripts for variable names + var_type <- c("tseries", "tseries-sd", "trend", "trend-stat") + + # Set main paths + dir.create(plot_dir, recursive = T) + + # Load palette + palette(palette_ts) + + # Number of models + nmodels <- length(models_name) + + # Define regions to be used + nregions <- length(selregions) + if ( (plot_type == 13) | (plot_type == 15)) { + # if plotting multiple models use only first region of list + nregions <- 1 + } + + # Check whether enough panels are allocated for plotting the + # indices requested. 
If not, drop the extra indices
+  npanels <- npancol * npanrow
+  if (npanels < length(selfields)) {
+    selfields <- selfields[1:npanels]
+  }
+
+  # Update number of panels and columns if selfields has one element only
+  if (length(selfields) == 1) {
+    npancol <- 1
+    npanrow <- 1
+  }
+
+  # Define fields to be used (note that the routine is
+  # optimized for 6 fields in 3x2 panels per multi-panel figure)
+  if (selfields[1] != F) {
+    field_names <- field_names[selfields, drop = F]
+    levels_m <- levels_m[selfields, , drop = F]
+    tlevels_m <- tlevels_m[selfields, , drop = F]
+    title_unit_m <- title_unit_m[selfields, , drop = F]
+  }
+
+  # Define field label for filenames
+  field_label <- "multiindex"
+  if (length(selfields) == 1) {
+    field_label <- field_names
+  }
+
+  # Remove preset range of values for plotting if needed
+  nyears <- models_end_year[ref_idx] - models_start_year[ref_idx]
+  if (autolevels) {
+    tlevels_m[] <- NA
+    levels_m[] <- NA
+  }
+
+  # If shading is on, switch it off and use lines instead for plot_type 13
+  if (plot_type == 13 & add_trend_sd_shade) {
+    add_trend_sd_shade <- F
+    add_trend_sd_lines <- T
+  }
+
+  # Define array to store plotting limits for each panel of multi-panel figures
+  plot_limits <- array(NaN, c(4, length(field_names)))
+
+  # Load parameters for reference dataset
+  year1_ref <- models_start_year[ref_idx]
+  year2_ref <- models_end_year[ref_idx]
+
+  # Handle label tag when overplotting data from tseries
+  # files with different labels in plot_type 14 and 15
+  label_figname <- label[1]
+  if (length(label) > 1 & plot_type >= 10) {
+    label_figname <- paste0(label[1], "-plus")
+  }
+
+  # Set figure dimensions
+  plot_size <- scale_figure(plot_type, diag_script_cfg, length(selfields),
+    npancol, npanrow)
+
+  # Startup graphics for multi-model timeseries or trends
+  plot_type_now <- (plot_type == 13) | (plot_type == 15)
+  if (plot_type_now == T) {
+    tseries_trend_tag <- "timeseries"
+    if (plot_type == 15) {
+      tseries_trend_tag <- "trend_summary"
+    }
+    figname <- getfilename_figure(plot_dir, field_label, year1_ref,
+      year2_ref, ref_idx, season, "", region_codes[selregions[1]],
+      label_figname, tseries_trend_tag, output_file_type,
+      multimodel = T
+    )
+    graphics_startup(figname, output_file_type, plot_size)
+    par(
+      mfrow = c(npanrow, npancol), cex.main = 1.3, cex.axis = 1.2,
+      cex.lab = 1.2, mar = c(5, 5, 5, 5), oma = c(1, 1, 1, 1)
+    )
+  }
+
+  n_noplot <- 1
+  if ( (plot_type == 13 | plot_type == 15) & autolevels) {
+    n_noplot <- 2
+  }
+  minmax_levels <- c(NA, NA)
+
+  # if requested, loop twice over all models to get range of values for plots
+  for (noplot in n_noplot:1) {
+
+    # Loop over models
+    for (model_idx in 1:nmodels) {
+      # setting up path and parameters
+      year1 <- models_start_year[model_idx]
+      year2 <- models_end_year[model_idx]
+
+      # Years to be considered based on namelist and cfg_file
+      years <- year1:year2
+      years_ref <- year1_ref:year2_ref
+      if (ryearplot[1] == "ALL") {
+        years <- year1:year2
+      } else if (ryearplot[1] == "FIRST") {
+        years <- year1
+      } else {
+        years <- years[match(ryearplot, years)]
+        years <- years[!is.na(years)]
+      }
+      if (plot_type >= 14) {
+        add_trend <- F
+      } # do not plot trend line for plot 14 or 15
+
+      # Startup graphics for multi-region timeseries
+      if (plot_type == 12) {
+        figname <- getfilename_figure(
+          plot_dir, field_label, year1, year2,
+          model_idx, season, "", "multiregion", label_figname, "timeseries",
+          output_file_type
+        )
+        graphics_startup(figname, output_file_type, plot_size)
+        par(
+          mfrow = c(npanrow, npancol), cex.main = 1.3,
cex.axis = 1.2, + cex.lab = 1.2, mar = c(5, 5, 5, 5), oma = c(1, 1, 1, 1) + ) + } + #  Startup graphics for bar plot of trend coefficients + if (plot_type == 14) { + figname <- getfilename_figure( + plot_dir, field_label, year1, year2, + model_idx, season, "", "multiregion", label_figname, + "trend_summary", output_file_type + ) + graphics_startup(figname, output_file_type, plot_size) + par( + mfrow = c(npanrow, npancol), cex.main = 1.3, cex.axis = 1.2, + cex.lab = 1.2, mar = c(8, 8, 2, 2), oma = c(1, 1, 1, 1) + ) + } + + if (model_idx == 1) { + store_label <- label + } + + # ----- Loop over label when plotting more files in the same panel ---- + for (ilabel in 1:length(store_label)) { + label <- store_label[ilabel] + #-----------------Loading data-----------------------# + + # open timeseries and trends for exp and ref + infile <- getfilename_trends(work_dir, label, model_idx, season) + print(paste("HyInt_trends: reading file ", infile)) + field_long_names <- array(NaN, length(field_names)) + field_units <- array(NaN, length(field_names)) + + if ( (plot_type == 13) | (plot_type == 15)) { + # Store data for provenance + caption <- paste0("Hyint timeseries for selected indices and regions ", + "according to selected datasets") + if (plot_type == 15) { + caption <- paste0("Hyint trends for multiple indices and regions ", + "according to selected datasets") + } + if (length(prov_info[[figname]]) == 0) { + prov_fig_now <- list(figname = figname, + caption = caption, + model_idx = list(model_idx), + ancestors = list(infile)) + prov_info[[figname]] <- prov_fig_now + } else { + if (is.na(match(model_idx, prov_info[[figname]]$model_idx))) { + prov_info[[figname]]$model_idx <- + c(prov_info[[figname]]$model_idx, model_idx) + prov_info[[figname]]$ancestors <- + c(prov_info[[figname]]$ancestors, infile) + } + } + } + + for (var in field_names) { + ivar <- which(field_names == var) + for (stype in var_type[1:2]) { + svar <- paste0(var, "_", stype) + rfield <- ncdf_opener(infile, svar, "region", timedimname, + rotate = "no" + ) + assign(svar, rfield) # assign field data to field name + nc <- nc_open(infile) + dlname <- ncatt_get(nc, svar, "long_name") + dunits <- ncatt_get(nc, svar, "units") + field_long_names[ivar] <- dlname$value + field_units[ivar] <- dunits$value + nc_close(nc) + } + for (stype in var_type[3:4]) { + svar <- paste0(var, "_", stype) + rfield <- ncdf_opener(infile, svar, "region", "coefficients", + rotate = "no" + ) + assign(svar, rfield) # assign field data to field name + } + } + + # store size of time and region arrays + time <- ncdf_opener(infile, timedimname, + timedimname, rotate = "no") + 1950 + regions <- ncdf_opener(infile, "regions", "region", "boundaries", + rotate = "no" + ) + # setup time selection for trends + rettimes <- which(!is.na(time)) + if (trend_years[1] != F) { + # apply trend to limited time interval if required + rettimes_tmp <- (time >= trend_years[1]) & time <= trend_years[2] + rettimes <- which(rettimes_tmp) + if (length(trend_years) == 4) { + # apply trend also to second time interval if required + rettime2_tmp <- (time >= trend_years[3]) & time <= trend_years[4] + rettimes2 <- which(rettime2_tmp) + } + } + xlim <- c(min(time), max(time)) + if (trend_years_only & (trend_years[1] != F)) { + xlim <- trend_years[1:2] + } + + + #-----------------Producing figures------------------------# + + print(paste0(diag_base, ": starting figures")) + + # LOOP over fields + for (field in field_names) { + ifield <- which(field == field_names) + + if (noplot == 2 & 
model_idx == 1) {
+          minmax_levels <- c(NA, NA)
+          minmax_tlevels <- c(NA, NA)
+          assign(paste0(field, "_levels"), minmax_levels)
+          assign(paste0(field, "_tlevels"), minmax_tlevels)
+        }
+
+        if (anyNA(title_unit_m[ifield, 1:3])) {
+          title_unit_m[ifield, 1] <- field_names[ifield]
+          title_unit_m[ifield, 2:3] <- field_long_names[ifield]
+          title_unit_m[ifield, 4] <- field_units[ifield]
+        }
+
+        # TIMESERIES: get timeseries and trends
+        tfield_exp <- get(paste0(field, "_", var_type[1]))
+        tfield_exp_sd <- get(paste0(field, "_", var_type[2]))
+        trend_exp <- get(paste0(field, "_", var_type[3]))
+        trend_exp_stat <- get(paste0(field, "_", var_type[4]))
+
+        if (length(dim(tfield_exp)) < 2) {
+          # reshape data to matrix if regions has only one element
+          tfield_exp <- array(tfield_exp, c(1, length(tfield_exp)))
+          tfield_exp_sd <- array(tfield_exp_sd, c(1, length(tfield_exp_sd)))
+          trend_exp <- array(trend_exp, c(1, length(trend_exp)))
+          trend_exp_stat <- array(trend_exp_stat, c(1, length(trend_exp_stat)))
+        }
+        if (plot_type == 13 | plot_type == 15) {
+          # get only first region if working on multimodel
+          tfield_exp <- tfield_exp[1, , drop = F]
+          tfield_exp_sd <- tfield_exp_sd[1, , drop = F]
+          trend_exp <- trend_exp[1, , drop = F]
+          trend_exp_stat <- trend_exp_stat[1, , drop = F]
+        }
+
+        if (is.na(levels_m[ifield, 1]) | is.na(levels_m[ifield, 2])) {
+          print("No value for range: assigning min and max")
+          tmp.levels <- c(min(tfield_exp, na.rm = T),
+                          max(tfield_exp, na.rm = T))
+          if (add_trend_sd | add_trend_sd_shade) {
+            tmp.levels <- c(min(tfield_exp - tfield_exp_sd, na.rm = T),
+                            max(tfield_exp + tfield_exp_sd, na.rm = T))
+          }
+        } else {
+          tmp.levels <- c(levels_m[ifield, 1], levels_m[ifield, 2])
+        }
+
+        if (nyears < 20 & (!autolevels)) {
+          levrange <- max(tmp.levels, na.rm = T) - min(tmp.levels, na.rm = T)
+          meanrange <- mean(tmp.levels, na.rm = T)
+          tmp.levels <- c(meanrange - levrange * 1.5,
+                          meanrange + levrange * 1.5)
+        }
+
+        # Startup graphics for one timeseries in one figure
+        if (plot_type == 11) {
+          figname <- getfilename_figure(
+            plot_dir, field, year1, year2, model_idx, season,
+            "", "multiregion", label_figname,
+            "timeseries_single", output_file_type
+          )
+          graphics_startup(figname, output_file_type, plot_size)
+          par(cex.main = 1.3, cex.axis = 1.2, cex.lab = 1.2,
+              mar = c(4, 4, 2, 2), oma = c(1, 1, 1, 1)
+          )
+        }
+
+        # Actual plotting
+        if ( (plot_type == 11) | (plot_type == 12) | (plot_type == 13)) {
+          if (plot_type != 11) {
+            # set active panel
+            par_row <- (ifield - 1) %/% npancol + 1
+            par_col <- (ifield - 1) %% npancol + 1
+            par(mfg = c(par_row, par_col, npanrow, npancol))
+          }
+
+          # scale autolevels if required
+          if (autolevels && (autolevels_scale != 1)) {
+            autorange <- max(tmp.levels, na.rm = T) -
+              min(tmp.levels, na.rm = T)
+            meanrange <- mean(tmp.levels, na.rm = T)
+            tmp.levels <- c(meanrange - autorange * autolevels_scale,
+                            meanrange + autorange * autolevels_scale)
+          }
+
+          if (noplot == 2 & autolevels & plot_type == 13) {
+            # Recursively store min and max values to be plotted
+            # NOTE: this works as long as only one region at a time is used
+            minmax_levels <- get(paste0(field, "_levels"))
+            minmax_levels[1] <- min(c(minmax_levels[1], tmp.levels[1]),
+                                    na.rm = T)
+            minmax_levels[2] <- max(c(minmax_levels[2], tmp.levels[2]),
+                                    na.rm = T)
+            assign(paste0(field, "_levels"), minmax_levels)
+            next
+          }
+
+          if (noplot == 1 & autolevels & plot_type == 13) {
+            tmp.levels[1] <- (get(paste0(field, "_levels")))[1]
+            tmp.levels[2] <- (get(paste0(field, "_levels")))[2]
+          }
+
+          # Base plot
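+          # The noplot bookkeeping above acts as a running min/max
+          # accumulator across models; a minimal standalone sketch of the
+          # same idea (values illustrative, not part of the diagnostic):
+          #   acc <- c(NA, NA)
+          #   for (rng in list(c(1, 5), c(-2, 3))) {
+          #     acc[1] <- min(c(acc[1], rng[1]), na.rm = TRUE)
+          #     acc[2] <- max(c(acc[2], rng[2]), na.rm = TRUE)
+          #   }
+          #   acc  # c(-2, 5): common range used in the final plotting pass
+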
if (!(plot_type == 13 & model_idx > 1) & ilabel == 1) { + ylab <- paste0(title_unit_m[ifield, 1]) + if (title_unit_m[ifield, 4] != "") { + ylab <- paste0(ylab, title_unit_m[ifield, 4]) + } + plot(time, type = "n", + ylim = c(tmp.levels[1], tmp.levels[2]), + xlim = xlim, xlab = "Year", ylab = ylab, + main = title_unit_m[ifield, 3], xaxs = "i") + # store panel plot limits + plot_limits[, ifield] <- par("usr") + } + + # Update plot limits in case panel has changed + par(usr = plot_limits[, ifield]) + + # LOOP over regions to plot timeseries + if (add_trend_sd_shade) { + for (ireg in 1:nregions) { + iselreg <- selregions[ireg] + shade_area <- c( + tfield_exp[ireg, ] + tfield_exp_sd[ireg, ], + rev(tfield_exp[ireg, ] - tfield_exp_sd[ireg, ]) + ) + shade_area[shade_area < tmp.levels[1]] <- tmp.levels[1] + polygon(c(time, rev(time)), shade_area, + col = "grey95", + border = NA + ) + } + } + for (ireg in 1:nregions) { + iselreg <- selregions[ireg] + col_ts <- ireg + if (length(label) > 1) { + col_ts <- c( + "dodgerblue4", "darkseagreen4", "goldenrod4", + "coral4", "grey", "mediumorchid1", "black" + )[ilabel] + } + if (plot_type == 13) { + col_ts <- model_idx + } + if (add_trend_sd) { + lines(time, tfield_exp[ireg, ] + tfield_exp_sd[ireg, ], + lty = 3, + col = col_ts + ) + lines(time, tfield_exp[ireg, ] - tfield_exp_sd[ireg, ], + lty = 3, + col = col_ts + ) + } + if (add_tseries_lines) { + lines(time, tfield_exp[ireg, ], col = col_ts) + } + points(time, tfield_exp[ireg, ], col = col_ts) + if (add_trend) { + lines(time[rettimes], trend_exp[ireg, 1] + trend_exp[ireg, 2] + * time[rettimes], col = col_ts, lwd = 2) + if (length(trend_years) == 4) { + # apply trend also to second time interval if required + lines(time[rettimes2], trend_exp[ireg, 3] + trend_exp[ireg, 4] + * time[rettimes2], col = col_ts, lwd = 2) + } + } + } + if (abs(add_legend) & ( (plot_type == 11) | + (plot_type == 12)) & (ifield == 1)) { + pos_legend <- c( + plot_limits[1, ifield] + (plot_limits[2, ifield] + - plot_limits[1, ifield]) * xy_legend[1], + plot_limits[3, ifield] + (plot_limits[4, ifield] + - plot_limits[3, ifield]) * xy_legend[2] + ) + ncol <- 1 + if (add_legend < 0) { + ncol <- nregions + } + if (add_legend > 1) { + ncol <- add_legend + } + legend(pos_legend[1], pos_legend[2], region_codes[selregions], + text.col = (1:nregions), ncol = ncol + ) + } + box(lwd = 2) + if (plot_type == 11) { + graphics_close(figname) + # Store data for provenance + caption <- paste0("Hyint timeseries for index ", field, + " over selected regions according to ", + models_name[model_idx]) + prov_fig_now <- list(figname = figname, + caption = caption, + model_idx = list(model_idx), + ancestors = list(infile)) + prov_info[[figname]] <- prov_fig_now + } + } + if ( (plot_type == 14) | (plot_type == 15)) { + # plot trend coefficients for different regions, one panel per field + if (anyNA(tlevels_m[ifield, ])) { + print("No value for range: assigning min and max") + ylim <- c( + min(trend_exp_stat[, 1] - trend_exp_stat[, 2], na.rm = T), + max(trend_exp_stat[, 1] + trend_exp_stat[, 2], na.rm = T)) + # scale autolevels if required + if (autolevels && (autolevels_scale_t != 1)) { + autorange <- max(ylim, na.rm = T) - min(ylim, na.rm = T) + meanrange <- mean(ylim, na.rm = T) + ylim <- c(meanrange - autorange * autolevels_scale_t, + meanrange + autorange * autolevels_scale_t) + } + } else { + ylim <- tlevels_m[ifield, ] + } + + if (trend_years[1] != F) { + xlim <- trend_years[1:2] + } + ylab <- paste0("Avg trend") + # change y scale to % and 1/100 years + 
if (scalepercent & (field != "hyint")) { + trend_exp <- trend_exp * 100 # trend coefficients + trend_exp_stat[, 2] <- trend_exp_stat[, 2] * 100 # standard error + ylab <- paste0(ylab, " (%)") + ylim <- ylim * 100 + } + if (scale100years) { + trend_exp <- trend_exp * 100 # trend coefficients + trend_exp_stat[, 2] <- trend_exp_stat[, 2] * 100 # standard error + ylab <- paste0(ylab, " (1/100 years)") + ylim <- ylim * 100 + } + nx <- nregions + xlab <- "Regions" + xlabels <- region_codes[selregions] + if (plot_type == 15) { + nx <- nmodels + xlab <- "" # "Models" + xlabels <- models_name + } + # hereafter xregions is the x which also holds models for plottype 15 + xregions <- 1:nx + + # Actual plotting + # set active panel + par_row <- (ifield - 1) %/% npancol + 1 + par_col <- (ifield - 1) %% npancol + 1 + par(mfg = c(par_row, par_col, npanrow, npancol)) + + if (noplot == 2 & autolevels & plot_type == 15) { + # Recursively store min and max values to be plotted + # NOTE: this works as long as only one region at the time is used + minmax_tlevels <- get(paste0(field, "_tlevels")) + minmax_tlevels[1] <- min(c(minmax_tlevels[1], ylim[1]) + , na.rm = T) + minmax_tlevels[2] <- max(c(minmax_tlevels[2], ylim[2]) + , na.rm = T) + assign(paste0(field, "_tlevels"), minmax_tlevels) + next + } + if (noplot == 1 & autolevels & plot_type == 15) { + ylim[1] <- (get(paste0(field, "_tlevels")))[1] + ylim[2] <- (get(paste0(field, "_tlevels")))[2] + } + + # Base plot + if (!(plot_type == 15 & model_idx > 1) & ilabel == 1) { + plot(xregions, xregions, + type = "n", pch = 22, axes = F, xlab = xlab, ylab = ylab, + ylim = ylim, main = (paste0( + title_unit_m[ifield, 1], + " trend (", xlim[1], "-", xlim[2], ")" + )) + ) + box() + # store panel plot limits + plot_limits[, ifield] <- par("usr") + } + + # Update plot limits in case panel has changed + par(usr = plot_limits[, ifield]) + for (ireg in 1:nregions) { + iregion <- ireg + ixregion <- ireg + if (plot_type == 15) { + ixregion <- model_idx + } + # add errorbar (standard error) + if (!anyNA(trend_exp_stat[iregion, ])) { + arrows(xregions[ixregion], trend_exp[iregion, 2] - + trend_exp_stat[iregion, 2], xregions[ixregion], + trend_exp[iregion, 2] + trend_exp_stat[iregion, 2], + length = 0.05, angle = 90, code = 3 + ) + points(xregions[ixregion], trend_exp[iregion, 2], + pch = 22, + col = "grey40", bg = "white", cex = 2 + ) + # add filled points for significant (95% level) + col90 <- "grey70" + col95 <- "dodgerblue3" + if (length(label) > 1) { + col90 <- c("dodgerblue3", "darkseagreen3", "goldenrod3", + "coral3", "grey", "mediumorchid1", "black") + col95 <- c("dodgerblue4", "darkseagreen4", "goldenrod4", + "coral4", "grey", "mediumorchid1", "black") + } + if (trend_exp_stat[iregion, 4] <= 0.1) { + points(xregions[ixregion], trend_exp[iregion, 2], + pch = 22, + col = col90[ilabel], bg = col90[ilabel], cex = 2 + ) + } + if (trend_exp_stat[iregion, 4] <= 0.05) { + points(xregions[ixregion], trend_exp[iregion, 2], + pch = 22, + col = col95[ilabel], bg = col95[ilabel], cex = 2 + ) + } + } else { + print(paste( + "MISSING VALUES in index ", field, ", region ", + region_codes[iregion] + )) + print(trend_exp_stat[iregion, ]) + } + } + if (length(label) > 1) { + retsig90 <- which(trend_exp_stat[, 4] < 0.1) + if (!is.na(retsig90[1])) { + points(xregions[retsig90], trend_exp[retsig90, 2], + pch = 22, + col = "grey70", bg = "grey70", cex = 2) + } + retsig95 <- which(trend_exp_stat[, 4] < 0.05) + if (!is.na(retsig95[1])) { + points(xregions[retsig95], trend_exp[retsig95, 2], + pch 
= 22, + col = "dodgerblue3", bg = "dodgerblue3", cex = 2) + } + } + box() + if (!( (plot_type == 15) & (model_idx > 1))) { + if (add_zeroline & (ylim[1] != 0)) { + lines(c(-1, nx + 1), c(0, 0), lty = 2, lwd = 1.5, col = "grey40") + } + las <- 1 + cex.axis <- 1 + if (plot_type == 15) { + las <- 2 + cex.axis <- 0.8 + } + axis(1, + labels = xlabels, at = xregions, las = las, + cex.axis = cex.axis + ) + axis(2) + } + } # close if on plot_type 14 and 15 + } # close loop over field + } # close loop over label + if ( (plot_type == 12) | (plot_type == 14)) { + graphics_close(figname) + # Store data for provenance + caption <- paste0("Hyint timeseries for selected indices and regions ", + "according to ", models_name[model_idx]) + if (plot_type == 14) { + caption <- paste0("Hyint trends for multiple indices and regions ", + "according to ", models_name[model_idx]) + } + prov_fig_now <- list(figname = figname, + caption = caption, + model_idx = list(model_idx), + ancestors = list(infile)) + prov_info[[figname]] <- prov_fig_now + } + } # close loop over model + } # close miniloop over noplot + + # Legend for plot_type 13 + if (abs(add_legend) & (plot_type == 13)) { + ncol <- 1 + if (add_legend > 1) { + ncol <- add_legend + } + if (add_legend < 0) { + ncol <- nmodels + } + # for (ifield in 1:length(field_names)) { + ifield <- 1 + # set active panel + par_row <- (ifield - 1) %/% npancol + 1 + par_col <- (ifield - 1) %% npancol + 1 + par(mfg = c(par_row, par_col, npanrow, npancol), + usr = plot_limits[, ifield] + ) + pos_legend <- c( + plot_limits[1, ifield] + (plot_limits[2, ifield] - + plot_limits[1, ifield]) * xy_legend[1], + plot_limits[3, ifield] + (plot_limits[4, ifield] - + plot_limits[3, ifield]) * xy_legend[2] + ) + legend_label <- "" + if (tag_legend[1]) legend_label <- models_name + if (tag_legend[2]) { + legend_label <- paste(legend_label, + models_experiments, + sep = " " + ) + } + if (tag_legend[3]) { + legend_label <- paste(legend_label, + models_ensemble, + sep = " " + ) + } + legend(pos_legend[1], pos_legend[2], + legend = legend_label, + text.col = (1:nmodels), ncol = ncol, cex = 0.9 + ) + print(legend_label) + print("legend_label") + + } + if ( (plot_type == 13) | (plot_type == 15)) { + graphics_close(figname) + } +return(prov_info) +} # close function diff --git a/esmvaltool/diag_scripts/hyint/hyint_preproc.R b/esmvaltool/diag_scripts/hyint/hyint_preproc.R new file mode 100644 index 0000000000..2f0b36f7f7 --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_preproc.R @@ -0,0 +1,39 @@ +###################################################### +#---------Regridding preprocessing for HyInt---------# +#-------------E. 
Arnone (Oct 2017)-------------------# +###################################################### + +hyint_preproc <- function(work_dir, model_idx, ref_idx, + climofile, regfile, rgrid) { + print(paste0(diag_base, ": pre-processing file: ", climofile)) + + # add absolute axis, remove leap year days, regrid if needed + # cdo delete and copy do not like files with whitespace + + if (rgrid != F) { + if (rgrid == "REF") { + rgrid <- climofiles[ref_idx] + gridf <- tempfile() + cdo("griddes", input = rgrid, stdout = gridf) + } else { + gridf <- rgrid + } + tempf <- cdo("remapcon2", args = gridf, input = climofile) + unlink(gridf) + } else { + tempf <- cdo("addc", args = "0", input = climofile) + } + + cdo("-copy", options = "-L -f nc -a", + input = tempf, output = regfile) + + unlink(tempf) + + # generate grid file + gridfile <- getfilename_indices(work_dir, diag_base, model_idx, grid = T) + cdo("griddes", input = regfile, stdout = gridfile) + + print(paste0(diag_base, ": pre-processed file: ", regfile)) + + return(0) +} diff --git a/esmvaltool/diag_scripts/hyint/hyint_trends.R b/esmvaltool/diag_scripts/hyint/hyint_trends.R new file mode 100644 index 0000000000..1484f07f93 --- /dev/null +++ b/esmvaltool/diag_scripts/hyint/hyint_trends.R @@ -0,0 +1,284 @@ +###################################################### +#-------------Trends routine for HyInt---------------# +#-------------E. Arnone (June 2017)------------------# +###################################################### + + +# MAIN TRENDS FUNCTION +hyint_trends <- function(work_dir, model_idx, season, prov_info) { + + # setup useful strings + var_type <- c("tseries", "tseries-sd", "trend", "trend-stat") + var_type_long <- c( + "Timeseries", "St.dev of timeseries", "Trend coeff. for two intervals ", + "Trend statistics for trend 1 (Estimate, Std. 
Error, t value, Pr(>|t|))" + ) + + # setup parameters + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + + # set main paths + outfile <- getfilename_trends(work_dir, label, model_idx, season) + + # Define regions to be used + nregions <- length(selregions) + + # Define fields to be used (main list loaded from cfg_file) + if (selfields[1] != F) { + field_names <- field_names[selfields] + } + + # Years to be considered based on namelist and cfg_file + years <- year1:year2 + nyears <- length(years) + + print(paste0(diag_base, ": starting timeseries calculation")) + + #-----------------Loading data-----------------------# + # open experiment field + + gridfile <- getfilename_indices(work_dir, diag_base, model_idx, grid = T) + infile <- getfilename_indices(work_dir, diag_base, model_idx, season) + # test if file contains all requested variables and + # keep file open for reading attributes + nc <- nc_open(infile) + nc_att_glob <- ncatt_get(nc, 0) + if (!all(is.element(field_names, names(nc$var)))) { + missing <- (field_names)[!is.element(field_names, names(nc$var))] + print(paste( + "HyInt_trends: missing variable in input indices file: ", + missing + )) + nc_close(nc) + stop("HyInt: check field_names list in configuration file") + } + nc_close(nc) + + # Get seaLandElevation mask + if (masksealand) { + topofile <- getfilename_indices(work_dir, diag_base, ref_idx, topo = T) + if (!file.exists(topofile)) { + create_landseamask( + regrid = gridfile, ref_file = infile, loc = run_dir, + regridded_topo = topofile, topo_only = T + ) + } + relevation <- ncdf_opener(topofile, "topo", rotate = "no") + } + + # remove desert areas if required + # (mean annual precipitation <0.5 mm, Giorgi et al. 2014) + if (removedesert) { + pry <- ncdf_opener(infile, "pry", rotate = "no") + retdes <- which(pry < 0.5) + pry[retdes] <- NA + # create mask with NAs for deserts and 1's for not-desert + retdes2D <- apply(pry * 0, c(1, 2), sum) + 1 + retdes3D <- replicate(dim(pry)[length(dim(pry))], retdes2D) + } + + for (var in field_names) { + rfield <- ncdf_opener(infile, var, rotate = "no") + print("===========================================") + print(paste(infile, var)) + + if (removedesert) { + rfield <- rfield * retdes3D + } + if (masksealand) { + rfield <- apply_elevation_mask(rfield, relevation, sealandelevation, + reverse = reverse_masksealand + ) + } + # store size of time array + ntime <- length(rfield[1, 1, ]) + + #-----------------Calculating timeseries and trends-----------------------# + + # TIMESERIES: + # - select required region and calculate timeseries + # - timeseries are temporarily stored as a "region x time" matrix + # - trends are temporarily stored as a "region x coefficient" matrix + tfield <- matrix(nrow = nregions, ncol = ntime) + tfield_sd <- matrix(nrow = nregions, ncol = ntime) + rtrend <- matrix(nrow = nregions, ncol = 4) + rtrend_stat <- matrix(nrow = nregions, ncol = 4) + for (ireg in 1:nregions) { + iselreg <- selregions[ireg] + # extract data and perform averages + print(paste("Working on ", region_names[iselreg])) + + tfield[ireg, ] <- calc_region_timeseries(ics, ipsilon, rfield, + regions[iselreg, ], + weighted_mean = weight_tseries + ) + tfield_sd[ireg, ] <- calc_region_timeseries(ics, ipsilon, rfield, + regions[iselreg, ], + calc_sd = T + ) + } + + # setup time array + times <- as.numeric(year1) + 1:ntime - 1 + rettimes <- 1:length(times) + if (trend_years[1] != F) { + # apply trend to limited time interval if required + rettimes <- which( (times >= 
trend_years[1]) &
+                          times <= trend_years[2])
+      if (length(trend_years) == 4) {
+        # apply trend also to second time interval if required
+        rettimes2 <- which( (times >= trend_years[3]) &
+                             times <= trend_years[4])
+      }
+    }
+
+    # LOOP through regions to calculate trends as required
+    for (ireg in 1:nregions) {
+      iselreg <- selregions[ireg]
+      if (lm_trend) {
+        # linear regression
+        print("-----------------------------------------------------")
+        print(paste(var, region_names[iselreg]))
+        temp.tfield <- tfield[ireg, rettimes]
+        if (length(which(!is.na(temp.tfield))) < 2) {
+          print("less than 2 points in selected region - skipping")
+        } else {
+          lm_fit <- lm(temp.tfield ~ times[rettimes])
+          lm_sum <- summary(lm_fit)
+          # store trend coefficients (intercept and linear coef.)
+          rtrend[ireg, 1:2] <- lm_fit$coefficients
+          # store trend coef., standard error, t value, Pr(>|t|)
+          rtrend_stat[ireg, ] <- lm_sum$coefficients[2, ]
+          print(lm_sum$coefficients[2, ])
+          if (length(trend_years) == 4) {
+            # apply trend also to second time interval if required
+            temp_tfield2 <- tfield[ireg, rettimes2]
+            if (length(which(!is.na(temp_tfield2))) < 2) {
+              print("less than 2 points in second trend over selected region - skipping")
+            } else {
+              lm_fit2 <- lm(temp_tfield2 ~ times[rettimes2])
+              # store 2nd interval trend coefficients
+              rtrend[ireg, 3:4] <- lm_fit2$coefficients
+            }
+          }
+        }
+      }
+    }
+
+    # assign timeseries and trends to named field variables
+    assign(paste0(var, "_tseries"), tfield)
+    assign(paste0(var, "_tseries-sd"), tfield_sd)
+    assign(paste0(var, "_trend"), rtrend)
+    assign(paste0(var, "_trend-stat"), rtrend_stat)
+  } # close loop over fields
+
+  # store field variables in named lists
+  stseries_list <- c(
+    paste0(field_names, "_tseries"),
+    paste0(field_names, "_tseries-sd"),
+    paste0(field_names, "_trend"),
+    paste0(field_names, "_trend-stat")
+  )
+  rtseries_list <- mget(stseries_list)
+  names(rtseries_list) <- stseries_list
+
+  ##########################################################
+  #------------------------Save to NetCDF------------------#
+  ##########################################################
+
+  # saving output to netcdf files
+  print(paste0(diag_base, "_timeseries: saving data to NetCDF file:"))
+
+  # dimensions definition
+  var_region <- 1:nregions
+  regiondim <- ncdim_def("region", "number", var_region)
+  coeffdim <- ncdim_def("coefficients", "number", 1:4)
+  boundarydim <- ncdim_def("boundaries", "degrees", 1:4)
+  timedim <- ncdim_def(timedimname, "years since 1950-01-01 00:00:00",
+                       (years - 1950),
+                       unlim = T
+  )
+
+  # variables definition
+  for (var in field_names) {
+    for (itype in 1:length(var_type)) {
+      svar <- paste0(var, "_", var_type[itype])
+      rfield <- get(svar, rtseries_list)
+      rfield[is.nan(rfield)] <- NA
+      # copy and update attributes
+      metadata <- getmetadata_indices(var, infile)
+      long_name <- metadata$long_name
+      description <<- paste0(var_type_long[itype], " of ", metadata$long_name)
+      units <- metadata$units
+      missval <- metadata$missing_value
+      # variable definitions
+      var_ncdf <- ncvar_def(
+        svar, units, list(regiondim, timedim), missval,
+        longname = long_name, prec = "single", compression = 1
+      )
+      if (itype > 2) {
+        # trends
+        var_ncdf <- ncvar_def(
+          svar, units, list(regiondim, coeffdim), missval,
+          longname = long_name, prec = "single", compression = 1
+        )
+      }
+      assign(paste0("var", svar), var_ncdf)
+      assign(paste0("field", svar), rfield)
+      assign(paste0(svar, "_", "description"), description)
+    }
+  }
+
+  varregions <- ncvar_def(
+    "regions", "degrees", list(regiondim, boundarydim), -999,
+    "region boundaries",
+    prec = "single", compression = 1
+  )
+  regions_description <- "regions over which averages are performed"
+  fieldregions <- regions[selregions, ]
+  fieldregion_names <- region_names[selregions]
+  fieldregion_codes <- region_codes[selregions]
+
+  # Netcdf file creation
+  print(paste0(diag_base, ": saving output to ", outfile))
+  namelist <- c("regions", stseries_list)
+  varnamelist <- paste0("var", c(namelist))
+  nclist <- mget(varnamelist)
+  ncfile <- nc_create(outfile, nclist)
+
+  # put variables into the ncdf file
+  for (var in namelist) {
+    ndims <- get(paste0("var", var))$ndims
+    tmp.field <- get(paste0("field", var))
+    ncvar_put(ncfile, var, tmp.field,
+              start = rep(1, ndims),
+              count = rep(-1, ndims)
+    )
+    ncatt_put(ncfile, var, "description", get(paste0(var, "_description")))
+  }
+
+  # put additional attributes into dimension and data variables
+  ncatt_put(ncfile, "regions", "regionnames", paste(fieldregion_names,
+                                                    collapse = " "))
+  ncatt_put(ncfile, "regions", "region_codes",
+            paste(fieldregion_codes, collapse = " "))
+
+  nc_close(ncfile)
+
+  # Set provenance for this output file
+  caption <- paste0("Hyint timeseries and trends for years ",
+                    year1, " to ", year2,
+                    " according to ", models_name[model_idx])
+  xprov <- list(ancestors = list(infile),
+                model_idx = list(model_idx),
+                caption = caption)
+
+  # Store provenance in main provenance list
+  prov_info[[outfile]] <- xprov
+
+  print(paste(diag_base, ": timeseries netCDF file saved"))
+  return(prov_info)
+}
diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42a.py b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42a.py
index 3240dddec2..63784743ab 100644
--- a/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42a.py
+++ b/esmvaltool/diag_scripts/ipcc_ar5/ch09_fig09_42a.py
@@ -1,207 +1,205 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-
-
-"""Plot figure 9.42a of IPCC AR5 chapter 9.
-
-###############################################################################
-ipcc_ar5/ch09_fig09-42a.py
-Author: Manuel Schlund (DLR, Germany)
-CRESCENDO project
-###############################################################################
+"""Diagnostic script to plot figure 9.42a of IPCC AR5 chapter 9.
 
 Description
 -----------
- Calculate and plot the equilibrium climate sensitivity (ECS) vs. the global
- mean surface temperature (GMSAT) for several CMIP5 models (see IPCC AR5 WG1
- ch. 9, fig. 9.42a).
-
-Configuration options
----------------------
- ecs_filename : Name of the netcdf in which the ECS data is saved.
- output_name : Name of the output files.
- save : Keyword arguments for the fig.saveplot() function.
- axes_functions : Plot appearance functions.
-
-###############################################################################
+Calculate and plot the effective climate sensitivity (ECS) vs. the global
+mean surface temperature (GMSAT) (see IPCC AR5 WG1 ch.9, fig. 9.42a).
+
+Author
+------
+Manuel Schlund (DLR, Germany)
+
+Project
+-------
+CRESCENDO
+
+Configuration options in recipe
+-------------------------------
+save : dict, optional
+    Keyword arguments for the `fig.saveplot()` function.
+axes_functions : dict, optional
+    Keyword arguments for the plot appearance functions.
+dataset_style : str, optional
+    Dataset style file (located in
+    :mod:`esmvaltool.diag_scripts.shared.plot.styles_python`).
+matplotlib_style : str, optional
+    Matplotlib style file (located in
+    :mod:`esmvaltool.diag_scripts.shared.plot.styles_python.matplotlib`).
""" - import logging import os -from datetime import datetime -import cf_units import iris +from iris import Constraint -import esmvaltool.diag_scripts.shared as e -import esmvaltool.diag_scripts.shared.names as n +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, extract_variables, get_diagnostic_filename, + get_plot_filename, group_metadata, io, plot, run_diagnostic, + variables_available) logger = logging.getLogger(os.path.basename(__file__)) -def plot_data(cfg, datasets): +def get_provenance_record(project, ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + record = { + 'caption': + ('Effective climate sensitivity (ECS) against the global ' + 'mean surface temperature of {} models, both for the ' + 'period 1961-1990 (larger symbols) and for the ' + 'pre-industrial control runs (smaller symbols).'.format(project)), + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_types': ['scatter'], + 'authors': ['schl_ma'], + 'references': ['flato13ipcc'], + 'realms': ['atmos'], + 'themes': ['phys'], + 'ancestors': + ancestor_files, + } + return record + + +def plot_data(cfg, hist_cubes, pi_cubes, ecs_cube): """Plot data.""" - if not cfg[n.WRITE_PLOTS]: - return - filepath = os.path.join(cfg[n.PLOT_DIR], - cfg.get('output_name', 'fig09-42a') + '.' + - cfg[n.OUTPUT_FILE_TYPE]) + if not cfg['write_plots']: + return None x_data = [] y_data = [] dataset_names = [] plot_kwargs = [] - names = datasets.get_info_list(n.DATASET, short_name='ecs') - ecs_data = datasets.get_data_list(short_name='ecs') - - # Historical - x_data.extend(ecs_data) - y_data.extend(datasets.get_data_list(short_name='tas', exp=HISTORICAL)) - dataset_names.extend(names) - for name in names: - plot_kwargs.append({'label': name, 'linestyle': 'none', - 'markersize': 10}) - - # piControl - x_data.extend(ecs_data) - y_data.extend(datasets.get_data_list(short_name='tas', exp=PICONTROL)) - dataset_names.extend(names) - for name in names: - plot_kwargs.append({'label': '_' + name, 'linestyle': 'none', - 'markersize': 6}) + + # Collect data + for dataset in hist_cubes: + + # Historical data + x_data.append(ecs_cube.extract(Constraint(dataset=dataset)).data) + y_data.append(hist_cubes[dataset].data) + dataset_names.append(dataset) + plot_kwargs.append({ + 'label': dataset, + 'linestyle': 'none', + 'markersize': 10, + }) + + # PiControl data + x_data.append(ecs_cube.extract(Constraint(dataset=dataset)).data) + y_data.append(pi_cubes[dataset].data) + dataset_names.append(dataset) + plot_kwargs.append({ + 'label': '_' + dataset, + 'linestyle': 'none', + 'markersize': 6, + }) # Plot data - e.plot.multi_dataset_scatterplot( + path = get_plot_filename('ch09_fig09_42a', cfg) + plot.multi_dataset_scatterplot( x_data, y_data, dataset_names, - filepath, + path, plot_kwargs=plot_kwargs, save_kwargs=cfg.get('save', {}), - axes_functions=cfg.get('axes_functions', {})) - return + axes_functions=cfg.get('axes_functions', {}), + dataset_style_file=cfg.get('dataset_style'), + mpl_style_file=cfg.get('matplotlib_style'), + ) + return path -def write_data(cfg, datasets, variables): +def write_data(cfg, hist_cubes, pi_cubes, ecs_cube): """Write netcdf file.""" - if cfg[n.WRITE_PLOTS]: - data_ecs = datasets.get_data_list(short_name='ecs') - data_tas_hist = datasets.get_data_list(short_name='tas', - exp=HISTORICAL) - data_tas_picontrol = datasets.get_data_list(short_name='tas', - exp=PICONTROL) - models = datasets.get_info_list(n.DATASET, short_name='ecs') - dataset_coord = 
iris.coords.AuxCoord(models, long_name='models') - tas_hist_coord = iris.coords.AuxCoord( - data_tas_hist, - attributes={'experiment': HISTORICAL}, - **variables.iris_dict('tas')) - tas_picontrol_coord = iris.coords.AuxCoord( - data_tas_picontrol, - attributes={'experiment': PICONTROL}, - **variables.iris_dict('tas')) - attr = {'created_by': 'ESMValTool version {}'.format(cfg[n.VERSION]) + - ', diagnostic {}'.format(cfg[n.SCRIPT]), - 'creation_date': datetime.utcnow().isoformat(' ') + 'UTC'} - cube = iris.cube.Cube(data_ecs, long_name=variables.long_name('ecs'), - var_name='ecs', units=variables.units('ecs'), - aux_coords_and_dims=[(dataset_coord, 0), - (tas_hist_coord, 0), - (tas_picontrol_coord, 0)], - attributes=attr) - - # Save file - filepath = os.path.join(cfg[n.WORK_DIR], - cfg.get('output_name', 'fig09_42a') + '.nc') - iris.save(cube, filepath) - logger.info("Writing %s", filepath) - - -############################################################################### -# Setup diagnostic -############################################################################### - -# Experiments -PICONTROL = 'piControl' -HISTORICAL = 'historical' -ABRUPT4XCO2 = 'abrupt4xCO2' -DIFF = 'difference of abrupt4xCO2 and piControl' - -# Default settings -DEFAULT_TAS_UNITS = 'celsius' + datasets = list(hist_cubes) + + # Collect data + data_ecs = [] + data_hist = [] + data_pi = [] + for dataset in datasets: + data_ecs.append(ecs_cube.extract(Constraint(dataset=dataset)).data) + data_hist.append(hist_cubes[dataset].data) + data_pi.append(pi_cubes[dataset].data) + + # Create cube + dataset_coord = iris.coords.AuxCoord(datasets, long_name='dataset') + tas_hist_coord = iris.coords.AuxCoord( + data_hist, + attributes={'exp': 'historical'}, + **extract_variables(cfg, as_iris=True)['tas']) + tas_picontrol_coord = iris.coords.AuxCoord( + data_pi, + attributes={'exp': 'piControl'}, + **extract_variables(cfg, as_iris=True)['tas']) + cube = iris.cube.Cube( + data_ecs, + var_name='ecs', + long_name='Effective Climate Sensitivity (ECS)', + aux_coords_and_dims=[(dataset_coord, 0), (tas_hist_coord, 0), + (tas_picontrol_coord, 0)]) + + # Save file + path = get_diagnostic_filename('ch09_fig09_42a', cfg) + io.iris_save(cube, path) + return path def main(cfg): - """Run the diagnostic. - - Parameters - ---------- - cfg : dict - Configuration dictionary of the recipe. 
- - """ - ########################################################################### - # Read recipe data - ########################################################################### - - # Dataset data containers - data = e.Datasets(cfg) - logging.debug("Found datasets in recipe:\n%s", data) - - # Variables - var = e.Variables(cfg) - var.modify_var('tas', units=cfg.get('tas_units', DEFAULT_TAS_UNITS)) - logging.debug("Found variables in recipe:\n%s", var) - - # Get ECS data (ignore metadata.yml files) - input_dirs = [d for d in cfg[n.INPUT_FILES] - if not d.endswith(n.METADATA_YAML_FILE)] - if len(input_dirs) != 1: - logging.error("Input files directory from ancestors should contain " - "exactly one directory (ECS directory)") - ecs_filepath = os.path.join(input_dirs[0], - cfg.get('ecs_filename', 'ecs') + '.nc') - - ########################################################################### - # Read data - ########################################################################### - - # Create iris cube for each dataset - for dataset_path in data: - cube = iris.load(dataset_path, var.standard_names())[0] - - # Convert units if desired - cube.convert_units(cfg.get('tas_units', DEFAULT_TAS_UNITS)) - - # Total temporal means - cube = cube.collapsed([n.TIME], iris.analysis.MEAN) - data.set_data(cube.data, dataset_path) - - # Create iris cube for ECS data - cube = iris.load_cube(ecs_filepath) - var.add_vars(ecs={n.SHORT_NAME: cube.var_name, - n.LONG_NAME: cube.long_name, - n.UNITS: cube.units.format(cf_units.UT_DEFINITION)}) - for (idx, model) in enumerate(cube.coord('datasets').points): - data.add_dataset('ecs_' + model, - data=cube.data[idx], - dataset=model, - short_name='ecs') - - ########################################################################### - # Plot data - ########################################################################### + """Run the diagnostic.""" + input_data = cfg['input_data'].values() + project = list(group_metadata(input_data, 'project').keys()) + project = [p for p in project if 'obs' not in p.lower()] + if len(project) == 1: + project = project[0] + + # Check if tas is available + if not variables_available(cfg, ['tas']): + raise ValueError("This diagnostic needs 'tas' variable") + + # Get ECS data + ecs_filepath = io.get_ancestor_file(cfg, 'ecs.nc') + ecs_cube = iris.load_cube(ecs_filepath) + + # Create iris cubes for each dataset + hist_cubes = {} + pi_cubes = {} + for data in input_data: + name = data['dataset'] + logger.info("Processing %s", name) + cube = iris.load_cube(data['filename']) + + # Preprocess cubes + cube.convert_units(cfg.get('tas_units', 'celsius')) + cube = cube.collapsed(['time'], iris.analysis.MEAN) + + # Save cubes + if data.get('exp') == 'historical': + hist_cubes[name] = cube + elif data.get('exp') == 'piControl': + pi_cubes[name] = cube + else: + pass - plot_data(cfg, data) + # Plot data + plot_path = plot_data(cfg, hist_cubes, pi_cubes, ecs_cube) - ########################################################################### - # Write nc file - ########################################################################### + # Write netcdf file + netcdf_path = write_data(cfg, hist_cubes, pi_cubes, ecs_cube) - write_data(cfg, data, var) + # Provenance + ancestor_files = [d['filename'] for d in input_data] + provenance_record = get_provenance_record(project, ancestor_files) + if plot_path is not None: + provenance_record['plot_file'] = plot_path + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(netcdf_path, 
provenance_record)


 if __name__ == '__main__':
-    with e.run_diagnostic() as config:
+    with run_diagnostic() as config:
         main(config)
diff --git a/esmvaltool/diag_scripts/ipcc_ar5/tsline.ncl b/esmvaltool/diag_scripts/ipcc_ar5/tsline.ncl
new file mode 100644
index 0000000000..de81fd4692
--- /dev/null
+++ b/esmvaltool/diag_scripts/ipcc_ar5/tsline.ncl
@@ -0,0 +1,455 @@
+; #############################################################################
+; PLOTS TIME SERIES
+; Author: Irene Cionni (ENEA, Italy), Veronika Eyring (DLR, Germany)
+; ESMVal project
+; #############################################################################
+;
+; Description
+;    This script is based on the original CCMVal script tsline.ncl and has
+;    been further developed as part of the ESMValTool.
+;
+; Required diag_script_info attributes (diagnostic specific)
+;    styleset: as in diag_scripts/shared/plot/style.ncl functions
+;
+; Optional diag_script_info attributes (diagnostic specific)
+;    time_avg: type of time average (currently only "yearly" and "monthly" are
+;              available).
+;    ts_anomaly: calculates anomalies with respect to the first 10-year
+;                average
+;    ref_start: start year of reference period for anomalies
+;    ref_end: end year of reference period for anomalies
+;    plot_units: variable unit for plotting
+;
+; Caveats
+;
+; Modification history
+;    20181112-A_bock_ls: code rewritten for ESMValTool v2.0
+;    20170623-A_laue_ax: added tags for reporting
+;    20160905-A-Bock_li: implemented multi-model mean and variable reference
+;                        period for anomaly with reference mean value.
+;    20151027-A_laue_ax: moved call to 'write_references' to the beginning
+;                        of the code
+;    20150622-A_wenz_sa: added optional anomaly calculation, choice of
+;                        area operations (sum, average) and detrending of
+;                        time series.
+;    20150420-A_righ_ma: adapted to new structure. Some of the original
+;                        features not ported yet (seasonal average, smoothing,
+;                        multi-model mean, etc.).
+;    20??????-A_cion_ir: written.
+;
+; #############################################################################
+
+load "$diag_scripts/../interface_scripts/interface.ncl"
+
+load "$diag_scripts/shared/latlon.ncl"
+load "$diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/shared/scaling.ncl"
+load "$diag_scripts/shared/ensemble.ncl"
+
+load "$diag_scripts/shared/plot/style.ncl"
+load "$diag_scripts/shared/plot/xy_line.ncl"
+
+
+begin
+
+  enter_msg(DIAG_SCRIPT, "")
+
+  var0 = variable_info[0]@short_name
+  field_type0 = variable_info[0]@field
+  info_items = select_metadata_by_name(input_file_info, var0)
+  datasetnames = metadata_att_as_array(info_items, "dataset")
+  dim_MOD = ListCount(info_items)
+  dim_VAR = ListCount(variable_info)
+
+  ; Create output plot directory
+  plot_dir = config_user_info@plot_dir
+  system("mkdir -p " + plot_dir)
+
+  ; Check field type
+  plot_fields = (/"T2Ms"/)
+  ind_f = any(field_type0 .eq.
plot_fields) + if (all(ismissing(ind_f))) then + error_msg("f", DIAG_SCRIPT, "", "can't use field " + field_type0) + end if + delete(plot_fields) + delete(ind_f) + + ; Plot file type + file_type = config_user_info@output_file_type + if (ismissing(file_type)) then + file_type = "ps" + end if + + ; Time averaging + if (isatt(diag_script_info, "time_avg")) then + time_avg = diag_script_info@time_avg + if (all(time_avg.ne.(/"yearly", "monthly"/))) then + error_msg("f", DIAG_SCRIPT, "", \ + "time averaging option " + time_avg + " not yet " + \ + "implemented") + end if + else + time_avg = "monthly" + end if + + ; Check for required settings + exit_if_missing_atts(diag_script_info, "styleset") + + ; Set default values for non-required diag_script_info attributes + set_default_att(diag_script_info, "time_avg", "monthly") + set_default_att(diag_script_info, "ts_anomaly", "noanom") + + ; Determine time range + start_year = min(metadata_att_as_array(info_items, "start_year")) + end_year = max(metadata_att_as_array(info_items, "end_year")) + all_years = ispan(start_year, end_year, 1) + + ; Create time coordinate + if (time_avg.eq."monthly") then + ntime = 12 * (end_year - start_year + 1) + time = new(ntime, integer) + do yy = start_year, end_year + do mm = 1, 12 + time(12 * (yy - start_year) + mm - 1) = 100 * yy + mm + end do + end do + else if (time_avg.eq."yearly") then + ntime = end_year - start_year + 1 + time = new(ntime, integer) + time = ispan(start_year, end_year, 1) + end if + end if + + ; Set index of the reference (and alternative) dataset + if (isatt(variable_info[0], "reference_dataset")) then + ref_ind = ind(datasetnames.eq.variable_info[0]@reference_dataset) + A_ref = read_data(info_items[ref_ind]) + end if + + ; individual case for HadCRUT4 observations + ; FIX-ME: mean value comes from climatology file (absolute.nc). + ; There are no missing values as in the anomaly data. + clim_ind = -1 + if (any(datasetnames.eq."HadCRUT4-clim")) then + clim_ind = ind(datasetnames.eq."HadCRUT4-clim") + ind_wo_clim = ind(datasetnames.ne."HadCRUT4-clim") + end if + + ; Anomaly + if (isatt(diag_script_info, "ts_anomaly")) then + anom = diag_script_info@ts_anomaly + if (anom .eq. "anom") then + if (isatt(diag_script_info, "ref_start") .and. \ + isatt(diag_script_info, "ref_end")) then + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + if ((ref_start.lt.start_year) .or. (ref_end.gt.end_year) .or. \ + (ref_end.lt.ref_start)) then + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not properly defined") + end if + else + error_msg("f", DIAG_SCRIPT, "", \ + "period for reference years is not defined " + \ + "(needed for anomaly)") + end if + + anom_ref = new((/dim_MOD/), double) + anom_ref!0 = "model" + anom_ref&model = datasetnames + end if + else + anom = "noanom" + end if + + ; TODO + model_arr_stddev = 0 + + ; Create model array + model_arr = new((/dim_MOD, ntime/), double) + model_arr!0 = "model" + model_arr!1 = "time" + model_arr&model = datasetnames + model_arr&time = time + delete(time) + delete(ntime) + + ; Loop over models + do imod = 0, dim_MOD - 1 + + if(imod.ne.clim_ind) then + + ; Read data + A0 = read_data(info_items[imod]) + dnames = getVarDimNames(A0) + + ; Convert units for plotting (if required) + if (isatt(diag_script_info, "plot_units")) then + A0 = convert_units(A0, diag_script_info@plot_units) + end if + + ; time, lat, lon + if (field_type0.eq."T2Ms") then + + if (isatt(variable_info[0], "reference_dataset")) then + + if(imod .ne. 
ref_ind) then + + ; Determine start/end year + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year + A_ref_mask = time_operations(A_ref, start_year, \ + end_year, "extract", "", 0) + + A0_mask = A0 + A0_mask = where(A_ref_mask.eq.A_ref_mask@_FillValue, \ + A_ref_mask@_FillValue, A0) + + delete(A_ref_mask) + end if + + end if + + ; Anomaly + if(anom .eq. "anom") then + A0_timavg_ref = time_operations(A0, ref_start, ref_end, \ + "average", "annualclim", True) + if(imod .ne. ref_ind) then + do i = 0, dimsizes(A0&time) - 1 + A0_mask(i, :, :) = A0_mask(i, :, :) - A0_timavg_ref(:, :) + end do + else + do i = 0, dimsizes(A0&time) - 1 + A0(i, :, :) = A0(i, :, :) - A0_timavg_ref(:, :) + end do + end if + + anom_ref(imod) = area_operations(A0_timavg_ref, -90., 90., \ + 0., 360., "average", True) + delete(A0_timavg_ref) + + end if + + if(imod .ne. ref_ind) then + procmod = area_operations(A0_mask, -90., 90., 0., \ + 360., "average", True) + else + procmod = area_operations(A0, -90., 90., 0., 360., \ + "average", True) + end if + + end if + + ; time + if (field_type0.eq."T0M") then + + ; Anomaly + if(anom .eq. "anom") then + A0_timavg_ref = time_operations(A0, ref_start, ref_end, \ + "average", "annualclim", \ + True) + do i = 0, dimsizes(A0&time) - 1 + A0(i) = A0(i) - A0_timavg_ref + end do + end if + + procmod = A0 + + end if + + if (.not.isdefined("procmod")) then + error_msg("f", diag_script_info, "", "cannot process this " + \ + "field type " + field_type0) + end if + delete(A0) + + ; Detrend + if (isatt(diag_script_info, "ts_detrend")) then + detr = diag_script_info@ts_detrend + tmp = runave_Wrap(procmod, 2, 0) + delete(procmod) + procmod = tmp + delete(tmp) + else + detr = "nodetr" + end if + + ; Calculate time average + if (time_avg.ne."monthly") then + A0_timavg = time_operations(procmod, -1, -1, "average", \ + time_avg, True) + else + A0_timavg = procmod + end if + delete(procmod) + procmod = A0_timavg + delete(A0_timavg) + + ; Match time coordinate + if (time_avg.eq."monthly") then + date = cd_calendar(procmod&time, -1) + else if (time_avg.eq."yearly") then + date = procmod&year + end if + end if + idx1 = ind(date(0).eq.model_arr&time) + idx2 = ind(date(dimsizes(date) - 1).eq.model_arr&time) + model_arr(imod, idx1:idx2) = (/procmod/) + if (imod.eq.0) then + copy_VarAtts(procmod, model_arr) + end if + delete(procmod) + delete(date) + + end if + + end do + + ; individual case for HadCRUT4 observations + ; FIX-ME: mean value comes from climatology file (absolute.nc). + ; There are no missing values as in the anomaly data. + if (any(datasetnames.eq."HadCRUT4-clim")) then + ; Read data + A0 = read_data(info_items[clim_ind]) + dnames = getVarDimNames(A0) + + ; Convert units for plotting (if required) + if (isatt(diag_script_info, "plot_units")) then + A0 = convert_units(A0, diag_script_info@plot_units) + end if + + A0_timavg_clim = time_operations(A0, 1990, 1990, "average", \ + "annualclim", True) + clim_ref = area_operations(A0_timavg_clim, -90., 90., 0., 360., \ + "average", True) + anom_ref(ref_ind) = clim_ref + + tmp = model_arr(ind_wo_clim, :) + delete(model_arr) + model_arr = tmp + delete(tmp) + tmp = anom_ref(ind_wo_clim) + delete(anom_ref) + anom_ref = tmp + delete(tmp) + end if + + ; Convert time coordinate to years (required by the plot routine) + if (time_avg.eq."monthly") then + year = model_arr&time / 100 + xmin = min(year) + xmax = max(year) + month = model_arr&time - 100 * year + time = todouble(year + month/12.) 
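+    ; e.g. a YYYYMM stamp of 200306 gives year = 2003, month = 6 and
+    ; thus time = 2003.5 (fractional year; numbers purely illustrative)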
+ delete(model_arr&time) + model_arr&time = time + delete(time) + delete(year) + delete(month) + else if (time_avg.eq."yearly") + xmin = min(model_arr&time) + xmax = max(model_arr&time) + tmp = todouble(model_arr&time) + delete(model_arr&time) + model_arr&time = tmp + delete(tmp) + end if + end if + + ; Optional output to NetCDF + if (config_user_info@write_netcdf.eq."True") then + out_path = config_user_info@work_dir + system("mkdir -p " + out_path) + out_path = out_path + "tsline_" + var0 + "_" + anom + "_" + detr + "_" \ + + start_year + "-" + end_year + ".nc" + model_arr@ncdf = out_path + model_arr@diag_script = DIAG_SCRIPT + model_arr@var = var0 + ncdf_outfile = ncdf_write(model_arr, out_path) + end if + + ; Define workstation + outfile = config_user_info@plot_dir + var0 + "_" + anom + "_" + \ + detr + "_" + start_year + "-" + end_year + wks = gsn_open_wks(file_type, outfile) + + ; Set resources + res = True + res@trXMinF = xmin + res@trXMaxF = xmax + ; res@trXMaxF = xmax + 0.25 * (xmax - xmin) + res@tmXBMode = "Explicit" + if (xmax - xmin.gt.20) then + res@tmXBValues = ispan(xmin, xmax, 10) + res@tmXBLabels = ispan(xmin, xmax, 10) + res@tmXBMinorValues = ispan(xmin, xmax, 5) + else + res@tmXBValues = ispan(xmin, xmax, 5) + res@tmXBLabels = ispan(xmin, xmax, 5) + res@tmXBMinorValues = ispan(xmin, xmax, 1) + end if + res@tmXBLabelAngleF = 45 + res@tmXBLabelJust = "CenterRight" + + if isatt(model_arr, "long_name") + res@tiMainString = model_arr@long_name + end if + + if (isatt(diag_script_info, "ts_anomaly")) then + res@tiYAxisString = var0 + " Anomaly" + " [" + model_arr@units + "]" + else + res@tiYAxisString = var0 + " [" + model_arr@units + "]" + end if + + res0 = True + ref_start = diag_script_info@ref_start + ref_end = diag_script_info@ref_end + res0@tiYAxisString = tostring(ref_start) + "-" + tostring(ref_end) \ + + " " + var0 + " Mean" + " [" + model_arr@units + "]" + + if(anom .eq. "anom") then + xy_line_anom(wks, anom_ref, model_arr, model_arr&time, \ + model_arr_stddev, ref_start, ref_end, res, res0, info_items) + else + xy_line(wks, model_arr, model_arr&time, model_arr_stddev, res, info_items) + end if + + log_info(" wrote " + outfile + "." + file_type) + +; ; add meta data to plot (for reporting) +; +; climofiles = new(dim_MOD, string) +; climofiles = input_file_info@filename +; +; domain = "DM_global" +; +; if (anom .eq. "anom") then +; stat = "ST_anomaly" +; captionadd = "anomalies" +; else +; stat = "ST_mean" +; captionadd = "means" +; end if +; +; alltags = array_append_record(tags, (/"PT_time", stat, domain/), 0) +; +; if (diag_script_info@multi_model_mean.eq."y") then +; allmodelnames = array_append_record(dataset_info@dataset, \ +; (/"multi-model-mean"/), 0) +; else +; allmodelnames = dataset_info@dataset +; end if +; +; caption = "Time series of the " + captionadd + " for variable " \ +; + variables(0) + ", similar to IPCC AR5, fig. 9.8." +; +; id = DIAG_SCRIPT + "_" + variables(0) +; +; contrib_authors = (/"A_cion_ir", "A_righ_ma", "A_wenz_sa", "A_bock_ls"/) +; +; ; ESMValMD(wks@fullname, alltags, caption, id, variables(0), \ +; ; allmodelnames, climofiles, DIAG_SCRIPT, contrib_authors) + + leave_msg(DIAG_SCRIPT, "") + +end diff --git a/esmvaltool/diag_scripts/landcover/landcover.py b/esmvaltool/diag_scripts/landcover/landcover.py new file mode 100644 index 0000000000..8b6424e227 --- /dev/null +++ b/esmvaltool/diag_scripts/landcover/landcover.py @@ -0,0 +1,534 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Landcover analysis plots. 
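+
+For example, a grid cell of 2.5e+9 m2 with a 40 % cover fraction
+contributes 2.5e+9 * 0.4 / 1.0e+12 = 1.0e-3 million km2 to the
+accumulated extent (numbers purely illustrative).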
+
+###############################################################
+landcover/landcover.py
+Authors ESMValToolV1 Version
+    Stefan Hagemann (stefan.hagemann@hzg.de)
+    Alexander Loew
+    Benjamin Mueller (b.mueller@iggf.geo.uni-muenchen.de)
+Port to ESMValTool Version 2
+    Tobias Stacke (tobias.stacke@mpimet.mpg.de)
+###############################################################
+
+Description
+-----------
+    Computes accumulated and fractional extent for major land
+    cover types (bare soil, crops, grasses, shrubs and trees)
+    for the whole globe as well as separated into regions
+    (tropics, northern extratropics and southern extratropics).
+    The fractions are compared to ESA-CCI land cover data.
+
+    ESA-CCI land cover data needs to be downloaded separately
+    by the user and converted to netCDF files containing the
+    grid cell fractions for the respective cover type.
+    The data and a conversion tool are available at
+    https://maps.elie.ucl.ac.be/CCI/viewer/ upon registration.
+    Detailed instructions for the installation and use of the
+    CCI-LC user tools are available on the same page.
+
+    Note that all experiments will be regridded onto the
+    grid of the ESA-CCI data, thus it is recommended to
+    download the coarsest resolution, which is sufficient for
+    the planned study. For testing, ESA-CCI data at 0.5 degree
+    resolution was used.
+"""
+
+import logging
+import os
+import numpy as np
+
+import iris
+import matplotlib.pyplot as plt
+from matplotlib.backends.backend_pdf import PdfPages
+import esmvaltool.diag_scripts.shared as diag
+from esmvaltool.diag_scripts.shared import ProvenanceLogger
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def write_plotdata(infos, modnam, values):
+    """Write region values for all datasets of one variable.
+
+    Parameters
+    ----------
+    infos : list
+        contains infos about configuration, regions and provenance
+    modnam : dict
+        containing list of dataset names for specific metrics
+    values : dict
+        dictionary of nested list containing the keys
+        area --> region sums in 1.0e+6 km2
+        frac --> region average fractions in %
+    """
+    cfg, regnam, prov_rec, var = infos
+    # Header information for different metrics
+    filehead = {
+        'area':
+        'Accumulated land coverage for ' + var +
+        ' in different regions [1.0e+6 km2]',
+        'frac':
+        'Average land cover fraction for ' + var + ' in different regions [%]',
+        'bias':
+        'Bias in average land cover fraction for ' + var +
+        ' compared to reference [%]'
+    }
+    # Write experiment data
+    for metric in values.keys():
+        filepath = os.path.join(cfg[diag.names.WORK_DIR],
+                                '_'.join([metric, var]) + '.txt')
+        ncol = len(regnam)
+        with open(filepath, 'w') as fout:
+            header = '{:35} ' + ncol * ' {:>12}' + '\n'
+            body = '{:35} ' + ncol * ' {:12.4f}' + '\n'
+            line = [
+                ' ',
+            ] + regnam
+            fout.write(filehead[metric] + '\n\n')
+            fout.write(header.format(*line))
+            for irow, row in enumerate(values[metric]):
+                line = [modnam[metric][irow]] + row
+                fout.write(body.format(*line))
+
+        # provenance tracking, only if comparison == variable
+        if prov_rec is not None:
+            with ProvenanceLogger(cfg) as provenance_logger:
+                provenance_logger.log(filepath, prov_rec[var])
+
+
+def init_plot(cfg, var):
+    """Prepare plot and set defaults.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe
+    var : str
+        variable short name
+    """
+    if cfg.get('output_file_type', 'png') == 'pdf':
+        filepath = os.path.join(cfg[diag.names.PLOT_DIR],
+                                '_'.join(['metrics', var]) + ".pdf")
+        pdf = PdfPages(filepath)
+    else:
+        pdf = None
+
+    nicename = {
+        'baresoilFrac': 'bare soil covered',
+        'treeFrac': 'tree covered',
+        'grassFrac': 'grass covered',
+        'cropFrac': 'crop covered',
+        'shrubFrac': 'shrub covered'
+    }
+
+    info = {
+        # Plot titles
+        'pt': {
+            'area': ' '.join(['Accumulated',
+                              nicename.get(var, var), 'area']),
+            'frac': ' '.join(['Average',
+                              nicename.get(var, var), 'fraction']),
+            'bias': ' '.join(['Average',
+                              nicename.get(var, var), 'fraction bias'])
+        },
+        # Labels for y axis
+        'yl': {
+            'area': r'Area [$10^6$ km$^2$]',
+            'frac': r'Fraction [%]',
+            'bias': r'Bias [%]'
+        },
+        # Plot directory
+        'pd': cfg[diag.names.PLOT_DIR]
+    }
+
+    return pdf, info
+
+
+def plot_bars(info, metric, data, regnam):
+    """Create the bar chart for one metric.
+
+    Parameters
+    ----------
+    info : dict
+        compilation of plot properties
+    metric : str
+        plot type [area, fraction or bias]
+    data : list
+        list of floats for plotting
+    regnam : list
+        list containing the region names
+    """
+    fig, axs = plt.subplots(nrows=1, ncols=1, sharex=False)
+    axs.set_title(info['pt'][metric])
+    axs.set_ylabel(info['yl'][metric])
+    nbar, ncat = np.array(data).shape
+    index = np.arange(0, (nbar + 1) * ncat, nbar + 1)
+    xticks = np.linspace((nbar + 1) / 2.0,
+                         (nbar + 1) * ncat - (nbar + 1) / 2.0, ncat) - 1.0
+    axs.set_xticklabels(regnam)
+    axs.set_xticks(xticks)
+    for irow, row in enumerate(data):
+        axs.bar(index + irow, row)
+
+    return fig
+
+
+def finish_plot(fig, labels, pltdir, name, pdf):
+    """Add legend and save plot to either png or pdf.
+
+    Parameters
+    ----------
+    fig : obj
+        actual figure
+    labels : list
+        list of plot labels
+    pltdir : str
+        target directory to store plots
+    name : str
+        filename for png output without extension
+    pdf : obj
+        pdf object collecting all pages in case of pdf output
+    """
+    fig.subplots_adjust(bottom=0.20)
+    caxe = fig.add_axes([0.05, 0.01, 0.9, 0.20])
+    for lbl in labels:
+        caxe.plot([], [], lw=4, label=lbl)
+    caxe.legend(ncol=2, loc="lower center", fontsize='small')
+    caxe.set_axis_off()
+
+    if pdf is None:
+        filepath = os.path.join(pltdir, name + ".png")
+        fig.savefig(filepath)
+    else:
+        fig.savefig(pdf, dpi=80, format='pdf')
+    plt.close()
+
+
+def make_landcover_bars(cfg, regnam, modnam, values, var):
+    """Make bar plots for regional values.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe
+    regnam : list
+        list containing the region names
+    modnam : dict
+        containing list of dataset names for specific metrics
+    values : dict
+        dictionary of nested list containing the keys
+        area --> region sums in 1.0e+6 km2
+        frac --> region average fractions in %
+    var : str
+        variable short name
+    """
+    # Get colorscheme from recipe
+    plt.style.use(cfg.get('colorscheme', 'seaborn'))
+
+    # Set up plot
+    pdf, info = init_plot(cfg, var)
+
+    # Loop over metrics
+    for metr in values.keys():
+        # Plot bars
+        fig = plot_bars(info, metr, values[metr], regnam)
+        # Add legend and finish plot
+        finish_plot(fig, modnam[metr], info['pd'], '_'.join([metr, var]), pdf)
+
+    if pdf is not None:
+        pdf.close()
+
+
+def sel_lats(latlist, bounds):
+    """Return subset of latitudes within bounds.
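+
+    For example, latitudes [10, 40, 80] with bounds [30, 90] give the
+    subset [40, 80]; the bound values themselves are excluded (strict
+    inequality).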
+ + Parameters + ---------- + latlist : numpy array + contains all latitudes for the cube + bounds : list + bounds for latitude selection + """ + subset = [] + for lat in latlist.tolist(): + if min(bounds) < lat < max(bounds): + subset.append(lat) + + return subset + + +def get_timmeans(attr, cubes, refset, prov_rec): + """Return time averaged data cubes. + + Parameters + ---------- + attr : dict + contains metadata for dataset. + cubes : dict + collection of iris data cubes. + refset : dict + reference dataset names for all variables. + prov_rec : dict + contains information for provenance tracking. + """ + # Get dataset information + var = attr['short_name'] + # Store name of reference data for given variable + if var not in refset.keys(): + refset[var] = attr.get('reference_dataset', None) + # Load data into iris cube + new_cube = iris.load_cube(attr['filename']) + # Check for expected unit + if new_cube.units != '%': + raise ValueError('Unit % is expected for ' + + new_cube.long_name.lower() + ' area fraction') + # Compute long term mean + mean_cube = new_cube.collapsed([diag.names.TIME], iris.analysis.MEAN) + # Rename variable in cube + mean_cube.var_name = "_".join([ + attr.get('cmor_table', ''), + attr.get('dataset', ''), + attr.get('exp', ''), + attr.get('ensemble', '') + ]).replace('__', '_').strip("_") + mean_cube.long_name = " ".join([var, 'for dataset', attr['dataset']]) + # Append to cubelist for temporary output + if attr['dataset'] == refset[var]: + cubes['ref'][var].append(mean_cube) + else: + cubes['exp'][var].append(mean_cube) + # Add information to provenance record + if prov_rec[var] == {}: + caption = ("Mean land cover fraction for {long_name} between " + "{start_year} and {end_year} for different datasets".format( + **attr)) + prov_rec[var] = { + 'caption': caption, + 'statistics': ['mean'], + 'domains': ['global'], + 'plot_type': 'regional averages', + 'authors': ['hage_st', 'loew_al', 'muel_bn', 'stac_to'], + 'references': [ + 'acknow_project', + ], + 'ancestors': [attr['filename']] + } + else: + prov_rec[var]['ancestors'].append(attr['filename']) + + +def write_data(cfg, cubes, var, prov_rec): + """Write intermediate datafield for one variable. + + Parameters + ---------- + cfg : dict + Configuration dictionary of the recipe. + cubes : dict + collection of iris data cubes. + var : str + variable short name + prov_rec : dict + contains information for provenance tracking. + """ + # Compile output path + filepath = os.path.join(cfg[diag.names.WORK_DIR], + '_'.join(['postproc', var]) + '.nc') + + # Join cubes in one list with ref being the last entry + outcubes = cubes['exp'][var] + cubes['ref'][var] + if cfg[diag.names.WRITE_NETCDF]: + iris.save(outcubes, filepath) + logger.info("Writing %s", filepath) + + # provenance tracking + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(filepath, prov_rec[var]) + + +def compute_landcover(var, lcdata, cubes): + """Return aggregated and averaged land cover values. + + Parameters + ---------- + var : str + variable short name + lcdata : dict + collection of land cover values per region + cubes : dict + collection of time averaged iris data cubes. + """ + # Define regions + regdef = { + 'Global': None, + 'Tropics': [-30, 30], + 'North. Hem.': [30, 90], + 'South. 
Hem.': [-90, -30]
+    }
+
+    values = {'area': [], 'frac': [], 'bias': []}
+    modnam = {'area': [], 'frac': [], 'bias': []}
+    # Compute metrics for all datasets of a given variable
+    for sub_cube in cubes:
+        modnam['area'].append(sub_cube.var_name)
+        modnam['frac'].append(sub_cube.var_name)
+        cellarea = sub_cube.copy()
+        cellarea.name = 'cellarea'
+        cellarea.data = iris.analysis.cartography.area_weights(cubes[0])
+        row = {'area': [], 'frac': []}
+        # Compute land cover area in million km2:
+        # area = Percentage * 0.01 * area [m2]
+        #        / 1.0e+6 [km2]
+        #        / 1.0e+6 [1.0e+6 km2]
+        coverarea = sub_cube.copy()
+        coverarea.data *= (0.01 * cellarea.data / 1.0E+6 / 1.0e+6)
+        # Sum over area for different regions
+        for reg in regdef:
+            if regdef[reg] is not None:
+                zone = iris.Constraint(
+                    latitude=sel_lats(
+                        sub_cube.coord('latitude').points, regdef[reg]))
+                row['area'].append(
+                    coverarea.extract(zone).collapsed(
+                        ['longitude', 'latitude'],
+                        iris.analysis.SUM).data.tolist())
+                row['frac'].append(
+                    sub_cube.extract(zone).collapsed(
+                        ['longitude', 'latitude'],
+                        iris.analysis.MEAN,
+                        weights=cellarea.extract(zone).data).data.tolist())
+
+            else:
+                row['area'].append(
+                    coverarea.collapsed(['longitude', 'latitude'],
+                                        iris.analysis.SUM).data.tolist())
+                row['frac'].append(
+                    sub_cube.collapsed(['longitude', 'latitude'],
+                                       iris.analysis.MEAN,
+                                       weights=cellarea.data).data.tolist())
+        values['area'].append(row['area'])
+        values['frac'].append(row['frac'])
+    # Compute relative bias in average fractions compared to reference
+    reffrac = np.array(values['frac'][-1])
+    for imod, modfrac in enumerate(values['frac'][:-1]):
+        values['bias'].append(
+            ((np.array(modfrac) - reffrac) / reffrac * 100.0).tolist())
+        modnam['bias'].append(modnam['frac'][imod])
+
+    lcdata[var] = {'values': values, 'groups': modnam}
+
+    return list(regdef.keys())
+
+
+def focus2model(cfg, lcdata, refset):
+    """Re-sort lcdata for model focus and return the result.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
+    lcdata : dict
+        collection of land cover values per region
+    refset : dict
+        reference dataset names for all variables.
+    """
+    var = diag.Variables(cfg).short_names()[0]
+    shuffle = {key: {} for key in lcdata[var]['groups']['area']}
+    for dset in shuffle.keys():
+        ids = lcdata[var]['groups']['area'].index(dset)
+        if refset[var] in dset:
+            shuffle[dset] = {
+                'groups': {
+                    'area': [],
+                    'frac': []
+                },
+                'values': {
+                    'area': [],
+                    'frac': []
+                }
+            }
+        else:
+            shuffle[dset] = {
+                'groups': {
+                    'area': [],
+                    'frac': [],
+                    'bias': []
+                },
+                'values': {
+                    'area': [],
+                    'frac': [],
+                    'bias': []
+                }
+            }
+        for var in sorted(diag.Variables(cfg).short_names()):
+            for metric in shuffle[dset]['groups'].keys():
+                shuffle[dset]['groups'][metric].append(var)
+                shuffle[dset]['values'][metric].append(
+                    lcdata[var]['values'][metric][ids])
+    # Rebinding the parameter would not reach the caller,
+    # so the re-sorted dictionary is returned instead
+    return shuffle
+
+
+def main(cfg):
+    """Run the diagnostic.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
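+
+    Notes
+    -----
+    Optional recipe settings read by this diagnostic; the defaults
+    shown are the fallbacks used in the code above::
+
+        comparison: variable       # or 'model'
+        output_file_type: png      # 'pdf' collects all plots in one file
+        colorscheme: seaborn       # any matplotlib style sheet name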
+    """
+    # Print dataset and variable information
+    logging.debug("Found datasets in recipe:\n%s", diag.Datasets(cfg))
+    logging.debug("Found variables in recipe:\n%s", diag.Variables(cfg))
+
+    # Get metadata information
+    grouped_input_data = diag.group_metadata(
+        cfg['input_data'].values(), 'standard_name', sort='dataset')
+
+    # Prepare dictionaries
+    timcubes = {
+        'exp': {key: []
+                for key in diag.Variables(cfg).short_names()},
+        'ref': {key: []
+                for key in diag.Variables(cfg).short_names()}
+    }
+    lcdata = {key: {} for key in diag.Variables(cfg).short_names()}
+    refset = {}
+    prov_rec = {key: {} for key in diag.Variables(cfg).short_names()}
+
+    # Read data and compute long-term means
+    for standard_name in grouped_input_data:
+        for attributes in grouped_input_data[standard_name]:
+            get_timmeans(attributes, timcubes, refset, prov_rec)
+
+    for var in diag.Variables(cfg).short_names():
+        # Write regridded and temporally aggregated netCDF data files
+        write_data(cfg, timcubes, var, prov_rec)
+        # Compute aggregated and fraction average land cover
+        regnam = compute_landcover(var, lcdata,
+                                   timcubes['exp'][var] + timcubes['ref'][var])
+
+    # Reshuffle data if models are the comparison target
+    if cfg.get('comparison', 'variable') == 'model':
+        lcdata = focus2model(cfg, lcdata, refset)
+        prov_rec = None
+    elif cfg.get('comparison', 'variable') != 'variable':
+        raise ValueError('Only variable or model are valid comparison targets')
+
+    # Output ascii files and plots
+    for target in lcdata.keys():
+        # Write plotdata as ascii files for user information
+        infos = [cfg, regnam, prov_rec, target]
+        write_plotdata(infos, lcdata[target]['groups'],
+                       lcdata[target]['values'])
+
+        # Plot area values
+        make_landcover_bars(cfg, regnam, lcdata[target]['groups'],
+                            lcdata[target]['values'], target)
+
+
+if __name__ == '__main__':
+
+    with diag.run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/magic_bsc/PC.r b/esmvaltool/diag_scripts/magic_bsc/PC.r
new file mode 100644
index 0000000000..068f896ed3
--- /dev/null
+++ b/esmvaltool/diag_scripts/magic_bsc/PC.r
@@ -0,0 +1,111 @@
+library(ggplot2)
+library(plyr)
+
+read_pc <- function(file) {
+  pc <- list()
+  pc$points <- rbind(c(0, 0), read.delim(file, comment.char = "#"))
+  pc$fun <- approxfun(pc$points$WindSpeed, pc$points$Power, # nolint
+                      method = "linear",
+                      yleft = NA, yright = 0)
+  attr <- strsplit(trimws(system(paste(
+    "perl -e 'open FH,\"", file,
+    "\";while(<FH>){@parts= /^# (.+): (.+) /;print \"@parts \";}'",
+    sep = ""),
+    intern = TRUE)),
+    "\\s+")
+  attr <- matrix(unlist(attr), ncol = 2, byrow = T)
+  pc$attr <- as.list(attr[, 2])
+  names(pc$attr) <- attr[, 1]
+  pc$attr$Filename <- file # nolint
+  pc$attr$RatedPower <- as.numeric(pc$attr$RatedPower) # nolint
+  return(pc)
+}
+read_xml_pc <- function(file) {
+  xml <- xmlTreeParse(file, useInternalNodes = TRUE) # nolint
+  xml_data <- xmlToList(xml) # nolint
+  pc <- list()
+  pcs <- xml_data$wind_turbine_properties$power_curves
+  for (i in 1 : length(pcs)) {
+    if (pcs[[i]]$air_density == 1.225) {
+      pc$points <- ldply(pcs[[i]]$power_curve_table, #nolint
+                         data.frame)[, c(2, 3)]
+      colnames(pc$points) <- c("WindSpeed", "Power") #nolint
+      pc$points <- transform(
+        pc$points,
+        WindSpeed = as.numeric(as.character(WindSpeed)), # nolint
+        Power = as.numeric(as.character(Power))
+      )
+      pc$points <- rbind(c(0, 0), pc$points)
+      break
+    }
+  }
+  pc$fun <- approxfun(
+    pc$points$WindSpeed, #nolint
+    pc$points$Power, # nolint
+    method = "linear",
+    yleft = NA,
+    yright = 0,
+    ties = "ordered"
+  )
+
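+  # Example (illustrative; the numbers come from the Enercon_E70 table
+  # added below): a (0, 0) point is prepended, intermediate speeds are
+  # interpolated linearly, and speeds above the last tabulated point
+  # yield 0:
+  #   pc <- read_pc("PowerCurves/Enercon_E70_2.3MW.txt")
+  #   pc$fun(c(5, 10, 30))   # approx. c(127, 1223, 0) kW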
pc$attr$Diameter <- xml_data$wind_turbine_properties$rotor_diameter # nolint + pc$attr$CutIn <- NA # nolint + pc$attr$CutOut <- NA # nolint + pc$attr$ReCutIn <- NA # nolint + pc$attr$RatedSpeed <- NA # nolint + pc$attr$RatedPower <- xml_data$wind_turbine_properties$rated_power # nolint + pc$attr$IECClass <- NA # nolint + pc$attr$Control <- NA # nolint + pc$attr$Density <- 1.225 # nolint + pc$attr$Name <- file # nolint + pc$attr$Filename <- file # nolint + pc$attr$RatedPower <- as.numeric(pc$attr$RatedPower) # nolint + return(pc) +} +plot_pc <- function(pc) { + plot <- ggplot(pc$points, aes(x = WindSpeed, y = Power)) + # nolint + geom_point() + + stat_function(fun = pc$fun) + + xlim(0, 35) +return(plot) +} +plot_pc_list <- function(list_pcs) { + list_funs <- lapply(list_pcs, function(x) { + function(y) { + x$fun(y) / x$attr$RatedPower} # nolint + }) + names <- lapply(list_pcs, function(x) x$attr$Name ) # nolint + plot <- ggplot(NULL, aes(x = x, colour = Turbine)) #nolint + for (i in 1 : length(list_pcs)) { + plot <- plot + stat_function(data = data.frame(x = 0 : 30, + Turbine = factor(names[[i]])), fun = list_funs[[i]]) + } + plot <- plot + xlab("Wind speed (m/s)") + ylab("Capacity Factor (%)") + + ggtitle("Selected power curves") + return(plot) +} +get_list_turbines <- function() { + files <- list.files() + turb_list <- list() + for (i in seq(files)) { + file <- files[i] + turb_list[[i]] <- read_xml_pc(file) + } + names(turb_list) <- files + return(turb_list) +} +wind2power <- function(wind, pc) { + power <- pc$fun(wind) +} +wind2CF <- function(wind, pc) { + power <- pc$fun(wind) + CF <- power / pc$attr$RatedPower #nolint +} +WPD <- function(wind, ro) { + return(0.5 * ro * wind ^ 3) +} +bump <- function(x) { + f <- function(y) { + exp(-1 / y ^ 2) + } + return(f(x) / (f(x) + f(1 - x))) +} diff --git a/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Enercon_E70_2.3MW.txt b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Enercon_E70_2.3MW.txt new file mode 100644 index 0000000000..65dc4b1b13 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Enercon_E70_2.3MW.txt @@ -0,0 +1,38 @@ +#--------------------------- +# TURBINE CHARACTERISTICS +#--------------------------- +# Name: Enercon_E70_2.3MW [string] +# Manufacturer: Enercon [string] +# Diameter: 71 m +# CutIn: 2 m/s +# CutOut: 25 m/s +# ReCutIn: unknown m/s +# RatedSpeed: 16 m/s +# RatedPower: 2310 kW +# IECClass: Ia [Ia/IIa/IIIa/S] +# Control: pitch [pitch/stall/active_stall/flaps] +# HubHeights: 57,64,74,85,98,113 m +# +#--------------------------- +# POWER CURVE +# Density: 1.225 kg/m^3 +#--------------------------- +WindSpeed Power +1.0 0 +2.0 2 +3.0 18 +4.0 56 +5.0 127 +6.0 240 +7.0 400 +8.0 626 +9.0 892 +10.0 1223 +11.0 1590 +12.0 1900 +13.0 2080 +14.0 2230 +15.0 2300 +16.0 2310 +25.0 2310 +25.5 0 diff --git a/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Gamesa_G80_2.0MW.txt b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Gamesa_G80_2.0MW.txt new file mode 100644 index 0000000000..0ba925e802 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Gamesa_G80_2.0MW.txt @@ -0,0 +1,37 @@ +#--------------------------- +# TURBINE CHARACTERISTICS +#--------------------------- +# Name: Gamesa_G80_2.0MW [string] +# Manufacturer: Gamesa [string] +# Diameter: 80 m +# CutIn: 4 m/s +# CutOut: 25 m/s +# ReCutIn: unknown m/s +# RatedSpeed: 17 m/s +# RatedPower: 2000 kW +# IECClass: Ia/IIa [Ia/IIa/IIIa/S] +# Control: pitch [pitch/stall/active_stall/flaps] +# HubHeights: 60,67,78,100 m +# +#--------------------------- +# POWER 
CURVE +# Density: 1.225 kg/m^3 +#--------------------------- +WindSpeed Power +3.0 0 +4.0 66 +5.0 152 +6.0 280 +7.0 457 +8.0 690 +9.0 978 +10.0 1296 +11.0 1598 +12.0 1818 +13.0 1935 +14.0 1980 +15.0 1995 +16.0 1999 +17.0 2000 +25.0 2000 +25.5 0 diff --git a/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Gamesa_G87_2.0MW.txt b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Gamesa_G87_2.0MW.txt new file mode 100644 index 0000000000..3ade6af20f --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Gamesa_G87_2.0MW.txt @@ -0,0 +1,36 @@ +#--------------------------- +# TURBINE CHARACTERISTICS +#--------------------------- +# Name: Gamesa_G87_2.0MW [string] +# Manufacturer: Gamesa [string] +# Diameter: 87 m +# CutIn: 4 m/s +# CutOut: 25 m/s +# ReCutIn: unknown m/s +# RatedSpeed: 16 m/s +# RatedPower: 2000 kW +# IECClass: IIa [Ia/IIa/IIIa/S] +# Control: pitch [pitch/stall/active_stall/flaps] +# HubHeights: 67,78,90,100 m +# +#--------------------------- +# POWER CURVE +# Density: 1.225 kg/m^3 +#--------------------------- +WindSpeed Power +3.0 0 +4.0 79 +5.0 181 +6.0 335 +7.0 550 +8.0 832 +9.0 1175 +10.0 1530 +11.0 1816 +12.0 1963 +13.0 1988 +14.0 1996 +15.0 1999 +16.0 2000 +25.0 2000 +25.5 0 diff --git a/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Vestas_V100_2.0MW.txt b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Vestas_V100_2.0MW.txt new file mode 100644 index 0000000000..462aeaaaa0 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Vestas_V100_2.0MW.txt @@ -0,0 +1,42 @@ +#--------------------------- +# TURBINE CHARACTERISTICS +#--------------------------- +# Name: Vestas_V100_2.0MW [string] +# Manufacturer: Vestas [string] +# Diameter: 100 m +# CutIn: 3 m/s +# CutOut: 20 m/s +# ReCutIn: 18 m/s +# RatedSpeed: 12 m/s +# RatedPower: 2000 kW +# IECClass: IIa/IIIa [Ia/IIa/IIIa/S] +# Control: pitch [pitch/stall/active_stall/flaps] +# +#--------------------------- +# POWER CURVE +# Density: 1.225 kg/m^3 +# noise_mode: 0 +#--------------------------- +WindSpeed Power +2.5 0 +3 13 +3.5 51 +4 107 +4.5 175 +5 253 +5.5 346 +6 454 +6.5 584 +7 738 +7.5 912 +8 1109 +8.5 1321 +9 1538 +9.5 1734 +10 1873 +10.5 1951 +11 1984 +11.5 1995 +12 2000 +20 2000 +20.5 0 diff --git a/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Vestas_V110_2.0MW.txt b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Vestas_V110_2.0MW.txt new file mode 100644 index 0000000000..6ab358dde3 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/PowerCurves/Vestas_V110_2.0MW.txt @@ -0,0 +1,41 @@ +#--------------------------- +# TURBINE CHARACTERISTICS +#--------------------------- +# Name: Vestas_V110_2.0MW [string] +# Manufacturer: Vestas [string] +# Diameter: 110 m +# CutIn: 3 m/s +# CutOut: 20 m/s +# ReCutIn: unknown m/s +# RatedSpeed: 11.5 m/s +# RatedPower: 2000 kW +# IECClass: IIIa [Ia/IIa/IIIa/S] +# Control: pitch [pitch/stall/active_stall/flaps] +# HubHeights: 80,95,125 m +# +#--------------------------- +# POWER CURVE +# Density: 1.225 kg/m^3 +#--------------------------- +WindSpeed Power +2.5 0 +3.0 23 +3.5 80 +4.0 140 +4.5 223 +5.0 314 +5.5 422 +6.0 549 +6.5 703 +7.0 900 +7.5 1123 +8.0 1347 +8.5 1555 +9.0 1775 +9.5 1907 +10.0 1972 +10.5 1997 +11.0 1999 +11.5 2000 +20.0 2000 +20.5 0 diff --git a/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.r b/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.r new file mode 100644 index 0000000000..e0c6319a1a --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/RegimesAssign.r @@ -0,0 +1,136 @@ +anom2regime <- function(ref, target, method = "distance", lat) { + posdim <- 
which(names(dim(ref)) == "nclust") + poslat <- which(names(dim(ref)) == "lat") + poslon <- which(names(dim(ref)) == "lon") + + nclust <- dim(ref)[posdim] + + if (all(dim(ref)[-posdim] != dim(target))) { + stop("The target should have the same dimensions [lat,lon] that + the reference ") + } + if (is.null(names(dim(ref))) | is.null(names(dim(target)))) { + stop( + "The arrays should include dimensions names ref[nclust,lat,lon] + and target [lat,lon]" + ) + } + + if (length(lat) != dim(ref)[poslat]) { + stop("latitudes do not match with the maps") + } + + # This dimensions are reorganized + ref <- aperm(ref, c(posdim, poslat, poslon)) + target <- + aperm(target, c(which(names(dim( + target + )) == "lat"), which(names(dim( + target + )) == "lon"))) + + # weights are defined + latWeights <- InsertDim(sqrt(cos(lat * pi / 180)), 2, dim(ref)[3]) #nolint + + + rmsdiff <- function(x, y) { + dims <- dim(x) + ndims <- length(dims) + if (ndims != 2 | ndims != length(dim(y))) { + stop("x and y should be maps") + } + map_diff <- NA * x + for (i in 1 : dims[1]) { + for (j in 1 : dims[2]) { + map_diff[i, j] <- (x[i, j] - y[i, j]) ^ 2 + } + } + rmsdiff <- sqrt(mean(map_diff, na.rm = TRUE)) + return(rmsdiff) + } + + if (method == "ACC") { + corr <- rep(NA, nclust) + for (i in 1:nclust) { + corr[i] <- + ACC(InsertDim(InsertDim( #nolint + InsertDim(ref[i, , ] * latWeights, 1, 1), 2, 1 #nolint + ), 3, 1), + InsertDim(InsertDim( #nolint + InsertDim(target * latWeights, 1, 1), 2, 1 #nolint + ), 3, 1))$ACC[2] + } + assign <- which(corr == max(corr)) + } + + if (method == "distance") { + rms <- rep(NA, nclust) + for (i in 1 : nclust) { + rms[i] <- rmsdiff(ref[i, , ] * latWeights, target * latWeights)#nolint + } + assign <- which(rms == min(rms, na.rm = TRUE)) + } + return(assign) +} + +RegimesAssign <- function(var_ano, ref_maps, lats, #nolint + method = "distance") { + posdim <- which(names(dim(ref_maps)) == "nclust") + poslat <- which(names(dim(ref_maps)) == "lat") + poslon <- which(names(dim(ref_maps)) == "lon") + poslat_ano <- which(names(dim(var_ano)) == "lat") + poslon_ano <- which(names(dim(var_ano)) == "lon") + + nclust <- dim(ref_maps)[posdim] + nlat <- dim(ref_maps)[poslat] + nlon <- dim(ref_maps)[poslon] + + + if (is.null(names(dim(ref_maps))) | is.null(names(dim(var_ano)))) { + stop( + "The arrays should include dimensions names ref[nclust,lat,lon] + and target [lat,lon]" + ) + } + + if (length(lats) != dim(ref_maps)[poslat]) { + stop("latitudes do not match with the maps") + } +print(str(var_ano)) + assign <- + Apply( + data = list(target = var_ano), + margins = c( (1 : length(dim(var_ano)) )[-c(poslat_ano, poslon_ano)]), + fun = "anom2regime", + ref = ref_maps, + lat = lats, + method = method + ) + + if (poslat_ano < poslon_ano) { + dim_order <- c(nlat, nlon) + } else { + dim_order <- c(nlon, nlat) + } + + anom_array <- + array(var_ano, dim = c(prod(dim(var_ano)[-c(poslat_ano, poslon_ano)]), + dim_order)) + + rm(var_ano) + + index <- as.vector(assign$output1) + recon <- Composite(var = aperm(anom_array, c(3, 2, 1)), occ = index) + freqs <- rep(NA, nclust) + for (n in 1 : nclust) { + freqs[n] <- (length(which(index == n)) / length(index)) * 100 + } + output <- + list( + composite = recon$composite, + pvalue = recon$pvalue, + cluster = assign$output1, + frequency = freqs + ) + return(output) +} diff --git a/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.r b/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.r new file mode 100644 index 0000000000..aab9823911 --- /dev/null +++ 
b/esmvaltool/diag_scripts/magic_bsc/WeatherRegime.r @@ -0,0 +1,135 @@ +AtomicWeatherRegime <- function( # nolint + data, EOFS = TRUE, neofs = 30, threshold = NULL, lon = NULL, lat = NULL, + ncenters = NULL, method = "kmeans", nstart = 30) { + names(dim(data)) <- c("sdate", "ftime", "lat", "lon") + sdate <- which(names(dim(data)) == "sdate") + ftime <- which(names(dim(data)) == "ftime") + nftimes <- dim(data)[ftime] + nsdates <- dim(data)[sdate] + lon2 <- which(names(dim(data)) == "lon") + lat2 <- which(names(dim(data)) == "lat") + data <- aperm(data, c(ftime, sdate, lat2, lon2)) + nlon <- dim(data)[lon2] + nlat <- dim(data)[lat2] + dim(data) <- c(nftimes * nsdates, nlat, nlon) + + if (is.null(ncenters)) { + stop("ncenters must be specified") + } + if (EOFS == TRUE && is.null(lon)) { + stop("longitudes must be specified") + } + if (EOFS == TRUE && is.null(lat)) { + stop("latitudes must be specified") + } + + if (EOFS == TRUE) { + data_pc <- EOF( # nolint + data, + lat = as.vector(lat), + lon = as.vector(lon), + neofs = neofs + ) + if (is.null(threshold)){ + threshold <- sum(data_pc$var) + cluster_input <- data_pc$PC + } else { + threshold <- threshold + min_pc <- head(as.numeric(which(cumsum(data_pc$var) > threshold)), 1) + cluster_input <- data_pc$PC[, 1 : min_pc] + } + } else { + cluster_input <- data + latWeights <- InsertDim( #nolint + InsertDim(cos(lat * pi / 180), 1, nftimes * nsdates), #nolint + 3, + nlon + ) + cluster_input <- cluster_input * latWeights #nolint + dim(cluster_input) <- c(nftimes * nsdates, nlat * nlon) + } + if (method == "kmeans") { + result <- kmeans(cluster_input, centers = ncenters, + iter.max = 100, nstart = nstart, trace = FALSE) + reconstructed <- array(0, c(ncenters, nlat, nlon)) + data <- aperm(data, c(2, 3, 1)) + reconstructed <- Composite(data, result$cluster) # nolint + names(dim(reconstructed$composite)) <- c("lon", "lat", "cluster") + cluster_timeseries <- list(lengths = c(), values = c()) + frequency <- persistence <- matrix(NA, nsdates, ncenters) + for (i in 1 : nsdates) { + occurences <- rle( + result$cluster[((i * nftimes) + 1 - nftimes) : (i * nftimes)] #nolint + ) + cluster_timeseries <- list( + lengths = c(cluster_timeseries$lengths, occurences$lengths), + values = c(cluster_timeseries$values, occurences$values) + ) + for (j in 1 : ncenters) { + total <- sum(occurences$lengths[occurences$values == j]) + frequency[i, j] <- (total / nftimes) * 100 + persistence[i, j] <- mean(occurences$lengths[occurences$values == j]) + } + } + } else { + result <- hclust(dist(cluster_input), method = method) + clusterCut <- cutree(result, ncenters) #nolint + data <- aperm(data, c(3, 2, 1)) + result <- Composite(data, clusterCut) # nolint + } + if (method == "kmeans") { + return(list( + composite = reconstructed$composite, + pvalue = reconstructed$pvalue, + cluster = as.array(result$cluster), + center = as.array(result$center), + cluster_lengths = as.array(cluster_timeseries$lengths), + cluster_values = as.array(cluster_timeseries$values), + persistence = as.array(persistence), frequency = frequency)) + } else { + return(list( + composite = result$composite, + pvalue = result$pvalue, + cluster = as.array(clusterCut) # nolint + )) + } +} + +WeatherRegime <- function( # nolint + data, EOFS = TRUE, neofs = 30, threshold = NULL, lon = NULL, lat = NULL, + ncenters = NULL, method = "kmeans", nstart = 30, iter.max = 100, + ncores = NULL) { + if (length(dim(data)) > 4) { + sdate <- which(names(dim(data)) == "sdate") + ftime <- which(names(dim(data)) == "ftime") + lon_dim 
<- which(names(dim(data)) == "lon") + lat_dim <- which(names(dim(data)) == "lat") + dims <- c(1 : length(dim(data)))[-c(sdate, ftime, lon_dim, lat_dim)] + data <- aperm(data, c(sdate, ftime, lat_dim, lon_dim, dims)) + margins <- 5 : length(dim(data)) + result <- Apply( + data = list(data), + margins = list(margins), + fun = "AtomicWeatherRegime", + EOFS = EOFS, + neofs = neofs, + threshold = threshold, + lon = lon, + lat = lat, + ncenters = ncenters, + method = method, + ncores = ncores + ) + } else { + result <- AtomicWeatherRegime( # nolint + data, + EOFS = EOFS, + neofs = neofs, + threshold = threshold, + lon = lon, + lat = lat, + ncenters = ncenters, + method = method + ) + } +} diff --git a/esmvaltool/diag_scripts/magic_bsc/capacity_factor.r b/esmvaltool/diag_scripts/magic_bsc/capacity_factor.r new file mode 100644 index 0000000000..ee213e3413 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/capacity_factor.r @@ -0,0 +1,248 @@ + +Sys.setenv(TAR = "/bin/tar") # nolint + +library(abind) +library(climdex.pcic) +library(ggplot2) +library(multiApply) # nolint +library(ncdf4) +library(RColorBrewer) # nolint +library(s2dverification) +library(yaml) + +#Parsing input file paths and creating output dirs +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +print(args) +initial.options <- commandArgs(trailingOnly = FALSE) +file_arg_name <- "--file=" +script_name <- sub( + file_arg_name, "", initial.options[grep(file_arg_name, initial.options)] +) +script_dirname <- dirname(script_name) + +source(file.path(script_dirname, "PC.r")) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + + +input_files_per_var <- yaml::read_yaml(params$input_files) +var_names <- names(input_files_per_var) +model_names <- lapply(input_files_per_var, function(x) x$dataset) +model_names <- unname(model_names) +var0 <- lapply(input_files_per_var, function(x) x$short_name) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] +start_year <- lapply(input_files_per_var, function(x) x$start_year) +start_year <- c(unlist(unname(start_year)))[1] +end_year <- lapply(input_files_per_var, function(x) x$end_year) +end_year <- c(unlist(unname(end_year)))[1] +seasons <- params$seasons +power_curves <- params$power_curves + + +no_of_years <- length(start_year : end_year) +var0 <- unlist(var0) +data_nc <- nc_open(fullpath_filenames) +data <- ncvar_get(data_nc, var0) + +names(dim(data)) <- c("lon", "lat", "time") +lat <- ncvar_get(data_nc, "lat") +lon <- ncvar_get(data_nc, "lon") +units <- ncatt_get(data_nc, var0, "units")$value +calendar <- ncatt_get(data_nc, "time", "calendar")$value +long_names <- ncatt_get(data_nc, var0, "long_name")$value +time <- ncvar_get(data_nc, "time") +start_date <- as.POSIXct( + substr(ncatt_get(data_nc, "time", "units")$value, 11, 29 ) +) +nc_close(data_nc) +time <- as.Date(time, origin = start_date, calendar = calendar) +time <- as.POSIXct(time, format = "%Y-%m-%d") + +print(calendar) +print(str(data)) +time_dim <- which(names(dim(data)) == "time") +time <- as.PCICt(time, cal = calendar) +time <- as.character(time) +jdays <- as.numeric(strftime(time, format = "%j")) +if (calendar == "gregorian" | calendar == "standard" | + calendar == 
"proleptic_gregorian") { + year <- as.numeric(strftime(time, format = "%Y")) + pos <- ( (year / 100) %% 1 == 0) + ( (year / 4) %% 1 == 0) + + ( (year / 400) %% 1 == 0) + pos <- which(pos == 1) + bisiesto <- which(jdays == 60) + if ( length(intersect(pos, bisiesto)) > 0) { + time <- time[-intersect(pos, bisiesto)] + data <- apply(data, c(1 : length(dim(data)))[-time_dim], + function(x) { + x[-intersect(pos, bisiesto)] + }) + data <- aperm(data, c(2, 3, 1)) + names(dim(data)) <- c("lon", "lat", "time") + } +} + +dims <- dim(data) +dims <- append( + dims[-time_dim], c(no_of_years, dims[time_dim] / no_of_years), after = 2 +) + +dim(data) <- dims + +data <- aperm(data, c(3, 4, 2, 1)) +names(dim(data)) <- c("year", "day", "lat", "lon") +##################################### +# Cross with PC +#################################### + +#--------------------------- +# Load PC to use and compute CF for 6h values +#--------------------------- +seas_data <- Mean1Dim(data, 2) +print(power_curves[1]) +pc1 <- read_pc(file.path(script_dirname, power_curves[1])) +pc2 <- read_pc(file.path(script_dirname, power_curves[2])) +pc3 <- read_pc(file.path(script_dirname, power_curves[3])) +pc4 <- read_pc(file.path(script_dirname, power_curves[4])) +pc5 <- read_pc(file.path(script_dirname, power_curves[5])) + + +data_cf1 <- wind2CF(data, pc1) +dim(data_cf1) <- dim(data) +data_cf2 <- wind2CF(data, pc2) +dim(data_cf2) <- dim(data) +data_cf3 <- wind2CF(data, pc3) +dim(data_cf3) <- dim(data) +data_cf4 <- wind2CF(data, pc4) +dim(data_cf4) <- dim(data) +data_cf5 <- wind2CF(data, pc5) +dim(data_cf5) <- dim(data) + +#--------------------------- +# Aggregate daily data to seasonal means +#--------------------------- + +seas_data_cf1 <- Mean1Dim(data_cf1, 2) +seas_data_cf2 <- Mean1Dim(data_cf2, 2) +seas_data_cf3 <- Mean1Dim(data_cf3, 2) +seas_data_cf4 <- Mean1Dim(data_cf4, 2) +seas_data_cf5 <- Mean1Dim(data_cf5, 2) + + + +############################## +# Make some plots +############################## +#--------------------------- +# Prepare data, labels and colorscales +#--------------------------- +p <- colorRampPalette(brewer.pal(9, "YlOrRd")) +q <- colorRampPalette(rev(brewer.pal(11, "RdBu"))) +years <- seq(start_year, end_year) +turb_types <- c("IEC I", "IEC I/II", "IEC II", "IEC II/III", "IEC III") + +seas_data_cf_all <- abind( + seas_data_cf1, seas_data_cf2, seas_data_cf3, seas_data_cf4, seas_data_cf5, + along = 0 +) +mean_data_cf_all <- Mean1Dim(seas_data_cf_all, 2) +anom_data_cf_all <- seas_data_cf_all - InsertDim( # nolint + Mean1Dim(seas_data_cf_all, 2), 2, dim(data)[1] # nolint +) +pct_anom_data_cf_all <- (seas_data_cf_all / InsertDim( # nolint + Mean1Dim(seas_data_cf_all, 2), 2, dim(data)[1] # nolint +)) - 1 + +#--------------------------- +# Plot seasonal CF maps +#--------------------------- +filepng <- paste0( + plot_dir, "/", "capacity_factor_", + model_names, "_", start_year, "-", end_year, ".png") +title <- paste0(seasons, " CF from ", + model_names, " (", start_year, "-", end_year, ")") + +PW_names <- c("Enercon E70", "Gamesa G80", "Gamesa G87", + "Vestas V100", "Vestas V110") +PlotLayout( # nolint + PlotEquiMap, # nolint + c(3, 2), + Mean1Dim(seas_data_cf_all, 2), + lon, + lat, + filled.continents = F, + toptitle = title, + titles = PW_names, + fileout = filepng) + +filencdf <- paste0(work_dir, "/", "capacity_factor_", + model_names, "_", start_year, "-", end_year, ".nc") + +dimlon <- ncdim_def( + name = "lon", + units = "degrees_east", + vals = as.vector(lon), + longname = "longitude" +) +dimlat <- 
ncdim_def( + name = "lat", + units = "degrees_north", + vals = as.vector(lat), + longname = "latitude" +) +dimtime <- ncdim_def( + name = "season", + units = "season", + vals = start_year : end_year, + longname = "season of the year: DJF, MAM, JJA, SON" +) +dimcurve <- ncdim_def( + name = "curve", + units = "name", + vals = 1 : 5, + longname = "Power curves of considered turbines" +) + +names(dim(seas_data_cf_all)) <- c("curve", "time", "lat", "lon") +defdata <- ncvar_def( + name = "CapacityFactor", + units = "%", + dim = list(season = dimcurve, dimtime, lat = dimlat, lon = dimlon), + longname = paste0("Capacity Factor of wind on different turbines") +) +file <- nc_create(filencdf, list(defdata)) +ncvar_put(file, defdata, seas_data_cf_all) +nc_close(file) + + # Set provenance for output files + xprov <- list(ancestors = list(fullpath_filenames, + file.path(script_dirname, power_curves[1]), + file.path(script_dirname, power_curves[2]), + file.path(script_dirname, power_curves[3]), + file.path(script_dirname, power_curves[4]), + file.path(script_dirname, power_curves[5])), + authors = list("hunt_al", "manu_ni", "lled_ll", "caro_lo", + "bojo_dr", "gonz_nu"), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + plot_file = filepng) + + provenance[[filencdf]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/combined_indices.r b/esmvaltool/diag_scripts/magic_bsc/combined_indices.r new file mode 100644 index 0000000000..9cc7571198 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/combined_indices.r @@ -0,0 +1,168 @@ +Sys.setenv(TAR = "/bin/tar") # nolint +library(s2dverification) +library(multiApply) # nolint +library(ggplot2) +library(yaml) +library(ncdf4) +library(ClimProjDiags) #nolint +library(abind) +library(climdex.pcic) + +#Parsing input file paths and creating output dirs +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) + +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +input_files_per_var <- yaml::read_yaml(params$input_files[1]) +var_names <- names(input_files_per_var) +model_names <- lapply(input_files_per_var, function(x) x$dataset) +model_names <- unique(unlist(unname(model_names))) + +var0 <- lapply(input_files_per_var, function(x) x$short_name) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] +var0 <- unlist(var0) + +start_year <- lapply(input_files_per_var, function(x) x$start_year) +starting <- c(unlist(unname(start_year)))[1] +end_year <- lapply(input_files_per_var, function(x) x$end_year) +ending <- c(unlist(unname(end_year)))[1] +start_year <- as.POSIXct(as.Date(paste0(starting, "-01-01"), "%Y-%m-%d")) +end_year <- as.POSIXct(as.Date(paste0(ending, "-12-31"), "%Y-%m-%d")) + +#Parameters for Season() function +monini <- 1 +moninf <- params$moninf +monsup <- params$monsup +months <- "" +region <- params$region +running_mean <- params$running_mean +timestamp <- "" +standardized <- params$standardized + +if (region == "Nino3") { + lon_min <- 360 - 150 + lon_max <- 360 - 90 + lat_min <- -5 + lat_max <- 5 +} else if (region == "Nino3.4") { + lon_min <- 360 - 
170 + lon_max <- 360 - 120 + lat_min <- -5 + lat_max <- 5 +} else if (region == "Nino4") { + lon_min <- 360 - 160 + lon_max <- 360 - 150 + lat_min <- -5 + lat_max <- 5 +} else if (region == "NAO") { + lon_min <- 360 + c(-90, -90) + lon_max <- c(40, 40) + lat_min <- c(25, 60) + lat_max <- c(45, 80) +} else if (region == "SOI") { + lon_min <- c(90, 360 - 130) + lon_max <- c(140, 360 - 80) + lat_min <- c(-5, -5) + lat_max <- c(5, 5) +} +### Load data +data_nc <- nc_open(fullpath_filenames) +lat <- as.vector(ncvar_get(data_nc, "lat")) +lon <- as.vector(ncvar_get(data_nc, "lon")) +units <- ncatt_get(data_nc, var0, "units")$value +long_names <- ncatt_get(data_nc, var0, "long_name")$value + +data <- InsertDim(ncvar_get(data_nc, var0), 1, 1) # nolint +names(dim(data)) <- c("model", "lon", "lat", "time") +time <- seq(start_year, end_year, "month") +nc_close(data_nc) + +if (standardized) { + data <- Apply(list(data), target_dims = c("time"), + fun = function(x) {(x - mean(x)) / sqrt(var(x))}) #nolint + data <- aperm(data$output1, c(2, 3, 4, 1)) + names(dim(data)) <- c("model", "lon", "lat", "time") +} + +if (!is.null(running_mean)) { + data <- Smoothing(data, runmeanlen = running_mean, numdimt = 4) #nolint + timestamp <- paste0(running_mean, "-month-running-mean-") +} + +if (!is.null(moninf)) { + data <- Season(data, posdim = 4, monini = monini, #nolint + moninf = moninf, monsup = monsup) + months <- paste0(month.abb[moninf], "-", month.abb[monsup]) +} + +if (length(lon_min) == 1) { + data <- WeightedMean(data, lon = lon, lat = lat, #nolint + region = c(lon_min, lon_max, lat_min, lat_max), + londim = 2, latdim = 3, mask = NULL) + + data <- drop(data) +} else { + data1 <- WeightedMean(data, lon = lon, lat = lat, #nolint + region = c(lon_min[1], lon_max[1], lat_min[1], lat_max[1]), + londim = 2, latdim = 3, mask = NULL) + data2 <- WeightedMean(data, lon = lon, lat = lat, #nolint + region = c(lon_min[2], lon_max[2], lat_min[2], lat_max[2]), + londim = 2, latdim = 3, mask = NULL) + data1 <- drop(data1) + data2 <- drop(data2) + data <- CombineIndices(list(data1, data2), weights = c(1, -1), #nolint + operation = "add") +} + +if (moninf > monsup) { + period <- (starting : ending)[-1] +} else { + period <- starting : ending +} + +dimtime <- ncdim_def(name = "Time", units = "years", + vals = period, longname = "Time") +defdata <- ncvar_def(name = "data", units = units, dim = list(time = dimtime), + longname = paste("Index for region", region, "Variable", var0)) +filencdf <- paste0(work_dir, "/", var0, "_", timestamp, "_", months, "_", + starting, ending, "_", ".nc") +file <- nc_create(filencdf, list(defdata)) +ncvar_put(file, defdata, data) +nc_close(file) + + +png(paste0(plot_dir, "/", "Index_", region, ".png"), width = 7, height = 4, + units = "in", res = 150) +plot(period, data, type = "l", col = "purple", lwd = 2, bty = "n", + xlab = "Time (years)", ylab = "Index", + main = paste("Region", region, "and Variable", var0)) +abline(h = 0, col = "grey", lty = 4) +dev.off() + + +# Set provenance for output files +xprov <- list(ancestors = list(fullpath_filenames), + authors = list("hunt_al", "manu_ni"), + projects = list("c3s-magic"), + caption = "Combined selection", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys")) +provenance[[filencdf]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.r b/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.r new file mode 100644 index 
0000000000..531e53cb95 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/diurnal_temp_index.r @@ -0,0 +1,221 @@ +library(yaml) +library(s2dverification) +library(multiApply) # nolint +library(climdex.pcic) +library(ClimProjDiags) #nolint +library(parallel) +library(ncdf4) + +## Insurance products +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) + +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +#FOR THE FIRST METADATA.yml +input_files_tasmax <- yaml::read_yaml(params$input_files[1]) +model_names <- input_files_tasmax[[1]]$dataset +var_names_tmax <- input_files_tasmax[[1]]$short_name +experiment <- lapply( + input_files_tasmax, + function(x){ + x$exp + } +) +filename_tasmax <- lapply( + input_files_tasmax, + function(x){ + x$filename + } +) + +input_files_tasmin <- yaml::read_yaml(params$input_files[2]) +var_names_tmin <- input_files_tasmin[[1]]$short_name +filename_tasmin <- lapply( + input_files_tasmin, + function(x){ + x$filename + } +) + +reference_files <- which(experiment == "historical") +projection_files <- which(experiment != "historical") + +start_historical <- input_files_tasmax[[reference_files]]$start_year +end_historical <- input_files_tasmax[[reference_files]]$end_year +start_projection <- input_files_tasmax[[projection_files[1]]]$start_year +end_projection <- input_files_tasmax[[projection_files[1]]]$end_year + +#Regime parameters +metric <- params$metric +rcp8.5 <- params$rcp8.5 +rcp2.6 <- params$rcp2.6 +rcp_scenario <- c(rcp8.5, rcp2.6) + +fullpath_hist_tasmax <- filename_tasmax[[reference_files]] +file <- nc_open(fullpath_hist_tasmax) +historical_tasmax <- ncvar_get(file, "tasmax") +names(dim(historical_tasmax)) <- rev(names(file$dim))[-1] +lat <- ncvar_get(file, "lat") +lon <- ncvar_get(file, "lon") +units <- ncatt_get(file, "tasmax", "units")$value +calendario <- ncatt_get(file, "time", "calendar")$value +long_names <- ncatt_get(file, "tasmax", "long_name")$value +time <- ncvar_get(file, "time") +start_date <- as.POSIXct(substr(ncatt_get(file, "time", + "units")$value, 11, 29)) +nc_close(file) + +fullpath_hist_tasmin <- filename_tasmin[[reference_files]] +file <- nc_open(fullpath_hist_tasmin) +historical_tasmin <- ncvar_get(file, "tasmin") +names(dim(historical_tasmin)) <- rev(names(file$dim))[-1] +lat <- ncvar_get(file, "lat") +lon <- ncvar_get(file, "lon") +units <- ncatt_get(file, "tasmin", "units")$value +calendario <- ncatt_get(file, "time", "calendar")$value +long_names <- ncatt_get(file, "tasmin", "long_name")$value +tunits <- ncatt_get(file, "time", "units")$value +time <- ncvar_get(file, "time") +start_date <- as.POSIXct(substr(ncatt_get(file, "time", + "units")$value, 11, 29)) +nc_close(file) +dia <- as.Date(strsplit(tunits, " ")[[1]][3], format = "%Y-%m-%d") +time <- time + dia + + +dtr_base <- DTRRef( # nolint + tmax = historical_tasmax, + tmin = historical_tasmin, + by.seasons = TRUE, + ncores = NULL, + dates = time, + calendar = calendario +) + +for (i in 1 : length(projection_files)) { + fullpath_projection_tasmax <- filename_tasmax[[projection_files[i]]] + file <- nc_open(fullpath_projection_tasmax) + rcp_tasmax <- ncvar_get(file, "tasmax") + names(dim(rcp_tasmax)) <- rev(names(file$dim))[-1] + lat <- 
ncvar_get(file, "lat")
+  lon <- ncvar_get(file, "lon")
+  units <- ncatt_get(file, "tasmax", "units")$value
+  calendario <- ncatt_get(file, "time", "calendar")$value
+  long_names <- ncatt_get(file, "tasmax", "long_name")$value
+  time <- ncvar_get(file, "time")
+  start_date <- as.POSIXct(substr(ncatt_get(file, "time",
+                                            "units")$value, 11, 29))
+  nc_close(file)
+
+  fullpath_projection_tasmin <- filename_tasmin[[projection_files[i]]]
+  file <- nc_open(fullpath_projection_tasmin)
+  rcp_tasmin <- ncvar_get(file, "tasmin")
+  names(dim(rcp_tasmin)) <- rev(names(file$dim))[-1]
+  lat <- ncvar_get(file, "lat")
+  lon <- ncvar_get(file, "lon")
+  units <- ncatt_get(file, "tasmin", "units")$value
+  calendario <- ncatt_get(file, "time", "calendar")$value
+  long_names <- ncatt_get(file, "tasmin", "long_name")$value
+  tunits <- ncatt_get(file, "time", "units")$value
+  time <- ncvar_get(file, "time")
+  start_date <- as.POSIXct(substr(ncatt_get(file, "time",
+                                            "units")$value, 11, 29))
+  nc_close(file)
+
+  dia <- as.Date(strsplit(tunits, " ")[[1]][3], format = "%Y-%m-%d")
+  time <- time + dia
+
+  dtr_indicator <- DTRIndicator(
+    rcp_tasmax, rcp_tasmin, ref = dtr_base, by.seasons = TRUE, ncores = NULL,
+    dates = time, calendar = calendario
+  )
+
+  dtr_rcp <- array(dim = c(4, length(lon), length(lat)))
+  for (j in 1 : 4) {
+    dtr_rcp[j, , ] <- Mean1Dim(dtr_indicator$indicator[, j, , ], 1) #nolint
+  }
+  names(dim(dtr_rcp)) <- c("season", "lon", "lat")
+  title <- paste(
+    "Number of days exceeding the DTR by 5 degrees during the period",
+    start_projection, "-", end_projection)
+  PlotLayout( # nolint
+    PlotEquiMap, # nolint
+    plot_dims = c("lon", "lat"),
+    var = dtr_rcp,
+    lon = lon,
+    lat = lat,
+    titles = c("DJF", "MAM", "JJA", "SON"),
+    toptitle = title,
+    filled.continents = FALSE, units = "Days",
+    axelab = FALSE, draw_separators = TRUE, subsampleg = 1,
+    brks = seq(0, max(dtr_rcp), 2), color_fun = clim.palette("yellowred"),
+    bar_extra_labels = c(2, 0, 0, 0), title_scale = 0.7,
+    fileout = file.path(plot_dir, "rcp85.png")
+  )
+
+  dimlon <- ncdim_def(
+    name = "lon",
+    units = "degrees_east",
+    vals = as.vector(lon),
+    longname = "longitude"
+  )
+  dimlat <- ncdim_def(
+    name = "lat",
+    units = "degrees_north",
+    vals = as.vector(lat),
+    longname = "latitude"
+  )
+  dimseason <- ncdim_def(
+    name = "season",
+    units = "season",
+    vals = 1 : 4,
+    longname = "season of the year: DJF, MAM, JJA, SON"
+  )
+  defdata <- ncvar_def(
+    name = "VulnerabilityIndex",
+    units = "number_of_days",
+    dim = list(season = dimseason, lat = dimlat, lon = dimlon),
+    longname = paste0(
+      "Number of days exceeding by 5 degrees the Diurnal ",
+      "Temperature Range for the reference period")
+  )
+
+  filencdf <- paste0(
+    work_dir, "/", "Seasonal_DTRindicator_", model_names, "_",
+    start_projection, "_", end_projection, "_",
+    start_historical, "_", end_historical, ".nc")
+  file <- nc_create(filencdf, list(defdata))
+  ncvar_put(file, defdata, dtr_rcp)
+  nc_close(file)
+
+  # Set provenance for output files
+  xprov <- list(ancestors = list(filename_tasmin[[reference_files]],
+                                 filename_tasmax[[reference_files]],
+                                 filename_tasmin[[projection_files[i]]],
+                                 filename_tasmax[[projection_files[i]]]),
+                authors = list("hunt_al", "manu_ni", "caro_lo"),
+                projects = list("c3s-magic"),
+                caption = title,
+                statistics = list("other"),
+                realms = list("atmos"),
+                themes = list("phys"),
+                plot_file = file.path(plot_dir, "rcp85.png"))
+
+  provenance[[filencdf]] <- xprov
+}
+
+# Write provenance to file
+write_yaml(provenance, provenance_file)
diff --git
a/esmvaltool/diag_scripts/magic_bsc/extreme_index.r b/esmvaltool/diag_scripts/magic_bsc/extreme_index.r new file mode 100644 index 0000000000..180cf65550 --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/extreme_index.r @@ -0,0 +1,331 @@ +library(s2dverification) +library(multiApply) #nolint +library(yaml) +library(ncdf4) +library(abind) +library(parallel) +library(ClimProjDiags) #nolint + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +weights <- c(t90p = params$weight_t90p, t10p = params$weight_t10p, + Wx = params$weight_Wx, rx5day = params$weight_rx5day, + cdd = params$weight_cdd) +running_mean <- params$running_mean + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +wdata <- NULL +for (j in 1 : 4) { + input_files_per_var <- yaml::read_yaml(params$input_files[j]) + var0 <- lapply(input_files_per_var, function(x) x$short_name) + fullpath_filenames <- names(var0) + var0 <- unname(var0)[1] + experiment <- lapply(input_files_per_var, function(x) x$exp) + experiment <- unlist(unname(experiment)) + + reference_files <- which(unname(experiment) == "historical") + projection_files <- which(unname(experiment) != "historical") + + rcp_scenario <- unique(experiment[projection_files]) + model_names <- lapply(input_files_per_var, function(x) x$dataset) + model_names <- unlist(unname(model_names))[projection_files] + + start_reference <- lapply(input_files_per_var, function(x) x$start_year) + start_reference <- c(unlist(unname(start_reference))[reference_files])[1] + end_reference <- lapply(input_files_per_var, function(x) x$end_year) + end_reference <- c(unlist(unname(end_reference))[reference_files])[1] + + start_projection <- lapply(input_files_per_var, function(x) x$start_year) + start_projection <- c(unlist(unname(start_projection))[projection_files])[1] + end_projection <- lapply(input_files_per_var, function(x) x$end_year) + end_projection <- c(unlist(unname(end_projection))[projection_files])[1] + + var0 <- unlist(var0) + projection <- "NULL" + reference_filenames <- fullpath_filenames[reference_files] + hist_nc <- nc_open(reference_filenames) + historical_data <- ncvar_get(hist_nc, var0) + + names(dim(historical_data)) <- rev(names(hist_nc$dim))[-1] + lat <- ncvar_get(hist_nc, "lat") + lon <- ncvar_get(hist_nc, "lon") + units <- ncatt_get(hist_nc, var0, "units")$value + calendar <- ncatt_get(hist_nc, "time", "calendar")$value + long_names <- ncatt_get(hist_nc, var0, "long_name")$value + time <- ncvar_get(hist_nc, "time") + # Time correction: + start_date <- as.POSIXct(paste0(start_reference, "-01-01"), tz = "UTC", + format = "%Y-%m-%d") + end_date <- as.POSIXct(paste0(end_reference, "-12-31"), tz = "UTC", + format = "%Y-%m-%d") + nc_close(hist_nc) + time <- seq(start_date, end_date, "day") + if (calendar == "noleap" | calendar == "365_day" | calendar == "365") { + time <- time[format(time, "%m-%d") != "02-29"] + } else if (calendar == "360_day" | calendar == "360") { + time <- time[format(time, "%m-%d") != "02-29"] + time <- time[format(time, "%m-%d") != "01-31"] + time <- time[format(time, "%m-%d") != "05-31"] + time <- time[format(time, "%m-%d") != "07-31"] + time <- time[format(time, "%m-%d") != "10-31"] + time <- time[format(time, "%m-%d") != "12-31"] + } +# nolint start 
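+# Illustrative sketch of the reshape below (toy sizes, not real data):
+#   x <- array(1:24, c(2, 3, 4))          # lon, lat, time as read
+#   dim(x) <- c(model = 1, var = 1, lon = 2, lat = 3, time = 4)
+#   x <- aperm(x, c(1, 2, 5, 3, 4))       # -> model, var, time, lon, lat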
+#hist_names <- names(dim(historical_data)) +#jpeg(paste0(plot_dir, "/plot1.jpg")) +#PlotEquiMap(historical_data[1,1,1,,], lon = lon, lat = lat, filled = F) +#dev.off() +# ------------------------------ +# Provisional solution to error in dimension order: +# nolint end + historical_data <- as.vector(historical_data) + dim(historical_data) <- c(model = 1, var = 1, + lon = length(lon), lat = length(lat), time = length(time)) + historical_data <- aperm(historical_data, c(1, 2, 5, 3, 4)) +# nolint start +# ------------------------------ +#jpeg(paste0(plot_dir, "/plot2.jpg")) +#PlotEquiMap(historical_data[1,1,1,,], lon = lon, lat = lat, filled = F) +#dev.off() +# nolint end + names(dim(historical_data)) <- c("model", "var", "time", "lon", "lat") + time_dimension <- which(names(dim(historical_data)) == "time") + attributes(lon) <- NULL + attributes(lat) <- NULL + + dim(lon) <- c(lon = length(lon)) + dim(lat) <- c(lat = length(lat)) + model_dim <- which(names(dim(historical_data)) == "model") +###Compute the quantiles and standard deviation for the historical period. + if (var0 == "tasmin") { + quantile <- 0.1 + metric <- "t10p" + } else if (var0 == "tasmax") { + quantile <- 0.9 + metric <- "t90p" + } else if (var0 == "sfcWind") { + historical_data <- 0.5 * 1.23 * (historical_data ** 3) + quantile <- 0.9 + metric <- "Wx" + } else if (var0 == "pr") { + historical_data <- historical_data * 60 * 60 * 24 + metric <- c("rx5day", "cdd") + } + attr(historical_data, "Variables")$dat1$time <- time + + base_sd <- base_sd_historical <- base_mean <- list() + for (m in 1 : length(metric)) { + if (var0 != "pr") { + thresholds <- Threshold( #nolint + historical_data, calendar = calendar, + qtiles = quantile, ncores = detectCores() - 1) + str(thresholds) + base_index <- Climdex( #nolint + data = historical_data, calendar = calendar, + metric = metric[m], threshold = thresholds, + ncores = detectCores() - 1) + } else { + base_index <- Climdex( #nolint + data = historical_data, calendar = calendar, + metric = metric[m], ncores = detectCores() - 1) + } + base_sd[[m]] <- Apply( #nolint + list(base_index$result), target_dims = list(c(1)), + "sd")$output1 + base_sd_historical[[m]] <- InsertDim( #nolint + base_sd[[m]], 1, dim(base_index$result)[1]) + + if (var0 != "pr") { + base_mean[[m]] <- 10 + base_mean_historical <- 10 + } else { + base_mean[[m]] <- Apply( #nolint + list(base_index$result), target_dims = list(c(1)), + "mean")$output1 + base_mean_historical <- InsertDim( #nolint + base_mean[[m]], 1, dim(base_index$result)[1]) + } + } +# Compute the time series of the relevant index, using the quantiles +# and standard deviation from the index + projection_filenames <- fullpath_filenames[projection_files] + + for (i in 1 : length(projection_filenames)) { + proj_nc <- nc_open(projection_filenames[i]) + projection_data <- ncvar_get(proj_nc, var0) + time <- ncvar_get(proj_nc, "time") + # Time correction: + start_date <- as.POSIXct(paste0(start_projection, "-01-01"), tz = "UTC", + format = "%Y-%m-%d") + end_date <- as.POSIXct(paste0(end_projection, "-12-31"), tz = "UTC", + format = "%Y-%m-%d") + nc_close(proj_nc) + time <- seq(start_date, end_date, "day") + if (calendar == "noleap" | calendar == "365_day" | calendar == "365") { + time <- time[format(time, "%m-%d") != "02-29"] + } else if (calendar == "360_day" | calendar == "360") { + time <- time[format(time, "%m-%d") != "02-29"] + time <- time[format(time, "%m-%d") != "01-31"] + time <- time[format(time, "%m-%d") != "05-31"] + time <- time[format(time, "%m-%d") != 
"07-31"] + time <- time[format(time, "%m-%d") != "10-31"] + time <- time[format(time, "%m-%d") != "12-31"] + } + projection_data <- as.vector(projection_data) + dim(projection_data) <- c(model = 1, var = 1, lon = length(lon), + lat = length(lat), time = length(time)) + projection_data <- aperm(projection_data, c(1, 2, 5, 3, 4)) + attr(projection_data, "Variables")$dat1$time <- time + names(dim(projection_data)) <- c("model", "var", "time", "lon", "lat") + num_model <- dim(projection_data)["model"] + print(num_model) + # nolint start + # ------------------------------ + #jpeg(paste0(plot_dir, "/plot4.jpg")) + #PlotEquiMap(projection_data[1,1,1,,], lon = lon, lat = lat, filled = F) + #dev.off() + # nolint end + if (var0 == "pr") { + projection_data <- projection_data * 60 * 60 * 24 + } else if (var0 == "sfcWind") { + projection_data <- 0.5 * 1.23 * (projection_data ** 3) + } + + for (m in 1 : length(metric)) { + if (var0 != "pr") { + projection_index <- Climdex(data = projection_data, metric = metric[m], + calendar = calendar, threshold = thresholds, + ncores = detectCores() - 1) + projection_mean <- 10 + } else { #nolint + projection_index <- Climdex(data = projection_data, metric = metric[m], + calendar = calendar, + ncores = detectCores() - 1) + projection_mean <- InsertDim(base_mean[[m]], 1, #nolint + dim(projection_index$result)[1]) + } + base_sd_proj <- InsertDim(base_sd[[m]], 1, #nolint + dim(projection_index$result)[1]) + projection_index_standardized <- + (projection_index$result - projection_mean) / base_sd_proj + for (mod in 1 : num_model) { + model_dim <- which(names(dim(projection_index_standardized)) == "model") + if (length(model_dim) == 0) { + data <- drop(projection_index_standardized) + } else { + print(dim(projection_index_standardized)) + data <- drop(projection_index_standardized[, mod,,,]) + } + print(paste( + "Attribute projection from climatological data is saved and,", + "if it's correct, it can be added to the final output:", + projection)) + dimlon <- ncdim_def(name = "lon", units = "degrees_east", + vals = as.vector(lon), longname = "longitude") + dimlat <- ncdim_def(name = "lat", units = "degrees_north", + vals = as.vector(lat), longname = "latitude") + dimtime <- ncdim_def(name = "time", units = "Years", + vals = start_projection : end_projection, + longname = "Time in years") + defdata <- ncvar_def(name = "data", units = units, + dim = list(year = dimtime, lon = dimlon, lat = dimlat), + longname = paste("Annual", metric[m], long_names)) + filencdf <- paste0( + work_dir, "/", var0, "_", metric[m], "_risk_insurance_index_", + model_names, "_", start_projection, "_", end_projection, "_", + start_reference, "_", end_reference, ".nc") + file <- nc_create(filencdf, list(defdata)) + ncvar_put(file, defdata, projection_index_standardized) + nc_close(file) + + # Plottings + data <- drop(Mean1Dim(projection_index_standardized, 1)) + title <- paste0( + "Index for ", metric[m], " ", substr(start_projection, 1, 4), "-", + substr(end_projection, 1, 4), " ", " (", rcp_scenario[i], + " ", model_names, ")") + + breaks <- seq(-1 * ceiling(max(abs(data))), ceiling(max(abs(data))), + 2 * ceiling(max(abs(data))) / 16) + filepng <- paste0( + plot_dir, "/", metric[m], "_", model_names[mod], "_", + rcp_scenario[i], + "_", start_projection, "_", end_projection, ".png") + PlotEquiMap( #nolint + data, lon = lon, lat = lat, filled.continents = FALSE, + toptitle = title, sizetit = 0.5, brks = breaks, fileout = filepng) + # Set provenance for output files + xprov <- list(ancestors = 
list(projection_filenames, reference_filenames), + authors = list("hunt_al", "manu_ni", "caro_lo"), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + weight = weights[j + (m - 1)], + realms = list("atmos"), + themes = list("phys"), + plot_file = filepng) + provenance[[filencdf]] <- xprov + # compute weights in the data + lon <- as.vector(lon) + lat <- as.vector(lat) + temporal <- WeightedMean(projection_index_standardized, #nolint + lon = lon, lat = lat) + time_dim <- which(names(dim(temporal)) == "year") + if (!is.null(running_mean)) { + temporal <- Smoothing(temporal, runmeanlen = running_mean, #nolint + numdimt = time_dim) + timestamp <- paste0(running_mean, "-month-running-mean-") + } + wdata[[j + (m - 1)]] <- temporal + } #model index + } # metric index + } # number of projections +} # variable index +if (!is.numeric(weights)) { + data <- CombineIndices(wdata, weights = NULL) # nolint +} else { + data <- CombineIndices(wdata, weights = weights) # nolint +} + +# Plotting time series: +data <- drop(data) +if (length(data) >= 5) { + png(paste0(plot_dir, "/", "CombinedIndices.png")) + plot(start_projection : end_projection, data, type = "l", + lwd = 2, col = "darkblue", xlab = "Time (years)", + ylab = "Combined indices") + dev.off() +} +dimtime <- ncdim_def(name = "time", units = "years", + vals = start_projection : end_projection, longname = "time") +defdata <- ncvar_def(name = "data", units = "adimensional", + dim = list(time = dimtime), longname = paste("Combination", long_names)) +filencdf <- paste0(work_dir, "/", "_", paste0(model_names, collapse = "_"), + ".nc") +file <- nc_create(filencdf, list(defdata)) +ncvar_put(file, defdata, data) +nc_close(file) + +xprov <- list(ancestors = list(fullpath_filenames), + authors = list("hunt_al", "manu_ni", "pere_nu"), + projects = list("c3s-magic"), + plot_file = paste0(plot_dir, "/", "CombinedIndices.png"), + caption = "Combined selection", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys")) +provenance[[filencdf]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/extreme_spells.r b/esmvaltool/diag_scripts/magic_bsc/extreme_spells.r new file mode 100644 index 0000000000..8db805157a --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/extreme_spells.r @@ -0,0 +1,236 @@ +library(yaml) +library(s2dverification) +library(multiApply) # nolint +library(ncdf4) +library(climdex.pcic) +library(parallel) +library(ClimProjDiags) # nolint + + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +input_files_per_var <- yaml::read_yaml(params$input_files) +var_names <- names(input_files_per_var) +model_names <- lapply(input_files_per_var, function(x) x$dataset) +model_names <- unname(model_names) +var0 <- lapply(input_files_per_var, function(x) x$short_name) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] + +experiment <- lapply(input_files_per_var, function(x) x$exp) +experiment <- unlist(unname(experiment)) + +reference_files <- which(unname(experiment) == "historical") +projection_files <- which(unname(experiment) != "historical") + +rcp_scenario <- 
unique(experiment[projection_files]) +model_names <- lapply(input_files_per_var, function(x) x$dataset) +model_names <- unlist(unname(model_names))[projection_files] + +start_reference <- lapply(input_files_per_var, function(x) x$start_year) +start_reference <- c(unlist(unname(start_reference))[reference_files])[1] +end_reference <- lapply(input_files_per_var, function(x) x$end_year) +end_reference <- c(unlist(unname(end_reference))[reference_files])[1] + +start_projection <- lapply(input_files_per_var, function(x) x$start_year) +start_projection <- c(unlist(unname(start_projection))[projection_files])[1] +end_projection <- lapply(input_files_per_var, function(x) x$end_year) +end_projection <- c(unlist(unname(end_projection))[projection_files])[1] + + +op <- as.character(params$operator) +qtile <- params$quantile +spell_length <- params$min_duration +season <- params$season + +reference_filenames <- fullpath_filenames[reference_files] +projection <- "NULL" +reference_filenames <- fullpath_filenames[reference_files] +hist_nc <- nc_open(reference_filenames) +var0 <- unlist(var0) +historical_data <- ncvar_get(hist_nc, var0) + +names(dim(historical_data)) <- rev(names(hist_nc$dim))[-1] +lat <- ncvar_get(hist_nc, "lat") +lon <- ncvar_get(hist_nc, "lon") +units <- ncatt_get(hist_nc, var0, "units")$value +calendar <- ncatt_get(hist_nc, "time", "calendar")$value +long_names <- ncatt_get(hist_nc, var0, "long_name")$value +time <- ncvar_get(hist_nc, "time") +start_date <- as.POSIXct(substr(ncatt_get(hist_nc, "time", + "units")$value, 11, 29)) +nc_close(hist_nc) +time <- as.Date(time, origin = start_date, calendar = calendar) + + +historical_data <- as.vector(historical_data) +dim(historical_data) <- c( + model = 1, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) +) +historical_data <- aperm(historical_data, c(1, 2, 5, 4, 3)) +attr(historical_data, "Variables")$dat1$time <- time +print(dim(historical_data)) + +names(dim(historical_data)) <- c("model", "var", "time", "lon", "lat") +time_dimension <- which(names(dim(historical_data)) == "time") + +base_range <- c( + as.numeric(substr(start_reference, 1, 4)), + as.numeric(substr(end_reference, 1, 4)) +) +threshold <- Threshold(historical_data, base.range = base_range, #nolint + calendar = calendar, qtiles = qtile, ncores = NULL) + +projection_filenames <- fullpath_filenames[projection_files] +for (i in 1 : length(projection_filenames)) { + proj_nc <- nc_open(projection_filenames[i]) + projection_data <- ncvar_get(proj_nc, var0) + time <- ncvar_get(proj_nc, "time") + start_date <- as.POSIXct(substr(ncatt_get(proj_nc, "time", + "units")$value, 11, 29)) + calendar <- ncatt_get(hist_nc, "time", "calendar")$value + time <- as.Date(time, origin = start_date, calendar = calendar) + nc_close(proj_nc) + projection_data <- as.vector(projection_data) + dim(projection_data) <- c( + model = 1, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) + ) + projection_data <- aperm(projection_data, c(1, 2, 5, 4, 3)) + attr(projection_data, "Variables")$dat1$time <- time + names(dim(projection_data)) <- c("model", "var", "time", "lon", "lat") + # ------------------------------ + heatwave <- WaveDuration( # nolint + projection_data, + threshold, + calendar = calendar, + op = op, + spell.length = spell_length, + by.seasons = TRUE, + ncores = NULL + ) + + if (season == "summer") { + heatwave_season <- heatwave$result[seq(2, dim(heatwave$result)[1] - 2, + by = 4), 1, 1, , ]#nolint + years <- heatwave$years[seq(2, 
length(heatwave$years) - 2, by = 4)] + } else if (season == "winter") { + heatwave_season <- heatwave$result[seq(1, dim(heatwave$result)[1] - 2, + by = 4), 1, 1, , ]#nolint + years <- heatwave$years[seq(1, length(heatwave$years) - 1, by = 4)] + } else if (season == "spring") { + heatwave_season <- heatwave$result[seq(3, dim(heatwave$result)[1] - 2, + by = 4), 1, 1, , ]#nolint + years <- heatwave$years[seq(3, length(heatwave$years) - 2, by = 4)] + } else { + heatwave_season <- heatwave$result[seq(4, dim(heatwave$result)[1] - 2, + by = 4), 1, 1, , ]#nolint + years <- heatwave$years[seq(4, length(heatwave$years) - 2, by = 4)] + } + + data <- heatwave_season + names(dim(data)) <- c("time", "lon", "lat") + attributes(lon) <- NULL + attributes(lat) <- NULL + dim(lon) <- c(lon = length(lon)) + dim(lat) <- c(lat = length(lat)) + time <- as.numeric(substr(years, 1, 4)) + attributes(time) <- NULL + dim(time) <- c(time = length(time)) + print(paste( + "Attribute projection from climatological data is saved and,", + "if it's correct, it can be added to the final output:", + projection + )) + + dimlon <- ncdim_def( + name = "lon", units = "degrees_east", + vals = as.vector(lon), longname = "longitude") + dimlat <- ncdim_def( + name = "lat", units = "degrees_north", + vals = as.vector(lat), longname = "latitude") + dimtime <- ncdim_def( + name = "time", units = "years since 0-0-0 00:00:00", + vals = time, longname = "time") + defdata <- ncvar_def( + name = "duration", units = "days", + dim = list(season = dimtime, lat = dimlat, lon = dimlon), + longname = paste( + "Number of days during the period", start_projection, "-", end_projection, + "for", season, "in which", var0, "is", op, "than the", qtile, + "quantile obtained from", start_reference, "-", end_reference + ) + ) + filencdf <- paste0(work_dir, "/", var0, "_extreme_spell_duration", season, + "_", model_names, "_", rcp_scenario[i], "_", start_projection, "_", + end_projection, ".nc") + + file <- nc_create(filencdf, list(defdata)) + ncvar_put(file, defdata, data) + nc_close(file) + + + + + brks <- seq(0, 40, 4) + title <- paste0( + "Days ", season, " ", var0, " ", substr(start_projection, 1, 4), "-", + substr(end_projection, 1, 4), " ", op, " the ", qtile * 100, + "th quantile for ", substr(start_reference, 1, 4), "-", + substr(end_reference, 1, 4), " (", rcp_scenario[i], ")" + ) + filepng <- paste0( + plot_dir, "/", var0, "_extreme_spell_duration", season, "_", + model_names, "_", rcp_scenario[i], "_", start_projection, "_", + end_projection, ".png") + PlotEquiMap( Mean1Dim(data, 1), # nolint + lat = lat, + lon = lon, + filled.continents = FALSE, + brks = brks, + color_fun = clim.palette("yellowred"), + units = "Days", + toptitle = title, + fileout = filepng, + title_scale = 0.5 + ) + + # Set provenance for output files + + xprov <- list(ancestors = list(projection_filenames, reference_filenames), + authors = list("hunt_al", "manu_ni", "caro_lo"), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + op = as.character(params$operator), + qtile = params$quantile, + spell_length = params$min_duration, + season = params$season, + realms = list("atmos"), + themes = list("phys"), + plot_file = filepng) + + provenance[[filencdf]] <- xprov +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/multimodel_products.r b/esmvaltool/diag_scripts/magic_bsc/multimodel_products.r new file mode 100644 index 0000000000..fa9a27d9f8 --- /dev/null +++ 
b/esmvaltool/diag_scripts/magic_bsc/multimodel_products.r @@ -0,0 +1,548 @@ + +Sys.setenv(TAR = "/bin/tar") # nolint +library(s2dverification) +library(ClimProjDiags) # nolint +library(abind) +library(ggplot2) +library(yaml) +library(ncdf4) +library(multiApply)# nolint + +# Parsing input file paths and creating output dirs +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +input_files_per_var <- yaml::read_yaml(params$input_files) + +var0 <- lapply(input_files_per_var, function(x) x$short_name) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] +experiment <- lapply(input_files_per_var, function(x) x$exp) +experiment <- unlist(unname(experiment)) + +climatology_files <- which(unname(experiment) == "historical") +projection_files <- which(unname(experiment) != "historical") + +rcp_scenario <- unique(experiment[projection_files]) +model_names <- lapply(input_files_per_var, function(x) x$dataset) +model_names <- unlist(unname(model_names))[projection_files] + +start_climatology <- lapply(input_files_per_var, function(x) x$start_year) +start_climatology <- c(unlist(unname(start_climatology))[climatology_files])[1] +end_climatology <- lapply(input_files_per_var, function(x) x$end_year) +end_climatology <- c(unlist(unname(end_climatology))[climatology_files])[1] + +start_projection <- lapply(input_files_per_var, function(x) x$start_year) +start_projection <- c(unlist(unname(start_projection))[projection_files])[1] +end_projection <- lapply(input_files_per_var, function(x) x$end_year) +end_projection <- c(unlist(unname(end_projection))[projection_files])[1] + + +agreement_threshold <- params$agreement_threshold + +font_size <- 12 + +# Parameters for the Season() function +monini <- 1 +moninf <- params$moninf +monsup <- params$monsup +if (is.null(moninf) & !is.null(monsup)) { + moninf <- monsup +} else if (!is.null(moninf) & is.null(monsup)) { + monsup <- moninf +} +month_names <- c( + "JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", + "OCT", "NOV", "DEC" +) +if (moninf == monsup) { + months <- month_names[moninf] +} else { + months <- paste(month_names[moninf], month_names[monsup], sep = "-") +} + +time_series_plot <- params$time_series_plot +### Load data and compute climatologies and anomalies +var0 <- unlist(var0) +climatology_filenames <- fullpath_filenames[climatology_files] +ref_nc <- nc_open(fullpath_filenames[climatology_files][1]) +lat <- ncvar_get(ref_nc, "lat") +lon <- ncvar_get(ref_nc, "lon") +units <- ncatt_get(ref_nc, var0, "units")$value +calendar <- ncatt_get(ref_nc, "time", "calendar")$value +long_names <- ncatt_get(ref_nc, var0, "long_name")$value +time <- ncvar_get(ref_nc, "time") +reference_data <- InsertDim(ncvar_get(ref_nc, var0), 1, 1) # nolint +start_date <- as.POSIXct(substr(ncatt_get(ref_nc, "time", + "units")$value, 11, 29)) +time <- as.Date(time, origin = start_date, calendar = calendar) +projection <- "NULL" +nc_close(ref_nc) +for (i in 2 : length(fullpath_filenames[climatology_files])) { + ref_nc <- nc_open(fullpath_filenames[climatology_files][i]) + reference_data <- abind(reference_data, + InsertDim(ncvar_get(ref_nc, var0), 1, 1), along = 1) # nolint + 
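+ # Each remaining file adds one member along the leading "model" axis: + # InsertDim() creates the singleton dimension and abind() stacks along it.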
nc_close(ref_nc) +} +attr(reference_data, "Variables")$dat1$time <- time + +names(dim(reference_data)) <- c("model", "lon", "lat", "time") +# nolint start +#jpeg(paste0(plot_dir, "/plot.jpg")) +#PlotEquiMap(reference_data[1,1,1,,], lon = lon, lat = lat, filled = F) +#dev.off() +# ------------------------------ +# Provisional solution to error in dimension order and time values: +# nolint end +time <- attr(reference_data, "Variables")$dat1$time +attributes(time)$variables$time$calendar <- calendar +if ( (end_climatology - start_climatology + 1) * 12 == length(time) ) { + time <- seq( + as.Date( + paste(start_climatology, "01", "01", sep = "-"), + format = "%Y-%m-%d" + ), + as.Date( + paste(end_climatology, "12", "01", sep = "-"), + format = "%Y-%m-%d" + ), + "month" + ) +} + +num_models <- dim(reference_data)[which(names(dim(reference_data)) == "model")] +reference_data <- as.vector(reference_data) +dim(reference_data) <- c( + num_models, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) +) +reference_data <- aperm(reference_data, c(1, 2, 5, 4, 3)) +attr(reference_data, "Variables")$dat1$time <- time +names(dim(reference_data)) <- c("model", "var", "time", "lat", "lon") +# nolint start +# ------------------------------ +#jpeg(paste0(plot_dir, "/plot1.jpg")) +#PlotEquiMap(reference_data[1,1,1,,], lon = lon, lat = lat, filled = F) +#dev.off() +#--------------------------------------------- +# MONTHLY - SEASONAL - ANNUAL +# MONTH: moninf = monsup +# SEASONAL: specify the moninf and monsup; +# if winter: moninf = 12 monsup = 2; +# any other moninf > monsup allowed +#--------------------------------------------- +# nolint end + +dims <- dim(reference_data) +time_dim <- which(names(dim(reference_data)) == "time") +if (moninf <= monsup) { + dims <- append(dims, c(12, dims[time_dim] / 12), after = time_dim) + dims <- dims[-time_dim] + dim(reference_data) <- dims + names(dim(reference_data))[c(time_dim, time_dim + 1)] <- c("month", "year") + reference_seasonal_mean <- Season( + reference_data, + posdim = time_dim, + monini = monini, + moninf = moninf, + monsup = monsup + ) + reference_seasonal_mean <- adrop(adrop(reference_seasonal_mean, 2), 2) +} else { + if (monsup == 2 & moninf == 12) { + reference_seasonal_mean <- SeasonSelect( #nolint + reference_data, season = "DJF", dates = time, calendar = calendar + )$data + # Adding one NA december at the begining + time_dim <- which(names(dim(reference_seasonal_mean)) == "time") + dims <- dim(reference_seasonal_mean) + empty_array <- rep(NA, prod(dims[-time_dim])) + dims[time_dim] <- 1 + dim(empty_array) <- dims[-time_dim] + nom <- names(dim(reference_seasonal_mean)) + reference_seasonal_mean <- abind( + reference_seasonal_mean, empty_array, along = time_dim + ) + # and removing the last december + names(dim(reference_seasonal_mean)) <- nom + dimensiones <- 1 : length(dim(reference_seasonal_mean)) + reference_seasonal_mean <- Apply( + reference_seasonal_mean, + target_dims = time_dim, + fun = function (x) { + x[1 : (length(x) - 1)] + } + )$output1 + dims <- dim(reference_seasonal_mean) + time_dim <- which(names(dim(reference_seasonal_mean)) == "time") + dims <- append(dims, c(3, dims[time_dim] / 3), after = time_dim) + dims <- dims[-time_dim] + dim(reference_seasonal_mean) <- dims + names(dim(reference_seasonal_mean))[c(time_dim, time_dim + 1)] <- + c("season", "year") + reference_seasonal_mean <- Mean1Dim(reference_seasonal_mean, + posdim = time_dim) + } +} + +margins <- list(c(1 : 
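+# The climatology below is the mean over the "year" dimension of the seasonal +# means; it is later replicated with InsertDim() and subtracted from the +# projection data to form anomalies.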
length(dim(reference_seasonal_mean)))[-c(time_dim + 1)]) +years_dim <- which(names(dim(reference_seasonal_mean)) == "year") +climatology <- Mean1Dim(reference_seasonal_mean, years_dim) #nolint +projection_filenames <- fullpath_filenames[projection_files] +rcp_nc <- nc_open(projection_filenames[1]) +lat <- ncvar_get(rcp_nc, "lat") +lon <- ncvar_get(rcp_nc, "lon") +units <- ncatt_get(rcp_nc, var0, "units")$value +calendar <- ncatt_get(rcp_nc, "time", "calendar")$value +long_names <- ncatt_get(rcp_nc, var0, "long_name")$value +time <- ncvar_get(rcp_nc, "time") +rcp_data <- InsertDim(ncvar_get(rcp_nc, var0), 1, 1) # nolint +start_date <- as.POSIXct(substr(ncatt_get(rcp_nc, "time", + "units")$value, 11, 29)) +time <- as.Date(time, origin = start_date, calendar = calendar) + +nc_close(rcp_nc) +for (i in 2 : length(projection_filenames)) { + rcp_nc <- nc_open(projection_filenames[i]) + rcp_data <- abind(rcp_data, InsertDim(ncvar_get(rcp_nc, var0), 1, 1), #nolint + along = 1) + nc_close(rcp_nc) +} +attr(rcp_data, "Variables")$dat1$time <- time + +names(dim(rcp_data)) <- c("model", "lon", "lat", "time") +# nolint start +#jpeg(paste0(plot_dir, "/plot2.jpg")) +#PlotEquiMap(rcp_data[1,1,1,,], lon = lon, lat = lat, filled = F) +#dev.off() +# ------------------------------ +# Provisional solution to error in dimension order +#if (attributes(time)$variables$time$calendar != calendar) { +# print("Different calendars between climatology and anomaly.") +#} +# nolint end +if ( (end_projection - start_projection + 1) * 12 == length(time)) { + time <- seq( + as.Date( + paste(start_projection, "01", "01", sep = "-"), + format = "%Y-%m-%d" + ), + as.Date( + paste(end_projection, "12", "01", sep = "-"), + format = "%Y-%m-%d" + ), + "month" + ) +} +num_models <- dim(rcp_data)[which(names(dim(rcp_data)) == "model")] +rcp_data <- as.vector(rcp_data) +dim(rcp_data) <- c( + num_models, + var = 1, + lon = length(lon), + lat = length(lat), + time = length(time) +) +rcp_data <- aperm(rcp_data, c(1, 2, 5, 4, 3)) +names(dim(rcp_data)) <- c("model", "var", "time", "lat", "lon") +attr(rcp_data, "Variables")$dat1$time <- time + +# nolint start +# ------------------------------ +#jpeg(paste0(plot_dir, "/plot3.jpg")) +#PlotEquiMap(rcp_data[1,1,1,,], lon = lon, lat = lat, filled = F) +#dev.off() + + +#--------------------------------------------- +# MONTHLY - SEASONAL - ANNUAL +# MONTH: moninf = monsup +# SEASONAL: specify the moninf and monsup; +# if winter: moninf = 12 monsup = 2; +# any other moninf > monsup allowed +#--------------------------------------------- +# nolint end + +time_dim <- which(names(dim(rcp_data)) == "time") +dims <- dim(rcp_data) +mes <- as.numeric(substr(time, 6, 7)) + +if (moninf <= monsup) { + dims <- append(dims, c(12, dims[time_dim] / 12), after = time_dim) + dims <- dims[-time_dim] + dim(rcp_data) <- dims + names(dim(rcp_data))[c(time_dim, time_dim + 1)] <- c("month", "year") + rcp_seasonal_mean <- Season( + rcp_data, + posdim = time_dim, + monini = monini, + moninf = moninf, + monsup = monsup + ) + rcp_seasonal_mean <- adrop(adrop(rcp_seasonal_mean, 2), 2) +} else { + if (monsup == 2 & moninf == 12) { + rcp_seasonal_mean <- SeasonSelect( #nolint + rcp_data, season = "DJF", + dates = time, + calendar = calendar + )$data + time_dim <- which(names(dim(rcp_seasonal_mean)) == "time") + dims <- dim(rcp_seasonal_mean) + empty_array <- rep(NA, prod(dims[-time_dim])) + dims[time_dim] <- 1 + dim(empty_array) <- dims[-time_dim] + nom <- names(dim(rcp_seasonal_mean)) + rcp_seasonal_mean <- abind( + 
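+ # Same DJF bookkeeping as for the reference data above: pad with one NA + # December at the start, drop the trailing December, then regroup the + # series into (season, year) blocks of three months.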
rcp_seasonal_mean, empty_array, along = time_dim + ) + borrar <- dim(rcp_seasonal_mean)[time_dim] + names(dim(rcp_seasonal_mean)) <- nom + dimensiones <- 1 : length(dim(rcp_seasonal_mean)) + rcp_seasonal_mean <- Apply( # nolint + rcp_seasonal_mean, + target_dims = time_dim, + fun = function (x) { + x[1 : (length(x) - 1)] + } + )$output1 + dims <- dim(rcp_seasonal_mean) + time_dim <- which(names(dim(rcp_seasonal_mean)) == "time") + dims <- append(dims, c(3, dims[time_dim] / 3), after = time_dim) + dims <- dims[-time_dim] + dim(rcp_seasonal_mean) <- dims + names(dim(rcp_seasonal_mean))[c(time_dim, time_dim + 1)] <- + c("season", "year") + rcp_seasonal_mean <- Mean1Dim(rcp_seasonal_mean, posdim = time_dim) + rcp_seasonal_mean <- aperm(rcp_seasonal_mean, c(2, 1, 3, 4)) + } +} +years_dim <- which(names(dim(rcp_seasonal_mean)) == "year") +climatology <- InsertDim( # nolint + climatology, + years_dim, + lendim = dim(rcp_seasonal_mean)[years_dim] +) +anomaly <- rcp_seasonal_mean - climatology +multi_year_anomaly <- Mean1Dim(anomaly, years_dim) + +time <- seq(start_projection, end_projection, by = 1) +month <- moninf + if (month <= 9) { + month <- paste0(as.character(0), as.character(month)) + } + month <- paste0("-", month, "-") + day <- "01" + time <- as.POSIXct(paste0(time, month, day), tz = "CET") + time <- julian(time, origin = as.POSIXct("1970-01-01")) + +attributes(time) <- NULL +dim(time) <- c(time = length(time)) +metadata <- list(time = list( + standard_name = "time", + long_name = "time", + units = "days since 1970-01-01 00:00:00", + prec = "double", + dim = list(list(name = "time", unlim = FALSE)) +)) +attr(time, "variables") <- metadata + +#Save the single model anomalies +for (mod in 1 : length(model_names)) { + data <- anomaly[mod, , , ] # nolint + data <- aperm(data, c(2, 3, 1)) + names(dim(data)) <- c("lat", "lon", "time") + metadata <- list(variable = list( + dim = list(list(name = "time", unlim = FALSE)), + units = units + )) + names(metadata)[1] <- var0 + attr(data, "variables") <- metadata + attributes(lat) <- NULL + attributes(lon) <- NULL + dim(lat) <- c(lat = length(lat)) + dim(lon) <- c(lon = length(lon)) + variable_list <- list(variable = data, lat = lat, lon = lon, time = time) + names(variable_list)[1] <- var0 + + #ArrayToNetCDF( # nolint + # variable_list, + # paste0( + # plot_dir, "/", var0, "_", months, "_anomaly_", model_names[mod], + # "_", start_anomaly, "_", end_anomaly, "_", start_climatology, "_", + # end_climatology, ".nc" + # ) + #) +} + +model_anomalies <- WeightedMean( # nolint + anomaly, + lon = as.vector(lon), + lat = as.vector(lat), + mask = NULL +) +if (!is.null(params$running_mean)) { + model_anomalies <- Smoothing( # nolint + model_anomalies, + runmeanlen = params$running_mean, + numdimt = 2 + ) +} +data_frame <- as.data.frame.table(t(model_anomalies[, ])) +years <- rep(start_projection : end_projection, dim(model_anomalies)[1]) +data_frame$Year <- c(years) +names(data_frame)[2] <- "Model" + +for (i in 1 : length(levels(data_frame$Model))) { + levels(data_frame$Model)[i] <- model_names[i] +} + +if (time_series_plot == "single") { + g <- ggplot( + data_frame, + aes(x = Year, y = Freq, color = Model)) + theme_bw() + + geom_line() + ylab(paste0("Anomaly (", units, ")")) + xlab("Year") + + theme(text = element_text(size = font_size), + legend.text = element_text(size = font_size), + axis.title = element_text(size = font_size)) + + stat_summary(data = data_frame, fun.y = "mean", + mapping = aes(x = data_frame$Year, y = data_frame$Freq, + group = 
interaction(data_frame[2, 3]), + color = data_frame$Model), geom = "line", size = 1) + + ggtitle(paste0(months, " ", var0, " anomaly (", start_projection, "-", + end_projection, ") - ", "(", start_climatology, "-", end_climatology, + ")")) +} else { + g <- ggplot(data_frame, aes(x = Year, y = Freq)) + theme_bw() + + ylab(paste0("Anomaly (", units, ")")) + xlab("Year") + + theme(text = element_text(size = font_size), + legend.text = element_text(size = font_size), + axis.title = element_text(size = font_size)) + + stat_summary(data = data_frame, fun.y = "mean", + mapping = aes(x = data_frame$Year, y = data_frame$Freq, + group = interaction(data_frame[2, 3]), + color = data_frame$Model), geom = "line", size = 0.8) + + stat_summary(data = data_frame, geom = "ribbon", fun.ymin = "min", + fun.ymax = "max", mapping = aes(x = data_frame$Year, + y = data_frame$Freq, group = interaction(data_frame[2, 3])), + alpha = 0.3, color = "red", fill = "red") + + ggtitle(paste0(months, " ", var0, " anomaly (", start_projection, "-", + end_projection, ") - ", "(", start_climatology, "-", end_climatology, + ")")) +} +filepng1 <- paste0( + plot_dir, "/", "Area-averaged_", var0, "_", months, "_multimodel-anomaly_", + start_projection, "_", end_projection, "_", start_climatology, "_", + end_climatology, ".png") +ggsave( + filename = filepng1, + g, + device = NULL +) + +if (!is.null(agreement_threshold)) { + model_dim <- which(names(dim(multi_year_anomaly)) == "model") + agreement <- AnoAgree(multi_year_anomaly + # nolint + rnorm(length(unique(model_names)) * length(lat) * length(lon)), + membersdim = model_dim + ) +} else { + agreement_threshold <- 1000 + agreement <- NULL +} + +colorbar_lim <- ceiling(max(abs(max(multi_year_anomaly)), abs(min(data)))) +brks <- seq(-colorbar_lim, colorbar_lim, length.out = 21) +title <- paste0( + months, " ", var0, " anomaly (", start_projection, "-", end_projection, + ") - (", start_climatology, "-", end_climatology, ")") +data <- drop(Mean1Dim(multi_year_anomaly, model_dim)) + +filepng2 <- paste0(plot_dir, "/", var0, "_", months, "_multimodel-anomaly_", + start_projection, + "_", end_projection, "_", start_climatology, "_", end_climatology, ".png") +PlotEquiMap( # nolint + data, + lat = lat, + lon = lon, + brks = brks, + units = units, + toptitle = title, + filled.continents = FALSE, + dots = drop(agreement) >= agreement_threshold, + fileout = filepng2) +model_names_filename <- paste(model_names, collapse = "_") +print(paste( + "Attribute projection from climatological data is saved and,", + "if it's correct, it can be added to the final output:", + projection +)) + +dimlon <- ncdim_def( + name = "lon", + units = "degrees_east", + vals = as.vector(lon), + longname = "longitude" +) +dimlat <- ncdim_def( + name = "lat", + units = "degrees_north", + vals = as.vector(lat), + longname = "latitude" +) +defdata <- ncvar_def( + name = "data", + units = units, + dim = list(lat = dimlat, lon = dimlon), + longname = paste("Mean", long_names) +) +defagreement <- ncvar_def( + name = "agreement", + units = "%", + dim = list(lat = dimlat, lon = dimlon), + longname = "Agremeent between models") +filencdf <- paste0( + work_dir, "/", var0, "_", months, "_multimodel-anomaly_", + model_names_filename, "_", start_projection, "_", end_projection, "_", + start_climatology, "_", end_climatology, ".nc") +file <- nc_create(filencdf, list(defdata, defagreement)) +ncvar_put(file, defdata, data) +ncvar_put(file, defagreement, agreement) +nc_close(file) + + + # Set provenance for output files + xprov <- 
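+# One provenance record per output file, keyed by the NetCDF path; all +# records are written to diagnostic_provenance.yml at the end of the script.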
list(ancestors = list(fullpath_filenames), + authors = list("hunt_al", "manu_ni"), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + agreement_threshold = params$agreement_threshold, + moninf = params$moninf, + monsup = params$monsup, + running_mean = params$running_mean, + time_series_plot = params$time_series_plot, + realms = list("atmos"), + themes = list("phys"), + plot_file = filepng1) + + provenance[[filencdf]] <- xprov + + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/toymodel.r b/esmvaltool/diag_scripts/magic_bsc/toymodel.r new file mode 100644 index 0000000000..2e5622628e --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/toymodel.r @@ -0,0 +1,205 @@ +library(s2dverification) +library(ncdf4) +library(multiApply) #nolint +library(yaml) +library(abind) +library(ClimProjDiags) #nolint +library(RColorBrewer) #nolint + +args <- commandArgs(trailingOnly = TRUE) +params <- read_yaml(args[1]) +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir + +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +input_files_per_var <- yaml::read_yaml(params$input_files) + +model_names <- lapply(input_files_per_var, function(x) x$model) +model_names <- unname(model_names) +var0 <- lapply(input_files_per_var, function(x) x$short_name) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] +a <- 1 +b <- params$beta +g <- 0.1 +nm <- params$number_of_members +nstartd <- 1 +nleadt <- params$no_of_lead_times + + +var0 <- unlist(var0) +data_nc <- nc_open(fullpath_filenames) +data <- ncvar_get(data_nc, var0) +data <- InsertDim(InsertDim(data, 1, 1), 1, 1) #nolint +names(dim(data)) <- c("model", "var", "lon", "lat", "time") +lat <- ncvar_get(data_nc, "lat") +lon <- ncvar_get(data_nc, "lon") +lon <- unlist(lon) +lat <- unlist(lat) +print(lon) +print(lat) +attributes(lon) <- NULL +attributes(lat) <- NULL +units <- ncatt_get(data_nc, var0, "units")$value +calendar <- ncatt_get(data_nc, "time", "calendar")$value +long_names <- ncatt_get(data_nc, var0, "long_name")$value +time <- ncvar_get(data_nc, "time") +start_date <- as.POSIXct(substr(ncatt_get(data_nc, "time", "units")$value, + 11, 29)) +nc_close(data_nc) +time <- as.Date(time, origin = start_date, calendar = calendar) + +dim_names <- names(dim(data)) +lon_dim <- which(names(dim(data)) == "lon") +lat_dim <- which(names(dim(data)) == "lat") +data <- WeightedMean(data, lat = lat, lon = lon, #nolint + londim = lon_dim, latdim = lat_dim) +names(dim(data)) <- dim_names[-c(lon_dim, lat_dim)] +time_dim <- which(names(dim(data)) == "time") + +# Simplified local version of s2dverification's ToyModel(): each synthetic +# member equals the observed anomaly plus a shared "confidence" error term +# (fixed fxerr or drawn with sd = beta) plus a trend term; see the forecast +# loop below. +ToyModel <- function ( #nolint + alpha = 0.1, beta = 0.4, gamma = 1, sig = 1, trend = 0, + nstartd = 30, nleadt = 4, nmemb = 10, obsini = NULL, fxerr = NULL +) { + if (any(!is.numeric(c(alpha, beta, gamma, sig, trend, nstartd, + nleadt, nmemb)))) { + stop(paste("Parameters alpha, beta, gamma, sig, trend, nstartd,", + "nleadt and nmemb must be numeric.")) + } + nstartd <- round(nstartd) + nleadt <- round(nleadt) + nmemb <- round(nmemb) + if (!is.null(obsini)) { + if (!is.numeric(obsini) || !is.array(obsini)) { + stop("Parameter obsini must be a numeric array.") + } + if (length(dim(obsini)) != 4) { + stop(paste( + "Parameter obsini must be an array with dimensions", + "c(1, 1, nleadt, nstartd)." 
+ )) + } + if (dim(obsini)[3] != nstartd || dim(obsini)[4] != nleadt) { + stop(paste0( + "The dimensions of parameter obsini and the parameters ", + "nleadt and nstartd must match:\n dim(obsini) = c(", + dim(obsini)[3], ", ", dim(obsini)[4], ")\n nstartd = ", + nstartd, " nleadt = ", nleadt + )) + } + } + if (!is.null(fxerr)) { + if (!is.numeric(fxerr)) { + stop("Parameter fxerr must be numeric.") + } + } + if (nstartd < 0) { + stop("Number of start dates must be positive") + } + if (nleadt < 0) { + stop("Number of lead-times must be positive") + } + if (nmemb < 0) { + stop("Number of members must be positive") + } + + obs_ano <- obsini + + forecast <- array(dim = c(length(gamma), nmemb, nstartd, nleadt)) + for (j in 1 : nstartd) { + for (f in 1 : nleadt) { + for (g in 1 : length(gamma)) { + auto_term <- obs_ano[1, 1, j, f] + if (is.numeric(fxerr)) { + conf_term <- fxerr + } + else { + conf_term <- rnorm(1, mean = 0, sd = beta) + } + trend_term <- gamma[g] * trend * j + var_corr <- rnorm( + nmemb, + mean = 0, + sd = sqrt(sig - alpha ^ 2 - beta ^ 2) + ) + forecast[g, , j, f] <- matrix(auto_term, c(nmemb,1)) + #nolint + matrix(conf_term, c(nmemb, 1)) + matrix(trend_term, c(nmemb, 1)) + } + } + } + list(mod = forecast, obs = obs_ano) +} + +forecast <- ToyModel(#nolint + alpha = a, + beta = b, + gamma = g, + nmemb = nm, + obsini = InsertDim(data, 1, 1), # nolint + nstartd = 1, + nleadt = dim(data)[time_dim] +) + + +print(brewer.pal(n = nm, name = "Reds")) +filepng <- paste0(plot_dir, "/", "synthetic_", gsub(".nc", "", + basename(fullpath_filenames)), ".jpg") +jpeg(filepng, height = 460, width = 600) +title <- paste(nm, "synthetic members generated") +plot(time, forecast$obs, type = "l", + ylab = paste(var0, "(", units, ")"), + main = title, + bty = "n" +) +matlines( + time, + t(forecast$mod[1, , 1, ]), #nolint + col = brewer.pal(n = nm, name = "Blues") +) +lines(time, forecast$obs, lwd = 2) +dev.off() + + +obs_data <- forecast$obs +data <- forecast$mod[1, , 1, ] #nolint +names(dim(data))[c(1, 2)] <- c("number", "time") + +attributes(time) <- NULL +dim(time) <- c(time = length(time)) +metadata <- list(time = list(standard_name = "time", long_name = "time", + units = "days since 1970-01-01 00:00:00", prec = "double", + dim = list(list(name = "time", unlim = FALSE)))) +attr(time, "variables") <- metadata +metadata <- list(index = list(dim = list(list(name = "time", unlim = FALSE, + prec = "double")))) +names(metadata)[1] <- var0 +attr(data, "variables") <- metadata +variable_list <- list(variable = data, time = time) +names(variable_list)[1] <- var0 +print(str(data)) +filencdf <- paste0(work_dir, "/", "synthetic_", + basename(fullpath_filenames)) +ArrayToNetCDF(variable_list, filencdf) #nolint + + # Set provenance for output files + xprov <- list(ancestors = list(fullpath_filenames), + authors = list("bell_om"), + projects = list("c3s-magic"), + caption = title, + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + plot_file = filepng) + + provenance[[filencdf]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/magic_bsc/weather_regime.r b/esmvaltool/diag_scripts/magic_bsc/weather_regime.r new file mode 100644 index 0000000000..131d2d08fb --- /dev/null +++ b/esmvaltool/diag_scripts/magic_bsc/weather_regime.r @@ -0,0 +1,614 @@ +library(s2dverification) +library(ggplot2) +library(multiApply) #nolint +library(ncdf4) +library(gridExtra) #nolint +library(ClimProjDiags) #nolint +library(yaml) + 
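+# Illustrative sketch (not part of the upstream diagnostic): this script +# assigns anomaly maps to a few recurrent circulation patterns. The same idea +# in miniature with base R on synthetic data, assuming that k-means on +# truncated principal components mirrors what WeatherRegime() does below; all +# names here (sketch_*) are hypothetical: +# sketch_anom <- matrix(rnorm(200 * 50), nrow = 200) # days x grid points +# sketch_pcs <- prcomp(sketch_anom)$x[, 1:4] # keep leading PCs (EOFs) +# sketch_km <- kmeans(sketch_pcs, centers = 4, nstart = 25) +# table(sketch_km$cluster) / 200 * 100 # regime frequencies (%)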
+initial_options <- commandArgs(trailingOnly = FALSE) +file_arg_name <- "--file=" +script_name <- sub(file_arg_name, "", initial_options[grep(file_arg_name, + initial_options)]) +script_dirname <- dirname(script_name) +source(file.path(script_dirname, "WeatherRegime.r")) +source(file.path(script_dirname, "RegimesAssign.r")) + +## Regimes namelist +args <- commandArgs(trailingOnly = TRUE) +params <- yaml::read_yaml(args[1]) + +plot_dir <- params$plot_dir +run_dir <- params$run_dir +work_dir <- params$work_dir +## Create working dirs if they do not exist +dir.create(plot_dir, recursive = TRUE) +dir.create(run_dir, recursive = TRUE) +dir.create(work_dir, recursive = TRUE) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + + +input_files_per_var <- yaml::read_yaml(params$input_files) + +model_names <- lapply(input_files_per_var, function(x) x$dataset) +model_names <- unique(unlist(unname(model_names))) + + +var0 <- lapply(input_files_per_var, function(x) x$short_name) +fullpath_filenames <- names(var0) +var0 <- unname(var0)[1] + +experiment <- lapply(input_files_per_var, function(x) x$exp) +experiment <- unlist(unname(experiment)) + +reference_files <- which(unname(experiment) == "historical") +projection_files <- which(unname(experiment) != "historical") + + +#Region considered to select the plot +region <- params$plot_type + + +#Start and end periods for the historical and projection periods +start_historical <- lapply(input_files_per_var, + function(x) x$start_year)[reference_files] +starting1 <- c(unlist(unname(start_historical)))[1] +end_historical <- lapply(input_files_per_var, + function(x) x$end_year)[reference_files] +ending1 <- c(unlist(unname(end_historical)))[1] +start_historical <- as.POSIXct(as.Date(paste0(starting1, "-01-01"), + "%Y-%m-%d")) +end_historical <- as.POSIXct(as.Date(paste0(ending1, "-12-31"), "%Y-%m-%d")) + +start_projection <- lapply(input_files_per_var, + function(x) x$start_year)[projection_files] +starting2 <- c(unlist(unname(start_projection)))[1] +end_projection <- lapply(input_files_per_var, + function(x) x$end_year)[projection_files] +ending2 <- c(unlist(unname(end_projection)))[1] +start_projection <- as.POSIXct(as.Date(paste0(starting2, "-01-01"), + "%Y-%m-%d")) +end_projection <- as.POSIXct(as.Date(paste0(ending2, "-12-31"), + "%Y-%m-%d")) + + +#Regime parameters +ncenters <- params$ncenters +cluster_method <- params$cluster_method +if (cluster_method != "kmeans") { + cluster_method <- "complete" +} +EOFS <- params$EOFS +frequency <- params$frequency +detrend_order <- params$detrend_order + +if (grepl("OImon", fullpath_filenames[1])) { + var_lat <- "rlat" + var_lon <- "rlon" +} else { + var_lat <- "lat" + var_lon <- "lon" +} +# --------------------------- +# Reading and formating +# --------------------------- +ref_nc <- nc_open(fullpath_filenames[reference_files]) +var0 <- unlist(var0) +reference_data <- ncvar_get(ref_nc, var0) +if (var0 == "psl") { + names(dim(reference_data)) <- rev(names(ref_nc$dim))[-1] +} +lat <- ncvar_get(ref_nc, var_lat) +lon <- ncvar_get(ref_nc, var_lon) +units <- ncatt_get(ref_nc, var0, "units")$value +calendario <- ncatt_get(ref_nc, "time", "calendar")$value +long_names <- ncatt_get(ref_nc, var0, "long_name")$value +time <- ncvar_get(ref_nc, "time") +start_date <- as.POSIXct(substr(ncatt_get(ref_nc, "time", + "units")$value, 11, 29)) +nc_close(ref_nc) + +data_type <- ifelse(grepl("day", fullpath_filenames[1]), "day", "month") +dates_historical <- 
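+# Build the expected time axis at daily or monthly resolution; for 365-day +# ("noleap") calendars the Feb 29 entries are dropped just below so that the +# axis length matches the data read from file.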
seq(start_historical, end_historical, data_type) + +if (dim(reference_data)[3] != length(dates_historical)) { + if (calendario == "365" | calendario == "365_days" | + calendario == "365_day" | calendario == "noleap") { + dates_historical <- dates_historical[-which(substr(dates_historical, + 6, 10) == "02-29")]#nolint + } +} + +reference_data <- as.vector(reference_data) +dim(reference_data) <- c(model = 1, var = 1, + lon = length(lon), lat = length(lat), + time = length(dates_historical)) + +reference_data <- aperm(reference_data, c(1, 2, 5, 3, 4)) +attr(reference_data, "Variables")$dat1$time <- dates_historical + +names(dim(reference_data)) <- c("model", "var", "time", "lon", "lat") +time_dimension <- which(names(dim(reference_data)) == "time") + + +# ------------------------------- +## Selecting the season or month +# ------------------------------- +time_dim <- which(names(dim(reference_data)) == "time") + +months <- c("JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", + "OCT", "NOV", "DEC") +seasons <- c("DJF", "MAM", "JJA", "SON") +mes <- match(frequency, months) +sea <- match(frequency, seasons) +if (!is.na(mes)) { + dims <- dim(reference_data) + ind <- which(as.numeric(substr(dates_historical, 6, 7)) == mes) + years <- unique(as.numeric(substr(dates_historical, 1, 4))) + reference_data <- reference_data[ , , ind , , ] #nolint + dims <- append(dims, c(length(ind) / length(years), length(years)), + after = time_dim) +} else if (!is.na(sea)) { + reference_data <- SeasonSelect(reference_data, season = frequency, #nolint + dates = dates_historical, + calendar = calendario) + time <- reference_data$dates + years <- unique(as.numeric(substr(time, 1, 4))) + reference_data <- reference_data$data + reference_data <- InsertDim(reference_data, posdim = 1, lendim = 1) #nolint + reference_data <- InsertDim(reference_data, posdim = 1, lendim = 1) #nolint + names(dim(reference_data))[c(1, 2)] <- c("model", "var") + dims <- dim(reference_data) + dims <- append(dims, c(length(time) / length(years), length(years)), + after = time_dim) +} +dims <- dims[-time_dim] +dim(reference_data) <- dims + +names(dim(reference_data))[c(time_dim, time_dim + 1)] <- c("sdate", "ftime") + +Loess <- function(clim, loess_span) { + if (sum(is.na(clim)) != length(clim)) { + data <- data.frame(ensmean = clim, day = 1 : length(clim)) + loess_filt <- loess(ensmean ~ day, data, span = loess_span, + degree = detrend_order) + output <- predict(loess_filt) + } else { + output <- rep(NA, length(clim)) + } + return(output) +} +# ------------------------------- +## Computing the WR_obs +# ------------------------------- + +clim_obs <- array(apply(reference_data, c(1, 2, 3, 5, 6), mean), + dim = dim(reference_data)[-4]) +if (data_type == "day" | !is.na(sea)) { + clim_obs <- aperm(apply(clim_obs, + c(1 : length(dim(clim_obs)))[-which(names(dim(clim_obs)) == "sdate")], + Loess, loess_span = 1), c(2, 3, 1, 4, 5)) +} + +anom_obs <- Ano(reference_data, clim_obs) +print(dim(anom_obs)) +print(length(lon)) +WR_obs <- WeatherRegime(data = anom_obs, EOFS = EOFS, lat = lat, #nolint + lon = lon, ncenters = ncenters, + method = cluster_method) +if (cluster_method != "kmeans" & data_type == "day") { + WR_obs$composite <- aperm(WR_obs$composite, c(2, 1, 3, 4, 5)) +} +names(dim(WR_obs$composite)) <- c("lat", "lon", "Cluster", "Mod", "exp") +names(dim(WR_obs$cluster))[1] <- "Evolution" +# ------------------------------- +## Plotting the WR_obs output +# ------------------------------- + +if (cluster_method == "kmeans") { + 
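+ # For k-means, WR_obs$frequency holds one frequency per year and cluster, + # so average over the years; for hierarchical clustering (else branch) the + # frequencies are counted from the cluster membership series instead.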
clim_frequencies <- paste0("freq = ", + round(Mean1Dim(WR_obs$frequency, 1), 1), "%") +} else { + freq_clus <- NULL + for (i in 1 : ncenters) { + freq_clus <- c(freq_clus, round(sum(WR_obs$cluster[, 1, 1] == i) / + length(WR_obs$cluster[ , 1, 1]) * 100, 1)) + } + clim_frequencies <- paste0("freq = ", freq_clus, "%") + WR_obs$frequency <- freq_clus +} + +cosa <- aperm(drop(WR_obs$composite), c(3, 1, 2)) +lim <- max(abs(cosa / 100), na.rm = TRUE) +if (lim < 1) { + x <- floor(log10(lim)) + 1 + lim <- 10 ^ x +} else { + lim <- ceiling(lim) +} + +if (region == "polar") { + filepng1 <- paste0(plot_dir, "/", frequency, "-", var0, + "_observed_regimes.png") + title <- paste0(paste0("Cluster ", 1 : 4), " (", clim_frequencies, " )") + PlotLayout(PlotStereoMap, c(2, 3), lon = lon, lat = lat, #nolint + var = cosa / 100, titles = title, filled.continents = FALSE, + axelab = FALSE, draw_separators = TRUE, subsampleg = 1, + brks = seq(-1 * lim, lim, by = lim / 10), + fileout = filepng1) +} else { + filepng1 <- paste0(plot_dir, "/", frequency, "-", var0, + "_observed_regimes.png") + title <- paste0(paste0("Cluster ", 1 : 4), " (", clim_frequencies, " )") + if (length(lon) != dim(cosa)["lon"]) { + pos_lon <- which(names(dim(cosa)) == "lon") + pos_lat <- which(names(dim(cosa)) == "lat") + names(dim(cosa))[pos_lon] <- "lat" + names(dim(cosa))[pos_lat] <- "lon" + } + PlotLayout(PlotEquiMap, c(2, 3), lon = lon, lat = lat, #nolint + var = cosa / 100, titles = title, filled.continents = FALSE, + axelab = FALSE, draw_separators = TRUE, subsampleg = 1, + brks = seq(-1 * lim, lim, by = lim / 10), fileout = filepng1) +} + +# ------------------------------- +## Save the WR_obs output to ncdf +# ------------------------------- +time <- dates_historical +time <- julian(time, origin = as.POSIXct("1970-01-01")) +attributes(time) <- NULL +dim(time) <- c(time = length(time)) +metadata <- list(time = list(standard_name = "time", long_name = "time", + units = "days since 1970-01-01 00:00:00", prec = "double", + dim = list(list(name = "time", unlim = FALSE)))) +attr(time, "variables") <- metadata + +attributes(lon) <- NULL +attributes(lat) <- NULL +dim(lon) <- c(lon = length(lon)) +dim(lat) <- c(lat = length(lat)) +metadata <- list(variable = list(dim = list(list(name = "time", + unlim = FALSE)))) +dim(WR_obs$frequency) <- c(frequency = length(WR_obs$frequency)) +dim(WR_obs$pvalue) <- c(pvalue = length(WR_obs$pvalue)) +dim(WR_obs$cluster) <- c(cluster = length(WR_obs$cluster)) +if (cluster_method != "kmeans") { + names(dim(WR_obs$composite))[c(1, 2)] <- c("lon", "lat") +} +if (length(lon) != dim(WR_obs$composite)["lon"]) { + pos_lon <- which(names(dim(WR_obs$composite)) == "lon") + pos_lat <- which(names(dim(WR_obs$composite)) == "lat") + names(dim(WR_obs$composite))[pos_lon] <- "lat" + names(dim(WR_obs$composite))[pos_lat] <- "lon" +} +variable_list <- list(variable = WR_obs$composite, pvalue = WR_obs$pvalue, + cluster = WR_obs$cluster, frequency = WR_obs$frequency, + lat = lat, lon = lon, time = time) +names(variable_list)[1] <- var0 +attributes(variable_list) <- NULL +filencdf1 <- paste0(work_dir, "/", var0, "_", frequency, "_WR_obs_", + model_names, "_", start_projection, "_", end_projection, + "_", start_historical, "_", end_historical, ".nc") + +ArrayToNetCDF(variable_list, filencdf1) #nolint + # Set provenance for output files + xprov <- list(ancestors = list(fullpath_filenames[reference_files]), + authors = list("torr_ve", "fuck_ne","cort_ni", "guem_vi", + "hunt_al", "manu_ni"), + projects = list("c3s-magic"), + 
caption = "Observed modes of variability", + statistics = list("eof"), + ncenters = params$ncenters, + cluster_method = cluster_method, + EOFS = params$EOFS, + frequency = params$frequency, + detrend_order = params$detrend_order, + realms = list("atmos"), + themes = list("phys"), + plot_file = filepng1) + + provenance[[filencdf1]] <- xprov +# --------------------------- +# --------------------------- +# Reading and formating +# --------------------------- +proj_nc <- nc_open(fullpath_filenames[projection_files]) +projection_data <- ncvar_get(proj_nc, var0) +names(dim(projection_data)) <- rev(names(proj_nc$dim))[-1] +time <- ncvar_get(proj_nc, "time") +start_date <- as.POSIXct(substr(ncatt_get(proj_nc, "time", + "units")$value, 11, 29)) +nc_close(proj_nc) + +dates_projection <- seq(start_projection, end_projection, data_type) + +if (dim(projection_data)[3] != length(dates_projection)) { + if (calendario == "365" | calendario == "365_days" | + calendario == "365_day" | calendario == "noleap") { + dates_projection <- dates_projection[-which(substr(dates_projection, + 6, 10) == "02-29")] #nolint + } +} +data <- as.vector(projection_data) +dim(projection_data) <- c(model = 1, var = 1, lon = length(lon), + lat = length(lat), time = length(time)) +projection_data <- aperm(projection_data, c(1, 2, 5, 3, 4)) +attr(projection_data, "Variables")$dat1$time <- dates_projection +names(dim(projection_data)) <- c("model", "var", "time", "lon", "lat") +time_dimension <- which(names(dim(projection_data)) == "time") + +# --------------------------- +# Selecting the period +# --------------------------- +time_dim <- which(names(dim(projection_data)) == "time") + +if (!is.na(mes)) { + dims <- dim(projection_data) + ind <- which(as.numeric(substr(dates_projection, 6, 7)) == mes) + years <- unique(as.numeric(substr(dates_projection, 1, 4))) + projection_data <- projection_data[ , , ind , , ] #nolint + dims <- append(dims, c(length(ind) / length(years), length(years)), + after = time_dim) +} else if (!is.na(sea)) { + projection_data <- SeasonSelect(projection_data, #nolint + season = frequency, + dates = dates_projection, + calendar = calendario) + time <- projection_data$dates + years <- unique(as.numeric(substr(time, 1, 4))) + projection_data <- projection_data$data + projection_data <- InsertDim(projection_data, posdim = 1, lendim = 1)#nolint + projection_data <- InsertDim(projection_data, posdim = 1, lendim = 1)#nolint + + names(dim(projection_data))[c(1, 2)] <- c("model", "var") + dims <- dim(projection_data) + dims <- append(dims, + c(length(time) / length(years), length(years)), + after = time_dim) +} +dims <- dims[-time_dim] +dim(projection_data) <- dims +names(dim(projection_data))[c(time_dim, time_dim + 1)] <- c("sdate", "ftime") + +clim_ref <- array(apply(projection_data, c(1, 2, 3, 5, 6), mean), + dim = dim(projection_data)[-4]) +if (data_type == "day" | !is.na(sea)) { + clim_ref <- aperm(apply(clim_ref, c(1 : length(dim(clim_ref))) + [-which(names(dim(clim_ref)) == "sdate")], + Loess, loess_span = 1), c(2, 3, 1, 4, 5)) +} +anom_exp <- Ano(projection_data, clim_ref) +reference <- drop(WR_obs$composite) +if (cluster_method == "kmeans") { + names(dim(reference)) <- c("lat", "lon", "nclust") + reference <- aperm(reference, c(3, 2, 1)) + names(dim(reference)) <- c("nclust", "lon", "lat") +} else { + names(dim(reference)) <- c("lon", "lat", "nclust") +} +if (length(lon) != dim(reference)["lon"]) { + pos_lon <- which(names(dim(reference)) == "lon") + pos_lat <- which(names(dim(reference)) == "lat") + 
names(dim(reference))[pos_lon] <- "lat" + names(dim(reference))[pos_lat] <- "lon" +} +if (length(lon) != dim(anom_exp)["lon"]) { + pos_lon <- which(names(dim(anom_exp)) == "lon") + pos_lat <- which(names(dim(anom_exp)) == "lat") + names(dim(anom_exp))[pos_lon] <- "lat" + names(dim(anom_exp))[pos_lat] <- "lon" +} + +if (any(names(dim(reference)) == "Cluster")) { + pos <- which(names(dim(reference)) == "Cluster") + names(dim(reference)) <- "nclust" #nolint +} +if ((which(names(dim(reference)) == "lon") < #nolint + which(names(dim(reference)) == "lat") & + which(names(dim(anom_exp)) == "lon") > + which(names(dim(anom_exp)) == "lat")) | + (which(names(dim(reference)) == "lon") > + which(names(dim(reference)) == "lat") & + which(names(dim(anom_exp)) == "lon") < + which(names(dim(anom_exp)) == "lat"))) { #nolint + dim_names <- names(dim(reference)) + pos_lon <- which(names(dim(reference)) == "lon") + pos_lat <- which(names(dim(reference)) == "lat") + pos <- 1 : length(dim(reference)) + pos[pos_lon] <- pos_lat + pos[pos_lat] <- pos_lon + reference <- aperm(reference, pos) + names(dim(reference))[pos_lon] == "lat" + names(dim(reference))[pos_lat] == "lon" +} +WR_exp <- RegimesAssign(var_ano = anom_exp, ref_maps = reference, #nolint + lats = lat, method = "distance") + + +# --------------------------- +# Plotting WR projection: +# --------------------------- + +if (cluster_method == "kmeans") { + cosa <- aperm(WR_exp$composite, c(3, 2, 1)) + names(dim(WR_exp$composite))[3] <- "nclust" +} else { + names(dim(WR_exp$composite))[3] <- "nclust" +} +lim <- max(abs(cosa / 100), na.rm = TRUE) +if (lim < 1) { + x <- floor(log10(lim)) + 1 + lim <- 10 ^ x +} else { + lim <- ceiling(lim) +} +if (region == "polar") { + filepng2 <- paste0(plot_dir, "/", frequency, "-", var0, + "_predicted_regimes.png") + title <- paste0(paste0("Cluster ", 1 : dim(cosa)[1], " (", + paste0("freq = ", round(WR_exp$frequency, 1), "%"), " )")) + PlotLayout(PlotStereoMap, c(2, 3), lon = lon, lat = lat, #nolint + var = cosa / 100, titles = title, + filled.continents = FALSE, draw_separators = TRUE, + subsampleg = 1, brks = seq(-1 * lim, lim, by = lim / 10), + fileout = filepng2) +} else { + filepng2 <- paste0(plot_dir, "/", frequency, "-", var0, + "_predicted_regimes.png") + title <- paste0(paste0("Cluster ", 1 : dim(cosa)[1], " (", + paste0("freq = ", round(WR_exp$frequency, 1), "%"), " )")) + PlotLayout(PlotEquiMap, c(2, 3), lon = lon, lat = lat, #nolint + var = cosa / 100, titles = title, filled.continents = FALSE, + axelab = FALSE, draw_separators = TRUE, subsampleg = 1, + brks = seq(-1 * lim, lim, by = lim / 10), + fileout = filepng2) +} + + +# ------------------------------- +## Save the WR_exp output to ncdf +# ------------------------------- +time <- dates_projection +time <- julian(time, origin = as.POSIXct("1970-01-01")) +attributes(time) <- NULL +dim(time) <- c(time = length(time)) +metadata <- list(time = list(standard_name = "time", long_name = "time", + units = "days since 1970-01-01 00:00:00", prec = "double", + dim = list(list(name = "time", unlim = FALSE)))) +attr(time, "variables") <- metadata + +attributes(lon) <- NULL +attributes(lat) <- NULL +dim(lon) <- c(lon = length(lon)) +dim(lat) <- c(lat = length(lat)) +metadata <- list(variable = list(dim = list(list(name = "time", + unlim = FALSE)))) + +dim(WR_exp$frequency) <- c(frequency = length(WR_exp$frequency)) +dim(WR_exp$pvalue) <- c(pvalue = length(WR_exp$pvalue)) +dim(WR_exp$cluster) <- c(cluster = length(WR_exp$cluster)) + +variable_list <- list(variable = 
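+# Bundle the composites, p-values, cluster series and frequencies into one +# list; ArrayToNetCDF() writes each element as a separate NetCDF variable.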
WR_exp$composite, pvalue = WR_exp$pvalue, + cluster = WR_exp$cluster, frequency = WR_exp$frequency, + lat = lat, lon = lon, time = time) +names(variable_list)[1] <- var0 + +attributes(variable_list) <- NULL +filencdf2 <- paste0(work_dir, "/", var0, "_", frequency, "_WR_exp_", + model_names, "_", start_projection, "_", end_projection, + "_", start_historical, "_", end_historical, ".nc") +ArrayToNetCDF(variable_list, filencdf2) #nolint + + # Set provenance for output files + xprov <- list(ancestors = list(filencdf1, + fullpath_filenames[projection_files]), + authors = list("torr_ve", "fuck_ne","cort_ni", "guem_vi", + "hunt_al", "manu_ni"), + projects = list("c3s-magic"), + caption = "Predicted modes of variability", + statistics = list("other"), + ncenters = params$ncenters, + cluster_method = cluster_method, + EOFS = params$EOFS, + frequency = params$frequency, + detrend_order = params$detrend_order, + realms = list("atmos"), + themes = list("phys"), + plot_file = filepng2) + + provenance[[filencdf2]] <- xprov +# --------------------------- +# Computing the RMSE: +# --------------------------- +if (cluster_method != "kmeans") { + cosa <- aperm(cosa, c(2, 3, 1)) + reference <- aperm(reference, c(3, 1, 2)) +} else { + cosa <- aperm(WR_exp$composite, c(2, 1, 3)) +} +if (length(lon) != dim(cosa)["lon"]) { + pos_lon <- which(names(dim(cosa)) == "lon") + pos_lat <- which(names(dim(cosa)) == "lat") + names(dim(cosa))[pos_lon] <- "lat" + names(dim(cosa))[pos_lat] <- "lon" +} +if ((which(names(dim(reference)) == "lon") < #nolint + which(names(dim(reference)) == "lat") & + which(names(dim(cosa)) == "lon") > + which(names(dim(cosa)) == "lat")) | + (which(names(dim(reference)) == "lon") > + which(names(dim(reference)) == "lat") & + which(names(dim(cosa)) == "lon") < + which(names(dim(cosa)) == "lat"))) { #nolint + dim_names <- names(dim(reference)) + pos_lon <- which(names(dim(reference)) == "lon") + pos_lat <- which(names(dim(reference)) == "lat") + pos <- 1 : length(dim(reference)) + pos[pos_lon] <- pos_lat + pos[pos_lat] <- pos_lon + reference <- aperm(reference, pos) + names(dim(reference))[pos_lon] == "lat" + names(dim(reference))[pos_lat] == "lon" +} + +rmse <- NULL +for (i in 1 : ncenters) { + for (j in 1 : ncenters) { + rmse <- c(rmse, sqrt(mean( (reference[i, , ] - cosa[, , j]) ^ 2, + na.rm = T))) + } +} +dim(rmse) <- c(ncenters, ncenters) +print(rmse) + +dimpattern <- ncdim_def(name = "pattern", units = "undim", + vals = 1 : ncenters, longname = "Pattern") +title <- paste0("Root Mean Squared Error between observed and ", + "future projected patterns") +defrmse <- ncvar_def(name = "rmse", units = "undim", + dim = list(observed = dimpattern, + experiment = dimpattern), + longname = title) +filencdf3 <- paste0(work_dir, "/", var0, "_", frequency, "_rmse_", + model_names, "_", start_projection, "_", + end_projection, "_", start_historical, "_", + end_historical, ".nc") +file <- nc_create(filencdf3, list(defrmse)) +ncvar_put(file, defrmse, rmse) + +nc_close(file) + +colnames(rmse) <- paste("Obs", 1 : ncenters) +rownames(rmse) <- paste("Pre", 1 : ncenters) +filepng3 <- paste0(file.path(plot_dir, "Table_"), var0, "_", frequency, + "_rmse_", model_names, "_", start_projection, "_", + end_projection, "_", start_historical, + "_", end_historical, ".png") +png(filepng3, height = 6, width = 18, units = "cm", res = 100) +grid.table(round(rmse, 2)) +dev.off() + + # Set provenance for output files + xprov <- list(ancestors = list(filencdf1, filencdf2), + authors = list("torr_ve", "fuck_ne","cort_ni", 
"guem_vi", + "hunt_al", "manu_ni"), + projects = list("c3s-magic"), + caption = title, + statistics = list("rmsd"), + ncenters = params$ncenters, + cluster_method = cluster_method, + EOFS = params$EOFS, + frequency = params$frequency, + detrend_order = params$detrend_order, + realms = list("atmos"), + themes = list("phys"), + plot_file = filepng3) + + provenance[[filencdf3]] <- xprov + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/miles/basis_functions.R b/esmvaltool/diag_scripts/miles/basis_functions.R new file mode 100644 index 0000000000..69c10a7ec7 --- /dev/null +++ b/esmvaltool/diag_scripts/miles/basis_functions.R @@ -0,0 +1,1510 @@ +# basis functions + + +########################################################## +#------------------------Packages------------------------# +########################################################## + +# loadin packages +library("maps") +library("ncdf4") +library("PCICt") + +# check if fast linear fit is operative (after R 3.1): 3x faster than lm.fit, 36x faster than lm +if (exists(".lm.fit")) { + lin.fit <- .lm.fit +} else { + lin.fit <- lm.fit +} + +# check R version as numeric +R_version <- as.numeric(R.Version()$major) + as.numeric(R.Version()$minor) / 10 + +########################################################## +#-----------------Basic functions------------------------# +########################################################## + +# normalize a time series +standardize <- function(timeseries) { + out <- (timeseries - mean(timeseries, na.rm = T)) / sd(timeseries, na.rm = T) + return(out) +} + + +# detect ics ipsilon lat-lon +whicher <- function(axis, number) { + out <- which.min(abs(axis - number)) + return(out) +} + +# produce a 2d matrix of area weight +area_weight <- function(ics, ipsilon, root = T) { + field <- array(NA, dim = c(length(ics), length(ipsilon))) + if (root == T) { + for (j in 1:length(ipsilon)) { + field[, j] <- sqrt(cos(pi / 180 * ipsilon[j])) + } + } + + if (root == F) { + for (j in 1:length(ipsilon)) { + field[, j] <- cos(pi / 180 * ipsilon[j]) + } + } + + return(field) +} + +# sector details for blocking extra diagnostics and EOFs sectors +sector_details <- function(SECTOR) { + if (SECTOR == "Euro") { + lons <- c(-15, 25) + lats <- c(50, 65) + namesec <- "Central Europe" + } + if (SECTOR == "Azores") { + lons <- c(-70, -10) + lats <- c(30, 40) + namesec <- "Central Atlantic" + } + if (SECTOR == "Greenland") { + lons <- c(-65, -15) + lats <- c(62.5, 72.5) + namesec <- "Greenland" + } + if (SECTOR == "FullPacific") { + lons <- c(130, -150) + lats <- c(60, 75) + namesec <- "North Pacific" + } + if (SECTOR == "FullPacific2") { + lons <- c(130, 210) + lats <- c(60, 75) + namesec <- "North Pacific" + } + + left1 <- which.min(abs(ics - lons[1])) + right1 <- which.min(abs(ics - lons[2])) + low1 <- which.min(abs(ipsilon - lats[1])) + high1 <- which.min(abs(ipsilon - lats[2])) + + latssel <- low1:high1 + if (SECTOR == "FullPacific") { + lonssel <- c(left1:length(ics), 1:right1) + } else { + lonssel <- left1:right1 + } + out <- list( + lons = lons, lonssel = lonssel, lats = lats, latssel = latssel, + name = namesec + ) + return(out) +} + +# weighted correlation +weighted_cor <- function(x, y, w) { + w_mean_x <- sum(w * x) / sum(w) + w_mean_y <- sum(w * y) / sum(w) + + w_cov_xy <- sum(w * (x - w_mean_x) * (y - w_mean_y)) / sum(w) + w_var_y <- sum(w * (y - w_mean_y) * (y - w_mean_y)) / sum(w) + w_var_x <- sum(w * (x - w_mean_x) * (x - w_mean_x)) / sum(w) + + corr <- w_cov_xy 
/ sqrt(w_var_x * w_var_y) + return(corr) +} + +# weighted standard deviations +weighted_sd <- function(x, w) { + w_mean <- sum(w * x) / sum(w) + v1 <- sum(w) + v2 <- sum(w ^ 2) + var <- v1 / (v1 ^ 2 - v2) * sum(w * (x - w_mean) ^ 2) + sdd <- sqrt(var) + return(sdd) +} + +# info string creator +info_builder <- function(dataset, expid, ens, year1, year2, season) { + + # loop on descriptors that are concatenated to create info string + descriptors <- c(dataset, expid, ens, paste0(year1, "-", year2), season) + info <- NULL + for (dcode in descriptors) { + if (length(dcode) > 0) { + info <- paste(info, dcode) + } + } + return(info) +} + +# basic switch to create NetCDF file names and folders (use recursive structure from v0.6) +file_builder <- function(DATADIR, dir_name, file_name, dataset, expid, ens, + year1, year2, season) { + + # loop on descriptors that are concatenated to create dir and file name + descriptors <- c(dataset, expid, ens, paste0(year1, "-", year2), season) + for (dcode in descriptors) { + if (length(dcode) > 0) { + DATADIR <- file.path(DATADIR, dcode) + file_name <- paste0(file_name, "_", dcode) + } + } + + # add directory name descriptor + DATADIR <- file.path(DATADIR, dir_name) + + # actually dir.exists is in devtools only for R < 3.2, + # then is included in base package + if (exists("dir.exists")) { + if (!dir.exists(DATADIR)) { + dir.create(DATADIR, recursive = T) + } + } else { + dir.create(DATADIR, recursive = T, showWarnings = F) + } + return(file.path(DATADIR, paste0(file_name, ".nc"))) +} + +# basic switch to create figures names and folders +# (use recursive structure from v0.6) +fig_builder <- function(FIGDIR, dir_name, file_name, dataset, expid, + ens, year1, year2, season, output_file_type) { + + # loop on descriptors that are concatenated to create dir and file name + descriptors <- c(dataset, expid, ens, paste0(year1, "-", year2), season) + for (dcode in descriptors) { + if (dcode != "NO") { + FIGDIR <- file.path(FIGDIR, dcode) + file_name <- paste0(file_name, "_", dcode) + } + } + + # add directory name descriptor + FIGDIR <- file.path(FIGDIR, dir_name) + + # actually dir.exists is in devtools only for R < 3.2, + # then is included in base package + if (exists("dir.exists")) { + if (!dir.exists(FIGDIR)) { + dir.create(FIGDIR, recursive = T) + } + } else { + dir.create(FIGDIR, recursive = T, showWarnings = F) + } + + return(file.path(FIGDIR, paste0(file_name, ".", output_file_type))) +} + +# progression bar +progression_bar <- function(index, total_length, each = 10) { + if (any(index == round(seq(0, total_length, , each + 1)))) { + progression <- paste("--->", round(index / total_length * 100), "%") + print(progression) + } +} + + +########################################################## +#--------------Time Based functions----------------------# +########################################################## + +# to convert season charname to months number +season2timeseason <- function(season) { + if (nchar(season) == 3 & toupper(season) == season) { + if (season == "ALL") { + timeseason <- 1:12 + } + if (season == "JJA") { + timeseason <- 6:8 + } + if (season == "DJF") { + timeseason <- c(1, 2, 12) + } + if (season == "MAM") { + timeseason <- 3:5 + } + if (season == "SON") { + timeseason <- 9:11 + } + } else { + charseason <- strsplit(season, "_")[[1]] + print(charseason) + if (mean(nchar(charseason)) == 3) { + timeseason <- which(charseason == month.abb) + } else { + timeseason <- which(charseason == month.name) + } + } + print(timeseason) + if 
(length(timeseason) == 0 | min(timeseason) < 0 | max(timeseason) > 13) { + stop("wrong season selected!") + } + return(timeseason) +} + +# leap year treu/false function +is_leapyear <- function(year) { + return( ( (year %% 4 == 0) & (year %% 100 != 0) ) | (year %% 400 == 0) ) +} + +# check number of days for each month +number_days_month <- function(datas) { + # evaluate the number of days in a defined month of a year + datas <- as.Date(datas) + m <- format(datas, format = "%m") + while (format(datas, format = "%m") == m) { + datas <- datas + 1 + } + return(as.integer(format(datas - 1, format = "%d"))) +} + +power_date_new <- function(datas) { + whichdays <- as.numeric(format(datas, "%m")) + # create a "season" for continuous time, used by persistance tracking + seas <- whichdays * 1 + ss <- 1 + for (i in 1:(length(whichdays) - 1)) { + if (diff(whichdays)[i] > 1) { + ss <- ss + 1 + } + seas[i + 1] <- ss + } + + etime <- list( + day = as.numeric(format(datas, "%d")), + month = as.numeric(format(datas, "%m")), + year = as.numeric(format(datas, "%Y")), data = datas, season = seas + ) + print("Time Array Built") + print(paste("Length:", length(seas))) + print(paste("From", datas[1], "to", datas[length(seas)])) + return(etime) +} + +########################################################## +#--------------NetCDF loading function-------------------# +########################################################## + +# universal function to open a single var 3D (x,y,time) ncdf files: +# it includes rotation, y-axis filpping, possible time selection and +# CDO-based interpolation to replace both ncdf.opener.time and ncdf.opener +# (deprecated and removed) +# automatically rotate matrix to place greenwich at the center (flag "rotate") +# and flip the latitudes in order to have increasing +# if required (flag "interp2grid") additional interpolation with CDO can be +# used. 
"grid" can be used to specify the target grid name +# time selection based on package PCICt must be specifed with +# both "tmonths" and "tyears" flags +# it returns a list including its own dimensions +ncdf_opener_universal <- function(namefile, namevar = NULL, namelon = NULL, + namelat = NULL, tmonths = NULL, + tyears = NULL, rotate = "full", + interp2grid = F, grid = "r144x73", + remap_method = "remapcon2", + exportlonlat = TRUE, verbose = TRUE) { + + # load package + require(ncdf4) + + # verbose-only printing function + printv <- function(value) { + if (verbose) { + print(value) + } + } + + # check if timeflag is activated or full file must be loaded + if (is.null(tyears) | is.null(tmonths)) { + timeflag <- FALSE + printv("No time and months specified, loading all the data") + } else { + timeflag <- TRUE + printv("tyears and tmonths are set!") + require(PCICt) + } + + if (rotate == "full") { + rot <- T + move1 <- move2 <- 1 / 2 + } # 180 degrees rotation of longitude + if (rotate == "half") { + rot <- T + move1 <- 1 / 4 + move2 <- 3 / 4 + } # 90 degree rotation (useful for TM90) + if (rotate == "no") { + rot <- F + } # keep as it is + + # interpolation made with CDO: second order conservative remapping + if (interp2grid) { + print(paste("Remapping with CDO on", grid, "grid")) + filename <- basename(normalizePath(namefile)) + filedir <- dirname(normalizePath(namefile)) + cdo <- Sys.which("cdo") + tempfile <- paste0(file.path(filedir, paste0("tempfile_", filename))) + system2(cdo, args = c(paste0(remap_method, ",", grid), namefile, tempfile)) + namefile <- tempfile + } + + # define rotate function (faster than with apply) + rotation <- function(line) { + vettore <- line + dims <- length(dim(vettore)) + # for longitudes + if (dims == 1) { + ll <- length(line) + line[(ll * move1):ll] <- vettore[1:(ll * move2 + 1)] + line[1:(ll * move1 - 1)] <- vettore[(ll * move2 + 2):ll] - 360 + } + # for x,y data + if (dims == 2) { + ll <- length(line[, 1]) + line[(ll * move1):ll, ] <- vettore[1:(ll * move2 + 1), ] + line[1:(ll * move1 - 1), ] <- vettore[(ll * move2 + 2):ll, ] + } + # for x,y,t data + if (dims == 3) { + ll <- length(line[, 1, 1]) + line[(ll * move1):ll, , ] <- vettore[1:(ll * move2 + 1), , ] + line[1:(ll * move1 - 1), , ] <- vettore[(ll * move2 + 2):ll, , ] + } + # for x,y,z,t data + if (dims == 4) { + ll <- length(line[, 1, 1, 1]) + line[(ll * move1):ll, , , ] <- vettore[1:(ll * move2 + 1), , , ] + line[1:(ll * move1 - 1), , , ] <- vettore[(ll * move2 + 2):ll, , , ] + } + return(line) + } + + # define flip function ('cos rev/apply is not working) + flipper <- function(field) { + dims <- length(dim(field)) + if (dims == 2) { + ll <- length(field[1, ]) + field <- field[, ll:1] + } # for x,y data + if (dims == 3) { + ll <- length(field[1, , 1]) + field <- field[, ll:1, ] + } # for x,y,t data + if (dims == 4) { + ll <- length(field[1, , 1, 1]) + field <- field[, ll:1, , ] + } # for x,y,z,t data + return(field) + } + + # opening file: getting variable (if namevar is given, that variable + # is extracted) + printv(paste("opening file:", namefile)) + a <- nc_open(namefile) + print(paste("Loading", namevar, "...")) + + # if no name provided load the only variable available + if (is.null(namevar)) { + namevar <- names(a$var) + if (length(namevar) > 1) { + print(namevar) + stop(paste("More than one var in the files, please select it with"), + " namevar=yourvar", + sep = "" + ) + } + } + + # load axis: updated version, looking for dimension directly + # stored inside the variable + naxis <- 
unlist(lapply(a$var[[namevar]]$dim, function(x) x["name"])) + for (axis in naxis) { + assign(axis, ncvar_get(a, axis)) + printv(paste(axis, ":", length(get(axis)), "records")) + } + + if (timeflag) { + printv("selecting years and months") + + # based on preprocessing of CDO time format: get calendar type and + # use PCICt package for irregular data + units <- ncatt_get(a, "time", "units")$value + caldata <- ncatt_get(a, "time", "calendar")$value + if (grepl("day as", units, fixed = TRUE) | + grepl("days as", units, fixed = TRUE)) { + timeline <- as.PCICt(as.character(time), + format = "%Y%m%d", + cal = caldata + ) + } else if (grepl("day since", units, fixed = TRUE) | + grepl("days since", units, fixed = TRUE)) { + origin <- unlist(strsplit(units, "[a-zA-Z ]+"))[2] + origin.pcict <- as.PCICt(origin, cal = caldata, format = "%Y-%m-%d") + timeline <- origin.pcict + (floor(time) * 86400) + } else { + printv(units) + stop("Time units from NetCDF unsupported. Stopping!!!") + } + + # break if the calendar has not been recognized + if (any(is.na(timeline))) { + stop("Calendar from NetCDF is unsupported or not present. Stopping!!!") + } + + # break if the data requested is not there + lastday_base <- paste0(max(tyears), "-", max(tmonths), "-28") + # uses number.days.month, which loops to get the month change + lastday <- as.PCICt(paste0( + max(tyears), "-", max(tmonths), "-", + number_days_month(lastday_base) + ), cal = caldata, format = "%Y-%m-%d") + firstday <- as.PCICt(paste0(min(tyears), "-", min(tmonths), "-01"), + cal = caldata, format = "%Y-%m-%d" + ) + + if (max(timeline) < lastday | min(timeline) > firstday) { + print(firstday) + print(lastday) + print(min(timeline)) + print(max(timeline)) + stop("You requested a time interval that is not present in the NetCDF") + } + } + + # time selection and variable loading + printv("loading full field...") + field <- ncvar_get(a, namevar) + + if (timeflag) { + + # select data we need + select <- which(as.numeric(format(timeline, "%Y")) %in% + tyears & as.numeric(format(timeline, "%m")) %in% tmonths) + field <- field[, , select] + time <- timeline[select] + + printv(paste("This is a", caldata, "calendar")) + printv(paste( + length(time), "days selected from", time[1], "to", + time[length(time)] + )) + + printv(paste("Months that have been loaded are.. 
")) + printv(unique(format(time, "%Y-%m"))) + } + + # check for dimensions (presence or not of time dimension) + dimensions <- length(dim(field)) + + # if dimensions are multiple, get longitude, latitude + # if needed, rotate and flip the array + xlist <- c("lon", "Lon", "longitude", "Longitude") + ylist <- c("lat", "Lat", "latitude", "Latitude") + if (dimensions > 1) { + # assign ics and ipsilon + if (is.null(namelon)) { + if (any(xlist %in% naxis)) { + ics <- get(naxis[naxis %in% xlist], a$dim)$vals + } else { + printv("WARNING: No lon found") + ics <- NA + } + } else { + ics <- ncvar_get(a, namelon) + } + if (is.null(namelat)) { + if (any(ylist %in% naxis)) { + ipsilon <- get(naxis[naxis %in% ylist], a$dim)$vals + } else { + printv("WARNING: No lat found") + ipsilon <- NA + } + } else { + ipsilon <- ncvar_get(a, namelat) + } + + # longitute rotation around Greenwich + if (rot) { + printv("rotating...") + ics <- rotation(ics) + field <- rotation(field) + } + if (ipsilon[2] < ipsilon[1] & length(ipsilon) > 1) { + if (length(ics) > 1) { + print("flipping...") + ipsilon <- sort(ipsilon) + field <- flipper(field) + } + } + + # exporting variables to the main program + if (exportlonlat) { + assign("ics", ics, envir = .GlobalEnv) + assign("ipsilon", ipsilon, envir = .GlobalEnv) + } + # if ics and ipsilon exists, assign the rearranged values + if (!is.na(ics[1])) { + assign(naxis[naxis %in% c(xlist, namelon)], ics) + } + if (!is.na(ipsilon[1])) { + assign(naxis[naxis %in% c(ylist, namelat)], ipsilon) + } + } + + if (dimensions > 4) { + stop("This file is more than 4D file") + } + + # close connection + nc_close(a) + + # remove interpolated file + if (interp2grid) { + system2("rm", tempfile) + } + + # showing array properties + printv(paste(dim(field))) + if (timeflag) { + printv(paste("From", time[1], "to", time[length(time)])) + } + + # returning file list + return(mget(c("field", naxis))) +} + +# ncdf_opener is a simplified wrapper for ncdf_opener_universal which returns +# only the field, ignoring the list and no verbosity +ncdf_opener <- function(namefile, namevar = NULL, namelon = NULL, + namelat = NULL, tmonths = NULL, tyears = NULL, + rotate = "full", interp2grid = F, grid = "r144x73", + remap_method = "remapcon2", + exportlonlat = TRUE, verbose = FALSE) { + field <- ncdf_opener_universal( + namefile, namevar, namelon, namelat, + tmonths, tyears, rotate, interp2grid, + grid, remap_method, exportlonlat, verbose + ) + return(field$field) +} + +########################################################## +#--------------Plotting functions------------------------# +########################################################## + +# function to open devices +open_plot_device <- function(figname, output_file_type, special = FALSE) { + # Choose output format for figure + output_file_type <- tolower(output_file_type) + if (special == FALSE) { + if (output_file_type == "png") { + png(filename = figname, width = png_width, height = png_height) + } else if (output_file_type == "pdf") { + pdf(file = figname, width = pdf_width, height = pdf_height, onefile = T) + } else if ( (output_file_type == "eps") | + (output_file_type == "epsi") | + (output_file_type == "ps") ) { + setEPS( + width = pdf_width, height = pdf_height, onefile = T, + paper = "special" + ) + postscript(figname) + } + } else { + if (output_file_type == "png") { + png( + filename = figname, width = png_width / af, + height = png_height * af / 2 + ) + } else if (output_file_type == "pdf") { + pdf( + file = figname, width = pdf_width / af, + 
height = pdf_height * af / 2, onefile = T + ) + } else if ( (output_file_type == "eps") | + (output_file_type == "epsi") | + (output_file_type == "ps") ) { + setEPS( + width = pdf_width / af, + height = pdf_height * af / 2, onefile = T, paper = "special" + ) + postscript(figname) + } + } +} + + +# extensive filled_contour function +filled_contour3 <- + function(x = seq(0, 1, length.out = nrow(z)), + y = seq(0, 1, length.out = ncol(z)), z, + xlim = range(x, finite = TRUE), + ylim = range(y, finite = TRUE), zlim = range(z, finite = TRUE), + levels = pretty(zlim, nlevels), nlevels = 20, + color.palette = cm.colors, + col = color.palette(length(levels) - 1), extend = TRUE, + plot.title, plot.axes, key.title, key.axes, asp = NA, + xaxs = "i", yaxs = "i", las = 1, + axes = TRUE, frame.plot = axes, mar, ...) { + # modification by Ian Taylor of the filled_contour function + # to remove the key and facilitate overplotting with contour() + # further modified by Carey McGilliard and Bridget Ferris + # to allow multiple plots on one page + # modification to allow plot outside boundaries + + if (missing(z)) { + if (!missing(x)) { + if (is.list(x)) { + z <- x$z + y <- x$y + x <- x$x + } + else { + z <- x + x <- seq.int(0, 1, length.out = nrow(z)) + } + } + else { + stop("no 'z' matrix specified") + } + } + else if (is.list(x)) { + y <- x$y + x <- x$x + } + if (any(diff(x) <= 0) || any(diff(y) <= 0)) { + stop("increasing 'x' and 'y' values expected") + } + + if (extend) { + z[z < min(levels)] <- min(levels) + z[z > max(levels)] <- max(levels) + } + + plot.new() + plot.window(xlim, ylim, "", xaxs = xaxs, yaxs = yaxs, asp = asp) + if (!is.matrix(z) || nrow(z) <= 1 || ncol(z) <= 1) { + stop("no proper 'z' matrix specified") + } + if (!is.double(z)) { + storage.mode(z) <- "double" + } + .filled.contour(as.double(x), as.double(y), z, as.double(levels), + col = col + ) + if (missing(plot.axes)) { + if (axes) { + title(main = "", xlab = "", ylab = "") + Axis(x, side = 1, ...) + Axis(y, side = 2, ...) + } + } + else { + plot.axes + } + if (frame.plot) { + box() + } + if (missing(plot.title)) { + title(...) + } else { + plot.title + } + invisible() + } + +image_scale3 <- function(z, levels, color.palette = heat.colors, + colorbar.label = "image.scale", extend = T, + line.label = 2, line.colorbar = 0, cex.label = 1, + cex.colorbar = 1, colorbar.width = 1, ...) { + + # save properties from main plotting region + old.par <- par(no.readonly = TRUE) + mfg.save <- par()$mfg + old.fig <- par()$fig + + # defining plotting region with proper scaling + xscal <- (old.fig[2] - old.fig[1]) + yscal <- (old.fig[4] - old.fig[3]) + lw <- colorbar.width + lp <- line.colorbar / 100 + new.fig <- c( + old.fig[2] - 0.07 * xscal * lw - lp, + old.fig[2] - 0.03 * xscal - lp, old.fig[3] + 0.1 * yscal, + old.fig[4] - 0.1 * yscal + ) + + if (missing(levels)) { + levels <- seq(min(z), max(z), , 12) + } + # fixing color palette + col <- color.palette(length(levels) - 1) + + # starting plot + par(mar = c(1, 1, 1, 1), fig = new.fig, new = TRUE) + + # creating polygons for legend + poly <- vector(mode = "list", length(col)) + for (i in seq(poly)) { + poly[[i]] <- c(levels[i], levels[i + 1], levels[i + 1], levels[i]) + } + + xlim <- c(0, 1) + if (extend) { + longer <- 1.5 + dl <- diff(levels)[1] * longer + ylim <- c(min(levels) - dl, max(levels) + dl) + } else { + ylim <- range(levels) + } + plot(1, 1, + t = "n", ylim = ylim, xlim = xlim, axes = FALSE, + xlab = "", ylab = "", xaxs = "i", yaxs = "i", ... 
+  )
+  for (i in seq(poly)) {
+    polygon(c(0, 0, 1, 1), poly[[i]], col = col[i], border = NA)
+  }
+
+  if (extend) {
+    polygon(c(0, 1, 1 / 2), c(levels[1], levels[1], levels[1] - dl),
+      col = col[1], border = NA
+    )
+    polygon(c(0, 1, 1 / 2), c(
+      levels[length(levels)], levels[length(levels)],
+      levels[length(levels)] + dl ),
+      col = col[length(col)], border = NA)
+    polygon(c(0, 0, 1 / 2, 1, 1, 1 / 2), c(
+      levels[1], levels[length(levels)],
+      levels[length(levels)] + dl, levels[length(levels)], levels[1],
+      levels[1] - dl
+    ), border = "black", lwd = 2)
+    ylim0 <- range(levels)
+    prettyspecial <- pretty(ylim0)
+    prettyspecial <- prettyspecial[prettyspecial <= max(ylim0) &
+      prettyspecial >= min(ylim0)]
+    axis(4,
+      las = 1, cex.axis = cex.colorbar, at = prettyspecial,
+      labels = prettyspecial, ...
+    )
+  } else {
+    box()
+    axis(4, las = 1, cex.axis = cex.colorbar, ...)
+  }
+
+  # box, axis and legend
+  mtext(colorbar.label, line = line.label, side = 4, cex = cex.label, ...)
+
+  # resetting properties for starting a new plot (mfrow style)
+  par(old.par)
+  par(mfg = mfg.save, new = FALSE)
+  invisible()
+}
+
+# function for interpolation and projection of a 2D field on a
+# mapproj R projection
+proj_plot <- function(lon, lat, field, lmin = NULL, proj = "azequalarea",
+                      param = NULL, orient = c(90, 0, 0), npoints = 201) {
+
+  # default is azimuthal equal area map
+
+  # required packages
+  require(mapproj)
+  require(akima)
+
+  # it provides lower latitude limit for plots
+  if (is.null(lmin)) {
+    lmin <- min(lat)
+  }
+
+  # build grids
+  lon.grid <- rep(lon, length(lat))
+  lat.grid <- sort(rep(lat, length(lon)))
+
+  # project grid
+  proj.grid <- mapproject(lon.grid, lat.grid,
+    projection = proj,
+    parameters = param, orientation = orient
+  )
+
+  # provide limits for future plots (for polar projection)
+  limiter <- mapproject(c(0, 90, 180, 270), rep(lmin, 4),
+    proj = "", orientation = orient
+  )
+  xlims <- sort(c(limiter$x[2], limiter$x[4]))
+  ylims <- sort(c(limiter$y[1], limiter$y[3]))
+
+  # plot grid
+  lon.plot <- seq(min(proj.grid$x, na.rm = T), max(proj.grid$x, na.rm = T),
+    length.out = npoints
+  )
+  lat.plot <- seq(min(proj.grid$y, na.rm = T), max(proj.grid$y, na.rm = T),
+    length.out = npoints
+  )
+
+  # interpolation (akima needed)
+  good <- is.finite(field) & is.finite(proj.grid$x) & is.finite(proj.grid$y)
+  projected <- interp(proj.grid$x[good], proj.grid$y[good], field[good],
+    lon.plot, lat.plot,
+    duplicate = "strip"
+  )
+  return(projected = list(
+    x = projected$x, y = projected$y,
+    z = projected$z, xlim = xlims, ylim = ylims
+  ))
+}
+
+# addland function based on map which can handle projections
+proj_addland <- function(proj = "no", orient = c(90, 0, 0),
+                         param = NULL, color = "black") {
+
+  # required packages
+  require(maps)
+  require(mapproj)
+
+  if (proj == "no") {
+    map("world", regions = ".", interior = F, exact = F, boundary = T, add = T)
+  } else {
+    # get map, project and do the lines
+    box()
+    map("world",
+      add = T, projection = proj, orientation = orient,
+      parameter = param, interior = F, exact = F, boundary = T
+    )
+
+    # default lines for northern hemisphere
+    for (i in seq(-80, 80, 20)) {
+      x0 <- ics
+      y0 <- rep(i, length(ics))
+      p <- mapproject(x0, y0, proj = "", orientation = orient)
+      lines(p, lty = 3)
+    }
+
+    # default circles for northern hemisphere
+    for (i in c(seq(-360, 360, 30))) {
+      y0 <- seq(min(ipsilon), max(ipsilon), , 90)
+      x0 <- rep(i, 90)
+      p <- mapproject(x0, y0, proj = "", orientation = orient)
+      lines(p, lty = 3)
+    }
+  }
+}
+
+# rearrange arrays
for use both standard plotting and proj_plot +plot_prepare <- function(ics, ipsilon, field, proj, lat_lim) { + if (proj == "no") { + outfile <- list( + x = ics, y = ipsilon, z = field, xlim = range(ics), + ylim = lat_lim, xlab = "Longitude", ylab = "Latitude", axes = T + ) + } else { + field[is.na(field)] <- 0 + p <- proj_plot(ics, ipsilon, field, + lmin = lat_lim[1], proj = proj, + param = NULL, orient = c(90, 0, 0), npoints = 80 + ) + outfile <- list( + x = p$x, y = p$y, z = p$z, xlim = p$xlim, + ylim = p$ylim, xlab = "", ylab = "", axes = F + ) + } + return(outfile) +} + +# function that provides labels and names for Blocking Plots +field_details <- function(field) { + + # default value + legend_distance <- 3 + lev_hist <- NULL + + # case specific + if (field == "TM90") { + color_field <- c("dodgerblue", "darkred") + color_diff <- NULL + lev_field <- c(0, 30) + lev_diff <- NULL + legend_unit <- "Blocked Days (%)" + title_name <- "TM90 Instantaneous Blocking" + } + + if (field == "InstBlock") { + color_field <- palette1 + color_diff <- palette2 + lev_field <- seq(0, 36, 3) + lev_diff <- seq(-10.5, 10.5, 1) + legend_unit <- "Blocked Days (%)" + title_name <- "Instantaneous Blocking frequency:" + } + + if (field == "ExtraBlock") { + color_field <- palette1 + color_diff <- palette2 + lev_field <- seq(0, 36, 3) + lev_diff <- seq(-10.5, 10.5, 1) + legend_unit <- "Blocked Days (%)" + title_name <- "Instantaneous Blocking frequency (GHGS2 condition):" + } + + if (field == "BlockEvents") { + color_field <- palette1 + color_diff <- palette2 + lev_field <- seq(0, 27, 3) + lev_diff <- seq(-10.5, 10.5, 1) + lev_hist <- c(0, 16) + legend_unit <- "Blocked Days (%)" + title_name <- "Blocking Events frequency:" + } + + if (field == "LongBlockEvents") { + color_field <- palette1 + color_diff <- palette2 + lev_field <- seq(0, 16, 2) + lev_diff <- seq(-5.25, 5.25, .5) + legend_unit <- "Blocked Days (%)" + title_name <- "10-day Blocking Events frequency:" + } + + if (field == "DurationEvents") { + color_field <- palette0 + color_diff <- palette2 + lev_field <- seq(5, 11.5, .5) + lev_diff <- seq(-2.1, 2.1, .2) + lev_hist <- c(6, 8) + legend_unit <- "Duration (days)" + title_name <- "Duration of Blocking Events:" + } + + if (field == "NumberEvents") { + color_field <- palette0 + color_diff <- palette2 + lev_field <- seq(0, 100, 10) + lev_diff <- seq(-42.5, 42.5, 5) + lev_hist <- c(0, 60) + legend_unit <- "" + title_name <- "Number of Blocking Events:" + } + + if (field == "Z500") { + color_field <- palette0 + color_diff <- palette2 + lev_field <- seq(4800, 6000, 50) + lev_diff <- seq(-310, 310, 20) + legend_unit <- "Geopotential Height (m)" + title_name <- "Z500:" + legend_distance <- 4 + } + + if (field == "BI") { + color_field <- palette0 + color_diff <- palette2 + lev_field <- seq(1, 6, 0.25) + lev_diff <- seq(-2.1, 2.1, .2) + legend_unit <- "BI index" + title_name <- "Blocking Intensity (BI):" + } + + if (field == "MGI") { + color_field <- palette0 + color_diff <- palette2 + lev_field <- seq(0, 15, 1) + lev_diff <- seq(-5.25, 5.25, .5) + legend_unit <- "MGI Index" + title_name <- "Meridional Gradient Inversion (MGI):" + } + + if (field == "ACN" | field == "CN") { + if (field == "ACN") { + title_name <- "Anticyclonic Rossby wave breaking frequency:" + } + if (field == "CN") { + title_name <- "Cyclonic Rossby wave breaking frequency:" + } + color_field <- palette1 + color_diff <- palette2 + lev_field <- seq(0, 20, 2) + lev_diff <- seq(-5.25, 5.25, .5) + legend_unit <- "RWB frequency (%)" + } + + + out <- 
list(
+    color_field = color_field, color_diff = color_diff,
+    lev_field = lev_field, lev_diff = lev_diff, lev_hist = lev_hist,
+    legend_unit = legend_unit, legend_distance = legend_distance,
+    title_name = title_name
+  )
+  return(out)
+}
+
+##########################################################
+#------------Blocking Tracking Functions-----------------#
+##########################################################
+
+# time persistence (used for longitude filter too)
+time_persistence <- function(timeseries, persistence = 5) {
+  rr <- rle(timeseries)
+  rr$values[which(rr$values == 1 & rr$length < persistence)] <- 0
+  nn <- rep(rr$values, rr$length)
+  return(nn)
+}
+
+
+# 5-day blocking tracking
+blocking_persistence <- function(field, minduration = 5, time.array) {
+
+  # function for persistence
+  pers2 <- function(timeseries, persistence, time.array) {
+    dd <- min(time.array$season):max(time.array$season)
+    nn <- sapply(dd, function(x) {
+      time_persistence(timeseries[which(time.array$season == x)], persistence)
+    })
+    xx <- c(unlist(nn))
+    return(xx)
+  }
+
+  # check for etime
+  if (length(time.array$month) != length(field[1, 1, ])) {
+    stop("Wrong time array! Exiting...")
+  }
+
+  print("Time filtering...")
+  newfield <- apply(field, c(1, 2), function(x) pers2(x,
+    persistence = minduration, time.array
+  ))
+  newfield <- aperm(newfield, c(2, 3, 1))
+  print("Mean field...")
+  meanfield <- apply(newfield, c(1, 2), mean, na.rm = T) * 100
+
+
+  print("Events detection...")
+  maxdim <- max(apply(
+    newfield, c(1, 2),
+    function(x) length(rle(x)$length[which(rle(x)$values == 1)])
+  ))
+  events <- apply(
+    newfield, c(1, 2),
+    function(x) c(
+      rle(x)$lengths[which(rle(x)$values == 1)],
+      rep(NA, maxdim - length(rle(x)$length[which(rle(x)$values == 1)]))
+    )
+  )
+  events <- aperm(events, c(2, 3, 1))
+  print("Mean Duration...")
+  duration <- apply(events, c(1, 2), mean, na.rm = T)
+  print("Number of Events...")
+  nevents <- apply(events, c(1, 2), function(x) length(x[!is.na(x)]))
+
+  out <- list(
+    track = newfield, percentage = meanfield, duration = duration,
+    events = events, nevents = nevents
+  )
+  print(quantile(meanfield))
+  print(min(duration, na.rm = T))
+  return(out)
+}
+
+
+# large scale extension with further implementation
+largescale_extension_if <- function(ics, ipsilon, field) {
+  print("Large Scale Extension based on fixed angle")
+  fimin <- 30 # southern latitude to be analyzed
+  fimax <- 75 # northern latitude to be analyzed
+  yreso <- ipsilon[2] - ipsilon[1]
+  xreso <- ics[2] - ics[1]
+  passo <- 5 / xreso # horizontal movement
+  vertical <- 2.5 / yreso # vertical movement
+  time <- which(apply(field, 3, max) != 0) # time steps containing blocking
+  # (days without any blocked point are skipped)
+
+  print(paste(
+    "Box dimension:", passo * 2 * xreso, "° lon x ",
+    vertical * 2 * yreso, "° lat"
+  ))
+
+  short <- function(ics, ipsilon, field, passo, vertical) {
+    control <- field
+    range <- which.min(abs(ipsilon - fimin)):which.min(abs(ipsilon - fimax))
+    # check range for latitude excursion
+    # reduce range considering border effect
+    new <- rbind(field, field, field) # bind domain for cross-date line
+    for (i in 1:length(ics)) {
+      ii <- i + length(ics)
+      # check to speed up
+      if (!all(new[(ii - passo):(ii + passo), ] == 0)) {
+        for (j in range) {
+          control[i, j] <- mean(new[
+            (ii - passo):(ii + passo),
+            (j - vertical):(j + vertical)
+          ], na.rm = T)
+        }
+      }
+    }
+    control[control > 0] <- 1
+    return(control)
+  }
+
+
+  tt <- length(time)
+  for (t in time) {
+    progression_bar(t, tt)
+    field[, , t] <- short(ics, ipsilon, field[, , t], passo, vertical)
+  }
+  return(field)
+}
+
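+# a worked example of the time_persistence filter used by
+# blocking_persistence above and longitude_filter below (illustrative
+# only): runs of 1s shorter than "persistence" are set to zero, so
+#   time_persistence(c(1, 1, 1, 0, 1, 1, 1, 1, 1), persistence = 5)
+# returns c(0, 0, 0, 0, 1, 1, 1, 1, 1)
+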
+# Longitude filter for minimum extension
+longitude_filter <- function(ics, ipsilon, field) {
+  print("Longitude filter based on fixed angle")
+  yreso <- ipsilon[2] - ipsilon[1]
+  xreso <- ics[2] - ics[1]
+  startipsilon <- which.min(abs(ipsilon - 30))
+  extension <- (75 - 30) / yreso
+  passo <- 15 / xreso
+
+  print(paste("Continuous longitude constraint", passo * xreso, "° lon"))
+
+  tt <- length(field[1, 1, ])
+  for (t in 1:tt) {
+    progression_bar(t, tt)
+
+    new <- rbind(field[, , t], field[, , t], field[, , t])
+    for (j in startipsilon:( (startipsilon + extension))) {
+      new[, j] <- time_persistence(new[, j], persistence = passo)
+    }
+    field[, , t] <- new[length(ics) + (1:length(ics)), ]
+  }
+  return(field)
+}
+
+
+##########################################################
+#------------EOFs and regimes functions------------------#
+##########################################################
+
+eofs <- function(lon, lat, field, neof = 4, xlim, ylim, method = "SVD",
+                 do_standardize = F, do_regression = F) {
+  # R tool for computing EOFs based on Singular Value Decomposition
+  # ("SVD", default)
+  # or with the eigenvectors of the covariance matrix ("covariance", slower)
+  # If requested, computes linear regressions and standardizes the PCs
+  # If you want to use the regressions, remember to standardize the PCs
+  # Takes as input a 3D anomaly field.
+  # Requires "personal" functions area_weight, whicher and standardize
+
+  # area weighting, based on the root of cosine
+  print("Area Weighting...")
+  ww <- area_weight(lon, lat, root = T)
+  wwfield <- sweep(field, c(1, 2), ww, "*")
+
+  # selection of the box
+  box <- wwfield[
+    whicher(lon, xlim[1]):whicher(lon, xlim[2]),
+    whicher(lat, ylim[1]):whicher(lat, ylim[2]),
+  ]
+  slon <- lon[whicher(lon, xlim[1]):whicher(lon, xlim[2])]
+  slat <- lat[whicher(lat, ylim[1]):whicher(lat, ylim[2])]
+
+  # transform 3D field into a matrix
+  new_box <- array(box, dim = c(dim(box)[1] * dim(box)[2], dim(box)[3]))
+
+  # calling SVD
+  if (method == "SVD") {
+    print("Calling SVD...")
+    SVD <- svd(new_box, nu = neof, nv = neof)
+
+    # extracting EOFs (loading pattern),
+    # expansion coefficients and variance explained
+    pattern <- array(SVD$u, dim = c(dim(box)[1], dim(box)[2], neof))
+    coefficient <- SVD$v
+    variance <- (SVD$d[1:neof]) ^ 2 / sum( (SVD$d) ^ 2)
+    if (do_standardize) {
+      coefficient <- apply(coefficient, c(2), standardize)
+    } else {
+      coefficient <- sweep(coefficient, c(2), sqrt(variance), "*")
+    }
+  }
+
+  # calling covariance matrix
+  if (method == "covariance") {
+    print("Calling eigenvectors of the covariance matrix...")
+    covma <- cov(t(new_box))
+    eig <- eigen(covma)
+    coef <- (t(new_box) %*% eig$vectors)[, 1:neof]
+    pattern <- array(eig$vectors, dim = c(
+      dim(box)[1], dim(box)[2],
+      dim(box)[3]
+    ))[, , 1:neof]
+    variance <- eig$values[1:neof] / sum(eig$values)
+    if (do_standardize) {
+      coefficient <- apply(coef, c(2), standardize)
+    } else {
+      coefficient <- coef
+    }
+  }
+
+  # linear regressions on anomalies
+  regression <- NULL
+  if (do_regression) {
+    print("Linear Regressions (it can take a while)...
") + regression <- array(NA, dim = c(length(lon), length(lat), neof)) + # for (i in 1:neof) {regression[,,i]=apply(field,c(1,2), + # function(x) coef(lm(x ~ coefficient[,i]))[2])} + for (i in 1:neof) { + regression[, , i] <- apply( + field, c(1, 2), + function(x) lin.fit(as.matrix(coefficient[, i], + ncol = 1 + ), x)$coefficients + ) + } + } + + # preparing output + print("Finalize...") + pattern <- list(x = slon, y = slat, z = pattern) + out <- list( + pattern = pattern, coeff = coefficient, + variance = variance, regression = regression + ) + return(out) +} + +eofs_coeff <- function(lon, lat, field, eof_object, do_standardize = F) { + # Computes expansion coefficient (i.e. PCs) of a given dataset on the + # loading pattern of EOF previously computed + # Works only on eof_object obtained with "eofs" function + + # Area weighting, based on the root of cosine + print("Area Weighting...") + ww <- area_weight(lon, lat, root = T) + wwfield <- sweep(field, c(1, 2), ww, "*") + + # selection of the box + xlim <- c(min(eof_object$pattern$x), max(eof_object$pattern$x)) + ylim <- c(min(eof_object$pattern$y), max(eof_object$pattern$y)) + box <- wwfield[ + whicher(lon, xlim[1]):whicher(lon, xlim[2]), + whicher(lat, ylim[1]):whicher(lat, ylim[2]), + ] + + # transform 3D field in a matrix + new_box <- array(box, dim = c(dim(box)[1] * dim(box)[2], dim(box)[3])) + new_pattern <- array(eof_object$pattern$z, + dim = c( + dim(eof_object$pattern$z)[1] * dim(eof_object$pattern$z)[2], + dim(eof_object$pattern$z)[3] + ) + ) + + # projects the coefficients + coef <- (t(new_box) %*% new_pattern) + + # standardize + if (do_standardize) { + coefficient <- apply(coef, c(2), standardize) + } else { + coefficient <- coef + } + + print("Finalize...") + return(coefficient) +} + + +regimes <- function(lon, lat, field, ncluster = 4, ntime = 1000, + neof = 10, xlim, ylim, alg = "Hartigan-Wong") { + # R tool to compute cluster analysis based on k-means. + # Requires "personal" function eofs + # Take as input a 3D anomaly field + + # Reduce the phase space with EOFs: use SVD and do not standardize PCs + print("Launching EOFs...") + reducedspace <- eofs(lon, lat, field, + neof = neof, xlim = xlim, ylim = ylim, + method = "SVD", do_regression = F, do_standardize = F + ) + + # extract the principal components + PC <- reducedspace$coeff + print(str(PC)) + + # k-means computation repeat for ntime to find best solution. + print("Computing k-means...") + print(str(ncluster)) + regimes <- kmeans(PC, as.numeric(ncluster), + nstart = ntime, + iter.max = 1000, algorithm = alg + ) + + # Extract regimes frequencyr and timeseries of occupation + cluster <- regimes$cluster + frequencies <- regimes$size / dim(field)[3] * 100 + print(frequencies[order(frequencies, decreasing = T)]) + + print("Creating Composites...") + compose <- aperm(apply(field, c(1, 2), by, cluster, mean), c(2, 3, 1)) + + # sorting from the more frequent to the less frequent + kk <- order(frequencies, decreasing = T) + cluster <- cluster + 10 + for (ss in 1:ncluster) { + cluster[cluster == (ss + 10)] <- which(kk == ss) + } + + # prepare output + print("Finalize...") + out <- list( + cluster = cluster, frequencies = frequencies[kk], + regimes = compose[, , kk], tot.withinss = regimes$tot.withinss + ) + return(out) +} + + +regimes2 <- function(lon, lat, field, ncluster = 4, ntime = 1000, minvar = 0.8, + xlim, ylim, alg = "Hartigan-Wong") { + + # R tool to compute cluster analysis based on k-means. 
+  # Requires "personal" function eofs (see above)
+  # Takes as input a 3D anomaly field
+
+  # Reduce the phase space with EOFs: use SVD and do not standardize PCs
+  print("Launching EOFs...")
+  reducedspace <- eofs(lon, lat, field,
+    neof = 20, xlim = xlim, ylim = ylim,
+    method = "SVD", do_regression = F, do_standardize = F
+  )
+  reqpc <- which(cumsum(reducedspace$variance) > minvar)[1]
+  print(paste(
+    "Retaining", reqpc,
+    "PCs to fulfill minimum explained variance required (", minvar * 100, "%)"
+  ))
+
+  # extract the principal components
+  PC <- reducedspace$coeff[, 1:reqpc]
+  print(str(PC))
+
+  # k-means computation, repeated ntime times to find the best solution
+  print("Computing k-means...")
+  print(str(ncluster))
+  regimes <- kmeans(PC, as.numeric(ncluster),
+    nstart = ntime,
+    iter.max = 100, algorithm = alg
+  )
+
+  # Extract regimes frequency and timeseries of occupation
+  cluster <- regimes$cluster
+  frequencies <- regimes$size / dim(field)[3] * 100
+  print(frequencies[order(frequencies, decreasing = T)])
+
+  print("Creating Composites...")
+  compose <- aperm(apply(field, c(1, 2), by, cluster, mean), c(2, 3, 1))
+
+  # sorting from the most frequent to the least frequent
+  kk <- order(frequencies, decreasing = T)
+  cluster <- cluster + 10
+  for (ss in 1:ncluster) {
+    cluster[cluster == (ss + 10)] <- which(kk == ss)
+  }
+
+  # prepare output
+  print("Finalize...")
+  out <- list(
+    cluster = cluster, frequencies = frequencies[kk],
+    regimes = compose[, , kk], tot.withinss = regimes$tot.withinss
+  )
+  return(out)
+}
+
+##########################################################
+#-------------------Time Avg functions-------------------#
+##########################################################
+
+# fast function for monthly mean, using preallocation,
+# vectorization and rowMeans
+monthly_mean <- function(ics, ipsilon, field, etime) {
+  condition <- paste(etime$month, etime$year)
+  monthly <- array(NA, dim = c(
+    length(ics), length(ipsilon),
+    length(unique(condition))
+  ))
+  for (t in unique(condition)) {
+    monthly[, , which(t == unique(condition))] <- rowMeans(field[
+      , ,
+      t == condition
+    ], dims = 2)
+  }
+  return(monthly)
+}
+
+# basic running mean
+run_mean <- function(field, n = 5) {
+  nn <- floor(n / 2)
+  newfield <- field
+  for (t in (1 + nn):(length(field) - nn)) {
+    newfield[t] <- mean(field[(t - nn):(t + nn)])
+  }
+  return(newfield)
+}
+
+# improved running mean:
+# use vectorization for a 5 day running mean ad-hoc function
+# (to be generalized!)
+# about 10 times faster than a standard running mean function based
+# on a for loop
+run_mean5 <- function(field) {
+  newfield <- rowMeans(cbind(
+    c(field[3:length(field)], NA, NA),
+    c(field[2:length(field)], NA), field,
+    c(NA, field[1:(length(field) - 1)]),
+    c(NA, NA, field[1:(length(field) - 2)])
+  ),
+  na.rm = T
+  )
+  return(newfield)
+}
+
+# function for daily anomalies, use array predeclaration
+# and rowMeans (40 times faster!)
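+# an illustrative call (a sketch; "etime" as built by power_date_new above,
+# "z500" a made-up daily (lon, lat, time) array):
+#   z500anom <- daily_anom_mean(ics, ipsilon, z500, etime)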
+daily_anom_mean <- function(ics, ipsilon, field, etime) {
+  condition <- paste(etime$day, etime$month)
+  daily <- array(NA, dim = c(
+    length(ics), length(ipsilon),
+    length(unique(condition))
+  ))
+  anom <- field * NA
+  for (t in unique(condition)) {
+    daily[, , which(t == unique(condition))] <-
+      rowMeans(field[, , t == condition], dims = 2)
+    anom[, , which(t == condition)] <-
+      sweep(
+        field[, , which(t == condition)], c(1, 2),
+        daily[, , which(t == unique(condition))], "-"
+      )
+  }
+  return(anom)
+}
+
+# beta function for daily anomalies plus running mean
+# (only 50% slower than the standard daily avg)
+daily_anom_run_mean <- function(ics, ipsilon, field, etime) {
+  condition <- paste(etime$day, etime$month)
+  daily <- array(NA, dim = c(
+    length(ics), length(ipsilon),
+    length(unique(condition))
+  ))
+  for (t in unique(condition)) {
+    daily[, , which(t == unique(condition))] <-
+      rowMeans(field[, , t == condition], dims = 2)
+  }
+  anom <- field * NA
+  for (t in unique(condition)) {
+    anom[, , which(t == condition)] <-
+      sweep(
+        field[, , which(t == condition)], c(1, 2),
+        daily[, , which(t == unique(condition))], "-"
+      )
+  }
+  return(anom)
+}
diff --git a/esmvaltool/diag_scripts/miles/block_fast.R b/esmvaltool/diag_scripts/miles/block_fast.R
new file mode 100644
index 0000000000..6cb78e134c
--- /dev/null
+++ b/esmvaltool/diag_scripts/miles/block_fast.R
@@ -0,0 +1,420 @@
+######################################################
+#-----Blocking routines computation for MiLES--------#
+#-------------P. Davini (Oct 2014)-------------------#
+######################################################
+miles_block_fast <- function(dataset, expid, ens, year1, year2, season,
+                             z500filename, FILESDIR, doforce) {
+  t0 <- proc.time()
+
+  # setting up time domain
+  years <- year1:year2
+  timeseason <- season2timeseason(season)
+
+  # define folders using the file_builder function (takes care of ensembles)
+  savefile1 <- file_builder(
+    FILESDIR, "Block", "BlockClim", dataset,
+    expid, ens, year1, year2, season
+  )
+  savefile2 <- file_builder(
+    FILESDIR, "Block", "BlockFull", dataset,
+    expid, ens, year1, year2, season
+  )
+
+  # check if data is already there to avoid re-run
+  if (file.exists(savefile1) & file.exists(savefile2)) {
+    print("Requested blocking data is already there!")
+    print(savefile1)
+    print(savefile2)
+    if (doforce == TRUE) {
+      print("Running with doforce=true... re-run!")
+    } else {
+      print("Skipping...
activate doforce=true if you want to re-run it")
+      q()
+    }
+  }
+
+  # new file opening
+  nomefile <- z500filename
+  fieldlist <- ncdf_opener_universal(nomefile,
+    namevar = "zg",
+    tmonths = timeseason, tyears = years,
+    rotate = "full"
+  )
+  print(str(fieldlist))
+
+  # extract calendar and time unit from the original file
+  tcal <- attributes(fieldlist$time)$cal
+  tunit <- attributes(fieldlist$time)$units
+
+  # time array to simplify time filtering
+  etime <- power_date_new(fieldlist$time)
+  totdays <- length(fieldlist$time)
+
+  # declare variable
+  z500 <- fieldlist$field
+
+  # grid resolution
+  yreso <- ipsilon[2] - ipsilon[1]
+  xreso <- ics[2] - ics[1]
+
+  # reso checks: these are not needed with the default 2.5 deg grid,
+  # but they may become relevant with a possible
+  # future power up to finer grids
+  # xcritical factor is due to RWB longitudinal jump of 7.5
+  # ycritical factor is due to Large Scale Extension of 2.5
+  xcritical <- 2.5
+  ycritical <- 2.5
+  if (ycritical %% yreso != 0) {
+    stop("Latitudinal resolution is not a divisor of 2.5 deg")
+  }
+
+  if (xcritical %% xreso != 0) {
+    stop("Longitudinal resolution is not a divisor of 2.5 deg")
+  }
+
+  ##########################################################
+  #--------------Tibaldi and Molteni 1990------------------#
+  ##########################################################
+
+  print("Tibaldi and Molteni (1990) index...")
+  # TM90: parameters for blocking detection
+  tm90_fi0 <- 60 # central_lat
+  tm90_fin <- tm90_fi0 + 20
+  tm90_fis <- tm90_fi0 - 20 # north and south lat, 80N and 40N
+  tm90_central <- whicher(ipsilon, tm90_fi0)
+  tm90_south <- whicher(ipsilon, tm90_fis)
+  tm90_north <- whicher(ipsilon, tm90_fin)
+  tm90_range <- seq(-5, 5, yreso) / yreso # 5 degrees to the north,
+  # 5 to the south (larger than TM90 or D'Andrea et al 1998)
+
+  # TM90: beta version, the amazing power of R vectorization!
+  # 6 lines to get the climatology
+  tm90_ghgn <- (z500[, tm90_north + tm90_range, ] -
+    z500[, tm90_central + tm90_range, ]) / (tm90_fin - tm90_fi0)
+  tm90_ghgs <- (z500[, tm90_central + tm90_range, ] -
+    z500[, tm90_south + tm90_range, ]) / (tm90_fi0 - tm90_fis)
+  tm90_check <- (tm90_ghgs > 0 & tm90_ghgn < (-10)) # TM90 conditions
+  tm90_check[tm90_check == T] <- 1
+  tm90_check[tm90_check == F] <- 0
+  tottm90 <- apply(tm90_check, c(1, 3), max, na.rm = T)
+  tm90 <- apply(tottm90, 1, mean) * 100
+  print("Done!")
+
+  ##########################################################
+  #--------------Davini et al. 2012------------------------#
+  ##########################################################
+
+  # declare main variables to be computed (considerable speed up!)
+  totrwb <- totmeridional <- totbi <- z500 * NA
+  totblocked <- totblocked2 <- z500 * 0
+
+  # Davini et al. 2012: parameters to be set for blocking detection
+  fi0 <- 30 # lowest latitude to be analyzed
+  jump <- 15 # distance over which to compute gradients
+  step0 <- jump / yreso # number of grid points to be used
+  central <- which.min(abs(ipsilon - fi0)) # lowest starting latitude
+  north <- central + step0 # lowest north latitude
+  south <- central - step0 # lowest south latitude
+  maxsouth <- central - 2 * step0
+  fin <- ipsilon[north]
+  fis <- ipsilon[south]
+  range <- (90 - fi0 - jump) / yreso # excursion to the north for
+  # computing blocking (from 30 up to 75)
+
+  print("--------------------------------------------------")
+  print("Davini et al. (2012) index and diagnostics...")
+  print(c("distance for gradients:", step0 * diff(ics)[1]))
+  print(paste("range of latitudes ", fi0, "-", 90 - step0 * diff(ics)[1],
+    " N",
+    sep = ""
+  ))
+
+  ##########################################################
+  #--------------Instantaneous Blocking--------------------#
+  ##########################################################
+
+  #----COMPUTING BLOCKING INDICES-----
+  for (t in 1:totdays) {
+    progression_bar(t, totdays)
+
+    # multidim extension
+    new_field <- rbind(z500[, , t], z500[, , t], z500[, , t])
+
+    # computing blocking for different latitudes
+    for (delta in 0:range) {
+      ghgn <- (z500[, north + delta, t] -
+        z500[, central + delta, t]) / (fin - fi0)
+      ghgs <- (z500[, central + delta, t] -
+        z500[, south + delta, t]) / (fi0 - fis)
+      gh2gs <- (z500[, south + delta, t] -
+        z500[, maxsouth + delta, t]) / (fi0 - fis)
+      check1 <- which(ghgs > 0 & ghgn < (-10))
+      check2 <- which(ghgs > 0 & ghgn < (-10) & gh2gs < (-5))
+      # supplementary condition
+
+      if (length(check2) > 0) {
+        totblocked2[check2, central + delta, t] <- 1
+      }
+
+      if (length(check1) > 0) {
+        # 1-MATRIX FOR INSTANTANEOUS BLOCKING
+        totblocked[check1, central + delta, t] <- 1
+
+
+        # 2-PART ON COMPUTATION OF ROSSBY WAVE BREAKING
+        r <- check1 + length(ics)
+        rwb_jump <- jump / 2
+        steprwb <- rwb_jump / xreso
+        rwb_west <- new_field[(r - steprwb), south + delta + steprwb]
+        rwb_east <- new_field[(r + steprwb), south + delta + steprwb]
+        fullgh <- (rwb_west - rwb_east)
+
+        totrwb[check1[fullgh < 0], central + delta, t] <- (-10)
+        # gradient decreasing: cyclonic RWB
+        totrwb[check1[fullgh > 0], central + delta, t] <- 10
+        # gradient increasing: anticyclonic RWB
+
+        # 4-part on the adapted version of blocking intensity
+        # by Wiedenmann et al. (2002)
+        step <- 60 / xreso
+        ii <- check1 + length(ics)
+        zu <- zd <- NULL
+        for (ll in ii) {
+          zu <- c(zu, min(new_field[(ll - step):ll, central + delta]))
+          zd <- c(zd, min(new_field[ll:(ll + step), central + delta]))
+        }
+        mz <- z500[check1, central + delta, t]
+        rc <- 0.5 * ( (zu + mz) / 2 + (zd + mz) / 2)
+        totbi[check1, central + delta, t] <- 100 * (mz / rc - 1)
+
+        # 5-part on the meridional gradient index
+        totmeridional[check1, central + delta, t] <- ghgs[check1]
+      }
+    }
+  }
+
+  print(paste("Total # of days:", t))
+  print("-------------------------")
+
+  ##########################################################
+  #--------------------Mean Values-------------------------#
+  ##########################################################
+
+  # compute mean values (use rowMeans, faster when there are no NA values)
+  frequency <- rowMeans(totblocked, dims = 2) * 100
+  # frequency of Instantaneous Blocking days
+  frequency2 <- rowMeans(totblocked2, dims = 2) * 100
+  # frequency of Instantaneous Blocking days with GHGS2
+  z500mean <- rowMeans(z500, dims = 2) # Z500 mean value
+  bi <- apply(totbi, c(1, 2), mean, na.rm = T)
+  # Blocking Intensity Index as Wiedenmann et al.
(2002) + mgi <- apply(totmeridional, c(1, 2), mean, na.rm = T) + # Value of meridional gradient inversion + + # anticyclonic and cyclonic averages RWB + cn <- apply(totrwb, c(1, 2), function(x) sum(x[x == (-10)], na.rm = T)) / + (totdays) * (-10) + acn <- apply(totrwb, c(1, 2), function(x) sum(x[x == (10)], na.rm = T)) / + (totdays) * (10) + + t1 <- proc.time() - t0 + print(t1) + + print("Instantaneous blocking and diagnostics done!") + + ########################################################## + #--------------------Time filtering----------------------# + ########################################################## + + # spatial filtering on fixed longitude distance + spatial <- longitude_filter(ics, ipsilon, totblocked) + + # large scale extension on 10x5 box + large <- largescale_extension_if(ics, ipsilon, spatial) + + # 5-day persistence filter + block <- blocking_persistence(large, minduration = 5, time.array = etime) + + # 10-day persistence for extreme long block + longblock <- blocking_persistence(large, + minduration = 10, + time.array = etime + ) + + tf <- proc.time() - t1 + print(tf) + + + ########################################################## + #------------------------Save to NetCDF------------------# + ########################################################## + + # saving output to netcdf files + print("saving NetCDF climatologies...") + + # which fieds to plot/save + fieldlist <- c( + "TM90", "InstBlock", "ExtraBlock", "Z500", "MGI", "BI", + "CN", "ACN", "BlockEvents", "LongBlockEvents", + "DurationEvents", "NumberEvents" + ) + full_fieldlist <- c( + "TM90", "InstBlock", "ExtraBlock", "Z500", "MGI", "BI", + "CN", "ACN", "BlockEvents", "LongBlockEvents" + ) + + # dimensions definition + fulltime <- as.numeric(etime$data) - as.numeric(etime$data)[1] + TIME <- paste(tunit, " since ", year1, "-", timeseason[1], + "-01 00:00:00", + sep = "" + ) + LEVEL <- 50000 + x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") + y <- ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") + z <- ncdim_def("plev", "Pa", LEVEL, longname = "pressure") + t1 <- ncdim_def("time", TIME, 0, + unlim = T, calendar = tcal, + longname = "time" + ) + t2 <- ncdim_def("time", TIME, fulltime, + unlim = T, calendar = tcal, + longname = "time" + ) + + for (var in fieldlist) { + # name of the var + if (var == "TM90") { + longvar <- "Tibaldi-Molteni 1990 Instantaneous Blocking frequency" + unit <- "%" + field <- tm90 + full_field <- tottm90 + } + if (var == "InstBlock") { + longvar <- "Instantaneous Blocking frequency" + unit <- "%" + field <- frequency + full_field <- totblocked + } + if (var == "ExtraBlock") { + longvar <- "Instantaneous Blocking frequency (GHGS2)" + unit <- "%" + field <- frequency2 + full_field <- totblocked2 + } + if (var == "Z500") { + longvar <- "Geopotential Height" + unit <- "m" + field <- z500mean + full_field <- z500 + } + if (var == "BI") { + longvar <- "BI index" + unit <- "" + field <- bi + full_field <- totbi + } + if (var == "MGI") { + longvar <- "MGI index" + unit <- "" + field <- mgi + full_field <- totmeridional + } + if (var == "ACN") { + longvar <- "Anticyclonic RWB frequency" + unit <- "%" + field <- acn + full_field <- totrwb / 10 + full_field[full_field == (-1)] <- NA + } + if (var == "CN") { + longvar <- "Cyclonic RWB frequency" + unit <- "%" + field <- cn + full_field <- totrwb / 10 + full_field[full_field == (1)] <- NA + } + if (var == "BlockEvents") { + longvar <- "Blocking Events frequency" + unit <- "%" + field <- block$percentage + 
full_field <- block$track + } + if (var == "LongBlockEvents") { + longvar <- "10-day Blocking Events frequency" + unit <- "%" + field <- longblock$percentage + full_field <- longblock$track + } + if (var == "DurationEvents") { + longvar <- "Blocking Events duration" + unit <- "days" + field <- block$duration + } + if (var == "NumberEvents") { + longvar <- "Blocking Events number" + unit <- "" + field <- block$nevents + } + + # fix eventual NaN + field[is.nan(field)] <- NA + + # variable definitions + if (var == "TM90") { + var_ncdf <- ncvar_def(var, unit, list(x, t = t1), -999, + longname = longvar, prec = "single", compression = 1 + ) + full_var_ncdf <- ncvar_def(var, unit, list(x, t = t2), -999, + longname = longvar, prec = "single", compression = 1 + ) + } else { + var_ncdf <- ncvar_def(var, unit, list(x, y, z, t = t1), -999, + longname = longvar, prec = "single", compression = 1 + ) + full_var_ncdf <- ncvar_def(var, unit, list(x, y, z, t = t2), -999, + longname = longvar, prec = "single", compression = 1 + ) + } + + assign(paste0("var", var), var_ncdf) + assign(paste0("full_var", var), full_var_ncdf) + assign(paste0("field", var), field) + assign(paste0("full_field", var), full_field) + } + + # Climatologies Netcdf file creation + print(savefile1) + namelist1 <- paste0("var", fieldlist) + nclist1 <- mget(namelist1) + ncfile1 <- nc_create(savefile1, nclist1) + for (var in fieldlist) { + # put variables into the ncdf file + # ncvar_put(ncfile1, fieldlist[which(var==fieldlist)], + # get(paste0("field",var)), start = c(1, 1, 1, 1), count = c(-1,-1,-1,-1)) + ndims <- get(paste0("var", var))$ndims + ncvar_put(ncfile1, var, get(paste0("field", var)), + start = rep(1, ndims), + count = rep(-1, ndims) + ) + } + nc_close(ncfile1) + + # Fullfield Netcdf file creation + print(savefile2) + namelist2 <- paste0("full_var", full_fieldlist) + nclist2 <- mget(namelist2) + ncfile2 <- nc_create(savefile2, nclist2) + for (var in full_fieldlist) { + # put variables into the ncdf file + # ncvar_put(ncfile2, full_fieldlist[which(var==full_fieldlist)], + # get(paste0("full_field",var)), start = c(1, 1, 1, 1), + # count = c(-1,-1,-1,-1)) + ndims <- get(paste0("full_var", var))$ndims + ncvar_put(ncfile2, var, get(paste0("full_field", var)), + start = rep(1, ndims), count = rep(-1, ndims) + ) + } + nc_close(ncfile2) + return(c(savefile1, savefile2)) +} diff --git a/esmvaltool/diag_scripts/miles/block_figures.R b/esmvaltool/diag_scripts/miles/block_figures.R new file mode 100644 index 0000000000..8353f4e091 --- /dev/null +++ b/esmvaltool/diag_scripts/miles/block_figures.R @@ -0,0 +1,186 @@ +###################################################### +#------Blocking routines plotting for MiLES----------# +#-------------P. 
Davini (May 2017)-------------------# +###################################################### + +miles_block_figures <- function(dataset, expid, ens, year1, year2, + dataset_ref, expid_ref, ens_ref, year1_ref, + year2_ref, + season, FIGDIR, FILESDIR, REFDIR) { + + # which fieds to load/plot + fieldlist <- c( + "InstBlock", "ExtraBlock", "Z500", "MGI", "BI", "CN", "ACN", + "BlockEvents", "LongBlockEvents", "DurationEvents", + "NumberEvents", "TM90" + ) + + ########################################################## + #-----------------Loading datasets-----------------------# + ########################################################## + + # open field + for (field in fieldlist) { + + # use file.builder function + nomefile <- file_builder( + FILESDIR, "Block", "BlockClim", dataset, expid, + ens, year1, year2, season + ) + field_exp <- ncdf_opener(nomefile, namevar = field, rotate = "no") + assign(paste(field, "_exp", sep = ""), field_exp) + } + + # open reference field + for (field in fieldlist) { + + # check for REFDIR==FILESDIR, i.e. if we are using the climatology + # provided by MiLES or another dataset MiLES-generated + if (REFDIR != FILESDIR) { + nomefile_ref <- paste0( + file.path(REFDIR, "Block"), "/BlockClim_", # nolint + dataset_ref, "_", year1_ref, "_", year2_ref, "_", season, ".nc" + ) + } else { + + # use file.builder to create the path of the blocking files + nomefile_ref <- file_builder( + FILESDIR, "Block", "BlockClim", + dataset_ref, expid_ref, ens_ref, year1_ref, year2_ref, season + ) + } + + field_ref <- ncdf_opener(nomefile_ref, namevar = field, rotate = "no") + assign(paste(field, "_ref", sep = ""), field_ref) + } + + ########################################################## + #-----------------Produce figures------------------------# + ########################################################## + + # standard properties + info_exp <- info_builder(dataset, expid, ens, year1, year2, season) + info_ref <- info_builder( + dataset_ref, expid_ref, ens_ref, year1_ref, + year2_ref, season + ) + + filenames <- c() + # loop on fields + for (field in fieldlist) { + + # define field-dependent properties + fp <- field_details(field) + + # get fields + field_ref <- get(paste(field, "_ref", sep = "")) + field_exp <- get(paste(field, "_exp", sep = "")) + + # create figure names with ad-hoc function + figname <- fig_builder( + FIGDIR, "Block", field, dataset, expid, ens, year1, + year2, season, output_file_type + ) + print(figname) + filenames <- c(filenames, figname) + + # special treatment for TM90: it is a 1D field! + if (field == "TM90") { + open_plot_device(figname, output_file_type, special = TRUE) + + # panels option + par( + cex.main = 2, cex.axis = 1.5, cex.lab = 1.5, mar = c(5, 5, 4, 3), + oma = c(0, 0, 0, 0) + ) + + # rotation to simplify the view (90 deg to the west) + n <- (-length(ics) / 4) + ics2 <- c(tail(ics, n), head(ics, -n) + 360) + field_exp2 <- c(tail(field_exp, n), head(field_exp, -n)) + field_ref2 <- c(tail(field_ref, n), head(field_ref, -n)) + + # plot properties + lwdline <- 4 + tm90cols <- fp$color_field + plot(ics2, field_exp2, + type = "l", lwd = lwdline, ylim = fp$lev_field, + main = fp$title_name, xlab = "Longitude", ylab = fp$legend_unit, + col = tm90cols[1] + ) + points(ics2, field_ref2, + type = "l", lwd = lwdline, lty = 1, + col = tm90cols[2] + ) + grid() + legend(100, 30, + legend = c(info_exp, info_ref), lwd = lwdline, + lty = c(1, 1), col = tm90cols, bg = "white", cex = 1. 
+ ) + + dev.off() + + # skip other part of the script + next + } + + # Choose output format for figure - by JvH + open_plot_device(figname, output_file_type) + + # plot options + par(plotpar) + + # main experiment plot + im <- plot_prepare(ics, ipsilon, field_exp, + proj = map_projection, + lat_lim = lat_lim + ) + filled_contour3(im$x, im$y, im$z, + xlab = im$xlab, ylab = im$ylab, + main = paste(info_exp), levels = fp$lev_field, + color.palette = fp$color_field, xlim = im$xlim, + ylim = im$ylim, axes = im$axes + ) + mtext(fp$title_name, side = 3, line = .5, outer = TRUE, cex = 2, font = 2) + proj_addland(proj = map_projection) + + # reference field plot + im <- plot_prepare(ics, ipsilon, field_ref, + proj = map_projection, lat_lim = lat_lim + ) + filled_contour3(im$x, im$y, im$z, + xlab = im$xlab, ylab = im$ylab, + main = paste(info_ref), levels = fp$lev_field, + color.palette = fp$color_field, xlim = im$xlim, + ylim = im$ylim, axes = im$axes + ) + proj_addland(proj = map_projection) + image_scale3(volcano, + levels = fp$lev_field, + color.palette = fp$color_field, + colorbar.label = fp$legend_unit, + cex.colorbar = imgscl_colorbar, cex.label = imgscl_label, + colorbar.width = 1 * af, line.label = fp$legend_distance + ) + + # delta field plot + im <- plot_prepare(ics, ipsilon, field_exp - field_ref, + proj = map_projection, lat_lim = lat_lim) + filled_contour3(im$x, im$y, im$z, + xlab = im$xlab, ylab = im$ylab, + main = paste("Difference"), levels = fp$lev_diff, + color.palette = fp$color_diff, xlim = im$xlim, + ylim = im$ylim, axes = im$axes + ) + proj_addland(proj = map_projection) + image_scale3(volcano, + levels = fp$lev_diff, color.palette = fp$color_diff, + colorbar.label = fp$legend_unit, + cex.colorbar = imgscl_colorbar, cex.label = imgscl_label, + colorbar.width = 1 * af, line.label = fp$legend_distance + ) + + dev.off() + } + return(list(figs = filenames, mod = nomefile, ref = nomefile_ref)) +} diff --git a/esmvaltool/diag_scripts/miles/eof_fast.R b/esmvaltool/diag_scripts/miles/eof_fast.R new file mode 100644 index 0000000000..716175dac4 --- /dev/null +++ b/esmvaltool/diag_scripts/miles/eof_fast.R @@ -0,0 +1,237 @@ +###################################################### +#-----EOFs routines computation for MiLES--------# +#-------------P. 
Davini (Feb 2018)-------------------# +###################################################### +miles_eofs_fast <- function(dataset, expid, ens, year1, year2, season, + tele, z500filename, FILESDIR, PROGDIR, doforce) { + + # standard defined 4 EOFs + neofs <- 4 + + # t0 + t0 <- proc.time() + + # setting up time domain + years <- year1:year2 + timeseason <- season2timeseason(season) + + # define folders using file.builder function (takes care of ensembles) + print(".....") + print(dataset) + print(expid) + print(ens) + savefile1 <- file_builder( + FILESDIR, paste0("EOFs/", tele), "EOFs", dataset, + expid, ens, year1, year2, season + ) + + # select teleconnection region + if (tele == "NAO") { + xlim <- c(-90, 40) + ylim <- c(20, 85) + rotation <- "full" + } else if (tele == "AO") { + xlim <- c(-180, 180) + ylim <- c(20, 85) + rotation <- "full" + } else if (tele == "PNA") { + xlim <- c(140, 280) + ylim <- c(20, 85) + rotation <- "no" # 140E-80W: use trick of rotation for cross-dateline + } else { + # use non standard region, detect region with strsplit + splitter <- as.numeric(strsplit(tele, "_")[[1]]) + if (length(splitter) == 4) { + xlim <- c(splitter[1], splitter[2]) + ylim <- c(splitter[3], splitter[4]) + if (xlim[2] > 180) { + rotation <- "no" + } else { + rotation <- "full" + } + } else { + stop("Wrong teleconnection region!") + } + } + + # check if data is already there to avoid re-run + if (file.exists(savefile1)) { + print("Actually requested EOFs data is already there!") + print(savefile1) + if (doforce == TRUE) { + print("Running with doforce=true... re-run!") + } else { + print("Skipping... activate doforce=true if you want to re-run it") + q() + } + } + + # new file opening + nomefile <- z500filename + fieldlist <- ncdf_opener_universal(nomefile, "zg", + tmonths = timeseason, + tyears = years, rotate = rotation + ) + print(str(fieldlist)) + + # extract calendar and time unit from the original file + tcal <- attributes(fieldlist$time)$cal + tunit <- attributes(fieldlist$time)$units + + # time array + etime <- power_date_new(fieldlist$time) + + # declare variable + z500 <- fieldlist$field + + # monthly averaging + print("monthly mean...") + + # new faster monthly mean function + z500monthly <- monthly_mean(ics, ipsilon, z500, etime) + + # climatology + print("climatological mean...") + z500clim <- apply(z500monthly, c(1, 2), ave, rep(timeseason, length(years))) + z500clim <- aperm(z500clim, c(2, 3, 1)) + + # monthly anomalies + print("anomalies...") + z500anom <- z500monthly - z500clim + + # compute EOFs + print("EOFs...") + EOFS <- eofs(ics, ipsilon, z500anom, + neof = neofs, xlim, ylim, + method = "SVD", do_standardize = T, do_regression = T + ) + # COEFF=eofs.coeff(ics,ipsilon,z500anom,EOFS, + # do_standardize=T) #do we really need this? 
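+  # (should the PCs of a second dataset on these patterns ever be needed,
+  # a call along these lines ought to work, with "anom2" a made-up
+  # anomaly array on the same grid, illustrative only:
+  # coeff2 <- eofs_coeff(ics, ipsilon, anom2, EOFS, do_standardize = T))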
+ + # flip signs of patterns and regressions for NAO and AO + print("checking signs...") + for (i in 1:neofs) { + posreg <- NULL + + # define regions for sign control: boxes where values should be positive + if (tele == "NAO") { + if (i == 1) { + posreg <- c(-30, 30, 40, 50) + } # NAO + if (i == 2) { + posreg <- c(-60, 0, 40, 60) + } # East Atlantic Pattern + if (i == 3) { + posreg <- c(-30, 30, 50, 70) + } # Scandinavian Blocking + } + + if (tele == "AO") { + if (i == 1) { + posreg <- c(-180, 180, 20, 50) + } # Arctic Oscillation + if (i == 2) { + posreg <- c(-120, -60, 40, 60) + } # PNA + } + + # if definition of region exists + if (!is.null(posreg)) { + # convert into indices + xbox <- whicher(EOFS$pattern$x, posreg[1]):whicher( + EOFS$pattern$x, + posreg[2] + ) + ybox <- whicher(EOFS$pattern$y, posreg[3]):whicher( + EOFS$pattern$y, + posreg[4] + ) + valuereg <- mean(EOFS$pattern$z[xbox, ybox, i]) + + # if negative in the box, flip all signs! + if (valuereg < 0) { + EOFS$pattern$z[, , i] <- -EOFS$pattern$z[, , i] + EOFS$regression <- -EOFS$regression + } + } + } + + # expand EOF pattern to save it + expanded_pattern <- EOFS$regression * NA + expanded_pattern[ + whicher(ics, xlim[1]):whicher(ics, xlim[2]), + whicher(ipsilon, ylim[1]):whicher(ipsilon, ylim[2]), + ] <- + EOFS$pattern$z + + t1 <- proc.time() - t0 + print(t1) + + + ########################################################## + #------------------------Save to NetCDF------------------# + ########################################################## + + # saving output to netcdf files + print("saving NetCDF climatologies...") + print(savefile1) + + # monthly specific time + monthtime <- as.numeric(etime$data[etime$day == 15]) + + # dimensions definition + TIME <- paste(tunit, " since ", year1, "-", timeseason[1], + "-01 00:00:00", + sep = "" + ) + LEVEL <- 50000 + x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") + y <- ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") + z <- ncdim_def("plev", "Pa", LEVEL, longname = "pressure") + ef <- ncdim_def("PC", "-", 1:neofs) + t <- ncdim_def("time", TIME, monthtime, + calendar = tcal, + longname = "time", unlim = T + ) + + # defining vars + unit <- "m" + longvar <- "EOFs Loading Pattern" + pattern_ncdf <- ncvar_def("Patterns", unit, list(x, y, z, ef), -999, + longname = longvar, prec = "single", compression = 1 + ) + + unit <- "m" + longvar <- "EOFs Linear Regressions" + regression_ncdf <- ncvar_def("Regressions", unit, list(x, y, z, ef), -999, + longname = longvar, prec = "single", compression = 1 + ) + + unit <- paste0("0-", neofs) + longvar <- "PCs timeseries" + pc_ncdf <- ncvar_def("PCs", unit, list(ef, t), -999, + longname = longvar, prec = "single", compression = 1 + ) + + unit <- "%" + longvar <- "EOFs variance" + variance_ncdf <- ncvar_def("Variances", unit, list(ef), -999, + longname = longvar, prec = "single", compression = 1 + ) + + # saving files + ncfile1 <- nc_create( + savefile1, + list(pattern_ncdf, pc_ncdf, variance_ncdf, regression_ncdf) + ) + ncvar_put(ncfile1, "Patterns", expanded_pattern, + start = c(1, 1, 1, 1), count = c(-1, -1, -1, -1) + ) + ncvar_put(ncfile1, "Regressions", EOFS$regression, + start = c(1, 1, 1, 1), count = c(-1, -1, -1, -1) + ) + ncvar_put(ncfile1, "PCs", EOFS$coeff, start = c(1, 1), count = c(-1, -1)) + ncvar_put(ncfile1, "Variances", EOFS$variance, start = c(1), count = c(-1)) + nc_close(ncfile1) + return(savefile1) +} diff --git a/esmvaltool/diag_scripts/miles/eof_figures.R 
b/esmvaltool/diag_scripts/miles/eof_figures.R new file mode 100644 index 0000000000..6517117bc9 --- /dev/null +++ b/esmvaltool/diag_scripts/miles/eof_figures.R @@ -0,0 +1,180 @@ +###################################################### +#--------Routines for EOFs plotting for MiLES--------# +#-------------P. Davini (May 2017)-------------------# +###################################################### + +# DECLARING THE FUNCTION: EXECUTION IS AT THE BOTTOM OF THE SCRIPT + +miles_eof_figures <- function(dataset, expid, ens, year1, year2, + dataset_ref, expid_ref, ens_ref, + year1_ref, year2_ref, + season, FIGDIR, FILESDIR, + REFDIR, PROGDIR, tele) { + + # use the file-building script to access the file + nomefile_exp <- file_builder( + FILESDIR, paste0("EOFs/", tele), "EOFs", + dataset, expid, ens, year1, year2, season + ) + + # check for REFDIR==FILESDIR, i.e. if we are using the + # climatology provided by MiLES or another dataset MiLES-generated + if (REFDIR != FILESDIR) { + nomefile_ref <- paste0( + file.path(REFDIR, paste0("EOFs/", tele)), "/EOFs_", # nolint + dataset_ref, "_", year1_ref, "_", year2_ref, "_", season, ".nc" + ) + } else { + # use file.builder to create the path of the blocking files + nomefile_ref <- file_builder( + FILESDIR, paste0("EOFs/", tele), "EOFs", + dataset_ref, expid_ref, ens_ref, year1_ref, year2_ref, season + ) + } + + # EOFs to plot (depends on how many were computed!) + neofs <- 4 + + ########################################################## + #-----------------Loading datasets-----------------------# + ########################################################## + + # loading variances and regressions of the experiment + variance_exp <- ncdf_opener(nomefile_exp, + namevar = "Variances", + rotate = "no" + ) * 100 # convert to percentage + regressions_exp <- ncdf_opener(nomefile_exp, + namevar = "Regressions", + rotate = "no" + ) + + # loading reference field + variance_ref <- ncdf_opener(nomefile_ref, + namevar = "Variances", + rotate = "no" + ) * 100 # convert to percentage + regressions_ref <- ncdf_opener(nomefile_ref, + namevar = "Regressions", + rotate = "no" + ) + + + ########################################################## + #-----------------Produce figures------------------------# + ########################################################## + + # plot properties + info_exp <- info_builder(dataset, expid, ens, year1, year2, season) + info_ref <- info_builder( + dataset_ref, expid_ref, ens_ref, + year1_ref, year2_ref, season + ) + lev_field <- seq(-150, 150, 20) + lev_diff <- seq(-95, 95, 10) + + filenames <- c() + # loop on number of EOFs + for (neof in 1:neofs) { + linear_exp <- regressions_exp[, , neof] + linear_ref <- regressions_ref[, , neof] + + # check and flip signs (to be in agreement with reference field) + if (cor(c(linear_ref), c(linear_exp)) < 0) { + linear_exp <- (-linear_exp) + } + + #-----plotting-------# + + # plot properties + region <- tele # if it is a box of lonlat + if (tele == "NAO") { + region <- "North Atlantic" + } + if (tele == "AO") { + region <- "Northern Hemisphere" + } + if (tele == "PNA") { + region <- "North Pacific" + } + title_name <- paste0(region, " EOF", neof) + + # define figure + figname <- fig_builder( + FIGDIR, paste0("EOFs/", tele), paste0("EOF", neof), + dataset, expid, ens, year1, year2, season, output_file_type + ) + print(figname) + filenames <- c(filenames, figname) + + # Choose output format for figure - by JvH + open_plot_device(figname, output_file_type) + + # where to plot variance values + if (map_projection 
== "no") { + varpoints <- c(120, 85) + } else { + varpoints <- c(0, 0.7) + } + + # plot properties + par(plotpar) + + im <- plot_prepare(ics, ipsilon, linear_exp, proj = map_projection, + lat_lim = lat_lim) + filled_contour3(im$x, im$y, im$z, + xlab = im$xlab, ylab = im$ylab, + main = paste(info_exp), levels = lev_field, + color.palette = palette3, xlim = im$xlim, ylim = im$ylim, + axes = im$axes + ) + mtext(title_name, side = 3, line = .5, outer = TRUE, cex = 2, font = 2) + proj_addland(proj = map_projection) + text(varpoints[1], varpoints[2], paste("Variance Explained: ", + round(variance_exp[neof], 2), "%", + sep = "" + ), cex = 2) + + im <- plot_prepare(ics, ipsilon, linear_ref, proj = map_projection, + lat_lim = lat_lim) + filled_contour3(im$x, im$y, im$z, + xlab = im$xlab, ylab = im$ylab, + main = paste(info_ref), levels = lev_field, + color.palette = palette3, xlim = im$xlim, ylim = im$ylim, + axes = im$axes + ) + mtext(title_name, side = 3, line = .5, outer = TRUE, cex = 2, font = 2) + proj_addland(proj = map_projection) + image_scale3(volcano, + levels = lev_field, color.palette = palette3, + colorbar.label = "m", cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, colorbar.width = 1 * af, + line.label = imgscl_line + ) + text(varpoints[1], varpoints[2], paste("Variance Explained: ", + round(variance_ref[neof], 2), "%", + sep = "" + ), cex = 2) + + # delta field plot + im <- plot_prepare(ics, ipsilon, linear_exp - linear_ref, + proj = map_projection, lat_lim = lat_lim + ) + filled_contour3(im$x, im$y, im$z, + xlab = im$xlab, ylab = im$ylab, + main = paste("Difference"), levels = lev_diff, + color.palette = palette2, xlim = im$xlim, ylim = im$ylim, + axes = im$axes + ) + proj_addland(proj = map_projection) + image_scale3(volcano, + levels = lev_diff, color.palette = palette2, + colorbar.label = "m", cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, colorbar.width = 1 * af, + line.label = imgscl_line + ) + + dev.off() + } + return(list(figs = filenames, mod = nomefile_exp, ref = nomefile_ref)) +} diff --git a/esmvaltool/diag_scripts/miles/miles_block.R b/esmvaltool/diag_scripts/miles/miles_block.R new file mode 100644 index 0000000000..3dd2fed15c --- /dev/null +++ b/esmvaltool/diag_scripts/miles/miles_block.R @@ -0,0 +1,158 @@ +# ############################################################################# +# miles_block.r +# Authors: P. Davini (ISAC-CNR, Italy) (author of MiLES) +# J. von Hardenberg (ISAC-CNR, Italy) (ESMValTool adaptation) +# E. Arnone (ISAC-CNR, Italy) (ESMValTool v2.0 adaptation) +# ############################################################################# +# Description +# MiLES is a tool for estimating properties of mid-latitude climate. +# It works on daily 500hPa geopotential height data and it produces +# climatological figures for the chosen time period. Data are interpolated +# on a common 2.5x2.5 grid. +# Model data are compared against a reference field such as the +# ECMWF ERA-Interim reanalysis. 
+# +# Modification history +# 20180525-arno_en: Conversion to v2.0 +# 20181203 hard_jo: Completed conversion, rlint compliant +# +# ############################################################################ + +library(tools) +library(yaml) + +provenance_record <- function(infile) { + xprov <- list(ancestors = infile, + authors = list("hard_jo", "davi_pa", "arno_en"), + references = list("davini18", "davini12jclim", + "tibaldi90tel"), + projects = list("c3s-magic"), + caption = "MiLES blocking statistics", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("nh")) + return(xprov) +} + +diag_scripts_dir <- Sys.getenv("diag_scripts") + +source(paste0(diag_scripts_dir, "/miles/basis_functions.R")) +source(paste0(diag_scripts_dir, "/miles/block_figures.R")) +source(paste0(diag_scripts_dir, "/miles/block_fast.R")) +source(paste0(diag_scripts_dir, "/miles/miles_parameters.R")) + +# read settings and metadata files +args <- commandArgs(trailingOnly = TRUE) +settings <- yaml::read_yaml(args[1]) +metadata <- yaml::read_yaml(settings$input_files) +for (myname in names(settings)) { + temp <- get(myname, settings) + assign(myname, temp) +} + +field_type0 <- "T2Ds" + +# get first variable and list associated to pr variable +var0 <- "zg" +list0 <- metadata + +# get name of climofile for first variable and list +# associated to first climofile +climofiles <- names(list0) +climolist0 <- get(climofiles[1], list0) + +diag_base <- climolist0$diagnostic +print(paste(diag_base, ": starting routine")) + +# create working dirs if they do not exist +work_dir <- settings$work_dir +regridding_dir <- settings$run_dir +plot_dir <- settings$plot_dir +dir.create(work_dir, recursive = T, showWarnings = F) +dir.create(regridding_dir, recursive = T, showWarnings = F) +dir.create(plot_dir, recursive = T, showWarnings = F) + +# setup provenance file and list +provenance_file <- paste0(regridding_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +# extract metadata +models_dataset <- unname(sapply(list0, "[[", "dataset")) +models_ensemble <- unname(sapply(list0, "[[", "ensemble")) +models_exp <- unname(sapply(list0, "[[", "exp")) +reference_model <- unname(sapply(list0, "[[", "reference_dataset"))[1] +models_start_year <- unname(sapply(list0, "[[", "start_year")) +models_end_year <- unname(sapply(list0, "[[", "end_year")) +models_experiment <- unname(sapply(list0, "[[", "exp")) +models_ensemble <- unname(sapply(list0, "[[", "ensemble")) + +## +## Run it all +## + +for (model_idx in c(1:(length(models_dataset)))) { + exp <- models_exp[model_idx] + dataset <- models_dataset[model_idx] + ensemble <- models_ensemble[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + infile <- climofiles[model_idx] + for (seas in seasons) { + filenames <- miles_block_fast( + year1 = year1, year2 = year2, expid = exp, ens = ensemble, + dataset = dataset, season = seas, z500filename = infile, + FILESDIR = work_dir, doforce = TRUE + ) + # Set provenance for output files + xprov <- provenance_record(list(infile)) + for (fname in filenames) { + provenance[[fname]] <- xprov + } + } +} + +## +## Make the plots +## +if (write_plots) { + ref_idx <- which(models_dataset == reference_model) + if (length(ref_idx) == 0) { + ref_idx <- length(models_dataset) + } + dataset_ref <- models_dataset[ref_idx] + exp_ref <- models_exp[ref_idx] + ensemble_ref <- models_ensemble[ref_idx] + year1_ref <- models_start_year[ref_idx] + year2_ref <- models_end_year[ref_idx] + + 
for (model_idx in c(1:(length(models_dataset)))) { + if (model_idx != ref_idx) { + exp <- models_exp[model_idx] + dataset <- models_dataset[model_idx] + ensemble <- models_ensemble[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + for (seas in seasons) { + filenames <- miles_block_figures( + year1 = year1, year2 = year2, expid = exp, + dataset = dataset, ens = ensemble, + dataset_ref = dataset_ref, year1_ref = year1_ref, + year2_ref = year2_ref, expid_ref = exp_ref, + ens_ref = ensemble_ref, season = seas, + FIGDIR = plot_dir, FILESDIR = work_dir, + REFDIR = work_dir + ) + # Set provenance for output files (same as diagnostic files) + xprov <- provenance_record(list(climofiles[model_idx], + climofiles[ref_idx])) + for (fname in filenames$figs) { + provenance[[fname]] <- xprov + } + } + } + } +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/miles/miles_eof.R b/esmvaltool/diag_scripts/miles/miles_eof.R new file mode 100644 index 0000000000..bdff70c4e4 --- /dev/null +++ b/esmvaltool/diag_scripts/miles/miles_eof.R @@ -0,0 +1,156 @@ +# ############################################################################# +# miles_eof.R +# Authors: P. Davini (ISAC-CNR, Italy) (author of MiLES) +# J. von Hardenberg (ISAC-CNR, Italy) (ESMValTool adaptation) +# ############################################################################# +# Description +# MiLES is a tool for estimating properties of mid-latitude climate. +# It works on daily 500hPa geopotential height data and it produces +# climatological figures for the chosen time period. Data are interpolated +# on a common 2.5x2.5 grid. +# Model data are compared against a reference field such as the +# ECMWF ERA-Interim reanalysis. +# It supports analysis for the 4 standard seasons. 
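+# Supported teleconnections ("teles" in the recipe settings, looped over
+# below) are NAO, AO and PNA; eof_fast.R also accepts a custom region
+# encoded as a "lon1_lon2_lat1_lat2" string, so a recipe entry could look
+# like (illustrative sketch, not a tested recipe):
+#   teles: [NAO, AO, -50_20_30_80]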
+# +# ############################################################################ + +library(tools) +library(yaml) + +provenance_record <- function(infile) { + xprov <- list(ancestors = infile, + authors = list("hard_jo", "davi_pa", "arno_en"), + references = list("davini18"), + projects = list("c3s-magic"), + caption = "MiLES EOF statistics", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("nh")) + return(xprov) +} + +diag_scripts_dir <- Sys.getenv("diag_scripts") + +source(paste0(diag_scripts_dir, "/miles/basis_functions.R")) +source(paste0(diag_scripts_dir, "/miles/eof_figures.R")) +source(paste0(diag_scripts_dir, "/miles/eof_fast.R")) +source(paste0(diag_scripts_dir, "/miles/miles_parameters.R")) + +# read settings and metadata files +args <- commandArgs(trailingOnly = TRUE) +settings <- yaml::read_yaml(args[1]) +metadata <- yaml::read_yaml(settings$input_files) +for (myname in names(settings)) { + temp <- get(myname, settings) + assign(myname, temp) +} + +field_type0 <- "T2Ds" + +# get first variable and list associated to pr variable +var0 <- "zg" +list0 <- metadata + +# get name of climofile for first variable and list associated +# to first climofile +climofiles <- names(list0) +climolist0 <- get(climofiles[1], list0) + +diag_base <- climolist0$diagnostic +print(paste(diag_base, ": starting routine")) + +# create working dirs if they do not exist +work_dir <- settings$work_dir +regridding_dir <- settings$run_dir +plot_dir <- settings$plot_dir +dir.create(work_dir, recursive = T, showWarnings = F) +dir.create(regridding_dir, recursive = T, showWarnings = F) +dir.create(plot_dir, recursive = T, showWarnings = F) + +# setup provenance file and list +provenance_file <- paste0(regridding_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +# extract metadata +models_dataset <- unname(sapply(list0, "[[", "dataset")) +models_ensemble <- unname(sapply(list0, "[[", "ensemble")) +models_exp <- unname(sapply(list0, "[[", "exp")) +reference_model <- unname(sapply(list0, "[[", "reference_dataset"))[1] +models_start_year <- unname(sapply(list0, "[[", "start_year")) +models_end_year <- unname(sapply(list0, "[[", "end_year")) +models_experiment <- unname(sapply(list0, "[[", "exp")) +models_ensemble <- unname(sapply(list0, "[[", "ensemble")) + +## +## Run it all +## + +for (model_idx in c(1:(length(models_dataset)))) { + exp <- models_exp[model_idx] + dataset <- models_dataset[model_idx] + ensemble <- models_ensemble[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + infile <- climofiles[model_idx] + for (tele in teles) { + for (seas in seasons) { + filenames <- miles_eofs_fast( + dataset = dataset, expid = exp, ens = ensemble, + year1 = year1, year2 = year2, season = seas, tele = tele, + z500filename = infile, FILESDIR = work_dir, + doforce = TRUE + ) + # Set provenance for output files + xprov <- provenance_record(list(infile)) + for (fname in filenames) { + provenance[[fname]] <- xprov + } + } + } +} + +## +## Make the plots +## +if (write_plots) { + ref_idx <- which(models_dataset == reference_model) + if (length(ref_idx) == 0) { + ref_idx <- length(models_dataset) + } + dataset_ref <- models_dataset[ref_idx] + exp_ref <- models_exp[ref_idx] + ensemble_ref <- models_ensemble[ref_idx] + year1_ref <- models_start_year[ref_idx] + year2_ref <- models_end_year[ref_idx] + + for (model_idx in c(1:(length(models_dataset)))) { + if (model_idx != ref_idx) { + exp <- models_exp[model_idx] + dataset <- 
models_dataset[model_idx] + ensemble <- models_ensemble[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + for (tele in teles) { + for (seas in seasons) { + filenames <- miles_eof_figures( + expid = exp, year1 = year1, year2 = year2, dataset = dataset, + ens = ensemble, dataset_ref = dataset_ref, expid_ref = exp_ref, + year1_ref = year1_ref, ens_ref = ensemble_ref, + year2_ref = year2_ref, season = seas, tele = tele, + FIGDIR = plot_dir, FILESDIR = work_dir, REFDIR = work_dir + ) + # Set provenance for output files (same as diagnostic files) + xprov <- provenance_record(list(climofiles[model_idx], + climofiles[ref_idx])) + for (fname in filenames$figs) { + provenance[[fname]] <- xprov + } + } + } + } + } +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/miles/miles_parameters.R b/esmvaltool/diag_scripts/miles/miles_parameters.R new file mode 100644 index 0000000000..40dbdb5ead --- /dev/null +++ b/esmvaltool/diag_scripts/miles/miles_parameters.R @@ -0,0 +1,77 @@ +########################################################## +#-------------Plot configurations------------------------# +########################################################## + +# print all warnings as they occur +options(warn = 1) + +# Specific settings for PNG output +png_width <- 900 +png_height <- 900 +png_units <- "px" +png_pointsize <- 12 +png_bg <- "white" + +# Specific settings for PDF and EPS output (in inches) +pdf_width <- 12 +pdf_height <- 12 + +# aspect ratio +af <- 1 + +# Type of projection ("no" for standard plotting) +# All projections from the mapproj package should be supported, +# but errors may arise for non-polar plots +# DEFAULT IS POLAR PLOT +map_projection <- "azequalarea" # Alternative: "azequidistant" + +# Number of panels per figure (rows and columns): default for polar plots +panels <- c(3, 1) + +# Latitudinal range for plots +lat_lim <- c(25, 90) + +# if not a regular projection (i.e. if using polar) +if (map_projection != "no") { + af <- round(sqrt(3), 2) + pdf_height <- pdf_height / af + pdf_width <- 3 * pdf_width / af + png_height <- png_height / af + png_width <- 3 * png_width / af + panels <- rev(panels) +} + +# Custom parameters for plots +plotpar <- list(mfrow = panels, cex.main = 2.5, cex.axis = 1.5, cex.lab = 1.5, + mar = c(5, 5, 5, 7), oma = c(1, 1, 3, 2)) + +# imagescale3 color bar details +imgscl_colorbar <- 1.4 +imgscl_label <- 1.5 +imgscl_line <- 3 + +# color palette to be used +# palette0 is taken from tim.colors of the fields package to avoid library dependencies... 
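+# (Note: colorRampPalette() returns a function, not a vector of colours:
+# e.g. palette0(64) generates 64 interpolated hex colours suitable for the
+# color.palette/col arguments of the plotting routines.)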
+palette0 <- colorRampPalette(c( + "#00008F", "#00009F", "#0000AF", "#0000BF", "#0000CF", + "#0000DF", "#0000EF", "#0000FF", "#0010FF", "#0020FF", + "#0030FF", "#0040FF", "#0050FF", "#0060FF", "#0070FF", + "#0080FF", "#008FFF", "#009FFF", "#00AFFF", "#00BFFF", + "#00CFFF", "#00DFFF", "#00EFFF", "#00FFFF", "#10FFEF", + "#20FFDF", "#30FFCF", "#40FFBF", "#50FFAF", "#60FF9F", + "#70FF8F", "#80FF80", "#8FFF70", "#9FFF60", "#AFFF50", + "#BFFF40", "#CFFF30", "#DFFF20", "#EFFF10", "#FFFF00", + "#FFEF00", "#FFDF00", "#FFCF00", "#FFBF00", "#FFAF00", + "#FF9F00", "#FF8F00", "#FF8000", "#FF7000", "#FF6000", + "#FF5000", "#FF4000", "#FF3000", "#FF2000", "#FF1000", + "#FF0000", "#EF0000", "#DF0000", "#CF0000", "#BF0000", + "#AF0000", "#9F0000", "#8F0000", "#800000" +)) +palette1 <- colorRampPalette(c("white", "orange", "darkred")) +palette2 <- colorRampPalette(c("blue", "white", "red")) +palette3 <- colorRampPalette(c("darkblue", "blue", "dodgerblue", + "white", "orange", "red", "darkred")) + +# additional color palette used for extradiagnostics histogram +KOL <- c("black", "darkgreen", "blue", "darkorange", "red", + "violet", "grey50", "black") diff --git a/esmvaltool/diag_scripts/miles/miles_regimes.R b/esmvaltool/diag_scripts/miles/miles_regimes.R new file mode 100644 index 0000000000..a747defe78 --- /dev/null +++ b/esmvaltool/diag_scripts/miles/miles_regimes.R @@ -0,0 +1,153 @@ +# ############################################################################# +# miles_regimes.R +# Authors: P. Davini (ISAC-CNR, Italy) (author of MiLES) +# J. von Hardenberg (ISAC-CNR, Italy) (ESMValTool adaptation) +# ############################################################################# +# Description +# MiLES is a tool for estimating properties of mid-latitude climate. +# It works on daily 500hPa geopotential height data and it produces +# climatological figures for the chosen time period. Data are interpolated +# on a common 2.5x2.5 grid. +# Model data are compared against a reference field such as the +# ECMWF ERA-Interim reanalysis. +# It supports analysis for the 4 standard seasons. 
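+# Weather regimes are computed in regimes_fast.R by clustering daily Z500
+# anomalies over the North Atlantic (k-means with the "Hartigan-Wong"
+# algorithm); the current beta version supports only DJF and 4 clusters.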
+# +# ############################################################################ + +library(tools) +library(yaml) + +provenance_record <- function(infile) { + xprov <- list(ancestors = infile, + authors = list("hard_jo", "davi_pa", "arno_en"), + references = list("davini18", "corti99nat"), + projects = list("c3s-magic"), + caption = "MiLES regimes statistics", + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("nh")) + return(xprov) +} + +diag_scripts_dir <- Sys.getenv("diag_scripts") + +source(paste0(diag_scripts_dir, "/miles/basis_functions.R")) +source(paste0(diag_scripts_dir, "/miles/regimes_figures.R")) +source(paste0(diag_scripts_dir, "/miles/regimes_fast.R")) +source(paste0(diag_scripts_dir, "/miles/miles_parameters.R")) + +# read settings and metadata files +args <- commandArgs(trailingOnly = TRUE) +settings <- yaml::read_yaml(args[1]) +metadata <- yaml::read_yaml(settings$input_files) +for (myname in names(settings)) { + temp <- get(myname, settings) + assign(myname, temp) +} + +field_type0 <- "T2Ds" + +# get first variable and list associated to pr variable +var0 <- "zg" +list0 <- metadata + +# get name of climofile for first variable and list associated +# to first climofile +climofiles <- names(list0) +climolist0 <- get(climofiles[1], list0) + +diag_base <- climolist0$diagnostic +print(paste(diag_base, ": starting routine")) + +# create working dirs if they do not exist +work_dir <- settings$work_dir +regridding_dir <- settings$run_dir +plot_dir <- settings$plot_dir +dir.create(work_dir, recursive = T, showWarnings = F) +dir.create(regridding_dir, recursive = T, showWarnings = F) +dir.create(plot_dir, recursive = T, showWarnings = F) + +# setup provenance file and list +provenance_file <- paste0(regridding_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +# extract metadata +models_dataset <- unname(sapply(list0, "[[", "dataset")) +models_ensemble <- unname(sapply(list0, "[[", "ensemble")) +models_exp <- unname(sapply(list0, "[[", "exp")) +reference_model <- unname(sapply(list0, "[[", "reference_dataset"))[1] +models_start_year <- unname(sapply(list0, "[[", "start_year")) +models_end_year <- unname(sapply(list0, "[[", "end_year")) +models_experiment <- unname(sapply(list0, "[[", "exp")) +models_ensemble <- unname(sapply(list0, "[[", "ensemble")) + +## +## Run it all +## + +for (model_idx in c(1:(length(models_dataset)))) { + exp <- models_exp[model_idx] + dataset <- models_dataset[model_idx] + ensemble <- models_ensemble[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + infile <- climofiles[model_idx] + for (seas in seasons) { + filenames <- miles_regimes_fast( + dataset = dataset, expid = exp, ens = ensemble, + year1 = year1, year2 = year2, season = seas, + z500filename = infile, FILESDIR = work_dir, nclusters = nclusters, + doforce = T + ) + # Set provenance for output files + xprov <- provenance_record(list(infile)) + for (fname in filenames) { + provenance[[fname]] <- xprov + } + } +} + +## +## Make the plots +## +if (write_plots) { + ref_idx <- which(models_dataset == reference_model) + if (length(ref_idx) == 0) { + ref_idx <- length(models_dataset) + } + dataset_ref <- models_dataset[ref_idx] + exp_ref <- models_exp[ref_idx] + ensemble_ref <- models_ensemble[ref_idx] + year1_ref <- models_start_year[ref_idx] + year2_ref <- models_end_year[ref_idx] + + for (model_idx in c(1:(length(models_dataset)))) { + if (model_idx != ref_idx) { + exp <- models_exp[model_idx] + 
dataset <- models_dataset[model_idx] + ensemble <- models_ensemble[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + for (seas in seasons) { + filenames <- miles_regimes_figures( + expid = exp, year1 = year1, year2 = year2, dataset = dataset, + ens = ensemble, dataset_ref = dataset_ref, expid_ref = exp_ref, + year1_ref = year1_ref, ens_ref = ensemble_ref, + year2_ref = year2_ref, season = seas, + FIGDIR = plot_dir, FILESDIR = work_dir, REFDIR = work_dir, + nclusters + ) + # Set provenance for output files (same as diagnostic files) + xprov <- provenance_record(list(climofiles[model_idx], + climofiles[ref_idx])) + for (fname in filenames$figs) { + provenance[[fname]] <- xprov + } + } + } + } +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/miles/regimes_fast.R b/esmvaltool/diag_scripts/miles/regimes_fast.R new file mode 100644 index 0000000000..464bd7c26b --- /dev/null +++ b/esmvaltool/diag_scripts/miles/regimes_fast.R @@ -0,0 +1,176 @@ +###################################################### +#------Regimes routines computation for MiLES--------# +#-------------P. Davini (May 2017)-------------------# +###################################################### + +miles_regimes_fast <- function(dataset, expid, ens, year1, year2, season, + z500filename, FILESDIR, nclusters, doforce) { + + # t0 + t0 <- proc.time() + + if (nclusters != 4 | season != "DJF") { + stop("Beta version: unsupported season and/or number of clusters") + } + + # test function to smooth the seasonal cycle: + # it does not work well yet, keep it false + smoothing <- T + + # region boundaries for North Atlantic + xlim <- c(-80, 40) + ylim <- c(30, 87.5) + + # define the file where data are saved + savefile1 <- file_builder(FILESDIR, "Regimes", "RegimesPattern", dataset, + expid, ens, year1, year2, season) + + # check if data is already there to avoid re-run + if (file.exists(savefile1)) { + print("The requested weather regimes data is already there!") + print(savefile1) + if (doforce == T) { + print("Running with doforce=true... re-run!") + } else { + print("Skipping... 
activate doforce=true if you want to re-run it") + q() + } + } + + # setting up time domain + years <- year1:year2 + timeseason <- season2timeseason(season) + + # new file opening + fieldlist <- ncdf_opener_universal(z500filename, namevar = "zg", + tmonths = timeseason, tyears = years, + rotate = "full") + + # extract calendar and time unit from the original file + tcal <- attributes(fieldlist$time)$cal + tunit <- attributes(fieldlist$time)$units + + # time array + etime <- power_date_new(fieldlist$time) + + # declare variable + Z500 <- fieldlist$field + + print("Compute anomalies based on daily mean") + # smoothing flag and daily anomalies + if (smoothing) { + Z500anom <- daily_anom_run_mean(ics, ipsilon, Z500, etime) + } else { + Z500anom <- daily_anom_mean(ics, ipsilon, Z500, etime) + } + + # compute weather regimes: new regimes2 function with minimum + # variance evaluation + weather_regimes <- regimes2(ics, ipsilon, Z500anom, ncluster = nclusters, + ntime = 1000, minvar = 0.8, xlim, ylim, + alg = "Hartigan-Wong") + + # Cluster assignment: based on the position of the absolute maximum/minimum: + # the minimum (negative value) for NAO-, the maximum for the other 3 regimes + compose <- weather_regimes$regimes + names <- paste("Regimes", 1:nclusters) + position <- rbind(c(-45, 65), c(-35, 50), c(10, 60), c(-20, 60)) + rownames(position) <- c("NAO-", "Atlantic Ridge", + "Scandinavian Blocking", "NAO+") + + # minimum distance in degrees to assign a regime name + min_dist_in_deg <- 20 + + # loop + for (i in 1:nclusters) { + + # find positions of the maximum and minimum values + MM <- which(compose[, , i] == max(compose[, , i], na.rm = T), arr.ind = T) + mm <- which(compose[, , i] == min(compose[, , i], na.rm = T), arr.ind = T) + + # use the maximum or minimum (use a special vector to alter the + # distance when needed) + if (max(compose[, , i], na.rm = T) > abs(min(compose[, , i], na.rm = T))) { + distmatrix <- rbind(c(ics[MM[1]], ipsilon[MM[2]]), + position + c(0, 0, 0, 1000)) + } else { + distmatrix <- rbind(c(ics[mm[1]], ipsilon[mm[2]]), + position + c(1000, 1000, 1000, 0)) + } + + # compute distances and assign names + distmm <- dist(distmatrix)[1:nclusters] + print(distmm) + + # assign a name only within the minimum distance (min_dist_in_deg) + if (min(distmm) < min_dist_in_deg) { + names[i] <- rownames(position)[which.min(distmm)] + + # avoid double assignment + if (i > 1 & any(names[i] == names[1:max(c(1, i - 1))])) { + print(paste("Warning: double assignment of the same regime.", + "Avoiding the last assignment...")) + names[i] <- paste("Regime", i) + } + } + print(names[i]) + } + + t1 <- proc.time() - t0 + print(t1) + + ########################################################## + #------------------------Save to NetCDF------------------# + ########################################################## + + # saving output to netcdf files + print("saving NetCDF climatologies...") + + # dimensions definition + fulltime <- as.numeric(etime$data) - as.numeric(etime$data)[1] + TIME <- paste(tunit, " since ", year1, "-", timeseason[1], + "-01 00:00:00", sep = "") + x <- ncdim_def("lon", "degrees_east", ics, longname = "longitude") + y <- ncdim_def("lat", "degrees_north", ipsilon, longname = "latitude") + t <- ncdim_def("time", TIME, fulltime, calendar = tcal, + longname = "time", unlim = T) + + # extra dimensions definition + cl <- ncdim_def("lev", "cluster index", 1:nclusters, longname = "cluster index") + + # var definition + unit <- "m" + longvar <- "Weather Regimes Pattern" + pattern_ncdf <- ncvar_def("Regimes", unit, list(x, y, cl), 
-999, + longname = longvar, prec = "single", + compression = 1) + + unit <- paste0("0-", nclusters) + longvar <- "Weather Regimes Cluster Index" + cluster_ncdf <- ncvar_def("Indices", unit, list(t), -999, longname = longvar, + prec = "single", compression = 1) + + unit <- "%" + longvar <- "Weather Regimes Frequencies" + frequencies_ncdf <- ncvar_def("Frequencies", unit, list(cl), -999, + longname = longvar, prec = "single", + compression = 1) + + # testnames + dimnchar <- ncdim_def("nchar", "", 1:max(nchar(names)), + create_dimvar = FALSE) + names_ncdf <- ncvar_def("Names", "", list(dimnchar, cl), prec = "char") + + # saving file + ncfile1 <- nc_create(savefile1, list(pattern_ncdf, cluster_ncdf, + frequencies_ncdf, names_ncdf)) + ncvar_put(ncfile1, "Regimes", weather_regimes$regimes, start = c(1, 1, 1), + count = c(-1, -1, -1)) + ncvar_put(ncfile1, "Indices", weather_regimes$cluster, start = c(1), + count = c(-1)) + ncvar_put(ncfile1, "Frequencies", weather_regimes$frequencies, + start = c(1), count = c(-1)) + ncvar_put(ncfile1, "Names", names) + nc_close(ncfile1) + return(savefile1) +} diff --git a/esmvaltool/diag_scripts/miles/regimes_figures.R b/esmvaltool/diag_scripts/miles/regimes_figures.R new file mode 100644 index 0000000000..aff99c6f71 --- /dev/null +++ b/esmvaltool/diag_scripts/miles/regimes_figures.R @@ -0,0 +1,142 @@ +###################################################### +#------Regimes routines figures for MiLES------------# +#-------------P. Davini (May 2017)-------------------# +###################################################### + +miles_regimes_figures <- function(dataset, expid, ens, year1, year2, + dataset_ref, expid_ref, ens_ref, + year1_ref, year2_ref, season, + FIGDIR, FILESDIR, REFDIR, nclusters) { + + if (nclusters != 4 | season != "DJF") { + stop("Beta version: unsupported season and/or number of clusters") + } + + ########################################################## + #-----------------Loading datasets-----------------------# + ########################################################## + + # loading anomalies and variances of experiment + nomefile <- file_builder(FILESDIR, "Regimes", "RegimesPattern", dataset, + expid, ens, year1, year2, season) + frequencies_exp <- ncdf_opener(nomefile, "Frequencies") + regimes_exp <- ncdf_opener(nomefile, namevar = "Regimes", rotate = "no") + + # loading names + p <- nc_open(nomefile) + names_exp <- ncvar_get(p, "Names") + nc_close(p) + print(names_exp) + + # loading reference field + # check for REFDIR==FILESDIR, i.e. 
if we are using the climatology + # provided by MiLES or another dataset MiLES-generated + if (REFDIR != FILESDIR) { + nomefile_ref <- paste0(file.path(REFDIR, "Regimes"), "/RegimesPattern_", + dataset_ref, "_", year1_ref, "_", year2_ref, + "_", season, ".nc") + } else { + # use file.builder to create the path of the blocking files + nomefile_ref <- file_builder(FILESDIR, "Regimes", "RegimesPattern", + dataset_ref, expid_ref, ens_ref, + year1_ref, year2_ref, season) + } + + frequencies_ref <- ncdf_opener(nomefile_ref, "Frequencies") + regimes_ref <- ncdf_opener(nomefile_ref, namevar = "Regimes", rotate = "no") + + # loading names + p <- nc_open(nomefile_ref) + names_ref <- ncvar_get(p, "Names") + nc_close(p) + print(names_ref) + + # plot properties + lev_field <- seq(-250, 250, 20) + lev_diff <- seq(-150, 150, 20) + + # standard properties + info_exp <- info_builder(dataset, expid, ens, year1, year2, season) + info_ref <- info_builder(dataset_ref, expid_ref, ens_ref, + year1_ref, year2_ref, season) + + filenames <- c() + kk0 <- 1 + # loop on regimes + for (name in names_ref) { + #-----plotting-------# + # a bit complicated, but it is used to compare similar regimes + # even if they do not have equal percentages of occurrence (matching by name) + ii <- which(name == names_exp) + jj <- which(name == names_ref) + print(ii) + print(jj) + if (length(ii) == 0) { + ii <- which(setdiff(names_exp, names_ref)[kk0] == names_exp) + kk0 <- kk0 + 1 + } + print(name) + + # final plot production + figname <- fig_builder(FIGDIR, "Regimes", paste0("Regime", ii), + dataset, expid, ens, year1, year2, + season, output_file_type) + print(figname) + filenames <- c(filenames, figname) + + # Choose output format for figure - by JvH + open_plot_device(figname, output_file_type) + + # where to plot frequency values + if (map_projection == "no") { + varpoints <- c(120, 85) + } else { + varpoints <- c(0, 0.7) + } + + # plot properties + par(plotpar) + + im <- plot_prepare(ics, ipsilon, regimes_exp[, , ii], + proj = map_projection, lat_lim = lat_lim) + filled_contour3(im$x, im$y, im$z, xlab = im$xlab, ylab = im$ylab, + main = paste(info_exp), levels = lev_field, + color.palette = palette3, xlim = im$xlim, + ylim = im$ylim, axes = im$axes) + mtext(name, side = 3, line = .5, outer = TRUE, cex = 2, font = 2) + proj_addland(proj = map_projection) + text(varpoints[1], varpoints[2], + paste("Frequencies: ", round(frequencies_exp[ii], 2), "%", sep = ""), + cex = 2) + + im <- plot_prepare(ics, ipsilon, regimes_ref[, , jj], + proj = map_projection, lat_lim = lat_lim) + filled_contour3(im$x, im$y, im$z, xlab = im$xlab, ylab = im$ylab, + main = paste(info_ref), levels = lev_field, + color.palette = palette3, xlim = im$xlim, + ylim = im$ylim, axes = im$axes) + proj_addland(proj = map_projection) + text(varpoints[1], varpoints[2], paste("Frequencies: ", + round(frequencies_ref[jj], 2), "%", sep = ""), cex = 2) + image_scale3(volcano, levels = lev_field, color.palette = palette3, + colorbar.label = "m", cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, colorbar.width = 1 * af, + line.label = imgscl_line) + + # delta field plot + im <- plot_prepare(ics, ipsilon, regimes_exp[, , ii] - regimes_ref[, , jj], + proj = map_projection, lat_lim = lat_lim) + filled_contour3(im$x, im$y, im$z, xlab = im$xlab, ylab = im$ylab, + main = paste("Difference"), levels = lev_diff, + color.palette = palette2, xlim = im$xlim, + ylim = im$ylim, axes = im$axes) + proj_addland(proj = map_projection) + image_scale3(volcano, levels = lev_diff, color.palette = 
palette2, + colorbar.label = "m", cex.colorbar = imgscl_colorbar, + cex.label = imgscl_label, colorbar.width = 1 * af, + line.label = imgscl_line) + + dev.off() + } + return(list(figs = filenames, mod = nomefile, ref = nomefile_ref)) +} diff --git a/tests/unit/preprocessor/_area_pp/__init__.py b/esmvaltool/diag_scripts/ocean/__init__.py similarity index 100% rename from tests/unit/preprocessor/_area_pp/__init__.py rename to esmvaltool/diag_scripts/ocean/__init__.py diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_maps.py b/esmvaltool/diag_scripts/ocean/diagnostic_maps.py index 9fffc1e231..586afd8588 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_maps.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_maps.py @@ -1,7 +1,8 @@ """ -Diagnostic Maps: +Maps diagnostics +================ -Diagnostic to produce png images of a map with coastlines from a cube. +Diagnostic to produce images of a map with coastlines from a cube. These plots show latitude vs longitude and the cube value is used as the colour scale. @@ -10,13 +11,28 @@ and metadata.yml files) has no time component, a small number of depth layers, and latitude and longitude coordinates. -An approproate preprocessor for a 3D+time field would be: -preprocessors: - prep_map: - extract_levels: - levels: [100., ] - scheme: linear_extrap - time_average: +An appropriate preprocessor for a 3D+time field would be:: + + preprocessors: + prep_map: + extract_levels: + levels: [100., ] + scheme: linear_extrap + time_average: + + +Note that this recipe may not function on machines with no access to the +internet, as cartopy may try to download the shapefiles. The solution to +this issue is to put the relevant cartopy shapefiles on a disk visible to your +machine, then link that path to ESMValTool via the `auxiliary_data_dir` +variable. The cartopy masking files can be downloaded from:: + + https://www.naturalearthdata.com/downloads/ + +Here, cartopy uses the 1:110m physical coastline and land files:: + + 110m_coastline.dbf 110m_coastline.shp 110m_coastline.shx + 110m_land.dbf 110m_land.shp 110m_land.shx This tool is part of the ocean diagnostic tools package in the ESMValTool. @@ -26,12 +42,14 @@ import logging import os import sys +from itertools import product +import matplotlib.pyplot as plt import iris import iris.quickplot as qplt -import matplotlib.pyplot as plt +import cartopy -import diagnostic_tools as diagtools +from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools from esmvaltool.diag_scripts.shared import run_diagnostic # This part sends debug statements to stdout @@ -47,9 +65,15 @@ def make_map_plots( """ Make a simple map plot for an individual model. - The cfg is the opened global config, - metadata is the metadata dictionairy - filename is the preprocessing model file. + Parameters + ---------- + cfg: dict + the opened global config dictionary, passed by ESMValTool. + metadata: dict + the metadata dictionary + filename: str + the preprocessed model file. + """ # Load cube and set up units cube = iris.load_cube(filename) @@ -61,11 +85,14 @@ def make_map_plots( # Make a dict of cubes for each layer. 
cubes = diagtools.make_cube_layer_dict(cube) + + # Load image format extension + image_extention = diagtools.get_image_format(cfg) + + # Making plots for each layer for layer_index, (layer, cube_layer) in enumerate(cubes.items()): layer = str(layer) - qplt.contourf(cube_layer, 25) + qplt.contourf(cube_layer, 25, linewidth=0, rasterized=True) try: plt.gca().coastlines() @@ -75,21 +102,22 @@ def make_map_plots( # Add title to plot title = ' '.join([metadata['dataset'], metadata['long_name']]) if layer: - title = ' '.join( - [title, '(', layer, - str(cube_layer.coords('depth')[0].units), ')']) + title = ' '.join([ + title, '(', layer, + str(cube_layer.coords('depth')[0].units), ')' + ]) plt.title(title) - # Determine png filename: + # Determine image filename: if multi_model: path = diagtools.folder( cfg['plot_dir']) + os.path.basename(filename).replace( - '.nc', '_map_' + str(layer_index) + '.png') + '.nc', '_map_' + str(layer_index) + image_extention) else: path = diagtools.get_image_path( cfg, metadata, - suffix='map_' + str(layer_index) + '.png', + suffix='map_' + str(layer_index) + image_extention, ) # Saving files: @@ -101,12 +129,248 @@ def make_map_plots( plt.close() + +def make_map_contour( + cfg, + metadata, + filename, +): + """ + Make a simple contour map plot for an individual model. + + Parameters + ---------- + cfg: dict + the opened global config dictionary, passed by ESMValTool. + metadata: dict + the metadata dictionary + filename: str + the preprocessed model file. + + """ + # Load cube and set up units + cube = iris.load_cube(filename) + cube = diagtools.bgc_units(cube, metadata['short_name']) + + # Is this data a multi-model dataset? + multi_model = metadata['dataset'].find('MultiModel') > -1 + + # Make a dict of cubes for each layer. + cubes = diagtools.make_cube_layer_dict(cube) + + # Load image format extension. + image_extention = diagtools.get_image_format(cfg) + + # Load threshold/thresholds. 
+ plot_details = {} + colours = [] + thresholds = diagtools.load_thresholds(cfg, metadata) + + for itr, thres in enumerate(thresholds): + if len(thresholds) > 1: + colour = plt.cm.jet(float(itr) / float(len(thresholds) - 1.)) + else: + colour = plt.cm.jet(0) + label = str(thres) + ' ' + str(cube.units) + colours.append(colour) + plot_details[thres] = {'c': colour, + 'lw': 1, + 'ls': '-', + 'label': label} + + linewidths = [1 for thres in thresholds] + linestyles = ['-' for thres in thresholds] + # Making plots for each layer + for layer_index, (layer, cube_layer) in enumerate(cubes.items()): + layer = str(layer) + qplt.contour(cube_layer, + thresholds, + colors=colours, + linewidths=linewidths, + linestyles=linestyles, + rasterized=True) + + try: + plt.gca().coastlines() + except AttributeError: + logger.warning('Not able to add coastlines') + try: + plt.gca().add_feature(cartopy.feature.LAND, + zorder=10, + facecolor=[0.8, 0.8, 0.8]) + except AttributeError: + logger.warning('Not able to add coastlines') + # Add legend + diagtools.add_legend_outside_right(plot_details, + plt.gca(), + column_width=0.02, + loc='below') + + # Add title to plot + title = ' '.join([metadata['dataset'], metadata['long_name']]) + depth_units = str(cube_layer.coords('depth')[0].units) + if layer: + title = '{} ({} {})'.format(title, layer, depth_units) + plt.title(title) + + # Determine image filename: + if multi_model: + path = os.path.join(diagtools.folder(cfg['plot_dir']), + os.path.basename(filename)) + path = path.replace('.nc', '_contour_map_' + str(layer_index)) + path = path + image_extention + else: + path = diagtools.get_image_path( + cfg, + metadata, + suffix='_contour_map_' + str(layer_index) + image_extention, + ) + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + + plt.close() + + +def multi_model_contours( + cfg, + metadata, +): + """ + Make a contour map showing several models. + + Parameters + ---------- + cfg: dict + the opened global config dictionary, passed by ESMValTool. + metadata: dict + the metadata dictionary. + + """ + #### + # Load the data for each layer as a separate cube + model_cubes = {} + layers = {} + for filename in sorted(metadata): + cube = iris.load_cube(filename) + cube = diagtools.bgc_units(cube, metadata[filename]['short_name']) + + cubes = diagtools.make_cube_layer_dict(cube) + model_cubes[filename] = cubes + for layer in cubes: + layers[layer] = True + + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # Load threshold/thresholds. + thresholds = diagtools.load_thresholds(cfg, metadata) + + # Make a plot for each layer and each threshold + for layer, threshold in product(layers, thresholds): + + title = '' + z_units = '' + plot_details = {} + cmap = plt.cm.get_cmap('jet') + land_drawn = False + + # Plot each file in the group + for index, filename in enumerate(sorted(metadata)): + + if len(metadata) > 1: + color = cmap(index / (len(metadata) - 1.)) + else: + color = 'blue' + linewidth = 1. 
+ linestyle = '-' + + # Determine line style for Observations + if metadata[filename]['project'] in diagtools.get_obs_projects(): + color = 'black' + linewidth = 1.7 + linestyle = '-' + + # Determine line style for MultiModel statistics: + if 'MultiModel' in metadata[filename]['dataset']: + color = 'black' + linestyle = ':' + linewidth = 1.4 + + cube = model_cubes[filename][layer] + qplt.contour(cube, + [threshold, ], + colors=[color, ], + linewidths=linewidth, + linestyles=linestyle, + rasterized=True) + plot_details[filename] = { + 'c': color, + 'ls': linestyle, + 'lw': linewidth, + 'label': metadata[filename]['dataset'] + } + + if not land_drawn: + try: + plt.gca().coastlines() + except AttributeError: + logger.warning('Not able to add coastlines') + plt.gca().add_feature(cartopy.feature.LAND, + zorder=10, + facecolor=[0.8, 0.8, 0.8]) + land_drawn = True + + title = metadata[filename]['long_name'] + if layer != '': + z_units = model_cubes[filename][layer].coords('depth')[0].units + units = str(model_cubes[filename][layer].units) + + # Add title, threshold, legend to plots + title = ' '.join([title, str(threshold), units]) + if layer: + title = ' '.join([title, '(', str(layer), str(z_units), ')']) + plt.title(title) + plt.legend(loc='best') + + # Saving files: + if cfg['write_plots']: + path = diagtools.get_image_path( + cfg, + metadata[filename], + prefix='MultipleModels_', + suffix='_'.join(['_contour_map_', + str(threshold), + str(layer) + image_extention]), + metadata_id_list=[ + 'field', 'short_name', 'preprocessor', 'diagnostic', + 'start_year', 'end_year' + ], + ) + + # Resize and add legend outside the axes. + plt.gcf().set_size_inches(9., 6.) + diagtools.add_legend_outside_right( + plot_details, plt.gca(), column_width=0.15) + + logger.info('Saving plots to %s', path) + plt.savefig(path) + plt.close() + + def main(cfg): + """ - Load the config file, and send it to the plot maker. + Load the config file, and send it to the plot makers. + + Parameters + ---------- + cfg: dict + the opened global config dictionary, passed by ESMValTool. - The cfg is the opened global config. """ + cartopy.config['data_dir'] = cfg['auxiliary_data_dir'] + for index, metadata_filename in enumerate(cfg['input_files']): logger.info( 'metadata filename:\t%s', @@ -114,6 +378,16 @@ def main(cfg): ) metadatas = diagtools.get_input_files(cfg, index=index) + thresholds = diagtools.load_thresholds(cfg, metadatas) + + if thresholds: + ####### + # Multi model contour plots + multi_model_contours( + cfg, + metadatas, + ) + for filename in sorted(metadatas.keys()): logger.info('-----------------') @@ -123,7 +397,12 @@ def main(cfg): ) ###### - # Time series of individual model + # Contour maps of individual model + if thresholds: + make_map_contour(cfg, metadatas[filename], filename) + + ###### + # Maps of individual model make_map_plots(cfg, metadatas[filename], filename) logger.info('Success') diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py b/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py new file mode 100644 index 0000000000..ce762f7e13 --- /dev/null +++ b/esmvaltool/diag_scripts/ocean/diagnostic_maps_quad.py @@ -0,0 +1,220 @@ +""" +Model 1 vs Model 2 vs Observations diagnostics. +=============================================== + +Diagnostic to produce an image showing four maps, based on a comparison of two +different models' results against an observational dataset. 
This process is +often used to compare a new iteration of a model under development against +a previous version of the same model. The four map plots are: + +* Top left: model 1 +* Top right: model 1 minus model 2 +* Bottom left: model 2 minus obs +* Bottom right: model 1 minus obs + +All four plots show latitude vs longitude and the cube value is used as the +colour scale. + +Note that this diagnostic assumes that the preprocessors do the bulk of the +hard work, and that the cube received by this diagnostic (via the settings.yml +and metadata.yml files) has no time component, a small number of depth layers, +and latitude and longitude coordinates. + +An appropriate preprocessor for a 3D+time field would be:: + + preprocessors: + prep_map: + extract_levels: + levels: [100., ] + scheme: linear_extrap + time_average: + +This diagnostic also requires the ``exper_model``, ``control_model`` and +``observational_dataset`` keys in the recipe:: + + diagnostics: + diag_name: + ... + scripts: + Global_Ocean_map: + script: ocean/diagnostic_maps_quad.py + exper_model: {Model 1 dataset details} + control_model: {Model 2 dataset details} + observational_dataset: {Observational dataset details} + +This tool is part of the ocean diagnostic tools package in the ESMValTool, +and was based on the plots produced by the Ocean Assess/Marine Assess toolkit. + +Author: Lee de Mora (PML) + ledm@pml.ac.uk + +""" +import logging +import os +import sys + +import iris +import iris.quickplot as qplt +import matplotlib.pyplot as plt +import numpy as np + +from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools +from esmvaltool.diag_scripts.shared import run_diagnostic + +# This part sends debug statements to stdout +logger = logging.getLogger(os.path.basename(__file__)) +logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) + + +def add_map_subplot(subplot, cube, nspace, title='', cmap=''): + """ + Add a map subplot to the current pyplot figure. + + Parameters + ---------- + subplot: int + The matplotlib.pyplot subplot number. (i.e. 221) + cube: iris.cube.Cube + the iris cube to be plotted. + nspace: numpy.array + An array of the ticks of the colour bar. + title: str + A string to set as the subplot title. + cmap: str + A string to describe the matplotlib colour map. + + """ + plt.subplot(subplot) + qplot = qplt.contourf(cube, nspace, linewidth=0, + cmap=plt.cm.get_cmap(cmap)) + qplot.colorbar.set_ticks([nspace.min(), + (nspace.max() + nspace.min()) / 2., + nspace.max()]) + + plt.gca().coastlines() + plt.title(title) + + +def multi_model_maps( + cfg, + input_files, +): + """ + Make the four pane model vs model vs obs comparison plot. + + Parameters + ---------- + cfg: dict + the opened global config dictionary, passed by ESMValTool. 
+ input_files: dict + the metadata dictionary + + """ + filenames = {} + ctl_key = 'control_model' + exp_key = 'exper_model' + obs_key = 'observational_dataset' + model_types = [ctl_key, exp_key, obs_key] + for model_type in model_types: + logger.debug('%s: %s', model_type, cfg[model_type]) + filenames[model_type] = diagtools.match_model_to_key(model_type, + cfg[model_type], + input_files) + + # #### + # Load the data for each layer as a separate cube + layers = {} + cubes = {} + for model_type, input_file in filenames.items(): + cube = iris.load_cube(input_file) + cube = diagtools.bgc_units(cube, input_files[input_file]['short_name']) + + cubes[model_type] = diagtools.make_cube_layer_dict(cube) + for layer in cubes[model_type]: + layers[layer] = True + + logger.debug('layers: %s', ', '.join(layers)) + logger.debug('cubes: %s', ', '.join(cubes.keys())) + + # #### + # load names: + exper = input_files[filenames[exp_key]]['dataset'] + control = input_files[filenames[ctl_key]]['dataset'] + obs = input_files[filenames[obs_key]]['dataset'] + long_name = cubes[exp_key][list(layers.keys())[0]].long_name + + # Load image format extension + image_extention = diagtools.get_image_format(cfg) + + # Make a plot for each layer + for layer in layers: + fig = plt.figure() + fig.set_size_inches(9, 6) + + # Create the cubes + cube221 = cubes[exp_key][layer] + cube222 = cubes[exp_key][layer] - cubes[ctl_key][layer] + cube223 = cubes[ctl_key][layer] - cubes[obs_key][layer] + cube224 = cubes[exp_key][layer] - cubes[obs_key][layer] + + # create the z axis for plots 2, 3, 4. + zrange1 = diagtools.get_cube_range([cube221, ]) + zrange2 = diagtools.get_cube_range_diff([cube222, cube223, cube224]) + + linspace1 = np.linspace(zrange1[0], zrange1[1], 12, endpoint=True) + linspace2 = np.linspace(zrange2[0], zrange2[1], 12, endpoint=True) + + # Add the sub plots to the figure. + add_map_subplot(221, cube221, linspace1, cmap='viridis', title=exper) + add_map_subplot(222, cube222, linspace2, cmap='bwr', + title=' '.join([exper, 'minus', control])) + add_map_subplot(223, cube223, linspace2, cmap='bwr', + title=' '.join([control, 'minus', obs])) + add_map_subplot(224, cube224, linspace2, cmap='bwr', + title=' '.join([exper, 'minus', obs])) + + # Add overall title + fig.suptitle(long_name, fontsize=14) + + # Determine image filename: + fn_list = [long_name, exper, control, obs, str(layer)] + path = diagtools.folder(cfg['plot_dir']) + '_'.join(fn_list) + path = path.replace(' ', '') + image_extention + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + + plt.close() + + +def main(cfg): + """ + Load the config file, and send it to the plot maker. + + Parameters + ---------- + cfg: dict + the opened global config dictionary, passed by ESMValTool. + + """ + for index, metadata_filename in enumerate(cfg['input_files']): + logger.info( + 'metadata filename:\t%s', + metadata_filename, + ) + input_files = diagtools.get_input_files(cfg, index=index) + # ##### + # Multi model maps + multi_model_maps( + cfg, + input_files, + ) + + logger.info('Success') + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py b/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py new file mode 100644 index 0000000000..4eab99b76b --- /dev/null +++ b/esmvaltool/diag_scripts/ocean/diagnostic_model_vs_obs.py @@ -0,0 +1,516 @@ +""" +Model vs Observations maps Diagnostic. 
+======================================
+
+Diagnostic to produce a comparison of model and observational data.
+The first kind of image shows four maps and the other shows a scatter plot.
+
+The four pane image shows latitude vs longitude maps of:
+
+* Top left: model
+* Top right: observations
+* Bottom left: model minus observations
+* Bottom right: model over observations
+
+The scatter plots show the matched model data on the x axis and the
+observational data on the y axis, then a linear regression of those data
+is performed and the line of best fit is drawn on the plot.
+The parameters of the fit are also shown on the figure.
+
+Note that this diagnostic assumes that the preprocessors do the bulk of the
+hard work, and that the cube received by this diagnostic (via the settings.yml
+and metadata.yml files) has no time component, a small number of depth layers,
+and latitude and longitude coordinates.
+
+An appropriate preprocessor for a 3D+time field would be::
+
+    preprocessors:
+      prep_map:
+        extract_levels:
+          levels: [100., ]
+          scheme: linear_extrap
+        time_average:
+        regrid:
+          target_grid: 1x1
+          scheme: linear
+
+This tool is part of the ocean diagnostic tools package in the ESMValTool,
+and was based on the plots produced by the Ocean Assess/Marine Assess toolkit.
+
+Author: Lee de Mora (PML)
+        ledm@pml.ac.uk
+"""
+import logging
+import os
+import sys
+import math
+
+from matplotlib import pyplot
+from matplotlib.colors import LogNorm
+import matplotlib.pyplot as plt
+
+import iris
+import iris.quickplot as qplt
+import numpy as np
+from scipy.stats import linregress
+
+from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools
+from esmvaltool.diag_scripts.shared import run_diagnostic
+
+# This part sends debug statements to stdout
+logger = logging.getLogger(os.path.basename(__file__))
+logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
+
+
+def add_map_subplot(subplot, cube, nspace, title='',
+                    cmap='', extend='neither', log=False):
+    """
+    Add a map subplot to the current pyplot figure.
+
+    Parameters
+    ----------
+    subplot: int
+        The matplotlib.pyplot subplot number (e.g. 221).
+    cube: iris.cube.Cube
+        the iris cube to be plotted.
+    nspace: numpy.array
+        An array of the ticks of the colour bar.
+    title: str
+        A string to set as the subplot title.
+    cmap: str
+        A string to describe the matplotlib colour map.
+    extend: str
+        Contourf-coloring of values outside the levels range.
+    log: bool
+        Flag to plot the colour scale linearly (False) or
+        logarithmically (True).
+    """
+    plt.subplot(subplot)
+    logger.info('add_map_subplot: %s', subplot)
+    if log:
+        qplot = qplt.contourf(
+            cube,
+            nspace,
+            linewidth=0,
+            cmap=plt.cm.get_cmap(cmap),
+            norm=LogNorm(),
+            vmin=nspace.min(),
+            vmax=nspace.max())
+        qplot.colorbar.set_ticks([0.1, 1., 10.])
+    else:
+        qplot = iris.plot.contourf(
+            cube,
+            nspace,
+            linewidth=0,
+            cmap=plt.cm.get_cmap(cmap),
+            extend=extend,
+            vmin=nspace.min(),
+            vmax=nspace.max())
+        cbar = pyplot.colorbar(orientation='horizontal')
+        cbar.set_ticks(
+            [nspace.min(), (nspace.max() + nspace.min()) / 2.,
+             nspace.max()])
+
+    plt.gca().coastlines()
+    plt.title(title)
+
+
+def make_model_vs_obs_plots(
+        cfg,
+        metadata,
+        model_filename,
+        obs_filename):
+    """
+    Make a four pane map figure comparing the model against observations.
+ + The four pane image is a latitude vs longitude figures showing: + + * Top left: model + * Top right: observations + * Bottom left: model minus observations + * Bottom right: model over observations + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + the input files dictionairy + model_filename: str + the preprocessed model file. + obs_filename: str + the preprocessed observations file. + + """ + filenames = {'model': model_filename, 'obs': obs_filename} + logger.debug('make_model_vs_obs_plots filenames: %s', filenames) + # #### + # Load the data for each layer as a separate cube + input_file = None + layers = {} + cubes = {} + for model_type, input_file in filenames.items(): + logger.debug('loading: \t%s, \t%s', model_type, input_file) + cube = iris.load_cube(input_file) + cube = diagtools.bgc_units(cube, metadata[input_file]['short_name']) + cubes[model_type] = diagtools.make_cube_layer_dict(cube) + for layer in cubes[model_type]: + layers[layer] = True + + logger.debug('layers: %s', layers) + logger.debug('cubes: %s', ', '.join(cubes.keys())) + + # #### + # load names: + model = metadata[filenames['model']]['dataset'] + obs = metadata[filenames['obs']]['dataset'] + + long_name = cubes['model'][list(layers.keys())[0]].long_name + units = str(cubes['model'][list(layers.keys())[0]].units) + + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # Make a plot for each layer + for layer in layers: + + fig = plt.figure() + fig.set_size_inches(9, 6) + + # Create the cubes + cube221 = cubes['model'][layer] + cube222 = cubes['obs'][layer] + cube223 = cubes['model'][layer] - cubes['obs'][layer] + cube224 = cubes['model'][layer] / cubes['obs'][layer] + + # create the z axis for plots 2, 3, 4. + extend = 'neither' + zrange12 = diagtools.get_cube_range([cube221, cube222]) + if 'maps_range' in metadata[input_file]: + zrange12 = metadata[input_file]['maps_range'] + extend = 'both' + zrange3 = diagtools.get_cube_range_diff([cube223]) + if 'diff_range' in metadata[input_file]: + zrange3 = metadata[input_file]['diff_range'] + extend = 'both' + + cube224.data = np.ma.clip(cube224.data, 0.1, 10.) + + n_points = 12 + linspace12 = np.linspace( + zrange12[0], zrange12[1], n_points, endpoint=True) + linspace3 = np.linspace( + zrange3[0], zrange3[1], n_points, endpoint=True) + logspace4 = np.logspace(-1., 1., 12, endpoint=True) + + # Add the sub plots to the figure. + add_map_subplot( + 221, cube221, linspace12, cmap='viridis', title=model, + extend=extend) + add_map_subplot( + 222, cube222, linspace12, cmap='viridis', + title=' '.join([obs]), + extend=extend) + add_map_subplot( + 223, + cube223, + linspace3, + cmap='bwr', + title=' '.join([model, 'minus', obs]), + extend=extend) + if np.min(zrange12) > 0.: + add_map_subplot( + 224, + cube224, + logspace4, + cmap='bwr', + title=' '.join([model, 'over', obs]), + log=True) + + # Add overall title + fig.suptitle(long_name + ' [' + units + ']', fontsize=14) + + # Determine image filename: + fn_list = ['model_vs_obs', long_name, model, obs, str(layer), 'maps'] + path = diagtools.folder(cfg['plot_dir']) + '_'.join(fn_list) + path = path.replace(' ', '') + image_extention + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path, dpi=200) + + plt.close() + + +def rounds_sig(value, sig=3): + """ + Round a float to a specific number of sig. figs. & return it as a string. 
+ + Parameters + ---------- + value: float + The float that is to be rounded. + sig: int + The number of significant figures. + + Returns + ---------- + str: + The rounded output string. + + """ + if value == 0.: + return str(0.) + if value < 0.: + value = abs(value) + return str( + -1. * round(value, sig - int(math.floor(math.log10(value))) - 1)) + return str(round(value, sig - int(math.floor(math.log10(value))) - 1)) + + +def add_linear_regression(plot_axes, + arr_x, + arr_y, + showtext=True, + add_diagonal=False, + extent=None): + """ + Add a straight line fit to an axis. + + Parameters + ---------- + plot_axes: matplotlib.pyplot.axes + The matplotlib axes on which to plot the linear regression. + arr_x: numpy.array + The data for the x coordinate. + arr_y: numpy array + The data for the y coordinate. + showtext: bool + A flag to turn on or off the result of the fit on the plot. + add_diagonal: bool + A flag to also add the 1:1 diagonal line to the figure + extent: list of floats + The extent of the plot axes. + """ + beta1, beta0, r_value, p_value, std_err = linregress(arr_x, arr_y) + texts = [ + r'$\^\beta_0$ = ' + rounds_sig(beta0), + r'$\^\beta_1$ = ' + rounds_sig(beta1), + r'R = ' + rounds_sig(r_value), + r'P = ' + rounds_sig(p_value), + r'N = ' + str(int(len(arr_x))) + ] + thetext = '\n'.join(texts) + + if showtext: + pyplot.text( + 0.04, + 0.96, + thetext, + horizontalalignment='left', + verticalalignment='top', + transform=plot_axes.transAxes) + + if extent is None: + x_values = np.arange(arr_x.min(), arr_x.max(), + (arr_x.max() - arr_x.min()) / 20.) + y_values = [beta0 + beta1 * a for a in x_values] + else: + minv = min(extent) + maxv = max(extent) + x_values = np.arange(minv, maxv, (maxv - minv) / 1000.) + y_values = np.array([beta0 + beta1 * a for a in x_values]) + + mask = (x_values < minv) + (y_values < minv) \ + + (x_values > maxv) + (y_values > maxv) + x_values = np.ma.masked_where(mask, x_values) + y_values = np.ma.masked_where(mask, y_values) + + pyplot.plot(x_values, y_values, 'k') + + if add_diagonal: + axis = pyplot.gca().axis() + step = (max(axis) - min(axis)) / 100. + one_to_one = np.arange(min(axis), max(axis) + step, step) + pyplot.plot(one_to_one, one_to_one, 'k--') + + +def make_scatter( + cfg, + metadata, + model_filename, + obs_filename): + """ + Makes Scatter plots of model vs observational data. + + Make scatter plot showing the matched model and observational data with the + model data as the x-axis coordinate and the observational data as the + y-axis coordinate. A linear regression is also applied to the matched + data and the result of the fit is shown on the figure. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + the input files dictionairy + model_filename: str + the preprocessed model file. + obs_filename: str + the preprocessed observations file. 
+ """ + + filenames = {'model': model_filename, 'obs': obs_filename} + logger.debug('make_model_vs_obs_plots: \t%s', filenames) + # #### + # Load the data for each layer as a separate cube + layers = {} + cubes = {} + for model_type, input_file in filenames.items(): + logger.debug('loading: \t%s, \t%s', model_type, input_file) + cube = iris.load_cube(input_file) + cube = diagtools.bgc_units(cube, metadata[input_file]['short_name']) + cubes[model_type] = diagtools.make_cube_layer_dict(cube) + for layer in cubes[model_type]: + layers[layer] = True + + logger.debug('layers: %s', layers) + logger.debug('cubes: %s', ', '.join(cubes)) + + # #### + # load names: + model = metadata[filenames['model']]['dataset'] + obs = metadata[filenames['obs']]['dataset'] + + long_name = cubes['model'][list(layers.keys())[0]].long_name + + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # Make a plot for each layer + for layer in layers: + + fig = plt.figure() + fig.set_size_inches(7, 6) + + # Create the cubes + model_data = np.ma.array(cubes['model'][layer].data) + obs_data = np.ma.array(cubes['obs'][layer].data) + + mask = model_data.mask + obs_data.mask + model_data = np.ma.masked_where(mask, model_data).compressed() + obs_data = np.ma.masked_where(mask, obs_data).compressed() + + colours = 'gist_yarg' + zrange = diagtools.get_array_range([model_data, obs_data]) + plotrange = [zrange[0], zrange[1], zrange[0], zrange[1]] + + x_scale = 'log' + if np.min(zrange) * np.max(zrange) < -1: + x_scale = 'linear' + if np.min(zrange) < 0.: + logger.info('Skip scatter for %s. Min is < 0', long_name) + return + + pyplot.hexbin( + model_data, + obs_data, + xscale=x_scale, + yscale=x_scale, + bins='log', + # extent=np.log10(plotrange), + gridsize=50, + cmap=pyplot.get_cmap(colours), + mincnt=0) + cbar = pyplot.colorbar() + cbar.set_label('log10(N)') + + pyplot.gca().set_aspect("equal") + pyplot.axis(plotrange) + + add_linear_regression( + pyplot.gca(), + model_data, + obs_data, + showtext=True, + add_diagonal=True, + extent=plotrange) + + pyplot.title(long_name) + pyplot.xlabel(model) + pyplot.ylabel(obs) + + # Determine image filename: + fn_list = [ + 'model_vs_obs', long_name, model, obs, + str(layer), 'scatter' + ] + path = diagtools.folder(cfg['plot_dir']) + '_'.join(fn_list) + path = path.replace(' ', '') + image_extention + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + + plt.close() + + +def main(cfg): + """ + Load the config file, and send it to the plot maker. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. 
+ + """ + for index, metadata_filename in enumerate(cfg['input_files']): + logger.info( + 'metadata filename:\t%s, %s', + index, + metadata_filename, + ) + metadatas = diagtools.get_input_files(cfg, index=index) + + model_type = 'observational_dataset' + logger.debug( + 'model_type: %s, %s', + index, + model_type, + ) + logger.debug( + 'metadatas: %s, %s', + index, + metadatas, + ) + obs_filename = diagtools.match_model_to_key('observational_dataset', + cfg[model_type], metadatas) + for filename in sorted(metadatas.keys()): + + if filename == obs_filename: + continue + if not os.path.exists(obs_filename): + continue + logger.info('-----------------') + logger.info( + 'model filenames:\t%s', + filename, + ) + + # ##### + # model vs obs scatter plots + make_scatter(cfg, metadatas, filename, obs_filename) + + # ##### + # model vs obs map plots + make_model_vs_obs_plots(cfg, metadatas, filename, obs_filename) + logger.info('Success') + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py b/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py index b7d0a4c5fc..35d38b583d 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_profiles.py @@ -1,28 +1,44 @@ """ -Diagnostic: +Profile diagnostics. +==================== -Diagnostic to produce png images of the profile over time from a cube. +Diagnostic to produce figure of the profile over time from a cube. These plost show cube value (ie temperature) on the x-axis, and depth/height -on the y axis. The colour scale is the annual mean of the cube data. +on the y axis. The colour scale is the time series. Note that this diagnostic assumes that the preprocessors do the bulk of the hard work, and that the cube received by this diagnostic (via the settings.yml and metadata.yml files) has a time component, and depth component, but no latitude or longitude coordinates. -An approproate preprocessor for a 3D+time field would be: -preprocessors: - prep_profile: - extract_volume: - long1: 0. - long2: 20. - lat1: -30. - lat2: 30. - z_min: 0. - z_max: 3000. - average_region: - coord1: longitude - coord2: latitude +An approproate preprocessor for a 3D+time field would be:: + + preprocessors: + prep_profile: + extract_volume: + long1: 0. + long2: 20. + lat1: -30. + lat2: 30. + z_min: 0. + z_max: 3000. + average_region: + coord1: longitude + coord2: latitude + +In order to add an observational dataset to the profile plot, the following +arguments are needed in the diagnostic script:: + + diagnostics: + diagnostic_name: + variables: + ... + additional_datasets: + - {observational dataset description} + scripts: + script_name: + script: ocean/diagnostic_profiles.py + observational_dataset: {observational dataset description} This tool is part of the ocean diagnostic tools package in the ESMValTool. @@ -33,11 +49,12 @@ import os import sys +import numpy as np import iris import iris.quickplot as qplt import matplotlib.pyplot as plt -import diagnostic_tools as diagtools +from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools from esmvaltool.diag_scripts.shared import run_diagnostic # This part sends debug statements to stdout @@ -49,7 +66,15 @@ def determine_profiles_str(cube): """ Determine a string from the cube, to describe the profile. - Used in image titles, descriptions and filenames. + Parameters + ---------- + cube: iris.cube.Cube + the opened dataset as a cube. 
+ + Returns + ------- + str + Returns a string which describes the profile. """ options = ['latitude', 'longitude'] for option in options: @@ -70,41 +95,72 @@ def make_profiles_plots( cfg, metadata, filename, + obs_metadata={}, + obs_filename='', ): """ - Make a simple profile plot for an individual model. + Make a profile plot for an individual model. + + The optional observational dataset can also be added. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + filename: str + The preprocessed model file. + obs_metadata: dict + The metadata dictionairy for the observational dataset. + obs_filename: str + The preprocessed observational dataset file. - The cfg is the opened global config, - metadata is the metadata dictionairy - filename is the preprocessing model file. """ # Load cube and set up units cube = iris.load_cube(filename) cube = diagtools.bgc_units(cube, metadata['short_name']) - # Make annual means from: - cube = cube.aggregated_by('year', iris.analysis.MEAN) + raw_times = diagtools.cube_time_to_float(cube) - # Is this data is a multi-model dataset? - multi_model = metadata['dataset'].find('MultiModel') > -1 + # Make annual or Decadal means from: + if np.max(raw_times) - np.min(raw_times) < 20: + cube = cube.aggregated_by('year', iris.analysis.MEAN) + else: + cube = diagtools.decadal_average(cube) - # - times = cube.coord('time') - times_float = diagtools.timecoord_to_float(times) + times_float = diagtools.cube_time_to_float(cube) time_0 = times_float[0] + # Is this data is a multi-model dataset? + multi_model = metadata['dataset'].find('MultiModel') > -1 + cmap = plt.cm.get_cmap('jet') plot_details = {} for time_index, time in enumerate(times_float): - - color = cmap((time - time_0) / (times_float[-1] - time_0)) + if times_float[-1] == time_0: + color = 'black' + else: + color = cmap((time - time_0) / (times_float[-1] - time_0)) qplt.plot(cube[time_index, :], cube[time_index, :].coord('depth'), c=color) - plot_details[time_index] = {'c': color, 'ls': '-', 'lw': 1, - 'label': str(int(time))} + plot_details[str(time_index)] = {'c': color, 'ls': '-', 'lw': 1, + 'label': str(int(time))} + + # Add observational data. + if obs_filename: + obs_cube = iris.load_cube(obs_filename) + obs_cube = diagtools.bgc_units(obs_cube, metadata['short_name']) + obs_cube = obs_cube.collapsed('time', iris.analysis.MEAN) + + obs_key = obs_metadata['dataset'] + qplt.plot(obs_cube, obs_cube.coord('depth'), c='black') + + plot_details[obs_key] = {'c': 'black', 'ls': '-', 'lw': 1, + 'label': obs_key} # Add title to plot title = ' '.join([ @@ -116,16 +172,19 @@ def make_profiles_plots( # Add Legend outside right. diagtools.add_legend_outside_right(plot_details, plt.gca()) - # Determine png filename: + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # Determine image filename: if multi_model: path = diagtools.folder( cfg['plot_dir']) + os.path.basename(filename).replace( - '.nc', '_profile.png') + '.nc', '_profile' + image_extention) else: path = diagtools.get_image_path( cfg, metadata, - suffix='profile.png', + suffix='profile' + image_extention, ) # Saving files: @@ -138,19 +197,36 @@ def make_profiles_plots( def main(cfg): """ - Load the config file, and send it to the plot maker. + Run the diagnostics profile tool. + + Load the config file, find an observational dataset filename, + pass loaded into the plot making tool. 
+
+    Parameters
+    ----------
+    cfg: dict
+        the opened global config dictionary, passed by ESMValTool.
-
-    The cfg is the opened global config.
     """
     for index, metadata_filename in enumerate(cfg['input_files']):
-        logger.info(
-            'metadata filename:\t%s',
-            metadata_filename
-        )
+        logger.info('metadata filename:\t%s', metadata_filename)
         metadatas = diagtools.get_input_files(cfg, index=index)
+
+        obs_key = 'observational_dataset'
+        obs_filename = ''
+        obs_metadata = {}
+        if obs_key in cfg:
+            obs_filename = diagtools.match_model_to_key(obs_key,
+                                                        cfg[obs_key],
+                                                        metadatas)
+            obs_metadata = metadatas[obs_filename]
+
         for filename in sorted(metadatas.keys()):
+            if filename == obs_filename:
+                continue
+
             logger.info('-----------------')
             logger.info(
                 'model filenames:\t%s',
@@ -159,7 +235,9 @@ def main(cfg):
 
         ######
         # Time series of individual model
-        make_profiles_plots(cfg, metadatas[filename], filename)
+        make_profiles_plots(cfg, metadatas[filename], filename,
+                            obs_metadata=obs_metadata,
+                            obs_filename=obs_filename)
 
     logger.info('Success')
diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py b/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py
new file mode 100644
index 0000000000..7b99926bce
--- /dev/null
+++ b/esmvaltool/diag_scripts/ocean/diagnostic_seaice.py
@@ -0,0 +1,701 @@
+"""
+Sea Ice Diagnostics.
+====================
+
+Diagnostic to produce a series of images which are useful for evaluating
+the behaviour of a sea ice model.
+
+There are three kinds of plots shown here:
+
+1. Sea ice extent map plots with a stereoscopic projection.
+2. Map plots of individual models' ice fraction.
+3. Time series plots of the total ice extent.
+
+All three kinds of plots are made for both summer and winter in both the
+Northern and Southern hemispheres.
+
+Note that this diagnostic assumes that the preprocessors do the bulk of the
+hard work, and that the cube received by this diagnostic (via the settings.yml
+and metadata.yml files) has latitude and longitude coordinates.
+
+This diagnostic takes data from either the Northern or Southern hemisphere,
+and from either December-January-February or June-July-August. It requires
+the data to be 2D+time, and typically expects the data field to be
+the sea ice cover.
+An appropriate preprocessor would be::
+
+    preprocessors:
+      timeseries_NHW_ice_extent: # North Hemisphere Winter ice_extent
+        custom_order: true
+        extract_time:
+          start_year: 1960
+          start_month: 12
+          start_day: 1
+          end_year: 2005
+          end_month: 9
+          end_day: 30
+        extract_season:
+          season: DJF
+        extract_region:
+          start_longitude: -180.
+          end_longitude: 180.
+          start_latitude: 0.
+          end_latitude: 90.
+
+Note that this recipe may not function on machines with no access to the
+internet, as cartopy may try to download the shapefiles. The solution to
+this issue is to put the relevant cartopy shapefiles on a disk visible to
+your machine, then link that path to ESMValTool via the `auxiliary_data_dir`
+variable. The cartopy masking files can be downloaded from::
+
+    https://www.naturalearthdata.com/downloads/
+
+Here, cartopy uses the 1:110m physical coastlines and land files::
+
+    110m_coastline.dbf  110m_coastline.shp  110m_coastline.shx
+    110m_land.dbf  110m_land.shp  110m_land.shx
+
+This tool is part of the ocean diagnostic tools package in the ESMValTool.
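+
+The ice fraction threshold used by these plots is read from the script
+section of the recipe. An illustrative script entry (the script name and
+threshold value here are only examples) would be::
+
+    scripts:
+      ice_extent:
+        script: ocean/diagnostic_seaice.py
+        threshold: 15.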
+ +Author: Lee de Mora (PML) + ledm@pml.ac.uk +""" +import logging +import os +import sys +from itertools import product + +import cartopy +import iris +import iris.quickplot as qplt +import matplotlib +import matplotlib.pyplot as plt +import numpy as np + +from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools +from esmvaltool.diag_scripts.shared import run_diagnostic + +# This part sends debug statements to stdout +logger = logging.getLogger(os.path.basename(__file__)) +logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) + + +# Note that this recipe may not function on machines with no access to +# the internet, as cartopy may try to download geographic files. + + +def create_ice_cmap(threshold=0.15): + """ + Create colour map with ocean blue below a threshold and white above. + + Parameters + ---------- + threshold: float + The threshold for the line between blue and white. + + Returns + ------- + matplotlib.colors.LinearSegmentedColormap: + The resulting colour map. + + """ + threshold = threshold / 100. + ice_cmap_dict = { + 'red': ((0., 0.0313, 0.0313), (threshold, 0.0313, 1.), (1., 1., 1.)), + 'green': ((0., 0.237, 0.237), (threshold, 0.237, 1.), (1., 1., 1.)), + 'blue': ((0., 0.456, 0.456), (threshold, 0.456, 1.), (1., 1., 1.)) + } + + return matplotlib.colors.LinearSegmentedColormap('ice_cmap', ice_cmap_dict) + + +def calculate_area_time_series(cube, plot_type, threshold): + """ + Calculate the area of unmasked cube cells. + + Requires a cube with two spacial dimensions. (no depth coordinate). + + Parameters + ---------- + cube: iris.cube.Cube + Data Cube + plot_type: str + The type of plot: ice extent or ice area + threshold: float + The threshold for ice fraction (typically 15%) + + Returns + ------- + numpy array: + An numpy array containing the time points. + numpy.array: + An numpy array containing the total ice extent or total ice area. + + """ + data = [] + times = diagtools.cube_time_to_float(cube) + for time_itr, time in enumerate(times): + icedata = cube[time_itr].data + + area = iris.analysis.cartography.area_weights(cube[time_itr]) + if plot_type.lower() == 'ice extent': + # Ice extend is the area with more than 15% ice cover. + icedata = np.ma.masked_where(icedata < threshold, icedata) + total_area = np.ma.masked_where(icedata.mask, area.data).sum() + if plot_type.lower() == 'ice area': + # Ice area is cover * cell area + total_area = np.sum(icedata * area) + + logger.debug('Calculating time series area: %s, %s, %s,', time_itr, + time, total_area) + data.append(total_area) + + ###### + # Create a small dummy output array + data = np.array(data) + return times, data + + +def make_ts_plots( + cfg, + metadata, + filename, +): + """ + Make a ice extent and ice area time series plot for an individual model. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + filename: str + The preprocessed model file. + + """ + # Load cube and set up units + cube = iris.load_cube(filename) + cube = diagtools.bgc_units(cube, metadata['short_name']) + cube = agregate_by_season(cube) + + # Is this data is a multi-model dataset? + multi_model = metadata['dataset'].find('MultiModel') > -1 + + # Make a dict of cubes for each layer. + cubes = diagtools.make_cube_layer_dict(cube) + + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # # Load threshold, pole, season. 
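+    # 'threshold' (an ice fraction, in percent) comes from the recipe's
+    # script section; the pole and season are inferred from the cube's
+    # latitude and clim_season coordinates.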
+ threshold = float(cfg['threshold']) + pole = get_pole(cube) + season = get_season(cube) + + # Making plots for each layer + for plot_type in ['Ice Extent', 'Ice Area']: + for layer_index, (layer, cube_layer) in enumerate(cubes.items()): + layer = str(layer) + + times, data = calculate_area_time_series(cube_layer, plot_type, + threshold) + + plt.plot(times, data) + + # Add title to plot + title = ' '.join( + [metadata['dataset'], pole, 'hemisphere', season, plot_type]) + if layer: + title = ' '.join([ + title, '(', layer, + str(cube_layer.coords('depth')[0].units), ')' + ]) + plt.title(title) + + # y axis label: + plt.ylabel(' '.join([plot_type, 'm^2'])) + + # Determine image filename: + suffix = '_'.join(['ts', metadata['preprocessor'], season, pole, + plot_type, str(layer_index)])\ + + image_extention + suffix = suffix.replace(' ', '') + if multi_model: + path = diagtools.folder( + cfg['plot_dir']) + os.path.basename(filename) + path = path.replace('.nc', suffix) + else: + path = diagtools.get_image_path( + cfg, + metadata, + suffix=suffix, + ) + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + + plt.close() + + +def make_polar_map( + cube, + pole='North', + cmap='Blues_r', +): + """ + Make a polar stereoscopic map plot. + + The cube is the opened cube (two dimensional), + pole is the polar region (North/South) + cmap is the colourmap, + + Parameters + ---------- + cube: iris.cube.Cube + Data Cube + pole: str + The hemisphere + cmap: str + The string describing the matplotlib colourmap. + + Returns + ---------- + matplotlib.pyplot.figure: + The matplotlib figure where the map was drawn. + matplotlib.pyplot.axes: + The matplotlib axes where the map was drawn. + + """ + fig = plt.figure() + fig.set_size_inches(7, 7) + + # #### + # Set limits, based on https://nedbatchelder.com/blog/200806/pylint.html + + if pole not in ['North', 'South']: + logger.fatal('make_polar_map: hemisphere not provided.') + + if pole == 'North': # North Hemisphere + ax1 = plt.subplot(111, projection=cartopy.crs.NorthPolarStereo()) + ax1.set_extent([-180, 180, 50, 90], cartopy.crs.PlateCarree()) + + if pole == 'South': # South Hemisphere + ax1 = plt.subplot(111, projection=cartopy.crs.SouthPolarStereo()) + ax1.set_extent([-180, 180, -90, -50], cartopy.crs.PlateCarree()) + + linrange = np.linspace(0., 100., 21.) + qplt.contourf(cube, linrange, cmap=cmap, linewidth=0, rasterized=True) + plt.tight_layout() + + try: + ax1.add_feature( + cartopy.feature.LAND, + zorder=10, + facecolor=[0.8, 0.8, 0.8], + ) + except ConnectionRefusedError: + logger.error('Cartopy was unable add coastlines due to a ' + 'connection error.') + ax1.gridlines( + linewidth=0.5, color='black', zorder=20, alpha=0.5, linestyle='--') + try: + plt.gca().coastlines() + except AttributeError: + logger.warning('make_polar_map: Not able to add coastlines') + return fig + + +def get_pole(cube): + """ + Figure out the hemisphere and returns it as a string (North or South). + + Parameters + ---------- + cube: iris.cube.Cube + Data Cube + + Returns + ---------- + str: + The hemisphere (North or South) + + """ + margin = 5. + if np.max(cube.coord('latitude').points) < 0. + margin: + return 'South' + if np.min(cube.coord('latitude').points) > 0. - margin: + return 'North' + logger.fatal('get_pole: Not able to determine hemisphere.') + return False + + +def get_time_string(cube): + """ + Return a climatological season string in the format: "year season". 
+ + Parameters + ---------- + cube: iris.cube.Cube + Data Cube + + Returns + ---------- + str: + The climatological season as a string + + """ + season = cube.coord('clim_season').points + year = cube.coord('year').points + return str(int(year[0])) + ' ' + season[0].upper() + + +def get_year(cube): + """ + Return the cube year as a string. + + Parameters + ---------- + cube: iris.cube.Cube + Data Cube + + Returns + ---------- + str: + The year as a string + + """ + year = cube.coord('year').points + return str(int(year)) + + +def get_season(cube): + """ + Return a climatological season time string. + + Parameters + ---------- + cube: iris.cube.Cube + Data Cube + + Returns + ---------- + str: + The climatological season as a string + + """ + season = cube.coord('clim_season').points + return season[0].upper() + + +def make_map_plots( + cfg, + metadata, + filename, +): + """ + Make a simple map plot for an individual model. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + filename: str + The preprocessed model file. + + """ + # Load cube and set up units + cube = iris.load_cube(filename) + cube = diagtools.bgc_units(cube, metadata['short_name']) + cube = agregate_by_season(cube) + + # Is this data is a multi-model dataset? + multi_model = metadata['dataset'].find('MultiModel') > -1 + + # Make a dict of cubes for each layer. + cubes = diagtools.make_cube_layer_dict(cube) + + # Load image format extention and threshold. + image_extention = diagtools.get_image_format(cfg) + threshold = float(cfg['threshold']) + + # Making plots for each layer + plot_types = ['Fractional cover', 'Ice Extent'] + plot_times = [0, -1] + for plot_type, plot_time in product(plot_types, plot_times): + for layer_index, (layer, cube_layer) in enumerate(cubes.items()): + layer = str(layer) + + if plot_type == 'Fractional cover': + cmap = 'Blues_r' + if plot_type == 'Ice Extent': + cmap = create_ice_cmap(threshold) + + cube = cube_layer[plot_time] + + # use cube to determine which hemisphere, season and year. + pole = get_pole(cube) + time_str = get_time_string(cube) + + # Make the polar map. + make_polar_map(cube, pole=pole, cmap=cmap) + + # Add title to plot + title = ' '.join([metadata['dataset'], plot_type, time_str]) + if layer: + title = ' '.join([ + title, '(', layer, + str(cube_layer.coords('depth')[0].units), ')' + ]) + plt.title(title) + + # Determine image filename: + suffix = '_'.join( + ['ortho_map', plot_type, time_str, + str(layer_index)]) + suffix = suffix.replace(' ', '') + image_extention + if multi_model: + path = diagtools.folder(cfg['plot_dir']) + path = path + os.path.basename(filename) + path = path.replace('.nc', suffix) + else: + path = diagtools.get_image_path( + cfg, + metadata, + suffix=suffix, + ) + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + + plt.close() + + +def agregate_by_season(cube): + """ + Aggregate the cube into seasonal means. + + Note that it is not currently possible to do this in the preprocessor, + as the seasonal mean changes the cube units. 
+ + Parameters + ---------- + cube: iris.cube.Cube + Data Cube + + Returns + ---------- + iris.cube.Cube: + Data Cube with the seasonal means + + """ + if not cube.coords('clim_season'): + iris.coord_categorisation.add_season(cube, 'time', name='clim_season') + if not cube.coords('season_year'): + iris.coord_categorisation.add_season_year( + cube, 'time', name='season_year') + return cube.aggregated_by(['clim_season', 'season_year'], + iris.analysis.MEAN) + + +def make_map_extent_plots( + cfg, + metadata, + filename, +): + """ + Make an extent map plot showing several times for an individual model. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + filename: str + The preprocessed model file. + + """ + # Load cube and set up units + cube = iris.load_cube(filename) + cube = diagtools.bgc_units(cube, metadata['short_name']) + cube = agregate_by_season(cube) + + # Is this data is a multi-model dataset? + multi_model = metadata['dataset'].find('MultiModel') > -1 + + # Make a dict of cubes for each layer. + cubes = diagtools.make_cube_layer_dict(cube) + + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # Load threshold, pole and season + threshold = float(cfg['threshold']) + pole = get_pole(cube) + season = get_season(cube) + + # Start making figure + for layer_index, (layer, cube_layer) in enumerate(cubes.items()): + + fig = plt.figure() + fig.set_size_inches(7, 7) + + if pole == 'North': # North Hemisphere + projection = cartopy.crs.NorthPolarStereo() + ax1 = plt.subplot(111, projection=projection) + ax1.set_extent([-180, 180, 50, 90], cartopy.crs.PlateCarree()) + + if pole == 'South': # South Hemisphere + projection = cartopy.crs.SouthPolarStereo() + ax1 = plt.subplot(111, projection=projection) + ax1.set_extent([-180, 180, -90, -50], cartopy.crs.PlateCarree()) + try: + ax1.add_feature( + cartopy.feature.LAND, zorder=10, facecolor=[0.8, 0.8, 0.8]) + except ConnectionRefusedError: + logger.error('Cartopy was unable add coastlines due to a ' + 'connection error.') + + ax1.gridlines( + linewidth=0.5, color='black', zorder=20, alpha=0.5, linestyle='--') + + try: + plt.gca().coastlines() + except AttributeError: + logger.warning('make_polar_map: Not able to add coastlines') + + times = np.array(cube.coord('time').points.astype(float)) + plot_desc = {} + for time_itr, time in enumerate(times): + cube = cube_layer[time_itr] + line_width = 1 + color = plt.cm.jet(float(time_itr) / float(len(times))) + label = get_year(cube) + plot_desc[time] = {'label': label, + 'c': [color, ], + 'lw': [line_width, ], + 'ls': ['-', ]} + + layer = str(layer) + qplt.contour(cube, + [threshold, ], + colors=plot_desc[time]['c'], + linewidths=plot_desc[time]['lw'], + linestyles=plot_desc[time]['ls'], + rasterized=True) + + # Add legend + legend_size = len(plot_desc) + 1 + ncols = int(legend_size / 25) + 1 + ax1.set_position([ + ax1.get_position().x0, + ax1.get_position().y0, + ax1.get_position().width * (1. - 0.1 * ncols), + ax1.get_position().height + ]) + + fig.set_size_inches(7 + ncols * 1.2, 7) + + # Construct dummy plots. + for i in sorted(plot_desc): + plt.plot( + [], + [], + c=plot_desc[i]['c'][0], + lw=plot_desc[i]['lw'][0], + ls=plot_desc[i]['ls'][0], + label=plot_desc[i]['label'], + ) + + legd = ax1.legend( + loc='center left', + ncol=ncols, + prop={'size': 10}, + bbox_to_anchor=(1., 0.5)) + legd.draw_frame(False) + legd.get_frame().set_alpha(0.) 
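+
+        # Note: the empty plt.plot([], [], ...) calls above exist because
+        # contour lines do not generate their own legend handles; each dummy
+        # line stands in for one year's ice extent contour in the legend.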
+ + # Add title to plot + title = ' '.join([ + metadata['dataset'], + ]) + if layer: + title = ' '.join([ + title, '(', layer, + str(cube_layer.coords('depth')[0].units), ')' + ]) + plt.title(title) + + # Determine image filename: + suffix = '_'.join(['ortho_map', pole, season, str(layer_index)]) + suffix = suffix.replace(' ', '') + image_extention + if multi_model: + path = diagtools.folder(cfg['plot_dir']) + path = path + os.path.basename(filename) + path = path.replace('.nc', suffix) + else: + path = diagtools.get_image_path( + cfg, + metadata, + suffix=suffix, + ) + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + plt.close() + + +def main(cfg): + """ + Load the config file and metadata, then pass them the plot making tools. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + + """ + cartopy.config['data_dir'] = cfg['auxiliary_data_dir'] + + for index, metadata_filename in enumerate(cfg['input_files']): + logger.info( + 'metadata filename:\t%s', + metadata_filename, + ) + + metadatas = diagtools.get_input_files(cfg, index=index) + for filename in sorted(metadatas): + + logger.info('-----------------') + logger.info( + 'model filenames:\t%s', + filename, + ) + ###### + # extent maps plots of individual models + make_map_extent_plots(cfg, metadatas[filename], filename) + + ###### + # maps plots of individual models + make_map_plots(cfg, metadatas[filename], filename) + + ###### + # time series plots o + make_ts_plots(cfg, metadatas[filename], filename) + + logger.info('Success') + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py b/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py index 67e38557f1..4817635a1c 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_timeseries.py @@ -1,7 +1,8 @@ """ -Diagnostic profile: +Time series diagnostics +======================= -Diagnostic to produce png images of the time development of a metric from +Diagnostic to produce figures of the time development of a field from cubes. These plost show time on the x-axis and cube value (ie temperature) on the y-axis. @@ -16,25 +17,33 @@ and metadata.yml files) has a time component, no depth component, and no latitude or longitude coordinates. -Some approproate preprocessors for a 3D+time field would be: +An approproate preprocessor for a 3D+time field would be:: -preprocessors: - prep_timeseries_1:# For Global Volume Averaged - average_volume: - coord1: longitude - coord2: latitude - coordz: depth - prep_timeseries_2: # For Global surface Averaged - extract_levels: - levels: [0., ] - scheme: linear_extrap - average_area: - coord1: longitude - coord2: latitude + preprocessors: + prep_timeseries_1:# For Global Volume Averaged + average_volume: + coord1: longitude + coord2: latitude + coordz: depth +An approproate preprocessor for a 3D+time field at the surface would be:: -This tool is part of the ocean diagnostic tools package in the ESMValTool. 
+      prep_timeseries_2: # For Global surface Averaged
+        extract_levels:
+          levels: [0., ]
+          scheme: linear_extrap
+        average_area:
+          coord1: longitude
+          coord2: latitude
+
+An appropriate preprocessor for a 2D+time field would be::
+
+      prep_timeseries_3: # For a 2D field, global area average
+        average_area:
+          coord1: longitude
+          coord2: latitude
+
+This tool is part of the ocean diagnostic tools package in the ESMValTool.
 
 Author: Lee de Mora (PML)
       ledm@pml.ac.uk
@@ -44,10 +53,10 @@
 import os
 
 import iris
-import iris.quickplot as qplt
 import matplotlib.pyplot as plt
+import numpy as np
 
-import diagnostic_tools as diagtools
+from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools
 from esmvaltool.diag_scripts.shared import run_diagnostic
 
 # This part sends debug statements to stdout
@@ -56,15 +65,113 @@
 
 def timeplot(cube, **kwargs):
     """
-    Make a time series plot
+    Create a time series plot from the cube.
+
+    Note that this function simply does the plotting; it does not save the
+    image or do any of the complex work. It also accepts any of the keyword
+    arguments of the matplotlib.pyplot.plot function, typically color,
+    linewidth, linestyle, etc.
+
+    If there's only one datapoint in the cube, it is plotted as a
+    horizontal line.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        Input cube
+
+    """
+    cubedata = np.ma.array(cube.data)
+    if len(cubedata.compressed()) == 1:
+        plt.axhline(cubedata.compressed(), **kwargs)
+        return
+
+    times = diagtools.cube_time_to_float(cube)
+    plt.plot(times, cubedata, **kwargs)
+
 
-    Needed because iris version 1.13 fails due to the time axis.
+def moving_average(cube, window):
     """
-    if iris.__version__ > '2.0':
-        qplt.plot(cube, kwargs)
-    else:
-        times = diagtools.timecoord_to_float(cube.coord('time'))
-        plt.plot(times, cube.data, **kwargs)
+    Calculate a moving average.
+
+    The window is a string containing a number and a measurement of time.
+    For instance, the following are acceptable window strings:
+
+    * ``5 days``
+    * ``12 years``
+    * ``1 month``
+    * ``5 yr``
+
+    Note that the value given is the total width of the window.
+    For instance, if the window provided is '10 years', the moving
+    average returned is the average of all values within 5 years
+    of the central value.
+
+    At the start and end of the data, the average only includes the data
+    available. For example, the first value in the moving average of a
+    ``10 year`` window only averages the data from the five subsequent
+    years.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        Input cube
+    window: str
+        A description of the window to use for the moving average.
+
+    Returns
+    ----------
+    iris.cube.Cube:
+        A cube with the moving average set as the data points.
+
+    """
+    window = window.split()
+    window_len = int(window[0]) / 2.
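+    # Half-width of the window: e.g. a window of '10 years' gives
+    # window_len = 5.0, so each output point averages the data within
+    # five years either side of its own time stamp.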
+ win_units = str(window[1]) + + if win_units not in [ + 'days', 'day', 'dy', 'months', 'month', 'mn', 'years', 'yrs', + 'year', 'yr' + ]: + raise ValueError("Moving average window units not recognised: " + + "{}".format(win_units)) + + times = cube.coord('time').units.num2date(cube.coord('time').points) + + datetime = diagtools.guess_calendar_datetime(cube) + + output = [] + + times = np.array([ + datetime(time_itr.year, time_itr.month, time_itr.day, time_itr.hour, + time_itr.minute) for time_itr in times + ]) + + for time_itr in times: + if win_units in ['years', 'yrs', 'year', 'yr']: + tmin = datetime(time_itr.year - window_len, time_itr.month, + time_itr.day, time_itr.hour, time_itr.minute) + tmax = datetime(time_itr.year + window_len, time_itr.month, + time_itr.day, time_itr.hour, time_itr.minute) + + if win_units in ['months', 'month', 'mn']: + tmin = datetime(time_itr.year, time_itr.month - window_len, + time_itr.day, time_itr.hour, time_itr.minute) + tmax = datetime(time_itr.year, time_itr.month + window_len, + time_itr.day, time_itr.hour, time_itr.minute) + + if win_units in ['days', 'day', 'dy']: + tmin = datetime(time_itr.year, time_itr.month, + time_itr.day - window_len, time_itr.hour, + time_itr.minute) + tmax = datetime(time_itr.year, time_itr.month, + time_itr.day + window_len, time_itr.hour, + time_itr.minute) + + arr = np.ma.masked_where((times < tmin) + (times > tmax), cube.data) + output.append(arr.mean()) + cube.data = np.array(output) + return cube def make_time_series_plots( @@ -73,11 +180,21 @@ def make_time_series_plots( filename, ): """ - Make a simple plot for an indivudual model. + Make a simple time series plot for an indivudual model 1D cube. + + This tool loads the cube from the file, checks that the units are + sensible BGC units, checks for layers, adjusts the titles accordingly, + determines the ultimate file name and format, then saves the image. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + filename: str + The preprocessed model file. - The cfg is the opened global config, - metadata is the metadata dictionairy - filename is the preprocessing model file. """ # Load cube and set up units cube = iris.load_cube(filename) @@ -89,9 +206,14 @@ def make_time_series_plots( # Make a dict of cubes for each layer. 
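+    # Each dict key is a depth layer; a cube with no depth coordinate
+    # produces a single entry whose key is the empty string.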
cubes = diagtools.make_cube_layer_dict(cube) + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + # Making plots for each layer for layer_index, (layer, cube_layer) in enumerate(cubes.items()): layer = str(layer) + if 'moving_average' in cfg: + cube_layer = moving_average(cube_layer, cfg['moving_average']) if multi_model: timeplot(cube_layer, label=metadata['dataset'], ls=':') @@ -100,23 +222,24 @@ def make_time_series_plots( # Add title, legend to plots title = ' '.join([metadata['dataset'], metadata['long_name']]) - if layer: - title = ' '.join( - [title, '(', layer, - str(cube_layer.coords('depth')[0].units), ')']) + if layer != '': + if cube_layer.coords('depth'): + z_units = cube_layer.coord('depth').units + else: + z_units = '' + title = ' '.join([title, '(', layer, str(z_units), ')']) plt.title(title) plt.legend(loc='best') + plt.ylabel(str(cube_layer.units)) - # Determine png filename: + # Determine image filename: if multi_model: - # path = diagtools.folder( - # cfg['plot_dir']) + os.path.basename(filename).replace( - # '.nc', '_timeseries_' + str(l) + '.png') path = diagtools.get_image_path( cfg, metadata, prefix='MultiModel', - suffix='_'.join(['timeseries', str(layer) + '.png']), + suffix='_'.join(['timeseries', + str(layer) + image_extention]), metadata_id_list=[ 'field', 'short_name', 'preprocessor', 'diagnostic', 'start_year', 'end_year' @@ -127,7 +250,7 @@ def make_time_series_plots( path = diagtools.get_image_path( cfg, metadata, - suffix='timeseries_' + str(layer_index) + '.png', + suffix='timeseries_' + str(layer_index) + image_extention, ) # Saving files: @@ -144,12 +267,21 @@ def multi_model_time_series( metadata, ): """ - Make a time series plot showing several models. + Make a time series plot showing several preprocesssed datasets. + + This tool loads several cubes from the files, checks that the units are + sensible BGC units, checks for layers, adjusts the titles accordingly, + determines the ultimate file name and format, then saves the image. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. - This function makes a simple plot for an indivudual model. - The cfg is the opened global config, - metadata is the metadata dictionairy. """ + #### # Load the data for each layer as a separate cube model_cubes = {} @@ -163,6 +295,9 @@ def multi_model_time_series( for layer in cubes: layers[layer] = True + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + # Make a plot for each layer for layer in layers: @@ -173,11 +308,21 @@ def multi_model_time_series( # Plot each file in the group for index, filename in enumerate(sorted(metadata)): - color = cmap(index / (len(metadata) - 1.)) + if len(metadata) > 1: + color = cmap(index / (len(metadata) - 1.)) + else: + color = 'blue' + + # Take a moving average, if needed. 
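+            # cfg['moving_average'] is a window string such as '5 years';
+            # see the moving_average function above for the accepted formats.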
+ if 'moving_average' in cfg: + cube = moving_average(model_cubes[filename][layer], + cfg['moving_average']) + else: + cube = model_cubes[filename][layer] if 'MultiModel' in metadata[filename]['dataset']: timeplot( - model_cubes[filename][layer], + cube, c=color, # label=metadata[filename]['dataset'], ls=':', @@ -191,7 +336,7 @@ def multi_model_time_series( } else: timeplot( - model_cubes[filename][layer], + cube, c=color, # label=metadata[filename]['dataset']) ls='-', @@ -206,13 +351,16 @@ def multi_model_time_series( title = metadata[filename]['long_name'] if layer != '': - z_units = model_cubes[filename][layer].coords('depth')[0].units - + if model_cubes[filename][layer].coords('depth'): + z_units = model_cubes[filename][layer].coord('depth').units + else: + z_units = '' # Add title, legend to plots if layer: title = ' '.join([title, '(', str(layer), str(z_units), ')']) plt.title(title) plt.legend(loc='best') + plt.ylabel(str(model_cubes[filename][layer].units)) # Saving files: if cfg['write_plots']: @@ -220,7 +368,8 @@ def multi_model_time_series( cfg, metadata[filename], prefix='MultipleModels_', - suffix='_'.join(['timeseries', str(layer) + '.png']), + suffix='_'.join(['timeseries', + str(layer) + image_extention]), metadata_id_list=[ 'field', 'short_name', 'preprocessor', 'diagnostic', 'start_year', 'end_year' @@ -239,15 +388,17 @@ def multi_model_time_series( def main(cfg): """ - Load the config file, and send it to the plot maker. + Load the config file and some metadata, then pass them the plot making + tools. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. - The cfg is the opened global config. """ for index, metadata_filename in enumerate(cfg['input_files']): - logger.info( - 'metadata filename:\t%s', - metadata_filename - ) + logger.info('metadata filename:\t%s', metadata_filename) metadatas = diagtools.get_input_files(cfg, index=index) @@ -258,7 +409,7 @@ def main(cfg): metadatas, ) - for filename in sorted(metadatas.keys()): + for filename in sorted(metadatas): logger.info('-----------------') logger.info( diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_tools.py b/esmvaltool/diag_scripts/ocean/diagnostic_tools.py index 1998050fd2..03b2dbad7f 100644 --- a/esmvaltool/diag_scripts/ocean/diagnostic_tools.py +++ b/esmvaltool/diag_scripts/ocean/diagnostic_tools.py @@ -1,5 +1,6 @@ """ -Diagnostic tools: +Diagnostic tools +================ This module contains several python tools used elsewhere by the ocean diagnostics package. @@ -12,15 +13,38 @@ import logging import os import sys -import yaml +import iris +import numpy as np +import cftime import matplotlib.pyplot as plt +import yaml + +from esmvaltool.diag_scripts.shared._base import _get_input_data_files # This part sends debug statements to stdout logger = logging.getLogger(os.path.basename(__file__)) logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) +def get_obs_projects(): + """ + Return a list of strings with the names of observations projects. + + Please keep this list up to date, or replace it with something more + sensible. + + Returns + --------- + list + Returns a list of strings of the various types of observational data. + """ + obs_projects = [ + 'obs4mips', + ] + return obs_projects + + def folder(name): """ Make a directory out of a string or list or strings. @@ -28,6 +52,16 @@ def folder(name): Take a string or a list of strings, convert it to a directory style, then make the folder and the string. 
Returns folder string and final character is always os.sep. ('/') + + Arguments + --------- + name: list or string + A list of nested directories, or a path to a directory. + + Returns + --------- + str + Returns a string of a full (potentially new) path of the directory. """ sep = os.sep if isinstance(name, list): @@ -40,16 +74,32 @@ def folder(name): return name -def get_input_files(cfg, index=0): +def get_input_files(cfg, index=''): """ - Load input configuration file. + Load input configuration file as a Dictionairy. Get a dictionary with input files from the metadata.yml files. + This is a wrappper for the _get_input_data_files function from + diag_scripts.shared._base. + + Arguments + --------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + index: int + the index of the file in the cfg file. + + Returns + --------- + dict + A dictionairy of the input files and their linked details. """ - metadata_file = cfg['input_files'][index] - with open(metadata_file) as input_file: - metadata = yaml.safe_load(input_file) - return metadata + if isinstance(index, int): + metadata_file = cfg['input_files'][index] + with open(metadata_file) as input_file: + metadata = yaml.safe_load(input_file) + return metadata + return _get_input_data_files(cfg) def bgc_units(cube, name): @@ -58,22 +108,42 @@ def bgc_units(cube, name): This is because many CMIP standard units are not the standard units used by the BGC community (ie, Celsius is prefered over Kelvin, etc.) + + Parameters + ---------- + cube: iris.cube.Cube + the opened dataset as a cube. + name: str + The string describing the data field. + + Returns + ------- + iris.cube.Cube + the cube with the new units. """ new_units = '' - if name in ['tos', 'thetao']: new_units = 'celsius' - if name in [ - 'no3', - ]: + if name in ['no3', ]: new_units = 'mmol m-3' - if name in [ - 'chl', - ]: + if name in ['chl', ]: new_units = 'mg m-3' + if name in ['intpp', ]: + new_units = 'mol m-2 d-1' + + if name in ['fgco2', ]: + new_units = 'g m-2 d-1' + + if name in ['spco2', 'dpco2', ]: + new_units = 'uatm' + + if name in ['mfo', 'amoc', 'msftmyz']: + # sverdrup are 1000000 m3.s-1, but mfo is kg s-1. + new_units = 'Tg s-1' + if new_units != '': logger.info(' '.join( ["Changing units from", @@ -83,93 +153,399 @@ def bgc_units(cube, name): return cube -def timecoord_to_float(times): +def match_model_to_key( + model_type, + cfg_dict, + input_files_dict, +): + """ + Match up model or observations dataset dictionairies from config file. + + This function checks that the control_model, exper_model and + observational_dataset dictionairies from the recipe are matched with the + input file dictionairy in the cfg metadata. + + Arguments + --------- + model_type: str + The string model_type to match (only used in debugging). + cfg_dict: dict + the config dictionairy item for this model type, parsed directly from + the diagnostics/ scripts, part of the recipe. + input_files_dict: dict + The input file dictionairy, loaded directly from the get_input_files() + function, in diagnostics_tools.py. + + Returns + --------- + dict + A dictionairy of the input files and their linked details. 
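+
+    A minimal, hypothetical illustration of the matching::
+
+        match_model_to_key('observational_dataset',
+                           {'dataset': 'WOA'},
+                           {'/path/obs.nc': {'dataset': 'WOA', 'mip': 'Omon'}})
+        # returns '/path/obs.nc'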
+    """
+    for input_file, input_dict in input_files_dict.items():
+        intersect_keys = input_dict.keys() & cfg_dict.keys()
+        match = True
+        for key in intersect_keys:
+            if input_dict[key] == cfg_dict[key]:
+                continue
+            match = False
+        if match:
+            return input_file
+    logger.warning("Unable to match model: %s", model_type)
+    return ''
+
+
+def cube_time_to_float(cube):
     """
     Convert from time coordinate into decimal time.
 
     Takes an iris time coordinate and returns a list of floats.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        the opened dataset as a cube.
+
+    Returns
+    -------
+    list
+        List of floats showing the time coordinate in decimal time.
+
     """
+    times = cube.coord('time')
+    datetime = guess_calendar_datetime(cube)
+
     dtimes = times.units.num2date(times.points)
     floattimes = []
     for dtime in dtimes:
         # TODO: it would be better to have a calendar dependent value
         # for daysperyear, as this is not accurate for 360 day calendars.
         daysperyear = 365.25
-        floattime = dtime.year + dtime.dayofyr / daysperyear + dtime.hour / (
-            24. * daysperyear)
+
+        try:
+            dayofyr = dtime.dayofyr
+        except AttributeError:
+            time = datetime(dtime.year, dtime.month, dtime.day)
+            time0 = datetime(dtime.year, 1, 1, 0, 0)
+            dayofyr = (time - time0).days
+
+        # The hour contribution is added once, in the conditional below.
+        floattime = dtime.year + dayofyr / daysperyear
+        if dtime.hour:
+            floattime += dtime.hour / (24. * daysperyear)
         if dtime.minute:
             floattime += dtime.minute / (24. * 60. * daysperyear)
         floattimes.append(floattime)
     return floattimes
 
 
-def add_legend_outside_right(plot_details, ax1, column_width=0.1):
+def guess_calendar_datetime(cube):
+    """
+    Guess the cftime.datetime form to create datetimes.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        the opened dataset as a cube.
+
+    Returns
+    -------
+    cftime.datetime
+        A datetime creator function from cftime, based on the cube's calendar.
+    """
+    time_coord = cube.coord('time')
+
+    if time_coord.units.calendar in ['360_day', ]:
+        datetime = cftime.Datetime360Day
+    elif time_coord.units.calendar in ['365_day', 'noleap']:
+        datetime = cftime.DatetimeNoLeap
+    elif time_coord.units.calendar in ['julian', ]:
+        datetime = cftime.DatetimeJulian
+    elif time_coord.units.calendar in ['gregorian', ]:
+        datetime = cftime.DatetimeGregorian
+    elif time_coord.units.calendar in ['proleptic_gregorian', ]:
+        datetime = cftime.DatetimeProlepticGregorian
+    else:
+        logger.warning('Calendar set to Gregorian, instead of %s',
+                       time_coord.units.calendar)
+        datetime = cftime.DatetimeGregorian
+    return datetime
+
+
+def get_decade(coord, value):
+    """
+    Determine the decade.
+
+    Called by iris.coord_categorisation.add_categorised_coord.
+    """
+    date = coord.units.num2date(value)
+    return date.year - date.year % 10
+
+
+def decadal_average(cube):
+    """
+    Calculate the decadal average.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        The input cube
+
+    Returns
+    -------
+    iris.cube.Cube
+        The decadal average of the input cube.
+    """
+    # iris.coord_categorisation is a submodule which must be imported
+    # explicitly; a bare 'import iris' does not expose it.
+    import iris.coord_categorisation
+    iris.coord_categorisation.add_categorised_coord(cube, 'decade', 'time',
+                                                    get_decade)
+    return cube.aggregated_by('decade', iris.analysis.MEAN)
+
+
+def load_thresholds(cfg, metadata):
+    """
+    Load the thresholds for contour plots from the config files.
+
+    Parameters
+    ----------
+    cfg: dict
+        the opened global config dictionary, passed by ESMValTool.
+    metadata: dict
+        the metadata dictionary.
+
+    Returns
+    -------
+    list:
+        List of thresholds
+    """
+    thresholds = set()
+
+    if 'threshold' in cfg:
+        thresholds.add(float(cfg['threshold']))
+
+    if 'threshold' in metadata:
+        thresholds.add(float(metadata['threshold']))
+
+    if 'thresholds' in cfg:
+        thresholds.update([float(thres) for thres in cfg['thresholds']])
+
+    if 'thresholds' in metadata:
+        thresholds.update([float(thres) for thres in metadata['thresholds']])
+
+    return sorted(list(thresholds))
+
+
+def get_colour_from_cmap(number, total, cmap='jet'):
+    """
+    Get a colour `number` of `total` from a cmap.
+
+    This function is used when several lines are created evenly along a
+    colour map.
+
+    Parameters
+    ----------
+    number: int, float
+        The index of the colour along the colour map.
+    total: int
+        The total number of colours to draw from the colour map.
+    cmap: string, plt.cm
+        A colour map, either by name (string) or from matplotlib.
+    """
+    if isinstance(cmap, str):
+        cmap = plt.get_cmap(cmap)
+
+    if number > total:
+        raise ValueError('The number cannot be larger than the total '
+                         'length of the list, i.e. {} > {}'.format(
+                             number, total))
+
+    if total > 1:
+        colour = cmap(float(number) / float(total - 1.))
+    else:
+        colour = cmap(0.)
+    return colour
+
+
+def add_legend_outside_right(plot_details, ax1, column_width=0.1, loc='right'):
     """
     Add a legend outside the plot, to the right.
 
     plot_details is a 2 level dict,
     where the first level is some key (which is hidden)
     and the 2nd level contains the keys:
-        'c': color
-        'lw': line width
-        'label': label for the legend.
+    'c': color
+    'lw': line width
+    'label': label for the legend.
     ax1 is the axis where the plot was drawn.
+
+    Parameters
+    ----------
+    plot_details: dict
+        A dictionary of the plot details (color, linestyle, linewidth, label)
+    ax1: matplotlib.pyplot.axes
+        The pyplot axes to add the legend to.
+    column_width: float
+        The width of the legend column. This is used to adjust for longer
+        words in the legends.
+    loc: string
+        Location of the legend. Options are "right" and "below".
+
     """
-    #####
+    # ####
     # Create dummy axes:
-    legend_size = len(plot_details.keys()) + 1
-    ncols = int(legend_size / 25) + 1
+    legend_size = len(plot_details) + 1
     box = ax1.get_position()
-    ax1.set_position(
-        [box.x0, box.y0, box.width * (1. - column_width * ncols), box.height])
+    if loc.lower() == 'right':
+        nrows = 25
+        ncols = int(legend_size / nrows) + 1
+        ax1.set_position([
+            box.x0, box.y0, box.width * (1. - column_width * ncols),
+            box.height
+        ])
+
+    if loc.lower() == 'below':
+        ncols = 4
+        nrows = int(legend_size / ncols) + 1
+        ax1.set_position([
+            box.x0, box.y0 + (nrows * column_width), box.width,
+            box.height - (nrows * column_width)
+        ])
 
     # Add empty plots to dummy axis.
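A short usage sketch of the even colour sampling that `get_colour_from_cmap` provides, written directly against matplotlib (the colormap name is arbitrary)::

    import matplotlib.pyplot as plt

    total = 5
    cmap = plt.get_cmap('viridis')
    # Spread five line colours evenly along the colormap, matching the
    # total > 1 branch of get_colour_from_cmap.
    colours = [cmap(i / (total - 1.)) for i in range(total)]
    for i, colour in enumerate(colours):
        plt.plot([0, 1], [i, i + 1], c=colour)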
- for index in sorted(plot_details.keys()): - - plt.plot( - [], [], - c=plot_details[index]['c'], - lw=plot_details[index]['lw'], - ls=plot_details[index]['ls'], - label=plot_details[index]['label']) - - legd = ax1.legend( - loc='center left', - ncol=ncols, - prop={'size': 10}, - bbox_to_anchor=(1., 0.5)) + for index in sorted(plot_details): + colour = plot_details[index]['c'] + + linewidth = plot_details[index].get('lw', 1) + + linestyle = plot_details[index].get('ls', '-') + + label = plot_details[index].get('label', str(index)) + + plt.plot([], [], c=colour, lw=linewidth, ls=linestyle, label=label) + + if loc.lower() == 'right': + legd = ax1.legend( + loc='center left', + ncol=ncols, + prop={'size': 10}, + bbox_to_anchor=(1., 0.5)) + if loc.lower() == 'below': + legd = ax1.legend( + loc='upper center', + ncol=ncols, + prop={'size': 10}, + bbox_to_anchor=(0.5, -2. * column_width)) legd.draw_frame(False) legd.get_frame().set_alpha(0.) -def get_image_path(cfg, - metadata, - prefix='diag', - suffix='image', - metadata_id_list='default',): +def get_image_format(cfg, default='png'): + """ + Load the image format from the global config file. + + Current tested options are svg, png. + + The cfg is the opened global config. + The default format is used if no specific format is requested. + The default is set in the user config.yml + Individual diagnostics can set their own format which will + supercede the main config.yml. + + Arguments + --------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + + Returns + --------- + str + The image format extention. + """ + image_extention = default + + # Load format from config.yml and set it as default + if 'output_file_type' in cfg: + image_extention = cfg['output_file_type'] + + # Load format from config.yml and set it as default + if 'image_format' in cfg: + image_extention = cfg['image_format'] + + matplotlib_image_formats = plt.gcf().canvas.get_supported_filetypes() + if image_extention not in matplotlib_image_formats: + logger.warning(' '.join([ + 'Image format ', image_extention, 'not in matplot:', + ', '.join(matplotlib_image_formats) + ])) + + image_extention = '.' + image_extention + image_extention = image_extention.replace('..', '.') + return image_extention + + +def get_image_path( + cfg, + metadata, + prefix='diag', + suffix='image', + metadata_id_list='default', +): """ Produce a path to the final location of the image. The cfg is the opened global config, metadata is the metadata dictionairy (for the individual dataset file) + + Arguments + --------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + prefix: str + A string to prepend to the image basename. + suffix: str + A string to append to the image basename + metadata_id_list: list + A list of strings to add to the file path. It loads these from the cfg. + + Returns + --------- + str + The ultimate image path + """ ##### if metadata_id_list == 'default': - metadata_id_list = ['project', 'dataset', 'mip', 'exp', 'ensemble', - 'field', 'short_name', 'preprocessor', - 'diagnostic', 'start_year', 'end_year', ] + metadata_id_list = [ + 'project', + 'dataset', + 'mip', + 'exp', + 'ensemble', + 'field', + 'short_name', + 'preprocessor', + 'diagnostic', + 'start_year', + 'end_year', + ] path = folder(cfg['plot_dir']) if prefix: path += prefix + '_' - path += '_'.join([str(metadata[b]) for b in metadata_id_list]) + # Check that the keys are in the dict. 
+ intersection = [va for va in metadata_id_list if va in metadata] + path += '_'.join([str(metadata[b]) for b in intersection]) if suffix: path += '_' + suffix - image_extention = '.png' + image_extention = get_image_format(cfg) + if path.find(image_extention) == -1: path += image_extention + path = path.replace(' ', '_') + logger.info("Image path will be: %s", path) return path @@ -179,30 +555,130 @@ def make_cube_layer_dict(cube): Take a cube and return a dictionairy layer:cube Each item in the dict is a layer with a separate cube for each layer. - ie: - cubes[depth] = cube from specific layer + ie: cubes[depth] = cube from specific layer + + Cubes with no depth component are returned as dict, where the dict key + is a blank empty string, and the value is the cube. + + Parameters + ---------- + cube: iris.cube.Cube + the opened dataset as a cube. - Cubes with no depth component are returns as: - cubes[''] = cube with no depth component. + Returns + --------- + dict + A dictionairy of layer name : layer cube. """ ##### # Check layering: - depth = cube.coords('depth') - cubes = {} + coords = cube.coords() + layers = [] + for coord in coords: + if coord.standard_name in ['depth', 'region']: + layers.append(coord) - if depth == []: + cubes = {} + if layers == []: cubes[''] = cube - else: - # iris stores coords as a list with one entry: - depth = depth[0] - if len(depth.points) in [ - 1, - ]: - cubes[''] = cube - else: - coord_dim = cube.coord_dims('depth')[0] - for layer_index, layer in enumerate(depth.points): - slices = [slice(None) for index in cube.shape] - slices[coord_dim] = layer_index - cubes[layer] = cube[tuple(slices)] + return cubes + + # if len(layers) > 1: + # # This field has a strange number of layer dimensions. + # # depth and regions? + # print(cube) + # raise ValueError('This cube has both `depth` & `region` coordinates:' + # ' %s', layers) + + # iris stores coords as a list with one entry: + layer_dim = layers[0] + if len(layer_dim.points) in [ + 1, + ]: + cubes[''] = cube + return cubes + + if layer_dim.standard_name == 'depth': + coord_dim = cube.coord_dims('depth')[0] + for layer_index, layer in enumerate(layer_dim.points): + slices = [slice(None) for index in cube.shape] + slices[coord_dim] = layer_index + cubes[layer] = cube[tuple(slices)] + + if layer_dim.standard_name == 'region': + coord_dim = cube.coord_dims('region')[0] + for layer_index, layer in enumerate(layer_dim.points): + slices = [slice(None) for index in cube.shape] + slices[coord_dim] = layer_index + layer = layer.replace('_', ' ').title() + cubes[layer] = cube[tuple(slices)] return cubes + + +def get_cube_range(cubes): + """ + Determinue the minimum and maximum values of a list of cubes. + + Parameters + ---------- + cubes: list of iris.cube.Cube + A list of cubes. + + Returns + ---------- + list: + A list of two values: the overall minumum and maximum values of the + list of cubes. + + """ + mins = [] + maxs = [] + for cube in cubes: + mins.append(cube.data.min()) + maxs.append(cube.data.max()) + return [np.min(mins), np.max(maxs), ] + + +def get_cube_range_diff(cubes): + """ + Determinue the largest deviation from zero in an list of cubes. + + Parameters + ---------- + cubes: list of iris.cube.Cube + A list of cubes. + + Returns + ---------- + list: + A list of two values: the maximum deviation from zero and its opposite. + """ + ranges = [] + for cube in cubes: + ranges.append(np.abs(cube.data.min())) + ranges.append(np.abs(cube.data.max())) + return [-1. 
* np.max(ranges), np.max(ranges)]
+
+
+def get_array_range(arrays):
+    """
+    Determine the minimum and maximum values of a list of arrays.
+
+    Parameters
+    ----------
+    arrays: list of numpy.array
+        A list of numpy.array.
+
+    Returns
+    -------
+    list
+        A list of two values, the overall minimum and maximum values of
+        the list of arrays.
+    """
+    mins = []
+    maxs = []
+    for arr in arrays:
+        mins.append(arr.min())
+        maxs.append(arr.max())
+    logger.info('get_array_range: %s, %s', np.min(mins), np.max(maxs))
+    return [np.min(mins), np.max(maxs), ]
diff --git a/esmvaltool/diag_scripts/ocean/diagnostic_transects.py b/esmvaltool/diag_scripts/ocean/diagnostic_transects.py
index 1fe6b5efda..b217fcd6dc 100644
--- a/esmvaltool/diag_scripts/ocean/diagnostic_transects.py
+++ b/esmvaltool/diag_scripts/ocean/diagnostic_transects.py
@@ -1,37 +1,41 @@
 """
-Diagnostic transect:
+Transects diagnostics
+=====================
 
-Diagnostic to produce png images of a transect.
-These plost show either latitude or longitude against depth, and the cube value
-is used as the colour scale.
+Diagnostic to produce images of a transect. These plots show either latitude
+or longitude against depth, and the cube value is used as the colour scale.
 
 Note that this diagnostic assumes that the preprocessors do the bulk of the
 hard work, and that the cube received by this diagnostic (via the settings.yml
 and metadata.yml files) has no time component, and one of the latitude or
 longitude coordinates has been reduced to a single value.
 
-An approproate preprocessor for a 3D+time field would be:
-preprocessors:
-  prep_transect:
-    time_average:
-    extract_slice: # Atlantic Meridional Transect
-      latitude: [-50.,50.]
-      longitude: 332.
+An appropriate preprocessor for a 3D+time field would be::
+
+  preprocessors:
+    prep_transect:
+      time_average:
+      extract_slice: # Atlantic Meridional Transect
+        latitude: [-50.,50.]
+        longitude: 332.
 
 This tool is part of the ocean diagnostic tools package in the ESMValTool.
 
 Author: Lee de Mora (PML)
     ledm@pml.ac.uk
+
 """
 import logging
 import os
 import sys
+from itertools import product
 
 import iris
 import iris.quickplot as qplt
 import matplotlib.pyplot as plt
+import numpy as np
 
-import diagnostic_tools as diagtools
+from esmvaltool.diag_scripts.ocean import diagnostic_tools as diagtools
 from esmvaltool.diag_scripts.shared import run_diagnostic
 
 # This part sends debug statements to stdout
@@ -39,18 +43,55 @@
 logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
 
 
-def determine_transect_str(cube):
+def titlify(title):
+    """
+    Check whether a title is too long, then add it to the current figure.
+
+    Parameters
+    ----------
+    title: str
+        The title for the figure.
+    """
+    cutoff = 40
+    if len(title) > cutoff:
+        # Find a good mid point to break the line
+        titles = title.split(' ')
+        length = 0
+        for itr, word in enumerate(titles):
+            length += len(word)
+            if length > cutoff:
+                titles[itr] += '\n'
+                length = 0.
+        title = ' '.join(titles)
+    plt.title(title)
+
+
+def determine_transect_str(cube, region=''):
     """
-    Determine the Transect String
+    Determine the transect string.
 
     Takes a guess at a string to describe the transect.
+
+    Parameters
+    ----------
+    cube: iris.cube.Cube
+        Input cube to use to determine the transect name.
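The symmetric-about-zero range that `get_cube_range_diff` computes for cubes can be sketched for plain arrays as well, which is useful when centring a diverging colour map on zero (a hedged sketch, not part of the module)::

    import numpy as np

    def symmetric_range(arrays):
        # Largest absolute value across all inputs, mirrored about zero.
        extreme = max(np.abs(arr).max() for arr in arrays)
        return [-extreme, extreme]

    symmetric_range([np.array([-1., 3.]), np.array([-4., 2.])])
    # returns [-4.0, 4.0]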
+ """ + if region: + return region + options = ['latitude', 'longitude'] + cube_dims = [c.standard_name for c in cube.coords()] for option in options: + if option not in cube_dims: + continue coord = cube.coord(option) + if len(coord.points) > 1: continue value = coord.points.mean() + value = round(value, 2) if option == 'latitude': return str(value) + ' N' if option == 'longitude': @@ -60,6 +101,124 @@ def determine_transect_str(cube): return '' +def make_depth_safe(cube): + """ + Make the depth coordinate safe. + + If the depth coordinate has a value of zero or above, we replace the + zero with the average point of the first depth layer. + + Parameters + ---------- + cube: iris.cube.Cube + Input cube to make the depth coordinate safe + + Returns + ---------- + iris.cube.Cube: + Output cube with a safe depth coordinate + + """ + depth = cube.coord('depth') + + # it's fine + if depth.points.min() * depth.points.max() > 0.: + return cube + + if depth.attributes['positive'] != 'down': + raise Exception('The depth field is not set up correctly') + + depth_points = [] + bad_points = depth.points <= 0. + for itr, point in enumerate(depth.points): + if bad_points[itr]: + depth_points.append(depth.bounds[itr, :].mean()) + else: + depth_points.append(point) + + cube.coord('depth').points = depth_points + return cube + + +def make_cube_region_dict(cube): + """ + Take a cube and return a dictionairy region: cube. + + Each item in the dict is a layer with a separate cube for each layer. + ie: cubes[region] = cube from specific region + + Cubes with no region component are returns as: + cubes[''] = cube with no region component. + + This is based on the method diagnostics_tools.make_cube_layer_dict, + however, it wouldn't make sense to look for depth layers here. + + Parameters + ---------- + cube: iris.cube.Cube + the opened dataset as a cube. + + Returns + --------- + dict + A dictionairy of layer name : layer cube. + """ + ##### + # Check layering: + coords = cube.coords() + layers = [] + for coord in coords: + if coord.standard_name in ['region', ]: + layers.append(coord) + + cubes = {} + if layers == []: + cubes[''] = cube + return cubes + + # iris stores coords as a list with one entry: + layer_dim = layers[0] + if len(layer_dim.points) in [1, ]: + cubes[''] = cube + return cubes + + if layer_dim.standard_name == 'region': + coord_dim = cube.coord_dims('region')[0] + for layer_index, layer in enumerate(layer_dim.points): + slices = [slice(None) for index in cube.shape] + slices[coord_dim] = layer_index + layer = layer.replace('_', ' ').title() + cubes[layer] = cube[tuple(slices)] + return cubes + + +def determine_set_y_logscale(cfg, metadata): + """ + Determine whether to use a log scale y axis. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + + Returns + ---------- + bool: + Boolean to flag whether to plot as a log scale. + """ + set_y_logscale = True + + if 'set_y_logscale' in cfg: + set_y_logscale = cfg['set_y_logscale'] + + if 'set_y_logscale' in metadata: + set_y_logscale = metadata['set_y_logscale'] + + return set_y_logscale + + def make_transects_plots( cfg, metadata, @@ -68,9 +227,19 @@ def make_transects_plots( """ Make a simple plot of the transect for an indivudual model. - The cfg is the opened global config, - metadata is the metadata dictionairy - filename is the preprocessing model file. 
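`determine_set_y_logscale` is one instance of a precedence rule used throughout these diagnostics: the per-diagnostic metadata overrides the recipe-level cfg, which overrides the hard-coded default. A generic sketch of that rule (`resolve_option` is an illustrative helper, not part of the module)::

    def resolve_option(cfg, metadata, key, default):
        # metadata (per diagnostic) beats cfg (recipe-wide) beats default.
        value = cfg.get(key, default)
        return metadata.get(key, value)

    resolve_option({'set_y_logscale': False}, {}, 'set_y_logscale', True)
    # returns False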
+ This tool loads the cube from the file, checks that the units are + sensible BGC units, checks for layers, adjusts the titles accordingly, + determines the ultimate file name and format, then saves the image. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + filename: str + The preprocessed model file. + """ # Load cube and set up units cube = iris.load_cube(filename) @@ -79,42 +248,305 @@ def make_transects_plots( # Is this data is a multi-model dataset? multi_model = metadata['dataset'].find('MultiModel') > -1 - # Make a dict of cubes for each layer. + cube = make_depth_safe(cube) + cubes = make_cube_region_dict(cube) - qplt.contourf(cube, 25) - plt.axes().set_yscale('log') + # Determine y log scale. + set_y_logscale = determine_set_y_logscale(cfg, metadata) - # Add title to plot - title = ' '.join( - [metadata['dataset'], metadata['long_name'], - determine_transect_str(cube)]) - plt.title(title) + for region, cube in cubes.items(): + # Make a dict of cubes for each layer. + qplt.contourf(cube, 15, linewidth=0, rasterized=True) - # Determine png filename: - if multi_model: - path = diagtools.folder( - cfg['plot_dir']) + os.path.basename(filename).replace( - '.nc', '_transect.png') - else: - path = diagtools.get_image_path( - cfg, - metadata, - suffix='transect.png', - ) + if set_y_logscale: + plt.axes().set_yscale('log') + + if region: + region_title = region + else: + region_title = determine_transect_str(cube, region) + + # Add title to plot + title = ' '.join( + [metadata['dataset'], metadata['long_name'], region_title]) + titlify(title) + + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # Determine image filename: + if multi_model: + path = diagtools.folder( + cfg['plot_dir']) + os.path.basename(filename).replace( + '.nc', region + '_transect' + image_extention) + else: + path = diagtools.get_image_path( + cfg, + metadata, + suffix=region + 'transect' + image_extention, + ) + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + + plt.close() + + +def add_sea_floor(cube): + """ + Add a simple sea floor line from the cube mask. + + Parameters + ---------- + cube: iris.cube.Cube + Input cube to use to produce the sea floor. + + """ + land_cube = cube.copy() + land_cube.data = np.ma.array(land_cube.data) + mask = 1. * land_cube.data.mask + if mask.shape == (): + mask = np.zeros_like(land_cube.data) + land_cube.data = np.ma.masked_where(mask == 0, mask) + land_cube.data.mask = mask + qplt.contour(land_cube, 2, cmap='Greys_r', rasterized=True) + + +def make_transect_contours( + cfg, + metadata, + filename, +): + """ + Make a contour plot of the transect for an indivudual model. + + This tool loads the cube from the file, checks that the units are + sensible BGC units, checks for layers, adjusts the titles accordingly, + determines the ultimate file name and format, then saves the image. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadata: dict + The metadata dictionairy for a specific model. + filename: str + The preprocessed model file. + + """ + # Load cube and set up units + cube = iris.load_cube(filename) + cube = diagtools.bgc_units(cube, metadata['short_name']) + cube = make_depth_safe(cube) + + # Load threshold/thresholds. 
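`add_sea_floor` works by contouring the land/sea mask rather than the data itself; a condensed sketch of the idea, assuming `cube.data` is a masked array::

    import numpy as np
    import iris.quickplot as qplt

    def sketch_sea_floor(cube):
        # Convert the mask to a 0/1 field and hide the unmasked ocean,
        # so that only the bathymetry edge is drawn.
        floor = cube.copy()
        mask = np.ma.getmaskarray(floor.data).astype(float)
        floor.data = np.ma.masked_where(mask == 0., mask)
        qplt.contour(floor, 2, cmap='Greys_r', rasterized=True)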
+ plot_details = {} + colours = [] + thresholds = diagtools.load_thresholds(cfg, metadata) + linewidths = [1 for thres in thresholds] + linestyles = ['-' for thres in thresholds] + + cubes = make_cube_region_dict(cube) + for region, cube in cubes.items(): + for itr, thres in enumerate(thresholds): + colour = diagtools.get_colour_from_cmap(itr, len(thresholds)) + label = str(thres) + ' ' + str(cube.units) + colours.append(colour) + plot_details[thres] = { + 'c': colour, + 'lw': 1, + 'ls': '-', + 'label': label + } + + qplt.contour( + cube, + thresholds, + colors=colours, + linewidths=linewidths, + linestyles=linestyles, + rasterized=True) + + # Determine y log scale. + if determine_set_y_logscale(cfg, metadata): + plt.axes().set_yscale('log') + + add_sea_floor(cube) + + # Add legend + diagtools.add_legend_outside_right( + plot_details, plt.gca(), column_width=0.08, loc='below') + + # Add title to plot + title = ' '.join([ + metadata['dataset'], metadata['long_name'], + determine_transect_str(cube, region) + ]) + titlify(title) + + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # Determine image filename: + if metadata['dataset'].find('MultiModel') > -1: + path = diagtools.folder( + cfg['plot_dir']) + os.path.basename(filename) + path.replace('.nc', region + '_transect_contour' + image_extention) + else: + path = diagtools.get_image_path( + cfg, + metadata, + suffix=region + 'transect_contour' + image_extention, + ) + + # Saving files: + if cfg['write_plots']: + logger.info('Saving plots to %s', path) + plt.savefig(path) + + plt.close() + + +def multi_model_contours( + cfg, + metadatas, +): + """ + Make a multi model comparison plot showing several transect contour plots. + + This tool loads several cubes from the files, checks that the units are + sensible BGC units, checks for layers, adjusts the titles accordingly, + determines the ultimate file name and format, then saves the image. + + Parameters + ---------- + cfg: dict + the opened global config dictionairy, passed by ESMValTool. + metadatas: dict + The metadatas dictionairy for a specific model. + + """ + #### + # Load the data for each layer as a separate cube + model_cubes = {} + regions = {} + thresholds = {} + set_y_logscale = True + + for filename in sorted(metadatas): + cube = iris.load_cube(filename) + cube = diagtools.bgc_units(cube, metadatas[filename]['short_name']) + cube = make_depth_safe(cube) + cubes = make_cube_region_dict(cube) + model_cubes[filename] = cubes + for region in model_cubes[filename]: + regions[region] = True + + # Determine y log scale. + set_y_logscale = determine_set_y_logscale(cfg, metadatas[filename]) + + # Load threshold/thresholds. + tmp_thresholds = diagtools.load_thresholds(cfg, metadatas[filename]) + for threshold in tmp_thresholds: + thresholds[threshold] = True + + # Load image format extention + image_extention = diagtools.get_image_format(cfg) + + # Make a plot for each layer and each threshold + for region, threshold in product(regions, thresholds): + logger.info('plotting threshold: \t%s', threshold) + title = '' + plot_details = {} + + # Plot each file in the group + for index, filename in enumerate(sorted(metadatas)): + color = diagtools.get_colour_from_cmap(index, len(metadatas)) + linewidth = 1. + linestyle = '-' + # Determine line style for MultiModel statistics: + if 'MultiModel' in metadatas[filename]['dataset']: + linewidth = 2. 
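The contour plotting in `make_transect_contours` reduces to drawing one contour level per threshold and recording its style for the legend; a minimal sketch of that pattern ('transect.nc' is a hypothetical preprocessed file, and the colormap sampling stands in for `get_colour_from_cmap`)::

    import iris
    import iris.quickplot as qplt
    import matplotlib.pyplot as plt

    cube = iris.load_cube('transect.nc')
    thresholds = [0.5, 1.0, 2.0]  # illustrative contour levels
    cmap = plt.get_cmap('jet')
    plot_details = {}
    for itr, thres in enumerate(thresholds):
        colour = cmap(itr / (len(thresholds) - 1.))
        plot_details[thres] = {'c': colour, 'lw': 1, 'ls': '-',
                               'label': str(thres)}
    qplt.contour(cube, thresholds,
                 colors=[plot_details[t]['c'] for t in thresholds])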
+                linestyle = ':'
+            # Determine line style for Observations
+            if metadatas[filename]['project'] in diagtools.get_obs_projects():
+                color = 'black'
+                linewidth = 1.7
+                linestyle = '-'
+
+            qplt.contour(
+                model_cubes[filename][region], [
+                    threshold,
+                ],
+                colors=[
+                    color,
+                ],
+                linewidths=linewidth,
+                linestyles=linestyle,
+                rasterized=True)
+
+            plot_details[filename] = {
+                'c': color,
+                'ls': linestyle,
+                'lw': linewidth,
+                'label': metadatas[filename]['dataset']
+            }
+
+            if set_y_logscale:
+                plt.axes().set_yscale('log')
+
+            title = metadatas[filename]['long_name']
+            units = str(model_cubes[filename][region].units)
+
+            add_sea_floor(model_cubes[filename][region])
+
+        # Add title, threshold, legend to plots
+        title = ' '.join([
+            title,
+            str(threshold), units,
+            determine_transect_str(model_cubes[filename][region], region)
+        ])
+        titlify(title)
+        plt.legend(loc='best')
+
+        # Saving files:
+        if cfg['write_plots']:
+            path = diagtools.get_image_path(
+                cfg,
+                metadatas[filename],
+                prefix='MultipleModels',
+                suffix='_'.join([
+                    'contour_transect', region,
+                    str(threshold) + image_extention
+                ]),
+                metadata_id_list=[
+                    'field', 'short_name', 'preprocessor', 'diagnostic',
+                    'start_year', 'end_year'
+                ],
+            )
+
+            # Resize and add legend outside the axes.
+            plt.gcf().set_size_inches(9., 6.)
+            diagtools.add_legend_outside_right(
+                plot_details, plt.gca(), column_width=0.15)
-
-    # Saving files:
-    if cfg['write_plots']:
         logger.info('Saving plots to %s', path)
         plt.savefig(path)
-
-    plt.close()
+        plt.close()
 
 
 def main(cfg):
     """
-    Load the config file, and send it to the plot maker.
+    Load the config file and some metadata, then pass them to the
+    plot-making tools.
+
+    Parameters
+    ----------
+    cfg: dict
+        the opened global config dictionary, passed by ESMValTool.
 
-    The cfg is the opened global config.
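`multi_model_contours` produces one figure per (region, threshold) pair; the pairing itself is just `itertools.product`, as in this sketch (keys illustrative)::

    from itertools import product

    regions = ['', 'North Atlantic']
    thresholds = [0.5, 1.0]
    # One multi-model figure per combination.
    for region, threshold in product(regions, thresholds):
        print(region, threshold)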
""" ##### for index, metadata_filename in enumerate(cfg['input_files']): @@ -123,8 +555,20 @@ def main(cfg): metadata_filename, ) - metadata = diagtools.get_input_files(cfg, index=index) - for filename in sorted(metadata.keys()): + metadatas = diagtools.get_input_files(cfg, index=index) + + thresholds = diagtools.load_thresholds(cfg, + next(iter(metadatas.values()))) + + ####### + # Multi model contour plots + if thresholds: + multi_model_contours( + cfg, + metadatas, + ) + + for filename in sorted(metadatas): logger.info('-----------------') logger.info( @@ -134,7 +578,12 @@ def main(cfg): ###### # Time series of individual model - make_transects_plots(cfg, metadata[filename], filename) + make_transects_plots(cfg, metadatas[filename], filename) + + ###### + # Contour maps of individual model + if thresholds: + make_transect_contours(cfg, metadatas[filename], filename) logger.info('Success') diff --git a/esmvaltool/diag_scripts/perfmetrics/collect.ncl b/esmvaltool/diag_scripts/perfmetrics/collect.ncl index ff8b7151ce..4702d7ad90 100644 --- a/esmvaltool/diag_scripts/perfmetrics/collect.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/collect.ncl @@ -19,6 +19,7 @@ ; cm_interval: min and max color of the color table ; cm_reverse: reverse color table ; sort: sort datasets in alphabetic order (excluding MMM) +; diag_order: sort diagnostics in the specified order ; title: plot title ; scale_font: scaling factor applied to the default font size ; disp_values: switch on/off the grading values on the plot @@ -28,28 +29,29 @@ ; Caveats ; ; Modification history +; 20190315-A_hass_bg: added SMPI case ; 20180508-A_righ_ma: renamed and generalized ; 20151027-A_laue_ax: moved call to 'write_references' to the beginning ; of the code ; 20150325-A_laue_ax: modified reference tags used for acknowledgements ; (projects, observations, etc) ; 20150109-A_gott_kl: distinguish RMSD & BIAS in output file naming. -; 20140620-A_gott_kl: modified output file naming (variable outfile_plot) +; 20140620-A_gott_kl: modified output file naming (variable plotpath) ; and added option for explicit plot title. ; 20140204-A_fran_fr: written. 
; ; ############################################################################# -load "interface_scripts/interface.ncl" +load "$diag_scripts/../interface_scripts/interface.ncl" -load "./diag_scripts/shared/plot/style.ncl" -load "./diag_scripts/shared/plot/portrait_plot.ncl" -load "./diag_scripts/shared/plot/taylor_plot.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/portrait_plot.ncl" +load "$diag_scripts/shared/plot/taylor_plot.ncl" begin - enter_msg(diag_script, "") - diag_script_base = basename(diag_script) + enter_msg(DIAG_SCRIPT, "") + diag_script_base = basename(DIAG_SCRIPT) ; Check file type file_type = config_user_info@output_file_type @@ -57,14 +59,6 @@ begin file_type = "ps" end if - ; Write references ; FIX-ME to be replaced by new method - write_references(diag_script, "A_fran_fr", \ - (/"A_righ_ma", "A_eyri_ve", "A_gott_kl"/), \ - (/"D_righi15gmd", "D_gleckler08jgr"/), \ - (/"E_ncep", "E_erainterim", "E_airs", "E_ceresebaf", \ - "E_srb"/), \ - (/"P_embrace", "P_esmval"/)) - ; List of grading files input_files = diag_script_info@input_files + "/" + \ diag_script_info@metric + ".nc" @@ -78,138 +72,404 @@ begin f = addfile(input_files(ii), "r") curr_file = tostring(f->temp_list) - data_temp = ncdf_read(curr_file, "grade") - ; Change to 3 dimensional - if (dimsizes(dimsizes(data_temp)).eq.2) then + if (diag_script_info@metric.eq."SMPI") then + data_temp_smpi = ncdf_read(curr_file, "performance_index") - dim_temp = array_append_record(dimsizes(data_temp), 2, 0) - temp = new(dim_temp, typeof(data_temp)) - temp(:, :, 0) = data_temp - data_temp := temp - delete(temp) + if (.not.isdefined("data_all_smpi")) then - end if + dim_temp_smpi = dimsizes(data_temp_smpi) + n = dimsizes(dim_temp_smpi) + dim_data_smpi = new(n + 1, integer) + dim_data_smpi(0) = dimsizes(input_files) + dim_data_smpi(1:n) = dim_temp_smpi + vars = new(dimsizes(input_files), string) + data_all_smpi = new(dim_data_smpi, float) + delete(dim_data_smpi) + data_all_smpi(0, :, :) = data_temp_smpi + data_all_smpi!0 = "vars" + data_all_smpi!1 = "bootstrap_member" + data_all_smpi!2 = "models" + data_all_smpi&models = data_temp_smpi&models + data_all_smpi&bootstrap_member = data_temp_smpi&bootstrap_member - ; Create array for collecting all datasets - if (.not.isdefined("data_all")) then - dim_temp = dimsizes(data_temp) - dim_data = (/dimsizes(input_files), dim_temp(1), 2/) - data_all = new(dim_data, float) - data_all(0, :, :) = data_temp - data_all!1 = "models" - data_all&models = data_temp&models - end if + end if - ; Make sure dataset coordinate is consistent - consistent = False - if (dimsizes(data_temp&models).eq.dimsizes(data_all&models)) then - if (all(data_temp&models.eq.data_all&models)) then - consistent = True + ; Define the string array for input file provenance + if (.not.isdefined("preproc_files")) then + preproc_files = new(dimsizes(input_files) * \ + data_all_smpi@num_preproc_files, string) end if - end if - ; Append data - if (consistent) then - data_all(ii, :, :) = (/data_temp/) - copy_VarCoords(data_temp, data_all(ii:ii, :, :)) - else - do imod2 = 0, dimsizes(data_temp&models) - 1 - if (.not.any(data_temp&models(imod2).eq.data_all&models)) then - ; Append record for dataset(imod) - data_new = extend_var_at(data_all, 1, dimsizes(data_all&models)) - data_new(ii, dimsizes(data_all&models), :) = \ - (/data_temp(0, imod2, :)/) - data_new&models(dimsizes(data_all&models)) = \ - (/data_temp&models(imod2)/) - data_all := data_new - delete(data_new) - else - ; Loop over datasets of 
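The SMPI branch of this loop collects one performance_index array per input file; once the loop completes, the bootstrap members are reduced to a climatological index plus a 5-95 % confidence half-width. A numpy rendering of that reduction (array shapes and values are illustrative, this is not the NCL code itself)::

    import numpy as np

    # i2 has shape (nboot + 1, nmod): row 0 holds the climatological
    # index per model, rows 1..nboot hold the bootstrap members.
    i2 = np.random.rand(101, 3)
    nboot = i2.shape[0] - 1
    clim = i2[0]
    boot = np.sort(i2[1:], axis=0)
    i05, i95 = int(0.05 * nboot), int(0.95 * nboot)
    conf = 0.5 * (boot[i95] - boot[i05])  # half the 5-95 % spread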
data - do imod = 0, dimsizes(data_all&models) - 1 - ; if no data dataset is similar to curreny entry, write data entry - if (data_all&models(imod).eq. data_temp&models(imod2)) then + ; Add the input file info to the file for provenance + do ipf = 0, toint(data_all_smpi@num_preproc_files) - 1 + num_preproc = "preproc_file_" + ipf + preproc_idx = ipf + ii * data_all_smpi@num_preproc_files + if (isatt(data_temp_smpi, num_preproc)) then + preproc_files(preproc_idx) = data_temp_smpi@$num_preproc$ + end if + end do + + ; Make sure dataset coordinate is consistent + consistent = False + if (dimsizes(data_temp_smpi&models).eq. \ + dimsizes(data_all_smpi&models)) then + if (all(data_temp_smpi&models.eq.data_all_smpi&models)) then + consistent = True + end if + end if + + ; Append data + if (consistent) then + data_all_smpi(ii, :, :) = (/data_temp_smpi/) + copy_VarCoords(data_temp_smpi, data_all_smpi(ii, :, :)) + else + ; Loop over datasets in new data entry + do imod_temp = 0, dimsizes(data_temp_smpi&models) - 1 + ; If current dataset is not already part of the model coordinate + if (.not.any(data_temp_smpi&models(imod_temp) .eq. \ + data_all_smpi&models)) then + ; Append record for dataset(imod) + data_new = extend_var_at(data_all_smpi, 2, \ + dimsizes(data_all_smpi&models)) + data_new(ii, :, dimsizes(data_all_smpi&models)) = \ + (/data_temp_smpi(:, imod_temp)/) + data_new&models(dimsizes(data_all_smpi&models)) = \ + (/data_temp_smpi&models(imod_temp)/) + delete(data_all_smpi) + data_all_smpi = data_new + delete(data_new) + else + ; Loop over datasets of data + do imod = 0, dimsizes(data_all_smpi&models) - 1 + ; if new dataset is identical to current entry, write data entry + if (data_all_smpi&models(imod).eq. \ + data_temp_smpi&models(imod_temp)) then + data_all_smpi(ii, :, imod) = (/data_temp_smpi(:, imod_temp)/) + copy_VarCoords(data_temp_smpi(:, imod_temp), \ + data_all_smpi(ii:ii, :, imod)) + end if + end do + end if + end do + end if + + vars(ii) = data_temp_smpi@invar + delete(data_temp_smpi) + + else ; metric not SMPI + + data_temp = ncdf_read(curr_file, "grade") + + ; Change to 3 dimensional + if (dimsizes(dimsizes(data_temp)).eq.2) then + dim_temp = array_append_record(dimsizes(data_temp), 2, 0) + temp = new(dim_temp, typeof(data_temp)) + temp(:, :, 0) = data_temp + data_temp := temp + delete(temp) + end if + + ; Create array for collecting all datasets + if (.not.isdefined("data_all")) then + dim_temp = dimsizes(data_temp) + dim_data = (/dimsizes(input_files), dim_temp(1), 2/) + data_all = new(dim_data, float) + data_all(0, :, :) = data_temp + data_all!1 = "models" + data_all&models = data_temp&models + end if + + ; Define the string array for input file provenance + if (.not.isdefined("preproc_files")) then + preproc_files = new(dimsizes(input_files) * \ + data_all@num_preproc_files, string) + end if + + ; Add the input file info to the file for provenance + do ipf = 0, toint(data_all@num_preproc_files) - 1 + num_preproc = "preproc_file_" + ipf + preproc_idx = ipf + ii * data_all@num_preproc_files + if (isatt(data_temp, num_preproc)) then + preproc_files(preproc_idx) = data_temp@$num_preproc$ + end if + end do + + ; Make sure model coordinate is consistent + consistent = False + if (dimsizes(data_temp&models).eq.dimsizes(data_all&models)) then + if (all(data_temp&models.eq.data_all&models)) then + consistent = True + end if + end if + + ; Append data + if (consistent) then + data_all(ii, :, :) = (/data_temp/) + copy_VarCoords(data_temp, data_all(ii:ii, :, :)) + else + do imod2 = 0, 
dimsizes(data_temp&models) - 1 + if (.not.any(data_temp&models(imod2).eq.data_all&models)) then + ; Append record for dataset(imod) + data_new = extend_var_at(data_all, 1, dimsizes(data_all&models)) + data_new(ii, dimsizes(data_all&models), :) = \ + (/data_temp(0, imod2, :)/) + data_new&models(dimsizes(data_all&models)) = \ + (/data_temp&models(imod2)/) + data_all := data_new + delete(data_new) + else + ; Loop over datasets of data + do imod = 0, dimsizes(data_all&models) - 1 + ; if no dataset is similar to curreny entry, write data entry + if (data_all&models(imod).eq. data_temp&models(imod2)) then data_all(ii, imod, :) = (/data_temp(0, imod2, :)/) copy_VarCoords(data_temp(0:0, imod2, :), \ data_all(ii:ii, imod, :)) + end if + end do + end if + end do + end if + delete(data_temp) + end if ; metric distinction end 1 + end do + + if (diag_script_info@metric.eq."SMPI") then + + data_all_smpi&vars = vars + delete(vars) + + ; Check for incomplete array, SMPI only for datasets with all vars + if any(ismissing(data_all_smpi)) then + do ivar = 0, dimsizes(data_all_smpi&vars) - 1 + do ii = 0, dimsizes(data_all_smpi&models) - 1 + if all(ismissing(data_all_smpi(ivar, :, ii))) then + if (isvar("miss_mods")) then + newmm = array_append_record(miss_mod, \ + data_all_smpi&models(ii), 0) + delete(miss_mod) + miss_mod = newmm + delete(newmm) + else + miss_mod = data_all_smpi&models(ii) end if - end do + end if + end do + if (isvar("miss_mods")) then + log_info("Variable " + data_all_smpi&vars(ivar) \ + + " is missing dataset(s) " \ + + str_join(miss_mods, ", ")) + delete(miss_mods) + missing_mods = True end if end do + if (isvar("missing_mods")) then + error_msg("f", DIAG_SCRIPT, "", "SMPI only supported for" \ + + " datasets containing all variables") + end if end if - delete(data_temp) - end do - ; Reduce size if all entries have only one reference - if (all(ismissing(data_all(:, :, 1)))) then - data_new = data_all(:, :, 0) - delete(data_all) - data_all = data_new - delete(data_new) - end if - delete(data_all@var) - delete(data_all@title) - delete(data_all@ncdf_dir) - if (isatt(data_all, "reference")) then - delete(data_all@reference) - end if + i2mean = dim_avg_n(data_all_smpi, 0) ; eq. 3 + dims = dimsizes(i2mean) + nboot = dims(0) - 1 + nmod = dims(1) + delete(dims) - ; Sort datasets in alphabetical order, excluding multi-model mean/median - ; which are placed at the beginning - if (isatt(diag_script_info, "sort")) then - if (diag_script_info@sort) then - idx = ind(data_all&models.eq."MultiModelMean" .or. \ - data_all&models.eq."MultiModelMedian") - pid = sort_alphabetically(data_all&models, idx, "begin") - if (dimsizes(dimsizes(data_all)).eq.3) then - data_all := data_all(:, pid, :) - else - data_all := data_all(:, pid) + data_smpi = new((/dimsizes(data_all_smpi&models), 2/), float) + data_smpi!0 = "models" + data_smpi&models = data_all_smpi&models + data_smpi(:, 0) = (/i2mean(0, :)/) ; clim. mean + + i5 = toint(0.05 * nboot) + i95 = toint(0.95 * nboot) + + do imod = 0, nmod - 1 + data_sorted = i2mean(1:nboot, imod) + qsort(data_sorted) + data_smpi(imod, 1) = 0.5 * (data_sorted(i95) - data_sorted(i5)) + delete(data_sorted) + end do + + data_smpi!1 = "statistic" + data_smpi&statistic = (/"SMPI", "95_conf"/) + data_smpi@diag_script = DIAG_SCRIPT + data_smpi@var = "SMPI" + data_smpi@ensemble_name = data_all_smpi@ensemble_name + + ; Sort datasets in alphabetical order (exclude MMM and set to end) + pid = sort_alphabetically(data_smpi&models, \ + ind(data_smpi&models.eq."Mean-model".or. 
\ + data_smpi&models.eq."Median-model"), "end") + tmp = data_smpi(pid, :) + tmp&models = data_smpi&models(pid) + delete(data_smpi) + data_smpi = tmp + delete(tmp) + delete(pid) + + ; Define output filenames + workpath = config_user_info@work_dir + "SMPI.nc" + plotpath = config_user_info@plot_dir + "SMPI" + + ; Write output + if (config_user_info@write_netcdf) then + system("mkdir -p " + config_user_info@work_dir) + ncdf_outfile = ncdf_write(data_smpi, workpath) + end if + + ; Attach plotting options + copy_VarAtts(diag_script_info, data_smpi) + if (isatt(diag_script_info, "title")) then + data_smpi@res_tiMainString = diag_script_info@title + end if + + else ; if metric not SMPI + + ; Reduce size if all entries have only one reference + if (all(ismissing(data_all(:, :, 1)))) then + data_new = data_all(:, :, 0) + delete(data_all) + data_all = data_new + delete(data_new) + end if + data_all@var = "grade" + if (isatt(data_all, "reference")) then + delete(data_all@reference) + end if + + ; Sort datasets in alphabetical order, excluding multi-model mean/median + ; which are placed at the beginning + if (isatt(diag_script_info, "sort")) then + if (diag_script_info@sort) then + idx = ind(data_all&models.eq."MultiModelMean" .or. \ + data_all&models.eq."MultiModelMedian") + pid = sort_alphabetically(data_all&models, idx, "begin") + if (dimsizes(dimsizes(data_all)).eq.3) then + data_all := data_all(:, pid, :) + else + data_all := data_all(:, pid) + end if + delete(pid) end if - delete(pid) end if - end if - ; Attach plotting options - copy_VarAtts(diag_script_info, data_all) - if (isatt(diag_script_info, "title")) then - data_all@res_tiMainString = diag_script_info@title - end if - if (diag_script_info@metric.eq."taylor") then - data_all@res_varLabels = data_all&models - data_all@res_caseLabels = data_all&diagnostics - end if + ; Sort diagnostics in the order specified in the settings + if (isatt(diag_script_info, "diag_order")) then + l_ok = True + if (dimsizes(data_all&diagnostics).ne. \ + dimsizes(diag_script_info@diag_order)) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, number of diagnostics does not match") + l_ok = False + end if + pid = new(dimsizes(diag_script_info@diag_order), integer) + do ii = 0, dimsizes(diag_script_info@diag_order) - 1 + tmp = ind(data_all&diagnostics.eq.diag_script_info@diag_order(ii)) + if (any(ismissing(tmp)) .or. 
dimsizes(tmp).gt.1) then + error_msg("w", DIAG_SCRIPT, "", "specified order of diagnostics " + \ + "cannot be applied, invalid entry in diag_order") + break + end if + pid(ii) = tmp + delete(tmp) + end do + if (l_ok) then + if (dimsizes(dimsizes(data_all)).eq.3) then + data_all := data_all(pid, :, :) + else + data_all := data_all(pid, :) + end if + end if + end if - ; Create outfile directory - system("mkdir -p " + config_user_info@plot_dir) - if (dimsizes(data_all&diagnostics).gt.1) then - diags = data_all&diagnostics(0) + "_to_" + \ - data_all&diagnostics(dimsizes(data_all&diagnostics) - 1) - else - diags = str_concat(data_all&diagnostics) - end if + ; Define output filenames + if (dimsizes(data_all&diagnostics).gt.1) then + diags = data_all&diagnostics(0) + "_to_" + \ + data_all&diagnostics(dimsizes(data_all&diagnostics) - 1) + else + diags = str_concat(data_all&diagnostics) + end if + workpath = config_user_info@work_dir + diags + plotpath = config_user_info@plot_dir + diags + if (isatt(data_all, "metric")) then + workpath = workpath + "_" + data_all@metric + ".nc" + plotpath = plotpath + "_" + data_all@metric + end if + delete(diags) + + ; Write output + if (config_user_info@write_netcdf) then + system("mkdir -p " + config_user_info@work_dir) + ncdf_outfile = ncdf_write(data_all, workpath) + end if + + ; Attach plotting options + copy_VarAtts(diag_script_info, data_all) + if (isatt(diag_script_info, "title")) then + data_all@res_tiMainString = diag_script_info@title + end if + if (diag_script_info@metric.eq."taylor") then + data_all@res_varLabels = data_all&models + data_all@res_caseLabels = data_all&diagnostics + end if - ; Define outfile name - outfile_plot = config_user_info@plot_dir + diags - if (isatt(data_all, "metric")) then - outfile_plot = outfile_plot + "_" + data_all@metric end if ; Create workspace - wks = gsn_open_wks(file_type, outfile_plot) + system("mkdir -p " + config_user_info@plot_dir) + wks = gsn_open_wks(file_type, plotpath) if (diag_script_info@metric.eq."taylor") then - wks@legendfile = outfile_plot + "_legend" + wks@legendfile = plotpath + "_legend" end if ; Call plotting function if (diag_script_info@metric.eq."taylor") then plot = taylor_plot(wks, data_all, "grade") + elseif (diag_script_info@metric.eq."SMPI") then + plot = circle_plot(wks, data_smpi, "performance_index", \ + data_smpi@ensemble_name) else plot = portrait_plot(wks, data_all, "grade") end if - log_info(" gv " + outfile_plot + "." + file_type) + ; Call provenance logger + if (diag_script_info@metric.eq."RMSD") then + caption = "RMSD performance metric" + statistics = (/"rmsd"/) + authors = (/"fran_fr", "righ_ma", "eyri_ve"/) + plottype = "portrait" + references = (/"righi15gmd", "gleckler08jgr"/) + elseif (diag_script_info@metric.eq."BIAS") then + caption = "Bias performance metric" + statistics = (/"diff"/) + authors = (/"fran_fr", "righ_ma", "eyri_ve"/) + plottype = "portrait" + references = (/"righi15gmd", "gleckler08jgr"/) + elseif (diag_script_info@metric.eq."taylor") then + caption = "Taylor diagram" + statistics = (/"rmsd", "corr"/) + authors = (/"fran_fr", "righ_ma", "eyri_ve"/) + plottype = "taylor" + references = (/"righi15gmd", "gleckler08jgr"/) + elseif (diag_script_info@metric.eq."SMPI") then + caption = \ + "Performance index I2. Similar to Figure 1 of Reichler and Kim (2008)." 
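The diag_order handling above amounts to building a permutation index and refusing to reorder when a requested name is missing or duplicated; the same logic reads as follows in a Python sketch (diagnostic names illustrative)::

    diagnostics = ['ta850', 'ua200', 'pr']
    diag_order = ['pr', 'ta850', 'ua200']
    pid = []
    for name in diag_order:
        matches = [i for i, diag in enumerate(diagnostics) if diag == name]
        if len(matches) != 1:
            pid = None  # invalid entry in diag_order; keep original order
            break
        pid.append(matches[0])
    if pid is not None:
        diagnostics = [diagnostics[i] for i in pid]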
+ statistics = "smpi" + authors = (/"gier_be", "hass_bg"/) + plottype = "circle" + references = (/"rk2008bams"/) + else + error_msg("f", DIAG_SCRIPT, "", "cannot add provenance information " + \ + "for metric " + diag_script_info@metric) + end if + domains = (/"global"/) + + ; Call provenance logger + preproc_files := preproc_files(ind(.not.ismissing(preproc_files))) + log_provenance(ncdf_outfile, plotpath, caption, statistics, domains, \ + plottype, authors, references, preproc_files) - leave_msg(diag_script, "") + leave_msg(DIAG_SCRIPT, "") end diff --git a/esmvaltool/diag_scripts/perfmetrics/cycle.ncl b/esmvaltool/diag_scripts/perfmetrics/cycle.ncl index dc0d4c5ba2..c230660d1d 100644 --- a/esmvaltool/diag_scripts/perfmetrics/cycle.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/cycle.ncl @@ -4,21 +4,23 @@ ; ESMVal project ; ############################################################################# +load "$diag_scripts/shared/plot/xy_line.ncl" + procedure perfmetrics_ptype_script() begin ; Define output array if (diag_script_info@time_avg.eq."seasonalclim") then - var_all = new((/dim_MOD, 4, 2/), float) + var_all = new((/nDatasets, 4, 2/), float) var_all!1 = "season" var_all&season = (/"DJF", "MAM", "JJA", "SON"/) else if (diag_script_info@time_avg.eq."monthlyclim") then - var_all = new((/dim_MOD, 12, 2/), float) + var_all = new((/nDatasets, 12, 2/), float) var_all!1 = "month" var_all&month = (/"J", "F", "M", "A", "M", "J",\ "J", "A", "S", "O", "N", "D"/) else - error_msg("f", diag_script, "", "time_avg option " + \ + error_msg("f", DIAG_SCRIPT, "", "time_avg option " + \ diag_script_info@time_avg + \ " not compatible with plot_type cycle") end if @@ -30,44 +32,49 @@ begin ; Attach attributes var_all@var = var0 - var_all@diag_script = (/diag_script/) + var_all@diag_script = (/DIAG_SCRIPT/) copy_VarAtts(diag_script_info, var_all) - var_all@ref_model = variable_info@reference_dataset + var_all@ref_model = variable_info[0]@reference_dataset + + ; Search for level + f = addfile(info_items[0]@filename, "r") + if (isfilevar(f, "plev")) then + if (dimsizes(f->plev).eq.1) then + level = toint(f->plev/100.) 
+ end if + end if + + ; Set path for saving processed data + system("mkdir -p " + config_user_info@work_dir) + if (isdefined("level")) then + vv = var0 + else + vv = var0 + level + end if + fname = str_join((/"perfmetrics", "cycle", vv, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath = config_user_info@work_dir + fname + ".nc" + plotpath = config_user_info@plot_dir + fname ; Loop over datasets - do imod = 0, dim_MOD - 1 + do imod = 0, nDatasets - 1 log_debug("Processing " + datasetnames(imod)) - ; Set path for saving processed data ; FIX-ME add preproc_id - fullpath = config_user_info@work_dir + "/" - system("mkdir -p " + fullpath) - fname = basename(systemfunc("basename " + dataset_info@filename(imod))) - fname = fname + "_" + basename(diag_script) + "_cycle" - fname = fname + "_" + diag_script_info@time_avg - if (isatt(diag_script_info, "level")) then - fname = fname + "_" + diag_script_info@level ; FIX-ME - end if - fname = fname + "_" + str_sub_str(diag_script_info@region, " ", "") - procpath = fullpath + fname + ".nc" - ; Determine start/end year - start_year = get_start_year(imod) - end_year = get_end_year(imod) + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year ; Read data - var = read_data(imod, var0, field_type0) + var = read_data(info_items[imod]) dnames = getVarDimNames(var) ; Extract region and average over latitude and longitude - if (any(field_type0.eq.(/"T0M", "T0Ms", "T1M"/))) then - var_reg = var - else if (any(dnames.eq."lat") .and. any(dnames.eq."lon")) then + if (any(dnames.eq."lat") .and. any(dnames.eq."lon")) then var_reg = area_operations(var, region(0), region(1), \ region(2), region(3), "average", True) else - error_msg("f", diag_script, "", "dimensionality not implemented") - end if + var_reg = var end if delete(var) @@ -84,20 +91,22 @@ begin end if delete(var_reg) - ; Write output + ; Store in global array var_all(imod, :, 0) = var_avg var_all(imod, :, 1) = var_std delete(var_avg) delete(var_std) - if (config_user_info@write_netcdf.eq."True") then - var_all@ncdf = procpath - ncdf_outfile = ncdf_write(var_all(imod, :, :), procpath) - end if end do + ; Write output + if (config_user_info@write_netcdf) then + var_all@ncdf = workpath + ncdf_outfile = ncdf_write(var_all, workpath) + end if + ; Plotting - if (.not.diag_script_info@draw_plots) then + if (.not.config_user_info@write_plots) then return end if @@ -107,51 +116,32 @@ begin end if ; Annotation and file names - region_name = "" ; priority 3 - location = "" ; priority 3 - if (isatt(diag_script_info, "aux_info")) then - region_name = "_" + diag_script_info@aux_info ; priority 2 - location = " - " + diag_script_info@aux_info ; priority 2 - end if - if (isatt(diag_script_info, "location")) then - location = " - " + diag_script_info@location ; priority 1 - end if - if (isatt(diag_script_info, "region")) then - region_name = "_" + region@name ; priority 1 - location = " - " + diag_script_info@region - end if - if (isatt(var_all, "long_name")) then - varstring = var_all@long_name ; priority 3 - end if - if (isatt(var_all, "short_name")) then - varstring = var_all@short_name ; priority 2 - end if - if (isStrSubset(var0, "vmr").and.isStrSubset(var0, "_")) then - varstring = var0 ; priority 1 - end if - altitude = "" ; priority 2 - if (any(field_type0.eq.(/"T0M", "T0Ms"/))) then - if (isatt(diag_script_info, "altitude")) then - altitude = " - " + diag_script_info@altitude ; priority 1 - end if - else - if (isatt(diag_script_info, "level")) then ; FIX-ME - if 
(diag_script_info@level.ne."all") then - altitude = " - " + diag_script_info@level + " hPa" - end if - end if + title = var_all@long_name + caption = var0 + if (isdefined("level")) then + title = title + " " + level + " hPa" + caption = caption + level end if - outfile = config_user_info@plot_dir + var0 + "_cycle_" + \ - diag_script_info@time_avg + "_" + region_name - - wks = gsn_open_wks(file_type, outfile) - wks@legendfile = outfile + "_legend" - var_all@res_tiMainString = varstring - var_all@res_tiMainString = var_all@res_tiMainString + location + altitude - plot = cycle_plot(wks, var_all, var0) + title = title + " - " + diag_script_info@region + delete(level) + + ; Draw plot + wks = gsn_open_wks(file_type, plotpath) + wks@legendfile = plotpath + "_legend" + var_all@res_tiMainString = title + plot = cycle_plot(wks, var_all, var0, info_items) draw(plot) frame(wks) - log_info(" gv " + outfile + "." + file_type) + ; Call provenance logger + log_provenance(ncdf_outfile, \ + plotpath + "." + file_type, \ + "Cycle plot of variable " + caption, \ + (/"mean", "stddev"/), \ + diag_script_info@region, \ + "seas", \ + (/"fran_fr", "righ_ma", "eyri_ve"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + metadata_att_as_array(info_items, "filename")) end diff --git a/esmvaltool/diag_scripts/perfmetrics/cycle_latlon.ncl b/esmvaltool/diag_scripts/perfmetrics/cycle_latlon.ncl index f60d78cb91..1e70024916 100644 --- a/esmvaltool/diag_scripts/perfmetrics/cycle_latlon.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/cycle_latlon.ncl @@ -4,11 +4,13 @@ ; ESMVal project ; ############################################################################# +load "$diag_scripts/shared/plot/style.ncl" + procedure perfmetrics_ptype_script() begin ; Set dataset indexes, with reference and alternative datasets first - modidx = ispan(0, dim_MOD - 1, 1) + modidx = ispan(0, nDatasets - 1, 1) if (l_altern) then modidx := array_append_record( \ (/ref_ind, alt_ind/), \ @@ -17,6 +19,8 @@ begin modidx := array_append_record(ref_ind, modidx(ind(modidx.ne.ref_ind)), 0) end if + annots = project_style(info_items, diag_script_info, "annots") + ; Loop over datasets do ii = 0, dimsizes(modidx) - 1 @@ -24,25 +28,19 @@ begin log_debug("Processing " + datasetnames(imod)) - ; Set path for saving processed data ; FIX-ME add preproc_id - fullpath = config_user_info@work_dir + "/" - system("mkdir -p " + fullpath) - fname = basename(systemfunc("basename " + dataset_info@filename(imod))) - fname = fname + "_" + basename(diag_script) + "_cycle_latlon" - fname = fname + "_" + diag_script_info@time_avg - if (isatt(diag_script_info, "level")) then - fname = fname + "_" + diag_script_info@level ; FIX-ME - end if - fname = fname + "_" + str_sub_str(diag_script_info@region, " ", "") - procpath = fullpath + fname + ".nc" + ; Set path for saving processed data + system("mkdir -p " + config_user_info@work_dir) + fname = \ + str_join((/"perfmetrics", "cycle_latlon", annots(imod), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath = config_user_info@work_dir + fname + ".nc" ; Determine start/end year - start_year = get_start_year(imod) - end_year = get_end_year(imod) + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year ; Read data - var = read_data(imod, var0, field_type0) - dnames = getVarDimNames(var) + var = read_data(info_items[imod]) ; Extract region var_reg = area_operations(var, region(0), region(1), \ @@ -57,24 +55,16 @@ begin ; Standard deviation calculation for this ptype is not yet implemented in 
; diag_scripts/shared/statistics.ncl, but is anyway not required here - ; Write output + ; Store in global array var_all = new(array_append_record(dimsizes(var_avg), 2, 0), float) var_all!3 = "statistic" var_all&statistic = (/"mean", "stddev"/) var_all(:, :, :, 0) = var_avg var_all@var = var0 - var_all@diag_script = (/diag_script/) + var_all@diag_script = (/DIAG_SCRIPT/) copy_VarAtts(diag_script_info, var_all) - var_all@ref_model = variable_info@reference_dataset - if (config_user_info@write_netcdf.eq."True") then - var_all@ncdf = procpath - ncdf_outfile = ncdf_write(var_all, procpath) - end if - - ; Calculate grading - if (.not.diag_script_info@calc_grading) then - continue - end if + var_all@ref_model = variable_info[0]@reference_dataset + delete(var_avg) ; Store reference dataset if (imod.eq.ref_ind) then @@ -88,10 +78,33 @@ begin end if end if + ; Write output + if (config_user_info@write_netcdf) then + var_all@ncdf = workpath + ncdf = ncdf_write(var_all, workpath) + end if + + ; Call provenance logger + log_provenance(ncdf, \ + "n/a", \ + "n/a", \ + (/"mean", "stddev"/), \ + diag_script_info@region, \ + "other", \ + (/"fran_fr", "righ_ma", "eyri_ve"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) + + ; Calculate grading + if (.not.diag_script_info@calc_grading) then + continue + end if + ; Loop over requested metrics do met = 0, nmetrics - 1 if (diag_script_info@metric(met).eq."taylor") then + locidx = ind(datasetnames(imod).eq.taylor&models) if (ismissing(locidx)) then continue @@ -102,7 +115,19 @@ begin taylor(0, locidx, 1) = \ calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ "correlation") + + elseif (diag_script_info@metric(met).eq."SMPI") then + + locidx = ind(datasetnames(imod).eq.smpi&models) + if (ismissing(locidx)) then + continue + end if + smpi(:, locidx) = \ + (/calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + "SMPI")/) + else + locidx = ind(datasetnames(imod).eq.grading&models) if (ismissing(locidx)) then continue @@ -115,6 +140,7 @@ begin calculate_metric(var_all(:, :, :, 0), var_alt(:, :, :, 0), \ diag_script_info@metric(met)) end if + end if end do diff --git a/esmvaltool/diag_scripts/perfmetrics/cycle_zonal.ncl b/esmvaltool/diag_scripts/perfmetrics/cycle_zonal.ncl new file mode 100644 index 0000000000..2d4a84010d --- /dev/null +++ b/esmvaltool/diag_scripts/perfmetrics/cycle_zonal.ncl @@ -0,0 +1,149 @@ +; ############################################################################# +; PROCEDURE FOR THE CYCLE-ZONAL PLOT OF THE PERFORMANCE METRICS +; Author: Bettina Gier (University of Bremen, Germany) +; ESMVal project +; ############################################################################# + +load "$diag_scripts/shared/plot/style.ncl" + +procedure perfmetrics_ptype_script() +begin + + ; Set dataset indexes, with reference and alternative datasets first + modidx = ispan(0, nDatasets - 1, 1) + if (l_altern) then + modidx := array_append_record( \ + (/ref_ind, alt_ind/), \ + modidx(ind(modidx.ne.ref_ind .and. 
modidx.ne.alt_ind)), 0) + else + modidx := array_append_record(ref_ind, modidx(ind(modidx.ne.ref_ind)), 0) + end if + + annots = project_style(info_items, diag_script_info, "annots") + + ; Loop over datasets + do ii = 0, dimsizes(modidx) - 1 + + imod = modidx(ii) + + log_debug("Processing " + datasetnames(imod)) + + ; Set path for saving processed data + system("mkdir -p " + config_user_info@work_dir) + fname = \ + str_join((/"perfmetrics", "cycle_zonal", annots(imod), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath = config_user_info@work_dir + fname + ".nc" + + ; Determine start/end year + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year + + ; Read data + var = read_data(info_items[imod]) + + ; Calculate zonal mean + var_reg = dim_avg_Wrap(var) + delete(var) + + ; Calculate time average + var_avg = time_operations(var_reg, start_year, end_year, "average", \ + diag_script_info@time_avg, True) + delete(var_reg) + + ; Standard deviation calculation for this ptype is not yet implemented in + ; diag_scripts/shared/statistics.ncl, but is anyway not required here + + ; Store in global array + var_all = new(array_append_record(dimsizes(var_avg), 2, 0), float) + var_all!3 = "statistic" + var_all&statistic = (/"mean", "stddev"/) + var_all(:, :, :, 0) = var_avg + var_all@var = var0 + var_all@diag_script = (/DIAG_SCRIPT/) + copy_VarAtts(diag_script_info, var_all) + var_all@ref_model = variable_info[0]@reference_dataset + delete(var_avg) + + ; Store reference dataset + if (imod.eq.ref_ind) then + var_ref = var_all + end if + + ; Store alternative dataset + if (l_altern) then + if (imod.eq.alt_ind) then + var_alt = var_all + end if + end if + + ; Write output + if (config_user_info@write_netcdf) then + var_all@ncdf = workpath + ncdf = ncdf_write(var_all, workpath) + end if + + ; Call provenance logger + log_provenance(ncdf, \ + "n/a", \ + "n/a", \ + (/"mean", "rmsd"/), \ + diag_script_info@region, \ + "other", \ + (/"fran_fr", "righ_ma", "eyri_ve"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) + + ; Calculate grading + if (.not.diag_script_info@calc_grading) then + continue + end if + + ; Loop over requested metrics + do met = 0, nmetrics - 1 + + if (diag_script_info@metric(met).eq."taylor") then + + locidx = ind(datasetnames(imod).eq.taylor&models) + if (ismissing(locidx)) then + continue + end if + taylor(0, locidx, 0) = \ + calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + "stddev_ratio") + taylor(0, locidx, 1) = \ + calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + "correlation") + + elseif (diag_script_info@metric(met).eq."SMPI") then + + locidx = ind(datasetnames(imod).eq.smpi&models) + if (ismissing(locidx)) then + continue + end if + smpi(:, locidx) = \ + (/calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + "SMPI")/) + + else + + locidx = ind(datasetnames(imod).eq.grading&models) + if (ismissing(locidx)) then + continue + end if + grading(met, 0, locidx, 0) = \ + calculate_metric(var_all(:, :, :, 0), var_ref(:, :, :, 0), \ + diag_script_info@metric(met)) + if (l_altern) then + grading(met, 0, locidx, 1) = \ + calculate_metric(var_all(:, :, :, 0), var_alt(:, :, :, 0), \ + diag_script_info@metric(met)) + end if + + end if + + end do + + end do + +end diff --git a/esmvaltool/diag_scripts/perfmetrics/latlon.ncl b/esmvaltool/diag_scripts/perfmetrics/latlon.ncl index ba00d466c3..f740b29d4e 100644 --- a/esmvaltool/diag_scripts/perfmetrics/latlon.ncl +++ 
b/esmvaltool/diag_scripts/perfmetrics/latlon.ncl @@ -4,13 +4,18 @@ ; ESMVal project ; ############################################################################# +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/contour_maps.ncl" + procedure perfmetrics_ptype_script() begin ; Set dataset indexes, with reference dataset first - modidx = ispan(0, dim_MOD - 1, 1) + modidx = ispan(0, nDatasets - 1, 1) modidx := array_append_record(ref_ind, modidx(ind(modidx.ne.ref_ind)), 0) + annots = project_style(info_items, diag_script_info, "annots") + ; Loop over datasets do ii = 0, dimsizes(modidx) - 1 @@ -18,67 +23,95 @@ begin log_debug("Processing " + datasetnames(imod)) + ; Check if difference is possible + if (diag_script_info@plot_diff .and. imod.ne.ref_ind) then + l_diff = True + else + l_diff = False + end if + ; Set path for saving processed data - fullpath = config_user_info@work_dir + "/" - system("mkdir -p " + fullpath) - fname = basename(systemfunc("basename " + dataset_info@filename(imod))) - fname = fname + "_" + basename(diag_script) + "_latlon" - fname = fname + "_" + diag_script_info@time_avg - procpath = fullpath + fname + "_Global.nc" + system("mkdir -p " + config_user_info@work_dir) + fname_abs = \ + str_join((/"perfmetrics", "latlon", annots(imod), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath_abs = config_user_info@work_dir + fname_abs + ".nc" + plotpath_abs = config_user_info@plot_dir + fname_abs + fname_diff = \ + str_join((/"perfmetrics", "latlon", \ + annots(imod) + "-" + annots(ref_ind), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath_diff = config_user_info@work_dir + fname_diff + ".nc" + plotpath_diff = config_user_info@plot_dir + fname_diff ; Determine start/end year - start_year = get_start_year(imod) - end_year = get_end_year(imod) + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year ; Read data - var = read_data(imod, var0, field_type0) + var = read_data(info_items[imod]) dnames = getVarDimNames(var) ; Calculate time average and standard dev. w.r.t. 
interannual variability var_avg = time_operations(var, start_year, end_year, "average", \ diag_script_info@time_avg, True) var_std = interannual_variability(var, start_year, end_year, \ - diag_script_info@time_avg) + diag_script_info@time_avg, "None") delete(var) - ; Write output + ; Store in global array var_all = new(array_append_record(dimsizes(var_avg), 2, 0), float) var_all!2 = "statistic" var_all&statistic = (/"mean", "stddev"/) var_all(:, :, 0) = var_avg var_all(:, :, 1) = var_std var_all@var = var0 - var_all@diag_script = (/diag_script/) + var_all@diag_script = (/DIAG_SCRIPT/) copy_VarAtts(diag_script_info, var_all) - var_all@ref_model = variable_info@reference_dataset - if (config_user_info@write_netcdf.eq."True") then - var_all@ncdf = procpath - ncdf_outfile = ncdf_write(var_all, procpath) - end if + var_all@ref_model = variable_info[0]@reference_dataset + delete(var_avg) + delete(var_std) ; Store reference dataset if (imod.eq.ref_ind) then var_ref = var_all end if + ; Calculate difference + if (l_diff) then + var_diff = var_all(:, :, 0) + var_diff = var_all(:, :, 0) - var_ref(:, :, 0) + end if + + ; Write output + if (config_user_info@write_netcdf) then + var_all@ncdf = workpath_abs + ncdf_abs = ncdf_write(var_all, workpath_abs) + if (l_diff) then + var_diff@ncdf = workpath_diff + ncdf_diff = ncdf_write(var_diff, workpath_diff) + end if + end if + ; Plotting - if (.not.diag_script_info@draw_plots) then + if (.not.config_user_info@write_plots) then continue end if ; Convert units for plotting (if required) if (isatt(diag_script_info, "plot_units")) then var_all = convert_units(var_all, diag_script_info@plot_units) + if (l_diff) + var_diff = convert_units(var_diff, diag_script_info@plot_units) + end if end if ; Absolute plot - outfile = config_user_info@plot_dir + datasetnames(imod) + "_" + var0 + \ - "_latlon_" + diag_script_info@time_avg + "_Glob" plot_var = var_all(:, :, 0) plot_var@projection = diag_script_info@projection plot_var@res_cnLinesOn = False plot_var@res_cnLevelSelectionMode = "ExplicitLevels" - plot_var@res_tiMainString = datasetnames(imod) + plot_var@res_tiMainString = annots(imod) plot_var@res_cnLevels = diag_script_info@abs_levs plot_var@res_gsnLeftString = \ plot_var@long_name + " [" + format_units(plot_var@units) + "]" @@ -87,26 +120,33 @@ begin plot_var@res_gsnRightString = \ sprintf("%5.2f", gavg) + " " + format_units(plot_var@units) end if - wks = gsn_open_wks(file_type, outfile) + wks = gsn_open_wks(file_type, plotpath_abs) gsn_define_colormap(wks, diag_script_info@latlon_cmap) plot = contour_map_ce(wks, plot_var, var0) draw(plot) frame(wks) - log_info(" gv " + outfile + "." + file_type) delete(plot_var@res_cnLevels) delete(wks) delete(plot) + ; Call provenance logger + log_provenance(ncdf_abs, \ + plotpath_abs + "." + file_type, \ + "Geographical distribution of variable " + var0, \ + (/"mean", "rmsd"/), \ + diag_script_info@region, \ + "geo", \ + (/"fran_fr", "righ_ma", "eyri_ve"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) + ; Difference plot to the reference - if (.not.diag_script_info@plot_diff .or. 
imod.eq.ref_ind) then + if (.not.l_diff) then continue end if - plot_var = var_all(:, :, 0) - var_ref(:, :, 0) - outfile = config_user_info@plot_dir + datasetnames(imod) + "-" + \ - datasetnames(ref_ind) + "_" + var0 + "_latlon_" + \ - diag_script_info@time_avg + "_Glob" - plot_var@res_tiMainString = \ - datasetnames(imod) + " - " + datasetnames(ref_ind) + plot_var = var_diff + plot_var@projection = diag_script_info@projection + plot_var@res_tiMainString = annots(imod) + " - " + annots(ref_ind) if (isatt(diag_script_info, "t_test")) then plot_var@res_gsnLeftString = plot_var@res_gsnLeftString + " - " + \ sprinti("%2i", toint(100 * diag_script_info@conf_level)) + "% c.l." @@ -115,60 +155,72 @@ begin plot_var@res_cnLevels = diag_script_info@diff_levs plot_var@res_cnMissingValFillColor = "gray70" plot_var@res_gsnSpreadColorEnd = -2 - plot_var@projection = diag_script_info@projection if (diag_script_info@show_global_avg) then ; append global average gavg = area_operations(plot_var, -90., 90., 0., 360., "average", True) plot_var@res_gsnRightString = \ sprintf("%5.2f", gavg) + " " + format_units(plot_var@units) end if - wks = gsn_open_wks(file_type, outfile) + wks = gsn_open_wks(file_type, plotpath_diff) gsn_merge_colormaps(wks, "temp_19lev", "gray70") plot = contour_map_ce(wks, plot_var, var0) delete(plot_var@res_cnLevels) ; Apply t-test (assuming different population variances) - if (.not.diag_script_info@t_test) then - continue - end if - x1 = var_all(:, :, 0) - x2 = var_ref(:, :, 0) - s1 = var_all(:, :, 1) ^ 2 - s2 = var_ref(:, :, 1) ^ 2 - n1 = get_end_year(imod) - get_start_year(imod) + 1 - n2 = get_end_year(ref_ind) - get_start_year(ref_ind) + 1 - prob = ttest(x1, s1, n1, x2, s2, n2, True, False) - mask_var = 1. - prob - copy_VarCoords(plot_var, mask_var) - delete(prob) - - ; Mask non-significant values - res2 = True - res2@cnLevelSelectionMode = "ExplicitLevels" - res2@cnLevels = (/0.0, diag_script_info@conf_level, 1.0/) - res2@cnInfoLabelOn = False - res2@cnLinesOn = False - res2@cnLineLabelsOn = False - res2@gsnDraw = False - res2@gsnFrame = False - plot2 = gsn_csm_contour(wks, mask_var, res2) - opt = True - if (diag_script_info@stippling) then - opt@gsnShadeFillType = "pattern" - opt@gsnShadeLow = 17 - else - opt@gsnShadeFillType = "color" - opt@gsnShadeLow = "gray70" + caption = "Difference in geographical distribution of variable " + var0 + if (diag_script_info@t_test) then + x1 = var_all(:, :, 0) + x2 = var_ref(:, :, 0) + s1 = var_all(:, :, 1) ^ 2 + s2 = var_ref(:, :, 1) ^ 2 + n1 = info_items[imod]@end_year - info_items[imod]@start_year + 1 + n2 = info_items[ref_ind]@end_year - info_items[ref_ind]@start_year + 1 + prob = ttest(x1, s1, n1, x2, s2, n2, True, False) + mask_var = 1. 
- prob + copy_VarCoords(plot_var, mask_var) + delete(prob) + + ; Mask non-significant values + res2 = True + res2@cnLevelSelectionMode = "ExplicitLevels" + res2@cnLevels = (/0.0, diag_script_info@conf_level, 1.0/) + res2@cnInfoLabelOn = False + res2@cnLinesOn = False + res2@cnLineLabelsOn = False + res2@gsnDraw = False + res2@gsnFrame = False + plot2 = gsn_csm_contour(wks, mask_var, res2) + opt = True + if (diag_script_info@stippling) then + opt@gsnShadeFillType = "pattern" + opt@gsnShadeLow = 17 + caption = caption + " (non-significant points are stippled)" + else + opt@gsnShadeFillType = "color" + opt@gsnShadeLow = "gray70" + caption = caption + " (non-significant points are masked out in gray)" + end if + plot2 = gsn_contour_shade(plot2, diag_script_info@conf_level, 999., opt) + overlay(plot, plot2) + draw(plot) + frame(wks) + delete(wks) + delete(plot) + delete(plot2) + delete(mask_var) + delete(plot_var) + end if - plot2 = gsn_contour_shade(plot2, diag_script_info@conf_level, 999., opt) - overlay(plot, plot2) - draw(plot) - frame(wks) - log_info(" gv " + outfile + "." + file_type) - delete(wks) - delete(plot) - delete(plot2) - delete(mask_var) - delete(plot_var) + + ; Call provenance logger + log_provenance(ncdf_diff, \ + plotpath_diff + "." + file_type, \ + caption, \ + (/"mean", "rmsd", "diff"/), \ + diag_script_info@region, \ + "zonal", \ + (/"fran_fr", "righ_ma", "eyri_ve"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) end do diff --git a/esmvaltool/diag_scripts/perfmetrics/main.ncl b/esmvaltool/diag_scripts/perfmetrics/main.ncl index 0f05f854cf..c836ce5ce1 100644 --- a/esmvaltool/diag_scripts/perfmetrics/main.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/main.ncl @@ -3,10 +3,11 @@ ; Authors: Mattia Righi (DLR, Germany) and Franziska Frank (DLR, Germany) ; ESMVal project ; ############################################################################# +; ; Description ; Calculates and (optionally) plots annual/seasonal cycles, zonal means, -; lat-lon fields and time-lat-lon fields from input T3M or T2Ms data. -; The calculated fields can be also plotted as difference w.r.t. a given +; lat-lon fields and time-lat-lon fields from input 2D/3D monthly data. +; The calculated fields can be plotted as difference w.r.t. a given ; reference dataset. It also calculates grading and taylor metrics. ; Input data have to be regridded to a common grid in the preprocessor. ; @@ -17,19 +18,17 @@ ; diag_scripts/shared/statistics.ncl) ; region: selected region (see select_region in ; diag_scripts/shared/latlon.ncl) +; +; Optional diag_script_info attributes ; styleset (for cycle): as in diag_scripts/shared/plot/style.ncl functions ; plot_stddev (for cycle): plot standard deviation ; legend_outside (for cycle): save legend in a separate file -; -; Optional diag_script_info attributes ; t_test (for zonal and latlon): calculate t-test in difference plots ; (default: False) ; conf_level (for zonal and latlon): confidence level for the t-test ; (default: False) -; range_option: time range selection option (default: 0) ; projection: map projection for lat-lon plots (default: ; CylindricalEquidistant) -; draw_plots: draw plots (default: True) ; plot_diff: draw difference plots (default: False) ; calc_grading: calculate grading (default: False) ; stippling: use stippling to mark stat. significant differences (default: @@ -44,13 +43,18 @@ ; zonal_ymin (for zonal): minimum pressure on the plots (default: 5. 
hPa) ; latlon_cmap (for latlon): color table (default: "amwg_blueyellowred") ; plot_units: plotting units (if different from standard CMOR units) -; aux_info: additional information for stations data -; location: additional information for stations data (location) -; altitude: additional information for stations data (altitude) +; +; Required variable_info attributes: +; reference_dataset: reference dataset to compare with (usually observations) +; +; Optional variable_info attributes: +; alternative_dataset: a second dataset to compare with ; ; Caveats ; ; Modification history +; 20190405-A_righ_ma: added provenance logging +; 20190315-A_hass_bg: extended to smpi metric ; 20180503-A_righ_ma: completely rewritten and modularized ; 20171215-A_righ_ma: merged with perfmetrics_grading and ; perfmetrics_taylor.ncl @@ -82,38 +86,24 @@ ; ; ############################################################################# -load "interface_scripts/interface.ncl" +load "$diag_scripts/../interface_scripts/interface.ncl" -load "./diag_scripts/shared/latlon.ncl" -load "./diag_scripts/shared/statistics.ncl" -load "./diag_scripts/shared/regridding.ncl" -load "./diag_scripts/shared/ensemble.ncl" -load "./diag_scripts/shared/scaling.ncl" +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/regridding.ncl" +load "$diag_scripts/shared/ensemble.ncl" +load "$diag_scripts/shared/scaling.ncl" -load "./diag_scripts/shared/plot/style.ncl" -load "./diag_scripts/shared/plot/xy_line.ncl" -load "./diag_scripts/shared/plot/zonalmean_profile.ncl" -load "./diag_scripts/shared/plot/contour_maps.ncl" +load "$diag_scripts/shared/plot/style.ncl" begin - enter_msg(diag_script, "") - log_info("++++++++++++++++++++++++++++++++++++++++++") - log_info(diag_script + " (var: " + variables(0) + ")") - log_info("++++++++++++++++++++++++++++++++++++++++++") + enter_msg(DIAG_SCRIPT, "") - dim_MOD = dimsizes(dataset_info@dataset) - dim_VAR = dimsizes(variables) - var0 = variables(0) - field_type0 = field_types(0) - - ; Write references ; FIX-ME to be replaced by new method - write_references(diag_script, "A_fran_fr", \ - (/"A_righ_ma", "A_eyri_ve", "A_gott_kl", "A_senf_da"/), \ - (/"D_righi15gmd", "D_gleckler08jgr"/), \ - (/"E_ncep", "E_erainterim", "E_airs", "E_ceresebaf", \ - "E_srb"/), \ - (/"P_embrace", "P_esmval"/)) + ; Get variables and datasets + var0 = variable_info[0]@short_name + info_items = select_metadata_by_name(input_file_info, var0) + nDatasets = ListCount(info_items) ; Check required diag_script_info attributes exit_if_missing_atts(diag_script_info, (/"plot_type", "time_avg", "region"/)) @@ -124,8 +114,9 @@ begin ; Store required attributes ptype = diag_script_info@plot_type - if (all(ptype.ne.(/"cycle", "zonal", "latlon", "cycle_latlon"/))) then - error_msg("f", diag_script, "", "plot_type " + ptype + " is not a " + \ + if (all(ptype.ne. \ + (/"cycle", "zonal", "latlon", "cycle_latlon", "cycle_zonal"/))) then + error_msg("f", DIAG_SCRIPT, "", "plot_type " + ptype + " is not a " + \ "supported plot_type in this diagnostic") end if @@ -136,28 +127,13 @@ begin end if if ((ptype.eq."zonal" .or. ptype.eq."latlon") .and. \ - diag_script_info@region.ne."Global") then - error_msg("f", diag_script, "", "plot_type " + ptype + \ - " implemented only for region='Global'") - end if - - ; Check valid field - if ((ptype.eq."zonal" .and. \ - all(field_type0.ne.(/"T3M", "T2Mz"/))) .or. \ - (ptype.eq."cycle" .and. \ - all(field_type0.ne.(/"T3M", "T2Ms", "T1M", "T0Ms"/))) .or. 
\ - (ptype.eq."latlon" .and. \ - all(field_type0.ne.(/"T3M", "T2Ms"/))) .or. \ - (ptype.eq."cycle_latlon" .and. \ - all(field_type0.ne.(/"T3M", "T2Ms"/)))) then - error_msg("f", diag_script, "", "input field " + field_type0 + \ - " is not compatible with plot_type " + ptype) + diag_script_info@region.ne."global") then + error_msg("f", DIAG_SCRIPT, "", "plot_type " + ptype + \ + " implemented only for region 'global'") end if ; Set default values for non-required diag_script_info attributes - set_default_att(diag_script_info, "range_option", 0) set_default_att(diag_script_info, "projection", "CylindricalEquidistant") - set_default_att(diag_script_info, "draw_plots", True) set_default_att(diag_script_info, "plot_diff", False) set_default_att(diag_script_info, "calc_grading", False) set_default_att(diag_script_info, "stippling", False) @@ -169,10 +145,10 @@ begin ; Check consistency of diff plots settings if (diag_script_info@t_test .and. .not.diag_script_info@plot_diff) then - error_msg("f", diag_script, "", "plot_diff must be True to apply t-test") + error_msg("f", DIAG_SCRIPT, "", "plot_diff must be True to apply t-test") end if if (diag_script_info@t_test .and. .not.diag_script_info@conf_level) then - error_msg("f", diag_script, "", \ + error_msg("f", DIAG_SCRIPT, "", \ "conf_level must be specified to apply t-test") end if @@ -181,38 +157,36 @@ begin exit_if_missing_atts(diag_script_info, (/"metric", "normalization"/)) if (dimsizes(diag_script_info@metric).ne.\ dimsizes(diag_script_info@normalization)) then - error_msg("f", diag_script, "", "normalization must be " + \ + error_msg("f", DIAG_SCRIPT, "", "normalization must be " + \ "provided for each requested metric") end if end if - ; Unique names for datasets - datasetnames = project_style(diag_script_info, "annots") + ; Set dataset names + datasetnames = metadata_att_as_array(info_items, "dataset") + + ; Save list of preproc files for provenance in collect.ncl + preproc_files = metadata_att_as_array(info_items, "filename") ; Check for reference dataset definition - if (variable_info@reference_dataset.eq."None") then - error_msg("f", diag_script, "", "no reference dataset is specified") + if (variable_info[0]@reference_dataset.eq."None") then + error_msg("f", DIAG_SCRIPT, "", "no reference dataset is specified") end if ; Set index of the reference (and alternative) dataset - if (isatt(variable_info, "alternative_dataset")) then + l_altern = False + nobs = 1 + ref_ind = ind(datasetnames.eq.variable_info[0]@reference_dataset) + ref_inds = ref_ind + if (isatt(variable_info[0], "alternative_dataset")) then l_altern = True nobs = 2 - ref_inds = get_ref_dataset_idx(dataset_info, \ - (/variable_info@reference_dataset, \ - variable_info@alternative_dataset/)) - ref_ind = ref_inds(0) - alt_ind = ref_inds(1) - else - l_altern = False - nobs = 1 - ref_inds = \ - get_ref_dataset_idx(dataset_info, variable_info@reference_dataset) - ref_ind = ref_inds + alt_ind = ind(datasetnames.eq.variable_info[0]@alternative_dataset) + ref_inds := (/ref_ind, alt_ind/) end if ; Create output plot directory - if (diag_script_info@draw_plots) then + if (config_user_info@write_plots) then plot_dir = config_user_info@plot_dir system("mkdir -p " + plot_dir) end if @@ -226,48 +200,18 @@ begin ; Grading settings if (diag_script_info@calc_grading) then - ; Define variable name - region_name = "" ; priority 3 - location = "" ; priority 3 - if(isatt(diag_script_info, "aux_info")) then - region_name = "_" + diag_script_info@aux_info ; priority 2 - location = " - " + 
diag_script_info@aux_info ; priority 2 - end if - if(isatt(diag_script_info, "location")) then - location = " - " + diag_script_info@location ; priority 1 - end if - if(isatt(diag_script_info, "region")) then - region_name = "_" + region@name ; priority 1 - location = " - " + diag_script_info@region - end if - altitude = "" ; priority 2 - if(any(field_type0.eq.(/"T0M", "T0Ms"/))) then - if(isatt(diag_script_info, "altitude")) then - altitude = " - " + diag_script_info@altitude ; priority 1 - end if - else - if(isatt(diag_script_info, "level")) then - altitude = " - " + diag_script_info@level + " hPa" ; priority 1 - end if - end if - var0_grade = var0 - var0_grade = var0_grade + region_name - if(isatt(diag_script_info, "level").and. \ - any(field_type0.eq.(/"T3M", "T1M"/))) then - var0_grade = var0_grade + "-" + diag_script_info@level ; FIX-ME - end if - ; Define grading arrays nmetrics = dimsizes(diag_script_info@metric) ncdf_dir = new(nmetrics, string) - ndatasets = dimsizes(datasetnames) - nobs + nModels = dimsizes(datasetnames) - nobs - grading = new((/nmetrics, 1, ndatasets, nobs/), float) + grading = new((/nmetrics, 1, nModels, nobs/), float) grading!0 = "metric" grading!1 = "diagnostics" ; dummy coord. to facilitate appending grading!2 = "models" grading!3 = "reference" - grading&diagnostics = var0_grade + grading&diagnostics = \ + variable_info[0]@diagnostic + "-" + diag_script_info@region grading&models = remove_index(datasetnames, ref_inds) if (isdim(grading, "reference")) then grading&reference = datasetnames(ref_inds) @@ -275,27 +219,38 @@ begin ; Special case Taylor if (any(diag_script_info@metric.eq."taylor")) then - ndatasets = dimsizes(datasetnames) - 1 ; always 1 reference dataset - taylor = new((/1, ndatasets, 2/), float) + nModels = dimsizes(datasetnames) - 1 ; always 1 reference dataset + taylor = new((/1, nModels, 2/), float) taylor!0 = "diagnostics" ; dummy coord. to facilitate appending taylor!1 = "models" taylor!2 = "statistic" - taylor&diagnostics = var0_grade + taylor&diagnostics = \ + variable_info[0]@diagnostic + "-" + diag_script_info@region taylor&statistic = (/"stddev_ratio", "correlation"/) taylor&models = remove_index(datasetnames, ref_ind) end if + ; Special case SMPI + if (any(diag_script_info@metric.eq."SMPI")) then + nModels = dimsizes(datasetnames) - 1 ; always 1 reference model + smpi = new((/diag_script_info@smpi_n_bootstrap + 1, nModels/), float) + smpi!0 = "bootstrap_member" + smpi!1 = "models" + smpi&bootstrap_member = ispan(0, diag_script_info@smpi_n_bootstrap, 1) + smpi&models = remove_index(datasetnames, ref_ind) + end if + ; Define grading filename do met = 0, nmetrics - 1 - ncdf_dir(met) = config_user_info@work_dir + "/" + \ - diag_script_info@metric(met) + "_" + var0_grade + ".nc" + ncdf_dir(met) = config_user_info@work_dir + \ + diag_script_info@metric(met) + "_" + \ + variable_info[0]@diagnostic + "-" + diag_script_info@region + ".nc" end do end if ; Load plot-type-specific script - print("diag_scripts/perfmetrics/" + ptype + ".ncl") - loadscript("diag_scripts/perfmetrics/" + ptype + ".ncl") + loadscript("$diag_scripts/perfmetrics/" + ptype + ".ncl") end @@ -316,7 +271,8 @@ begin end if ; Apply normalization - if (diag_script_info@metric(met).ne."taylor") then + if (diag_script_info@metric(met).ne."taylor" .and. 
\ + diag_script_info@metric(met).ne."SMPI") then do iobs = 0, nobs - 1 metric(:, :, iobs) = \ normalize_metric(metric(:, :, iobs), \ @@ -324,29 +280,77 @@ begin end do end if + ; Special case, SMPI normalization + if (diag_script_info@metric(met).eq."SMPI") then + ens_idx = new(dimsizes(smpi&models), integer) + atts = True + atts@project = diag_script_info@normalization(met) + info = select_metadata_by_atts(input_file_info, atts) + delete(atts) + + do ii = 0, dimsizes(ens_idx) - 1 + if (dimsizes(info).ne.0) then + ens_idx(ii) = ii + end if + end do + if (all(ismissing(ens_idx))) then + error_msg("f", DIAG_SCRIPT, "", "No datasets for the selected " + \ + "normalization (" + diag_script_info@normalization(met) + \ + ") found") + end if + ens_idx := ens_idx(ind(.not.ismissing(ens_idx))) + ; Apply normalization + do iboot = 0, dimsizes(smpi&bootstrap_member)-1 + smpi(iboot, :) = smpi(iboot, :) / avg(smpi(iboot, ens_idx)) + end do + end if + ; Reduce dimensionality if no alternative dataset if (.not.l_altern .and. diag_script_info@metric(met).ne."taylor") then metric := metric(:, :, 0) delete(metric@reference) end if - ; Attach attributes to the results - metric@title = diag_script_info@metric(met) + " metric" - metric@long_name = \ - "Grading table of metric " + diag_script_info@metric(met) - metric@metric = diag_script_info@metric(met) - metric@diag_script = (/diag_script/) - metric@var = "grade" - metric@region = location - metric@num_climofiles = dimsizes(dataset_info@dataset) ; FIX-ME ? - do imod = 0, dimsizes(dataset_info@dataset) - 1 - num_climo = "climofile_" + imod - metric@$num_climo$ = input_file_info@filename(imod) - end do - metric@ncdf_dir = ncdf_dir(met) - - ; Write NetCDF output - ncdf_outfile = ncdf_write(metric, metric@ncdf_dir) + ; Attach attributes to results and write NetCDF output + if (diag_script_info@metric(met).eq."SMPI") then + + smpi@title = "metrics" + smpi@long_name = "1 variable's Performance Index for " + \ + "the Single Model Performance Index" + smpi@metric = diag_script_info@metric(met) + smpi@diag_script = (/DIAG_SCRIPT/) + smpi@var = "performance_index" + smpi@invar = var0 + smpi@region = region@name + smpi@ensemble_name = diag_script_info@normalization(met) + smpi@num_preproc_files = dimsizes(preproc_files) + do imod = 0, smpi@num_preproc_files - 1 + num_preproc = "preproc_file_" + imod + smpi@$num_preproc$ = preproc_files(imod) + end do + smpi@ncdf_dir = ncdf_dir(met) + + ncdf_outfile = ncdf_write(smpi, smpi@ncdf_dir) + + else + + metric@title = diag_script_info@metric(met) + " metric" + metric@long_name = \ + "Grading table of metric " + diag_script_info@metric(met) + metric@metric = diag_script_info@metric(met) + metric@diag_script = (/DIAG_SCRIPT/) + metric@var = "grade" + metric@region = region@name + metric@num_preproc_files = dimsizes(preproc_files) + do imod = 0, metric@num_preproc_files - 1 + num_preproc = "preproc_file_" + imod + metric@$num_preproc$ = preproc_files(imod) + end do + metric@ncdf_dir = ncdf_dir(met) + + ncdf_outfile = ncdf_write(metric, metric@ncdf_dir) + + end if ; Write results of temporary grading list temp_dir = config_user_info@work_dir + "/" + \ @@ -375,6 +379,6 @@ begin end if - leave_msg(diag_script, "") + leave_msg(DIAG_SCRIPT, "") end diff --git a/esmvaltool/diag_scripts/perfmetrics/zonal.ncl b/esmvaltool/diag_scripts/perfmetrics/zonal.ncl index 18852b27cc..6550a6ae35 100644 --- a/esmvaltool/diag_scripts/perfmetrics/zonal.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/zonal.ncl @@ -4,34 +4,52 @@ ; ESMVal project ; 
############################################################################# +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/zonalmean_profile.ncl" + procedure perfmetrics_ptype_script() begin ; Start loop over datasets, make sure reference dataset is calculated first - modidx = ispan(0, dim_MOD - 1, 1) + modidx = ispan(0, nDatasets - 1, 1) modidx := array_append_record(ref_ind, modidx(ind(modidx.ne.ref_ind)), 0) + annots = project_style(info_items, diag_script_info, "annots") + + ; Loop over datasets do ii = 0, dimsizes(modidx) - 1 imod = modidx(ii) log_debug("Processing " + datasetnames(imod)) + ; Check if difference is possible + if (diag_script_info@plot_diff .and. imod.ne.ref_ind) then + l_diff = True + else + l_diff = False + end if + ; Set path for saving processed data - fullpath = config_user_info@work_dir + "/" - system("mkdir -p " + fullpath) - fname = basename(systemfunc("basename " + dataset_info@filename(imod))) - fname = fname + "_" + basename(diag_script) + "_zonal" - fname = fname + "_" + diag_script_info@time_avg - procpath = fullpath + fname + "_Global.nc" + system("mkdir -p " + config_user_info@work_dir) + fname_abs = \ + str_join((/"perfmetrics", "zonal", annots(imod), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath_abs = config_user_info@work_dir + fname_abs + ".nc" + plotpath_abs = config_user_info@plot_dir + fname_abs + fname_diff = \ + str_join((/"perfmetrics", "zonal", \ + annots(imod) + "-" + annots(ref_ind), var0, \ + diag_script_info@time_avg, diag_script_info@region/), "_") + workpath_diff = config_user_info@work_dir + fname_diff + ".nc" + plotpath_diff = config_user_info@plot_dir + fname_diff ; Determine start/end year - start_year = get_start_year(imod) - end_year = get_end_year(imod) + start_year = info_items[imod]@start_year + end_year = info_items[imod]@end_year ; Read data - var = read_data(imod, var0, field_type0) - dnames = getVarDimNames(var) + var = read_data(info_items[imod]) ; Calculate zonal mean var_zon = dim_avg_Wrap(var) @@ -42,69 +60,90 @@ begin var_avg = time_operations(var_zon, start_year, end_year, "average", \ diag_script_info@time_avg, True) var_std = interannual_variability(var_zon, start_year, end_year, \ - diag_script_info@time_avg) + diag_script_info@time_avg, "None") delete(var_zon) - ; Write output + ; Store in global array var_all = new(array_append_record(dimsizes(var_avg), 2, 0), float) var_all!2 = "statistic" var_all&statistic = (/"mean", "stddev"/) var_all(:, :, 0) = var_avg var_all(:, :, 1) = var_std var_all@var = var0 - var_all@diag_script = (/diag_script/) + var_all@diag_script = (/DIAG_SCRIPT/) copy_VarAtts(diag_script_info, var_all) - var_all@ref_model = variable_info@reference_dataset - if (config_user_info@write_netcdf.eq."True") then - var_all@ncdf = procpath - ncdf_outfile = ncdf_write(var_all, procpath) - end if + var_all@ref_model = variable_info[0]@reference_dataset + delete(var_avg) + delete(var_std) ; Store reference dataset if (imod.eq.ref_ind) then var_ref = var_all end if + ; Calculate difference + if (l_diff) then + var_diff = var_all(:, :, 0) + var_diff = var_all(:, :, 0) - var_ref(:, :, 0) + end if + + ; Write output + if (config_user_info@write_netcdf) then + var_all@ncdf = workpath_abs + ncdf_abs = ncdf_write(var_all, workpath_abs) + if (l_diff) then + var_diff@ncdf = workpath_diff + ncdf_diff = ncdf_write(var_diff, workpath_diff) + end if + end if + ; Plotting - if (.not.diag_script_info@draw_plots) then + if (.not.config_user_info@write_plots) 
then continue end if ; Convert units for plotting (if required) if (isatt(diag_script_info, "plot_units")) then var_all = convert_units(var_all, diag_script_info@plot_units) + if (l_diff) + var_diff = convert_units(var_diff, diag_script_info@plot_units) + end if end if ; Absolute plot - outfile = config_user_info@plot_dir + datasetnames(imod) + "_" + var0 + \ - "_zonal_" + diag_script_info@time_avg + "_Glob" plot_var = var_all(:, :, 0) - plot_var@res_tiMainString = datasetnames(imod) + plot_var@res_tiMainString = annots(imod) plot_var@res_cnLevelSelectionMode = "ExplicitLevels" plot_var@res_cnLevels = diag_script_info@abs_levs plot_var@res_trYMinF = diag_script_info@zonal_ymin plot_var@res_gsnLeftString = \ plot_var@long_name + " [" + format_units(plot_var@units) + "]" - wks = gsn_open_wks(file_type, outfile) + wks = gsn_open_wks(file_type, plotpath_abs) gsn_define_colormap(wks, diag_script_info@zonal_cmap) plot = zonalmean_profile(wks, plot_var, var0) draw(plot) frame(wks) - log_info(" gv " + outfile + "." + file_type) delete(plot_var@res_cnLevels) delete(wks) delete(plot) + ; Call provenance logger + log_provenance(ncdf_abs, \ + plotpath_abs + "." + file_type, \ + "Zonal mean of variable " + var0, \ + (/"mean", "stddev"/), \ + diag_script_info@region, \ + "zonal", \ + (/"fran_fr", "righ_ma", "eyri_ve"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) + ; Difference plot to the reference - if (.not.diag_script_info@plot_diff .or. imod.eq.ref_ind) then + if (.not.l_diff) then continue end if - plot_var = var_all(:, :, 0) - var_ref(:, :, 0) - outfile = config_user_info@plot_dir + datasetnames(imod) + "-" + \ - datasetnames(ref_ind) + "_" + var0 + "_zonal_" + \ - diag_script_info@time_avg + "_Glob" - plot_var@res_tiMainString = \ - datasetnames(imod) + " - " + datasetnames(ref_ind) + plot_var = var_diff + plot_var@res_tiMainString = annots(imod) + " - " + annots(ref_ind) if (isatt(diag_script_info, "t_test")) then plot_var@res_gsnLeftString = plot_var@res_gsnLeftString + " - " + \ sprinti("%2i", toint(100 * diag_script_info@conf_level)) + "% c.l." @@ -113,52 +152,66 @@ begin plot_var@res_cnLevels = diag_script_info@diff_levs plot_var@res_cnMissingValFillColor = "gray70" plot_var@res_gsnSpreadColorEnd = -2 - wks = gsn_open_wks(file_type, outfile) + wks = gsn_open_wks(file_type, plotpath_diff) gsn_merge_colormaps(wks, "temp_19lev", "gray70") plot = zonalmean_profile(wks, plot_var, var0) delete(plot_var@res_cnLevels) ; Apply t-test (assuming different population variances) - if (.not.diag_script_info@t_test) then - continue - end if - x1 = var_all(:, :, 0) - x2 = var_ref(:, :, 0) - s1 = var_all(:, :, 1) ^ 2 - s2 = var_ref(:, :, 1) ^ 2 - n1 = get_end_year(imod) - get_start_year(imod) + 1 - n2 = get_end_year(ref_ind) - get_start_year(ref_ind) + 1 - prob = ttest(x1, s1, n1, x2, s2, n2, True, False) - plot_var = 1. 
- prob - delete(prob) - - ; Mask non-significant values - plot_var@res_cnMissingValFillColor = -1 - plot_var@res_gsnDraw = False - plot_var@res_gsnFrame = False - plot_var@res_cnLevelSelectionMode = "ExplicitLevels" - plot_var@res_cnLevels := diag_script_info@conf_level - if (diag_script_info@stippling) then - plot_var@res_cnFillColors = (/"transparent", "black"/) - plot_var@res_cnFillPattern = 17 - else - plot_var@res_cnFillColors = (/"transparent", "gray70"/) + caption = "Difference in zonal mean of variable " + var0 + if (diag_script_info@t_test) then + + x1 = var_all(:, :, 0) + x2 = var_ref(:, :, 0) + s1 = var_all(:, :, 1) ^ 2 + s2 = var_ref(:, :, 1) ^ 2 + n1 = info_items[imod]@end_year - info_items[imod]@start_year + 1 + n2 = info_items[ref_ind]@end_year - info_items[ref_ind]@start_year + 1 + prob = ttest(x1, s1, n1, x2, s2, n2, True, False) + plot_var = 1. - prob + delete(prob) + + ; Mask non-significant values + plot_var@res_cnMissingValFillColor = -1 + plot_var@res_gsnDraw = False + plot_var@res_gsnFrame = False + plot_var@res_cnLevelSelectionMode = "ExplicitLevels" + plot_var@res_cnLevels := diag_script_info@conf_level + if (diag_script_info@stippling) then + plot_var@res_cnFillColors = (/"transparent", "black"/) + plot_var@res_cnFillPattern = 17 + caption = caption + " (non-significant points are stippled)" + else + plot_var@res_cnFillColors = (/"transparent", "gray70"/) + caption = caption + " (non-significant points are masked out in gray)" + end if + plot_var@res_cnInfoLabelOn = False + plot_var@res_cnLinesOn = False + plot_var@res_cnLineLabelsOn = False + plot_var@res_lbLabelBarOn = False + plot_var@res_gsnLeftString = "" + delete(plot_var@long_name) + plot2 = zonalmean_profile(wks, plot_var, var0) + overlay(plot, plot2) + draw(plot) + frame(wks) + delete(wks) + delete(plot) + delete(plot2) + delete(plot_var) + end if - plot_var@res_cnInfoLabelOn = False - plot_var@res_cnLinesOn = False - plot_var@res_cnLineLabelsOn = False - plot_var@res_lbLabelBarOn = False - plot_var@res_gsnLeftString = "" - delete(plot_var@long_name) - plot2 = zonalmean_profile(wks, plot_var, var0) - overlay(plot, plot2) - draw(plot) - frame(wks) - log_info(" gv " + outfile + "." + file_type) - delete(wks) - delete(plot) - delete(plot2) - delete(plot_var) + + ; Call provenance logger + log_provenance(ncdf_diff, \ + plotpath_diff + "." + file_type, \ + caption, \ + (/"mean", "stddev", "diff"/), \ + diag_script_info@region, \ + "zonal", \ + (/"fran_fr", "righ_ma", "eyri_ve"/), \ + (/"righi15gmd", "gleckler08jgr"/), \ + info_items[imod]@filename) end do diff --git a/esmvaltool/diag_scripts/quantilebias/quantilebias.R b/esmvaltool/diag_scripts/quantilebias/quantilebias.R new file mode 100644 index 0000000000..a0b27a1e9f --- /dev/null +++ b/esmvaltool/diag_scripts/quantilebias/quantilebias.R @@ -0,0 +1,174 @@ +# ############################################################################# +# quantilebias.r +# Authors: E. Arnone (ISAC-CNR, Italy) +# S. Terzago (ISAC-CNR, Italy) +# J. von Hardenberg (ISAC-CNR, Italy) +# ############################################################################# +# Description +# ESMValTool diagnostic for calculation of the precipitation quantile bias +# following Mehran et al. (2014) +# +# Required +# - CDO +# - observational monthly mean global precipitation climatology +# - (e.g. 
from the GPCP project: http://gpcp.umd.edu) +# +# Optional +# +# Caveats +# +# Modification history +# 20180926-A_arno_en: Refined for usage as recipe +# 20180518-A_arno_en: Written for v2.0 +# +# ############################################################################# + +library(tools) +library(yaml) + +cdo <- function(command, args = "", input = "", options = "", output = "", + stdout = "", noout = F) { + if (args != "") args <- paste0(",", args) + if (stdout != "") stdout <- paste0(" > '", stdout, "'") + if (input[1] != "") { + for (i in 1:length(input)) { + input[i] <- paste0("'", input[i], "'") + } + input <- paste(input, collapse = " ") + } + output0 <- output + if (output != "") { + output <- paste0("'", output, "'") + } else if ( !noout ) { + output <- tempfile() + output0 <- output + } + argstr <- paste0(options, " ", command, args, " ", input, " ", output, + " ", stdout) + print(paste("cdo", argstr)) + ret <- system2("cdo", args = argstr) + if (ret != 0) { + stop(paste("Failed (", ret, "): cdo", argstr)) + } + return(output0) +} + + +# read settings and metadata files +args <- commandArgs(trailingOnly = TRUE) +settings <- yaml::read_yaml(args[1]) +metadata <- yaml::read_yaml(settings$input_files) +for (myname in names(settings)) { + temp <- get(myname, settings) + assign(myname, temp) +} + +# get name of climofile and list associated to first climofile +climofiles <- names(metadata) +climolist <- get(climofiles[1], metadata) + +# say hi +diag_base <- climolist$diagnostic +print(paste0(diag_base, ": starting routine")) + +# get variable name +varname <- climolist$short_name + +# create working dirs if they do not exist +dir.create(work_dir, recursive = T, showWarnings = F) +setwd(work_dir) + +# setup provenance file and list +provenance_file <- paste0(run_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +# extract metadata +models_name <- unname(sapply(metadata, "[[", "dataset")) +reference_model <- unname(sapply(metadata, "[[", "reference_dataset"))[1] +models_start_year <- unname(sapply(metadata, "[[", "start_year")) +models_end_year <- unname(sapply(metadata, "[[", "end_year")) +models_experiment <- unname(sapply(metadata, "[[", "exp")) +models_ensemble <- unname(sapply(metadata, "[[", "ensemble")) + +ref_idx <- which(models_name == reference_model) +ref_data_file <- climofiles[ref_idx] + +## Loop through input models +for (model_idx in c(1:(length(models_name)))) { + if (model_idx == ref_idx) { + next + } + # Setup parameters and path + exp <- models_name[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + infile <- climofiles[model_idx] + model_exp <- models_experiment[model_idx] + model_ens <- models_ensemble[model_idx] + + inregname <- paste0(exp, "_", model_exp, "_", model_ens, "_", + toString(year1), "-", toString(year2), "_", varname) + outfile <- paste0(work_dir, "/", inregname, "_", perc_lev, "qb.nc") + print(paste0(diag_base, ": pre-processing file: ", infile)) + + print(paste0(diag_base, ": ", perc_lev, " percent quantile")) + + # Select variable of interest + modf <- cdo("selvar", args = varname, input = infile) + + # Remap reference onto model grid + selectf <- cdo("selvar", args = varname, input = ref_data_file) + reff <- cdo("remapcon", args = modf, input = selectf) + + # Get (X)th percentile of reference dataset + refminf <- cdo("timmin", input = reff) + refmaxf <- cdo("timmax", input = reff) + ref_perc_pf <- cdo("timpctl", args = perc_lev, + input = c(reff, refminf, refmaxf)) + + # Select points with 
monthly precipitation greater than (75)th perc + mask_reff <- cdo("ge", input = c(reff, ref_perc_pf)) + mask_modf <- cdo("ge", input = c(modf, ref_perc_pf)) + + # Precipitation sums + mask_ref2f <- cdo("mul", input = c(mask_reff, reff)) + mask_mod2f <- cdo("mul", input = c(mask_modf, modf)) + ref_sumf <- cdo("timsum", input = mask_ref2f) + mod_sumf <- cdo("timsum", input = mask_mod2f) + + # Calculate quantile bias, set name and attributes + qb1f <- cdo("div", input = c(mod_sumf, ref_sumf)) + tempfile <- tempfile() + + temp1f <- cdo("chname", args = paste0(varname, ",qb"), input = qb1f) + temp2f <- cdo("setattribute", args = "qb@units=' '", input = temp1f) + temp1f <- cdo("setattribute", + args = "qb@long_name='Precipitation quantile bias'", + input = temp2f, output = temp1f) + cdo("setattribute", args = "qb@standard_name='precipitation_quantile_bias'", + input = temp1f, output = outfile) + + # Remove temporary files + unlink(c(modf, reff, ref_perc_pf, mask_reff, mask_modf, + ref_sumf, mod_sumf, qb1f, refminf, refmaxf, selectf, + mask_mod2f, mask_ref2f, temp1f, temp2f)) + + # Set provenance for this output file + caption <- paste0("Precipitation quantile bias ", perc_lev, "% for years ", + year1, " to ", year2, " according to ", exp) + xbase <- list(ancestors = list(infile, ref_data_file), + authors = list("arno_en", "hard_jo"), + projects = list("c3s-magic"), references = list("mehran14jgr"), + caption = caption, statistics = list("perc"), + realms = list("atmos"), themes = list("phys"), + domains = list("global"), reference_dataset = ref_data_file) + + # Store provenance in main provenance list + provenance[[outfile]] <- xbase +} + +# Write provenance to file +write_yaml(provenance, provenance_file) + +# End of diagnostic +print(paste0(diag_base, ": done.")) diff --git a/esmvaltool/diag_scripts/rainfarm/rainfarm.R b/esmvaltool/diag_scripts/rainfarm/rainfarm.R new file mode 100644 index 0000000000..6ee08846fe --- /dev/null +++ b/esmvaltool/diag_scripts/rainfarm/rainfarm.R @@ -0,0 +1,181 @@ +# ############################################################################# +# rainfarm.R +# Authors: E. Arnone (ISAC-CNR, Italy) +# J. von Hardenberg (ISAC-CNR, Italy) +# ############################################################################# +# Description +# ESMValTool diagnostic calling the RainFARM library written in Julia (by von Hardenberg, ISAC-CNR, Italy). +# RainFARM is a stochastic precipitation downscaling method, further adapted for climate downscaling. 
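+#
+# As a minimal sketch of the call sequence used below (argument values
+# are illustrative only): the spatial spectral slope sx is estimated
+# from the coarse precipitation field pr and passed to the downscaling
+# routine together with the refinement factor nf and the weights ww
+# (ww = 1. when no orographic weights are used):
+#   ans <- julia_call("fft3d", pr, need_return = "R")
+#   sx <- julia_call("fitslopex", ans[[1]], kmin = 1, need_return = "R")
+#   rd <- julia_call("rainfarm", pr, sx, nf, 1.,
+#                    fglob = FALSE, fsmooth = TRUE, need_return = "R")
+# A coarse N x N x T input field then yields an (N*nf) x (N*nf) x T
+# downscaled field.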
+# +# Required +# CDO +# Julia language: https://julialang.org +# RainFARM Julia library: https://github.com/jhardenberg/RainFARM.jl +# +# Optional +# +# Caveats +# +# Modification history +# 20181210 hard_jo: cleanup and using juliacall +# 20180508-A_arnone_e: Conversion to v2.0 +# 20170908-A_arnone_e: 1st github version +# +# ############################################################################ + +library(tools) +library(yaml) +library(JuliaCall) #nolint +library(ncdf4) + +julia_setup() +julia_library("RainFARM") + +diag_scripts_dir <- Sys.getenv("diag_scripts") + +# read settings and metadata files +args <- commandArgs(trailingOnly = TRUE) +settings <- yaml::read_yaml(args[1]) +for (myname in names(settings)) { + temp <- get(myname, settings) + assign(myname, temp) +} +metadata <- yaml::read_yaml(settings$input_files) + +# get name of climofile for selected variable and list associated to first climofile +climofiles <- names(metadata) +climolist <- get(climofiles[1], metadata) + +varname <- climolist$short_name + +diag_base <- climolist$diagnostic +print(paste(diag_base, ": starting routine")) + +# create working dirs if they do not exist +work_dir <- settings$work_dir +regridding_dir <- settings$run_dir +dir.create(work_dir, recursive = T, showWarnings = F) +dir.create(regridding_dir, recursive = T, showWarnings = F) + +# switch to working directory +setwd(work_dir) + +# setup provenance file and list +provenance_file <- paste0(regridding_dir, "/", "diagnostic_provenance.yml") +provenance <- list() + +# extract metadata +models_name <- unname(sapply(metadata, "[[", "dataset")) +reference_model <- unname(sapply(metadata, "[[", "reference_dataset"))[1] +models_start_year <- unname(sapply(metadata, "[[", "start_year")) +models_end_year <- unname(sapply(metadata, "[[", "end_year")) +models_experiment <- unname(sapply(metadata, "[[", "exp")) +models_ensemble <- unname(sapply(metadata, "[[", "ensemble")) + +# Loop through input models, apply pre-processing and call RainFARM +for (model_idx in c(1:(length(models_name)))) { + + exp <- models_name[model_idx] + year1 <- models_start_year[model_idx] + year2 <- models_end_year[model_idx] + infile <- climofiles[model_idx] + model_exp <- models_experiment[model_idx] + model_ens <- models_ensemble[model_idx] + infilename <- file_path_sans_ext(basename(infile)) + + print(paste0(diag_base, ": calling rainfarm")) + outfilename <- paste0(work_dir, "/", infilename, "_downscaled") + + ans <- julia_call("read_netcdf2d", infile, varname, need_return = "R" ) + pr <- ans[[1]] + lon_mat <- ans[[2]] + lat_mat <- ans[[3]] + + # Ensure grid is square and with even dims + nmin <- min(dim(pr)[1], dim(pr)[2]) + nmin <- floor(nmin / 2) * 2 + pr <- pr[1:nmin, 1:nmin, ] + if (is.vector(lon_mat)) { + lon_mat <- lon_mat[1:nmin] + lat_mat <- lat_mat[1:nmin] + } else { + lon_mat <- lon_mat[1:nmin, 1:nmin] + lat_mat <- lat_mat[1:nmin, 1:nmin] + } + + ans <- julia_call("lon_lat_fine", lon_mat, lat_mat, nf, need_return = "R" ) + lon_f <- ans[[1]] + lat_f <- ans[[2]] + if (slope == 0) { + ans <- julia_call("fft3d", pr, need_return = "R" ) + fxp <- ans[[1]] + ftp <- ans[[2]] + sx <- julia_call("fitslopex", fxp, kmin = 1, need_return = "R" ) + print(paste0("Computed spatial spectral slope: ", sx)) + } else { + sx <- slope + print(paste0("Fixed spatial spectral slope: ", sx)) + } + + if (weights_climo != F) { + if (!startsWith(weights_climo, "/")) { + weights_climo <- file.path(settings$auxiliary_data_dir, weights_climo) + } + print(paste0("Using external climatology for 
weights: ", weights_climo)) + fileweights <- paste0(work_dir, "/", infilename, "_w.nc") + + # Create support reference file + # This is temporary until the original Julia library is fixed + xvar <- ncdim_def("lon", "degrees_east", lon_mat, longname = "longitude") + yvar <- ncdim_def("lat", "degrees_north", lat_mat, longname = "latitude") + tvar <- ncdim_def("time", "days since 01-01-2000", 0., longname = "time", + unlim = T) + my_ncdf <- ncvar_def("grid", "m", list(xvar, yvar, tvar), -999, + longname = "grid", prec = "single") + reffile <- tempfile() + ncfile <- nc_create(reffile, my_ncdf, force_v4 = FALSE) + nc_close(ncfile) + + ww <- julia_call("rfweights", weights_climo, reffile, nf, + weightsfn = fileweights, varname = "grid", + fsmooth = conserv_smooth, need_return = "R"); + unlink(reffile) + } else { + print("Not using weights") + ww <- 1. + } + + if (conserv_glob) { + print("Conserving global field") + } else if (conserv_smooth) { + print("Smooth conservation") + } else { + print("Box conservation") + } + for (iens in 1:nens) { + print(paste0("Realization ", iens)) + rd <- julia_call("rainfarm", pr, sx, nf, ww, fglob = conserv_glob, + fsmooth = conserv_smooth, verbose = T, need_return = "R") + fname <- sprintf("%s_%04d.nc", outfilename, iens) + julia_call("write_netcdf2d", fname, rd, lon_f, lat_f, varname, infile ) + + # Set provenance for this output file + caption <- paste0("RainFARM precipitation downscaling") + xprov <- list(ancestors = list(infile), + authors = list("arno_en", "hard_jo"), + references = list("donofrio14jh", "rebora06jhm", + "terzago18nhess"), + projects = list("c3s-magic"), + caption = caption, + statistics = list("other"), + realms = list("atmos"), + themes = list("phys"), + domains = list("reg")) + + # Store provenance in main provenance list + provenance[[fname]] <- xprov + } +} + +# Write provenance to file +write_yaml(provenance, provenance_file) diff --git a/esmvaltool/diag_scripts/runoff_et/catchment_analysis.py b/esmvaltool/diag_scripts/runoff_et/catchment_analysis.py new file mode 100644 index 0000000000..953b4ffb49 --- /dev/null +++ b/esmvaltool/diag_scripts/runoff_et/catchment_analysis.py @@ -0,0 +1,667 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +"""Catchment specific water flux plots. + +############################################################### +runoff_et/catchment_analysis.py +Authors ESMValToolV1 Version + Philipp Sommer (philipp.sommer@mpimet.mpg.de) + Stefan Hagemann (stefan.hagemann@hzg.de) + Alexander Loew +Port to ESMValTool Version 2 + Tobias Stacke (tobias.stacke@mpimet.mpg.de) +############################################################### + +Description +----------- + Plots temporal and spatial averages of precipitation, runoff and + evaporation for specific land surface catchments. Additionally, + relations of runoff coefficient to relative precipitation bias + and runoff coefficient to evaporation coefficient are computed. + + Default reference data are included in this routine (default class) + but can be replaced with other datasets. In case a custom catchment + mask is used, the default class (catchment names, IDs, reference data) + has to be adapted. 
+ +############################################################### + +""" +import calendar +import logging +import os +from itertools import cycle + +import iris +import numpy as np + +import esmvaltool.diag_scripts.shared as diag + +logger = logging.getLogger(os.path.basename(__file__)) + + +def get_defaults(): + """Return default reference values for predefined catchments. + + The entries are used in the routine analysecatchments. Catchments and + reference values are specific to the default catchment mask. All reference + values are given in mm a-1. Precip data is based on WFDEI, runoff is based + on GRDC, ET is derived as the difference of both. The values are updated + and differ slightly from the ESMValTool 1 version. + Dictionary entries are + catchments + mrro + pr + evspsbl + """ + defaults = { + 'catchments': { + # Catchments with name as used in make_catchment_plots and + # associated ID used in the catchment mask netCDF file + "Amazon": 94, + "Parana": 98, + "Mackenzie": 76, + "Mississippi": 86, + "Danube": 14, + "Congo": 68, + "Niger_Malanville": 65, + "Nile": 60, + "Lena": 40, + "Yangtze-Kiang": 52, + "Ganges-Brahmaputra": 54, + "Murray": 100, + }, + 'mrro': { + 'Amazon': 1194.63, + 'Congo': 365.45, + 'Danube': 250.75, + 'Ganges-Brahmaputra': 672.11, + 'Lena': 199.61, + 'Mackenzie': 173.87, + 'Mississippi': 182.12, + 'Murray': 8.20, + 'Niger_Malanville': 31.49, + 'Nile': 48.72, + 'Parana': 202.87, + 'Yangtze-Kiang': 531.33, + }, + 'pr': { + 'Amazon': 2210.25, + 'Congo': 1571.41, + 'Danube': 808.04, + 'Ganges-Brahmaputra': 1405.84, + 'Lena': 387.01, + 'Mackenzie': 450.16, + 'Mississippi': 897.18, + 'Murray': 474.62, + 'Niger_Malanville': 437.90, + 'Nile': 655.62, + 'Parana': 1314.66, + 'Yangtze-Kiang': 1074.79, + }, + 'evspsbl': { + 'Amazon': 1015.62, + 'Congo': 1205.96, + 'Danube': 557.29, + 'Ganges-Brahmaputra': 733.73, + 'Lena': 187.40, + 'Mackenzie': 276.29, + 'Mississippi': 715.06, + 'Murray': 466.42, + 'Niger_Malanville': 406.41, + 'Nile': 606.90, + 'Parana': 1111.80, + 'Yangtze-Kiang': 543.46, + } + } + + return defaults + + +def format_coef_plot(my_ax): + """Move axis from border to center, adapting ticks and labels accordingly. + + Parameters + ---------- + my_ax : object + plot axis object + """ + # Add infos to axis + my_ax.xaxis.set_label_coords(0.5, -0.025) + my_ax.yaxis.set_label_coords(-0.025, 0.5) + # Adapt axis range to center zero + xmax = np.ceil( + (np.absolute(np.array(my_ax.get_xlim())).max() + 5) / 10.0) * 10 - 5 + my_ax.set_xlim(xmax * -1, xmax) + ymax = np.ceil( + (np.absolute(np.array(my_ax.get_ylim())).max() + 5) / 10.0) * 10 - 5 + my_ax.set_ylim(ymax * -1, ymax) + # remove 0 from y and x axis + for key in ['x', 'y']: + ticks = list(getattr(my_ax, 'get_%sticks' % key)()) + try: + ticks.remove(0) + except ValueError: + pass + getattr(my_ax, 'set_%sticks' % key)(ticks) + + # Move left y-axis and bottom x-axis to centre, passing through (0,0) + my_ax.spines['left'].set_position('center') + my_ax.spines['bottom'].set_position('center') + # Eliminate upper and right axes + my_ax.spines['right'].set_color('none') + my_ax.spines['top'].set_color('none') + # Show ticks in the left and lower axes only + my_ax.xaxis.set_ticks_position('bottom') + my_ax.yaxis.set_ticks_position('left') + + +def data2file(cfg, filename, title, filedata): + """Write data dictionary into ASCII file. 
+ + Parameters + ---------- + cfg : dict + Configuration dictionary of the recipe + filename : str + String containing the file name + title : str + String containing the file header + filedata : dict + Dictionary of catchment averages per river + """ + # Write experiment data + filepath = os.path.join(cfg[diag.names.WORK_DIR], filename) + with open(filepath, 'w') as out: + out.write(title + '\n\n') + for river, value in sorted(filedata.items()): + out.write('{:25} : {:8.2f}\n'.format(river, value)) + + +def write_plotdata(cfg, plotdata, catchments): + """Write catchment averaged values for all datasets. + + Parameters + ---------- + cfg : dict + Configuration dictionary of the recipe + plotdata : dict + Dictionary containing the catchment averages + catchments : dict + Dictionary containing information about catchment mask, + grid cell size, and reference values + """ + ref_vars = [] + metric = "catchment averages" + unit = "[mm a-1]" + + for var in plotdata.keys(): + for identifier in plotdata[var].keys(): + # Write experiment data + filename = '_'.join([var, identifier]) + '.txt' + title = " ".join(identifier.split(' ') + [var, metric, unit]) + filedata = plotdata[var][identifier] + data2file(cfg, filename, title, filedata) + # Write reference data + if var not in ref_vars: + filename = '_'.join([var, 'reference']) + '.txt' + title = " ".join([catchments['refname'], metric, unit]) + filedata = catchments[var] + data2file(cfg, filename, title, filedata) + ref_vars.append(var) + + +def get_expdata(expdict, refdict): + """Get list with catchment averages for experiment and reference. + + Parameters + ---------- + expdict : dict + dictionary of experiment catchment averages + refdict : dict + dictionary of reference catchment averages + """ + expdata, refdata, rivers = [], [], [] + for riv, ref in sorted(refdict.items()): + rivers.append(riv) + refdata.append(ref) + for riv in rivers: + expdata.append(expdict[riv]) + return rivers, np.array(refdata), np.array(expdata) + + +def compute_diags(plotdata, identifier, catchments): + """Compute diagnostics for all variables of an experiment. + + Parameters + ---------- + plotdata : dict + Dictionary containing the catchment averages + identifier : str + Dataset name + catchments : dict + Dictionary containing information about catchment mask, + grid cell size, and reference values + """ + diags = {'ref': {}, 'exp': {}, 'abs': {}, 'rel': {}} + # 1. Absolute and relative variable biases + for var in plotdata.keys(): + diags['riv'], diags['ref'][var], diags['exp'][var] = get_expdata( + plotdata[var][identifier], catchments[var]) + diags['abs'][var] = diags['exp'][var] - diags['ref'][var] + diags['rel'][var] = diags['exp'][var] / diags['ref'][var] * 100 + diags['xrv'] = range(len(diags['riv'])) + + # 2. Coefficients + diags['prbias'] = diags['abs']['pr'] / diags['ref']['pr'] * 100 + diags['rocoef'] = (diags['exp']['mrro'] / diags['exp']['pr'] * 100) - ( + diags['ref']['mrro'] / diags['ref']['pr'] * 100) + diags['etcoef'] = (diags['exp']['evspsbl'] / diags['exp']['pr'] * 100) - ( + diags['ref']['evspsbl'] / diags['ref']['pr'] * 100) + + return diags + + +def setup_pdf(pltdir, identifier, outtype): + """Prepare pdf output. 
+ + Parameters + ---------- + pltdir : str + Output directory for pdf plot + identifier : str + Dataset name + outtype : str + Plot file type [pdf,other] + """ + from matplotlib.backends.backend_pdf import PdfPages + + if outtype == 'pdf': + filepath = os.path.join(pltdir, identifier + ".pdf") + pdf = PdfPages(filepath) + else: + pdf = None + return pdf + + +def prep_barplot(diags, defs, identifier, var, pdf): + """Prepare barplot. + + Parameters + ---------- + diags : dict + Dictionary containing all metrics for plotting + defs : dict + Dictionary containing plot settings + identifier : str + Dataset name + var : str + short name of the actual variable + pdf : obj + pdf object if pdf output is chosen, None otherwise + """ + import matplotlib.pyplot as plt + + fig, my_axs = plt.subplots(nrows=1, ncols=2, sharex=False) + fig.suptitle(identifier.upper() + ' vs ' + defs['refname'].upper()) + fig.subplots_adjust(bottom=0.35) + plottitle = ['\nBias for ', '\nRelative bias for '] + ylabel = [var.upper() + ' [mm a-1]', 'Relative bias [%]'] + + # Set up both plot axes + for iax, axs in enumerate(my_axs.tolist()): + axs.set_title(plottitle[iax] + var.upper()) + axs.set_ylabel(ylabel[iax]) + axs.set_xlabel('Catchment') + axs.set_xticks(diags['xrv']) + axs.set_xticklabels((diags['riv']), fontsize='small') + for tick in axs.get_xticklabels(): + tick.set_rotation(90) + axs.axhline(c='black', lw=2) + + # Plot absolute bias for every catchment + my_axs[0].bar(diags['xrv'], diags['abs'][var], color="C{}".format(0)) + # Plot relative bias for every catchment + my_axs[1].bar(diags['xrv'], diags['rel'][var], color="C{}".format(1)) + # Finish plot + finish_plot(fig, defs['pltdir'], identifier + '_' + var + '-bias', pdf) + + +def prep_scatplot(coeftype, diags, defs, identifier, pdf): + """Prepare scatterplot for different coefficients. + + Parameters + ---------- + coeftype : str + string indicating plot type [prbias,etcoef] + diags : dict + Dictionary containing all metrics for plotting + defs : dict + Dictionary containing plot settings + identifier : str + Dataset name + pdf : obj + pdf object if pdf output is chosen, None otherwise + """ + import matplotlib.pyplot as plt + + fig, axs = plt.subplots(nrows=1, ncols=1, sharex=False) + axs.set_title(identifier.upper() + ' vs ' + defs['refname'].upper()) + axs.set_ylabel('Bias of runoff coefficient [%]') + + marker = cycle(defs['markerlist']) + + if coeftype == 'prbias': + for prbias, rocoef in zip(diags['prbias'], diags['rocoef']): + axs.scatter(prbias, rocoef, marker=next(marker)) + axs.set_xlabel('Relative bias of precipitation [%]') + tag = '_pr-vs-ro' + elif coeftype == 'etcoef': + for etcoef, rocoef in zip(diags['etcoef'], diags['rocoef']): + axs.scatter(etcoef, rocoef, marker=next(marker)) + axs.set_xlabel('Bias of ET coefficient [%]') + tag = '_et-vs-ro' + else: + raise ValueError('Unexpected coefficient combination in prep_scatplot') + + format_coef_plot(axs) + add_legend(fig, diags['riv'], defs['markerlist']) + finish_plot(fig, defs['pltdir'], identifier + tag, pdf) + + +def add_legend(fig, rivers, markerlist): + """Add scatter plot legend with separate axis. 
+
+    Parameters
+    ----------
+    fig : obj
+        plot figure object
+    rivers : list
+        list of river catchment names
+    markerlist : list
+        list of marker strings for scatterplot legend
+    """
+    # Define legend
+    fig.subplots_adjust(bottom=0.30)
+    marker = cycle(markerlist)
+    caxe = fig.add_axes([0.05, 0.01, 0.9, 0.20])
+    for label in rivers:
+        caxe.scatter([], [], marker=next(marker), label=label)
+    caxe.legend(ncol=3, numpoints=1, loc="lower center", mode="expand")
+    caxe.set_axis_off()
+
+
+def finish_plot(fig, pltdir, name, pdf):
+    """Save the current figure to either png or pdf.
+
+    Parameters
+    ----------
+    fig : obj
+        figure to save
+    pltdir : str
+        target directory to store plots
+    name : str
+        filename for png output without extension
+    pdf : obj
+        pdf object collecting all pages in case of pdf output
+    """
+    import matplotlib.pyplot as plt
+    if '-bias' in name:
+        plt.tight_layout()
+    if pdf is None:
+        filepath = os.path.join(pltdir, name + ".png")
+        fig.savefig(filepath)
+    else:
+        fig.savefig(pdf, dpi=80, format='pdf')
+    plt.close()
+
+
+def make_catchment_plots(cfg, plotdata, catchments):
+    """Plot catchment averages for different metrics.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe
+    plotdata : dict
+        Dictionary containing the catchment averages
+    catchments : dict
+        Dictionary containing information about catchment mask,
+        grid cell size, and reference values
+    """
+    import matplotlib.pyplot as plt
+
+    # Get colorscheme from recipe
+    defs = {
+        'colorscheme': cfg.get('colorscheme', 'default'),
+        'markerlist': ('s', '+', 'o', '*', 'x', 'D'),
+        'pltdir': cfg[diag.names.PLOT_DIR],
+        'plttype': cfg.get('output_file_type', 'png'),
+        'refname': catchments['refname']
+    }
+    plt.style.use(defs['colorscheme'])
+
+    # Loop over datasets
+    for identifier in plotdata[list(plotdata.keys())[0]].keys():
+        # Prepare pdf file if output type chosen
+        pdf = setup_pdf(defs['pltdir'], identifier, defs['plttype'])
+
+        # Compute diagnostics for plots
+        diags = compute_diags(plotdata, identifier, catchments)
+
+        # Barplots for single variables
+        for var in plotdata.keys():
+            prep_barplot(diags, defs, identifier, var, pdf)
+
+        # Runoff coefficient vs relative precipitation bias
+        prep_scatplot('prbias', diags, defs, identifier, pdf)
+
+        # Runoff coefficient vs evaporation coefficient bias
+        prep_scatplot('etcoef', diags, defs, identifier, pdf)
+
+        # Finish pdf if it is the chosen output
+        if pdf is not None:
+            pdf.close()
+
+
+def get_catchment_data(cfg):
+    """Read and prepare catchment mask.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe
+    """
+    catchments = get_defaults()
+    catchments['refname'] = 'default'
+    catchment_filepath = cfg.get('catchmentmask')
+    catchments['cube'] = iris.load_cube(catchment_filepath)
+    if catchments['cube'].coord('latitude').bounds is None:
+        catchments['cube'].coord('latitude').guess_bounds()
+    if catchments['cube'].coord('longitude').bounds is None:
+        catchments['cube'].coord('longitude').guess_bounds()
+    catchments['area'] = iris.analysis.cartography.area_weights(
+        catchments['cube'])
+
+    return catchments
+
+
+def get_sim_data(cfg, datapath, catchment_cube):
+    """Read and postprocess netcdf data from experiments.
+
+    Check units, aggregate to long term mean yearly sum and
+    regrid to resolution of catchment mask.
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
+    datapath : str
+        Path to the netcdf file
+    catchment_cube : obj
+        iris cube object containing the catchment mask, used as
+        regridding target
+    """
+    datainfo = diag.Datasets(cfg).get_dataset_info(path=datapath)
+    identifier = "_".join(
+        [datainfo['dataset'].upper(), datainfo['exp'], datainfo['ensemble']])
+    # Load data into iris cube
+    new_cube = iris.load(datapath, diag.Variables(cfg).standard_names())[0]
+    # Check for expected unit
+    if new_cube.units != 'kg m-2 s-1':
+        raise ValueError('Unit [kg m-2 s-1] is expected for ',
+                         new_cube.long_name.lower(), ' flux')
+    # Convert to unit mm per month
+    timelist = new_cube.coord('time')
+    daypermonth = []
+    for mydate in timelist.units.num2date(timelist.points):
+        daypermonth.append(calendar.monthrange(mydate.year, mydate.month)[1])
+    new_cube.data *= 86400.0
+    for i, days in enumerate(daypermonth):
+        new_cube.data[i] *= days
+    # Aggregate over year --> unit mm per year
+    year_cube = new_cube.aggregated_by('year', iris.analysis.SUM)
+    year_cube.units = "mm a-1"
+    # Compute long term mean
+    mean_cube = year_cube.collapsed([diag.names.TIME], iris.analysis.MEAN)
+    # Regrid to catchment data grid --> maybe use area_weighted instead?
+    if mean_cube.coord('latitude').bounds is None:
+        mean_cube.coord('latitude').guess_bounds()
+    if mean_cube.coord('longitude').bounds is None:
+        mean_cube.coord('longitude').guess_bounds()
+    m_grid = [iris.analysis.Linear(), iris.analysis.AreaWeighted()]
+    mean_cube_regrid = mean_cube.regrid(catchment_cube, m_grid[1])
+
+    return datainfo['short_name'], identifier, mean_cube_regrid
+
+
+def get_catch_avg(catchments, sim_cube):
+    """Compute area weighted averages for river catchments.
+
+    Parameters
+    ----------
+    catchments : dict
+        Dictionary containing information about catchment mask,
+        grid cell size, and reference values
+    sim_cube : obj
+        iris cube object containing the simulation data
+    """
+    avg = {}
+    for river, rid in catchments['catchments'].items():
+        data_catch = np.ma.masked_where(
+            catchments['cube'].data.astype(np.int) != rid, sim_cube.data)
+        area_catch = np.ma.masked_where(
+            catchments['cube'].data.astype(np.int) != rid,
+            catchments['area'].data)
+        avg[river] = (data_catch * (area_catch / area_catch.sum())).sum()
+    return avg
+
+
+def update_reference(catchments, model, rivervalues, var):
+    """Update reference catchment averages.
+
+    Parameters
+    ----------
+    catchments : dict
+        Dictionary containing information about catchment mask,
+        grid cell size, and reference values
+    model : str
+        name of the data set
+    rivervalues : dict
+        dictionary of river catchment averages
+    var : str
+        short name of the variable
+    """
+    if catchments['refname'] != model and catchments['refname'] != 'default':
+        raise ValueError('Reference must be the same for all variables!')
+    catchments[var] = rivervalues
+    catchments['refname'] = model
+
+
+def update_plotdata(identifier, plotdata, rivervalues, var):
+    """Update simulation catchment averages.
+
+    Parameters
+    ----------
+    identifier : str
+        string consisting of dataset, experiment and ensemble information
+    plotdata : dict
+        river catchment averages for different variables and datasets
+    rivervalues : dict
+        river catchment averages for different variables
+    var : str
+        short name of the variable
+    """
+    if var not in plotdata.keys():
+        plotdata[var] = {}
+    if identifier in plotdata[var].keys():
+        raise ValueError('Variable', var, 'already exists in plot dict')
+    else:
+        plotdata[var][identifier] = rivervalues
+
+
+def main(cfg):
+    """Run the diagnostic.
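The masked area weighting in get_catch_avg above can be reproduced on plain numpy arrays; a sketch for a 2x2 grid with two catchments (all values invented)::

    import numpy as np

    catchment_ids = np.array([[1, 1], [2, 2]])  # toy catchment mask
    area = np.array([[1.0, 1.0], [0.5, 0.5]])   # toy grid cell areas
    data = np.array([[10., 20.], [30., 40.]])   # toy simulated flux

    rid = 1  # select catchment 1
    data_catch = np.ma.masked_where(catchment_ids != rid, data)
    area_catch = np.ma.masked_where(catchment_ids != rid, area)
    avg = (data_catch * (area_catch / area_catch.sum())).sum()  # -> 15.0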
+
+    Parameters
+    ----------
+    cfg : dict
+        Configuration dictionary of the recipe.
+    """
+    # Get dataset and variable information
+    logging.debug("Found datasets in recipe:\n%s", diag.Datasets(cfg))
+    logging.debug("Found variables in recipe:\n%s", diag.Variables(cfg))
+
+    # Check for correct variables
+    if not diag.Variables(cfg).vars_available('pr', 'mrro', 'evspsbl'):
+        raise ValueError(
+            "Diagnostic requires precipitation, runoff and evaporation data")
+
+    # Read catchmentmask
+    # to check: Correct way to read auxiliary data using recipes?
+    my_catch = get_catchment_data(cfg)
+
+    # Read data, convert units and compute long term means
+    # to check: Shouldn't this be part of preprocessing?
+    # to check: How to regrid onto catchment_cube grid
+    #           with preproc recipe statements
+    #           instead of using regrid here?
+    allcubes = {}
+    plotdata = {}
+    for datapath in diag.Datasets(cfg):
+        # Get simulation data
+        var, identifier, cube = get_sim_data(cfg, datapath, my_catch['cube'])
+        # Get river catchment averages
+        rivervalues = get_catch_avg(my_catch, cube)
+        # Sort into data dictionaries
+        datainfo = diag.Datasets(cfg).get_dataset_info(path=datapath)
+        model = datainfo['dataset']
+        if model == datainfo.get('reference_dataset', None):
+            update_reference(my_catch, model, rivervalues, var)
+        else:
+            update_plotdata(identifier, plotdata, rivervalues, var)
+
+        # Append to cubelist for temporary output
+        if model not in allcubes.keys():
+            allcubes[model] = []
+        allcubes[model].append(cube)
+
+    # Write regridded and temporally aggregated netCDF data files
+    # (one per model)
+    # to do: update attributes, something fishy with unlimited dimension
+    for model, mcube in allcubes.items():
+        filepath = os.path.join(cfg[diag.names.WORK_DIR],
+                                '_'.join(['postproc', model]) + '.nc')
+        if cfg[diag.names.WRITE_NETCDF]:
+            iris.save(mcube, filepath)
+            logger.info("Writing %s", filepath)
+
+    # Write plotdata as ascii files for user information
+    write_plotdata(cfg, plotdata, my_catch)
+
+    # Plot catchment data
+    make_catchment_plots(cfg, plotdata, my_catch)
+
+
+if __name__ == '__main__':
+
+    with diag.run_diagnostic() as config:
+        main(config)
diff --git a/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py b/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py
new file mode 100644
index 0000000000..3e1d0e44bd
--- /dev/null
+++ b/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py
@@ -0,0 +1,267 @@
+"""Diagnostic to select grid points within a shapefile."""
+import logging
+import os
+from copy import deepcopy
+
+import fiona
+import iris
+import numpy as np
+import xlsxwriter
+from netCDF4 import Dataset, num2date
+from shapely.geometry import MultiPoint, shape
+from shapely.ops import nearest_points
+
+from esmvaltool.diag_scripts.shared import (run_diagnostic, ProvenanceLogger,
+                                            get_diagnostic_filename)
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def get_provenance_record(cfg, basename, caption, extension, ancestor_files):
+    """Create a provenance record describing the diagnostic data and plot."""
+    record = {
+        'caption': caption,
+        'statistics': ['other'],
+        'domains': ['global'],
+        'authors': ['berg_pe'],
+        'references': ['acknow_project'],
+        'ancestors': ancestor_files,
+    }
+    diagnostic_file = get_diagnostic_filename(basename, cfg, extension)
+    with ProvenanceLogger(cfg) as provenance_logger:
+        provenance_logger.log(diagnostic_file, record)
+
+
+def main(cfg):
+    """Select grid points within shapefiles."""
+    if 'evalplot' not in cfg:
+        cfg['evalplot'] = False
+    for filename, attributes in cfg['input_data'].items():
+        logger.info("Processing variable %s from dataset %s",
+                    attributes['standard_name'], attributes['dataset'])
+        logger.debug("Loading %s", filename)
+        cube = iris.load_cube(filename)
+
+        ncts, nclon, nclat = shapeselect(cfg, cube)
+        name = os.path.splitext(os.path.basename(filename))[0] + '_polygon'
+        if cfg['write_xlsx']:
+            xname = name + '_table'
+            writexls(cfg, filename, ncts, nclon, nclat)
+            caption = 'Selected gridpoints within shapefile.'
+            get_provenance_record(
+                cfg, xname, caption, 'xlsx', ancestor_files=[filename])
+        if cfg['write_netcdf']:
+            path = os.path.join(
+                cfg['work_dir'],
+                name + '.nc',
+            )
+            write_netcdf(path, ncts, nclon, nclat, cube, cfg)
+            caption = 'Selected gridpoints within shapefile.'
+            get_provenance_record(
+                cfg, name, caption, 'nc', ancestor_files=[filename])
+
+
+def write_keyvalue_toxlsx(worksheet, row, key, value):
+    """Write a dictionary to an Excel sheet."""
+    if type(value).__name__ == 'dict':
+        worksheet.write(row, 0, key)
+        row += 1
+        for dictkey, dictvalue in value.items():
+            row = write_keyvalue_toxlsx(worksheet, row, dictkey, dictvalue)
+    elif type(value).__name__ == 'list':
+        for listvalue in value:
+            row = write_keyvalue_toxlsx(worksheet, row, key, listvalue)
+    else:
+        worksheet.write(row, 0, key)
+        if type(value).__name__ == 'bool':
+            worksheet.write(row, 1, str(int(value)))
+        else:
+            worksheet.write(row, 1, value)
+        row += 1
+
+    return row
+
+
+def writexls(cfg, filename, ncts, nclon1, nclat1):
+    """Write the content of a netCDF file as .xlsx."""
+    ncfile = Dataset(filename, 'r')
+    dtime = num2date(ncfile.variables['time'][:],
+                     ncfile.variables['time'].units,
+                     ncfile.variables['time'].calendar)
+    wtime = []
+    for dtim in dtime:
+        wtime.append(str(dtim))
+    workbook = xlsxwriter.Workbook(
+        os.path.join(
+            cfg['work_dir'],
+            os.path.splitext(os.path.basename(filename))[0] +
+            '_polygon_table' + '.xlsx'))
+    worksheet = workbook.add_worksheet('Data')
+    worksheet.write(0, 0, 'Date')
+    worksheet.write(0, 1, 'Lon/Lat')
+    worksheet.write_column(2, 0, wtime)
+    for row in range(ncts.shape[1]):
+        worksheet.write(
+            1, row + 1,
+            str("%#.3f" % round(float(nclon1[row]), 3)) + '_' + str(
+                "%#.3f" % round(float(nclat1[row]), 3)))
+        worksheet.write_column(2, row + 1,
+                               np.around(np.squeeze(ncts[:, row]),
+                                         decimals=8))
+        worksheet.set_column(0, row + 1, 20)
+    worksheet = workbook.add_worksheet('NetCDFheader')
+    worksheet.set_column(0, 0, 20)
+    for row, attr in enumerate(ncfile.ncattrs()):
+        worksheet.write(row, 0, attr)
+        worksheet.write(row, 1, str(getattr(ncfile, attr)))
+    worksheet = workbook.add_worksheet('ESMValTool')
+    worksheet.set_column(0, 0, 20)
+    row = 0
+    for key, value in cfg.items():
+        row = write_keyvalue_toxlsx(worksheet, row, key, value)
+    workbook.close()
+
+
+def shapeselect(cfg, cube):
+    """Select data inside a shapefile."""
+    shppath = cfg['shapefile']
+    if not os.path.isabs(shppath):
+        shppath = os.path.join(cfg['auxiliary_data_dir'], shppath)
+    wgtmet = cfg['weighting_method']
+    if ((cube.coord('latitude').ndim == 1
+         and cube.coord('longitude').ndim == 1)):
+        coordpoints = [(x, y) for x in cube.coord('longitude').points
+                       for y in cube.coord('latitude').points]
+        for i, crd in enumerate(coordpoints):
+            if crd[0] > 180:
+                coordpoints[i] = (coordpoints[i][0] - 360., coordpoints[i][1])
+    else:
+        raise ValueError("Support for 2-d coords not implemented!")
+    points = MultiPoint(coordpoints)
+    with fiona.open(shppath) as shp:
+        gpx = []
+        gpy = []
+        cnt = -1
+        ncts = np.zeros((cube.coord('time').shape[0], len(shp)))
+        nclon = np.zeros((len(shp)))  # Takes representative point
+        nclat = np.zeros((len(shp)))
+        for ishp, multipol in enumerate(shp):
+            cnt += 1
+            multi = shape(multipol['geometry'])
+            if wgtmet == 'mean_inside':
+                gpx, gpy = mean_inside(gpx, gpy, points, multi, cube)
+                if not gpx:
+                    gpx, gpy = representative(gpx, gpy, points, multi, cube)
+            elif wgtmet == 'representative':
+                gpx, gpy = representative(gpx, gpy, points, multi, cube)
+            if len(gpx) == 1:
+                ncts[:, ishp] = np.reshape(cube.data[:, gpy, gpx],
+                                           (cube.data.shape[0], ))
+            else:
+                ncts[:, ishp] = np.mean(cube.data[:, gpy, gpx], axis=1)
+            gxx, gyy = representative([], [], points, multi, cube)
+            nclon[ishp] = cube.coord('longitude').points[gxx]
+            nclat[ishp] = cube.coord('latitude').points[gyy]
+    return ncts, nclon, nclat
+
+
+def mean_inside(gpx, gpy, points, multi, cube):
+    """Find points inside shape."""
+    for point in points:
+        if point.within(multi):
+            if point.x < 0:
+                addx = 360.
+            else:
+                addx = 0.
+            xxx, yyy = best_match(
+                cube.coord('longitude').points,
+                cube.coord('latitude').points, point.x + addx, point.y)
+            gpx.append(xxx)
+            gpy.append(yyy)
+    return gpx, gpy
+
+
+def representative(gpx, gpy, points, multi, cube):
+    """Find representative point in shape."""
+    reprpoint = multi.representative_point()
+    nearest = nearest_points(reprpoint, points)
+    npx = nearest[1].coords[0][0]
+    npy = nearest[1].coords[0][1]
+    if npx < 0:
+        addx = 360.
+    else:
+        addx = 0.
+    xxx, yyy = best_match(
+        cube.coord('longitude').points,
+        cube.coord('latitude').points, npx + addx, npy)
+    gpx.append(xxx)
+    gpy.append(yyy)
+    return gpx, gpy
+
+
+def best_match(iin, jin, pex, pey):
+    """Identify the grid points in 2-d with minimum distance."""
+    # Build 2-d coordinate meshes if the inputs are 1-d coordinate arrays
+    if iin.ndim != 2 or jin.ndim != 2:
+        gpx = deepcopy(iin)
+        gpy = deepcopy(jin)
+        gpxx = np.zeros((len(gpx), len(gpy)))
+        gpyy = np.zeros((len(gpx), len(gpy)))
+        for gpi in range(0, len(gpy)):
+            gpxx[:, gpi] = gpx
+        for gpj in range(0, len(gpx)):
+            gpyy[gpj, :] = gpy
+    else:
+        gpxx = deepcopy(iin)
+        gpyy = deepcopy(jin)
+    distance = ((gpxx - pex)**2 + (gpyy - pey)**2)**0.5
+    ind = np.unravel_index(np.argmin(distance, axis=None), distance.shape)
+    return ind[0], ind[1]
+
+
+def write_netcdf(path, var, plon, plat, cube, cfg):
+    """Write results to a netcdf file."""
+    polyid = []
+    for row in range(var.shape[1]):
+        polyid.append(
+            str("%#.3f" % round(plon[row], 3)) + '_' +
+            str("%#.3f" % round(plat[row], 3)))
+    ncout = Dataset(path, mode='w')
+    ncout.createDimension('time', None)
+    ncout.createDimension('polygon', len(polyid))
+    times = ncout.createVariable('time', 'f8', ('time'), zlib=True)
+    times.setncattr_string('standard_name', cube.coord('time').standard_name)
+    times.setncattr_string('long_name', cube.coord('time').long_name)
+    times.setncattr_string('calendar', cube.coord('time').units.calendar)
+    times.setncattr_string('units', cube.coord('time').units.origin)
+    polys = ncout.createVariable('polygon', 'S1', ('polygon'), zlib=True)
+    polys.setncattr_string('standard_name', 'polygon')
+    polys.setncattr_string('long_name', 'polygon')
+    polys.setncattr_string('shapefile', cfg['shapefile'])
+    lon = ncout.createVariable(
+        cube.coord('longitude').var_name, 'f8', 'polygon', zlib=True)
+    lon.setncattr_string('standard_name',
+                         cube.coord('longitude').standard_name)
+    lon.setncattr_string('long_name', cube.coord('longitude').long_name)
+    lon.setncattr_string('units', cube.coord('longitude').units.origin)
+    lat = ncout.createVariable(
cube.coord('latitude').var_name, 'f8', 'polygon', zlib=True) + lat.setncattr_string('standard_name', cube.coord('latitude').standard_name) + lat.setncattr_string('long_name', cube.coord('latitude').long_name) + lat.setncattr_string('units', cube.coord('latitude').units.origin) + data = ncout.createVariable( + cube.var_name, 'f4', ('time', 'polygon'), zlib=True) + data.setncattr_string('standard_name', cube.standard_name) + data.setncattr_string('long_name', cube.long_name) + data.setncattr_string('units', cube.units.origin) + for key, val in cube.metadata[-2].items(): + ncout.setncattr_string(key, val) + times[:] = cube.coord('time').points + lon[:] = plon + lat[:] = plat + polys[:] = polyid[:] + data[:] = var[:] + ncout.close() + + +if __name__ == '__main__': + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.dbf b/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.dbf new file mode 100644 index 0000000000..3b4a615238 Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.dbf differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.prj b/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.prj new file mode 100644 index 0000000000..f45cbadf00 --- /dev/null +++ b/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.shp b/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.shp new file mode 100644 index 0000000000..b5177a23fd Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.shp differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.shx b/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.shx new file mode 100644 index 0000000000..74a553d20c Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/Elbe.shx differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.dbf b/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.dbf new file mode 100644 index 0000000000..4450e776bf Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.dbf differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.prj b/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.prj new file mode 100644 index 0000000000..f45cbadf00 --- /dev/null +++ b/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.shp b/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.shp new file mode 100644 index 0000000000..35a5ce79af Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.shp differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.shx b/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.shx new file mode 100644 index 0000000000..c60ace698b Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/MotalaStrom.shx differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/Thames.dbf b/esmvaltool/diag_scripts/shapeselect/testdata/Thames.dbf new file mode 100644 index 0000000000..4b24c7e2ab Binary files /dev/null 
and b/esmvaltool/diag_scripts/shapeselect/testdata/Thames.dbf differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/Thames.prj b/esmvaltool/diag_scripts/shapeselect/testdata/Thames.prj new file mode 100644 index 0000000000..f45cbadf00 --- /dev/null +++ b/esmvaltool/diag_scripts/shapeselect/testdata/Thames.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/Thames.shp b/esmvaltool/diag_scripts/shapeselect/testdata/Thames.shp new file mode 100644 index 0000000000..b4646f2b35 Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/Thames.shp differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/Thames.shx b/esmvaltool/diag_scripts/shapeselect/testdata/Thames.shx new file mode 100644 index 0000000000..0568fe63c0 Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/Thames.shx differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.dbf b/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.dbf new file mode 100644 index 0000000000..a37b3fbcd9 Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.dbf differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.prj b/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.prj new file mode 100644 index 0000000000..f45cbadf00 --- /dev/null +++ b/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] \ No newline at end of file diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.shp b/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.shp new file mode 100644 index 0000000000..ea8ed1da04 Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.shp differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.shx b/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.shx new file mode 100644 index 0000000000..8ef12c84b0 Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/multicatchment.shx differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/testfile.dbf b/esmvaltool/diag_scripts/shapeselect/testdata/testfile.dbf new file mode 100755 index 0000000000..c024cb40ec Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/testfile.dbf differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/testfile.prj b/esmvaltool/diag_scripts/shapeselect/testdata/testfile.prj new file mode 100755 index 0000000000..a30c00a55d --- /dev/null +++ b/esmvaltool/diag_scripts/shapeselect/testdata/testfile.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] \ No newline at end of file diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/testfile.shp b/esmvaltool/diag_scripts/shapeselect/testdata/testfile.shp new file mode 100755 index 0000000000..522c1f034b Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/testfile.shp differ diff --git a/esmvaltool/diag_scripts/shapeselect/testdata/testfile.shx b/esmvaltool/diag_scripts/shapeselect/testdata/testfile.shx new file mode 100755 index 
0000000000..3bcb5d73f2 Binary files /dev/null and b/esmvaltool/diag_scripts/shapeselect/testdata/testfile.shx differ diff --git a/esmvaltool/diag_scripts/shared/__init__.py b/esmvaltool/diag_scripts/shared/__init__.py index 50c5f9a159..b0f0876449 100644 --- a/esmvaltool/diag_scripts/shared/__init__.py +++ b/esmvaltool/diag_scripts/shared/__init__.py @@ -1,19 +1,39 @@ """Code that is shared between multiple diagnostic scripts.""" -from . import names, plot -from ._base import (get_cfg, group_metadata, run_diagnostic, select_metadata, - sorted_group_metadata, sorted_metadata) +from . import io, iris_helpers, names, plot +from ._base import (ProvenanceLogger, extract_variables, get_cfg, + get_diagnostic_filename, get_plot_filename, group_metadata, + run_diagnostic, select_metadata, sorted_group_metadata, + sorted_metadata, variables_available) from ._diag import Datasets, Variable, Variables +from ._validation import apply_supermeans, get_control_exper_obs __all__ = [ - 'names', - 'get_cfg', - 'plot', + # Main entry point for diagnostics 'run_diagnostic', - 'Variable', - 'Variables', - 'Datasets', + # Define output filenames + 'get_diagnostic_filename', + 'get_plot_filename', + # Log provenance + 'ProvenanceLogger', + # Select and sort input metadata 'select_metadata', 'sorted_metadata', 'group_metadata', 'sorted_group_metadata', + 'extract_variables', + 'variables_available', + 'names', + 'Variable', + 'Variables', + 'Datasets', + 'get_cfg', + # IO module + 'io', + # Iris helpers module + 'iris_helpers', + # Plotting module + 'plot', + # Validation module + 'get_control_exper_obs', + 'apply_supermeans', ] diff --git a/esmvaltool/diag_scripts/shared/_base.py b/esmvaltool/diag_scripts/shared/_base.py index 4c6f00cb64..4f5284ca95 100644 --- a/esmvaltool/diag_scripts/shared/_base.py +++ b/esmvaltool/diag_scripts/shared/_base.py @@ -14,6 +14,146 @@ logger = logging.getLogger(__name__) +def get_plot_filename(basename, cfg): + """Get a valid path for saving a diagnostic plot. + + Parameters + ---------- + basename: str + The basename of the file. + cfg: dict + Dictionary with diagnostic configuration. + + Returns + ------- + str: + A valid path for saving a diagnostic plot. + + """ + return os.path.join( + cfg['plot_dir'], + basename + '.' + cfg['output_file_type'], + ) + + +def get_diagnostic_filename(basename, cfg, extension='nc'): + """Get a valid path for saving a diagnostic data file. + + Parameters + ---------- + basename: str + The basename of the file. + cfg: dict + Dictionary with diagnostic configuration. + extension: str + File name extension. + + Returns + ------- + str: + A valid path for saving a diagnostic data file. + + """ + return os.path.join( + cfg['work_dir'], + basename + '.' + extension, + ) + + +class ProvenanceLogger(object): + """Open the provenance logger. + + Parameters + ---------- + cfg: dict + Dictionary with diagnostic configuration. 
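The two filename helpers above do no more than join configuration entries with the basename; an illustrative call with a hand-written cfg (paths invented)::

    cfg = {'plot_dir': '/path/to/plots',
           'work_dir': '/path/to/work',
           'output_file_type': 'png'}
    get_plot_filename('timeseries', cfg)        # '/path/to/plots/timeseries.png'
    get_diagnostic_filename('timeseries', cfg)  # '/path/to/work/timeseries.nc'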
+ + Example + ------- + Use as a context manager:: + + record = { + 'caption': "This is a nice plot.", + 'statistics': ['mean'], + 'domain': 'global', + 'plot_type': 'zonal', + 'plot_file': '/path/to/result.png', + 'authors': [ + 'first_author', + 'second_author', + ], + 'references': [ + 'acknow_project', + ], + 'ancestors': [ + '/path/to/input_file_1.nc', + '/path/to/input_file_2.nc', + ], + } + output_file = '/path/to/result.nc' + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(output_file, record) + + """ + + def __init__(self, cfg): + """Create a provenance logger.""" + self._log_file = os.path.join(cfg['run_dir'], + 'diagnostic_provenance.yml') + + if not os.path.exists(self._log_file): + self.table = {} + else: + with open(self._log_file, 'r') as file: + self.table = yaml.safe_load(file) + + def log(self, filename, record): + """Record provenance. + + Parameters + ---------- + filename: str + Name of the file containing the diagnostic data. + record: dict + Dictionary with the provenance information to be logged. + + Typical keys are: + - plot_type + - plot_file + - caption + - ancestors + - authors + - references + + Note + ---- + See also esmvaltool/config-references.yml + + """ + if filename in self.table: + raise KeyError( + "Provenance record for {} already exists.".format(filename)) + + self.table[filename] = record + + def _save(self): + """Save the provenance log to file.""" + dirname = os.path.dirname(self._log_file) + if not os.path.exists(dirname): + os.makedirs(dirname) + with open(self._log_file, 'w') as file: + yaml.safe_dump(self.table, file) + + def __enter__(self): + """Enter context.""" + return self + + def __exit__(self, *_): + """Save the provenance log before exiting context.""" + self._save() + + def select_metadata(metadata, **attributes): """Select specific metadata describing preprocessed data. @@ -135,6 +275,78 @@ def normalized_group_key(key): return groups +def extract_variables(cfg, as_iris=False): + """Extract basic variable information from configuration dictionary. + + Returns `short_name`, `standard_name`, `long_name` and `units` keys for + each variable. + + Parameters + ---------- + cfg : dict + Diagnostic script configuration. + as_iris : bool, optional + Replace `short_name` by `var_name`, this can be used directly in + :mod:`iris` classes. + + Returns + ------- + dict + Variable information in :obj:`dict`s (values) for each `short_name` + (key). + + """ + keys_to_extract = [ + 'short_name', + 'standard_name', + 'long_name', + 'units', + ] + + # Extract variables + input_data = cfg['input_data'].values() + variable_data = group_metadata(input_data, 'short_name') + variables = {} + for (short_name, data) in variable_data.items(): + data = data[0] + variables[short_name] = {} + info = variables[short_name] + for key in keys_to_extract: + info[key] = data[key] + + # Replace short_name by var_name if desired + if as_iris: + info['var_name'] = info.pop('short_name') + if info['standard_name'] == '': + info['standard_name'] = None + + return variables + + +def variables_available(cfg, short_names): + """Check if data from certain variables is available. + + Parameters + ---------- + cfg : dict + Diagnostic script configuration. + short_names : list of str + Variable `short_names` which should be checked. + + Returns + ------- + bool + `True` if all variables available, `False` if not. 
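Both helpers above inspect the per-file metadata in cfg['input_data']; a sketch of the expected behaviour with a single invented entry::

    cfg = {'input_data': {'/path/to/tas.nc': {   # metadata invented
        'short_name': 'tas',
        'standard_name': 'air_temperature',
        'long_name': 'Near-Surface Air Temperature',
        'units': 'K',
    }}}
    variables_available(cfg, ['tas'])        # True
    variables_available(cfg, ['pr'])         # False
    extract_variables(cfg)['tas']['units']   # 'K'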
+ + """ + input_data = cfg['input_data'].values() + available_short_names = list(group_metadata(input_data, 'short_name')) + for var in short_names: + if var not in available_short_names: + return False + return True + + def get_cfg(filename=None): """Read diagnostic script configuration from settings.yml.""" if filename is None: @@ -165,12 +377,27 @@ def _get_input_data_files(cfg): @contextlib.contextmanager def run_diagnostic(): - """Run a diagnostic. + """Run a Python diagnostic. + + This context manager is the main entry point for most Python diagnostics. Example ------- - See esmvaltool/diag_scripts/examples/diagnostic.py for an example of how to - start your diagnostic. + See esmvaltool/diag_scripts/examples/diagnostic.py for an extensive + example of how to start your diagnostic. + + Basic usage is as follows, add these lines at the bottom of your script:: + + def main(cfg): + # Your diagnostic code goes here. + print(cfg) + + if __name__ == '__main__': + with run_diagnostic() as cfg: + main(cfg) + + The `cfg` dict passed to `main` contains the script configuration that + can be used with the other functions in this module. """ # Implemented as context manager so we can support clean up actions later @@ -207,6 +434,7 @@ def run_diagnostic(): logging.basicConfig(format="%(asctime)s [%(process)d] %(levelname)-8s " "%(name)s,%(lineno)s\t%(message)s") logging.Formatter.converter = time.gmtime + logging.captureWarnings(True) logging.getLogger().setLevel(cfg['log_level'].upper()) # Read input metadata @@ -243,6 +471,10 @@ def run_diagnostic(): continue os.makedirs(output_directory) + provenance_file = os.path.join(cfg['run_dir'], 'diagnostic_provenance.yml') + if os.path.exists(provenance_file): + os.remove(provenance_file) + yield cfg logger.info("End of diagnostic script run.") diff --git a/esmvaltool/diag_scripts/shared/_supermeans.py b/esmvaltool/diag_scripts/shared/_supermeans.py new file mode 100644 index 0000000000..f09c82d26a --- /dev/null +++ b/esmvaltool/diag_scripts/shared/_supermeans.py @@ -0,0 +1,334 @@ +""" +Module to create 'Supermeans'. + +Supermeans are averages over several years. For monthly averages each calendar +month is periodically averaged over several years, an example is the average +surface temperature in April between 2000 and 2010. For seasonal averages +seasons are averaged periodically. +Annual 'Supermeans' are averages over several full years. +""" + +import os.path +import six +import cf_units +import iris +import iris.coord_categorisation +from iris.coord_categorisation import _pt_date +import numpy as np + + +class NoBoundsError(ValueError): + """Return error and pass.""" + + pass + + +class InvalidPeriod(ValueError): + """Return error and pass.""" + + pass + + +def get_supermean(name, season, data_dir, obs_flag=None): + """Calculated supermeans from retrieved data, which are pickled Iris cubes. + + :param name: Cube name. Should be CF-standard name. If no CF-standard name + exists the STASH code in msi format (for example m01s30i403) + is used as name. + :param season: Supermean for a season (including annual). + ['ann', 'djf', 'mam', 'jja', 'son'] + :param data_dir: Directory containing cubes of model output data for + supermeans. + :returns: Supermeaned cube. + :rtype Cube: + + The monthly and seasonal supermeans are periodic averages, for example + the seasonal supermean consists of the averaged season, where each + season is averaged over several years. + The annual supermean is a continuous mean over multiple years. 
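Ignoring the calendar-dependent duration weighting that periodic_mean adds below, a seasonal supermean is essentially a categorised aggregation in iris; a rough sketch (file and variable names assumed, not prescribed by this module)::

    import iris
    import iris.coord_categorisation

    cube = iris.load_cube('cubeList.nc', 'air_temperature')
    iris.coord_categorisation.add_season(cube, 'time')
    seasonal = cube.aggregated_by('season', iris.analysis.MEAN)
    djf = seasonal.extract(iris.Constraint(season='djf'))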
+
+    Supermeans are only applied to full climate years (starting Dec 1st).
+    """
+    name_constraint = iris.Constraint(name=name)
+
+    if not obs_flag:
+        cubes_path = os.path.join(data_dir, 'cubeList.nc')
+    else:
+        cubes_path = os.path.join(data_dir, obs_flag + '_cubeList.nc')
+    cubes = iris.load(cubes_path)
+
+    # use STASH if no standard name
+    for cube in cubes:
+        if cube.name() == 'unknown':
+            cube.rename(str(cube.attributes['STASH']))
+
+    cube = cubes.extract_strict(name_constraint)
+
+    if season in ['djf', 'mam', 'jja', 'son']:
+        supermeans_cube = periodic_mean(cube, period='season')
+        return supermeans_cube.extract(iris.Constraint(season=season))
+    elif season == 'ann':
+        return periodic_mean(cube)
+    else:
+        raise ValueError(
+            "Argument 'season' must be one of "
+            "['ann', 'djf', 'mam', 'jja', 'son']. "
+            "It is: " + str(season))
+
+
+def contains_full_climate_years(cube):
+    """Test whether cube covers full climate year(s).
+
+    A climate year begins at YYYY-12-01 00:00:00,
+    and ends at YYYY-12-01 00:00:00 of a later year.
+
+    In case of diurnal data, which is sampled at certain hours of the day,
+    the climate year is shifted by up to 23 hours. The climate year
+    boundaries of data sampled at 18:00 would be YYYY-12-01 18:00:00.
+
+    :param cube: Cube.
+    :returns: True if first and last time bound
+              in cube are at YYYY-12-01 00:00:00.
+    :rtype: boolean
+    """
+    origin = cube.coord('time').units.origin
+    calendar = cube.coord('time').units.calendar
+    format_ = 'YYYY-%m-%d %H:%M:%S'
+
+    if not cube.coord('time').has_bounds():
+        raise NoBoundsError()
+
+    def _num2date(num):
+        return cf_units.num2date(num, origin, calendar)
+
+    if is_24h_sampled(cube):
+        # find out number of sampling intervals (difference < 24 h)
+        intervals = []
+        for i in range(len(cube.coord('time').points) - 1):
+            diff = cube.coord('time').points[i] - cube.coord('time').points[0]
+            if diff < 24:
+                intervals.append(round(diff))
+        intervals = len(intervals)
+
+        year_boundaries = [
+            'YYYY-12-01 {:02d}:00:00'.format(hour) for hour in range(24)
+        ]
+
+        bounding_datetimes = []
+        time_bounds = cube.coord('time').bounds
+        for i in range(intervals):
+            start = _num2date(time_bounds[i][0]).strftime(format_)
+            end = _num2date(time_bounds[i - intervals][1]).strftime(format_)
+            bounding_datetimes.append((start, end))
+        return all(start == end and start in year_boundaries and
+                   end in year_boundaries
+                   for start, end in bounding_datetimes)
+    else:
+        start = _num2date(cube.coord('time').bounds[0][0]).strftime(format_)
+        end = _num2date(cube.coord('time').bounds[-1][1]).strftime(format_)
+        year_boundary = 'YYYY-12-01 00:00:00'
+        return start == year_boundary and end == year_boundary
+
+
+def is_24h_sampled(cube):
+    """Check if cube data was sampled once per day."""
+    meaning_periods = []
+    for c_m in cube.cell_methods:
+        if c_m.method == 'mean' and 'time' in c_m.coord_names:
+            meaning_periods.extend(c_m.intervals)
+    return '24 hour' in meaning_periods
+
+
+def periodic_mean(cube, period=None):
+    """Return cube in which all identical periods are averaged into one.
+
+    In case of months this would be averages over all Januaries, Februaries,
+    etc. In case of seasons this would be averages over all Winters, Springs,
+    Summers and Autumns.
+    If no period is specified the average of all data in `cube` is
+    calculated.
+
+    Averaging works with data sampled multiple times per day (diurnal data).
+
+    The averaging takes the different lengths of periods in the Gregorian
+    calendar into account.
+
+    Requires cube with data for full climate years.
Climate years start at the + 1st of December. + + :param cube: Cube with data for each calendar month. + :param period: 'month', 'season' + :returns: Cube with periodic monthly averages. + :rtype: Cube + + Note: In the returned cube, the bounds for each + period are the start boundary + of the first period that is averaged over, + and the end boundary of the last period that is averaged over. + """ + if period not in [None, 'month', 'season']: + raise InvalidPeriod('Invalid period: ' + str(period)) + + _cube = cube.copy() + + if _cube.coord('time').has_bounds(): + add_start_hour(_cube, 'time', name='start_hour') + else: + iris.coord_categorisation.add_hour(_cube, 'time', name='start_hour') + + if period == 'month': + iris.coord_categorisation.add_month(_cube, 'time', name='month') + elif period == 'season': + iris.coord_categorisation.add_season(_cube, 'time') + elif period is None: + pass + else: + raise InvalidPeriod('Invalid period: ' + str(period)) + + time_points_per_day = len(set(_cube.coord('start_hour').points)) + if period is None: # multi-annual mean + if time_points_per_day > 1: + _cube = time_average_by(_cube, 'start_hour') + else: + _cube.remove_coord('start_hour') + _cube = time_average_by(_cube) + else: + if time_points_per_day > 1: + _cube = time_average_by(_cube, [period, 'start_hour']) + else: + _cube.remove_coord('start_hour') + _cube = time_average_by(_cube, period) + + return _cube + + +def add_start_hour(cube, coord, name='diurnal_sampling_hour'): + """Add AuxCoord for diurnal data. Diurnal data is sampled every 24 hours. + + The hour value is taken from the first time bound, or the time point if no + bounds exist. + """ + _add_categorised_coord(cube, name, coord, start_hour_from_bounds) + + +def start_hour_from_bounds(coord, _, bounds): + """Add hour from bounds.""" + return np.array([_pt_date(coord, _bounds[0]).hour for _bounds in bounds]) + + +def _add_categorised_coord(cube, + name, + from_coord, + category_function, + units='1'): + """ + Add categorized coordinate. + + This function creates a category from coordinate bounds. To derive the + category from the points use: + `iris.coord_categorisation.add_categorised_coord` + + This function has the same interface as + `iris.coord_categorisation.add_categorised_coord` + + ###################################################################### + + Add a new coordinate to a cube, by categorising an existing one. + Make a new :class:`iris.coords.AuxCoord` from mapped values, and add it to + the cube. + + + Args: + + * cube (:class:`iris.cube.Cube`): + the cube containing 'from_coord'. The new coord will be added into it. + * name (string): + name of the created coordinate + * from_coord (:class:`iris.coords.Coord` or string): + coordinate in 'cube', or the name of one + * category_function (callable): + function(coordinate, value), returning a category value for a + coordinate point-value + + Kwargs: + + * units: + units of the category value, typically 'no_unit' or '1'. + """ + # Interpret coord, if given as a name + if isinstance(from_coord, six.string_types): + from_coord = cube.coord(from_coord) + + if cube.coords(name): + msg = 'A coordinate "%s" already exists in the cube.' 
% name + raise ValueError(msg) + + new_coord = iris.coords.AuxCoord( + category_function(from_coord, from_coord.points, from_coord.bounds), + units=units, + attributes=from_coord.attributes.copy()) + new_coord.rename(name) + + # Add into the cube + cube.add_aux_coord(new_coord, cube.coord_dims(from_coord)) + + +def time_average_by(cube, periods='time'): + """Average cube over time or over periods. + + i. e. time-based categorical + coordinates, with calendar dependent weighting. + """ + if isinstance(periods, str): + periods = [periods] + + # create new cube with time coord and orig duration as data + durations_cube = iris.cube.Cube( + # durations normalised to 1 + durations(cube.coord('time')) / np.max(durations(cube.coord('time'))), + long_name='duration', + units='1', + attributes=None, + dim_coords_and_dims=[(cube.coord('time').copy(), 0)]) + # there must be an AuxCoord for each period + for period in periods: + if period != 'time': + durations_cube.add_aux_coord(cube.coord(period), 0) + + # calculate weighted sum + orig_cell_methods = cube.cell_methods + + # multiply each time slice by its duration + idx_obj = [None] * cube.data.ndim + idx_obj[cube.coord_dims('time')[0]] = slice( + None) # [None, slice(None), None] == [np.newaxis, :, np.newaxis] + cube.data *= durations_cube.data[idx_obj] + + if periods == ['time']: # duration weighted averaging + cube = cube.collapsed(periods, iris.analysis.SUM) + durations_cube = durations_cube.collapsed(periods, iris.analysis.SUM) + else: + cube = cube.aggregated_by(periods, iris.analysis.SUM) + durations_cube = durations_cube.aggregated_by(periods, + iris.analysis.SUM) + + # divide by aggregated weights + if durations_cube.data.shape == (): + cube.data /= durations_cube.data + else: + cube.data /= durations_cube.data[idx_obj] + + # correct cell methods + cube.cell_methods = orig_cell_methods + time_averaging_method = iris.coords.CellMethod( + method='mean', coords=periods) + cube.add_cell_method(time_averaging_method) + + return cube + + +def durations(time_coord): + """Return durations of time periods.""" + assert time_coord.has_bounds(), 'No bounds. Do not guess.' 
+    durs = np.array(
+        [bounds[1] - bounds[0] for bounds in time_coord.bounds])
+    return durs
diff --git a/esmvaltool/diag_scripts/shared/_validation.py b/esmvaltool/diag_scripts/shared/_validation.py
new file mode 100644
index 0000000000..3fd4fcc2b3
--- /dev/null
+++ b/esmvaltool/diag_scripts/shared/_validation.py
@@ -0,0 +1,86 @@
+"""Load functions needed by diags with CONTROL and EXPERIMENT."""
+import os
+import logging
+import iris
+from esmvaltool.preprocessor import time_average
+from esmvaltool.diag_scripts.shared import select_metadata
+
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def get_control_exper_obs(short_name, input_data, cfg, cmip_type):
+    """
+    Get control, exper and obs datasets.
+
+    This function is used when running recipes that need
+    a clear distinction between a control dataset, an experiment
+    dataset and optional obs (OBS, obs4mips etc) datasets;
+    such recipes include recipe_validation, and all the autoassess
+    ones.
+
+    short_name: variable short name
+    input_data: dict containing the input data info
+    cfg: configuration dictionary as used in this module
+    cmip_type: CMIP project type (e.g. CMIP5)
+    """
+    # select data per short name and CMIP type
+    dataset_selection = select_metadata(
+        input_data, short_name=short_name, project=cmip_type)
+
+    # get the obs datasets if specified in recipe
+    if 'observational_datasets' in cfg:
+        obs_selection = [
+            select_metadata(
+                input_data, short_name=short_name, dataset=obs_dataset)[0]
+            for obs_dataset in cfg['observational_datasets']
+        ]
+    else:
+        obs_selection = []
+
+    # determine CONTROL and EXPERIMENT datasets
+    for model in dataset_selection:
+        if model['dataset'] == cfg['control_model']:
+            logger.info("Control dataset %s", model['dataset'])
+            control = model
+        elif model['dataset'] == cfg['exper_model']:
+            logger.info("Experiment dataset %s", model['dataset'])
+            experiment = model
+
+    if obs_selection:
+        logger.info("Observations dataset(s) %s",
+                    [obs['dataset'] for obs in obs_selection])
+
+    return control, experiment, obs_selection
+
+
+# apply supermeans: handy function that loads CONTROL, EXPERIMENT
+# and OBS (if any) files and applies time_average() to mean the cubes
+def apply_supermeans(ctrl, exper, obs_list):
+    """
+    Apply supermeans on data components, i.e. MEAN on time.
+
+    This function is an extension of time_average() meant to ease the
+    time-meaning procedure when dealing with CONTROL, EXPERIMENT and OBS
+    (if any) datasets.
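Usage is straightforward: the dataset dictionaries only need a 'filename' entry; the paths below are invented::

    ctrl = {'filename': '/path/to/control.nc'}
    exper = {'filename': '/path/to/experiment.nc'}
    ctrl_cube, exper_cube, obs_cubes = apply_supermeans(ctrl, exper, [])
    # obs_cubes is None when no observational datasets are passed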
+
+    ctrl: dictionary of CONTROL dataset
+    exper: dictionary of EXPERIMENT dataset
+    obs_list: list of dicts for OBS datasets (0, 1 or many)
+
+    Returns: control and experiment cubes and list of obs cubes
+    """
+    ctrl_file = ctrl['filename']
+    exper_file = exper['filename']
+    ctrl_cube = iris.load_cube(ctrl_file)
+    exper_cube = iris.load_cube(exper_file)
+    ctrl_cube = time_average(ctrl_cube)
+    exper_cube = time_average(exper_cube)
+    if obs_list:
+        obs_cube_list = []
+        for obs in obs_list:
+            obs_file = obs['filename']
+            obs_cube = iris.load_cube(obs_file)
+            obs_cube = time_average(obs_cube)
+            obs_cube_list.append(obs_cube)
+    else:
+        obs_cube_list = None
+
+    return ctrl_cube, exper_cube, obs_cube_list
diff --git a/esmvaltool/diag_scripts/shared/ensemble.ncl b/esmvaltool/diag_scripts/shared/ensemble.ncl
index 0056d57c82..a7ed176459 100644
--- a/esmvaltool/diag_scripts/shared/ensemble.ncl
+++ b/esmvaltool/diag_scripts/shared/ensemble.ncl
@@ -7,16 +7,12 @@
 ; Contents:
 ;    function get_start_year
 ;    function get_end_year
-;    function multi_model_stats
 ;
 ; #############################################################################
-load "./interface_scripts/auxiliary.ncl"
-load "./interface_scripts/data_handling.ncl"
-load "./interface_scripts/logging.ncl"
-
-load "./diag_scripts/shared/regridding.ncl"
-load "./diag_scripts/shared/statistics.ncl"
+load "$diag_scripts/../interface_scripts/auxiliary.ncl"
+load "$diag_scripts/../interface_scripts/data_handling.ncl"
+load "$diag_scripts/../interface_scripts/logging.ncl"
 
 ; #############################################################################
 undef("get_start_year")
@@ -52,9 +48,9 @@ begin
        "contain the range_option attribute")
   end if
   if (diag_script_info@range_option.eq.0) then
-    start_year = toint(dataset_info@start_year(dataset_index))
+    start_year = toint(input_file_info@start_year(dataset_index))
   else if (diag_script_info@range_option.eq.1) then
-    start_year = max(toint(dataset_info@start_year))
+    start_year = max(toint(input_file_info@start_year))
   else
     error_msg("f", "ensemble.ncl", funcname, "range_option not " + \
               "properly set, must be either 0 or 1")
@@ -100,9 +96,9 @@ begin
       "contain the range_option attribute")
   end if
   if (diag_script_info@range_option.eq.0) then
-    end_year = toint(dataset_info@end_year(dataset_index))
+    end_year = toint(input_file_info@end_year(dataset_index))
   else if (diag_script_info@range_option.eq.1) then
-    end_year = min(toint(dataset_info@end_year))
+    end_year = min(toint(input_file_info@end_year))
   else
     error_msg("f", "ensemble.ncl", funcname, "range_option not " + \
               "properly set, must be either 0 or 1")
@@ -113,266 +109,3 @@ begin
   return(end_year)
 end
-
-
-; #############################################################################
-undef("multi_model_stats")
-function multi_model_stats(idx[*]:integer,
-                           var:string,
-                           field:string,
-                           grd[1]:string,
-                           statistics[*]:string,
-                           opt[1])
-;
-; Arguments
-;    idx: a 1-D array of dataset indexes to be considered.
-;    var: variable name.
-;    field: field type.
-;    grd: type of grid to be selected:
-;         "coarsest": returns the lowest resolution grid.
-;         "finest": returns the highest resolution grid.
-;    statistics: a 1-D array of strings for selecting the statistical
-;                fields to return.
Currently implemented: -; "model_mean" -> multi model mean -; "stddev" -> standard deviation over "datasets" dimension -; "model_grd" -> models regridded to a common grid -; opt: not used yet, set to 0 -; -; Return value -; A field with target grid spatial dimensions (lat, lon) or -; (plev, lat, lon), time coordinate, datasets coordinate. -; The "datasets" coordinate contains the strings from dataset_info@dataset -; and statistics. -; Covers the overlapping time period of all datasets only. -; -; Description -; Determines statistical fields of all input datasets, after regridding them -; to a common grid. Statistics is over the "datasets" coordinate only. -; See function find_destination_grid (regridding.ncl) for details of -; target grid selection. -; -; Caveats -; Works for rectilinear grids only, but selection of regridding routines -; might be extended (e.g. select automatically, depending on input). -; Might be extended to return multi year monthly mean values. -; Wrap to add result to the structure returned by function read_data? -; Probably not very efficient: try to re-use intermediate data etc. -; -; References -; -; Modification history: -; 20140515-A_gott_kl: written. -; -local funcname, scriptname, dest_grd, monotonicity, field_type, rank_grd, \ - start_year, end_year, idx_mod, var_tmp, var_olap, var_grd, data_mmm, \ - plev_grd, ireturn, tmp, dataset, Fill, stats, nstats, tmp, iMod, \ - interpol_flag, nvalid, var_plev, result -begin - - funcname = "multi_model_stats" - scriptname = "diag_scripts/shared/ensemble.ncl" - enter_msg(scriptname, funcname) - - ; Determine destination grid (regridding.ncl) - field_type = str_get_cols(field, 0, 1) - if (field_type.eq."T2" .or. field_type.eq."T3") then - dest_grd = find_destination_grid(idx, var, field, grd) - rank_grd = dimsizes(dimsizes(dest_grd)) - ; check monotonicity of pressure coordinate - if (rank_grd.eq.3) then - plev_grd = profile1d&plev - monotonicity = isMonotonic(plev_grd) - if (monotonicity.eq.0) then ; crash - error_msg("f", "ensemble.ncl", funcname, "plev_dat is not monotonic") - end if - end if - else - error_msg("f", "ensemble.ncl", funcname, \ - "accepts T2* & T3*, but found " + field_type) - end if - - ; Determine overlapping time period - start_year = max(toint(dataset_info@start_year)) - end_year = min(toint(dataset_info@end_year)) - log_info("overlapping time period: " + tostring(start_year) + \ - " - " + tostring(end_year)) - - ; Create data structure for mmm datasets - do i = 0, dimsizes(idx) - 1 - idx_mod = idx(i) - data_mmm = read_data(idx_mod, var, field) - end do - - ; Loop over mmm datasets - do i = 0, dimsizes(idx) - 1 - dataset = dataset_info@dataset(idx(i)) - - ; extract full data - var_tmp = extract_data(i, data_mmm, -1, 0, 0) - - ; extract overlapping time period - var_olap = time_operations(var_tmp, start_year, end_year, "extract", "", 0) - delete(var_tmp) - - ; initialize target field for regridded data ************************* - if (i.eq.0) then - if (.not.isatt(var_olap, "_FillValue")) then - Fill = default_fillvalue(typeof(var_olap)) - else - Fill = var_olap@_FillValue - end if - if (rank_grd.eq.3) then - var_grd = new((/dimsizes(idx) + 2, dimsizes(var_olap&time), \ - dimsizes(dest_grd&plev), dimsizes(dest_grd&lat),\ - dimsizes(dest_grd&lon)/), typeof(var_olap), Fill) - var_grd!2 = "plev" - var_grd!3 = "lat" - var_grd!4 = "lon" - var_grd&plev = dest_grd&plev - else if (rank_grd.eq.2) then - var_grd = new((/dimsizes(idx) + 2, dimsizes(var_olap&time), \ - dimsizes(dest_grd&lat), \ - 
dimsizes(dest_grd&lon)/), typeof(var_olap), Fill) - var_grd!2 = "lat" - var_grd!3 = "lon" - end if - end if - var_grd!0 = "datasets" - var_grd!1 = "time" - var_grd&time = var_olap&time - var_grd&lat = dest_grd&lat - var_grd&lon = dest_grd&lon - - ; implemented statistics, hardcoded positions - stats = (/"model_mean", "stddev"/) - nstats = dimsizes(stats) - var_grd&datasets = \ - array_append_record(stats, dataset_info@dataset(idx), 0) - - ; select return fields - tmp = new(dimsizes(var_grd&datasets), integer) - if (any("model_mean" .eq. statistics)) then - tmp(0) = 1 - end if - if (any("stddev" .eq. statistics)) then - tmp(1) = 1 - end if - if (any("model_grd" .eq. statistics)) then - tmp(nstats:) = 1 - end if - ireturn = ind(tmp.eq.1) ; selected indices of datasets coordinate - delete(tmp) - end if ; target field initialization ********************************* - iMod = nstats + i - - ; ensure consistent missing values - var_olap = where(ismissing(var_olap), Fill, var_olap) - var_olap@_FillValue = Fill - - ; check that all data are on the same time coordinate - if (dimsizes(var_olap&time).ne.dimsizes(var_grd&time)) then - error_msg("f", "ensemble.ncl", funcname, \ - "inconsistent number of time steps for " + \ - var_grd&datasets(iMod)) - end if - if (any(var_olap&time.ne.var_grd&time)) then - error_msg("f", "ensemble.ncl", funcname, \ - "inconsistent time coordinate for " + \ - var_grd&datasets(iMod)) - end if - - ; pressure interpolation ********************************************* -; NOT TESTED YET -; make this a function: used in Emmons.ncl, perfmetrics? -; more flexible dimensions, e.g. time missing? - if (rank_grd.eq.3) then - ; check consistency of pressure levels - if (isMonotonic(var_olap&plev).eq.0) then ; crash - error_msg("f", "ensemble.ncl", funcname, \ - "plev is not monotonic for " + var_grd&datasets(iMod)) - end if - if (isMonotonic(var_olap&plev).ne.monotonicity) then - var_olap = var_olap(:, ::-1, :, :) ; reverse - end if - - ; interpolate to plev_grd - if (isatt(diag_script_info, "interpolation")) then - interpol_flag = diag_script_info@interpolation - log_debug("Interpolation set by diag_script_info") - else ; default - valid_plev = plev_grd(ind(.not.ismissing(plev_grd))) - nvalid = dimsizes(valid_plev) - if (max(var_olap&plev).gt.valid_plev(1) .and. 
\ - min(var_olap&plev).lt.valid_plev(nvalid - 2)) then - interpol_flag = -1 - log_debug("Linear interpolation with extrapolation") - else - interpol_flag = 1 - log_debug("Linear interpolation, no extrapolation") - end if - end if - var_plev = int2p_n_Wrap(var_olap&plev, var_olap,\ - plev_grd, interpol_flag, 0) - delete(var_olap) - var_olap = var_plev - copy_VarMeta(var_plev, var_olap) - delete([/var_plev, valid_plev, nvalid/]) - end if ; pressure interpolation ************************************** - - ; lat-lon interpolation, mmm, stdev ********************************** - if (grd.eq."coarsest") then - if (rank_grd.eq.3) then - var_grd(iMod, :, :, :, :) = \ - area_hi2lores_Wrap(var_olap&lon, var_olap&lat, var_olap, \ - True, 1, dest_grd&lon, dest_grd&lat, False) - else if (rank_grd.eq.2) then - var_grd(iMod, :, :, :) = \ - area_hi2lores_Wrap(var_olap&lon, var_olap&lat, var_olap, \ - True, 1, dest_grd&lon, dest_grd&lat, False) - else - error_msg("f", "ensemble.ncl", funcname, \ - "rank_grd = " + tostring(rank_grd) + " not implemented") - end if - end if - else - error_msg("f", "ensemble.ncl", funcname, \ - "interpolation option " + grd + " not yet implemented") - end if ; lat-lon interpolation *************************************** - ; clean up - delete([/var_olap/]) - end do ; i (loop over mmm datasets) - ; statistics - - if (rank_grd.eq.3) then - var_grd(0, :, :, :, :) = \ - dim_avg_n_Wrap(var_grd(nstats:, :, :, :, :), 0) - var_grd(1, :, :, :, :) = \ - dim_stddev_n_Wrap(var_grd(nstats:, :, :, :, :), 0) - result = var_grd(ireturn, :, :, :, :) - else if (rank_grd.eq.2) then - var_grd(0, :, :, :) = dim_avg_n_Wrap(var_grd(nstats:, :, :, :), 0) - var_grd(1, :, :, :) = dim_stddev_n_Wrap(var_grd(nstats:, :, :, :), 0) - result = var_grd(ireturn, :, :, :) - else - error_msg("f", "ensemble.ncl", funcname, \ - "multi_model_mean, rank_grd = " + \ - tostring(rank_grd) + " not implemented") - end if - end if - -; DEBUG+++ -; var_grd@diag_script = "void" -; var_grd@var = var_grd@standard_name -; printVarSummary(var_grd) -; outfile = ncdf_write(var_grd(0,:,:,:), "test0.nc") -; outfile = ncdf_write(var_grd(1,:,:,:), "test1.nc") -; outfile = ncdf_write(var_grd(2,:,:,:), "test2.nc") -; outfile = ncdf_write(var_grd(3,:,:,:), "test3.nc") -; outfile = ncdf_write(var_grd(4,:,:,:), "test4.nc") -; print("wrote "+outfile) -; DEBUG--- - - leave_msg(scriptname, funcname) - return(result) - -end diff --git a/esmvaltool/diag_scripts/shared/external.R b/esmvaltool/diag_scripts/shared/external.R new file mode 100644 index 0000000000..5a3c3fb60c --- /dev/null +++ b/esmvaltool/diag_scripts/shared/external.R @@ -0,0 +1,41 @@ +# Wrappers to call external commands using system2 +# Currently implemented: cdo, nco + +cdo <- function(command, args = "", input = "", options = "", output = "", + stdout = "", noout = F) { + if (args != "") args <- paste0(",", args) + if (stdout != "") { + stdout <- paste0(" > '", stdout, "'") + noout <- T + } + if (input[1] != "") { + for (i in 1:length(input)) { + input[i] <- paste0("'", input[i], "'") + } + input <- paste(input, collapse = " ") + } + output0 <- output + if (output != "") { + output <- paste0("'", output, "'") + } else if (!noout) { + output <- tempfile() + output0 <- output + } + argstr <- paste0( + options, " ", command, args, " ", input, " ", output, + " ", stdout + ) + print(paste("cdo", argstr)) + ret <- system2("cdo", args = argstr) + if (ret != 0) { + stop(paste("Failed (", ret, "): cdo", argstr)) + } + return(output0) +} + +nco <- function(cmd, argstr) { + ret <- 
system2(cmd, args = argstr) + if (ret != 0) { + stop(paste("Failed (", ret, "): ", cmd, " ", argstr)) + } +} diff --git a/esmvaltool/diag_scripts/shared/info_output.r b/esmvaltool/diag_scripts/shared/info_output.r deleted file mode 100644 index 932f222c96..0000000000 --- a/esmvaltool/diag_scripts/shared/info_output.r +++ /dev/null @@ -1,71 +0,0 @@ -info_output <- function(output_string, - verbosity, - required_verbosity) { - - main_wd = Sys.getenv(c("ESMValTool_interface_data")) - if (nchar(main_wd) == 0) { - print(paste("info: ", output_string, sep = "")) - } - else { - indent_step <- 3 - if (verbosity == 2) { - padding_space <- 13 - } - else { - padding_space <- 1 - } - - if (verbosity >= required_verbosity) { - entering_routine <- regexpr("<<<<<<<< Entering", output_string, fixed = T)[1] - indent_file <- file.path(main_wd, "curr_trace_indent.txt") - if (entering_routine != -1) { - indent <- read_integer(indent_file) - indent = indent + indent_step - write_integer(indent, indent_file) - } - - ## Add a prefix of ">>", "<<" or " " to output_string - indent_str = as.character(read_integer(indent_file)) - format <- paste("%0", indent_str, "d", sep = "") - entering <- regexpr("<", output_string, fixed = T)[1] - leaving <- regexpr(">", output_string, fixed = T)[1] - if (entering != -1) { - indent_str <- gsub("0", "<", sprintf(format, 0)) - } - else if (leaving != -1) { - indent_str <- gsub("0", ">", sprintf(format, 0)) - } - else { - indent_str <- sprintf(format, 0) - indent_str <- sub("0", "", indent_str) - indent_str <- gsub("0", " ", indent_str) - } - - pasted_string <- paste("info: ", indent_str, output_string, sep = "") - print(pasted_string) - - ## Decrease indentation if we're leaving an NCL routine - leaving_routine <- regexpr(">>>>>>>> Leaving", output_string, fixed = T)[1] - if (leaving_routine != -1) { - indent <- read_integer(indent_file) - indent = indent - indent_step - write_integer(indent, indent_file) - } - } - } -} - -read_integer <- function (filename) -{ - fileConn <- file(filename) - indent <- as.integer(readLines(filename)) - close(fileConn) - indent -} - -write_integer <- function (indent, filename) -{ - fileConn <- file(filename) - writeLines(as.character(indent), fileConn) - close(fileConn) -} diff --git a/esmvaltool/diag_scripts/shared/io.py b/esmvaltool/diag_scripts/shared/io.py new file mode 100644 index 0000000000..49027ad27e --- /dev/null +++ b/esmvaltool/diag_scripts/shared/io.py @@ -0,0 +1,298 @@ +"""Convenience functions for writing netcdf files.""" +import fnmatch +import logging +import os + +import iris +import numpy as np + +from .iris_helpers import unify_1d_cubes + +logger = logging.getLogger(__name__) + +VAR_KEYS = [ + 'long_name', + 'units', +] +NECESSARY_KEYS = VAR_KEYS + [ + 'dataset', + 'filename', + 'project', + 'short_name', +] + + +def _has_necessary_attributes(metadata, + only_var_attrs=False, + log_level='debug'): + """Check if dataset metadata has necessary attributes.""" + keys_to_check = (VAR_KEYS + ['short_name'] + if only_var_attrs else NECESSARY_KEYS) + for dataset in metadata: + for key in keys_to_check: + if key not in dataset: + getattr(logger, log_level)("Dataset '%s' does not have " + "necessary attribute '%s'", dataset, + key) + return False + return True + + +def get_all_ancestor_files(cfg, pattern=None): + """Return a list of all files in the ancestor directories. + + Parameters + ---------- + cfg : dict + Diagnostic script configuration. + pattern : str, optional + Only return files which match a certain pattern. 
+ + Returns + ------- + list of str + Full paths to the ancestor files. + + """ + ancestor_files = [] + input_dirs = [ + d for d in cfg['input_files'] if not d.endswith('metadata.yml') + ] + for input_dir in input_dirs: + for (root, _, files) in os.walk(input_dir): + if pattern is not None: + files = fnmatch.filter(files, pattern) + files = [os.path.join(root, f) for f in files] + ancestor_files.extend(files) + return ancestor_files + + +def get_ancestor_file(cfg, pattern): + """Return a desired file in the ancestor directories. + + Parameters + ---------- + cfg : dict + Diagnostic script configuration. + pattern : str + Pattern which specifies the name of the file. + + Returns + ------- + str or None + Full path to the file or `None` if file not found. + + """ + files = get_all_ancestor_files(cfg, pattern=pattern) + if not files: + logger.warning( + "No file with requested name %s found in ancestor " + "directories", pattern) + return None + if len(files) != 1: + logger.warning( + "Multiple files with requested pattern %s found (%s), returning " + "first appearance", pattern, files) + return files[0] + + +def netcdf_to_metadata(cfg, pattern=None, root=None): + """Convert attributes of netcdf files to list of metadata. + + Parameters + ---------- + cfg : dict + Diagnostic script configuration. + pattern : str, optional + Only consider files which match a certain pattern. + root : str, optional (default: ancestor directories) + Root directory for the search. + + Returns + ------- + list of dict + List of dataset metadata. + + """ + if root is None: + all_files = get_all_ancestor_files(cfg, pattern) + else: + all_files = [] + for (base, _, files) in os.walk(root): + if pattern is not None: + files = fnmatch.filter(files, pattern) + files = [os.path.join(base, f) for f in files] + all_files.extend(files) + all_files = fnmatch.filter(all_files, '*.nc') + + # Iterate over netcdf files + metadata = [] + for path in all_files: + cube = iris.load_cube(path) + dataset_info = dict(cube.attributes) + for var_key in VAR_KEYS: + dataset_info[var_key] = getattr(cube, var_key) + if cube.standard_name is not None: + dataset_info['standard_name'] = cube.standard_name + dataset_info['short_name'] = cube.var_name + dataset_info['filename'] = path + + # Check if necessary keys are available + if _has_necessary_attributes([dataset_info], log_level='warning'): + metadata.append(dataset_info) + else: + logger.warning("Skipping '%s'", path) + + return metadata + + +def metadata_to_netcdf(cube, metadata): + """Convert single metadata dictionary to netcdf file. + + Parameters + ---------- + cube : iris.cube.Cube + Cube to be written. + metadata : dict + Metadata for the cube. + + """ + metadata = dict(metadata) + if not _has_necessary_attributes([metadata], log_level='warning'): + logger.warning("Cannot save cube\n%s", cube) + return + for var_key in VAR_KEYS: + setattr(cube, var_key, metadata.pop(var_key)) + cube.var_name = metadata.pop('short_name') + if 'standard_name' in metadata: + standard_name = metadata.pop('standard_name') + try: + cube.standard_name = standard_name + except ValueError: + logger.debug("Got invalid standard_name '%s'", standard_name) + for (attr, val) in metadata.items(): + if isinstance(val, bool): + metadata[attr] = str(val) + cube.attributes.update(metadata) + iris_save(cube, metadata['filename']) + + +def save_1d_data(cubes, path, coord_name, var_attrs, attributes=None): + """Save 1D data for multiple datasets. 
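
Illustrative aside for reviewers: a minimal sketch of how the ancestor-file helpers above are meant to be called from a diagnostic script. `cfg` is the configuration dict ESMValTool passes to every diagnostic; the file patterns are hypothetical examples, not part of this PR.

```python
# Sketch only: `cfg` is the diagnostic configuration dict; patterns are
# hypothetical examples.
from esmvaltool.diag_scripts.shared.io import (
    get_all_ancestor_files,
    get_ancestor_file,
    netcdf_to_metadata,
)


def inspect_ancestors(cfg):
    """Collect ancestor files and their attributes as metadata."""
    all_ncs = get_all_ancestor_files(cfg, pattern='*.nc')
    # Logs a warning unless exactly one file matches; may return None.
    tas_file = get_ancestor_file(cfg, pattern='tas_*.nc')
    metadata = netcdf_to_metadata(cfg, pattern='*.nc')
    return all_ncs, tas_file, metadata
```
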
+ + Create 2D cube with the dimensional coordinate `coord_name` and the + auxiliary coordinate `dataset` and save 1D data for every dataset given. + The cube is filled with missing values where no data exists for a dataset + at a certain point. + + Note + ---- + Does not check metadata of the `cubes`, i.e. different names or units + will be ignored. + + Parameters + ---------- + cubes : dict of iris.cube.Cube + 1D `iris.cube.Cube`s (values) and corresponding datasets (keys). + path : str + Path to the new file. + coord_name : str + Name of the coordinate. + var_attrs : dict + Attributes for the variable (`short_name`, `long_name` and `units`). + attributes : dict, optional + Additional attributes for the cube. + + """ + var_attrs = dict(var_attrs) + if not cubes: + logger.warning("Cannot save 1D data, no cubes given") + return + if not _has_necessary_attributes( + [var_attrs], only_var_attrs=True, log_level='warning'): + logger.warning("Cannot write file '%s'", path) + return + datasets = list(cubes.keys()) + cube_list = iris.cube.CubeList(list(cubes.values())) + cube_list = unify_1d_cubes(cube_list, coord_name) + data = [c.data for c in cube_list] + dataset_coord = iris.coords.AuxCoord(datasets, long_name='dataset') + coord = cube_list[0].coord(coord_name) + if attributes is None: + attributes = {} + var_attrs['var_name'] = var_attrs.pop('short_name') + + # Create new cube + cube = iris.cube.Cube( + np.ma.array(data), + aux_coords_and_dims=[(dataset_coord, 0), (coord, 1)], + attributes=attributes, + **var_attrs) + iris_save(cube, path) + + +def iris_save(source, path): + """Save :mod:`iris` objects with correct attributes. + + Parameters + ---------- + source : iris.cube.Cube or iterable of iris.cube.Cube + Cube(s) to be saved. + path : str + Path to the new file. + + """ + if isinstance(source, iris.cube.Cube): + source.attributes['filename'] = path + else: + for cube in source: + cube.attributes['filename'] = path + iris.save(source, path) + logger.info("Wrote %s", path) + + +def save_scalar_data(data, path, var_attrs, aux_coord=None, attributes=None): + """Save scalar data for multiple datasets. + + Create 1D cube with the auxiliary coordinate `dataset` and save scalar data + for every dataset given. + + Note + ---- + Missing values can be specified as `np.nan`. + + Parameters + ---------- + data : dict + Scalar data (values) and corresponding datasets (keys). + path : str + Path to the new file. + var_attrs : dict + Attributes for the variable (`short_name`, `long_name` and `units`). + aux_coord : iris.coords.AuxCoord, optional + Optional auxiliary coordinate. + attributes : dict, optional + Additional attributes for the cube.
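
A minimal usage sketch for `save_1d_data`; the dataset names, values, and output path below are made up for illustration.

```python
# Sketch only: datasets, values and the output path are illustrative.
import iris
import numpy as np

from esmvaltool.diag_scripts.shared.io import save_1d_data

time = iris.coords.DimCoord(np.array([0., 1., 2.]), long_name='time')
cubes = {
    'MODEL-A': iris.cube.Cube(np.array([1., 2., 3.]),
                              dim_coords_and_dims=[(time, 0)]),
    'MODEL-B': iris.cube.Cube(np.array([2., 3., 4.]),
                              dim_coords_and_dims=[(time.copy(), 0)]),
}
var_attrs = {
    'short_name': 'tas',
    'long_name': 'Near-Surface Air Temperature',
    'units': 'K',
}
# Writes a (dataset, time) cube; points missing for a dataset are masked.
save_1d_data(cubes, 'tas_per_dataset.nc', 'time', var_attrs)
```
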
+ + """ + var_attrs = dict(var_attrs) + if not data: + logger.warning("Cannot save scalar data, no data given") + return + if not _has_necessary_attributes( + [var_attrs], only_var_attrs=True, log_level='warning'): + logger.warning("Cannot write file '%s'", path) + return + dataset_coord = iris.coords.AuxCoord(list(data), long_name='dataset') + if attributes is None: + attributes = {} + var_attrs['var_name'] = var_attrs.pop('short_name') + coords = [(dataset_coord, 0)] + if aux_coord is not None: + coords.append((aux_coord, 0)) + cube = iris.cube.Cube( + np.ma.masked_invalid(list(data.values())), + aux_coords_and_dims=coords, + attributes=attributes, + **var_attrs) + iris_save(cube, path) diff --git a/esmvaltool/diag_scripts/shared/iris_helpers.py b/esmvaltool/diag_scripts/shared/iris_helpers.py new file mode 100644 index 0000000000..f36186837a --- /dev/null +++ b/esmvaltool/diag_scripts/shared/iris_helpers.py @@ -0,0 +1,285 @@ +"""Convenience functions for :mod:`iris` objects.""" +import logging +from pprint import pformat + +import iris +import numpy as np + +from ._base import group_metadata + +logger = logging.getLogger(__name__) + + +def _transform_coord_to_ref(cubes, ref_coord): + """Transform coordinates of cubes to reference.""" + try: + # Convert AuxCoord to DimCoord if necessary and possible + ref_coord = iris.coords.DimCoord.from_coord(ref_coord) + except ValueError: + pass + if not np.array_equal( + np.unique(ref_coord.points), np.sort(ref_coord.points)): + raise ValueError( + f"Expected unique coordinate '{ref_coord.name()}', got " + f"{ref_coord}") + coord_name = ref_coord.name() + new_cubes = iris.cube.CubeList() + for cube in cubes: + coord = cube.coord(coord_name) + if not np.all(np.isin(coord.points, ref_coord.points)): + raise ValueError( + f"Coordinate {coord} of cube\n{cube}\nis not subset of " + f"reference coordinate {ref_coord}") + new_data = np.full(ref_coord.shape, np.nan) + indices = np.where(np.in1d(ref_coord.points, coord.points)) + new_data[indices] = np.ma.filled(cube.data, np.nan) + new_cube = iris.cube.Cube(np.ma.masked_invalid(new_data)) + if isinstance(ref_coord, iris.coords.DimCoord): + new_cube.add_dim_coord(ref_coord, 0) + else: + new_cube.add_aux_coord(ref_coord, 0) + for aux_coord in cube.coords(dim_coords=False): + if aux_coord.shape in ((), (1, )): + new_cube.add_aux_coord(aux_coord, []) + new_cube.metadata = cube.metadata + new_cubes.append(new_cube) + check_coordinate(new_cubes, coord_name) + logger.debug("Successfully unified coordinate '%s' to %s", coord_name, + ref_coord) + logger.debug("of cubes") + logger.debug(pformat(cubes)) + return new_cubes + + +def check_coordinate(cubes, coord_name): + """Compare coordinate of cubes and raise error if not identical. + + Parameters + ---------- + cubes : iris.cube.CubeList + Cubes to be compared. + coord_name : str + Name of the coordinate. + + Returns + ------- + numpy.array + Points of the coordinate. + + Raises + ------ + iris.exceptions.CoordinateNotFoundError + Coordinate `coord_name` is not a coordinate of one of the cubes. + ValueError + Given coordinate differs for the input cubes. 
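
A corresponding sketch for `save_scalar_data`, again with made-up values; `np.nan` entries end up masked via `np.ma.masked_invalid`.

```python
# Sketch only: values and the output path are illustrative.
import numpy as np

from esmvaltool.diag_scripts.shared.io import save_scalar_data

ecs = {'MODEL-A': 3.2, 'MODEL-B': 4.1, 'MODEL-C': np.nan}
var_attrs = {
    'short_name': 'ecs',
    'long_name': 'Equilibrium Climate Sensitivity',
    'units': 'K',
}
save_scalar_data(ecs, 'ecs.nc', var_attrs)
```
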
+ + """ + coord = None + for cube in cubes: + try: + new_coord = cube.coord(coord_name) + except iris.exceptions.CoordinateNotFoundError: + raise iris.exceptions.CoordinateNotFoundError( + f"'{coord_name}' is not a coordinate of cube\n{cube}") + if coord is None: + coord = new_coord + else: + if new_coord != coord: + raise ValueError( + f"Expected cubes with identical coordinates " + f"'{coord_name}', got {new_coord} and {coord}") + logger.debug("Successfully checked coordinate '%s' of cubes", coord_name) + logger.debug(pformat(cubes)) + return coord.points + + +def convert_to_iris(dict_): + """Change all appearances of `short_name` to `var_name`. + + Parameters + ---------- + dict_ : dict + Dictionary to convert. + + Returns + ------- + dict + Converted dictionary. + + """ + dict_ = dict(dict_) + if 'short_name' in dict_: + if 'var_name' in dict_: + logger.warning( + "Dictionary already contains 'var_name', replacing old value " + "'%s' by '%s'", dict_['var_name'], dict_['short_name']) + dict_['var_name'] = dict_.pop('short_name') + return dict_ + + +def iris_project_constraint(projects, cfg, negate=False): + """Create `iris.Constraint` to select specific projects from data. + + Parameters + ---------- + projects : list of str + Projects to be selected. + cfg : dict + Diagnostic script configuration. + negate : bool, optional (default: False) + Negate constraint (`False`: select all elements that fit `projects`, + `True`: select all elements that do NOT fit `projects`). + + Returns + ------- + iris.Constraint + constraint for coordinate `dataset`. + + """ + datasets = [] + grouped_data = group_metadata(cfg['input_data'].values(), 'project') + for project in projects: + for data in grouped_data.get(project, {}): + if 'dataset' not in data: + logger.warning("Dataset %s does not contain key 'dataset'", + data) + else: + datasets.append(data['dataset']) + + def project_constraint(cell): + """Constraint function.""" + if negate: + return cell not in datasets + return cell in datasets + + return iris.Constraint(dataset=project_constraint) + + +def intersect_dataset_coordinates(cubes): + """Compare dataset coordinates of cubes and match them if necessary. + + Use intersection of coordinate 'dataset' of all given cubes and remove + elements which are not given in all cubes. + + Parameters + ---------- + cubes : iris.cube.CubeList + Cubes to be compared. + + Returns + ------- + iris.cube.CubeList + Transformed cubes. + + Raises + ------ + iris.exceptions.CoordinateNotFoundError + Coordinate `dataset` is not a coordinate of one of the cubes. + ValueError + At least one of the cubes contains a `dataset` coordinate with + duplicate elements or the cubes do not share common elements. 
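
A short sketch of `iris_project_constraint`; the `cfg` dict and the cube with a `dataset` coordinate are assumed to come from the calling diagnostic.

```python
# Sketch only: `cfg` and `cube` (with a 'dataset' coordinate) are assumed
# to be provided by the calling diagnostic.
from esmvaltool.diag_scripts.shared.iris_helpers import iris_project_constraint


def split_by_project(cube, cfg):
    """Split a multi-dataset cube into OBS and non-OBS parts."""
    obs = cube.extract(iris_project_constraint(['OBS'], cfg))
    models = cube.extract(iris_project_constraint(['OBS'], cfg, negate=True))
    return obs, models
```
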
+ + """ + common_elements = None + + # Get common elements + for cube in cubes: + try: + coord_points = cube.coord('dataset').points + except iris.exceptions.CoordinateNotFoundError: + raise iris.exceptions.CoordinateNotFoundError( + f"'dataset' is not a coordinate of cube\n{cube}") + if len(set(coord_points)) != len(coord_points): + raise ValueError( + f"Coordinate 'dataset' of cube\n{cube}\n contains duplicate " + f"elements") + if common_elements is None: + common_elements = set(coord_points) + else: + common_elements = common_elements.intersection(set(coord_points)) + common_elements = list(common_elements) + + # Save new cubes + new_cubes = iris.cube.CubeList() + for cube in cubes: + cube = cube.extract(iris.Constraint(dataset=common_elements)) + if cube is None: + raise ValueError(f"Cubes {cubes} do not share common elements") + sorted_idx = np.argsort(cube.coord('dataset').points) + new_cubes.append(cube[sorted_idx]) + check_coordinate(new_cubes, 'dataset') + logger.debug("Successfully matched 'dataset' coordinate to %s", + sorted(common_elements)) + logger.debug("of cubes") + logger.debug(pformat(cubes)) + return new_cubes + + +def unify_1d_cubes(cubes, coord_name): + """Unify 1D cubes by transforming them to identical coordinates. + + Use union of all coordinates as reference and transform other cubes to it + by adding missing values. + + Parameters + ---------- + cubes : iris.cube.CubeList + Cubes to be processed. + coord_name : str + Name of the coordinate. + + Returns + ------- + iris.cube.CubeList + Transformed cubes. + + Raises + ------ + ValueError + Cubes are not 1D, coordinate name differs or not all cube coordinates + are subsets of longest coordinate. + + """ + ref_coord = None + + # Get reference coordinate + for cube in cubes: + if cube.ndim != 1: + raise ValueError(f"Dimension of cube\n{cube}\nis not 1") + try: + new_coord = cube.coord(coord_name) + except iris.exceptions.CoordinateNotFoundError: + raise iris.exceptions.CoordinateNotFoundError( + f"'{coord_name}' is not a coordinate of cube\n{cube}") + if not np.array_equal( + np.unique(new_coord.points), np.sort(new_coord.points)): + raise ValueError( + f"Coordinate '{coord_name}' of cube\n{cube}\n is not unique, " + f"unifying not possible") + if ref_coord is None: + ref_coord = new_coord + else: + new_points = np.union1d(ref_coord.points, new_coord.points) + ref_coord = ref_coord.copy(new_points) + if coord_name == 'time': + iris.util.unify_time_units(cubes) + + # Transform all cubes + return _transform_coord_to_ref(cubes, ref_coord) + + +def var_name_constraint(var_name): + """:mod:`iris.Constraint` using `var_name` of an :mod:`iris.cube.Cube`. + + Parameters + ---------- + var_name : str + Short name (`var_name` in :mod:`iris`) for the constraint. + + Returns + ------- + iris.Constraint + Constraint to select only cubes with correct `var_name`. 
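
Finally, a sketch of `unify_1d_cubes` on two partially overlapping coordinates (made-up values), showing how the union coordinate and masking behave.

```python
# Sketch only: made-up values on partially overlapping 'year' coordinates.
import iris
import numpy as np

from esmvaltool.diag_scripts.shared.iris_helpers import unify_1d_cubes

years_a = iris.coords.DimCoord(np.array([2000., 2001.]), long_name='year')
years_b = iris.coords.DimCoord(np.array([2001., 2002.]), long_name='year')
cubes = iris.cube.CubeList([
    iris.cube.Cube(np.array([1., 2.]), dim_coords_and_dims=[(years_a, 0)]),
    iris.cube.Cube(np.array([3., 4.]), dim_coords_and_dims=[(years_b, 0)]),
])
unified = unify_1d_cubes(cubes, 'year')
# Each cube now spans 2000-2002, with masked values where data was missing.
```
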
+ + """ + return iris.Constraint(cube_func=lambda c: c.var_name == var_name) diff --git a/esmvaltool/diag_scripts/shared/latlon.ncl b/esmvaltool/diag_scripts/shared/latlon.ncl index 23374092fc..af31a07445 100644 --- a/esmvaltool/diag_scripts/shared/latlon.ncl +++ b/esmvaltool/diag_scripts/shared/latlon.ncl @@ -17,10 +17,10 @@ ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/constants.ncl" -load "./interface_scripts/data_handling.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/constants.ncl" +load "$diag_scripts/../interface_scripts/data_handling.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" ; load "./reformat_scripts/obs/reformat_obs_func.ncl" ; write_nc ; FIX-ME ; ############################################################################# @@ -586,6 +586,7 @@ function select_region(region:string) ; name as an attribute. ; ; Modification history +; 20190405-A_righ_ma: change region names to match provenance definitions. ; 20141205-A_gott_kl: adjusted names to Righi et al. (2015). ; 20140410-A_fran_fr: extended to midlat, equatorial and polar regions. ; 20140129-A_fran_fr: written. @@ -597,93 +598,93 @@ begin scriptname = "diag_scripts/shared/latlon.ncl" enter_msg(scriptname, funcname) - if (region.eq."Global") then + if (region.eq."global") then region_array = (/-90., 90., 0., 360./) - region_array@name = "Glob" + region_array@name = "Global" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Tropics") then + if (region.eq."trop") then region_array = (/-20., 20., 0., 360./) - region_array@name = "Trop" + region_array@name = "Tropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."NH extratropics") then + if (region.eq."nhext") then region_array = (/20., 90., 0., 360./) - region_array@name = "NHext" + region_array@name = "Northern extratropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."SH extratropics") then + if (region.eq."shext") then region_array = (/-90., -20., 0., 360./) - region_array@name = "SHext" + region_array@name = "Southern extratropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."NH equatorial") then + if (region.eq."nhtrop") then region_array = (/0., 30., 0., 360./) - region_array@name = "NHtrop" + region_array@name = "Northern tropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."SH equatorial") then + if (region.eq."shtrop") then region_array = (/-30., 0., 0., 360./) - region_array@name = "SHtrop" + region_array@name = "Southern tropics" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Northern Hemisphere") then + if (region.eq."nh") then region_array = (/20., 90., 0., 360./) - region_array@name = "NH" + region_array@name = "Northern hemisphere" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Southern Hemisphere") then + if (region.eq."sh") then region_array = (/-90., -20., 0., 360./) - region_array@name = "SH" + region_array@name = "Southern hemisphere" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."NH midlatitudes") then + if (region.eq."nhmidlat") then region_array = (/35., 60., 0., 360./) - region_array@name = "NHmidlat" + region_array@name = "Northern midlatitudes" leave_msg(scriptname, funcname) return(region_array) end 
if - if (region.eq."SH midlatitudes") then + if (region.eq."shmidlat") then region_array = (/-60., -35., 0., 360./) - region_array@name = "SHmidlat" + region_array@name = "Southern midlatitudes" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Arctic") then + if (region.eq."nhpolar") then region_array = (/60., 90., 0., 360./) - region_array@name = "NHpolar" + region_array@name = "Northern polar" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Antarctic") then + if (region.eq."shpolar") then region_array = (/-90., -60., 0., 360./) - region_array@name = "SHpolar" + region_array@name = "Southern polar" leave_msg(scriptname, funcname) return(region_array) end if - if (region.eq."Equatorial") then + if (region.eq."eq") then region_array = (/-10., 10., 0., 360./) - region_array@name = "EQ" + region_array@name = "Equatorial" leave_msg(scriptname, funcname) return(region_array) end if diff --git a/esmvaltool/diag_scripts/shared/plot/GO_panels.ncl b/esmvaltool/diag_scripts/shared/plot/GO_panels.ncl index 91a4b24762..42326cdafd 100644 --- a/esmvaltool/diag_scripts/shared/plot/GO_panels.ncl +++ b/esmvaltool/diag_scripts/shared/plot/GO_panels.ncl @@ -23,8 +23,8 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" ; ############################################################################# @@ -34,12 +34,12 @@ end ; ############################################################################# -load "./diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" -load "./diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" -load "./diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" +load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" +load "$diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" +load "$diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" -load "./diag_scripts/shared/plot/style.ncl" -load "./diag_scripts/shared/plot/xy_line.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" ; ############################################################################# @@ -105,7 +105,7 @@ begin ; ------------------------------------- ; Plot model entry (first plot) ; ------------------------------------- - main_title_prefix = "" ; dataset_info@dataset(idx_mod) + main_title_prefix = "" ; input_file_info@dataset(idx_mod) main_title_suffix = "" res = panel_two_by_one(res, 0) ; Update resource for second plot @@ -358,7 +358,7 @@ local aux_title_info, cn_levels_string, curr, diag_description, \ textres, var0, wks, diff_model_ref, txres begin - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -467,7 +467,7 @@ local aux_title_info, cn_levels_string, curr, diag_description, \ textres, var0, wks, diff_model_ref, txres begin - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -773,7 +773,7 @@ local aux_title_info, cn_levels_string, curr, diag_description, \ output_file_path, plot, plot_diff, plot_ref, ref, res, storage_record, \ textres, var0, wks, wks_debug, txres begin - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = 
dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) var1 = variables(1) @@ -1147,7 +1147,7 @@ begin ; Which model goes where on each page? figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(dataset_info, \ + place_models_on_pages(input_file_info, \ plot_settings@idx_ref, \ figures_per_page, \ model_panel_placement, \ @@ -1530,7 +1530,7 @@ local aux_title_info, blank_plot, cn_levels_string, cols, curr_fig, \ total_no_of_pages, txres, type_specifier, ua, ua_ref, va, var0, var1, \ va_ref, wks, plot_ref_v begin - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -1578,7 +1578,7 @@ begin ; Which model goes where on each page? figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(dataset_info, \ + place_models_on_pages(input_file_info, \ plot_settings@idx_ref, \ figures_per_page, \ model_panel_placement, \ @@ -1707,7 +1707,7 @@ local am_infobox_id, am_labelbar_id, amres, debugbox, diag_script_base, \ precip_seasonal_maxY, precip_seasonal_minY, precip_seasonal_spreadY, res, \ storage_record, txres, var0, wks, xaxis, var1, field_type1 begin - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -1785,7 +1785,7 @@ begin ; Plot loop ; --------- do idx_mod = 0, dim_MOD - 1 - labels(idx_mod) = dataset_info@dataset(idx_mod) + labels(idx_mod) = input_file_info@dataset(idx_mod) res@gsnXYBarChartColors = di@colors_seasonal(idx_mod) ; Shift plot to the right and draw/redraw @@ -1865,7 +1865,7 @@ local annots, annots_mmm, avgstd, avgstd_mmm, colors, colors_mmm, dashes, \ storage_record, temp, thicks, thicks_mmm, values, values_stddev, \ values_stddev, var0, var1, wks, wks_debug begin - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -1930,9 +1930,9 @@ begin models = getvaratts(temp) do i = 0, dimsizes(models) - 1 ; Define all original attributes again, but empty - dataset_info@$models(i)$ = "" + input_file_info@$models(i)$ = "" end do - dataset_info@dataset = "model_mean" + input_file_info@dataset = "model_mean" ; See ./diag_scripts/shared/plot/style.ncl colors_mmm = project_style(diag_script_info, "colors") @@ -2010,7 +2010,7 @@ local annots, avgstd, diag_script_base, dim_VAR, field_type0, field_type1, \ storage_record, temp, val_area_stddev, values, values_stddev, var0, var1, \ wks, wks_debug begin - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -2054,8 +2054,8 @@ begin end if ; Determine time range - start_year = min(stringtoint(dataset_info@start_year)) - end_year = max(stringtoint(dataset_info@end_year)) + start_year = min(stringtoint(input_file_info@start_year)) + end_year = max(stringtoint(input_file_info@end_year)) ; Select attributes annots = project_style(diag_script_info, "annots") diff --git a/esmvaltool/diag_scripts/shared/plot/__init__.py b/esmvaltool/diag_scripts/shared/plot/__init__.py index d6998563c8..c6ab23768b 100644 --- a/esmvaltool/diag_scripts/shared/plot/__init__.py +++ b/esmvaltool/diag_scripts/shared/plot/__init__.py @@ -1,7 +1,4 @@ """Module that provides common plot functions.""" -# set matplotlib 
non-interactive backend -import matplotlib -matplotlib.use('Agg') # noqa from ._plot import ( get_path_to_mpl_style, diff --git a/esmvaltool/diag_scripts/shared/plot/_plot.py b/esmvaltool/diag_scripts/shared/plot/_plot.py index 00c06f3720..0fb8366579 100644 --- a/esmvaltool/diag_scripts/shared/plot/_plot.py +++ b/esmvaltool/diag_scripts/shared/plot/_plot.py @@ -1,10 +1,10 @@ """Common plot functions.""" import logging import os -import yaml import iris.quickplot import matplotlib.pyplot as plt +import yaml logger = logging.getLogger(__name__) @@ -61,10 +61,9 @@ def get_path_to_mpl_style(style_file=None): """Get path to matplotlib style file.""" if style_file is None: style_file = 'default.mplstyle' - if not isinstance(style_file, str): - raise TypeError("Invalid input: {} is not a " - "string".format(style_file)) - base_dir = os.path.dirname(__file__) + if not style_file.endswith('.mplstyle'): + style_file += '.mplstyle' + base_dir = os.path.dirname(os.path.realpath(__file__)) filepath = os.path.join(base_dir, 'styles_python', 'matplotlib', style_file) logger.debug("Using matplotlib style: %s", filepath) @@ -73,10 +72,11 @@ def get_path_to_mpl_style(style_file=None): def get_dataset_style(dataset, style_file=None): """Retrieve the style information for the given dataset.""" - # Default path if style_file is None: style_file = 'cmip5.yml' - base_dir = os.path.dirname(__file__) + if not style_file.endswith('.yml'): + style_file += '.yml' + base_dir = os.path.dirname(os.path.realpath(__file__)) default_dir = os.path.join(base_dir, 'styles_python') # Check if style_file is valid @@ -103,16 +103,18 @@ def get_dataset_style(dataset, style_file=None): # Check if dataset is available if not style.get(dataset): - logger.warning("Dataset '%s' not found in style file, using default " + - "entry", dataset) + logger.warning( + "Dataset '%s' not found in style file, using default " + "entry", dataset) return style[default_dataset] # Get compulsory information for option in options: if option not in style[dataset]: - logger.warning("No style information '%s' found for dataset " + - "'%s', using default value for unknown datasets", - option, dataset) + logger.warning( + "No style information '%s' found for dataset " + "'%s', using default value for unknown datasets", option, + dataset) style[dataset].update({option: style[default_dataset][option]}) return style[dataset] @@ -126,6 +128,7 @@ def quickplot(cube, filename, plot_type, **kwargs): plot_function(cube, **kwargs) # plt.gca().coastlines() fig.savefig(filename) + plt.close(fig) def multi_dataset_scatterplot(x_data, y_data, datasets, filepath, **kwargs): @@ -193,31 +196,33 @@ def multi_dataset_scatterplot(x_data, y_data, datasets, filepath, **kwargs): # Plot data for (idx, dataset) in enumerate(datasets): - style = get_dataset_style(dataset, kwargs.get('dataset_styles_file')) + style = get_dataset_style(dataset, kwargs.get('dataset_style_file')) # Fix problem when plotting ps file facecolor = style['color'] if filepath.endswith('ps') else \ style['facecolor'] # Plot - axes.plot(x_data[idx], y_data[idx], - markeredgecolor=style['color'], - markerfacecolor=facecolor, - marker=style['mark'], - **(kwargs.get('plot_kwargs', empty_dict)[idx])) + axes.plot( + x_data[idx], + y_data[idx], + markeredgecolor=style['color'], + markerfacecolor=facecolor, + marker=style['mark'], + **(kwargs.get('plot_kwargs', empty_dict)[idx])) # Costumize plot legend = _process_axes_functions(axes, kwargs.get('axes_functions')) # Save plot - fig.savefig(filepath, 
additional_artists=[legend], - **kwargs.get('save_kwargs', {})) - logger.info("Writing %s", filepath) + fig.savefig( + filepath, additional_artists=[legend], **kwargs.get('save_kwargs', {})) + logger.info("Wrote %s", filepath) plt.close() def scatterplot(x_data, y_data, filepath, **kwargs): - """Plot a multi dataset scatterplot. + """Plot a scatterplot. Notes ----- @@ -265,8 +270,8 @@ def scatterplot(x_data, y_data, filepath, **kwargs): raise TypeError("{} is not a valid keyword argument".format(kwarg)) # Check parameters - _check_size_of_parameters(x_data, y_data, - kwargs.get('plot_kwargs', x_data)) + _check_size_of_parameters(x_data, y_data, kwargs.get( + 'plot_kwargs', x_data)) empty_dict = [{} for _ in x_data] # Create matplotlib instances @@ -289,7 +294,7 @@ def scatterplot(x_data, y_data, filepath, **kwargs): legend = _process_axes_functions(axes, kwargs.get('axes_functions')) # Save plot - fig.savefig(filepath, additional_artists=[legend], - **kwargs.get('save_kwargs', {})) - logger.info("Writing %s", filepath) + fig.savefig( + filepath, additional_artists=[legend], **kwargs.get('save_kwargs', {})) + logger.info("Wrote %s", filepath) plt.close() diff --git a/esmvaltool/diag_scripts/shared/plot/aux_plotting.ncl b/esmvaltool/diag_scripts/shared/plot/aux_plotting.ncl index be08c5a609..dd665ccd95 100644 --- a/esmvaltool/diag_scripts/shared/plot/aux_plotting.ncl +++ b/esmvaltool/diag_scripts/shared/plot/aux_plotting.ncl @@ -21,8 +21,8 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" ; ############################################################################# undef("create_legend_lines") diff --git a/esmvaltool/diag_scripts/shared/plot/contour_maps.ncl b/esmvaltool/diag_scripts/shared/plot/contour_maps.ncl index 385a272f0c..8aa56f0cca 100644 --- a/esmvaltool/diag_scripts/shared/plot/contour_maps.ncl +++ b/esmvaltool/diag_scripts/shared/plot/contour_maps.ncl @@ -13,12 +13,12 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/set_operators.ncl" -load "./diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" ; ############################################################################# undef("contour_map") @@ -327,7 +327,7 @@ begin end if ; Check if a valid wks has been provided, otherwise invoke default - wks = get_wks(wks_in, diag_script, var) + wks = get_wks(wks_in, DIAG_SCRIPT, var) ; Default resources res = True diff --git a/esmvaltool/diag_scripts/shared/plot/contourplot.ncl b/esmvaltool/diag_scripts/shared/plot/contourplot.ncl index f6b3d1af2e..df2c8eaf7d 100644 --- a/esmvaltool/diag_scripts/shared/plot/contourplot.ncl +++ b/esmvaltool/diag_scripts/shared/plot/contourplot.ncl @@ -8,10 +8,10 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/plot/aux_plotting.ncl" 
+load "$diag_scripts/shared/plot/aux_plotting.ncl" ; ############################################################################# undef("contourplot") diff --git a/esmvaltool/diag_scripts/shared/plot/legends.ncl b/esmvaltool/diag_scripts/shared/plot/legends.ncl index 43337142b0..ff5057b76f 100644 --- a/esmvaltool/diag_scripts/shared/plot/legends.ncl +++ b/esmvaltool/diag_scripts/shared/plot/legends.ncl @@ -11,8 +11,8 @@ ; ; ############################################################################# -load "./interface_scripts/constants.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/constants.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" ; ############################################################################# undef("legend_lines") diff --git a/esmvaltool/diag_scripts/shared/plot/mjo_level1.ncl b/esmvaltool/diag_scripts/shared/plot/mjo_level1.ncl index f4c795fbe6..4224f8aaff 100644 --- a/esmvaltool/diag_scripts/shared/plot/mjo_level1.ncl +++ b/esmvaltool/diag_scripts/shared/plot/mjo_level1.ncl @@ -16,14 +16,14 @@ ; Temporary file updated by Python on the fly -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/data_handling.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/data_handling.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/set_operators.ncl" -load "./diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" -load "./diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" +load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" undef("pr_u850_mean_plot") function pr_u850_mean_plot(wks[1]:graphic,\ diff --git a/esmvaltool/diag_scripts/shared/plot/mjo_level2.ncl b/esmvaltool/diag_scripts/shared/plot/mjo_level2.ncl index 8bacc48dce..5cc163f5c1 100644 --- a/esmvaltool/diag_scripts/shared/plot/mjo_level2.ncl +++ b/esmvaltool/diag_scripts/shared/plot/mjo_level2.ncl @@ -1,12 +1,12 @@ -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/data_handling.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/data_handling.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/set_operators.ncl" -load "./diag_scripts/shared/statistics.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/statistics.ncl" -load "./diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" +load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" ; ############################################################################# undef("mjo_wave_freq_plot") @@ -665,7 +665,7 @@ begin pltSubTitle = "Anomalous: OLR, U850, V850" do nSeason = 1, 2 - aux_title_info = dataset_info@dataset(idx_mod) + "_" \ + aux_title_info = input_file_info@dataset(idx_mod) + "_" \ + di@season_life_cycle(nSeason - 1) output_filename = \ interface_get_figure_filename(diag_script_base, var0, field_type0,\ @@ -679,14 +679,14 @@ begin txid = gsn_create_text(wks, pltSubTitle, txres) if (nSeason .eq. 
1) then - resP@txString = dataset_info@dataset(idx_mod) + ": "\ - + dataset_info@start_year(idx_mod) + "-"\ - + dataset_info@end_year(idx_mod)\ + resP@txString = input_file_info@dataset(idx_mod) + ": "\ + + input_file_info@start_year(idx_mod) + "-"\ + + input_file_info@end_year(idx_mod)\ + ": May to Oct" else - resP@txString = dataset_info@dataset(idx_mod) + ": "\ - + dataset_info@start_year(idx_mod) + "-"\ - + dataset_info@end_year(idx_mod)\ + resP@txString = input_file_info@dataset(idx_mod) + ": "\ + + input_file_info@start_year(idx_mod) + "-"\ + + input_file_info@end_year(idx_mod)\ + ": Nov to Apr" end if do n = 0, nPhase - 1 diff --git a/esmvaltool/diag_scripts/shared/plot/monsoon_domain_panels.ncl b/esmvaltool/diag_scripts/shared/plot/monsoon_domain_panels.ncl index b352b26458..501e78c953 100644 --- a/esmvaltool/diag_scripts/shared/plot/monsoon_domain_panels.ncl +++ b/esmvaltool/diag_scripts/shared/plot/monsoon_domain_panels.ncl @@ -10,16 +10,16 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/data_handling.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/data_handling.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" +load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" exact_panel_position_file = \ "./diag_scripts/aux/SAMonsoon/exact_panel_positions_precip_world.ncl" -load "./diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" -load "./diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" -load "./diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" +load "$diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" +load "$diag_scripts/shared/plot/style.ncl" begin loadscript(exact_panel_position_file) @@ -299,7 +299,7 @@ begin scriptname = "plot_script/ncl/monsoon_domain_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -341,7 +341,7 @@ begin ; Which model goes where on each page? 
figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(dataset_info, \ + place_models_on_pages(input_file_info, \ plot_settings@idx_ref, \ figures_per_page, \ model_panel_placement, \ diff --git a/esmvaltool/diag_scripts/shared/plot/monsoon_panels.ncl b/esmvaltool/diag_scripts/shared/plot/monsoon_panels.ncl index 4f875165f3..bf65740463 100644 --- a/esmvaltool/diag_scripts/shared/plot/monsoon_panels.ncl +++ b/esmvaltool/diag_scripts/shared/plot/monsoon_panels.ncl @@ -24,20 +24,20 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/data_handling.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/data_handling.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" begin loadscript(exact_panel_position_file) end -load "./diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" -load "./diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" -load "./diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" +load "$diag_scripts/aux/SAMonsoon/SAMonsoon.ncl" +load "$diag_scripts/aux/SAMonsoon/SAMonsoon_panel_positions.ncl" +load "$diag_scripts/aux/SAMonsoon/exact_panel_positions_generic.ncl" -load "./diag_scripts/shared/plot/style.ncl" -load "./diag_scripts/shared/plot/xy_line.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/xy_line.ncl" ; ############################################################################# undef("get_title_suffix") @@ -491,7 +491,7 @@ begin scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -634,7 +634,7 @@ begin scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -1025,7 +1025,7 @@ begin scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) var1 = variables(1) @@ -1483,7 +1483,7 @@ begin ; Which model goes where on each page? figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(dataset_info, plot_settings@idx_ref, \ + place_models_on_pages(input_file_info, plot_settings@idx_ref, \ figures_per_page, model_panel_placement, \ figure_panel_placement) @@ -1957,7 +1957,7 @@ begin scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -2004,7 +2004,7 @@ begin ; Which model goes where on each page? 
figure_panel_placement = new((/sum(figures_per_page)/), integer) - place_models_on_pages(dataset_info, \ + place_models_on_pages(input_file_info, \ plot_settings@idx_ref, \ figures_per_page, \ model_panel_placement, \ @@ -2165,7 +2165,7 @@ begin scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -2242,7 +2242,7 @@ begin ; Plot loop ; --------- do idx_mod = 0, dim_MOD - 1 - labels(idx_mod) = dataset_info@dataset(idx_mod) + labels(idx_mod) = input_file_info@dataset(idx_mod) res@gsnXYBarChartColors = di@colors_seasonal(idx_mod) ; Shift plot to the right and draw/redraw @@ -2348,7 +2348,7 @@ begin scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -2414,7 +2414,7 @@ begin ; Define all original attributes again, but empty models@$models(i)$ = "" end do - dataset_info@dataset = "model_mean" + input_file_info@dataset = "model_mean" ; See ./diag_scripts/shared/plot/style.ncl colors_mmm = project_style(diag_script_info, "colors") @@ -2531,7 +2531,7 @@ begin scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@dataset) + dim_MOD = dimsizes(input_file_info@dataset) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -2574,8 +2574,8 @@ begin end if ; Determine time range - start_year = min(stringtoint(dataset_info@start_year)) - end_year = max(stringtoint(dataset_info@end_year)) + start_year = min(stringtoint(input_file_info@start_year)) + end_year = max(stringtoint(input_file_info@end_year)) ; Select attributes annots = project_style(diag_script_info, "annots") @@ -2665,7 +2665,7 @@ begin scriptname = "diag_scripts/shared/plot/monsoon_panels.ncl" enter_msg(scriptname, funcname) - dim_MOD = dimsizes(dataset_info@name) + dim_MOD = dimsizes(input_file_info@name) dim_VAR = dimsizes(variables) var0 = variables(0) field_type0 = field_types(0) @@ -2706,8 +2706,8 @@ begin plot = new((/dim_MOD/), graphic) ; Determine time range - start_year = min(stringtoint(dataset_info@start_year)) - end_year = max(stringtoint(dataset_info@end_year)) + start_year = min(stringtoint(input_file_info@start_year)) + end_year = max(stringtoint(input_file_info@end_year)) ; Select attributes annots = project_style(diag_script_info, "annots") @@ -2741,9 +2741,9 @@ begin models = getvaratts(temp) ; use "models" to pass on attribute names do i = 0, dimsizes(models) - 1 ; Define all original attributes again, but empty - dataset_info@$models(i)$ = "" + input_file_info@$models(i)$ = "" end do - dataset_info@dataset = "model_mean" + input_file_info@dataset = "model_mean" ; See ./diag_scripts/shared/plot/style.ncl colors_mmm = project_style(diag_script_info, "colors") diff --git a/esmvaltool/diag_scripts/shared/plot/portrait_plot.ncl b/esmvaltool/diag_scripts/shared/plot/portrait_plot.ncl index c14191be25..366d53d756 100644 --- a/esmvaltool/diag_scripts/shared/plot/portrait_plot.ncl +++ b/esmvaltool/diag_scripts/shared/plot/portrait_plot.ncl @@ -5,13 +5,15 @@ ; Check the header of each routine for documentation. 
; ; function portrait_plot +; function circle_plot +; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/set_operators.ncl" -load "./diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" ; ############################################################################# @@ -52,554 +54,758 @@ local funcname, scriptname, wks, wks_in, data, source, res, atts, \ base, varname begin - funcname = "portrait_plot" - scriptname = "diag_scripts/shared/plot/portrait_plot.ncl" - enter_msg(scriptname, funcname) - - ; Get data, either directly or via netCDF file - if(typeof(source) .eq. "string") then - data = ncdf_read(source, varname) + funcname = "portrait_plot" + scriptname = "diag_scripts/shared/plot/portrait_plot.ncl" + enter_msg(scriptname, funcname) + + ; Get data, either directly or via netCDF file + if (typeof(source) .eq. "string") then + data = ncdf_read(source, varname) + else + data = source + copy_VarMeta(source, data) + end if + + ; Use -999 as fill value (easier to display as text) + data@_FillValue = -999. + + ; Retrieve basic metadata from data + defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/) + if (any(varname .eq. defaults)) then + grade = att2var(data, "var") + DIAG_SCRIPT = att2var(data, "diag_script") + else + grade = varname + end if + + ; Check if a valid wks has been provided, otherwise invoke default + wks = get_wks(wks_in, DIAG_SCRIPT, grade) + + ; Check source dimensionality + grade_dim = dimsizes(data) + ndiag = grade_dim(0) + nmod = grade_dim(1) + + if (dimsizes(grade_dim).eq.3) then + if (grade_dim(2).le.2) then + nref = grade_dim(2) else - data = source - copy_VarMeta(source, data) + log_info("Maximum 2 references supported") + log_info("Only the first 2 will be plotted") + nref = 2 + var = data(:, :, 0:1) + delete(data) + data = var + delete(var) end if - - ; Use -999 as fill value (easier to display as text) - data@_FillValue = -999. - - ; Retrieve basic metadata from data - defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/) - if (any(varname .eq. defaults)) then - grade = att2var(data, "var") - diag_script = att2var(data, "diag_script") + elseif (dimsizes(grade_dim).eq.2) then + nref = 1 + var = new((/ndiag, nmod, nref/), typeof(data)) + var(:, :, 0) = data + delete(data) + data = var + delete(var) + else + error_msg("f", scriptname, funcname, \ + "input variable must be eithed 2D or 3D") + end if + + ; Set levels + if (isatt(data, "label_bounds")) then + + ; User provided + chars = \ + tochar(tostring(abs(data@label_bounds(1) - \ + round(data@label_bounds(1), 1)) * 100)) + five = tochar("5") + zero = tochar("0") + if (chars(1).eq.five(0)).or.(chars(1).eq.zero(0)) then + dec_pl = 100. else - grade = varname + dec_pl = 10. 
end if + upperbound = tofloat(round(data@label_bounds(1) * dec_pl, 1)) / dec_pl + lowerbound = tofloat(round(data@label_bounds(0) * dec_pl, 1)) / dec_pl - ; Check if a valid wks has been provided, otherwise invoke default - wks = get_wks(wks_in, diag_script, grade) + else - ; Check source dimensionality - grade_dim = dimsizes(data) - ndiag = grade_dim(0) - nmod = grade_dim(1) - - if (dimsizes(grade_dim).eq.3) then - if (grade_dim(2).le.2) then - nref = grade_dim(2) - else - log_info("Maximum 2 references supported") - log_info("Only the first 2 will be plotted") - nref = 2 - var = data(:, :, 0:1) - delete(data) - data = var - delete(var) - end if - else if (dimsizes(grade_dim).eq.2) then - nref = 1 - var = new((/ndiag, nmod, nref/), typeof(data)) - var(:, :, 0) = data - delete(data) - data = var - delete(var) - else - error_msg("f", scriptname, funcname, \ - "input variable must be eithed 2D or 3D") - end if + ; Flexible in regard to the range of the data values + bound = max(abs(data(:, :, 0))) + if (all(ismissing(data))) then + delete(bound) + bound = 0.1 + error_msg("w", scriptname, funcname, "all data values are missing!") end if - - ; Set levels - if (isatt(data, "label_bounds")) then - - ; User provided - chars = tochar(tostring( \ - abs(data@label_bounds(1) - round(data@label_bounds(1), 1)) * 100)) - five = tochar("5") - zero = tochar("0") - if (chars(1).eq.five(0)).or.(chars(1).eq.zero(0)) then - dec_pl = 100. - else - dec_pl = 10. - end if - upperbound = tofloat(round(data@label_bounds(1) * dec_pl, 1)) / dec_pl - lowerbound = tofloat(round(data@label_bounds(0) * dec_pl, 1)) / dec_pl - + upperbound = tofloat(round(bound * 10, 1)) / 10. + upperbound = where(upperbound.lt.0.1, 0.1, upperbound) + if (min(data).lt.0) then + lowerbound = -1. * upperbound else - - ; Flexible in regard to the range of the data values - bound = max(abs(data(:, :, 0))) - if all(ismissing(data)) then - delete(bound) - bound = 0.1 - error_msg("w", scriptname, funcname, "all data " + \ - "values are missing!") - end if - upperbound = tofloat(round(bound * 10, 1)) / 10. - upperbound = where(upperbound.lt.0.1, 0.1, upperbound) - if (min(data).lt.0) then - lowerbound = -1. * upperbound - else - lowerbound = 0 - end if - + lowerbound = 0 end if - ; Label scale - if (isatt(data, "label_scale")) then - scale = data@label_scale - else - if (((upperbound - lowerbound) / 0.5).le.6) then - scale = 0.1 - else - scale = 0.5 - end if - end if + end if - ; Set levels - if (isatt(data, "label_lo")) then - label_lo = data@label_lo - else - label_lo = True - end if - if (isatt(data, "label_hi")) then - label_hi = data@label_hi + ; Label scale + if (isatt(data, "label_scale")) then + scale = data@label_scale + else + if (((upperbound - lowerbound) / 0.5).le.6) then + scale = 0.1 else - label_hi = True + scale = 0.5 end if - nlevels = round((upperbound - lowerbound + scale) / scale, 3) - levels = new(nlevels, "float") - levels(0) = lowerbound - do ii = 1, nlevels - 1 - levels(ii) = levels(ii - 1) + scale - end do - levels = round(100. * levels, 0) / 100. 
- - ; Display metrics value on the plot - flag_values = False - if (isatt(data, "disp_values")) then - if (data@disp_values) then - flag_values = True - text_v = new((/ndiag, nmod, nref/), graphic) - end if + end if + + ; Set levels + if (isatt(data, "label_lo")) then + label_lo = data@label_lo + else + label_lo = True + end if + if (isatt(data, "label_hi")) then + label_hi = data@label_hi + else + label_hi = True + end if + nlevels = round((upperbound - lowerbound + scale) / scale, 3) + levels = new(nlevels, "float") + levels(0) = lowerbound + do ii = 1, nlevels - 1 + levels(ii) = levels(ii - 1) + scale + end do + levels = round(100. * levels, 0) / 100. + + ; Display metrics value on the plot + flag_values = False + if (isatt(data, "disp_values")) then + if (data@disp_values) then + flag_values = True + text_v = new((/ndiag, nmod, nref/), graphic) end if - - ; Diplay model rankings on the plot - flag_ranks = False - if (isatt(data, "disp_rankings")) then - if (data@disp_rankings) then - flag_ranks = True - text_r = new((/ndiag, nmod, nref/), graphic) - ranks = new((/ndiag, nmod, nref/), integer) - end if + end if + + ; Diplay model rankings on the plot + flag_ranks = False + if (isatt(data, "disp_rankings")) then + if (data@disp_rankings) then + flag_ranks = True + text_r = new((/ndiag, nmod, nref/), graphic) + ranks = new((/ndiag, nmod, nref/), integer) end if - - ; Set a colormap - if (isatt(data, "colormap")) then - gsn_define_colormap(wks, data@colormap) - else - gsn_define_colormap(wks, "ncl_default") - data@colormap = "ncl_default" + end if + + ; Set a colormap + if (isatt(data, "colormap")) then + gsn_define_colormap(wks, data@colormap) + else + gsn_define_colormap(wks, "ncl_default") + data@colormap = "ncl_default" + end if + tmp = read_colormap_file(data@colormap) + if (isatt(data, "cm_interval")) then + cm_start = data@cm_interval(0) + cm_end = data@cm_interval(1) + else + cm_start = 2 + cm_size = dimsizes(tmp) + cm_end = cm_size(0) - 1 + end if + cmap = tmp(cm_start:cm_end, :) + ncmap = dimsizes(cmap(:, 0)) + delete(tmp) + + ; Sample colormap and extract colors + if (label_lo.and.label_hi) then ; 2 more colors for the triangles + pos = new((/nlevels + 1/), float) + end if + if (label_lo.xor.label_hi) then ; 1 more color for upper/lower triangle + pos = new((/nlevels/), float) + end if + if (.not.label_lo.and. .not.label_hi) ; no more colors + pos = new((/nlevels - 1/), float) + end if + pos(0) = cm_start + npos = dimsizes(pos) + do ii = 1, npos - 1 + pos(ii) = pos(ii - 1) + (ncmap - 1) / tofloat(npos - 1) + end do + delete([/cmap, ncmap/]) + + ; Reverse order if required + if (isatt(data, "cm_reverse")) then + if (data@cm_reverse) then + pos = pos(::-1) end if - tmp = read_colormap_file(data@colormap) - if (isatt(data, "cm_interval")) then - cm_start = data@cm_interval(0) - cm_end = data@cm_interval(1) + end if + + ; Default resources + res = True + res@gsnDraw = False + res@gsnFrame = False + res@tmBorderThicknessF = False + res@tmXBOn = False + res@tmXTOn = False + res@tmYLOn = False + res@tmYROn = False + res@trXMinF = 0. + res@trXMaxF = 1.0 + res@trYMinF = 0. + res@trYMaxF = 1.0 + + ; Set plot aspect depending on ndiag and nmod: longst dimension on y-axis + xbsize = 1. / nmod + ybsize = 1. / ndiag + if (ndiag.ge.nmod) then + res@vpWidthF = 0.6 * nmod / tofloat(ndiag) + res@vpHeightF = 0.6 + else + res@vpWidthF = 0.6 + if (isatt(diag_script_info, "scale_aspect")) then + ; code for ESA CCI paper Lauer et al. 
(2016) + scale_aspect = diag_script_info@scale_aspect + res@vpHeightF = 0.6 * ndiag / tofloat(nmod) * scale_aspect else - cm_start = 2 - cm_size = dimsizes(tmp) - cm_end = cm_size(0) - 1 + ; standard code + res@vpHeightF = 0.6 * ndiag / tofloat(nmod) + if (2 * ndiag.le.nmod) then + res@vpHeightF = 2 * res@vpHeightF + end if end if - cmap = tmp(cm_start:cm_end, :) - ncmap = dimsizes(cmap(:, 0)) - delete(tmp) - - ; Sample colormap and extract colors - if (label_lo.and.label_hi) then ; 2 more colors for the triangles - pos = new((/nlevels + 1/), float) + end if + res@vpYF = 0.2 + res@vpHeightF + + ; Title + res@tiMainFontHeightF = 0.020 + if (isatt(data, "metric")) then + res@tiMainString = data@metric + end if + + ; Override defaults with "res_" attributes of "data" + res_new = att2var(data, "res_") + copy_VarMeta(res_new, res) + + ; Draw a blank plot, polygons will be added below + plot = gsn_blank_plot(wks, res) + + ; Text resources + resTxt = True + resTxt@txFontHeightF = 0.010 + resTxt@txJust = "CenterCenter" + + ; Polygons resources + resPol = True + resPol@gsLineThicknessF = 0.5 + resPol@gsEdgesOn = True + resPol@gsLineColor = "black" + poly = new((/ndiag, nmod, nref/), graphic) + + ; Calculate rankings + if (flag_ranks) then + if (.not.isatt(data, "rank_order")) then + error_msg("f", scriptname, funcname, \ + "rank_order attribute must be specified if " + \ + "ranking display is desired") end if - if (label_lo.xor.label_hi) then ; 1 more color for upper/lower triangle - pos = new((/nlevels/), float) + if (all(data@rank_order.ne.(/-1, 1/))) then + error_msg("f", scriptname, funcname, \ + "rank_order attribute can be either 1 (increasing) " + \ + "or -1 (decreasing)") end if - if (.not.label_lo.and. .not.label_hi) ; no more colors - pos = new((/nlevels - 1/), float) - end if - pos(0) = cm_start - npos = dimsizes(pos) - do ii = 1, npos - 1 - pos(ii) = pos(ii - 1) + (ncmap - 1) / tofloat(npos - 1) + do idiag = 0, ndiag - 1 + do iref = 0, nref - 1 + pp = dim_pqsort(data(idiag, :, iref), data@rank_order) + rk = 1 + do imod = 0, nmod - 1 + if (.not.ismissing(data(idiag, pp(imod), iref))) then + ranks(idiag, pp(imod), iref) = rk + rk = rk + 1 + end if + end do + delete(rk) + delete(pp) + end do end do - delete([/cmap, ncmap/]) - - ; Reverse order if required - if (isatt(data, "cm_reverse")) then - if (data@cm_reverse) then - pos = pos(::-1) - end if + end if + + ; Draw polygons + do idiag = 0, ndiag - 1 + + ; Set triangle or square + ; - draw a square if only 1 reference model is available or if the + ; alternative model contains only missing values + ; - draw a triangle if an alternative reference is available and + ; contains valid values + l_square = True + if (nref.gt.1) then + if (.not.all(ismissing(data(idiag, :, 1)))) then + l_square = False + end if end if - ; Default resources - res = True - res@gsnDraw = False - res@gsnFrame = False - res@tmBorderThicknessF = False - res@tmXBOn = False - res@tmXTOn = False - res@tmYLOn = False - res@tmYROn = False - res@trXMinF = 0. - res@trXMaxF = 1.0 - res@trYMinF = 0. - res@trYMaxF = 1.0 - - ; Set plot aspect depending on ndiag and nmod: longst dimension on y-axis - xbsize = 1. / nmod - ybsize = 1. / ndiag - if (ndiag.ge.nmod) then - res@vpWidthF = 0.6 * nmod / tofloat(ndiag) - res@vpHeightF = 0.6 - else - res@vpWidthF = 0.6 -; A_laue_ax+ - if (isatt(diag_script_info, "scale_aspect")) then - ; code for ESA CCI paper Lauer et al. 
(2016) - scale_aspect = diag_script_info@scale_aspect - res@vpHeightF = 0.6 * ndiag / tofloat(nmod) * scale_aspect - else - ; standard code -; A_laue_ax- - res@vpHeightF = 0.6 * ndiag / tofloat(nmod) - if (2 * ndiag.le.nmod) then - res@vpHeightF = 2 * res@vpHeightF - end if -; A_laue_ax+ - end if -; A_laue_ax- - end if - res@vpYF = 0.2 + res@vpHeightF + do imod = 0, nmod - 1 - ; Title - res@tiMainFontHeightF = 0.020 - if (isatt(data, "metric")) then - res@tiMainString = data@metric - end if - if (isatt(data, "region")) then - if (isatt(res, "tiMainString")) then - res@tiMainString = res@tiMainString + data@region - else - res@tiMainString = data@region + ; Find corresponding color for primary reference + curr = data(idiag, imod, 0) + if (ismissing(curr)) then + resPol@gsFillColor = 0 + else + lidx = max(ind(curr.ge.levels)) + if (label_lo) then + lidx = lidx + 1 + lidx = where(ismissing(lidx), 0, lidx) end if - end if - - ; Override defaults with "res_" attributes of "data" - res_new = att2var(data, "res_") - copy_VarMeta(res_new, res) - - ; Draw a blank plot, polygons will be added below - plot = gsn_blank_plot(wks, res) - - ; Text resources - resTxt = True - resTxt@txFontHeightF = 0.010 - resTxt@txJust = "CenterCenter" - - ; Polygons resources - resPol = True - resPol@gsLineThicknessF = 0.5 - resPol@gsEdgesOn = True - resPol@gsLineColor = "black" - poly = new((/ndiag, nmod, nref/), graphic) + if (.not.label_hi .and. lidx.gt.npos - 1) then + error_msg("f", scriptname, funcname, \ + "metric value (" + curr + ") above the " + \ + "label upper bound (" + levels(nlevels - 1) + \ + "): change label_bounds or set " + \ + "label_hi = True") + end if + if (ismissing(lidx)) then + error_msg("f", scriptname, funcname, \ + "metric value (" + curr + ") below the " + \ + "label lower bound (" + levels(0) + \ + "): change label_bounds or set " + \ + "label_lo = True") + end if + resPol@gsFillColor = round(pos(lidx), 3) + end if + + if (l_square) then + xx = (/imod * xbsize, (imod + 1) * xbsize, \ + (imod + 1) * xbsize, imod * xbsize, imod * xbsize/) + yy = (/idiag * ybsize, idiag * ybsize, (idiag + 1) * ybsize, \ + (idiag + 1) * ybsize, idiag * ybsize/) + poly(idiag, imod, 0) = gsn_add_polygon(wks, plot, xx, yy, resPol) + if (flag_values) then + xt = (xx(0) + xx(1)) / 2. + yt = (yy(1) + yy(2)) / 2 + text_v(idiag, imod, 0) = \ + gsn_add_text(wks, plot, sprintf("%4.2f", data(idiag, imod, 0)), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + if (flag_ranks.and..not.ismissing(data(idiag, imod, 0))) then + xt = (xx(0) + xx(1)) / 2. + yt = (yy(1) + yy(2)) / 2. + text_r(idiag, imod, 0) = \ + gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 0), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + delete(xx) + delete(yy) + + else + + ; Primary reference => lower triangle + xx = (/imod * xbsize, (imod + 1) * xbsize, \ + (imod + 1) * xbsize, imod * xbsize/) + yy = (/idiag * ybsize, idiag * ybsize, (idiag + 1) * ybsize, \ + idiag * ybsize/) + poly(idiag, imod, 0) = gsn_add_polygon(wks, plot, xx, yy, resPol) + if (flag_values) then + xt = sum(xx(0:2)) / 3. + yt = sum(yy(0:2)) / 3. + text_v(idiag, imod, 0) = \ + gsn_add_text(wks, plot, sprintf("%4.2f", data(idiag, imod, 0)), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + if (flag_ranks.and..not.ismissing(data(idiag, imod, 0))) then + xt = sum(xx(0:2)) / 3. + yt = sum(yy(0:2)) / 3. 
+ text_r(idiag, imod, 0) = \ + gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 0), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + delete(xx) + delete(yy) - ; Calculate rankings - if (flag_ranks) then - if (.not.isatt(data, "rank_order")) then + ; Find corresponding color for alternative reference + curr = data(idiag, imod, 1) + if (ismissing(curr)) then + resPol@gsFillColor = 0 + else + lidx = max(ind(curr.ge.levels)) + if (label_lo) then + lidx = lidx + 1 + lidx = where(ismissing(lidx), 0, lidx) + end if + if (.not.label_hi .and. lidx.gt.npos - 1) then error_msg("f", scriptname, funcname, \ - "rank_order attribute must be specified if " + \ - "ranking display is desired") - end if - if (all(data@rank_order.ne.(/-1, 1/))) then + "metric value (" + curr + ") above the " + \ + "label upper bound (" + \ + levels(nlevels - 1) + \ + "): change label_bounds or set " + \ + "label_hi = True") + end if + if (ismissing(lidx)) then error_msg("f", scriptname, funcname, \ - "rank_order attribute can be either 1 (increasing) " + \ - "or -1 (decreasing)") + "metric value (" + curr + ") below the " + \ + "label lower bound (" + levels(0) + \ + "): change label_bounds or set " + \ + "label_lo = True") + end if + resPol@gsFillColor = round(pos(lidx), 3) end if - do idiag = 0, ndiag - 1 - do iref = 0, nref - 1 - pp = dim_pqsort(data(idiag, :, iref), data@rank_order) - rk = 1 - do imod = 0, nmod - 1 - if (.not.ismissing(data(idiag, pp(imod), iref))) then - ranks(idiag, pp(imod), iref) = rk - rk = rk + 1 - end if - end do - delete(rk) - delete(pp) - end do - end do - end if - ; Draw polygons - do idiag = 0, ndiag - 1 - - ; Set triangle or square - ; - draw a square if only 1 reference model is available or if the - ; alternative model contains only missing values - ; - draw a triangle if an alternative reference is available and - ; contains valid values - l_square = True - if (nref.gt.1) then - if (.not.all(ismissing(data(idiag, :, 1)))) then - l_square = False - end if + ; Alternative reference => upper triangle + xx = (/imod * xbsize, (imod + 1) * xbsize, \ + imod * xbsize, imod * xbsize/) + yy = (/idiag * ybsize, (idiag + 1) * ybsize, \ + (idiag + 1) * ybsize, idiag * ybsize/) + poly(idiag, imod, 1) = gsn_add_polygon(wks, plot, xx, yy, resPol) + if (flag_values) then + xt = sum(xx(0:2)) / 3. + yt = sum(yy(0:2)) / 3. + text_v(idiag, imod, 1) = \ + gsn_add_text(wks, plot, sprintf("%4.2f", data(idiag, imod, 1)), \ + xt, yt, resTxt) + delete(xt) + delete(yt) end if + if (flag_ranks.and..not.ismissing(data(idiag, imod, 1))) then + xt = sum(xx(0:2)) / 3. + yt = sum(yy(0:2)) / 3. + text_r(idiag, imod, 1) = \ + gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 1), \ + xt, yt, resTxt) + delete(xt) + delete(yt) + end if + delete(xx) + delete(yy) - do imod = 0, nmod - 1 - - ; Find corresponding color for primary reference - curr = data(idiag, imod, 0) - if (ismissing(curr)) then - resPol@gsFillColor = 0 - else - lidx = max(ind(curr.ge.levels)) - if (label_lo) then - lidx = lidx + 1 - lidx = where(ismissing(lidx), 0, lidx) - end if - if (.not.label_hi .and. 
lidx.gt.npos - 1) then - error_msg("f", scriptname, funcname, \ - "metric value (" + curr + ") above the " + \ - "label upper bound (" + levels(nlevels - 1) + \ - "): change label_bounds or set " + \ - "label_hi = True") - end if - if (ismissing(lidx)) then - error_msg("f", scriptname, funcname, \ - "metric value (" + curr + ") below the " + \ - "label lower bound (" + levels(0) + \ - "): change label_bounds or set " + \ - "label_lo = True") - end if - resPol@gsFillColor = round(pos(lidx), 3) - end if - - if (l_square) then - xx = (/imod * xbsize, (imod + 1) * xbsize, \ - (imod + 1) * xbsize, imod * xbsize, imod * xbsize/) - yy = (/idiag * ybsize, idiag * ybsize, (idiag + 1) * ybsize, \ - (idiag + 1) * ybsize, idiag * ybsize/) - poly(idiag, imod, 0) = \ - gsn_add_polygon(wks, plot, xx, yy, resPol) - if (flag_values) then - xt = (xx(0) + xx(1)) / 2. - yt = (yy(1) + yy(2)) / 2 - text_v(idiag, imod, 0) = \ - gsn_add_text(wks, plot, \ - sprintf("%4.2f", data(idiag, imod, 0)), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - if (flag_ranks.and..not.ismissing(data(idiag, imod, 0))) then - xt = (xx(0) + xx(1)) / 2. - yt = (yy(1) + yy(2)) / 2. - text_r(idiag, imod, 0) = \ - gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 0), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - delete(xx) - delete(yy) - - else - - ; Primary reference => lower triangle - xx = (/imod * xbsize, (imod + 1) * xbsize, \ - (imod + 1) * xbsize, imod * xbsize/) - yy = (/idiag * ybsize, idiag * ybsize, (idiag + 1) * ybsize, \ - idiag * ybsize/) - poly(idiag, imod, 0) = \ - gsn_add_polygon(wks, plot, xx, yy, resPol) - if (flag_values) then - xt = sum(xx(0:2)) / 3. - yt = sum(yy(0:2)) / 3. - text_v(idiag, imod, 0) = \ - gsn_add_text(wks, plot, \ - sprintf("%4.2f", data(idiag, imod, 0)), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - if (flag_ranks.and..not.ismissing(data(idiag, imod, 0))) then - xt = sum(xx(0:2)) / 3. - yt = sum(yy(0:2)) / 3. - text_r(idiag, imod, 0) = \ - gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 0), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - delete(xx) - delete(yy) - - ; Find corresponding color for alternative reference - curr = data(idiag, imod, 1) - if (ismissing(curr)) then - resPol@gsFillColor = 0 - else - lidx = max(ind(curr.ge.levels)) - if (label_lo) then - lidx = lidx + 1 - lidx = where(ismissing(lidx), 0, lidx) - end if - if (.not.label_hi .and. lidx.gt.npos - 1) then - error_msg("f", scriptname, funcname, \ - "metric value (" + curr + ") above the " + \ - "label upper bound (" + \ - levels(nlevels - 1) + \ - "): change label_bounds or set " + \ - "label_hi = True") - end if - if (ismissing(lidx)) then - error_msg("f", scriptname, funcname, \ - "metric value (" + curr + ") below the " + \ - "label lower bound (" + levels(0) + \ - "): change label_bounds or set " + \ - "label_lo = True") - end if - resPol@gsFillColor = round(pos(lidx), 3) - end if - - ; Alternative reference => upper triangle - xx = (/imod * xbsize, (imod + 1) * xbsize, \ - imod * xbsize, imod * xbsize/) - yy = (/idiag * ybsize, (idiag + 1) * ybsize, \ - (idiag + 1) * ybsize, idiag * ybsize/) - poly(idiag, imod, 1) = \ - gsn_add_polygon(wks, plot, xx, yy, resPol) - if (flag_values) then - xt = sum(xx(0:2)) / 3. - yt = sum(yy(0:2)) / 3. - text_v(idiag, imod, 1) = \ - gsn_add_text(wks, plot, \ - sprintf("%4.2f", data(idiag, imod, 1)), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - if (flag_ranks.and..not.ismissing(data(idiag, imod, 1))) then - xt = sum(xx(0:2)) / 3. 
- yt = sum(yy(0:2)) / 3. - text_r(idiag, imod, 1) = \ - gsn_add_text(wks, plot, "#" + ranks(idiag, imod, 1), \ - xt, yt, resTxt) - delete(xt) - delete(yt) - end if - delete(xx) - delete(yy) - - end if - delete(resPol@gsFillColor) - end do + end if + delete(resPol@gsFillColor) end do - - res@poly = poly - if (isdefined("text_v")) then - res@text_v = text_v - end if - if (isdefined("text_r")) then - res@text_v = text_r - end if - - ; Add legend text for the diagnostics - if(isatt(diag_script_info, "scale_font")) then - scale_font = diag_script_info@scale_font + end do + + res@poly = poly + if (isdefined("text_v")) then + res@text_v = text_v + end if + if (isdefined("text_r")) then + res@text_r = text_r + end if + + ; Add legend text for the diagnostics + if (isatt(diag_script_info, "scale_font")) then + scale_font = diag_script_info@scale_font + else + scale_font = 1. + end if + resTxt@txFontHeightF = min((/0.012 * scale_font, \ + 0.57 * scale_font / max((/ndiag, nmod/))/)) + resTxt@txJust = "CenterRight" + step = res@vpHeightF / ndiag + xpos = 0.195 + ypos = 0.2 + step / 2. + do idiag = 0, ndiag - 1 + gsn_text_ndc(wks, data&diagnostics(idiag), xpos, ypos, resTxt) + ypos = ypos + step + end do + + ; Add text for the models + resTxt@txAngleF = 90. + step = res@vpWidthF / nmod + resTxt@txJust = "CenterRight" + xpos = 0.2 + step / 2. + ypos = 0.19 + do imod = 0, nmod - 1 + gsn_text_ndc(wks, data&models(imod), xpos, ypos, resTxt) + xpos = xpos + step + end do + + ; Draw a labelbar + resTxt@txFontHeightF = 0.015 * scale_font + resTxt@txAngleF = 0. + resTxt@txJust = "CenterLeft" + height = res@vpHeightF / npos + xp1 = 0.2 + res@vpWidthF + 0.01 + xp2 = 0.2 + res@vpWidthF + 0.04 + xpm = (xp1 + xp2) / 2. + ypos = 0.2 + + il = 0 + do ii = 0, npos - 1 + resPol@gsFillColor = pos(ii) + if (ii.eq.0) then + if (label_lo) then + gsn_polygon_ndc(wks, (/xpm, xp1, xp2, xpm/),\ + (/ypos, ypos + height, \ + ypos + height, ypos/), resPol) + else + gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/),\ + (/ypos, ypos + height, \ + ypos + height, ypos, ypos/), resPol) + gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ + xp2 + 0.01, ypos, resTxt) + il = il + 1 + end if + elseif (ii.eq.(npos - 1)) then + if (label_hi) then + gsn_polygon_ndc(wks, (/xp1, xpm, xp2, xp1/), \ + (/ypos, ypos + height, ypos, ypos/), resPol) + gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ + xp2 + 0.01, ypos, resTxt) + il = il + 1 + else + gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/),\ + (/ypos, ypos + height, \ + ypos + height, ypos, ypos/), resPol) + gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ + xp2 + 0.01, ypos, resTxt) + il = il + 1 + gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ + xp2 + 0.01, ypos + height, resTxt) + il = il + 1 + end if else - scale_font = 1. + gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/), \ + (/ypos, ypos + height, ypos + height, ypos, ypos/), \ + resPol) + gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ + xp2 + 0.01, ypos, resTxt) + il = il + 1 end if - resTxt@txFontHeightF = min((/0.012 * scale_font, \ - 0.57 * scale_font / max((/ndiag, nmod/))/)) - resTxt@txJust = "CenterRight" - step = res@vpHeightF / ndiag - xpos = 0.195 - ypos = 0.2 + step / 2. - do idiag = 0, ndiag - 1 - gsn_text_ndc(wks, data&diagnostics(idiag), xpos, ypos, resTxt) - ypos = ypos + step - end do + ypos = ypos + height + end do + draw(plot) + frame(wks) - ; Add text for the models - resTxt@txAngleF = 90. - step = res@vpWidthF / nmod - resTxt@txJust = "CenterRight" - xpos = 0.2 + step / 2.
- ypos = 0.19 - do imod = 0, nmod - 1 - gsn_text_ndc(wks, data&models(imod), xpos, ypos, resTxt) - xpos = xpos + step - end do + leave_msg(scriptname, funcname) + return(plot) - ; Draw a labelbar - resTxt@txFontHeightF = 0.015 * scale_font - resTxt@txAngleF = 0. - resTxt@txJust = "CenterLeft" - height = res@vpHeightF / npos - xp1 = 0.2 + res@vpWidthF + 0.01 - xp2 = 0.2 + res@vpWidthF + 0.04 - xpm = (xp1 + xp2) / 2. - ypos = 0.2 - - il = 0 - do ii = 0, npos - 1 - resPol@gsFillColor = pos(ii) - if (ii.eq.0) then - if (label_lo) then - gsn_polygon_ndc(wks, (/xpm, xp1, xp2, xpm/),\ - (/ypos, ypos + height, \ - ypos + height, ypos/), resPol) - else - gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/),\ - (/ypos, ypos + height, \ - ypos + height, ypos, ypos/), resPol) - gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ - xp2 + 0.01, ypos, resTxt) - il = il + 1 - end if - else if (ii.eq.(npos - 1)) then - if (label_hi) then - gsn_polygon_ndc(wks, (/xp1, xpm, xp2, xp1/), \ - (/ypos, ypos + height, ypos, ypos/), resPol) - gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ - xp2 + 0.01, ypos, resTxt) - il = il + 1 - else - gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/),\ - (/ypos, ypos + height, \ - ypos + height, ypos, ypos/), resPol) - gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ - xp2 + 0.01, ypos, resTxt) - il = il + 1 - gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ - xp2 + 0.01, ypos + height, resTxt) - il = il + 1 - end if - else - gsn_polygon_ndc(wks, (/xp1, xp1, xp2, xp2, xp1/), \ - (/ypos, ypos + height, \ - ypos + height, ypos, ypos/), \ - resPol) - gsn_text_ndc(wks, sprintf("%4.2f", levels(il)), \ - xp2 + 0.01, ypos, resTxt) - il = il + 1 - end if - end if - ypos = ypos + height - end do - draw(plot) - frame(wks) +end - leave_msg(scriptname, funcname) - return(plot) +; ############################################################################# + +undef("circle_plot") +function circle_plot(wks_in[1], + source, + varname[1]: string, + ensname[1]: string) +; +; Arguments +; wks_in: workstation (graphic object or default will be used). +; source: data to be plotted or a NetCDF filename with data. +; varname: variable name in the file. +; ensname: ensemble/project name, used to label the plot. +; +; Source prototype: +; source = (nmod, 2) +; source!0 = models +; source!1 = statistic +; source(:, 0) = SMPI +; source(:, 1) = 95% conf radius +; +; Return value: +; A graphic variable. +; +; Caveats +; +; Modification history: +; 20170424-A_gier_be: written. +; +local funcname, scriptname, verbosity, wks, wks_in, data, source, res, atts, \ + base, varname +begin + + funcname = "circle_plot" + scriptname = "plot_scripts/ncl/portrait_plot.ncl" + enter_msg(scriptname, funcname) + + ; Get data, either directly or via netCDF file + if (typeof(source) .eq. "string") then + data = ncdf_read(source, varname) + else + data = source + copy_VarMeta(source, data) + end if + + ; Use -999 as fill value (easier to display as text) + data@_FillValue = -999.
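To make the source prototype concrete: circle_plot expects an (nmod, 2) array with the SMPI in column 0 and the 95% confidence radius in column 1. A minimal NCL sketch of a conforming input, with invented values (real callers also attach the model names as the models labels and the var/diag_script attributes read just below):

  begin
    ; Column 0 = SMPI, column 1 = 95% confidence radius (values invented)
    data = (/(/1.20, 0.15/), (/0.90, 0.10/), (/1.00, 0.05/)/)
    data!0 = "models"      ; e.g. ModelA, ModelB, multi-model-mean
    data!1 = "statistic"
    data@_FillValue = -999.
    printVarSummary(data)  ; [models | 3] x [statistic | 2]
  end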
+ + ; Retrieve basic metadata from data + defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/) + if (any(varname.eq.defaults)) then + performance_index = att2var(data, "var") + DIAG_SCRIPT = att2var(data, "diag_script") + else + performance_index = varname + end if + + ; Check if a valid wks has been provided, otherwise invoke default + wks = get_wks(wks_in, DIAG_SCRIPT, performance_index) + + ; Make list of markers for different projects + symbols_1 = array_append_record(inttochar(ispan(97, 122, 1)), \ + inttochar(ispan(65, 90, 1)), 0) + + ; Greek alphabet + symbols_2 = new(dimsizes(symbols_1), string) + do ii = 0, dimsizes(symbols_1) - 1 + symbols_2(ii) = "~F5~" + symbols_1(ii) + "~F~" + end do + + n_projects = 1 + x_min = min((/0.38, min(data(:, 0) - data(:, 1))-0.05/)) + x_max = max(data(:, 0) + data(:, 1)) + 0.25 + y_max = n_projects + 2 + y_min = n_projects - 2 + + ; Default resources + res = True + res@xyXStyle = "Log" + res@gsnDraw = False + res@gsnFrame = False + res@tmBorderThicknessF = False + res@tmXBBorderOn = False + res@tmXTBorderOn = False + res@tmYLBorderOn = False + res@tmYRBorderOn = False + res@tmXBOn = False + res@tmXTOn = False + res@tmYLOn = False + res@tmYROn = False + res@trXMinF = x_min + res@trXMaxF = x_max + res@trYMinF = y_min + res@trYMaxF = y_max + res@vpXF = 0.1 + res@vpWidthF = 0.9 + res@vpYF = 1.0 + res@vpHeightF = 0.4 + res@tiXAxisString = "I~S~2" + res@tiXAxisOffsetYF = 0.1 + + res_lines = True + res_lines@gsLineDashPattern = 0. + res_lines@gsLineThicknessF = 2. + res_lines@tfPolyDrawOrder = "PreDraw" + + res_circles = True + res_circles@gsEdgesOn = True + res_circles@gsEdgeThicknessF = 1.5 + + res_text = True + res_text@txFontHeightF = 0.02 + + res_marker = True + res_marker@gsMarkerSizeF = 0.02 + res_marker@gsMarkerIndex = 1 + res_marker@gsMarkerColor = "black" + res_marker@tfPolyDrawOrder = "Draw" + res_marker@txFontHeightF = 0.015 + res_marker@txFontThicknessF = 2 + + plot = gsn_csm_xy(wks, (/x_min, x_max/), (/1., 1./), res) + + ; Draw baseline + plot@$unique_string("dum")$ = \ + gsn_add_polyline(wks, plot, (/x_min, x_max/), (/1., 1./), res_lines) + + ; Draw ticks on baseline + x_ticks = 0.1 * ispan(toint(x_min * 10), toint(x_max * 10), 1) + x_big_ticks = 0.1 * ispan(0, toint(x_max * 10), 5) + x_numbers = ispan(toint(x_min) + 1, toint(x_max), 1) + + do ticks = 0, dimsizes(x_ticks) - 1 + plot@$unique_string("dum")$ = \ + gsn_add_polyline(wks, plot, (/x_ticks(ticks), x_ticks(ticks)/), \ + (/0.95, 1.05/), res_lines) + end do + + do ticks = 0, dimsizes(x_big_ticks) - 1 + plot@$unique_string("dum")$ = \ + gsn_add_polyline(wks, plot, \ + (/x_big_ticks(ticks), x_big_ticks(ticks)/), \ + (/0.85, 1.15/), res_lines) + end do + + do ticks = 0, dimsizes(x_numbers)-1 + plot@$unique_string("dum")$ = \ + gsn_add_text(wks, plot, tostring(x_numbers(ticks)), \ + x_numbers(ticks), 0.2, res_text) + end do + + ; Add models as circles, transform x-coords into ndc space + ; to keep circles in log + n = 50 ; nr of points to draw circle with + do imod = 0, dimsizes(data&models) - 1 + x_in = (/data(imod, 0), data(imod, 0) + data(imod, 1), data(imod, 0)/) + y_in = (/1., 1., 1.26/) + x_ndc = new(3, float) + y_ndc = new(3, float) + datatondc(plot, x_in, y_in, x_ndc, y_ndc) + mod_r = x_ndc(1) - x_ndc(0) + x_1 = fspan(-mod_r, mod_r, n) + y_1 = sqrt(mod_r ^ 2 - x_1 ^ 2) + x = array_append_record(x_1, x_1(::-1), 0) + y = array_append_record(y_1, -y_1(::-1), 0) + x = x + x_ndc(0) + y = y + y_ndc(0) + if (data&models(imod).eq."multi-model-mean") then + 
res_circles@gsFillColor = "black" + res_circles@gsFillOpacityF = 0.8 + else + res_circles@gsFillColor = "orange" + res_circles@gsFillOpacityF = 0.3 + end if + gsn_polygon_ndc(wks, x, y, res_circles) + gsn_polymarker_ndc(wks, x_ndc(0), y_ndc(0), res_marker) + delete([/mod_r, x_1, y_1, x, y, x_ndc, y_ndc, x_in, y_in/]) + end do + + ; Plot average I^2 and Project name (Multi Model Mean at the end) + x_in = (/1., 1./) ; arbitrary value + y_in = (/1., 1./) + x_ndc = new(2, float) + y_ndc = new(2, float) + datatondc(plot, x_in, y_in, x_ndc, y_ndc) + res_text@txJust = "CenterLeft" + gsn_text_ndc(wks, ensname, 0.01, y_ndc(0), res_text) + delete([/x_in, y_in, x_ndc, y_ndc/]) + + ; Text symbols after all circles are drawn, in case they fall inside + x_in = data(:, 0) + y_in = new(dimsizes(data(:, 0)), float) + y_in(:) = max(data(:, 1)) + 1.5 + x_ndc = new(dimsizes(data(:, 0)), float) + y_ndc = new(dimsizes(data(:, 0)), float) + datatondc(plot, x_in, y_in, x_ndc, y_ndc) + + do imod = 0, dimsizes(data&models) - 1 + gsn_text_ndc(wks, symbols_2(imod), x_ndc(imod), y_ndc(imod), res_marker) + end do + delete([/x_in, y_in, x_ndc, y_ndc/]) + + ; Draw legend + txres = True + txres@txFontHeightF = 0.015 + txres@txJust = "CenterLeft" + ypos = fspan(0.55, 0.1, 20) + half = toint(dimsizes(data&models) / 2) + do imod = 0, half + gsn_text_ndc(wks, symbols_2(imod) + ": " + \ + data&models(imod), 0.01, ypos(imod), txres) + end do + do imod = half + 1, dimsizes(data&models) - 1 + gsn_text_ndc(wks, symbols_2(imod) + ": " + \ + data&models(imod), 0.3, ypos(imod - half - 1), txres) + end do + + draw(plot) + frame(wks) + + leave_msg(scriptname, funcname) + return(plot) end diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-precip-absdelta.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-precip-absdelta.rgb new file mode 100644 index 0000000000..0fc2dd42e1 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-precip-absdelta.rgb @@ -0,0 +1,21 @@ +############################################### +# Similar to the colors used in IPCC Fig. 9.4 # +############################################### + +# number of colors in table + +ncolors = 7 + +# first color = GMT background color +# last color = GMT foreground color + +# r g b + +251 246 162 +239 220 133 +241 195 108 +220 151 84 +199 121 56 +174 95 39 +151 73 27 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-precip-reldelta.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-precip-reldelta.rgb new file mode 100644 index 0000000000..cb6a8de529 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-precip-reldelta.rgb @@ -0,0 +1,28 @@ +############################################### +# Similar to the colors used in IPCC Fig. 9.4 # +############################################### + +# number of colors in table + +ncolors = 14 + +# first color = GMT background color +# last color = GMT foreground color + +# r g b + +27 55 88 +47 93 128 +77 130 162 +108 169 198 +88 197 154 +147 216 154 +194 232 181 +251 246 162 +239 220 133 +241 195 108 +220 151 84 +199 121 56 +174 95 39 +151 73 27 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-tas-absdelta.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-tas-absdelta.rgb new file mode 100644 index 0000000000..46a4d17a87 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc-tas-absdelta.rgb @@ -0,0 +1,25 @@ +############################################### +# Similar to the colors used in IPCC Fig. 
9.2 # +############################################### + +# number of colors in table + +ncolors = 11 + +# first color = GMT background color +# last color = GMT foreground color + +# r g b + +248 249 182 +245 244 98 +249 218 38 +251 184 28 +247 144 29 +245 112 31 +241 71 36 +238 30 35 +210 29 37 +231 60 76 +248 150 166 + diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_color_tseries.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_color_tseries.rgb new file mode 100644 index 0000000000..49d04d86eb --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_color_tseries.rgb @@ -0,0 +1,7 @@ +255 255 255 +0 0 0 +190 190 190 +0 0 255 +121 188 255 +255 130 45 +255 0 0 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb new file mode 100644 index 0000000000..197ef850eb --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_colors_blu2red_centered.rgb @@ -0,0 +1,18 @@ + 10 40 100 + 5 75 170 + 51 102 217 + 77 128 230 +105 138 236 +133 166 255 +151 180 250 +179 204 245 +204 217 255 +255 255 179 +255 205 128 +252 181 107 +255 161 97 +255 115 56 +243 83 37 +217 51 26 +191 0 0 +140 0 0 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_colors_red2blu_centered.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_colors_red2blu_centered.rgb new file mode 100644 index 0000000000..ac37c3cc98 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_colors_red2blu_centered.rgb @@ -0,0 +1,18 @@ +140 0 0 +191 0 0 +217 51 26 +243 83 37 +255 115 56 +255 161 97 +252 181 107 +255 205 128 +255 255 179 +204 217 255 +179 204 245 +151 180 250 +133 166 255 +105 138 236 + 77 128 230 + 51 102 217 + 5 75 170 + 10 40 100 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_scaling.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_scaling.rgb new file mode 100644 index 0000000000..5a6099ca6f --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temp_scaling.rgb @@ -0,0 +1,12 @@ +204 217 255 +255 245 204 +255 224 153 +255 203 102 +255 180 51 +255 140 51 +255 85 0 +230 40 30 +191 0 0 +140 0 0 +108 0 0 +110 0 70 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temperature_with_grey.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temperature_with_grey.rgb new file mode 100644 index 0000000000..d5ab2abfb3 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/ipcc_temperature_with_grey.rgb @@ -0,0 +1,17 @@ + 10 40 100 + 51 102 217 +105 138 236 +151 180 250 +204 217 255 +255 245 204 +255 224 153 +255 203 102 +255 180 51 +255 140 51 +255 85 0 +230 40 30 +191 0 0 +140 0 0 +108 0 0 +110 0 70 + 60 60 60 diff --git a/esmvaltool/diag_scripts/shared/plot/rgb/percent100.rgb b/esmvaltool/diag_scripts/shared/plot/rgb/percent100.rgb new file mode 100644 index 0000000000..7ab86a1b01 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/rgb/percent100.rgb @@ -0,0 +1,32 @@ +# number of colors in table not counting foreground/background color + +ncolors = 20 + +# first color = GMT background color +# last color = GMT foreground color + +# r g b + +0 0 125 +0 0 205 +0 0 230 +0 0 255 +0 105 255 +0 155 255 +0 205 255 +0 255 255 +100 255 255 +200 255 255 +255 255 255 +255 255 200 +255 255 100 +255 255 0 +255 205 0 +255 155 0 +255 105 0 +255 0 0 +230 0 0 +205 0 0 +180 0 0 +125 0 0 + diff --git a/esmvaltool/diag_scripts/shared/plot/scatterplot.ncl b/esmvaltool/diag_scripts/shared/plot/scatterplot.ncl index 9fa57ddcc9..efb74ce7d3 100644 --- 
a/esmvaltool/diag_scripts/shared/plot/scatterplot.ncl +++ b/esmvaltool/diag_scripts/shared/plot/scatterplot.ncl @@ -10,11 +10,11 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/plot/style.ncl" -load "./diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/style.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" ; ############################################################################# undef("scatterplot") @@ -22,22 +22,23 @@ function scatterplot(wks_in[1], source, varname[1]: string, reflines: logical, - stats: logical) + stats: logical, + inlist:list) ; ; Arguments ; wks_in: workstation ("graphic" object or default will be used). ; source: data to be plotted (see source prototype above) or a NetCDF ; filename with data. ; varname: variable name in the file. -; logmode: if true, log scale will be used on both axes. ; reflines: show/hide reference lines (1:1 and +/- factor 2). ; stats: show/hide summary of statistical values on the plot. +; inlist: list of input_file_info items ; ; Source prototype ; source = (2, npoints) ; source(0, :) = x-axis values ; source(1, :) = y-axis values -; source!0 = models +; source!0 = datasets ; ; Return value ; A graphic object. @@ -48,13 +49,14 @@ function scatterplot(wks_in[1], ; ; Caveats ; Linear axes are used as default, log axis can be optionally provided by -; the calling diag_script. +; the calling diag_script (option: diag_script_info@scatter_log). ; ; Modification history ; 20140228-A_righ_ma: written. ; local funcname, scriptname, scattermode, loc_res, resL, linec, lineh, linel, \ - resT, nstats, st, text, step, xpos, ypos, ii, tmp, ratio, fac2p, str + resT, nstats, st, text, step, xpos, ypos, ii, tmp, ratio, fac2p, str, \ + scatter_log begin funcname = "scatterplot" @@ -68,8 +70,16 @@ begin data = source end if + ; Get scatter_log option + scatter_log = False + if (isatt(diag_script_info, "scatter_log")) then + if (diag_script_info@scatter_log .eq.
"True") then + scatter_log = True + end if + end if + ; Style options - colors = project_style(diag_script_info, "colors") + colors = project_style(inlist, diag_script_info, "colors") ; Define x and y points xpts = data(0, :) @@ -84,7 +94,7 @@ begin end if ; Check if a valid wks has been provided, otherwise invoke default - wks = get_wks(wks_in, diag_script, var) + wks = get_wks(wks_in, DIAG_SCRIPT, var) gsn_define_colormap(wks, array_append_record((/"white", "black"/), \ colors, 0)) @@ -105,9 +115,9 @@ begin res@gsnCenterString = res@gsnCenterString + " [" + \ format_units(data@units) + "]" end if - res@tiXAxisString = data&models(0) - res@tiYAxisString = data&models(1) - if (diag_script_info@scatter_log) then + res@tiXAxisString = data&datasets(0) + res@tiYAxisString = data&datasets(1) + if (scatter_log) then res@trXAxisType = "LogAxis" res@trYAxisType = "LogAxis" xpts = where(xpts.le.0, xpts@_FillValue, xpts) @@ -144,7 +154,7 @@ begin (/res@trYMinF, res@trYMaxF/), resL) plot@linec = linec resL@gsLineDashPattern = 1 - if (diag_script_info@scatter_log) then + if (scatter_log) then lineh = gsn_add_polyline(wks_in, plot, \ (/res@trXMinF, res@trXMaxF / 2./), \ (/2 * res@trYMinF, res@trYMaxF/), resL) @@ -176,7 +186,7 @@ begin text = new(nstats, graphic) ; Define labels horizontal position - if (diag_script_info@scatter_log) then + if (scatter_log) then step = (log10(res@trYMaxF) - log10(res@trYMinF)) / 20. xpos = 10 ^ (log10(res@trYMinF) + step) else @@ -187,7 +197,7 @@ begin ; Define labels vertical position ypos = new(nstats, float) do ii = 0, nstats - 1 - if (diag_script_info@scatter_log) then + if (scatter_log) then tmp = log10(res@trYMaxF) - step * (ii + 1) ypos(ii) = 10 ^ tmp delete(tmp) @@ -267,7 +277,7 @@ function scatterplot3D(wks_in[1], ; source(1, :) = y-axis values ; source(2, :) = z-axis values ; source!0 = statistic -; source!1 = models +; source!1 = datasets ; ; Return value ; A graphic object. @@ -311,7 +321,7 @@ begin end if ; Check if a valid wks has been provided, otherwise invoke default - wks = get_wks(wks_in, diag_script, var) + wks = get_wks(wks_in, DIAG_SCRIPT, var) ; Create a 2D scatter plot res = True @@ -392,7 +402,7 @@ begin xb = xb - 0.01 end if ndctodata(plot, xb, yb, xp, yp) - labels(imod) = gsn_add_text(wks, plot, data&models(imod), xp, yp, resT) + labels(imod) = gsn_add_text(wks, plot, data&datasets(imod), xp, yp, resT) str = "text" + imod plot@$str$ = labels(imod) @@ -430,13 +440,15 @@ end undef("scatterplot_markers") function scatterplot_markers(wks_in[1], source, - varname[1]) + varname[1], + inlist:list) ; ; Arguments ; wks_in: workstation ("graphic" object or default will be used). ; source: data to be plotted (see source prototype above) or a NetCDF ; filename with data. ; varname: variable name in the file. 
+; inlist: list of input_file_info items ; ; Source prototype ; source = (2, npoints) @@ -485,12 +497,26 @@ begin end if ; Check if a valid wks has been provided, otherwise invoke default - wks = get_wks(wks_in, diag_script, var) + wks = get_wks(wks_in, DIAG_SCRIPT, var) ; Define style - annots = project_style(diag_script_info, "annots") - colors = project_style(diag_script_info, "colors") - markers = project_style(diag_script_info, "markers") + ; If the data array has attributes annots, colors, markers, use those + ; instead of function project_style + if (isatt(data, "annots")) then + annots = data@annots + else + annots = project_style(inlist, diag_script_info, "annots") + end if + if (isatt(data, "colors")) then + colors = data@colors + else + colors = project_style(inlist, diag_script_info, "colors") + end if + if (isatt(data, "markers")) then + markers = data@markers + else + markers = project_style(inlist, diag_script_info, "markers") + end if ; Create a 2D scatter plot res = True @@ -514,6 +540,25 @@ begin " [" + format_units(data@units(1)) + "]" end if + if (isatt(data, "trYMinF")) then + res@trYMinF = data@trYMinF ; min value on y-axis + end if + if (isatt(data, "trYMaxF")) then + res@trYMaxF = data@trYMaxF ; max value on y-axis + end if + if (isatt(data, "trXMinF")) then + res@trXMinF = data@trXMinF ; min value on x-axis + end if + if (isatt(data, "trXMaxF")) then + res@trXMaxF = data@trXMaxF ; max value on x-axis + end if + if (isatt(data, "trXReverse")) then + res@trXReverse = data@trXReverse ; flip the x axis + end if + if (isatt(data, "trYReverse")) then + res@trYReverse = data@trYReverse ; flip the y axis + end if + ; Override defaults with "res_" attributes of "data" res_new = att2var(data, "res_") copy_VarMeta(res_new, res) diff --git a/esmvaltool/diag_scripts/shared/plot/style.ncl b/esmvaltool/diag_scripts/shared/plot/style.ncl index de0f785a55..e9bf934567 100644 --- a/esmvaltool/diag_scripts/shared/plot/style.ncl +++ b/esmvaltool/diag_scripts/shared/plot/style.ncl @@ -5,8 +5,6 @@ ; Check the header of each routine for documentation. ; ; Contents: -; function unique_labels_min -; function unique_labels_all ; function project_style ; function place_debuginfo ; procedure place_description @@ -17,137 +15,18 @@ ; ; ############################################################################# - -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" - -load "./diag_scripts/shared/set_operators.ncl" - -; ############################################################################# -undef("unique_labels_min") -function unique_labels_min(prio: string) -; -; Arguments -; prio: string vector with attribute names (dataset_info@*), ordered by -; priority for annotation (starting with highest). -; -; Return value -; A vector (string) with one element for each dataset_info@dataset -> each -; label contains the least possible attribute strings. -; -; Description -; Builds the vector by looping over dataset_info@dataset. -; Adds "_attribute" to non-unique labels, until prio is exhausted. -; -; Caveats -; Uses dataset_info@*, which is available here anyway. -; -; References -; -; Modification history -; 20130422-A_gott_kl: written.
-; -local funcname, scriptname, prio, result, unique, iprio, index -begin - - funcname = "unique_labels_min" - scriptname = "diag_scripts/shared/plot/style.ncl" - enter_msg(scriptname, funcname) - - result = dataset_info@$prio(0)$ ; Start with highest priority - unique = result(UNIQ(result)) - iprio = 1 ; counter for while loop (must not exceed dimsizes(prio)) - do while (dimsizes(unique) .ne. dimsizes(result)) - if (iprio .eq. dimsizes(prio)) then - error_msg("w", scriptname, funcname, "Add more attributes to prio " + \ - "to make labels unique! Continuing with non-unique labels.") - delete(unique) ; Reset - unique = result - else - do i = 0, dimsizes(unique) - 1 ; loop over different(sic!) labels - index = ind(result .eq. unique(i)) - if (dimsizes(index) .gt. 1) then ; More than one occurence - result(index) = \ - result(index) + "_" + dataset_info@$prio(iprio)$(index) - end if - delete(index) - end do ; Different labels - delete(unique) ; Reset - unique = result(UNIQ(result)) - iprio = iprio + 1 - end if ; iprio - end do ; while - - leave_msg(scriptname, funcname) - return(result) - -end - -; ############################################################################# -undef("unique_labels_all") -function unique_labels_all(prio:string) -; -; Arguments -; prio: string vector with attribute names (dataset_info@*), ordered by -; priority for annotation (starting with highest) -; -; Description -; Builds the vector by looping over dataset_info@dataset. -; Adds "_attribute" until prio is exhausted or until labels are unique. -; -; Return value -; A vector (string) with one element for each dataset_info@dataset -> all -; labels contain the same (least possible) number of attribute strings. -; -; Caveats -; Uses dataset_info@*, which is available here anyway. -; -; References -; -; Modification history -; 20130422-A_gott_kl: written. -; -local funcname, scriptname, prio, result, unique, iprio, index -begin - - funcname = "unique_labels_all" - scriptname = "diag_scripts/shared/plot/style.ncl" - enter_msg(scriptname, funcname) - - result = dataset_info@$prio(0)$ ; Start with highest priority - unique = result(UNIQ(result)) - iprio = 1 ; counter for while loop (must not exceed dimsizes(prio)) - do while (dimsizes(unique) .ne. dimsizes(result)) - if (iprio .eq. dimsizes(prio)) then - error_msg("w", scriptname, funcname, "Add more attributes to prio " + \ - "to make labels unique! Continuing with non-unique labels.") - delete(unique) ; Reset - unique = result - else - do i = 0, dimsizes(unique) - 1 ; loop over different(sic!) labels - index = ind(result .eq. unique(i)) - if (dimsizes(index) .gt. 1) then ; More than one occurence - result = result + "_" + dataset_info@$prio(iprio)$ - end if - delete(index) - end do ; Different labels - delete(unique) ; Reset - unique = result(UNIQ(result)) - iprio = iprio + 1 - end if ; iprio - end do ; while - - leave_msg(scriptname, funcname) - return(result) - -end +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" ; ############################################################################# undef("project_style") -function project_style(info, flag) +function project_style(items: list, + info[1]:logical, + flag[1]:string) ; ; Arguments -; info: info array, as defined in ./variable_defs. +; items: list of input_file_info items +; info: diag_script_info logical ; flag: string determining the type of array requested: ; "annots": annotation strings. 
; "colors": colors (named colors, RGB or RGBA codes) @@ -160,7 +39,7 @@ function project_style(info, flag) ; observations and reanalyses). ; ; Return value -; An array of the same size of dataset_info@dataset, with the stlye +; An array of the same size of items list, with the stlye ; information for the given flag. The type depends on the flag. ; ; Description @@ -191,11 +70,7 @@ begin enter_msg(scriptname, funcname) ; Check for the available dictonary keys to be used for the annotations - ; (see get_dict_key in projects.py) - dkeys = (/"dataset", "ensemble", "case_name", "exp", "mip", "realm"/) - tmp = isatt(dataset_info, dkeys) - l_dkeys = dkeys(ind(tmp)) - delete(tmp) + dkeys = (/"dataset", "ensemble", "exp", "mip"/) ; Style information for this flag available as diag_script_info@$flag$ if (isatt(info, flag)) then @@ -213,7 +88,7 @@ begin ; Styleset defined, search for corresponding style file styleset = str_lower(styleset) - stylefile = "./diag_scripts/shared/plot/styles/" + styleset + ".style" + stylefile = "$diag_scripts/shared/plot/styles/" + styleset + ".style" if (.not.fileexists(stylefile)) then error_msg("f", scriptname, funcname, \ "style file " + stylefile + " not found") @@ -225,7 +100,32 @@ begin ; Model annotations, return unique model names based on dict keys if (flag.eq."annots") then - result = unique_labels_min(l_dkeys) + + ; Start with highest priority + result = metadata_att_as_array(items, dkeys(0)) + + unique = get_unique_values(result) + iprio = 1 + do while (dimsizes(unique) .ne. dimsizes(result)) + if (iprio .eq. dimsizes(dkeys)) then + error_msg("w", scriptname, funcname, "non-unique labels in dataset" + \ + "annotations") + unique := result + else + do i = 0, dimsizes(unique) - 1 + index = ind(result .eq. unique(i)) + if (dimsizes(index) .gt. 
1) then ; More than one occurence + append = metadata_att_as_array(items, dkeys(iprio)) + result(index) = result(index) + "_" + append(index) + end if + delete(index) + end do + delete(unique) + unique = get_unique_values(result) + iprio = iprio + 1 + end if + end do + leave_msg(scriptname, funcname) return(result) end if @@ -242,16 +142,17 @@ begin if (ismissing(col)) then error_msg("f", scriptname, funcname, "flag " + flag + " not allowed") end if - nmodels = dimsizes(dataset_info@dataset) - column = new(nmodels, string) + datasets = metadata_att_as_array(items, "dataset") + ndatasets = dimsizes(datasets) + column = new(ndatasets, string) wildcard_cnt = 0 - do ii = 0, nmodels - 1 - row = ind(dataset_info@dataset(ii).eq.modelsinfile) + do ii = 0, ndatasets - 1 + row = ind(datasets(ii).eq.modelsinfile) ; Multiple entries, raise error if (dimsizes(row).gt.1) then - error_msg("f", scriptname, funcname, "Multiple entries for dataset" + \ - dataset_info@dataset(ii) + " in style file " + stylefile) + error_msg("f", scriptname, funcname, "Multiple entries for dataset " + \ + datasets(ii) + " in style file " + stylefile) end if ; Missing dataset @@ -260,8 +161,8 @@ begin ; Search for "default" row = ind("default".eq.modelsinfile) if (.not.ismissing(row)) then - log_debug("Dataset " + dataset_info@dataset(ii) + " not found in " + \ - stylefile + ", using default") + log_debug("Dataset " + datasets(ii) + \ + " not found in " + stylefile + ", using default") column(ii) = str_squeeze(str_get_field(sty(row), col, "|")) continue end if @@ -270,7 +171,7 @@ begin rows = ind("*".eq.modelsinfile) if (all(ismissing(rows))) then error_msg("f", scriptname, funcname, "Dataset " + \ - dataset_info@dataset(ii) + " not found in stylefile" + \ + datasets(ii) + " not found in style file " + \ stylefile) end if if (wildcard_cnt.ge.dimsizes(rows)) then @@ -278,7 +179,7 @@ begin " contains too few wildcard (*) " + "entries (" + \ wildcard_cnt + "), please add more") end if - log_debug("Dataset " + dataset_info@dataset(ii) + " not found in " + \ + log_debug("Dataset " + datasets(ii) + " not found in " + \ stylefile + ", using wildcard") row = rows(wildcard_cnt) wildcard_cnt = wildcard_cnt + 1 @@ -318,7 +219,8 @@ begin else error_msg("f", scriptname, funcname, \ "Invalid definition of color for dataset " + \ - dataset_info@dataset(ii) + ", check style file " + stylefile) + datasets(ii) + ", check style file " + \ + stylefile) end if end if end if @@ -343,69 +245,6 @@ begin end -; ############################################################################# -undef("project_style_GO") -function project_style_GO(flag:string) ; FiX-ME move to style-file -; -; Arguments: -; flag = string determining the type of array requested -; Return value: array of dimsizes(dataset_info@dataset) -; Definition of plot attribs; Returns arrays of dimsizes(dataset_info@dataset) -; flag = "colors": returns an array of colors (either RGB or named colors) -; flag = "dashes": returns an array of dash styles (integer numbers) -; flag = "thicks": returns an array of line thicknesses (numeric) -; flag = "annots": returns an array of annotation strings -; flag = "avgstd": returns an array of flags -; 0 -> (model) takes part in calculation of mean & stddev -; 1 -> (obs/reanalysis) does not take part -; Description: -; * Definition of plot attributes: type depending on flag -; Modification history: -; * 20130419 written (Klaus-Dirk.Gottschaldt@dlr.de) -; -local result, modelstyles, flag -begin - - funcname = "project_style_GO" - scriptname =
"diag_scripts/shared/plot/style.ncl" - enter_msg(scriptname, funcname) - - ; define (/model, color, dash, thickness, avgstd/) for each model - modelstyles = \ - (/(/"PlankTOM5", "(/0.6, 0., 0.0078/)", "0", "1", "0"/), \ - (/"PlankTOM10", "(/0.7686, 0.4745, 0./)", "1", "1", "0"/), \ - (/"medusa", "(/0.5960, 0.9843, 0.5960/)", "1", "1", "0"/), \ - (/"hadocc", "(/0.9372, 0.3333, 0.0588/)", "1", "1", "0"/), \ - (/"ersem", "(/0.0588, 0.3333, 0.9372/)", "1", "1", "0"/), \ - (/"ref", "(/0., 0.3098, 0./)", "0", "1", "0"/), \ - (/"model_mean", "(/0.6, 0., 0.0078/)", "0", "3", "1"/), \ - (/"unknown", "black", "0", "1", "0"/)/) - - ; assign plot attributes (see function select_style in style_auxiliary.ncl) - if (flag .eq. "colors") then - result = select_style(modelstyles, 1, dataset_info@dataset) - else if (flag .eq. "dashes") then - result = select_style(modelstyles, 2, dataset_info@dataset) - else if (flag .eq. "thicks") then - result = toint(select_style(modelstyles, 3, dataset_info@dataset)) - else if (flag .eq. "annots") then - result = unique_labels_min((/"dataset", "ensemble", "exp"/)) - else if (flag .eq. "avgstd") then - result = toint(select_style(modelstyles, 4, dataset_info@dataset)) - else - print("fatal: (project_style_CMIP5) flag " + flag + " not allowed.") - status_exit(1) - end if - end if - end if - end if - end if - - leave_msg(scriptname, funcname) - return(result) - -end - ; ############################################################################# undef("place_debuginfo") function place_debuginfo(wks[1]:graphic, @@ -651,6 +490,10 @@ begin loc_str = str_sub_str(loc_str, "u", "~F33~m~F21~") end if + if (isStrSubset(loc_str, "degC")) then + loc_str = str_sub_str(loc_str, "deg", "~F35~J~F~") + end if + leave_msg(scriptname, funcname) return(loc_str) diff --git a/esmvaltool/diag_scripts/shared/plot/styles/cmip5.style b/esmvaltool/diag_scripts/shared/plot/styles/cmip5.style index 63a35c8a76..836a1a5b2d 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles/cmip5.style +++ b/esmvaltool/diag_scripts/shared/plot/styles/cmip5.style @@ -95,14 +95,15 @@ MERRA | 0, 0, 0 | 8 | 3 | 16 | 1 ESACCI-AEROSOL | 0, 0, 0 | 0 | 3 | 16 | 1 ESACCI-CLOUD | 255, 0, 0 | 0 | 3 | 0 | 0 MODIS | 0, 255, 0 | 0 | 3 | 16 | 1 -NIWA | 0, 0, 0 | 1 | 3 | 8 | 1 +NIWA-BS | 0, 0, 0 | 1 | 3 | 8 | 1 BDBP | 0, 0, 0 | 1 | 3 | 4 | 1 IGAG/SPARC | 248, 154, 28 | 0 | 3 | 16 | 0 -PATMOS | 0, 0, 255 | 0 | 3 | 0 | 0 +PATMOS-x | 0, 0, 255 | 0 | 3 | 0 | 0 CLARA-A2 | 0, 255, 255 | 0 | 3 | 0 | 0 ERA-Interim | 255, 200, 0 | 0 | 3 | 0 | 0 ESACCI-SSMI | 0, 0, 0 | 0 | 4 | 0 | 0 ESACCI-AMSR | 0, 0, 0 | 1 | 4 | 0 | 0 NSIDC-NT | 0, 0, 0 | 2 | 3 | 0 | 0 NSIDC-BT | 0, 0, 0 | 12 | 3 | 0 | 0 +HadCRUT4 | 0, 0, 0 | 0 | 3 | 0 | 0 default | 0, 0, 0 | 0 | 1 | 16 | 0 diff --git a/esmvaltool/diag_scripts/shared/plot/styles/cmip5_esa.style b/esmvaltool/diag_scripts/shared/plot/styles/cmip5_esa.style index a70bc4973c..9add6da800 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles/cmip5_esa.style +++ b/esmvaltool/diag_scripts/shared/plot/styles/cmip5_esa.style @@ -95,10 +95,10 @@ MERRA | 0, 0, 0 | 8 | 3 | 16 | 1 ESACCI-AEROSOL | 0, 0, 0 | 0 | 3 | 16 | 1 ESACCI-CLOUD | 255, 0, 0 | 0 | 3 | 0 | 0 MODIS | 0, 255, 0 | 0 | 3 | 16 | 1 -NIWA | 0, 0, 0 | 1 | 3 | 8 | 1 +NIWA-BS | 0, 0, 0 | 1 | 3 | 8 | 1 BDBP | 0, 0, 0 | 1 | 3 | 4 | 1 IGAG/SPARC | 248, 154, 28 | 0 | 3 | 16 | 0 -PATMOS | 0, 0, 255 | 0 | 3 | 0 | 0 +PATMOS-x | 0, 0, 255 | 0 | 3 | 0 | 0 CLARA-A2 | 0, 255, 255 | 0 | 3 | 0 | 0 ERA-Interim | 255, 200, 0 | 0 | 3 | 0 | 0 ESACCI-SSMI | 0, 0, 0 | 0 | 
20 | 0 | 0 diff --git a/esmvaltool/diag_scripts/shared/plot/styles/cmip6.style b/esmvaltool/diag_scripts/shared/plot/styles/cmip6.style new file mode 100644 index 0000000000..f525fd9376 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/styles/cmip6.style @@ -0,0 +1,35 @@ +############################################################################### +# CMIP6 STYLES - PRELIMINARY +############################################################################### +# This file defines the plot attributes for the CMIP6 models. +# +# MODEL NAME: must be the same as given in the main recipe. +# COLOR: can be either an NCL named color, a RGB code (size 3) on a 0-255 scale +# or a RGBA code (size 4) with the last element indicating the opacity +# on a 0-1 scale (0 = transparent, 1 = full), see +# http://www.ncl.ucar.edu/Applications/Scripts/rgb.txt +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGB +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGBA +# https://www.ncl.ucar.edu/Document/Graphics/create_color_table.shtml +# DASH: the dash pattern index lines, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/dashpatterns.png +# THICK: the line thickness (NCL default is 1) +# MARK: marker index for markers, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/markers.png +# AVG-STD: 0 = takes part in the calculation of mean and stddev +# 1 = does not take part in the calculation of mean and stddev +# (usually 0 for models, 1 for observations/reanalysis) +# +# Mind the formatting: columns are separated by the | symbol, colors given +# as RGB/RGBA must be comma-separated. +############################################################################### +MODEL NAME # COLOR # DASH # THICK # MARK # AVG-STD +############################################################################### +CNRM-CM6-1 | 30, 76, 36 | 1 | 1 | 6 | 0 +GISS-E2-1-G | 119, 29, 123 | 0 | 1 | 4 | 0 +GFDL-AM4 | 35, 54, 109 | 0 | 1 | 4 | 0 +IPSL-CM6A-LR | 91, 83, 174 | 0 | 1 | 4 | 0 +IPSL-CM6A-MR | 91, 83, 174 | 1 | 1 | 6 | 0 +IPSL-CM6B-LR | 91, 83, 174 | 2 | 1 | 7 | 0 +MIROC6 | 184, 95, 182 | 1 | 1 | 6 | 0 +default | 0, 0, 0 | 0 | 1 | 16 | 0 diff --git a/esmvaltool/diag_scripts/shared/plot/styles/go.style b/esmvaltool/diag_scripts/shared/plot/styles/go.style new file mode 100644 index 0000000000..2f06adf3c5 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/styles/go.style @@ -0,0 +1,35 @@ +############################################################################### +# GO STYLES +############################################################################### +# This file defines the plot attributes for the GO models.
+# COLOR: can be either an NCL named color, a RGB code (size 3) on a 0-255 scale +# or a RGBA code (size 4) with the last element indicating the opacity +# on a 0-1 scale (0 = transparent, 1 = full), see +# http://www.ncl.ucar.edu/Applications/Scripts/rgb.txt +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGB +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGBA +# https://www.ncl.ucar.edu/Document/Graphics/create_color_table.shtml +# DASH: the dash pattern index lines, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/dashpatterns.png +# THICK: the line thickness (NCL default is 1) +# MARK: marker index for markers, see +# https://www.ncl.ucar.edu/Document/Graphics/Images/markers.png +# AVG-STD: 0 = takes part in the calculation of mean and stddev +# 1 = does not take part in the calculation of mean and stddev +# (usually 0 for models, 1 for observations/reanalysis) +# +# Mind the formatting: columns are separated by the | symbol, colors given +# as RGB/RGBA must be comma-separated. +############################################################################### +MODEL NAME # COLOR # DASH # THICK # MARK # AVG-STD +############################################################################### +PlankTOM5 | 153, 0, 2 | 0 | 1 | 0 | 0 +PlankTOM10 | 196, 121, 0 | 1 | 1 | 0 | 0 +medusa | 152, 251, 152 | 1 | 1 | 0 | 0 +hadocc | 239, 85, 15 | 1 | 1 | 0 | 0 +ersem | 15, 85, 239 | 1 | 1 | 0 | 0 +ref | 0, 79, 0 | 0 | 1 | 0 | 0 +model_mean | 153, 0, 2 | 0 | 3 | 1 | 0 +unknown | 0, 0, 0 | 0 | 1 | 1 | 0 diff --git a/esmvaltool/diag_scripts/shared/plot/styles/righi15gmd.style b/esmvaltool/diag_scripts/shared/plot/styles/righi15gmd.style index ce2be85d21..871e0b5171 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles/righi15gmd.style +++ b/esmvaltool/diag_scripts/shared/plot/styles/righi15gmd.style @@ -41,5 +41,5 @@ NCEP | orange1 | 1 | 2 | 16 | 1 AIRS | orange1 | 1 | 2 | 16 | 1 AURA-TES | orange1 | 1 | 2 | 16 | 1 AURA-MLS-OMI | orange1 | 1 | 2 | 16 | 1 -NIWA | orange1 | 1 | 2 | 16 | 1 +NIWA-BS | orange1 | 1 | 2 | 16 | 1 model_mean | black | 0 | 3 | 16 | 1 diff --git a/esmvaltool/diag_scripts/shared/plot/styles_python/cmip5.yml b/esmvaltool/diag_scripts/shared/plot/styles_python/cmip5.yml index d441e7c487..7a68ebbfc9 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles_python/cmip5.yml +++ b/esmvaltool/diag_scripts/shared/plot/styles_python/cmip5.yml @@ -500,7 +500,7 @@ NCEP: facecolor: '#000000' mark: o thick: 3 -NIWA: +NIWA-BS: avgstd: 1 color: '#000000' dash: -- @@ -542,7 +542,7 @@ NorESM1-ME: facecolor: none mark: ^ thick: 1 -PATMOS: +PATMOS-x: avgstd: 0 color: '#0000ff' dash: '-' diff --git a/esmvaltool/diag_scripts/shared/plot/styles_python/cmip6.yml b/esmvaltool/diag_scripts/shared/plot/styles_python/cmip6.yml new file mode 100644 index 0000000000..b327d1e238 --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/styles_python/cmip6.yml @@ -0,0 +1,103 @@ +############################################################################### +# PYTHON STYLE FILE +# cmip6.yml +# created by 'convert_ncl_style.py'. +############################################################################### +# This file defines plot attributes for certain datasets.
+# +# Template: +# +# DATASET: +# avgstd: 0 +# color: '#ffffff' +# dash: -- +# facecolor: none +# mark: x +# thick: 1 +# +# DATASET: dataset name (same as given in the main recipe) +# avgstd: 0 = takes part in the calculation of mean and stddev +# 1 = does not take part in the calculation of mean and stddev +# (usually 0 for models, 1 for observations/reanalysis) +# color: hex RGB or RGBA string (e.g. #0f0fa1), see +# http://www.ncl.ucar.edu/Applications/Scripts/rgb.txt +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGB +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGBA +# dash: matplotlib dash pattern (linestyle) +# facecolor: Color of the filling (none = no filling) +# mark: marker string for markers (used in matplotlib), see +# https://matplotlib.org/api/markers_api.html +# https://www.ncl.ucar.edu/Document/Graphics/Images/markers.png +# thick: line thickness +############################################################################### + +BCC-CSM2-MR: + avgstd: 0 + color: '#94191e' + dash: '-' + facecolor: none + mark: o + thick: 1 +CNRM-CM6-1: + avgstd: 0 + color: '#1e4c24' + dash: -- + facecolor: none + mark: s + thick: 1 +GFDL-AM4: + avgstd: 0 + color: '#23366d' + dash: '-' + facecolor: none + mark: o + thick: 1 +GISS-E2-1-G: + avgstd: 0 + color: '#771d7b' + dash: '-' + facecolor: none + mark: o + thick: 1 +IPSL-CM6A-LR: + avgstd: 0 + color: '#5b53ae' + dash: '-' + facecolor: none + mark: o + thick: 1 +IPSL-CM6A-MR: + avgstd: 0 + color: '#5b53ae' + dash: -- + facecolor: none + mark: s + thick: 1 +IPSL-CM6B-LR: + avgstd: 0 + color: '#5b53ae' + dash: ':' + facecolor: none + mark: ^ + thick: 1 +MIROC6: + avgstd: 0 + color: '#b85fb6' + dash: -- + facecolor: none + mark: s + thick: 1 +MRI-ESM2-0: + avgstd: 0 + color: '#adff2f' + dash: -- + facecolor: none + mark: ^ + thick: 1 +default: + avgstd: 0 + color: '#000000' + dash: '-' + facecolor: '#000000' + mark: o + thick: 1 diff --git a/esmvaltool/diag_scripts/shared/plot/styles_python/convert_ncl_style.py b/esmvaltool/diag_scripts/shared/plot/styles_python/convert_ncl_style.py index 1e44f2ab89..12bf2d3fbb 100644 --- a/esmvaltool/diag_scripts/shared/plot/styles_python/convert_ncl_style.py +++ b/esmvaltool/diag_scripts/shared/plot/styles_python/convert_ncl_style.py @@ -1,7 +1,5 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- - - """Script to convert nvl style files to python style files. Example @@ -13,20 +11,20 @@ Global attributes ----------------- INPUT_FILE : str - Path to the ncl style file. + Name of the ncl style file. OUTPUT_FILE : str - Path to the new python style file (yml format). + Name of the new python style file (yml format). 
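The YAML flavour of these style files maps almost one-to-one onto matplotlib keyword arguments, which is presumably how the Python diagnostics consume them. A minimal sketch, assuming cmip6.yml sits in the current directory; the plotted values are dummy data.

import yaml
import matplotlib.pyplot as plt

with open('cmip6.yml') as infile:
    styles = yaml.safe_load(infile)

# Fall back to the 'default' entry for datasets without their own style.
style = styles.get('MIROC6', styles['default'])
plt.plot([2015, 2050, 2100], [0.2, 1.1, 2.4],  # dummy data
         color=style['color'], linestyle=style['dash'],
         linewidth=style['thick'], marker=style['mark'],
         markerfacecolor=style['facecolor'], label='MIROC6')
plt.legend()
plt.show()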
""" - import os -import yaml +import yaml # Global variables -INPUT_FILE = '../styles/cmip5.style' -OUTPUT_FILE = 'cmip5.yml' +INPUT_FILE = 'cmip6.style' +OUTPUT_FILE = 'cmip6.yml' +BASE_DIR = os.path.dirname(os.path.realpath(__file__)) HEADER_FILE = 'style_header' DATASET = 'dataset' @@ -79,7 +77,8 @@ def read_line(line): '13': 'h', '14': '.', '15': 'x', - '16': 'o'} + '16': 'o', + } info = shape.get(info, 'o') # Convert dash index to matplotlib dash marker @@ -101,7 +100,8 @@ def read_line(line): '13': '--', '14': '--', '15': '--', - '16': '--'} + '16': '--', + } info = dash.get(info, '-') # Convert str to int @@ -116,8 +116,8 @@ def read_line(line): def read_ncl_style(file_name): """Read ncl style file.""" output = [] - with open(file_name, 'r') as file: - for line in file: + with open(file_name, 'r') as file_: + for line in file_: line = line.strip() # Ignore commentary lines @@ -131,6 +131,7 @@ def read_ncl_style(file_name): # Read line output.append(read_line(line)) + print("Read '{}'".format(file_name)) # Convert list to dictionary output_dict = {} @@ -144,15 +145,21 @@ def read_ncl_style(file_name): def write_yml_file(dataset_info, file_name): """Write configuration file.""" + header_path = os.path.join(BASE_DIR, HEADER_FILE) with open(file_name, 'w') as outfile: - with open(HEADER_FILE, 'r') as header_file: + with open(header_path, 'r') as header_file: header = header_file.read() - outfile.write(header.format(output_file=OUTPUT_FILE, - script=os.path.basename(__file__))) - yaml.dump(dataset_info, outfile, default_flow_style=False) + outfile.write( + header.format( + output_file=OUTPUT_FILE, script=os.path.basename(__file__))) + yaml.safe_dump(dataset_info, outfile, default_flow_style=False) + print("Wrote '{}'".format(file_name)) # Execute script if called directly if __name__ == '__main__': - STYLES = read_ncl_style(INPUT_FILE) - write_yml_file(STYLES, OUTPUT_FILE) + INPUT_PATH = os.path.normpath( + os.path.join(BASE_DIR, '..', 'styles', INPUT_FILE)) + OUTPUT_PATH = os.path.join(BASE_DIR, OUTPUT_FILE) + STYLES = read_ncl_style(INPUT_PATH) + write_yml_file(STYLES, OUTPUT_PATH) diff --git a/esmvaltool/diag_scripts/shared/plot/styles_python/cox18nature.yml b/esmvaltool/diag_scripts/shared/plot/styles_python/cox18nature.yml new file mode 100644 index 0000000000..7e167b85ca --- /dev/null +++ b/esmvaltool/diag_scripts/shared/plot/styles_python/cox18nature.yml @@ -0,0 +1,226 @@ +############################################################################### +# PYTHON STYLE FILE +# cox18nature.yml +# created by 'convert_ncl_style.py'. +############################################################################### +# This file defines plot attributes for certain datasets. +# +# Template: +# +# DATASET: +# avgstd: 0 +# color: '#ffffff' +# dash: -- +# facecolor: none +# mark: x +# thick: 1 +# +# DATASET: dataset name (same as given in the main recipe) +# avgstd: 0 = takes part in the calculation of mean and stddev +# 1 = does not take part in the calculation of mean and stddev +# (usually 0 for models, 1 for observations/reanalysis) +# color: hex RGB or RGBA string (e.g. 
#0f0fa1), see +# http://www.ncl.ucar.edu/Applications/Scripts/rgb.txt +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGB +# https://www.ncl.ucar.edu/Document/glossary.shtml#RGBA +# dash: matplotlib dash pattern (linestyle) +# facecolor: Color of the filling (none = no filling) +# mark: marker string for markers (used in matplotlib), see +# https://matplotlib.org/api/markers_api.html +# https://www.ncl.ucar.edu/Document/Graphics/Images/markers.png +# thick: line thickness +# size: Size of the marker +############################################################################### + +ACCESS1-0: + color: '#800060' + dash: '-' + thick: 1 + mark: $a$ + size: 8 + avgstd: 0 + facecolor: none +bcc-csm1-1: + color: '#009900' + dash: '-' + thick: 1 + mark: $n$ + size: 7 + avgstd: 0 + facecolor: none +bcc-csm1-1-m: + color: '#009900' + dash: -- + thick: 1 + mark: $n^x$ + size: 7 + avgstd: 0 + facecolor: none +BNU-ESM: + color: '#009900' + dash: '-' + thick: 1 + mark: $p$ + size: 10 + avgstd: 0 + facecolor: none +CanESM2: + color: '#009900' + dash: -- + thick: 1 + mark: $b$ + size: 10 + avgstd: 0 + facecolor: none +CCSM4: + color: '#009900' + dash: '-' + thick: 1 + mark: $c$ + size: 8 + avgstd: 0 + facecolor: none +CNRM-CM5: + color: '#009900' + dash: '-' + thick: 1 + mark: $d$ + size: 10 + avgstd: 0 + facecolor: none +CSIRO-Mk3-6-0: + color: '#800060' + dash: '-' + thick: 1 + mark: $e$ + size: 8 + avgstd: 0 + facecolor: none +GFDL-CM3: + color: '#800060' + dash: -- + thick: 1 + mark: $f^y$ + size: 10 + avgstd: 0 + facecolor: none +GFDL-ESM2G: + color: '#009900' + dash: ':' + thick: 1 + mark: $f^x$ + size: 10 + avgstd: 0 + facecolor: none +GFDL-ESM2M: + color: '#009900' + dash: '-.' + thick: 1 + mark: $f$ + size: 10 + avgstd: 0 + facecolor: none +GISS-E2-H: + color: '#009900' + dash: '-' + thick: 1 + mark: $o^x$ + size: 8 + avgstd: 0 + facecolor: none +GISS-E2-R: + color: '#009900' + dash: ':' + thick: 1 + mark: $o$ + size: 8 + avgstd: 0 + facecolor: none +HadGEM2-ES: + color: '#800060' + dash: '-.' 
+ thick: 1 + mark: $g$ + size: 10 + avgstd: 0 + facecolor: none +inmcm4: + color: '#009900' + dash: '-' + thick: 1 + mark: $h$ + size: 10 + avgstd: 0 + facecolor: none +IPSL-CM5A-LR: + color: '#800060' + dash: '-' + thick: 1 + mark: $i^x$ + size: 10 + avgstd: 0 + facecolor: none +IPSL-CM5B-LR: + color: '#009900' + dash: ':' + thick: 1 + mark: $i$ + size: 10 + avgstd: 0 + facecolor: none +MIROC5: + color: '#009900' + dash: -- + thick: 1 + mark: $j^x$ + size: 10 + avgstd: 0 + facecolor: none +MIROC-ESM: + color: '#800060' + dash: ':' + thick: 1 + mark: $j$ + size: 10 + avgstd: 0 + facecolor: none +MPI-ESM-LR: + color: '#009900' + dash: '-' + thick: 1 + mark: $k$ + size: 10 + avgstd: 0 + facecolor: none +MRI-CGCM3: + color: '#009900' + dash: '-' + thick: 1 + mark: $l$ + size: 10 + avgstd: 0 + facecolor: none +NorESM1-M: + color: '#009900' + dash: '-' + thick: 1 + mark: $m$ + size: 10 + avgstd: 0 + facecolor: none +OBS: + color: '#000000' + dash: '-' + thick: 1 + mark: o + size: 8 + avgstd: 0 + facecolor: '#000000' +default: + color: '#000000' + dash: '-' + thick: 1 + mark: o + size: 8 + avgstd: 0 + facecolor: '#000000' diff --git a/esmvaltool/diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl b/esmvaltool/diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl index 8fd7996fb9..94aab7ecc9 100644 --- a/esmvaltool/diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl +++ b/esmvaltool/diag_scripts/shared/plot/taylor_diagram_less_hardcoded.ncl @@ -167,7 +167,8 @@ begin dist = rr + 1.0 ; uncertainty circle has to be cropped ; need to find intersection between uncertainty circule and outer arc - if (dist .gt. max(rxy@tmXBValues)) then +; if (dist .gt. max(rxy@tmXBValues)) then + if (dist .gt. xyMax) then xtmp = (xyMax ^ 2 - rr ^ 2 + 1.0) / 2.0 ytmp = sqrt(xyMax ^ 2 - xtmp ^ 2) xtmp = xtmp - 1.0 diff --git a/esmvaltool/diag_scripts/shared/plot/taylor_plot.ncl b/esmvaltool/diag_scripts/shared/plot/taylor_plot.ncl index 79b5ac2253..c01851840d 100644 --- a/esmvaltool/diag_scripts/shared/plot/taylor_plot.ncl +++ b/esmvaltool/diag_scripts/shared/plot/taylor_plot.ncl @@ -10,12 +10,12 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/set_operators.ncl" -load "./diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" ; ############################################################################# @@ -75,13 +75,13 @@ begin defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/) if (any(varname .eq. 
defaults)) then grade = att2var(data, "var") - diag_script = att2var(data, "diag_script") + DIAG_SCRIPT = att2var(data, "diag_script") else grade = varname end if ; Check if a valid wks has been provided, otherwise invoke default - wks = get_wks(wks_in, diag_script, varname) + wks = get_wks(wks_in, DIAG_SCRIPT, varname) ; Set main quantities RATIO = data(:, :, 0) diff --git a/esmvaltool/diag_scripts/shared/plot/vector_scalar_map_polar.ncl b/esmvaltool/diag_scripts/shared/plot/vector_scalar_map_polar.ncl index 171c0ec83c..c3635568c6 100644 --- a/esmvaltool/diag_scripts/shared/plot/vector_scalar_map_polar.ncl +++ b/esmvaltool/diag_scripts/shared/plot/vector_scalar_map_polar.ncl @@ -9,10 +9,10 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" ; ############################################################################# undef("vector_scalar_map_polar") diff --git a/esmvaltool/diag_scripts/shared/plot/xy_line.ncl b/esmvaltool/diag_scripts/shared/plot/xy_line.ncl index 408382b9ea..31e159195c 100644 --- a/esmvaltool/diag_scripts/shared/plot/xy_line.ncl +++ b/esmvaltool/diag_scripts/shared/plot/xy_line.ncl @@ -10,18 +10,15 @@ ; function aerosol_profile ; function aerosol_sizedist ; procedure xy_line +; procedure xy_line_anom ; function timeseries_station ; function cycle_plot ; function errorbar_plot ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" - -load "./diag_scripts/shared/set_operators.ncl" - -load "./diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" ; ############################################################################# undef("profile_plev") @@ -284,11 +281,12 @@ begin if (.not.isatt(diag_script_info, "styleset")) then diag_script_info@styleset = "DEFAULT" end if - colors = project_style(diag_script_info, "colors") - dashes = project_style(diag_script_info, "dashes") - thicks = tofloat(project_style(diag_script_info, "thicks")) - annots = project_style(diag_script_info, "annots") - avgstd = project_style(diag_script_info, "avgstd") + ; FIX-ME: project_style function has changed + ; colors = project_style(diag_script_info, "colors") + ; dashes = project_style(diag_script_info, "dashes") + ; thicks = tofloat(project_style(diag_script_info, "thicks")) + ; annots = project_style(diag_script_info, "annots") + ; avgstd = project_style(diag_script_info, "avgstd") ; Set default ressources res = True @@ -561,8 +559,9 @@ begin wks = get_wks(wks_in, diag_script, var) ; Define line colors (first color is used for the observations) - colors = project_style(diag_script_info, "colors") - annots = project_style(diag_script_info, "annots") + ; FIX-ME: project_style function has changed + ; colors = project_style(diag_script_info, "colors") + ; annots = project_style(diag_script_info, "annots") gsn_define_colormap(wks, array_append_record((/"white", "black"/), \ colors, 0)) @@ -1064,8 +1063,9 @@ begin wks = get_wks(wks_in, diag_script, var) ; Define line colors (first color is used for the observations) - colors = project_style(diag_script_info, "colors") - annots = 
project_style(diag_script_info, "annots") + ; FIX-ME: project_style function has changed + ; colors = project_style(diag_script_info, "colors") + ; annots = project_style(diag_script_info, "annots") gsn_define_colormap(wks, array_append_record((/"white", "black"/), \ colors, 0)) @@ -1473,17 +1473,16 @@ procedure xy_line(wks[1], source, source_x, source_stddev, - res_in : logical, - debuginfo[1] : logical) + res_in:logical, + items:list) ; ; Arguments: ; wks: workstation, must be passed - no default used yet! ; source: data to be plotted (no netCDF input possible yet) ; source_x: x-axis of array to be plotted (e.g. source&time, ... ) -; source_stddev: standard deviation of input, needed if -; diag_script_info@multi_model_mean is set to "y" +; source_stddev: standard deviation of input, needed for multi_model_mean ; res_in: diag_script-specific resources passed from diag_script -; debuginfo: description about diagnostic rendered onto plot +; items: list of input_file_info metadata ; ; Source prototype ; @@ -1512,40 +1511,42 @@ begin ; Select colors and other plotting attributes ; (see ./diag_scripts/shared/plot/style.ncl) - colors = project_style(diag_script_info, "colors") - dashes = project_style(diag_script_info, "dashes") - thicks = project_style(diag_script_info, "thicks") - annots = project_style(diag_script_info, "annots") - avgstd = project_style(diag_script_info, "avgstd") + colors = project_style(items, diag_script_info, "colors") + dashes = project_style(items, diag_script_info, "dashes") + thicks = project_style(items, diag_script_info, "thicks") + annots = project_style(items, diag_script_info, "annots") + avgstd = project_style(items, diag_script_info, "avgstd") + ; FIX-ME: no longer needed, as multi-model is now a dataset (?) ; Select colors and other plotting attributes for multi-model mean - if (diag_script_info@multi_model_mean .eq. "y") then - ; Project_style evaluates metadata of variable "dataset_info" - temp = dataset_info - - ; -> keep original "dataset_info" in "temp" and restore later - copy_VarMeta(dataset_info, temp) - delete(dataset_info) - - ; Use "dataset_info" to pass on attribute names - dataset_info = getvaratts(temp) - do i = 0, dimsizes(dataset_info) - 1 - ; Define all original attributes again, but empty - dataset_info@$dataset_info(i)$ = "" - end do - dataset_info@dataset = "model_mean" - - ; See ./diag_scripts/shared/plot/style.ncl - colors_mmm = project_style(diag_script_info, "colors") - dashes_mmm = project_style(diag_script_info, "dashes") - thicks_mmm = project_style(diag_script_info, "thicks") - annots_mmm = project_style(diag_script_info, "annots") - avgstd_mmm = project_style(diag_script_info, "avgstd") - delete(dataset_info) - dataset_info = temp ; restore original "dataset_info" - copy_VarMeta(temp, dataset_info) - delete(temp) - end if + ; if (diag_script_info@multi_model_mean .eq. 
"y") then + ; ; Project_style evaluates metadata of variable "input_file_info" + ; temp = input_file_info + + ; ; -> keep original "input_file_info" in "temp" and restore later + ; copy_VarMeta(input_file_info, temp) + ; delete(input_file_info) + + ; ; Use "input_file_info" to pass on attribute names + ; input_file_info = getvaratts(temp) + ; do i = 0, dimsizes(input_file_info) - 1 + ; ; Define all original attributes again, but empty + ; input_file_info@$input_file_info(i)$ = "" + ; end do + ; input_file_info@dataset = "model_mean" + + ; ; See ./diag_scripts/shared/plot/style.ncl + ; ; FIX-ME: project_style function has changed + ; ; colors_mmm = project_style(diag_script_info, "colors") + ; ; dashes_mmm = project_style(diag_script_info, "dashes") + ; ; thicks_mmm = project_style(diag_script_info, "thicks") + ; ; annots_mmm = project_style(diag_script_info, "annots") + ; ; avgstd_mmm = project_style(diag_script_info, "avgstd") + ; delete(input_file_info) + ; input_file_info = temp ; restore original "input_file_info" + ; copy_VarMeta(temp, input_file_info) + ; delete(temp) + ; end if ; Set/copy resources res = True @@ -1566,27 +1567,27 @@ begin ; Plot plot = gsn_csm_xy(wks, source_x, source, res) - ; Add multi model mean and stddev - if (diag_script_info@multi_model_mean .eq. "y") then - ; Stddev - res_stddev = True - copy_VarMeta(res, res_stddev) - res_stddev@gsnXYFillColors = "LightGrey" - delete(res_stddev@xyLineColors) - - ; We don't want the line, so make it transparent. - res_stddev@xyLineColor = -1 - shading_plot = gsn_csm_xy(wks, source_x, source_stddev(2:3, :), \ - res_stddev) - overlay(plot, shading_plot) - ; MMM - delete([/res@xyLineThicknesses, res@xyLineColors, res@xyDashPatterns/]) - res@xyLineThicknesses = thicks_mmm - res@xyLineColors = colors_mmm - res@xyDashPatterns = dashes_mmm - mmm = gsn_csm_xy(wks, source_x, source_stddev(0, :), res) - overlay(plot, mmm) - end if + ; ; Add multi model mean and stddev + ; if (diag_script_info@multi_model_mean .eq. "y") then + ; ; Stddev + ; res_stddev = True + ; copy_VarMeta(res, res_stddev) + ; res_stddev@gsnXYFillColors = "LightGrey" + ; delete(res_stddev@xyLineColors) + + ; ; We don't want the line, so make it transparent. + ; res_stddev@xyLineColor = -1 + ; shading_plot = gsn_csm_xy(wks, source_x, source_stddev(2:3, :), \ + ; res_stddev) + ; overlay(plot, shading_plot) + ; ; MMM + ; delete([/res@xyLineThicknesses, res@xyLineColors, res@xyDashPatterns/]) + ; res@xyLineThicknesses = thicks_mmm + ; res@xyLineColors = colors_mmm + ; res@xyDashPatterns = dashes_mmm + ; mmm = gsn_csm_xy(wks, source_x, source_stddev(0, :), res) + ; overlay(plot, mmm) + ; end if ; Resources for a customized legend. lgres = True @@ -1604,23 +1605,23 @@ begin diag_script_info@EMs_in_lg = True ; Set default end if if (.not.diag_script_info@EMs_in_lg) then - idcs_modelnames = UNIQ(dataset_info@dataset) + idcs_modelnames = annots colors := colors(idcs_modelnames) dashes := dashes(idcs_modelnames) thicks := thicks(idcs_modelnames) - annots := dataset_info@dataset(idcs_modelnames) - end if - if (diag_script_info@multi_model_mean .eq. 
"y") then - lgres@lgLineColors = array_append_record(colors, colors_mmm, 0) - lgres@lgDashIndexes = array_append_record(dashes, dashes_mmm, 0) - lgres@lgLineThicknesses = array_append_record(thicks, thicks_mmm, 0) + 0.5 - labels = array_append_record(annots, annots_mmm, 0) - else - lgres@lgLineColors = colors - lgres@lgDashIndexes = dashes - lgres@lgLineThicknesses = thicks + 0.5 - labels = annots - end if + annots := input_file_info@dataset(idcs_modelnames) + end if + ; if (diag_script_info@multi_model_mean .eq. "y") then + ; lgres@lgLineColors = array_append_record(colors, colors_mmm, 0) + ; lgres@lgDashIndexes = array_append_record(dashes, dashes_mmm, 0) + ; lgres@lgLineThicknesses = \ + ; array_append_record(thicks, thicks_mmm, 0) + 0.5 + ; labels = array_append_record(annots, annots_mmm, 0) + ; else + lgres@lgLineColors = colors + lgres@lgDashIndexes = dashes + lgres@lgLineThicknesses = thicks + 0.5 + labels = annots nitems = dimsizes(labels) lgres@lgItemOrder = ispan(nitems - 1, 0, 1) @@ -1708,14 +1709,6 @@ begin ; Redraw plot borders since the legend may (partially) cover some ; of the borders drawn in the first 'plot' draw(blank_plot2) - if (debuginfo) then - txres = True - txres@txFuncCode = "~" - debugbox = write_info(debuginfo) - am_infobox_id = place_debuginfo(wks, debugbox, txres, blank_plot2) - drawNDCGrid(wks) - place_description(wks, debuginfo@description, debuginfo@description_ycoord) - end if frame(wks) @@ -1730,6 +1723,254 @@ begin end +; ############################################################################# +undef("xy_line_anom") +procedure xy_line_anom(wks[1], + source_mean, + source, + source_x, + source_stddev, + ref_start, + ref_end, + res_in: logical, + res0_in: logical, + items: list) +; +; Arguments: +; wks: workstation, must be passed - no default used yet! +; source_mean: source_mean +; source: data to be plotted (no netCDF input possible yet) +; source_x: x-axis of array to be plotted (e.g. source&time, ... ) +; source_stddev: standard deviation of input, needed multi_model_mean +; ref_start: start year of the reference dataset +; ref_end: end year of the reference dataset +; res_in: diag_script-specific resources passed from diag_script +; res0_in: res0_in +; items: list of input_file_info metadata +; +; Source prototype +; +; Description +; Defines default ressources, which are overridden by argument res. +; Creates an xy-plot, according to wks & res. +; Adds multi model mean and standard deviation if +; diag_script_info@multi_model_mean is set to "y". 
+; +; Caveats +; +; Modification history +; 20160822_A_bock_li: written +; + +local funcname, scriptname, verbosity, res, res_in, res_stddev, source, \ + source_x, source_stddev, wks, wks_in, colors, colors_mm, dashes, \ + dashes_mm, thicks, thicks_mm, annots, annots_mm, avgstd, avgstd_mm, temp, \ + plot, shading_plot, mm, lgres, nitems, lbid, amres, annoid, labels, \ + psres, vpx, vph, vpy, vpw, bpres, tmborder + +begin + + funcname = "xy_line_anom" + scriptname = "plot_scripts/ncl/xy_line.ncl" + enter_msg(scriptname, funcname) + + ; Select colors and other plotting attributes + ; (see ./diag_scripts/lib/ncl/style.ncl) + colors = project_style(items, diag_script_info, "colors") + dashes = project_style(items, diag_script_info, "dashes") + thicks = project_style(items, diag_script_info, "thicks") + annots = project_style(items, diag_script_info, "annots") + avgstd = project_style(items, diag_script_info, "avgstd") + + ; individual case for HadCRUT4 observations + ; FIX-ME: mean value comes from climatology file (absolute.nc). + ; There are no missing values as in the anomaly data. + datasetnames = metadata_att_as_array(items, "dataset") + if (any(datasetnames.eq."HadCRUT4-clim")) then + ind_wo_clim = ind(datasetnames .ne. "HadCRUT4-clim") + tmp1 = colors(ind_wo_clim) + delete(colors) + colors = tmp1 + tmp2 = dashes(ind_wo_clim) + delete(dashes) + dashes = tmp2 + tmp3 = thicks(ind_wo_clim) + delete(thicks) + thicks = tmp3 + tmp4 = annots(ind_wo_clim) + delete(annots) + annots = tmp4 + tmp5 = avgstd(ind_wo_clim) + delete(avgstd) + avgstd = tmp5 + delete([/tmp1, tmp2, tmp3, tmp4, tmp5/]) + end if + + ; ************************************************ + ; plotting parameters + ; ************************************************ + + plot = new(1, graphic) + + res0 = True + res0@gsnDraw = False + res0@gsnFrame = False + res0@vpHeightF = 0.5 + res0@vpWidthF = 0.05 + res0@trYMinF = \ + min(source_mean) - 0.05 * (max(source_mean) - min(source_mean)) + res0@trYMaxF = \ + max(source_mean) + 0.05 * (max(source_mean) - min(source_mean)) + res0@trXMinF = 0. + res0@trXMaxF = 2. + res0@tmXTOn = False + res0@tmXBOn = False + res0@tmYLLabelsOn = False + res0@tmYRLabelsOn = True + res0@tmYRLabelFontHeightF = 0.016 + res0@tmYLLabelFontHeightF = 0.016 + res0@tiXAxisFontHeightF = 0.016 + res0@tiYAxisFontHeightF = 0.016 + res0@tiYAxisSide = "Right" + res0@tiYAxisAngleF = 90. 
+ res0@tiYAxisOn = True + res0@pmLegendDisplayMode = "Never" + res0@tmXBMajorOutwardLengthF = 0.006 + res0@tmYLMajorOutwardLengthF = 0.006 + res0@tmXBMinorOutwardLengthF = 0.003 + res0@tmYLMinorOutwardLengthF = 0.003 + res0@tmXBMajorLengthF = 0.006 + res0@tmYLMajorLengthF = 0.006 + res0@tmXBMinorLengthF = 0.003 + res0@tmYLMinorLengthF = 0.003 + + res0@xyDashPatterns = dashes + res0@xyLineThicknesses = thicks + res0@xyLineColors = colors + + copy_VarMeta(res0_in, res0) ; copy passed resources + + res = True + res@gsnDraw = False + res@gsnFrame = False + + res@vpXF = 0.05 + res@vpYF = 0.7 + res@vpHeightF = 0.4 + res@vpWidthF = 0.7 + res@pmLegendDisplayMode = "Never" + res@tmYRLabelFontHeightF = 0.016 + res@tmYLLabelFontHeightF = 0.016 + res@tiXAxisFontHeightF = 0.016 + res@tiYAxisFontHeightF = 0.016 + res@tmXBMajorOutwardLengthF = 0.006 + res@tmYLMajorOutwardLengthF = 0.006 + res@tmXBMinorOutwardLengthF = 0.003 + res@tmYLMinorOutwardLengthF = 0.003 + res@tmXBMajorLengthF = 0.006 + res@tmYLMajorLengthF = 0.006 + res@tmXBMinorLengthF = 0.003 + res@tmYLMinorLengthF = 0.003 + + res@trYMinF = min(source) - 0.05 * (max(source) - min(source)) + res@trYMaxF = max(source) + 0.05 * (max(source) - min(source)) + res@tiYAxisOn = True + res@tiXAxisString = "Year" + res@gsnStringFontHeightF = 0.016 + + res@xyDashPatterns = dashes + res@xyLineThicknesses = thicks ; make 2nd lines thicker + res@xyLineColors = colors ; change line color + + copy_VarMeta(res_in, res) ; copy passed resources + + ; *************************************** + ; panel first two plots + ; *************************************** + + plot(0) = gsn_csm_xy(wks, source_x, source, res) ; create plot + + ; Add right panel with mean values in anomaly plot + + var = fspan(0., 2., 3) + mean = new((/dimsizes(source_mean), 3/), double) + mean(:, 0) = source_mean(:) + mean(:, 1) = source_mean(:) + mean(:, 2) = source_mean(:) + + plot2 = gsn_csm_xy(wks, var, mean, res0) ; create plot + + ; --------------------------------------------------------------------- + ; Procedure to attach a box to the given plot, given the lower left + ; corner, width, color, and opacity. + ; --------------------------------------------------------------------- + gsres = True + gsres@gsFillColor = "yellow" + gsres@gsFillOpacityF = 0.1 + xbox = (/ref_start, ref_end, ref_end, ref_start, ref_start/) + ybox = (/res@trYMinF, res@trYMinF, res@trYMaxF, res@trYMaxF, res@trYMinF/) + newplot00 = gsn_add_polygon(wks, plot(0), xbox, ybox, gsres) + + ; Draw some lines to create a legend + res_lines = True ; polyline mods desired + res_lines@tfPolyDrawOrder = "Predraw" + res_lines@gsLineColor = "grey" ; line color + res_lines@gsLineThicknessF = 1. ; line thicker + res_lines@gsLineDashPattern = 1. 
; dash pattern + + xx = (/res@trXMinF, res@trXMaxF/) + yy = (/0.0, 0.0/) + dum0 = gsn_add_polyline(wks, plot(0), xx, yy, res_lines) + newplot = gsn_attach_plots(plot(0), plot2, res, res0) + + ; *********************************************** + ; legend resources + ; *********************************************** + + lgres = True + lgres@lgItemType = "Lines" ; show lines only (default) + lgres@lgLabelFontHeightF = .05 ; set the legend label font height + lgres@vpWidthF = 0.15 ; width of legend (NDC) + lgres@vpHeightF = 0.5 ; height of legend (NDC) + lgres@lgPerimColor = "gray" ; draw the box perimeter in gray + lgres@lgPerimThicknessF = 1.0 ; thicken the box perimeter + + if (.not.isatt(diag_script_info, "EMs_in_lg")) then + diag_script_info@EMs_in_lg = True ; Set default + end if + if (.not.diag_script_info@EMs_in_lg) then + idcs_modelnames = annots + colors := colors(idcs_modelnames) + dashes := dashes(idcs_modelnames) + thicks := thicks(idcs_modelnames) + annots := models@name(idcs_modelnames) + end if + lgres@lgLineColors = colors + lgres@lgDashIndexes = dashes + lgres@lgLineThicknesses = thicks + 0.5 + labels = annots + nitems = dimsizes(labels) + lgres@lgItemOrder = ispan(nitems - 1, 0, 1) + + ; Create legend + lbid = gsn_create_legend(wks, nitems, labels, lgres) + + amres = True + amres@amParallelPosF = 0.81 + amres@amOrthogonalPosF = 0.0 + annoid1 = gsn_add_annotation(plot(0), lbid, amres) + + resP = True + resP@gsnMaximize = True + resP@gsnPaperOrientation = "portrait" + resP@gsnPaperMargin = 0.8 + + gsn_panel(wks, plot, (/1, 1/), resP) + + leave_msg(scriptname, funcname) + +end + ; ############################################################################# undef("timeseries_station") function timeseries_station(wks_in[1], @@ -1895,12 +2136,14 @@ end undef("cycle_plot") function cycle_plot(wks_in[1], source, - varname[1] : string) + varname[1]: string, + items: list) ; ; Arguments ; wks_in: workstations (graphic object or default will be used). ; source: data to be plotted or a NetCDF filename with data. ; varname: variable name in the file. +; items: list of input_file_info metadata ; ; Source prototype ; source(*, *, 2) @@ -1941,7 +2184,7 @@ begin defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/) if (any(varname .eq.
defaults)) then var = att2var(data, "var") - diag_script = att2var(data, "diag_script") + DIAG_SCRIPT = att2var(data, "diag_script") else var = varname end if @@ -1952,14 +2195,14 @@ begin ; Select colors and other plotting attributes from the project style files ; See ./diag_scripts/shared/plot/style.ncl - colors = project_style(diag_script_info, "colors") - dashes = project_style(diag_script_info, "dashes") - thicks = project_style(diag_script_info, "thicks") - annots = project_style(diag_script_info, "annots") - avgstd = project_style(diag_script_info, "avgstd") + colors = project_style(items, diag_script_info, "colors") + dashes = project_style(items, diag_script_info, "dashes") + thicks = project_style(items, diag_script_info, "thicks") + annots = project_style(items, diag_script_info, "annots") + avgstd = project_style(items, diag_script_info, "avgstd") ; Check if a valid wks has been provided, otherwise invoke default - wks_out = get_wks(wks_in, diag_script, var) + wks_out = get_wks(wks_in, DIAG_SCRIPT, var) ; Define default plot resources res = True diff --git a/esmvaltool/diag_scripts/shared/plot/zonalmean_profile.ncl b/esmvaltool/diag_scripts/shared/plot/zonalmean_profile.ncl index 0a10c71a96..273f0c6c17 100644 --- a/esmvaltool/diag_scripts/shared/plot/zonalmean_profile.ncl +++ b/esmvaltool/diag_scripts/shared/plot/zonalmean_profile.ncl @@ -8,13 +8,8 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" - -load "./diag_scripts/shared/set_operators.ncl" - -load "./diag_scripts/shared/plot/style.ncl" -load "./diag_scripts/shared/plot/aux_plotting.ncl" +load "$diag_scripts/shared/set_operators.ncl" +load "$diag_scripts/shared/plot/aux_plotting.ncl" ; ############################################################################# undef("zonalmean_profile") @@ -62,13 +57,13 @@ begin defaults = (/"default", "dummy", "dummy_for_var", "Default", "Dummy"/) if (any(varname .eq. defaults)) then var = att2var(data, "var") - diag_script = att2var(data, "diag_script") + DIAG_SCRIPT = att2var(data, "diag_script") else var = varname end if ; Check if a valid wks has been provided, otherwise invoke default - wks = get_wks(wks_in, diag_script, var) + wks = get_wks(wks_in, DIAG_SCRIPT, var) ; Define default plot resources res = True diff --git a/esmvaltool/diag_scripts/shared/regridding.ncl b/esmvaltool/diag_scripts/shared/regridding.ncl index 50b363f5a4..811598ab10 100644 --- a/esmvaltool/diag_scripts/shared/regridding.ncl +++ b/esmvaltool/diag_scripts/shared/regridding.ncl @@ -5,7 +5,6 @@ ; Check the header of each routine for documentation. 
; ; Contents: -; function find_destination_grid ; function guestimate_average_grid_area ; function get_lower_limits ; function get_upper_limits @@ -20,233 +19,13 @@ ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/constants.ncl" -load "./interface_scripts/data_handling.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/constants.ncl" +load "$diag_scripts/../interface_scripts/data_handling.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" load "$NCARG_ROOT/lib/ncarg/nclscripts/esmf/ESMF_regridding.ncl" -; ############################################################################# -undef("find_destination_grid") -function find_destination_grid(indexes[*]:integer, - var:string, - field:string, - opt[1]:string) -; -; Arguments -; indexes: a 1-D array of dataset indexes to be considered. -; var: variable name. -; field: field type. -; opt: type of grid to be selected: -; "coarsest": returns the lowest resolution grid. -; "finest": returns the highest resolution grid. -; -; Return value -; A 2-D or 3-D dummy variable representing the grid with the attached -; plev/lat/lon or lat/lon coordinates. -; -; Description: -; Given an array of datasets, returns the coordinates of the coarsest or -; finest grid, to be used as a destination grid in regridding routines. -; For the vertical coordinate, the extent is considered as first priority -; (to avoid loss of data). -; All datasets are expect to have the same rank and dimension sizes. -; -; Caveats -; The returned plev, lat, lon coordinates do not necessarily come from the -; same input dataset, i.e. it is possible to get latitude from one dataset -; longitude from another dataset. -; -; Modification history -; 20150113-A_gott_kl: check that all datasets have the same dimensions. -; 20150113-A_gott_kl: revised to allow data without lev or lon coordinate. -; 20140903-A_righ_ma: revised plev coordinate selection. -; 20140311-A_righ_ma: added plev coordinate. -; 20140212-A_righ_ma: written. -; -local funcname, scriptname, data, tmp, dnames, ref_nplev, ref_nlat, ref_nlon, \ - ref_plev, ref_lat, ref_lon, flag_lat, flag_lon, flags, flags0 -begin - - funcname = "find_destination_grid" - scriptname = "diag_scripts/shared/regridding.ncl" - enter_msg(scriptname, funcname) - - ; Check - if (opt.ne."coarsest" .and. opt.ne."finest") then - error_msg("f", "regridding.ncl", funcname, "unrecognized option " + opt) - end if - - ; Initialize - if (opt.eq."coarsest") then - ref_nplev = 100000 - ref_nlat = 100000 - ref_nlon = 100000 - end if - - if (opt.eq."finest") - ref_nplev = -1 - ref_nlat = -1 - ref_nlon = -1 - end if - - ref_plev = 1. 
- upper_plev = 1.d6 - lower_plev = -1.d0 - - ref_lat = 1.d0 - ref_lon = 1.d0 - - ; Loop over datasets, select coarsest/finest lat and lon - ; Always use the finest in the vertical - do mID = 0, dimsizes(indexes) - 1 - - data = read_data(indexes(mID), var, field) - tmp = extract_data(indexes(mID), data, - 1, 0, 0) - delete(data) - - ; (non-)existence of dimensions - flag_plev = False - flag_lon = False - flag_lat = False - dnames = getVarDimNames(tmp) - if (any(dnames.eq."plev")) then - flag_plev = True - end if - if (any(dnames.eq."lon")) then - flag_lon = True - end if - if (any(dnames.eq."lat")) then - flag_lat = True - end if - flags = (/flag_plev, flag_lat, flag_lon/) - - ; Check that all datasets have the same dimensions - if(mID.eq.0) then - flags0 = flags - else - if(.not.all(flags.eq.flags0)) then - error_msg("f", scriptname, funcname, \ - "all datasets must have same dimensions") - end if - end if - - ; Level - if (flag_plev) then - ; Set difference in the uppermost/lowermost level - d_up = abs(min(tmp&plev) - upper_plev) - d_lo = abs(max(tmp&plev) - lower_plev) - if (d_up.gt.1.d-5 .and. d_lo.gt.1.d-5) then - delete(ref_plev) - upper_plev = min(tmp&plev) - lower_plev = max(tmp&plev) - ref_plev = tmp&plev - ref_nplev = dimsizes(ref_plev) - datasetID_plev = mID - else if (d_up.lt.1.d-5 .and. d_lo.lt.1.d-5) then - if (dimsizes(tmp&plev).gt.ref_nplev .and. opt.eq."finest") then - delete(ref_plev) - upper_plev = min(tmp&plev) - lower_plev = max(tmp&plev) - ref_plev = tmp&plev - ref_nplev = dimsizes(ref_plev) - datasetID_plev = mID - end if - if (dimsizes(tmp&plev).lt.ref_nplev .and. opt.eq."coarsest") then - delete(ref_plev) - upper_plev = min(tmp&plev) - lower_plev = max(tmp&plev) - ref_plev = tmp&plev - ref_nplev = dimsizes(ref_plev) - datasetID_plev = mID - end if - end if - end if - delete(d_up) - delete(d_lo) - end if - - ; Latitude - if(flag_lat) then - if ((dimsizes(tmp&lat).lt.ref_nlat .and. opt.eq."coarsest").or. \ - (dimsizes(tmp&lat).gt.ref_nlat .and. opt.eq."finest")) then - delete(ref_lat) - ref_lat = tmp&lat - ref_nlat = dimsizes(ref_lat) - datasetID_lat = mID - end if - end if - - ; Longitude - if(flag_lon) then - if ((dimsizes(tmp&lon).lt.ref_nlon .and. opt.eq."coarsest").or. \ - (dimsizes(tmp&lon).gt.ref_nlon .and. opt.eq."finest")) then - delete(ref_lon) - ref_lon = tmp&lon - ref_nlon = dimsizes(ref_lon) - datasetID_lon = mID - end if - end if - delete(tmp) - - end do ; datasets - - ; Assemble target grid, depending on (non-)existence of dimensions - if(flag_plev .and. flag_lat .and. flag_lon) then ; plev, lat, lon - grid = new((/ref_nplev, ref_nlat, ref_nlon/), float) - grid!0 = "plev" - grid!1 = "lat" - grid!2 = "lon" - else if(.not.flag_plev .and. flag_lat .and. flag_lon) then ; lat, lon - grid = new((/ref_nlat, ref_nlon/), float) - grid!0 = "lat" - grid!1 = "lon" - else if(flag_plev .and. .not.flag_lat .and. flag_lon) then ; plev, lon - grid = new((/ref_nplev, ref_nlon/), float) - grid!0 = "plev" - grid!1 = "lon" - else if(flag_plev .and. flag_lat .and. .not.flag_lon) then ; plev, lat - grid = new((/ref_nplev, ref_nlat/), float) - grid!0 = "plev" - grid!1 = "lat" - else if(flag_plev .and. .not.flag_lat .and. .not.flag_lon) then ; plev - grid = new((/ref_nplev/), float) - grid!0 = "plev" - else if(.not.flag_plev .and. flag_lat .and. .not.flag_lon) then ; lat - grid = new((/ref_nlat/), float) - grid!0 = "lat" - else if(.not.flag_plev .and. .not.flag_lat .and. 
flag_lon) then ; lon - grid = new((/ref_nlon/), float) - grid!0 = "lon" - else ; no dimensions - grid = default_fillvalue("float") - end if - end if - end if - end if - end if - end if - end if - - if(flag_plev) then - grid&plev = ref_plev - grid@gridlev_mID = datasetID_plev - end if - if(flag_lat) then - grid&lat = ref_lat - grid@gridlat_mID = datasetID_lat - end if - if(flag_lon) then - grid&lon = ref_lon - grid@gridlon_mID = datasetID_lon - end if - - leave_msg(scriptname, funcname) - return(grid) - -end - ; ############################################################################# undef("guestimate_average_grid_area") function guestimate_average_grid_area(data[*][*]:numeric) diff --git a/esmvaltool/diag_scripts/shared/scaling.ncl b/esmvaltool/diag_scripts/shared/scaling.ncl index fe4df50c4d..4800e9e02b 100644 --- a/esmvaltool/diag_scripts/shared/scaling.ncl +++ b/esmvaltool/diag_scripts/shared/scaling.ncl @@ -9,7 +9,7 @@ ; ; ############################################################################# -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" ; ############################################################################# undef("convert_units") @@ -159,6 +159,11 @@ begin leave_msg(scriptname, funcname) return(out) end if + if (units_to.eq."mm") then ; change only the label + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if end if if (units_from.eq."mol m-2 s-1") then @@ -203,6 +208,47 @@ begin end if end if + if (units_from.eq."K") then + if (units_to.eq."degC") then + out = out - 273.15 + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if + end if + + if (units_from.eq."W m-2") then + if (units_to.eq."-W m-2") then ; change only the sign, not the label + out = -1. * out + leave_msg(scriptname, funcname) + return(out) + end if + if (units_to.eq."mm d-1") then + out = out / 2.5e6 * 24 * 3600. + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if + end if + + if (units_from.eq."kg m-2 s-1") then + if (units_to.eq."mm d-1") then + out = out * 24 * 3600. + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if + end if + + if (units_from.eq."Pa") then + if (units_to.eq."hPa") then + out = out / 100. 
+ out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if + end if + error_msg("f", scriptname, funcname, "conversion from " + units_from + \ " to " + units_to + " not defined") diff --git a/esmvaltool/diag_scripts/shared/set_operators.ncl b/esmvaltool/diag_scripts/shared/set_operators.ncl index a1c434e0a9..7688f464cb 100644 --- a/esmvaltool/diag_scripts/shared/set_operators.ncl +++ b/esmvaltool/diag_scripts/shared/set_operators.ncl @@ -15,7 +15,7 @@ ; ; ############################################################################# -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" ; ############################################################################# undef("UNIQ") diff --git a/esmvaltool/diag_scripts/shared/statistics.ncl b/esmvaltool/diag_scripts/shared/statistics.ncl index abfdeab4a3..a1525100ef 100644 --- a/esmvaltool/diag_scripts/shared/statistics.ncl +++ b/esmvaltool/diag_scripts/shared/statistics.ncl @@ -21,11 +21,11 @@ ; ; ############################################################################# -load "./interface_scripts/auxiliary.ncl" -load "./interface_scripts/logging.ncl" +load "$diag_scripts/../interface_scripts/auxiliary.ncl" +load "$diag_scripts/../interface_scripts/logging.ncl" -load "./diag_scripts/shared/latlon.ncl" -load "./diag_scripts/shared/regridding.ncl" +load "$diag_scripts/shared/latlon.ncl" +load "$diag_scripts/shared/regridding.ncl" ; ############################################################################# undef("dim_stddev_wgt_Wrap") @@ -122,8 +122,6 @@ function time_operations(field:numeric, ; "seasonalclim": seasonal climatology for the standard seasons DJF, ; MAM, JJA, SON. ; "monthlyclim": monthly climatology jan-dec. -; For monthly input data only! Apply mymm first, if necessary. -; "mymm": multi year monthly mean ; "yearly": time average over every year in [y1:y2]. ; [month strings]: climatology of selected (consecutive) months ; (e.g., "MAM", "SONDJ"). @@ -142,8 +140,7 @@ function time_operations(field:numeric, ; ; Caveats ; The weighted standard deviation is not yet implmented for all cases -; The weighted standard deviation is calculated using the unbiased -; estimator, c +; The weighted standard deviation is calculated using the unbiased estimator ; This should take into account missing values and exclude the w_i for ; which the field contains only missing values. This feature is not ; implemented yet. @@ -151,6 +148,8 @@ function time_operations(field:numeric, ; References ; ; Modification history +; 20190503-A_righ_ma: removed obsolete option "mymm" (used only in +; reformat_obs, now outdated). ; 20140703-A_gott_kl: added option "mymm". ; 20140312-A_righ_ma: extended with standard deviation. ; 20140109-A_righ_ma: written. @@ -470,147 +469,6 @@ begin return(out) end if - ; Multi year monthly mean - ; Output 12 months for each year that occurs in variable "year". - ; Months without an input value are set to missing. 
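The new conversion branches added to convert_units in scaling.ncl above are simple scalings; as a compact overview, here is a hedged Python transcription. The function below is an illustrative stand-in, not ESMValTool API.

L_V = 2.5e6  # latent heat of vaporization [J kg-1], the constant used above

def convert_units(value, units_from, units_to):
    # Transcription of the branches added above; illustration only.
    table = {
        ('K', 'degC'): lambda v: v - 273.15,
        ('W m-2', '-W m-2'): lambda v: -v,                    # sign flip, label unchanged
        ('W m-2', 'mm d-1'): lambda v: v / L_V * 24 * 3600,   # latent heat flux
        ('kg m-2 s-1', 'mm d-1'): lambda v: v * 24 * 3600,    # precipitation flux
        ('Pa', 'hPa'): lambda v: v / 100.,
    }
    try:
        return table[(units_from, units_to)](value)
    except KeyError:
        raise ValueError('conversion from {} to {} not defined'
                         .format(units_from, units_to))

print(convert_units(288.15, 'K', 'degC'))   # 15.0
print(convert_units(101325., 'Pa', 'hPa'))  # 1013.25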
- if (opt.eq."mymm") then - - ; Concatenate year and month of input field - ym_in = 100 * toint(year) + toint(month) - - ; Init target field - years = ispan(toint(min(year)), toint(max(year)), 1) - nyear = dimsizes(years) - ym = new(nyear * 12, integer) - do yy = 0, nyear - 1 - do mm = 0, 11 ; concatenate as well, to avoid nested loop - ym((yy * 12) + mm) = 100 * years(yy) + mm + 1 - end do - end do - dims = dimsizes(subfield) - if(rank.eq.4) then - dims(0, :, :, :) = dimsizes(ym) - else if(rank.eq.3) then - dims(0, :, :) = dimsizes(ym) - else if(rank.eq.2) then - dims(0, :) = dimsizes(ym) - else if(rank.eq.1) then - dims(0) = dimsizes(ym) - else - error_msg("f", scriptname, funcname, "rank = " + tostring(rank) + \ - " not implemented for mymm in time_operations") - end if - end if - end if - end if - out = new(dims, typeof(subfield)) - copy_VarCoords_l1(subfield, out) ; auxiliary.ncl - timec = create_timec(min(years), max(years)) - out!0 = "time" - out&time = timec - - ; Determine FillValue - if(isatt(subfield, "_FillValue")) then - FillValue = subfield@_FillValue - else - FillValue = default_fillvalue(typeof(subfield)) - end if - out@_FillValue = FillValue - - ; Fill target field - if(rank.eq.4) then - do i = 0, dimsizes(ym) - 1 - index = ind(ym_in.eq.ym(i)) - if(.not.all(ismissing(index))) then - out(i, :, :, :) = \ - dim_avg_wgt_n_Wrap(subfield(index, :, :, :), weights(index), 1, 0) - if (oper.eq."stddev") then - p1 = subfield(index, :, :, :) - p2 = conform(p1, out(i, :, :, :), (/1, 2, 3/)) ^ 2 - d2 = (p1 - p2) ^ 2 - arg = dim_sum_wgt_n_Wrap(d2, weights(index), 1, 0) - v1 = sum(weights(index)) - v2 = sum(weights(index) ^ 2) - out(i) = sqrt(v1 / (v1 ^ 2 - v2) * arg) - delete([/p1, p2, d2, arg, v1, v2/]) - end if - else - out(i, :, :, :) = FillValue - end if - delete(index) - end do - else if(rank.eq.3) then - do i = 0, dimsizes(ym) - 1 - index = ind(ym_in.eq.ym(i)) - if(.not.all(ismissing(index))) then - out(i, :, :) = \ - dim_avg_wgt_n_Wrap(subfield(index, :, :), weights(index), 1, 0) - if (oper.eq."stddev") then - p1 = subfield(index, :, :) - p2 = conform(p1, out(i, :, :), (/1, 2/)) ^ 2 - d2 = (p1 - p2) ^ 2 - arg = dim_sum_wgt_n_Wrap(d2, weights(index), 1, 0) - v1 = sum(weights(index)) - v2 = sum(weights(index) ^ 2) - out(i) = sqrt(v1 / (v1 ^ 2 - v2) * arg) - delete([/p1, p2, d2, arg, v1, v2/]) - end if - else - out(i, :, :) = FillValue - end if - delete(index) - end do - else if(rank.eq.2) then - do i = 0, dimsizes(ym) - 1 - index = ind(ym_in.eq.ym(i)) - if(.not.all(ismissing(index))) then - out(i, :) = \ - dim_avg_wgt_n_Wrap(subfield(index, :), weights(index), 1, 0) - if (oper.eq."stddev") then - p1 = subfield(index, :) - p2 = conform(p1, out(i, :), 1) ^ 2 - d2 = (p1 - p2) ^ 2 - arg = dim_sum_wgt_n_Wrap(d2, weights(index), 1, 0) - v1 = sum(weights(index)) - v2 = sum(weights(index) ^ 2) - out(i) = sqrt(v1 / (v1 ^ 2 - v2) * arg) - delete([/p1, p2, d2, arg, v1, v2/]) - end if - else - out(i, :) = FillValue - end if - delete(index) - end do - else if(rank.eq.1) then - do i = 0, dimsizes(ym) - 1 - index = ind(ym_in.eq.ym(i)) - if(.not.all(ismissing(index))) then - out(i) = dim_avg_wgt_Wrap(subfield(index), weights(index), 1) - if (oper.eq."stddev") then - d2 = (subfield(index) - out(i)) ^ 2 - arg = dim_sum_wgt_Wrap(d2, weights(index), 1) - v1 = sum(weights(index)) - v2 = sum(weights(index) ^ 2) - out(i) = sqrt(v1 / (v1 ^ 2 - v2) * arg) - delete([/d2, arg, v1, v2/]) - end if - else - out(i) = FillValue - end if - delete(index) - end do - else - error_msg("f", scriptname, funcname, "rank = 
" + tostring(rank) + \ - " not implemented for mymm in time_operations") - end if - end if - end if - end if - - leave_msg(scriptname, funcname) - return(out) - end if - ; Months string (at least 2 consecutive months): define indexes if (.not.ismissing(str_match_ind_ic(monthstr, opt)).and. \ strlen(opt).ge.2.and.strlen(opt).le.12) then @@ -1155,7 +1013,8 @@ undef("interannual_variability") function interannual_variability(field: numeric, y1[1]: integer, y2[1]: integer, - opt[1]: string) + opt[1]: string, + dtr[1]: string) ; ; Arguments ; field: a numeric array of rank 1 to 4, first dimension must be time. @@ -1170,6 +1029,10 @@ function interannual_variability(field: numeric, ; (e.g. "MAM", "SONDJ"). ; [1, 12]: climatology of the selected month ("1"=Jan, "2"=Feb, ..., ; "12"=Dec). +; dtr: detrending option: +; "None": no detrending before standard deviation is calculated +; "linear": linear detrending using dtrend +; "quadratic": quadratic detrending using dtrend_quadratic ; ; Return value ; An array of the same rank as field or of rank-1, depending on opt. @@ -1185,6 +1048,7 @@ function interannual_variability(field: numeric, ; Reference ; ; Modification history +; 20181022-A_lore_ru: added option dtr for possible detrending of data v2 ; 20140314-A_righ_ma: written. ; local funcname, scriptname, monthstr, rank, field_avg, field_djf, field_mam, \ @@ -1219,6 +1083,22 @@ begin ; Annual climatology if (opt.eq."annualclim") then field_avg = time_operations(field, y1, y2, "average", "yearly", True) + if (dtr.eq."None") then + out = dim_stddev_n_Wrap(field_avg, 0) + else if (dtr.eq."linear") then + field_dtr = dtrend_n(field_avg, False, 0) + tmp = dim_avg_n_Wrap(field_avg, 0) + tmp_conf = conform(field_dtr, tmp, (/1, 2/)) + field_dtr = field_dtr + tmp_conf + copy_VarCoords(field_avg, field_dtr) + out = dim_stddev_n_Wrap(field_dtr, 0) + else if (dtr.eq."quadratic") then + field_dtr = dtrend_quadratic_msg_n(field_avg, False, False, 0) + copy_VarCoords(field_avg, field_dtr) + out = dim_stddev_n_Wrap(field_dtr, 0) + end if + end if + end if out = dim_stddev_n_Wrap(field_avg, 0) leave_msg(scriptname, funcname) return(out) @@ -1239,33 +1119,71 @@ begin "yearly", True) field_avg_son = time_operations(field_son, y1, y2, "average", \ "yearly", True) + if (dtr.eq."None") then + field_avg_djf_dtr = field_avg_djf + field_avg_mam_dtr = field_avg_mam + field_avg_jja_dtr = field_avg_jja + field_avg_son_dtr = field_avg_son + else if (dtr.eq."linear") then + field_avg_djf_dtr = dtrend_n(field_avg_djf, False, 0) + tmp = dim_avg_n_Wrap(field_avg_djf, 0) + tmp_conf = conform(field_avg_djf_dtr, tmp, (/1, 2/)) + field_avg_djf_dtr = field_avg_djf_dtr + tmp_conf + delete([/tmp, tmp_conf/]) + field_avg_mam_dtr = dtrend_n(field_avg_mam, False, 0) + tmp = dim_avg_n_Wrap(field_avg_mam, 0) + tmp_conf = conform(field_avg_mam_dtr, tmp, (/1, 2/)) + field_avg_mam_dtr = field_avg_mam_dtr + tmp_conf + delete([/tmp, tmp_conf/]) + field_avg_jja_dtr = dtrend_n(field_avg_jja, False, 0) + tmp = dim_avg_n_Wrap(field_avg_jja, 0) + tmp_conf = conform(field_avg_jja_dtr, tmp, (/1, 2/)) + field_avg_jja_dtr = field_avg_jja_dtr + tmp_conf + delete([/tmp, tmp_conf/]) + field_avg_son_dtr = dtrend_n(field_avg_son, False, 0) + tmp = dim_avg_n_Wrap(field_avg_son, 0) + tmp_conf = conform(field_avg_son_dtr, tmp, (/1, 2/)) + field_avg_son_dtr = field_avg_son_dtr + tmp_conf + delete([/tmp, tmp_conf/]) + else if (dtr.eq."quadratic") then + field_avg_djf_dtr = dtrend_quadratic_msg_n(field_avg_djf, False, \ + False, 0) + field_avg_mam_dtr = 
dtrend_quadratic_msg_n(field_avg_mam, False, \ + False, 0) + field_avg_jja_dtr = dtrend_quadratic_msg_n(field_avg_jja, False, \ + False, 0) + field_avg_son_dtr = dtrend_quadratic_msg_n(field_avg_son, False, \ + False, 0) + end if + end if + end if if (rank.eq.1) then out = field_djf(0:3) ; save metadata - out(0) = dim_stddev_Wrap(field_avg_djf) - out(1) = dim_stddev_Wrap(field_avg_mam) - out(2) = dim_stddev_Wrap(field_avg_jja) - out(3) = dim_stddev_Wrap(field_avg_son) + out(0) = dim_stddev_Wrap(field_avg_djf_dtr) + out(1) = dim_stddev_Wrap(field_avg_mam_dtr) + out(2) = dim_stddev_Wrap(field_avg_jja_dtr) + out(3) = dim_stddev_Wrap(field_avg_son_dtr) end if if (rank.eq.2) then out = field_djf(0:3, :) ; save metadata - out(0, :) = dim_stddev_n_Wrap(field_avg_djf, 0) - out(1, :) = dim_stddev_n_Wrap(field_avg_mam, 0) - out(2, :) = dim_stddev_n_Wrap(field_avg_jja, 0) - out(3, :) = dim_stddev_n_Wrap(field_avg_son, 0) + out(0, :) = dim_stddev_n_Wrap(field_avg_djf_dtr, 0) + out(1, :) = dim_stddev_n_Wrap(field_avg_mam_dtr, 0) + out(2, :) = dim_stddev_n_Wrap(field_avg_jja_dtr, 0) + out(3, :) = dim_stddev_n_Wrap(field_avg_son_dtr, 0) end if if (rank.eq.3) then out = field_djf(0:3, :, :) ; save metadata - out(0, :, :) = dim_stddev_n_Wrap(field_avg_djf, 0) - out(1, :, :) = dim_stddev_n_Wrap(field_avg_mam, 0) - out(2, :, :) = dim_stddev_n_Wrap(field_avg_jja, 0) - out(3, :, :) = dim_stddev_n_Wrap(field_avg_son, 0) + out(0, :, :) = dim_stddev_n_Wrap(field_avg_djf_dtr, 0) + out(1, :, :) = dim_stddev_n_Wrap(field_avg_mam_dtr, 0) + out(2, :, :) = dim_stddev_n_Wrap(field_avg_jja_dtr, 0) + out(3, :, :) = dim_stddev_n_Wrap(field_avg_son_dtr, 0) end if if (rank.eq.4) then out = field_djf(0:3, :, :, :) ; save metadata - out(0, :, :, :) = dim_stddev_n_Wrap(field_avg_djf, 0) - out(1, :, :, :) = dim_stddev_n_Wrap(field_avg_mam, 0) - out(2, :, :, :) = dim_stddev_n_Wrap(field_avg_jja, 0) - out(3, :, :, :) = dim_stddev_n_Wrap(field_avg_son, 0) + out(0, :, :, :) = dim_stddev_n_Wrap(field_avg_djf_dtr, 0) + out(1, :, :, :) = dim_stddev_n_Wrap(field_avg_mam_dtr, 0) + out(2, :, :, :) = dim_stddev_n_Wrap(field_avg_jja_dtr, 0) + out(3, :, :, :) = dim_stddev_n_Wrap(field_avg_son_dtr, 0) end if leave_msg(scriptname, funcname) return(out) @@ -1283,10 +1201,23 @@ begin strlen(opt).ge.2.and.strlen(opt).le.12) then field_ext = time_operations(field, y1, y2, "extract", opt, True) field_avg = time_operations(field_ext, y1, y2, "average", "yearly", True) + if (dtr.eq."None") then + field_avg_dtr = field_avg + else if (dtr.eq."linear") then + field_avg_dtr = dtrend_n(field_avg, False, 0) + tmp = dim_avg_n_Wrap(field_avg, 0) + tmp_conf = conform(field_avg_dtr, tmp, (/1, 2/)) + field_avg_dtr = field_avg_dtr + tmp_conf + delete([/tmp, tmp_conf/]) + else if (dtr.eq."quadratic") then + field_avg_dtr = dtrend_quadratic_msg_n(field_avg, False, False, 0) + end if + end if + end if if (rank.eq.1) then - out = dim_stddev_Wrap(field_avg) + out = dim_stddev_Wrap(field_avg_dtr) else - out = dim_stddev_n_Wrap(field_avg, 0) + out = dim_stddev_n_Wrap(field_avg_dtr, 0) end if leave_msg(scriptname, funcname) return(out) @@ -1397,10 +1328,28 @@ begin time_weights = sdays end if + ; Annual-mean time-series + if (dim_names(0).eq."year") then + time_weights = new(dims_var(0), float) + time_weights = 1. + end if + if (dim_names(1).eq."lat" .and. dim_names(2).eq."lon") then area_weights = map_area(var&lat, var&lon) end if + if (dim_names(1).eq."plev" .and. 
dim_names(2).eq."lat") then + areas = map_area(ref&lat, (/1.0, 2.0/)) + nlev = dimsizes(ref&plev) + ptop = ref&plev(nlev - 1) - \ + 0.5 * (ref&plev(nlev - 2) - ref&plev(nlev - 1)) + delta_p = dpres_plevel(ref&plev, 101325., ptop, 0) + area_weights = new((/nlev, dimsizes(ref&lat)/), float) + wdims = dimsizes(area_weights) + area_weights = conform_dims(wdims, delta_p, 0) * \ + conform_dims(wdims, areas(:, 0), 1) + end if + if (isdefined("time_weights").and.isdefined("area_weights")) then weights = new(dimsizes(var), float) do ii = 0, dimsizes(time_weights) - 1 @@ -1461,6 +1410,61 @@ begin return(out) end if + ; Single Model Performance Index + if (metric.eq."SMPI") then + nyears = dimsizes(var&year) + out = new(diag_script_info@smpi_n_bootstrap + 1, float) + do ibootstrap = 0, diag_script_info@smpi_n_bootstrap + if (ibootstrap.eq.0) then + bootvect = ispan(0, nyears - 1, 1) + else + icnt = 0 + do while (icnt .le. 10) + bootvect = generate_sample_indices(nyears, 1) + icnt = icnt + 1 + if (.not.all(bootvect(:).eq.bootvect(0))) then + break + end if + end do + if (all(bootvect(:).eq.bootvect(0))) then + error_msg("f", scriptname, funcname, \ + "Number of years too small for bootstrapping. Abort.") + end if + end if + obs = ref(bootvect, :, :) + mod1D = ndtooned(dim_avg_n(var, 0)) + ref1D = ndtooned(dim_avg_n(obs, 0)) + sig1D = ndtooned(dim_stddev_n_Wrap(obs, 0)) + sig1D@_FillValue = default_fillvalue(typeof(sig1D)) + sig1D = where(sig1D.eq.0, sig1D@_FillValue, sig1D) + + delete(weights) + delete(wgt1d) + if (isdim(obs, "lon").and.isdim(obs, "lat")) then + weights = map_area(obs&lat, obs&lon) + elseif (isdim(obs, "plev").and.isdim(obs, "lat")) then + areas = map_area(obs&lat, (/1.0, 2.0/)) + nlev = dimsizes(obs&plev) + ptop = \ + obs&plev(nlev - 1) - 0.5 * (obs&plev(nlev - 2) - obs&plev(nlev - 1)) + delta_p = dpres_plevel(obs&plev, 101325., ptop, 0) + weights = new((/dimsizes(obs&plev), dimsizes(obs&lat)/), float) + wdims = dimsizes(weights) + weights = \ + conform_dims(wdims, delta_p, 0) * conform_dims(wdims, areas(:, 0), 1) + else + error_msg("f", diag_script, "", "Unknown dimensions in variable obs.") + end if + + wgt1d = ndtooned(weights) + out(ibootstrap) = \ + dim_avg_wgt_Wrap((mod1D - ref1D) ^ 2 / sig1D ^ 2, wgt1d, 1) + + end do + leave_msg(scriptname, funcname) + return(out) + end if + error_msg("f", scriptname, funcname, "metric " + metric + " not available") end diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/__init__.py b/esmvaltool/diag_scripts/thermodyn_diagtool/__init__.py new file mode 100644 index 0000000000..72f011f220 --- /dev/null +++ b/esmvaltool/diag_scripts/thermodyn_diagtool/__init__.py @@ -0,0 +1 @@ +"""Initialize the ESMValTool thermodyn_diagtool package.""" diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/computations.py b/esmvaltool/diag_scripts/thermodyn_diagtool/computations.py new file mode 100644 index 0000000000..ecc562d04a --- /dev/null +++ b/esmvaltool/diag_scripts/thermodyn_diagtool/computations.py @@ -0,0 +1,797 @@ +"""INTERNAL COMPUTATIONS. + +Module containing all the core computations. + +This module contains all the basic computations needed by the thermodynamics +diagnostic tool. 
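The SMPI branch added to statistics.ncl above reduces, per bootstrap iteration, to an area-weighted mean of the squared model-minus-observation error normalized by the interannual variance of the (resampled) observations. A NumPy sketch of that reduction, with assumed array shapes and names; illustration only, not the shipped implementation.

import numpy as np

def smpi(model, obs, weights, n_bootstrap=100):
    """model, obs: (year, lat, lon) arrays; weights: (lat, lon) grid-cell areas."""
    nyears = obs.shape[0]
    out = np.empty(n_bootstrap + 1)
    for ib in range(n_bootstrap + 1):
        # Iteration 0 uses the years in order; the rest resample them
        # with replacement, as generate_sample_indices does above.
        idx = (np.arange(nyears) if ib == 0
               else np.random.randint(0, nyears, nyears))
        boot = obs[idx]
        err2 = (model.mean(axis=0) - boot.mean(axis=0)) ** 2
        var = boot.std(axis=0) ** 2
        valid = var > 0  # mask grid cells with zero interannual variance
        out[ib] = np.average(err2[valid] / var[valid], weights=weights[valid])
    return out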
+ +The functions that are here contained are: +- baroceff: function for the baroclinic efficiency; +- budgets: function for the energy budgets (TOA, atmospheric, surface); +- direntr: function for the material entropy production (direct method); +- entr: function for the computation of entropy as energy/temperature; +- evapentr: function for the evaporation related material entropy production; +- indentr: function for material entropy production (indirect method); +- kinentr: function for the kin. en. diss. related material entropy production; +- landoc_budg: function for budget computations over land and oceans; +- mask_precip: function for masking rainfall and snowfall regions; +- masktonull: function for masking nan values to null; +- meltentr: function for the entropy production from ground snow melting; +- potentr: function for the entropy production from pot. en. of the droplet; +- rainentr: function for the entropy production from rainfall precipitation; +- removeif: function for conditional file deleting; +- sensentr: function for the entropy production from sensible heat fluxes; +- snowentr: function for the entropy production from snowfall precipitation; +- wmbudg: function for water mass and latent energy budgets; +- write_eb: function for writing global mean energy budgets to file; + +@author: valerio.lembo@uni-hamburg.de, Valerio Lembo, Hamburg University, 2019. +""" + +import os +from shutil import move + +import numpy as np +from cdo import Cdo +from netCDF4 import Dataset + +from esmvaltool.diag_scripts.thermodyn_diagtool import mkthe + +L_C = 2501000 # latent heat of condensation +LC_SUB = 2835000 # latent heat of sublimation +L_S = 334000 # latent heat of solidification +GRAV = 9.81 # gravity acceleration + + +def baroceff(model, wdir, aux_file, toab_file, te_file): + """Compute the baroclinic efficiency of the atmosphere. + + The function computes the baroclinic efficiency of the atmosphere, i.e. + the efficiency of the meridional heat transports from the low latitudes, + where there is a net energy gain, towards the high latitudes, where there + is a net energy loss (after Lucarini et al., 2011). + + Arguments: + - model: the model name; + - wdir: the working directory where the outputs are stored; + - aux_file: the name of a dummy aux. 
file to be used for computations;
+    - toab_file: a file containing the annual mean TOA energy budgets
+    (time,lon,lat);
+    - te_file: a file containing the annual mean emission temperature
+    (time,lon,lat);
+    """
+    cdo = Cdo()
+    removeif(aux_file)
+    gain_file = wdir + '/{}_maskGain.nc'.format(model)
+    cdo.gtc('0', input=toab_file, output=gain_file)
+    loss_file = wdir + '/{}_maskLoss.nc'.format(model)
+    cdo.ltc('0', input=toab_file, output=loss_file)
+    toabgain_file = wdir + '/{}_toabGain.nc'.format(model)
+    cdo.setrtomiss(
+        '-1000,0',
+        input='-mul {} {}'.format(toab_file, gain_file),
+        output=toabgain_file)
+    toabloss_file = wdir + '/{}_toabLoss.nc'.format(model)
+    cdo.setrtomiss(
+        '0,1000',
+        input='-mul {} {}'.format(toab_file, loss_file),
+        output=toabloss_file)
+    tegain_file = wdir + '/{}_teGain.nc'.format(model)
+    cdo.setrtomiss(
+        '-1000,0',
+        input='-mul {} {}'.format(te_file, gain_file),
+        output=tegain_file)
+    teloss_file = wdir + '/{}_teLoss.nc'.format(model)
+    cdo.setrtomiss(
+        '-1000,0',
+        input='-mul {} {}'.format(te_file, loss_file),
+        output=teloss_file)
+    tegainm_file = wdir + '/{}_teGainm.nc'.format(model)
+    cdo.div(
+        input='-fldmean {0} -fldmean -div {0} {1} '.format(
+            toabgain_file, tegain_file),
+        output=tegainm_file)
+    telossm_file = wdir + '/{}_teLossm.nc'.format(model)
+    cdo.div(
+        input='-fldmean {0} -fldmean -div {0} {1} '.format(
+            toabloss_file, teloss_file),
+        output=telossm_file)
+    aux_baroceff_file = (wdir + '/{}_aux_barocEff.nc'.format(model))
+    cdo.sub(
+        input='-reci {} -reci {}'.format(telossm_file, tegainm_file),
+        output=aux_baroceff_file)
+    baroceff_file = wdir + '/{}_barocEff.nc'.format(model)
+    cdo.div(
+        input='{} -mulc,0.5 -add -reci {} -reci {}'.format(
+            aux_baroceff_file, tegainm_file, telossm_file),
+        output=baroceff_file)
+    with Dataset(baroceff_file) as f_l:
+        baroc = f_l.variables['toab'][0, 0, 0]
+    remove_files = [
+        gain_file, loss_file, toabgain_file, toabloss_file, tegain_file,
+        teloss_file, tegainm_file, telossm_file, aux_baroceff_file
+    ]
+    for filen in remove_files:
+        os.remove(filen)
+    return baroc
+
+
+def budgets(model, wdir, aux_file, filelist):
+    """Compute radiative budgets from radiative and heat fluxes.
+
+    The function computes TOA and surface energy budgets from radiative and
+    heat fluxes, writes the annual means to the log info file and writes
+    the (lat,lon) annual mean fields to a NetCDF file, as well as the time
+    series of the annual mean globally averaged fields.
+
+    toab = rsdt - rsut - rlut
+    surb = rsds + rlds - rsus - rlus - hfls - hfss
+    atmb = toab - surb
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - aux_file: the name of a dummy aux.
file to be used for computations; + - filelist: a list of file names containing the input fields; + """ + cdo = Cdo() + hfls_file = filelist[0] + hfss_file = filelist[1] + rlds_file = filelist[6] + rlus_file = filelist[7] + rlut_file = filelist[8] + rsds_file = filelist[9] + rsdt_file = filelist[10] + rsus_file = filelist[11] + rsut_file = filelist[12] + toab_file = wdir + '/{}_toab.nc'.format(model) + toab_gmean_file = wdir + '/{}_toab_gmean.nc'.format(model) + surb_file = wdir + '/{}_surb.nc'.format(model) + aux_surb_file = wdir + '/{}_aux_surb.nc'.format(model) + surb_gmean_file = wdir + '/{}_surb_gmean.nc'.format(model) + atmb_file = wdir + '/{}_atmb.nc'.format(model) + atmb_gmean_file = wdir + '/{}_atmb_gmean.nc'.format(model) + removeif(aux_file) + cdo.sub( + input="-sub {} {} {}".format(rsdt_file, rsut_file, rlut_file), + output=aux_file) + toab_gmean = write_eb('rsdt', 'toab', aux_file, toab_file, toab_gmean_file) + toab_ymm_file = wdir + '/{}_toab_ymm.nc'.format(model) + cdo.yearmonmean(input=toab_file, output=toab_ymm_file) + # Surface energy budget + removeif(aux_file) + cdo.add(input=" {} {}".format(rsds_file, rlds_file), output=aux_surb_file) + cdo.sub( + input="-sub -sub -sub {} {} {} {} {}".format( + aux_surb_file, rsus_file, rlus_file, hfls_file, hfss_file), + output=aux_file) + surb_gmean = write_eb('rsds', 'surb', aux_file, surb_file, surb_gmean_file) + # Atmospheric energy budget + removeif(aux_file) + cdo.sub(input="{} {}".format(toab_file, surb_file), output=aux_file) + atmb_gmean = write_eb('toab', 'atmb', aux_file, atmb_file, atmb_gmean_file) + eb_gmean = [toab_gmean, atmb_gmean, surb_gmean] + eb_file = [toab_file, atmb_file, surb_file] + # Delete files + filenames = [ + aux_surb_file, toab_gmean_file, atmb_gmean_file, surb_gmean_file + ] + for filen in filenames: + os.remove(filen) + return eb_gmean, eb_file, toab_ymm_file + + +def direntr(logger, model, wdir, filelist, aux_file, lect, lec, flags): + """Compute the material entropy production with the direct method. + + The function computes the material entropy production with the direct + method, explicitly retrieving the components related to evaporation, + rainfall and snowfall precipitation, snow melting at the ground, potential + energy of the droplet, sensible heat fluxes and kinetic energy dissipation + (from Lorenz Energy Cycle, LEC). The outputs are stored as NC files in + terms of global mean time series, and in terms of annual mean + (time,lat,lon) fields. + + Arguments: + - logger: the log file where the global mean values are printed out; + - model: the model name; + - wdir: the working directory where the outputs are stored; + - filelist: the list containing all the input files; + - aux_file: the name of a dummy aux. file to be used for computations; + - lect: the annual mean value of the LEC strength; + - lec: a flag having y (yes) value if the LEC is computed, n (no) if not. 
+ In the latter case, a reference value of 0.010 W*m-2*K-1 is given for the + material entropy production related to the kinetic energy dissipation; + - flags: a list of flags containing information on whether the water mass + and energy budgets are computed, if the material entropy production has to + be computed, if using the indirect, the direct method, or both methods; + """ + _, _, _, aux_files = mkthe.init_mkthe(model, wdir, filelist, flags) + htop_file = aux_files[1] + prr_file = aux_files[2] + tabl_file = aux_files[3] + tasvert_file = aux_files[4] + tcloud_file = aux_files[5] + tcolumn_file = aux_files[6] + tlcl_file = aux_files[7] + hfls_file = filelist[0] + hfss_file = filelist[1] + prsn_file = filelist[4] + ts_file = filelist[15] + logger.info('Computation of the material entropy ' + 'production with the direct method\n') + logger.info('1. Sensible heat fluxes\n') + infile_list = [hfss_file, tabl_file, ts_file] + ssens, sensentr_file = sensentr(model, wdir, infile_list, aux_file) + logger.info( + 'Material entropy production associated with ' + 'sens. heat fluxes: %s\n', ssens) + logger.info('2. Hydrological cycle\n') + logger.info('2.1 Evaporation fluxes\n') + infile_list = [hfls_file, ts_file] + sevap, evapentr_file = evapentr(model, wdir, infile_list, aux_file) + logger.info( + 'Material entropy production associated with ' + 'evaporation fluxes: %s\n', sevap) + infile_mask = [prr_file, prsn_file, tlcl_file] + prrmask_file, prsnmask_file = mask_precip(model, wdir, infile_mask) + logger.info('2.2 Rainfall precipitation\n') + infile_rain = [prrmask_file, tcloud_file] + srain, rainentr_file = rainentr(model, wdir, infile_rain, aux_file) + logger.info( + 'Material entropy production associated with ' + 'rainfall: %s\n', srain) + logger.info('2.3 Snowfall precipitation\n') + infile_snow = [prsnmask_file, tcloud_file] + ssnow, latsnow_file, snowentr_file = snowentr(model, wdir, infile_snow, + aux_file) + logger.info( + 'Material entropy production associated with ' + 'snowfall: %s\n', ssnow) + logger.info('2.4 Melting of snow at the surface \n') + smelt, meltentr_file = meltentr(model, wdir, latsnow_file, aux_file) + logger.info( + 'Material entropy production associated with snow ' + 'melting: %s\n', smelt) + logger.info('2.5 Potential energy of the droplet\n') + infile_pot = [htop_file, prrmask_file, prsnmask_file, tcolumn_file] + spot, potentr_file = potentr(model, wdir, infile_pot, aux_file) + logger.info( + 'Material entropy production associated with ' + 'potential energy of the droplet: %s\n', spot) + os.remove(prrmask_file) + os.remove(prsnmask_file) + logger.info('3. Kinetic energy dissipation\n') + skin = kinentr(logger, aux_file, tasvert_file, lect, lec) + matentr = (float(ssens) - float(sevap) + float(srain) + float(ssnow) + + float(spot) + float(skin) - float(smelt)) + logger.info('Material entropy production with ' + 'the direct method: %s\n', matentr) + irrevers = ((matentr - float(skin)) / float(skin)) + for filen in aux_files: + os.remove(filen) + entr_list = [ + sensentr_file, evapentr_file, rainentr_file, snowentr_file, + meltentr_file, potentr_file + ] + return matentr, irrevers, entr_list + + +def entr(filelist, nin, nout, entr_file, entr_mean_file): + """Obtain the entropy dividing some energy by some working temperature. + + This function ingests an energy and a related temperature, then writes + (time,lat,lon) entropy fluxes and entropy flux annual mean values to NC + files. 
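+
+    A minimal usage sketch (hypothetical file names, mirroring the way the
+    entropy functions below call entr):
+
+        flist = ['hfss.nc', 'tabl.nc', 'aux.nc']
+        ssens_gmean = entr(flist, 'hfss', 'ssens', 'sens_entr.nc',
+                           'sens_entr_gmean.nc')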
+ + Arguments: + - filelist: a list of file containing the name of the energy file, of the + temperature file and of an auxiliary file needed for computation; + - nin: the variable name of the input energy fields; + - nout: the variable name to attribute to the entropy flux in the NC file; + - entr_file: the name of the file containing the 3D entropy fluxes; + - entr_mean_file: the name of the file containing the global annual mean + entropy value; + """ + cdo = Cdo() + en_file = filelist[0] + tem_file = filelist[1] + aux_file = filelist[2] + removeif(aux_file) + cdo.timmean( + input='-yearmonmean -monmean -div {} {}'.format(en_file, tem_file), + options='-b F32', + output=aux_file) + entr_gmean = write_eb(nin, nout, aux_file, entr_file, entr_mean_file) + return entr_gmean + + +def evapentr(model, wdir, infile, aux_file): + """Compute entropy production related to evaporation fluxes. + + The function computes the material entropy production related to + evaporation fluxes, as part of the material entropy production + obtained with the direct method (after Lucarini et al., 2011). + + Arguments: + - model: the model name; + - wdir: the working directory where the outputs are stored; + - infile: a list of file containing hfls and ts, respectively + (with dimensions (time,lat,lon); + - aux_file: the name of a dummy aux. file to be used for computations; + """ + evapentr_file = wdir + '/{}_evap_entr.nc'.format(model) + evapentr_mean_file = wdir + '/{}_evapEntropy_gmean.nc'.format(model) + flist = [infile[0], infile[1], aux_file] + evapentr_gmean = entr(flist, 'hfls', 'sevap', evapentr_file, + evapentr_mean_file) + evapentr_gmean = masktonull(evapentr_gmean) + os.remove(evapentr_mean_file) + return evapentr_gmean, evapentr_file + + +def indentr(model, wdir, infile, aux_file, toab_gmean): + """Compute the material entropy production with the indirect method. + + The function computes the material entropy production with the indirect + method, isolating a vertical and a horizontal component + (after Lucarini et al., 2011). The outputs are stored in terms of global + mean time series, and in terms of (lat,lon) fields for each year to a NC + file. + + Arguments: + - model: the model name; + - wdir: the working directory where the outputs are stored; + - infile: a list of files, containing each the fields rlds, rlus, rsds, + rsus, emission temperature (te), TOA energy budget (toab) and ts; + - toab_file: a file containing the annual mean TOA energy budgets + (time,lon,lat); + - aux_file: the name of a dummy aux. 
file to be used for computations;
+    - toab_gmean: the climatological annual mean TOA energy budget;
+    """
+    cdo = Cdo()
+    horzentropy_file = wdir + '/{}_horizEntropy.nc'.format(model)
+    vertenergy_file = wdir + '/{}_verticalEnergy.nc'.format(model)
+    vertentropy_file = wdir + '/{}_verticalEntropy.nc'.format(model)
+    vertentropy_mean_file = wdir + '/{}_vertEntropy_gmean.nc'.format(model)
+    horzentropy_mean_file = wdir + '/{}_horizEntropy_gmean.nc'.format(model)
+    removeif(aux_file)
+    cdo.yearmonmean(
+        input='-mulc,-1 -div -subc,{} {} {}'.format(
+            np.nanmean(toab_gmean), infile[5], infile[4]),
+        output=aux_file)
+    horzentr_mean = write_eb('toab', 'shor', aux_file, horzentropy_file,
+                             horzentropy_mean_file)
+    cdo.yearmonmean(
+        input=' -add {} -sub {} -add {} {}'.format(infile[0], infile[2],
+                                                   infile[1], infile[3]),
+        output=vertenergy_file)
+    cdo.mul(
+        input='{} -sub -yearmonmean -reci {} -yearmonmean -reci {}'.format(
+            vertenergy_file, infile[4], infile[6]),
+        output=aux_file)
+    vertentr_mean = write_eb('rlds', 'sver', aux_file, vertentropy_file,
+                             vertentropy_mean_file)
+    remove_files = [
+        horzentropy_mean_file, vertenergy_file, vertentropy_mean_file
+    ]
+    for filen in remove_files:
+        os.remove(filen)
+    return horzentr_mean, vertentr_mean, horzentropy_file, vertentropy_file
+
+
+def kinentr(logger, aux_file, tasvert_file, lect, lec):
+    """Compute the material entropy production from kin. energy dissipation.
+
+    The function computes the material entropy production associated with the
+    kinetic energy dissipation, through the intensity of the LEC.
+
+    Arguments:
+    - logger: the log file where the global mean values are printed out;
+    - aux_file: the name of a dummy aux. file to be used for computations;
+    - tasvert_file: a file containing the vertically integrated boundary layer
+    temperature;
+    - lect: an array containing the annual mean LEC intensity;
+    - lec: a flag marking whether the LEC has been previously computed or not;
+    """
+    cdo = Cdo()
+    removeif(aux_file)
+    if lec is True:
+        cdo.yearmonmean(input=tasvert_file, output=aux_file)
+        with Dataset(aux_file) as f_l:
+            tabl_mean = f_l.variables['ts'][:, 0, 0]
+        minentr_mean = np.nanmean(lect / tabl_mean)
+        logger.info(
+            'Material entropy production associated with '
+            'kinetic energy dissipation: %s\n', minentr_mean)
+        minentr_mean = masktonull(minentr_mean)
+    else:
+        minentr_mean = 0.010
+        logger.info('I cannot compute the material entropy '
+                    'production without the LEC...\n')
+        logger.info('I will assign a given value for the material '
+                    'entropy production attributed to LEC '
+                    '(0.01 W/m2*K)\n')
+    return minentr_mean
+
+
+def landoc_budg(model, wdir, infile, mask, name):
+    """Compute budgets separately on land and oceans.
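+
+    The ocean component is obtained by multiplying the field by the grid
+    points where the land-sea mask equals 0; the land component is the
+    residual. A usage sketch (hypothetical file names):
+
+        toab_oc, toab_la = landoc_budg(model, wdir, 'toab.nc', 'lsmask.nc',
+                                       'toab')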
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - infile: the file containing the original budget field as (time,lat,lon);
+    - mask: the file containing the land-sea mask;
+    - name: the variable name as in the input file;
+    """
+    cdo = Cdo()
+    ocean_file = wdir + '/{}_{}_ocean.nc'.format(model, name)
+    oc_gmean_file = wdir + '/{}_{}_oc_gmean.nc'.format(model, name)
+    land_file = wdir + '/{}_{}_land.nc'.format(model, name)
+    la_gmean_file = wdir + '/{}_{}_la_gmean.nc'.format(model, name)
+    aux_file = wdir + '/aux.nc'
+    removeif(aux_file)
+    cdo.mul(input='{} -eqc,0 {}'.format(infile, mask), output=ocean_file)
+    cdo.timmean(input='-fldmean {}'.format(ocean_file), output=oc_gmean_file)
+    with Dataset(oc_gmean_file) as f_l:
+        oc_gmean = f_l.variables[name][0, 0, 0]
+    cdo.sub(input='{} {}'.format(infile, ocean_file), output=land_file)
+    cdo.setctomiss('0', input=ocean_file, output=aux_file)
+    move(aux_file, ocean_file)
+    cdo.setctomiss('0', input=land_file, output=aux_file)
+    move(aux_file, land_file)
+    cdo.timmean(input='-fldmean {}'.format(land_file), output=la_gmean_file)
+    with Dataset(la_gmean_file) as f_l:
+        la_gmean = f_l.variables[name][0, 0, 0]
+    remove_files = [ocean_file, oc_gmean_file, land_file, la_gmean_file]
+    for filen in remove_files:
+        os.remove(filen)
+    return oc_gmean, la_gmean
+
+
+def mask_precip(model, wdir, infile):
+    """Mask precipitation according to the phase of the droplet.
+
+    This function masks the rainfall and snowfall precipitation fields
+    according to the temperature of the cloud at droplet formation. This
+    makes it possible to isolate intermediate phase changes of the droplet
+    life cycle in the atmosphere.
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - infile: a list of input files, containing rainfall precipitation (prr)
+    and prsn, respectively (dimensions (time,lat,lon));
+    """
+    cdo = Cdo()
+    prr_file = infile[0]
+    prsn_file = infile[1]
+    tlcl_file = infile[2]
+    # Prepare masks for snowfall and rainfall
+    maskrain_file = wdir + '/{}_maskprecr.nc'.format(model)
+    cdo.gtc('1.0E-7', input=prr_file, options=' -b F32', output=maskrain_file)
+    masksnow_file = wdir + '/{}_maskprecs.nc'.format(model)
+    cdo.gtc('1.0E-7', input=prsn_file, options=' -b F32', output=masksnow_file)
+    prrmask_file = wdir + '/{}_prr_masked.nc'.format(model)
+    cdo.mul(
+        input='{} {}'.format(maskrain_file, prr_file),
+        options='-b F32',
+        output=prrmask_file)
+    prsnmask_file = wdir + '/{}_prsn_masked.nc'.format(model)
+    cdo.mul(
+        input='{} {}'.format(masksnow_file, prsn_file),
+        options='-b F32',
+        output=prsnmask_file)
+    # Temperatures of the rainfall and snowfall clouds
+    tliq_file = wdir + '/{}_tliq.nc'.format(model)
+    cdo.setrtomiss(
+        '-1000,0',
+        input='-mul {} {}'.format(tlcl_file, maskrain_file),
+        options='-b F32',
+        output=tliq_file)
+    tsol_file = wdir + '/{}_tsol.nc'.format(model)
+    cdo.setrtomiss(
+        '-1000,0',
+        input='-mul {} {}'.format(tlcl_file, masksnow_file),
+        options='-b F32',
+        output=tsol_file)
+    tdegl_file = wdir + '/{}_tliqdeg.nc'.format(model)
+    cdo.subc('273.15', input=tliq_file, options='-b F32', output=tdegl_file)
+    tdegs_file = wdir + '/{}_tsoldeg.nc'.format(model)
+    cdo.subc('273.15', input=tsol_file, options='-b F32', output=tdegs_file)
+    # Mask for ice cloud and temperature for phase changes from ice to rain
+    maskice_file = wdir + '/{}_maskice.nc'.format(model)
+    cdo.ltc('0.0', input=tdegl_file, options='-b F32', output=maskice_file)
+    ticer_file = wdir + '/{}_t_icerain_file'.format(model)
+    cdo.setrtomiss(
+        '-1000,0',
+        input='-mul {} {}'.format(tliq_file, maskice_file),
+        options='-b F32',
+        output=ticer_file)
+    prrice_file = wdir + '/{}_prr_ice_file.nc'.format(model)
+    cdo.mul(
+        input='{} {}'.format(maskice_file, prr_file),
+        options='-b F32',
+        output=prrice_file)
+    # Mask for vapor cloud and temperature for phase changes from vapor to snow
+    maskvap_file = wdir + '/{}_maskvap.nc'.format(model)
+    cdo.gtc('0.0', input=tdegs_file, options='-b F32', output=maskvap_file)
+    tvaps_file = wdir + '/{}_t_vapsnow.nc'.format(model)
+    cdo.setrtomiss(
+        '-1000,0',
+        input='-mul {} {}'.format(tsol_file, maskvap_file),
+        options='-b F32',
+        output=tvaps_file)
+    prsnvap_file = wdir + '/{}_prsn_vap.nc'.format(model)
+    cdo.mul(
+        input='{} {}'.format(maskvap_file, prsn_file),
+        options='-b F32',
+        output=prsnvap_file)
+    remove_files = [
+        maskrain_file, masksnow_file, tliq_file, tsol_file, tdegl_file,
+        tdegs_file, maskice_file, ticer_file, prrice_file, maskvap_file,
+        tvaps_file, prsnvap_file
+    ]
+    for filen in remove_files:
+        os.remove(filen)
+    return prrmask_file, prsnmask_file
+
+
+def masktonull(value):
+    """Replace missing values with zeros."""
+    try:
+        value = float(value)
+    except (TypeError, ValueError):
+        # float() raises TypeError/ValueError (not Warning) on missing or
+        # non-numeric input; fall back to zero in that case
+        value = 0
+    return value
+
+
+def meltentr(model, wdir, latsnow_file, aux_file):
+    """Compute entropy production related to snow melting at the ground.
+
+    The function computes the material entropy production related to snow
+    melting at the ground, as part of the material entropy production
+    obtained with the direct method (after Lucarini et al., 2011).
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - latsnow_file: the file containing the latent energy associated with
+    snowfall precipitation;
+    - aux_file: the name of a dummy aux. file to be used for computations;
+    """
+    cdo = Cdo()
+    removeif(aux_file)
+    latmelt_file = (wdir + '/{}_latentEnergy_snowmelt.nc'.format(model))
+    meltentr_file = (wdir + '/{}_snowmelt_entr.nc'.format(model))
+    meltentr_mean_file = wdir + '/{}_snowmeltEntropy_gmean.nc'.format(model)
+    cdo.mulc(
+        str(L_S),
+        input='-divc,{} {}'.format(str(LC_SUB), latsnow_file),
+        options='-b F32',
+        output=latmelt_file)
+    cdo.timmean(
+        input='-yearmonmean -monmean -setmisstoc,0 -divc,273.15 {}'.format(
+            latmelt_file),
+        options='-b F32',
+        output=aux_file)
+    cdo.chname(
+        'prsn,smelt', input=aux_file, options='-b F32', output=meltentr_file)
+    cdo.fldmean(
+        input=meltentr_file, options='-b F32', output=meltentr_mean_file)
+    with Dataset(meltentr_mean_file) as f_l:
+        meltentr_gmean = f_l.variables['smelt'][0, 0, 0]
+    meltentr_gmean = masktonull(meltentr_gmean)
+    remove_files = [latmelt_file, meltentr_mean_file]
+    for filen in remove_files:
+        os.remove(filen)
+    os.remove(latsnow_file)
+    return meltentr_gmean, meltentr_file
+
+
+def potentr(model, wdir, infile, aux_file):
+    """Compute entropy production related to potential energy of the droplet.
+
+    The function computes the material entropy production related to the
+    potential energy of the snowfall or rainfall droplet. This term must be
+    part of a material entropy production budget, even though it does take
+    part in the energy exchanges of a model "normally".
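+
+    As a sketch of what the CDO chain below computes: the energy is
+    GRAV * htop * (prr + prsn), i.e. the potential energy acquired by the
+    droplet at the boundary layer top, and the entropy production is this
+    energy divided by the temperature of the vertical column (tcolumn).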
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - infile: a list of files containing the height of the boundary layer
+    top (htop), the masked rainfall precipitation (prrmask), the masked
+    snowfall precipitation (prsnmask), the temperature of the vertical
+    column between the cloud top and the ground (tcolumn);
+    - aux_file: the name of a dummy aux. file to be used for computations;
+    """
+    cdo = Cdo()
+    removeif(aux_file)
+    htop_file = infile[0]
+    prrmask_file = infile[1]
+    prsnmask_file = infile[2]
+    tcolumn_file = infile[3]
+    poten_file = wdir + '/{}_potEnergy_drop.nc'.format(model)
+    potentr_file = wdir + '/{}_pot_drop_entr.nc'.format(model)
+    potentr_mean_file = wdir + '/{}_potEnergy_drop_gmean.nc'.format(model)
+    cdo.mulc(
+        GRAV,
+        input='-mul {} -add {} {}'.format(htop_file, prrmask_file,
+                                          prsnmask_file),
+        options='-b F32',
+        output=poten_file)
+    flist = [poten_file, tcolumn_file, aux_file]
+    potentr_gmean = entr(flist, 'htop', 'spotp', potentr_file,
+                         potentr_mean_file)
+    potentr_gmean = masktonull(potentr_gmean)
+    remove_files = [poten_file, potentr_mean_file]
+    for filen in remove_files:
+        os.remove(filen)
+    return potentr_gmean, potentr_file
+
+
+def rainentr(model, wdir, infile, aux_file):
+    """Compute entropy production related to rainfall precipitation.
+
+    The function computes the material entropy production related to
+    rainfall precipitation, as part of the material entropy production
+    obtained with the direct method (after Lucarini et al., 2011).
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - infile: a list of files containing the masked rainfall precipitation
+    (prrmask) and the temperature of the cloud (tcloud);
+    - aux_file: the name of a dummy aux. file to be used for computations;
+    """
+    cdo = Cdo()
+    prrmask_file = infile[0]
+    removeif(aux_file)
+    latrain_file = wdir + '/{}_latentEnergy_rain.nc'.format(model)
+    rainentr_file = wdir + '/{}_rain_entr.nc'.format(model)
+    rainentr_mean_file = wdir + '/{}_rainEntropy_gmean.nc'.format(model)
+    cdo.mulc(
+        str(L_C),
+        input='-setmisstoc,0 {}'.format(prrmask_file),
+        options='-b F32',
+        output=latrain_file)
+    flist = [latrain_file, infile[1], aux_file]
+    rainentr_gmean = entr(flist, 'prr', 'srain', rainentr_file,
+                          rainentr_mean_file)
+    rainentr_gmean = masktonull(rainentr_gmean)
+    remove_files = [latrain_file, rainentr_mean_file]
+    for filen in remove_files:
+        os.remove(filen)
+    return rainentr_gmean, rainentr_file
+
+
+def removeif(filename):
+    """Remove filename if it exists."""
+    try:
+        os.remove(filename)
+    except OSError:
+        pass
+
+
+def sensentr(model, wdir, infile, aux_file):
+    """Compute entropy production related to sensible heat fluxes.
+
+    The function computes the material entropy production related to
+    sensible heat fluxes, as part of the material entropy production
+    obtained with the direct method (after Lucarini et al., 2011).
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - infile: a list of files containing hfss, the temperature at the
+    boundary layer top (tabl) and ts, respectively (with dimensions
+    (time,lat,lon));
+    - aux_file: the name of a dummy aux.
file to be used for computations; + """ + cdo = Cdo() + difftemp_file = wdir + '/{}_difftemp_bl.nc'.format(model) + sensentr_file = (wdir + '/{}_sens_entr.nc'.format(model)) + sensentr_mean_file = wdir + '/{}_sensEntropy_gmean.nc'.format(model) + cdo.reci( + input='-sub -reci {} -reci {}'.format(infile[1], infile[2]), + options='-b F32', + output=difftemp_file) + flist = [infile[0], difftemp_file, aux_file] + sensentr_gmean = entr(flist, 'hfss', 'ssens', sensentr_file, + sensentr_mean_file) + sensentr_gmean = masktonull(sensentr_gmean) + remove_files = [difftemp_file, sensentr_mean_file] + for filen in remove_files: + os.remove(filen) + return sensentr_gmean, sensentr_file + + +def snowentr(model, wdir, infile, aux_file): + """Compute entropy production related to snowfall precipitation. + + The function computes the material entropy production related to snowfall + precipitation, as part of the material entropy production obtained with the + direct method (after Lucarini et al., 2011). + + Arguments: + - model: the model name; + - wdir: the working directory where the outputs are stored; + - infile: a list of file containing the masked snowfall precipitation + (prsnmask) and the temperature of the cloud (tcloud); + - aux_file: the name of a dummy aux. file to be used for computations; + """ + cdo = Cdo() + prsnmask_file = infile[0] + removeif(aux_file) + latsnow_file = wdir + '/{}_latentEnergy_snow.nc'.format(model) + snowentr_file = wdir + '/{}_snow_entr.nc'.format(model) + snowentr_mean_file = wdir + '/{}_snowEntropy_gmean.nc'.format(model) + cdo.mulc( + str(LC_SUB), + input='-setmisstoc,0 {}'.format(prsnmask_file), + options='-b F32', + output=latsnow_file) + flist = [latsnow_file, infile[1], aux_file] + snowentr_gmean = entr(flist, 'prsn', 'ssnow', snowentr_file, + snowentr_mean_file) + snowentr_gmean = masktonull(snowentr_gmean) + os.remove(snowentr_mean_file) + return snowentr_gmean, latsnow_file, snowentr_file + + +def wmbudg(model, wdir, aux_file, filelist, auxlist): + """Compute the water mass and latent energy budgets. + + This function computes the annual mean water mass and latent energy budgets + from the evaporation and rainfall/snowfall precipitation fluxes and prints + them to a NetCDF file. + The globally averaged annual mean budgets are also provided and saved to + a NetCDF file. + + Arguments: + - model: the model name; + - wdir: the working directory where the outputs are stored; + - aux_file: the name of a dummy aux. 
file to be used for computations;
+    - filelist: a list of file names containing the input fields;
+    - auxlist: a list of auxiliary files;
+    """
+    cdo = Cdo()
+    wmbudg_file = wdir + '/{}_wmb.nc'.format(model)
+    wm_gmean_file = wdir + '/{}_wmb_gmean.nc'.format(model)
+    latene_file = wdir + '/{}_latent.nc'.format(model)
+    latene_gmean_file = wdir + '/{}_latent_gmean.nc'.format(model)
+    removeif(aux_file)
+    cdo.sub(input="{} {}".format(auxlist[0], filelist[3]), output=aux_file)
+    wmass_gmean = write_eb('hfls', 'wmb', aux_file, wmbudg_file, wm_gmean_file)
+    removeif(aux_file)
+    cdo.sub(
+        input="{} -add -mulc,{} {} -mulc,{} {}".format(
+            filelist[0], str(LC_SUB), filelist[4], str(L_C), auxlist[2]),
+        output=aux_file)
+    latent_gmean = write_eb('hfls', 'latent', aux_file, latene_file,
+                            latene_gmean_file)
+    varlist = [wmass_gmean, latent_gmean]
+    filelist = [wmbudg_file, latene_file]
+    remove_files = [wm_gmean_file, latene_gmean_file]
+    for filen in remove_files:
+        os.remove(filen)
+    return varlist, filelist
+
+
+def write_eb(namein, nameout, aux_file, d3_file, gmean_file):
+    """Change variable name in the NetCDF file and compute averages.
+
+    Arguments:
+    - namein: initial name of the variable;
+    - nameout: final name of the variable;
+    - aux_file: the name of an auxiliary file;
+    - d3_file: the file containing (time,lat,lon) fields;
+    - gmean_file: the name of the file where the annual and globally
+    averaged fields are stored;
+    """
+    cdo = Cdo()
+    ch_name = '{},{}'.format(namein, nameout)
+    cdo.chname(ch_name, input=aux_file, options='-b F32', output=d3_file)
+    cdo.fldmean(input='-yearmonmean {}'.format(d3_file), output=gmean_file)
+    with Dataset(gmean_file) as f_l:
+        constant = f_l.variables[nameout][:]
+    return constant
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/fluxogram.py b/esmvaltool/diag_scripts/thermodyn_diagtool/fluxogram.py
new file mode 100644
index 0000000000..887c28e956
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/fluxogram.py
@@ -0,0 +1,458 @@
+"""FLUX DIAGRAM PRODUCTION.
+
+Created on Tue Jun 19 16:41:47 2018.
+
+@author: Valerio2
+
+Copyright 2018 Florian Ulrich Jehn
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from matplotlib import pyplot as plt
+
+
+class Fluxogram():
+    """The diagram flux module.
+
+    A class to draw and maintain all fluxes and storages from a model or
+    some similar kind of thing to be drawn as a sequence of storages
+    and fluxes.
+    """
+
+    def __init__(self, max_flux, max_storage, grid_size=20):
+        """Initialize a fluxogram.
+
+        The arguments are:
+        - max_flux: maximum flux of all fluxes; needed for scaling
+        - max_storage: maximum storage of all storages; needed for scaling
+        - grid_size: grid size for drawing the fluxogram; determines how big
+          everything is. Fluxes and storages are scaled accordingly
+        - storages: all the storages the fluxogram has (usually empty to
+          begin with)
+        - fluxes: all the fluxes the fluxogram has (usually empty to begin
+          with).
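+
+        A usage sketch (illustrative values only):
+
+            fl = Fluxogram(100, 250, grid_size=20)
+            fl.add_storage('KZ', 150, 0, 0)
+            fl.add_storage('KE', 80, 1, 1)
+            fl.add_flux('KZ-', fl.storages[0], fl.storages[1], 60)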
+ """ + self.storages = [] + self.fluxes = [] + self.max_flux = max_flux + self.max_storage = max_storage + self.grid_size = grid_size + + def add_storage(self, name, amount, order, offset): + """Add a storage to the storages of the fluxogram.""" + self.storages.append( + Storage(name, self.grid_size, len(self.storages), amount, order, + offset)) + + def add_flux(self, name, from_storage, to_storage, amount): + """Add a flux to the fluxes of the fluxogram.""" + self.fluxes.append( + Flux(name, self.grid_size, from_storage, to_storage, amount)) + + def update_all_storages(self, amounts): + """Update the amount of all storages.""" + for storage, amount in zip(self.storages, amounts): + storage.update_storage(amount) + + def update_all_fluxes(self, amounts): + """Update the amount of all fluxes.""" + for flux, amount in zip(self.fluxes, amounts): + flux.update_flux(amount) + + def update_everything(self, amounts_storages, amounts_fluxes): + """Update all fluxes and storages.""" + self.update_all_fluxes(amounts_fluxes) + self.update_all_storages(amounts_storages) + + def draw(self, filen, listv): + """Draw all fluxes and storages.""" + fig = plt.figure() + frame1 = plt.axes() + fig.set_size_inches(18.5, 10.5) + # find the smallest/largest offset_ so the fluxogram can be drawn big + # enough + largest_offset = 0 + smallest_offset = 0 + largest_order = 0 + for storage in self.storages: + if storage.offset > largest_offset: + largest_offset = storage.offset + if storage.offset < smallest_offset: + smallest_offset = storage.offset + if storage.order > largest_order: + largest_order = storage.order + # set y and x limits + y_max = 0 + y_min = (largest_order + 1) * 2 * self.grid_size * -1 + x_max = (largest_offset + 2) * 2 * self.grid_size + x_min = (smallest_offset - 1) * 2 * self.grid_size + plt.axis([x_min, x_max, y_min, y_max]) + frame1.axes.get_xaxis().set_visible(False) + frame1.axes.get_yaxis().set_visible(False) + # draw all fluxes + dict_r = { + 'AZ+': listv[0], + 'ASE+': listv[2], + 'ATE+': listv[4], + 'A2KS': listv[6], + 'A2KT': listv[7], + 'KTE-': listv[8], + 'KSE-': listv[10], + 'KZ-': listv[12] + } + dict_oth = { + 'l': listv[14], + 'dn': listv[15], + 'rdn': listv[16], + 'ldn': listv[17], + 'up': listv[18], + 'lup': listv[19], + 'rup': listv[20] + } + switcher = { + 'l': self.leftarr_txt, + 'dn': self.dnarr_txt, + 'rdn': self.rdnarr_txt, + 'ldn': self.ldnarr_txt, + 'up': self.uparr_txt, + 'lup': self.luparr_txt, + 'rup': self.ruparr_txt + } + for flux in self.fluxes: + idb = flux.name + # scale the amount + scaled_amount_flux = self.scaler(flux.amount, self.max_flux) + # width multiplied because if not, the arrows are so tiny + arrow = plt.Arrow( + flux.x_start, + flux.y_start, + flux.d_x, + flux.d_y, + width=scaled_amount_flux * 1.7, + alpha=0.8) + if flux.dire == 'r': + for key in dict_r: + value = dict_r[key] + if idb == key: + plt.text( + flux.x_start + 0.25 * self.grid_size, + flux.y_start + 0.05 * self.grid_size, + value, + size=self.grid_size * 0.7) + else: + for key in dict_oth: + value = dict_oth[key] + if flux.dire == key: + switcher[flux.dire](value, flux, plt) + plt.gca().add_patch(arrow) + # draw all storages + for storage in self.storages: + # scale the amount + scaled_amount_stor = self.scaler(storage.amount, self.max_storage) + if scaled_amount_stor == 0: + scaled_amount_stor = 0.0001 + # change_x and y, so the storages are centered to the middle + # of their position and not to upper left + x_p = ( + storage.x_p + + (1 - storage.amount / self.max_storage) * 1.3 * 
self.grid_size) + y_p = ( + storage.y_p - + (1 - storage.amount / self.max_storage) * 1.3 * self.grid_size) + rectangle = plt.Rectangle((x_p, y_p), + scaled_amount_stor, + -scaled_amount_stor, + alpha=0.4) + # label all storages + plt.text( + storage.x_p + 0.6 * self.grid_size, + storage.y_p - 0.65 * self.grid_size, + storage.name, + fontsize=0.7 * self.grid_size) + dict_s = { + 'AZ': listv[1], + 'ASE': listv[3], + 'ATE': listv[5], + 'KTE': listv[9], + 'KSE': listv[11], + 'KZ': listv[13] + } + for key in dict_s: + value = dict_s[key] + if storage.name == key: + plt.text( + storage.x_p + 0.6 * self.grid_size, + storage.y_p - 0.85 * self.grid_size, + value, + fontsize=0.7 * self.grid_size) + # draw a date + plt.gca().add_patch(rectangle) + plt.savefig(filen) + plt.close(fig) + + def dnarr_txt(self, value, flux, pltt): + """Write text on arrow pointing down.""" + x_start = flux.x_start + y_start = flux.y_start + pltt.text( + x_start - 0.2 * self.grid_size, + y_start - 0.45 * self.grid_size, + value, + size=self.grid_size * 0.7, + rotation=-90) + + def leftarr_txt(self, value, flux, pltt): + """Write text on arrow pointing left.""" + x_start = flux.x_start + y_start = flux.y_start + pltt.text( + x_start - 1.35 * self.grid_size, + y_start + 0.05 * self.grid_size, + value, + size=self.grid_size * 0.7) + + def ldnarr_txt(self, value, flux, pltt): + """Write text on arrow pointing down-left.""" + x_start = flux.x_start + y_start = flux.y_start + pltt.text( + x_start - 0.35 * self.grid_size, + y_start - 0.25 * self.grid_size, + value, + size=self.grid_size * 0.5, + rotation=-110) + + def luparr_txt(self, value, flux, pltt): + """Write text on arrow pointing up-left.""" + x_start = flux.x_start + y_start = flux.y_start + pltt.text( + x_start - 0.35 * self.grid_size, + y_start + 0.45 * self.grid_size, + value, + size=self.grid_size * 0.5, + rotation=110) + + def rdnarr_txt(self, value, flux, pltt): + """Write text on arrow pointing down-right.""" + x_start = flux.x_start + y_start = flux.y_start + pltt.text( + x_start + 0.05 * self.grid_size, + y_start - 0.25 * self.grid_size, + value, + size=self.grid_size * 0.5, + rotation=-75) + + def ruparr_txt(self, value, flux, pltt): + """Write text on arrow pointing up-right.""" + x_start = flux.x_start + y_start = flux.y_start + pltt.text( + x_start - 0.1 * self.grid_size, + y_start + 0.45 * self.grid_size, + value, + size=self.grid_size * 0.5, + rotation=75) + + def uparr_txt(self, value, flux, pltt): + """Write text on arrow pointing up.""" + x_start = flux.x_start + y_start = flux.y_start + pltt.text( + x_start + 0.05 * self.grid_size, + y_start + 0.75 * self.grid_size, + value, + size=self.grid_size * 0.7, + rotation=90) + + def scaler(self, value_in, base_max): + """Scale the values in the blocks of the diagram. + + Scale the fluxes and storages, so they don't overstep their + grafical bounds must be called with: + - valueIn: the value that needs rescaling + - baseMax: the upper limit of the original dataset + ~ 100 for fluxes, ~250 for stores (in my model). 
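+
+        The mapping is the usual linear rescaling (spelled out here for
+        reference; in the code below base_min and limit_min are both 0):
+
+            scaled = ((limit_max - limit_min) * (value_in - base_min)
+                      / (base_max - base_min)) + limit_min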
+ """ + # baseMin: the lower limit of the original dataset (usually zero) + base_min = 0 + # limitMin: the lower limit of the rescaled dataset (usually zero) + limit_min = 0 + # limitMax: the upper limit of the rescaled dataset (in our case grid) + limit_max = self.grid_size + # prevents wrong use of scaler + if value_in > base_max: + raise ValueError("Input value larger than base max") + return (((limit_max - limit_min) * (value_in - base_min) / + (base_max - base_min)) + limit_min) + + +class Flux: + """Contain a flux of a fluxogram.""" + + def __init__(self, name, grid_size, from_storage, to_storage, amount=0): + """Initialize a flux. + + Arguments are: + - name: name of the flux + - grid_size: grid size of the diagram + - from_storage: storage the flux is originating from + - to_storage: storage the flux is going into + - amount: how much stuff fluxes. + """ + self.name = name + self.from_storage = from_storage + self.to_storage = to_storage + self.amount = amount + self.grid_size = grid_size + (self.x_start, self.y_start, self.x_end, self.y_end, self.d_x, + self.d_y, self.dire) = (self.calc_start_end_dx_dy()) + + def update_flux(self, amount): + """Update the amount of the flux.""" + self.amount = amount + + def calc_start_end_dx_dy(self): + """Scale the arrows. + + Calculate the starting and ending point of an arrow depending on the + order and offset of the starting and ending storages. This helps + determine the direction of the arrow + returns the start and end xy coordinates of the arrow as tuples. + """ + # arrow pointing to left up + if (self.from_storage.offset > self.to_storage.offset + and self.from_storage.order > self.to_storage.order): + x_start = self.from_storage.x_p + 0.85 * self.grid_size + y_start = self.from_storage.y_p - self.grid_size * 0.5 + x_end = self.to_storage.x_p + self.grid_size * 0.65 + y_end = self.to_storage.y_p - 0.7 * self.grid_size + d_x = abs(x_start - x_end) * (-1) + d_y = abs(y_start - y_end) + dire = 'lup' + # arrow pointing up + elif (self.from_storage.offset == self.to_storage.offset + and self.from_storage.order > self.to_storage.order): + x_start = self.from_storage.x_p + 0.85 * self.grid_size + y_start = self.from_storage.y_p - 0.5 * self.grid_size + x_end = self.to_storage.x_p + 0.85 * self.grid_size + y_end = self.to_storage.y_p - 0.25 * self.grid_size + d_x = abs(x_start - x_end) + d_y = abs(y_start - y_end) + dire = 'up' + # arrow pointing right up + elif (self.from_storage.offset < self.to_storage.offset + and self.from_storage.order > self.to_storage.order): + x_start = (self.from_storage.x_p + self.grid_size) + y_start = self.from_storage.y_p - 0.5 * self.grid_size + x_end = self.to_storage.x_p + 0.05 * self.grid_size + y_end = self.to_storage.y_p - 0.75 * self.grid_size + d_x = abs(x_start - x_end) + d_y = abs(y_start - y_end) + dire = 'rup' + # arrow pointing right + elif (self.from_storage.offset < self.to_storage.offset + and self.from_storage.order == self.to_storage.order): + x_start = (self.from_storage.x_p + self.grid_size) + y_start = self.from_storage.y_p - 0.8 * self.grid_size + x_end = self.to_storage.x_p + 1.25 * self.grid_size + y_end = self.to_storage.y_p - 0.8 * self.grid_size + d_x = abs(x_start - x_end) + d_y = abs(y_start - y_end) + dire = 'r' + # arrow pointing right down + elif (self.from_storage.offset < self.to_storage.offset + and self.from_storage.order < self.to_storage.order): + x_start = (self.from_storage.x_p + 0.85 * self.grid_size) + y_start = self.from_storage.y_p - 1.12 * self.grid_size + x_end = 
self.to_storage.x_p + 0.85 * self.grid_size + y_end = self.to_storage.y_p - 0.9 * self.grid_size + d_x = abs(x_start - x_end) + d_y = abs(y_start - y_end) * (-1) + dire = 'rdn' + # arrow pointing down + elif (self.from_storage.offset == self.to_storage.offset + and self.from_storage.order < self.to_storage.order): + x_start = self.from_storage.x_p + 0.8 * self.grid_size + y_start = (self.from_storage.y_p - 1.12 * self.grid_size) + x_end = self.to_storage.x_p + 0.8 * self.grid_size + y_end = self.to_storage.y_p - 1.4 * self.grid_size + d_x = abs(x_start - x_end) + d_y = abs(y_start - y_end) * (-1) + dire = 'dn' + # arrow pointing left down + elif (self.from_storage.offset > self.to_storage.offset + and self.from_storage.order < self.to_storage.order): + x_start = self.from_storage.x_p + 0.75 * self.grid_size + y_start = (self.from_storage.y_p - 1.1 * self.grid_size) + x_end = self.to_storage.x_p + 0.6 * self.grid_size + y_end = self.to_storage.y_p - 0.9 * self.grid_size + d_x = abs(x_start - x_end) * (-1) + d_y = abs(y_start - y_end) * (-1) + dire = 'ldn' + # arrow pointing left + elif (self.from_storage.offset > self.to_storage.offset + and self.from_storage.order == self.to_storage.order): + x_start = self.from_storage.x_p + 0.5 * self.grid_size + y_start = self.from_storage.y_p - 0.75 * self.grid_size + x_end = self.to_storage.x_p + 0.25 * self.grid_size + y_end = self.to_storage.y_p - 0.75 * self.grid_size + d_x = abs(x_start - x_end) * (-1) + d_y = abs(y_start - y_end) + dire = 'l' + # multiply by 0.9 so there is a gap between storages and arrows + d_x = d_x * 0.75 + d_y = d_y * 0.75 + return x_start, y_start, x_end, y_end, d_x, d_y, dire + + +class Storage: + """Contain a storage of a fluxogram.""" + + def __init__(self, name, grid_size, number, amount=0, order=0, offset=0): + """Initialize a storage. + + Arguments are: + - name: name of the storage + - number: consecutive number + - grid_size of the diagram + - amount: how much stuff is in it + - order: how much down it is in the hierachie (starts with 0) + - offset = how much the storage is offset to the left/right + in relationship to the center. + """ + self.name = name + self.amount = amount + self.number = number + self.order = order + self.offset = offset + self.grid_size = grid_size + self.x_p, self.y_p = self.calculate_xy() + + def update_storage(self, amount): + """Update the amount of the storage.""" + self.amount = amount + + def calculate_xy(self): + """Provide coordinates of the blocks in the diagram. + + Calculate the xy coordinates of the starting point from where + the rectangle is drawn. The additional multiplication by two is + to produce the gaps in the diagram. + """ + x_p = self.offset * self.grid_size * 2 + # multiply by -1 to draw the diagram from top to bottom + y_p = self.order * self.grid_size * 2 * -1 + return x_p, y_p diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/fourier_coefficients.py b/esmvaltool/diag_scripts/thermodyn_diagtool/fourier_coefficients.py new file mode 100644 index 0000000000..218bedc66e --- /dev/null +++ b/esmvaltool/diag_scripts/thermodyn_diagtool/fourier_coefficients.py @@ -0,0 +1,296 @@ +"""Module retrieving Fourier coefficients computation from lonlat grid. + +Computation of the Fourier coefficients from lonlat grids +on pressure levels at every timestep. + +The spectral truncation is determined by the number of longitudinal +gridsteps. The outputs are given as (time,level,wave,lat) where wave stands +for the zonal wavenumber. 
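+For instance (an illustrative worked example): with nlat = 64 latitudes,
+2 * nlat = 128 first matches GP_RES = 128, hence FC_RES = 43 and
+trunc = 44 coefficient slots per latitude circle, interleaving the real
+and imaginary parts of the zonal wavenumbers.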
In the context of the thermodynamic diagnostic tool, +this is used for the computation of the Lorenz Energy Cycle. + +@author: valerio.lembo@uni-hamburg.de, Valerio Lembo, Hamburg University, 2018. +""" + +import numpy as np +from netCDF4 import Dataset + +GP_RES = np.array([16, 32, 48, 64, 96, 128, 256, 384, 512, 1024, 2048, 4096]) +FC_RES = np.array([5, 10, 15, 21, 31, 43, 85, 127, 171, 341, 683, 1365]) +G_0 = 9.81 # Gravity acceleration +GAM = 0.0065 # Standard atmosphere lapse rate +GAS_CON = 287.0 # Gas constant +P_0 = 10000 # Reference tropospheric pressure + + +def fourier_coeff(tadiagfile, outfile, ta_input, tas_input): + """Compute Fourier coefficients in lon direction. + + Receive as input: + - tadiagfile: the name of a file to store modified t fields; + - outfile: the name of a file to store the Fourier coefficients; + - ta_input: the name of a file containing t,u,v,w fields; + - tas_input: the name of a file containing t2m field. + """ + with Dataset(ta_input) as dataset: + lon = dataset.variables['lon'][:] + lat = dataset.variables['lat'][:] + lev = dataset.variables['plev'][:] + time = dataset.variables['time'][:] + t_a = dataset.variables['ta'][:, :, :, :] + u_a = dataset.variables['ua'][:, :, :, :] + v_a = dataset.variables['va'][:, :, :, :] + wap = dataset.variables['wap'][:, :, :, :] + nlon = len(lon) + nlat = len(lat) + nlev = len(lev) + ntime = len(time) + i = np.min(np.where(2 * nlat <= GP_RES)) + trunc = FC_RES[i] + 1 + wave2 = np.linspace(0, trunc - 1, trunc) + with Dataset(tas_input) as dataset: + tas = dataset.variables['tas'][:, :, :] + tas = tas[:, ::-1, :] + ta1_fx = np.array(t_a) + deltat = np.zeros([ntime, nlev, nlat, nlon]) + p_s = np.full([ntime, nlat, nlon], P_0) + for i in np.arange(nlev - 1, 0, -1): + h_1 = np.ma.masked_where(ta1_fx[:, i, :, :] != 0, ta1_fx[:, i, :, :]) + if np.any(h_1.mask > 0): + deltat[:, i - 1, :, :] = np.where(ta1_fx[:, i - 1, :, :] != 0, + deltat[:, i - 1, :, :], + (ta1_fx[:, i, :, :] - tas)) + deltat[:, i - 1, :, :] = ( + (1 * np.array(h_1.mask)) * np.array(deltat[:, i - 1, :, :])) + d_p = -( + (P_0 * G_0 / (GAM * GAS_CON)) * deltat[:, i - 1, :, :] / tas) + p_s = np.where(ta1_fx[:, i - 1, :, :] != 0, p_s, lev[i - 1] + d_p) + for k in np.arange(0, nlev - i - 1, 1): + h_3 = np.ma.masked_where(ta1_fx[:, i + k, :, :] != 0, + ta1_fx[:, i + k, :, :]) + if np.any(h_3.mask > 0): + deltat[:, i - 1, :, :] = np.where( + ta1_fx[:, i + k, :, :] != 0, deltat[:, i - 1, :, :], + (ta1_fx[:, i + k + 1, :, :] - tas)) + d_p = -((P_0 * G_0 / + (GAM * GAS_CON)) * deltat[:, i - 1, :, :] / tas) + p_s = np.where(ta1_fx[:, i + k, :, :] != 0, p_s, + lev[i + k] + d_p) + ta2_fx = np.array(t_a) + mask = np.zeros([nlev, ntime, nlat, nlon]) + dat = np.zeros([nlev, ntime, nlat, nlon]) + tafr_bar = np.zeros([nlev, ntime, nlat, nlon]) + deltap = np.zeros([ntime, nlev, nlat, nlon]) + for i in np.arange(nlev): + deltap[:, i, :, :] = p_s - lev[i] + h_2 = np.ma.masked_where(ta2_fx[:, i, :, :] == 0, ta2_fx[:, i, :, :]) + mask[i, :, :, :] = np.array(h_2.mask) + tafr_bar[i, :, :, :] = (1 * np.array(mask[i, :, :, :]) * ( + tas - GAM * GAS_CON / (G_0 * p_s) * deltap[:, i, :, :] * tas)) + dat[i, :, :, :] = ( + ta2_fx[:, i, :, :] * (1 - 1 * np.array(mask[i, :, :, :]))) + t_a[:, i, :, :] = dat[i, :, :, :] + tafr_bar[i, :, :, :] + pr_output_diag(t_a, ta_input, tadiagfile, 'ta') + tafft_p = np.fft.fft(t_a, axis=3)[:, :, :, :int(trunc / 2)] / (nlon) + uafft_p = np.fft.fft(u_a, axis=3)[:, :, :, :int(trunc / 2)] / (nlon) + vafft_p = np.fft.fft(v_a, axis=3)[:, :, :, :int(trunc / 2)] 
/ (nlon)
+    wapfft_p = np.fft.fft(wap, axis=3)[:, :, :, :int(trunc / 2)] / (nlon)
+    tafft = np.zeros([ntime, nlev, nlat, trunc])
+    uafft = np.zeros([ntime, nlev, nlat, trunc])
+    vafft = np.zeros([ntime, nlev, nlat, trunc])
+    wapfft = np.zeros([ntime, nlev, nlat, trunc])
+    tafft[:, :, :, 0::2] = np.real(tafft_p)
+    tafft[:, :, :, 1::2] = np.imag(tafft_p)
+    uafft[:, :, :, 0::2] = np.real(uafft_p)
+    uafft[:, :, :, 1::2] = np.imag(uafft_p)
+    vafft[:, :, :, 0::2] = np.real(vafft_p)
+    vafft[:, :, :, 1::2] = np.imag(vafft_p)
+    wapfft[:, :, :, 0::2] = np.real(wapfft_p)
+    wapfft[:, :, :, 1::2] = np.imag(wapfft_p)
+    dict_v = {'ta': tafft, 'ua': uafft, 'va': vafft, 'wap': wapfft}
+    file_desc = 'Fourier coefficients'
+    pr_output(dict_v, ta_input, outfile, file_desc, wave2)
+
+
+def pr_output(dict_v, nc_f, fileo, file_desc, wave2):
+    """Print outputs to NetCDF.
+
+    Save fields to NetCDF, retrieving information from an existing
+    NetCDF file. Metadata are transferred from the existing file to the
+    new one.
+    Arguments:
+    - dict_v: a dictionary mapping variable names to the fields to be
+    stored, each with shape (time,level,lat,wave);
+    - nc_f: the existing dataset, from where the metadata are
+    retrieved. Coordinates time, level and lat have to have the same
+    dimensions as the fields to be saved to the new file;
+    - fileo: the name of the output file;
+    - file_desc: a description attribute for the output file;
+    - wave2: an array containing the zonal wavenumbers;
+
+    PROGRAMMER(S)
+    Chris Slocum (2014), modified by Valerio Lembo (2018).
+    """
+    # Writing NetCDF files
+    with Dataset(fileo, 'w', format='NETCDF4') as var_nc_fid:
+        var_nc_fid.description = file_desc
+        with Dataset(nc_f, 'r') as nc_fid:
+            extr_time(nc_fid, var_nc_fid)
+            extr_lat(nc_fid, var_nc_fid, 'lat')
+            extr_plev(nc_fid, var_nc_fid)
+            # Write the wave dimension
+            var_nc_fid.createDimension('wave', len(wave2))
+            var_nc_fid.createVariable('wave', nc_fid.variables['plev'].dtype,
+                                      ('wave', ))
+            var_nc_fid.variables['wave'][:] = wave2
+        for key in dict_v:
+            value = dict_v[key]
+            var1_nc_var = var_nc_fid.createVariable(
+                key, 'f8', ('time', 'plev', 'lat', 'wave'))
+            varatts(var1_nc_var, key)
+            var_nc_fid.variables[key][:, :, :, :] = value
+
+
+def pr_output_diag(var1, nc_f, fileo, name1):
+    """Print processed ta field to NetCDF file.
+
+    Save fields to NetCDF, retrieving information from an existing
+    NetCDF file. Metadata are transferred from the existing file to the
+    new one.
+    Arguments:
+    - var1: the field to be stored, with shape (time,level,lat,lon);
+    - nc_f: the existing dataset, from where the metadata are
+    retrieved. Coordinates time, level, lat and lon have to have the
+    same dimensions as the fields to be saved to the new file;
+    - fileo: the name of the output file;
+    - name1: the name of the variable to be saved;
+
+    PROGRAMMER(S)
+    Chris Slocum (2014), modified by Valerio Lembo (2018).
+    """
+    with Dataset(fileo, 'w', format='NETCDF4') as var_nc_fid:
+        var_nc_fid.description = "Fourier coefficients"
+        with Dataset(nc_f, 'r') as nc_fid:
+            # Extract data from NetCDF file and write them to the new file
+            extr_time(nc_fid, var_nc_fid)
+            extr_lat(nc_fid, var_nc_fid, 'lat')
+            extr_lon(nc_fid, var_nc_fid)
+            extr_plev(nc_fid, var_nc_fid)
+        var1_nc_var = var_nc_fid.createVariable(name1, 'f8',
+                                                ('time', 'plev', 'lat', 'lon'))
+        varatts(var1_nc_var, name1)
+        var_nc_fid.variables[name1][:, :, :, :] = var1
+        # the with statement closes the new file; no explicit close() needed
+
+
+def extr_lat(nc_fid, var_nc_fid, latn):
+    """Extract lat coord. from NC files and save them to a new NC file.
+
+    Arguments:
+    - nc_fid: the existing dataset, from where the lat coordinate and its
+    metadata are retrieved;
+    - var_nc_fid: the id of the new NC dataset previously created;
+    - latn: the name of the latitude dimension;
+    """
+    # Extract coordinates from NetCDF file
+    lats = nc_fid.variables['lat'][:]
+    var_nc_fid.createDimension(latn, len(lats))
+    var_nc_dim = var_nc_fid.createVariable(latn, nc_fid.variables['lat'].dtype,
+                                           (latn, ))
+    for ncattr in nc_fid.variables['lat'].ncattrs():
+        var_nc_dim.setncattr(ncattr, nc_fid.variables['lat'].getncattr(ncattr))
+    var_nc_fid.variables[latn][:] = lats
+
+
+def extr_lon(nc_fid, var_nc_fid):
+    """Extract lon coord. from NC files and save them to a new NC file.
+
+    Arguments:
+    - nc_fid: the existing dataset, from where the lon coordinate and its
+    metadata are retrieved;
+    - var_nc_fid: the id of the new NC dataset previously created;
+    """
+    # Extract coordinates from NetCDF file
+    lons = nc_fid.variables['lon'][:]
+    var_nc_fid.createDimension('lon', len(lons))
+    var_nc_dim = var_nc_fid.createVariable(
+        'lon', nc_fid.variables['lon'].dtype, ('lon', ))
+    for ncattr in nc_fid.variables['lon'].ncattrs():
+        var_nc_dim.setncattr(ncattr, nc_fid.variables['lon'].getncattr(ncattr))
+    var_nc_fid.variables['lon'][:] = lons
+
+
+def extr_plev(nc_fid, var_nc_fid):
+    """Extract plev coord. from NC files and save them to a new NC file.
+
+    Arguments:
+    - nc_fid: the existing dataset, from where the plev coordinate and its
+    metadata are retrieved;
+    - var_nc_fid: the id of the new NC dataset previously created;
+    """
+    plev = nc_fid.variables['plev'][:]
+    var_nc_fid.createDimension('plev', len(plev))
+    var_nc_dim = var_nc_fid.createVariable(
+        'plev', nc_fid.variables['plev'].dtype, ('plev', ))
+    for ncattr in nc_fid.variables['plev'].ncattrs():
+        var_nc_dim.setncattr(ncattr,
+                             nc_fid.variables['plev'].getncattr(ncattr))
+    var_nc_fid.variables['plev'][:] = plev
+
+
+def extr_time(nc_fid, var_nc_fid):
+    """Extract time coord. from NC files and save them to a new NC file.
+
+    Arguments:
+    - nc_fid: the existing dataset, from where the time coordinate and its
+    metadata are retrieved;
+    - var_nc_fid: the id of the new NC dataset previously created;
+    """
+    # Extract coordinates from NetCDF file
+    time = nc_fid.variables['time'][:]
+    # Using our previous dimension info, we can create the new dimensions.
+    var_nc_fid.createDimension('time', len(time))
+    var_nc_dim = var_nc_fid.createVariable(
+        'time', nc_fid.variables['time'].dtype, ('time', ))
+    for ncattr in nc_fid.variables['time'].ncattrs():
+        var_nc_dim.setncattr(ncattr,
+                             nc_fid.variables['time'].getncattr(ncattr))
+    var_nc_fid.variables['time'][:] = time
+
+
+def varatts(w_nc_var, varname):
+    """Add attributes to the variables, depending on their name.
+
+    Arguments:
+    - w_nc_var: a variable object;
+    - varname: the name of the variable, among ta, ua, va and wap.
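+
+    For example, varatts(w_nc_var, 'ta') sets long_name "Air temperature",
+    units "K" and level_desc "pressure levels" on the variable.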
+    """
+    if varname == 'ta':
+        w_nc_var.setncatts({
+            'long_name': "Air temperature",
+            'units': "K",
+            'level_desc': 'pressure levels'
+        })
+    elif varname == 'ua':
+        w_nc_var.setncatts({
+            'long_name': "Eastward wind",
+            'units': "m s-1",
+            'level_desc': 'pressure levels'
+        })
+    elif varname == 'va':
+        w_nc_var.setncatts({
+            'long_name': "Northward wind",
+            'units': "m s-1",
+            'level_desc': 'pressure levels'
+        })
+    elif varname == 'wap':
+        w_nc_var.setncatts({
+            'long_name': 'Lagrangian tendency of '
+                         'air pressure',
+            'units': "Pa s-1",
+            'level_desc': 'pressure levels'
+        })
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/lorenz_cycle.py b/esmvaltool/diag_scripts/thermodyn_diagtool/lorenz_cycle.py
new file mode 100644
index 0000000000..4bb0fdf9d3
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/lorenz_cycle.py
@@ -0,0 +1,1112 @@
+"""PROGRAM FOR LEC COMPUTATION.
+
+The module contains the following functions:
+    - lorenz: the main program, calling functions that compute the
+      reservoirs and conversion terms, storing them separately in
+      NetCDF files and providing a flux diagram and table outputs,
+      the latter separately for the two hemispheres;
+    - averages: a script computing time, global and zonal averages;
+    - averages_comp: a script computing global means of the output fields;
+    - bsslzr: it contains the coefficients for the conversion from regular
+      lonlat grid to Gaussian grid;
+    - diagram: the interface between the main program and the
+      class "Fluxogram", producing the flux diagram;
+    - gauaw: it uses the coefficients provided in bsslzr for the lonlat to
+      Gaussian grid conversion;
+    - globall_cg: it computes the global and hemispheric means at each
+      timestep;
+    - init: initializes the table and ingests input fields;
+    - makek: computes the KE reservoirs;
+    - makea: computes the APE reservoirs;
+    - mka2k: computes the APE->KE conversion terms;
+    - mkaeaz: computes the zonal APE - eddy APE conversion terms;
+    - mkkekz: computes the zonal KE - eddy KE conversion terms;
+    - mkatas: computes the stationary eddy - transient eddy APE conversions;
+    - mkktks: computes the stationary eddy - transient eddy KE conversions;
+    - output: computes vertical integrals and prints NC output;
+    - preprocess_lec: a script handling the input files, separating the real
+      from the imaginary part of the Fourier coefficients, reordering the
+      latitudinal dimension (from N to S) and interpolating on a reference
+      sigma coordinate;
+    - pr_output: prints a single component of the LEC computations to a
+      single NC file;
+    - removeif: removes a file if it exists;
+    - stabil: calculates the stability parameter;
+    - table: prints the global and hemispheric mean values of
+      the reservoirs;
+    - table_conv: prints the global and hemispheric mean values of the
+      conversion terms;
+    - varatts: prints the attributes of a variable in a NC file;
+    - weights: computes the weights for vertical integrations and meridional
+      averages;
+    - write_to_tab: a script for writing global and hemispheric means to the
+      table;
+
+References:
+    Ulbrich, U., and P. Speth (1991): The global energy cycle of stationary
+    and transient atmospheric waves: Results from ECMWF analyses. Meteorol.
+    Atmos. Phys., 45, 125-138.
+
+@author: valerio.lembo@uni-hamburg.de, Valerio Lembo, Hamburg University, 2018.
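+
+A minimal driver sketch (hypothetical paths and file names):
+
+    lec_strength = lorenz('/work/out', 'MODEL', '2000', 'fourier_coeff.nc',
+                          'lec_diagram.png', 'lec_table.txt')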
+""" + +import math +import os +import sys +import numpy as np +from cdo import Cdo +from netCDF4 import Dataset +from esmvaltool.diag_scripts.thermodyn_diagtool import fluxogram, \ + fourier_coefficients + +G = 9.81 +R = 287.00 +CP = 1003.5 +AA = 6.371E6 +PS = 101100.0 +NW_1 = 3 +NW_2 = 9 +NW_3 = 21 + + +def lorenz(outpath, model, year, filenc, plotfile, logfile): + """Manage input and output fields and calling functions. + + Receive fields t,u,v,w as input fields in Fourier + coefficients (time,level,wave,lon) and compute the LEC. + + Arguments: + - outpath: ath where otput fields are stored (as NetCDF fields); + - model: name of the model that is analysed; + - year: year that is considered; + - filenc: name of the file containing the input fields; + - plotfile: name of the file that will contain the flux diagram; + - logfile: name of the file containing the table as a .txt file. + """ + ta_c, ua_c, va_c, wap_c, dims, lev, lat, log = init(logfile, filenc) + nlev = int(dims[0]) + ntime = int(dims[1]) + nlat = int(dims[2]) + ntp = int(dims[3]) + d_s, y_l, g_w = weights(lev, nlev, lat) + # Compute time mean + ta_tmn = np.nanmean(ta_c, axis=1) + ta_ztmn, ta_gmn = averages(ta_tmn, g_w) + ua_tmn = np.nanmean(ua_c, axis=1) + va_tmn = np.nanmean(va_c, axis=1) + wap_tmn = np.nanmean(wap_c, axis=1) + _, wap_gmn = averages(wap_tmn, g_w) + # Compute stability parameter + gam_ztmn = np.zeros([nlev, nlat]) + for l_l in range(nlat): + gam_ztmn[:, l_l] = stabil(ta_ztmn[:, l_l], lev, nlev) + gam_tmn = stabil(ta_gmn, lev, nlev) + e_k = np.zeros([nlev, ntime, nlat, ntp - 1]) + ape = np.zeros([nlev, ntime, nlat, ntp - 1]) + a2k = np.zeros([nlev, ntime, nlat, ntp - 1]) + ae2az = np.zeros([nlev, ntime, nlat, ntp - 1]) + ke2kz = np.zeros([nlev, ntime, nlat, ntp - 1]) + at2as = np.zeros([nlev, ntime, nlat, ntp - 1]) + kt2ks = np.zeros([nlev, ntime, nlat, ntp - 1]) + for t_t in range(ntime): + ta_tan = ta_c[:, t_t, :, :] - ta_tmn + ua_tan = ua_c[:, t_t, :, :] - ua_tmn + va_tan = va_c[:, t_t, :, :] - va_tmn + wap_tan = wap_c[:, t_t, :, :] - wap_tmn + # Compute zonal means + _, ta_tgan = averages(ta_tan, g_w) + _, wap_tgan = averages(wap_tan, g_w) + # Compute kinetic energy + e_k[:, t_t, :, :] = makek(ua_tan, va_tan) + # Compute available potential energy + ape[:, t_t, :, :] = makea(ta_tan, ta_tgan, gam_tmn) + # Compute conversion between kin.en. and pot.en. + a2k[:, t_t, :, :] = mka2k(wap_tan, ta_tan, wap_tgan, ta_tgan, lev) + # Compute conversion between zonal and eddy APE + ae2az[:, t_t, :, :] = mkaeaz(va_tan, wap_tan, ta_tan, ta_tmn, ta_gmn, + lev, y_l, gam_tmn, nlat, nlev) + # Compute conversion between zonal and eddy KE + ke2kz[:, t_t, :, :] = mkkekz(ua_tan, va_tan, wap_tan, ua_tmn, va_tmn, + lev, y_l, nlat, ntp, nlev) + # Compute conversion between stationary and transient eddy APE + at2as[:, t_t, :, :] = mkatas(ua_tan, va_tan, wap_tan, ta_tan, ta_ztmn, + gam_ztmn, lev, y_l, nlat, ntp, nlev) + # Compute conversion between stationary and transient eddy KE + kt2ks[:, t_t, :, :] = mkktks(ua_tan, va_tan, ua_tmn, va_tmn, y_l, nlat, + ntp, nlev) + ek_tgmn = averages_comp(e_k, g_w, d_s, dims) + table(ek_tgmn, ntp, 'TOT. KIN. EN. ', logfile, flag=0) + ape_tgmn = averages_comp(ape, g_w, d_s, dims) + table(ape_tgmn, ntp, 'TOT. POT. EN. 
+    a2k_tgmn = averages_comp(a2k, g_w, d_s, dims)
+    table(a2k_tgmn, ntp, 'KE -> APE (trans)', logfile, flag=1)
+    ae2az_tgmn = averages_comp(ae2az, g_w, d_s, dims)
+    table(ae2az_tgmn, ntp, 'AZ <-> AE (trans)', logfile, flag=1)
+    ke2kz_tgmn = averages_comp(ke2kz, g_w, d_s, dims)
+    table(ke2kz_tgmn, ntp, 'KZ <-> KE (trans)', logfile, flag=1)
+    at2as_tgmn = averages_comp(at2as, g_w, d_s, dims)
+    table(at2as_tgmn, ntp, 'ASE <-> ATE      ', logfile, flag=1)
+    kt2ks_tgmn = averages_comp(kt2ks, g_w, d_s, dims)
+    table(kt2ks_tgmn, ntp, 'KSE <-> KTE      ', logfile, flag=1)
+    ek_st = makek(ua_tmn, va_tmn)
+    ek_stgmn = globall_cg(ek_st, g_w, d_s, dims)
+    table(ek_stgmn, ntp, 'STAT. KIN. EN.   ', logfile, flag=0)
+    ape_st = makea(ta_tmn, ta_gmn, gam_tmn)
+    ape_stgmn = globall_cg(ape_st, g_w, d_s, dims)
+    table(ape_stgmn, ntp, 'STAT. POT. EN.   ', logfile, flag=0)
+    a2k_st = mka2k(wap_tmn, ta_tmn, wap_gmn, ta_gmn, lev)
+    a2k_stgmn = globall_cg(a2k_st, g_w, d_s, dims)
+    table(a2k_stgmn, ntp, 'KE -> APE (stat)', logfile, flag=1)
+    ae2az_st = mkaeaz(va_tmn, wap_tmn, ta_tmn, ta_tmn, ta_gmn, lev, y_l,
+                      gam_tmn, nlat, nlev)
+    ae2az_stgmn = globall_cg(ae2az_st, g_w, d_s, dims)
+    table(ae2az_stgmn, ntp, 'AZ <-> AE (stat)', logfile, flag=1)
+    ke2kz_st = mkkekz(ua_tmn, va_tmn, wap_tmn, ua_tmn, va_tmn, lev, y_l, nlat,
+                      ntp, nlev)
+    ke2kz_stgmn = globall_cg(ke2kz_st, g_w, d_s, dims)
+    table(ke2kz_stgmn, ntp, 'KZ <-> KE (stat)', logfile, flag=1)
+    list_diag = [
+        ape_tgmn, ape_stgmn, ek_tgmn, ek_stgmn, ae2az_tgmn, ae2az_stgmn,
+        a2k_tgmn, a2k_stgmn, at2as_tgmn, kt2ks_tgmn, ke2kz_tgmn, ke2kz_stgmn
+    ]
+    lec_strength = diagram(plotfile, list_diag, dims)
+    nc_f = outpath + '/ek_tmap_{}_{}.nc'.format(model, year)
+    output(e_k, d_s, filenc, 'ek', nc_f)
+    nc_f = outpath + '/ape_tmap_{}_{}.nc'.format(model, year)
+    output(ape, d_s, filenc, 'ape', nc_f)
+    nc_f = outpath + '/a2k_tmap_{}_{}.nc'.format(model, year)
+    output(a2k, d_s, filenc, 'a2k', nc_f)
+    nc_f = outpath + '/ae2az_tmap_{}_{}.nc'.format(model, year)
+    output(ae2az, d_s, filenc, 'ae2az', nc_f)
+    nc_f = outpath + '/ke2kz_tmap_{}_{}.nc'.format(model, year)
+    output(ke2kz, d_s, filenc, 'ke2kz', nc_f)
+    log.close()
+    return lec_strength
+
+
+def averages(x_c, g_w):
+    """Compute time, zonal and global mean averages of initial fields.
+
+    Arguments:
+    - x_c: the input field as (lev, lat, wave);
+    - g_w: the Gaussian weights for meridional averaging;
+    """
+    xc_ztmn = np.squeeze(np.real(x_c[:, :, 0]))
+    xc_gmn = np.nansum(xc_ztmn * g_w[np.newaxis, :], axis=1) / np.nansum(g_w)
+    return xc_ztmn, xc_gmn
+
+
+def averages_comp(fld, g_w, d_s, dims):
+    """Compute the global mean averages of reservoirs and conversion terms.
+
+    Arguments:
+    - fld: the component of the LEC (time, lev, lat, wave);
+    - g_w: the Gaussian weights for meridional averaging;
+    - d_s: the Delta sigma of the sigma levels;
+    - dims: a list containing the dimension lengths;
+    """
+    fld_tmn = np.nanmean(fld, axis=1)
+    fld_tgmn = globall_cg(fld_tmn, g_w, d_s, dims)
+    return fld_tgmn
+
+
+def bsslzr(kdim):
+    """Obtain parameters for the Gaussian coefficients.
+ + @author: Valerio Lembo + """ + ndim = 50 + p_i = math.pi + zbes = [ + 2.4048255577, 5.5200781103, 8.6537279129, 11.7915344391, 14.9309177086, + 18.0710639679, 21.2116366299, 24.3524715308, 27.4934791320, + 30.6346064684, 33.7758202136, 36.9170983537, 40.0584257646, + 43.1997917132, 46.3411883717, 49.4826098974, 52.6240518411, + 55.7655107550, 58.9069839261, 62.0484691902, 65.1899648002, + 68.3314693299, 71.4729816036, 74.6145006437, 77.7560256304, + 80.8975558711, 84.0390907769, 87.1806298436, 90.3221726372, + 93.4637187819, 96.6052679510, 99.7468198587, 102.8883742542, + 106.0299309165, 109.1714896498, 112.3130502805, 115.4546126537, + 118.5961766309, 121.7377420880, 124.8793089132, 128.0208770059, + 131.1624462752, 134.3040166383, 137.4455880203, 140.5871603528, + 143.7287335737, 146.8703076258, 150.0118824570, 153.1534580192, + 156.2950342685 + ] + pbes = np.zeros(kdim) + idim = min([kdim, ndim]) + pbes[0:idim] = zbes[0:idim] + for j in range(idim, kdim - 1, 1): + pbes[j] = pbes[j - 1] + p_i + return pbes + + +def diagram(filen, listf, dims): + """Diagram interface script. + + Call the class fluxogram, serving as + interface between the main script and the class for flux + diagrams design. + + Arguments: + - filen: the filename of the diagram flux; + - listf: a list containing the fluxes and storages; + - dims: the dimensions of the variables; + """ + ntp = int(dims[3]) + apet = listf[0] + apes = listf[1] + ekt = listf[2] + eks = listf[3] + ae2azt = listf[4] + ae2azs = listf[5] + a2kt = listf[6] + a2ks = listf[7] + at2as = listf[8] + kt2ks = listf[9] + ke2kzt = listf[10] + ke2kzs = listf[11] + apz = '{:.2f}'.format(apet[0, 0] + apes[0, 0]) + az2kz = '{:.2f}'.format(-1e5 * (a2kt[0, 0])) + az2at = '{:.2f}'.format(-1e5 * np.nansum(ae2azt[0, 1:ntp - 1])) + aps = '{:.2f}'.format(np.nansum(apes[0, 1:ntp - 1])) + as2ks = '{:.2f}'.format(1e5 * np.nansum(a2ks[0, 1:ntp - 1])) + apt = '{:.2f}'.format(np.nansum(apet[0, 1:ntp - 1])) + at2kt = '{:.2f}'.format(1e5 * np.nansum(a2kt[0, 1:ntp - 1])) + az2as = '{:.2f}'.format(-1e5 * np.nansum(ae2azs[0, 1:ntp - 1])) + as2at = '{:.2f}'.format(1e5 * np.nansum(at2as[0, 1:ntp - 1])) + azin = '{:.2f}'.format((float(az2at) + float(az2as) - float(az2kz))) + asein = '{:.2f}'.format((float(as2ks) + float(as2at) - float(az2as))) + atein = '{:.2f}'.format(float(at2kt) - float(az2at) - float(as2at)) + k_z = '{:.2f}'.format(ekt[0, 0] + eks[0, 0]) + kte = '{:.2f}'.format(np.nansum(ekt[0, 1:ntp - 1])) + kse = '{:.2f}'.format(np.nansum(eks[0, 1:ntp - 1])) + kt2kz = '{:.2f}'.format(1e5 * np.nansum(ke2kzt[0, 1:ntp - 1])) + ks2kt = '{:.2f}'.format(-1e5 * np.nansum(kt2ks[0, 1:ntp - 1])) + ks2kz = '{:.2f}'.format(1e5 * np.nansum(ke2kzs[0, 1:ntp - 1])) + kteout = '{:.2f}'.format(float(at2kt) - float(ks2kt) - float(kt2kz)) + kseout = '{:.2f}'.format(float(ks2kt) + float(as2ks) - float(ks2kz)) + kzout = '{:.2f}'.format(float(kt2kz) + float(ks2kz) - float(az2kz)) + list_lorenz = [ + azin, apz, asein, aps, atein, apt, as2ks, at2kt, kteout, kte, kseout, + kse, kzout, k_z, az2kz, az2at, az2as, as2at, kt2kz, ks2kt, ks2kz + ] + flux = fluxogram.Fluxogram(1000, 1000) + flux.add_storage("AZ", 600, 0, 0) + flux.add_storage("ASE", 600, 0.75, 0.25) + flux.add_storage("ATE", 600, 1.5, 0) + flux.add_storage("KTE", 600, 1.5, 1.5) + flux.add_storage("KSE", 600, 0.75, 1.25) + flux.add_storage("KZ", 600, 0, 1.5) + flux.add_storage("AZ+", 0, 0, -1) + flux.add_storage("ASE+", 0, 0.75, -1) + flux.add_storage("ATE+", 0, 1.5, -1) + flux.add_storage("KTE-", 0, 1.5, 2.5) + flux.add_storage("KSE-", 
0, 0.75, 2.5)
+    flux.add_storage("KZ-", 0, 0, 2.5)
+    flux.add_flux("A2KZ", flux.storages[5], flux.storages[0], 100)
+    flux.add_flux("AE2AZ", flux.storages[0], flux.storages[2], 150)
+    flux.add_flux("AE2AS", flux.storages[0], flux.storages[1], 60)
+    flux.add_flux("AE2AT", flux.storages[1], flux.storages[2], 60)
+    flux.add_flux("A2KS", flux.storages[1], flux.storages[4], 60)
+    flux.add_flux("A2KT", flux.storages[2], flux.storages[3], 100)
+    flux.add_flux("KE2KS", flux.storages[3], flux.storages[4], 60)
+    flux.add_flux("KS2KZ", flux.storages[4], flux.storages[5], 60)
+    flux.add_flux("KE2KZ", flux.storages[3], flux.storages[5], 150)
+    flux.add_flux("AZ+", flux.storages[6], flux.storages[0], 60)
+    flux.add_flux("ASE+", flux.storages[7], flux.storages[1], 60)
+    flux.add_flux("ATE+", flux.storages[8], flux.storages[2], 60)
+    flux.add_flux("KTE-", flux.storages[3], flux.storages[9], 60)
+    flux.add_flux("KSE-", flux.storages[4], flux.storages[10], 60)
+    flux.add_flux("KZ-", flux.storages[5], flux.storages[11], 60)
+    flux.draw(filen, list_lorenz)
+    lec = float(kteout) + float(kseout) + float(kzout)
+    return lec
+
+
+def gauaw(n_y):
+    """Compute the Gaussian coefficients for the Gaussian grid conversion.
+
+    Arguments:
+    - n_y: the latitude dimension;
+    """
+    c_c = (1 - (2 / math.pi)**2) / 4
+    eps = 0.00000000000001
+    k_k = n_y // 2
+    p_a = np.zeros(n_y)
+    p_a[0:k_k] = bsslzr(k_k)
+    p_w = np.zeros(n_y)
+    for i_l in range(k_k):
+        x_z = np.cos(p_a[i_l] / math.sqrt((n_y + 0.5)**2 + c_c))
+        iterr = 0.
+        zsp = 1.0
+        while (abs(zsp) > eps and iterr <= 10):
+            pkm1 = x_z
+            pkm2 = 1.0
+            for n_n in range(2, n_y, 1):
+                p_k = ((n_n * 2 - 1.0) * x_z * pkm1 -
+                       (n_n - 1.0) * pkm2) / n_n
+                pkm2 = pkm1
+                pkm1 = p_k
+            pkm1 = pkm2
+            pkmrk = (n_y * (pkm1 - x_z * p_k)) / (1.0 - x_z**2)
+            zsp = p_k / pkmrk
+            x_z = x_z - zsp
+            iterr = iterr + 1
+        if iterr > 15:
+            sys.exit("*** no convergence in gauaw ***")
+        p_a[i_l] = x_z
+        p_w[i_l] = (2.0 * (1.0 - x_z**2)) / ((n_y**2) * (pkm1**2))
+        p_a[n_y - 1 - i_l] = -p_a[i_l]
+        p_w[n_y - 1 - i_l] = p_w[i_l]
+    psi = p_a
+    pgw = p_w
+    return psi, pgw
+
+
+def globall_cg(d3v, g_w, d_s, dims):
+    """Compute the global and hemispheric averages.
+
+    Arguments:
+    - d3v: the 3D dataset to be averaged;
+    - g_w: the Gaussian weights;
+    - d_s: the vertical levels;
+    - dims: a list containing the sizes of the dimensions;
+    """
+    nlev = int(dims[0])
+    nlat = int(dims[2])
+    ntp = int(dims[3])
+    gmn = np.zeros([3, ntp - 1])
+    aux1 = np.zeros([nlev, int(nlat / 2), ntp - 1])
+    aux2 = np.zeros([nlev, int(nlat / 2), ntp - 1])
+    aux1v = np.zeros([nlev, ntp - 1])
+    aux2v = np.zeros([nlev, ntp - 1])
+    nhem = int(nlat / 2)
+    fac = 1 / G * PS / 1e5
+    for l_l in range(nlev):
+        for i_h in range(nhem):
+            # rows 0..nhem-1 and nhem..nlat-1 are the two hemispheres
+            aux1[l_l, i_h, :] = fac * np.real(d3v[l_l, i_h, :]) * g_w[i_h]
+            aux2[l_l, i_h, :] = (fac * np.real(d3v[l_l, i_h + nhem, :]) *
+                                 g_w[i_h + nhem])
+        aux1v[l_l, :] = (np.nansum(aux1[l_l, :, :], axis=0) / np.nansum(
+            g_w[0:nhem]) * d_s[l_l])
+        aux2v[l_l, :] = (np.nansum(aux2[l_l, :, :], axis=0) / np.nansum(
+            g_w[0:nhem]) * d_s[l_l])
+    gmn[1, :] = (np.nansum(aux1v, axis=0) / np.nansum(d_s))
+    gmn[2, :] = (np.nansum(aux2v, axis=0) / np.nansum(d_s))
+    gmn[0, :] = 0.5 * (gmn[1, :] + gmn[2, :])
+    return gmn
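init() below relies on the convention, stated in its docstring, that the wave axis interleaves real and imaginary Fourier coefficients. A minimal sketch of that unpacking on a synthetic array (the last axis holds [Re0, Im0, Re1, Im1, ...]):

    import numpy as np

    nfc = 8  # 4 complex waves stored as real/imaginary pairs
    arr = np.arange(2.0 * 3 * nfc).reshape(2, 3, nfc)
    arr_c = arr[..., 0::2] + 1j * arr[..., 1::2]
    assert arr_c.shape == (2, 3, nfc // 2)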
+
+
+def init(logfile, filep):
+    """Ingest input fields as complex fields and initialise tables.
+
+    Receive fields t,u,v,w as input fields in Fourier
+    coefficients (time,level,wave,lon), with the real parts stored as the
+    even and the imaginary parts as the odd indices along the wave
+    dimension. Convert them to complex fields for Python.
+
+    Arguments:
+    - logfile: name of the file containing the table as a .txt file;
+    - filep: name of the file containing the input fields.
+    """
+    with open(logfile, 'w') as log:
+        log.write('########################################################\n')
+        log.write('#                                                      #\n')
+        log.write('#             LORENZ     ENERGY    CYCLE               #\n')
+        log.write('#                                                      #\n')
+        log.write('########################################################\n')
+    with Dataset(filep) as dataset0:
+        t_a = dataset0.variables['ta'][:, :, :, :]
+        u_a = dataset0.variables['ua'][:, :, :, :]
+        v_a = dataset0.variables['va'][:, :, :, :]
+        wap = dataset0.variables['wap'][:, :, :, :]
+        lev = dataset0.variables['plev'][:]
+        time = dataset0.variables['time'][:]
+        lat = dataset0.variables['lat'][:]
+    nfc = np.shape(t_a)[3]
+    nlev = len(lev)
+    ntime = len(time)
+    nlat = len(lat)
+    ntp = nfc // 2 + 1
+    dims = [nlev, ntime, nlat, ntp]
+    if max(lev) < 1000:
+        lev = lev * 100
+        wap = wap * 100
+    t_a = np.transpose(t_a, (1, 0, 2, 3))
+    ta_r = t_a[:, :, :, 0::2]
+    ta_i = t_a[:, :, :, 1::2]
+    u_a = np.transpose(u_a, (1, 0, 2, 3))
+    ua_r = u_a[:, :, :, 0::2]
+    ua_i = u_a[:, :, :, 1::2]
+    v_a = np.transpose(v_a, (1, 0, 2, 3))
+    va_r = v_a[:, :, :, 0::2]
+    va_i = v_a[:, :, :, 1::2]
+    wap = np.transpose(wap, (1, 0, 2, 3))
+    wap_r = wap[:, :, :, 0::2]
+    wap_i = wap[:, :, :, 1::2]
+    ta_c = ta_r + 1j * ta_i
+    ua_c = ua_r + 1j * ua_i
+    va_c = va_r + 1j * va_i
+    wap_c = wap_r + 1j * wap_i
+    # Append to the header written above ('w' would truncate the file)
+    with open(logfile, 'a') as log:
+        log.write(' \n')
+        log.write(' \n')
+        log.write('INPUT DATA:\n')
+        log.write('-----------\n')
+        log.write(' \n')
+        log.write('SPECTRAL RESOLUTION : {}\n'.format(nfc))
+        log.write('NUMBER OF LATITUDES : {}\n'.format(nlat))
+        log.write('NUMBER OF LEVELS : {}\n'.format(nlev))
+        log.write('LEVELS : {} Pa\n'.format(lev))
+        log.write(' \n')
+        log.write('WAVES:\n')
+        log.write(' \n')
+        log.write('(1) : 1 - {}\n'.format(NW_1))
+        log.write('(2) : {} - {}\n'.format(NW_1, NW_2))
+        log.write('(3) : {} - {}\n'.format(NW_2, NW_3))
+        log.write(' \n')
+        log.write('GLOBAL DIAGNOSTIC: \n')
+        log.write(' \n')
+        log.write('                 I GLOBAL I NORTH I SOUTH I\n')
+        log.write('------------------------------------------------------\n')
+    return ta_c, ua_c, va_c, wap_c, dims, lev, lat, log
+
+
+def makek(u_t, v_t):
+    """Compute the kinetic energy reservoirs from u and v.
+
+    Arguments:
+    - u_t: a 3D zonal velocity field;
+    - v_t: a 3D meridional velocity field;
+    """
+    ck1 = u_t * np.conj(u_t)
+    ck2 = v_t * np.conj(v_t)
+    e_k = np.real(ck1 + ck2)
+    e_k[:, :, 0] = 0.5 * np.real(u_t[:, :, 0] * u_t[:, :, 0] +
+                                 v_t[:, :, 0] * v_t[:, :, 0])
+    return e_k
+
+
+def makea(t_t, t_g, gam):
+    """Compute the available potential energy reservoirs from t.
+
+    Arguments:
+    - t_t: a 3D temperature field;
+    - t_g: a temperature vertical profile;
+    - gam: a vertical profile of the stability parameter;
+    """
+    ape = gam[:, np.newaxis, np.newaxis] * np.real(t_t * np.conj(t_t))
+    ape[:, :, 0] = (gam[:, np.newaxis] * 0.5 * np.real(
+        (t_t[:, :, 0] - t_g[:, np.newaxis]) *
+        (t_t[:, :, 0] - t_g[:, np.newaxis])))
+    return ape
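As a quick sanity check of the reservoir formulas: the eddy kinetic energy per wavenumber is Re(u u* + v v*), with the wave-0 (zonal mean) term halved, exactly as makek computes above. A toy example with synthetic coefficients:

    import numpy as np

    u_t = np.array([[[1.0 + 0.0j, 2.0 + 1.0j]]])  # (lev, lat, wave)
    v_t = np.zeros_like(u_t)
    e_k = np.real(u_t * np.conj(u_t) + v_t * np.conj(v_t))
    e_k[:, :, 0] = 0.5 * np.real(u_t[:, :, 0] * u_t[:, :, 0] +
                                 v_t[:, :, 0] * v_t[:, :, 0])
    print(e_k)  # [[[0.5 5. ]]]: 0.5 for the zonal mean, |2+i|^2 = 5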
+
+
+def mka2k(wap, t_t, w_g, t_g, p_l):
+    """Compute the KE to APE energy conversions from t and w.
+
+    Arguments:
+    - wap: a 3D vertical velocity field;
+    - t_t: a 3D temperature field;
+    - w_g: a vertical velocity vertical profile;
+    - t_g: a temperature vertical profile;
+    - p_l: the pressure levels;
+    """
+    a2k = -(R / p_l[:, np.newaxis, np.newaxis] *
+            (t_t * np.conj(wap) + np.conj(t_t) * wap))
+    a2k[:, :, 0] = -(R / p_l[:, np.newaxis] *
+                     (t_t[:, :, 0] - t_g[:, np.newaxis]) *
+                     (wap[:, :, 0] - w_g[:, np.newaxis]))
+    return a2k
+
+
+def mkaeaz(v_t, wap, t_t, ttt, ttg, p_l, lat, gam, nlat, nlev):
+    """Compute the zonal mean - eddy APE conversions from t and v.
+
+    Arguments:
+    - v_t: a 3D meridional velocity field;
+    - wap: a 3D vertical velocity field;
+    - t_t: a 3D temperature field;
+    - ttt: a climatological mean 3D temperature field;
+    - ttg: a climatological mean temperature vertical profile;
+    - p_l: the pressure levels;
+    - lat: the latitudinal dimension;
+    - gam: a vertical profile of the stability parameter;
+    - nlat: the number of latitudes;
+    - nlev: the number of levels;
+    """
+    dtdp = np.zeros([nlev, nlat])
+    dtdy = np.zeros([nlev, nlat])
+    for l_l in np.arange(nlev):
+        if l_l == 0:
+            t_1 = np.real(ttt[l_l, :, 0]) - ttg[l_l]
+            t_2 = np.real(ttt[l_l + 1, :, 0]) - ttg[l_l + 1]
+            dtdp[l_l, :] = (t_2 - t_1) / (p_l[l_l + 1] - p_l[l_l])
+        elif l_l == nlev - 1:
+            t_1 = np.real(ttt[l_l - 1, :, 0]) - ttg[l_l - 1]
+            t_2 = np.real(ttt[l_l, :, 0]) - ttg[l_l]
+            dtdp[l_l, :] = (t_2 - t_1) / (p_l[l_l] - p_l[l_l - 1])
+        else:
+            t_1 = np.real(ttt[l_l, :, 0]) - ttg[l_l]
+            t_2 = np.real(ttt[l_l + 1, :, 0]) - ttg[l_l + 1]
+            dtdp1 = (t_2 - t_1) / (p_l[l_l + 1] - p_l[l_l])
+            t_2 = t_1
+            t_1 = np.real(ttt[l_l - 1, :, 0]) - ttg[l_l - 1]
+            dtdp2 = (t_2 - t_1) / (p_l[l_l] - p_l[l_l - 1])
+            dtdp[l_l, :] = (
+                (dtdp1 * (p_l[l_l] - p_l[l_l - 1]) + dtdp2 *
+                 (p_l[l_l + 1] - p_l[l_l])) / (p_l[l_l + 1] - p_l[l_l - 1]))
+        dtdp[l_l, :] = dtdp[l_l, :] - (R / (CP * p_l[l_l]) *
+                                       (ttt[l_l, :, 0] - ttg[l_l]))
+    for i_l in np.arange(nlat):
+        if i_l == 0:
+            t_1 = np.real(ttt[:, i_l, 0])
+            t_2 = np.real(ttt[:, i_l + 1, 0])
+            dtdy[:, i_l] = (t_2 - t_1) / (lat[i_l + 1] - lat[i_l])
+        elif i_l == nlat - 1:
+            t_1 = np.real(ttt[:, i_l - 1, 0])
+            t_2 = np.real(ttt[:, i_l, 0])
+            dtdy[:, i_l] = (t_2 - t_1) / (lat[i_l] - lat[i_l - 1])
+        else:
+            t_1 = np.real(ttt[:, i_l - 1, 0])
+            t_2 = np.real(ttt[:, i_l + 1, 0])
+            dtdy[:, i_l] = (t_2 - t_1) / (lat[i_l + 1] - lat[i_l - 1])
+    dtdy = dtdy / AA
+    c_1 = np.real(v_t * np.conj(t_t) + t_t * np.conj(v_t))
+    c_2 = np.real(wap * np.conj(t_t) + t_t * np.conj(wap))
+    ae2az = (gam[:, np.newaxis, np.newaxis] *
+             (dtdy[:, :, np.newaxis] * c_1 + dtdp[:, :, np.newaxis] * c_2))
+    ae2az[:, :, 0] = 0.
+    return ae2az
+
+
+def mkkekz(u_t, v_t, wap, utt, vtt, p_l, lat, nlat, ntp, nlev):
+    """Compute the zonal mean - eddy KE conversions from u and v.
+ + Arguments: + - u_t: a 3D zonal velocity field; + - v_t: a 3D meridional velocity field; + - wap: a 3D vertical velocity field; + - utt: a climatological mean 3D zonal velocity field; + - vtt: a climatological mean 3D meridional velocity field; + - p_l: the pressure levels; + - lat: the latitude dimension; + - nlat: the number of latitudes; + - ntp: the number of wavenumbers; + - nlev: the number of vertical levels; + """ + dudp = np.zeros([nlev, nlat]) + dvdp = np.zeros([nlev, nlat]) + dudy = np.zeros([nlev, nlat]) + dvdy = np.zeros([nlev, nlat]) + for l_l in np.arange(nlev): + if l_l == 0: + dudp[l_l, :] = ((np.real(utt[l_l + 1, :, 0] - utt[l_l, :, 0])) / + (p_l[l_l + 1] - p_l[l_l])) + dvdp[l_l, :] = ((np.real(vtt[l_l + 1, :, 0] - vtt[l_l, :, 0])) / + (p_l[l_l + 1] - p_l[l_l])) + elif l_l == nlev - 1: + dudp[l_l, :] = ((np.real(utt[l_l, :, 0] - utt[l_l - 1, :, 0])) / + (p_l[l_l] - p_l[l_l - 1])) + dvdp[l_l, :] = ((np.real(vtt[l_l, :, 0] - vtt[l_l - 1, :, 0])) / + (p_l[l_l] - p_l[l_l - 1])) + else: + dudp1 = ((np.real(utt[l_l + 1, :, 0] - utt[l_l, :, 0])) / + (p_l[l_l + 1] - p_l[l_l])) + dvdp1 = ((np.real(vtt[l_l + 1, :, 0] - vtt[l_l, :, 0])) / + (p_l[l_l + 1] - p_l[l_l])) + dudp2 = ((np.real(utt[l_l, :, 0] - utt[l_l - 1, :, 0])) / + (p_l[l_l] - p_l[l_l - 1])) + dvdp2 = ((np.real(vtt[l_l, :, 0] - vtt[l_l - 1, :, 0])) / + (p_l[l_l] - p_l[l_l - 1])) + dudp[l_l, :] = ( + (dudp1 * (p_l[l_l] - p_l[l_l - 1]) + dudp2 * + (p_l[l_l + 1] - p_l[l_l])) / (p_l[l_l + 1] - p_l[l_l - 1])) + dvdp[l_l, :] = ( + (dvdp1 * (p_l[l_l] - p_l[l_l - 1]) + dvdp2 * + (p_l[l_l + 1] - p_l[l_l])) / (p_l[l_l + 1] - p_l[l_l - 1])) + for i_l in np.arange(nlat): + if i_l == 0: + dudy[:, i_l] = ((np.real(utt[:, i_l + 1, 0] - utt[:, i_l, 0])) / + (lat[i_l + 1] - lat[i_l])) + dvdy[:, i_l] = ((np.real(vtt[:, i_l + 1, 0] - vtt[:, i_l, 0])) / + (lat[i_l + 1] - lat[i_l])) + elif i_l == nlat - 1: + dudy[:, i_l] = ((np.real(utt[:, i_l, 0] - utt[:, i_l - 1, 0])) / + (lat[i_l] - lat[i_l - 1])) + dvdy[:, i_l] = ((np.real(vtt[:, i_l, 0] - vtt[:, i_l - 1, 0])) / + (lat[i_l] - lat[i_l - 1])) + else: + dudy[:, i_l] = ((np.real(utt[:, i_l + 1, 0] - utt[:, i_l - 1, 0])) + / (lat[i_l + 1] - lat[i_l - 1])) + dvdy[:, i_l] = ((np.real(vtt[:, i_l + 1, 0] - vtt[:, i_l - 1, 0])) + / (lat[i_l + 1] - lat[i_l - 1])) + dudy = dudy / AA + dvdy = dvdy / AA + c_1 = np.zeros([nlev, nlat, ntp - 1]) + c_2 = np.zeros([nlev, nlat, ntp - 1]) + c_3 = np.zeros([nlev, nlat, ntp - 1]) + c_4 = np.zeros([nlev, nlat, ntp - 1]) + c_5 = np.zeros([nlev, nlat, ntp - 1]) + c_6 = np.zeros([nlev, nlat, ntp - 1]) + u_u = u_t * np.conj(u_t) + u_t * np.conj(u_t) + u_v = u_t * np.conj(v_t) + v_t * np.conj(u_t) + v_v = v_t * np.conj(v_t) + v_t * np.conj(v_t) + u_w = u_t * np.conj(wap) + wap * np.conj(u_t) + v_w = v_t * np.conj(wap) + wap * np.conj(v_t) + for i_l in np.arange(nlat): + c_1[:, i_l, :] = dudy[:, i_l][:, np.newaxis] * u_v[:, i_l, :] + c_2[:, i_l, :] = dvdy[:, i_l][:, np.newaxis] * v_v[:, i_l, :] + c_5[:, i_l, :] = (np.tan(lat[i_l]) / AA * np.real( + utt[:, i_l, 0])[:, np.newaxis] * (u_v[:, i_l, :])) + c_6[:, i_l, :] = -(np.tan(lat[i_l]) / AA * np.real( + vtt[:, i_l, 0])[:, np.newaxis] * (u_u[:, i_l, :])) + for l_l in np.arange(nlev): + c_3[l_l, :, :] = dudp[l_l, :][:, np.newaxis] * u_w[l_l, :, :] + c_4[l_l, :, :] = dvdp[l_l, :][:, np.newaxis] * v_w[l_l, :, :] + ke2kz = (c_1 + c_2 + c_3 + c_4 + c_5 + c_6) + ke2kz[:, :, 0] = 0. + return ke2kz + + +def mkatas(u_t, v_t, wap, t_t, ttt, g_w, p_l, lat, nlat, ntp, nlev): + """Compute the stat.-trans. 
eddy APE conversions from u, v, wap and t.
+
+    Arguments:
+    - u_t: a 3D zonal velocity field;
+    - v_t: a 3D meridional velocity field;
+    - wap: a 3D vertical velocity field;
+    - t_t: a 3D temperature field;
+    - ttt: the zonal-time mean temperature (lev, lat);
+    - g_w: the zonal-time mean stability parameter (lev, lat), as passed
+      by lorenz;
+    - p_l: the pressure levels;
+    - lat: the latitude dimension;
+    - nlat: the number of latitudes;
+    - ntp: the number of wavenumbers;
+    - nlev: the number of vertical levels;
+    """
+    t_r = np.fft.ifft(t_t, axis=2)
+    u_r = np.fft.ifft(u_t, axis=2)
+    v_r = np.fft.ifft(v_t, axis=2)
+    w_r = np.fft.ifft(wap, axis=2)
+    tur = t_r * u_r
+    tvr = t_r * v_r
+    twr = t_r * w_r
+    t_u = np.fft.fft(tur, axis=2)
+    t_v = np.fft.fft(tvr, axis=2)
+    t_w = np.fft.fft(twr, axis=2)
+    c_1 = (t_u * np.conj(ttt[:, :, np.newaxis]) -
+           ttt[:, :, np.newaxis] * np.conj(t_u))
+    c_6 = (t_w * np.conj(ttt[:, :, np.newaxis]) -
+           ttt[:, :, np.newaxis] * np.conj(t_w))
+    c_2 = np.zeros([nlev, nlat, ntp - 1])
+    c_3 = np.zeros([nlev, nlat, ntp - 1])
+    c_5 = np.zeros([nlev, nlat, ntp - 1])
+    for i_l in range(nlat):
+        if i_l == 0:
+            c_2[:, i_l, :] = (
+                t_v[:, i_l, :] / (AA * (lat[i_l + 1] - lat[i_l])) *
+                np.conj(ttt[:, i_l + 1, np.newaxis] - ttt[:, i_l, np.newaxis]))
+            c_3[:, i_l, :] = (
+                np.conj(t_v[:, i_l, :]) / (AA * (lat[i_l + 1] - lat[i_l])) *
+                (ttt[:, i_l + 1, np.newaxis] - ttt[:, i_l, np.newaxis]))
+        elif i_l == nlat - 1:
+            c_2[:, i_l, :] = (
+                t_v[:, i_l, :] / (AA * (lat[i_l] - lat[i_l - 1])) *
+                np.conj(ttt[:, i_l, np.newaxis] - ttt[:, i_l - 1, np.newaxis]))
+            c_3[:, i_l, :] = (
+                np.conj(t_v[:, i_l, :]) / (AA * (lat[i_l] - lat[i_l - 1])) *
+                (ttt[:, i_l, np.newaxis] - ttt[:, i_l - 1, np.newaxis]))
+        else:
+            c_2[:, i_l, :] = (
+                t_v[:, i_l, :] / (AA * (lat[i_l + 1] - lat[i_l - 1])) *
+                np.conj(ttt[:, i_l + 1, np.newaxis] -
+                        ttt[:, i_l - 1, np.newaxis]))
+            c_3[:, i_l, :] = (
+                np.conj(t_v[:, i_l, :]) / (AA * (lat[i_l + 1] - lat[i_l - 1]))
+                * (ttt[:, i_l + 1, np.newaxis] - ttt[:, i_l - 1, np.newaxis]))
+    for l_l in range(nlev):
+        if l_l == 0:
+            c_5[l_l, :, :] = (
+                (ttt[l_l + 1, :, np.newaxis] - ttt[l_l, :, np.newaxis]) /
+                (p_l[l_l + 1] - p_l[l_l]))
+        elif l_l == nlev - 1:
+            c_5[l_l, :, :] = (
+                (ttt[l_l, :, np.newaxis] - ttt[l_l - 1, :, np.newaxis]) /
+                (p_l[l_l] - p_l[l_l - 1]))
+        else:
+            c51 = ((ttt[l_l + 1, :, np.newaxis] - ttt[l_l, :, np.newaxis]) /
+                   (p_l[l_l + 1] - p_l[l_l]))
+            c52 = ((ttt[l_l, :, np.newaxis] - ttt[l_l - 1, :, np.newaxis]) /
+                   (p_l[l_l] - p_l[l_l - 1]))
+            c_5[l_l, :, :] = (
+                (c51 * (p_l[l_l] - p_l[l_l - 1]) + c52 *
+                 (p_l[l_l + 1] - p_l[l_l])) / (p_l[l_l + 1] - p_l[l_l - 1]))
+    k_k = np.arange(0, ntp - 1)
+    at2as = (
+        ((k_k - 1)[np.newaxis, np.newaxis, :] * np.imag(c_1) /
+         (AA * np.cos(lat[np.newaxis, :, np.newaxis])) +
+         np.real(t_w * np.conj(c_5) + np.conj(t_w) * c_5) + np.real(c_2 + c_3)
+         + R / (CP * p_l[:, np.newaxis, np.newaxis]) * np.real(c_6)) *
+        g_w[:, :, np.newaxis])
+    at2as[:, :, 0] = 0.
+    return at2as
+
+
+def mkktks(u_t, v_t, utt, vtt, lat, nlat, ntp, nlev):
+    """Compute the stat.-trans. eddy KE conversions from u and v.
+ + Arguments: + - u_t: a 3D zonal velocity field; + - v_t: a 3D meridional velocity field; + - utt: a climatological mean 3D zonal velocity field; + - vtt: a climatological mean 3D meridional velocity field; + - lat: the latitude dimension; + - nlat: the number of latitudes; + - ntp: the number of wavenumbers; + - nlev: the number of vertical levels; + """ + dut = np.zeros([nlev, nlat, ntp - 1]) + dvt = np.zeros([nlev, nlat, ntp - 1]) + dlat = np.zeros([nlat]) + u_r = np.fft.irfft(u_t, axis=2) + v_r = np.fft.irfft(v_t, axis=2) + uur = u_r * u_r + uvr = u_r * v_r + vvr = v_r * v_r + u_u = np.fft.rfft(uur, axis=2) + v_v = np.fft.rfft(vvr, axis=2) + u_v = np.fft.rfft(uvr, axis=2) + c_1 = u_u * np.conj(u_t) - u_t * np.conj(u_u) + # c_3 = u_v * np.conj(u_t) + u_t * np.conj(u_v) + c_5 = u_u * np.conj(v_t) + v_t * np.conj(u_u) + c_6 = u_v * np.conj(v_t) - v_t * np.conj(u_v) + for i_l in range(nlat): + if i_l == 0: + dut[:, i_l, :] = (utt[:, i_l + 1, :] - utt[:, i_l, :]) + dvt[:, i_l, :] = (vtt[:, i_l + 1, :] - vtt[:, i_l, :]) + dlat[i_l] = (lat[i_l + 1] - lat[i_l]) + elif i_l == nlat - 1: + dut[:, i_l, :] = (utt[:, i_l, :] - utt[:, i_l - 1, :]) + dvt[:, i_l, :] = (vtt[:, i_l, :] - vtt[:, i_l - 1, :]) + dlat[i_l] = (lat[i_l] - lat[i_l - 1]) + else: + dut[:, i_l, :] = (utt[:, i_l + 1, :] - utt[:, i_l - 1, :]) + dvt[:, i_l, :] = (vtt[:, i_l + 1, :] - vtt[:, i_l - 1, :]) + dlat[i_l] = (lat[i_l + 1] - lat[i_l - 1]) + c21 = np.conj(u_u) * dut / dlat[np.newaxis, :, np.newaxis] + c22 = u_u * np.conj(dut) / dlat[np.newaxis, :, np.newaxis] + c41 = np.conj(v_v) * dvt / dlat[np.newaxis, :, np.newaxis] + c42 = v_v * np.conj(dvt) / dlat[np.newaxis, :, np.newaxis] + k_k = np.arange(0, ntp - 1) + kt2ks = (np.real(c21 + c22 + c41 + c42) / AA + + np.tan(lat)[np.newaxis, :, np.newaxis] * np.real(c_1 - c_5) / AA + + np.imag(c_1 + c_6) * (k_k - 1)[np.newaxis, np.newaxis, :] / + (AA * np.cos(lat)[np.newaxis, :, np.newaxis])) + kt2ks[:, :, 0] = 0 + return kt2ks + + +def output(fld, d_s, filenc, name, nc_f): + """Compute vertical integrals and print (time,lat,ntp) to NC output. + + Arguments: + - fld: the annual mean fields (lev, lat, wave); + - d_s: Delta sigma; + - filenc: the input file containing the Fourier coefficients of t,u,v,w; + - name: the variable name; + - nc_f: the name of the output file (with path) + """ + fld_tmn = np.nanmean(fld, axis=1) + fld_aux = fld_tmn * d_s[:, np.newaxis, np.newaxis] + fld_vmn = np.nansum(fld_aux, axis=0) / np.nansum(d_s) + removeif(nc_f) + pr_output(fld_vmn, name, filenc, nc_f) + + +def pr_output(varo, varname, filep, nc_f): + """Print outputs to NetCDF. + + Save fields to NetCDF, retrieving information from an existing + NetCDF file. Metadata are transferred from the existing file to the + new one. + Arguments: + - varo: the field to be stored; + - varname: the name of the variables to be saved; + - filep: the existing dataset, containing the metadata; + - nc_f: the name of the output file; + + PROGRAMMER(S) + Chris Slocum (2014), modified by Valerio Lembo (2018). 
+ """ + fourc = fourier_coefficients + with Dataset(nc_f, 'w', format='NETCDF4') as w_nc_fid: + w_nc_fid.description = "Outputs of LEC program" + with Dataset(filep, 'r') as nc_fid: + # Extract data from NetCDF file + wave = nc_fid.variables['wave'][:] + ntp = int(len(wave) / 2) + # Writing NetCDF files + fourc.extr_lat(nc_fid, w_nc_fid, 'lat') + w_nc_fid.createDimension('wave', ntp) + w_nc_dim = w_nc_fid.createVariable( + 'wave', nc_fid.variables['wave'].dtype, ('wave', )) + for ncattr in nc_fid.variables['wave'].ncattrs(): + w_nc_dim.setncattr(ncattr, + nc_fid.variables['wave'].getncattr(ncattr)) + w_nc_fid.variables['wave'][:] = wave[0:ntp] + w_nc_var = w_nc_fid.createVariable(varname, 'f8', ('lat', 'wave')) + varatts(w_nc_var, varname, 1, 0) + w_nc_fid.variables[varname][:] = varo + + +def removeif(filename): + """Remove filename if it exists.""" + try: + os.remove(filename) + except OSError: + pass + + +def preproc_lec(model, wdir, pdir, filelist): + """Preprocess fields for LEC computations and send it to lorenz program. + + This function computes the interpolation of ta, ua, va, wap daily fields to + fill gaps using near-surface data, then computes the Fourier coefficients + and performs the LEC computations. For every year, (lev,lat,wave) fields, + global and hemispheric time series of each conversion and reservoir term + of the LEC is provided. + + Arguments: + - model: the model name; + - wdir: the working directory where the outputs are stored; + - pdir: a new directory is created as a sub-directory of the plot directory + to store tables of conversion/reservoir terms and the flux diagram for + year; + - filelist: a list of file names containing the input fields; + """ + cdo = Cdo() + fourc = fourier_coefficients + ta_file = filelist[13] + tas_file = filelist[14] + ua_file = filelist[16] + uas_file = filelist[17] + va_file = filelist[18] + vas_file = filelist[19] + wap_file = filelist[20] + ldir = os.path.join(pdir, 'LEC_results') + os.makedirs(ldir) + maskorog = wdir + '/orog.nc' + ua_file_mask = wdir + '/ua_fill.nc' + va_file_mask = wdir + '/va_fill.nc' + energy3_file = wdir + '/energy_short.nc' + cdo.setmisstoc( + '0', + input='-setmisstoc,1 -sub {0} {0}'.format(ua_file), + options='-b F32', + output=maskorog) + cdo.add( + input=('-setmisstoc,0 -selvar,ua {} ' + '-setmisstoc,0 -mul {} -selvar,ua {}').format( + ua_file, uas_file, maskorog), + options='-b F32', + output=ua_file_mask) + cdo.add( + input=('-setmisstoc,0 -selvar,va {} ' + '-setmisstoc,0 -mul {} -selvar,ua {}').format( + va_file, vas_file, maskorog), + options='-b F32', + output=va_file_mask) + cdo.setmisstoc( + '0', + input=('-invertlat -sellevel,10000/90000 ' + '-merge {} {} {} {}').format(ta_file, ua_file_mask, + va_file_mask, wap_file), + options='-b F32', + output=energy3_file) + yrs = cdo.showyear(input=energy3_file) + yrs = str(yrs) + yrs2 = yrs.split() + y_i = 0 + lect = np.zeros(len(yrs2)) + for y_r in yrs2: + y_rl = [y_n for y_n in y_r] + y_ro = '' + for e_l in y_rl: + e_l = str(e_l) + if e_l.isdigit() is True: + y_ro += e_l + # print(filter(str.isdigit, str(y_r))) + enfile_yr = wdir + '/inputen.nc' + tasfile_yr = wdir + '/tas_yr.nc' + tadiag_file = wdir + '/ta_filled.nc' + ncfile = wdir + '/fourier_coeff.nc' + cdo.selyear( + y_ro, input=energy3_file, options='-b F32', output=enfile_yr) + cdo.selyear(y_ro, input=tas_file, options='-b F32', output=tasfile_yr) + fourc.fourier_coeff(tadiag_file, ncfile, enfile_yr, tasfile_yr) + diagfile = (ldir + '/{}_{}_lec_diagram.png'.format(model, y_ro)) + logfile = 
(ldir + '/{}_{}_lec_table.txt'.format(model, y_ro))
+        lect[y_i] = lorenz(wdir, model, y_ro, ncfile, diagfile, logfile)
+        y_i = y_i + 1
+        os.remove(enfile_yr)
+        os.remove(tasfile_yr)
+        os.remove(tadiag_file)
+        os.remove(ncfile)
+    os.remove(maskorog)
+    os.remove(ua_file_mask)
+    os.remove(va_file_mask)
+    os.remove(energy3_file)
+    return lect
+
+
+def stabil(ta_gmn, p_l, nlev):
+    """Compute the stability parameter from temp. and pressure levels.
+
+    Arguments:
+    - ta_gmn: a temperature vertical profile;
+    - p_l: the vertical levels;
+    - nlev: the number of vertical levels;
+    """
+    cpdr = CP / R
+    t_g = ta_gmn
+    g_s = np.zeros(nlev)
+    for i_l in range(nlev):
+        if i_l == 0:
+            dtdp = (t_g[i_l + 1] - t_g[i_l]) / (p_l[i_l + 1] - p_l[i_l])
+        elif i_l == nlev - 1:
+            dtdp = (t_g[i_l] - t_g[i_l - 1]) / (p_l[i_l] - p_l[i_l - 1])
+        else:
+            dtdp1 = (t_g[i_l + 1] - t_g[i_l]) / (p_l[i_l + 1] - p_l[i_l])
+            dtdp2 = (t_g[i_l] - t_g[i_l - 1]) / (p_l[i_l] - p_l[i_l - 1])
+            dtdp = (
+                (dtdp1 * (p_l[i_l] - p_l[i_l - 1]) + dtdp2 *
+                 (p_l[i_l + 1] - p_l[i_l])) / (p_l[i_l + 1] - p_l[i_l - 1]))
+        g_s[i_l] = CP / (t_g[i_l] - p_l[i_l] * dtdp * cpdr)
+    return g_s
+
+
+def table(varin, ntp, name, logfile, flag):
+    """Write global and hem. storage terms to .txt table.
+
+    Arguments:
+    - varin: the variable to be printed out;
+    - ntp: the number of wavenumbers;
+    - name: the name of the variable to be printed out;
+    - logfile: the filename of the .txt where the variable is printed out;
+    - flag: 1 for conversion terms (scaled by 1e5 before printing),
+      0 for reservoir terms;
+    """
+    if flag == 1:
+        fac = 1e5
+        varin = fac * varin
+    varzon = varin[:, 0]
+    vared = np.nansum(varin[:, 1:ntp - 1], axis=1)
+    vared1 = np.nansum(varin[:, 1:NW_1 - 1], axis=1)
+    vared2 = np.nansum(varin[:, NW_1:NW_2 - 1], axis=1)
+    vared3 = np.nansum(varin[:, NW_2:NW_3 - 1], axis=1)
+    vared_tog = [vared, vared1, vared2, vared3]
+    write_to_tab(logfile, name, vared_tog, varzon)
+
+
+def varatts(w_nc_var, varname, tres, vres):
+    """Add attributes to the variables, depending on name and time res.
+
+    Arguments:
+    - w_nc_var: a variable object;
+    - varname: the name of the variable, among a, ek, a2k and k;
+    - tres: the time resolution (daily or annual);
+    - vres: the vertical resolution (pressure levels or vert. integr.).
+
+    @author: Chris Slocum (2014), modified by Valerio Lembo (2018).
+    """
+    if tres == 0:
+        tatt = "Daily\nM"
+    elif tres == 1:
+        tatt = "Annual mean\nM"
+    if vres == 0:
+        vatt = "Pressure levels\n"
+    elif vres == 1:
+        vatt = "Vertically integrated\n"
+    if varname == 'a':
+        w_nc_var.setncatts({
+            'long_name': "Available Potential Energy",
+            'units': "W m-2",
+            'level_desc': vatt,
+            'var_desc': "APE -> KE",
+            'statistic': tatt
+        })
+    elif varname == 'ek':
+        w_nc_var.setncatts({
+            'long_name': "Kinetic Energy",
+            'units': "W m-2",
+            'level_desc': vatt,
+            'var_desc': "APE -> KE",
+            'statistic': tatt
+        })
+    elif varname == 'a2k':
+        w_nc_var.setncatts({
+            'long_name': "Conversion between APE and KE",
+            'units': "W m-2",
+            'level_desc': vatt,
+            'var_desc': "APE <-> KE",
+            'statistic': tatt
+        })
+    elif varname == 'k':
+        w_nc_var.setncatts({
+            'long_name': "Kinetic Energy",
+            'units': "W m-2",
+            'level_desc': vatt,
+            'var_desc': "APE -> KE",
+            'statistic': tatt
+        })
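weights() below discretises the vertical integral with Delta-sigma increments that, by construction, partition the whole column. A toy check on a coarse set of pressure levels (synthetic values; PS as defined at the top of this module):

    import numpy as np

    PS = 101100.0
    lev = np.array([10000., 50000., 90000.])  # toy pressure levels (Pa)
    sig = lev / PS
    d_s = np.zeros(3)
    d_s[1] = 0.5 * abs(sig[2] - sig[0])
    d_s[0] = sig[0] + 0.5 * abs(sig[1] - sig[0])
    d_s[2] = 1 - sig[2] + 0.5 * abs(sig[2] - sig[1])
    print(d_s.sum())  # -> 1.0 (up to float rounding)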
+
+
+def weights(lev, nlev, lat):
+    """Compute weights for vertical integration and meridional averages.
+
+    Arguments:
+    - lev: the pressure levels;
+    - nlev: the number of pressure levels;
+    - lat: the latitudes in degrees;
+    """
+    # Compute sigma level and dsigma
+    sig = lev / PS
+    d_s = np.zeros(nlev)
+    for j_l in range(1, nlev - 1, 1):
+        d_s[j_l] = 0.5 * abs(sig[j_l + 1] - sig[j_l - 1])
+    d_s[0] = sig[0] + 0.5 * abs(sig[1] - sig[0])
+    d_s[nlev -
+        1] = 1 - sig[nlev - 1] + 0.5 * abs(sig[nlev - 1] - sig[nlev - 2])
+    # Compute Gaussian weights
+    y_l = np.zeros(lat.shape)
+    np.deg2rad(lat, out=y_l)
+    g_w = np.cos(y_l)
+    return d_s, y_l, g_w
+
+
+def write_to_tab(logfile, name, vared, varzon):
+    """Specify the formats for table entries.
+
+    Arguments:
+    - logfile: the logfile where the entries must be written;
+    - name: the name of the variable;
+    - vared: a list of arrays containing the overall eddy components, the LW,
+      the SW and the KW components;
+    - varzon: an array containing the zonal mean component;
+    """
+    vartot = varzon + vared[0]
+    # Append each variable as its own block of the table
+    with open(logfile, 'a') as log:
+        log.write(' {} TOTAL    {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vartot[0], vartot[1], vartot[2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} ZONAL    {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, varzon[0], varzon[1], varzon[2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} EDDY     {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vared[0][0], vared[0][1], vared[0][2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} EDDY(LW) {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vared[1][0], vared[1][1], vared[1][2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} EDDY(SW) {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vared[2][0], vared[2][1], vared[2][2]))
+        log.write('--------------------------------------\n')
+        log.write(' {} EDDY(KW) {: 4.3f}  {: 4.3f}  {: 4.3f}\n'.format(
+            name, vared[3][0], vared[3][1], vared[3][2]))
+        log.write('--------------------------------------\n')
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/mkthe.py b/esmvaltool/diag_scripts/thermodyn_diagtool/mkthe.py
new file mode 100644
index 0000000000..67f58ffdef
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/mkthe.py
@@ -0,0 +1,407 @@
+"""AUXILIARY FIELDS RETRIEVAL.
+
+Module for computation of the auxiliary variables needed by the tool.
+
+Here the thermodynamic diagnostic tool script computes
+some auxiliary variables.
+
+It computes equivalent potential temperatures and temperatures representative
+of the sensible and latent heat exchanges in the lower layers of the
+troposphere. Estimates of the boundary layer height and lifting condensation
+level are also provided.
+
+It ingests monthly mean fields of:
+- specific humidity (near-surface or 3D) (hus);
+- skin temperature (ts);
+- surface pressure (ps);
+- near-surface horizontal velocity (uas and vas);
+- surface turbulent sensible heat fluxes (hfss);
+- emission temperature (te).
+
+Authors: Frank Lunkeit and Valerio Lembo (University of Hamburg)
+
+Created on Fri Jun 15 10:06:30 2018
+"""
+import os
+from shutil import move
+
+import numpy as np
+from cdo import Cdo
+from netCDF4 import Dataset
+
+from esmvaltool.diag_scripts.thermodyn_diagtool import fourier_coefficients
+
+ALV = 2.5008e6  # Latent heat of vaporization
+G_0 = 9.81  # Gravity acceleration
+P_0 = 100000.  # reference pressure
+RV = 461.51  # Gas constant for water vapour
+T_MELT = 273.15  # freezing temp.
+AKAP = 0.286  # Kappa (Poisson constant R/Cp)
+GAS_CON = 287.0  # Gas constant
+RA_1 = 610.78  # Parameter for Magnus-Teten-Formula
+H_S = 300.  # stable boundary layer height (m)
+H_U = 1000.  # unstable boundary layer height (m)
+RIC_RS = 0.39  # Critical Richardson number for stable layer
+RIC_RU = 0.28  # Critical Richardson number for unstable layer
+L_C = 2501000  # latent heat of condensation
+SIGMAINV = 17636684.3034  # inverse of the Stefan-Boltzmann constant
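The emission temperature used below follows from inverting the Stefan-Boltzmann law, Te = (OLR / sigma)^(1/4), which init_mkthe implements as two nested square roots via cdo. A toy check (illustrative OLR value):

    import numpy as np

    SIGMAINV = 17636684.3034  # inverse of the Stefan-Boltzmann constant
    rlut = 240.0              # outgoing longwave radiation (W m-2)
    t_e = np.sqrt(np.sqrt(rlut * SIGMAINV))
    print(round(float(t_e), 1))  # -> 255.1 K, the familiar emission temp.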
+
+
+def init_mkthe(model, wdir, filelist, flags):
+    """Compute auxiliary fields or perform time averaging of existing fields.
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - filelist: a list of file names containing the input fields;
+    - flags: (wat: a flag for the water mass budget module (y or n),
+      entr: a flag for the material entropy production (y or n);
+      met: a flag for the material entropy production method
+      (1: indirect, 2: direct, 3: both));
+
+    Author:
+    Valerio Lembo, University of Hamburg (2019).
+    """
+    cdo = Cdo()
+    wat = flags[0]
+    entr = flags[1]
+    met = flags[2]
+    hfss_file = filelist[1]
+    hus_file = filelist[2]
+    ps_file = filelist[5]
+    rlut_file = filelist[8]
+    tas_file = filelist[14]
+    ts_file = filelist[15]
+    uas_file = filelist[17]
+    vas_file = filelist[19]
+    # Compute monthly mean fields from 2D surface daily fields
+    aux_file = wdir + '/aux.nc'
+    cdo.selvar('tas', input=tas_file, output=aux_file)
+    move(aux_file, tas_file)
+    tasmn_file = wdir + '/{}_tas_mm.nc'.format(model)
+    cdo.selvar(
+        'tas',
+        input='-monmean {}'.format(tas_file),
+        option='-b F32',
+        output=tasmn_file)
+    cdo.selvar('uas', input=uas_file, output=aux_file)
+    move(aux_file, uas_file)
+    uasmn_file = wdir + '/{}_uas_mm.nc'.format(model)
+    cdo.selvar(
+        'uas',
+        input='-monmean {}'.format(uas_file),
+        option='-b F32',
+        output=uasmn_file)
+    cdo.selvar('vas', input=vas_file, output=aux_file)
+    move(aux_file, vas_file)
+    vasmn_file = wdir + '/{}_vas_mm.nc'.format(model)
+    cdo.selvar(
+        'vas',
+        input='-monmean {}'.format(vas_file),
+        option='-b F32',
+        output=vasmn_file)
+    # emission temperature
+    te_file = wdir + '/{}_te.nc'.format(model)
+    cdo.sqrt(
+        input="-sqrt -mulc,{} {}".format(SIGMAINV, rlut_file), output=te_file)
+    te_ymm_file = wdir + '/{}_te_ymm.nc'.format(model)
+    cdo.yearmonmean(input=te_file, output=te_ymm_file)
+    te_gmean_file = wdir + '/{}_te_gmean.nc'.format(model)
+    cdo.timmean(input='-fldmean {}'.format(te_ymm_file), output=te_gmean_file)
+    with Dataset(te_gmean_file) as f_l:
+        te_gmean_constant = f_l.variables['rlut'][0, 0, 0]
+    if wat is True and entr is False:
+        evspsbl_file, prr_file = wfluxes(model, wdir, filelist)
+        aux_files = [evspsbl_file, prr_file]
+    if entr:
+        if met in {'2', '3'}:
+            evspsbl_file, prr_file = wfluxes(model, wdir, filelist)
+            mk_list = [
+                ts_file, hus_file, ps_file, uasmn_file, vasmn_file, hfss_file,
+                te_file
+            ]
+            htop_file, tabl_file, tlcl_file = mkthe_main(wdir, mk_list, model)
+            # Working temperatures for the hydrological cycle
+            tcloud_file = (wdir + '/{}_tcloud.nc'.format(model))
+            removeif(tcloud_file)
+            cdo.mulc(
+                '0.5',
+                input='-add {} {}'.format(tlcl_file, te_file),
+                options='-b F32',
+                output=tcloud_file)
+            tcolumn_file = (wdir + '/{}_t_vertav_pot.nc'.format(model))
+            removeif(tcolumn_file)
+            cdo.mulc(
+                '0.5',
+                input='-add {} {}'.format(ts_file, tcloud_file),
+                options='-b F32',
+                output=tcolumn_file)
+            # Working temperatures for the kin. en. diss. 
(updated) + tasvert_file = (wdir + '/{}_tboundlay.nc'.format(model)) + removeif(tasvert_file) + cdo.fldmean( + input='-mulc,0.5 -add {} {}'.format(ts_file, tabl_file), + options='-b F32', + output=tasvert_file) + aux_files = [ + evspsbl_file, htop_file, prr_file, tabl_file, tasvert_file, + tcloud_file, tcolumn_file, tlcl_file + ] + remove_files = [tasmn_file, uasmn_file, vasmn_file, te_gmean_file] + for filen in remove_files: + os.remove(filen) + return te_ymm_file, te_gmean_constant, te_file, aux_files + + +def input_data(wdir, file_list): + """Manipulate input fields and read datasets. + + Arguments: + - wdir: the working directory path; + - file_list: the list of file containing ts, hus, + ps, uas, vas, hfss, te; + + Author: + Valerio Lembo, University of Hamburg, 2019 + """ + cdo = Cdo() + ts_miss_file = wdir + '/ts.nc' + removeif(ts_miss_file) + cdo.setctomiss('0', input=file_list[0], output=ts_miss_file) + hus_miss_file = wdir + '/hus.nc' + removeif(hus_miss_file) + cdo.setctomiss('0', input=file_list[1], output=hus_miss_file) + ps_miss_file = wdir + '/ps.nc' + removeif(ps_miss_file) + cdo.setctomiss('0', input=file_list[2], output=ps_miss_file) + vv_missfile = wdir + '/V.nc' + removeif(vv_missfile) + vv_file = wdir + '/V_miss.nc' + removeif(vv_file) + cdo.sqrt( + input='-add -sqr {} -sqr {}'.format(file_list[3], file_list[4]), + options='-b F32', + output=vv_file) + cdo.setctomiss('0', input=vv_file, output=vv_missfile) + os.remove(vv_file) + hfss_miss_file = wdir + '/hfss.nc' + removeif(hfss_miss_file) + cdo.setctomiss('0', input=file_list[5], output=hfss_miss_file) + te_miss_file = wdir + '/te.nc' + removeif(te_miss_file) + cdo.setctomiss('0', input=file_list[6], output=te_miss_file) + with Dataset(ts_miss_file) as dataset: + t_s = dataset.variables['ts'][:, :, :] + with Dataset(hus_miss_file) as dataset: + hus = dataset.variables['hus'][:, :, :, :] + lev = dataset.variables['plev'][:] + with Dataset(ps_miss_file) as dataset: + p_s = dataset.variables['ps'][:, :, :] + with Dataset(vv_missfile) as dataset: + vv_hor = dataset.variables['uas'][:, :, :] + with Dataset(hfss_miss_file) as dataset: + hfss = dataset.variables['hfss'][:, :, :] + with Dataset(te_miss_file) as dataset: + t_e = dataset.variables['rlut'][:, :, :] + huss = hus[:, 0, :, :] + huss = np.where(lev[0] >= p_s, huss, 0.) + nlev = len(lev) + for l_l in range(nlev): + aux = hus[:, l_l, :, :] + aux = np.where((p_s >= lev[l_l]), aux, 0.) + huss = huss + aux + remove_files = [ + ts_miss_file, hus_miss_file, ps_miss_file, vv_missfile, hfss_miss_file, + te_miss_file + ] + for filen in remove_files: + os.remove(filen) + return hfss, huss, p_s, t_e, t_s, vv_hor + + +def mkthe_main(wdir, file_list, modelname): + """Compute the auxiliary variables for the Thermodynamic diagnostic tool. + + Arguments: + - wdir: the working directory path; + - file_list: the list of file containing ts, hus, + ps, uas, vas, hfss, te; + - modelname: the name of the model from which the fields are; + """ + hfss, huss, p_s, t_e, t_s, vv_hor = input_data(wdir, file_list) + ricr = RIC_RU + h_bl = H_U + ricr = np.where(hfss >= 0.75, ricr, RIC_RS) + h_bl = np.where(hfss >= 0.75, h_bl, H_S) + ev_p = huss * p_s / (huss + GAS_CON / RV) # Water vapour pressure + td_inv = (1 / T_MELT) - (RV / ALV) * np.log(ev_p / RA_1) # Dewpoint t. + t_d = 1 / td_inv + hlcl = 125. 
* (t_s - t_d)  # Empirical formula for LCL height
+    # Negative heights are replaced by the height of the stable
+    # boundary layer (lower constraint to the height of the cloud layer)
+    hlcl = np.where(hlcl >= 0., hlcl, h_bl)
+    cp_d = GAS_CON / AKAP
+    ztlcl = t_s - (G_0 / cp_d) * hlcl
+    # Compute the pseudo-adiabatic lapse rate to obtain the height of cloud
+    # top knowing emission temperature.
+    gw_pa = (G_0 / cp_d) * (1 + ((ALV * huss) / (GAS_CON * ztlcl)) / (1 + (
+        (ALV**2 * huss * 0.622) / (cp_d * GAS_CON * ztlcl**2))))
+    htop = -(t_e - ztlcl) / gw_pa + hlcl
+    # Use potential temperature and critical Richardson number to compute
+    # temperature and height of the boundary layer top
+    ths = t_s * (P_0 / p_s)**AKAP
+    thz = ths + 0.03 * ricr * (vv_hor)**2 / h_bl
+    p_z = p_s * np.exp((-G_0 * h_bl) / (GAS_CON * t_s))  # Barometric eq.
+    t_z = thz * (P_0 / p_z)**(-AKAP)
+    outlist = [ztlcl, t_z, htop]
+    htop_file, tabl_file, tlcl_file = write_output(wdir, modelname, file_list,
+                                                   outlist)
+    return htop_file, tabl_file, tlcl_file
+
+
+def removeif(filename):
+    """Remove filename if it exists."""
+    try:
+        os.remove(filename)
+    except OSError:
+        pass
+
+
+def wfluxes(model, wdir, filelist):
+    """Compute auxiliary fields and perform time averaging of existing fields.
+
+    Arguments:
+    - model: the model name;
+    - wdir: the working directory where the outputs are stored;
+    - filelist: a list of file names containing the input fields;
+
+    Author:
+    Valerio Lembo, University of Hamburg (2019).
+    """
+    cdo = Cdo()
+    hfls_file = filelist[0]
+    pr_file = filelist[3]
+    prsn_file = filelist[4]
+    aux_file = wdir + '/aux.nc'
+    evspsbl_file = (wdir + '/{}_evspsbl.nc'.format(model))
+    cdo.divc(str(L_C), input="{}".format(hfls_file), output=evspsbl_file)
+    # Rainfall precipitation
+    prr_file = wdir + '/{}_prr.nc'.format(model)
+    cdo.sub(input="{} {}".format(pr_file, prsn_file), output=aux_file)
+    cdo.chname('pr,prr', input=aux_file, output=prr_file)
+    return evspsbl_file, prr_file
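wfluxes() above converts the surface latent heat flux into an evaporation mass flux by dividing by the latent heat of condensation, E = hfls / L_C. A toy check of the magnitudes (illustrative flux value):

    L_C = 2501000  # latent heat of condensation (J kg-1)
    hfls = 100.0   # latent heat flux (W m-2)
    evspsbl = hfls / L_C              # kg m-2 s-1
    print(round(evspsbl * 86400, 2))  # -> 3.45 mm/day equivalent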
+
+
+def write_output(wdir, model, file_list, varlist):
+    """Write auxiliary variables to new NC files, write new attributes.
+
+    Arguments:
+    - wdir: the work directory where the outputs are stored;
+    - model: the name of the model;
+    - file_list: the list containing the input fields;
+    - varlist: a list containing the variables to be written to NC files, i.e.
+      tlcl (the temperature at the LCL), t_z (the temperature at the boundary
+      layer top), htop (the height of the boundary layer top); their
+      dimensions are as (time, lat, lon);
+
+    Author:
+    Valerio Lembo, University of Hamburg (2019).
+    """
+    cdo = Cdo()
+    fourc = fourier_coefficients
+    ztlcl = varlist[0]
+    t_z = varlist[1]
+    htop = varlist[2]
+    tlcl_temp = wdir + '/tlcl.nc'
+    removeif(tlcl_temp)
+    with Dataset(tlcl_temp, 'w', format='NETCDF4') as w_nc_fid:
+        w_nc_fid.description = (
+            "Monthly mean LCL temperature from {} model. ".format(model) +
+            "Calculated by Thermodynamics model diagnostics " +
+            "in ESMValTool. Author Valerio Lembo, " +
+            "Meteorologisches Institut, Universitaet Hamburg.")
+        with Dataset(file_list[0]) as dataset:
+            fourc.extr_time(dataset, w_nc_fid)
+            fourc.extr_lat(dataset, w_nc_fid, 'lat')
+            fourc.extr_lon(dataset, w_nc_fid)
+        w_nc_var = w_nc_fid.createVariable('tlcl', 'f8',
+                                           ('time', 'lat', 'lon'))
+        w_nc_var.setncatts({
+            'long_name': "LCL Temperature",
+            'units': "K",
+            'level_desc': "surface",
+            'var_desc': "LCL temperature from LCL height (Magnus "
+                        "formulas and dry adiabatic lapse ratio)",
+            'statistic': 'monthly mean'
+        })
+        w_nc_fid.variables['tlcl'][:] = ztlcl
+    tabl_temp = wdir + '/tabl.nc'
+    removeif(tabl_temp)
+    with Dataset(tabl_temp, 'w', format='NETCDF4') as w_nc_fid:
+        w_nc_fid.description = (
+            "Monthly mean BL top temperature for {} model. ".format(model) +
+            "Calculated by Thermodynamics model diagnostics " +
+            "in ESMValTool. Author Valerio Lembo, " +
+            "Meteorologisches Institut, Universitaet Hamburg.")
+        with Dataset(file_list[0]) as dataset_tabl:
+            fourc.extr_time(dataset_tabl, w_nc_fid)
+            fourc.extr_lat(dataset_tabl, w_nc_fid, 'lat')
+            fourc.extr_lon(dataset_tabl, w_nc_fid)
+        w_nc_var = w_nc_fid.createVariable('tabl', 'f8',
+                                           ('time', 'lat', 'lon'))
+        w_nc_var.setncatts({
+            'long_name': "Temperature at BL top",
+            'units': "K",
+            'level_desc': "surface",
+            'var_desc': "Temperature at the Boundary Layer top, "
+                        "from boundary layer thickness and "
+                        "barometric equation",
+            'statistic': 'monthly mean'
+        })
+        w_nc_fid.variables['tabl'][:] = t_z
+    htop_temp = wdir + '/htop.nc'
+    removeif(htop_temp)
+    with Dataset(htop_temp, 'w', format='NETCDF4') as w_nc_fid:
+        w_nc_fid.description = (
+            "Monthly mean height of the BL top for {} model. ".format(model) +
+            "Calculated by Thermodynamics model diagnostics " +
+            "in ESMValTool. Author Valerio Lembo, " +
+            "Meteorologisches Institut, Universitaet Hamburg.")
+        with Dataset(file_list[0]) as dataset_htop:
+            fourc.extr_time(dataset_htop, w_nc_fid)
+            fourc.extr_lat(dataset_htop, w_nc_fid, 'lat')
+            fourc.extr_lon(dataset_htop, w_nc_fid)
+        w_nc_var = w_nc_fid.createVariable('htop', 'f8',
+                                           ('time', 'lat', 'lon'))
+        w_nc_var.setncatts({
+            'long_name': "Height at BL top",
+            'units': "m",
+            'level_desc': "surface",
+            'var_desc': "Height at the Boundary Layer top, "
+                        "from boundary layer thickness and "
+                        "barometric equation",
+            'statistic': 'monthly mean'
+        })
+        w_nc_fid.variables['htop'][:] = htop
+    tlcl_file = wdir + '/{}_tlcl.nc'.format(model)
+    cdo.setrtomiss('400,1e36', input=tlcl_temp, output=tlcl_file)
+    tabl_file = wdir + '/{}_tabl.nc'.format(model)
+    cdo.setrtomiss('400,1e36', input=tabl_temp, output=tabl_file)
+    htop_file = wdir + '/{}_htop.nc'.format(model)
+    cdo.setrtomiss('12000,1e36', input=htop_temp, output=htop_file)
+    return htop_file, tabl_file, tlcl_file
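Before the plotting module: mkthe_main above derives the dewpoint from the inverted Magnus-Teten formula and converts it to an LCL height with the 125 m/K rule of thumb. A toy check at warm, fairly dry surface conditions (illustrative numbers; constants as defined in this module):

    import numpy as np

    RV = 461.51     # gas constant for water vapour
    ALV = 2.5008e6  # latent heat of vaporization
    RA_1 = 610.78   # Magnus-Teten parameter
    T_MELT = 273.15
    ev_p = 1500.0   # water vapour pressure (Pa)
    t_s = 293.0     # surface temperature (K)
    t_d = 1 / ((1 / T_MELT) - (RV / ALV) * np.log(ev_p / RA_1))
    hlcl = 125. * (t_s - t_d)  # LCL height (m)
    print(round(float(t_d), 1), round(float(hlcl)))  # -> roughly 286 K, 860 m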
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/plot_script.py b/esmvaltool/diag_scripts/thermodyn_diagtool/plot_script.py
new file mode 100644
index 0000000000..2634485a04
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/plot_script.py
@@ -0,0 +1,963 @@
+"""FUNCTIONS FOR PLOTS.
+
+Plotting module for Thermodyn_diagtool.
+
+The module provides plots for a single model of:
+- climatological mean maps of TOA, atmospheric and surface energy budgets;
+- annual mean time series of TOA, atmospheric and surface energy budgets anom.;
+- climatological mean maps of latent energy and water mass budgets;
+- annual mean time series of latent energy and water mass budget anom.;
+- meridional section of meridional enthalpy transports;
+- meridional section of meridional water mass transports;
+- scatter plots of atmospheric vs. oceanic peak magnitudes in the two hem.;
+- climatological mean maps of every component of the entropy budget.
+
+@author: valerio.lembo@uni-hamburg.de, Valerio Lembo, Hamburg University, 2018.
+"""
+import math
+import os
+from shutil import move
+import cartopy.crs as ccrs
+import matplotlib.pyplot as plt
+import numpy as np
+from cdo import Cdo
+from matplotlib import rcParams
+from netCDF4 import Dataset
+from scipy import interpolate, stats
+from esmvaltool.diag_scripts.shared import ProvenanceLogger
+from esmvaltool.diag_scripts.thermodyn_diagtool import fourier_coefficients, \
+    provenance_meta
+
+
+def balances(cfg, wdir, plotpath, filena, name, model):
+    """Plot everything related to energy and water mass budgets.
+
+    This method provides climatological annual mean maps of TOA, atmospheric
+    and surface energy budgets, time series of annual mean anomalies in the
+    two hemispheres and meridional sections of meridional enthalpy
+    transports. Scatter plots of oceanic vs. atmospheric meridional
+    enthalpy transports are also provided.
+
+    Arguments:
+    - cfg: the ESMValTool configuration, used for provenance logging;
+    - wdir: the working directory;
+    - plotpath: the path where the plot has to be saved;
+    - filena: the files containing input fields;
+    - name: the name of the variable associated with the input field;
+    - model: the name of the model to be analysed;
+    """
+    cdo = Cdo()
+    provlog = ProvenanceLogger(cfg)
+    nsub = len(filena)
+    pdir = plotpath
+    plotentname = pdir + '/{}_heat_transp.png'.format(model)
+    plotwmbname = pdir + '/{}_wmb_transp.png'.format(model)
+    plotlatname = pdir + '/{}_latent_transp.png'.format(model)
+
+    # timesery = np.zeros([nsub, 2])
+    dims, ndims, tmean, zmean, timeser = global_averages(nsub, filena, name)
+    transp_mean = np.zeros([nsub, ndims[1]])
+    lat_maxm = np.zeros([nsub, 2, len(dims[3])])
+    tr_maxm = np.zeros([nsub, 2, len(dims[3])])
+    lim = [55, 55, 25]
+    for i_f in np.arange(nsub):
+        transp = transport(zmean[i_f, :, :], timeser[i_f, :, 0], dims[1])
+        transp_mean[i_f, :], list_peak = transports_preproc(
+            dims[1], ndims[3], lim[i_f], transp)
+        lat_maxm[i_f, :, :] = list_peak[0]
+        tr_maxm[i_f, :, :] = list_peak[1]
+    if nsub == 3:
+        ext_name = [
+            'TOA Energy Budget', 'Atmospheric Energy Budget',
+            'Surface Energy Budget'
+        ]
+        transpty = (-6E15, 6E15)
+        coords = [dims[0], dims[1]]
+        plot_climap_eb(model, pdir, coords, tmean, ext_name)
+        fig = plt.figure()
+        strings = ['Meridional heat transports', 'Latitude [deg]', '[W]']
+        lats = dims[1]
+        for i in np.arange(nsub):
+            filename = filena[i] + '.nc'
+            if name[i] == 'toab':
+                nameout = 'total'
+            elif name[i] == 'atmb':
+                nameout = 'atmos'
+            elif name[i] == 'surb':
+                nameout = 'ocean'
+            nc_f = wdir + '/{}_transp_mean_{}.nc'.format(nameout, model)
+            removeif(nc_f)
+            lat_model = 'lat_{}'.format(model)
+            pr_output(transp_mean[i, :], filename, nc_f, nameout, lat_model)
+            name_model = '{}_{}'.format(nameout, model)
+            cdo.chname(
+                '{},{}'.format(nameout, name_model),
+                input=nc_f,
+                output='aux.nc')
+            move('aux.nc', nc_f)
+            cdo.chname('lat,{}'.format(lat_model), input=nc_f,
+                       output='aux.nc')
+            move('aux.nc', nc_f)
+            attr = ['{} meridional enthalpy transports'.format(nameout),
+                    model]
+            provrec = provenance_meta.get_prov_transp(attr, filename,
+                                                      plotentname)
+            provlog.log(nc_f, provrec)
+            plot_1m_transp(lats, transp_mean[i, :], transpty, strings)
+        plt.grid()
+        plt.savefig(plotentname)
+        plt.close(fig)
+        plot_1m_scatter(model, pdir, lat_maxm, tr_maxm)
+    elif nsub == 2:
+        ext_name = ['Water mass budget', 'Latent heat budget']
+        transpwy = (-2E9, 2E9)
+        transply = (-6E15, 6E15)
+        coords = [dims[0], dims[1]]
+        plot_climap_wm(model, pdir, coords, tmean, ext_name, name)
+        nc_f = wdir + '/{}_transp_mean_{}.nc'.format('wmb', model)
+        removeif(nc_f)
+        filena[0] = filena[0].split('.nc', 1)[0]
+        filename = filena[0] + '.nc'
+        pr_output(transp_mean[0, :], filename, nc_f, 'wmb', 'lat')
+        attr = ['water mass transport', model]
+        provrec = provenance_meta.get_prov_transp(attr, filename, plotwmbname)
+        provlog.log(nc_f, provrec)
+        nc_f = wdir + '/{}_transp_mean_{}.nc'.format('latent', model)
+        removeif(nc_f)
+        filena[1] = filena[1].split('.nc', 1)[0]
+        filename = filena[1] + '.nc'
+        pr_output(transp_mean[1, :], filename, nc_f, 'latent', 'lat')
+        attr = ['latent energy transport', model]
+        provrec = provenance_meta.get_prov_transp(attr, filename, plotlatname)
+        provlog.log(nc_f, provrec)
+        strings = ['Water mass transports', 'Latitude [deg]', '[kg*s-1]']
+        fig = plt.figure()
+        plot_1m_transp(dims[1], transp_mean[0, :], transpwy, strings)
+        plt.grid()
+        plt.savefig(plotwmbname)
+        plt.close(fig)
+        strings = ['Latent heat transports', 'Latitude [deg]', '[W]']
+        fig = plt.figure()
+        plot_1m_transp(dims[1], transp_mean[1, :], transply, strings)
+        plt.grid()
+        plt.savefig(plotlatname)
+        plt.close(fig)
+    for i_f in np.arange(nsub):
+        fig = plt.figure()
+        axi = plt.subplot(111)
+        axi.plot(dims[3], timeser[i_f, :, 0], 'k', label='Global')
+        axi.plot(dims[3], timeser[i_f, :, 1], 'r', label='SH')
+        axi.plot(dims[3], timeser[i_f, :, 2], 'b', label='NH')
+        plt.title('Annual mean {}'.format(ext_name[i_f]))
+        plt.xlabel('Years')
+        plt.ylabel('[W/m2]')
+        axi.legend(
+            loc='upper center',
+            bbox_to_anchor=(0.5, -0.07),
+            shadow=True,
+            ncol=3)
+        plt.tight_layout()
+        plt.grid()
+        plt.savefig(pdir + '/{}_{}_timeser.png'.format(model, name[i_f]))
+        plt.close(fig)
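balances() delegates the peak detection to transports_preproc, which is defined elsewhere in this module and not shown in this diff. The underlying idea can be sketched as a per-hemisphere argmax over the mean transport profile (toy profile, hypothetical helper):

    import numpy as np

    def peak(lat, transp):
        # latitude and value of the strongest transport (by magnitude)
        i = np.nanargmax(np.abs(transp))
        return lat[i], transp[i]

    lat = np.linspace(-90, 90, 181)
    transp = 5e15 * np.sin(np.deg2rad(2 * lat))  # toy profile, peaks at +/-45
    print(peak(lat[lat > 0], transp[lat > 0]))   # -> (45.0, 5e+15)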
+
+    Arguments:
+    - plotpath: the path where the plot has to be saved;
+    - filename: the file containing input fields;
+    - name: the name of the variable associated with the input field;
+    - ext_name: the long name of the input field;
+    - model: the name of the model to be analysed;
+    """
+    pdir = plotpath
+    if ext_name == 'Vertical entropy production':
+        rangec = [-0.01, 0.1]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Horizontal entropy production':
+        rangec = [-0.5, 0.5]
+        c_m = 'bwr'
+    elif ext_name == 'Sensible Heat entropy production':
+        rangec = [-0.01, 0.01]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Evaporation entropy production':
+        rangec = [0, 1]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Rainfall precipitation entropy production':
+        rangec = [0, 1]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Snowfall precipitation entropy production':
+        rangec = [0, 0.25]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Snow melting entropy production':
+        rangec = [0, 0.05]
+        c_m = 'YlOrBr'
+    elif ext_name == 'Potential energy entropy production':
+        rangec = [0, 0.1]
+        c_m = 'YlOrBr'
+    else:
+        quit()
+    with Dataset(filename) as dataset:
+        var = dataset.variables[name][:, :, :]
+        lats = dataset.variables['lat'][:]
+        lons = dataset.variables['lon'][:]
+    tmean = np.nanmean(var, axis=0)
+    fig = plt.figure()
+    axi = plt.axes(projection=ccrs.PlateCarree())
+    coords = [lons, lats]
+    title = 'Climatological Mean {}'.format(ext_name)
+    plot_climap(axi, coords, tmean, title, rangec, c_m)
+    plt.savefig(pdir + '/{}_{}_climap.png'.format(model, name))
+    plt.close(fig)
+
+
+def global_averages(nsub, filena, name):
+    """Compute zonal mean, global mean and time mean averages.
+
+    Arguments:
+    - nsub: the number of variables for which averages must be computed;
+    - filena: the names of the files containing the variables (without
+      extension);
+    - name: the names of the variables;
+    """
+    sep = '.nc'
+    filena[0] = filena[0].split(sep, 1)[0]
+    filename = filena[0] + sep
+    with Dataset(filename) as dataset:
+        lats = dataset.variables['lat'][:]
+        lons = dataset.variables['lon'][:]
+        time = dataset.variables['time'][:]
+    nlats = len(lats)
+    nlons = len(lons)
+    ntime = len(time)
+    yr_0 = int(len(time) / 12)
+    timey = np.linspace(0, yr_0 - 1, num=yr_0)
+    dims = [lons, lats, time, timey]
+    ndims = [nlons, nlats, ntime, yr_0]
+    var = np.zeros([nsub, ntime, nlats, nlons])
+    for i in np.arange(nsub):
+        filena[i] = filena[i].split(sep, 1)[0]
+        filename = filena[i] + '.nc'
+        with Dataset(filename) as dataset:
+            var[i, :, :, :] = dataset.variables[name[i]][:, :, :]
+    var_r = np.reshape(var,
+                       (nsub, int(np.shape(var)[1] / 12), 12, nlats, nlons))
+    vary = np.nanmean(var_r, axis=2)
+    zmean = np.nanmean(vary, axis=3)
+    tmean = np.nanmean(vary, axis=1)
+    timeser = np.zeros([nsub, yr_0, 3])
+    for i_f in np.arange(nsub):
+        zmean_w = latwgt(lats, zmean[i_f, :, :])
+        gmean = np.nansum(zmean_w, axis=1)
+        shmean = hemean(0, lats, zmean[i_f, :, :])
+        nhmean = hemean(1, lats, zmean[i_f, :, :])
+        timeser[i_f, :, :] = np.column_stack((gmean, shmean, nhmean))
+    return dims, ndims, tmean, zmean, timeser
+
+
+def hemean(hem, lat, inp):
+    """Compute hemispheric averages.
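+
+    The averages are area-weighted through latwgt; assuming an ascending
+    latitude axis, hem=1 selects the northern half of the grid. A
+    hypothetical call on a (time, lat) zonal-mean array:
+
+        nh_series = hemean(1, lats, zmean)  # one NH mean per time step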
+
+    Arguments:
+    - hem: a parameter for the choice of the hemisphere (1 stands for the
+      NH, as in the calls from global_averages);
+    - lat: latitude (in degrees);
+    - inp: input field;
+    """
+    j_end = np.shape(inp)[1]
+    zmn = latwgt(lat, inp)
+    hmean = []
+    if hem == 1:
+        if j_end % 2 == 0:
+            hmean = 2 * np.nansum(zmn[:, int(j_end / 2):j_end], axis=1)
+        else:
+            hmean = 2 * np.nansum(zmn[:, int((j_end + 1) / 2):j_end], axis=1)
+    else:
+        if j_end % 2 == 0:
+            hmean = 2 * np.nansum(zmn[:, 1:int(j_end / 2)], axis=1)
+        else:
+            hmean = 2 * np.nansum(zmn[:, 1:int((j_end - 1) / 2)], axis=1)
+    return hmean
+
+
+def init_plotentr(model, pdir, flist):
+    """Define options for plotting maps of entropy production components.
+
+    Arguments:
+    - model: the name of the model;
+    - pdir: the path to the plots directory;
+    - flist: a list of files containing the components of the entropy
+      production with the direct method;
+    """
+    entropy(pdir, flist[0], 'ssens', 'Sensible Heat entropy production', model)
+    entropy(pdir, flist[1], 'sevap', 'Evaporation entropy production', model)
+    entropy(pdir, flist[2], 'srain',
+            'Rainfall precipitation entropy production', model)
+    entropy(pdir, flist[3], 'ssnow',
+            'Snowfall precipitation entropy production', model)
+    entropy(pdir, flist[4], 'smelt', 'Snow melting entropy production', model)
+    entropy(pdir, flist[5], 'spotp', 'Potential energy entropy production',
+            model)
+
+
+def latwgt(lat, t_r):
+    """Compute weighted average over latitudes.
+
+    Arguments:
+    - lat: latitude (in degrees);
+    - t_r: the field to be averaged (time,lat);
+    """
+    p_i = math.pi
+    conv = 2 * p_i / 360
+    dlat = np.zeros(len(lat))
+    for i in range(len(lat) - 1):
+        dlat[i] = abs(lat[i + 1] - lat[i])
+    dlat[len(lat) - 1] = dlat[len(lat) - 2]
+    latr = conv * lat
+    dlatr = conv * dlat
+    tr2 = np.zeros((np.shape(t_r)[0], np.shape(t_r)[1]))
+    for j in range(len(lat)):
+        tr2[:, j] = t_r[:, j] * np.cos(latr[j]) * dlatr[j] / 2
+    return tr2
+
+
+def plot_climap_eb(model, pdir, coords, tmean, ext_name):
+    """Plot climatological mean maps of TOA, atmospheric, surface budgets.
+
+    Arguments:
+    - model: the name of the model;
+    - pdir: a plots directory;
+    - coords: the lon and lat coordinates;
+    - tmean: the climatological mean (3,lat,lon) maps of the three budgets;
+    - ext_name: the extended name of the budget, to be used for the title;
+    """
+    rangect = [-100, 100]
+    fig = plt.figure(figsize=(12, 22))
+    axi = plt.subplot(311, projection=ccrs.PlateCarree())
+    title = 'Climatological Mean {}'.format(ext_name[0])
+    plot_climap(axi, coords, tmean[0, :, :], title, rangect, 'bwr')
+    axi = plt.subplot(312, projection=ccrs.PlateCarree())
+    title = 'Climatological Mean {}'.format(ext_name[1])
+    plot_climap(axi, coords, tmean[1, :, :], title, rangect, 'bwr')
+    axi = plt.subplot(313, projection=ccrs.PlateCarree())
+    title = 'Climatological Mean {}'.format(ext_name[2])
+    plot_climap(axi, coords, tmean[2, :, :], title, rangect, 'bwr')
+    plt.savefig(pdir + '/{}_energy_climap.png'.format(model))
+    plt.close(fig)
+
+
+def plot_climap_wm(model, pdir, coords, tmean, ext_name, name):
+    """Plot climatological mean maps of water mass and latent energy budgets.
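+
+    The colour ranges are hard-coded: rangecw for the water mass budget and
+    rangecl for the latent heat budget.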
+
+    Arguments:
+    - model: the name of the model;
+    - pdir: a plots directory;
+    - coords: the lon and lat coordinates;
+    - tmean: the climatological mean (2,lat,lon) maps of the two budgets;
+    - ext_name: the extended name of the budget, to be used for the title;
+    - name: the variable name, used for the file name of the figure;
+    """
+    rangecw = [-1E-4, 1E-4]
+    rangecl = [-150, 150]
+    fig = plt.figure()
+    axi = plt.subplot(111, projection=ccrs.PlateCarree())
+    title = 'Climatological Mean {}'.format(ext_name[0])
+    plot_climap(axi, coords, tmean[0, :, :], title, rangecw, 'bwr')
+    plt.savefig(pdir + '/{}_{}_climap.png'.format(model, name[0]))
+    plt.close(fig)
+    fig = plt.figure()
+    axi = plt.subplot(111, projection=ccrs.PlateCarree())
+    title = 'Climatological Mean {}'.format(ext_name[1])
+    plot_climap(axi, coords, tmean[1, :, :], title, rangecl, 'bwr')
+    plt.savefig(pdir + '/{}_{}_climap.png'.format(model, name[1]))
+    plt.close(fig)
+
+
+def plot_climap(axi, coords, fld, title, rrange, c_m):
+    """Plot a climatological mean map of a given field.
+
+    Arguments:
+    - axi: an axis identifier;
+    - coords: the lon and lat coordinates;
+    - fld: the field to be plotted;
+    - title: the title to appear on the figure;
+    - rrange: the range for the color bar;
+    - c_m: a color map identifier;
+    """
+    axi.coastlines()
+    lons = np.linspace(0, 360, len(coords[0])) - (coords[0][1] - coords[0][0])
+    plt.contourf(lons, coords[1], fld, 60, transform=ccrs.PlateCarree())
+    plt.pcolor(
+        lons,
+        coords[1],
+        fld,
+        vmin=rrange[0],
+        vmax=rrange[1],
+        cmap=c_m,
+        antialiased=True)
+    plt.colorbar()
+    plt.title(title)
+    plt.grid()
+
+
+def plot_ellipse(semimaj, semimin, phi, x_cent, y_cent, a_x):
+    """Plot an ellipse with matplotlib.
+
+    Arguments:
+    - semimaj: the length of the major axis;
+    - semimin: the length of the minor axis;
+    - phi: the tilting of the semimaj axis;
+    - (x_cent, y_cent): the coordinates of the ellipse centre;
+    - a_x: an object containing the axis properties;
+    """
+    theta = np.linspace(0, 2 * np.pi, 100)
+    r_r = 1 / np.sqrt((np.cos(theta))**2 + (np.sin(theta))**2)
+    x_x = r_r * np.cos(theta)
+    y_x = r_r * np.sin(theta)
+    data = np.array([x_x, y_x])
+    s_ax = np.array([[semimaj, 0], [0, semimin]])
+    r_angle = np.array([[np.cos(phi), -np.sin(phi)],
+                        [np.sin(phi), np.cos(phi)]])
+    t_t = np.dot(r_angle, s_ax)
+    data = np.dot(t_t, data)
+    data[0] += x_cent
+    data[1] += y_cent
+    a_x.plot(data[0], data[1], color='b', linestyle='-')
+
+
+def plot_1m_scatter(model, pdir, lat_maxm, tr_maxm):
+    """Plot the scatter plots of atmospheric vs. oceanic peaks and locations.
+
+    The function produces scatter plots for the atmospheric vs. oceanic peak
+    magnitudes in the SH (a) and NH (b), and for the atmospheric vs. oceanic
+    peak locations in the SH (c) and NH (d).
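+    The first index of lat_maxm and tr_maxm identifies the budget (0:
+    total, 1: atmospheric, 2: oceanic), so only rows 1 and 2 are used here.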
+
+    Arguments:
+    - model: the name of the model;
+    - pdir: a plots directory;
+    - lat_maxm: the positions of the peaks;
+    - tr_maxm: the magnitudes of the peaks;
+    """
+    fig = plt.figure()
+    fig.set_size_inches(12, 12)
+    plt.subplot(221)
+    plt.scatter(tr_maxm[1, 0, :], tr_maxm[2, 0, :], c=(0, 0, 0), alpha=1)
+    plt.title('(a) Atm. vs ocean magnitude - SH', fontsize=13, y=1.02)
+    plt.xlabel('Atmos. trans. [W]', fontsize=11)
+    plt.ylabel('Oceanic trans. [W]', fontsize=11)
+    plt.grid()
+    plt.subplot(222)
+    plt.scatter(tr_maxm[1, 1, :], tr_maxm[2, 1, :], c=(0, 0, 0), alpha=1)
+    plt.title('(b) Atm. vs ocean magnitude - NH', fontsize=13, y=1.02)
+    plt.xlabel('Atmos. trans. [W]', fontsize=11)
+    plt.ylabel('Oceanic trans. [W]', fontsize=11)
+    plt.grid()
+    plt.subplot(223)
+    plt.scatter(lat_maxm[1, 0, :], lat_maxm[2, 0, :], c=(0, 0, 0), alpha=1)
+    plt.title('(c) Atm. vs ocean location - SH', fontsize=13, y=1.02)
+    plt.xlabel('Atmos. trans. position [degrees of latitude]', fontsize=11)
+    plt.ylabel('Oceanic trans. position [degrees of latitude]', fontsize=11)
+    plt.grid()
+    plt.subplot(224)
+    plt.scatter(lat_maxm[1, 1, :], lat_maxm[2, 1, :], c=(0, 0, 0), alpha=1)
+    plt.title('(d) Atm. vs ocean location - NH', fontsize=13, y=1.02)
+    plt.xlabel('Atmos. trans. position [degrees of latitude]', fontsize=11)
+    plt.ylabel('Oceanic trans. position [degrees of latitude]', fontsize=11)
+    plt.grid()
+    plt.savefig(pdir + '/{}_scatpeak.png'.format(model))
+    plt.close(fig)
+
+
+def plot_1m_transp(lats, yval, ylim, strings):
+    """Plot a meridional section of enthalpy transport for one model.
+
+    In the heat transport case the function is called once per component,
+    so that successive calls draw the total, atmospheric and oceanic
+    transports on the same panel.
+
+    Arguments:
+    - lats: the latitudinal dimension as a 1D array;
+    - yval: the meridional transport to be plotted, as a 1D array (lat);
+    - ylim: a range for the y-axis;
+    - strings: a list of strings containing the title of the figure and the
+      names of the x and y axes;
+    """
+    plt.subplot(111)
+    plt.plot(lats, yval)
+    plt.title(strings[0], fontsize=10)
+    plt.xlabel(strings[1], fontsize=10)
+    plt.ylabel(strings[2])
+    plt.tight_layout()
+    plt.ylim(ylim)
+    plt.xlim(-90, 90)
+
+
+def plot_mm_ebscatter(pdir, eb_list):
+    """Plot multi-model scatter plots of EB mean values vs. their variability.
+
+    The function produces a plot containing 4 scatter plots:
+    - (a) TOA mean energy budget vs. its interannual variability;
+    - (b) Atmospheric mean energy budget vs. its interannual variability;
+    - (c) Surface mean energy budget vs. its interannual variability;
+    - (d) Atmospheric vs. surface energy budget with whiskers encompassing
+      the 1sigma uncertainty range.
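+
+    In panel (d) the 1sigma whiskers are overlaid with plt.errorbar on top
+    of the usual multi-model scatter.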
+
+    Arguments:
+    - pdir: a plots directory;
+    - eb_list: a list containing the TOA, atmospheric and surface energy
+      budgets as a 2D array (model, 2), with the first column being the mean
+      value and the second column being the inter-annual variance;
+    """
+    toab_all = eb_list[0]
+    atmb_all = eb_list[1]
+    surb_all = eb_list[2]
+    fig = plt.figure()
+    fig.set_size_inches(12, 22)
+    axi = plt.subplot(221)
+    plt.ylim(bottom=0)
+    title = '(a) TOA energy budget'
+    xlabel = 'R_t [W m-2]'
+    ylabel = 'Sigma (R_t) [W m-2]'
+    varlist = [toab_all[:, 0], toab_all[:, 1]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(222)
+    plt.ylim(bottom=0)
+    title = '(b) Atmospheric energy budget'
+    xlabel = 'F_a [W m-2]'
+    ylabel = 'Sigma (F_a) [W m-2]'
+    varlist = [atmb_all[:, 0], atmb_all[:, 1]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(223)
+    plt.ylim(bottom=0)
+    title = '(c) Surface energy budget'
+    xlabel = 'F_s [W m-2]'
+    ylabel = 'Sigma (F_s) [W m-2]'
+    varlist = [surb_all[:, 0], surb_all[:, 1]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(224)
+    plt.errorbar(
+        x=atmb_all[:, 0],
+        y=surb_all[:, 0],
+        xerr=atmb_all[:, 1],
+        yerr=surb_all[:, 1],
+        fmt='none',
+        ecolor=(0, 0, 0))
+    title = '(d) Atmospheric vs. Surface budget'
+    xlabel = 'F_a [W m-2]'
+    ylabel = 'F_s [W m-2]'
+    varlist = [atmb_all[:, 0], surb_all[:, 0]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    plt.savefig(pdir + '/scatters_variability.png')
+    plt.close(fig)
+
+
+def plot_mm_scatter(axi, varlist, title, xlabel, ylabel):
+    """Plot a multi-model scatter plot.
+
+    The function produces a scatter plot of a multi-model ensemble, with an
+    ellipse encompassing the 1sigma uncertainty around the multi-model mean.
+
+    Arguments:
+    - axi: an axis identifier;
+    - varlist: a list containing the arrays for the x and y values (they
+      have to be the same length);
+    - title: a string containing the title of the plot;
+    - xlabel: a string containing the x-axis label;
+    - ylabel: a string containing the y-axis label;
+    """
+    xval = varlist[0]
+    yval = varlist[1]
+    modnum = len(xval)
+    plt.scatter(xval, yval, c=(0, 0, 0), alpha=1)
+    plt.scatter(np.nanmean(xval), np.nanmean(yval), c='red')
+    s_l, _, _, _, _ = stats.linregress(xval, yval)
+    semimaj = np.max([np.nanstd(xval), np.nanstd(yval)])
+    semimin = np.min([np.nanstd(xval), np.nanstd(yval)])
+    plot_ellipse(
+        semimaj,
+        semimin,
+        phi=np.arctan(s_l),
+        x_cent=np.nanmean(xval),
+        y_cent=np.nanmean(yval),
+        a_x=axi)
+    plt.title(title, fontsize=12)
+    rcParams['axes.titlepad'] = 1
+    rcParams['axes.labelpad'] = 1
+    plt.xlabel(xlabel, fontsize=14)
+    plt.ylabel(ylabel, fontsize=14)
+    d_x = 0.01 * (max(xval) - min(xval))
+    d_y = 0.01 * (max(yval) - min(yval))
+    for i_m in np.arange(modnum):
+        axi.annotate(
+            str(i_m + 1), (xval[i_m], yval[i_m]),
+            xytext=(xval[i_m] + d_x, yval[i_m] + d_y),
+            fontsize=12)
+    axi.tick_params(axis='both', which='major', labelsize=12)
+    plt.subplots_adjust(hspace=.3)
+    plt.grid()
+
+
+def plot_mm_scatter_spec(axi, varlist, title, xlabel, ylabel):
+    """Plot a multi-model scatter plot ("special version").
+
+    The function produces a scatter plot of a multi-model ensemble, with
+    dashed diagonal lines marking the sum of the x and y values and an
+    ellipse encompassing the 1sigma uncertainty around the multi-model mean.
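+    The dashed iso-lines are useful when the sum x + y is itself a
+    meaningful quantity, as for the vertical and horizontal entropy
+    production components, whose sum is the total indirect estimate.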
+
+    Arguments:
+    - axi: an axis identifier;
+    - varlist: a list containing the arrays for the x and y values (they
+      have to be the same length);
+    - title: a string containing the title of the plot;
+    - xlabel: a string containing the x-axis label;
+    - ylabel: a string containing the y-axis label;
+    """
+    xval = varlist[0]
+    yval = varlist[1]
+    xrang = abs(max(xval) - min(xval))
+    yrang = abs(max(yval) - min(yval))
+    plt.xlim(min(xval) - 0.1 * xrang, max(xval) + 0.1 * xrang)
+    plt.ylim(min(yval) - 0.1 * yrang, max(yval) + 0.1 * yrang)
+    x_x = np.linspace(min(xval) - 0.1 * xrang, max(xval) + 0.1 * xrang, 10)
+    y_y = np.linspace(min(yval) - 0.1 * yrang, max(yval) + 0.1 * yrang, 10)
+    x_m, y_m = np.meshgrid(x_x, y_y)
+    z_m = x_m + y_m
+    c_p = plt.contour(
+        x_m, y_m, z_m, colors='black', linestyles='dashed', linewidths=1.)
+    plt.clabel(c_p, inline=True, inline_spacing=-4, fontsize=8)
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+
+
+def plot_mm_summaryscat(pdir, summary_varlist):
+    """Plot multi-model scatter plots of some key quantities.
+
+    The function produces a plot containing 6 scatter plots:
+    - (a) TOA vs. atmospheric energy budget;
+    - (b) Baroclinic efficiency vs. intensity of the LEC;
+    - (c) Vertical vs. horizontal component;
+    - (d) Indirect vs. direct method;
+    - (e) Indirect method vs. emission temperature;
+    - (f) Baroclinic efficiency vs. emission temperature;
+
+    Arguments:
+    - pdir: a plots directory;
+    - summary_varlist: a list containing the quantities to be plotted as a
+      1D (model) array, or a 2D array (model, 2), with the first column
+      being the mean value and the second column being the inter-annual
+      variance;
+    """
+    atmb_all = summary_varlist[0]
+    baroceff_all = summary_varlist[1]
+    horzentr_all = summary_varlist[2]
+    lec_all = summary_varlist[3]
+    matentr_all = summary_varlist[4]
+    te_all = summary_varlist[5]
+    toab_all = summary_varlist[6]
+    vertentr_all = summary_varlist[7]
+    indentr_all = horzentr_all[:, 0] + vertentr_all[:, 0]
+    fig = plt.figure()
+    fig.set_size_inches(12, 22)
+    axi = plt.subplot(321)
+    title = '(a) TOA vs. atmospheric energy budget'
+    xlabel = 'R_t [W m-2]'
+    ylabel = 'F_a [W m-2]'
+    varlist = [toab_all[:, 0], atmb_all[:, 0]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(322)
+    title = '(b) Baroclinic efficiency vs. intensity of LEC'
+    xlabel = 'Eta'
+    ylabel = 'W [W/m2]'
+    varlist = [baroceff_all, lec_all[:, 0]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(323)
+    title = '(c) Vertical vs. horizontal component'
+    xlabel = 'S_hor [W m-2 K-1]'
+    ylabel = 'S_ver [W m-2 K-1]'
+    varlist = [horzentr_all[:, 0], vertentr_all[:, 0]]
+    plot_mm_scatter_spec(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(324)
+    title = '(d) Indirect vs. direct method'
+    xlabel = 'S_ind [W m-2 K-1]'
+    ylabel = 'S_dir [W m-2 K-1]'
+    varlist = [indentr_all, matentr_all[:, 0]]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(325)
+    title = '(e) Indirect method vs. emission temperature'
+    xlabel = 'T_E [K]'
+    ylabel = 'S_mat [W m-2 K-1]'
+    varlist = [te_all, indentr_all]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    axi = plt.subplot(326)
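+    # Panel (f): baroclinic efficiency against emission temperature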
+    title = '(f) Baroclinic efficiency vs. emission temperature'
+    xlabel = 'T_E [K]'
+    ylabel = 'Eta'
+    varlist = [te_all, baroceff_all]
+    plot_mm_scatter(axi, varlist, title, xlabel, ylabel)
+    oname = pdir + '/scatters_summary.png'
+    plt.savefig(oname)
+    plt.subplots_adjust(hspace=.3)
+
+
+def plot_mm_transp(model_names, wdir, pdir):
+    """Plot multi-model meridional enthalpy transports.
+
+    The function plots in three panels the total, atmospheric and oceanic
+    enthalpy transports, respectively.
+
+    Arguments:
+    - model_names: a list of model names contained in the ensemble;
+    - wdir: a working directory;
+    - pdir: a plots directory;
+    """
+    fig = plt.figure()
+    fig.set_size_inches(12, 22)
+    axi = plt.subplot(311)
+    yrange = [-6.25E15, 6.25E15]
+    plot_mm_transp_panel(model_names, wdir, axi, 'total', yrange)
+    axi = plt.subplot(312)
+    plot_mm_transp_panel(model_names, wdir, axi, 'atmos', yrange)
+    axi = plt.subplot(313)
+    yrange = [-3E15, 3E15]
+    plot_mm_transp_panel(model_names, wdir, axi, 'ocean', yrange)
+    oname = pdir + '/meridional_transp.png'
+    plt.savefig(oname)
+    plt.close(fig)
+
+
+def plot_mm_transp_panel(model_names, wdir, axi, domn, yrange):
+    """Plot a meridional section of enthalpy transport from a model ensemble.
+
+    Arguments:
+    - model_names: a list of model names contained in the ensemble;
+    - wdir: a working directory;
+    - axi: the axis of the plot;
+    - domn: the domain (total, atmospheric or oceanic);
+    - yrange: a range for the y-axis;
+    """
+    for model in model_names:
+        tot_transp_file = (wdir + '/{}_transp_mean_{}.nc'.format(domn, model))
+        name = '{}_{}'.format(domn, model)
+        with Dataset(tot_transp_file) as dataset:
+            toat = dataset.variables[name][:]
+            lats = dataset.variables['lat_{}'.format(model)][:]
+        plt.plot(np.array(lats), np.array(toat), color='black', linewidth=1.)
+    plt.title('{} heat transports'.format(domn), fontsize=18)
+    plt.xlabel('Latitude [deg]', fontsize=14)
+    plt.ylabel('[W]', fontsize=14)
+    plt.tight_layout()
+    plt.ylim(yrange)
+    plt.xlim(-90, 90)
+    axi.tick_params(axis='both', which='major', labelsize=12)
+    plt.grid()
+
+
+def pr_output(varout, filep, nc_f, nameout, latn):
+    """Print processed transports to a NetCDF file.
+
+    Save fields to NetCDF, retrieving information from an existing
+    NetCDF file. Metadata are transferred from the existing file to the
+    new one.
+
+    Arguments:
+    - varout: the transport to be stored, with shape (lat);
+    - filep: the existing dataset, from where the metadata are
+      retrieved. The lat coordinate has to have the same dimension as
+      the field to be saved to the new file;
+    - nc_f: the name of the output file;
+    - nameout: the name of the variable to be saved;
+    - latn: the name of the latitude dimension;
+
+    PROGRAMMER(S)
+    Chris Slocum (2014), modified by Valerio Lembo (2018).
+    """
+    fourc = fourier_coefficients
+    nc_fid = Dataset(filep, 'r')
+    w_nc_fid = Dataset(nc_f, 'w', format='NETCDF4')
+    w_nc_fid.description = ("Total, atmospheric and oceanic annual "
+                            "mean meridional heat transports")
+    fourc.extr_lat(nc_fid, w_nc_fid, latn)
+    w_nc_var = w_nc_fid.createVariable(nameout, 'f8', (latn, ))
+    varatts(w_nc_var, nameout)
+    w_nc_fid.variables[nameout][:] = varout
+    w_nc_fid.close()
+    nc_fid.close()
+
+
+def removeif(filename):
+    """Remove filename if it exists."""
+    try:
+        os.remove(filename)
+    except OSError:
+        pass
+
+
+def transport(zmean, gmean, lat):
+    """Integrate the energy/water mass budgets into meridional transports.
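+
+    The transport is the poleward integral of the budget anomaly (zonal
+    mean minus global mean), approximately:
+
+        T(lat) = -2 pi R^2 int_lat^90 (zmean - gmean) cos(lat') dlat'
+
+    with R the Earth's radius; latwgt provides the discrete weights.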
+
+    Arguments:
+    - zmean: zonal mean input fields;
+    - gmean: the global mean of the input fields;
+    - lat: a latitudinal array (in degrees of latitude);
+    """
+    p_i = math.pi
+    dlat = np.zeros(len(lat))
+    for i in range(len(lat) - 1):
+        dlat[i] = abs(lat[i + 1] - lat[i])
+    dlat[len(lat) - 1] = dlat[len(lat) - 2]
+    zmn_ub = np.zeros((np.shape(zmean)[0], np.shape(zmean)[1]))
+    for index, value in enumerate(gmean):
+        for j_l in range(np.shape(zmean)[1]):
+            zmn_ub[index, j_l] = zmean[index, j_l] - value
+    zmn_ub[np.isnan(zmn_ub)] = 0
+    cumb = np.zeros((np.shape(zmean)[0], np.shape(zmean)[1]))
+    transp = np.zeros((np.shape(zmean)[0], np.shape(zmean)[1]))
+    for j_l in range(len(lat) - 1):
+        cumb[:, j_l] = (-2 * np.nansum(
+            latwgt(lat[j_l:len(lat)], zmn_ub[:, j_l:len(lat)]), axis=1))
+    r_earth = 6.371 * 10**6
+    transp = 2 * p_i * cumb * r_earth * r_earth
+    return [zmn_ub, transp]
+
+
+def transp_max(lat, transp, lim):
+    """Obtain transport peak magnitude and location from interpolation.
+
+    Arguments:
+    - lat: a latitudinal array;
+    - transp: the meridional transport as a 1D array (lat);
+    - lim: the limits (-lim, lim) in which the peak search is constrained
+      (necessary for the oceanic transports);
+    """
+    deriv = np.gradient(transp)
+    x_c = zerocross1d(lat, deriv)
+    y_i = np.zeros(2)
+    xc_cut = np.zeros(2)
+    j_p = 0
+    for value in x_c:
+        if abs(value) <= lim:
+            xc_cut[j_p] = value
+            y_i[j_p] = interpolate.interp1d(lat, transp, kind='cubic')(value)
+            j_p = j_p + 1
+            if j_p == 2:
+                break
+    return [xc_cut, y_i]
+
+
+def transports_preproc(lats, yrs, lim, transp):
+    """Compute the peak magnitudes and locations of a meridional transport.
+
+    This function computes the peak magnitudes and locations year by year
+    through the function transp_max and stores them in a list.
+
+    Arguments:
+    - lats: a latitudinal array;
+    - yrs: the number of years over which to iterate;
+    - lim: the range (-lim,lim) in which the function transp_max has to
+      search for the peaks;
+    - transp: the array containing the transport;
+    """
+    transpp = transp[1]
+    transp_mean = np.nanmean(transpp, axis=0)
+    yr_ext = []
+    lat_maxm = np.zeros([2, yrs])
+    tr_maxm = np.zeros([2, yrs])
+    lat_max = list()
+    tr_max = list()
+    for t_t in np.arange(int(yrs)):
+        yr_ext = transp_max(lats, transpp[t_t, :], lim)
+        lat_max.append(yr_ext[0])
+        tr_max.append(yr_ext[1])
+    for t_t in np.arange(int(yrs)):
+        lat_maxm[:, t_t] = lat_max[t_t]
+        tr_maxm[:, t_t] = tr_max[t_t]
+    list_peak = [lat_maxm, tr_maxm]
+    return transp_mean, list_peak
+
+
+def varatts(w_nc_var, varname):
+    """Add attributes to the variables, depending on their name.
+
+    Arguments:
+    - w_nc_var: a variable object;
+    - varname: the name of the variable, among total, atmos, ocean, wmb,
+      latent;
+    """
+    if varname == 'total':
+        w_nc_var.setncatts({
+            'long_name': "Total merid. heat transport",
+            'units': "W",
+            'level_desc': 'TOA'
+        })
+    elif varname == 'atmos':
+        w_nc_var.setncatts({
+            'long_name': "Atmos. merid. heat transport",
+            'units': "W",
+            'level_desc': 'Vertically integrated'
+        })
+    elif varname == 'ocean':
+        w_nc_var.setncatts({
+            'long_name': "Ocean. merid. heat transport",
+            'units': "W",
+            'level_desc': 'sfc'
+        })
+    elif varname == 'wmb':
+        w_nc_var.setncatts({
+            'long_name': "Merid. water mass transport",
+            'units': "kg s-1",
+            'level_desc': 'sfc'
+        })
+    elif varname == 'latent':
+        w_nc_var.setncatts({
+            'long_name': "Merid. latent heat transport",
+            'units': "W",
+            'level_desc': 'sfc'
+        })
+
+
+def zerocross1d(x_x, y_y):
+    """Find the zero crossing points in 1d data.
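+
+    A minimal illustration (hypothetical values): for x_x = [0, 1, 2] and
+    y_y = [-1, 1, 3], the sign change between the first two samples is
+    located by linear interpolation at x = 0.5.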
+
+    Find the zero crossing events in a discrete data set. Linear
+    interpolation is used to determine the actual locations of the zero
+    crossings between two data points showing a change in sign. Data points
+    which are zero are counted as zero crossings if a sign change occurs
+    across them. Note that the first and last data points will not be
+    considered, whether or not they are zero.
+
+    Arguments:
+    - x_x, y_y: arrays with the abscissa and ordinate data values;
+
+    Credits:
+    The PyA group (https://github.com/sczesla/PyAstronomy).
+    Modified by Valerio Lembo (valerio.lembo@uni-hamburg.de).
+
+    License:
+    Copyright (c) 2011, PyA group.
+    """
+    indi = np.where(y_y[1:] * y_y[0:-1] < 0.0)[0]
+    d_x = x_x[indi + 1] - x_x[indi]
+    d_y = y_y[indi + 1] - y_y[indi]
+    z_c = -y_y[indi] * (d_x / d_y) + x_x[indi]
+    z_i = np.where(y_y == 0.0)[0]
+    z_i = z_i[np.where((z_i > 0) & (z_i < x_x.size - 1))]
+    z_i = z_i[np.where(y_y[z_i - 1] * y_y[z_i + 1] < 0.0)]
+    zzindi = np.concatenate((indi, z_i))
+    z_z = np.concatenate((z_c, x_x[z_i]))
+    sind = np.argsort(z_z)
+    z_z, zzindi = z_z[sind], zzindi[sind]
+    return z_z
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/provenance_meta.py b/esmvaltool/diag_scripts/thermodyn_diagtool/provenance_meta.py
new file mode 100644
index 0000000000..25526f9bac
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/provenance_meta.py
@@ -0,0 +1,117 @@
+"""ATTRIBUTES FOR PROVENANCE TAGGING.
+
+Module containing functions to create the metadata for the output files.
+
+The module contains the following functions:
+- get_prov_map: create a record of metadata for 2D outputs;
+- get_prov_transp: create a record of metadata for 1D outputs
+  (e.g. transports);
+- meta_direntr: write metadata to a file containing one of the components of
+  the material entropy production with the direct method;
+- meta_indentr: write metadata to a file containing one of the components of
+  the material entropy production with the indirect method;
+
+@author: Valerio Lembo, University of Hamburg, 2019.
+"""
+
+from esmvaltool.diag_scripts.shared import ProvenanceLogger
+
+
+def get_prov_map(attr, ancestor_files):
+    """Create a provenance record for the 2D diagnostic outputs."""
+    caption = (
+        "Thermodynamic Diagnostic Tool - Monthly mean {} (lat, lon) fields "
+        "for model {}.".format(attr[0], attr[1]))
+
+    record = {
+        'caption': caption,
+        'statistics': ['mean'],
+        'domains': ['global'],
+        'plot_type': ['geo'],
+        'authors': ['lemb_va'],
+        'references': ['lembo16climdyn', 'lembo19gmdd', 'lucarini14revgeop'],
+        'ancestors': ancestor_files,
+    }
+    return record
+
+
+def get_prov_transp(attr, ancestor_file, plotname):
+    """Create a provenance record for the 1D meridional transports."""
+    caption = ("Thermodynamic Diagnostic Tool - Annual mean zonally averaged"
+               " meridional {} transports"
+               " for model {}.".format(attr[0], attr[1]))
+
+    record = {
+        'caption': caption,
+        'statistics': ['mean'],
+        'domains': ['global'],
+        'plot_type': ['sect'],
+        'plot_file': plotname,
+        'authors': ['lemb_va'],
+        'references': ['lembo16climdyn', 'lembo19gmdd', 'lucarini14revgeop'],
+        'ancestors': ancestor_file,
+    }
+    return record
+
+
+def meta_direntr(cfg, model, inlist, flist):
+    """Write metadata to components of the direct entropy prod maps.
+
+    Arguments:
+    - cfg: the ESMValTool configuration dictionary;
+    - model: the name of the model;
+    - inlist: the list of the input filenames;
+    - flist: the list of the entropy filenames;
+
+    @author: Valerio Lembo, University of Hamburg, 2019.
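+
+    Note: the ancestor files are picked by fixed positions in inlist, so
+    the input file ordering defined by the recipe must not change.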
+    """
+    with ProvenanceLogger(cfg) as provlog:
+        attr = ['sensible heat entropy production', model]
+        ancestor = [
+            inlist[1], inlist[2], inlist[5], inlist[15], inlist[17],
+            inlist[19]
+        ]
+        record = get_prov_map(attr, ancestor)
+        provlog.log(flist[0], record)
+        attr = ['evaporation entropy production', model]
+        ancestor = [inlist[0], inlist[2], inlist[5], inlist[15]]
+        record = get_prov_map(attr, ancestor)
+        provlog.log(flist[1], record)
+        attr = ['rainfall precipitation entropy production', model]
+        ancestor = [inlist[2], inlist[3], inlist[5], inlist[15]]
+        record = get_prov_map(attr, ancestor)
+        provlog.log(flist[2], record)
+        attr = ['snowfall precipitation entropy production', model]
+        ancestor = [inlist[2], inlist[4], inlist[5], inlist[15]]
+        record = get_prov_map(attr, ancestor)
+        provlog.log(flist[3], record)
+        attr = ['snow melt entropy production', model]
+        ancestor = [inlist[4], inlist[15]]
+        record = get_prov_map(attr, ancestor)
+        provlog.log(flist[4], record)
+        attr = ['potential energy entropy production', model]
+        ancestor = [
+            inlist[2], inlist[3], inlist[4], inlist[5], inlist[8],
+            inlist[15]
+        ]
+        record = get_prov_map(attr, ancestor)
+        provlog.log(flist[5], record)
+
+
+def meta_indentr(cfg, model, inlist, flist):
+    """Write metadata to components of the indirect entropy prod maps.
+
+    Arguments:
+    - cfg: the ESMValTool configuration dictionary;
+    - model: the name of the model;
+    - inlist: the list of the input filenames;
+    - flist: the list of the entropy filenames;
+
+    @author: Valerio Lembo, University of Hamburg, 2019.
+    """
+    with ProvenanceLogger(cfg) as provlog:
+        attr = ['horizontal entropy production', model]
+        ancestor = [inlist[8], inlist[10], inlist[12]]
+        record = get_prov_map(attr, ancestor)
+        provlog.log(flist[0], record)
+        attr = ['vertical entropy production', model]
+        ancestor = [
+            inlist[6], inlist[7], inlist[8], inlist[9], inlist[11],
+            inlist[15]
+        ]
+        record = get_prov_map(attr, ancestor)
+        provlog.log(flist[1], record)
diff --git a/esmvaltool/diag_scripts/thermodyn_diagtool/thermodyn_diagnostics.py b/esmvaltool/diag_scripts/thermodyn_diagtool/thermodyn_diagnostics.py
new file mode 100644
index 0000000000..6d7bef293d
--- /dev/null
+++ b/esmvaltool/diag_scripts/thermodyn_diagtool/thermodyn_diagnostics.py
@@ -0,0 +1,525 @@
+r"""MAIN PROGRAM.
+
+TheDiaTo - The diagnostic tool for climate system thermodynamics.
+
+Author
+Valerio Lembo
+(Meteorological Institute, Hamburg University - valerio.lembo@uni-hamburg.de)
+
+Contributors
+Frank Lunkeit
+(Meteorological Institute, Hamburg University - f.lunkeit@uni-hamburg.de)
+Nikolay Koldunov
+(MARUM/AWI, nikolay.koldunov@awi.de, Germany)
+
+Project
+CRC - TRR 181 "Energy transfers in Atmosphere and Ocean"
+
+#############################################################################
+
+SOFTWARE DESCRIPTION
+
+The tool consists of four modules: one for the computation of energy budgets
+and transports, one for the water mass budgets (and related meridional
+transports), one for the Lorenz Energy Cycle (LEC), and one for the material
+entropy production.
+
+The first module is run by default, the others are optional. If the lsm
+option is set to true, modules 1 and 2 will be run with additional separate
+results over land and oceans. The land-sea mask is provided by the
+ESMValTool preprocessor.
+
+- MODULE 1 (default)
+Earth's energy budgets from radiative and heat fluxes at Top-of-Atmosphere,
+at the surface and in the atmosphere (as a residual).
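+The atmospheric budget is obtained, under this residual assumption, as the
+difference between the TOA and surface net energy fluxes (F_a = R_t - F_s).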
+
+Meridional transports, magnitude and location of the peaks in each
+hemisphere (only for heat transports) are also computed.
+The baroclinic efficiency is computed from TOA energy budgets, emission
+temperature (in turn retrieved from OLR) and near-surface temperature.
+
+- MODULE 2 (optional)
+Water mass and latent energy budgets and meridional transports are computed
+from latent heat fluxes, snowfall and rainfall precipitation fluxes.
+Magnitude and location of the peaks in each hemisphere (only for heat
+transports) are also computed, as for module 1.
+
+- MODULE 3 (optional)
+The Lorenz Energy Cycle (LEC) is computed in spectral components from near-
+surface temperatures, temperatures and the three components of velocities
+over pressure levels.
+The storage and conversion terms are directly computed, the sources and
+sinks are retrieved as residuals.
+Components are grouped into a zonal mean, stationary and transient eddy
+part.
+
+- MODULE 4 (optional)
+The material entropy production is computed using the indirect method, the
+direct method or both (following Lucarini et al., 2014).
+For the indirect method a vertical and a horizontal component are provided.
+For the direct method, all components are combined, related to the
+hydrological cycle (attributable to evaporation, rainfall and snowfall
+precipitation, phase changes and potential energy of the droplet), to the
+sensible heat fluxes and to kinetic energy dissipation. For the latter, the
+LEC computation is required, given that the strength of the LEC can be
+considered as equal to the kinetic energy dissipated to heating. If the
+option for module 3 is set to false, a reference value for the material
+entropy production related to the kinetic energy dissipation is provided.
+
+PREREQUISITES
+
+The program shares the same prerequisites with the overall ESMValTool
+architecture (see http://esmvaltool.readthedocs.io/en/latest/install.html).
+
+USAGE
+
+1: Obtain the datasets: the program accepts the following variables as
+   input for the computations:
+   Monthly mean resolution or higher:
+   - TOA shortwave radiation downwards;
+   - TOA shortwave radiation upwards;
+   - TOA longwave radiation upwards (OLR);
+   - Surface shortwave radiation downwards;
+   - Surface shortwave radiation upwards;
+   - Surface longwave radiation downwards;
+   - Surface longwave radiation upwards;
+   - Surface turbulent latent heat fluxes;
+   - Surface turbulent sensible heat fluxes;
+   - Surface temperature;
+   - Specific humidity;
+   Daily mean resolution or higher:
+   - Near-surface temperature;
+   - Near-surface (or 10m) zonal velocity;
+   - Near-surface (or 10m) meridional velocity;
+   - Air temperature (on pressure levels);
+   - Horizontal velocity (on pressure levels);
+   - Meridional velocity (on pressure levels);
+   - Vertical velocity (on pressure levels);
+   Data on a lonlat grid are accepted, with a CMOR-compliant coordinate
+   system. The pre-processing modules of the ESMValTool scheme will take
+   care of converting known grids and recognized datasets to CMOR
+   standards. For a list of known formats, see
+   http://esmvaltool.readthedocs.io/en/latest/running.html#tab-obs-dat
+
+2: A configuration template is available in the ESMValTool release. Set
+   your own paths to local directories here. Input datasets are read in
+   MODELPATH, MODELPATH2, OBSPATH or OBSPATH2; output datasets are stored
+   in WORKPATH, plots in PLOTPATH (refer to the manual for ESMValTool).
+
+3: Go to the recipe file in ~/recipes/recipe_thermodyn_diagtool.yml.
+   Set the namelist with the datasets that you need, following the
+   ESMValTool naming convention. Here you can also set the length of the
+   dataset you want to subset.
+   In the 'scripts' section, set the options with the modules that you want
+   the program to use:
+   - wat: if set to true, the program will compute the water mass and
+     latent energy budget;
+   - lec: if set to true, the program will compute the Lorenz Energy Cycle
+     (LEC) averaged on each year;
+   - entr: if set to true, the program will compute the material entropy
+     production (MEP);
+   - met: if set to 1, the program will compute the MEP with the indirect
+     method, if set to 2 with the direct method, if set to 3, both
+     methods will be computed and compared with each other;
+4: Run the tool by typing:
+   esmvaltool -c $CONFIG_FILE \\
+   esmvaltool/recipes/recipe_thermodyn_diagtool.yml
+
+OUTPUT
+
+The output directory contains the following NetCDF files:
+ - (output directory):
+   atmos_transp_mean_<model-name>.nc
+   latent_transp_mean_<model-name>.nc
+   ocean_transp_mean_<model-name>.nc
+   total_transp_mean_<model-name>.nc
+   wmb_transp_mean_<model-name>.nc
+
+   contain annual mean meridional sections of heat transports in the
+   atmosphere, oceans, and as a total; latent energy transports and water
+   mass transports;
+
+ - (output directory)/<model-name>:
+   <model-name>_atmb.nc
+   (<model-name>_latent.nc; if wat is set to true)
+   <model-name>_surb.nc
+   <model-name>_toab.nc
+   (<model-name>_wmb.nc; if wat is set to true)
+
+   contain annual mean 2D fields of energy budget, latent heat and water
+   mass budgets;
+
+   <model-name>_barocEff.nc
+
+   contains the evolution of annual mean baroclinic efficiency
+   (Lucarini et al., 2011).
+
+   (if entr is set to true):
+   <model-name>_evap_entr.nc (if met is set to 2 or 3)
+   <model-name>_horizEntropy.nc (if met is set to 1 or 3)
+   <model-name>_pot_drop_entr.nc (if met is set to 2 or 3)
+   <model-name>_rain_entr.nc (if met is set to 2 or 3)
+   <model-name>_sens_entr.nc (if met is set to 2 or 3)
+   <model-name>_snow_entr.nc (if met is set to 2 or 3)
+   <model-name>_snowmelt_entr.nc (if met is set to 2 or 3)
+   <model-name>_verticalEntropy.nc (if met is set to 1 or 3)
+   contain the evolution of annual mean components of the material entropy
+   production.
+
+ - (plots directory):
+   meridional_transp.png: contains the model inter-comparison of
+   meridional sections of zonally averaged total, atmospheric and oceanic
+   meridional heat transports;
+   scatters_summary.png: contains the scatter plots of
+   model intercomparisons of various metrics retrieved in the program;
+   scatters_variability.png: contains scatter plots of model
+   intercomparisons between TOA, atmospheric and surface global mean
+   energy budgets and their inter-annual variability;
+
+ - (plots directory)/<model-name>:
+   <model-name>_atmb_timeser.png: the atmospheric budget annual mean
+   global and hemispheric time series;
+   <model-name>_energy_climap.png: the TOA, atmospheric and surface
+   climatological mean fields;
+   <model-name>_heat_transp.png: the meridional sections of total,
+   atmospheric and oceanic meridional heat transports (implied from energy
+   budgets);
+   <model-name>_latent_climap.png: the climatological mean latent heat
+   field;
+   <model-name>_latent_timeser.png: the latent heat annual mean global and
+   hemispheric evolutions;
+   <model-name>_latent_transp.png: the meridional section of annual mean
+   meridional latent heat transport;
+   <model-name>_scatpeak.png: the scatter plots of atmospheric vs. oceanic
+   peak magnitude in both hemispheres;
+   <model-name>_sevap_climap.png: the annual mean field of material
+   entropy production due to evaporation;
+   <model-name>_smelt_climap.png: the annual mean field of material
+   entropy production due to snow melting;
+   <model-name>_spotp_climap.png: the annual mean field of material
+   entropy production due to potential energy of the droplet;
+   <model-name>_srain_climap.png: the annual mean field of material
+   entropy production due to rainfall precipitation;
+   <model-name>_ssens_climap.png: the annual mean field of material
+   entropy production due to sensible heat fluxes;
+   <model-name>_ssnow_climap.png: the annual mean field of material
+   entropy production due to snowfall precipitation;
+   <model-name>_surb_timeser.png: the surface budget annual mean
+   global and hemispheric time series;
+   <model-name>_sver_climap.png: the annual mean field of vertical
+   material entropy production through the indirect method;
+   <model-name>_toab_timeser.png: the TOA budget annual mean
+   global and hemispheric time series;
+   <model-name>_wmb_climap.png: the climatological mean water mass budget
+   field;
+   <model-name>_wmb_timeser.png: the water mass annual mean global and
+   hemispheric evolutions;
+   <model-name>_wmb_transp.png: the meridional section of annual mean
+   meridional water mass transport;
+
+ - (plots directory)/<model-name>/LEC_results:
+   <model-name>_<year>_lec_diagram.png: the flux diagram for the annual
+   mean LEC cycle in a specific year;
+   <model-name>_<year>_lec_table.txt: the table containing the storage and
+   conversion terms for the annual mean LEC cycle in a specific year;
+
+The file log.txt in the '$WORK_PATH/recipe_thermodyn_diagtool_date_hour/run'
+sub-directory contains the values for the metrics and all useful information
+for immediate model intercomparison.
+
+
+#############################################################################
+
+20170803-A_lemb_va: Modified header with description and caveats
+20170629-A_kold_ni: Atmospheric budgets diagnostics written
+20180524-A_lemb_va: first complete working thermodynamics diagnostics
+20190325-A_lemb_va: complete updated version for ESMValTool v2.0b
+
+#############################################################################
+"""
+
+# New packages for version 2.0 of ESMValTool
+import logging
+import os
+import warnings
+
+import numpy as np
+
+import esmvaltool.diag_scripts.shared as e
+from esmvaltool.diag_scripts.shared import ProvenanceLogger
+
+from esmvaltool.diag_scripts.thermodyn_diagtool import computations, \
+    lorenz_cycle, mkthe, plot_script, provenance_meta
+
+warnings.filterwarnings("ignore", message="numpy.dtype size changed")
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+def main(cfg):
+    """Execute the program.
+
+    Argument cfg, containing directory paths, preprocessed input dataset
+    filenames and user-defined options, is passed by the ESMValTool
+    preprocessor.
+ """ + provlog = ProvenanceLogger(cfg) + lorenz = lorenz_cycle + comp = computations + logger.info('Entering the diagnostic tool') + # Load paths + wdir_up = cfg['work_dir'] + pdir_up = cfg['plot_dir'] + logger.info('Work directory: %s \n', wdir_up) + logger.info('Plot directory: %s \n', pdir_up) + plotsmod = plot_script + data = e.Datasets(cfg) + logger.debug(data) + models = data.get_info_list('dataset') + model_names = list(set(models)) + model_names.sort() + logger.info(model_names) + varnames = data.get_info_list('short_name') + curr_vars = list(set(varnames)) + logger.debug(curr_vars) + # load user-defined options + lsm = str(cfg['lsm']) + wat = str(cfg['wat']) + lec = str(cfg['lec']) + entr = str(cfg['entr']) + met = str(cfg['met']) + flags = [wat, entr, met] + # Initialize multi-model arrays + modnum = len(model_names) + te_all = np.zeros(modnum) + toab_all = np.zeros([modnum, 2]) + toab_oc_all = np.zeros(modnum) + toab_la_all = np.zeros(modnum) + atmb_all = np.zeros([modnum, 2]) + atmb_oc_all = np.zeros(modnum) + atmb_la_all = np.zeros(modnum) + surb_all = np.zeros([modnum, 2]) + surb_oc_all = np.zeros(modnum) + surb_la_all = np.zeros(modnum) + wmb_all = np.zeros([modnum, 2]) + wmb_oc_all = np.zeros(modnum) + wmb_la_all = np.zeros(modnum) + latent_all = np.zeros([modnum, 2]) + latent_oc_all = np.zeros(modnum) + latent_la_all = np.zeros(modnum) + baroc_eff_all = np.zeros(modnum) + lec_all = np.zeros([modnum, 2]) + horzentr_all = np.zeros([modnum, 2]) + vertentr_all = np.zeros([modnum, 2]) + matentr_all = np.zeros([modnum, 2]) + irrevers_all = np.zeros(modnum) + diffentr_all = np.zeros([modnum, 2]) + logger.info("Entering main loop\n") + i_m = 0 + for model in model_names: + # Load paths to individual models output and plotting directories + wdir = os.path.join(wdir_up, model) + pdir = os.path.join(pdir_up, model) + os.makedirs(wdir) + os.makedirs(pdir) + # Reading file names for the specific model + filenames = data.get_info_list('filename', dataset=model) + logger.info('Processing model: %s \n', model) + rlds_file = filenames[6] + rlus_file = filenames[7] + rsds_file = filenames[9] + rsus_file = filenames[11] + ta_file = filenames[13] + ts_file = filenames[15] + # Read path to land-sea mask + for filename, attributes in cfg['input_data'].items(): + if filename == ta_file: + sftlf_fx = attributes['fx_files']['sftlf'] + aux_file = wdir + '/aux.nc' + te_ymm_file, te_gmean_constant, _, _ = mkthe.init_mkthe( + model, wdir, filenames, flags) + te_all[i_m] = te_gmean_constant + logger.info('Computing energy budgets\n') + eb_gmean, eb_file, toab_ymm_file = comp.budgets( + model, wdir, aux_file, filenames) + prov_rec = provenance_meta.get_prov_map( + ['TOA energy budgets', model], + [filenames[10], filenames[12], filenames[8]]) + provlog.log(eb_file[0], prov_rec) + prov_rec = provenance_meta.get_prov_map( + ['atmospheric energy budgets', model], [ + filenames[1], filenames[6], filenames[7], filenames[8], + filenames[9], filenames[10], filenames[11], filenames[12] + ]) + provlog.log(eb_file[1], prov_rec) + prov_rec = provenance_meta.get_prov_map( + ['surface energy budgets', model], [ + filenames[1], filenames[6], filenames[7], filenames[9], + filenames[11] + ]) + provlog.log(eb_file[2], prov_rec) + toab_all[i_m, 0] = np.nanmean(eb_gmean[0]) + toab_all[i_m, 1] = np.nanstd(eb_gmean[0]) + atmb_all[i_m, 0] = np.nanmean(eb_gmean[1]) + atmb_all[i_m, 1] = np.nanstd(eb_gmean[1]) + surb_all[i_m, 0] = np.nanmean(eb_gmean[2]) + surb_all[i_m, 1] = np.nanstd(eb_gmean[2]) + logger.info('Global mean 
emission temperature: %s\n', + te_gmean_constant) + logger.info('TOA energy budget: %s\n', toab_all[i_m, 0]) + logger.info('Atmospheric energy budget: %s\n', atmb_all[i_m, 0]) + logger.info('Surface energy budget: %s\n', surb_all[i_m, 0]) + logger.info('Done\n') + baroc_eff_all[i_m] = comp.baroceff(model, wdir, aux_file, + toab_ymm_file, te_ymm_file) + logger.info('Baroclinic efficiency (Lucarini et al., 2011): %s\n', + baroc_eff_all[i_m]) + logger.info('Running the plotting module for the budgets\n') + plotsmod.balances(cfg, wdir_up, pdir, + [eb_file[0], eb_file[1], eb_file[2]], + ['toab', 'atmb', 'surb'], model) + logger.info('Done\n') + # Water mass budget + if wat == 'True': + logger.info('Computing water mass and latent energy budgets\n') + _, _, _, aux_list = mkthe.init_mkthe(model, wdir, filenames, flags) + wm_gmean, wm_file = comp.wmbudg(model, wdir, aux_file, filenames, + aux_list) + wmb_all[i_m, 0] = np.nanmean(wm_gmean[0]) + wmb_all[i_m, 1] = np.nanstd(wm_gmean[0]) + logger.info('Water mass budget: %s\n', wmb_all[i_m, 0]) + latent_all[i_m, 0] = np.nanmean(wm_gmean[1]) + latent_all[i_m, 1] = np.nanstd(wm_gmean[1]) + logger.info('Latent energy budget: %s\n', latent_all[i_m, 0]) + logger.info('Done\n') + logger.info('Plotting the water mass and latent energy budgets\n') + plotsmod.balances(cfg, wdir_up, pdir, [wm_file[0], wm_file[1]], + ['wmb', 'latent'], model) + logger.info('Done\n') + for filen in aux_list: + os.remove(filen) + if lsm == 'True': + logger.info('Computing energy budgets over land and oceans\n') + toab_oc_gmean, toab_la_gmean = comp.landoc_budg( + model, wdir, eb_file[0], sftlf_fx, 'toab') + toab_oc_all[i_m] = toab_oc_gmean + toab_la_all[i_m] = toab_la_gmean + logger.info('TOA energy budget over oceans: %s\n', toab_oc_gmean) + logger.info('TOA energy budget over land: %s\n', toab_la_gmean) + atmb_oc_gmean, atmb_la_gmean = comp.landoc_budg( + model, wdir, eb_file[1], sftlf_fx, 'atmb') + atmb_oc_all[i_m] = atmb_oc_gmean + atmb_la_all[i_m] = atmb_la_gmean + logger.info('Atmospheric energy budget over oceans: %s\n', + atmb_oc_gmean) + logger.info('Atmospheric energy budget over land: %s\n', + atmb_la_gmean) + surb_oc_gmean, surb_la_gmean = comp.landoc_budg( + model, wdir, eb_file[2], sftlf_fx, 'surb') + surb_oc_all[i_m] = surb_oc_gmean + surb_la_all[i_m] = surb_la_gmean + logger.info('Surface energy budget over oceans: %s\n', + surb_oc_gmean) + logger.info('Surface energy budget over land: %s\n', surb_la_gmean) + logger.info('Done\n') + if wat == 'True': + logger.info('Computing water mass and latent energy' + ' budgets over land and oceans\n') + wmb_oc_gmean, wmb_la_gmean = comp.landoc_budg( + model, wdir, wm_file[0], sftlf_fx, 'wmb') + wmb_oc_all[i_m] = wmb_oc_gmean + wmb_la_all[i_m] = wmb_la_gmean + logger.info('Water mass budget over oceans: %s\n', + wmb_oc_gmean) + logger.info('Water mass budget over land: %s\n', wmb_la_gmean) + latent_oc_gmean, latent_la_gmean = comp.landoc_budg( + model, wdir, wm_file[1], sftlf_fx, 'latent') + latent_oc_all[i_m] = latent_oc_gmean + latent_la_all[i_m] = latent_la_gmean + logger.info('Latent energy budget over oceans: %s\n', + latent_oc_gmean) + logger.info('Latent energy budget over land: %s\n', + latent_la_gmean) + logger.info('Done\n') + if lec == 'True': + logger.info('Computation of the Lorenz Energy ' + 'Cycle (year by year)\n') + lect = lorenz.preproc_lec(model, wdir, pdir, filenames) + lec_all[i_m, 0] = np.nanmean(lect) + lec_all[i_m, 1] = np.nanstd(lect) + logger.info( + 'Intensity of the annual mean Lorenz Energy ' + 
'Cycle: %s\n', lec_all[i_m, 0]) + logger.info('Done\n') + else: + lect = np.repeat(2.0, len(eb_gmean[0])) + lec_all[i_m, 0] = 2.0 + lec_all[i_m, 1] = 0.2 + if entr == 'True': + if met in {'1', '3'}: + _, _, te_file, _ = mkthe.init_mkthe(model, wdir, filenames, + flags) + logger.info('Computation of the material entropy production ' + 'with the indirect method\n') + indentr_list = [ + rlds_file, rlus_file, rsds_file, rsus_file, te_file, + eb_file[0], ts_file + ] + horz_mn, vert_mn, horzentr_file, vertentr_file = comp.indentr( + model, wdir, indentr_list, aux_file, eb_gmean[0]) + listind = [horzentr_file, vertentr_file] + provenance_meta.meta_indentr(cfg, model, filenames, listind) + horzentr_all[i_m, 0] = np.nanmean(horz_mn) + horzentr_all[i_m, 1] = np.nanstd(horz_mn) + vertentr_all[i_m, 0] = np.nanmean(vert_mn) + vertentr_all[i_m, 1] = np.nanstd(vert_mn) + logger.info( + 'Horizontal component of the material entropy ' + 'production: %s\n', horzentr_all[i_m, 0]) + logger.info( + 'Vertical component of the material entropy ' + 'production: %s\n', vertentr_all[i_m, 0]) + logger.info('Done\n') + logger.info('Running the plotting module for the material ' + 'entropy production (indirect method)\n') + plotsmod.entropy(pdir, vertentr_file, 'sver', + 'Vertical entropy production', model) + os.remove(te_file) + logger.info('Done\n') + if met in {'2', '3'}: + matentr, irrevers, entr_list = comp.direntr( + logger, model, wdir, filenames, aux_file, lect, lec, flags) + provenance_meta.meta_direntr(cfg, model, filenames, entr_list) + matentr_all[i_m, 0] = matentr + if met in {'3'}: + diffentr = (float(np.nanmean(vert_mn)) + float( + np.nanmean(horz_mn)) - matentr) + logger.info('Difference between the two ' + 'methods: %s\n', diffentr) + diffentr_all[i_m, 0] = diffentr + logger.info('Degree of irreversibility of the ' + 'system: %s\n', irrevers) + irrevers_all[i_m] = irrevers + logger.info('Running the plotting module for the material ' + 'entropy production (direct method)\n') + plotsmod.init_plotentr(model, pdir, entr_list) + logger.info('Done\n') + os.remove(te_ymm_file) + logger.info('Done for model: %s \n', model) + i_m = i_m + 1 + logger.info('I will now start multi-model plots') + logger.info('Meridional heat transports\n') + plotsmod.plot_mm_transp(model_names, wdir_up, pdir_up) + logger.info('Scatter plots') + summary_varlist = [ + atmb_all, baroc_eff_all, horzentr_all, lec_all, matentr_all, te_all, + toab_all, vertentr_all + ] + plotsmod.plot_mm_summaryscat(pdir_up, summary_varlist) + logger.info('Scatter plots for inter-annual variability of' + ' some quantities') + eb_list = [toab_all, atmb_all, surb_all] + plotsmod.plot_mm_ebscatter(pdir_up, eb_list) + logger.info("The diagnostic has finished. Now closing...\n") + + +if __name__ == '__main__': + with e.run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/validation.py b/esmvaltool/diag_scripts/validation.py new file mode 100644 index 0000000000..aad9f53742 --- /dev/null +++ b/esmvaltool/diag_scripts/validation.py @@ -0,0 +1,255 @@ +""" +Validation Diagnostic + +This diagnostic uses two datasets (control and experiment), +applies operations on their data, and plots one against the other. +It can optionally use a number of OBS, OBS4MIPS datasets. 
+
+This diagnostic uses CMIP5 data; to switch to CMIP6 change _CMIP_TYPE.
+"""
+
+import logging
+import os
+
+import iris
+import iris.analysis.maths as imath
+import iris.quickplot as qplt
+import matplotlib.pyplot as plt
+import numpy as np
+
+from esmvaltool.diag_scripts.shared import (apply_supermeans,
+                                            get_control_exper_obs,
+                                            group_metadata, run_diagnostic)
+from esmvaltool.preprocessor import extract_region, extract_season
+
+logger = logging.getLogger(os.path.basename(__file__))
+
+
+_CMIP_TYPE = 'CMIP5'
+
+
+def plot_contour(cube, plt_title, file_name):
+    """Plot a contour with iris.quickplot (qplt)."""
+    if len(cube.shape) == 2:
+        qplt.contourf(cube, cmap='RdYlBu_r')
+    else:
+        qplt.contourf(cube[0], cmap='RdYlBu_r')
+    plt.title(plt_title)
+    plt.gca().coastlines()
+    plt.tight_layout()
+    plt.savefig(file_name, bbox_inches='tight')
+    plt.close()
+
+
+def plot_latlon_cubes(cube_1, cube_2, cfg, data_names, obs_name=None):
+    """
+    Plot lat-lon vars for control, experiment, and obs.
+
+    Also plot difference plots (control-exper, control-obs):
+    cube_1: first cube (dataset: dat1)
+    cube_2: second cube (dataset: dat2)
+    cfg: configuration dictionary
+    data_names: var + '_' + dat1 + '_vs_' + dat2
+    """
+    plot_name = cfg['analysis_type'] + '_' + data_names + '.png'
+    plot_title = cfg['analysis_type'] + ': ' + data_names
+    cubes = [cube_1, cube_2]
+
+    # plot the difference: cube_1 - cube_2
+    diffed_cube = imath.subtract(cube_1, cube_2)
+    plot_contour(diffed_cube, 'Difference ' + plot_title,
+                 os.path.join(cfg['plot_dir'], 'Difference_' + plot_name))
+
+    # plot each cube
+    var = data_names.split('_')[0]
+    if not obs_name:
+        cube_names = [data_names.split('_')[1], data_names.split('_')[3]]
+        for cube, cube_name in zip(cubes, cube_names):
+            plot_contour(
+                cube, cube_name + ' ' + cfg['analysis_type'] + ' ' + var,
+                os.path.join(cfg['plot_dir'], cube_name + '_' + var + '.png'))
+    else:
+        # obs is always cube_2
+        plot_contour(
+            cube_2, obs_name + ' ' + cfg['analysis_type'] + ' ' + var,
+            os.path.join(cfg['plot_dir'], obs_name + '_' + var + '.png'))
+
+
+def plot_zonal_cubes(cube_1, cube_2, cfg, plot_data):
+    """Plot cubes data vs latitude or longitude when zonal meaning."""
+    # xcoordinate: latitude or longitude (str)
+    data_names, xcoordinate, period = plot_data
+    var = data_names.split('_')[0]
+    cube_names = [data_names.split('_')[1], data_names.split('_')[3]]
+    x_points = cube_1.coord(xcoordinate).points
+    plt.plot(x_points, cube_1.data, label=cube_names[0])
+    plt.plot(x_points, cube_2.data, label=cube_names[1])
+    if xcoordinate == 'latitude':
+        plt.title(period + ' Zonal Mean for ' + var + ' ' + data_names)
+    elif xcoordinate == 'longitude':
+        plt.title(period + ' Meridional Mean for ' + var + ' ' + data_names)
+    plt.xlabel(xcoordinate + ' (deg)')
+    plt.ylabel(var)
+    plt.tight_layout()
+    plt.grid()
+    plt.legend()
+    if xcoordinate == 'latitude':
+        png_name = 'Zonal_Mean_' + xcoordinate + '_' + data_names + '.png'
+    elif xcoordinate == 'longitude':
+        png_name = 'Merid_Mean_' + xcoordinate + '_' + data_names + '.png'
+    plt.savefig(os.path.join(cfg['plot_dir'], period, png_name))
+    plt.close()
+
+
+def apply_seasons(data_set_dict):
+    """Extract seasons and apply a time mean per season."""
+    data_file = data_set_dict['filename']
+    logger.info("Loading %s for seasonal extraction", data_file)
+    data_cube = iris.load_cube(data_file)
+    seasons = ['DJF', 'MAM', 'JJA', 'SON']
+    season_cubes = [extract_season(data_cube, season) for season in seasons]
+    
season_meaned_cubes = [
+        season_cube.collapsed('time', iris.analysis.MEAN)
+        for season_cube in season_cubes
+    ]
+
+    return season_meaned_cubes
+
+
+def coordinate_collapse(data_set, cfg):
+    """Collapse a coordinate, with optional area slicing and masking."""
+    # see what analysis needs performing
+    analysis_type = cfg['analysis_type']
+
+    # if subset on LAT-LON
+    if 'lat_lon_slice' in cfg:
+        start_longitude = cfg['lat_lon_slice']['start_longitude']
+        end_longitude = cfg['lat_lon_slice']['end_longitude']
+        start_latitude = cfg['lat_lon_slice']['start_latitude']
+        end_latitude = cfg['lat_lon_slice']['end_latitude']
+        data_set = extract_region(data_set, start_longitude, end_longitude,
+                                  start_latitude, end_latitude)
+
+    # if apply mask
+    if '2d_mask' in cfg:
+        mask_file = os.path.join(cfg['2d_mask'])
+        mask_cube = iris.load_cube(mask_file)
+        if 'mask_threshold' in cfg:
+            thr = cfg['mask_threshold']
+            data_set.data = np.ma.masked_array(
+                data_set.data, mask=(mask_cube.data > thr))
+        else:
+            logger.warning('Could not find masking threshold')
+            logger.warning('Please specify it if needed')
+            logger.warning('Masking on 0-values = True (masked value)')
+            data_set.data = np.ma.masked_array(
+                data_set.data, mask=(mask_cube.data == 0))
+
+    # if zonal mean on LON
+    if analysis_type == 'zonal_mean':
+        data_set = data_set.collapsed('longitude', iris.analysis.MEAN)
+
+    # if zonal mean on LAT
+    elif analysis_type == 'meridional_mean':
+        data_set = data_set.collapsed('latitude', iris.analysis.MEAN)
+
+    # if vertical mean
+    elif analysis_type == 'vertical_mean':
+        data_set = data_set.collapsed('pressure', iris.analysis.MEAN)
+
+    return data_set
+
+
+def do_preamble(cfg):
+    """Execute some preamble functionality."""
+    # prepare output dirs
+    time_chunks = ['alltime', 'DJF', 'MAM', 'JJA', 'SON']
+    time_plot_dirs = [
+        os.path.join(cfg['plot_dir'], t_dir) for t_dir in time_chunks
+    ]
+    for time_plot_dir in time_plot_dirs:
+        if not os.path.exists(time_plot_dir):
+            os.makedirs(time_plot_dir)
+
+    # get data
+    input_data = cfg['input_data'].values()
+    grouped_input_data = group_metadata(
+        input_data, 'short_name', sort='dataset')
+
+    return input_data, grouped_input_data
+
+
+def plot_ctrl_exper(ctrl, exper, cfg, plot_key):
+    """Call plotting functions and make plots depending on case."""
+    if cfg['analysis_type'] == 'lat_lon':
+        plot_latlon_cubes(ctrl, exper, cfg, plot_key)
+    elif cfg['analysis_type'] == 'zonal_mean':
+        plot_info = [plot_key, 'latitude', 'alltime']
+        plot_zonal_cubes(ctrl, exper, cfg, plot_info)
+    elif cfg['analysis_type'] == 'meridional_mean':
+        plot_info = [plot_key, 'longitude', 'alltime']
+        plot_zonal_cubes(ctrl, exper, cfg, plot_info)
+
+
+def plot_ctrl_exper_seasons(ctrl_seasons, exper_seasons, cfg, plot_key):
+    """Call plotting functions and make plots with seasons."""
+    seasons = ['DJF', 'MAM', 'JJA', 'SON']
+    if cfg['analysis_type'] == 'zonal_mean':
+        for c_i, e_i, s_n in zip(ctrl_seasons, exper_seasons, seasons):
+            plot_info = [plot_key, 'latitude', s_n]
+            plot_zonal_cubes(c_i, e_i, cfg, plot_info)
+    elif cfg['analysis_type'] == 'meridional_mean':
+        for c_i, e_i, s_n in zip(ctrl_seasons, exper_seasons, seasons):
+            plot_info = [plot_key, 'longitude', s_n]
+            plot_zonal_cubes(c_i, e_i, cfg, plot_info)
+
+
+def main(cfg):
+    """Execute validation analysis and plotting."""
+    logger.setLevel(cfg['log_level'].upper())
+    input_data, grouped_input_data = do_preamble(cfg)
+
+    # select variables and their corresponding obs files
+    for short_name in grouped_input_data:
+        logger.info("Processing 
variable %s", short_name) + + # get the control, experiment and obs dicts + ctrl, exper, obs = get_control_exper_obs(short_name, input_data, + cfg, _CMIP_TYPE) + # set a plot key holding info on var and data set names + plot_key = short_name + '_' + ctrl['dataset'] \ + + '_vs_' + exper['dataset'] + + # get seasons if needed then apply analysis + if cfg['seasonal_analysis']: + ctrl_seasons = apply_seasons(ctrl) + exper_seasons = apply_seasons(exper) + ctrl_seasons = [ + coordinate_collapse(cts, cfg) for cts in ctrl_seasons + ] + exper_seasons = [ + coordinate_collapse(exps, cfg) for exps in exper_seasons + ] + plot_ctrl_exper_seasons(ctrl_seasons, exper_seasons, cfg, plot_key) + + # apply the supermeans (MEAN on time), collapse a coord and plot + ctrl, exper, obs_list = apply_supermeans(ctrl, exper, obs) + ctrl = coordinate_collapse(ctrl, cfg) + exper = coordinate_collapse(exper, cfg) + plot_ctrl_exper(ctrl, exper, cfg, plot_key) + + # apply desired analysis on obs's + if obs_list: + for obs_i, obsfile in zip(obs_list, obs): + obs_analyzed = coordinate_collapse(obs_i, cfg) + obs_name = obsfile['dataset'] + plot_key = short_name + '_CONTROL_vs_' + obs_name + if cfg['analysis_type'] == 'lat_lon': + plot_latlon_cubes( + ctrl, obs_analyzed, cfg, plot_key, obs_name=obs_name) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/zmnam/zmnam.py b/esmvaltool/diag_scripts/zmnam/zmnam.py new file mode 100644 index 0000000000..16b62867e8 --- /dev/null +++ b/esmvaltool/diag_scripts/zmnam/zmnam.py @@ -0,0 +1,110 @@ +""" +Zonal-mean Northern Annular Mode main routine. + +Author: Federico Serva (ISAC-CNR & ISMAR-CNR, Italy) +Copernicus C3S 34a lot 2 (MAGIC) + +Description: +Evaluation of stratosphere-troposphere coupling +based on EOF/PC analysis of the geopotential height field. + +Modification history +20180512-A_serv_fe: Added output netCDFs, more use of preprocessor. +20180510-A_serv_fe: Routines written. + +""" + +import os +import logging + +from esmvaltool.diag_scripts.shared import run_diagnostic, ProvenanceLogger + +# Import zmnam diagnostic routines +from zmnam_calc import zmnam_calc +from zmnam_plot import zmnam_plot +from zmnam_preproc import zmnam_preproc + +logger = logging.getLogger(__name__) + + +def get_provenance_record(vatt, ancestor_files): + """Create a provenance record describing the diagnostic data and plot.""" + caption = ("Compute Zonal-mean Northern Annular Modes between " + "{start_year} and {end_year} ".format(**vatt)) + record = { + 'caption': caption, + 'authors': ['serv_fe', 'hard_jo', 'arno_en', 'cagn_ch'], + 'projects': ['c3s-magic'], + 'references': ['baldwin09qjrms'], + 'plot_types': ['polar', 'zonal'], + 'realms': ['atmos'], + 'domains': ['polar'], + 'ancestors': ancestor_files, + } + return record + + +def main(cfg): + """ + Run the zonal-mean NAM diagnostic. 
+
+    Calling in order:
+        - preprocessing
+        - index calculation
+        - regression and plot
+    """
+    logger.setLevel(cfg['log_level'].upper())
+
+    input_files = cfg['input_data']
+
+    plot_dir = cfg['plot_dir']
+    out_dir = cfg['work_dir']
+    write_plots = cfg['write_plots']
+    fig_fmt = cfg['output_file_type']
+
+    filenames_cat = []
+    fileprops_cat = []
+
+    # Loop over the input cfg
+    for key, value in input_files.items():
+
+        # Collect file names
+        filenames_cat.append(key)
+
+        # Collect the relevant information for naming the outputs
+        fileprops_cat.append([
+            value['project'], value['dataset'], value['exp'],
+            value['ensemble'],
+            str(value['start_year']) + '-' + str(value['end_year'])
+        ])
+
+    # Go to work_dir for running
+    os.chdir(out_dir)
+
+    # Process the list of input files
+    for indfile, ifile in enumerate(filenames_cat):
+
+        ifile_props = fileprops_cat[indfile]
+
+        # Call the diagnostic functions
+        logger.info("Preprocessing %s", ifile)
+        (file_da_an_zm, file_mo_an) = zmnam_preproc(ifile)
+        logger.info("Computing the zmNAM index")
+        outfiles = zmnam_calc(file_da_an_zm, out_dir + '/', ifile_props)
+        # Build the provenance record from the first input file's attributes
+        provenance_record = get_provenance_record(
+            list(input_files.values())[0], ancestor_files=ifile)
+        if write_plots:
+            logger.info("Plotting")
+            plot_files = zmnam_plot(file_mo_an, out_dir + '/', plot_dir +
+                                    '/', ifile_props, fig_fmt, write_plots)
+        else:
+            plot_files = []
+        for fname in outfiles + plot_files:
+            with ProvenanceLogger(cfg) as provenance_logger:
+                provenance_logger.log(fname, provenance_record)
+
+
+# Run the diagnostics
+if __name__ == '__main__':
+    with run_diagnostic() as config:
+        main(config) diff --git a/esmvaltool/diag_scripts/zmnam/zmnam_calc.py b/esmvaltool/diag_scripts/zmnam/zmnam_calc.py new file mode 100644 index 0000000000..d30748b6e1 --- /dev/null +++ b/esmvaltool/diag_scripts/zmnam/zmnam_calc.py @@ -0,0 +1,306 @@
+"""
+Zonal-mean annular mode calculation routine.
+
+Author: Federico Serva (ISAC-CNR & ISMAR-CNR, Italy)
+Copernicus C3S 34a lot 2 (MAGIC)
+"""
+
+import numpy as np
+import netCDF4 as nc4
+from scipy import signal
+
+
+def butter_filter(data, freq, lowcut=None, order=2):
+    """Lowpass-filter the data along the time axis (axis 0)."""
+    # Only lowpass filtering is implemented: a cutoff must be provided,
+    # otherwise the filter type and frequency would be undefined below.
+    if lowcut is None:
+        raise ValueError('butter_filter requires a lowcut frequency')
+    filttype = 'lowpass'
+
+    # The sampling frequency determines the Nyquist frequency
+    nyq = 0.5 * freq
+
+    # Normalized cutoff frequency
+    freqs = lowcut / nyq
+
+    bpoly, apoly = signal.butter(order, freqs, btype=filttype)
+    ysig = signal.filtfilt(bpoly, apoly, data, axis=0)
+
+    return ysig
+
+
+def zmnam_calc(da_fname, outdir, src_props):
+    """Perform the EOF/PC decomposition of the zonal-mean zg field."""
+    deg_to_r = np.pi / 180. 
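+    # Method note (cf. Baldwin and Thompson, 2009, doi:10.1002/qj.479):
+    # lowpass-filtered, latitude-weighted zonal-mean zg anomalies are
+    # decomposed into EOFs level by level, and the leading standardized
+    # PC at each pressure level defines the daily zmNAM index.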
+    lat_weighting = True
+    outfiles = []
+
+    # Note: the daily/monthly climatological means have already been
+    # subtracted from the daily/monthly input files
+
+    # Open daily data
+
+    in_file = nc4.Dataset(da_fname, "r")
+    time_dim = in_file.variables['time'][:]
+    time_nam = in_file.variables['time'].long_name
+    time_uni = in_file.variables['time'].units
+    time_cal = in_file.variables['time'].calendar
+    time = np.array(time_dim[:], dtype='d')
+    # startdate = nc4.num2date(time[0], time_uni, time_cal)
+    date = nc4.num2date(time, in_file.variables['time'].units,
+                        in_file.variables['time'].calendar)
+
+    lev = np.array(in_file.variables['plev'][:], dtype='d')
+    lev_nam = in_file.variables['plev'].long_name
+    lev_uni = in_file.variables['plev'].units
+    lev_pos = in_file.variables['plev'].positive
+    lev_axi = in_file.variables['plev'].axis
+
+    lat = np.array(in_file.variables['lat'][:], dtype='d')
+    # lat_nam = in_file.variables['lat'].long_name
+    lat_uni = in_file.variables['lat'].units
+    lat_axi = in_file.variables['lat'].axis
+
+    lon = np.array(in_file.variables['lon'][:], dtype='d')
+    # lon_nam = in_file.variables['lon'].long_name
+    lon_uni = in_file.variables['lon'].units
+    lon_axi = in_file.variables['lon'].axis
+
+    zg_da = np.squeeze(np.array(in_file.variables['zg'][:], dtype='d'))
+
+    in_file.close()
+
+    n_tim = len(time_dim)
+
+    # Start zmNAM index calculation
+
+    # Lowpass filter (90-day cutoff on daily data)
+    zg_da_lp = butter_filter(zg_da, 1, lowcut=1. / 90, order=2)
+
+    # Outputs, stored by level: EOFs, eigenvalues, daily and monthly PCs
+    eofs = np.zeros((len(lev), len(lat)), dtype='d')
+    eigs = np.zeros(len(lev), dtype='d')
+    pcs_da = np.zeros((n_tim, len(lev)), dtype='d')
+
+    # Calendar-independent monthly means: locate month boundaries by date
+    sta_mon = []  # first day of the month
+    mid_mon = []  # 15th of the month
+    end_mon = []  # last day of the month (add +1 when slicing)
+
+    mon = 999
+    idate = 0
+
+    while idate < len(date):
+
+        # Save first day of the month
+        if date[idate].month != mon:
+            mon = date[idate].month
+            sta_mon.append(idate)
+
+        # Save the mid-month day (the 15th)
+        if date[idate].day == 15:
+            mid_mon.append(idate)
+
+        # Save last day of the month
+        if ((idate == len(date) - 1) or
+                (date[idate].month == mon and
+                 date[idate + 1].month != mon)):
+            end_mon.append(idate)
+
+        idate += 1
+
+    pcs_mo = np.zeros((len(date[mid_mon]), len(lev)), dtype='d')
+
+    # Perform analysis by level
+    for i_lev in np.arange(len(lev)):
+        # Latitude weighting
+        if lat_weighting is True:
+            for j_lat in np.arange(len(lat)):
+                zg_da_lp[:, i_lev, j_lat] *= np.sqrt(
+                    abs(np.cos(lat[j_lat] * deg_to_r)))
+
+        zg_da_lp_an = zg_da_lp[:, i_lev, :] - np.mean(
+            zg_da_lp[:, i_lev, :], axis=0)
+        cov = np.dot(zg_da_lp_an.T, zg_da_lp_an) / (n_tim - 1)
+
+        # Compute eigenvectors and eigenvalues
+        eigenval, eigenvec = np.linalg.eig(cov)
+
+        sum_eigenval = np.sum(eigenval)
+
+        eigenval_norm = eigenval[:] / sum_eigenval
+
+        # Largest eigenvalue
+        max_eigenval = eigenval_norm.argmax()
+
+        # PC calculation
+        pc = np.dot(zg_da_lp_an[:, :], eigenvec)
+
+        # Latitude de-weighting
+        if lat_weighting is True:
+            for i_lat in np.arange(len(lat)):
+                eigenvec[i_lat, :] /= np.sqrt(
+                    abs(np.cos(lat[i_lat] * deg_to_r)))
+
+        # Retain leading standardized PC & EOF
+        lead_pc_mean = np.mean(pc[:, max_eigenval], axis=0)
+        lead_pc_std = np.std(pc[:, max_eigenval], ddof=1, axis=0)
+        lead_pc = (pc[:, max_eigenval] - lead_pc_mean) / lead_pc_std
+        lead_eof = eigenvec[:, max_eigenval]
+
+        max_lat = max(range(len(lat)), key=lambda x: lat[x])
+        min_lat = min(range(len(lat)), key=lambda x: lat[x])
+
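+        # Enforce a common sign convention: the NAM loading should be
+        # weaker at the northernmost latitude than at the southernmost;
+        # flip the PC and EOF together if the pattern comes out reversed.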
+        if lead_eof[max_lat] > lead_eof[min_lat]:
+            lead_pc *= -1
+            lead_eof *= -1
+
+        lead_pc_mo = np.zeros(len(date[mid_mon]), dtype='d')
+        time_mo = np.zeros(len(date[mid_mon]), dtype='d')
+
+        for k_mo in range(len(date[mid_mon])):
+            lead_pc_mo[k_mo] = np.mean(
+                lead_pc[sta_mon[k_mo]:end_mon[k_mo] + 1])
+            time_mo[k_mo] = time[mid_mon[k_mo]]
+
+        # Store PC/EOF for this level (not time-dependent)
+        eigs[i_lev] = eigenval_norm[max_eigenval]
+        eofs[i_lev, :] = lead_eof
+        pcs_da[:, i_lev] = lead_pc
+        pcs_mo[:, i_lev] = lead_pc_mo
+
+    # Save output files
+
+    # (1) daily PCs
+    fname = outdir + '_'.join(src_props) + '_pc_da.nc'
+    outfiles.append(fname)
+    file_out = nc4.Dataset(fname, mode='w', format='NETCDF3_CLASSIC')
+    file_out.title = 'Zonal mean annular mode (1)'
+    file_out.contact = 'F. Serva (federico.serva@artov.isac.cnr.it); \
+    C. Cagnazzo (chiara.cagnazzo@cnr.it)'
+
+    file_out.createDimension('time', None)
+    file_out.createDimension('plev', np.size(lev))
+    file_out.createDimension('lat', np.size(lat))
+    file_out.createDimension('lon', np.size(lon))
+
+    time_var = file_out.createVariable('time', 'd', ('time', ))
+    time_var.setncattr('long_name', time_nam)
+    time_var.setncattr('units', time_uni)
+    time_var.setncattr('calendar', time_cal)
+    time_var[:] = time_dim[:]
+
+    lev_var = file_out.createVariable('plev', 'd', ('plev', ))
+    lev_var.setncattr('long_name', lev_nam)
+    lev_var.setncattr('units', lev_uni)
+    lev_var.setncattr('positive', lev_pos)
+    lev_var.setncattr('axis', lev_axi)
+    lev_var[:] = lev[:]
+
+    pcs_da_var = file_out.createVariable('PC_da', 'd', (
+        'time',
+        'plev',
+    ))
+    pcs_da_var.setncattr('long_name', 'Daily zonal mean annular mode PC')
+    pcs_da_var.setncattr(
+        'comment',
+        'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479')
+    pcs_da_var[:] = pcs_da[:, :]
+
+    file_out.close()
+
+    # (2) monthly PCs
+    fname = outdir + '_'.join(src_props) + '_pc_mo.nc'
+    file_out = nc4.Dataset(fname, mode='w', format='NETCDF3_CLASSIC')
+    outfiles.append(fname)
+    file_out.title = 'Zonal mean annular mode (2)'
+    file_out.contact = 'F. Serva (federico.serva@artov.isac.cnr.it); \
+    C. Cagnazzo (chiara.cagnazzo@cnr.it)'
+
+    file_out.createDimension('time', None)
+    file_out.createDimension('plev', np.size(lev))
+
+    time_var = file_out.createVariable('time', 'd', ('time', ))
+    time_var.setncattr('long_name', time_nam)
+    time_var.setncattr('units', time_uni)
+    time_var.setncattr('calendar', time_cal)
+    time_var[:] = time_mo
+
+    lev_var = file_out.createVariable('plev', 'd', ('plev', ))
+    lev_var.setncattr('long_name', lev_nam)
+    lev_var.setncattr('units', lev_uni)
+    lev_var.setncattr('positive', lev_pos)
+    lev_var.setncattr('axis', lev_axi)
+    lev_var[:] = lev[:]
+
+    pcs_mo_var = file_out.createVariable('PC_mo', 'd', (
+        'time',
+        'plev',
+    ))
+    pcs_mo_var.setncattr('long_name', 'Monthly zonal mean annular mode PC')
+    pcs_mo_var.setncattr(
+        'comment',
+        'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479')
+    pcs_mo_var[:] = pcs_mo[:, :]
+
+    file_out.close()
+
+    # (3) EOFs and explained variance
+    fname = outdir + '_'.join(src_props) + '_eofs.nc'
+    file_out = nc4.Dataset(fname, mode='w', format='NETCDF3_CLASSIC')
+    outfiles.append(fname)
+
+    file_out.title = 'Zonal mean annular mode (3)'
+    file_out.contact = 'F. Serva (federico.serva@artov.isac.cnr.it); \
+        C. Cagnazzo (chiara.cagnazzo@cnr.it)'
+
+    file_out.createDimension('time', None)
+    file_out.createDimension('plev', np.size(lev))
+    file_out.createDimension('lat', np.size(lat))
+    file_out.createDimension('lon', np.size(lon))
+
+    time_var = file_out.createVariable('time', 'd', ('time', ))
+    time_var.setncattr('long_name', time_nam)
+    time_var.setncattr('units', time_uni)
+    time_var.setncattr('calendar', time_cal)
+    time_var[:] = 0  # singleton
+    #
+    lev_var = file_out.createVariable('plev', 'd', ('plev', ))
+    lev_var.setncattr('long_name', lev_nam)
+    lev_var.setncattr('units', lev_uni)
+    lev_var.setncattr('positive', lev_pos)
+    lev_var.setncattr('axis', lev_axi)
+    lev_var[:] = lev[:]
+    #
+    lat_var = file_out.createVariable('lat', 'd', ('lat', ))
+    lat_var.setncattr('units', lat_uni)
+    lat_var.setncattr('axis', lat_axi)
+    lat_var[:] = lat[:]
+    #
+    lon_var = file_out.createVariable('lon', 'd', ('lon', ))
+    lon_var.setncattr('units', lon_uni)
+    lon_var.setncattr('axis', lon_axi)
+    lon_var[:] = lon[:]
+    #
+    eofs_var = file_out.createVariable('EOF', 'd', ('plev', 'lat'))
+    eofs_var.setncattr('long_name', 'Zonal mean annular mode EOF')
+    eofs_var.setncattr(
+        'comment',
+        'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479')
+    eofs_var[:] = eofs[:, :]
+    #
+    eigs_var = file_out.createVariable('eigenvalues', 'd', ('plev', ))
+    eigs_var.setncattr('long_name',
+                       'Zonal mean annular mode EOF explained variance')
+    eigs_var.setncattr(
+        'comment',
+        'Reference: Baldwin and Thompson (2009), doi:10.1002/qj.479')
+    eigs_var[:] = eigs[:]
+    #
+    file_out.close()
+
+    return outfiles diff --git a/esmvaltool/diag_scripts/zmnam/zmnam_plot.py b/esmvaltool/diag_scripts/zmnam/zmnam_plot.py new file mode 100644 index 0000000000..1af7928951 --- /dev/null +++ b/esmvaltool/diag_scripts/zmnam/zmnam_plot.py @@ -0,0 +1,270 @@
+"""
+Zonal-mean annular mode plot routine. 
+
+Author: Federico Serva (ISAC-CNR & ISMAR-CNR, Italy)
+Copernicus C3S 34a lot 2 (MAGIC)
+
+"""
+
+import numpy as np
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import netCDF4 as nc4
+import cartopy.crs as ccrs
+from cartopy.util import add_cyclic_point
+
+
+def zmnam_plot(file_gh_mo, datafolder, figfolder, src_props,
+               fig_fmt, write_plots):
+    """Plot timeseries and maps for the zmnam diagnostics."""
+    plot_files = []
+    # Open daily and monthly PCs
+    file_name = '_'.join(src_props) + '_pc_da.nc'
+    in_file = nc4.Dataset(datafolder + file_name, "r")
+    #
+    # time_da = in_file.variables['time'][:]
+    # time_da_uni = in_file.variables['time'].units
+    # time_da_cal = in_file.variables['time'].calendar
+    #
+    lev = np.array(in_file.variables['plev'][:], dtype='d')
+    # lev_units = in_file.variables['plev'].units
+    #
+    pc_da = np.array(in_file.variables['PC_da'][:], dtype='d')
+    in_file.close()
+
+    file_name = '_'.join(src_props) + '_pc_mo.nc'
+    in_file = nc4.Dataset(datafolder + file_name, "r")
+    #
+    time_mo = np.array(in_file.variables['time'][:], dtype='d')
+    time_mo_uni = in_file.variables['time'].units
+    time_mo_cal = in_file.variables['time'].calendar
+    #
+    pc_mo = np.array(in_file.variables['PC_mo'][:], dtype='d')
+    in_file.close()
+
+    # Open monthly gh field
+    file_name = file_gh_mo
+    in_file = nc4.Dataset(file_name, "r")
+    dims = list(in_file.dimensions.keys())[::-1]  # py3
+
+    # Determine the lat/lon coordinate names (both spellings occur)
+    if 'latitude' in dims:
+        latn = 'latitude'
+    if 'lat' in dims:
+        latn = 'lat'
+    if 'longitude' in dims:
+        lonn = 'longitude'
+    if 'lon' in dims:
+        lonn = 'lon'
+    lat = np.array(in_file.variables[latn][:])
+    lon = np.array(in_file.variables[lonn][:])
+
+    zg_mo = np.array(in_file.variables['zg'][:])
+
+    # Record attributes for output netCDFs
+    time_nam = in_file.variables['time'].long_name
+    time_uni = in_file.variables['time'].units
+    time_cal = in_file.variables['time'].calendar
+
+    lev_nam = in_file.variables['plev'].long_name
+    lev_uni = in_file.variables['plev'].units
+    lev_pos = in_file.variables['plev'].positive
+    lev_axi = in_file.variables['plev'].axis
+
+    lat_uni = in_file.variables[latn].units
+    lat_axi = in_file.variables[latn].axis
+
+    lon_uni = in_file.variables[lonn].units
+    lon_axi = in_file.variables[lonn].axis
+
+    in_file.close()
+
+    # Save dates for the timeseries (convert the time axis only once)
+    dates_mo = nc4.num2date(time_mo, time_mo_uni, time_mo_cal)
+    date_list = []
+    for i_date in np.arange(len(time_mo)):
+        yydate = dates_mo[i_date].year
+        mmdate = dates_mo[i_date].month
+        date_list.append(str(yydate) + '-' + str(mmdate))
+
+    # Prepare array for outputting regression maps (lev/lat/lon)
+    regr_arr = np.zeros((len(lev), len(lat), len(lon)), dtype='f')
+
+    for i_lev in np.arange(len(lev)):
+
+        # Plot monthly PCs
+        plt.figure()
+        plt.plot(time_mo, pc_mo[:, i_lev])
+
+        # Make only a few ticks
+        plt.xticks(time_mo[0:len(time_mo) + 1:60],
+                   date_list[0:len(time_mo) + 1:60])
+        plt.title(str(int(lev[i_lev])) + ' Pa ' +
+                  src_props[1] + ' ' + src_props[2])
+        plt.xlabel('Time')
+        plt.ylabel('Zonal mean NAM')
+
+        if write_plots:
+            fname = (figfolder + '_'.join(src_props) + '_' +
+                     str(int(lev[i_lev])) + 'Pa_mo_ts.' + fig_fmt)
+            plt.savefig(fname, format=fig_fmt)
+            plot_files.append(fname)
+
+        # PDF of the daily PC
+        plt.figure()
+        min_var = -5
+        max_var = 5
+        n_bars = 50
+
+        _, bins, _ = plt.hist(pc_da[:, i_lev], n_bars, density=True,
+                              range=(min_var, max_var), facecolor='b',
+                              alpha=0.75)
+
+        # Reference normal Gaussian
+        plt.plot(bins, 1. / (np.sqrt(2 * np.pi)) *
+                 np.exp(- bins**2 / 2.),
+                 linewidth=2, color='k', linestyle='--')
+
+        plt.xlim(min_var, max_var)
+        plt.title('Daily PDF ' + str(int(lev[i_lev])) +
+                  ' Pa ' + src_props[1] + ' ' + src_props[2])
+        plt.xlabel('Zonal mean NAM')
+        plt.ylabel('Normalized probability')
+        plt.tight_layout()
+
+        if write_plots:
+            fname = (figfolder + '_'.join(src_props) + '_' +
+                     str(int(lev[i_lev])) + 'Pa_da_pdf.' + fig_fmt)
+            plt.savefig(fname, format=fig_fmt)
+            plot_files.append(fname)
+
+        plt.close('all')
+
+        # Regression of 3D zg field onto monthly PC
+        slope = np.zeros((len(lat), len(lon)), dtype='d')
+
+        for j_lat in np.arange(len(lat)):
+
+            for k_lon in np.arange(len(lon)):
+
+                # Following BT09, the maps are Z_m^l*PC_m^l/|PC_m^l|^2
+                slope[j_lat, k_lon] = np.dot(zg_mo[:, i_lev, j_lat, k_lon],
+                                             (pc_mo[:, i_lev]) /
+                                             np.dot(pc_mo[:, i_lev],
+                                                    pc_mo[:, i_lev]))
+
+        # Plots of regression maps
+        plt.figure()
+
+        # Fixed contour levels; these could be made configurable
+        regr_levs = -1000 + np.arange(201) * 10
+
+        # Create the orthographic projection
+        ortho = ccrs.Orthographic(central_longitude=0, central_latitude=90)
+
+        # Create the geoaxes for an orthographic projection
+        axis = plt.axes(projection=ortho)
+
+        # Add wrap-around point in longitude.
+        slopew, lonw = add_cyclic_point(slope, lon)
+
+        plt.contourf(lonw, lat, slopew,
+                     colors=('#cccccc', '#ffffff'),
+                     levels=[-10000, 0, 10000],
+                     transform=ccrs.PlateCarree())
+
+        # Switch temporarily to solid negative lines
+        mpl.rcParams['contour.negative_linestyle'] = 'solid'
+        plt.contour(lonw, lat, slopew, levels=regr_levs,
+                    colors='k', transform=ccrs.PlateCarree(),
+                    zorder=5)
+
+        # Invisible contours, only for labels.
+        # Workaround for cartopy issue, as of Dec 18
+        inv_map = plt.contour(lonw, lat, slopew, levels=regr_levs,
+                              colors='k', transform=ccrs.PlateCarree(),
+                              zorder=10)
+
+        mpl.rcParams['contour.negative_linestyle'] = 'dashed'
+
+        for cmap in inv_map.collections:
+            cmap.set_visible(False)
+
+        plt.clabel(inv_map, fontsize=8, fmt='%1.0f', zorder=15)
+
+        axis.coastlines()
+        axis.set_global()
+
+        plt.text(0.20, 0.80, str(int(lev[i_lev])) + ' Pa',
+                 fontsize=12, transform=plt.gcf().transFigure)
+        plt.text(0.75, 0.80, src_props[1],
+                 fontsize=12, transform=plt.gcf().transFigure)
+        plt.text(0.75, 0.75, src_props[2],
+                 fontsize=12, transform=plt.gcf().transFigure)
+
+        if write_plots:
+            fname = (figfolder + '_'.join(src_props) + '_' +
+                     str(int(lev[i_lev])) + 'Pa_mo_reg.' + fig_fmt)
+            plt.savefig(fname, format=fig_fmt)
+            plot_files.append(fname)
+
+        plt.close('all')
+
+        # Save regression results in array
+        regr_arr[i_lev, :, :] = slope
+
+    # Save 3D regression results in output netCDF
+    file_out = nc4.Dataset(datafolder + '_'.join(src_props) + '_regr_map.nc',
+                           mode='w', format='NETCDF3_CLASSIC')
+
+    file_out.title = 'Zonal mean annular mode (4)'
+    file_out.contact = 'F. Serva (federico.serva@artov.isac.cnr.it); \
+        C. Cagnazzo (chiara.cagnazzo@cnr.it)'
+
+    #
+    file_out.createDimension('time', None)
+    file_out.createDimension('plev', np.size(lev))
+    file_out.createDimension('lat', np.size(lat))
+    file_out.createDimension('lon', np.size(lon))
+    #
+    time_var = file_out.createVariable('time', 'd', ('time', ))
+    time_var.setncattr('long_name', time_nam)
+    time_var.setncattr('units', time_uni)
+    time_var.setncattr('calendar', time_cal)
+    time_var[:] = 0  # singleton
+    #
+    lev_var = file_out.createVariable('plev', 'd', ('plev', ))
+    lev_var.setncattr('long_name', lev_nam)
+    lev_var.setncattr('units', lev_uni)
+    lev_var.setncattr('positive', lev_pos)
+    lev_var.setncattr('axis', lev_axi)
+    lev_var[:] = lev[:]
+    #
+    lat_var = file_out.createVariable('lat', 'd', ('lat', ))
+    lat_var.setncattr('units', lat_uni)
+    lat_var.setncattr('axis', lat_axi)
+    lat_var[:] = lat[:]
+    #
+    lon_var = file_out.createVariable('lon', 'd', ('lon', ))
+    lon_var.setncattr('units', lon_uni)
+    lon_var.setncattr('axis', lon_axi)
+    lon_var[:] = lon[:]
+    #
+    regr_var = file_out.createVariable('regr', 'f', ('plev', 'lat', 'lon'))
+    regr_var.setncattr('long_name', 'Zonal mean annular mode regression map')
+    regr_var.setncattr(
+        'comment',
+        'Reference: Baldwin and Thompson '
+        '(2009), doi:10.1002/qj.479')
+    regr_var[:] = regr_arr[:, :, :]
+    #
+    file_out.close()
+
+    return plot_files diff --git a/esmvaltool/diag_scripts/zmnam/zmnam_preproc.py b/esmvaltool/diag_scripts/zmnam/zmnam_preproc.py new file mode 100644 index 0000000000..08e5fa9252 --- /dev/null +++ b/esmvaltool/diag_scripts/zmnam/zmnam_preproc.py @@ -0,0 +1,26 @@
+"""
+Zonal-mean annular mode preproc routine.
+
+Author: Federico Serva (ISAC-CNR & ISMAR-CNR, Italy)
+Copernicus C3S 34a lot 2 (MAGIC)
+"""
+
+import cdo as cd
+
+
+def zmnam_preproc(ifile):
+    """Preprocessing of the input dataset files."""
+    cdo = cd.Cdo()
+    # Delete leap day, if any.
+    full_da_nl = cdo.delete('month=2,day=29', input=ifile)
+
+    # Compute anomalies from the daily/monthly means. 
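+    # ydaymean/ymonmean give the multi-year daily/monthly climatologies;
+    # cdo sub removes them from the raw fields, and the daily anomalies
+    # are zonally averaged before the EOF analysis.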
+    gh_da_dm = cdo.ydaymean(input=full_da_nl)
+    gh_da_an = cdo.sub(input=full_da_nl + ' ' + gh_da_dm)
+    gh_da_an_zm = cdo.zonmean(input=gh_da_an)
+
+    gh_mo = cdo.monmean(input=full_da_nl)
+    gh_mo_mm = cdo.ymonmean(input=gh_mo)
+    gh_mo_an = cdo.sub(input=gh_mo + ' ' + gh_mo_mm)
+
+    return (gh_da_an_zm, gh_mo_an) diff --git a/esmvaltool/install/Julia/julia_requirements.txt b/esmvaltool/install/Julia/julia_requirements.txt new file mode 100644 index 0000000000..83599400a9 --- /dev/null +++ b/esmvaltool/install/Julia/julia_requirements.txt @@ -0,0 +1,5 @@
+ArgParse
+Compat
+DataFrames
+RainFARM
+YAML diff --git a/esmvaltool/install/Julia/setup.jl b/esmvaltool/install/Julia/setup.jl new file mode 100644 index 0000000000..ea4a7f1c8f --- /dev/null +++ b/esmvaltool/install/Julia/setup.jl @@ -0,0 +1,29 @@
+#!/usr/bin/env julia
+
+scriptDir=@__DIR__
+
+if VERSION >= v"0.7.0-DEV.2005"
+    using Pkg
+end
+
+@info "Installing the packages from" scriptDir * "/julia_requirements.txt"
+open(scriptDir * "/julia_requirements.txt") do f
+    for i in enumerate(eachline(f))
+
+        pkgId=i[1]
+        pkgName=i[2]
+        @info "Installing" pkgName
+        Pkg.add(pkgName)
+
+        @info "Testing" pkgName
+        # Load the package to verify the installation; imports must
+        # happen at top level, hence the explicit @eval.
+        @eval import $(Symbol(pkgName))
+
+    end
+end
+
+# Show the package list
+@info "Installed Julia packages:"
+Pkg.installed()
+Pkg.status() diff --git a/esmvaltool/install/R/r_requirements.txt b/esmvaltool/install/R/r_requirements.txt new file mode 100644 index 0000000000..8510439047 --- /dev/null +++ b/esmvaltool/install/R/r_requirements.txt @@ -0,0 +1,26 @@
+abind
+akima
+climdex.pcic
+ClimProjDiags
+dotCall64
+functional
+ggplot2
+gridExtra
+JuliaCall
+lintr
+logging
+mapproj
+maps
+multiApply
+ncdf4
+ncdf4.helpers
+PCICt
+plyr
+RColorBrewer
+Rcpp
+s2dverification
+snow
+SPEI
+tools
+udunits2
+yaml diff --git a/esmvaltool/install/R/setup.R b/esmvaltool/install/R/setup.R new file mode 100644 index 0000000000..515b564723 --- /dev/null +++ b/esmvaltool/install/R/setup.R @@ -0,0 +1,78 @@
+log <- function(..., level="INFO") {
+  cat(format(Sys.time(), "%Y-%m-%d %X"), level, ":", ..., "\n")
+}
+
+# check the present library paths
+RLIBPATH <- .libPaths()
+
+# check if we can write to any of the present R library paths
+if (any(file.access(RLIBPATH, 2) == 0)) {
+  # if possible, use the first writable one for the following installation
+  RLIBLOC <- RLIBPATH[which(file.access(RLIBPATH, 2) == 0)[1]]
+} else {
+  # if not possible, create a local library in the home directory
+  RLIBLOC <- Sys.getenv("R_LIBS_USER")
+  dir.create(path = Sys.getenv("R_LIBS_USER"), showWarnings = FALSE,
+             recursive = TRUE)
+}
+
+log("Installing packages to --> ", RLIBLOC)
+
+# define the R mirror to download packages
+pkg_mirror <- "https://cloud.r-project.org"
+log("Using mirror: ", pkg_mirror)
+
+# get the script path
+initial_options <- commandArgs(trailingOnly = FALSE)
+file_arg_name <- "--file="
+script_name <- sub(file_arg_name, "",
+                   initial_options[grep(file_arg_name, initial_options)])
+script_dirname <- dirname(script_name)
+
+# read the dependencies
+dependencies <- scan(
+  paste(script_dirname, "r_requirements.txt", sep = "/"),
+  what = "character"
+)
+# TODO: find a solution for script directory
+inst_packages <- installed.packages()
+package_list <- dependencies[!(dependencies %in% inst_packages[, "Package"])]
+
+if (length(package_list) == 0) {
+  log("All packages are already installed!")
+} else {
+  log("Number of packages to be installed: ", length(package_list))
+}
+
+Ncpus <- 
parallel::detectCores() +if (is.na(Ncpus)) { + Ncpus <- 1 +} + +log("Installing packages:", package_list) +if ( length(package_list) != 0 ) { + install.packages( + package_list, + repos = pkg_mirror, + Ncpus = Ncpus, + dependencies = c("Depends", "Imports", "LinkingTo") + ) +} + +failed <- list() +for (package_name in dependencies) { + success <- library( + package_name, + character.only = TRUE, + logical.return = TRUE + ) + if ( ! success ) { + failed <- c(failed, package_name) + } +} +if ( length(failed) != 0 ) { + log("Failed to install packages:", paste(failed, collapse = ", ")) + quit(status = 1, save = "no") +} + +log("Successfully installed all packages") diff --git a/esmvaltool/interface_scripts/auxiliary.ncl b/esmvaltool/interface_scripts/auxiliary.ncl index e9021b0881..926cff299a 100644 --- a/esmvaltool/interface_scripts/auxiliary.ncl +++ b/esmvaltool/interface_scripts/auxiliary.ncl @@ -5,244 +5,241 @@ ; Check the header of each routine for documentation. ; ; Contents: -; procedure copy_VarCoords_l1 -; procedure check_min_max_datasets -; function tstep +; function bname +; function basename +; function att2var +; function att2var_default ; function get_ncdf_name ; function get_ncdf_dir ; function ncdf_read +; function ncdf_define ; function ncdf_write ; function ncdf_att -; function ncdf_define -; function att2var_default -; function att2var -; function bname -; function basename -; function extract_years -; function extend_var_at ; function copy_CoordNames_n -; function empty_str -; function write_info +; function extend_var_at ; function remove_index ; procedure set_default_att -; function filter_attrs -; procedure write_ignore_warnings -; function get_ref_dataset_idx +; function empty_str ; ; ############################################################################# -undef("copy_VarCoords_l1") -procedure copy_VarCoords_l1(var_from, - var_to) +undef("bname") +function bname(a[*]:string) ; ; Arguments -; var_from: An array of any dimensionality. -; Must have named dimensions and coordinate variables. -; var_to: An array of any dimensionality. +; a: (array of) file name strings to analyse. +; +; Return value +; An array of strings, without paths and suffixes. ; ; Description -; This procedure copies all named dimensions and coordinate variables -; from one variable to another except for the leftmost dimension. +; Strips paths and suffixes from file names. ; ; Caveats ; ; References ; ; Modification history -; 20140703-A_gott_kl: Written for use with mymm in time_operations -; (diag_scripts/shared/statistics.ncl). +; 20131106-A_gott_kl: written. 
; -local funcname, scriptname, dims_from, dims, rank_from, rank, dimdiff, idimdiff +local funcname, scriptname, a, i, b, rm_prefix, rm_suffix begin - funcname = "copy_VarCoords_l1" + funcname = "bname" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - dims_from = getvardims(var_from) - rank_from = dimsizes(dims_from) - dims = getvardims(var_to) - rank = dimsizes(dims) - - if(rank.gt.1) then - - ; Check input - if(rank_from.ne.rank) then ; check rank - error_msg("f", scriptname, funcname, \ - "var_from and var_to must have same rank") - else ; check dimensions - dimdiff = dimsizes(var_from) - dimsizes(var_to) - idimdiff = ind(dimdiff.ne.0) - if(.not.all(ismissing(idimdiff))) then - if(sum(idimdiff).ne.0) then ; more or other than leftmost dim - error_msg("f", scriptname, funcname, \ - "var_from and var_to must have same " + \ - "dimension sizes, except the leftmost") - end if - end if - end if + b = a + do i = 0, dimsizes(a) - 1 + rm_prefix = systemfunc("basename " + a(i)) + rm_suffix = str_split(rm_prefix, ".") + b(i) = rm_suffix(0) + delete(rm_suffix) + end do - ; Loop over dimensions - do i = 1, rank - 1 - var_to!i = dims_from(i) - var_to&$dims_from(i)$ = var_from&$dims_from(i)$ - end do + leave_msg(scriptname, funcname) + return(b) - else +end - log_debug("No coordinates to process in copy_VarCoords_l1") +; ############################################################################# +undef("basename") +function basename(name[1]:string) +; +; Arguments +; name: input string. +; +; Return value +; A string. +; +; Description +; Removes the suffix from the string 'name' and returns the string. +; +; Caveats +; +; References +; +; Modification history +; 20130419-A_gott_kl: written. +; +local funcname, scriptname, suffix +begin - end if + funcname = "basename" + scriptname = "interface_scripts/auxiliary.ncl" + enter_msg(scriptname, funcname) + + suffix = get_file_suffix(name, 0) + bsn = str_sub_str(name, suffix, "") leave_msg(scriptname, funcname) + return(bsn) end ; ############################################################################# -undef("check_min_max_datasets") -procedure check_min_max_datasets(no_datasets[1]:integer, - min_no_datasets[1]:integer, - max_no_datasets[1]:integer, - diag_script[1]) +undef("att2var") +function att2var(data, + meta[1]:string) ; ; Arguments -; no_datasets: number of datasets. -; min_no_datasets: minimum no of datasets required for this diag script. -; max_no_datasets: maximum no of datasets required for this diagg script. -; diag_script: name of diagnostic script. +; data: variable of any type with plot specific metadata. +; meta: string specifying the requested attribute, "var", "res_" and +; "diag_file" get special treatment. +; +; Return value +; Requested attribute of data is returned as variable: +; var (string): name string, with "var_" attached as attributes +; (without prefix). +; diag_file (string): name bases of registred scripts, joined into one +; string. +; res (logical): attributes needed for plotting. +; other attributes (any type): as requested by meta. ; ; Description -; Checks if sufficient number of datasets are present for the given -; diagnostic, tries to abort with a intelligible message. +; Puts attributes of data into individual variables. ; ; Caveats ; ; References ; ; Modification history +; 20131107-A_gott_kl: written. 
; -local funcname, scriptname +local funcname, scriptname, data, meta, result, temp, atts, att, iatt begin - funcname = "check_min_max_datasets" + funcname = "att2var" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - if (no_datasets .lt. min_no_datasets) then - error_msg("f", scriptname, funcname,\ - "Too few datasets (=" + no_datasets + ") for " + diag_script \ - + ". Mininum is " + min_no_datasets) - end if + ; Attribute "var" + if (meta .eq. "var") + if (isatt(data, meta)) then + result = data@$meta$ + atts = getvaratts(data) + do iatt = 0, dimsizes(atts) - 1 + att = atts(iatt) + if (isStrSubset(att, "var_")) then + ; Remove prefix "var_" + temp = str_get_cols(att, 4, strlen(att)) + ; Assign contents to variable "var" + result@$temp$ = data@$att$ + end if + end do + else + error_msg("f", scriptname, funcname, get_script_name() + \ + " requires variable attribute " + meta) + end if + + ; Attribute "diag_script" + elseif (meta .eq. "diag_script") + if (isatt(data, meta)) then + base = bname(data@$meta$) + ; Join names of all contributing scripts + result = str_join(base, "__") + else + error_msg("f", scriptname, funcname, get_script_name() + \ + " requires variable attribute " + meta) + end if + + ; Attribute "res_" + ; DEBUG info: different attribute types are not anticipated + ; -> check this if it crashes + elseif (meta .eq. "res_") + result = True + atts = getvaratts(data) + do iatt = 0, dimsizes(atts) - 1 + att = atts(iatt) + if (isStrSubset(att, "res_")) then + ; Remove prefix "res_" + temp = str_get_cols(att, 4, strlen(att)) + ; Assign contents to variable "res" + result@$temp$ = data@$att$ + end if + end do - if (no_datasets .gt. max_no_datasets) then - error_msg("f", scriptname, funcname,\ - "Too many datasets (=" + no_datasets + ") for " + diag_script \ - + ". Maximum is " + max_no_datasets) + ; Optional/generic attributes + else + if (isatt(data, meta)) then + result = data@$meta$ + else + error_msg("w", scriptname, funcname, "attribute " + meta + \ + " not found") + result = default_fillvalue("string") + end if end if leave_msg(scriptname, funcname) + return(result) end ; ############################################################################# -undef("tstep") -function tstep(utc:numeric) +undef("att2var_default") +function att2var_default(data, + meta[1]:string, + default, + option) ; ; Arguments -; utc: UTC time as returned by cd_calendar -; (dimsizes(time) x 6, with y,m,d,h,m,s). +; data: variable of any type with plot specific metadata. +; meta: string specifying the requested attribute +; "var", "res_" and "diag_file" get special treatment. +; default: variable of any type. +; option: not used yet. ; ; Return value -; "yea"=yearly, "sea"=seasonal, "mon"=monthly, "d"=daily, "h", "m", "s". +; Requested attribute of data is returned as variable. +; See function att2var for details on 'magic' strings. +; Those get special treatment and may trigger a crash, if missing. +; Default is returned if (non magic) data@meta is missing. ; ; Description -; Determines time step size in given UTC time axis. +; Puts attributes of data into individual variables, +; and returns default if the requested attribute is missing. ; ; Caveats ; ; References ; ; Modification history -; 20140211-A_gott_kl: written. +; 20140403-A_GoKl: written. 
; -local funcname, scriptname, t, dt, nt, y, m, d, h, n, s, option, units +local funcname, scriptname, data, meta, result, default, option begin - funcname = "tstep" + funcname = "att2var_default" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - ; extract vectors - y = toint(utc(:, 0)) - m = toint(utc(:, 1)) - d = toint(utc(:, 2)) - h = toint(utc(:, 3)) - n = toint(utc(:, 4)) ; minutes - s = toint(utc(:, 5)) - - ; convert to seconds after y(0)-01-01 00:00:00 - units = "seconds after " + tostring(y(0)) + "-01-01 00:00:00" - option = 0 - option@calendar = "standard" - t = cd_inv_calendar(y, m, d, h, n, s, units, option) - - ; create vector of tstep lengths - nt = dimsizes(t) - dt = tolong(t(1:nt - 1)) - tolong(t(0:nt - 2)) - - ; check mean dt - mdt = avg(dt) - if (mdt.eq.1) then - result = "s" - else if (mdt.eq.60) then - result = "m" - else if (mdt.eq.3600) then - result = "h" - else if (mdt.eq.86400) then - result = "d" - else if (mdt.ge.2419200 .and. mdt.le.2678400) then - result = "mon" ; 28 ... 31 days - else if (mdt.ge.7776000 .and. mdt.le.7948800) then - result = "sea" ; 90 ... 92 days - else if (mdt.ge.31536000 .and. mdt.le.31622400) then - result = "yea" ; 365 ... 366 days - else - error_msg("f", scriptname, funcname, "could not determine " + \ - "step length for mdt = " + mdt) - end if - end if - end if - end if - end if - end if - end if - - ; check if all dt match the result - check_flag = False - if (result.eq."s" .or. result.eq."m" .or. \ - result.eq."h" .or. result.eq."d") then - if (max(dt) - min(dt) .ne. 0) then - check_flag = True + result = att2var(data, meta) + if(dimsizes(result).eq.1) then + if(result.eq.default_fillvalue("string")) then + log_debug("Using default: " + meta + " = " + tostring(default)) + result = default + copy_VarMeta(default, result) end if - else if (result.eq."mon") then - if (max(dt) - min(dt) .gt. 259200) then - check_flag = True - end if ; 31-28 = 3 days - else if (result.eq."sea") then - if (max(dt) - min(dt) .gt. 172800) then - check_flag = True - end if ; 92-90 = 2 days - else if (result.eq."yea") then - if (max(dt) - min(dt) .gt. 86400) then - check_flag = True - end if ; 366-365 = 1 days - end if - end if - end if - end if - if (check_flag) then - error_msg("f", scriptname, funcname, "range of dt is not " + \ - "consistent with " + result) end if leave_msg(scriptname, funcname) @@ -403,21 +400,18 @@ begin end if if (varname .eq. "dummy" .and. dimsizes(var) .eq. 1) then data = ncdf->$var$ - else if (varname .eq. "dummy" .and. dimsizes(var) .ne. 1) then + elseif (varname .eq. "dummy" .and. dimsizes(var) .ne. 1) then error_msg("f", scriptname, funcname, "please select variable " + \ "from " + infile) - else if (varname .ne. "dummy" .and. any(varname .eq. vNames)) then + elseif (varname .ne. "dummy" .and. any(varname .eq. vNames)) then i = ind(vNames .eq. varname) data = ncdf->$vNames(i)$ - else if (varname .ne. "dummy" .and. .not. any(varname .eq. vNames)) then + elseif (varname .ne. "dummy" .and. .not. any(varname .eq. 
vNames)) then error_msg("f", scriptname, funcname, "variable " + varname + \ " not in " + infile) else error_msg("f", scriptname, funcname, "case not covered") end if - end if - end if - end if ; Convert attributes to type logical, if they meet above conditions atts = getvaratts(data) @@ -451,211 +445,45 @@ begin end ; ############################################################################# -undef("ncdf_write") -function ncdf_write(data, - outfile_in) +undef("ncdf_define") +function ncdf_define(ncdf, data) ; ; Arguments -; data: data with metadata (attributes) to be written to a netCDF file. -; outfile_in: full path & name of the netCDF file to be written. It may -; may contain an attribute "existing" with the values -; "overwrite" (default) / "append" / "rename" -; default is used for "default", "dummy" or non-string values. +; ncdf: file specified. +; data: data with metadata (attributes). +; ; Return value -; A string with the outfile. +; Modifies the file specifier "ncdf". ; ; Description -; Creates or modifies outfile, adding a single variable. All attributes of -; "data" are added to the variable in the netCDF file. It may rename an -; existing file to keep it. -; Two options for netCDF output, one disabled (see "Option"), change if -; needed. -; If data has coordinates which are named with strings, these names are -; converted to characeter arrays and the coordinates are appended as two -; dimensional variables. ; ; Caveats -; Appending not tested. -; netCDF does not support data type "logical" and converts True (logical) -; to 1 (integer), False (logical) to 0 (integer). -; A workaround could be to convert logical data to string before writing to -; netCDF, and adjust reading routine accordingly. -; Currently ncdf_read interprets variable attributes of type "integer" and -; dimsizes = 1 & value 0 or 1 as logicals. +; No unlimited dimensions supported (yet). ; ; References -; http://www.ncl.ucar.edu/Document/Manuals/Ref_Manual/ -; NclFormatSupport.shtml#NetCDF -; http://www.ncl.ucar.edu/Applications/o-netcdf.shtml -; http://www.ncl.ucar.edu/Support/talk_archives/2009/0367.html +; http://www.ncl.ucar.edu/Applications/method_2.shtml ; ; Modification history -; 20131107-A_gott_kl: written. +; 20131112-A_gott_kl: written. ; -local funcname, scriptname, data, outfile, outfile_in, ncdf, varname +local funcname, scriptname, data, diag_script, var, fAtt, dimNames, dimSzs, \ + dimUnlim, atts begin - funcname = "ncdf_write" + funcname = "ncdf_define" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - ; Retrieve special attributes - varname = att2var(data, "var") + diag_script = att2var(data, "diag_script") + var = att2var(data, "var") - ; Determine outfile - defaulting = (/"default", "dummy"/) - if (typeof(outfile_in) .ne. "string" .or. \ - .not.ismissing(ind(defaulting .eq. outfile_in))) then - outfile = get_ncdf_name(varname) - else - outfile = outfile_in - end if + ; Explicitly declare file definition mode. Improve efficiency. + setfileoption(ncdf, "DefineMode", True) - ; Determine mode for dealing with existing files - if (isatt(outfile, "existing")) then - existing = outfile@existing - else - existing = "overwrite" ; Default - end if - - ; Open outfile (see NCL documentation for "addfile") - if (fileexists(outfile)) then ; Outfile already exists - if (existing .eq. "rename") then - sysdate = systemfunc("date +%Y%m%d%H%M%S") - system("mv " + outfile + " " + outfile + "." 
+ sysdate) - ncdf = addfile(outfile, "c") - log_debug(" Keeping existing " + outfile + " with suffix ." + sysdate) - else if (existing .eq. "append") then - ncdf = addfile(outfile, "w") - else - system("rm -f " + outfile) - ncdf = addfile(outfile, "c") - end if - end if - else ; Outfile does not exist yet - ncdf = addfile(outfile, "c") - end if - - ; Write to netCDF - ; Option 1: q&d -; ncdf->$varname$ = data - ; Option 2: write with explicit file definition - dimNames = getvardims(data) - ncdf = ncdf_define(ncdf, data) - do i = 0, dimsizes(dimNames) - 1 - if (typeof(data&$dimNames(i)$).eq."string") then - ; Convert string array to two dimensional character array - char_array = stringtochar(data&$dimNames(i)$) - ncdf->$dimNames(i)$ = (/char_array/) - delete(char_array) - else - ncdf->$dimNames(i)$ = (/data&$dimNames(i)$/) - end if - end do - ncdf->$varname$ = (/data/) - - log_info("Wrote variable " + varname + " to " + outfile) - - leave_msg(scriptname, funcname) - return(outfile) - -end - -; ############################################################################# -undef("ncdf_att") -function ncdf_att(infile[1], - att[1]:string) -; -; Arguments -; infile: full path & name of the netCDF file to be read OR a file -; variable already created by addfile(...). -; att: attribute to be read. -; -; Return value -; Contents of attribute att (could be any type supported by netCDF). -; -; Description -; Checks existence of att in infile, then retrieves contents. -; -; Caveats -; str_infile does not resolve the file name for a file variable. -; -; References -; -; Modification history -; 20140131-A_gott_kl: written. -; -local funcname, scriptname, f, file_atts, i, result -begin - - funcname = "ncdf_att" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - if (typeof(infile).eq."string") then - f = addfile(infile, "r") - str_infile = infile - else if (typeof(infile).eq."file") then - f = infile - str_infile = "file associated with file variable argument." - else - error_msg("f", scriptname, funcname, "wrong type of infile argument") - end if - end if - file_atts = getvaratts(f) ; retrieve global attributes - if (any(file_atts.eq.att)) then - i = ind(file_atts.eq.att) - result = f@$file_atts(i)$ - else - error_msg("w", scriptname, funcname, att + " not found in " + str_infile) - result = "missing" - end if - - leave_msg(scriptname, funcname) - return(result) - -end - -; ############################################################################# -undef("ncdf_define") -function ncdf_define(ncdf, data) -; -; Arguments -; ncdf: file specified. -; data: data with metadata (attributes). -; -; Return value -; Modifies the file specifier "ncdf". -; -; Description -; -; Caveats -; No unlimited dimensions supported (yet). -; -; References -; http://www.ncl.ucar.edu/Applications/method_2.shtml -; -; Modification history -; 20131112-A_gott_kl: written. -; -local funcname, scriptname, data, diag_script, var, fAtt, dimNames, dimSzs, \ - dimUnlim -begin - - funcname = "ncdf_define" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - diag_script = att2var(data, "diag_script") - var = att2var(data, "var") - - ; Explicitly declare file definition mode. Improve efficiency. 
- setfileoption(ncdf, "DefineMode", True) - - ; Create global attributes of the file - fAtt = True ; assign file attributes - fAtt@creation_date = systemfunc("date") - fAtt@created_by = "ESMValTool, " + diag_script + ; Create global attributes of the file + fAtt = True ; assign file attributes + fAtt@creation_date = systemfunc("date") + fAtt@created_by = "ESMValTool, " + diag_script fileattdef(ncdf, fAtt) ; copy file attributes @@ -732,278 +560,240 @@ begin end ; ############################################################################# -undef("att2var_default") -function att2var_default(data, - meta[1]:string, - default, - option) +undef("ncdf_write") +function ncdf_write(data, + outfile_in) ; ; Arguments -; data: variable of any type with plot specific metadata. -; meta: string specifying the requested attribute -; "var", "res_" and "diag_file" get special treatment. -; default: variable of any type. -; option: not used yet. -; +; data: data with metadata (attributes) to be written to a netCDF file. +; outfile_in: full path & name of the netCDF file to be written. It may +; may contain an attribute "existing" with the values +; "overwrite" (default) / "append" / "rename" +; default is used for "default", "dummy" or non-string values. ; Return value -; Requested attribute of data is returned as variable. -; See function att2var for details on 'magic' strings. -; Those get special treatment and may trigger a crash, if missing. -; Default is returned if (non magic) data@meta is missing +; A string with the outfile. ; ; Description -; Puts attributes of data into individual variables, -; and returns default if the requested attribute is missing +; Creates or modifies outfile, adding a single variable. All attributes of +; "data" are added to the variable in the netCDF file. It may rename an +; existing file to keep it. +; Two options for netCDF output, one disabled (see "Option"), change if +; needed. +; If data has coordinates which are named with strings, these names are +; converted to characeter arrays and the coordinates are appended as two +; dimensional variables. ; ; Caveats +; Appending not tested. +; netCDF does not support data type "logical" and converts True (logical) +; to 1 (integer), False (logical) to 0 (integer). +; A workaround could be to convert logical data to string before writing to +; netCDF, and adjust reading routine accordingly. +; Currently ncdf_read interprets variable attributes of type "integer" and +; dimsizes = 1 & value 0 or 1 as logicals. ; ; References +; http://www.ncl.ucar.edu/Document/Manuals/Ref_Manual/ +; NclFormatSupport.shtml#NetCDF +; http://www.ncl.ucar.edu/Applications/o-netcdf.shtml +; http://www.ncl.ucar.edu/Support/talk_archives/2009/0367.html ; ; Modification history -; 20140403-A_GoKl: written. +; 20131107-A_gott_kl: written. 
; -local funcname, scriptname, data, meta, result, default, option +local funcname, scriptname, data, outfile, outfile_in, ncdf, varname begin - funcname = "att2var_default" + funcname = "ncdf_write" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - result = att2var(data, meta) - if(dimsizes(result).eq.1) then - if(result.eq.default_fillvalue("string")) then - log_debug("Using default: " + meta + " = " + tostring(default)) - result = default - copy_VarMeta(default, result) - end if - end if - - leave_msg(scriptname, funcname) - return(result) - -end + ; Retrieve special attributes + varname = att2var(data, "var") -; ############################################################################# -undef("att2var") -function att2var(data, - meta[1]:string) -; -; Arguments -; data: variable of any type with plot specific metadata. -; meta: string specifying the requested attribute, "var", "res_" and -; "diag_file" get special treatment. -; -; Return value -; Requested attribute of data is returned as variable: -; var (string): name string, with "var_" attached as attributes -; (without prefix). -; diag_file (string): name bases of registred scripts, joined into one -; string. -; res (logical): attributes needed for plotting. -; other attributes (any type): as requested by meta. -; -; Description -; Puts attributes of data into individual variables. -; -; Caveats -; -; References -; -; Modification history -; 20131107-A_gott_kl: written. -; -local funcname, scriptname, data, meta, result, temp, atts, att, iatt -begin + ; Determine outfile + defaulting = (/"default", "dummy"/) + if (typeof(outfile_in) .ne. "string" .or. \ + .not.ismissing(ind(defaulting .eq. outfile_in))) then + outfile = get_ncdf_name(varname) + else + outfile = outfile_in + end if - funcname = "att2var" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) + ; Determine mode for dealing with existing files + if (isatt(outfile, "existing")) then + existing = outfile@existing + else + existing = "overwrite" ; Default + end if - ; Attribute "var" - if (meta .eq. "var") - if (isatt(data, meta)) then - result = data@$meta$ - atts = getvaratts(data) - do iatt = 0, dimsizes(atts) - 1 - att = atts(iatt) - if (isStrSubset(att, "var_")) then - ; Remove prefix "var_" - temp = str_get_cols(att, 4, strlen(att)) - ; Assign contents to variable "var" - result@$temp$ = data@$att$ + ; Open outfile (see NCL documentation for "addfile") + if (fileexists(outfile)) then ; Outfile already exists + if (existing .eq. "rename") then + sysdate = systemfunc("date +%Y%m%d%H%M%S") + system("mv " + outfile + " " + outfile + "." + sysdate) + ncdf = addfile(outfile, "c") + log_debug(" Keeping existing " + outfile + " with suffix ." + sysdate) + elseif (existing .eq. 
"append") then + ncdf = addfile(outfile, "w") + dimNames = getvardims(data) + dim_dim = dimsizes(dimNames) + ; define dimensions in case not defined yet + dimSzs = new(dimsizes(dimNames), long) + dimUnlim = new(dimsizes(dimNames), logical) + dimUnlim(:) = False + do i = 0, dim_dim - 1 + dimSzs(i) = dimsizes(data&$dimNames(i)$) + end do + filedimdef(ncdf, dimNames, dimSzs, dimUnlim) + filevardef(ncdf, varname, typeof(data), dimNames) + ; Copy attributes associated with each variable to the file + do i = 0, dim_dim - 1 + ; No attribute assignment to variable if no attributes are available + if (.not.(all(ismissing(getvaratts(data&$dimNames(i)$))))) then + filevarattdef(ncdf, dimNames(i), data&$dimNames(i)$) end if end do + filevarattdef(ncdf, varname, data) else - error_msg("f", scriptname, funcname, get_script_name() + \ - " requires variable attribute " + meta) - end if - - ; Attribute "diag_script" - else if (meta .eq. "diag_script") - if (isatt(data, meta)) then - base = bname(data@$meta$) - ; Join names of all contributing scripts - result = str_join(base, "__") - else - error_msg("f", scriptname, funcname, get_script_name() + \ - " requires variable attribute " + meta) + system("rm -f " + outfile) + ncdf = addfile(outfile, "c") + ncdf = ncdf_define(ncdf, data) end if + else ; Outfile does not exist yet + ncdf = addfile(outfile, "c") + ncdf = ncdf_define(ncdf, data) + end if - ; Attribute "res_" - ; DEBUG info: different attribute types are not anticipated - ; -> check this if it crashes - else if (meta .eq. "res_") - result = True - atts = getvaratts(data) - do iatt = 0, dimsizes(atts) - 1 - att = atts(iatt) - if (isStrSubset(att, "res_")) then - ; Remove prefix "res_" - temp = str_get_cols(att, 4, strlen(att)) - ; Assign contents to variable "res" - result@$temp$ = data@$att$ - end if - end do - - ; Optional/generic attributes - else - if (isatt(data, meta)) then - result = data@$meta$ + ; Write to netCDF + ; Option 1: q&d +; ncdf->$varname$ = data + ; Option 2: write with explicit file definition + dimNames = getvardims(data) + do i = 0, dimsizes(dimNames) - 1 + if (typeof(data&$dimNames(i)$).eq."string") then + ; Convert string array to two dimensional character array + char_array = stringtochar(data&$dimNames(i)$) + ncdf->$dimNames(i)$ = (/char_array/) + delete(char_array) else - error_msg("w", scriptname, funcname, "attribute " + meta + \ - " not found") - result = default_fillvalue("string") + ncdf->$dimNames(i)$ = (/data&$dimNames(i)$/) end if - end if - end if - end if - - leave_msg(scriptname, funcname) - return(result) - -end - -; ############################################################################# -undef("bname") -function bname(a:string) -; -; Arguments -; a: (array of) file name strings to analyse. -; -; Return value -; An array of strings, without paths and suffixes. -; -; Description -; Strips paths and suffixes from file names. -; -; Caveats -; -; References -; -; Modification history -; 20131106-A_gott_kl: written. 
-; -local funcname, scriptname, a, i, b, rm_prefix, rm_suffix -begin - - funcname = "bname" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - b = a - do i = 0, dimsizes(a) - 1 - rm_prefix = systemfunc("basename " + a(i)) - rm_suffix = str_split(rm_prefix, ".") - b(i) = rm_suffix(0) - delete(rm_suffix) end do + ncdf->$varname$ = (/data/) + + log_info("Wrote variable " + varname + " to " + outfile) leave_msg(scriptname, funcname) - return(b) + return(outfile) end ; ############################################################################# -undef("basename") -function basename(name[1]:string) +undef("ncdf_att") +function ncdf_att(infile[1], + att[1]:string) ; ; Arguments -; name: input string. +; infile: full path & name of the netCDF file to be read OR a file +; variable already created by addfile(...). +; att: attribute to be read. ; ; Return value -; A string. +; Contents of attribute att (could be any type supported by netCDF). ; ; Description -; Removes the suffix from the string 'name' and returns the string. +; Checks existence of att in infile, then retrieves contents. ; ; Caveats +; str_infile does not resolve the file name for a file variable. ; ; References ; ; Modification history -; 20130419-A_gott_kl: written. +; 20140131-A_gott_kl: written. ; -local funcname, scriptname, suffix +local funcname, scriptname, f, file_atts, i, result begin - funcname = "basename" + funcname = "ncdf_att" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - suffix = get_file_suffix(name, 0) - bsn = str_sub_str(name, suffix, "") + if (typeof(infile).eq."string") then + f = addfile(infile, "r") + str_infile = infile + elseif (typeof(infile).eq."file") then + f = infile + str_infile = "file associated with file variable argument." + else + error_msg("f", scriptname, funcname, "wrong type of infile argument") + end if + file_atts = getvaratts(f) ; retrieve global attributes + if (any(file_atts.eq.att)) then + i = ind(file_atts.eq.att) + result = f@$file_atts(i)$ + else + error_msg("w", scriptname, funcname, att + " not found in " + str_infile) + result = "missing" + end if leave_msg(scriptname, funcname) - return(bsn) + return(result) end ; ############################################################################# -undef("extract_years") -function extract_years(data, - startYr, - endYr) +undef("copy_CoordNames_n") +function copy_CoordNames_n(var_from, + var_to, + n) ; ; Arguments +; var_from: an array of any dimensionality. Must have named dimensions and +; coordinate variables. +; var_to: an array of any dimensionality. +; n: index of coordinate which is not to be copied. ; ; Return value +; An array with the same dimensionality of var_from. ; ; Description +;: Copies all named dimensions and coordinate variables except the n-th one. ; ; Caveats ; ; References ; ; Modification history +; 20130419-A_gott_kl: written. ; -local funcname, scriptname, data_new, tim, t_utc, t_utc_yr, idx_t +local funcname, scriptname, var_from, var_to, n, rank begin - funcname = "extract_years" + funcname = "copy_CoordNames_n" scriptname = "interface_scripts/auxiliary.ncl" enter_msg(scriptname, funcname) - ds = dimsizes(data) - rank = dimsizes(ds) - - tim = data&time - - t_utc = cd_calendar(tim, 0) - t_utc_yr = floattoint(t_utc(:, 0)) - idx_t = ind((t_utc_yr .ge. startYr) .and. (t_utc_yr .le. endYr)) + rank = dimsizes(dimsizes(var_from)) - if (rank .eq. 2) then - data_new = data(idx_t, :) - end if - if (rank .eq. 
3) then - data_new = data(idx_t, :, :) - end if - if (rank .eq. 4) then - data_new = data(idx_t, :, :, :) + if (rank.le.n) then + error_msg("f", scriptname, funcname, "index is not in the " + \ + "range of the variables") + else + names = getvardims(var_from) + do ii = 0, rank - 1 + if (.not.ismissing(names(ii))) then + var_to!ii = names(ii) + if (ii.ne.n) then + var_to&$names(ii)$ = var_from&$names(ii)$ + end if + end if + end do end if leave_msg(scriptname, funcname) - return(data_new) + return(var_to) end @@ -1043,7 +833,7 @@ begin var_dim_plus = var_dim var_dim_plus(coord) = var_dim(coord) + 1 - ; Define new variables with one more entry for diagnostics + ; Define new variables with one more entry for diagnostics rank = dimsizes(var_dim) var_temp = new((/var_dim_plus/), typeof(var)) @@ -1058,7 +848,7 @@ begin if (index.lt.var_dim(0)) then var_new(index + 1:) = var(index:) end if - else if (rank.eq.2) then + elseif (rank.eq.2) then if (coord.eq.0) then if (index - 1.ge.0) then var_new(:index - 1, :) = var(:index - 1, :) @@ -1074,7 +864,7 @@ begin var_new(:, index + 1:) = var(:, index:) end if end if - else if (rank.eq.3) then + elseif (rank.eq.3) then if (coord.eq.0) then if (index - 1.ge.0) then var_new(:index - 1, :, :) = var(:index - 1, :, :) @@ -1082,7 +872,7 @@ begin if (index.lt.var_dim(0)) then var_new(index + 1:, :, :) = var(index:, :, :) end if - else if (coord.eq.1) then + elseif (coord.eq.1) then if (index - 1.ge.0) then var_new(:, :index - 1, :) = var(:, :index - 1, :) end if @@ -1097,155 +887,16 @@ begin var_new(:, :, index + 1:) = var(:, :, index:) end if end if - end if else error_msg("f", scriptname, funcname, "implemented only for " + \ "variables with dimension less or equal 3!") end if - end if - end if leave_msg(scriptname, funcname) return(var_new) end -; ############################################################################# -undef("copy_CoordNames_n") -function copy_CoordNames_n(var_from, - var_to, - n) -; -; Arguments -; var_from: an array of any dimensionality. Must have named dimensions and -; coordinate variables. -; var_to: an array of any dimensionality. -; n: index of coordinate which is not to be copied. -; -; Return value -; An array with the same dimensionality of var_from. -; -; Description -;: Copies all named dimensions and corrdinate variables except the n-th one. -; -; Caveats -; -; References -; -; Modification history -; 20130419-A_gott_kl: written. -; -local funcname, scriptname, var_from, var_to, n, rank -begin - - funcname = "copy_CoordNames_n" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - rank = dimsizes(dimsizes(var_from)) - - if (rank.le.n) then - error_msg("f", scriptname, funcname, "index is not in the " + \ - "range of the variables") - else - names = getvardims(var_from) - do ii = 0, rank - 1 - if (.not.ismissing(names(ii))) then - var_to!ii = names(ii) - if (ii.ne.n) then - var_to&$names(ii)$ = var_from&$names(ii)$ - end if - end if - end do - end if - - leave_msg(scriptname, funcname) - return(var_to) - -end - -; ############################################################################# -undef("empty_str") -function empty_str(nn[1]:integer) -; -; Arguments -; nn: length of the returned string. -; -; Return value -; An empty string of length nn. -; -; Description -; Returns an empty string of length nn (useful for text formatting). -; -; Caveats -; -; References -; -; Modification history -; 20140228-A_righ_ma: written. 
-; -local funcname, scriptname, ii -begin - - funcname = "empty_str" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - str = " " - do ii = 1, nn - 1 - str = str + " " - end do - - leave_msg(scriptname, funcname) - return(str) - -end - -; ############################################################################# -undef("write_info") -function write_info(display[1]:logical) -; -; Arguments -; display: logical with attributes listing the items to put in the infobox -; -; Return value -; A string to write to plot. -; -; Description -; Read the attributes in displayinfo and returns them in a formatted -; string. -; -; Caveats -; -; References -; -; Modification history -; -local funcname, scriptname, infobox, info_id -begin - - funcname = "write_info" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - infobox = "" - infobox = infobox + "~F22~DEBUG INFO:~F21~ ~C~" - display_varatts = getvaratts(display) - exceptions = (/"description", "description_ycoord"/) - do info_id = 0, dimsizes(display_varatts) - 1 - info_tag = display_varatts(info_id) - if (any(info_tag .eq. exceptions)) then - continue - end if - ; ~C~ = new line - infobox = infobox + info_tag + ": " + display@$info_tag$ + "~C~" - end do - - leave_msg(scriptname, funcname) - return(infobox) - -end - ; ############################################################################# undef("remove_index") function remove_index(array, @@ -1306,7 +957,6 @@ begin end - ; ############################################################################# undef("set_default_att") procedure set_default_att(info:logical, @@ -1343,120 +993,38 @@ begin end ; ############################################################################# -undef("filter_attrs") -function filter_attrs(source[1]:logical,\ - target[1]:logical,\ - lb_prefix[1]:string) -; -; Arguments -; -; Description -; Copy over certain attributes if they have a specified prefix, -; the prefix is removed on the target attribute -; -; Caveats -; -; References -; -; Modification history -; 20150630-A_eval_ma: written. -; -local funcname, scriptname, source_attrs, att_idx, prefix_removed -begin - - funcname = "filter_attrs" - scriptname = "interface_scripts/auxiliary.ncl" - enter_msg(scriptname, funcname) - - source_attrs = getvaratts(source) - do att_idx = 0, dimsizes(source_attrs) - 1 - if (.not. ismissing(str_match(source_attrs(att_idx), lb_prefix))) then - prefix_removed = str_get_cols(source_attrs(att_idx), \ - strlen(lb_prefix), -1) - target@$prefix_removed$ = source@$source_attrs(att_idx)$ - end if - end do - - leave_msg(scriptname, funcname) - return(target) - -end - -; ############################################################################# -undef("write_ignore_warnings") -procedure write_ignore_warnings(warnings[*]:string, - warn_file_str[1]:string) +undef("empty_str") +function empty_str(nn[1]:integer) ; ; Arguments +; nn: length of the returned string. ; -; Description -; Write a file with warnings that will be disregarded by the Pyhton -; launcher. -; -; Caveats -; -; References -; -; Modification history -; 20150630-A_eval_ma: written. 
-;
-local funcname, scriptname, idx_warn
-begin
-
-  funcname = "write_ignore_warnings"
-  scriptname = "interface_scripts/auxiliary.ncl"
-  enter_msg(scriptname, funcname)
-
-  if (fileexists(warn_file_str)) then
-    system("rm -f " + warn_file_str)
-  end if
-  do idx_warn = 0, dimsizes(warnings) - 1
-    system("echo " + warnings(idx_warn) + " >> " + warn_file_str)
-  end do
-
-  leave_msg(scriptname, funcname)
-
-end
-
-; #############################################################################
-undef("get_ref_dataset_idx")
-function get_ref_dataset_idx(datasets[1]:logical,\
-                             dataset_ref_name:string)
-;
-; Arguments
-;   datasets: list of all datasets
-;   dataset_ref_name: reference dataset name, possibly two, separated by comma
+; Return value
+;   An empty string of length nn.
 ;
 ; Description
-;   Fetches the reference dataset index in dataset_info@dataset
+;   Returns an empty string of length nn (useful for text formatting).
 ;
 ; Caveats
 ;
 ; References
 ;
 ; Modification history
-;   20171208-A_righ_ma: rewritten for the new backend
+;   20140228-A_righ_ma: written.
 ;
-local funcname, scriptname, nrefs, idx_ref
+local funcname, scriptname, ii
 begin
 
-  funcname = "get_ref_dataset_idx"
+  funcname = "empty_str"
   scriptname = "interface_scripts/auxiliary.ncl"
   enter_msg(scriptname, funcname)
 
-  nrefs = dimsizes(dataset_ref_name)
-
-  idx_ref = new(nrefs, integer)
-  do ii = 0, nrefs - 1
-    idx_ref(ii) = \
-      ind(dataset_info@dataset.eq.str_squeeze(dataset_ref_name(ii)))
-    if (ismissing(idx_ref(ii))) then
-      error_msg("f", scriptname, funcname, "reference dataset " + \
-                dataset_ref_name(ii) + " not found in the datasets list")
-    end if
+  str = " "
+  do ii = 1, nn - 1
+    str = str + " "
   end do
 
   leave_msg(scriptname, funcname)
-  return(idx_ref)
+  return(str)
 
 end
diff --git a/esmvaltool/interface_scripts/data_handling.ncl b/esmvaltool/interface_scripts/data_handling.ncl
index 5a08755a5a..6fd66f219b 100644
--- a/esmvaltool/interface_scripts/data_handling.ncl
+++ b/esmvaltool/interface_scripts/data_handling.ncl
@@ -11,29 +11,27 @@
 ; #############################################################################
 undef("read_data")
-function read_data(index[1]:integer,
-                   variable[1]:string,
-                   field_type:string)
+function read_data(info_item[1]:logical)
 ;
 ; Arguments
-;   index: index to current input file as defined in the _info.ncl
-;          file in the preproc directory of the current variable
-;   variable: name of the variable to read
-;   field_type: the variable's field type
+;   info_item: a logical describing the current input file, as defined in
+;              the _info.ncl file in the preproc directory. This is usually
+;              an element of the input_file_info list.
 ;
 ; Return value
 ;   The actual data array with all metadata.
 ;
 ; Description
 ;   Retrieve the variable data and metadata based on the input_file_info
-;   logical defined in the _info.ncl file automatically generated
-;   by the preprocessor and stored in the preproc// subdirectory.
+;   logical defined in the _info.ncl file automatically generated by
+;   the preprocessor and stored in the preproc// subdirectory.
 ;
 ; Caveats
 ;
 ; References
 ;
 ; Modification history
+;   20180511_A-righ_ma: adapted to the revised list-based metadata structure.
 ;   20180212_A-righ_ma: written.
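+;
+; Usage example (an illustrative sketch, not part of the original code):
+;     info0 = input_file_info[0]
+;     data = read_data(info0)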
 ;
 local funcname, scriptname, infile, fin
@@ -43,56 +41,49 @@ begin
   scriptname = "interface_scripts/data_handling.ncl"
   enter_msg(scriptname, funcname)
 
-  ; Check array boundaries first
-  if (index.lt.0 .or. index.gt.(dimsizes(input_file_info@filename) - 1)) then
-    error_msg("f", scriptname, funcname, "dataset index out of range")
+  ; Check attribute availability
+  if (.not.isatt(info_item, "filename")) then
+    error_msg("f", scriptname, funcname, "filename attribute not defined " + \
+              "in the interface file " + info_item@short_name + "_info.ncl")
   end if
+  infile = info_item@filename
 
-  infile = input_file_info@filename(index)
-
-  ; Check existance and supported format
+  ; Check file existence and supported format
   if (.not.isfilepresent(infile)) then
     error_msg("f", scriptname, funcname, "input file " + infile + \
               " not found or not a supported format")
   end if
 
-  ; Check field type correspondance
-  if (input_file_info@field(index) .ne. field_type) then
-    error_msg("f", scriptname, funcname, "requested field type " + \
-              field_type + " does not match")
-  end if
-
   ; Open file
   log_info("Reading in file = " + infile)
   fin = addfile(infile, "r")
 
   ; Check that variable is in the file
-  if (.not. isfilevar(fin, variable)) then
-    error_msg("f", scriptname, funcname, "variable " + variable + \
+  if (.not. isfilevar(fin, info_item@short_name)) then
+    error_msg("f", scriptname, funcname, "variable " + \
+              info_item@short_name + \
               " not found in file " + infile)
   end if
 
   ; Read data
-  data = fin->$variable$
+  data = fin->$info_item@short_name$
 
   leave_msg(scriptname, funcname)
   return(data)
 
 end
-
 ; #############################################################################
 undef("read_fx_data")
-function read_fx_data(index[1]:integer,
-                      variable[1]:string)
+function read_fx_data(info_item[1]:logical,
+                      fx_variable[1]:string)
 ;
 ; Arguments
-;   index: index to current input file as defined in the _info.ncl
-;          file in the preproc directory of the current variable
-;   variable: name of the fx variable to read
+;   info_item: a logical describing the current input file, as defined in
+;              the _info.ncl file in the preproc directory
+;   fx_variable: name of the fx variable to read
 ;
 ; Return value
-;   The a list of data array(s) with all metadata or a missing value if the
+;   A list of data array(s) with all metadata or a missing value if the
 ;   fx variable is not found.
 ;
 ; Description
@@ -110,6 +101,7 @@ function read_fx_data(index[1]:integer,
 ; References
 ;
 ; Modification history
+;   20180511_A-righ_ma: adapted to the revised list-based metadata structure.
 ;   20180526_A-righ_ma: written.
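+;
+; Usage example (an illustrative sketch, not part of the original code;
+; "sftlf" is a hypothetical fx variable requested via the recipe):
+;     fx_list = read_fx_data(input_file_info[0], "sftlf")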
 ;
 local funcname, scriptname, infile, fin
@@ -119,23 +111,19 @@ begin
   scriptname = "interface_scripts/data_handling.ncl"
   enter_msg(scriptname, funcname)
 
-  ; Check array boundaries first
-  if (index.lt.0 .or. index.gt.(dimsizes(input_file_info@filename) - 1)) then
-    error_msg("f", scriptname, funcname, "dataset index out of range")
-  end if
-
-  if (.not.isatt(input_file_info, variable)) then
-    error_msg("f", scriptname, funcname, "input path for fx variable " + \
-              variable + " not found in the interface file, please add " + \
-              "the required fx variable to the recipe")
+  ; Check attribute availability
+  if (.not.isatt(info_item, fx_variable)) then
+    error_msg("f", scriptname, funcname, "input path for the fx_variable " + \
+              fx_variable + " not found in the metadata file, please add " + \
+              "'fx_files: [" + fx_variable + "]' to the variable " + \
+              "dictionary in the recipe")
   end if
-
-  infile = input_file_info@$variable$(index)
+  infile = info_item@$fx_variable$
 
   ; Return missing if variable not available
-  if (infile.eq."None") then
-    error_msg("w", scriptname, funcname, "fx variable " + variable + \
-              " not found for dataset " + dataset_info@dataset(index))
+  if (ismissing(infile)) then
+    error_msg("w", scriptname, funcname, "fx variable " + fx_variable + \
+              " not found for dataset " + info_item@dataset)
     out = new(1, float)
     out = default_fillvalue(typeof(out))
     return(out)
@@ -146,8 +134,8 @@ begin
   fin = addfile(infile, "r")
 
   ; Check that variable is in the file
-  if (.not. isfilevar(fin, variable)) then
-    error_msg("f", scriptname, funcname, "variable " + variable + \
+  if (.not. isfilevar(fin, fx_variable)) then
+    error_msg("f", scriptname, funcname, "variable " + fx_variable + \
               " not found in " + infile)
   end if
 
@@ -155,10 +143,10 @@ begin
   outlist = NewList("lifo")
 
   ; Check if auxiliary coordinates are defined
-  atts = getfilevaratts(fin, variable)
+  atts = getfilevaratts(fin, fx_variable)
   if (any("coordinates".eq.atts)) then
     ; Read auxiliary coordinates in reverse order (for correct order in list)
-    coords = fin->$variable$@coordinates
+    coords = fin->$fx_variable$@coordinates
     ncoords = str_fields_count(coords, " ")
     do ii = 1, ncoords
       curr_coord = str_squeeze(str_get_field(coords, ncoords + 1 - ii, " "))
@@ -172,7 +160,7 @@ begin
   end if
 
   ; Push variable to the list
-  ListPush(outlist, fin->$variable$)
+  ListPush(outlist, fin->$fx_variable$)
 
   leave_msg(scriptname, funcname)
   return(outlist)
diff --git a/esmvaltool/interface_scripts/interface.ncl b/esmvaltool/interface_scripts/interface.ncl
index e01538021c..d323549848 100644
--- a/esmvaltool/interface_scripts/interface.ncl
+++ b/esmvaltool/interface_scripts/interface.ncl
@@ -6,44 +6,220 @@
 ; preprocessor set. These files contain the NCL logical _info variable with
 ; all the required information to run the diagnostic script.
 ;
-; No functions/procedures shall be further added to this script.
+; Contents:
+;    function select_metadata_by_atts
+;    function select_metadata_by_name
+;    function metadata_att_as_array
+;
+; No further functions/procedures shall be added to this script.
+;
 ; #############################################################################
 
 ; Load config_user_info and diag_script_info from settings.ncl
 begin
-  print("INFO Loading settings from " + getenv("settings"))
-  loadscript("$settings")
+  if .not. isvar("__interface_ncl__") then
+    print("INFO Loading settings from " + getenv("settings"))
+    loadscript("$settings")
+  end if
 end
+
+; #############################################################################
+
 ; Load input_file_info, dataset_info and variable_info from _info.ncl
 begin
-  vardeffiles = \
-    str_match_ic_regex(diag_script_info@input_files, ".*_info\" + ".ncl")
-  if (.not. ismissing(vardeffiles)) then
-    vardeffile = diag_script_info@input_files(0)
-    print("INFO Loading input data description from " + vardeffile)
-    loadscript(vardeffile)
+  if .not. isvar("__interface_ncl__") then
+    vardeffiles = diag_script_info@input_files
+    nvdf = dimsizes(vardeffiles)
+    do i = 0, nvdf - 1
+      isncl = .not.ismissing(str_match_ic_regex(vardeffiles(i), \
+                                                ".*_info\" + ".ncl"))
+      if (.not. ismissing(vardeffiles(i)) .and. isncl) then
+        vardeffile = vardeffiles(i)
+        print("INFO Loading input data description from " + vardeffile)
+        loadscript(vardeffile)
+      end if
+    end do
+  end if
+end
+
+; #############################################################################
+
+begin
+  if .not. isvar("__interface_ncl__") then
+    ; Add trailing slash to paths
+    config_user_info@plot_dir = config_user_info@plot_dir + "/"
+    config_user_info@run_dir = config_user_info@run_dir + "/"
+    config_user_info@work_dir = config_user_info@work_dir + "/"
+
+    ; Define current script name as global variable (following Python syntax)
+    DIAG_SCRIPT = diag_script_info@script
   end if
 end
 
+; #############################################################################
+; Load standard interface scripts
+load "$diag_scripts/../interface_scripts/logging.ncl"
+load "$diag_scripts/../interface_scripts/auxiliary.ncl"
+load "$diag_scripts/../interface_scripts/data_handling.ncl"
+
+; #############################################################################
+; Convenience functions for selecting input_file_info and related attributes
+undef("select_metadata_by_atts")
+function select_metadata_by_atts(info:list,
+                                 att[1]:logical)
+;
+; Arguments
+;   info: a list of input_file_info logicals containing all metadata
+;   att: a scalar logical whose attributes are to be matched in the
+;        input list
+;
+; Return value
+;   A list of logicals containing only the metadata matching the given
+;   attribute-value pairs.
+;
+; Description
+;   Metadata information is stored in a list of input_file_info logicals.
+;   This function extracts the list elements whose attributes match the
+;   requested ones.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;   20190507-A_laue_ax: missing attributes are now allowed; a warning message
+;                       is issued for each missing attribute
+;   20181106_A-righ_ma: written.
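+;
+; Usage example (an illustrative sketch, not part of the original code;
+; the short name "tas" and the dataset name are hypothetical):
+;     atts = True
+;     atts@short_name = "tas"
+;     atts@dataset = "MPI-ESM-LR"
+;     tas_info = select_metadata_by_atts(input_file_info, atts)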
+;
+local funcname, scriptname, ninfo, natts, l_flag, ii, jj, ca
 begin
-  ; Add trailing slash to paths
-  config_user_info@plot_dir = config_user_info@plot_dir + "/"
-  config_user_info@run_dir = config_user_info@run_dir + "/"
-  config_user_info@work_dir = config_user_info@work_dir + "/"
+  funcname = "select_metadata_by_atts"
+  scriptname = "interface_scripts/interface.ncl"
+  enter_msg(scriptname, funcname)
+
+  ninfo = ListCount(info)
+  atts = getvaratts(att)
+  natts = dimsizes(atts)
+  item = NewList("fifo")
+  do ii = 0, ninfo - 1  ; loop over input_file_info elements
+    l_flag = True
+    do jj = 0, natts - 1  ; loop over requested attributes
+      if (.not.isatt(info[ii], atts(jj))) then
+        error_msg("w", scriptname, funcname, \
+                  "missing " + atts(jj) + " attribute")
+        l_flag = False
+      else
+        if (info[ii]@$atts(jj)$.ne.att@$atts(jj)$) then
+          l_flag = False
+        end if
+      end if
+    end do
+    if (l_flag) then
+      ListAppend(item, info[ii])
+    end if
+  end do
 
-  ; Copy some info into legacy names
-  diag_script = diag_script_info@script
-  if isvar("variable_info") then
-    variables = (/variable_info@short_name/)
-    field_types = (/variable_info@field/)
+  leave_msg(scriptname, funcname)
+  return(item)
+
+end
+
+; #############################################################################
+undef("select_metadata_by_name")
+function select_metadata_by_name(info:list,
+                                 variable:string)
+;
+; Arguments
+;   info: the input_file_info logical containing all metadata
+;   variable: variable's short name (according to CMOR standard)
+;
+; Return value
+;   A list of logicals containing only the metadata matching the variable's
+;   short name.
+;
+; Description
+;   Metadata information is stored in a list of input_file_info logicals.
+;   This function extracts the list elements matching the given variable's
+;   short name.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;   20181105_A-righ_ma: written.
+;
+local funcname, scriptname
+begin
+
+  funcname = "select_metadata_by_name"
+  scriptname = "interface_scripts/interface.ncl"
+  enter_msg(scriptname, funcname)
+
+  atts = True
+  atts@short_name = variable
+  item = select_metadata_by_atts(info, atts)
+
+  leave_msg(scriptname, funcname)
+  return(item)
+
+end
+
+; #############################################################################
+undef("metadata_att_as_array")
+function metadata_att_as_array(inlist:list,
+                               att[1]:string)
+;
+; Arguments
+;   inlist: a list
+;   att: a string with the attribute to be searched for
+;
+; Description
+;   This function retrieves the values of the attribute att from each element
+;   of the list and returns them as an array.
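+;
+; Usage example (an illustrative sketch, not part of the original code;
+; it assumes each list element carries a "dataset" attribute):
+;     datasets = metadata_att_as_array(input_file_info, "dataset")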
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;   20181119-A_laue_ax: missing attributes are now allowed; error message is
+;                       only issued if all attributes are missing
+;   20180711-A_righ_ma: written.
+;
+local funcname, scriptname
+begin
+
+  funcname = "metadata_att_as_array"
+  scriptname = "interface_scripts/interface.ncl"
+  enter_msg(scriptname, funcname)
+
+  do ii = 0, ListCount(inlist) - 1
+    if (isatt(inlist[ii], att)) then
+      if (.not.isdefined("out")) then
+        set_type = typeof(inlist[ii]@$att$)
+        out = new(ListCount(inlist), set_type)
+      end if
+      if (typeof(inlist[ii]@$att$).ne.set_type) then
+        error_msg("f", scriptname, funcname, "attributes must be of the " + \
+                  "same type")
+      end if
+      out(ii) = inlist[ii]@$att$
+    end if
+  end do
+
+  if (all(ismissing(out))) then
+    error_msg("f", scriptname, funcname, "attribute " + att + " not " + \
+              "found in list element(s)")
   end if
+
+  leave_msg(scriptname, funcname)
+
+  return(out)
 
 end
 
-; Load other interface scripts
-load "./interface_scripts/logging.ncl"
-load "./interface_scripts/auxiliary.ncl"
-load "./interface_scripts/data_handling.ncl"
+; Remember that the interface is now loaded.
+begin
+  __interface_ncl__ = True
+end
diff --git a/esmvaltool/interface_scripts/logging.ncl b/esmvaltool/interface_scripts/logging.ncl
index 73931a7292..9cd90d1247 100644
--- a/esmvaltool/interface_scripts/logging.ncl
+++ b/esmvaltool/interface_scripts/logging.ncl
@@ -12,16 +12,10 @@
 ;    procedure error_msg
 ;    procedure tool_stop
 ;    procedure exit_if_missing_atts
-;    procedure write_filelist
-;    procedure write_references
-;    procedure inquire_and_save_fileinfo
+;    procedure log_provenance
 ;
 ; #############################################################################
 
-gOldVar = ""
-gOldDiag = ""
-load "./interface_scripts/write_header.ncl"
-
 ; WARNING: no other interface_scripts shall be loaded here, to avoid nested
 ; loops (since they will try to load logging.ncl themselves).
 
@@ -221,7 +215,7 @@ procedure tool_stop()
 ; Arguments
 ;
 ; Description
-;   Forces the tool execution to stop at any point of an ncl script.
+;   Force the tool execution to stop at any point of an ncl script.
;   For debugging purposes only.
 ;
 ; Caveats
@@ -280,368 +274,77 @@ begin
 end
 
 ; #############################################################################
-undef("write_filelist")
-procedure write_filelist(script:string,
-                         variable:string,
-                         dataset:string,
-                         filelist:string,
-                         sourcefilelist:string,
-                         fixfile:string,
-                         sourcefixfile:string,
-                         sourcereflist:string,
-                         sourcetidlist:string,
-                         version:string)
+undef("log_provenance")
+procedure log_provenance(nc_file:string,
+                         outfile:string,
+                         caption:string,
+                         statistics[*]:string,
+                         domains[*]:string,
+                         plot_types[*]:string,
+                         diag_authors[*]:string,
+                         diag_refs[*]:string,
+                         infiles[*]:string)
 ;
 ; Arguments
-;   script: name of the diagnostic script for which the input files are
-;           written to the log file
-;   variable: name of variable
-;   dataset: name of dataset
-;   filelist: list of filenames processed to produce the file to be written
-;   sourcefilelist: list of filenames processed to produce the input files
-;                   of the file to be written (e.g., input files used to
-;                   produce preproc files then used to process a derived
-;                   variable)
-;   fixfile: filename of a FIXFILE applied to process the data to be written
-;            (if applicable)
-;   sourcefixfile: filename of a FIXFILE applied to the files in
-;                  sourcefilelist (if applicable)
-;   sourcereflist: references from original source files (if available)
-;   sourcetidlist: tracking_id(s) of original source files (if available)
-;   version: version of the ESMValTool
+;   nc_file: netCDF file with data related to the plot
+;   outfile: file name of the figure (including path)
+;   caption: figure caption
+;   statistics: list of metrics/statistics calculated
+;   domains: domain(s) of the data displayed
+;   plot_types: plot type(s) of the figure
+;   diag_authors: authors related to the diagnostic
+;   diag_refs: references related to the diagnostic
+;   infiles: input files (from preproc) used to generate the plot
 ;
 ; Description
-;   Save the following details to the log file
-;     - ESMValTtool version
-;     - list of input files processed
-;     - name of the "FIXFILE" applied (if any)
+;   Create a YAML file with metadata from the diagnostic script and save it
+;   in the run directory.
 ;
 ; Caveats
 ;
 ; References
 ;
 ; Modification history
-;   20150630-A_laue_ax: written.
+;   20190415-A_righ_ma: turn domains into a list.
+;   20190415-A_righ_ma: extended to avoid overwriting previous call.
+;   20190225-A_bock_li: written.
 ;
-local funcname, scriptname, output_refs, hline
+local funcname, scriptname, yaml_file, outstring, existing
 begin
 
-  funcname = "write_filelist"
+  funcname = "log_provenance"
   scriptname = "interface_scripts/logging.ncl"
   enter_msg(scriptname, funcname)
 
-  if (filelist(0).eq."") then  ; derived variable or no information
-    return
-  end if
-
-  hline = "-------------------------"
-  hline = hline + hline + hline
-
-  ; Output refs file
-  output_refs = config_user_info@run_dir + "/references-acknowledgements.txt"
-
-  s_open = "echo " + str_get_dq
-  s_close = str_get_dq + " >> " + output_refs
-
-  ; If first time (empty output_refs) write header
-  if (toint(systemfunc("wc -c < " + output_refs)) .eq. 
0) then - write_header(s_open, hline, s_close) - end if - - if (script.ne.gOldDiag) then - ; Add script name - system(s_open + "PREPROCESSING/REFORMATTING (ESMValTool v" + \ - version + "):" + s_close) - system("echo " + " >> " + output_refs) - gOldDiag = script - end if - - nfields = str_fields_count(filelist(0), "/") - if (nfields.gt.1) then - path = "/" - do i = 1, nfields - 1 - path = path + str_get_field(filelist(0), i, "/") + "/" - end do - else - path = "" - end if - - if (gOldVar.ne.variable) then - system(s_open + " Variable: " + variable + s_close) - system("echo " + " >> " + output_refs) - end if - gOldVar = variable - system(s_open + " Dataset: " + dataset + s_close) - system(s_open + " Input path: " + path + s_close) - system(s_open + " Input file(s):" + s_close) - - dsizes = dimsizes(filelist) - do i = 0, dsizes(0) - 1 - nfields = str_fields_count(filelist(i), "/") - fname = str_get_field(filelist(i), nfields, "/") - system(s_open + " " + sprinti("(%i) ", i + 1) + fname + s_close) - end do - - dsizes = dimsizes(sourcefilelist) - if (sourcefilelist(0).ne."") then - system(s_open + " Original source file(s) of all input file(s):" + \ - s_close) - do i = 0, dsizes(0) - 1 - if (.not.ismissing(sourcetidlist(i))) then - system(s_open + " -S- " + sprinti("(%i) ", i + 1) + \ - sourcefilelist(i) + " (tracking_id: " + \ - sourcetidlist(i) + ") " + s_close) - else - system(s_open + " -S- " + sprinti("(%i) ", i + 1) + \ - sourcefilelist(i) + s_close) - end if - end do + ; Define output + outstring = new(9, string) + + ; Set yaml file path + yaml_file = config_user_info@run_dir + "diagnostic_provenance.yml" + + ; Save entries + outstring(0) = "? " + nc_file + outstring(1) = ": ancestors: [" + str_join(infiles, ", ") + "]" + outstring(2) = " authors: [" + str_join(diag_authors, ", ") + "]" + outstring(3) = " caption: '" + caption + "'" + outstring(4) = " domains: [" + str_join(domains, ", ") + "]" + outstring(5) = " plot_file: " + outfile + outstring(6) = " plot_types: [" + str_join(plot_types, ", ") + "]" + outstring(7) = " references: [" + str_join(diag_refs, ", ") + "]" + outstring(8) = " statistics: [" + str_join(statistics, ", ") + "]" + + ; Save existing information to avoid overwriting + if (fileexists(yaml_file)) then + existing = asciiread(yaml_file, -1, "string") + outstring := array_append_record(existing, outstring, 0) + delete(existing) end if - if ((sourcefixfile.ne."").and.(sourcefilelist(0).ne."")) then - system(s_open + " Fixes applied to original source file(s): " + \ - sourcefixfile + s_close) - else - system(s_open + " Fixes applied to input file(s): " + fixfile + s_close) - end if - - dsizes = dimsizes(sourcereflist) - if (sourcereflist(0).ne."") then - system(s_open + " Reference(s) of original source file(s):" + s_close) - do i = 0, dsizes(0) - 1 - system(s_open + " " + sprinti("(%i) ", i + 1) + \ - sourcereflist(i) + s_close) - end do - end if + ; Save provenance information + asciiwrite(yaml_file, outstring) - system("echo " + " >> " + output_refs) + log_info("Provenance information saved in " + yaml_file) leave_msg(scriptname, funcname) -end - -; ############################################################################# -undef("write_references") -procedure write_references(script[1]:string, - auth:string, - contr:string, - diag:string, - obs:string, - proj:string) -; -; Arguments -; script: name of the diagnostic script for which reference are written -; auth: list of authors tags (A_YyXx) -; contr: list of cotributors tags (A_YyXx) -; diag: list of references 
for the diag_scripte diagnostics (DNNN) -; obs: list of references for the observations (ONNN) -; proj: list of projects to be acknowledged (PNNN) -; -; Description -; Write references and acknowledgments in the log file -; -; Caveats -; -; References -; -; Modification history -; 20151028-A_laue_ax: moved writing of diagnostic section header to -; separate routine (write_diag_header) for easier -; use with Python routines -; 20150630-A_laue_ax: moved writing of header to separate routine -; (write_header) -; 20121128-A_righ_ma: written. -; -local funcname, scriptname, master_refs, output_refs, hline, pos, str -begin - - funcname = "write_references" - scriptname = "interface_scripts/logging.ncl" - enter_msg(scriptname, funcname) - - ; hline = "-------------------------" - ; hline_dble = "=========================" - ; hline = hline + hline + hline - ; hline_dble = hline_dble + hline_dble + hline_dble - - ; Master refs file - ; master_refs = MASTER_REFS - - ; Output refs file - ; output_refs = config_user_info@run_dir + "/references-acknowledgements.txt" - - ; s_open = "echo " + str_get_dq - ; s_close = str_get_dq + " >> " + output_refs - - ; If first time (empty output_refs) write header - ; if (.not. fileexists(output_refs)) then - ; write_header(s_open, hline, s_close) - ; end if - - ; write diagnostic header - ; write_diag_header(s_open, hline_dble, s_close, master_refs, output_refs, \ - ; auth, contr, diag, obs, proj, script) - - leave_msg(scriptname, funcname) - -end - -; ############################################################################# -undef("inquire_and_save_fileinfo") -procedure inquire_and_save_fileinfo(filename:string, - dataset:string, - variable:string) -; -; Arguments -; filename: file name of input file (netCDF) -; dataset: name of dataset -; variable: name of variable -; -; Description -; Read attributes from given file and write info to the log file -; -; Caveats -; -; References -; -; Modification history -; 20151028-A_laue_ax: written. -; -begin - fid = addfile(filename, "r") - - filelist = new(10000, string) - - if (isatt(fid, "fixfile")) then - fixfile = fid@fixfile - else - fixfile = "none" - end if - - if (isatt(fid, "version")) then - version = fid@version - else - version = "unknown" - end if - - delete(fid) - - filelist(0) = filename - n = 1 - ns = 0 - nr = 0 - - if (n.eq.0) then - filelist(0) = "not specified" ; list of files read - n = 1 - sourcefilelist = "" ; FIXFILE applied to original source files - sourcefixfile = "" ; FIXFILE - sourcereflist = "" ; list of references - sourcetidlist = "" ; list of tracking_ids - else - ; Now we check whether the files read (i.e., all names in "filelist") - ; contain attribute(s) "infile_????". If so, we read these attributes - ; and store them in "sourcefilelist". sourcefilelist then contains - ; a list of original source files from which the files in "filelist" - ; have originally been created, e.g., the original CMIP5 files or the - ; original observational files. 
- - sourcefilelist = new(dimsizes(filelist), string) - sourcefilelist(0) = "" - sourcereflist = new(dimsizes(filelist), string) - sourcereflist(0) = "" - sourcefixfile = "none" - sourcetidlist = new(dimsizes(filelist), string) - - do m = 0, n - 1 - if (isfilepresent(filelist(m))) then - sfile = addfile(filelist(m), "r") - if (isatt(sfile, "fixfile")) then - sourcefixfile = sfile@fixfile - end if - - ns = 0 - do while (ns.lt.dimsizes(sourcefilelist)) - att = sprinti("infile_%04d", ns) - if (isatt(sfile, att)) then - sourcefilelist(ns) = sfile@$att$ - ns = ns + 1 - else - break - end if - end do - delete(sfile) - end if - end do - - ; Check for possible references and/or tracking_ids in the original - ; source files. - - if (ns.gt.0) then - ns = ns - 1 - - do i = 0, ns - if (isfilepresent(sourcefilelist(i))) then - sfile = addfile(sourcefilelist(i), "r") - - ; check for tracking_id - if (isatt(sfile, "tracking_id")) then - sourcetidlist(i) = sfile@tracking_id - end if - - ; check for references - if (isatt(sfile, "reference")) then - ; check if reference is already in list; - ; if reference is already in list, do not add again - newref = True - do j = 0, nr - 1 - if (sourcereflist(j).eq.sfile@reference) then - newref = False - break - end if - end do - - ; reference was not found in list --> add to list - if (newref) then - sourcereflist(nr) = sfile@reference - nr = nr + 1 - end if - end if - - ; if the source files contain derived variables they might contain - ; lists of source files as well - ; ---> check for source files of source files and (if any found) - ; add them to the list of files to be written to the log-file - nss = 1 - do while ((ns + nss).lt.dimsizes(sourcefilelist)) - att = sprinti("infile_%04d", nss - 1) - if (isatt(sfile, att)) then - sourcefilelist(ns + nss) = sfile@$att$ - nss = nss + 1 - else - break - end if - end do - - delete(sfile) - ns = ns + nss - 1 - - end if - end do - if (nr.gt.0) then - nr = nr - 1 - end if - - end if ; if ns > 0 - end if - - ; Write information on processed files to log-file - write_filelist(diag_script, variable, dataset, filelist(0:n - 1), \ - sourcefilelist(0:ns), fixfile, sourcefixfile, \ - sourcereflist(0:nr), sourcetidlist(0:ns), version) - - delete(filelist) - delete(sourcefilelist) - delete(sourcereflist) - delete(sourcetidlist) end diff --git a/esmvaltool/interface_scripts/write_header.ncl b/esmvaltool/interface_scripts/write_header.ncl deleted file mode 100644 index c61941ebd9..0000000000 --- a/esmvaltool/interface_scripts/write_header.ncl +++ /dev/null @@ -1,190 +0,0 @@ -; ############################################################################# -undef("write_header") -procedure write_header(s_open:string, - hline:string, - s_close:string) -; -; Arguments -; s_open: 1st part of system command line to add text to log file -; hline: separator (horizontal line) -; s_close: 2nd part of system command line to add text to log file -; -; Description -; Writes header of the log file -; -; Caveats -; -; References -; -; Modification history -; 20160713-A_laue_ax: updated ESMValTool reference -; 20150630-A_laue_ax: written. 
-; -local funcname, scriptname, recipe, version -begin - - funcname = "write_header" - scriptname = "interface_scripts/logging.ncl" - - ; Recipe in use - recipe = diag_script_info@recipe - - ; ESMValTool version - version = diag_script_info@version - - dq = "\" + str_get_dq() - - system(s_open + hline + s_close) - system(s_open + "+++++++++++++ " + \ - "ESMVal TOOL REFERENCES and ACKNOWLEDGEMENTS LOG" + \ - " +++++++++++++" + s_close) - system(s_open + hline + s_close) - system(s_open + "" + s_close) - system(s_open + "Recipe file: " + recipe + s_close) - system(s_open + "Creation date: `date`" + s_close) - system(s_open + "ESMValTool version: " + version + s_close) - system(s_open + "Host name: " + getenv("HOST") + s_close) - system(s_open + "User name: " + getenv("USER") + s_close) - system(s_open + "" + s_close) - - system(s_open + "Licensed under the Apache License, Version 2.0 (the " \ - + dq + "License" + dq + "); you may" + s_close) - system(s_open + "not use this file except in compliance with the " \ - + "License. You may obtain" + s_close) - system(s_open + "a copy of the License at" + s_close) - system(s_open + "" + s_close) - system(s_open + "http://www.apache.org/licenses/LICENSE-2.0" + s_close) - system(s_open + "" + s_close) - system(s_open + "Unless required by applicable law or agreed to in " \ - + "writing, software" + s_close) - system(s_open + "distributed under the License is distributed on an " \ - + dq + "AS IS" + dq + "BASIS," + s_close) - system(s_open + "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either " \ - + "express or implied." + s_close) - system(s_open + "See the License for the specific language governing " \ - + "permissions and" + s_close) - system(s_open + "limitations under the License." + s_close) - system(s_open + "" + s_close) - system(s_open + "Please acknowledge the use of the ESMValTool." + s_close) - system(s_open + "Please cite Eyring et al., Geosci. Model Dev., " + \ - "9, 1747-1802, doi:10.5194/gmd-9-1747-2016, 2016." + s_close) - system(s_open + "For the specific diagnostics, see below." + s_close) - system(s_open + "" + s_close) - -end - -; ############################################################################# -undef("write_diag_header") -procedure write_diag_header(s_open:string, - hline:string, - s_close:string, - master_refs:string, - output_refs:string, - auth:string, - contr:string, - diag:string, - obs:string, - proj:string, - script:string) -; -; Arguments -; s_open: 1st part of system command line to add text to log file -; hline: separator (horizontal line) -; s_close: 2nd part of system command line to add text to log file -; master_ref: file name of the master reference file -; (i.e. MASTER_authors-refs-acknow.txt) -; output_refs: filename of log file -; auth: list of authors -; contr: list of contributors -; diag: list of references for diagnostic -; obs: list of references for observations -; proj: list of project acknowledgements -; script: name of diagnostic script -; -; Description -; Writes diagnostic header section to the log file -; -; Caveats -; -; References -; -; Modification history -; 20151028-A_laue_ax: written. 
-; -begin - - ; D*** entries that shall not be extended by "Please cite " - stand_alone_entries = (/"D000", "D003", "D004"/) - - ; Add script name - system(s_open + hline + s_close) - system(s_open + "=== " + script + " ===" + s_close) - system("echo " + " >> " + output_refs) - - ; Read master refs file - data = readAsciiTable(master_refs, 1, "string", 0) - - ; Author list - if (all(auth.ne."")) then - system(s_open + "AUTHOR(S): " + s_close) - do ii = 0, dimsizes(auth) - 1 - pos = data(ind(str_squeeze( \ - str_get_field(data(:, 0), 1, "=")) .eq. auth(ii)), 0) - str = str_squeeze(str_get_field(pos, 2, "=")) - system(s_open + " -A- " + str + s_close) - end do - system("echo " + " >> " + output_refs) - end if - - ; Contributor list - if (all(contr.ne."")) then - system(s_open + "CONTRIBUTOR(S): " + s_close) - do ii = 0, dimsizes(contr) - 1 - pos = data(ind(str_squeeze( \ - str_get_field(data(:, 0), 1, "=")) .eq. contr(ii)), 0) - str = str_squeeze(str_get_field(pos, 2, "=")) - system(s_open + " -C- " + str + s_close) - end do - system("echo " + " >> " + output_refs) - end if - - ; Diagnostic reference list - if (all(diag.ne."")) then - system(s_open + "REFERENCE(S) FOR THIS DIAGNOSTIC: " + s_close) - do ii = 0, dimsizes(diag) - 1 - pos = data(ind(str_squeeze( \ - str_get_field(data(:, 0), 1, "=")) .eq. diag(ii)), 0) - str = str_squeeze(str_get_field(pos, 2, "=")) - if (all(diag(ii).ne.stand_alone_entries)) then - str = "Please cite " + str - end if - system(s_open + " -R- " + str + s_close) - end do - system("echo " + " >> " + output_refs) - end if - - ; Observations reference list - if (all(obs.ne."")) then - system(s_open + "REFERENCE(S) FOR THE OBSERVATIONS: " + s_close) - do ii = 0, dimsizes(obs) - 1 - pos = data(ind(str_squeeze( \ - str_get_field(data(:, 0), 1, "=")) .eq. obs(ii)), 0) - str = str_squeeze(str_get_field(pos, 2, "=")) - system(s_open + " -R- " + str + s_close) - end do - system("echo " + " >> " + output_refs) - end if - - ; Projects reference list - if (all(proj.ne."")) then - system(s_open + "ACKNOWLEDGEMENTS FOR THE PROJECTS: " + s_close) - do ii = 0, dimsizes(proj) - 1 - pos = data(ind(str_squeeze( \ - str_get_field(data(:, 0), 1, "=")) .eq. proj(ii)), 0) - str = str_squeeze(str_get_field(pos, 2, "=")) - system(s_open + " -P- " + str + s_close) - end do - system("echo " + " >> " + output_refs) - end if - -end diff --git a/esmvaltool/interface_scripts/write_references.ncl b/esmvaltool/interface_scripts/write_references.ncl deleted file mode 100644 index e21e3dbd27..0000000000 --- a/esmvaltool/interface_scripts/write_references.ncl +++ /dev/null @@ -1,36 +0,0 @@ -; ####################################################################### -; This code reads references information from the master file and -; writes it to the reference/acknowledgement output file. -; This is a stand-alone version of the function "write_references" -; (interface_scripts/logging.ncl) called by Python to write the -; reference/acknowledgement output file. 
-; ####################################################################### - -; definition of functions write_header and write_diag_header -load "./interface_scripts/write_header.ncl" - - -begin - - hline = "-------------------------" - hline_dble = "=========================" - hline = hline + hline + hline - hline_dble = hline_dble + hline_dble + hline_dble - - ; Master refs file "master_refs" and output refs file "output_refs" as - ; well as "auth", "contr", "diag", "obs", "proj" are defined in - ; "$ESMValTool_interface_data/ncl.ref.info" - - s_open = "echo " + str_get_dq - s_close = str_get_dq + " >> " + out_refs - - ; If first time (empty output_refs) write header - if (toint(systemfunc("wc -c < " + out_refs)) .eq. 0) then - write_header(s_open, hline, s_close) - end if - - ; write diagnostic header section - write_diag_header(s_open, hline_dble, s_close, in_refs, out_refs, ref_auth, \ - ref_contr, ref_diag, ref_obs, ref_proj, ref_script) - -end diff --git a/esmvaltool/preprocessor/__init__.py b/esmvaltool/preprocessor/__init__.py index 9609f324e7..2f7d53f6f8 100644 --- a/esmvaltool/preprocessor/__init__.py +++ b/esmvaltool/preprocessor/__init__.py @@ -1,30 +1,30 @@ """Preprocessor module.""" +import copy +import inspect import logging -import os -from iris.cube import Cube +import six +from iris.cube import Cube, CubeList -from .._task import AbstractTask -from ._area_pp import area_average as average_region -from ._area_pp import area_slice as extract_region -from ._area_pp import zonal_means +from .._provenance import TrackedFile +from .._task import BaseTask +from ._area import (average_region, extract_named_regions, extract_region, + zonal_means) from ._derive import derive from ._download import download -from ._io import cleanup, concatenate, extract_metadata, load_cubes, save -from ._mask import (mask_fillvalues, mask_landsea, mask_above_threshold, - mask_below_threshold, mask_inside_range, - mask_outside_range) +from ._io import (_get_debug_filename, cleanup, concatenate, load, save, + write_metadata) +from ._mask import (mask_above_threshold, mask_below_threshold, + mask_fillvalues, mask_inside_range, mask_landsea, + mask_landseaice, mask_outside_range) from ._multimodel import multi_model_statistics from ._reformat import (cmor_check_data, cmor_check_metadata, fix_data, fix_file, fix_metadata) -from ._regrid import regrid -from ._regrid import vinterp as extract_levels -from ._volume_pp import depth_integration, extract_trajectory, extract_transect -from ._volume_pp import volume_average as average_volume -from ._volume_pp import volume_slice as extract_volume -from ._time_area import time_slice as extract_time -from ._time_area import (extract_month, extract_season, seasonal_mean, - time_average) +from ._regrid import extract_levels, regrid +from ._time import (annual_mean, extract_month, extract_season, extract_time, + regrid_time, seasonal_mean, time_average) +from ._volume import (average_volume, depth_integration, extract_trajectory, + extract_transect, extract_volume) logger = logging.getLogger(__name__) @@ -32,8 +32,8 @@ 'download', # File reformatting/CMORization 'fix_file', - # Load cube from file - 'load_cubes', + # Load cubes from file + 'load', # Derive variable 'derive', # Metadata reformatting/CMORization @@ -51,6 +51,8 @@ 'extract_levels', # Mask landsea (fx or Natural Earth) 'mask_landsea', + # Mask landseaice, sftgif only + 'mask_landseaice', # Regridding 'regrid', # Masking missing values @@ -66,239 +68,349 @@ 'extract_transect', # 'average_zone': 
average_zone,
     # 'cross_section': cross_section,
-    # Time operations
-    # 'annual_cycle': annual_cycle,
-    # 'diurnal_cycle': diurnal_cycle,
     'multi_model_statistics',
     # Grid-point operations
+    'extract_named_regions',
     'depth_integration',
     'average_region',
     'average_volume',
+    # Time operations
+    # 'annual_cycle': annual_cycle,
+    # 'diurnal_cycle': diurnal_cycle,
     'zonal_means',
+    'annual_mean',
     'seasonal_mean',
     'time_average',
+    'regrid_time',
     'cmor_check_data',
     # Save to file
     'save',
     'cleanup',
-    'extract_metadata',
 ]
 
 DEFAULT_ORDER = tuple(__all__)
-assert set(DEFAULT_ORDER).issubset(set(globals()))
 
+# The order of initial and final steps cannot be configured
 INITIAL_STEPS = DEFAULT_ORDER[:DEFAULT_ORDER.index('fix_data') + 1]
 FINAL_STEPS = DEFAULT_ORDER[DEFAULT_ORDER.index('cmor_check_data'):]
 
 MULTI_MODEL_FUNCTIONS = {
     'multi_model_statistics',
     'mask_fillvalues',
-    'extract_metadata',
 }
-assert MULTI_MODEL_FUNCTIONS.issubset(set(DEFAULT_ORDER))
 
-# Preprocessor functions that take a list instead of a file/Cube as input.
-_LIST_INPUT_FUNCTIONS = MULTI_MODEL_FUNCTIONS | {
-    'download',
-    'load_cubes',
-    'concatenate',
-    'derive',
-    'save',
-    'cleanup',
-}
-assert _LIST_INPUT_FUNCTIONS.issubset(set(DEFAULT_ORDER))
 
-# Preprocessor functions that return a list instead of a file/Cube.
-_LIST_OUTPUT_FUNCTIONS = MULTI_MODEL_FUNCTIONS | {
-    'download',
-    'load_cubes',
-    'save',
-    'cleanup',
-}
-assert _LIST_OUTPUT_FUNCTIONS.issubset(set(DEFAULT_ORDER))
+def _get_itype(step):
+    """Get the input type of a preprocessor function."""
+    function = globals()[step]
+    itype = inspect.getargspec(function).args[0]
+    return itype
+
+
+def check_preprocessor_settings(settings):
+    """Check preprocessor settings."""
+    # The inspect functions getargspec and getcallargs are deprecated
+    # in Python 3, but their replacements are not available in Python 2.
+    # TODO: Use the new Python 3 inspect API
+    for step in settings:
+        if step not in DEFAULT_ORDER:
+            raise ValueError(
+                "Unknown preprocessor function '{}', choose from: {}".format(
+                    step, ', '.join(DEFAULT_ORDER)))
+
+        function = globals()[step]
+        argspec = inspect.getargspec(function)
+        args = argspec.args[1:]
+        # Check for invalid arguments
+        invalid_args = set(settings[step]) - set(args)
+        if invalid_args:
+            raise ValueError(
+                "Invalid argument(s): {} encountered for preprocessor "
+                "function {}. 
\nValid arguments are: [{}]".format( + ', '.join(invalid_args), step, ', '.join(args))) + + # Check for missing arguments + defaults = argspec.defaults + end = None if defaults is None else -len(defaults) + missing_args = set(args[:end]) - set(settings[step]) + if missing_args: + raise ValueError( + "Missing required argument(s) {} for preprocessor " + "function {}".format(missing_args, step)) + # Final sanity check in case the above fails to catch a mistake + try: + inspect.getcallargs(function, None, **settings[step]) + except TypeError: + logger.error( + "Wrong preprocessor function arguments in " + "function '%s'", step) + raise + + +def _check_multi_model_settings(products): + """Check that multi dataset settings are identical for all products.""" + multi_model_steps = (step for step in MULTI_MODEL_FUNCTIONS + if any(step in p.settings for p in products)) + for step in multi_model_steps: + reference = None + for product in products: + settings = product.settings.get(step) + if settings is None: + continue + elif reference is None: + reference = product + elif reference.settings[step] != settings: + raise ValueError( + "Unable to combine differing multi-dataset settings for " + "{} and {}, {} and {}".format( + reference.filename, product.filename, + reference.settings[step], settings)) + + +def _get_multi_model_settings(products, step): + """Select settings for multi model step""" + _check_multi_model_settings(products) + settings = {} + exclude = set() + for product in products: + if step in product.settings: + settings = product.settings[step] + else: + exclude.add(product) + return settings, exclude -def split_settings(settings, step, order=DEFAULT_ORDER): - """Split settings, using step as a separator.""" - before = {} - for _step in order: - if _step == step: - break - if _step in settings: - before[_step] = settings[_step] - after = { - k: v - for k, v in settings.items() if not (k == step or k in before) - } - return before, after +def _run_preproc_function(function, items, kwargs): + """Run preprocessor function.""" + msg = "{}({}, {})".format(function.__name__, items, kwargs) + logger.debug("Running %s", msg) + try: + return function(items, **kwargs) + except Exception: + logger.error("Failed to run %s", msg) + raise -def _get_multi_model_settings(all_settings, step): - """Select settings for multi model step""" - for settings in all_settings.values(): - if step in settings: - return {step: settings[step]} - return None - - -def _group_input(in_files, out_files): - """Group a list of input files by output file.""" - grouped_files = {} - - def get_matching(in_file): - """Find the output file which matches input file best.""" - in_chunks = os.path.basename(in_file).split('_') - score = 0 - fname = [] - for out_file in out_files: - out_chunks = os.path.basename(out_file).split('_') - tmp = sum(c in out_chunks for c in in_chunks) - if tmp > score: - score = tmp - fname = [out_file] - elif tmp == score: - fname.append(out_file) - if not fname: - logger.warning( - "Unable to find matching output file for input file %s", - in_file) - return fname - - # Group input files by output file - for in_file in in_files: - for out_file in get_matching(in_file): - if out_file not in grouped_files: - grouped_files[out_file] = [] - grouped_files[out_file].append(in_file) - - return grouped_files - - -def preprocess_multi_model(input_files, all_settings, order, debug=False): - """Run preprocessor on multiple models for a single variable.""" - # Group input files by output file - all_items = 
_group_input(input_files, all_settings) - logger.debug("Processing %s", all_items) - - # List of all preprocessor steps used - steps = [ - step for step in order - if any(step in settings for settings in all_settings.values()) - ] - # Find multi model steps - # This assumes that the multi model settings are the same for all models - multi_model_steps = [ - step for step in steps if step in MULTI_MODEL_FUNCTIONS - ] - # Append a dummy multi model step if the final step is not multi model - dummy_step = object() - if steps[-1] not in MULTI_MODEL_FUNCTIONS: - multi_model_steps.append(dummy_step) - - # Process - for step in multi_model_steps: - multi_model_settings = _get_multi_model_settings(all_settings, step) - # Run single model steps - for name in all_settings: - settings, all_settings[name] = split_settings( - all_settings[name], step, order) - all_items[name] = preprocess(all_items[name], settings, order, - debug) - if step is not dummy_step: - # Run multi model step - multi_model_items = [ - item for name in all_items for item in all_items[name] - ] - all_items = {} - result = preprocess(multi_model_items, multi_model_settings, order, - debug) - for item in result: - if isinstance(item, Cube): - name = item.attributes['_filename'] - if name not in all_items: - all_items[name] = [] - all_items[name].append(item) - else: - all_items[item] = [item] - - return [filename for name in all_items for filename in all_items[name]] - - -def preprocess(items, settings, order, debug=False): +def preprocess(items, step, **settings): """Run preprocessor""" - steps = (step for step in order if step in settings) - for step in steps: - logger.debug("Running preprocessor step %s", step) - function = globals()[step] - args = settings[step] - - if step in _LIST_INPUT_FUNCTIONS: - logger.debug("Running %s(%s, %s)", function.__name__, items, args) - result = [function(items, **args)] + logger.debug("Running preprocessor step %s", step) + function = globals()[step] + itype = _get_itype(step) + + result = [] + if itype.endswith('s'): + result.append(_run_preproc_function(function, items, settings)) + else: + for item in items: + result.append(_run_preproc_function(function, item, settings)) + + items = [] + for item in result: + if isinstance(item, + (PreprocessorFile, Cube, six.string_types)): + items.append(item) else: - result = [] - for item in items: - logger.debug("Running %s(%s, %s)", function.__name__, item, - args) - result.append(function(item, **args)) - - if step in _LIST_OUTPUT_FUNCTIONS: - items = tuple(item for subitem in result for item in subitem) - else: - items = tuple(result) + items.extend(item) + + return items + +def get_step_blocks(steps, order): + """Group steps into execution blocks.""" + blocks = [] + prev_step_type = None + for step in order[order.index('load') + 1:order.index('save')]: + if step in steps: + step_type = step in MULTI_MODEL_FUNCTIONS + if step_type is not prev_step_type: + block = [] + blocks.append(block) + prev_step_type = step_type + block.append(step) + return blocks + + +class PreprocessorFile(TrackedFile): + """Preprocessor output file.""" + + def __init__(self, attributes, settings, ancestors=None): + super(PreprocessorFile, self).__init__(attributes['filename'], + attributes, ancestors) + + self.settings = copy.deepcopy(settings) + if 'save' not in self.settings: + self.settings['save'] = {} + self.settings['save']['filename'] = self.filename + + self.files = [a.filename for a in ancestors or ()] + + self._cubes = None + self._prepared = False + + def 
check(self): + """Check preprocessor settings.""" + check_preprocessor_settings(self.settings) + + def apply(self, step, debug=False): + """Apply preprocessor step to product.""" + if step not in self.settings: + raise ValueError( + "PreprocessorFile {} has no settings for step {}".format( + self, step)) + self.cubes = preprocess(self.cubes, step, **self.settings[step]) if debug: - logger.debug("Result %s", items) - cubes = [item for item in items if isinstance(item, Cube)] - save(cubes, debug=debug, step=step) + logger.debug("Result %s", self.cubes) + filename = _get_debug_filename(self.filename, step) + save(self.cubes, filename) + + def prepare(self): + """Apply preliminary file operations on product.""" + if not self._prepared: + for step in DEFAULT_ORDER[:DEFAULT_ORDER.index('load')]: + if step in self.settings: + self.files = preprocess(self.files, step, + **self.settings[step]) + self._prepared = True + + @property + def cubes(self): + """Cubes.""" + if self.is_closed: + self.prepare() + self._cubes = preprocess(self.files, 'load', + **self.settings.get('load', {})) + return self._cubes + + @cubes.setter + def cubes(self, value): + self._cubes = value + + def save(self): + """Save cubes to disk.""" + if self._cubes is not None: + self.files = preprocess(self._cubes, 'save', + **self.settings['save']) + self.files = preprocess(self.files, 'cleanup', + **self.settings.get('cleanup', {})) + + def close(self): + """Close the file.""" + self.save() + self._cubes = None + + @property + def is_closed(self): + """Check if the file is closed.""" + return self._cubes is None + + def _initialize_entity(self): + """Initialize the entity representing the file.""" + super(PreprocessorFile, self)._initialize_entity() + settings = { + 'preprocessor:' + k: str(v) + for k, v in self.settings.items() + } + self.entity.add_attributes(settings) - return items +# TODO: use a custom ProductSet that raises an exception if you try to +# add the same Product twice + + +def _apply_multimodel(products, step, debug): + """Apply multi model step to products.""" + settings, exclude = _get_multi_model_settings(products, step) + + logger.debug("Applying %s to\n%s", step, '\n'.join( + str(p) for p in products - exclude)) + result = preprocess(products - exclude, step, **settings) + products = set(result) | exclude -class PreprocessingTask(AbstractTask): + if debug: + for product in products: + logger.debug("Result %s", product.filename) + if not product.is_closed: + for cube in product.cubes: + logger.debug("with cube %s", cube) + + return products + + +class PreprocessingTask(BaseTask): """Task for running the preprocessor""" - def __init__(self, - settings, - output_dir, - ancestors=None, - input_files=None, - order=DEFAULT_ORDER, - debug=None): + def __init__( + self, + products, + ancestors=None, + name='', + order=DEFAULT_ORDER, + debug=None, + write_ncl_interface=False, + ): """Initialize""" - super(PreprocessingTask, self).__init__( - settings=settings, output_dir=output_dir, ancestors=ancestors) + super(PreprocessingTask, self).__init__(ancestors=ancestors, name=name) + _check_multi_model_settings(products) + self.products = set(products) self.order = list(order) self.debug = debug - self._input_files = input_files - - def _run(self, input_files): - # If input_data is not available from ancestors and also not - # specified in self.run(input_data), use default - if not self.ancestors and not input_files: - input_files = self._input_files - output_files = preprocess_multi_model( - input_files, self.settings, 
self.order, debug=self.debug) - return output_files + self.write_ncl_interface = write_ncl_interface + + def _intialize_product_provenance(self): + """Initialize product provenance.""" + for product in self.products: + product.initialize_provenance(self.activity) + + # Hacky way to initialize the multi model products as well. + step = 'multi_model_statistics' + input_products = [p for p in self.products if step in p.settings] + if input_products: + statistic_products = input_products[0].settings[step].get( + 'output_products', {}).values() + for product in statistic_products: + product.initialize_provenance(self.activity) + + def _run(self, _): + """Run the preprocessor.""" + self._intialize_product_provenance() + + steps = { + step + for product in self.products for step in product.settings + } + blocks = get_step_blocks(steps, self.order) + for block in blocks: + logger.debug("Running block %s", block) + if block[0] in MULTI_MODEL_FUNCTIONS: + for step in block: + self.products = _apply_multimodel(self.products, step, + self.debug) + else: + for product in self.products: + logger.debug("Applying single-model steps to %s", product) + for step in block: + if step in product.settings: + product.apply(step, self.debug) + if block == blocks[-1]: + product.close() + + for product in self.products: + product.close() + metadata_files = write_metadata(self.products, + self.write_ncl_interface) + return metadata_files def __str__(self): """Get human readable description.""" - settings = dict(self.settings) - self.settings = { - os.path.basename(k): v - for k, v in self.settings.items() - } - - txt = "{}:\norder: {}\n{}".format( + order = [ + step for step in self.order + if any(step in product.settings for product in self.products) + ] + products = '\n\n'.join(str(p) for p in self.products) + txt = "{}:\norder: {}\n{}\n{}".format( self.__class__.__name__, - tuple( - step for step in self.order - if any(step in settings for settings in settings.values())), + order, + products, super(PreprocessingTask, self).str(), ) - - self.settings = settings - - if self._input_files is not None: - txt += '\ninput_files: {}'.format(self._input_files) return txt diff --git a/esmvaltool/preprocessor/_area.py b/esmvaltool/preprocessor/_area.py new file mode 100644 index 0000000000..9c7eb3589b --- /dev/null +++ b/esmvaltool/preprocessor/_area.py @@ -0,0 +1,294 @@ +""" +Area operations on data cubes. + +Allows for selecting data subsets using certain latitude and longitude bounds; +selecting geographical regions; constructing area averages; etc. +""" +import logging + +import iris +import numpy as np + + +logger = logging.getLogger(__name__) + + +# guess bounds tool +def _guess_bounds(cube, coords): + """Guess bounds of a cube, or not.""" + # check for bounds just in case + for coord in coords: + if not cube.coord(coord).has_bounds(): + cube.coord(coord).guess_bounds() + return cube + + +# slice cube over a restricted area (box) +def extract_region(cube, start_longitude, end_longitude, start_latitude, + end_latitude): + """ + Extract a region from a cube. + + Function that subsets a cube on a box (start_longitude, end_longitude, + start_latitude, end_latitude) + This function is a restriction of masked_cube_lonlat(). + + Arguments + --------- + cube: iris.cube.Cube + input cube. + + start_longitude: float + Western boundary longitude. + + end_longitude: float + Eastern boundary longitude. + + start_latitude: float + Southern Boundary latitude. + + end_latitude: float + Northern Boundary Latitude. 
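+
+    Note: this doc note is inferred from the implementation below; for
+    regular (1D) latitude/longitude grids the returned cube is put on the
+    [0, 360] longitude convention, so negative input longitudes are
+    converted to that convention.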
+
+    Returns
+    -------
+    iris.cube.Cube
+        smaller cube.
+    """
+    # Converts negative longitudes to the 0 -> 360 standard
+    start_longitude = float(start_longitude)
+    end_longitude = float(end_longitude)
+    start_latitude = float(start_latitude)
+    end_latitude = float(end_latitude)
+
+    if cube.coord('latitude').ndim == 1:
+        region_subset = cube.intersection(
+            longitude=(start_longitude, end_longitude),
+            latitude=(start_latitude, end_latitude))
+        region_subset = region_subset.intersection(longitude=(0., 360.))
+        return region_subset
+    # irregular grids: mask all cells outside the requested box
+    lats = cube.coord('latitude').points
+    lons = cube.coord('longitude').points
+    mask = np.ma.array(cube.data).mask
+    mask += np.ma.masked_where(lats < start_latitude, lats).mask
+    mask += np.ma.masked_where(lats > end_latitude, lats).mask
+    mask += np.ma.masked_where(lons < start_longitude, lons).mask
+    mask += np.ma.masked_where(lons > end_longitude, lons).mask
+    cube.data = np.ma.masked_where(mask, cube.data)
+    return cube
+
+
+def get_iris_analysis_operation(operator):
+    """
+    Determine the iris analysis operator from a string.
+
+    Arguments
+    ---------
+    operator: string
+        A named operator.
+
+    Returns
+    -------
+    function: A function from iris.analysis
+    """
+    operators = ['mean', 'median', 'std_dev', 'variance', 'min', 'max']
+    operator = operator.lower()
+    if operator not in operators:
+        raise ValueError("operator {} not recognised. "
+                         "Accepted values are: {}."
+                         "".format(operator, ', '.join(operators)))
+    operation = getattr(iris.analysis, operator.upper())
+    return operation
+
+
+def zonal_means(cube, coordinate, mean_type):
+    """
+    Get zonal means.
+
+    Function that returns zonal means along a coordinate `coordinate`;
+    the type of mean is controlled by the mean_type variable (string)::
+
+        'mean' -> MEAN
+        'median' -> MEDIAN
+        'std_dev' -> STD_DEV
+        'variance' -> VARIANCE
+        'min' -> MIN
+        'max' -> MAX
+
+    Arguments
+    ---------
+    cube: iris.cube.Cube
+        input cube.
+
+    coordinate: str
+        name of the coordinate over which the mean is computed.
+
+    mean_type: str
+        Type of analysis to use, from iris.analysis.
+
+    Returns
+    -------
+    iris.cube.Cube
+        the collapsed cube.
+    """
+    operation = get_iris_analysis_operation(mean_type)
+    return cube.collapsed(coordinate, operation)
+
+
+def tile_grid_areas(cube, fx_files):
+    """
+    Tile the grid area data to match the dataset cube.
+
+    Arguments
+    ---------
+    cube: iris.cube.Cube
+        input cube.
+    fx_files: dictionary
+        dictionary of field:filename for the fx_files
+
+    Returns
+    -------
+    iris.cube.Cube
+        Freshly tiled grid areas cube.
+    """
+    grid_areas = np.empty(0)
+    if fx_files:
+        for key, fx_file in fx_files.items():
+            if fx_file is None:
+                continue
+            logger.info('Attempting to load %s from file: %s', key, fx_file)
+            fx_cube = iris.load_cube(fx_file)
+
+            grid_areas = fx_cube.data
+            cube_shape = cube.data.shape
+            if cube.data.ndim == 4 and grid_areas.ndim == 2:
+                grid_areas = np.tile(grid_areas,
+                                     [cube_shape[0], cube_shape[1], 1, 1])
+            elif cube.data.ndim == 4 and grid_areas.ndim == 3:
+                grid_areas = np.tile(grid_areas,
+                                     [cube_shape[0], 1, 1, 1])
+            elif cube.data.ndim == 3 and grid_areas.ndim == 2:
+                grid_areas = np.tile(grid_areas,
+                                     [cube_shape[0], 1, 1])
+            else:
+                raise ValueError('Grid and dataset number of dimensions not '
+                                 'recognised: {} and {}.'
+                                 ''.format(cube.data.ndim, grid_areas.ndim))
+    return grid_areas
+
+
+# get the area average
+def average_region(cube, coord1, coord2, operator='mean', fx_files=None):
+    """
+    Determine the area average.
+
+    The average in the horizontal direction requires the coord1 and coord2
+    arguments. These strings are usually 'longitude' and 'latitude' but
+    may depend on the cube.
+
+    While this function is named `average_region`, it can be used to apply
+    several different operations in the horizontal plane: mean, standard
+    deviation, median, variance, minimum and maximum. The operation is
+    selected with the `operator` argument, which accepts the following
+    values:
+
+    +------------+--------------------------------------------------+
+    | `mean`     | Area weighted mean.                              |
+    +------------+--------------------------------------------------+
+    | `median`   | Median (not area weighted)                       |
+    +------------+--------------------------------------------------+
+    | `std_dev`  | Standard Deviation (not area weighted)           |
+    +------------+--------------------------------------------------+
+    | `variance` | Variance (not area weighted)                     |
+    +------------+--------------------------------------------------+
+    | `min`      | Minimum value                                    |
+    +------------+--------------------------------------------------+
+    | `max`      | Maximum value                                    |
+    +------------+--------------------------------------------------+
+
+    Arguments
+    ---------
+    cube: iris.cube.Cube
+        input cube.
+    coord1: str
+        Name of the first coordinate dimension
+    coord2: str
+        Name of the second coordinate dimension
+    operator: str
+        Name of the operation to apply (default: mean)
+    fx_files: dictionary
+        dictionary of field:filename for the fx_files
+
+    Returns
+    -------
+    iris.cube.Cube
+        collapsed cube.
+    """
+    grid_areas = tile_grid_areas(cube, fx_files)
+
+    if not fx_files and cube.coord('latitude').points.ndim == 2:
+        logger.error('average_region ERROR: fx_file needed to calculate grid '
+                     'cell area for irregular grids.')
+        raise iris.exceptions.CoordinateMultiDimError(cube.coord('latitude'))
+
+    if not grid_areas.any():
+        cube = _guess_bounds(cube, [coord1, coord2])
+        grid_areas = iris.analysis.cartography.area_weights(cube)
+        logger.info('Calculated grid area shape: %s', grid_areas.shape)
+
+    if cube.data.shape != grid_areas.shape:
+        raise ValueError('Cube shape ({}) does not match grid area shape '
+                         '({})'.format(cube.data.shape, grid_areas.shape))
+
+    operation = get_iris_analysis_operation(operator)
+
+    # TODO: implement weighted stdev, median, and var when available in iris.
+    # See iris issue: https://github.com/SciTools/iris/issues/3208
+
+    if operator == 'mean':
+        return cube.collapsed([coord1, coord2],
+                              operation,
+                              weights=grid_areas)
+
+    # Many iris analysis functions do not accept a weights argument.
+    return cube.collapsed([coord1, coord2], operation)
+
+
+def extract_named_regions(cube, regions):
+    """
+    Extract a specific named region.
+
+    The region coordinate exists in certain CMIP datasets.
+    This preprocessor allows specific named regions to be extracted.
+
+    Arguments
+    ---------
+    cube: iris.cube.Cube
+        input cube.
+
+    regions: str, list
+        A region or list of regions to extract.
+
+    Returns
+    -------
+    iris.cube.Cube
+        extracted cube.
+    """
+    # Make sure regions is a list of strings
+    if isinstance(regions, str):
+        regions = [regions, ]
+
+    if not isinstance(regions, (list, tuple, set)):
+        raise ValueError('Regions "{}" is not an acceptable format.'
+ ''.format(regions)) + + available_regions = set(cube.coord('region').points) + invalid_regions = set(regions) - available_regions + if invalid_regions: + raise ValueError('Region(s) "{}" not in cube region(s): ' + '{}'.format(invalid_regions, available_regions)) + + constraints = iris.Constraint(region=lambda r: r in regions) + cube = cube.extract(constraint=constraints) + return cube diff --git a/esmvaltool/preprocessor/_area_pp.py b/esmvaltool/preprocessor/_area_pp.py deleted file mode 100644 index 3b706b3c74..0000000000 --- a/esmvaltool/preprocessor/_area_pp.py +++ /dev/null @@ -1,163 +0,0 @@ -""" -Area operations on data cubes - -Allows for selecting data subsets using certain latitude and longitude bounds; -selecting geographical regions; constructing area averages; etc. -""" -import iris - - -# slice cube over a restricted area (box) -def area_slice(cube, start_longitude, end_longitude, start_latitude, - end_latitude): - """ - Subset a cube on area - - Function that subsets a cube on a box (start_longitude, end_longitude, - start_latitude, end_latitude) - This function is a restriction of masked_cube_lonlat(); - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - start_longitude: float - Western boundary longitude. - - end_longitude: float - Eastern boundary longitude. - - start_latitude: float - Southern Boundary latitude. - - end_latitude: float - Northern Boundary Latitude. - - Returns - ------- - iris.cube.Cube - smaller cube. - """ - # Converts Negative longitudes to 0 -> 360. standard - start_longitude = float(start_longitude) - end_longitude = float(end_longitude) - start_latitude = float(start_latitude) - end_latitude = float(end_latitude) - - region_subset = cube.intersection( - longitude=(start_longitude, end_longitude), - latitude=(start_latitude, end_latitude)) - region_subset = region_subset.intersection(longitude=(0., 360.)) - - return region_subset - - -# get zonal means -def zonal_means(cube, coordinate, mean_type): - """ - Get zonal means - - Function that returns zonal means along a coordinate `coordinate`; - the type of mean is controlled by mean_type variable (string): - 'mean' -> MEAN - 'stdev' -> STD_DEV - 'variance' -> VARIANCE - 'min' -> MIN - 'max' -> MAX - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - coordinate: str - name of coordinate to make mean - - mean_type: str - Type of analysis to use, from iris.analysis. - - Returns - ------- - iris.cube.Cube - Returns a cube - """ - if mean_type == 'mean': - result = cube.collapsed(coordinate, iris.analysis.MEAN) - elif mean_type == 'stdev': - result = cube.collapsed(coordinate, iris.analysis.STD_DEV) - elif mean_type == 'variance': - result = cube.collapsed(coordinate, iris.analysis.VARIANCE) - elif mean_type.lower() in ['minimum', 'min']: - result = cube.collapsed(coordinate, iris.analysis.MIN) - elif mean_type.lower() in ['maximum', 'max']: - result = cube.collapsed(coordinate, iris.analysis.MAX) - return result - - -# get the area average -def area_average(cube, coord1, coord2): - """ - Determine the area average. - - Can be used with coord1 and coord2 (strings, - usually 'longitude' and 'latitude' but depends on the cube); - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - coord1: str - name of first coordinate - - coord2: str - name of second coordinate - - Returns - ------- - iris.cube.Cube - collapsed cube. 
- """ - # check for bounds just in case - for coord in (coord1, coord2): - if not cube.coord(coord).has_bounds(): - cube.coord(coord).guess_bounds() - grid_areas = iris.analysis.cartography.area_weights(cube) - result = cube.collapsed( - [coord1, coord2], iris.analysis.MEAN, weights=grid_areas) - return result - - -# operate along a trajectory line -def trajectory_cube(cube, long1, long2, lat1, lat2, plong1, plong2, plat1, - plat2, samplecounts): - """ - Build a trajectory - - Function that subsets a cube on a box (long1,long2,lat1,lat2) - then creates a trajectory with waypoints (plong1,plong2,plat1, plat2), - populates it with samplecounts number of points - and subsets the cube along the trajectory - """ - from iris.analysis import trajectory - sublon = iris.Constraint( - longitude=lambda cell: float(long1) <= cell <= float(long2)) - sublat = iris.Constraint( - latitude=lambda cell: float(lat1) <= cell <= float(lat2)) - wspd_subset = cube.extract(sublon & sublat) - pnts = [{ - 'longitude': float(plong1), - 'latitude': float(plat1) - }, { - 'longitude': float(plong2), - 'latitude': float(plat2) - }] - traj = trajectory.Trajectory(pnts, sample_count=int(samplecounts)) - lon = [d['longitude'] for d in traj.sampled_points] - lat = [d['latitude'] for d in traj.sampled_points] - sampled_points = [('longitude', lon), ('latitude', lat)] - section = trajectory.interpolate(wspd_subset, sampled_points) - lon = wspd_subset.coord('longitude').points - lat = wspd_subset.coord('latitude').points - return section, lon, lat diff --git a/esmvaltool/preprocessor/_derive.py b/esmvaltool/preprocessor/_derive.py deleted file mode 100644 index 9238470229..0000000000 --- a/esmvaltool/preprocessor/_derive.py +++ /dev/null @@ -1,713 +0,0 @@ -"""Miscellaneous functions for deriving variables.""" - -import logging - -import cf_units -import iris -import numba -import numpy as np -import yaml -from iris import Constraint -from scipy import constants - -logger = logging.getLogger(__name__) - -Avogadro_const = constants.value('Avogadro constant') -Avogadro_const_unit = constants.unit('Avogadro constant') -g = 9.81 -g_unit = cf_units.Unit('m s^-2') -mw_air = 29 -mw_air_unit = cf_units.Unit('g mol^-1') -mw_O3 = 48 -mw_O3_unit = cf_units.Unit('g mol^-1') -Dobson_unit = cf_units.Unit('2.69e20 m^-2') - - -def get_required(short_name, field=None): - """Get variable short_name and field pairs required to derive variable""" - frequency = field[2] if field else 'M' - required = { - 'lwcre': [ - ('rlut', 'T2' + frequency + 's'), - ('rlutcs', 'T2' + frequency + 's'), - ], - 'lwp': [ - ('clwvi', 'T2' + frequency + 's'), - ('clivi', 'T2' + frequency + 's'), - ], - 'swcre': [ - ('rsut', 'T2' + frequency + 's'), - ('rsutcs', 'T2' + frequency + 's'), - ], - 'toz': [ - ('tro3', 'T3' + frequency), - ('ps', 'T2' + frequency + 's'), - ], - 'rtnt': [('rsdt', 'T2' + frequency + 's'), - ('rsut', 'T2' + frequency + 's'), ('rlut', - 'T2' + frequency + 's')], - 'rsnt': [ - ('rsdt', 'T2' + frequency + 's'), - ('rsut', 'T2' + frequency + 's'), - ], - 'rsns': [ - ('rsds', 'T2' + frequency + 's'), - ('rsus', 'T2' + frequency + 's'), - ], - 'rlns': [ - ('rlds', 'T2' + frequency + 's'), - ('rlus', 'T2' + frequency + 's'), - ], - 'cllmtisccp': [('clisccp', 'T4' + frequency)], - 'clltkisccp': [('clisccp', 'T4' + frequency)], - 'clmmtisccp': [('clisccp', 'T4' + frequency)], - 'clmtkisccp': [('clisccp', 'T4' + frequency)], - 'clhmtisccp': [('clisccp', 'T4' + frequency)], - 'clhtkisccp': [('clisccp', 'T4' + frequency)] - } - - if short_name in required: 
- return required[short_name] - - raise NotImplementedError("Don't know how to derive {}".format(short_name)) - - -def derive(cubes, variable): - """Derive variable""" - short_name = variable['short_name'] - # Do nothing if variable is already available - if short_name == cubes[0].var_name: - return cubes[0] - - # Available derivation functions - functions = { - 'lwcre': calc_lwcre, - 'lwp': calc_lwp, - 'swcre': calc_swcre, - 'toz': calc_toz, - 'rtnt': calc_rtnt, - 'rsnt': calc_rsnt, - 'rsns': calc_rsns, - 'rlns': calc_rlns, - 'cllmtisccp': calc_cllmtisccp, - 'clltkisccp': calc_clltkisccp, - 'clmmtisccp': calc_clmmtisccp, - 'clmtkisccp': calc_clmtkisccp, - 'clhmtisccp': calc_clhmtisccp, - 'clhtkisccp': calc_clhtkisccp - } - - if short_name not in functions: - raise NotImplementedError( - "Don't know how to derive {}".format(short_name)) - - # Preprare input cubes and derive - cubes = iris.cube.CubeList(cubes) - cube = functions[short_name](cubes) - - # Set standard attributes - cube.var_name = short_name - if variable['standard_name'] not in iris.std_names.STD_NAMES: - iris.std_names.STD_NAMES[variable['standard_name']] = { - 'canonical_units': variable['units'] - } - for attribute in ('standard_name', 'long_name', 'units'): - setattr(cube, attribute, variable[attribute]) - - # Set attributes required by preprocessor - cube.attributes['_filename'] = variable['filename'] - cube.attributes['metadata'] = yaml.safe_dump(variable) - - return cube - - -def calc_lwcre(cubes): - """Compute longwave cloud radiative effect from all-sky and clear-sky flux. - - Arguments - ---- - cubes: cubelist containing rlut (toa_outgoing_longwave_flux) and rlutcs - (toa_outgoing_longwave_flux_assuming_clear_sky). - - Returns - ------- - Cube containing longwave cloud radiative effect. - - """ - rlut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_longwave_flux')) - rlutcs_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_longwave_flux_assuming_clear_sky')) - - lwcre = rlutcs_cube - rlut_cube - lwcre.units = rlut_cube.units - - return lwcre - - -def calc_lwp(cubes): - """Compute liquid water path. - - Liquid water path is calculated by subtracting clivi (ice water) from clwvi - (condensed water path). - Note: Some datasets output the variable "clwvi" which only contains lwp. In - these cases, the input clwvi cube is just returned. - - Arguments - --------- - cubes: cubelist containing clwvi_cube and clivi_cube - - Returns - ------- - Cube containing liquid water path. - - """ - clwvi_cube = cubes.extract_strict( - Constraint(name='atmosphere_cloud_condensed_water_content')) - clivi_cube = cubes.extract_strict( - Constraint(name='atmosphere_cloud_ice_content')) - - dataset = clwvi_cube.attributes.get('model_id') - project = clwvi_cube.attributes.get('project_id') - # Should we check that the model_id/project_id are the same on both cubes? 
- - bad_datasets = [ - 'CESM1-CAM5-1-FV2', 'CESM1-CAM5', 'CMCC-CESM', 'CMCC-CM', 'CMCC-CMS', - 'IPSL-CM5A-MR', 'IPSL-CM5A-LR', 'IPSL-CM5B-LR', 'CCSM4', - 'IPSL-CM5A-MR', 'MIROC-ESM', 'MIROC-ESM-CHEM', 'MIROC-ESM', - 'CSIRO-Mk3-6-0', 'MPI-ESM-MR', 'MPI-ESM-LR', 'MPI-ESM-P' - ] - if ((project in ["CMIP5", "CMIP5_ETHZ"] and dataset in bad_datasets) - or (project == 'OBS' and dataset == 'UWisc')): - logger.info( - "Assuming that variable clwvi from %s dataset %s " - "contains only liquid water", project, dataset) - lwp_cube = clwvi_cube - else: - lwp_cube = clwvi_cube - clivi_cube - - return lwp_cube - - -def calc_swcre(cubes): - """Compute shortwave cloud radiative effect from all-sky and clear-sky - - flux. - - Arguments - ---- - cubes: cubelist containing rsut (toa_outgoing_shortwave_flux) and - rsutcs (toa_outgoing_shortwave_flux_assuming_clear_sky). - - Returns - ------- - Cube containing shortwave cloud radiative effect. - - """ - rsut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_shortwave_flux')) - rsutcs_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_shortwave_flux_assuming_clear_sky')) - - swcre = rsutcs_cube - rsut_cube - - return swcre - - -def calc_toz(cubes): - """Compute total column ozone from ozone mol fraction on pressure levels. - - The surface pressure is used as a lower integration bound. A fixed upper - integration bound of 100 Pa is used. - - Arguments - ---- - cubes: cubelist containing tro3_cube (mole_fraction_of_ozone_in_air) - and ps_cube (surface_air_pressure). - - Returns - ------- - Cube containing total column ozone. - - """ - tro3_cube = cubes.extract_strict( - Constraint(name='mole_fraction_of_ozone_in_air')) - ps_cube = cubes.extract_strict(Constraint(name='surface_air_pressure')) - - p_layer_widths = _pressure_level_widths(tro3_cube, ps_cube, top_limit=100) - toz = tro3_cube * p_layer_widths / g * mw_O3 / mw_air - toz = toz.collapsed('air_pressure', iris.analysis.SUM) - toz.units = (tro3_cube.units * p_layer_widths.units / g_unit * mw_O3_unit / - mw_air_unit) - - # Convert from kg m^-2 to Dobson unit (2.69e20 m^-2 ) - toz = toz / mw_O3 * Avogadro_const - toz.units = toz.units / mw_O3_unit * Avogadro_const_unit - toz.convert_units(Dobson_unit) - - return toz - - -def calc_rtnt(cubes): - """Compute rtnt: TOA Net downward Total Radiation. - - Arguments - ---- - cubes: cubelist containing rsut (toa_outgoing_shortwave_flux) and - rsdt (toa_incoming_shortwave_flux) and - rlut (toa_outgoing_longwave_flux). - - Returns - ------- - Cube containing TOA Net downward Total Radiation. - Units: W m-2 - - """ - rsdt_cube = cubes.extract_strict( - Constraint(name='toa_incoming_shortwave_flux')) - rsut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_shortwave_flux')) - rlut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_longwave_flux')) - - # rtnt = (rsdt - rsut) - rlut - rtnt = rsdt_cube - rsut_cube - rlut_cube - - return rtnt - - -def calc_rsnt(cubes): - """Compute rsnt: TOA Net downward Shortwave Radiation. - - Arguments - ---- - cubes: cubelist containing rsut (toa_outgoing_shortwave_flux) and - rsdt (toa_incoming_shortwave_flux). - - Returns - ------- - Cube containing TOA Net downward Shortwave Radiation. 
- Units: W m-2 - - """ - rsdt_cube = cubes.extract_strict( - Constraint(name='toa_incoming_shortwave_flux')) - rsut_cube = cubes.extract_strict( - Constraint(name='toa_outgoing_shortwave_flux')) - - # rsnt = rsdt - rsut - rsnt = rsdt_cube - rsut_cube - - return rsnt - - -def calc_rsns(cubes): - """Compute rsns: Surface Net downward Shortwave Radiation. - - Arguments - ---- - cubes: cubelist containing - rsus (surface_upwelling_shortwave_flux_in_air) and - rsds (surface_downwelling_shortwave_flux_in_air). - - Returns - ------- - Cube containing Surface Net downward Shortwave Radiation. - Units: W m-2 - - """ - rsds_cube = cubes.extract_strict( - Constraint(name='surface_downwelling_shortwave_flux_in_air')) - rsus_cube = cubes.extract_strict( - Constraint(name='surface_upwelling_shortwave_flux_in_air')) - - # rsns = rsds - rsus - rsns = rsds_cube - rsus_cube - - return rsns - - -def calc_rlns(cubes): - """Compute rlns: Surface Net downward Longwave Radiation. - - Arguments - ---- - cubes: cubelist containing - rlds (surface_downwelling_longwave_flux_in_air) and - rlus (surface_upwelling_longwave_flux_in_air). - - Returns - ------- - Cube containing Surface Net downward Longwave Radiation. - Units: W m-2 - - """ - rlds_cube = cubes.extract_strict( - Constraint(name='surface_downwelling_longwave_flux_in_air')) - rlus_cube = cubes.extract_strict( - Constraint(name='surface_upwelling_longwave_flux_in_air')) - - # rlns = rlds - rlus - rlns = rlds_cube - rlus_cube - - return rlns - - -def calc_cllmtisccp(cubes): - """Compute cllmtisccp: - - long name: ISCCP Low Level Medium-Thickness Cloud Area Fraction - short name: same - - Arguments - ---- - cubes: cubelist containing - clisccp(isccp_cloud_area_fraction) - - Returns - ------- - Cube: ISCCP Low Level Medium-Thickness Cloud Area Fraction. - Units: % - - """ - clisccp_cube = cubes.extract_strict( - Constraint(name='isccp_cloud_area_fraction')) - - tau = iris.Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = iris.Constraint(air_pressure=lambda p: p > 68000.) - cllmtisccp_cube = clisccp_cube - cllmtisccp_cube = cllmtisccp_cube.extract(tau & plev) - coord_names = [ - coord.standard_name for coord in cllmtisccp_cube.coords() - if len(coord.points) > 1 - ] - if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: - cllmtisccp_cube = cllmtisccp_cube.collapsed( - 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) - if 'air_pressure' in coord_names: - cllmtisccp_cube = cllmtisccp_cube.collapsed('air_pressure', - iris.analysis.SUM) - - return cllmtisccp_cube - - -def calc_clltkisccp(cubes): - """Compute clltkisccp: - - long name: ISCCP low level thick cloud area fraction - short name: same - - Arguments - ---- - cubes: cubelist containing - clisccp(isccp_cloud_area_fraction) - - Returns - ------- - Cube: ISCCP low level thick cloud area fraction. - Units: % - - """ - clisccp_cube = cubes.extract_strict( - Constraint(name='isccp_cloud_area_fraction')) - - tau = iris.Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = iris.Constraint(air_pressure=lambda p: p > 68000.) 
- clltkisccp_cube = clisccp_cube - clltkisccp_cube = clltkisccp_cube.extract(tau & plev) - coord_names = [ - coord.standard_name for coord in clltkisccp_cube.coords() - if len(coord.points) > 1 - ] - if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: - clltkisccp_cube = clltkisccp_cube.collapsed( - 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) - if 'air_pressure' in coord_names: - clltkisccp_cube = clltkisccp_cube.collapsed('air_pressure', - iris.analysis.SUM) - - return clltkisccp_cube - - -def calc_clmmtisccp(cubes): - """Compute clmmtisccp: - - long name: ISCCP Middle Level Medium-Thickness Cloud Area Fraction - short name: same - - Arguments - ---- - cubes: cubelist containing - clisccp(isccp_cloud_area_fraction) - - Returns - ------- - Cube: ISCCP Middle Level Medium-Thickness Cloud Area Fraction. - Units: % - - """ - clisccp_cube = cubes.extract_strict( - Constraint(name='isccp_cloud_area_fraction')) - - tau = iris.Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = iris.Constraint(air_pressure=lambda p: 44000. < p <= 68000.) - clmmtisccp_cube = clisccp_cube - clmmtisccp_cube = clmmtisccp_cube.extract(tau & plev) - coord_names = [ - coord.standard_name for coord in clmmtisccp_cube.coords() - if len(coord.points) > 1 - ] - if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: - clmmtisccp_cube = clmmtisccp_cube.collapsed( - 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) - if 'air_pressure' in coord_names: - clmmtisccp_cube = clmmtisccp_cube.collapsed('air_pressure', - iris.analysis.SUM) - - return clmmtisccp_cube - - -def calc_clmtkisccp(cubes): - """Compute clmtkisccp: - - long name: ISCCP Middle Level Thick Cloud Area Fraction - short name: same - - Arguments - ---- - cubes: cubelist containing - clisccp(isccp_cloud_area_fraction) - - Returns - ------- - Cube: ISCCP Middle Level Thick Cloud Area Fraction. - Units: % - - """ - clisccp_cube = cubes.extract_strict( - Constraint(name='isccp_cloud_area_fraction')) - - tau = iris.Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = iris.Constraint(air_pressure=lambda p: 44000. < p <= 68000.) - clmtkisccp_cube = clisccp_cube - clmtkisccp_cube = clmtkisccp_cube.extract(tau & plev) - coord_names = [ - coord.standard_name for coord in clmtkisccp_cube.coords() - if len(coord.points) > 1 - ] - if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: - clmtkisccp_cube = clmtkisccp_cube.collapsed( - 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) - if 'air_pressure' in coord_names: - clmtkisccp_cube = clmtkisccp_cube.collapsed('air_pressure', - iris.analysis.SUM) - - return clmtkisccp_cube - - -def calc_clhmtisccp(cubes): - """Compute clhmtisccp: - - long name: ISCCP High Level Medium-Thickness Cloud Area Fraction - short name: same - - Arguments - ---- - cubes: cubelist containing - clisccp(isccp_cloud_area_fraction) - - Returns - ------- - Cube: ISCCP High Level Medium-Thickness Cloud Area Fraction. - Units: % - - """ - clisccp_cube = cubes.extract_strict( - Constraint(name='isccp_cloud_area_fraction')) - - tau = iris.Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) - plev = iris.Constraint(air_pressure=lambda p: p <= 44000.) 
- clhmtisccp_cube = clisccp_cube - clhmtisccp_cube = clhmtisccp_cube.extract(tau & plev) - coord_names = [ - coord.standard_name for coord in clhmtisccp_cube.coords() - if len(coord.points) > 1 - ] - if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: - clhmtisccp_cube = clhmtisccp_cube.collapsed( - 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) - if 'air_pressure' in coord_names: - clhmtisccp_cube = clhmtisccp_cube.collapsed('air_pressure', - iris.analysis.SUM) - - return clhmtisccp_cube - - -def calc_clhtkisccp(cubes): - """Compute clhtkisccp: - - long name: ISCCP high level thick cloud area fraction - short name: same - - Arguments - ---- - cubes: cubelist containing - clisccp(isccp_cloud_area_fraction) - - Returns - ------- - Cube: ISCCP high level thick cloud area fraction. - Units: % - - """ - clisccp_cube = cubes.extract_strict( - Constraint(name='isccp_cloud_area_fraction')) - - tau = iris.Constraint( - atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) - plev = iris.Constraint(air_pressure=lambda p: p <= 44000.) - clhtkisccp_cube = clisccp_cube - clhtkisccp_cube = clhtkisccp_cube.extract(tau & plev) - coord_names = [ - coord.standard_name for coord in clhtkisccp_cube.coords() - if len(coord.points) > 1 - ] - if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: - clhtkisccp_cube = clhtkisccp_cube.collapsed( - 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) - if 'air_pressure' in coord_names: - clhtkisccp_cube = clhtkisccp_cube.collapsed('air_pressure', - iris.analysis.SUM) - - return clhtkisccp_cube - - -def _pressure_level_widths(tro3_cube, ps_cube, top_limit=100): - """Create a cube with pressure level widths. - - This is done by taking a 2D surface pressure field as lower bound. - - Arguments - --------- - tro3_cube: Cube containing mole_fraction_of_ozone_in_air - ps_cube: Surface air pressure cube. - top_limit: Pressure in Pa. - - Returns - ------- - Cube of same shape as tro3_cube containing pressure level widths. - - """ - assert ps_cube.units == 'Pa' - assert tro3_cube.coord('air_pressure').units == 'Pa' - - pressure_array = _create_pressure_array(tro3_cube, ps_cube, top_limit) - - data = _apply_pressure_level_widths(pressure_array) - p_level_widths_cube = tro3_cube.copy(data=data) - p_level_widths_cube.rename('pressure level widths') - p_level_widths_cube.units = ps_cube.units - - return p_level_widths_cube - - -def _create_pressure_array(tro3_cube, ps_cube, top_limit): - """Create an array filled with the 'air_pressure' coord values. - - The array is created from the tro3_cube with the same dimensions - as tro3_cube. This array is then sandwiched with a 2D array containing - the surface pressure, and a 2D array containing the top pressure limit. 
- """ - # create 4D array filled with pressure level values - p_levels = tro3_cube.coord('air_pressure').points - p_4d_array = iris.util.broadcast_to_shape(p_levels, tro3_cube.shape, [1]) - assert p_4d_array.shape == tro3_cube.shape - - # create 4d array filled with surface pressure values - shape = tro3_cube.shape - ps_4d_array = iris.util.broadcast_to_shape(ps_cube.data, shape, [0, 2, 3]) - assert ps_4d_array.shape == tro3_cube.shape - - # set pressure levels below the surface pressure to NaN - pressure_4d = np.where((ps_4d_array - p_4d_array) < 0, np.NaN, p_4d_array) - - # make top_limit last pressure level - top_limit_array = np.ones(ps_cube.shape) * top_limit - data = top_limit_array[:, np.newaxis, :, :] - pressure_4d = np.concatenate((pressure_4d, data), axis=1) - assert (pressure_4d[:, -1, :, :] == top_limit).all() - - # make surface pressure the first pressure level - data = ps_cube.data[:, np.newaxis, :, :] - pressure_4d = np.concatenate((data, pressure_4d), axis=1) - assert (pressure_4d[:, 0, :, :] == ps_cube.data).all() - - return pressure_4d - - -def _apply_pressure_level_widths(array, air_pressure_axis=1): - """Compute pressure level widths. - - For a 1D array with pressure level columns, return a 1D array with - pressure level widths. - """ - return np.apply_along_axis(_p_level_widths, air_pressure_axis, array) - - -@numba.jit() # ~10x faster -def _p_level_widths(array): - """Create pressure level widths from an array with pressure level values. - - The array is assumed to be monotonic and the values are decreasing. - - The first element is the lower boundary (surface pressure), the last value - is the upper boundary. Thicknesses are only calculated for the values - between these boundaries, the returned array, therefore, contains two - elements less. - - >>> _p_level_widths(np.array([1020, 1000, 700, 500, 5])) - array([170., 250., 595.]) - - >>> _p_level_widths(np.array([990, np.NaN, 700, 500, 5])) - array([ 0., 390., 595.]) - """ - surface_pressure = array[0] - top_limit = array[-1] - array = array[1:-1] - - p_level_widths = np.ones(array.shape) * np.NAN - - last_pressure_level = len(array) - 1 - for i, val in enumerate(array): - # numba would otherwise initialise it to 0 and - # hide bugs that would occur in raw Python - bounds_width = np.NAN - if np.isnan(val): - bounds_width = 0 - else: - # distance to lower bound - if i == 0 or np.isnan(array[i - 1]): - # first pressure level with value - dist_to_lower_bound = surface_pressure - val - else: - dist_to_lower_bound = 0.5 * (array[i - 1] - val) - - # distance to upper bound - if i == last_pressure_level: # last pressure level - dist_to_upper_bound = val - top_limit - else: - dist_to_upper_bound = 0.5 * (val - array[i + 1]) - - # Check monotonicity - all distances must be >= 0 - if dist_to_lower_bound < 0.0 or dist_to_upper_bound < 0.0: - raise ValueError('Pressure level value increased with height.') - - bounds_width = dist_to_lower_bound + dist_to_upper_bound - - p_level_widths[i] = bounds_width - return p_level_widths diff --git a/esmvaltool/preprocessor/_derive/__init__.py b/esmvaltool/preprocessor/_derive/__init__.py new file mode 100644 index 0000000000..8268a36a70 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/__init__.py @@ -0,0 +1,122 @@ +"""Automatically derive variables.""" + +import importlib +import logging +from copy import deepcopy +from pathlib import Path + +import iris + +logger = logging.getLogger(__name__) + + +def _get_all_derived_variables(): + """Get all possible derived variables. 
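+
+    Every module in this package whose name matches `[a-z]*.py` is imported
+    and its `DerivedVariable` class is registered under the module name.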
+ + Returns + ------- + dict + All derived variables with `short_name` (keys) and the associated + python classes (values). + + """ + derivers = {} + for path in Path(__file__).parent.glob('[a-z]*.py'): + short_name = path.stem + module = importlib.import_module( + f'esmvaltool.preprocessor._derive.{short_name}') + derivers[short_name] = getattr(module, 'DerivedVariable') + return derivers + + +ALL_DERIVED_VARIABLES = _get_all_derived_variables() + +__all__ = list(ALL_DERIVED_VARIABLES) + + +def get_required(short_name): + """Return all required variables for derivation. + + Get all information (at least `short_name`) required for derivation and + optionally a list of needed fx files. + + Parameters + ---------- + short_name : str + `short_name` of the variable to derive. + + Returns + ------- + list + List of dictionaries (including at least the key `short_name`) + and occasionally mip or fx_files. + + """ + DerivedVariable = ALL_DERIVED_VARIABLES[short_name] + variables = deepcopy(DerivedVariable().required) + return variables + + +def derive(cubes, + short_name, + long_name, + units, + standard_name=None, + fx_files=None): + """Derive variable. + + Parameters + ---------- + cubes: iris.cube.CubeList + Includes all the needed variables for derivation defined in + :func:`get_required`. + short_name: str + short_name + long_name: str + long_name + units: str + units + standard_name: str, optional + standard_name + fx_files: dict, optional + If required, dictionary containing fx files with `short_name` + (keys) and path (values) of the fx variable. + + Returns + ------- + iris.cube.Cube + The new derived variable. + + """ + if short_name == cubes[0].var_name: + return cubes[0] + + cubes = iris.cube.CubeList(cubes) + # Preprare input cubes and add fx files if necessary + if fx_files: + for (fx_var, fx_path) in fx_files.items(): + if fx_path is not None: + fx_cube = iris.load_cube( + fx_path, + constraint=iris.Constraint( + cube_func=lambda c, var=fx_var: c.var_name == var)) + cubes.append(fx_cube) + else: + logger.debug( + "Requested fx variable '%s' for derivation of " + "'%s' not found", fx_var, short_name) + + # Derive variable + DerivedVariable = ALL_DERIVED_VARIABLES[short_name] + cube = DerivedVariable().calculate(cubes) + + # Set standard attributes + cube.var_name = short_name + cube.standard_name = standard_name if standard_name else None + cube.long_name = long_name + cube.units = units + for temp in cubes: + if 'source_file' in temp.attributes: + cube.attributes['source_file'] = temp.attributes['source_file'] + + return cube diff --git a/esmvaltool/preprocessor/_derive/_baseclass.py b/esmvaltool/preprocessor/_derive/_baseclass.py new file mode 100644 index 0000000000..58844d9763 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/_baseclass.py @@ -0,0 +1,40 @@ +"""Contains the base class for derived variables.""" +from abc import abstractmethod + + +class DerivedVariableBase: + """Base class for derived variables.""" + + @property + @staticmethod + @abstractmethod + def required(): + """List of required variables for derivation.""" + + @staticmethod + @abstractmethod + def calculate(cubes): + """Compute desired derived variable. + + This method needs to be overridden in the child class belonging to the + desired variable to derive. + + Parameters + ---------- + cubes : iris.cube.CubeList + Includes all the needed variables (incl. fx variables) for + derivation defined in the static class variable + `_required_variables`. 
+ + Returns + ------- + iris.cube.Cube + New derived variable. + + Raises + ------ + NotImplementedError + If the desired variable derivation is not implemented, i.e. if this + method is called from this base class and not a child class. + + """ diff --git a/esmvaltool/preprocessor/_derive/_shared.py b/esmvaltool/preprocessor/_derive/_shared.py new file mode 100644 index 0000000000..63ebc08aad --- /dev/null +++ b/esmvaltool/preprocessor/_derive/_shared.py @@ -0,0 +1,82 @@ +"""Auxiliary derivation functions used for multiple variables.""" + +import logging + +import iris +from iris import Constraint + +logger = logging.getLogger(__name__) + + +def _get_land_fraction(cubes, standard_name, derive_from_ocean_fraction=False): + """Extract land fraction as :mod:`dask.array`.""" + cube = cubes.extract_strict(Constraint(name=standard_name)) + if derive_from_ocean_fraction: + fx_vars = ['sftof', 'sftlf'] + else: + fx_vars = ['sftlf'] + land_fraction = None + for fx_var in fx_vars: + if land_fraction is not None: + break + try: + fx_cube = cubes.extract_strict(_var_name_constraint(fx_var)) + except iris.exceptions.ConstraintMismatchError: + logger.debug( + "Cannot correct cube '%s' with '%s', fx file not found", + standard_name, fx_var) + else: + if not _shape_is_broadcastable(fx_cube.shape, cube.shape): + logger.debug("Cannot broadcast fx cube '%s' to cube '%s'", + fx_var, standard_name) + else: + if fx_var == 'sftof': + land_fraction = 1.0 - fx_cube.core_data() / 100.0 + else: + land_fraction = fx_cube.core_data() / 100.0 + logger.debug("Using fx cube '%s' to fix '%s'", fx_var, + standard_name) + return land_fraction + + +def _shape_is_broadcastable(shape_1, shape_2): + """Check if two :mod:`numpy.array' shapes are broadcastable.""" + return all((m == n) or (m == 1) or (n == 1) + for (m, n) in zip(shape_1[::-1], shape_2[::-1])) + + +def _var_name_constraint(var_name): + """:mod:`iris.Constraint` using `var_name` of a :mod:`iris.cube.Cube`.""" + return Constraint(cube_func=lambda c: c.var_name == var_name) + + +def cloud_area_fraction(cubes, tau_constraint, plev_constraint): + """Calculate cloud area fraction for different parameters.""" + clisccp_cube = cubes.extract_strict( + iris.Constraint(name='isccp_cloud_area_fraction')) + new_cube = clisccp_cube + new_cube = new_cube.extract(tau_constraint & plev_constraint) + coord_names = [ + coord.standard_name for coord in new_cube.coords() + if len(coord.points) > 1 + ] + if 'atmosphere_optical_thickness_due_to_cloud' in coord_names: + new_cube = new_cube.collapsed( + 'atmosphere_optical_thickness_due_to_cloud', iris.analysis.SUM) + if 'air_pressure' in coord_names: + new_cube = new_cube.collapsed('air_pressure', iris.analysis.SUM) + + return new_cube + + +def grid_area_correction(cubes, standard_name, ocean_var=False): + """Correct (flux) variable defined relative to land/sea area.""" + cube = cubes.extract_strict(Constraint(name=standard_name)) + core_data = cube.core_data() + land_fraction = _get_land_fraction( + cubes, standard_name, derive_from_ocean_fraction=ocean_var) + if land_fraction is not None: + if ocean_var: + land_fraction = 1.0 - land_fraction + cube.data = core_data * land_fraction + return cube diff --git a/esmvaltool/preprocessor/_derive/alb.py b/esmvaltool/preprocessor/_derive/alb.py new file mode 100644 index 0000000000..5cea242c5d --- /dev/null +++ b/esmvaltool/preprocessor/_derive/alb.py @@ -0,0 +1,36 @@ +"""Derivation of variable `alb`. 
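+
+Albedo is estimated here as the ratio of upwelling to downwelling shortwave
+flux at the surface (rsus / rsds).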
+
+authors:
+    - crez_ba
+
+"""
+
+from iris import Constraint
+
+from ._baseclass import DerivedVariableBase
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `alb`."""
+
+    # Required variables
+    required = [
+        {
+            'short_name': 'rsds'
+        },
+        {
+            'short_name': 'rsus'
+        },
+    ]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute surface albedo."""
+        rsds_cube = cubes.extract_strict(
+            Constraint(name='surface_downwelling_shortwave_flux_in_air'))
+        rsus_cube = cubes.extract_strict(
+            Constraint(name='surface_upwelling_shortwave_flux_in_air'))
+
+        alb_cube = rsus_cube / rsds_cube
+
+        return alb_cube
diff --git a/esmvaltool/preprocessor/_derive/amoc.py b/esmvaltool/preprocessor/_derive/amoc.py
new file mode 100644
index 0000000000..5095f74dde
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/amoc.py
@@ -0,0 +1,54 @@
+"""Derivation of variable `amoc`."""
+import iris
+import numpy as np
+
+from ._baseclass import DerivedVariableBase
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `amoc`."""
+
+    # Required variables
+    required = [{'short_name': 'msftmyz', 'mip': 'Omon'}]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute the Atlantic meridional overturning circulation.
+
+        Arguments
+        ---------
+        cubes: iris.cube.CubeList
+            input cubes.
+
+        Returns
+        -------
+        iris.cube.Cube
+            Output AMOC cube.
+        """
+        # 0. Load the msftmyz cube.
+        cube = cubes.extract_strict(
+            iris.Constraint(
+                name='ocean_meridional_overturning_mass_streamfunction'))
+
+        # 1: find the relevant region
+        atlantic_region = 'atlantic_arctic_ocean'
+        atl_constraint = iris.Constraint(region=atlantic_region)
+        cube = cube.extract(constraint=atl_constraint)
+
+        # 2: Remove the shallowest 500m to avoid the wind-driven mixed layer.
+        depth_constraint = iris.Constraint(depth=lambda d: d >= 500.)
+        cube = cube.extract(constraint=depth_constraint)
+
+        # 3: Find the latitude closest to 26.5N (the RAPID array location)
+        rapid_location = 26.5
+        lats = cube.coord('latitude').points
+        rapid_index = np.argmin(np.abs(lats - rapid_location))
+        rapid_constraint = iris.Constraint(latitude=lats[rapid_index])
+        cube = cube.extract(constraint=rapid_constraint)
+
+        # 4: find the maximum in the water column along the time axis.
+        cube = cube.collapsed(
+            ['depth', 'region'],
+            iris.analysis.MAX,
+        )
+        return cube
diff --git a/esmvaltool/preprocessor/_derive/clhmtisccp.py b/esmvaltool/preprocessor/_derive/clhmtisccp.py
new file mode 100644
index 0000000000..8eb9dd2358
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/clhmtisccp.py
@@ -0,0 +1,22 @@
+"""Derivation of variable `clhmtisccp`."""
+
+from iris import Constraint
+
+from ._baseclass import DerivedVariableBase
+from ._shared import cloud_area_fraction
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `clhmtisccp`."""
+
+    # Required variables
+    required = [{'short_name': 'clisccp'}]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute ISCCP high level medium-thickness cloud area fraction."""
+        tau = Constraint(
+            atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.)
+        plev = Constraint(air_pressure=lambda p: p <= 44000.)
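+        # ISCCP bins: high-level clouds have cloud-top pressure
+        # p <= 440 hPa; medium optical thickness is 3.6 < tau <= 23.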
+ + return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/clhtkisccp.py b/esmvaltool/preprocessor/_derive/clhtkisccp.py new file mode 100644 index 0000000000..3f73c8ff13 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/clhtkisccp.py @@ -0,0 +1,22 @@ +"""Derivation of variable `clhtkisccp`.""" + +from iris import Constraint + +from ._baseclass import DerivedVariableBase +from ._shared import cloud_area_fraction + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `clhtkisccp`.""" + + # Required variables + required = [{'short_name': 'clisccp'}] + + @staticmethod + def calculate(cubes): + """Compute ISCCP high level thick cloud area fraction.""" + tau = Constraint( + atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) + plev = Constraint(air_pressure=lambda p: p <= 44000.) + + return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/cllmtisccp.py b/esmvaltool/preprocessor/_derive/cllmtisccp.py new file mode 100644 index 0000000000..e6564142b5 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/cllmtisccp.py @@ -0,0 +1,22 @@ +"""Derivation of variable `cllmtisccp`.""" + +from iris import Constraint + +from ._baseclass import DerivedVariableBase +from ._shared import cloud_area_fraction + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `cllmtisccp`.""" + + # Required variables + required = [{'short_name': 'clisccp'}] + + @staticmethod + def calculate(cubes): + """Compute ISCCP low level medium-thickness cloud area fraction.""" + tau = Constraint( + atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) + plev = Constraint(air_pressure=lambda p: p > 68000.) + + return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/clltkisccp.py b/esmvaltool/preprocessor/_derive/clltkisccp.py new file mode 100644 index 0000000000..79da0ee280 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/clltkisccp.py @@ -0,0 +1,22 @@ +"""Derivation of variable `clltkisccp`.""" + +from iris import Constraint + +from ._baseclass import DerivedVariableBase +from ._shared import cloud_area_fraction + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `clltkisccp`.""" + + # Required variables + required = [{'short_name': 'clisccp'}] + + @staticmethod + def calculate(cubes): + """Compute ISCCP low level thick cloud area fraction.""" + tau = Constraint( + atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.) + plev = Constraint(air_pressure=lambda p: p > 68000.) + + return cloud_area_fraction(cubes, tau, plev) diff --git a/esmvaltool/preprocessor/_derive/clmmtisccp.py b/esmvaltool/preprocessor/_derive/clmmtisccp.py new file mode 100644 index 0000000000..fc70859c4d --- /dev/null +++ b/esmvaltool/preprocessor/_derive/clmmtisccp.py @@ -0,0 +1,22 @@ +"""Derivation of variable `clmmtisccp`.""" + +from iris import Constraint + +from ._baseclass import DerivedVariableBase +from ._shared import cloud_area_fraction + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `clmmtisccp`.""" + + # Required variables + required = [{'short_name': 'clisccp'}] + + @staticmethod + def calculate(cubes): + """Compute ISCCP middle level medium-thickness cloud area fraction.""" + tau = Constraint( + atmosphere_optical_thickness_due_to_cloud=lambda t: 3.6 < t <= 23.) + plev = Constraint(air_pressure=lambda p: 44000. < p <= 68000.) 
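+        # ISCCP bins: middle-level clouds have cloud-top pressure
+        # 440 hPa < p <= 680 hPa; medium optical thickness is 3.6 < tau <= 23.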
+
+        return cloud_area_fraction(cubes, tau, plev)
diff --git a/esmvaltool/preprocessor/_derive/clmtkisccp.py b/esmvaltool/preprocessor/_derive/clmtkisccp.py
new file mode 100644
index 0000000000..eb6ef501a7
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/clmtkisccp.py
@@ -0,0 +1,22 @@
+"""Derivation of variable `clmtkisccp`."""
+
+from iris import Constraint
+
+from ._baseclass import DerivedVariableBase
+from ._shared import cloud_area_fraction
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `clmtkisccp`."""
+
+    # Required variables
+    required = [{'short_name': 'clisccp'}]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute ISCCP middle level thick cloud area fraction."""
+        tau = Constraint(
+            atmosphere_optical_thickness_due_to_cloud=lambda t: t > 23.)
+        plev = Constraint(air_pressure=lambda p: 44000. < p <= 68000.)
+
+        return cloud_area_fraction(cubes, tau, plev)
diff --git a/esmvaltool/preprocessor/_derive/fgco2_grid.py b/esmvaltool/preprocessor/_derive/fgco2_grid.py
new file mode 100644
index 0000000000..49a2290abe
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/fgco2_grid.py
@@ -0,0 +1,31 @@
+"""Derivation of variable `fgco2_grid`."""
+from ._baseclass import DerivedVariableBase
+from ._shared import grid_area_correction
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `fgco2_grid`."""
+
+    # Required variables
+    required = [{
+        'short_name': 'fgco2',
+        'fx_files': ['sftof', 'sftlf'],
+    }]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute gas exchange flux of CO2 relative to grid cell area.
+
+        Note
+        ----
+        By default, `fgco2` is defined relative to sea area. For spatial
+        integration, the original quantity is multiplied by the sea area
+        fraction (`sftof`), so that the resulting derived variable is defined
+        relative to the grid cell area. This correction is only relevant for
+        coastal regions.
+
+        """
+        return grid_area_correction(
+            cubes,
+            'surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon',
+            ocean_var=True)
diff --git a/esmvaltool/preprocessor/_derive/gtfgco2.py b/esmvaltool/preprocessor/_derive/gtfgco2.py
new file mode 100644
index 0000000000..f8295c8c6e
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/gtfgco2.py
@@ -0,0 +1,81 @@
+"""Derivation of variable `gtfgco2`."""
+import iris
+import numpy as np
+
+from ._baseclass import DerivedVariableBase
+
+
+def calculate_total_flux(fgco2_cube, cube_area):
+    """
+    Calculate the total CO2 flux over the unmasked cube cells.
+
+    Requires a cube with two spatial dimensions (no depth coordinate).
+
+    Parameters
+    ----------
+    fgco2_cube: iris.cube.Cube
+        Data cube
+    cube_area: iris.cube.Cube
+        Cell area cube
+
+    Returns
+    -------
+    numpy.array:
+        A numpy array containing the total flux of CO2.
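+        The sum is taken over unmasked cells only, for each time step.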
+
+    """
+    data = []
+    times = fgco2_cube.coord('time')
+
+    fgco2_cube.data = np.ma.array(fgco2_cube.data)
+    for time_itr in np.arange(len(times.points)):
+
+        total_flux = fgco2_cube[time_itr].data * cube_area.data
+
+        total_flux = np.ma.masked_where(fgco2_cube[time_itr].data.mask,
+                                        total_flux)
+        data.append(total_flux.sum())
+
+    # Collect the per-time-step totals into an output array
+    data = np.array(data)
+    return data
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `gtfgco2`."""
+
+    # Required variables
+    required = [
+        {
+            'short_name': 'fgco2',
+            'mip': 'Omon',
+            'fx_files': [
+                'areacello',
+            ],
+        },
+    ]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute the global total gas exchange flux of CO2."""
+        fgco2_cube = cubes.extract_strict(
+            iris.Constraint(name='surface_downward_mass_flux_of_carbon_dioxide'
+                            '_expressed_as_carbon'))
+
+        try:
+            cube_area = cubes.extract_strict(iris.Constraint(name='cell_area'))
+        except iris.exceptions.ConstraintMismatchError:
+            # Without the areacello fx file there is no cell_area cube and
+            # the flux calculation below cannot proceed.
+            pass
+
+        total_flux = calculate_total_flux(fgco2_cube, cube_area)
+
+        # Build a result cube with the right metadata by collapsing, then
+        # overwrite its data with the computed totals.
+        result = fgco2_cube.collapsed(
+            ['latitude', 'longitude'],
+            iris.analysis.MEAN,
+        )
+        result.units = fgco2_cube.units * cube_area.units
+
+        result.data = total_flux
+        return result
diff --git a/esmvaltool/preprocessor/_derive/lwcre.py b/esmvaltool/preprocessor/_derive/lwcre.py
new file mode 100644
index 0000000000..3526562fec
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/lwcre.py
@@ -0,0 +1,32 @@
+"""Derivation of variable `lwcre`."""
+
+from iris import Constraint
+
+from ._baseclass import DerivedVariableBase
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `lwcre`."""
+
+    # Required variables
+    required = [
+        {
+            'short_name': 'rlut'
+        },
+        {
+            'short_name': 'rlutcs'
+        },
+    ]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute longwave cloud radiative effect."""
+        rlut_cube = cubes.extract_strict(
+            Constraint(name='toa_outgoing_longwave_flux'))
+        rlutcs_cube = cubes.extract_strict(
+            Constraint(name='toa_outgoing_longwave_flux_assuming_clear_sky'))
+
+        lwcre_cube = rlutcs_cube - rlut_cube
+        lwcre_cube.units = rlut_cube.units
+
+        return lwcre_cube
diff --git a/esmvaltool/preprocessor/_derive/lwp.py b/esmvaltool/preprocessor/_derive/lwp.py
new file mode 100644
index 0000000000..6b795c5665
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/lwp.py
@@ -0,0 +1,72 @@
+"""Derivation of variable `lwp`."""
+
+import logging
+
+from iris import Constraint
+
+from ._baseclass import DerivedVariableBase
+
+logger = logging.getLogger(__name__)
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `lwp`."""
+
+    # Required variables
+    required = [
+        {
+            'short_name': 'clwvi'
+        },
+        {
+            'short_name': 'clivi'
+        },
+    ]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute liquid water path.
+
+        Note
+        ----
+        Some datasets output the variable `clwvi` which only contains `lwp`.
+        In these cases, the input `clwvi` cube is just returned.
+
+        """
+        clwvi_cube = cubes.extract_strict(
+            Constraint(name='atmosphere_cloud_condensed_water_content'))
+        clivi_cube = cubes.extract_strict(
+            Constraint(name='atmosphere_cloud_ice_content'))
+
+        dataset = clwvi_cube.attributes.get('model_id')
+        project = clwvi_cube.attributes.get('project_id')
+        # Should we check that the model_id/project_id are the same on both
+        # cubes?
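+        # For the datasets listed below, clwvi contains only the liquid
+        # water path, so subtracting clivi would incorrectly remove the ice
+        # content from a liquid-only field.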
+ + bad_datasets = [ + 'CESM1-CAM5-1-FV2', + 'CESM1-CAM5', + 'CMCC-CESM', + 'CMCC-CM', + 'CMCC-CMS', + 'IPSL-CM5A-MR', + 'IPSL-CM5A-LR', + 'IPSL-CM5B-LR', + 'CCSM4', + 'IPSL-CM5A-MR', + 'MIROC-ESM', + 'MIROC-ESM-CHEM', + 'MIROC-ESM', + 'CSIRO-Mk3-6-0', + 'MPI-ESM-MR', + 'MPI-ESM-LR', + 'MPI-ESM-P', + ] + if (project in ["CMIP5", "CMIP5_ETHZ"] and dataset in bad_datasets): + logger.info( + "Assuming that variable clwvi from %s dataset %s " + "contains only liquid water", project, dataset) + lwp_cube = clwvi_cube + else: + lwp_cube = clwvi_cube - clivi_cube + + return lwp_cube diff --git a/esmvaltool/preprocessor/_derive/nbp_grid.py b/esmvaltool/preprocessor/_derive/nbp_grid.py new file mode 100644 index 0000000000..54a6b95ce6 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/nbp_grid.py @@ -0,0 +1,31 @@ +"""Derivation of variable `nbp_grid`.""" +from ._baseclass import DerivedVariableBase +from ._shared import grid_area_correction + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `nbp_grid`.""" + + # Required variables + required = [{ + 'short_name': 'nbp', + 'fx_files': ['sftlf'], + }] + + @staticmethod + def calculate(cubes): + """Compute net biome production relative to grid cell area. + + Note + ---- + By default, `nbp` is defined relative to land area. For spatial + integration, the original quantity is multiplied by the land area + fraction (`sftlf`), so that the resuting derived variable is defined + relative to the grid cell area. This correction is only relevant for + coastal regions. + + """ + return grid_area_correction( + cubes, + 'surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_' + 'carbon_due_to_all_land_processes') diff --git a/esmvaltool/preprocessor/_derive/netcre.py b/esmvaltool/preprocessor/_derive/netcre.py new file mode 100644 index 0000000000..7ee9a496a8 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/netcre.py @@ -0,0 +1,44 @@ +"""Derivation of variable `netcre`.""" + +from ._baseclass import DerivedVariableBase +from .lwcre import DerivedVariable as Lwcre +from .swcre import DerivedVariable as Swcre + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `netcre`.""" + + # Required variables + required = [ + { + 'short_name': 'rlut' + }, + { + 'short_name': 'rlutcs' + }, + { + 'short_name': 'rsut' + }, + { + 'short_name': 'rsutcs' + }, + ] + + @staticmethod + def calculate(cubes): + """Compute net cloud radiative effect. + + Note + ---- + Calculate net cloud radiative effect as sum of longwave and shortwave + cloud radiative effects. 
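+
+        In terms of the input fluxes:
+        netcre = lwcre + swcre = (rlutcs - rlut) + (rsutcs - rsut).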
+ """ + lwcre_var = Lwcre() + swcre_var = Swcre() + lwcre_cube = lwcre_var.calculate(cubes) + swcre_cube = swcre_var.calculate(cubes) + + netcre_cube = lwcre_cube + swcre_cube + netcre_cube.units = lwcre_cube.units + + return netcre_cube diff --git a/esmvaltool/preprocessor/_derive/rlns.py b/esmvaltool/preprocessor/_derive/rlns.py new file mode 100644 index 0000000000..2a80e6386a --- /dev/null +++ b/esmvaltool/preprocessor/_derive/rlns.py @@ -0,0 +1,31 @@ +"""Derivation of variable `rlns`.""" + +from iris import Constraint + +from ._baseclass import DerivedVariableBase + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `rlns`.""" + + # Required variables + required = [ + { + 'short_name': 'rlds' + }, + { + 'short_name': 'rlus' + }, + ] + + @staticmethod + def calculate(cubes): + """Compute surface net downward longwave radiation.""" + rlds_cube = cubes.extract_strict( + Constraint(name='surface_downwelling_longwave_flux_in_air')) + rlus_cube = cubes.extract_strict( + Constraint(name='surface_upwelling_longwave_flux_in_air')) + + rlns_cube = rlds_cube - rlus_cube + + return rlns_cube diff --git a/esmvaltool/preprocessor/_derive/rsns.py b/esmvaltool/preprocessor/_derive/rsns.py new file mode 100644 index 0000000000..31a6bad867 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/rsns.py @@ -0,0 +1,31 @@ +"""Derivation of variable `rsns`.""" + +from iris import Constraint + +from ._baseclass import DerivedVariableBase + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `rsns`.""" + + # Required variables + required = [ + { + 'short_name': 'rsds' + }, + { + 'short_name': 'rsus' + }, + ] + + @staticmethod + def calculate(cubes): + """Compute surface net downward shortwave radiation.""" + rsds_cube = cubes.extract_strict( + Constraint(name='surface_downwelling_shortwave_flux_in_air')) + rsus_cube = cubes.extract_strict( + Constraint(name='surface_upwelling_shortwave_flux_in_air')) + + rsns_cube = rsds_cube - rsus_cube + + return rsns_cube diff --git a/esmvaltool/preprocessor/_derive/rsnt.py b/esmvaltool/preprocessor/_derive/rsnt.py new file mode 100644 index 0000000000..1a71035e3b --- /dev/null +++ b/esmvaltool/preprocessor/_derive/rsnt.py @@ -0,0 +1,31 @@ +"""Derivation of variable `rsnt`.""" + +from iris import Constraint + +from ._baseclass import DerivedVariableBase + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `rsnt`.""" + + # Required variables + required = [ + { + 'short_name': 'rsdt' + }, + { + 'short_name': 'rsut' + }, + ] + + @staticmethod + def calculate(cubes): + """Compute toa net downward shortwave radiation.""" + rsdt_cube = cubes.extract_strict( + Constraint(name='toa_incoming_shortwave_flux')) + rsut_cube = cubes.extract_strict( + Constraint(name='toa_outgoing_shortwave_flux')) + + rsnt_cube = rsdt_cube - rsut_cube + + return rsnt_cube diff --git a/esmvaltool/preprocessor/_derive/rtnt.py b/esmvaltool/preprocessor/_derive/rtnt.py new file mode 100644 index 0000000000..590b296e58 --- /dev/null +++ b/esmvaltool/preprocessor/_derive/rtnt.py @@ -0,0 +1,36 @@ +"""Derivation of variable `rtnt`.""" + +from iris import Constraint + +from ._baseclass import DerivedVariableBase + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `rtnt`.""" + + # Required variables + required = [ + { + 'short_name': 'rsdt' + }, + { + 'short_name': 'rsut' + }, + { + 'short_name': 'rlut' + }, + ] + + @staticmethod + def calculate(cubes): + """Compute toa net downward total radiation.""" + rsdt_cube = 
cubes.extract_strict(
+            Constraint(name='toa_incoming_shortwave_flux'))
+        rsut_cube = cubes.extract_strict(
+            Constraint(name='toa_outgoing_shortwave_flux'))
+        rlut_cube = cubes.extract_strict(
+            Constraint(name='toa_outgoing_longwave_flux'))
+
+        rtnt_cube = rsdt_cube - rsut_cube - rlut_cube
+
+        return rtnt_cube
diff --git a/esmvaltool/preprocessor/_derive/sm.py b/esmvaltool/preprocessor/_derive/sm.py
new file mode 100644
index 0000000000..be69f13803
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/sm.py
@@ -0,0 +1,37 @@
+"""Derivation of variable `sm`."""
+
+import cf_units
+import numpy as np
+from iris import Constraint
+
+from ._baseclass import DerivedVariableBase
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `sm`."""
+
+    # Required variables
+    required = [{'short_name': 'mrsos'}]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute soil moisture.
+
+        Note
+        ----
+        Convert moisture content of soil layer (kg/m2) into volumetric soil
+        moisture (m3/m3), assuming a density of water of 998.2 kg/m3 (at a
+        temperature of 20 deg C).
+
+        """
+        mrsos_cube = cubes.extract_strict(
+            Constraint(name='moisture_content_of_soil_layer'))
+
+        depth = mrsos_cube.coord('depth').bounds
+        layer_thickness = depth[..., 1] - depth[..., 0]
+
+        sm_cube = mrsos_cube / layer_thickness / 998.2
+        sm_cube.units = cf_units.Unit('m3 m^-3')
+        sm_cube.data = np.ma.array(sm_cube.data, dtype=np.dtype('float32'))
+
+        return sm_cube
diff --git a/esmvaltool/preprocessor/_derive/swcre.py b/esmvaltool/preprocessor/_derive/swcre.py
new file mode 100644
index 0000000000..ae89777547
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/swcre.py
@@ -0,0 +1,31 @@
+"""Derivation of variable `swcre`."""
+
+from iris import Constraint
+
+from ._baseclass import DerivedVariableBase
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `swcre`."""
+
+    # Required variables
+    required = [
+        {
+            'short_name': 'rsut'
+        },
+        {
+            'short_name': 'rsutcs'
+        },
+    ]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute shortwave cloud radiative effect."""
+        rsut_cube = cubes.extract_strict(
+            Constraint(name='toa_outgoing_shortwave_flux'))
+        rsutcs_cube = cubes.extract_strict(
+            Constraint(name='toa_outgoing_shortwave_flux_assuming_clear_sky'))
+
+        swcre_cube = rsutcs_cube - rsut_cube
+
+        return swcre_cube
diff --git a/esmvaltool/preprocessor/_derive/toz.py b/esmvaltool/preprocessor/_derive/toz.py
new file mode 100644
index 0000000000..b3d101389a
--- /dev/null
+++ b/esmvaltool/preprocessor/_derive/toz.py
@@ -0,0 +1,192 @@
+"""Derivation of variable `toz`."""
+
+import cf_units
+import iris
+import numba
+import numpy as np
+from scipy import constants
+
+from ._baseclass import DerivedVariableBase
+
+# Constants
+AVOGADRO_CONST = constants.value('Avogadro constant')
+AVOGADRO_CONST_UNIT = constants.unit('Avogadro constant')
+STANDARD_GRAVITY = 9.81
+STANDARD_GRAVITY_UNIT = cf_units.Unit('m s^-2')
+MW_AIR = 29
+MW_AIR_UNIT = cf_units.Unit('g mol^-1')
+MW_O3 = 48
+MW_O3_UNIT = cf_units.Unit('g mol^-1')
+DOBSON_UNIT = cf_units.Unit('2.69e20 m^-2')
+
+
+class DerivedVariable(DerivedVariableBase):
+    """Derivation of variable `toz`."""
+
+    # Required variables
+    required = [
+        {
+            'short_name': 'tro3'
+        },
+        {
+            'short_name': 'ps'
+        },
+    ]
+
+    @staticmethod
+    def calculate(cubes):
+        """Compute total column ozone.
+
+        Note
+        ----
+        The surface pressure is used as a lower integration bound. A fixed
+        upper integration bound of 0 Pa is used.
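+
+        Schematically, for each column
+
+            toz = sum_i [ tro3(p_i) * dp_i ] * MW_O3 / (MW_AIR * g),
+
+        converted from kg m-2 to Dobson units.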
+ + """ + tro3_cube = cubes.extract_strict( + iris.Constraint(name='mole_fraction_of_ozone_in_air')) + ps_cube = cubes.extract_strict( + iris.Constraint(name='surface_air_pressure')) + + p_layer_widths = _pressure_level_widths( + tro3_cube, ps_cube, top_limit=0.0) + toz_cube = ( + tro3_cube * p_layer_widths / STANDARD_GRAVITY * MW_O3 / MW_AIR) + toz_cube = toz_cube.collapsed('air_pressure', iris.analysis.SUM) + toz_cube.units = (tro3_cube.units * p_layer_widths.units / + STANDARD_GRAVITY_UNIT * MW_O3_UNIT / MW_AIR_UNIT) + + # Convert from kg m^-2 to Dobson unit (2.69e20 m^-2 ) + toz_cube = toz_cube / MW_O3 * AVOGADRO_CONST + toz_cube.units = toz_cube.units / MW_O3_UNIT * AVOGADRO_CONST_UNIT + toz_cube.convert_units(DOBSON_UNIT) + toz_cube.data = np.ma.array(toz_cube.data, dtype=np.dtype('float32')) + + return toz_cube + + +# Helper functions +def _pressure_level_widths(tro3_cube, ps_cube, top_limit=0.0): + """Create a cube with pressure level widths. + + This is done by taking a 2D surface pressure field as lower bound. + + Parameters + ---------- + tro3_cube : iris.cube.Cube + `Cube` containing `mole_fraction_of_ozone_in_air`. + ps_cube : iris.cube.Cube + `Cube` containing `surface_air_pressure`. + top_limit : double + Pressure in Pa. + + Returns + ------- + iris.cube.Cube + `Cube` of same shape as `tro3_cube` containing pressure level widths. + + """ + pressure_array = _create_pressure_array(tro3_cube, ps_cube, top_limit) + + data = _apply_pressure_level_widths(pressure_array) + p_level_widths_cube = tro3_cube.copy(data=data) + p_level_widths_cube.rename('pressure level widths') + p_level_widths_cube.units = ps_cube.units + + return p_level_widths_cube + + +def _create_pressure_array(tro3_cube, ps_cube, top_limit): + """Create an array filled with the `air_pressure` coord values. + + The array is created from the `tro3_cube` with the same dimensions + as `tro3_cube`. This array is then sandwiched with a 2D array + containing the surface pressure and a 2D array containing the top + pressure limit. + """ + # Create 4D array filled with pressure level values + p_levels = tro3_cube.coord('air_pressure').points + p_4d_array = iris.util.broadcast_to_shape(p_levels, tro3_cube.shape, [1]) + + # Create 4d array filled with surface pressure values + shape = tro3_cube.shape + ps_4d_array = iris.util.broadcast_to_shape(ps_cube.data, shape, [0, 2, 3]) + + # Set pressure levels below the surface pressure to NaN + pressure_4d = np.where((ps_4d_array - p_4d_array) < 0, np.NaN, p_4d_array) + + # Make top_limit last pressure level + top_limit_array = np.ones(ps_cube.shape) * top_limit + data = top_limit_array[:, np.newaxis, :, :] + pressure_4d = np.concatenate((pressure_4d, data), axis=1) + + # Make surface pressure the first pressure level + data = ps_cube.data[:, np.newaxis, :, :] + pressure_4d = np.concatenate((data, pressure_4d), axis=1) + + return pressure_4d + + +def _apply_pressure_level_widths(array, air_pressure_axis=1): + """Compute pressure level widths. + + For a 1D array with pressure level columns, return a 1D array with + pressure level widths. + """ + return np.apply_along_axis(_p_level_widths, air_pressure_axis, array) + + +@numba.jit() # ~10x faster +def _p_level_widths(array): + """Create pressure level widths. + + The array with pressure levels is assumed to be monotonic and the + values are decreasing. + + The first element is the lower boundary (surface pressure), the last + value is the upper boundary. 
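+    Both boundaries are inserted by `_create_pressure_array`, which
+    sandwiches each pressure column between the surface pressure and the
+    top limit.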
Thicknesses are only calculated for the
+    values between these boundaries; the returned array therefore
+    contains two fewer elements.
+
+    >>> _p_level_widths(np.array([1020, 1000, 700, 500, 5]))
+    array([170., 250., 595.])
+
+    >>> _p_level_widths(np.array([990, np.NaN, 700, 500, 5]))
+    array([  0., 390., 595.])
+    """
+    surface_pressure = array[0]
+    top_limit = array[-1]
+    array = array[1:-1]
+
+    p_level_widths = np.ones(array.shape) * np.NAN
+
+    last_pressure_level = len(array) - 1
+    for i, val in enumerate(array):
+        # numba would otherwise initialize it to 0 and
+        # hide bugs that would occur in raw Python
+        bounds_width = np.NAN
+        if np.isnan(val):
+            bounds_width = 0
+        else:
+            # Distance to lower bound
+            if i == 0 or np.isnan(array[i - 1]):
+                # First pressure level with value
+                dist_to_lower_bound = surface_pressure - val
+            else:
+                dist_to_lower_bound = 0.5 * (array[i - 1] - val)
+
+            # Distance to upper bound
+            if i == last_pressure_level:  # last pressure level
+                dist_to_upper_bound = val - top_limit
+            else:
+                dist_to_upper_bound = 0.5 * (val - array[i + 1])
+
+            # Check monotonicity - all distances must be >= 0
+            if dist_to_lower_bound < 0.0 or dist_to_upper_bound < 0.0:
+                raise ValueError("Pressure level value increased with "
+                                 "height.")
+
+            bounds_width = dist_to_lower_bound + dist_to_upper_bound
+
+        p_level_widths[i] = bounds_width
+    return p_level_widths
diff --git a/esmvaltool/preprocessor/_download.py b/esmvaltool/preprocessor/_download.py
index 8bc18e5df5..d1272f86e7 100644
--- a/esmvaltool/preprocessor/_download.py
+++ b/esmvaltool/preprocessor/_download.py
@@ -11,7 +11,7 @@ def synda_search(variable):
     """Search files using synda."""
     query = {
-        'dataset': variable.get('dataset'),
+        'model': variable.get('dataset'),
         'project': variable.get('project'),
         'cmor_table': variable.get('mip'),
         'ensemble': variable.get('ensemble'),
@@ -69,8 +69,7 @@ def synda_download(synda_name, dest_folder):
 
 def download(files, dest_folder):
     """Download files that are not available locally"""
-    if not os.path.exists(dest_folder):
-        os.makedirs(dest_folder)
+    os.makedirs(dest_folder, exist_ok=True)
 
     local_files = []
     for name in files:
diff --git a/esmvaltool/preprocessor/_io.py b/esmvaltool/preprocessor/_io.py
index 45d30df5f4..71e7144058 100644
--- a/esmvaltool/preprocessor/_io.py
+++ b/esmvaltool/preprocessor/_io.py
@@ -1,13 +1,17 @@
-"""Functions for loading and saving cubes"""
+"""Functions for loading and saving cubes."""
+import copy
 import logging
 import os
 import shutil
+from collections import OrderedDict
 from itertools import groupby
 
+import numpy as np
 import iris
 import iris.exceptions
 import yaml
 
+from .._config import use_legacy_iris
 from .._task import write_ncl_settings
 
 logger = logging.getLogger(__name__)
@@ -47,88 +51,58 @@ def concatenate_callback(raw_cube, field, _):
             coord.units = units
 
 
-def load_cubes(files, filename, metadata, constraints=None, callback=None):
-    """Load iris cubes from files"""
-    logger.debug("Loading:\n%s", "\n".join(files))
-    cubes = iris.load_raw(files, constraints=constraints, callback=callback)
-    iris.util.unify_time_units(cubes)
-    if not cubes:
-        raise Exception('Can not load cubes from {0}'.format(files))
+def load(file, callback=None):
+    """Load iris cubes from a file."""
+    logger.debug("Loading:\n%s", file)
+    raw_cubes = iris.load_raw(file, callback=callback)
+    if not raw_cubes:
+        raise Exception('Cannot load cubes from {0}'.format(file))
+    for cube in raw_cubes:
+        cube.attributes['source_file'] = file
+    return raw_cubes
 
-    for cube in cubes:
-
cube.attributes['_filename'] = filename - cube.attributes['metadata'] = yaml.safe_dump(metadata) - # TODO add block below when using iris 2.0 - # always set fillvalue to 1e+20 - # if np.ma.is_masked(cube.data): - # np.ma.set_fill_value(cube.data, GLOBAL_FILL_VALUE) - return cubes +def _fix_cube_attributes(cubes): + """Unify attributes of different cubes to allow concatenation.""" + attributes = {} + for cube in cubes: + for (attr, val) in cube.attributes.items(): + if attr not in attributes: + attributes[attr] = val + else: + if not np.array_equal(val, attributes[attr]): + attributes[attr] = '{};{}'.format( + str(attributes[attr]), str(val)) + for cube in cubes: + cube.attributes = attributes def concatenate(cubes): - """Concatenate all cubes after fixing metadata""" + """Concatenate all cubes after fixing metadata.""" + _fix_cube_attributes(cubes) try: cube = iris.cube.CubeList(cubes).concatenate_cube() return cube except iris.exceptions.ConcatenateError as ex: logger.error('Can not concatenate cubes: %s', ex) - logger.error('Differences: %s', ex.differences) logger.error('Cubes:') for cube in cubes: logger.error(cube) - raise ConcatenationError('Can not concatenate cubes {0}'.format(cubes)) - - -def _save_cubes(cubes, **args): - """Save iris cube to file.""" - filename = args['target'] - optimize_accesss = args.pop('optimize_access') - - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - os.makedirs(dirname) - - if (os.path.exists(filename) - and all(cube.has_lazy_data() for cube in cubes)): - logger.debug("Not saving cubes %s to %s to avoid data loss. " - "The cube is probably unchanged.", cubes, filename) - else: - logger.debug("Saving cubes %s to %s", cubes, filename) - if optimize_accesss: - cube = cubes[0] - if optimize_accesss == 'map': - dims = set(cube.coord_dims('latitude') + - cube.coord_dims('longitude')) - elif optimize_accesss == 'timeseries': - dims = set(cube.coord_dims('time')) - else: - dims = tuple() - for coord_dims in (cube.coord_dims(dimension) for dimension - in optimize_accesss.split(' ')): - dims += coord_dims - dims = set(dims) + raise ex - args['chunksizes'] = tuple(length if index in dims else 1 - for index, length - in enumerate(cube.shape)) - iris.save(cubes, **args) - - return filename - -def save(cubes, optimize_access=None, - compress=False, debug=False, step=None): +def save(cubes, filename, optimize_access='', compress=False, **kwargs): """ - Save iris cubes to file - - Path is taken from the _filename attributte in the code. + Save iris cubes to file. Parameters ---------- cubes: iterable of iris.cube.Cube Data cubes to be saved + filename: str + Name of target file + optimize_access: str Set internal NetCDF chunking to favour a reading scheme @@ -141,43 +115,63 @@ def save(cubes, optimize_access=None, compress: bool, optional Use NetCDF internal compression. - debug: bool, optional - Inform the function if this save is an intermediate save - - step: int, optional - Number of the preprocessor step. 
- - Only used if debug is True - Returns ------- + str + filename """ - paths = {} - for cube in cubes: - if '_filename' not in cube.attributes: - raise ValueError("No filename specified in cube {}".format(cube)) - if debug: - dirname = os.path.splitext(cube.attributes.get('_filename'))[0] - if os.path.exists(dirname) and os.listdir(dirname): - num = int(sorted(os.listdir(dirname)).pop()[:2]) + 1 - else: - num = 0 - filename = os.path.join(dirname, '{:02}_{}.nc'.format(num, step)) + # Rename some arguments + kwargs['target'] = filename + kwargs['zlib'] = compress + + dirname = os.path.dirname(filename) + if not os.path.exists(dirname): + os.makedirs(dirname) + + if (os.path.exists(filename) + and all(cube.has_lazy_data() for cube in cubes)): + logger.debug( + "Not saving cubes %s to %s to avoid data loss. " + "The cube is probably unchanged.", cubes, filename) + return filename + + logger.debug("Saving cubes %s to %s", cubes, filename) + if optimize_access: + cube = cubes[0] + if optimize_access == 'map': + dims = set( + cube.coord_dims('latitude') + cube.coord_dims('longitude')) + elif optimize_access == 'timeseries': + dims = set(cube.coord_dims('time')) else: - filename = cube.attributes.pop('_filename') - if filename not in paths: - paths[filename] = [] - paths[filename].append(cube) + dims = tuple() + for coord_dims in (cube.coord_dims(dimension) + for dimension in optimize_access.split(' ')): + dims += coord_dims + dims = set(dims) + + kwargs['chunksizes'] = tuple( + length if index in dims else 1 + for index, length in enumerate(cube.shape)) - # TODO replace block when using iris 2.0 - for filename in paths: - # _save_cubes(cubes=paths[filename], target=filename, - # fill_value=GLOBAL_FILL_VALUE) - _save_cubes(cubes=paths[filename], target=filename, zlib=compress, - optimize_access=optimize_access) + if not use_legacy_iris(): + kwargs['fill_value'] = GLOBAL_FILL_VALUE - return list(paths) + iris.save(cubes, **kwargs) + + return filename + + +def _get_debug_filename(filename, step): + """Get a filename for debugging the preprocessor.""" + dirname = os.path.splitext(filename)[0] + if os.path.exists(dirname) and os.listdir(dirname): + num = int(sorted(os.listdir(dirname)).pop()[:2]) + 1 + else: + num = 0 + filename = os.path.join(dirname, '{:02}_{}.nc'.format(num, step)) + return filename def cleanup(files, remove=None): @@ -194,22 +188,43 @@ def cleanup(files, remove=None): return files -def extract_metadata(files, write_ncl=False): - """Extract the metadata attribute from cubes and write to file.""" +def _ordered_safe_dump(data, stream): + """Write data containing OrderedDicts to yaml file.""" + + class _OrderedDumper(yaml.SafeDumper): + pass + + def _dict_representer(dumper, data): + return dumper.represent_mapping( + yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()) + + _OrderedDumper.add_representer(OrderedDict, _dict_representer) + return yaml.dump(data, stream, _OrderedDumper) + + +def write_metadata(products, write_ncl=False): + """Write product metadata to file.""" output_files = [] - for output_dir, filenames in groupby(files, os.path.dirname): - metadata = {} - for filename in filenames: - cube = iris.load_cube(filename) - raw_cube_metadata = cube.attributes.get('metadata') - if raw_cube_metadata: - cube_metadata = yaml.safe_load(raw_cube_metadata) - metadata[filename] = cube_metadata + for output_dir, prods in groupby(products, + lambda p: os.path.dirname(p.filename)): + sorted_products = sorted( + prods, + key=lambda p: ( + 
p.attributes.get('recipe_dataset_index', 1e6), + p.attributes.get('dataset', ''), + ), + ) + metadata = OrderedDict() + for product in sorted_products: + if isinstance(product.attributes.get('exp'), (list, tuple)): + product.attributes = dict(product.attributes) + product.attributes['exp'] = '-'.join(product.attributes['exp']) + metadata[product.filename] = product.attributes output_filename = os.path.join(output_dir, 'metadata.yml') output_files.append(output_filename) with open(output_filename, 'w') as file: - yaml.safe_dump(metadata, file) + _ordered_safe_dump(metadata, file) if write_ncl: output_files.append(_write_ncl_metadata(output_dir, metadata)) @@ -217,51 +232,36 @@ def extract_metadata(files, write_ncl=False): def _write_ncl_metadata(output_dir, metadata): - """Write NCL metadata files to output_dir""" - variables = list(metadata.values()) - # 'variables' is a list of dicts, but NCL does not support nested - # dicts, so convert to dict of lists. - keys = sorted({k for v in variables for k in v}) - input_file_info = {k: [v.get(k) for v in variables] for k in keys} - fx_file_list = input_file_info.pop('fx_files', None) - if fx_file_list: - for fx_files in fx_file_list: - for key in fx_files: - if key not in input_file_info: - input_file_info[key] = [] - input_file_info[key].append(fx_files[key]) - - info = { - 'input_file_info': input_file_info, - 'dataset_info': {}, - 'variable_info': {} - } + """Write NCL metadata files to output_dir.""" + variables = [copy.deepcopy(v) for v in metadata.values()] + + for variable in variables: + fx_files = variable.pop('fx_files', {}) + for fx_type in fx_files: + variable[fx_type] = fx_files[fx_type] + + info = {'input_file_info': variables} # Split input_file_info into dataset and variable properties # dataset keys and keys with non-identical values will be stored # in dataset_info, the rest in variable_info - for key, values in input_file_info.items(): - dataset_specific = any(values[0] != v for v in values) - if (dataset_specific or key in DATASET_KEYS) and \ - key not in VARIABLE_KEYS: - info['dataset_info'][key] = values - else: - # Select a value that is filled - attribute_value = None - for value in values: - if value is not None: - attribute_value = value - break - info['variable_info'][key] = attribute_value - - short_name = info['variable_info']['short_name'] - filename = os.path.join(output_dir, short_name + '_info.ncl') + variable_info = {} + info['variable_info'] = [variable_info] + info['dataset_info'] = [] + for variable in variables: + dataset_info = {} + info['dataset_info'].append(dataset_info) + for key in variable: + dataset_specific = any( + variable[key] != var.get(key, object()) for var in variables) + if ((dataset_specific or key in DATASET_KEYS) + and key not in VARIABLE_KEYS): + dataset_info[key] = variable[key] + else: + variable_info[key] = variable[key] + + filename = os.path.join(output_dir, + variable_info['short_name'] + '_info.ncl') write_ncl_settings(info, filename) return filename - - -class ConcatenationError(Exception): - """Exception class for concatenation errors""" - - pass diff --git a/esmvaltool/preprocessor/_mapping.py b/esmvaltool/preprocessor/_mapping.py new file mode 100644 index 0000000000..17a47e7481 --- /dev/null +++ b/esmvaltool/preprocessor/_mapping.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +"""Provides mapping of a cube.""" + +import collections + +import iris +import numpy as np +import six + + +def _is_single_item(testee): + """ + Check if testee is a single item. 
+ + Return whether this is a single item, rather than an iterable. + We count string types as 'single', also. + """ + return (isinstance(testee, six.string_types) + or not isinstance(testee, collections.Iterable)) + + +def _as_list_of_coords(cube, names_or_coords): + """Convert a name, coord, or list of names/coords to a list of coords.""" + # If not iterable, convert to list of a single item + if _is_single_item(names_or_coords): + names_or_coords = [names_or_coords] + coords = [] + for name_or_coord in names_or_coords: + if isinstance(name_or_coord, (iris.coords.Coord, six.string_types)): + coords.append(cube.coord(name_or_coord)) + else: + # Don't know how to handle this type + msg = ("Don't know how to handle coordinate of type %s. " + "Ensure all coordinates are of type six.string_types " + "or iris.coords.Coord.") % (type(name_or_coord), ) + raise TypeError(msg) + return coords + + +def ref_to_dims_index_as_coordinate(cube, ref): + """Get dims for coord ref.""" + coord = _as_list_of_coords(cube, ref)[0] + dims = cube.coord_dims(coord) + if not dims: + msg = ('Requested an iterator over a coordinate ({}) ' + 'which does not describe a dimension.') + msg = msg.format(coord.name()) + raise ValueError(msg) + return dims + + +def ref_to_dims_index_as_index(cube, ref): + """Get dim for index ref.""" + try: + dim = int(ref) + except (ValueError, TypeError): + raise ValueError('{} Incompatible type {} for ' + 'slicing'.format(ref, type(ref))) + if dim < 0 or dim > cube.ndim: + msg = ('Requested an iterator over a dimension ({}) ' + 'which does not exist.'.format(dim)) + raise ValueError(msg) + dims = [dim] + return dims + + +def ref_to_dims_index(cube, ref_to_slice): + """ + Map a list of :class:`iris.coords.DimCoord` to a tuple of indices. + + This method finds the indices of the dimensions in a cube that collectively + correspond to the given list of :class:`iris.coords.DimCoord`. + + Parameters + ---------- + cube: :class:`iris.cube.Cube` + The cube to examine. + ref_to_slice: iterable of or single :class:`iris.coords.DimCoord` + Specification of the dimensions in terms of coordinates. + + Returns + ------- + tuple: + A tuple of indices corresponding to the given dimensions. + """ + # Required to handle a mix between types + if _is_single_item(ref_to_slice): + ref_to_slice = [ref_to_slice] + dim_to_slice = [] + dim_to_slice_set = set() + for ref in ref_to_slice: + try: + dims = ref_to_dims_index_as_coordinate(cube, ref) + except TypeError: + dims = ref_to_dims_index_as_index(cube, ref) + for dim in dims: + if dim not in dim_to_slice_set: + dim_to_slice.append(dim) + dim_to_slice_set.add(dim) + return dim_to_slice + + +def get_associated_coords(cube, dimensions): + """ + Return all coords containing any of the given dimensions. + + Return all coords, dimensional and auxiliary, that contain any of + the given dimensions. + """ + dims = [] + dim_set = set() + for dim in dimensions: + if dim not in dim_set: + dims.append(dim) + dim_set.add(dim) + dim_coords = [] + for i in dims: + coords = cube.coords(contains_dimension=i, dim_coords=True) + if coords: + dim_coords.append(coords[0]) + aux_coords = [] + for i in dims: + coords = cube.coords(contains_dimension=i, dim_coords=False) + if coords: + aux_coords.append(coords[0]) + return dim_coords, aux_coords + + +def get_empty_data(shape, dtype=np.float32): + """ + Create an empty data object of the given shape. 
+
+    Creates an empty data object of the given shape as a numpy masked
+    array; lazy (biggus or dask) arrays are not used here.
+    """
+    data = np.empty(shape, dtype=dtype)
+    mask = np.empty(shape, dtype=bool)
+    return np.ma.masked_array(data, mask)
+
+
+def get_slice_spec(cube, ref_to_slice):
+    """
+    Turn a slice reference into a specification for the slice.
+
+    Turns a slice reference into a specification comprising the shape as well
+    as the relevant dimensional and auxiliary coordinates.
+    """
+    slice_dims = ref_to_dims_index(cube, ref_to_slice)
+    slice_shape = tuple(cube.shape[d] for d in slice_dims)
+    dim_coords, aux_coords = get_associated_coords(cube, slice_dims)
+    return slice_shape, dim_coords, aux_coords
+
+
+def index_iterator(dims_to_slice, shape):
+    """
+    Return iterator for subsets of multidimensional objects.
+
+    An iterator over a multidimensional object, giving both source and
+    destination indices.
+    """
+    dst_slices = (slice(None, None),) * len(dims_to_slice)
+    dims = [1 if n in dims_to_slice else i for n, i in enumerate(shape)]
+    for index_tuple in np.ndindex(*dims):
+        src_ind = tuple(
+            slice(None, None) if n in dims_to_slice else i
+            for n, i in enumerate(index_tuple))
+        dst_ind = tuple(i for n, i in enumerate(index_tuple)
+                        if n not in dims_to_slice) + dst_slices
+        yield src_ind, dst_ind
+
+
+def get_slice_coords(cube):
+    """Return ordered set of unique coordinates."""
+    slice_coords = []
+    slice_set = set()
+    for i in range(cube.ndim):
+        coords = cube.coords(contains_dimension=i)
+        for coord in coords:
+            if coord not in slice_set:
+                slice_coords.append(coord)
+                slice_set.add(coord)
+    return slice_coords
+
+
+def map_slices(src, func, src_rep, dst_rep):
+    """
+    Map slices of a cube, replacing them with different slices.
+
+    This method is similar to the standard cube collapsed and aggregated_by
+    methods; however, where they completely remove the mapped dimensions,
+    this method allows for their replacement with other dimensions.
+    The new dimensions are specified with a destination representant and will
+    be the last dimensions of the resulting cube, even though the removed
+    dimensions can be any of the source cube's dimensions.
+
+    Parameters
+    ----------
+    src: :class:`iris.cube.Cube`
+        Source cube to be mapped.
+    func: callable
+        Callable that takes a single cube and returns a single numpy array.
+    src_rep: :class:`iris.cube.Cube`
+        Source representant that specifies the dimensions to be removed from
+        the source cube.
+    dst_rep: :class:`iris.cube.Cube`
+        Destination representant that specifies the shape of the new
+        dimensions.
+
+    Returns
+    -------
+    :class:`iris.cube.Cube`:
+        New cube that has the shape of the source cube with the removed
+        dimensions replaced with the destination dimensions.
+        All coordinates that span any of the removed dimensions are removed;
+        :class:`iris.coords.DimCoord` for the new dimensions are taken from
+        `dst_rep`.
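+
+    For example, with `src_rep` spanning the horizontal dimensions of a
+    (time, plev, lat, lon) source cube and a two-dimensional `dst_rep`,
+    the resulting cube has shape (time, plev) + dst_rep.shape.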
+    """
+    ref_to_slice = get_slice_coords(src_rep)
+    src_slice_dims = ref_to_dims_index(src, ref_to_slice)
+    src_keep_dims = list(set(range(src.ndim)) - set(src_slice_dims))
+    src_keep_spec = get_slice_spec(src, src_keep_dims)
+    res_shape = src_keep_spec[0] + dst_rep.shape
+    dim_coords = src_keep_spec[1] + dst_rep.coords(dim_coords=True)
+    dim_coords_and_dims = [(c, i) for i, c in enumerate(dim_coords)]
+    dst = iris.cube.Cube(
+        data=get_empty_data(res_shape, dtype=src.dtype),
+        standard_name=src.standard_name,
+        long_name=src.long_name,
+        var_name=src.var_name,
+        units=src.units,
+        attributes=src.attributes,
+        cell_methods=src.cell_methods,
+        dim_coords_and_dims=dim_coords_and_dims,
+    )
+    for src_ind, dst_ind in index_iterator(src_slice_dims, src.shape):
+        res = func(src[src_ind])
+        dst.data[dst_ind] = res
+    return dst
diff --git a/esmvaltool/preprocessor/_mask.py b/esmvaltool/preprocessor/_mask.py
index 4d65a75cde..72adcef6fc 100644
--- a/esmvaltool/preprocessor/_mask.py
+++ b/esmvaltool/preprocessor/_mask.py
@@ -5,13 +5,13 @@ and geographical area selection
 
-from __future__ import print_function
-
-import os
 import logging
+import os
 
+import cartopy.io.shapereader as shpreader
 import iris
 import numpy as np
+import shapely.vectorized as shp_vect
 from iris.analysis import Aggregator
 from iris.util import rolling_window
 
@@ -28,8 +28,8 @@ def _check_dims(cube, mask_cube):
     len_y = len(cube.coord('latitude').points)
     len_mx = len(mask_cube.coord('longitude').points)
     len_my = len(mask_cube.coord('latitude').points)
-    if (x_dim == mx_dim and y_dim == my_dim and
-            len_x == len_mx and len_y == len_my):
+    if (x_dim == mx_dim and y_dim == my_dim and len_x == len_mx
+            and len_y == len_my):
         logger.debug('Data cube and fx mask have same dims')
         return True
 
@@ -57,6 +57,13 @@ def _get_fx_mask(fx_data, fx_option, mask_type):
         elif fx_option == 'sea':
             # Mask sea out
             inmask[fx_data >= 50.] = True
+    elif mask_type == 'sftgif':
+        if fx_option == 'ice':
+            # Mask ice out
+            inmask[fx_data > 50.] = True
+        elif fx_option == 'landsea':
+            # Mask landsea out
+            inmask[fx_data <= 50.] = True
 
     return inmask
 
@@ -78,8 +85,33 @@ def _apply_fx_mask(fx_mask, var_data):
 
 
 def mask_landsea(cube, fx_files, mask_out):
-    """Apply a land/sea mask"""
-    # mask_out: is either 'land' or 'sea'
+    """
+    Mask out either land or sea.
+
+    Function that masks out either land mass or seas (oceans, seas and
+    lakes).
+
+    It uses dedicated fx files (sftlf or sftof) or, in their absence, it
+    applies a Natural Earth mask (land or ocean contours). Note that the
+    Natural Earth masks have different resolutions: 10m for land, and 50m
+    for seas; these are more than enough for ESMValTool purposes.
+
+    Parameters
+    ----------
+
+    * cube (iris.cube.Cube instance):
+        data cube to be masked.
+
+    * fx_files (list):
+        list holding the full paths to fx files.
+
+    * mask_out (string):
+        either "land" to mask out land mass or "sea" to mask out seas.
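+
+    For example, `mask_landsea(cube, fx_files, 'land')` masks out all land
+    points, keeping only the values over sea.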
+
+    Returns
+    -------
+    masked iris cube
+
+    """
     # Dict to store the Natural Earth masks
     cwd = os.path.dirname(__file__)
     # ne_10m_land is fast; ne_10m_ocean is very slow
@@ -95,14 +127,14 @@ def mask_landsea(cube, fx_files, mask_out):
             fx_cubes[fx_root] = iris.load_cube(fx_file)
 
         # preserve importance order: try sftlf first then sftof
-        if ('sftlf' in fx_cubes.keys() and
-                _check_dims(cube, fx_cubes['sftlf'])):
+        if ('sftlf' in fx_cubes.keys()
+                and _check_dims(cube, fx_cubes['sftlf'])):
             landsea_mask = _get_fx_mask(fx_cubes['sftlf'].data, mask_out,
                                         'sftlf')
             cube.data = _apply_fx_mask(landsea_mask, cube.data)
             logger.debug("Applying land-sea mask: sftlf")
-        elif ('sftof' in fx_cubes.keys() and
-              _check_dims(cube, fx_cubes['sftof'])):
+        elif ('sftof' in fx_cubes.keys()
+              and _check_dims(cube, fx_cubes['sftof'])):
             landsea_mask = _get_fx_mask(fx_cubes['sftof'].data, mask_out,
                                         'sftof')
             cube.data = _apply_fx_mask(landsea_mask, cube.data)
@@ -129,64 +161,47 @@ def mask_landsea(cube, fx_files, mask_out):
     return cube
 
 
-def masked_cube_simple(mycube, slicevar, v_min, v_max, threshold):
+def mask_landseaice(cube, fx_files, mask_out):
     """
-    Mask function 1 -- simple cube cropping
+    Mask out either landsea (combined) or ice.
 
-    masking for a specific variable slicevar (string)
-    arguments: cube, variable, min value, max value, threshold
+    Function that masks out either landsea (land and seas) or ice (Antarctica
+    and Greenland and some wee glaciers). It uses dedicated fx files (sftgif).
 
-    """
-    import numpy.ma as ma
-    coord_names = [coord.name() for coord in mycube.coords()]
-    if slicevar in coord_names:
-        coord = mycube.coord(slicevar)
-        print('Masking on variable: %s' % coord.standard_name)
-        cubeslice = mycube.extract(
-            iris.Constraint(coord_values={
-                coord.standard_name:
-                lambda cell: v_min <= cell.point <= v_max
-            }))
-        if cubeslice is not None:
-            masked_cubeslice = cubeslice.copy()
-            masked_cubeslice.data = ma.masked_greater(cubeslice.data,
-                                                      threshold)
-            return masked_cubeslice
-        else:
-            logger.info('NOT masking the cube')
-            return mycube
-    else:
-        logger.info('Var is not a cube dimension, leaving cube untouched')
-        return mycube
+    Parameters
+    ----------
 
+    * cube (iris.cube.Cube instance):
+        data cube to be masked.
 
-def masked_cube_lonlat(mycube, lonlat_list, threshold):
-    """
-    Mask function 2 -- simple cube cropping on (min,max) lon,lat
+    * fx_files (list):
+        list holding the full paths to fx files.
+
+    * mask_out (string):
+        either "landsea" to mask out landsea or "ice" to mask out ice.
- Builds a box and keeps only the values inside the box - args: cube, min value, max value, where value=(lon, lat), threshold + Returns + ------- + masked iris cube """ - import numpy.ma as ma - lon1, lon2, lat1, lat2 = lonlat_list - cubeslice = mycube.extract( - iris.Constraint( - longitude=lambda v: lon1 <= v.point <= lon2, - latitude=lambda v: lat1 <= v.point <= lat2)) - if cubeslice is not None: - masked_cubeslice = cubeslice.copy() - masked_cubeslice.data = ma.masked_greater(cubeslice.data, threshold) - print('Masking cube on lon-lat') - return masked_cubeslice + # sftgif is the only one so far + if fx_files: + for fx_file in fx_files: + fx_cube = iris.load_cube(fx_file) + + if _check_dims(cube, fx_cube): + landice_mask = _get_fx_mask(fx_cube.data, mask_out, 'sftgif') + cube.data = _apply_fx_mask(landice_mask, cube.data) + logger.debug("Applying landsea-ice mask: sftgif") else: - print('NOT masking the cube') - return mycube + logger.warning("Landsea-ice mask could not be found ") + + return cube def _get_geometry_from_shp(shapefilename): """Get the mask geometry out from a shapefile""" - import cartopy.io.shapereader as shpreader reader = shpreader.Reader(shapefilename) # Index 0 grabs the lowest resolution mask (no zoom) main_geom = [contour for contour in reader.geometries()][0] @@ -195,8 +210,6 @@ def _get_geometry_from_shp(shapefilename): def _mask_with_shp(cube, shapefilename): """Apply a Natural Earth land/sea mask""" - import shapely.vectorized as shp_vect - # Create the region region = _get_geometry_from_shp(shapefilename) @@ -207,7 +220,8 @@ def _mask_with_shp(cube, shapefilename): # 1D regular grids if cube.coord('longitude').points.ndim < 2: x_p, y_p = np.meshgrid( - cube.coord(axis='X').points, cube.coord(axis='Y').points) + cube.coord(axis='X').points, + cube.coord(axis='Y').points) # 2D irregular grids; spit an error for now else: logger.error('No fx-files found (sftlf or sftof)!\n \ @@ -237,36 +251,6 @@ def _mask_with_shp(cube, shapefilename): return cube -def polygon_shape(xlist, ylist): - """ - Make a polygon - - Function that takes a list of x-coordinates and a list of y-coordinates - and returns a polygon and its (x,y) points on the polygon's border - """ - from shapely.geometry import Polygon - poly = Polygon(xlist, ylist) - x_p, y_p = poly.exterior.coords.xy - return poly, x_p, y_p - - -""" -Calculating a custom statistic -============================== - -This example shows how to define and use a custom -:class:`iris.analysis.Aggregator`, that provides a new statistical operator for -use with cube aggregation functions such as :meth:`~iris.cube.Cube.collapsed`, -:meth:`~iris.cube.Cube.aggregated_by` or -:meth:`~iris.cube.Cube.rolling_window`. - -In this case, we have a time sequence of measurements (time unit dt), and we -want to calculate how many times N the measurements exceed a certain threshold -R over a sliding window dT (multiple of dt). The threshold could be 0 for any -unwanted value for instance. -""" - - # Define a function to perform the custom statistical operation. # Note: in order to meet the requirements of iris.analysis.Aggregator, it must # do the calculation over an arbitrary (given) data axis. @@ -375,99 +359,102 @@ def mask_cube_counts(mycube, value_threshold, counts_threshold, window_size): return counts_windowed_cube, newmask, masked_cube -def mask_above_threshold(mycube, threshold): +def mask_above_threshold(cube, threshold): """ Mask above a specific threshold value. 
- Takes a value `threshold' and masks off anything that is above + Takes a value 'threshold' and masks off anything that is above it in the cube data. Values equal to the threshold are not masked. """ - mycube.data = np.ma.masked_where(mycube.data > threshold, mycube.data) - return mycube + cube.data = np.ma.masked_where(cube.data > threshold, cube.data) + return cube -def mask_below_threshold(mycube, threshold): +def mask_below_threshold(cube, threshold): """ Mask below a specific threshold value. - Takes a value `threshold' and masks off anything that is below + Takes a value 'threshold' and masks off anything that is below it in the cube data. Values equal to the threshold are not masked. """ - mycube.data = np.ma.masked_where(mycube.data < threshold, mycube.data) - return mycube + cube.data = np.ma.masked_where(cube.data < threshold, cube.data) + return cube -def mask_inside_range(mycube, minimum, maximum): +def mask_inside_range(cube, minimum, maximum): """ Mask inside a specific threshold range. Takes a MINIMUM and a MAXIMUM value for the range, and masks off anything that's between the two in the cube data. """ - mycube.data = np.ma.masked_inside(mycube.data, minimum, maximum) - return mycube + cube.data = np.ma.masked_inside(cube.data, minimum, maximum) + return cube -def mask_outside_range(mycube, minimum, maximum): +def mask_outside_range(cube, minimum, maximum): """ Mask outside a specific threshold range. Takes a MINIMUM and a MAXIMUM value for the range, and masks off anything that's outside the two in the cube data. """ - mycube.data = np.ma.masked_outside(mycube.data, minimum, maximum) - return mycube + cube.data = np.ma.masked_outside(cube.data, minimum, maximum) + return cube -def mask_fillvalues(cubes, threshold_fraction, min_value=-1.e10, +def mask_fillvalues(products, + threshold_fraction, + min_value=-1.e10, time_window=1): - """Get the final fillvalues mask""" - # function idea copied from preprocess.py - - # Ensure all cubes have masked arrays - for cube in cubes: - cube.data = np.ma.fix_invalid(cube.data, copy=False) - - # Get the fillvalue masks from all datasets - masks = (_get_fillvalues_mask(cube, threshold_fraction, min_value, - time_window) for cube in cubes) - - # Combine all fillvalue masks + """Compute and apply a multi-dataset fillvalues mask""" combined_mask = None - for mask in masks: - if combined_mask is None: - combined_mask = np.zeros_like(mask) - # Select only valid (not all masked) pressure levels - n_dims = len(mask.shape) - if n_dims == 2: - valid = ~np.all(mask) - if valid: - combined_mask |= mask - elif n_dims == 3: - valid = ~np.all(mask, axis=(1, 2)) - combined_mask[valid] |= mask[valid] - else: - raise NotImplementedError("Unable to handle {} dimensional data" - .format(n_dims)) + + logger.debug("Creating fillvalues mask") + used = set() + for product in products: + for cube in product.cubes: + cube.data = np.ma.fix_invalid(cube.data, copy=False) + mask = _get_fillvalues_mask(cube, threshold_fraction, min_value, + time_window) + if combined_mask is None: + combined_mask = np.zeros_like(mask) + # Select only valid (not all masked) pressure levels + n_dims = len(mask.shape) + if n_dims == 2: + valid = ~np.all(mask) + if valid: + combined_mask |= mask + used.add(product) + elif n_dims == 3: + valid = ~np.all(mask, axis=(1, 2)) + combined_mask[valid] |= mask[valid] + if np.any(valid): + used.add(product) + else: + raise NotImplementedError( + "Unable to handle {} dimensional data".format(n_dims)) if np.any(combined_mask): - # Apply masks 
logger.debug("Applying fillvalues mask") - for cube in cubes: - cube.data.mask |= combined_mask + used = {p.copy_provenance() for p in used} + for product in products: + for cube in product.cubes: + cube.data.mask |= combined_mask + for other in used: + if other.filename != product.filename: + product.wasderivedfrom(other) - return cubes + return products def _get_fillvalues_mask(cube, threshold_fraction, min_value, time_window): - # function idea copied from preprocess.py - logger.debug("Creating fillvalues mask") # basic checks if threshold_fraction < 0 or threshold_fraction > 1.0: raise ValueError( - "Fraction of missing values {} should be between 0 and 1.0" - .format(threshold_fraction)) + "Fraction of missing values {} should be between 0 and 1.0".format( + threshold_fraction)) nr_time_points = len(cube.coord('time').points) if time_window > nr_time_points: logger.warning("Time window (in time units) larger " diff --git a/esmvaltool/preprocessor/_multimodel.py b/esmvaltool/preprocessor/_multimodel.py index 35584ce95e..b644b1810e 100644 --- a/esmvaltool/preprocessor/_multimodel.py +++ b/esmvaltool/preprocessor/_multimodel.py @@ -1,4 +1,4 @@ -"""multimodel statistics +"""multimodel statistics. Functions for multi-model operations supports a multitude of multimodel statistics @@ -13,21 +13,20 @@ """ import logging -from datetime import datetime, timedelta +from datetime import datetime from functools import reduce import cf_units import iris import numpy as np -import yaml -from ._io import save +from .._config import use_legacy_iris logger = logging.getLogger(__name__) def _get_time_offset(time_unit): - """Return a datetime object equivalent to tunit""" + """Return a datetime object equivalent to tunit.""" # tunit e.g. 'day since 1950-01-01 00:00:00.0000000 UTC' cfunit = cf_units.Unit(time_unit, calendar=cf_units.CALENDAR_STANDARD) time_offset = cfunit.num2date(0) @@ -35,7 +34,7 @@ def _get_time_offset(time_unit): def _plev_fix(dataset, pl_idx): - """Extract valid plev data + """Extract valid plev data. 
this function takes care of situations in which certain plevs are completely @@ -56,14 +55,14 @@ def _plev_fix(dataset, pl_idx): return statj -def _compute_statistic(datas, name): - """Compute multimodel statistic""" +def _compute_statistic(datas, statistic_name): + """Compute multimodel statistic.""" datas = np.ma.array(datas) statistic = datas[0] - if name == 'median': + if statistic_name == 'median': statistic_function = np.ma.median - elif name == 'mean': + elif statistic_name == 'mean': statistic_function = np.ma.mean else: raise NotImplementedError @@ -101,13 +100,11 @@ def _compute_statistic(datas, name): return statistic -def _put_in_cube(template_cube, cube_data, stat_name, - file_name, time_bounds, t_axis): - """Quick cube building and saving""" - # grab coordinates from any cube - times = template_cube.coord('time') - # or get the FULL time axis - if t_axis is not None: +def _put_in_cube(template_cube, cube_data, statistic, t_axis): + """Quick cube building and saving.""" + if t_axis is None: + times = template_cube.coord('time') + else: times = iris.coords.DimCoord( t_axis, standard_name='time', @@ -123,35 +120,28 @@ def _put_in_cube(template_cube, cube_data, stat_name, plev = template_cube.coord('air_pressure') cspec = [(times, 0), (plev, 1), (lats, 2), (lons, 3)] elif len(template_cube.shape) == 1: - cspec = [(times, 0), ] + cspec = [ + (times, 0), + ] elif len(template_cube.shape) == 2: # If you're going to hardwire air_pressure into this, # might as well have depth here too. plev = template_cube.coord('depth') - cspec = [(times, 0), (plev, 1), ] + cspec = [ + (times, 0), + (plev, 1), + ] # correct dspec if necessary fixed_dspec = np.ma.fix_invalid(cube_data, copy=False, fill_value=1e+20) # put in cube stats_cube = iris.cube.Cube( - fixed_dspec, dim_coords_and_dims=cspec, long_name=stat_name) + fixed_dspec, dim_coords_and_dims=cspec, long_name=statistic) coord_names = [coord.name() for coord in template_cube.coords()] if 'air_pressure' in coord_names: if len(template_cube.shape) == 3: stats_cube.add_aux_coord(template_cube.coord('air_pressure')) - stats_cube.attributes['_filename'] = file_name - - metadata = {'dataset': 'MultiModel' + stat_name.title(), - 'filename': file_name} - metadata_template = yaml.safe_load(template_cube.attributes['metadata']) - for attr in ('short_name', 'standard_name', 'long_name', 'units', 'field', - 'start_year', 'end_year', 'diagnostic', 'preprocessor'): - if attr in metadata_template: - metadata[attr] = metadata_template[attr] - metadata['start_year'] = time_bounds[0] - metadata['end_year'] = time_bounds[1] - stats_cube.attributes['metadata'] = yaml.safe_dump(metadata) - # complete metadata + stats_cube.var_name = template_cube.var_name stats_cube.long_name = template_cube.long_name stats_cube.standard_name = template_cube.standard_name @@ -160,11 +150,15 @@ def _put_in_cube(template_cube, cube_data, stat_name, def _datetime_to_int_days(cube): - """Return list of int(days) converted from cube datetime cells""" - # TODO replace the block when using iris 2.0 - # time_cells = [cell.point for cell in cube.coord('time').cells()] - time_cells = [cube.coord('time').units.num2date(cell.point) - for cell in cube.coord('time').cells()] + """Return list of int(days) converted from cube datetime cells.""" + if use_legacy_iris(): + time_cells = [ + cube.coord('time').units.num2date(cell.point) + for cell in cube.coord('time').cells() + ] + else: + time_cells = [cell.point for cell in cube.coord('time').cells()] + time_unit = cube.coord('time').units.name 
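+    # _get_time_offset gives the datetime at numeric time 0, i.e. the
+    # reference date of the time unit; days are counted from there.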
time_offset = _get_time_offset(time_unit) @@ -208,10 +202,10 @@ def _get_overlap(cubes): def _slice_cube(cube, t_1, t_2): """ - Efficient slicer + Efficient slicer. Simple cube data slicer on indices - of common time-data elements + of common time-data elements. """ time_pts = [t for t in cube.coord('time').points] converted_t = _datetime_to_int_days(cube) @@ -223,14 +217,14 @@ def _slice_cube(cube, t_1, t_2): def _monthly_t(cubes): - """Rearrange time points for monthly data""" + """Rearrange time points for monthly data.""" # get original cubes tpoints days = {day for cube in cubes for day in _datetime_to_int_days(cube)} return sorted(days) def _full_time_slice(cubes, ndat, indices, ndatarr, t_idx): - """Construct a contiguous collection over time""" + """Construct a contiguous collection over time.""" for idx_cube, cube in enumerate(cubes): # reset mask ndat.mask = True @@ -245,27 +239,29 @@ def _full_time_slice(cubes, ndat, indices, ndatarr, t_idx): return ndatarr -def _assemble_overlap_data(cubes, ovlp, stat_type, filename, time_bounds): - """Get statistical data in iris cubes for OVERLAP""" - start, stop = ovlp +def _assemble_overlap_data(cubes, interval, statistic): + """Get statistical data in iris cubes for OVERLAP.""" + start, stop = interval sl_1, sl_2 = _slice_cube(cubes[0], start, stop) stats_dats = np.ma.zeros(cubes[0].data[sl_1:sl_2 + 1].shape) + # keep this outside the following loop + # this speeds up the code by a factor of 15 + indices = [_slice_cube(cube, start, stop) for cube in cubes] + for i in range(stats_dats.shape[0]): - indices = [_slice_cube(cube, start, stop) for cube in cubes] time_data = [ cube.data[indx[0]:indx[1] + 1][i] for cube, indx in zip(cubes, indices) ] - stats_dats[i] = _compute_statistic(time_data, stat_type) + stats_dats[i] = _compute_statistic(time_data, statistic) stats_cube = _put_in_cube( - cubes[0][sl_1:sl_2 + 1], stats_dats, stat_type, filename, - time_bounds, t_axis=None) + cubes[0][sl_1:sl_2 + 1], stats_dats, statistic, t_axis=None) return stats_cube -def _assemble_full_data(cubes, stat_type, filename, time_bounds): - """Get statistical data in iris cubes for FULL""" +def _assemble_full_data(cubes, statistic): + """Get statistical data in iris cubes for FULL.""" # all times, new MONTHLY data time axis time_axis = [float(fl) for fl in _monthly_t(cubes)] @@ -298,82 +294,54 @@ def _assemble_full_data(cubes, stat_type, filename, time_bounds): time_data = [] for j in range(len(cubes)): time_data.append(new_datas_array[j]) - stats_dats[i] = _compute_statistic(time_data, stat_type) - stats_cube = _put_in_cube(cubes[0], stats_dats, stat_type, filename, - time_bounds, time_axis) + stats_dats[i] = _compute_statistic(time_data, statistic) + stats_cube = _put_in_cube(cubes[0], stats_dats, statistic, time_axis) return stats_cube -def _update_filename(filename, interval, time_unit): - """Update netCDF file names based on time properties""" - start, stop = [(_get_time_offset(time_unit) + timedelta(int(ts))).year - for ts in interval] - filename = "{}_{}-{}.nc".format(filename.rpartition('_')[0], start, stop) - return filename, start, stop - - -def multi_model_statistics(cubes, span, filenames, exclude, statistics): +def multi_model_statistics(products, span, output_products, statistics): """Compute multi-model mean and median.""" - logger.debug('Multi model statistics: excluding files: %s', exclude) - logger.debug('Multimodel statistics: computing: %s', statistics) - selection = [ - cube for cube in cubes - if not all(cube.attributes.get(k) in exclude[k] 
for k in exclude)
-    ]
-
-    if len(selection) < 2:
+    if len(products) < 2:
         logger.info("Single dataset in list: will not compute statistics.")
-        return cubes
-
-    # unify units
-    iris.util.unify_time_units(selection)
+        return products
 
+    cubes = [cube for product in products for cube in product.cubes]
     # check if we have any time overlap
-    interval = _get_overlap(selection)
+    interval = _get_overlap(cubes)
     if interval is None:
-        logger.info("Time overlap between cubes is none or a single point.")
-        logger.info("check datasets: will not compute statistics.")
-        return cubes
+        logger.info("Time overlap between cubes is none or a single point. "
+                    "Check datasets: will not compute statistics.")
+        return products
 
-    time_unit = selection[0].coord('time').units.name
-
-    # cases
-    files = []
     if span == 'overlap':
         logger.debug("Using common time overlap between "
                      "datasets to compute statistics.")
-
-        # assemble data
-        for stat_name in statistics:
-            filename, startT, stopT = _update_filename(filenames[stat_name],
-                                                       interval,
-                                                       time_unit)
-            time_bounds = [startT, stopT]
-            cube_of_stats = _assemble_overlap_data(selection, interval,
-                                                   stat_name, filename,
-                                                   time_bounds)
-            cube_of_stats.data = np.ma.array(cube_of_stats.data,
-                                             dtype=np.dtype('float32'))
-            save([cube_of_stats])
-            files.append(filename)
-
     elif span == 'full':
-        logger.debug("Using full time spans " "to compute statistics.")
-        # assemble data
-        time_points = _monthly_t(selection)
-        interval = [min(time_points), max(time_points)]
-        for stat_name in statistics:
-            filename, startT, stopT = _update_filename(filenames[stat_name],
-                                                       interval,
-                                                       time_unit)
-            time_bounds = [startT, stopT]
-            cube_of_stats = _assemble_full_data(selection, stat_name, filename,
-                                                time_bounds)
-            cube_of_stats.data = np.ma.array(cube_of_stats.data,
-                                             dtype=np.dtype('float32'))
-            save([cube_of_stats])
-            files.append(filename)
-
-    cubes.extend(files)
-    return cubes
+        logger.debug("Using full time spans to compute statistics.")
+    else:
+        raise ValueError(
+            "Unexpected value for span {}, choose from 'overlap', 'full'"
+            .format(span))
+
+    statistic_products = set()
+    for statistic in statistics:
+        # Compute statistic
+        if span == 'overlap':
+            statistic_cube = _assemble_overlap_data(cubes, interval, statistic)
+        elif span == 'full':
+            statistic_cube = _assemble_full_data(cubes, statistic)
+        statistic_cube.data = np.ma.array(
+            statistic_cube.data, dtype=np.dtype('float32'))
+
+        # Add to output product and log provenance
+        statistic_product = output_products[statistic]
+        statistic_product.cubes = [statistic_cube]
+        for product in products:
+            statistic_product.wasderivedfrom(product)
+        logger.info("Generated %s", statistic_product)
+        statistic_products.add(statistic_product)
+
+    products |= statistic_products
+
+    return products
diff --git a/esmvaltool/preprocessor/_regrid.py b/esmvaltool/preprocessor/_regrid.py
index b5055ec9ba..6a64775402 100644
--- a/esmvaltool/preprocessor/_regrid.py
+++ b/esmvaltool/preprocessor/_regrid.py
@@ -1,35 +1,29 @@
-"""
-_regrid.py
-
-A package for performing horizontal regridding,
-and vertical level extraction
-or vertical level interpolation.
-
-"""
-
-from __future__ import absolute_import, division, print_function
+"""Horizontal and vertical regridding module."""
 
 import os
 import re
 from copy import deepcopy
 
 import iris
-import iris.exceptions
 import numpy as np
 import six
 import stratify
 from iris.analysis import AreaWeighted, Linear, Nearest, UnstructuredNearest
-from numpy import ma
 
+from ._io import concatenate_callback, load
+from ._regrid_esmpy import ESMF_REGRID_METHODS
+from ._regrid_esmpy import regrid as esmpy_regrid
+from ..cmor.fix import fix_file, fix_metadata
 from ..cmor.table import CMOR_TABLES
 
 # Regular expression to parse a "MxN" cell-specification.
-_CELL_SPEC = re.compile(r'''\A
-    \s*(?P<dx>\d+(\.\d+)?)\s*
-    x
-    \s*(?P<dy>\d+(\.\d+)?)\s*
-    \Z
-    ''', re.IGNORECASE | re.VERBOSE)
+_CELL_SPEC = re.compile(
+    r'''\A
+    \s*(?P<dlon>\d+(\.\d+)?)\s*
+    x
+    \s*(?P<dlat>\d+(\.\d+)?)\s*
+    \Z
+    ''', re.IGNORECASE | re.VERBOSE)
 
 # Default fill-value.
 _MDI = 1e+20
@@ -43,24 +37,50 @@
 _LON_RANGE = _LON_MAX - _LON_MIN
 
 # A cached stock of standard horizontal target grids.
-_cache = dict()
+_CACHE = dict()
 
 # Supported horizontal regridding schemes.
-horizontal_schemes = dict(
-    linear=Linear(extrapolation_mode='mask'),
-    nearest=Nearest(extrapolation_mode='mask'),
-    area_weighted=AreaWeighted(),
-    unstructured_nearest=UnstructuredNearest())
+HORIZONTAL_SCHEMES = {
+    'linear': Linear(extrapolation_mode='mask'),
+    'linear_extrapolate': Linear(extrapolation_mode='extrapolate'),
+    'nearest': Nearest(extrapolation_mode='mask'),
+    'area_weighted': AreaWeighted(),
+    'unstructured_nearest': UnstructuredNearest(),
+}
 
 # Supported vertical interpolation schemes.
-vertical_schemes = ['linear', 'nearest',
+VERTICAL_SCHEMES = ('linear', 'nearest',
                     'linear_horizontal_extrapolate_vertical',
-                    'nearest_horizontal_extrapolate_vertical']
+                    'nearest_horizontal_extrapolate_vertical')
+
+
+def parse_cell_spec(spec):
+    """Parse an MxN cell specification string."""
+    cell_match = _CELL_SPEC.match(spec)
+    if cell_match is None:
+        emsg = 'Invalid MxN cell specification for grid, got {!r}.'
+        raise ValueError(emsg.format(spec))
+
+    cell_group = cell_match.groupdict()
+    dlon = float(cell_group['dlon'])
+    dlat = float(cell_group['dlat'])
+
+    if (np.trunc(_LON_RANGE / dlon) * dlon) != _LON_RANGE:
+        emsg = ('Invalid longitude delta in MxN cell specification '
+                'for grid, got {!r}.')
+        raise ValueError(emsg.format(dlon))
+
+    if (np.trunc(_LAT_RANGE / dlat) * dlat) != _LAT_RANGE:
+        emsg = ('Invalid latitude delta in MxN cell specification '
+                'for grid, got {!r}.')
+        raise ValueError(emsg.format(dlat))
+
+    return dlon, dlat
 
 
-def _stock_cube(spec):
+def _stock_cube(spec, lat_offset=True, lon_offset=True):
     """
-    Create a stock cube
+    Create a stock cube.
 
     Create a global cube with M degree-east by N degree-north regular grid
     cells.
@@ -69,52 +89,57 @@
     -90 to 90 degrees. Each cell grid point is calculated as the mid-point of
     the associated MxN cell.
 
-    Paramaters
+    Parameters
     ----------
     spec : str
         Specifies the 'MxN' degree cell-specification for the global grid.
+    lat_offset : bool
+        Offset the grid centers of the latitude coordinate w.r.t. the
+        pole by half a grid step. This argument is ignored if `target_grid`
+        is a cube or file.
+    lon_offset : bool
+        Offset the grid centers of the longitude coordinate w.r.t. Greenwich
+        meridian by half a grid step.
+        This argument is ignored if `target_grid` is a cube or file.
 
     Returns
     -------
         A :class:`~iris.cube.Cube`.
 
     """
-    # Parse the MxN cell specification string.
- cell_match = _CELL_SPEC.match(spec) - if cell_match is None: - emsg = 'Invalid MxN cell specification for stock cube, got {!r}.' - raise ValueError(emsg.format(spec)) - - cell_group = cell_match.groupdict() - dx = float(cell_group['dx']) - dy = float(cell_group['dy']) - - if (np.trunc(_LON_RANGE / dx) * dx) != _LON_RANGE: - emsg = ('Invalid longitude delta in MxN cell specification ' - 'for stock cube, got {!r}.') - raise ValueError(emsg.format(dx)) + dlon, dlat = parse_cell_spec(spec) + mid_dlon, mid_dlat = dlon / 2, dlat / 2 - if (np.trunc(_LAT_RANGE / dy) * dy) != _LAT_RANGE: - emsg = ('Invalid latitude delta in MxN cell specification ' - 'for stock cube, got {!r}.') - raise ValueError(emsg.format(dy)) + # Construct the latitude coordinate, with bounds. + if lat_offset: + latdata = np.linspace(_LAT_MIN + mid_dlat, _LAT_MAX - mid_dlat, + _LAT_RANGE / dlat) + else: + latdata = np.linspace(_LAT_MIN, _LAT_MAX, _LAT_RANGE / dlat + 1) - mid_dx, mid_dy = dx / 2, dy / 2 + # Construct the longitude coordinat, with bounds. + if lon_offset: + londata = np.linspace(_LON_MIN + mid_dlon, _LON_MAX - mid_dlon, + _LON_RANGE / dlon) + else: + londata = np.linspace(_LON_MIN, _LON_MAX - dlon, _LON_RANGE / dlon) - # Construct the latitude coordinate, with bounds. - ydata = np.linspace(_LAT_MIN + mid_dy, _LAT_MAX - mid_dy, _LAT_RANGE / dy) lats = iris.coords.DimCoord( - ydata, standard_name='latitude', units='degrees_north', var_name='lat') + latdata, + standard_name='latitude', + units='degrees_north', + var_name='lat') lats.guess_bounds() - # Construct the longitude coordinate, with bounds. - xdata = np.linspace(_LON_MIN + mid_dx, _LON_MAX - mid_dx, _LON_RANGE / dx) lons = iris.coords.DimCoord( - xdata, standard_name='longitude', units='degrees_east', var_name='lon') + londata, + standard_name='longitude', + units='degrees_east', + var_name='lon') lons.guess_bounds() # Construct the resultant stock cube, with dummy data. - shape = (ydata.size, xdata.size) + shape = (latdata.size, londata.size) dummy = np.empty(shape, dtype=np.dtype('int8')) coords_spec = [(lats, 0), (lons, 1)] cube = iris.cube.Cube(dummy, dim_coords_and_dims=coords_spec) @@ -122,21 +147,47 @@ def _stock_cube(spec): return cube -def regrid(src_cube, target_grid, scheme): +def _attempt_irregular_regridding(cube, scheme): + """Check if irregular regridding with ESMF should be used.""" + if scheme in ESMF_REGRID_METHODS: + try: + lat_dim = cube.coord('latitude').ndim + lon_dim = cube.coord('longitude').ndim + if lat_dim == lon_dim == 2: + return True + except iris.exceptions.CoordinateNotFoundError: + pass + return False + + +def regrid(cube, target_grid, scheme, lat_offset=True, lon_offset=True): """ Perform horizontal regridding. Parameters ---------- - src_cube : cube + cube : cube The source cube to be regridded. - tgt_cube : cube or str + target_grid : cube or str The cube that specifies the target or reference grid for the regridding operation. Alternatively, a string cell specification may be provided, of the form 'MxN', which specifies the extent of the cell, longitude by latitude (degrees) for a global, regular target grid. scheme : str - The regridding scheme to perform, see `regrid.horizontal_schemes`. + The regridding scheme to perform, choose from + 'linear', + 'linear_extrapolate', + 'nearest', + 'area_weighted', + 'unstructured_nearest'. + lat_offset : bool + Offset the grid centers of the latitude coordinate w.r.t. the + pole by half a grid step. This argument is ignored if `target_grid` + is a cube or file. 
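# The lat_offset/lon_offset switches above move the grid centres off the
# poles and the Greenwich meridian by half a cell; the two latitude arrays
# _stock_cube builds for a 2x2 spec look like this:
import numpy as np

dlat = 2.0
with_offset = np.linspace(-90 + dlat / 2, 90 - dlat / 2, int(180 / dlat))
without_offset = np.linspace(-90, 90, int(180 / dlat) + 1)
assert with_offset.size == 90 and without_offset.size == 91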
+ lon_offset : bool + Offset the grid centers of the longitude coordinate w.r.t. Greenwich + meridian by half a grid step. + This argument is ignored if `target_grid` is a cube or file. Returns ------- @@ -144,22 +195,10 @@ def regrid(src_cube, target_grid, scheme): See Also -------- - vinterp : Perform vertical regridding. + extract_levels : Perform vertical regridding. """ - if target_grid is None and scheme is None: - # nop - return src_cube - - if target_grid is None: - emsg = 'A target grid must be specified for horizontal regridding.' - raise ValueError(emsg) - - if scheme is None: - emsg = 'A scheme must be specified for horizontal regridding.' - raise ValueError(emsg) - - if horizontal_schemes.get(scheme.lower()) is None: + if HORIZONTAL_SCHEMES.get(scheme.lower()) is None: emsg = 'Unknown regridding scheme, got {!r}.' raise ValueError(emsg.format(scheme)) @@ -169,34 +208,38 @@ def regrid(src_cube, target_grid, scheme): else: # Generate a target grid from the provided cell-specification, # and cache the resulting stock cube for later use. - target_grid = _cache.setdefault(target_grid, - _stock_cube(target_grid)) + target_grid = _CACHE.setdefault( + target_grid, + _stock_cube(target_grid, lat_offset, lon_offset), + ) # Align the target grid coordinate system to the source # coordinate system. - src_cs = src_cube.coord_system() + src_cs = cube.coord_system() xcoord = target_grid.coord(axis='x', dim_coords=True) ycoord = target_grid.coord(axis='y', dim_coords=True) xcoord.coord_system = src_cs ycoord.coord_system = src_cs if not isinstance(target_grid, iris.cube.Cube): - emsg = 'Expecting a cube or cell-specification, got {}.' - raise ValueError(emsg.format(type(target_grid))) + raise ValueError('Expecting a cube, got {}.'.format(target_grid)) # Unstructured regridding requires x2 2d spatial coordinates, # so ensure to purge any 1d native spatial dimension coordinates # for the regridder. if scheme == 'unstructured_nearest': for axis in ['x', 'y']: - coords = src_cube.coords(axis=axis, dim_coords=True) + coords = cube.coords(axis=axis, dim_coords=True) if coords: [coord] = coords - src_cube.remove_coord(coord) + cube.remove_coord(coord) # Perform the horizontal regridding. - result = src_cube.regrid(target_grid, horizontal_schemes[scheme]) + if _attempt_irregular_regridding(cube, scheme): + cube = esmpy_regrid(cube, target_grid, scheme) + else: + cube = cube.regrid(target_grid, HORIZONTAL_SCHEMES[scheme]) - return result + return cube def _create_cube(src_cube, data, levels): @@ -281,21 +324,63 @@ def _create_cube(src_cube, data, levels): return result -def vinterp(src_cube, levels, scheme): +def _vertical_interpolate(cube, levels, interpolation, extrapolation): + """Perform vertical interpolation.""" + # Determine the source levels and axis for vertical interpolation. + src_levels = cube.coord(axis='z', dim_coords=True) + z_axis, = cube.coord_dims(src_levels) + + # Broadcast the 1d source cube vertical coordinate to fully + # describe the spatial extent that will be interpolated. + broadcast_shape = cube.shape[z_axis:] + reshape = [1] * len(broadcast_shape) + reshape[0] = cube.shape[z_axis] + src_levels_reshaped = src_levels.points.reshape(reshape) + src_levels_broadcast = np.broadcast_to(src_levels_reshaped, + broadcast_shape) + + # force mask onto data as nan's + if np.ma.is_masked(cube.data): + cube.data[cube.data.mask] = np.nan + + # Now perform the actual vertical interpolation. 
+ new_data = stratify.interpolate( + levels, + src_levels_broadcast, + cube.data, + axis=z_axis, + interpolation=interpolation, + extrapolation=extrapolation) + + # Calculate the mask based on the any NaN values in the interpolated data. + mask = np.isnan(new_data) + + if np.any(mask): + # Ensure that the data is masked appropriately. + new_data = np.ma.array(new_data, mask=mask, fill_value=_MDI) + + # Construct the resulting cube with the interpolated data. + return _create_cube(cube, new_data, levels.astype(float)) + + +def extract_levels(cube, levels, scheme): """ Perform vertical interpolation. Parameters ---------- - src_cube : cube + cube : cube The source cube to be vertically interpolated. levels : array One or more target levels for the vertical interpolation. Assumed to be in the same S.I. units of the source cube vertical dimension coordinate. scheme : str - The vertical interpolation scheme to perform. Currently supported - schemes are 'linear' or 'nearest'. + The vertical interpolation scheme to use. Choose from + 'linear', + 'nearest', + 'nearest_horizontal_extrapolate_vertical', + 'linear_horizontal_extrapolate_vertical'. Returns ------- @@ -306,25 +391,10 @@ def vinterp(src_cube, levels, scheme): regrid : Perform horizontal regridding. """ - # Default to passing thru the original source cube. - result = src_cube - - if levels is None and scheme is None: - # nop - return src_cube - - if levels is None: - emsg = 'Target levels must be specified for vertical interpolation.' - raise ValueError(emsg) - - if scheme is None: - emsg = 'A scheme must be specified for vertical interpolation.' - raise ValueError(emsg) - - if scheme not in vertical_schemes: + if scheme not in VERTICAL_SCHEMES: emsg = 'Unknown vertical interpolation scheme, got {!r}. ' emsg += 'Possible schemes: {!r}' - raise ValueError(emsg.format(scheme, list(vertical_schemes))) + raise ValueError(emsg.format(scheme, VERTICAL_SCHEMES)) # This allows us to put level 0. to load the ocean surface. extrap_scheme = 'nan' @@ -340,62 +410,26 @@ def vinterp(src_cube, levels, scheme): levels = np.array(levels, ndmin=1) # Get the source cube vertical coordinate, if available. - src_levels = src_cube.coord(axis='z', dim_coords=True) - - # Only perform vertical extraction/interploation if the source - # and target levels are not "similar" enough. - if src_levels.shape != levels.shape or \ - not np.allclose(src_levels.points, levels): - - # Determine whether we can simply extract the target levels, - # if they *all* exist in the source cube, otherwise - # perform vertical interpolation. - if set(levels).issubset(set(src_levels.points)): - name = src_levels.name() - coord_values = {name: lambda cell: cell.point in set(levels)} - constraint = iris.Constraint(coord_values=coord_values) - result = src_cube.extract(constraint) - - # Ensure the constraint did not fail. - if not isinstance(result, iris.cube.Cube): - emsg = 'Failed to extract levels {!r} from cube {!r}.' - raise ValueError(emsg.format(list(levels), name)) - else: - # Determine the source axis for vertical interpolation. - z_axis, = src_cube.coord_dims(src_levels) - - # Broadcast the 1d source cube vertical coordinate to fully - # describe the spatial extent that will be interpolated. 
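# Typical use of extract_levels above, sketched on a toy cube whose target
# levels all exist so that only the extraction branch runs (the import path
# assumes this diff's layout):
import iris
import numpy as np
from esmvaltool.preprocessor._regrid import extract_levels

plev = iris.coords.DimCoord([100000., 85000., 50000., 25000.],
                            standard_name='air_pressure', units='Pa')
toy = iris.cube.Cube(np.arange(4.), dim_coords_and_dims=[(plev, 0)])
subset = extract_levels(toy, np.array([100000., 85000.]), scheme='linear')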
- broadcast_shape = src_cube.shape[z_axis:] - reshape = [1] * len(broadcast_shape) - reshape[0] = src_cube.shape[z_axis] - src_levels_reshaped = src_levels.points.reshape(reshape) - src_levels_broadcast = np.broadcast_to(src_levels_reshaped, - broadcast_shape) - - # force mask onto data as nan's - if np.ma.is_masked(src_cube.data): - src_cube.data[src_cube.data.mask] = np.nan - - # Now perform the actual vertical interpolation. - new_data = stratify.interpolate( - levels, - src_levels_broadcast, - src_cube.data, - axis=z_axis, - interpolation=scheme, - extrapolation=extrap_scheme) - - # Calculate the mask based on the any - # NaN values in the interpolated data. - mask = np.isnan(new_data) - - if np.any(mask): - # Ensure that the data is masked appropriately. - new_data = ma.array(new_data, mask=mask, fill_value=_MDI) - - # Construct the resulting cube with the interpolated data. - result = _create_cube(src_cube, new_data, levels.astype(float)) + src_levels = cube.coord(axis='z', dim_coords=True) + + if (src_levels.shape == levels.shape + and np.allclose(src_levels.points, levels)): + # Only perform vertical extraction/interploation if the source + # and target levels are not "similar" enough. + result = cube + elif set(levels).issubset(set(src_levels.points)): + # If all target levels exist in the source cube, simply extract them. + name = src_levels.name() + coord_values = {name: lambda cell: cell.point in set(levels)} + constraint = iris.Constraint(coord_values=coord_values) + result = cube.extract(constraint) + # Ensure the constraint did not fail. + if not result: + emsg = 'Failed to extract levels {!r} from cube {!r}.' + raise ValueError(emsg.format(list(levels), name)) + else: + # As a last resort, perform vertical interpolation. + result = _vertical_interpolate(cube, levels, scheme, extrap_scheme) return result @@ -422,8 +456,9 @@ def get_cmor_levels(cmor_table, coordinate): """ if cmor_table not in CMOR_TABLES: - raise ValueError("Level definition cmor_table '{}' not available" - .format(cmor_table)) + raise ValueError( + "Level definition cmor_table '{}' not available".format( + cmor_table)) if coordinate not in CMOR_TABLES[cmor_table].coords: raise ValueError('Coordinate {} not available for {}'.format( @@ -433,22 +468,25 @@ def get_cmor_levels(cmor_table, coordinate): if cmor.requested: return [float(level) for level in cmor.requested] - elif cmor.value: + if cmor.value: return [float(cmor.value)] - else: - raise ValueError('Coordinate {} in {} does not have requested values' - .format(coordinate, cmor_table)) + + raise ValueError( + 'Coordinate {} in {} does not have requested values'.format( + coordinate, cmor_table)) -def get_reference_levels(filename, coordinate='air_pressure'): +def get_reference_levels(filename, + project, + dataset, + short_name, + fix_dir): """Get level definition from a CMOR coordinate. Parameters ---------- filename: str Path to the reference file - coordinate: str - Coordinate name Returns ------- @@ -461,9 +499,12 @@ def get_reference_levels(filename, coordinate='air_pressure'): levels or the string is badly formatted. 
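# get_cmor_levels above resolves a CMOR coordinate definition to a plain
# list of floats; an illustrative call (the table and coordinate keys must
# exist in the locally available CMOR tables):
from esmvaltool.preprocessor._regrid import get_cmor_levels

levels = get_cmor_levels('CMIP6', 'plev19')
# -> [100000.0, 92500.0, 85000.0, ...] for the 19 standard pressure levels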
""" + filename = fix_file(filename, short_name, project, dataset, fix_dir) + cubes = load(filename, callback=concatenate_callback) + cubes = fix_metadata(cubes, short_name, project, dataset) + cube = cubes[0] try: - coord = iris.load_cube(filename).coord(coordinate) + coord = cube.coord(axis='Z') except iris.exceptions.CoordinateNotFoundError: - raise ValueError('Coordinate {} not available in {}'.format( - coordinate, filename)) + raise ValueError('z-coord not available in {}'.format(filename)) return coord.points.tolist() diff --git a/esmvaltool/preprocessor/_regrid_esmpy.py b/esmvaltool/preprocessor/_regrid_esmpy.py new file mode 100755 index 0000000000..4ac6192451 --- /dev/null +++ b/esmvaltool/preprocessor/_regrid_esmpy.py @@ -0,0 +1,324 @@ +# -*- coding: utf-8 -*- +"""Provides regridding for irregular grids.""" + +import ESMF +import iris +import numpy as np + +from ._mapping import get_empty_data, map_slices, ref_to_dims_index + + +ESMF_MANAGER = ESMF.Manager(debug=False) + +ESMF_LON, ESMF_LAT = 0, 1 + +ESMF_REGRID_METHODS = { + 'linear': ESMF.RegridMethod.BILINEAR, + 'area_weighted': ESMF.RegridMethod.CONSERVE, + 'nearest': ESMF.RegridMethod.NEAREST_STOD, +} + +MASK_REGRIDDING_MASK_VALUE = { + ESMF.RegridMethod.BILINEAR: np.array([1]), + ESMF.RegridMethod.CONSERVE: np.array([1]), + ESMF.RegridMethod.NEAREST_STOD: np.array([]), +} + +# ESMF_REGRID_METHODS = { +# 'bilinear': ESMF.RegridMethod.BILINEAR, +# 'patch': ESMF.RegridMethod.PATCH, +# 'conserve': ESMF.RegridMethod.CONSERVE, +# 'nearest_stod': ESMF.RegridMethod.NEAREST_STOD, +# 'nearest_dtos': ESMF.RegridMethod.NEAREST_DTOS, +# } + + +def cf_2d_bounds_to_esmpy_corners(bounds, circular): + """Convert cf style 2d bounds to normal (esmpy style) corners.""" + no_lat_points, no_lon_points = bounds.shape[:2] + no_lat_bounds = no_lat_points + 1 + if circular: + no_lon_bounds = no_lon_points + else: + no_lon_bounds = no_lon_points + 1 + esmpy_corners = np.empty((no_lon_bounds, no_lat_bounds)) + esmpy_corners[:no_lon_points, :no_lat_points] = bounds[:, :, 0].T + esmpy_corners[:no_lon_points, no_lat_points:] = bounds[-1:, :, 3].T + esmpy_corners[no_lon_points:, :no_lat_points] = bounds[:, -1:, 1].T + esmpy_corners[no_lon_points:, no_lat_points:] = bounds[-1:, -1:, 2].T + return esmpy_corners + + +def coords_iris_to_esmpy(lat, lon, circular): + """Build ESMF compatible coordinate information from iris coords.""" + dim = lat.ndim + if lon.ndim != dim: + msg = 'Different dimensions in latitude({}) and longitude({}) coords.' + raise ValueError(msg.format(lat.ndim, lon.ndim)) + if dim == 1: + for coord in [lat, lon]: + if not coord.has_bounds(): + coord.guess_bounds() + esmpy_lat, esmpy_lon = np.meshgrid(lat.points, lon.points) + lat_corners = np.concatenate([lat.bounds[:, 0], lat.bounds[-1:, 1]]) + if circular: + lon_corners = lon.bounds[:, 0] + else: + lon_corners = np.concatenate([lon.bounds[:, 0], + lon.bounds[-1:, 1]]) + esmpy_lat_corners, esmpy_lon_corners = np.meshgrid(lat_corners, + lon_corners) + elif dim == 2: + esmpy_lat, esmpy_lon = lat.points.T.copy(), lon.points.T.copy() + esmpy_lat_corners = cf_2d_bounds_to_esmpy_corners(lat.bounds, circular) + esmpy_lon_corners = cf_2d_bounds_to_esmpy_corners(lon.bounds, circular) + else: + raise NotImplementedError('Coord dimension is {}. Expected 1 or 2.' 
+ ''.format(dim)) + return esmpy_lat, esmpy_lon, esmpy_lat_corners, esmpy_lon_corners + + +def get_grid(esmpy_lat, esmpy_lon, + esmpy_lat_corners, esmpy_lon_corners, circular): + """Build EMSF grid from given coordinate information.""" + if circular: + num_peri_dims = 1 + else: + num_peri_dims = 0 + grid = ESMF.Grid(np.array(esmpy_lat.shape), + num_peri_dims=num_peri_dims, + staggerloc=[ESMF.StaggerLoc.CENTER]) + grid.get_coords(ESMF_LON)[...] = esmpy_lon + grid.get_coords(ESMF_LAT)[...] = esmpy_lat + grid.add_coords([ESMF.StaggerLoc.CORNER]) + grid_lon_corners = grid.get_coords(ESMF_LON, + staggerloc=ESMF.StaggerLoc.CORNER) + grid_lat_corners = grid.get_coords(ESMF_LAT, + staggerloc=ESMF.StaggerLoc.CORNER) + grid_lon_corners[...] = esmpy_lon_corners + grid_lat_corners[...] = esmpy_lat_corners + grid.add_item(ESMF.GridItem.MASK, ESMF.StaggerLoc.CENTER) + return grid + + +def is_lon_circular(lon): + """Determine if longitudes are circular.""" + if isinstance(lon, iris.coords.DimCoord): + circular = lon.circular + elif isinstance(lon, iris.coords.AuxCoord): + if lon.ndim == 1: + seam = lon.bounds[-1, 1] - lon.bounds[0, 0] + elif lon.ndim == 2: + seam = (lon.bounds[1:-1, -1, (1, 2)] + - lon.bounds[1:-1, 0, (0, 3)]) + else: + raise NotImplementedError('AuxCoord longitude is higher ' + 'dimensional than 2d. Giving up.') + circular = np.alltrue(abs(seam) % 360. < 1.e-3) + else: + raise ValueError('longitude is neither DimCoord nor AuxCoord. ' + 'Giving up.') + return circular + + +def cube_to_empty_field(cube): + """Build an empty ESMF field from a cube.""" + lat = cube.coord('latitude') + lon = cube.coord('longitude') + circular = is_lon_circular(lon) + esmpy_coords = coords_iris_to_esmpy(lat, lon, circular) + grid = get_grid(*esmpy_coords, circular=circular) + field = ESMF.Field(grid, + name=cube.long_name, + staggerloc=ESMF.StaggerLoc.CENTER) + return field + + +def get_representant(cube, ref_to_slice): + """Get a representative slice from a cube.""" + slice_dims = ref_to_dims_index(cube, ref_to_slice) + rep_ind = [0] * cube.ndim + for dim in slice_dims: + rep_ind[dim] = slice(None, None) + rep_ind = tuple(rep_ind) + return cube[rep_ind] + + +def build_regridder_2d(src_rep, dst_rep, regrid_method, mask_threshold): + """Build regridder for 2d regridding.""" + dst_field = cube_to_empty_field(dst_rep) + src_field = cube_to_empty_field(src_rep) + regridding_arguments = { + 'srcfield': src_field, + 'dstfield': dst_field, + 'regrid_method': regrid_method, + 'unmapped_action': ESMF.UnmappedAction.IGNORE, + 'ignore_degenerate': True, + } + if np.ma.is_masked(src_rep.data): + src_field.data[...] = ~src_rep.data.mask.T + src_mask = src_field.grid.get_item(ESMF.GridItem.MASK, + ESMF.StaggerLoc.CENTER) + src_mask[...] = src_rep.data.mask.T + center_mask = dst_field.grid.get_item(ESMF.GridItem.MASK, + ESMF.StaggerLoc.CENTER) + center_mask[...] = 0 + mask_regridder = ESMF.Regrid( + src_mask_values=MASK_REGRIDDING_MASK_VALUE[regrid_method], + dst_mask_values=np.array([]), + **regridding_arguments) + regr_field = mask_regridder(src_field, dst_field) + dst_mask = regr_field.data[...].T < mask_threshold + center_mask[...] = dst_mask.T + else: + dst_mask = False + field_regridder = ESMF.Regrid(src_mask_values=np.array([1]), + dst_mask_values=np.array([1]), + **regridding_arguments) + + def regridder(src): + """Regrid 2d for irregular grids.""" + res = get_empty_data(dst_rep.shape, src.dtype) + data = src.data + if np.ma.is_masked(data): + data = data.data + src_field.data[...] 
= data.T + regr_field = field_regridder(src_field, dst_field) + res.data[...] = regr_field.data[...].T + res.mask[...] = dst_mask + return res + + return regridder + + +def build_regridder_3d(src_rep, dst_rep, regrid_method, mask_threshold): + # pylint: disable=too-many-locals + # The necessary refactoring will be done for the full 3d regridding. + """Build regridder for 2.5d regridding.""" + esmf_regridders = [] + no_levels = src_rep.shape[0] + for level in range(no_levels): + esmf_regridders.append( + build_regridder_2d(src_rep[level], dst_rep[level], + regrid_method, mask_threshold) + ) + + def regridder(src): + """Regrid 2.5d for irregular grids.""" + res = get_empty_data(dst_rep.shape, src.dtype) + for i, esmf_regridder in enumerate(esmf_regridders): + res[i, ...] = esmf_regridder(src[i]) + return res + + return regridder + + +def build_regridder(src_rep, dst_rep, method, mask_threshold=.99): + """Build regridders from representants.""" + regrid_method = ESMF_REGRID_METHODS[method] + if src_rep.ndim == 2: + regridder = build_regridder_2d(src_rep, dst_rep, + regrid_method, mask_threshold) + elif src_rep.ndim == 3: + regridder = build_regridder_3d(src_rep, dst_rep, + regrid_method, mask_threshold) + return regridder + + +def get_grid_representant(cube, horizontal_only=False): + """Extract the spatial grid from a cube.""" + horizontal_slice = ['latitude', 'longitude'] + ref_to_slice = horizontal_slice + if not horizontal_only: + try: + cube_z_coord = cube.coord(axis='Z') + n_zdims = len(cube.coord_dims(cube_z_coord)) + if n_zdims == 0: + # scalar z coordinate, go on with 2d regridding + pass + elif n_zdims == 1: + ref_to_slice = [cube_z_coord] + horizontal_slice + else: + raise ValueError("Cube has multidimensional Z coordinate.") + except iris.exceptions.CoordinateNotFoundError: + # no z coordinate, go on with 2d regridding + pass + return get_representant(cube, ref_to_slice) + + +def get_grid_representants(src, dst): + """ + Construct cubes representing the source and destination grid. + + This method constructs two new cubes that representant the grids, + i.e. the spatial dimensions of the given cubes. + + Parameters + ---------- + src: :class:`iris.cube.Cube` + Cube to be regridded. Typically a time series of 2d or 3d slices. + dst: :class:`iris.cube.Cube` + Cube defining the destination grid. Usually just a 2d or 3d cube. + + Returns + ------- + tuple of :class:`iris.cube.Cube`: + A tuple containing two cubes, representing the source grid and the + destination grid, respectively. + """ + src_rep = get_grid_representant(src) + dst_horiz_rep = get_grid_representant(dst, horizontal_only=True) + if src_rep.ndim == 3: + dst_shape = (src_rep.shape[0],) + dim_coords = [src_rep.coord(dimensions=[0], dim_coords=True)] + else: + dst_shape = tuple() + dim_coords = [] + dst_shape += dst_horiz_rep.shape + dim_coords += dst_horiz_rep.coords(dim_coords=True) + dim_coords_and_dims = [(c, i) for i, c in enumerate(dim_coords)] + dst_rep = iris.cube.Cube( + data=get_empty_data(dst_shape, src.dtype), + standard_name=src.standard_name, + long_name=src.long_name, + var_name=src.var_name, + units=src.units, + attributes=src.attributes, + cell_methods=src.cell_methods, + dim_coords_and_dims=dim_coords_and_dims, + ) + return src_rep, dst_rep + + +def regrid(src, dst, method='linear'): + """ + Regrid src_cube to the grid defined by dst_cube. + + Regrid the data in src_cube onto the grid defined by dst_cube. + + Parameters + ---------- + src_cube: :class:`iris.cube.Cube` + Source data. 
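# Two of the esmpy building blocks above in isolation. For 1-d coordinates,
# coords_iris_to_esmpy reduces to a meshgrid plus one corner vector per
# axis, and is_lon_circular compares the bounds seam modulo 360:
import numpy as np

lat_pts = np.array([-45., 0., 45.])
lon_pts = np.array([0., 120., 240.])
esmpy_lat, esmpy_lon = np.meshgrid(lat_pts, lon_pts)
assert esmpy_lat.shape == (3, 3)                 # (lon, lat) ordering

lon_bounds = np.array([[0., 120.], [120., 240.], [240., 360.]])
seam = lon_bounds[-1, 1] - lon_bounds[0, 0]      # 360.0
assert abs(seam) % 360. < 1.e-3                  # -> circular longitude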
Must have latitude and longitude coords. + These can be 1d or 2d and should have bounds. + dst_cube: :class:`iris.cube.Cube` + Defines the target grid. + regrid_method: + Selects the regridding method. + Can be 'linear', 'area_weighted', + or 'nearest'. See ESMPy_. + + Returns + ------- + :class:`iris.cube.Cube`: + The regridded cube. + + + .. _ESMPy: http://www.earthsystemmodeling.org/ + esmf_releases/non_public/ESMF_7_0_0/esmpy_doc/html/ + RegridMethod.html#ESMF.api.constants.RegridMethod + """ + src_rep, dst_rep = get_grid_representants(src, dst) + regridder = build_regridder(src_rep, dst_rep, method) + res = map_slices(src, regridder, src_rep, dst_rep) + return res diff --git a/esmvaltool/preprocessor/_time.py b/esmvaltool/preprocessor/_time.py new file mode 100644 index 0000000000..d706feebf5 --- /dev/null +++ b/esmvaltool/preprocessor/_time.py @@ -0,0 +1,297 @@ +"""Time operations on cubes. + +Allows for selecting data subsets using certain time bounds; +constructing seasonal and area averages. +""" +import datetime +import logging + +import cf_units +import iris +import iris.coord_categorisation +import numpy as np + +logger = logging.getLogger(__name__) + + +def extract_time(cube, start_year, start_month, start_day, end_year, end_month, + end_day): + """Extract a time range from a cube. + + Parameters + ---------- + cube: iris.cube.Cube + input cube. + start_year: int + start year + start_month: int + start month + start_day: int + start day + end_year: int + end year + end_month: int + end month + end_day: int + end day + + Returns + ------- + iris.cube.Cube + Sliced cube. + + """ + time_units = cube.coord('time').units + if time_units.calendar == '360_day': + if start_day > 30: + start_day = 30 + if end_day > 30: + end_day = 30 + start_date = datetime.datetime( + int(start_year), int(start_month), int(start_day)) + end_date = datetime.datetime(int(end_year), int(end_month), int(end_day)) + + t_1 = time_units.date2num(start_date) + t_2 = time_units.date2num(end_date) + constraint = iris.Constraint( + time=lambda t: t_1 < time_units.date2num(t.point) < t_2) + + cube_slice = cube.extract(constraint) + if cube_slice is None: + start_cube = str(cube.coord('time').points[0]) + end_cube = str(cube.coord('time').points[-1]) + raise ValueError( + f"Time slice {start_date} to {end_date} is outside cube " + f"time bounds {start_cube} to {end_cube}.") + + # Issue when time dimension was removed when only one point as selected. + if cube_slice.ndim != cube.ndim: + time_1 = cube.coord('time') + time_2 = cube_slice.coord('time') + if time_1 == time_2: + logger.debug('No change needed to time.') + return cube + + return cube_slice + + +def extract_season(cube, season): + """ + Slice cube to get only the data belonging to a specific season. + + Parameters + ---------- + cube: iris.cube.Cube + Original data + season: str + Season to extract. Available: DJF, MAM, JJA, SON + """ + if not cube.coords('clim_season'): + iris.coord_categorisation.add_season(cube, 'time', name='clim_season') + if not cube.coords('season_year'): + iris.coord_categorisation.add_season_year( + cube, 'time', name='season_year') + return cube.extract(iris.Constraint(clim_season=season.lower())) + + +def extract_month(cube, month): + """ + Slice cube to get only the data belonging to a specific month. 
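# Typical use of extract_time and extract_season above, sketched on a toy
# monthly cube (assumes iris constraint cells expose datetime-like points,
# as in iris 2; import path follows this diff's layout):
import cf_units
import iris
import numpy as np
from esmvaltool.preprocessor._time import extract_season, extract_time

units = cf_units.Unit('days since 2000-01-01', calendar='standard')
time = iris.coords.DimCoord(np.arange(15., 720., 30.),
                            standard_name='time', units=units)
toy = iris.cube.Cube(np.zeros(time.shape), dim_coords_and_dims=[(time, 0)])
sliced = extract_time(toy, 2000, 3, 1, 2000, 9, 1)
winter = extract_season(toy, 'DJF')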
+ + Parameters + ---------- + cube: iris.cube.Cube + Original data + month: int + Month to extract as a number from 1 to 12 + """ + if month not in range(1, 13): + raise ValueError('Please provide a month number between 1 and 12.') + return cube.extract(iris.Constraint(month_number=month)) + + +def get_time_weights(cube): + """ + Compute the weighting of the time axis. + + Parameters + ---------- + cube: iris.cube.Cube + input cube. + + Returns + ------- + numpy.array + Array of time weights for averaging. + """ + time = cube.coord('time') + time_thickness = time.bounds[..., 1] - time.bounds[..., 0] + + # The weights need to match the dimensionality of the cube. + slices = [None for i in cube.shape] + coord_dim = cube.coord_dims('time')[0] + slices[coord_dim] = slice(None) + time_thickness = np.abs(time_thickness[tuple(slices)]) + ones = np.ones_like(cube.data) + time_weights = time_thickness * ones + return time_weights + + +def time_average(cube): + """ + Compute time average. + + Get the time average over the entire cube. The average is weighted by the + bounds of the time coordinate. + + Parameters + ---------- + cube: iris.cube.Cube + input cube. + + Returns + ------- + iris.cube.Cube + time averaged cube. + """ + time_weights = get_time_weights(cube) + + return cube.collapsed('time', iris.analysis.MEAN, weights=time_weights) + + +# get the seasonal mean +def seasonal_mean(cube): + """ + Compute seasonal means with MEAN. + + Chunks time in 3-month periods and computes means over them; + + Arguments + --------- + cube: iris.cube.Cube + input cube. + + Returns + ------- + iris.cube.Cube + Seasonal mean cube + """ + if not cube.coords('clim_season'): + iris.coord_categorisation.add_season(cube, 'time', name='clim_season') + if not cube.coords('season_year'): + iris.coord_categorisation.add_season_year( + cube, 'time', name='season_year') + cube = cube.aggregated_by(['clim_season', 'season_year'], + iris.analysis.MEAN) + + # CMOR Units are days so we are safe to operate on days + # Ranging on [90, 92] days makes this calendar-independent + def spans_three_months(time): + """Check for three months""" + return 90 <= (time.bound[1] - time.bound[0]).days <= 92 + + three_months_bound = iris.Constraint(time=spans_three_months) + return cube.extract(three_months_bound) + + +def regrid_time(cube, frequency): + """ + Align time axis for cubes so they can be subtracted. + + Operations on time units, calendars, time points and auxiliary + coordinates so that any cube from cubes can be subtracted from any + other cube from cubes. Currently this function supports only monthly + (frequency=mon) and daily (frequency=day) data time frequencies. 
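# The weights used by time_average above are plain bound widths; weighting
# matters whenever time cells differ in length, e.g. January vs February:
import numpy as np

bounds = np.array([[0., 31.], [31., 59.]])        # Jan, Feb in days
weights = bounds[:, 1] - bounds[:, 0]             # [31., 28.]
mean = np.average([1.0, 2.0], weights=weights)    # ~1.475, not 1.5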
+ + Arguments + --------- + cube: iris.cube.Cube + frequency: str + data frequency: mon or day + + Returns + ------- + iris.cube.Cube instance + """ + # fix calendars + cube.coord('time').units = cf_units.Unit( + cube.coord('time').units.origin, + calendar='gregorian', + ) + + # standardize time points + time_c = [cell.point for cell in cube.coord('time').cells()] + if frequency == 'mon': + cube.coord('time').cells = [ + datetime.datetime(t.year, t.month, 15, 0, 0, 0) for t in time_c + ] + elif frequency == 'day': + cube.coord('time').cells = [ + datetime.datetime(t.year, t.month, t.day, 0, 0, 0) for t in time_c + ] + # TODO add correct handling of hourly data + # this is a bit more complicated since it can be 3h, 6h etc + cube.coord('time').points = [ + cube.coord('time').units.date2num(cl) + for cl in cube.coord('time').cells + ] + + # uniformize bounds + cube.coord('time').bounds = None + cube.coord('time').guess_bounds() + + # remove aux coords that will differ + reset_aux = ['day_of_month', 'day_of_year'] + for auxcoord in cube.aux_coords: + if auxcoord.long_name in reset_aux: + cube.remove_coord(auxcoord) + + # re-add the converted aux coords + iris.coord_categorisation.add_day_of_month( + cube, cube.coord('time'), name='day_of_month') + iris.coord_categorisation.add_day_of_year( + cube, cube.coord('time'), name='day_of_year') + + return cube + + +def annual_mean(cube, decadal=False): + """ + Compute annual or decadal means. + + Note that this function does not weight the annual or decadal mean if + uneven time periods are present. Ie, all data inside the year/decade + are treated equally. + + Parameters + ---------- + cube: iris.cube.Cube + input cube. + decadal: bool + Annual average (:obj:`True`) or decadal average (:obj:`False`) + Returns + ------- + iris.cube.Cube + Annual mean cube + """ + # time_weights = get_time_weights(cube) + + # TODO: Add weighting in time dimension. See iris issue 3290 + # https://github.com/SciTools/iris/issues/3290 + + if decadal: + if not cube.coords('decade'): + + def get_decade(coord, value): + """Callback function to get decades from cube.""" + date = coord.units.num2date(value) + return date.year - date.year % 10 + + iris.coord_categorisation.add_categorised_coord( + cube, 'decade', 'time', get_decade) + + return cube.aggregated_by('decade', iris.analysis.MEAN) + + if not cube.coords('year'): + iris.coord_categorisation.add_year(cube, 'time') + return cube.aggregated_by('year', iris.analysis.MEAN) diff --git a/esmvaltool/preprocessor/_time_area.py b/esmvaltool/preprocessor/_time_area.py deleted file mode 100644 index acfd34a0c8..0000000000 --- a/esmvaltool/preprocessor/_time_area.py +++ /dev/null @@ -1,220 +0,0 @@ -""" -Time and area operations on data cubes - -Allows for selecting data subsets using certain time bounds; -selecting geographical regions; constructing seasonal and area -averages; checks on data time frequencies (daily, monthly etc) -""" -from datetime import timedelta -import iris -import iris.coord_categorisation -import numpy as np - - -# slice cube over a restricted time period -def time_slice(mycube, yr1, mo1, d1, yr2, mo2, d2): - """ - Slice cube on time - - Function that returns a subset of the original cube (slice) - given two dates of interest date1 and date2 - date1 and date2 should be given in a yr,mo,d (int)format e.g. 
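# regrid_time above snaps monthly time points to the 15th at 00:00; the
# normalisation for a single point looks like this:
import datetime
import cf_units

units = cf_units.Unit('days since 1850-01-01', calendar='gregorian')
point = units.num2date(45.7)                      # 1850-02-15 16:48
snapped = datetime.datetime(point.year, point.month, 15)
assert units.date2num(snapped) == 45.0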
- time_slice(cube,2006,2,2,2010,1,1) or - time_slice(cube,'2006','2','2','2010','1','1'); - - Returns a cube - """ - import datetime - time_units = mycube.coord('time').units - if time_units.calendar == '360_day': - if d1 > 30: - d1 = 30 - if d2 > 30: - d2 = 30 - my_date1 = datetime.datetime(int(yr1), int(mo1), int(d1)) - my_date2 = datetime.datetime(int(yr2), int(mo2), int(d2)) - - t1 = time_units.date2num(my_date1) - t2 = time_units.date2num(my_date2) - # TODO replace the block below for when using iris 2.0 - # my_constraint = iris.Constraint(time=lambda t: ( - # t1 < time_units.date2num(t.point) < t2)) - my_constraint = iris.Constraint(time=lambda t: ( - t1 < t.point < t2)) - cube_slice = mycube.extract(my_constraint) - return cube_slice - - -def extract_season(cube, season): - """ - Slice cube to get only the data belonging to a specific season - - Parameters - ---------- - cube: iris.cube.Cube - Original data - season: str - Season to extract. Available: DJF, MAM, JJA, SON - """ - iris.coord_categorisation.add_season(cube, 'time', name='clim_season') - season_cube = cube.extract(iris.Constraint(clim_season=season.lower())) - return season_cube - - -def extract_month(mycube, month): - """ - Slice cube to get only the data belonging to a specific month - - Parameters - ---------- - cube: iris.cube.Cube - Original data - month: int - Month to extract as a number from 1 to 12 - """ - season_cube = mycube.extract(iris.Constraint(month_number=month)) - return season_cube - - -# get the time average -def time_average(cube): - """ - Compute time average - - Get the time average over the entire cube. The average is weighted by the - bounds of the time coordinate. - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - Returns - ------- - iris.cube.Cube - time averaged cube. - """ - time = cube.coord('time') - time_thickness = time.bounds[..., 1] - time.bounds[..., 0] - - # The weights need to match the dimensionality of the cube. - slices = [None for i in cube.shape] - coord_dim = cube.coord_dims('time')[0] - slices[coord_dim] = slice(None) - time_thickness = np.abs(time_thickness[tuple(slices)]) - ones = np.ones_like(cube.data) - time_weights = time_thickness * ones - - return cube.collapsed('time', iris.analysis.MEAN, - weights=time_weights) - - -# get the probability a value is greater than a threshold -def proportion_greater(mycube, coord1, threshold): - """ - Proportion greater - - Return the probability that a cetain variable coord1 (string) - is greater than a threshold threshold (float or string), - across a cube mycube; returns a cube - """ - thr = float(threshold) - result = mycube.collapsed( - coord1, iris.analysis.PROPORTION, function=lambda values: values > thr) - return result - - -# get the seasonal mean -def seasonal_mean(cube): - """ - Function to compute seasonal means with MEAN - - Chunks time in 3-month periods and computes means over them; - - Arguments - --------- - cube: iris.cube.Cube - input cube. 
- - Returns - ------- - iris.cube.Cube - Seasonal mean cube - """ - iris.coord_categorisation.add_season(cube, 'time', name='clim_season') - iris.coord_categorisation.add_season_year( - cube, 'time', name='season_year') - annual_seasonal_mean = cube.aggregated_by(['clim_season', 'season_year'], - iris.analysis.MEAN) - - def spans_three_months(time): - """Check for three months""" - return (time.bound[1] - time.bound[0]) == 2160 - - three_months_bound = iris.Constraint(time=spans_three_months) - return annual_seasonal_mean.extract(three_months_bound) - - -# set of time axis checks -# funcs that perform checks on the time axis -# of data cubes and validates the type of data: -# daily, monthly, seasonal or yearly -class NoBoundsError(ValueError): - """OBS files dont have bounds""" - - pass - - -def is_daily(cube): - """Test whether the time coordinate contains only daily bound periods.""" - def is_day(bound): - """Count days""" - time_span = timedelta(days=(bound[1] - bound[0])) - return timedelta(days=1) == time_span - - if not cube.coord('time').has_bounds(): - raise NoBoundsError() - return all([is_day(bound) for bound in cube.coord('time').bounds]) - - -def is_monthly(cube): - """A month is a period of at least 28 days, up to 31 days.""" - def is_month(bound): - """Count months""" - time_span = timedelta(days=(bound[1] - bound[0])) - return timedelta(days=31) >= time_span >= timedelta(days=28) - - if not cube.coord('time').has_bounds(): - raise NoBoundsError() - return all([is_month(bound) for bound in cube.coord('time').bounds]) - - -def is_seasonal(cube): - """ - Check if data is seasonal - - A season is a period of 3 months, i.e. - at least 89 days, and up to 92 days. - """ - def is_season(bound): - """Count seasons""" - time_span = timedelta(days=(bound[1] - bound[0])) - is_seas = timedelta(days=31 + 30 + 31) >= time_span >= \ - timedelta(days=28 + 31 + 30) - return is_seas - - if not cube.coord('time').has_bounds(): - raise NoBoundsError() - return all([is_season(bound) for bound in cube.coord('time').bounds]) - - -def is_yearly(cube): - """A year is a period of at least 360 days, up to 366 days.""" - def is_year(bound): - """Count years""" - t_s = timedelta(days=(bound[1] - bound[0])) - return timedelta(days=365) == t_s or timedelta(days=360) == t_s - - if not cube.coord('time').has_bounds(): - raise NoBoundsError() - return all([is_year(bound) for bound in cube.coord('time').bounds]) diff --git a/esmvaltool/preprocessor/_volume.py b/esmvaltool/preprocessor/_volume.py new file mode 100644 index 0000000000..366450e37d --- /dev/null +++ b/esmvaltool/preprocessor/_volume.py @@ -0,0 +1,456 @@ +""" +Volume and z coordinate operations on data cubes. + +Allows for selecting data subsets using certain volume bounds; +selecting depth or height regions; constructing volumetric averages; +""" +from copy import deepcopy + +import logging + +import iris +import numpy as np + +logger = logging.getLogger(__name__) + + +def extract_volume(cube, z_min, z_max): + """ + Subset a cube based on a range of values in the z-coordinate. + + Function that subsets a cube on a box (z_min, z_max) + This function is a restriction of masked_cube_lonlat(); + Note that this requires the requested z-coordinate range to be the + same sign as the iris cube. ie, if the cube has z-coordinate as + negative, then z_min and z_max need to be negative numbers. + + Arguments + --------- + cube: iris.cube.Cube + input cube. + + z_min: float + minimum depth to extract. + + z_max: float + maximum depth to extract. 
+ + Returns + ------- + iris.cube.Cube + extracted cube. + """ + if z_min > z_max: + # minimum is below maximum, so switch them around + zmax = float(z_min) + zmin = float(z_max) + else: + zmax = float(z_max) + zmin = float(z_min) + + z_constraint = iris.Constraint( + coord_values={ + cube.coord(axis='Z'): lambda cell: zmin < cell.point < zmax}) + + return cube.extract(z_constraint) + + +def _create_cube_time(src_cube, data, times): + """ + Generate a new cube with the volume averaged data. + + The resultant cube is seeded with `src_cube` metadata and coordinates, + excluding any source coordinates that span the associated vertical + dimension. The `times` of interpolation are used along with the + associated source cube time coordinate metadata to add a new + time coordinate to the resultant cube. + + Based on the _create_cube method from _regrid.py. + + Parameters + ---------- + src_cube : cube + The source cube that was vertically interpolated. + data : array + The payload resulting from interpolating the source cube + over the specified times. + times : array + The array of times. + + Returns + ------- + cube + + .. note:: + + If there is only one level of interpolation, the resultant cube + will be collapsed over the associated vertical dimension, and a + scalar vertical coordinate will be added. + + """ + # Get the source cube vertical coordinate and associated dimension. + src_times = src_cube.coord('time') + t_dim, = src_cube.coord_dims(src_times) + + if data.shape[t_dim] != len(times): + emsg = ('Mismatch between data and times for data dimension {!r}, ' + 'got data shape {!r} with times shape {!r}.') + raise ValueError(emsg.format(t_dim, data.shape, times.shape)) + + # Construct the resultant cube with the interpolated data + # and the source cube metadata. + kwargs = deepcopy(src_cube.metadata)._asdict() + result = iris.cube.Cube(data, **kwargs) + + # Add the appropriate coordinates to the cube, excluding + # any coordinates that span the z-dimension of interpolation. + for coord in src_cube.dim_coords: + [dim] = src_cube.coord_dims(coord) + if dim != t_dim: + result.add_dim_coord(coord.copy(), dim) + + for coord in src_cube.aux_coords: + dims = src_cube.coord_dims(coord) + if t_dim not in dims: + result.add_aux_coord(coord.copy(), dims) + + for coord in src_cube.derived_coords: + dims = src_cube.coord_dims(coord) + if t_dim not in dims: + result.add_aux_coord(coord.copy(), dims) + + # Construct the new vertical coordinate for the interpolated + # z-dimension, using the associated source coordinate metadata. + kwargs = deepcopy(src_times._as_defn())._asdict() + + try: + coord = iris.coords.DimCoord(times, **kwargs) + result.add_dim_coord(coord, t_dim) + except ValueError: + coord = iris.coords.AuxCoord(times, **kwargs) + result.add_aux_coord(coord, t_dim) + + return result + + +def calculate_volume(cube): + """ + Calculate volume from a cube. + + This function is used when the volume netcdf fx_files can't be found. + """ + # #### + # Load depth field and figure out which dim is which. 
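# extract_volume above keeps only cells with zmin < depth < zmax, in the
# sign convention of the cube's own z coordinate; a toy example (import
# path per this diff):
import iris
import numpy as np
from esmvaltool.preprocessor._volume import extract_volume

depth = iris.coords.DimCoord([5., 50., 500.], standard_name='depth',
                             units='m')
toy = iris.cube.Cube(np.arange(3.), dim_coords_and_dims=[(depth, 0)])
shallow = extract_volume(toy, 0., 100.)     # keeps the 5 m and 50 m cells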
+ depth = cube.coord(axis='z') + z_dim = cube.coord_dims(cube.coord(axis='z'))[0] + + # #### + # Load z direction thickness + thickness = depth.bounds[..., 1] - depth.bounds[..., 0] + + # #### + # Calculate grid volume: + area = iris.analysis.cartography.area_weights(cube) + if thickness.ndim == 1 and z_dim == 1: + grid_volume = area * thickness[None, :, None, None] + if thickness.ndim == 4 and z_dim == 1: + grid_volume = area * thickness[:, :] + + return grid_volume + + +def average_volume( + cube, + coord1, + coord2, + fx_files=None): + """ + Calculate the average volume. + + The volume average is weighted acoording to the cell volume. Cell volume + is calculated from iris's cartography tool multiplied by the cell + thickness. + + Arguments + --------- + cube: iris.cube.Cube + input cube. + + coord1: str + name of first coordinate + + coord2: str + name of second coordinate + + fx_files: dictionary + dictionary of field:filename for the fx_files + + Returns + ------- + iris.cube.Cube + collapsed cube. + """ + # TODO: Test sigma coordinates. + + # #### + # Load z coordinate field and figure out which dim is which. + t_dim = cube.coord_dims('time')[0] + + grid_volume_found = False + grid_volume = None + if fx_files: + for key, fx_file in fx_files.items(): + if fx_file is None: + continue + logger.info('Attempting to load %s from file: %s', key, fx_file) + fx_cube = iris.load_cube(fx_file) + + grid_volume = fx_cube.data + grid_volume_found = True + cube_shape = cube.data.shape + + if not grid_volume_found: + grid_volume = calculate_volume(cube) + + # Check whether the dimensions are right. + if cube.data.ndim == 4 and grid_volume.ndim == 3: + grid_volume = np.tile(grid_volume, + [cube_shape[0], 1, 1, 1]) + + if cube.data.shape != grid_volume.shape: + raise ValueError('Cube shape ({}) doesn`t match grid volume shape ' + '({})'.format(cube.data.shape, grid_volume.shape)) + + # ##### + # Calculate global volume weighted average + result = [] + # ##### + # iterate over time and z-coordinate dimensions. + for time_itr in range(cube.shape[t_dim]): + # #### + # create empty output arrays + column = [] + depth_volume = [] + + # #### + # iterate over time and z-coordinate dimensions. + for z_itr in range(cube.shape[1]): + # #### + # Calculate weighted mean for this time and layer + total = cube[time_itr, z_itr].collapsed( + [cube.coord(axis='z'), coord1, coord2], + iris.analysis.MEAN, + weights=grid_volume[time_itr, z_itr]).data + column.append(total) + + try: + layer_vol = np.ma.masked_where( + cube[time_itr, z_itr].data.mask, + grid_volume[time_itr, z_itr]).sum() + + except AttributeError: + # #### + # No mask in the cube data. + layer_vol = grid_volume.sum() + depth_volume.append(layer_vol) + # #### + # Calculate weighted mean over the water volumn + result.append(np.average(column, weights=depth_volume)) + + # #### + # Send time series and dummy cube to cube creating tool. + times = np.array(cube.coord('time').points.astype(float)) + result = np.array(result) + + # ##### + # Create a small dummy output array for the output cube + src_cube = cube[:2, :2].collapsed([cube.coord(axis='z'), + coord1, coord2], + iris.analysis.MEAN, + weights=grid_volume[:2, :2], ) + + return _create_cube_time(src_cube, result, times) + + +def depth_integration(cube): + """ + Determine the total sum over the vertical component. + + Requires a 3D cube. The z-coordinate + integration is calculated by taking the sum in the z direction of the + cell contents multiplied by the cell thickness. 
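# calculate_volume above and depth_integration below both derive cell
# thickness from the z-coordinate bounds; in isolation:
import numpy as np

bounds = np.array([[0., 10.], [10., 25.], [25., 50.]])
thickness = bounds[:, 1] - bounds[:, 0]           # [10., 15., 25.]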
+
+    Arguments
+    ---------
+    cube: iris.cube.Cube
+        input cube.
+
+    Returns
+    -------
+    iris.cube.Cube
+        collapsed cube.
+    """
+    # ####
+    depth = cube.coord(axis='z')
+    thickness = depth.bounds[..., 1] - depth.bounds[..., 0]
+
+    if depth.ndim == 1:
+        slices = [None for i in cube.shape]
+        coord_dim = cube.coord_dims(cube.coord(axis='z'))[0]
+        slices[coord_dim] = slice(None)
+        thickness = np.abs(thickness[tuple(slices)])
+
+    ones = np.ones_like(cube.data)
+
+    weights = thickness * ones
+
+    result = cube.collapsed(cube.coord(axis='z'), iris.analysis.SUM,
+                            weights=weights)
+
+    result.rename('Depth_integrated_' + str(cube.name()))
+    # result.units = Unit('m') * result.units # This doesn't work:
+    # TODO: Change units on cube to reflect 2D concentration (not 3D)
+    # Waiting for news from iris community.
+    return result
+
+
+def extract_transect(cube, latitude=None, longitude=None):
+    """
+    Extract data along a line of constant latitude or longitude.
+
+    Both arguments, latitude and longitude, are treated identically.
+    Either argument can be a single float, or a pair of floats, or can be
+    left empty.
+    A single float indicates the latitude or longitude along which the
+    transect should be extracted.
+    A pair of floats indicates the range that the transect should be
+    extracted along the secondary axis.
+
+    For instance `'extract_transect(cube, longitude=-28)'` will produce a
+    transect along 28 West.
+
+    Also, `'extract_transect(cube, longitude=-28, latitude=[-50, 50])'` will
+    produce a transect along 28 West between 50 South and 50 North.
+
+    This function is not yet implemented for irregular arrays - instead
+    try the extract_trajectory function, but note that it is currently
+    very slow. Alternatively, use the regrid preprocessor to regrid along
+    a regular grid and then extract the transect.
+
+    Arguments
+    ---------
+    cube: iris.cube.Cube
+        input cube.
+
+    latitude: None, float or [float, float], optional
+        transect latitude or range.
+
+    longitude: None, float or [float, float], optional
+        transect longitude or range.
+
+    Returns
+    -------
+    iris.cube.Cube
+        collapsed cube.
+    """
+    # ###
+    coord_dim2 = False
+    second_coord_range = False
+    lats = cube.coord('latitude')
+    lons = cube.coord('longitude')
+
+    if lats.ndim == 2:
+        raise ValueError(
+            'extract_slice: Not implemented for irregular arrays!' +
+            '\nTry regridding the data first.')
+
+    if isinstance(latitude, float) and isinstance(longitude, float):
+        raise ValueError(
+            'extract_slice: Can\'t slice along lat and lon at the same time'
+        )
+
+    if isinstance(latitude, list) and isinstance(longitude, list):
+        raise ValueError(
+            'extract_slice: Can\'t reduce lat and lon at the same time'
+        )
+
+    for dim_name, dim_cut, coord in zip(['latitude', 'longitude'],
+                                        [latitude, longitude], [lats, lons]):
+        # ####
+        # Look for the first coordinate.
+        if isinstance(dim_cut, float):
+            coord_index = coord.nearest_neighbour_index(dim_cut)
+            coord_dim = cube.coord_dims(dim_name)[0]
+
+        # ####
+        # Look for the second coordinate.
+ if isinstance(dim_cut, list): + coord_dim2 = cube.coord_dims(dim_name)[0] + second_coord_range = [coord.nearest_neighbour_index(dim_cut[0]), + coord.nearest_neighbour_index(dim_cut[1])] + # #### + # Extracting the line of constant longitude/latitude + slices = [slice(None) for i in cube.shape] + slices[coord_dim] = coord_index + + if second_coord_range: + slices[coord_dim2] = slice(second_coord_range[0], + second_coord_range[1]) + return cube[tuple(slices)] + + +def extract_trajectory(cube, latitudes, longitudes, number_points=2): + """ + Extract data along a trajectory. + + latitudes and longitudes are the pairs of coordinates for two points. + number_points is the number of points between the two points. + + This version uses the expensive interpolate method, but it may be + necceasiry for irregular grids. + + If only two latitude and longitude coordinates are given, + extract_trajectory will produce a cube will extrapolate along a line + bewteen those two points, and will add `number_points` points between + the two corners. + + If more than two points are provided, then + extract_trajectory will produce a cube which has extrapolated the data + of the cube to those points, and `number_points` is not needed. + + Arguments + --------- + cube: iris.cube.Cube + input cube. + + latitudes: list of floats + list of latitude coordinates. + + longitudes: list of floats + list of longitude coordinates. + + number_points: int + number of points to extrapolate (optional). + + Returns + ------- + iris.cube.Cube + collapsed cube. + """ + from iris.analysis.trajectory import interpolate + + if len(latitudes) != len(longitudes): + raise ValueError( + 'Longitude & Latitude coordinates have different lengths' + ) + + if len(latitudes) == len(longitudes) == 2: + minlat, maxlat = np.min(latitudes), np.max(latitudes) + minlon, maxlon = np.min(longitudes), np.max(longitudes) + + longitudes = np.linspace(minlat, maxlat, num=number_points) + latitudes = np.linspace(minlon, maxlon, num=number_points) + + points = [('latitude', latitudes), ('longitude', longitudes)] + interpolated_cube = interpolate(cube, points) # Very slow! + return interpolated_cube diff --git a/esmvaltool/preprocessor/_volume_pp.py b/esmvaltool/preprocessor/_volume_pp.py deleted file mode 100644 index bc08837b44..0000000000 --- a/esmvaltool/preprocessor/_volume_pp.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -Volume and z coordinate operations on data cubes. - -Allows for selecting data subsets using certain volume bounds; -selecting depth or height regions; constructing volumetric averages; -""" -import iris -import numpy as np - - -# slice cube over a restricted area (box) -def volume_slice(cube, z_min, z_max): - """ - Subset a cube on volume - - Function that subsets a cube on a box (z_min,z_max) - This function is a restriction of masked_cube_lonlat(); - Note that this requires the requested depth range to be the same sign - as the iris cube. ie, if the cube has depth as negative, then z_min - and z_max need to be negative numbers. - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - z_min: float - minimum depth to extract. - - z_max: float - maximum depth to extract. - - Returns - ------- - iris.cube.Cube - extracted cube. 
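# Usage of the new extract_transect above, mirroring its docstring examples
# (a sketch; `cube` is a placeholder for any regular-grid iris cube, and
# the float/list types matter because of the isinstance checks):
transect = extract_transect(cube, longitude=-28.0)
band = extract_transect(cube, longitude=-28.0, latitude=[-50., 50.])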
- """ - if z_min > z_max: - # minimum is below maximum, so switch them around - zmax = z_min - zmin = z_max - else: - zmax = z_max - zmin = z_min - - subz = iris.Constraint( - depth=lambda cell: float(zmin) <= cell <= float(zmax)) - - region_subset = cube.extract(subz) - return region_subset - - -def volume_average(cube, coordz, coord1, coord2): - """ - Determine the volume average. - - The volume average is weighted acoording to the cell volume. Cell volume - is calculated from iris's cartography tool multiplied by the cell - thickness. - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - coordz: str - name of depth coordinate - - coord1: str - name of first coordinate - - coord2: str - name of second coordinate - - Returns - ------- - iris.cube.Cube - collapsed cube. - """ - # CMOR ised data should already have bounds? - # cube.coord(coord1).guess_bounds() - # cube.coord(coord2).guess_bounds() - depth = cube.coord(coordz) - thickness = depth.bounds[..., 1] - depth.bounds[..., 0] - - area = iris.analysis.cartography.area_weights(cube) - - if depth.ndim == 1: - slices = [None for i in cube.shape] - coord_dim = cube.coord_dims(coordz)[0] - slices[coord_dim] = slice(None) - thickness = np.abs(thickness[tuple(slices)]) - - grid_volume = area * thickness - - result = cube.collapsed( - [coordz, coord1, coord2], iris.analysis.MEAN, weights=grid_volume) - - return result - - -# get the depth integration -def depth_integration(cube, coordz): - """ - Determine the total sum over the vertical component. - - Requires a 3D cube, and the name of the z coordinate. The depth - integration is calculated by taking the sum in the z direction - of the cell contents multiplied by the cell thickness. - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - coordz: str - name of depth coordinate - - Returns - ------- - iris.cube.Cube - collapsed cube. - """ - #### - depth = cube.coord(coordz) - thickness = depth.bounds[..., 1] - depth.bounds[..., 0] - - if depth.ndim == 1: - slices = [None for i in cube.shape] - coord_dim = cube.coord_dims(coordz)[0] - slices[coord_dim] = slice(None) - thickness = np.abs(thickness[tuple(slices)]) - - ones = np.ones_like(cube.data) - - weights = thickness * ones - - result = cube.collapsed(coordz, iris.analysis.SUM, - weights=weights) - - result.rename('Depth_integrated_' + str(cube.name())) - # result.units = Unit('m') * result.units # This doesn't work: - # TODO: Change units on cube to reflect 2D concentration (not 3D) - # Waiting for news from iris community. - return result - - -def extract_transect(cube, latitude=None, longitude=None): - """ - Extract data along a line of constant latitude or longitude. - - Both arguments, latitude and longitude, are treated identically. - Either argument can be a single float, or a pair of floats, or can be - left empty. - The single float indicates the latitude or longitude along which the - transect should be extracted. - A pair of floats indicate the range that the transect should be - extracted along the secondairy axis. - - ie: - extract_transect(cube, longitude=-28) - will produce a transect along 28 West. - - extract_transect(cube, longitude=-28, latitude=[-50,50]) - will produce a transect along 28 West between 50 south and 50 North. - - This function is not yet implemented for irregular arrays - instead - try the extract_trajectory function, but note that it is currently - very slow. Alternatively, use the regrid preprocessor to regrid along - a regular grid and then extract the transect. 
- - Arguments - --------- - cube: iris.cube.Cube - input cube. - - latitude: None, float or [float, float], optional - transect latiude or range. - - longitude: None, float or [float, float], optional - transect longitude or range. - - Returns - ------- - iris.cube.Cube - collapsed cube. - """ - #### - coord_dim2 = False - second_coord_range = False - lats = cube.coord('latitude') - lons = cube.coord('longitude') - - if lats.ndim == 2: - raise ValueError( - 'extract_slice: Not implemented for irregular arrays!') - - if isinstance(latitude, float) and isinstance(longitude, float): - raise ValueError( - 'extract_slice: Cant slice along lat and lon at the same time' - ) - - if isinstance(latitude, list) and isinstance(longitude, list): - raise ValueError( - 'extract_slice: Can\'t reduce lat and lon at the same time' - ) - - for dim_name, dim_cut, coord in zip(['latitude', 'longitude'], - [latitude, longitude], [lats, lons]): - ##### - # Look for the first coordinate. - if isinstance(dim_cut, float): - coord_index = lats.nearest_neighbour_index(dim_cut) - coord_dim = cube.coord_dims(dim_name)[0] - - ##### - # Look for the second coordinate. - if isinstance(dim_cut, list): - coord_dim2 = cube.coord_dims(dim_name)[0] - second_coord_range = [coord.nearest_neighbour_index(dim_cut[0]), - coord.nearest_neighbour_index(dim_cut[1])] - ##### - # Extracting the line of constant longitude/latitude - slices = [slice(None) for i in cube.shape] - slices[coord_dim] = coord_index - - if second_coord_range: - slices[coord_dim2] = slice(second_coord_range[0], - second_coord_range[1]) - return cube[tuple(slices)] - - -# extract along a trajectory -def extract_trajectory(cube, latitudes, longitudes, number_points=2): - """ - Extract data along a trajectory. - - latitudes and longitudes are the pairs of coordinates for two points. - number_points is the number of points between the two points. - - This version uses the expensive interpolate method, but it may be - necceasiry for irregular grids. - - If only two latitude and longitude coordinates are given, - extract_trajectory will produce a cube will extrapolate along a line - bewteen those two points, and will add `number_points` points between - the two corners. - - If more than two points are provided, then - extract_trajectory will produce a cube which has extrapolated the data - of the cube to those points, and `number_points` is not needed. - - Arguments - --------- - cube: iris.cube.Cube - input cube. - - latitudes: list of floats - list of latitude coordinates. - - longitudes: list of floats - list of longitude coordinates. - - number_points: int - number of points to extrapolate (optional). - - Returns - ------- - iris.cube.Cube - collapsed cube. - """ - from iris.analysis.trajectory import interpolate - - if len(latitudes) != len(longitudes): - raise ValueError( - 'Longitude & Latitude coordinates have different lengths' - ) - - if len(latitudes) == len(longitudes) == 2: - minlat, maxlat = np.min(latitudes), np.max(latitudes) - minlon, maxlon = np.min(longitudes), np.max(longitudes) - - longitudes = np.linspace(minlat, maxlat, num=number_points) - latitudes = np.linspace(minlon, maxlon, num=number_points) - - points = [('latitude', latitudes), ('longitude', longitudes)] - interpolated_cube = interpolate(cube, points) # Very slow! 
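# Usage of extract_trajectory (new version above); with exactly two lat/lon
# pairs it first builds number_points stops between them before calling the
# expensive trajectory interpolation (`cube` is again a placeholder):
traj = extract_trajectory(cube, latitudes=[0., 10.],
                          longitudes=[-30., -20.], number_points=5)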
- return interpolated_cube diff --git a/esmvaltool/recipe_schema.yml b/esmvaltool/recipe_schema.yml index 292053f3ba..192d8f6632 100644 --- a/esmvaltool/recipe_schema.yml +++ b/esmvaltool/recipe_schema.yml @@ -3,19 +3,26 @@ --- # Recipe schema +documentation: include('documentation') datasets: list(include('dataset'), required=False) preprocessors: map(map(), required=False) diagnostics: map(include('diagnostic'), required=False) --- # Recipe item definitions +documentation: + description: str() + authors: list(str(), min=1) + projects: list(str(), required=False) + references: list(str(), required=False) + dataset: dataset: str() project: str(required=False) - start_year: int(min=0000, max=4000) - end_year: int(min=0000, max=4000) + start_year: int(required=False, min=0000, max=5000) + end_year: int(required=False, min=0000, max=5000) ensemble: str(required=False) - exp: str(required=False) + exp: any(str(), list(str()), required=False) mip: str(required=False) realm: str(required=False) shift: str(required=False) @@ -23,11 +30,15 @@ dataset: type: str(required=False) variable: - field: str() + project: str(required=False) + start_year: int(required=False, min=0000, max=4000) + end_year: int(required=False, min=0000, max=4000) + ensemble: str(required=False) + exp: any(str(), list(str()), required=False) + mip: str(required=False) preprocessor: str(required=False) reference_dataset: str(required=False) alternative_dataset: str(required=False) - mip: str(required=False) fx_files: list(required=False) additional_datasets: list(include('dataset'), required=False) @@ -37,7 +48,9 @@ diagnostic: scripts: any(null(), map(include('script'))) additional_datasets: list(include('dataset'), required=False) description: str(required=False) - variables: map(include('variable'), required=False) + themes: list(str(), required=False) + realms: list(str(), required=False) + variables: map(include('variable'), null(), required=False) script: script: str() diff --git a/esmvaltool/recipes/examples/recipe_TestMaskThreshold.yml b/esmvaltool/recipes/examples/recipe_TestMaskThreshold.yml deleted file mode 100644 index c206e74a70..0000000000 --- a/esmvaltool/recipes/examples/recipe_TestMaskThreshold.yml +++ /dev/null @@ -1,144 +0,0 @@ -############################################################################### -# recipe_OceanPhysics.yml ---- -documentation: - description: | - Recipe to demonstrate several simple plots based on the monthly ocean - temperature. Please use this file as a template for adding additional - fields into the ocean. This work based on the BGC-val toolkit GMD-2018-103. - Written by Lee de Mora, Plymouth Marine Laboratory - - authors: - - ledm - - references: - - BGC-val:gmd-2018-103 - - projects: - - ukesm - - -datasets: - # working datasets - - {dataset: CanESM2, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: GISS-E2-H, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: HadGEM2-AO, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: HadCM3, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: CanCM4, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - # Problem with latitude > 90. 
- - {dataset: HadGEM2-CC, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: HadGEM2-ES, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - ##### - # Data with Errors right now -June 2018 - ### - # Problem with times -# - {dataset: MIROC-ESM, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} -# - {dataset: MIROC-ESM-CHEM, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - # Unstructured grids -# - {dataset: MPI-ESM-LR, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} -# - {dataset: MPI-ESM-MR, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} -# - {dataset: ACCESS1-0, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} -# - {dataset: ACCESS1-3, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - -# TODO: All multi-model statistics are turned off until the preprocessor order is fixed. - -preprocessors: - # -------------------------------------------------- - # Time series preprocessors - # -------------------------------------------------- - prep_timeseries_1: # For 2D fields - custom_order: true - mask_above_threshold: - threshold: 280. - average_region: - coord1: longitude - coord2: latitude - multi_model_statistics: - span: overlap - statistics: [mean ] - - prep_timeseries_2: # For 2D fields - custom_order: true - mask_below_threshold: - threshold: 280. - average_region: - coord1: longitude - coord2: latitude - multi_model_statistics: - span: overlap - statistics: [mean ] - - prep_timeseries_3: # For 2D fields - custom_order: true - mask_inside_range: - minimum: 280. - maximum: 285. - average_region: - coord1: longitude - coord2: latitude - multi_model_statistics: - span: overlap - statistics: [mean ] - - prep_timeseries_4: # For 2D fields - custom_order: true - mask_outside_range: - minimum: 280. - maximum: 285. 
-      average_region:
-        coord1: longitude
-        coord2: latitude
-      multi_model_statistics:
-        span: overlap
-        statistics: [mean]
-
-
-
-diagnostics:
-  # --------------------------------------------------
-  # Time series diagnostics
-  # --------------------------------------------------
-  diag_timeseries_1:
-    description: Global Ocean Surface mean timeseries
-    variables:
-      tos:  # Temperature ocean surface
-        preprocessor: prep_timeseries_1
-        field: TO2Ms
-    scripts:
-      Global_Ocean_Surface_mean_timeseries: &Global_Ocean_Surface_mean_timeseries1
-        script: ocean/diagnostic_timeseries.py
-
-  diag_timeseries_2:
-    description: Global Ocean Surface mean timeseries
-    variables:
-      tos:  # Temperature ocean surface
-        preprocessor: prep_timeseries_2
-        field: TO2Ms
-    scripts:
-      Global_Ocean_Surface_mean_timeseries: &Global_Ocean_Surface_mean_timeseries2
-        script: ocean/diagnostic_timeseries.py
-
-  diag_timeseries_3:
-    description: Global Ocean Surface mean timeseries
-    variables:
-      tos:  # Temperature ocean surface
-        preprocessor: prep_timeseries_3
-        field: TO2Ms
-    scripts:
-      Global_Ocean_Surface_mean_timeseries: &Global_Ocean_Surface_mean_timeseries3
-        script: ocean/diagnostic_timeseries.py
-
-  diag_timeseries_4:
-    description: Global Ocean Surface mean timeseries
-    variables:
-      tos:  # Temperature ocean surface
-        preprocessor: prep_timeseries_4
-        field: TO2Ms
-    scripts:
-      Global_Ocean_Surface_mean_timeseries: &Global_Ocean_Surface_mean_timeseries4
-        script: ocean/diagnostic_timeseries.py
-
diff --git a/esmvaltool/recipes/examples/recipe_check_obs.yml b/esmvaltool/recipes/examples/recipe_check_obs.yml
new file mode 100644
index 0000000000..3249c01b08
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_check_obs.yml
@@ -0,0 +1,649 @@
+# ESMValTool
+# recipe_check_obs.yml
+---
+documentation:
+  description: |
+    Test recipe for OBS; no preprocessor or diagnostics are applied,
+    just to check correct reading of the CMORized data.
+ + authors: + - righ_ma + +preprocessors: + nopp: + extract_levels: false + regrid: false + mask_fillvalues: false + multi_model_statistics: false + +diagnostics: + + ### TIER 2 ################################################################## + + CRU: + description: CRU + variables: + tas: + mip: Amon + pr: + mip: Amon + additional_datasets: + - {dataset: CRU, project: OBS, tier: 2, type: reanaly, version: TS4.02, start_year: 1901, end_year: 2017} + scripts: null + + + ESACCI-AEROSOL: + description: ESACCI-AEROSOL + variables: + abs550aer: + preproc: nopp + mip: aero + od550aer: + preproc: nopp + mip: aero + od550aerStderr: + preproc: nopp + mip: aero + od550lt1aer: + preproc: nopp + mip: aero + od870aer: + preproc: nopp + mip: aero + od870aerStderr: + preproc: nopp + mip: aero + additional_datasets: + - {dataset: ESACCI-AEROSOL, project: OBS, tier: 2, type: sat, version: SU-v4.21, start_year: 1997, end_year: 2011} + scripts: null + + + ESACCI-CLOUD: + description: ESACCI-CLOUD + variables: + clivi: + preproc: nopp + mip: Amon + clt: + preproc: nopp + mip: Amon + cltStderr: + preproc: nopp + mip: Amon + clwvi: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, tier: 2, type: sat, version: AVHRR-fv3.0, start_year: 1982, end_year: 2016} + scripts: null + + + ESACCI-FIRE: + description: ESACCI-FIRE + variables: + burntArea: + preproc: nopp + mip: Lmon + additional_datasets: + - {dataset: ESACCI-FIRE, project: OBS, tier: 2, type: sat, version: L4-BA-MERIS-fv4.1, start_year: 2005, end_year: 2011} + scripts: null + + + ESACCI-LANDCOVER: + description: ESACCI-LANDCOVER + variables: + baresoilFrac: + preproc: nopp + mip: Lmon + cropFrac: + preproc: nopp + mip: Lmon + grassFrac: + preproc: nopp + mip: Lmon + shrubFrac: + preproc: nopp + mip: Lmon + treeFrac: + preproc: nopp + mip: Lmon + additional_datasets: + - {dataset: ESACCI-LANDCOVER, project: OBS, tier: 2, type: sat, version: L4-LCCS-Map-300m-P5Y-aggregated-0.500000Deg, start_year: 1998, end_year: 2012} + scripts: null + + + ESACCI-OC: + description: ESACCI-OC + variables: + chl: + preproc: nopp + mip: Omon + additional_datasets: + - {dataset: ESACCI-OC, project: OBS, tier: 2, type: sat, version: fv3.1, start_year: 1997, end_year: 2016} + scripts: null + + + ESACCI-OZONE: + description: ESACCI-OZONE + variables: + toz: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 1997, end_year: 2010} + tozStderr: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 1997, end_year: 2010} + tro3prof: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 2007, end_year: 2008} + tro3profStderr: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 2007, end_year: 2008} + scripts: null + + + ESACCI-SOILMOISTURE: + description: ESACCI-SOILMOISTURE + variables: + dos: + preproc: nopp + mip: Lmon + dosStderr: + preproc: nopp + mip: Lmon + sm: + preproc: nopp + mip: Lmon + smStderr: + preproc: nopp + mip: Lmon + additional_datasets: + - {dataset: ESACCI-SOILMOISTURE, project: OBS, tier: 2, type: sat, version: L3S-SSMV-COMBINED-v4.2, start_year: 2005, end_year: 2011} + scripts: null + + + ESACCI-SST: + description: ESACCI-SST + variables: + ts: + preproc: nopp + mip: Amon + tsStderr: 
+ preproc: nopp + mip: Amon + additional_datasets: + - {dataset: ESACCI-SST, project: OBS, tier: 2, type: sat, version: L4-GHRSST-SSTdepth-OSTIA-GLOB, start_year: 1992, end_year: 2010} + scripts: null + + + GHCN: + description: GHCN + variables: + pr: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: GHCN, project: OBS, tier: 2, type: ground, version: 1, start_year: 1900, end_year: 2014} + scripts: null + + + HadCRUT3: + description: HadCRUT3 + variables: + tasa: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: HadCRUT3, project: OBS, tier: 2, type: ground, version: 1, start_year: 1850, end_year: 2013} + scripts: null + + + HadCRUT4: + description: HadCRUT4 + variables: + tas: + preproc: nopp + mip: Amon + tasa: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: HadCRUT4, project: OBS, tier: 2, type: ground, version: 1, start_year: 1850, end_year: 2018} + scripts: null + + + HadISST: + description: HadISST + variables: + ts: + preproc: nopp + mip: Amon + tos: + preproc: nopp + mip: Omon + sic: + preproc: nopp + mip: OImon + additional_datasets: + - {dataset: HadISST, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 1870, end_year: 2017} + scripts: null + + + Landschuetzer2016: + description: Landschuetzer2016 + variables: + dpco2: + preproc: nopp + mip: Omon + fgco2: + preproc: nopp + mip: Omon + spco2: + preproc: nopp + mip: Omon + additional_datasets: + - {dataset: Landschuetzer2016, project: OBS, tier: 2, type: clim, version: v2016, start_year: 1982, end_year: 2015} + scripts: null + + + NCEP: + description: NCEP monthly data + variables: + hur: + preproc: nopp + mip: Amon + hus: + preproc: nopp + mip: Amon + pr_month: + short_name: pr + preproc: nopp + mip: Amon + pr_day: + short_name: pr + preproc: nopp + mip: day + rlut: + preproc: nopp + mip: day + ta: + preproc: nopp + mip: Amon + tas: + preproc: nopp + mip: Amon + ua_month: + short_name: ua + preproc: nopp + mip: Amon + ua_day: + short_name: ua + preproc: nopp + mip: day + va_month: + short_name: va + preproc: nopp + mip: Amon + va_day: + short_name: va + preproc: nopp + mip: day + wap: + preproc: nopp + mip: Amon + zg: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: NCEP, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 1948, end_year: 2018} + scripts: null + + + PATMOS-x: + description: PATMOS-x + variables: + clt: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: PATMOS-x, project: OBS, tier: 2, type: sat, version: NOAA, start_year: 1982, end_year: 1985}#2018} + scripts: null + + + WOA: + description: WOA + variables: + so: + preproc: nopp + mip: Omon + thetao: + preproc: nopp + mip: Omon + no3: + preproc: nopp + mip: Oyr + o2: + preproc: nopp + mip: Oyr + po4: + preproc: nopp + mip: Oyr + si: + preproc: nopp + mip: Oyr + additional_datasets: + - {dataset: WOA, project: OBS, tier: 2, type: clim, version: 2013v2, start_year: 2000, end_year: 2000} + scripts: null + + + ### TIER 3 ################################################################## + + AURA-TES: + description: AURA-TES + variables: + tro3: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: AURA-TES, project: OBS, tier: 3, type: sat, version: 1, start_year: 2005, end_year: 2011} + scripts: null + + + CDS-SATELLITE-SOIL-MOISTURE: + description: CDS-SATELLITE-SOIL-MOISTURE + variables: + sm: + preproc: nopp + mip: day + smStderr: + preproc: nopp + mip: day + additional_datasets: + - {dataset: CDS-SATELLITE-SOIL-MOISTURE, project: OBS, tier: 3, type: sat, 
version: COMBINED-TCDR-v201812.0.0, start_year: 1979, end_year: 2018} + scripts: null + + + CDS-XCH4: + description: CDS-XCH4 + variables: + xch4: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: CDS-XCH4, project: OBS, tier: 3, type: sat, version: L3, start_year: 2003, end_year: 2016} + scripts: null + + + CDS-XCO2: + description: CDS-XCO2 + variables: + xco2: + preproc: nopp + mip: Amon + additional_datasets: + - {dataset: CDS-XCO2, project: OBS, tier: 3, type: sat, version: L3, start_year: 2003, end_year: 2016} + scripts: null + + + CERES_mon: + description: CERES-SYN1deg monthly data + variables: + rlds_month: + short_name: rlds + preproc: nopp + mip: Amon + rlds_3hr: + short_name: rlds + preproc: nopp + mip: 3hr + rldscs_month: + short_name: rldscs + preproc: nopp + mip: Amon + rldscs_3hr: + short_name: rldscs + preproc: nopp + mip: 3hr + rlus_month: + short_name: rlus + preproc: nopp + mip: Amon + rlus_3hr: + short_name: rlus + preproc: nopp + mip: 3hr + rluscs: + preproc: nopp + mip: 3hr + rlut_month: + short_name: rlut + preproc: nopp + mip: Amon + rlut_3hr: + short_name: rlut + preproc: nopp + mip: 3hr + rlutcs_month: + short_name: rlutcs + preproc: nopp + mip: Amon + rlutcs_3hr: + short_name: rlutcs + preproc: nopp + mip: 3hr + rsds_month: + short_name: rsds + preproc: nopp + mip: Amon + rsds_3hr: + short_name: rsds + preproc: nopp + mip: 3hr + rsdscs: + preproc: nopp + mip: 3hr + rsdt: + preproc: nopp + mip: Amon + rsus_month: + short_name: rsus + preproc: nopp + mip: Amon + rsus_3hr: + short_name: rsus + preproc: nopp + mip: 3hr + rsuscs: + preproc: nopp + mip: 3hr + rsut_month: + short_name: rsut + preproc: nopp + mip: Amon + rsut_3hr: + short_name: rsut + preproc: nopp + mip: 3hr + rsutcs_month: + short_name: rsutcs + preproc: nopp + mip: Amon + rsutcs_3hr: + short_name: rsutcs + preproc: nopp + mip: 3hr + additional_datasets: + - {dataset: CERES-SYN1deg, project: OBS, tier: 3, type: sat, version: Ed3A, start_year: 2001, end_year: 2016} + scripts: null + + + ERA-Interim: + description: ERA-Interim + variables: + clivi: + preproc: nopp + mip: Amon + fx_files: [sftlf] + clt: + preproc: nopp + mip: Amon + clwvi: + preproc: nopp + mip: Amon + hfds: + preproc: nopp + mip: Omon + hus: + preproc: nopp + mip: Amon + pr_month: + short_name: pr + preproc: nopp + mip: Amon + pr_day: + short_name: pr + preproc: nopp + mip: day + prw: + preproc: nopp + mip: Amon + ps: + preproc: nopp + mip: Amon + psl_month: + short_name: psl + preproc: nopp + mip: Amon + psl_day: + short_name: psl + preproc: nopp + mip: day + ta: + preproc: nopp + mip: Amon + tas_month: + short_name: tas + preproc: nopp + mip: Amon + tas_day: + short_name: tas + preproc: nopp + mip: day + tasmin: + preproc: nopp + mip: day + tasmax: + preproc: nopp + mip: day + tauu: + preproc: nopp + mip: Amon + tauv: + preproc: nopp + mip: Amon + ts: + preproc: nopp + mip: Amon + ua: + preproc: nopp + mip: Amon + va: + preproc: nopp + mip: Amon + wap: + preproc: nopp + mip: Amon + zg: + preproc: nopp + mip: Amon + tos: + preproc: nopp + mip: Omon + additional_datasets: + - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 1979, end_year: 2018} + scripts: null + + + LandFlux-EVAL: + description: LandFlux-EVAL + variables: + et: + mip: Lmon + etStderr: + mip: Lmon + additional_datasets: + - {dataset: LandFlux-EVAL, project: OBS, tier: 3, type: reanaly, version: Oct13, start_year: 1989, end_year: 2005} + scripts: null + + + MTE: + description: MTE + variables: + gpp: + mip: Lmon + 
gppStderr:
+        mip: Lmon
+    additional_datasets:
+      - {dataset: MTE, project: OBS, tier: 3, type: reanaly, version: May12, start_year: 1982, end_year: 2011}
+    scripts: null
+
+
+  MODIS:
+    description: MODIS
+    variables:
+      od550aer:
+        preproc: nopp
+        mip: aero
+      clivi:
+        preproc: nopp
+        mip: Amon
+      clwvi:
+        preproc: nopp
+        mip: Amon
+      clt:
+        preproc: nopp
+        mip: Amon
+      lwpStderr:
+        preproc: nopp
+        mip: Amon
+      iwpStderr:
+        preproc: nopp
+        mip: Amon
+    additional_datasets:
+      - {dataset: MODIS, project: OBS, tier: 3, type: sat, version: MYD08-M3, start_year: 2003, end_year: 2018}
+    scripts: null
+
+
+  NIWA-BS:
+    description: NIWA-BS
+    variables:
+      toz:
+        preproc: nopp
+        mip: Amon
+      tozStderr:
+        preproc: nopp
+        mip: Amon
+    additional_datasets:
+      - {dataset: NIWA-BS, project: OBS, tier: 3, type: sat, version: v3.3, start_year: 1979, end_year: 2016}
+    scripts: null
+
+
+  UWisc:
+    description: UWisc
+    variables:
+      lwp:
+        preproc: nopp
+        mip: Amon
+      lwpStderr:
+        preproc: nopp
+        mip: Amon
+    additional_datasets:
+      - {dataset: UWisc, project: OBS, tier: 3, type: sat, version: v2, start_year: 1988, end_year: 2007}
+    scripts: null
diff --git a/esmvaltool/recipes/examples/recipe_concatenate_exps.yml b/esmvaltool/recipes/examples/recipe_concatenate_exps.yml
new file mode 100644
index 0000000000..39f8f89dd1
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_concatenate_exps.yml
@@ -0,0 +1,43 @@
+# ESMValTool
+# recipe_concatenate_exps.yml
+---
+documentation:
+  description: |
+    Concatenate time series of different experiments at the preprocessor level.
+
+  authors:
+    - schl_ma
+
+  maintainer:
+    - schl_ma
+
+datasets:
+  - {dataset: CanESM2}
+  - {dataset: GFDL-ESM2M}
+  - {dataset: MPI-ESM-LR}
+
+
+diagnostics:
+
+  diag_regular:
+    description: Only use historical data.
+    variables:
+      tas: &variable_settings
+        project: CMIP5
+        mip: Amon
+        ensemble: r1i1p1
+        exp: historical
+        start_year: 1950
+        end_year: 2000
+        additional_datasets:
+          - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 1980, end_year: 2000}
+    scripts: null
+
+  diag_concatenate_exps:
+    description: Concatenate historical and RCP 8.5 data.
+    variables:
+      tas:
+        <<: *variable_settings
+        exp: [historical, rcp85]
+        end_year: 2050
+    scripts: null
diff --git a/esmvaltool/recipes/examples/recipe_correlation.yml b/esmvaltool/recipes/examples/recipe_correlation.yml
new file mode 100644
index 0000000000..ec443f909f
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_correlation.yml
@@ -0,0 +1,74 @@
+# ESMValTool
+# recipe_correlation.yml
+---
+documentation:
+  description: |
+    Calculate the Pearson's r correlation coefficient over specified dimensions.
+
+  authors:
+    - ande_bo
+
+  maintainer:
+    - ande_bo
+
+  projects:
+    - c3s-magic
+
+preprocessors:
+  preprocess_3d_data:
+    extract_season:
+      season: DJF
+    extract_levels:
+      levels: reference_dataset
+      scheme: nearest
+    regrid:
+      target_grid: reference_dataset
+      scheme: linear
+
+diagnostics:
+  analyses:
+    description: |
+      Pearson's r correlation coefficient with respect to a reference dataset.
+      Note that a mean over the time coordinate is taken before computing the
+      correlation, because there is no preprocessor function to regrid the
+      time coordinate.
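+
+      A rough sketch of the computation (cube names here are hypothetical;
+      the pearsonr settings below are handed to iris.analysis.stats.pearsonr):
+
+        corr = iris.analysis.stats.pearsonr(
+            model, reference, corr_coords=['latitude', 'longitude'])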
+    themes:
+      - phys
+    realms:
+      - atmos
+    variables:
+      ta:
+        preprocessor: preprocess_3d_data
+        reference_dataset: ERA-Interim
+        start_year: 2000
+        end_year: 2002
+        project: CMIP5
+        mip: Amon
+        exp: historical
+        ensemble: r1i1p1
+    additional_datasets:
+      # One or more datasets can be added here
+      - {dataset: bcc-csm1-1}
+      # The reference dataset is required
+      - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1}
+    scripts:
+      correlation_pressure:
+        script: examples/correlate.py
+        plot_type: zonal
+        pearsonr:
+          corr_coords:
+            - air_pressure
+          mdtol: 1.0
+          common_mask: true
+        quickplot:
+          plot_type: pcolormesh
+      correlation_latlon:
+        script: examples/correlate.py
+        plot_type: profile
+        pearsonr:
+          corr_coords:
+            - latitude
+            - longitude
+          mdtol: 1.0
+        quickplot:
+          plot_type: plot
diff --git a/esmvaltool/recipes/examples/recipe_my_personal_diagnostic.yml b/esmvaltool/recipes/examples/recipe_my_personal_diagnostic.yml
new file mode 100644
index 0000000000..264a12a72a
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_my_personal_diagnostic.yml
@@ -0,0 +1,40 @@
+# ESMValTool
+---
+documentation:
+  description: |
+    This is an example recipe for a personal diagnostic.
+    You can run any Python diagnostic of your choice without installing
+    ESMValTool as a developer or pushing to git. Simply set 'script' to the
+    full path of your script, e.g. /path/to/your/my_little_diagnostic.py.
+    An example personal diagnostic can be found in
+    esmvaltool/diag_scripts/examples/my_little_diagnostic.py
+
+  authors:
+    - pred_va
+
+  maintainer:
+    - pred_va
+
+datasets:
+  - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
+  - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005}
+
+preprocessors:
+  pp:
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+
+diagnostics:
+  simple:
+    description: "Simple personal diagnostic"
+    variables:
+      ta:
+        preprocessor: pp
+        mip: Amon
+        fx_files: [sftlf, sftof, areacello]
+    scripts:
+      my_diagnostic:
+        script: /path/to/your/my_little_diagnostic.py
diff --git a/esmvaltool/recipes/examples/recipe_ncl.yml b/esmvaltool/recipes/examples/recipe_ncl.yml
index f9c442f59e..8087539b0c 100644
--- a/esmvaltool/recipes/examples/recipe_ncl.yml
+++ b/esmvaltool/recipes/examples/recipe_ncl.yml
@@ -1,4 +1,21 @@
+# ESMValTool
+# recipe_ncl.yml
 ---
+documentation:
+  description: |
+    Example recipe that plots air temperature.
+
+  authors:
+    - righ_ma
+
+  maintainer:
+    - righ_ma
+
+  references:
+    - acknow_project
+
+  projects:
+    - esmval
 
 datasets:
   - {dataset: bcc-csm1-1, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
@@ -12,37 +29,26 @@ preprocessors:
       levels: 85000
       scheme: nearest
     regrid: false
-    mask_landocean: false
+    mask_landsea: false
     multi_model_statistics: false
 
 diagnostics:
-  ta_diagnostics:
-    description: Air temperature tutorial diagnostics.
+ example: + description: Example diagnostic + themes: + - phys + realms: + - atmos variables: ta: preprocessor: preprocessor_1 - field: T3M + reference_dataset: ERA-Interim fx_files: [sftlf] additional_datasets: - {dataset: NCEP, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 2000, end_year: 2002} scripts: test_ta: &settings - script: examples/diagnostic_ta.ncl + script: examples/diagnostic.ncl projection: Mollweide - styleset: CMIP5 colormap: WhiteBlueGreenYellowRed ncdf: default - test_ta_no2: - <<: *settings - script: examples/diagnostic_ta.ncl - - pr_diagnostic: - description: Precipitation tutorial diagnostic. - variables: - pr: - field: T2Ms - additional_datasets: [] - scripts: - test_pr: - <<: *settings - script: examples/diagnostic_pr.ncl diff --git a/esmvaltool/recipes/examples/recipe_preprocessor_derive_test.yml b/esmvaltool/recipes/examples/recipe_preprocessor_derive_test.yml index 1265a79e52..03d40299d9 100644 --- a/esmvaltool/recipes/examples/recipe_preprocessor_derive_test.yml +++ b/esmvaltool/recipes/examples/recipe_preprocessor_derive_test.yml @@ -1,28 +1,127 @@ +# ESMValTool +# recipe_preprocessor_derive_test.yml --- +documentation: + description: | + Recipe that demonstrates various uses of the preprocessor derive functionality. -datasets: -# - {dataset: CESM1-WACCM, project: CMIP5, mip: Amon, exp: historical, ensemble: r2i1p1, start_year: 1997, end_year: 2005} -# - {dataset: CNRM-CM5, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1997, end_year: 2005} - - {dataset: GFDL-CM3, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1997, end_year: 2005} - - {dataset: GISS-E2-H, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p2, start_year: 1997, end_year: 2005} - - {dataset: GISS-E2-R, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p2, start_year: 1997, end_year: 2005} -# - {dataset: MIROC-ESM-CHEM, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1997, end_year: 2005} - - {dataset: ESACCI-OZONE, project: OBS, tier: 2, type: sat, version: L3, start_year: 1997, end_year: 2005} -# - {dataset: NIWA, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 1997, end_year: 2005} + authors: + - ande_bo + - schl_ma + + maintainer: + - righ_ma + + projects: + - c3s-magic preprocessors: - preprocessor: {} - + regrid: + regrid: + target_grid: CanESM2 + scheme: linear + + diagnostics: - derive_diagnostic: + diag1: description: Test variable derivation variables: - toz: - preprocessor: preprocessor - field: T2Ms + toz: &toz + project: CMIP5 + mip: Amon + exp: historical + start_year: 2000 + end_year: 2005 derive: true force_derivation: false - additional_datasets: [] + additional_datasets: + - {dataset: GFDL-CM3, ensemble: r1i1p1} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + swcre: + <<: *toz + lwcre: + <<: *toz + clhmtisccp: &cloud + project: CMIP5 + mip: cfMon + exp: amip + start_year: 1985 + end_year: 1988 + ensemble: r1i1p1 + derive: true + force_derivation: false + additional_datasets: + - {dataset: HadGEM2-A} + nbp_grid: &nbp_grid + preprocessor: regrid + project: CMIP5 + mip: Lmon + exp: historical + start_year: 2000 + end_year: 2005 + derive: true + additional_datasets: + - {dataset: CanESM2, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, ensemble: r1i1p1} + fgco2_grid: + <<: *nbp_grid + mip: Omon + scripts: null + + diag2: + description: Test variable derivation + variables: + 
rtnt: + <<: *toz + clhtkisccp: + <<: *cloud + scripts: null + + diag3: + description: Test variable derivation + variables: + rsnt: + <<: *toz + clmmtisccp: + <<: *cloud + scripts: null + + diag4: + description: Test variable derivation + variables: + rsns: + <<: *toz + clmtkisccp: + <<: *cloud + scripts: null + + diag5: + description: Test variable derivation + variables: + rlns: + <<: *toz + clmtkisccp: + <<: *cloud + scripts: null + + diag6: + description: Test variable derivation + variables: + lwp: + <<: *toz + cllmtisccp: + <<: *cloud + scripts: null + + diag7: + description: Test variable derivation + variables: + clltkisccp: + <<: *cloud + netcre: + <<: *toz scripts: null diff --git a/esmvaltool/recipes/examples/recipe_preprocessor_test.yml b/esmvaltool/recipes/examples/recipe_preprocessor_test.yml index 2eb85be39a..e62e3d94a9 100644 --- a/esmvaltool/recipes/examples/recipe_preprocessor_test.yml +++ b/esmvaltool/recipes/examples/recipe_preprocessor_test.yml @@ -1,4 +1,20 @@ +# ESMValTool +# recipe_preprocessor_test.yml --- +documentation: + description: | + Various example preprocessors. + + authors: + - righ_ma + - ande_bo + - schl_ma + + maintainer: + - righ_ma + + projects: + - c3s-magic datasets: - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} @@ -43,7 +59,11 @@ preprocessors: extract_levels: levels: [2000, 7000] scheme: nearest - regrid: false + regrid: + target_grid: 2x2 + lon_offset: false + lat_offset: false + scheme: nearest mask_fillvalues: false multi_model_statistics: false @@ -60,7 +80,6 @@ diagnostics: variables: ta: preprocessor: preprocessor_1 - field: T3M fx_files: [orog, sftlf] additional_datasets: - {dataset: NCEP, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 2000, end_year: 2002} @@ -71,7 +90,6 @@ diagnostics: variables: ta: preprocessor: preprocessor_2 - field: T3M scripts: null diagnostic_3_and_4: @@ -79,8 +97,6 @@ diagnostics: variables: ta: preprocessor: preprocessor_3 - field: T3M pr: preprocessor: preprocessor_4 - field: T2Ms scripts: null diff --git a/esmvaltool/recipes/examples/recipe_python.yml b/esmvaltool/recipes/examples/recipe_python.yml index 3bce502271..7bfe97cb7d 100644 --- a/esmvaltool/recipes/examples/recipe_python.yml +++ b/esmvaltool/recipes/examples/recipe_python.yml @@ -1,10 +1,28 @@ +# ESMValTool +# recipe_python.yml --- +documentation: + description: | + Example recipe that plots the mean precipitation and temperature. + + authors: + - ande_bo + - righ_ma + + maintainer: + - schl_ma + + references: + - acknow_project + + projects: + - esmval + - c3s-magic datasets: - - {dataset: bcc-csm1-1, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} + - {dataset: CanESM2, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - {dataset: GFDL-ESM2G, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, tier: 3, type: reanaly, version: 1, start_year: 2000, end_year: 2002} preprocessors: @@ -23,16 +41,16 @@ diagnostics: diagnostic1: description: Air temperature and precipitation Python tutorial diagnostic. 
+      themes:
+        - phys
+      realms:
+        - atmos
     variables:
       ta:
         preprocessor: preprocessor1
-        field: T3M
-        reference_dataset: ERA-Interim
-        additional_datasets:
-          - {dataset: NCEP, project: OBS, tier: 2, type: reanaly, version: 1, start_year: 2000, end_year: 2002}
+        reference_dataset: CanESM2
       pr:
-        field: T2Ms
-        reference_dataset: ERA-Interim
+        reference_dataset: MPI-ESM-LR
     scripts:
       script1:
        script: examples/diagnostic.py
diff --git a/esmvaltool/recipes/examples/recipe_python_object_oriented.yml b/esmvaltool/recipes/examples/recipe_python_object_oriented.yml
index a5a8d5f169..90e1a5b224 100644
--- a/esmvaltool/recipes/examples/recipe_python_object_oriented.yml
+++ b/esmvaltool/recipes/examples/recipe_python_object_oriented.yml
@@ -1,4 +1,15 @@
+# ESMValTool
+# recipe_python_object_oriented.yml
 ---
+documentation:
+  description: |
+    Example recipe that runs a Python diagnostic with a more object-oriented
+    interface.
+
+  authors:
+    - schl_ma
+
+  maintainer:
+    - schl_ma
 
 datasets:
   - {dataset: GFDL-ESM2G, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
@@ -33,11 +44,9 @@ diagnostics:
     variables:
       ta:
         preprocessor: preprocessor1
-        field: T3M
         reference_dataset: ERA-Interim
       pr:
         preprocessor: preprocessor2
-        field: T2Ms
         reference_dataset: ERA-Interim
     scripts:
       script1a:
@@ -55,7 +64,6 @@ diagnostics:
     variables:
       tas:
         preprocessor: preprocessor2
-        field: T2Ms
         additional_datasets:
           - {dataset: bcc-csm1-1, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
         reference_dataset: MPI-ESM-LR
diff --git a/esmvaltool/recipes/examples/recipe_r.yml b/esmvaltool/recipes/examples/recipe_r.yml
new file mode 100644
index 0000000000..b8536c72e1
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_r.yml
@@ -0,0 +1,29 @@
+# ESMValTool
+# recipe_r.yml
+---
+documentation:
+  description: Recipe for an example diagnostic written in R.
+
+  authors:
+    - arno_en
+
+  maintainer:
+    - ande_bo
+
+datasets:
+  - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1997, end_year: 1997}
+preprocessors:
+  preproc:
+    extract_levels: false
+diagnostics:
+  example:
+    description: Example diagnostic written in R
+    variables:
+      pr:
+        preprocessor: preproc
+        reference_dataset: "EC-EARTH"
+        mip: day
+    scripts:
+      main:
+        script: examples/diagnostic.r
+        parameter1: 1  # example parameter
diff --git a/esmvaltool/recipes/examples/recipe_variable_groups.yml b/esmvaltool/recipes/examples/recipe_variable_groups.yml
new file mode 100644
index 0000000000..64a4a873a7
--- /dev/null
+++ b/esmvaltool/recipes/examples/recipe_variable_groups.yml
@@ -0,0 +1,63 @@
+# recipe_variable_groups.yml
+---
+documentation:
+
+  description: |
+    Example recipe to demonstrate grouping of variables in the diagnostic
+    section.
+
+  authors:
+    - schl_ma
+
+  maintainer:
+    - schl_ma
+
+  projects:
+    - crescendo
+
+
+preprocessors:
+
+  mask:
+    mask_landsea:
+      mask_out: sea
+
+
+ARBITRARY_YAML_ANCHOR: &datasets
+  - {dataset: CanESM2}
+  - {dataset: IPSL-CM5A-LR}
+  - {dataset: MIROC5}
+  - {dataset: MPI-ESM-LR}
+  - {dataset: NorESM1-M}
+
+
+diagnostics:
+
+  diag_variable_groups:
+    description: Demonstrate the use of variable groups.
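+    # A sketch of the mechanism demonstrated below: tas_1 defines a YAML
+    # anchor (&variable_settings) and the later groups merge it back in via
+    # "<<: *variable_settings", overriding only the keys that differ.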
+ variables: + tas_1: &variable_settings + short_name: tas + project: CMIP5 + mip: Amon + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2005 + tag: TAS1 + additional_datasets: *datasets + tas_2: + <<: *variable_settings + preprocessor: mask + tag: TAS2 + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + tas_3: + <<: *variable_settings + exp: rcp45 + start_year: 2020 + end_year: 2025 + tag: TAS3 + additional_datasets: *datasets + scripts: + null diff --git a/esmvaltool/recipes/recipe_OceanBGC.yml b/esmvaltool/recipes/recipe_OceanBGC.yml deleted file mode 100644 index ed359e243e..0000000000 --- a/esmvaltool/recipes/recipe_OceanBGC.yml +++ /dev/null @@ -1,113 +0,0 @@ -############################################################################### -# recipe_OceanBGC.yml ---- -documentation: - description: | - Recipe to demonstrate a couple simple plots based on the annual ocean - biogeochemistry. Please use this file as a template for adding additional - fields into the ocean. This work based on the BGC-val toolkit GMD-2018-103. - Written by Lee de Mora, Plymouth Marine Laboratory - - authors: - - ledm - - references: - - BGC-val:gmd-2018-103 - - projects: - - ukesm - - -datasets: -# working datasets - - {dataset: CanESM2, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} -# - {dataset: GISS-E2-H, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} -# - {dataset: HadGEM2-AO, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} -# - {dataset: HadCM3, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} -# - {dataset: CanCM4, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} -# - {dataset: CSIRO-Mk3-6-0, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} - -##### -# datasets with Errors (right now): - -# Problem with latitude > 90. -# - {dataset: HadGEM2-CC, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} -# - {dataset: HadGEM2-ES, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} - - -### -# Problem with times -# - {dataset: MIROC-ESM, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} -# - {dataset: MIROC-ESM-CHEM, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} - -# Unstructured grids -# - {dataset: MPI-ESM-LR, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2003} -# - {dataset: MPI-ESM-MR, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} -# - {dataset: ACCESS1-0, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} -# - {dataset: ACCESS1-3, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 1990, end_year: 2004} - - - -preprocessors: - # -------------------------------------------------- - # Depth-profile preprocessors - # -------------------------------------------------- - prep_depth_integration_1: # For a 2D map global profile - depth_integration: - coordz: depth -# new_units: kg m-2 # need to specify in advance, as cf_units has strange behaviour. 
- time_average: - - - - # -------------------------------------------------- - # map preprocessors - # -------------------------------------------------- - prep_surface_map_1: # For a 2D global surface map - extract_levels: - levels: [0., ] - scheme: linear_horizontal_extrapolate_vertical - time_average: - -diagnostics: - # -------------------------------------------------- - # Depth integrated diagnostics - # -------------------------------------------------- - diag_depthInt_1: - description: Global Ocean Depth Integrated mean timeseries - variables: - chl: - preprocessor: prep_depth_integration_1 - field: TO3Y - no3: - preprocessor: prep_depth_integration_1 - field: TO3Y - scripts: - Global_Ocean_DepthIntegration_map: &Global_Ocean_DepthIntegration_map - script: ocean/diagnostic_maps.py - - # -------------------------------------------------- - # Map diagnostics - # -------------------------------------------------- - diag_map_1: - description: Global Ocean Surface mean timeseries - variables: - chl: - preprocessor: prep_surface_map_1 - field: TO3Y - no3: - preprocessor: prep_surface_map_1 - field: TO3Y - - scripts: - Global_Ocean_surface_map: &Global_Ocean_surface_map - script: ocean/diagnostic_maps.py - - - - - - - - - diff --git a/esmvaltool/recipes/recipe_autoassess_landsurface_permafrost.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_permafrost.yml new file mode 100644 index 0000000000..6fc07d7e1e --- /dev/null +++ b/esmvaltool/recipes/recipe_autoassess_landsurface_permafrost.yml @@ -0,0 +1,61 @@ +# ESMValTool +# recipe_autoassess_landsurface_permafrost.yml +--- +documentation: + description: | + Recipe that runs the Autoassess Land-surface assessment area diagnostic. + Land surface area and ice masks are time independent. + + authors: + - pred_va + + maintainer: + - pred_va + +datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: inmcm4, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + +preprocessors: + pp_aa_area: + regrid: # NOT USED + target_grid: 0.15x0.15 + scheme: linear + +diagnostics: + aa_landsurf_permafrost: + description: Autoassess test diag for Land-Surface Permafrost. 
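+    # Note: both variables below feed the same assessment script; sftlf and
+    # mrsofc are fx (time-independent) fields, matching the masks note in
+    # the documentation section above.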
+ variables: + tas: + mip: Amon + fx_files: [sftlf, mrsofc] + tsl: + mip: Lmon + fx_files: [sftlf, mrsofc] + scripts: + autoassess_landsurf_permafrost: &autoassess_landsurf_permafrost_settings + script: autoassess/autoassess_area_base.py + title: "Autoassess Land-Surface Permafrost Diagnostic" + area: land_surface_permafrost + control_model: MPI-ESM-LR + exp_model: MPI-ESM-MR + obs_models: [] + fx: [sftlf, mrsofc] + start: 1997/12/01 + end: 2002/12/01 + climfiles_root: '/group_workspaces/jasmin4/esmeval/for_vp/obs_etc' + + plot_standard: + description: Wrapper to collect and plot previously calculated metrics + scripts: + plot_landsurf_permafrost: &plot_landsurf_permafrost_settings + <<: *autoassess_landsurf_permafrost_settings + control_model: MPI-ESM-LR + exp_model: MPI-ESM-MR + script: autoassess/plot_autoassess_metrics.py + ancestors: ['*/autoassess_landsurf_permafrost'] + title: "Plot Land-Surface Permafrost Metrics" + plot_name: "Permafrost_Metrics" + diag_tag: aa_landsurf_permafrost + diag_name: autoassess_landsurf_permafrost diff --git a/esmvaltool/recipes/recipe_autoassess_landsurface_snow.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_snow.yml new file mode 100644 index 0000000000..3067984560 --- /dev/null +++ b/esmvaltool/recipes/recipe_autoassess_landsurface_snow.yml @@ -0,0 +1,60 @@ +# ESMValTool +# recipe_autoassess_landsurface_snow.yml +--- +documentation: + description: | + Recipe that runs the Autoassess Land-surface assessment area diagnostic. + Climatological files are stored externally to avoid overloading the + ESMValTool source. See /group_workspaces/jasmin4/esmeval/autoassess_specific_files + (on JASMIN). + + authors: + - pred_va + + maintainer: + - pred_va + +datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: inmcm4, project: CMIP5, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + +preprocessors: + pp_aa_area: + regrid: # NOT USED + target_grid: 0.15x0.15 + scheme: linear + +diagnostics: + aa_landsurf_snow: + description: Autoassess test diag for Land-Surface Snow. 
+ variables: + snw: # surface_snow_amount (CMOR name=snw) + mip: LImon + fx_files: [sftlf] + scripts: + autoassess_landsurf_snow: &autoassess_landsurf_snow_settings + script: autoassess/autoassess_area_base.py + title: "Autoassess Land-Surface Snow Diagnostic" + area: land_surface_snow + control_model: MPI-ESM-LR + exp_model: MPI-ESM-MR + obs_models: [] + fx: [sftlf] + start: 1997/12/01 + end: 2002/12/01 + climfiles_root: '/group_workspaces/jasmin4/esmeval/autoassess_specific_files/files' # on Jasmin + + plot_standard: + description: Wrapper to collect and plot previously calculated metrics + scripts: + plot_landsurf_snow: &plot_landsurf_snow_settings + <<: *autoassess_landsurf_snow_settings + control_model: MPI-ESM-LR + exp_model: MPI-ESM-MR + script: autoassess/plot_autoassess_metrics.py + ancestors: ['*/autoassess_landsurf_snow'] + title: "Plot Land-Surface Snow Metrics" + plot_name: "Snow_Metrics" + diag_tag: aa_landsurf_snow + diag_name: autoassess_landsurf_snow diff --git a/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml new file mode 100644 index 0000000000..78a6b11069 --- /dev/null +++ b/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml @@ -0,0 +1,61 @@ +# ESMValTool +# recipe_autoassess_landsurface_soilmoisture.yml +--- +documentation: + description: | + Recipe that runs the Autoassess Land-surface assessment area diagnostic. + Climatological files are stored externally to avoid overloading the + ESMValTool source. See /group_workspaces/jasmin4/esmeval/autoassess_specific_files + (on JASMIN). + Missing variable mass_fraction_of_unfrozen_water_in_soil_moisture and + mass_fraction_of_frozen_water_in_soil_moisture. + + authors: + - pred_va + + maintainer: + - pred_va + +datasets: + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1992, end_year: 2002} + +preprocessors: + pp_aa_area: + regrid: # NOT USED + target_grid: 0.15x0.15 + scheme: linear + +diagnostics: + aa_landsurf_soilmoisture: + description: Autoassess test diag for Land-Surface Soilmoisture. 
+    variables:
+      mrsos:  # moisture_content_of_soil_layer
+        mip: Lmon
+        # TODO: mass_fraction_of_unfrozen_water_in_soil_moisture
+        # TODO: mass_fraction_of_frozen_water_in_soil_moisture
+    scripts:
+      autoassess_landsurf_soilmoisture: &autoassess_landsurf_soilmoisture_settings
+        script: autoassess/autoassess_area_base.py
+        title: "Autoassess Land-Surface Soilmoisture Diagnostic"
+        area: land_surface_soilmoisture
+        control_model: IPSL-CM5A-LR
+        exp_model: inmcm4
+        obs_models: []
+        start: 1997/12/01
+        end: 2002/12/01
+        climfiles_root: '/group_workspaces/jasmin4/esmeval/autoassess_specific_files/files'
+
+  plot_standard:
+    description: Wrapper to collect and plot previously calculated metrics
+    scripts:
+      plot_landsurf_soilmoisture: &plot_landsurf_soilmoisture_settings
+        <<: *autoassess_landsurf_soilmoisture_settings
+        control_model: IPSL-CM5A-LR
+        exp_model: inmcm4
+        script: autoassess/plot_autoassess_metrics.py
+        ancestors: ['*/autoassess_landsurf_soilmoisture']
+        title: "Plot Land-Surface Soilmoisture Metrics"
+        plot_name: "Soilmoisture_Metrics"
+        diag_tag: aa_landsurf_soilmoisture
+        diag_name: autoassess_landsurf_soilmoisture
diff --git a/esmvaltool/recipes/recipe_autoassess_landsurface_surfrad.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_surfrad.yml
new file mode 100644
index 0000000000..1bf4943672
--- /dev/null
+++ b/esmvaltool/recipes/recipe_autoassess_landsurface_surfrad.yml
@@ -0,0 +1,70 @@
+# ESMValTool
+# recipe_autoassess_landsurface_surfrad.yml
+---
+documentation:
+  description: |
+    Recipe that runs the Autoassess Land-surface assessment area diagnostic.
+    Land-sea masks are not averaged over time (annual) but are fixed, i.e.
+    taken from the sftlf (lat-lon) grid. The original Autoassess diagnostic
+    used time-dependent .pp masks; these can still be implemented if present
+    in autoassess_source. The recipe is fully working, but the masks need to
+    be checked.
+
+  authors:
+    - pred_va
+
+  maintainer:
+    - pred_va
+
+datasets:
+  - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002}
+  - {dataset: MPI-ESM-MR, project: CMIP5, mip: Amon, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002}
+  - {dataset: inmcm4, project: CMIP5, mip: Amon, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002}
+
+preprocessors:
+  pp_aa_area:
+    regrid:  # NOT USED
+      target_grid: 0.15x0.15
+      scheme: linear
+
+diagnostics:
+  aa_landsurf_surfrad:
+    description: Autoassess test diag for Land-Surface Surfrad.
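+    # Note: rsns and rlns are not read directly from the datasets; they are
+    # derived on the fly (derive: true below) from the surface radiation
+    # components.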
+ variables: + rsns: # Surf SW net all sky + derive: true + force_derivation: false + fx_files: [sftlf] + rlns: # Surf LW net all sky + derive: true + force_derivation: false + fx_files: [sftlf] + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2012, tier: 1} + scripts: + autoassess_landsurf_surfrad: &autoassess_landsurf_surfrad_settings + script: autoassess/autoassess_area_base.py + title: "Autoassess Land-Surface Diagnostic Surfrad Metric" + area: land_surface_surfrad + control_model: MPI-ESM-LR + exp_model: MPI-ESM-MR + obs_models: [CERES-EBAF] + obs_type: obs4mips + fx: [sftlf] + start: 1997/12/01 + end: 2002/12/01 + + plot_standard: + description: Wrapper to collect and plot previously calculated metrics + scripts: + plot_landsurf_surfrad: &plot_landsurf_surfrad_settings + <<: *autoassess_landsurf_surfrad_settings + control_model: MPI-ESM-LR + exp_model: MPI-ESM-MR + obs_models: [CERES-EBAF] + script: autoassess/plot_autoassess_metrics.py + ancestors: ['*/autoassess_landsurf_surfrad'] + title: "Plot Land-Surface Metrics Surfrad" + plot_name: "Surfrad_Metrics" + diag_tag: aa_landsurf_surfrad + diag_name: autoassess_landsurf_surfrad diff --git a/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_all.yml b/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_all.yml index 80d7ecefce..da7fac67b5 100644 --- a/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_all.yml +++ b/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_all.yml @@ -1,30 +1,20 @@ -#### summary -#recipe_autoassess_radiation_rms_Amon_all.yml -# -#Description -#Diagnostics of spatial RMS errors of radiation and clouds -# -#Author -#Yoko Tsushima (Met Office, United Kingdom - yoko.tsushima@metoffice.gov.uk) -# -#Contributor -#Simon Read (University of Reading, United Kingdom) -# -#Author version2 -#Valeriu Predoi: VP (University of Reading, UK) -# -#Project -#CMUG -# -#Port to ESMValTool v2: July 2018 -# Final version of recipe: July 2018 -# -#References -#Technical report or a paper will be written in the future. -# -#This recipe is part of the ESMValTool -########################################################################################################### +# ESMValTool +# recipe_autoassess_radiation_rms_Amon_all.yml --- +documentation: + description: | + Diagnostics of spatial RMS errors of radiation and clouds. 
+ + authors: + - read_si + - pred_va + - sell_al + + maintainer: + - pred_va + + projects: + - cmug datasets: - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} @@ -44,47 +34,38 @@ preprocessors: diagnostics: radiation_Amon_all_CERES-EBAF: - description: "CMIP5 vs CERES-EBAF" + description: "CMIP5 vs CERES-EBAF" variables: rtnt: # TOA TOT net preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false rsnt: # TOA SW net preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false swcre: # TOA SW Cloud Radiative Effect preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false lwcre: # TOA LW Cloud Radiative Effect preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false rsns: # obs: ceres; name: Surf SW net all sky preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false rlns: # Surf LW net all sky preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false rsut: # TOA SW up all sky preprocessor: pp_rad - field: T2Ms rlut: # TOA LW up all sky preprocessor: pp_rad - field: T2Ms rsutcs: # TOA SW up clear sky preprocessor: pp_rad - field: T2Ms additional_datasets: - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2012, tier: 1} scripts: @@ -102,10 +83,8 @@ diagnostics: variables: rlutcs: # TOA LW up clear sky preprocessor: pp_rad - field: T2Ms rldscs: # Surf LW down clear sky preprocessor: pp_rad - field: T2Ms additional_datasets: - {dataset: JRA-55, project: ana4mips, type: reanalysis, start_year: 2001, end_year: 2005, tier: 1} scripts: @@ -123,7 +102,6 @@ diagnostics: variables: prw: # Column Water vapour preprocessor: pp_rad - field: T2Ms additional_datasets: - {dataset: SSMI-MERIS, project: obs4mips, level: L3, version: v1-00, start_year: 2003, end_year: 2008, tier: 1} scripts: @@ -141,7 +119,6 @@ diagnostics: variables: pr: # Precipitation preprocessor: pp_rad - field: T2Ms additional_datasets: - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, start_year: 2001, end_year: 2009, tier: 1} scripts: diff --git a/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_obs.yml b/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_obs.yml index 79f12fd1bf..5ea1f52ed1 100644 --- a/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_obs.yml +++ b/esmvaltool/recipes/recipe_autoassess_radiation_rms_Amon_obs.yml @@ -1,30 +1,20 @@ -#### summary -#recipe_autoassess_radiation_rms_Amon_all.yml -# -#Description -#Diagnostics of spatial RMS errors of radiation and clouds -# -#Author -#Yoko Tsushima (Met Office, United Kingdom - yoko.tsushima@metoffice.gov.uk) -# -#Contributor -#Simon Read (University of Reading, United Kingdom) -# -#Author version2 -#Valeriu Predoi: VP (University of Reading, UK) -# -#Project -#CMUG -# -#Port to ESMValTool v2: July 2018 -# Final version of recipe: July 2018 -# -#References -#Technical report or a paper will be written in the future. -# -#This recipe is part of the ESMValTool -########################################################################################################### +# ESMValTool +# recipe_autoassess_radiation_rms_Amon_obs.yml --- +documentation: + description: | + Diagnostics of spatial RMS errors of radiation and clouds. 
+ + authors: + - read_si + - pred_va + - sell_al + + maintainer: + - pred_va + + projects: + - cmug datasets: # ESMValTool v1 has no specific models here; it says use whatever you want but no more than what Amon_all uses @@ -45,50 +35,41 @@ preprocessors: diagnostics: radiation_Amon_obs_CERES-EBAF: - description: "CMIP5 vs CERES-EBAF" + description: "CMIP5 vs CERES-EBAF" variables: rtnt: # TOA TOT net preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false rsnt: # TOA SW net preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false swcre: # TOA SW Cloud Radiative Effect preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false lwcre: # TOA LW Cloud Radiative Effect preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false rsns: # Surf SW net all sky preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false rlns: # Surf LW net all sky preprocessor: pp_rad_derive_var - field: T2Ms derive: true force_derivation: false rsut: # TOA SW up all sky preprocessor: pp_rad - field: T2Ms rlut: # TOA LW up all sky preprocessor: pp_rad - field: T2Ms rsutcs: # TOA SW up clear sky preprocessor: pp_rad - field: T2Ms additional_datasets: - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2012, tier: 1} - - {dataset: CERES-SYN1deg, project: OBS, type: comp, version: Ed3A, start_year: 2001, end_year: 2015, tier: 2} + - {dataset: CERES-SYN1deg, project: OBS, type: sat, version: Ed3A, start_year: 2001, end_year: 2015, tier: 3} scripts: autoassess_Radiation_rms_Amon_obs: script: autoassess/autoassess_radiation_rms.py @@ -104,13 +85,11 @@ diagnostics: variables: rlutcs: # TOA LW up clear sky preprocessor: pp_rad - field: T2Ms rldscs: # Surf LW down clear sky preprocessor: pp_rad - field: T2Ms additional_datasets: - {dataset: JRA-55, project: ana4mips, type: reanalysis, start_year: 2001, end_year: 2005, tier: 1} - - {dataset: CERES-SYN1deg, project: OBS, type: comp, version: Ed3A, start_year: 2001, end_year: 2015, tier: 2} + - {dataset: CERES-SYN1deg, project: OBS, type: sat, version: Ed3A, start_year: 2001, end_year: 2015, tier: 3} scripts: autoassess_Radiation_rms_Amon_obs: script: autoassess/autoassess_radiation_rms.py @@ -126,7 +105,6 @@ diagnostics: variables: prw: # Column Water vapour preprocessor: pp_rad - field: T2Ms additional_datasets: - {dataset: SSMI-MERIS, project: obs4mips, level: L3, version: v1-00, start_year: 2003, end_year: 2008, tier: 1} - {dataset: SSMI, project: obs4mips, level: L3, version: RSSv07r00, start_year: 1988, end_year: 2006, tier: 1} @@ -145,7 +123,6 @@ diagnostics: variables: pr: # Precipitation preprocessor: pp_rad - field: T2Ms additional_datasets: - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, start_year: 2001, end_year: 2009, tier: 1} - {dataset: TRMM-L3, project: obs4mips, level: v7, version: 7A, start_year: 1998, end_year: 2013, tier: 1} diff --git a/esmvaltool/recipes/recipe_autoassess_radiation_rms_cfMon_all.yml b/esmvaltool/recipes/recipe_autoassess_radiation_rms_cfMon_all.yml index d4b63a460a..95d825f11a 100644 --- a/esmvaltool/recipes/recipe_autoassess_radiation_rms_cfMon_all.yml +++ b/esmvaltool/recipes/recipe_autoassess_radiation_rms_cfMon_all.yml @@ -1,30 +1,20 @@ -#### summary -#recipe_autoassess_radiation_rms_Amon_all.yml -# -#Description -#Diagnostics of spatial RMS errors of radiation and clouds -# -#Author -#Yoko Tsushima (Met Office, United Kingdom - 
yoko.tsushima@metoffice.gov.uk) -# -#Contributor -#Simon Read (University of Reading, United Kingdom) -# -#Author version2 -#Valeriu Predoi: VP (University of Reading, UK) -# -#Project -#CMUG -# -#Port to ESMValTool v2: July 2018 -# Final version of recipe: July 2018 -# -#References -#Technical report or a paper will be written in the future. -# -#This recipe is part of the ESMValTool -########################################################################################################### +# ESMValTool +# recipe_autoassess_radiation_rms_cfMon_all.yml --- +documentation: + description: | + Diagnostics of spatial RMS errors of radiation and clouds. + + authors: + - tsus_yo + - read_si + - pred_va + + maintainer: + - pred_va + + projects: + - cmug datasets: - {dataset: MRI-CGCM3, project: CMIP5, mip: cfMon, exp: amip, ensemble: r1i1p1, start_year: 1985, end_year: 1988} @@ -38,40 +28,34 @@ preprocessors: diagnostics: radiation_cfMon_all_ISCCP: - description: "CMIP5 vs ISCCP Clouds" + description: "CMIP5 vs ISCCP Clouds" variables: cllmtisccp: # Low-intermediate Cloud preprocessor: pp_rad_derive_var - field: T4M derive: true force_derivation: false clltkisccp: # Low-thick Cloud preprocessor: pp_rad_derive_var - field: T4M derive: true force_derivation: false clmmtisccp: # Middle-intermediate Cloud preprocessor: pp_rad_derive_var - field: T4M derive: true force_derivation: false clmtkisccp: # Middle-thick Cloud preprocessor: pp_rad_derive_var - field: T4M derive: true force_derivation: false clhmtisccp: # High-intermediate Cloud preprocessor: pp_rad_derive_var - field: T4M derive: true force_derivation: false clhtkisccp: # High-thick Cloud preprocessor: pp_rad_derive_var - field: T4M derive: true force_derivation: false additional_datasets: - - {dataset: ISCCP, project: obs4mips, level: L3, version: V1.0, start_year: 1985, end_year: 1988, tier: 1} + - {dataset: ISCCP, project: obs4mips, level: L3, version: V1.0, start_year: 1985, end_year: 1988, tier: 1} scripts: autoassess_Radiation_rms_cfMon_all: script: autoassess/autoassess_radiation_rms.py diff --git a/esmvaltool/recipes/recipe_autoassess_stratosphere.yml b/esmvaltool/recipes/recipe_autoassess_stratosphere.yml index 925775d3cf..e829a22afc 100644 --- a/esmvaltool/recipes/recipe_autoassess_stratosphere.yml +++ b/esmvaltool/recipes/recipe_autoassess_stratosphere.yml @@ -1,23 +1,27 @@ -#### summary -#### recipe that runs the Autoassess Stratorsphere assessment area diagnostic -#### Version: 1 June 2018 -################# -#### Description: -################# -#The original stratospehre diagnostic from autoassess accepts two input UM suites; this has now been -#changed for ESMValTool to two datasets; the recipe can accomodate as many pairs of datasets to be compared as the -#user needs; this is handled in the diagnostics/aa_strato/scripts: while the script is the same -#(autoassess_stratosphere.py), the (control_model, exp_model) pair may vary accoding to the user's -#needs. Different (start, end) dates can also be specified (in autoassess format) per run, as long as the -#time data is within the (start_year, end_year) specified in datasets: section. Preprocessing is basic and just -#extracts levels and regrids onto the reference ERA-Interim dataset; this is done once. 
-#################
-#### CAVEATS
-#################
-# - The original autoassess diagnostic needs MERRA obs data as well; not asked for here but it should be implemented
-#   at a later stage;
-###########################################################################################################
+# ESMValTool
+# recipe_autoassess_stratosphere.yml
 ---
+documentation:
+  description: |
+    Recipe that runs the Autoassess Stratosphere assessment area diagnostic.
+    The original stratosphere diagnostic from autoassess accepts two input UM
+    suites; this has now been changed for ESMValTool to two datasets; the
+    recipe can accommodate as many pairs of datasets to be compared as the
+    user needs. This is handled in the diagnostics/aa_strato/scripts. While
+    the script is the same (autoassess_stratosphere.py), the (control_model,
+    exp_model) pair may vary according to the user's needs. Different (start,
+    end) dates can also be specified (in autoassess format) per run, as long
+    as the time data is within the (start_year, end_year) specified in the
+    datasets section. Preprocessing is basic and just extracts levels and
+    regrids onto the reference ERA-Interim dataset; this is done once.
+    The original autoassess diagnostic needs MERRA obs data as well; add a
+    MERRA file request to this recipe when running.
+
+  authors:
+    - pred_va
+
+  maintainer:
+    - pred_va
 
 datasets:
   - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: amip, ensemble: r1i1p1, start_year: 1992, end_year: 2002}
@@ -40,34 +44,33 @@ diagnostics:
       ta:
         preprocessor: pp_aa_area
         mip: Amon
-        field: T3M
       ua:
         preprocessor: pp_aa_area
         mip: Amon
-        field: T3M
       hus:
         preprocessor: pp_aa_area
         mip: Amon
-        field: T3M
     additional_datasets:
       - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3}
     scripts:
       autoassess_strato_test_1: &autoassess_strato_test_1_settings
-        script: autoassess/autoassess_stratosphere.py
+        script: autoassess/autoassess_area_base.py
         title: "Autoassess Stratosphere Diagnostic Metric MPI-MPI"
-        area: Stratosphere
+        area: stratosphere
         control_model: MPI-ESM-LR
         exp_model: MPI-ESM-MR
-        obs_models: [ERA-Interim]
+        obs_models: [ERA-Interim]  # list to hold models that are NOT for metrics but for obs operations
+        additional_metrics: [ERA-Interim, inmcm4]  # list to hold additional datasets for metrics
         start: 1997/12/01
         end: 2002/12/01
       autoassess_strato_test_2: &autoassess_strato_test_2_settings
-        script: autoassess/autoassess_stratosphere.py
+        script: autoassess/autoassess_area_base.py
         title: "Autoassess Stratosphere Diagnostic Metric MPI-INM"
-        area: Stratosphere
+        area: stratosphere
         control_model: MPI-ESM-LR
         exp_model: inmcm4
-        obs_models: [ERA-Interim]
+        obs_models: [ERA-Interim]  # list to hold models that are NOT for metrics but for obs operations
+        additional_metrics: [ERA-Interim]  # list to hold additional datasets for metrics
         start: 1997/12/01
         end: 2002/12/01
@@ -79,22 +82,22 @@
         control_model: MPI-ESM-LR
         exp_model: MPI-ESM-MR
         obs_models: [ERA-Interim]
-        script: autoassess/plot_autoassess_stratosphere.py
+        script: autoassess/plot_autoassess_metrics.py
         ancestors: ['*/autoassess_strato_test_1']
         title: "Plot Stratosphere Metrics MPI-MPI"
-        plot_name: "Metrics"
+        plot_name: "Stratosphere_Metrics"
         diag_tag: aa_strato
         diag_name: autoassess_strato_test_1
-        plot_title: "MPI-ESM-LR_vs_MPI-ESM-MR"
+        error_metric: ERA-Interim  # has to be from additional_metrics list
       plot_strato_test_2: &plot_strato_test_2_settings
         <<: *autoassess_strato_test_2_settings
         control_model: MPI-ESM-LR
exp_model: inmcm4 obs_models: [ERA-Interim] - script: autoassess/plot_autoassess_stratosphere.py + script: autoassess/plot_autoassess_metrics.py ancestors: ['*/autoassess_strato_test_2'] title: "Plot Stratosphere Metrics MPI-MPI" - plot_name: "Metrics" + plot_name: "Stratosphere_Metrics" diag_tag: aa_strato diag_name: autoassess_strato_test_2 - plot_title: "MPI-ESM-LR_vs_inmcm4" + error_metric: ERA-Interim # has to be from additional_metrics list diff --git a/esmvaltool/recipes/recipe_capacity_factor.yml b/esmvaltool/recipes/recipe_capacity_factor.yml new file mode 100644 index 0000000000..dca3bffd24 --- /dev/null +++ b/esmvaltool/recipes/recipe_capacity_factor.yml @@ -0,0 +1,55 @@ +# ESMValTool +# recipe_capacity_factor.yml +--- +documentation: + description: | + Diurnal Temperature Variation (DTR) Indicator and Wind Capacity Factor + + authors: + - hunt_al + - manu_ni + - lled_ll + - caro_lo + - bojo_dr + - gonz_nu + - pere_nu + + projects: + - c3s-magic + +datasets: + - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1980, end_year: 2005} + #- {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: rcp26, ensemble: r1i1p1, start_year: 2021, end_year: 2025} + #- {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: rcp45, ensemble: r1i1p1, start_year: 2031, end_year: 2035} + +preprocessors: + preproc: + regrid: + target_grid: IPSL-CM5A-MR + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + extract_region: + start_longitude: 200 + end_longitude: 300 + start_latitude: 27 + end_latitude: 70 + extract_season: + season: djf + +diagnostics: + capacity_factor: + description: Calculate the wind power capacity factor. + variables: + sfcWind: + preprocessor: preproc + mip: day + scripts: + main: + script: magic_bsc/capacity_factor.r + power_curves: + - PowerCurves/Enercon_E70_2.3MW.txt + - PowerCurves/Gamesa_G80_2.0MW.txt + - PowerCurves/Gamesa_G87_2.0MW.txt + - PowerCurves/Vestas_V100_2.0MW.txt + - PowerCurves/Vestas_V110_2.0MW.txt diff --git a/esmvaltool/recipes/recipe_clouds_bias.yml b/esmvaltool/recipes/recipe_clouds_bias.yml new file mode 100644 index 0000000000..d490286f16 --- /dev/null +++ b/esmvaltool/recipes/recipe_clouds_bias.yml @@ -0,0 +1,205 @@ +# ESMValTool +# recipe_clouds_bias.yml +--- +documentation: + description: | + Diagnostics of clouds and hydrological cycle multi-model mean, mean bias, + absolute bias, relative bias. 
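For orientation on the bias diagnostics defined in this recipe: once every model is regridded to the common 2x2 grid, the plotted quantities reduce to elementwise arithmetic against the reference climatology. A minimal numpy sketch under that assumption (hypothetical arrays; the recipe's actual implementation is clouds/clouds_bias.ncl):

    import numpy as np

    # Hypothetical model climatologies, already regridded to one grid and
    # stacked as (n_models, n_lat, n_lon); obs is the reference climatology.
    models = np.random.rand(12, 90, 180)
    obs = np.random.rand(90, 180) + 0.5

    mmm = models.mean(axis=0)      # multi-model mean
    bias = mmm - obs               # multi-model mean bias
    abs_bias = np.abs(bias)        # absolute bias
    rel_bias = 100.0 * bias / obs  # relative bias, percent of reference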
+ + authors: + - laue_ax + + maintainer: + - laue_ax + + references: + - flato13ipcc + + projects: + - embrace + +datasets: + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: 
IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical,
+    ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+  - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+
+
+preprocessors:
+  clim:
+    regrid:
+      target_grid: 2x2
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+    multi_model_statistics:
+      span: overlap
+      statistics: [mean]
+      exclude: [reference_dataset]
+
+
+diagnostics:
+
+  # **********************************************************************
+  # Flato et al. (2013) - IPCC AR5, chap. 9
+  # similar to fig. 9.2 a/b/c
+  # **********************************************************************
+  # Multi model mean, multi model mean bias, and mean absolute error
+  # (geographical distributions)
+  # **********************************************************************
+
+  clouds_bias_tas:
+    description: IPCC AR5 Ch. 9, Fig. 9.2 (near-surface temperature)
+    themes:
+      - clouds
+    realms:
+      - atmos
+    variables:
+      tas:
+        preprocessor: clim
+        reference_dataset: ERA-Interim
+        mip: Amon
+    additional_datasets:
+      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1,
+        start_year: 1986, end_year: 2005, tier: 3}
+    scripts:
+      clim: &clim_settings
+        script: clouds/clouds_bias.ncl
+        projection: Robinson
+        timemean: annualclim
+        plot_abs_diff: true
+        plot_rel_diff: false
+
+  # **********************************************************************
+  # Flato et al. (2013) - IPCC AR5, chap. 9
+  # similar to fig. 9.4
+  # **********************************************************************
+  # Multi model mean, multi model mean bias, mean absolute error, and
+  # mean relative error (geographical distributions)
+  # **********************************************************************
+
+  clouds_bias_pr:
+    description: IPCC AR5 Ch. 9, Fig. 9.4 (precipitation)
+    themes:
+      - clouds
+    realms:
+      - atmos
+    variables:
+      pr:
+        preprocessor: clim
+        reference_dataset: GPCP-SG
+        mip: Amon
+    additional_datasets:
+      - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2,
+        start_year: 1986, end_year: 2005, tier: 1}
+    scripts:
+      clim:
+        <<: *clim_settings
+        plot_abs_diff: true
+        plot_rel_diff: true
+
+  # **********************************************************************
+  # Multi model mean, multi model mean bias, mean absolute error, and
+  # mean relative error (geographical distributions) of total cloud cover
+  # **********************************************************************
+
+  clouds_bias_clt:
+    description: multi-model mean bias of annual mean compared with a
+      reference dataset (observations).
+    themes:
+      - clouds
+    realms:
+      - atmos
+    variables:
+      clt:
+        preprocessor: clim
+        reference_dataset: MODIS
+        mip: Amon
+    additional_datasets:
+      - {dataset: MODIS, project: obs4mips, level: L3, version: C5,
+        start_year: 2001, end_year: 2010, tier: 1}
+    scripts:
+      clim:
+        <<: *clim_settings
+        plot_abs_diff: true
+        plot_rel_diff: true
diff --git a/esmvaltool/recipes/recipe_clouds_ipcc.yml b/esmvaltool/recipes/recipe_clouds_ipcc.yml
new file mode 100644
index 0000000000..c74e58049f
--- /dev/null
+++ b/esmvaltool/recipes/recipe_clouds_ipcc.yml
@@ -0,0 +1,190 @@
+# ESMValTool
+# recipe_clouds_ipcc.yml
+---
+documentation:
+  description: |
+    Diagnostics of clouds and hydrological cycle multi-model mean bias,
+    zonal means; similar to IPCC AR5 fig. 9.5.
+
+  authors:
+    - laue_ax
+
+  maintainer:
+    - laue_ax
+
+  references:
+    - flato13ipcc
+
+  projects:
+    - esmval
+
+datasets:
+  - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical,
+  #   ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+  - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical,
+    ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+  - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+    start_year: 1986, end_year: 2005}
+  # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+  #   start_year: 1986, end_year: 2005}
+  # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1,
+  #   start_year: 1986, end_year: 2005}
+  # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1,
+  #   start_year: 1986, end_year: 2005}
+  - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1,
+ start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + + +preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + +diagnostics: + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.5 + # ********************************************************************** + # Difference of multi-model mean and reference data set (geographical + # distribution, annual mean) + zonal averages of individual models + # and multi-model mean (annual means). 
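The swcre, lwcre and netcre variables in the diagnostics below are flagged derive: true, i.e. computed from TOA flux fields rather than read from disk. Under the usual cloud radiative effect definitions (clear-sky minus all-sky upwelling flux at the top of the atmosphere), the derivation amounts to the sketch below (hypothetical arrays; signs follow the standard convention):

    import numpy as np

    # Hypothetical TOA flux fields in W m-2 on a common grid.
    rsut = 100.0 * np.random.rand(90, 180)    # all-sky shortwave up
    rsutcs = 100.0 * np.random.rand(90, 180)  # clear-sky shortwave up
    rlut = 100.0 * np.random.rand(90, 180)    # all-sky longwave up
    rlutcs = 100.0 * np.random.rand(90, 180)  # clear-sky longwave up

    swcre = rsutcs - rsut   # shortwave cloud radiative effect
    lwcre = rlutcs - rlut   # longwave cloud radiative effect
    netcre = swcre + lwcre  # net cloud radiative effect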
+ # ********************************************************************** + + clouds_ipcc_swcre: + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos + variables: + swcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: &clim_settings + script: clouds/clouds_ipcc.ncl + projection: Robinson + colormap: WhiteBlueGreenYellowRed + timemean: annualclim + + clouds_ipcc_lwcre: + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos + variables: + lwcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *clim_settings + + clouds_ipcc_netcre: + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos + variables: + netcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *clim_settings diff --git a/esmvaltool/recipes/recipe_combined_indices.yml b/esmvaltool/recipes/recipe_combined_indices.yml new file mode 100644 index 0000000000..42663aefc5 --- /dev/null +++ b/esmvaltool/recipes/recipe_combined_indices.yml @@ -0,0 +1,43 @@ +# ESMValTool +# recipe_combined_indices.yml +--- +documentation: + description: | + Recipe for for computing seasonal means or running avergaes, + combining indices from multiple models and computing area averages + + authors: + - hunt_al + - pere_nu + - manu_ni + + projects: + - c3s-magic + +datasets: + - {dataset: MPI-ESM-MR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1950, end_year: 2005} + #- {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} + #- {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + + +diagnostics: + combine_indices: + description: Combine indices and/or compute area weighted averages. + variables: + psl: + mip: Amon + scripts: + main: + script: magic_bsc/combined_indices.r + ## Weights for combining indices - if not "equal" or "NULL", must be the same length as the number of indices + region: NAO #Nino3, Nino3.4, Nino4, NAO or SOI + + # Compute running mean? + running_mean: 11 #3 + + # Compute seasons? + moninf: 12 # First month + monsup: 2 # Last month + + # Compute standarization? + standardized: true diff --git a/esmvaltool/recipes/recipe_consecdrydays.yml b/esmvaltool/recipes/recipe_consecdrydays.yml new file mode 100644 index 0000000000..6025788173 --- /dev/null +++ b/esmvaltool/recipes/recipe_consecdrydays.yml @@ -0,0 +1,24 @@ +--- +documentation: + description: 'Consecutive dry days' + authors: ['berg_pe'] + projects: ['c3s-magic'] + references: ['acknow_project'] + +datasets: + - {dataset: bcc-csm1-1-m, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2002} + +diagnostics: + + dry_days: + description: Calculating number of dry days. 
+    variables:
+      pr:
+    scripts:
+      consecutive_dry_days:
+        script: droughtindex/diag_cdd.py
+        dryindex: 'cdd'
+        plim: 1
+        frlim: 5
+        quickplot:
+          plot_type: pcolormesh
diff --git a/esmvaltool/recipes/recipe_cox18nature.yml b/esmvaltool/recipes/recipe_cox18nature.yml
new file mode 100644
index 0000000000..8e557a3478
--- /dev/null
+++ b/esmvaltool/recipes/recipe_cox18nature.yml
@@ -0,0 +1,86 @@
+# recipe_cox18nature.yml
+---
+documentation:
+
+  description: |
+    Reproducing the emergent constraint for ECS based on global temperature
+    variability (Cox et al., 2018).
+
+  authors:
+    - schl_ma
+
+  maintainer:
+    - schl_ma
+
+  references:
+    - cox18nature
+
+  projects:
+    - crescendo
+
+
+preprocessors:
+
+  spatial_mean:
+    average_region:
+      coord1: latitude
+      coord2: longitude
+
+
+diagnostics:
+
+  diag_ecs:
+    description: Calculate ECS.
+    scripts:
+      ecs:
+        script: climate_metrics/ecs.py
+        read_external_file: external_sources/ipcc_ar5.yml
+
+  diag_psi:
+    description: Calculate temperature variability metric psi.
+    variables:
+      tas: &tas_settings
+        preprocessor: spatial_mean
+        project: CMIP5
+        ensemble: r1i1p1
+        mip: Amon
+        exp: [historical, rcp85]
+        start_year: 1880
+        end_year: 2015
+        additional_datasets:
+          - {dataset: ACCESS1-0}
+          # Historical data end year is 2012, not 2005 (overlaps with RCP8.5)
+          # - {dataset: bcc-csm1-1}
+          - {dataset: BNU-ESM}
+          - {dataset: CanESM2}
+          - {dataset: CCSM4}
+          - {dataset: CNRM-CM5}
+          - {dataset: CSIRO-Mk3-6-0}
+          - {dataset: GFDL-ESM2M}
+          - {dataset: HadGEM2-ES}
+          - {dataset: GISS-E2-R}
+          - {dataset: inmcm4}
+          - {dataset: IPSL-CM5B-LR}
+          - {dataset: MIROC-ESM}
+          - {dataset: MPI-ESM-LR}
+          - {dataset: MRI-CGCM3}
+          - {dataset: NorESM1-M}
+      tasa:
+        <<: *tas_settings
+        additional_datasets:
+          - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, tier: 2}
+    scripts:
+      psi:
+        script: climate_metrics/psi.py
+
+  diag_cox18nature:
+    description: Plot emergent relationship between ECS and psi.
+    scripts:
+      cox18nature:
+        script: emergent_constraints/cox18nature.py
+        ancestors: [
+          'diag_ecs/ecs',
+          'diag_psi/psi',
+          'diag_psi/tas',
+          'diag_psi/tasa',
+        ]
diff --git a/esmvaltool/recipes/recipe_cvdp.yml b/esmvaltool/recipes/recipe_cvdp.yml
new file mode 100644
index 0000000000..7f56c36deb
--- /dev/null
+++ b/esmvaltool/recipes/recipe_cvdp.yml
@@ -0,0 +1,42 @@
+# ESMValTool
+# recipe_cvdp.yml
+---
+documentation:
+  description: |
+    Recipe for executing the NCAR CVDP package in the ESMValTool framework.
+
+  authors:
+    - phil_ad
+    - broe_bj
+
+  maintainer:
+    - broe_bj
+
+  references:
+    - phillips14eos
+
+  projects:
+    - esmval
+    - c3s-magic
+
+datasets:
+  - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1850, end_year: 2005}
+
+diagnostics:
+
+  diagnostic1:
+    description: Run the NCAR CVDP package.
+    variables:
+      ts:
+        mip: Amon
+      tas:
+        mip: Amon
+      pr:
+        mip: Amon
+      psl:
+        mip: Amon
+    scripts:
+      cvdp:
+        script: cvdp/cvdp_wrapper.py
+        quickplot:
+          plot_type: pcolormesh
diff --git a/esmvaltool/recipes/recipe_diurnal_temperature_index.yml b/esmvaltool/recipes/recipe_diurnal_temperature_index.yml
new file mode 100644
index 0000000000..9b96876294
--- /dev/null
+++ b/esmvaltool/recipes/recipe_diurnal_temperature_index.yml
@@ -0,0 +1,52 @@
+# ESMValTool
+# recipe_diurnal_temperature_index.yml
+---
+documentation:
+  description: |
+    Tool to compute the diurnal temperature indicator.
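Looking back at recipe_cox18nature.yml above: the psi metric of Cox et al. (2018) measures temperature variability as psi = sigma_T / sqrt(-ln(alpha_1)), with sigma_T the standard deviation and alpha_1 the lag-1 autocorrelation of annual global mean temperature, evaluated in sliding windows. A sketch under those assumptions (the window length is illustrative; the recipe's own implementation is climate_metrics/psi.py):

    import numpy as np

    def psi_series(tas_anom, window=55):
        """Sliding-window psi = sigma_T / sqrt(-ln(alpha_1)).
        The 55-year window is an assumption for illustration;
        alpha_1 is assumed to lie in (0, 1)."""
        values = []
        for i in range(len(tas_anom) - window + 1):
            w = tas_anom[i:i + window]
            sigma = np.std(w)
            alpha1 = np.corrcoef(w[:-1], w[1:])[0, 1]  # lag-1 autocorrelation
            values.append(sigma / np.sqrt(-np.log(alpha1)))
        return np.array(values)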
+ + authors: + - hunt_al + - pere_nu + - manu_ni + - caro_lo + + projects: + - c3s-magic + + +datasets: + # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990} + # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2040} + # - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1971, end_year: 2000} + # - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: day, exp: rcp45, ensemble: r1i1p1, start_year: 2040, end_year: 2060} + - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990} + - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: rcp26, ensemble: r1i1p1, start_year: 2030, end_year: 2080} + + +preprocessors: + preproc: + regrid: + target_grid: MPI-ESM-MR + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + extract_region: + start_longitude: -10 + end_longitude: 40 + start_latitude: 27 + end_latitude: 70 + +diagnostics: + diurnal_temperature_indicator: + description: Calculate the diurnal temperature indicator. + variables: + tasmax: + preprocessor: preproc + mip: day + tasmin: + preprocessor: preproc + mip: day + scripts: + main: + script: magic_bsc/diurnal_temp_index.r diff --git a/esmvaltool/recipes/recipe_ecs.yml b/esmvaltool/recipes/recipe_ecs.yml new file mode 100644 index 0000000000..5ab6f87287 --- /dev/null +++ b/esmvaltool/recipes/recipe_ecs.yml @@ -0,0 +1,96 @@ +# recipe_ecs.yml +--- +documentation: + + description: | + Calculate ECS using linear regression following Andrews et al. (2012). + + authors: + - schl_ma + + references: + - andrews12grl + + projects: + - crescendo + + +preprocessors: + + spatial_mean: + average_region: + coord1: latitude + coord2: longitude + + +diagnostics: + + cmip5: + description: Calculate ECS for all available CMIP5 models. 
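As a worked outline of what climate_metrics/ecs.py computes from each piControl / abrupt4xCO2 pair, following Andrews et al. (2012): regress the anomaly of net downward TOA flux (rtmt) against the anomaly of near-surface temperature (tas); the intercept estimates the 4xCO2 forcing F, the slope the (negative) feedback parameter, and ECS = -F / (2 * slope), the factor 2 converting from 4x to 2x CO2. A sketch with hypothetical annual global-mean series:

    import numpy as np

    # Hypothetical anomalies: abrupt4xCO2 minus piControl, annual global means.
    delta_tas = np.linspace(1.0, 5.0, 150)   # K
    delta_rtmt = 7.0 - 1.2 * delta_tas       # W m-2

    slope, intercept = np.polyfit(delta_tas, delta_rtmt, 1)  # lambda, F
    ecs = -intercept / (2.0 * slope)  # equilibrium climate sensitivity (K)
    print(round(ecs, 2))              # ~2.92 for these made-up numbers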
+ variables: + tas: &variable_settings + preprocessor: spatial_mean + project: CMIP5 + ensemble: r1i1p1 + mip: Amon + rtmt: + <<: *variable_settings + additional_datasets: + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} + - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} + - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 1, end_year: 150} + - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-H, exp: piControl, start_year: 1200, end_year: 1349} + - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5A-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + # Day is out of range for month (wait for iris > 2.0) + # - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + # - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} + scripts: + ecs: &ecs_script + script: climate_metrics/ecs.py + + cmip6: + description: Calculate ECS for all available CMIP6 models. 
+    variables:
+      tas:
+        <<: *variable_settings
+        project: CMIP6
+      rtmt:
+        <<: *variable_settings
+        project: CMIP6
+    additional_datasets:
+      - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999}
+      - {dataset: BCC-CSM2-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999}
+      - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299}
+      - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999}
+      - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999}
+      - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999}
+      - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349}
+      - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349}
+    scripts:
+      ecs:
+        <<: *ecs_script
diff --git a/esmvaltool/recipes/recipe_ensclus.yml b/esmvaltool/recipes/recipe_ensclus.yml
new file mode 100644
index 0000000000..63c4db43db
--- /dev/null
+++ b/esmvaltool/recipes/recipe_ensclus.yml
@@ -0,0 +1,102 @@
+# recipe_ensclus.yml
+---
+documentation:
+  description: |
+    Recipe for sub-ensemble selection.
+    The diagnostic groups ensemble members according to similar
+    characteristics and selects the most representative member
+    for each cluster, based on a k-means algorithm.
+
+  authors:
+    - mavi_ir
+    - hard_jo
+    - arno_en
+    - cort_su
+
+  maintainer:
+    - hard_jo
+
+  references:
+    - straus07jcli
+
+  projects:
+    - c3s-magic
+
+datasets:
+  - {dataset: ACCESS1-0, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: ACCESS1-3, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: bcc-csm1-1, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: bcc-csm1-1-m, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CanESM2, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CCSM4, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CESM1-BGC, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CESM1-CAM5, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CESM1-FASTCHEM, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CESM1-WACCM, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CMCC-CESM, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  #- {dataset: CMCC-CM, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CMCC-CMS, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CNRM-CM5, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  - {dataset: CSIRO-Mk3-6-0, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005}
+  #- {dataset: EC-EARTH, project: CMIP5, mip: Amon, 
exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: FGOALS-g2, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + #- {dataset: FGOALS-s2, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: FIO-ESM, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: GFDL-CM2p1, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: HadCM3, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + #- {dataset: HadGEM2-AO, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: HadGEM2-CC, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: HadGEM2-ES, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + #- {dataset: MIROC5, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + #- {dataset: MIROC-ESM-CHEM, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + #- {dataset: MIROC-ESM, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + #- {dataset: MRI-CGCM3, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2005} + + +preprocessors: + preproc: + extract_levels: false + regrid: + target_grid: 2.5x2.5 + scheme: linear_extrapolate + + +diagnostics: + EnsClus: + description: Ensemble clustering + variables: + pr: + preprocessor: preproc + scripts: + main: + script: ensclus/ensclus.py + title: "Ensemble clustering diagnostic" + + ## Information required: + ##-------------------------------about data------------------------------------------- + ## Write only letters or numbers, no punctuation marks! 
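Before the script settings: conceptually, ensclus.py reduces each ensemble member's field to a few principal components, clusters those with k-means, and selects per cluster the member closest to the centroid. A sketch of the selection step (scikit-learn is used here purely for illustration; the recipe's own script is ensclus/ensclus.py):

    import numpy as np
    from sklearn.cluster import KMeans

    # Hypothetical matrix: one row per ensemble member, one column per PC.
    pcs = np.random.rand(30, 4)

    km = KMeans(n_clusters=3, n_init=10).fit(pcs)
    for k, center in enumerate(km.cluster_centers_):
        members = np.where(km.labels_ == k)[0]
        # Representative member: smallest distance to the cluster centroid.
        rep = members[np.argmin(np.linalg.norm(pcs[members] - center, axis=1))]
        print(f"cluster {k}: representative member {rep}")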
+        season: 'JJA'  #seasonal average
+        area: 'EU'  #regional average (examples: 'EAT': Euro-Atlantic,
+                    #                            'PNA': Pacific North American,
+                    #                            'NH': Northern Hemisphere,
+                    #                            'EU': Europe)
+        extreme: '75th_percentile'  #XXth_percentile (XX arbitrary), mean, maximum, std, trend
+        #---------------------about cluster analysis------------------------------------------
+        numclus: 3  #number of clusters
+        #Either set perc or numpcs:
+        perc: 80  #cluster analysis is applied on a number of PCs such that they explain
+                  #'perc' of total variance
+        numpcs: 0  #number of PCs to retain. Has priority over perc unless set to 0
+        max_plot_panels: 72  #threshold in number of panels in a plot to create multiple figures
diff --git a/esmvaltool/recipes/recipe_extreme_events.yml b/esmvaltool/recipes/recipe_extreme_events.yml
new file mode 100644
index 0000000000..a2e05be09f
--- /dev/null
+++ b/esmvaltool/recipes/recipe_extreme_events.yml
@@ -0,0 +1,86 @@
+# ESMValTool
+# recipe_extreme_events.yml
+---
+documentation:
+  description: |
+    Calculate indices for monitoring changes in extremes based on daily
+    temperature and precipitation data, producing Gleckler and timeline
+    plots of these as shown in the IPCC AR4 report.
+
+  authors:
+    - broe_bj
+    - sand_ma
+    - mohr_cw
+    - arno_en
+    - hard_jo
+
+  maintainer:
+    - sand_ma
+
+  references:
+    - zhang-2011
+
+  projects:
+    - crescendo
+    - c3s-magic
+
+#preprocessor:
+#  prep0:
+
+datasets:
+  - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000}
+  - {dataset: MPI-ESM-P, project: CMIP5, exp: 
historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1981, end_year: 2000} + +# - {dataset: EOBS, project: OBS, type: reanaly, version: 1, start_year: 1981, end_year: 2005, tier: 3} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 1981, end_year: 2000, tier: 3} + +diagnostics: + extreme_events: + description: calculate extreme events + variables: + pr: + mip: day + tas: + mip: day + tasmax: + mip: day + tasmin: + mip: day + + scripts: + main: + script: extreme_events/extreme_events.R + reference_datasets: ["ERA-Interim", "BNU-ESM", "ACCESS1-0", "ACCESS1-3"] + regrid_dataset: ERA-Interim + mip_name: CMIP + timeseries_idx: ["sdiiETCCDI_yr", "r95pETCCDI_yr", "rx5dayETCCDI_yr", "rx1dayETCCDI_yr", "cddETCCDI_yr", "fdETCCDI_yr", "trETCCDI_yr", "txnETCCDI_yr", "txxETCCDI_yr", "tnnETCCDI_yr", "tnxETCCDI_yr"] + gleckler_idx: ["sdiiETCCDI_yr", "r95pETCCDI_yr", "rx5dayETCCDI_yr", "rx1dayETCCDI_yr", "cddETCCDI_yr", "fdETCCDI_yr", "trETCCDI_yr", "txnETCCDI_yr", "txxETCCDI_yr", "tnnETCCDI_yr", "tnxETCCDI_yr"] + ts_plt: true + glc_plt: true + base_range: [1981, 2000] + analysis_range: [1981, 2000] diff --git a/esmvaltool/recipes/recipe_extreme_index.yml b/esmvaltool/recipes/recipe_extreme_index.yml new file mode 100644 index 0000000000..95683b7150 --- /dev/null +++ b/esmvaltool/recipes/recipe_extreme_index.yml @@ -0,0 +1,61 @@ +# ESMValTool +# recipe_extreme_index.yml +--- +documentation: + description: | + Tool to compute time series of a number of extreme events: heatwave, + coldwave, heavy precipitation, drought and high wind. + + authors: + - hunt_al + - pere_nu + - manu_ni + - caro_lo + + projects: + - c3s-magic + + references: + - alexander + +datasets: + - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1971, end_year: 2000} + - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: day, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2040} + +preprocessors: + preproc: + extract_region: + start_longitude: -20 + end_longitude: 40 + start_latitude: 30 + end_latitude: 70 + +diagnostics: + extreme_index: + description: Calculate insurance extreme indices and their combination. + variables: + tasmax: + preprocessor: preproc + mip: day + tasmin: + preprocessor: preproc + mip: day + sfcWind: + preprocessor: preproc + mip: day + pr: + preprocessor: preproc + mip: day + + scripts: + metric: + script: magic_bsc/extreme_index.r + + weight_t90p: 0.2 + weight_t10p: 0.2 + weight_Wx: 0.2 + weight_rx5day: 0.2 + weight_cdd: 0.2 + + # Compute running mean? 
+ running_mean: 5 #3 diff --git a/esmvaltool/recipes/recipe_flato13ipcc.yml b/esmvaltool/recipes/recipe_flato13ipcc.yml index 008d77384e..865fa06485 100644 --- a/esmvaltool/recipes/recipe_flato13ipcc.yml +++ b/esmvaltool/recipes/recipe_flato13ipcc.yml @@ -1,141 +1,813 @@ +# ESMValTool # recipe_flato13ipcc.yml --- documentation: description: | Reproducing selected figures from IPCC AR5, chap. 9 (Flato et al., 2013) - 9.42a + 9.2, 9.4, 9.5, 9.8, 9.42a. authors: + - bock_ls + - laue_ax - schl_ma + maintainer: + - righ_ma + references: - flato13ipcc projects: + - embrace - esmval - crescendo preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + spatial_mean: average_region: coord1: latitude coord2: longitude + clim_ref: + regrid: + target_grid: reference_dataset + scheme: linear + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset, alternative_dataset] diagnostics: - ecs: - description: Calculate ECS. + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.2 a/b/c + # ********************************************************************** + # Multi model mean, multi model mean bias, and mean absolute error + # (geographical distributions) + # ********************************************************************** + + fig09-2: + description: IPCC AR5 Ch. 9, Fig. 9.2 (near-surface temperature) + themes: + - phys + realms: + - atmos + variables: + tas: + preprocessor: clim + reference_dataset: ERA-Interim + mip: Amon + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, + start_year: 1986, end_year: 2005, tier: 3} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - 
{dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-2: &fig-9-2_and_9-4_settings + script: clouds/clouds_bias.ncl + projection: Robinson + timemean: annualclim + plot_abs_diff: true + plot_rel_diff: false + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 
9.4
+  # **********************************************************************
+  # Multi model mean, multi model mean bias, mean absolute error, and
+  # mean relative error (geographical distributions)
+  # **********************************************************************
+
+  fig09-4:
+    description: IPCC AR5 Ch. 9, Fig. 9.4 (precipitation)
+    themes:
+      - clouds
+    realms:
+      - atmos
+    variables:
+      pr:
+        preprocessor: clim
+        reference_dataset: GPCP-SG
+        mip: Amon
+    additional_datasets:
+      - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2,
+        start_year: 1986, end_year: 2005, tier: 1}
+      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical,
+        ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical,
+      #   ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical,
+        ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      - {dataset: CESM1-WACCM, project: CMIP5, exp: historical,
+        ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1,
+      #   start_year: 1986, end_year: 2005}
+      # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1,
+      #   start_year: 1986, end_year: 2005}
+      # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical,
+      #   ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical,
+        ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      # - {dataset: FGOALS-g2, project: CMIP5, exp: historical,
+      #   ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 2005}
+      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical,
+        ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      # - {dataset: GISS-E2-H, project: CMIP5, exp: historical,
+      #   ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical,
+        ensemble: r1i1p1, start_year: 1986, end_year: 2005}
+      - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1,
+        start_year: 1986, end_year: 
2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-4: + <<: *fig-9-2_and_9-4_settings + plot_abs_diff: true + plot_rel_diff: true + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.5 + # ********************************************************************** + # Difference of multi-model mean and reference data set (geographical + # distribution, annual mean) + zonal averages of individual models + # and multi-model mean (annual means). 
+ # ********************************************************************** + + fig09-5a: + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos variables: - tas: &spatial_mean_cmip5_r1i1p1_amon_t2ms + swcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} 
+ # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-5a: &fig-9-5_settings + script: clouds/clouds_ipcc.ncl + projection: Robinson + colormap: WhiteBlueGreenYellowRed + timemean: annualclim + + fig09-5b: + description: differences of multi-model mean and reference dataset + themes: + - clouds + realms: + - atmos + variables: + lwcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 
1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-5b: + <<: *fig-9-5_settings + + fig09-5c: + description: differences of multi-model mean and reference 
dataset + themes: + - clouds + realms: + - atmos + variables: + netcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, 
+ start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + scripts: + fig09-5c: + <<: *fig-9-5_settings + + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.8 + # ********************************************************************** + # Time series of anomalies of annual and global surface temperature + # ********************************************************************** + + fig09-8: + description: IPCC AR5 Ch. 9, Fig. 
9.8 (near-surface temperature) + themes: + - phys + realms: + - atmos + variables: + tas: + preprocessor: clim_ref + reference_dataset: HadCRUT4 + alternative_dataset: HadCRUT4-clim + mip: Amon + field: T2Ms + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1870 + end_year: 2004 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + #- {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + #- {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + #- {dataset: EC-EARTH, ensemble: r6i1p1} + #- {dataset: FGOALS-g2} + #- {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + #- {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: HadCRUT4, project: OBS, type: ground, version: 1, + start_year: 1870, end_year: 2017, tier: 2} + - {dataset: HadCRUT4-clim, project: OBS, type: ground, version: 1, + start_year: 1976, end_year: 1976, tier: 2} + scripts: + fig09-8: + script: ipcc_ar5/tsline.ncl + time_avg: "yearly" + ts_anomaly: "anom" #"anom" for anomaly-plot else "noanom" + ref_start: 1961 #start year of reference period for anomalies + ref_end: 1990 #end year of reference period for anomalies + range_option: 0 + plot_units: "degC" + # Plot style + styleset: CMIP5 + + + # ********************************************************************** + # Flato et al. (2013) - IPCC AR5, chap. 9 + # similar to fig. 9.42a + # ********************************************************************** + # + # ********************************************************************** + + ecs_cmip5: + description: Calculate ECS for CMIP5 models. 
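A note on method: climate_metrics/ecs.py is assumed to implement the Gregory et al. (2004) regression, fitting annual-mean anomalies of net TOA flux (rtnt) against near-surface temperature (tas) between the abrupt4xCO2 and piControl runs, which is why the tas and rtnt variables defined below span matching 150-year windows. A minimal numpy sketch of that estimate (function and variable names are hypothetical, not the script's API):

    import numpy as np

    def gregory_ecs(tas_4x, rtnt_4x, tas_pi, rtnt_pi):
        """ECS from annual global means (1-D arrays, one value per year).
        Anomalies are taken against the piControl mean; the real script
        may instead remove a linear piControl drift."""
        dtas = tas_4x - tas_pi.mean()
        drtnt = rtnt_4x - rtnt_pi.mean()
        # Fit N = F + lambda * T: slope lambda < 0, intercept F ~ 4xCO2 forcing
        lam, forcing = np.polyfit(dtas, drtnt, 1)
        # Equilibrium warming under 4xCO2 is -F/lambda; halve for a doubling
        return -forcing / (2.0 * lam)

np.polyfit keeps the sketch dependency-free; the actual diagnostic works on preprocessed ESMValTool output rather than raw arrays.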
+ themes: + - EC + realms: + - atmos + variables: + tas: &ecs_settings preprocessor: spatial_mean project: CMIP5 ensemble: r1i1p1 mip: Amon - field: T2Ms - rtmt: - <<: *spatial_mean_cmip5_r1i1p1_amon_t2ms + rtnt: + <<: *ecs_settings + derive: true additional_datasets: - - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} - - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} - - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} - - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} - - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} - - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 1, end_year: 150} - - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} - - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} - - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} - - {dataset: GISS-E2-H, exp: piControl, start_year: 1200, end_year: 1349} - - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} - - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} - - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} - - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5A-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} - - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} - - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} - - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: abrupt4xCO2, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + - {dataset: bcc-csm1-1-m, exp: abrupt4xCO2, start_year: 240, end_year: 389} + - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: CanESM2, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} + - {dataset: CCSM4, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 1, end_year: 150} + - {dataset: CSIRO-Mk3-6-0, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: GISS-E2-H, exp: piControl, start_year: 1200, end_year: 1349} + - {dataset: GISS-E2-H, exp: abrupt4xCO2, start_year: 1850, 
end_year: 1999} + - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} + - {dataset: GISS-E2-R, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: abrupt4xCO2, start_year: 2090, end_year: 2239} + - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5A-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: abrupt4xCO2, start_year: 2100, end_year: 2249} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} + - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-LR, exp: abrupt4xCO2, start_year: 1850, end_year: 1999} # Day is out of range for month (wait for iris > 2.0) - # - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} - # - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} - # Because of issues in the date format, these models raise an - # AttributeError: 'NoneType' object has no attribute 'units' - # Can be solved by removing lines 104-106 of ESMValTool/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py, - # but leads to failing tests - # - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} - # - {dataset: MIROC-ESM, exp: abrupt4xCO2, start_year: 1, end_year: 150} + # - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + # - {dataset: ACCESS1-0, exp: abrupt4xCO2, start_year: 300, end_year: 449} scripts: - ecs: + ecs: &ecs_script script: climate_metrics/ecs.py - plot_ecs_regression: 'True' - output_name: 'ecs' - fig09-42a: - description: Plot ECS vs. GMSAT. + fig09-42a_cmip5: + description: Plot ECS vs. GMSAT for CMIP5 models. 
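The spatial_mean preprocessor referenced by *ecs_settings presumably reduces each field to an area-weighted global average before the diagnostic runs, so the GMSAT plotted against ECS is just the time mean of that series. A sketch of the usual cos-latitude weighting, assuming a regular grid:

    import numpy as np

    def global_mean(field, lat):
        """Area-weighted global mean of a (time, lat, lon) array;
        weights are proportional to cos(latitude)."""
        w = np.cos(np.deg2rad(lat))
        zonal = field.mean(axis=-1)                 # average over longitude
        return (zonal * w).sum(axis=-1) / w.sum()   # weighted latitude average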
+ themes: + - EC + - phys + realms: + - atmos variables: tas: - <<: *spatial_mean_cmip5_r1i1p1_amon_t2ms + <<: *ecs_settings additional_datasets: - - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} - - {dataset: bcc-csm1-1, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} - - {dataset: bcc-csm1-1-m, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: CanESM2, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} - - {dataset: CCSM4, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: CNRM-CM5, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 1, end_year: 150} - - {dataset: CSIRO-Mk3-6-0, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} - - {dataset: GFDL-CM3, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: GISS-E2-H, exp: piControl, start_year: 1200, end_year: 1349} - - {dataset: GISS-E2-H, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} - - {dataset: GISS-E2-R, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} - - {dataset: inmcm4, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5A-LR, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} - - {dataset: IPSL-CM5B-LR, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} - - {dataset: MIROC5, exp: historical, start_year: 1961, end_year: 1990} - - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} - - {dataset: MPI-ESM-LR, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: bcc-csm1-1, exp: piControl, start_year: 160, end_year: 309} + - {dataset: bcc-csm1-1, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: bcc-csm1-1-m, exp: piControl, start_year: 240, end_year: 389} + - {dataset: bcc-csm1-1-m, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: CanESM2, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: CanESM2, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: CCSM4, exp: piControl, start_year: 800, end_year: 949} + - {dataset: CCSM4, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: CNRM-CM5, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM5, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: CSIRO-Mk3-6-0, exp: piControl, start_year: 1, end_year: 150} + - {dataset: CSIRO-Mk3-6-0, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: GFDL-CM3, exp: piControl, start_year: 1, end_year: 150} + - {dataset: GFDL-CM3, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: GISS-E2-H, exp: piControl, start_year: 1200, end_year: 1349} + - {dataset: GISS-E2-H, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: GISS-E2-R, exp: piControl, start_year: 3331, end_year: 3480} + - {dataset: GISS-E2-R, exp: 
historical, start_year: 1961, end_year: 1990} + - {dataset: inmcm4, exp: piControl, start_year: 2090, end_year: 2239} + - {dataset: inmcm4, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: IPSL-CM5A-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5A-LR, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: IPSL-CM5B-LR, exp: piControl, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM5B-LR, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: MIROC5, exp: piControl, start_year: 2100, end_year: 2249} + - {dataset: MIROC5, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} + - {dataset: MIROC-ESM, exp: historical, start_year: 1961, end_year: 1990} + - {dataset: MPI-ESM-LR, exp: piControl, start_year: 2015, end_year: 2164} + - {dataset: MPI-ESM-LR, exp: historical, start_year: 1961, end_year: 1990} # Day is out of range for month (wait for iris > 2.0) - # - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} - # - {dataset: ACCESS1-0, exp: historical, start_year: 1961, end_year: 1990} - # Because of issues in the date format, these models raise an - # AttributeError: 'NoneType' object has no attribute 'units' - # Can be solved by removing lines 104-106 of ESMValTool/esmvaltool/cmor/_fixes/CMIP5/MIROC_ESM.py, - # but leads to failing tests - # - {dataset: MIROC-ESM, exp: piControl, start_year: 1800, end_year: 1949} - # - {dataset: MIROC-ESM, exp: historical, start_year: 1961, end_year: 1990} + # - {dataset: ACCESS1-0, exp: piControl, start_year: 300, end_year: 449} + # - {dataset: ACCESS1-0, exp: historical, start_year: 1961, + # end_year: 1990} scripts: - fig09-42a: + fig09-42a: &fig09_42a_script script: ipcc_ar5/ch09_fig09_42a.py - ancestors: ['tas', 'ecs/ecs'] - ecs_filename: 'ecs' - output_name: 'ch09_fig09-42a' + ancestors: ['tas', 'ecs_cmip5/ecs'] tas_units: celsius save: bbox_inches: tight orientation: landscape - axes_functions: + axes_functions: &axes_functions set_title: GMSAT vs. ECS for CMIP5 models set_xlabel: ECS / °C set_ylabel: GMSAT / °C @@ -146,3 +818,67 @@ diagnostics: bbox_to_anchor: [1.05, 0.5] borderaxespad: 0.0 ncol: 2 + + ecs_cmip6: + description: Calculate ECS for CMIP6 models. 
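As in the CMIP5 block, rtnt (net downward TOA radiation) is not a model output variable, hence derive: true; the derivation is assumed to combine the standard CMOR radiation fields:

    def derive_rtnt(rsdt, rsut, rlut):
        # Net downward TOA flux in W m-2: incoming shortwave minus
        # reflected shortwave and outgoing longwave (CMIP sign conventions).
        return rsdt - rsut - rlut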
+ themes: + - EC + realms: + - atmos + variables: + tas: + <<: *ecs_settings + project: CMIP6 + rtnt: + <<: *ecs_settings + project: CMIP6 + derive: true + additional_datasets: + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: abrupt-4xCO2, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299} + - {dataset: GISS-E2-1-G, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC6, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: abrupt-4xCO2, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + scripts: + ecs: + <<: *ecs_script + + fig09-42a_cmip6: + description: Plot ECS vs. GMSAT for CMIP6 models. + themes: + - EC + - phys + realms: + - atmos + variables: + tas: + <<: *ecs_settings + project: CMIP6 + additional_datasets: + - {dataset: BCC-CSM2-MR, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: BCC-CSM2-MR, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: CNRM-CM6-1, exp: piControl, ensemble: r1i1p1f2, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: CNRM-CM6-1, exp: historical, ensemble: r1i1p1f2, grid: gr, start_year: 1961, end_year: 1990} + - {dataset: GISS-E2-1-G, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 4150, end_year: 4299} + - {dataset: GISS-E2-1-G, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: IPSL-CM6A-LR, exp: piControl, ensemble: r1i1p1f1, grid: gr, start_year: 1850, end_year: 1999} + - {dataset: IPSL-CM6A-LR, exp: historical, ensemble: r1i1p1f1, grid: gr, start_year: 1961, end_year: 1990} + - {dataset: MIROC6, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 3200, end_year: 3349} + - {dataset: MIROC6, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + - {dataset: MRI-ESM2-0, exp: piControl, ensemble: r1i1p1f1, grid: gn, start_year: 1850, end_year: 1999} + - {dataset: MRI-ESM2-0, exp: historical, ensemble: r1i1p1f1, grid: gn, start_year: 1961, end_year: 1990} + scripts: + fig09-42a: + <<: *fig09_42a_script + ancestors: ['tas', 'ecs_cmip6/ecs'] + axes_functions: + <<: *axes_functions + set_title: GMSAT vs. 
ECS for CMIP6 models + dataset_style: cmip6 diff --git a/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml b/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml new file mode 100644 index 0000000000..53c274fa9f --- /dev/null +++ b/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml @@ -0,0 +1,50 @@ +# ESMValTool +# recipe_heatwaves_coldwaves.yml +--- +documentation: + description: | + Tool to compute the number of days exceeding a quantile + for a minimum number of consecutive days. + + authors: + - hunt_al + - pere_nu + - manu_ni + - caro_lo + + projects: + - c3s-magic + +datasets: +# - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990} +# - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, mip: day, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2040} + - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1971, end_year: 2000} + - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: day, exp: rcp85, ensemble: r1i1p1, start_year: 2060, end_year: 2080} + +preprocessors: + preproc: + regrid: + target_grid: bcc-csm1-1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + extract_region: + start_longitude: 220 + end_longitude: 320 + start_latitude: 30 + end_latitude: 80 + +diagnostics: + heatwaves_coldwaves: + description: Calculate heatwaves and coldwaves. + variables: + tasmin: + preprocessor: preproc + mip: day + scripts: + main: + script: magic_bsc/extreme_spells.r + quantile: 0.80 # quantile defining the exceedance/non-exceedance threshold + min_duration: 5 # Min duration of a heatwave/coldwave event in days + operator: '<' # or '>' + season: winter # or summer diff --git a/esmvaltool/recipes/recipe_hyint.yml b/esmvaltool/recipes/recipe_hyint.yml new file mode 100644 index 0000000000..5dd8c785eb --- /dev/null +++ b/esmvaltool/recipes/recipe_hyint.yml @@ -0,0 +1,112 @@ +# recipe_hyint.yml +--- + +documentation: + description: | + Recipe for the HyInt package by E. Arnone and J.
von Hardenberg (ISAC-CNR) + + authors: + - arno_en + - hard_jo + + maintainer: + - arno_en + + references: + - giorgi11jc + - giorgi14jgr + + projects: + - c3s-magic + +preprocessors: + preproc: + mask_landsea: + mask_out: sea + +diagnostics: + hyint: + description: HyInt hydroclimatic indices calculation and plotting + variables: + pr: + preprocessor: preproc + project: CMIP5 + exp: [historical, rcp85] + ensemble: r1i1p1 + start_year: 1980 + end_year: 2020 + reference_dataset: "ACCESS1-0" + mip: day + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: MPI-ESM-MR} + - {dataset: IPSL-CM5A-MR} + - {dataset: CCSM4} + scripts: + main: + script: hyint/hyint.R + + # Reference normalization period to be used for normalized indices + norm_years: [1980, 1999] + + # Select one or more indices for timeseries and maps from the following + # list (order-sensitive): + # "pa_norm", "hyint", "int_norm", "r95_norm", "wsl_norm", "dsl_norm", "int", "dsl", "wsl" + select_indices: ["pa_norm", "hyint", "int_norm", "r95_norm", "wsl_norm", "dsl_norm"] + + # Select regions for timeseries and maps from the following list + # GL=Globe, GL60=Globe 60S/60N, TR=Tropics (30S/30N), SA=South America, + # AF=Africa, NA=North America, IN=India, EU=Europe, EA=East-Asia, + # AU=Australia + select_regions: ["GL", "SA", "AF", "EU", "EA"] + + # Select type of plot: + # 1) single panel lon/lat map per individual index, multi-year mean + # 2) 3-panel lon/lat maps per individual index with comparison to reference dataset, multi-year mean + # 3) multipanel of indices of lon/lat maps with comparison to reference dataset, multi-year mean + # 11) single panel with timeseries over required individual region + # 12) multipanel of indices with timeseries over multiple regions + # 13) multipanel of indices with timeseries for multiple models + # 14) multipanel of indices with summary of trend coefficients over multiple regions + # 15) multipanel of indices with summary of trend coefficients for multiple models + plot_type: [1, 2, 3, 12, 13, 14, 15] + + ## ------- Optional settings ---------- + # Select number of columns and rows in multi-panel figure + npancol: 2 # number of columns for trend/tseries multi-panel figures + npanrow: 3 # number of rows for trend/tseries multi-panel figures + + # Define whether model data should be regridded + # a) false to keep original resolution + # b) set desired regridding resolution in cdo format e.g., "r320x160" + # c) "REF" to use resolution of reference model + rgrid: "REF" + + # Select automated or pre-set range of values in plots + autolevels: true + + # Scale autolevels (factor multiplying automated range) + autolevels_scale: 1 # for maps and timeseries + autolevels_scale_t: 1 # for trend coefficients + + # data and maps + removedesert: false # T to remove (flag as NA) grid points with mean + # annual pr < 0.5 mm/day (deserts, Giorgi2014) + oplot_grid: false # plot grid points over maps + boxregion: false # !=0 plot region boxes over global maps with + # thickness = abs(boxregion); white (>0) or grey (<0).
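The trend options in the group below boil down to a least-squares linear fit of each (area-weighted, cf. weight_tseries) regional series, optionally rescaled to change per 100 years; hyint.R itself is R code and more elaborate, but a rough Python equivalent is:

    import numpy as np

    def trend_per_century(years, series):
        """Linear trend of an annual regional-mean series, with the
        stdev of the slope (cf. add_trend_sd), both scaled to 1/100
        years (cf. scale100years: true)."""
        coeffs, cov = np.polyfit(years, series, 1, cov=True)
        slope, slope_sd = coeffs[0], np.sqrt(cov[0, 0])
        return slope * 100.0, slope_sd * 100.0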
+ + # timeseries and trends + weight_tseries: true # adopt area weights in timeseries + trend_years: false # (a) F=all; (b) c(year1,year2) to apply trend + # calculation and plotting only to a limited + # time interval + add_trend: true # add linear trend to plot + add_trend_sd: true # add stdev range to timeseries + add_trend_sd_shade: true # add shade of stdev range to timeseries + add_tseries_lines: true # plot lines of timeseries over points + add_zeroline: true # plot a dashed line at y=0 + trend_years_only: false # limit timeseries plotting to trend_years + # time interval + scale100years: true # plot trends as 1/100 years + scalepercent: false # plot trends as % change (excluding hyint) diff --git a/esmvaltool/recipes/recipe_landcover.yml b/esmvaltool/recipes/recipe_landcover.yml new file mode 100644 index 0000000000..169974da29 --- /dev/null +++ b/esmvaltool/recipes/recipe_landcover.yml @@ -0,0 +1,69 @@ +# ESMValTool +# recipe_landcover.yml +--- +documentation: + description: | + Recipe for plotting the accumulated area, average fraction + and bias of landcover classes in comparison to ESA_CCI_LC data + for the full globe and large scale regions. + + authors: + - hage_st + - loew_al + - muel_bn + - stac_to + + maintainer: + - righ_ma + + references: + - georgievski18tac + + observation references: + - esacci-landcover + + projects: + - crescendo + +datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2008, end_year: 2012} + - {dataset: inmcm4, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2008, end_year: 2012} + - {dataset: ESACCI-LANDCOVER, project: OBS, type: sat, version: L4-LCCS-Map-300m-P5Y-aggregated-0.500000Deg, tier: 2, start_year: 2008, end_year: 2012} + +preprocessors: + prep0: + regrid: + target_grid: ESACCI-LANDCOVER + scheme: area_weighted + + +diagnostics: + landcover: + description: Doing ESACCI land cover analysis. + variables: + baresoilFrac: + preprocessor: prep0 + reference_dataset: ESACCI-LANDCOVER + mip: Lmon + grassFrac: + preprocessor: prep0 + reference_dataset: ESACCI-LANDCOVER + mip: Lmon + treeFrac: + preprocessor: prep0 + reference_dataset: ESACCI-LANDCOVER + mip: Lmon + shrubFrac: + preprocessor: prep0 + reference_dataset: ESACCI-LANDCOVER + mip: Lmon + cropFrac: + preprocessor: prep0 + reference_dataset: ESACCI-LANDCOVER + mip: Lmon + scripts: + landcover: + script: landcover/landcover.py + comparison: variable + colorscheme: seaborn-darkgrid + diff --git a/esmvaltool/recipes/recipe_lauer13jclim.yml b/esmvaltool/recipes/recipe_lauer13jclim.yml new file mode 100644 index 0000000000..c4ecf8a901 --- /dev/null +++ b/esmvaltool/recipes/recipe_lauer13jclim.yml @@ -0,0 +1,489 @@ +# ESMValTool +# recipe_lauer13jclim.yml +--- +documentation: + description: | + Diagnostics of clouds and hydrological cycle reproducing selected figures + from Lauer and Hamilton (2013).
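One metric reproduced further down in this recipe (Figure 8, Eq. 2 of Lauer and Hamilton, 2013) is the relative temporal standard deviation: the standard deviation of monthly anomalies, after subtracting the climatological mean seasonal cycle, normalised by the long-term mean. A sketch of that calculation for a single grid point or regional mean (float arrays assumed):

    import numpy as np

    def relative_interannual_variability(monthly, months):
        """`monthly` is a (time,) series, `months` the matching month
        numbers 1..12; returns the std of deseasonalised anomalies
        divided by the absolute long-term mean."""
        anom = monthly.astype(float).copy()
        for m in range(1, 13):
            sel = months == m
            anom[sel] -= monthly[sel].mean()   # remove climatological month mean
        return anom.std() / abs(monthly.mean())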
+ + authors: + - laue_ax + + maintainer: + - laue_ax + + references: + - lauer13jclim + + projects: + - esmval + - embrace + +datasets: + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CESM1-CAM5-1-FV, project: CMIP5, exp: historical, + # ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + # - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, + # start_year: 1986, end_year: 2005} + - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - 
{dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, + ensemble: r1i1p1, start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, + start_year: 1986, end_year: 2005} + + +preprocessors: + clim: + regrid: + target_grid: 2x2 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + +diagnostics: + + # ========================================================================== + # Lauer and Hamilton (2013) - Figure 1 + # Geographical distribution of multi-year annual/seasonal means (panel plot) + # ========================================================================== + + clouds_fig1_lwp: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + lwp: + preprocessor: clim + reference_dataset: UWisc + mip: Amon + derive: true + additional_datasets: + - {dataset: UWisc, project: OBS, type: sat, version: v2, + start_year: 1988, end_year: 2007, tier: 3} + scripts: + clim: &clim_settings + script: clouds/clouds.ncl + # projection + # ---------- + # map projection, e.g., Mollweide, Mercator, CylindricalEquidistant + projection: CylindricalEquidistant + # timemean + # -------- + # - annualclim: 1 plot (annual mean) + # - seasonalclim: 4 plots (DJF, MAM, JJA, SON) + timemean: annualclim + showdiff: false + + clouds_fig1_clt: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + reference_dataset: MODIS + mip: Amon + additional_datasets: + - {dataset: MODIS, project: obs4mips, level: L3, version: C5, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *clim_settings + + clouds_fig1_pr: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + pr: + preprocessor: clim + reference_dataset: GPCP-SG + mip: Amon + additional_datasets: + - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, + start_year: 1986, end_year: 2005, tier: 1} + scripts: + clim: + <<: *clim_settings + + clouds_fig1_swcre: + description: climatological annual means + themes: + - clouds + - phys + realms: + - atmos + variables: + swcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: 
L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *clim_settings + + clouds_fig1_lwcre: + description: climatological annual means + themes: + - clouds + - phys + realms: + - atmos + variables: + lwcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *clim_settings + + # ========================================================================== + # Lauer and Hamilton (2013) - Figure 3 + # Taylor plots of multi-year mean quantities + # ========================================================================== + + clouds_fig3_clt: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + reference_dataset: MODIS + mip: Amon + additional_datasets: + - {dataset: MODIS, project: obs4mips, level: L3, version: C5, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: &taylor_settings + script: clouds/clouds_taylor.ncl + # embracelegend + # ------------- + # - true: 1) save legend to separate file + # 2) plot alternative reference data set (if available) as a + # black or red star and use "altern. ref. dataset" as label + # - false: 1) plot legend into taylor plot (same file) + # 2) plot alternative reference data set (if available) using + # model symbols and use name of observations as label + embracelegend: false + # estimate_obs_uncertainty + # ------------------------ + # - true: estimate observational uncertainties from mean values + # (assuming fractions of obs. RMSE from documentation of the + # obs data); only available for "CERES-EBAF", "MODIS", + # "MODIS-L3" + # - false: do not estimate obs.
uncertainties from mean values + estimate_obs_uncertainty: false + # mask_ts_sea_ice + # --------------- + # - true: mask T < 272 K as sea ice (only for variable "ts") + # - false: no additional grid cells masked for variable "ts" + mask_ts_sea_ice: false + # styleset + # -------- + # styleset (line styles + symbols) used for plotting + # ("CMIP5", "DEFAULT") + styleset: CMIP5 + # timemean + # -------- + # - annualclim: 1 plot (annual mean) + # - seasonalclim: 4 plots (DJF, MAM, JJA, SON) + timemean: annualclim + # valid_fraction + # -------------- + # used for creating sea ice mask (mask_ts_sea_ice = true): fraction of + # valid time steps required to mask grid cell as valid data + valid_fraction: 0.5 + + clouds_fig3_lwp: + description: climatological annual means + themes: + - clouds + - phys + realms: + - atmos + variables: + lwp: + preprocessor: clim + reference_dataset: UWisc + mip: Amon + derive: true + additional_datasets: + - {dataset: UWisc, project: OBS, type: sat, version: v2, + start_year: 1988, end_year: 2007, tier: 3} + scripts: + clim: + <<: *taylor_settings + + clouds_fig3_swcre: + description: climatological annual means + themes: + - clouds + - phys + realms: + - atmos + variables: + swcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *taylor_settings + + clouds_fig3_lwcre: + description: climatological annual means + themes: + - clouds + - phys + realms: + - atmos + variables: + lwcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *taylor_settings + + clouds_fig3_pr: + description: climatological annual means + themes: + - clouds + realms: + - atmos + variables: + pr: + preprocessor: clim + reference_dataset: GPCP-SG + mip: Amon + additional_datasets: + - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, + start_year: 1986, end_year: 2005, tier: 1} + scripts: + clim: + <<: *taylor_settings + + # ========================================================================== + # Lauer and Hamilton (2013) - Equation 2, part of Figure 8 + # Interannual variability: relative temporal standard deviation calculated + # from monthly mean anomalies after subtracting the climatological mean + # seasonal cycle + # ========================================================================== + + clouds_fig8_lwp: + description: interannual variability + themes: + - clouds + realms: + - atmos + variables: + lwp: + preprocessor: clim + reference_dataset: UWisc + mip: Amon + derive: true + additional_datasets: + - {dataset: UWisc, project: OBS, type: sat, version: v2, + start_year: 1988, end_year: 2007, tier: 3} + scripts: + clim: &intera_settings + script: clouds/clouds_interannual.ncl + # colormap + # -------- + # colors for scale bar (e.g., WhiteBlueGreenYellowRed, rainbow) + colormap: WhiteBlueGreenYellowRed + # extrafiles + # ---------- + # write plots for individual models to separate files (true, false) + extrafiles: false + # projection + # ---------- + # map projection, e.g., Mollweide, Mercator, CylindricalEquidistant + projection: CylindricalEquidistant + # timemean + # -------- + # - annualclim: 1 plot (annual mean) + # - seasonalclim: 4 plots (DJF, MAM, JJA, SON) + 
timemean: annualclim + + clouds_fig8_clt: + description: interannual variability + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: clim + reference_dataset: MODIS + mip: Amon + additional_datasets: + - {dataset: MODIS, project: obs4mips, level: L3, version: C5, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *intera_settings + + clouds_fig8_swcre: + description: interannual variability + themes: + - clouds + - phys + realms: + - atmos + variables: + swcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *intera_settings + + clouds_fig8_lwcre: + description: interannual variability + themes: + - clouds + - phys + realms: + - atmos + variables: + lwcre: + preprocessor: clim + reference_dataset: CERES-EBAF + mip: Amon + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + start_year: 2001, end_year: 2010, tier: 1} + scripts: + clim: + <<: *intera_settings + + clouds_fig8_pr: + description: interannual variability + themes: + - clouds + realms: + - atmos + variables: + pr: + preprocessor: clim + reference_dataset: GPCP-SG + mip: Amon + additional_datasets: + - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, + start_year: 1986, end_year: 2005, tier: 1} + scripts: + clim: + <<: *intera_settings diff --git a/esmvaltool/recipes/recipe_miles_block.yml b/esmvaltool/recipes/recipe_miles_block.yml new file mode 100644 index 0000000000..0aabda8d08 --- /dev/null +++ b/esmvaltool/recipes/recipe_miles_block.yml @@ -0,0 +1,79 @@ +############################################################################### +## namelist_miles.xml +## +## Description +## Namelist for computing blocking using the MiLES package by P. Davini (ISAC-CNR) +## MiLES (Mid-Latitude Evaluation System) v0.31 +## +## Authors +## Paolo Davini (ISAC-CNR, Italy - p.davini@isac.cnr.it) +## J. von Hardenberg (ISAC-CNR, Italy - j.vonhardenberg@isac.cnr.it) +## E. Arnone (ISAC-CNR, Italy - e.arnone@isac.cnr.it) +## +## Project +## Copernicus C3S-MAGIC (C3S 34a Lot2) +## +## References: +## https://github.com/oloapinivad/MiLES +## Davini, P., C. Cagnazzo, S. Gualdi, and A. Navarra, 2012: Bidimensional Diagnostics, Variability, and Trends of Northern Hemisphere Blocking. J. Climate, 25, 6496–6509, doi: 10.1175/JCLI-D-12-00032.1 +## Tibaldi S, Molteni F. 1990. On the operational predictability of blocking. 
Tellus A 42(3): 343–365, doi:10.1034/j.1600-0870.1990.t01-2-00003.x +## +## This namelist is part of the ESMValTool +################################################################################ +# +# mask_landocean: false +# multi_model_statistics: false +--- +documentation: + description: | + Recipe for computing and plotting blocking statistics using + the MiLES (Mid-Latitude Evaluation System) package + + authors: + - hard_jo + - davi_pa + - arno_en + + references: + - davini18 + - davini12jclim + - tibaldi90tel + + projects: + - c3s-magic + +datasets: + - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1980, end_year: 1989} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989 } + +preprocessors: + preproc1: + extract_levels: + levels: 50000 + scheme: linear + regrid: + target_grid: 2.5x2.5 + lat_offset: False + scheme: linear_extrapolate + extract_region: + start_longitude: 0. + end_longitude: 360. + start_latitude: 1.25 + end_latitude: 90. + +diagnostics: + + miles_diagnostics: + description: MiLES Blocking Diagnostics + variables: + zg: + preprocessor: preproc1 + mip: day + reference_dataset: "ERA-Interim" + scripts: + miles_block: + script: miles/miles_block.R + seasons: DJF # Select season ('DJF','MAM','JJA','SON') + + + diff --git a/esmvaltool/recipes/recipe_miles_eof.yml b/esmvaltool/recipes/recipe_miles_eof.yml new file mode 100644 index 0000000000..fd25e7ebed --- /dev/null +++ b/esmvaltool/recipes/recipe_miles_eof.yml @@ -0,0 +1,80 @@ +############################################################################### +## namelist_miles.xml +## +## Description +## Namelist to compute EOFs using the MiLES package by P. Davini (ISAC-CNR) +## MiLES (Mid-Latitude Evaluation System) v0.31 +## +## Authors +## Paolo Davini (ISAC-CNR, Italy - p.davini@isac.cnr.it) +## J. von Hardenberg (ISAC-CNR, Italy - j.vonhardenberg@isac.cnr.it) +## E. Arnone (ISAC-CNR, Italy - e.arnone@isac.cnr.it) +## +## Project +## Copernicus C3S-MAGIC (C3S 34a Lot2) +## +## References: +## https://github.com/oloapinivad/MiLES +## Davini, P., C. Cagnazzo, S. Gualdi, and A. Navarra, 2012: Bidimensional Diagnostics, Variability, and Trends of Northern Hemisphere Blocking. J. Climate, 25, 6496–6509, doi: 10.1175/JCLI-D-12-00032.1 +## Tibaldi S, Molteni F. 1990. On the operational predictability of blocking. Tellus A 42(3): 343–365, doi:10.1034/j.1600-0870.1990.t01-2-00003.x +## +## This namelist is part of the ESMValTool +################################################################################ +# +# mask_landocean: false +# multi_model_statistics: false +--- +documentation: + description: | + Recipe for computing and plotting EOFs using + the MiLES (Mid-Latitude Evaluation System) package + + authors: + - hard_jo + - davi_pa + - arno_en + + references: + - davini18 + - davini12jclim + - tibaldi90tel + + projects: + - c3s-magic + +datasets: + - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1980, end_year: 1989} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989 } + +preprocessors: + preproc1: + extract_levels: + levels: 50000 + scheme: linear + regrid: + target_grid: 2.5x2.5 + lat_offset: False + scheme: linear_extrapolate + extract_region: + start_longitude: 0. + end_longitude: 360. + start_latitude: 1.25 + end_latitude: 90.
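With the preprocessing above, miles_eof.R receives daily 500 hPa geopotential height on a common 2.5 degree Northern Hemisphere grid and computes EOFs over the selected region. A generic latitude-weighted EOF-by-SVD sketch (not the MiLES implementation itself):

    import numpy as np

    def eofs(z500, lat, n_modes=3):
        """Leading EOFs of a (time, lat, lon) field via SVD with
        sqrt(cos(lat)) area weighting; returns weighted spatial modes,
        principal components and explained-variance fractions."""
        nt, ny, nx = z500.shape
        anom = z500 - z500.mean(axis=0)
        w = np.sqrt(np.cos(np.deg2rad(lat)))[:, None]   # (lat, 1) weights
        x = (anom * w).reshape(nt, ny * nx)
        u, s, vt = np.linalg.svd(x, full_matrices=False)
        pcs = u[:, :n_modes] * s[:n_modes]
        patterns = vt[:n_modes].reshape(n_modes, ny, nx)
        explained = s[:n_modes] ** 2 / (s ** 2).sum()
        return patterns, pcs, explained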
+
+diagnostics:
+
+  miles_diagnostics:
+    description: MiLES EOF Diagnostics
+    variables:
+      zg:
+        preprocessor: preproc1
+        mip: day
+        reference_dataset: "ERA-Interim"
+    scripts:
+      miles_eof:
+        script: miles/miles_eof.R
+        seasons: DJF # Select season ('DJF','MAM','JJA','SON','ALL') or your period as e.g. 'Jan_Feb_Mar'
+        teles: NAO # Select EOFs ('NAO','AO','PNA') or specify custom area as "lon1_lon2_lat1_lat2"
+
+
diff --git a/esmvaltool/recipes/recipe_miles_regimes.yml b/esmvaltool/recipes/recipe_miles_regimes.yml
new file mode 100644
index 0000000000..c57de864f1
--- /dev/null
+++ b/esmvaltool/recipes/recipe_miles_regimes.yml
@@ -0,0 +1,77 @@
+###############################################################################
+## namelist_miles.xml
+##
+## Description
+## Namelist to compute Weather Regimes using the MiLES package by P. Davini (ISAC-CNR)
+## MiLES (Mid-Latitude Evaluation System) v0.31
+##
+## Authors
+## Paolo Davini (ISAC-CNR, Italy - p.davini@isac.cnr.it)
+## J. von Hardenberg (ISAC-CNR, Italy - j.vonhardenberg@isac.cnr.it)
+## E. Arnone (ISAC-CNR, Italy - e.arnone@isac.cnr.it)
+##
+## Project
+## Copernicus C3S-MAGIC (C3S 34a Lot2)
+##
+## References:
+## https://github.com/oloapinivad/MiLES
+## S. Corti, F. Molteni and T. N. Palmer, 1999
+## "Signature of recent climate change in frequencies of natural
+## atmospheric circulation regimes". Nature 398, 799-802
+##
+## This namelist is part of the ESMValTool
+################################################################################
+#
+# mask_landocean: false
+# multi_model_statistics: false
+---
+documentation:
+  description: |
+    Recipe for computing and plotting weather regimes using
+    the MiLES (Mid-Latitude Evaluation System) package
+
+  authors:
+    - hard_jo
+    - davi_pa
+    - arno_en
+
+  references:
+    - davini18
+    - corti99nat
+
+  projects:
+    - c3s-magic
+
+datasets:
+  - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1980, end_year: 1989}
+  - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3, start_year: 1980, end_year: 1989}
+
+preprocessors:
+  preproc1:
+    extract_levels:
+      levels: 50000
+      scheme: linear
+    regrid:
+      target_grid: 2.5x2.5
+      lat_offset: False
+      scheme: linear_extrapolate
+    extract_region:
+      start_longitude: 0.
+      end_longitude: 360.
+      start_latitude: 1.25
+      end_latitude: 90.
+
+diagnostics:
+
+  miles_diagnostics:
+    description: MiLES Weather Regimes Diagnostics
+    variables:
+      zg:
+        preprocessor: preproc1
+        mip: day
+        reference_dataset: "ERA-Interim"
+    scripts:
+      miles_regimes:
+        script: miles/miles_regimes.R
+        seasons: DJF # Select season (beta: 'DJF' only possible option for now)
+        nclusters: 4 # beta: only 4 possible for now
diff --git a/esmvaltool/recipes/recipe_modes_of_variability.yml b/esmvaltool/recipes/recipe_modes_of_variability.yml
new file mode 100644
index 0000000000..46a46932b9
--- /dev/null
+++ b/esmvaltool/recipes/recipe_modes_of_variability.yml
@@ -0,0 +1,57 @@
+# ESMValTool
+# recipe_modes_of_variability.yml
+---
+documentation:
+  description: |
+    Tool to compute the RMSE between the observed and modelled patterns of
+    variability obtained through classification and their relative
+    bias (percentage) in the frequency of occurrence and the persistence of
+    each mode.
+
+  authors:
+    - torr_ve
+    - fuck_ne
+    - cort_ni
+    - guem_vi
+    - hunt_al
+    - pere_nu
+    - manu_ni
+
+  projects:
+    - c3s-magic
+
+  references:
+    - fuckar
+
+datasets:
+  - {dataset: bcc-csm1-1, project: CMIP5, start_year: 1971, end_year: 2000, ensemble: r1i1p1, exp: historical}
+  - {dataset: bcc-csm1-1, project: CMIP5, start_year: 2020, end_year: 2075, ensemble: r1i1p1, exp: rcp85}
+
+preprocessors:
+  preproc:
+    regrid:
+      target_grid: bcc-csm1-1
+      scheme: linear
+    extract_region:
+      start_longitude: 0
+      end_longitude: 360
+      start_latitude: 50
+      end_latitude: 90
+
+diagnostics:
+  weather_regime:
+    description: Compute modes of variability.
+    variables:
+      psl:
+        preprocessor: preproc
+        mip: Amon
+    scripts:
+      main:
+        script: magic_bsc/weather_regime.r
+        plot_type: polar # rectangular or polar
+
+        ncenters: 3
+        detrend_order: 2 # 0, 1 or 2 for daily data
+        cluster_method: "kmeans" # select hclust or kmeans
+        EOFS: false
+        frequency: 'SON' # Select a month (format: JAN, FEB, ...) or season (JJA, SON, MAM (only monthly), DJF)
diff --git a/esmvaltool/recipes/recipe_multimodel_products.yml b/esmvaltool/recipes/recipe_multimodel_products.yml
new file mode 100644
index 0000000000..7f5e21e52b
--- /dev/null
+++ b/esmvaltool/recipes/recipe_multimodel_products.yml
@@ -0,0 +1,62 @@
+# ESMValTool
+# recipe_multimodel_products.yml
+---
+documentation:
+  description: |
+    Tool to compute the ensemble mean anomaly and the ensemble
+    variance and agreement, and to plot the results as maps and time series.
+
+  authors:
+    - manu_ni
+    - hunt_al
+    - pere_nu
+
+  projects:
+    - c3s-magic
+
+  references:
+    - manubens
+
+datasets:
+  - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990}
+  - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990}
+  - {dataset: IPSL-CM5A-LR, type: exp, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 1961, end_year: 1990}
+  - {dataset: MPI-ESM-MR, type: exp, project: CMIP5, mip: Amon, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
+  - {dataset: bcc-csm1-1, type: exp, project: CMIP5, mip: Amon, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
+  - {dataset: IPSL-CM5A-LR, type: exp, project: CMIP5, mip: Amon, exp: rcp26, ensemble: r1i1p1, start_year: 2006, end_year: 2099}
+
+preprocessors:
+  preproc:
+    regrid:
+      target_grid: bcc-csm1-1
+      scheme: linear
+    mask_fillvalues:
+      threshold_fraction: 0.95
+    extract_region:
+      start_longitude: 0
+      end_longitude: 360
+      start_latitude: -90
+      end_latitude: 90
+
+diagnostics:
+  anomaly_agreement:
+    description: Calculate multi-member anomalies and their spread/agreement.
+    variables:
+      tas:
+        preprocessor: preproc
+        mip: Amon
+    scripts:
+      main:
+        script: magic_bsc/multimodel_products.r
+
+        # Parameters for the Season() function
+        moninf: 6 # If this is null, the monthly anomalies will be computed
+        monsup: 6
+
+        agreement_threshold: 80
+        # Time series plot options
+        running_mean: 5 # Length of the running mean used for the time series plot
+
+        # Timeseries plot
+        time_series_plot: single # Either single or maxmin (plot the mean with/without shading between the max and min).
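In the multimodel_products.r settings above, the moninf/monsup pair reads as a season selector for the Season() function: with both set to 6 only June enters the anomaly, and setting moninf to null switches to monthly anomalies. A hypothetical variant of the script block, sketched on the assumption that the months are plain calendar-month integers, selecting a June-to-August mean and the shaded time-series style:

  main:
    script: magic_bsc/multimodel_products.r
    moninf: 6  # first month of the season (June); assumption: plain month integer
    monsup: 8  # last month of the season (August), i.e. a JJA mean
    agreement_threshold: 80
    running_mean: 5
    time_series_plot: maxmin  # shade between the ensemble max and min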
diff --git a/esmvaltool/recipes/recipe_ocean_Landschuetzer2016.yml b/esmvaltool/recipes/recipe_ocean_Landschuetzer2016.yml
new file mode 100644
index 0000000000..6f8252850e
--- /dev/null
+++ b/esmvaltool/recipes/recipe_ocean_Landschuetzer2016.yml
@@ -0,0 +1,118 @@
+# ESMValTool
+# recipe_ocean_Landschuetzer2016.yml
+---
+documentation:
+  description: |
+    Recipe to evaluate CO2 fluxes of marine biogeochemistry models of CMIP5
+    using the Landschuetzer et al. (2016) dataset.
+    Written by Tomas Lovato, CMCC, tomas.lovato@cmcc.it
+
+  authors:
+    - lova_to
+
+  maintainer:
+    - lova_to
+
+  references:
+    - acknow_project
+
+  projects:
+    - crescendo
+
+
+datasets:
+# working datasets
+  - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004}
+
+
+# --------------------------------------------------
+# Preprocessors
+# --------------------------------------------------
+preprocessors:
+  # --------------------------------------------------
+  # map preprocessors
+  # --------------------------------------------------
+  # For a 2D global surface map
+  prep_surface_map_2D:
+    time_average:
+    regrid:
+      target_grid: 2x2
+      scheme: linear
+
+  # Global area-weighted Average from 2D field
+  prep_global_Surface_average_timeseries_2D:
+    custom_order: true
+    average_region:
+      coord1: longitude
+      coord2: latitude
+    multi_model_statistics:
+      span: overlap
+      statistics: [mean ]
+
+# --------------------------------------------------
+# Diagnostics
+# --------------------------------------------------
+diagnostics:
+
+  # --------------------------------------------------
+  # Surface time series vs OBS
+  # --------------------------------------------------
+  diag_timeseries_surface_average_vs_OBS:
+    description: Global surface time series (Landschutzer2016 observations)
+    variables:
+      dpco2:
+        preprocessor: prep_global_Surface_average_timeseries_2D
+        mip: Omon
+        fx_files: [areacello, ]
+      spco2:
+        preprocessor: prep_global_Surface_average_timeseries_2D
+        mip: Omon
+        fx_files: [areacello, ]
+      fgco2:
+        preprocessor: prep_global_Surface_average_timeseries_2D
+        mip: Omon
+        fx_files: [areacello, ]
+    additional_datasets:
+      - {dataset: Landschutzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2}
+    scripts:
+      Global_surface_timeseries:
+        script: ocean/diagnostic_timeseries.py
+        observational_dataset: {dataset: Landschutzer2016, project: OBS}
+
+
+  # --------------------------------------------------
+  # Map diagnostics vs OBS
+  # --------------------------------------------------
+  diag_surface_maps_vs_OBS:
+    description: Global Ocean Surface maps vs OBS
+    variables:
+      dpco2:
+        preprocessor: prep_surface_map_2D
+        mip: Omon
+        fx_files: [areacello, ]
+        maps_range: [-90., 90.]
+        diff_range: [-50., 50.]
+      spco2:
+        preprocessor: prep_surface_map_2D
+        mip: Omon
+        fx_files: [areacello, ]
+        maps_range: [300., 400.]
+        diff_range: [-50., 50.]
+      fgco2:
+        preprocessor: prep_surface_map_2D
+        mip: Omon
+        fx_files: [areacello, ]
+        maps_range: [-0.2, 0.2]
+        diff_range: [-0.1, 0.1]
+    additional_datasets:
+      - {dataset: Landschutzer2016, project: OBS, type: clim, version: v2016, start_year: 2000, end_year: 2010, tier: 2}
+    scripts:
+      Global_Ocean_surface_map:
+        script: ocean/diagnostic_maps.py
+      Global_Ocean_model_vs_obs:
+        script: ocean/diagnostic_model_vs_obs.py
+        observational_dataset: {dataset: Landschutzer2016, project: OBS}
+
+
+
diff --git a/esmvaltool/recipes/recipe_ocean_amoc.yml b/esmvaltool/recipes/recipe_ocean_amoc.yml
new file mode 100644
index 0000000000..819aed3452
--- /dev/null
+++ b/esmvaltool/recipes/recipe_ocean_amoc.yml
@@ -0,0 +1,155 @@
+# ESMValTool
+# recipe_ocean_amoc.yml
+---
+documentation:
+  description: |
+    Recipe to produce time series figures of the derived variable, the
+    Atlantic meridional overturning circulation (AMOC).
+    This recipe also produces transect figures of the stream functions for
+    the years 2001-2003.
+
+  authors:
+    - demo_le
+
+  maintainer:
+    - demo_le
+
+  references:
+    - demora2018gmd
+
+  projects:
+    - ukesm
+
+# datasets:
+#   - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+#   - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+
+
+preprocessors:
+  prep_timeseries_drake:
+    extract_named_regions:
+      regions: drake_passage
+
+  prep_transect:
+    custom_order: true
+    extract_named_regions:
+      regions: atlantic_arctic_ocean
+    extract_time:
+      start_year: 2001
+      start_month: 1
+      start_day: 1
+      end_year: 2003
+      end_month: 12
+      end_day: 31
+    time_average:
+
+diagnostics:
+  # --------------------------------------------------
+  # Time series diagnostics
+  # --------------------------------------------------
+  diag_timeseries_amoc:
+    description: atlantic_meridional_overturning_circulation
+    variables:
+      amoc:
+        mip: Omon
+        derive: true
+        force_derivation: false
+    additional_datasets:
+      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+    scripts:
+      AMOC_timeseries:
+        script: ocean/diagnostic_timeseries.py
+        moving_average: 6 years
+
+  diag_timeseries_drake:
+    description: Drake_passage_current
+    variables:
+      mfo:
+        preprocessor: prep_timeseries_drake
+        mip: Omon
+    additional_datasets:
+      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1862, end_year: 2004}
+      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1860, end_year: 2004}
+    scripts:
+      Drake_timeseries:
+        script: ocean/diagnostic_timeseries.py
+        moving_average: 6 years
+
+
+  # --------------------------------------------------
+  # Transect diagnostics
+  # --------------------------------------------------
+  diag_transects:
+    description: Stream function zonal mean
+    variables:
+      msftmyz:
+        preprocessor: prep_transect
+        mip: Omon
+    additional_datasets:
+      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2004}
+    scripts:
+      Atlantic_Stream_function_zonal_mean:
+        script: ocean/diagnostic_transects.py
+        thresholds: [-20., -10., 0., 10., 20.]
+        set_y_logscale: False
diff --git a/esmvaltool/recipes/recipe_ocean_bgc.yml b/esmvaltool/recipes/recipe_ocean_bgc.yml
new file mode 100644
index 0000000000..5d826fce2f
--- /dev/null
+++ b/esmvaltool/recipes/recipe_ocean_bgc.yml
@@ -0,0 +1,551 @@
+# ESMValTool
+# recipe_ocean_bgc.yml
+---
+documentation:
+  description: |
+    Recipe to evaluate the marine biogeochemistry models of CMIP5.
+    There are also some physical evaluation metrics here.
+    This work is based on the BGC-val toolkit GMD-2018-103.
+ DOI: https://doi.org/10.5194/gmd-11-4215-2018 + Written by Lee de Mora, Plymouth Marine Laboratory, ledm@pml.ac.uk + + authors: + - demo_le + + maintainer: + - demo_le + + references: + - demora2018gmd + + projects: + - ukesm + + +datasets: + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + +# working datasets + # - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + + + + +### +# Problem with times +# - {dataset: MIROC-ESM, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} +# - {dataset: MIROC-ESM-CHEM, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + +# Unstructured grids +# - {dataset: MPI-ESM-LR, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} +# - {dataset: MPI-ESM-MR, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} +# - {dataset: ACCESS1-0, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} +# - {dataset: ACCESS1-3, project: CMIP5, mip: Oyr, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + + +# -------------------------------------------------- +# Preprocessors +# -------------------------------------------------- +preprocessors: + # Global 3D Volume-weighted Average + prep_timeseries_global_volume_average: + custom_order: true + average_volume: + coord1: longitude + coord2: latitude + multi_model_statistics: + span: overlap + statistics: [mean ] + + # Global area-weighted Average from 2D field + prep_global_Surface_average_timeseries_2D: + custom_order: true + average_region: + coord1: longitude + coord2: latitude + multi_model_statistics: + span: overlap + statistics: [mean ] + + # Global area -weighted surface Average from 3D field + prep_global_Surface_average_timeseries_3D: + custom_order: true + extract_levels: + levels: [0., ] + scheme: linear_horizontal_extrapolate_vertical + average_region: + coord1: longitude + coord2: latitude + multi_model_statistics: + span: overlap + statistics: [mean ] + + prep_timeseries_scalar: # Load file as is. 
+    custom_order: true
+
+
+  # For a 2D global surface map
+  prep_surface_map_2D:
+    time_average:
+
+  # For a 3D global surface map
+  prep_surface_map_3D:
+    extract_levels:
+      levels: [0., ]
+      scheme: linear_horizontal_extrapolate_vertical
+    time_average:
+
+  prep_surface_map_regrid_3D:
+    custom_order: true
+    extract_levels:
+      levels: [0., ]
+      scheme: linear_horizontal_extrapolate_vertical
+    time_average:
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+
+
+  prep_global_profile:
+    annual_mean:
+    average_region:
+      coord1: longitude
+      coord2: latitude
+
+  prep_global_profile_decadal:
+    annual_mean:
+      decadal: true
+    average_region:
+      coord1: longitude
+      coord2: latitude
+
+  prep_transect_AMT: # Atlantic Meridional Transect (28W)
+    custom_order: true
+    time_average:
+    extract_region:
+      start_longitude: 320.
+      end_longitude: 345.
+      start_latitude: -81.
+      end_latitude: 89.9
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+    extract_transect:
+      longitude: 332.
+
+#  # 2D map global depth integration
+#  prep_depth_integration:
+#    depth_integration:
+#      # new_units: kg m-2 # need to specify in advance, as cf_units has strange behaviour.
+#    time_average:
+#
+#  # 2D map global depth integration time series maps
+#  prep_depth_integration_timeseries:
+#    custom_order: true
+#    depth_integration:
+#    average_region:
+#      coord1: longitude
+#      coord2: latitude
+
+
+
+# --------------------------------------------------
+# Diagnostics
+# --------------------------------------------------
+diagnostics:
+# Need to add:
+  # Global air-sea flux of CO2
+
+# Added:
+  # Global volume average Temperature time series
+  # Global volume average salinity time series
+  # Drake passage/AMOC (if it exists as a scalar field)
+  # Global net integrated primary production timeseries
+  # Global surface mean chlorophyll timeseries
+  # Global surface mean nutrients timeseries
+  # Global surface mean chlorophyll map
+  # Global surface mean nutrients map
+  # Global net integrated primary production map
+
+
+  # --------------------------------------------------
+  # Volume average time series vs WOA
+  # --------------------------------------------------
+  diag_timeseries_volume_average_vs_WOA:
+    description: Global volume average time series (WOA observations)
+    variables:
+      thetao: # Temperature 3D
+        preprocessor: prep_timeseries_global_volume_average
+        mip: Omon
+        fx_files: [volcello,]
+      so: # Salinity 3D
+        preprocessor: prep_timeseries_global_volume_average
+        mip: Omon
+        fx_files: [volcello,]
+      no3: # nitrate
+        preprocessor: prep_timeseries_global_volume_average
+        mip: Oyr
+        fx_files: [volcello,]
+      o2: # oxygen
+        preprocessor: prep_timeseries_global_volume_average
+        mip: Oyr
+        fx_files: [volcello,]
+      si: # Silicate
+        preprocessor: prep_timeseries_global_volume_average
+        mip: Oyr
+        fx_files: [volcello,]
+      # po4: # phosphate  # No HadGEM2-ES phosphate.
+      #   preprocessor: prep_timeseries_global_volume_average
+      #   mip: Oyr
+      #   fx_files: [volcello,]
+    additional_datasets:
+      - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2}
+    scripts:
+      Global_Volume_Average_timeseries:
+        script: ocean/diagnostic_timeseries.py
+
+  # --------------------------------------------------
+  # Volume average time series - no data
+  # --------------------------------------------------
+  diag_timeseries_volume_average_no_obs:
+    description: Global volume average time series (No observations)
+    variables:
+# This can't run on jasmin until issue #773 is resolved.
+#      gtfgco2: # Global Total flux co2
+#        preprocessor: prep_timeseries_scalar
+#        mip: Omon
+#        derive: true
+#        force_derivation: false
+#        fx_files: [areacello, ]
+      chl: # chlorophyll
+        preprocessor: prep_timeseries_global_volume_average
+        mip: Oyr
+        fx_files: [volcello,]
+      dfe: # iron
+        preprocessor: prep_timeseries_global_volume_average
+        mip: Oyr
+        fx_files: [volcello,]
+      talk: # alkalinity
+        preprocessor: prep_timeseries_global_volume_average
+        mip: Oyr
+        fx_files: [volcello,]
+      # dic: # Dissolved inorganic carbon
+      #   preprocessor: prep_timeseries_global_volume_average
+      #   mip: Oyr
+      #   fx_files: [volcello,]
+    scripts:
+      Global_Volume_Average_timeseries:
+        script: ocean/diagnostic_timeseries.py
+
+
+  # --------------------------------------------------
+  # Surface and 2D fields time series
+  # --------------------------------------------------
+  diag_timeseries_surface_average_no_obs:
+    description: Global surface average time series - no data
+    variables:
+      talk: # alkalinity
+        preprocessor: prep_global_Surface_average_timeseries_3D
+        mip: Oyr
+        fx_files: [areacello, ]
+      intpp:
+        preprocessor: prep_global_Surface_average_timeseries_2D
+        mip: Omon
+        fx_files: [areacello, ]
+      chl:
+        preprocessor: prep_global_Surface_average_timeseries_3D
+        mip: Oyr
+        thresholds: [0.1, 0.2, 0.5]
+        fx_files: [areacello, ]
+      # dfe: # iron
+      #   preprocessor: prep_global_Surface_average_timeseries_3D
+      #   mip: Oyr
+      #   fx_files: [areacello, ]
+      # dic: # Dissolved inorganic carbon
+      #   preprocessor: prep_global_Surface_average_timeseries_3D
+      #   mip: Oyr
+      #   fx_files: [areacello, ]
+    scripts:
+      Global_Volume_Average_timeseries:
+        script: ocean/diagnostic_timeseries.py
+
+  # --------------------------------------------------
+  # Surface time series vs WOA
+  # --------------------------------------------------
+  diag_timeseries_surface_average_vs_WOA:
+    description: Global surface time series (WOA observations)
+    variables:
+      thetao: # Temperature ocean surface
+        preprocessor: prep_global_Surface_average_timeseries_3D
+        mip: Omon
+        fx_files: [areacello, ]
+      so: # Salinity ocean surface
+        preprocessor: prep_global_Surface_average_timeseries_3D
+        mip: Omon
+        fx_files: [areacello, ]
+      no3: # Nitrate ocean surface
+        preprocessor: prep_global_Surface_average_timeseries_3D
+        mip: Oyr
+        fx_files: [areacello, ]
+      o2: # oxygen
+        preprocessor: prep_global_Surface_average_timeseries_3D
+        mip: Oyr
+        fx_files: [areacello, ]
+      si: # Silicate
+        preprocessor: prep_global_Surface_average_timeseries_3D
+        mip: Oyr
+        fx_files: [areacello, ]
+      # po4: # Phosphate
+      #   preprocessor: prep_global_Surface_average_timeseries_3D
+      #   mip: Oyr
+      #   fx_files: [areacello, ]
+    additional_datasets:
+      - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2}
+    scripts:
+      Global_Volume_Average_timeseries:
+        script: ocean/diagnostic_timeseries.py
+        observational_dataset: {dataset: WOA, project: OBS}
+
+
+  # --------------------------------------------------
+  # Scalar time series
+  # --------------------------------------------------
+  diag_timeseries_scalars:
+    description: Scalar time series
+    variables:
+      mfo: # Marine currents flux
+        preprocessor: prep_timeseries_scalar
+        mip: Omon
+    scripts:
+      Scalar_timeseries:
+        script: ocean/diagnostic_timeseries.py
+
+
+  # --------------------------------------------------
+  # Profile diagnostics - vs WOA
+  # --------------------------------------------------
+  diag_profile_vs_WOA:
+    description: Global profile (WOA observations)
+    variables:
+      thetao: # Temperature ocean
surface + preprocessor: prep_global_profile + mip: Omon + fx_files: [volcello,] + so: # Salinity ocean surface + preprocessor: prep_global_profile + mip: Omon + fx_files: [volcello,] + no3: # Nitrate ocean surface + preprocessor: prep_global_profile + mip: Oyr + fx_files: [volcello,] + o2: # oxygen + preprocessor: prep_global_profile + mip: Oyr + fx_files: [volcello,] + si: # Silicate + preprocessor: prep_global_profile + mip: Oyr + fx_files: [volcello,] + # po4: # Phosphate + # preprocessor: prep_global_profile + # mip: Oyr + # fx_files: [volcello,] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} + scripts: + Global_profile_vs_WOA: + script: ocean/diagnostic_profiles.py + observational_dataset: {dataset: WOA, project: OBS} + + # -------------------------------------------------- + # Profile diagnostics - no data + # -------------------------------------------------- + diag_profile_no_obs: + description: Global profile (No observations) + variables: + chl: # chlorophyll + preprocessor: prep_global_profile + mip: Oyr + fx_files: [volcello,] + dfe: # iron + preprocessor: prep_global_profile + mip: Oyr + fx_files: [volcello,] + talk: # alkalinity + preprocessor: prep_global_profile + mip: Oyr + fx_files: [volcello,] + # dic: # Dissolved inorganic carbon + # preprocessor: prep_global_profile + # mip: Oyr + # fx_files: [volcello,] + scripts: + Global_profile_no_obs: + script: ocean/diagnostic_profiles.py + + + # -------------------------------------------------- + # Map diagnostics - vs WOA + # -------------------------------------------------- + diag_surface_maps_vs_WOA: + description: Global Ocean Surface maps vs WOA + variables: + thetao: + preprocessor: prep_surface_map_regrid_3D + mip: Omon + fx_files: [areacello, ] + so: + preprocessor: prep_surface_map_regrid_3D + mip: Omon + fx_files: [areacello, ] + no3: + preprocessor: prep_surface_map_regrid_3D + mip: Oyr + fx_files: [areacello, ] + si: # Silicate + preprocessor: prep_surface_map_regrid_3D + mip: Oyr + fx_files: [areacello, ] + o2: # Oxygen + preprocessor: prep_surface_map_regrid_3D + mip: Oyr + fx_files: [areacello, ] + # po4: + # preprocessor: prep_surface_map_3D + # mip: Oyr + # fx_files: [areacello, ] + additional_datasets: + - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2} + scripts: + Global_Ocean_surface_map: + script: ocean/diagnostic_maps.py + Global_Ocean_model_vs_obs: + script: ocean/diagnostic_model_vs_obs.py + observational_dataset: {dataset: WOA, project: OBS} + + # -------------------------------------------------- + # Map diagnostics - no data + # -------------------------------------------------- + diag_surface_maps_no_data: + description: Global Ocean Surface maps - no data + variables: + intpp: + preprocessor: prep_surface_map_2D + mip: Omon + fx_files: [areacello, ] + fgco2: + preprocessor: prep_surface_map_2D + mip: Omon + fx_files: [areacello, ] + chl: + preprocessor: prep_surface_map_3D + mip: Oyr + # thresholds: [0.1, 0.2, 0.5] + fx_files: [areacello, ] + dfe: + preprocessor: prep_surface_map_3D + mip: Oyr + fx_files: [areacello, ] + + # dic: + # preprocessor: prep_surface_map_3D + # mip: Oyr + # fx_files: [areacello, ] + scripts: + Global_Ocean_surface_map: + script: ocean/diagnostic_maps.py + + + + + # -------------------------------------------------- + # Transects diagnostics - vs WOA + # -------------------------------------------------- + diag_transect_vs_WOA: + 
description: Transect maps vs WOA
+    variables:
+      thetao:
+        preprocessor: prep_transect_AMT
+        mip: Omon
+        thresholds: [0., 5., 10., 15., 20., 25., 30.]
+      so:
+        preprocessor: prep_transect_AMT
+        mip: Omon
+      no3:
+        preprocessor: prep_transect_AMT
+        mip: Oyr
+        thresholds: [10., 20., 30.]
+      si: # Silicate
+        preprocessor: prep_transect_AMT
+        mip: Oyr
+      o2: # Oxygen
+        preprocessor: prep_transect_AMT
+        mip: Oyr
+        thresholds: [0.03, ]
+      # po4:
+      #   preprocessor: prep_transect_AMT
+      #   mip: Oyr
+    additional_datasets:
+      - {dataset: WOA, project: OBS, type: clim, version: 2013v2, start_year: 2000, end_year: 2000, tier: 2}
+    scripts:
+      Transects_vs_WOA:
+        script: ocean/diagnostic_transects.py
+        # observational_dataset: {dataset: WOA, project: OBS}
+
+  # # --------------------------------------------------
+  # # Transect diagnostics - no data
+  # # --------------------------------------------------
+  diag_transect_no_data:
+    description: Transects - no data
+    variables:
+      chl:
+        preprocessor: prep_transect_AMT
+        mip: Oyr
+      dfe:
+        preprocessor: prep_transect_AMT
+        mip: Oyr
+      # dic:
+      #   preprocessor: prep_transect_AMT
+      #   mip: Oyr
+    scripts:
+      Transects_no_data:
+        script: ocean/diagnostic_transects.py
+
+
+
+
+
+  # # # Depth integrated maps
+  # # diag_depth_int_maps:
+  # #   description: Global Ocean Depth Integrated maps
+  # #   variables:
+  # #     chl:
+  # #       preprocessor: prep_depth_integration
+  # #       mip: Oyr
+  # #     # intpp:
+  # #     #   preprocessor: prep_depth_integration
+  # #     #   mip: Oyr
+  # #   scripts:
+  # #     Global_Ocean_DepthIntegration_map:
+  # #       script: ocean/diagnostic_maps.py
+  #
+  # diag_depth_int_timeseries:
+  #   description: Global Ocean Depth Integrated time series
+  #   variables:
+  #     chl:
+  #       preprocessor: prep_depth_integration_timeseries
+  #       mip: Oyr
+  #     # intpp:
+  #     #   preprocessor: prep_depth_integration_timeseries
+  #     #   mip: Oyr
+  #   scripts:
+  #     Global_Ocean_DepthIntegration_timeseries:
+  #       script: ocean/diagnostic_timeseries.py
+  #       flags: area_total
diff --git a/esmvaltool/recipes/recipe_OceanPhysics.yml b/esmvaltool/recipes/recipe_ocean_example.yml
similarity index 87%
rename from esmvaltool/recipes/recipe_OceanPhysics.yml
rename to esmvaltool/recipes/recipe_ocean_example.yml
index 21dad10f95..767f1ac448 100644
--- a/esmvaltool/recipes/recipe_OceanPhysics.yml
+++ b/esmvaltool/recipes/recipe_ocean_example.yml
@@ -1,33 +1,34 @@
-###############################################################################
-# recipe_OceanPhysics.yml
+# ESMValTool
+# recipe_ocean_example.yml
 ---
 documentation:
   description: |
     Recipe to demonstrate several simple plots based on the monthly ocean
     temperature. Please use this file as a template for adding additional
-    fields into the ocean. This work based on the BGC-val toolkit GMD-2018-103.
-    Written by Lee de Mora, Plymouth Marine Laboratory
-
+    fields into the ocean. This work is based on the BGC-val toolkit GMD-2018-103.
+ authors: - - ledm + - demo_le + + maintainer: + - demo_le references: - - BGC-val:gmd-2018-103 + - demora2018gmd projects: - ukesm - + datasets: # working datasets - - {dataset: CanESM2, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: GISS-E2-H, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: HadGEM2-AO, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: HadCM3, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: CanCM4, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - - # Problem with latitude > 90. + # - {dataset: CanESM2, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: GISS-E2-H, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: HadGEM2-AO, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: HadCM3, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: CanCM4, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: CSIRO-Mk3-6-0, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + - {dataset: HadGEM2-CC, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} - {dataset: HadGEM2-ES, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} @@ -39,8 +40,8 @@ datasets: # - {dataset: MIROC-ESM-CHEM, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} # Unstructured grids -# - {dataset: MPI-ESM-LR, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} -# - {dataset: MPI-ESM-MR, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} + # - {dataset: MPI-ESM-LR, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} + # - {dataset: MPI-ESM-MR, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} # - {dataset: ACCESS1-0, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} # - {dataset: ACCESS1-3, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2004} @@ -51,7 +52,7 @@ preprocessors: # Time series preprocessors # -------------------------------------------------- prep_timeseries_1: # For 2D fields - custom_order: true + custom_order: true average_region: coord1: longitude coord2: latitude @@ -60,7 +61,7 @@ preprocessors: statistics: [mean ] prep_timeseries_2: # For specific levels of 3D fields - custom_order: true + custom_order: true extract_levels: levels: [0., 10., 100., 1000.] scheme: linear_horizontal_extrapolate_vertical @@ -72,7 +73,7 @@ preprocessors: statistics: [mean ] prep_timeseries_3: # For regional fields - custom_order: true + custom_order: true extract_region: start_longitude: -80. end_longitude: 30. 
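The hunks that follow drop the explicit coordz: depth argument from average_volume and depth_integration, so these preprocessors are configured with the horizontal coordinates only and the vertical coordinate no longer has to be named in the recipe. A minimal sketch of a volume-average preprocessor in the new form (the preprocessor name is hypothetical; the vertical coordinate is assumed to be picked up from the data cube itself):

  prep_timeseries_volume_average:  # hypothetical name
    custom_order: true
    average_volume:
      coord1: longitude
      coord2: latitude  # no coordz entry any more
    multi_model_statistics:
      span: overlap
      statistics: [mean]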
@@ -86,7 +87,7 @@ preprocessors: statistics: [mean ] prep_timeseries_4: # For regional fields at depth - custom_order: true + custom_order: true extract_levels: levels: [0., 10., 100., 1000.] scheme: linear_horizontal_extrapolate_vertical @@ -103,17 +104,16 @@ preprocessors: statistics: [mean ] prep_timeseries_5: # For Global Volume Averaged - custom_order: true + custom_order: true average_volume: coord1: longitude coord2: latitude - coordz: depth multi_model_statistics: span: overlap statistics: [mean ] prep_timeseries_6: # For regional volume averaged - custom_order: true + custom_order: true extract_region: start_longitude: -80. end_longitude: 30. @@ -122,13 +122,12 @@ preprocessors: average_volume: coord1: longitude coord2: latitude - coordz: depth multi_model_statistics: span: overlap statistics: [mean ] prep_timeseries_7: # For regional volume averaged (at the surface) - custom_order: true + custom_order: true extract_region: start_longitude: -80. end_longitude: 30. @@ -140,7 +139,6 @@ preprocessors: average_volume: coord1: longitude coord2: latitude - coordz: depth multi_model_statistics: span: overlap statistics: [mean ] @@ -153,7 +151,7 @@ preprocessors: time_average: prep_map_2: # For Global 2D fields with regridding - custom_order: true + custom_order: true regrid: target_grid: 1x1 scheme: linear @@ -164,7 +162,7 @@ preprocessors: prep_map_3: # For specific levels of 3D fields with regrid - custom_order: true + custom_order: true extract_levels: levels: [0., 10., 100., 1000.,] scheme: linear_horizontal_extrapolate_vertical @@ -177,7 +175,7 @@ preprocessors: statistics: [mean ] prep_map_4: # For a specific region with regrid - custom_order: true + custom_order: true extract_region: start_longitude: -80. end_longitude: 30. @@ -192,7 +190,7 @@ preprocessors: statistics: [mean ] prep_map_5: # For a specific region at depth levels, - custom_order: true + custom_order: true extract_levels: levels: [0., 10., 100., 1000.,] scheme: linear_horizontal_extrapolate_vertical @@ -257,7 +255,6 @@ preprocessors: # -------------------------------------------------- prep_depth_integration_1: # For a 2D map global profile depth_integration: - coordz: depth time_average: @@ -270,9 +267,8 @@ diagnostics: variables: thetao: # Temperature ocean preprocessor: prep_depth_integration_1 - field: TO3M scripts: - Global_Ocean_DepthIntegration_map: + Global_Ocean_DepthIntegration_map: script: ocean/diagnostic_maps.py # -------------------------------------------------- @@ -283,7 +279,6 @@ diagnostics: variables: tos: # Temperature ocean surface preprocessor: prep_timeseries_1 - field: TO2Ms scripts: Global_Ocean_Surface_mean_timeseries: &Global_Ocean_Surface_mean_timeseries script: ocean/diagnostic_timeseries.py @@ -293,7 +288,6 @@ diagnostics: variables: thetao: # Temperature ocean 3D preprocessor: prep_timeseries_2 - field: TO3M scripts: Global_Ocean_DepthLayers_mean_timeseries: &Global_Ocean_DepthLayers_mean_timeseries script: ocean/diagnostic_timeseries.py @@ -303,7 +297,6 @@ diagnostics: variables: tos: # Temperature ocean surface preprocessor: prep_timeseries_3 - field: TO2Ms scripts: Regional_Ocean_Surface_mean_timeseries: &Regional_Ocean_Surface_mean_timeseries script: ocean/diagnostic_timeseries.py @@ -313,7 +306,6 @@ diagnostics: variables: thetao: # Temperature ocean 3D preprocessor: prep_timeseries_4 - field: TO3M scripts: Regional_Ocean_DepthLayers_mean_timeseries: &Regional_Ocean_DepthLayers_mean_timeseries script: ocean/diagnostic_timeseries.py @@ -323,7 +315,6 @@ diagnostics: variables: 
thetao: # Temperature ocean 3D preprocessor: prep_timeseries_5 - field: TO3M scripts: Global_VolumeAverage_Ocean_Physics_timeseries: &Global_VolumeAverage_Ocean_Physics_timeseries script: ocean/diagnostic_timeseries.py @@ -333,7 +324,6 @@ diagnostics: variables: thetao: # Temperature ocean 3D preprocessor: prep_timeseries_6 - field: TO3M scripts: Regional_VolumeAverage_Ocean_Physics_timeseries: &Regional_VolumeAverage_Ocean_Physics_timeseries script: ocean/diagnostic_timeseries.py @@ -343,7 +333,6 @@ diagnostics: variables: thetao: # Temperature ocean 3D averaged over top 100m preprocessor: prep_timeseries_7 - field: TO3M scripts: Regional_Volume100mAverage_Ocean_Physics_timeseries: &Regional_Volume100mAverage_Ocean_Physics_timeseries script: ocean/diagnostic_timeseries.py @@ -356,7 +345,6 @@ diagnostics: variables: tos: # Temperature ocean surface preprocessor: prep_map_1 - field: TO2Ms scripts: Global_Ocean_Surface_mean_map: &Global_Ocean_Surface_mean_map script: ocean/diagnostic_maps.py @@ -366,7 +354,6 @@ diagnostics: variables: tos: # Temperature ocean surface preprocessor: prep_map_2 - field: TO2Ms scripts: Global_Ocean_Surface_regrid_map: &Global_Ocean_Surface_regrid_map script: ocean/diagnostic_maps.py @@ -376,7 +363,6 @@ diagnostics: variables: thetao: # Temperature ocean 3D preprocessor: prep_map_3 - field: TO3M scripts: Global_Ocean_DepthLayers_regrid_map: &Global_Ocean_DepthLayers_regrid_map script: ocean/diagnostic_maps.py @@ -386,7 +372,6 @@ diagnostics: variables: tos: # Temperature ocean surface preprocessor: prep_map_4 - field: TO2Ms scripts: Regional_Ocean_Surface_regrid_map: &Regional_Ocean_Surface_regrid_map script: ocean/diagnostic_maps.py @@ -396,7 +381,6 @@ diagnostics: variables: thetao: # Temperature ocean 3D preprocessor: prep_map_5 - field: TO3M scripts: Regional_Ocean_DepthLayers_mean_map: &Regional_Ocean_DepthLayers_mean_map script: ocean/diagnostic_maps.py @@ -409,18 +393,15 @@ diagnostics: variables: thetao: # Temperature ocean 3D preprocessor: prep_transect_1 - field: TO3M scripts: Regional_Ocean_DepthLayers_regrid_transect: &Regional_Ocean_DepthLayers_regrid_transect script: ocean/diagnostic_transects.py - diag_transect_2: description: Extracting along a line with regridding and multimodle mean variables: thetao: # Temperature ocean 3D preprocessor: prep_transect_2 - field: TO3M scripts: Regional_Ocean_DepthLayers_slice_regrid_mmmean_transect: &Regional_Ocean_DepthLayers_slice_regrid_mmmean_transect script: ocean/diagnostic_transects.py @@ -430,7 +411,6 @@ diagnostics: variables: thetao: # Temperature ocean 3D preprocessor: prep_transect_3 - field: TO3M scripts: Regional_Ocean_DepthLayers_AMTslice_regrid_mmmean_transect: &Regional_Ocean_DepthLayers_AMTslice_regrid_mmmean_transect script: ocean/diagnostic_transects.py @@ -444,7 +424,6 @@ diagnostics: variables: thetao: # Temperature ocean surface preprocessor: prep_profile_1 - field: TO3M scripts: Global_Ocean_mean_profile: &Global_Ocean_mean_profile script: ocean/diagnostic_profiles.py @@ -454,7 +433,6 @@ diagnostics: variables: thetao: # Temperature ocean surface preprocessor: prep_profile_2 - field: TO3M scripts: Regional_Ocean_mean_profile: &Regional_Ocean_mean_profile script: ocean/diagnostic_profiles.py diff --git a/esmvaltool/recipes/recipe_ocean_ice_extent.yml b/esmvaltool/recipes/recipe_ocean_ice_extent.yml new file mode 100644 index 0000000000..71b9edb57f --- /dev/null +++ b/esmvaltool/recipes/recipe_ocean_ice_extent.yml @@ -0,0 +1,126 @@ +# ESMValTool +# recipe_ocean_ice_extent.yml +--- 
+documentation:
+  description: |
+    Recipe to demonstrate several sea ice plots.
+
+  authors:
+    - demo_le
+
+  maintainer:
+    - demo_le
+
+  references:
+    - demora2018gmd
+
+  projects:
+    - ukesm
+
+datasets:
+# Note that there is no way to extract specific months here; the time range is set in extract_time and the months are selected with extract_season below
+  - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1989, end_year: 2004, }
+  - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1989, end_year: 2004, }
+
+
+preprocessors:
+
+  timeseries_NHW_ice_extent: # Northern Hemisphere Winter ice_extent
+    custom_order: true
+    extract_time: &time_anchor # declare time here.
+      start_year: 1989
+      start_month: 12
+      start_day: 1
+      end_year: 2004
+      end_month: 9
+      end_day: 30
+    extract_season:
+      season: DJF
+    extract_region:
+      start_longitude: -180.
+      end_longitude: 180.
+      start_latitude: 0.
+      end_latitude: 90.
+
+  timeseries_NHS_ice_extent: # Northern Hemisphere Summer ice_extent
+    custom_order: true
+    extract_time: *time_anchor
+    extract_season:
+      season: JJA
+    extract_region:
+      start_longitude: -180.
+      end_longitude: 180.
+      start_latitude: 0.
+      end_latitude: 90.
+
+  timeseries_SHS_ice_extent: # Southern Hemisphere Summer ice_extent
+    custom_order: true
+    extract_time: *time_anchor
+    extract_season:
+      season: DJF
+    extract_region:
+      start_longitude: -180.
+      end_longitude: 180.
+      start_latitude: -90.
+      end_latitude: 0.
+
+  timeseries_SHW_ice_extent: # Southern Hemisphere Winter ice_extent
+    custom_order: true
+    extract_time: *time_anchor
+    extract_season:
+      season: JJA
+    extract_region:
+      start_longitude: -180.
+      end_longitude: 180.
+      start_latitude: -90.
+      end_latitude: 0.
+
+
+
+diagnostics:
+  # --------------------------------------------------
+  # Time series diagnostics
+  # --------------------------------------------------
+  diag_ice_NHW:
+    description: Northern Hemisphere Winter Sea Ice diagnostics
+    variables:
+      sic: # surface ice cover
+        preprocessor: timeseries_NHW_ice_extent
+        mip: OImon
+    scripts:
+      Global_seaice_timeseries:
+        script: ocean/diagnostic_seaice.py
+        threshold: 15.
+
+  diag_ice_NHS:
+    description: Northern Hemisphere Summer Sea Ice diagnostics
+    variables:
+      sic: # surface ice cover
+        preprocessor: timeseries_NHS_ice_extent
+        mip: OImon
+    scripts:
+      Global_seaice_timeseries:
+        script: ocean/diagnostic_seaice.py
+        threshold: 15.
+
+  diag_ice_SHW:
+    description: Southern Hemisphere Winter Sea Ice diagnostics
+    variables:
+      sic: # surface ice cover
+        preprocessor: timeseries_SHW_ice_extent
+        mip: OImon
+    scripts:
+      Global_seaice_timeseries:
+        script: ocean/diagnostic_seaice.py
+        threshold: 15.
+
+  diag_ice_SHS:
+    description: Southern Hemisphere Summer Sea Ice diagnostics
+    variables:
+      sic: # surface ice cover
+        preprocessor: timeseries_SHS_ice_extent
+        mip: OImon
+    scripts:
+      Global_seaice_timeseries:
+        script: ocean/diagnostic_seaice.py
+        threshold: 15.
diff --git a/esmvaltool/recipes/recipe_ocean_quadmap.yml b/esmvaltool/recipes/recipe_ocean_quadmap.yml
new file mode 100644
index 0000000000..6c70ca7d6b
--- /dev/null
+++ b/esmvaltool/recipes/recipe_ocean_quadmap.yml
@@ -0,0 +1,62 @@
+# ESMValTool
+# recipe_ocean_quadmap.yml
+---
+documentation:
+  description: |
+    Recipe to demonstrate the Diagnostic Maps quad.
+    Based on the ocean assess/Marine Assess toolkit plots.
+
+  authors:
+    - demo_le
+
+  maintainer:
+    - demo_le
+
+  references:
+    - demora2018gmd
+
+  projects:
+    - ukesm
+
+
+datasets:
+  # working datasets
+  - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003, }
+  - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003, }
+
+
+preprocessors:
+  prep_quad_map: # For Global 2D fields
+    extract_time:
+      start_year: 2001
+      start_month: 1
+      start_day: 1
+      end_year: 2003
+      end_month: 12
+      end_day: 31
+    time_average:
+    regrid:
+      target_grid: 1x1
+      scheme: linear
+
+
+diagnostics:
+# --------------------------------------------------
+# Quad Maps diagnostics
+# --------------------------------------------------
+  diag_map_1:
+    description: Global surface quad plots
+    variables:
+      tos: # Temperature ocean surface
+        preprocessor: prep_quad_map
+        mip: Omon
+        additional_datasets:
+# filename: tos_ATSR_L3_ARC-v1.1.1_199701-201112.nc
+# download from: https://datashare.is.ed.ac.uk/handle/10283/536
+          - {dataset: ATSR, project: obs4mips, level: L3, version: ARC-v1.1.1, start_year: 2001, end_year: 2003, tier: 3}
+    scripts:
+      Global_Ocean_map: &Global_Ocean_map
+        script: ocean/diagnostic_maps_quad.py
+        control_model: {dataset: HadGEM2-CC, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1}
+        exper_model: {dataset: HadGEM2-ES, project: CMIP5, mip: Omon, exp: historical, ensemble: r1i1p1}
+        observational_dataset: {dataset: ATSR, project: obs4mips,}
diff --git a/esmvaltool/recipes/recipe_ocean_scalar_fields.yml b/esmvaltool/recipes/recipe_ocean_scalar_fields.yml
new file mode 100644
index 0000000000..3a96541669
--- /dev/null
+++ b/esmvaltool/recipes/recipe_ocean_scalar_fields.yml
@@ -0,0 +1,53 @@
+# ESMValTool
+# recipe_ocean_scalar_fields.yml
+---
+documentation:
+  description: |
+    Recipe to demonstrate several simple plots based on the scalar fields.
+    These fluxes are saved as MFO files in CMIP5.
+    Please use this file as a template for adding additional
+    fields into the ocean. This work is based on the BGC-val toolkit GMD-2018-103.
+ + authors: + - demo_le + + maintainer: + - demo_le + + references: + - demora2018gmd + + projects: + - ukesm + + +datasets: + # working datasets + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1900, end_year: 2004} + + +diagnostics: + # -------------------------------------------------- + # Time series diagnostics + # -------------------------------------------------- + diag_timeseries: + description: sea_water_transport_across_line + variables: + mfo: # sea_water_transport_across_line + mip: Omon + scripts: + Regional_Ocean_Surface_mean_timeseries: + script: ocean/diagnostic_timeseries.py + moving_average: 6 years diff --git a/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml b/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml index 035364a45a..5f24b09218 100644 --- a/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml +++ b/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml @@ -1,9 +1,10 @@ -# recipe_perfmetrics_CMIP5.xml +# ESMValTool +# recipe_perfmetrics_CMIP5.yml --- documentation: description: | Recipe for plotting the performance metrics for the CMIP5 datasets, - including the "standard" ECVs as in Gleckler et al., plus some additional + including the standard ECVs as in Gleckler et al., and some additional variables (like ozone, sea-ice, aerosol...) 
authors: @@ -11,6 +12,9 @@ documentation: - righ_ma - eyri_ve + maintainer: + - righ_ma + references: - gleckler08jgr @@ -128,6 +132,28 @@ preprocessors: statistics: [mean, median] exclude: [reference_dataset, alternative_dataset] + ppNOLEV1thr10: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset] + + ppNOLEV2thr10: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + ppNOLEV1x1: regrid: target_grid: 1x1 @@ -156,8 +182,7 @@ preprocessors: diagnostics: - ### ta: AIR TEMPERATURE ############################################################################################################################## - + ### ta: AIR TEMPERATURE ##################################################### ta850: description: Air temperature at 850 hPa global. themes: @@ -170,211 +195,242 @@ diagnostics: reference_dataset: ERA-Interim alternative_dataset: NCEP mip: Amon - field: T3M + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 additional_datasets: - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-g2, 
project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} 
+ - {dataset: bcc-csm1-1}
+ - {dataset: bcc-csm1-1-m}
+ - {dataset: BNU-ESM}
+ - {dataset: CanCM4}
+ - {dataset: CanESM2}
+ - {dataset: CCSM4}
+ - {dataset: CESM1-BGC}
+ - {dataset: CESM1-CAM5}
+ - {dataset: CESM1-CAM5-1-FV2}
+ - {dataset: CESM1-FASTCHEM}
+ - {dataset: CESM1-WACCM}
+ - {dataset: CMCC-CESM}
+ - {dataset: CMCC-CM}
+ - {dataset: CMCC-CMS}
+ - {dataset: CNRM-CM5}
+ - {dataset: CNRM-CM5-2}
+ - {dataset: CSIRO-Mk3-6-0}
+ - {dataset: EC-EARTH, ensemble: r6i1p1}
+ - {dataset: FGOALS-g2}
+ - {dataset: FGOALS-s2}
+ - {dataset: FIO-ESM}
+ - {dataset: GFDL-CM2p1}
+ - {dataset: GFDL-CM3}
+ - {dataset: GFDL-ESM2G}
+ - {dataset: GFDL-ESM2M}
+ - {dataset: GISS-E2-H, ensemble: r1i1p2}
+ - {dataset: GISS-E2-H-CC}
+ - {dataset: GISS-E2-R, ensemble: r1i1p2}
+ - {dataset: GISS-E2-R-CC}
+ - {dataset: HadCM3}
+ - {dataset: HadGEM2-AO}
+ - {dataset: HadGEM2-CC}
+ - {dataset: HadGEM2-ES}
+ - {dataset: inmcm4}
+ - {dataset: IPSL-CM5A-LR}
+ - {dataset: IPSL-CM5A-MR}
+ - {dataset: IPSL-CM5B-LR}
+ - {dataset: MIROC4h}
+ - {dataset: MIROC5}
+ - {dataset: MIROC-ESM}
+ - {dataset: MIROC-ESM-CHEM}
+ - {dataset: MPI-ESM-LR}
+ - {dataset: MPI-ESM-MR}
+ - {dataset: MPI-ESM-P}
+ - {dataset: MRI-CGCM3}
+ - {dataset: MRI-ESM1}
+ - {dataset: NorESM1-M}
+ - {dataset: NorESM1-ME}
+ - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
+ - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2}
 scripts:
 cycle: &cycle_settings
 script: perfmetrics/main.ncl
- plot_type: cycle # Plot type ('cycle' [time], 'zonal' [plev, lat], 'latlon' [lat, lon], 'cycle_latlon' [time, lat, lon])
- time_avg: monthlyclim # Time average ('opt' argument of time_operations.ncl)
- region: Global # Selected region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics')
- plot_stddev: ref_model # Plot standard deviation ('all', 'none', 'ref_model' or given dataset name)
- legend_outside: true # Plot legend in a separate file
- styleset: CMIP5 # Plot style
+ # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal')
+ plot_type: cycle
+ # Time average ('opt' argument of time_operations.ncl)
+ time_avg: monthlyclim
+ # Region ('global', 'trop', 'nhext', 'shext')
+ region: global
+ # Plot standard deviation ('all', 'none', 'ref_model' or dataset name)
+ plot_stddev: ref_model
+ # Plot legend in a separate file
+ legend_outside: true
+ # Plot style
+ styleset: CMIP5
 grading: &grading_settings
 <<: *cycle_settings
- plot_type: cycle_latlon # Plot type ('cycle' [time], 'zonal' [plev, lat], 'latlon' [lat, lon], 'cycle_latlon' [time, lat, lon])
- draw_plots: false # Do not draw plots
- calc_grading: true # Calculate grading
- metric: [RMSD, taylor] # Metric ('RMSD', 'BIAS', taylor')
- normalization: [centered_median, none] # Normalization ('mean', 'median', 'centered_median', 'none')
+ # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal')
+ plot_type: cycle_latlon
+ # Calculate grading
+ calc_grading: true
+ # Metric ('RMSD', 'BIAS', 'taylor')
+ metric: [RMSD, taylor]
+ # Normalization ('mean', 'median', 'centered_median', 'none')
+ normalization: [centered_median, none]
 ta200:
 description: Air temperature at 200 hPa global.
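Note: the ta850 hunk above establishes the two patterns that every following diagnostic repeats. Keys shared by all datasets (project, exp, ensemble, start_year, end_year) are hoisted to the variable level; ESMValTool treats them as defaults that individual additional_datasets entries may still override, which is why bare `{dataset: ACCESS1-0}` entries can sit next to `{dataset: EC-EARTH, ensemble: r6i1p1}`. Script settings are shared through a plain YAML anchor (`&cycle_settings`) and merge key (`<<: *cycle_settings`). The merge behaviour is standard YAML; a small self-contained PyYAML check (illustrative snippet, not part of the patch):

    # Illustrative only: the anchor/merge-key pattern used by the scripts blocks.
    import yaml

    SNIPPET = """
    scripts:
      cycle: &cycle_settings
        script: perfmetrics/main.ncl
        plot_type: cycle
        region: global
      grading:
        <<: *cycle_settings        # inherit every key from `cycle` ...
        plot_type: cycle_latlon    # ... then override where needed
        calc_grading: true
    """

    scripts = yaml.safe_load(SNIPPET)['scripts']
    assert scripts['grading']['script'] == 'perfmetrics/main.ncl'  # inherited
    assert scripts['grading']['plot_type'] == 'cycle_latlon'       # overridden
    print(scripts['grading'])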
+ themes: + - phys + realms: + - atmos variables: ta: preprocessor: pp200 reference_dataset: ERA-Interim alternative_dataset: NCEP mip: Amon - field: T3M + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 additional_datasets: - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 
2000, end_year: 2002} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - 
{dataset: MIROC-ESM}
+ - {dataset: MIROC-ESM-CHEM}
+ - {dataset: MPI-ESM-LR}
+ - {dataset: MPI-ESM-MR}
+ - {dataset: MPI-ESM-P}
+ - {dataset: MRI-CGCM3}
+ - {dataset: MRI-ESM1}
+ - {dataset: NorESM1-M}
+ - {dataset: NorESM1-ME}
+ - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
+ - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2}
 scripts:
 cycle:
 <<: *cycle_settings
 grading:
 <<: *grading_settings
- metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
- normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
+ # Metric ('RMSD', 'BIAS', 'taylor')
+ metric: [RMSD]
+ # Normalization ('mean', 'median', 'centered_median', 'none')
+ normalization: [centered_median]
 ta30:
 description: Air temperature at 30 hPa global.
+ themes:
+ - phys
+ realms:
+ - atmos
 variables:
 ta:
 preprocessor: pp30
 reference_dataset: ERA-Interim
 alternative_dataset: NCEP
 mip: Amon
- field: T3M
+ project: CMIP5
+ exp: historical
+ ensemble: r1i1p1
+ start_year: 2000
+ end_year: 2002
 additional_datasets:
- - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002}
- - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: 
r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - 
{dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} scripts: cycle: <<: *cycle_settings @@ -382,66 +438,74 @@ diagnostics: ta5: description: Air temperature at 5 hPa global. + themes: + - phys + realms: + - atmos variables: ta: preprocessor: pp5 reference_dataset: ERA-Interim alternative_dataset: NCEP mip: Amon - field: T3M + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 additional_datasets: - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: 
CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, 
project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} scripts: cycle: <<: *cycle_settings @@ -449,1034 +513,1473 @@ diagnostics: taZONAL: description: Air temperature zonal mean + themes: + - phys + realms: + - atmos variables: ta: preprocessor: ppALL reference_dataset: ERA-Interim alternative_dataset: NCEP mip: Amon - field: T3M + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 additional_datasets: - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, 
ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, 
start_year: 2000, end_year: 2002} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} scripts: zonal: &zonal_settings script: perfmetrics/main.ncl - plot_type: zonal # Plot type ('cycle' [time], 'zonal' [plev, lat], 'latlon' [lat, lon], 'cycle_latlon' [time, lat, lon]) - time_avg: annualclim # Time average ('opt' argument of time_operations.ncl) - region: Global # Selected region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics') - plot_diff: true # Draw difference plots - t_test: true # Calculate t-test in difference plots - conf_level: 0.95 # Confidence level for the t-test - stippling: true # Mask non-significant values with stippling - abs_levs: [200, 210, 220, 230, 240, 250, 260, 270, 280, 290, 300] # Contour levels for absolute plot - diff_levs: [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10] # Contour levels for difference plot - - - ### ua: EASTWARD WIND ################################################################################################################################ - + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: zonal + # Time average ('opt' argument of time_operations.ncl) + time_avg: annualclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Draw difference plots + plot_diff: true + # Calculate t-test in difference plots + t_test: true + # Confidence level for the t-test + conf_level: 0.95 + # Mask non-significant values with stippling + stippling: true + # Contour 
levels for absolute plot + abs_levs: [200, 210, 220, 230, 240, 250, 260, 270, 280, 290, 300] + # Contour levels for difference plot + diff_levs: [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10] + + + ### ua: EASTWARD WIND ####################################################### ua850: description: Eastward wind at 850 hPa global. + themes: + - atmDyn + realms: + - atmos variables: ua: preprocessor: pp850 reference_dataset: ERA-Interim alternative_dataset: NCEP mip: Amon - field: T3M + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 additional_datasets: - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002} - - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, 
end_year: 2002} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: 
HadGEM2-CC}
+ - {dataset: HadGEM2-ES}
+ - {dataset: inmcm4}
+ - {dataset: IPSL-CM5A-LR}
+ - {dataset: IPSL-CM5A-MR}
+ - {dataset: IPSL-CM5B-LR}
+ - {dataset: MIROC4h}
+ - {dataset: MIROC5}
+ - {dataset: MIROC-ESM}
+ - {dataset: MIROC-ESM-CHEM}
+ - {dataset: MPI-ESM-LR}
+ - {dataset: MPI-ESM-MR}
+ - {dataset: MPI-ESM-P}
+ - {dataset: MRI-CGCM3}
+ - {dataset: MRI-ESM1}
+ - {dataset: NorESM1-M}
+ - {dataset: NorESM1-ME}
+ - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
+ - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2}
 scripts:
 grading:
 <<: *grading_settings
- metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
- normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
+ # Metric ('RMSD', 'BIAS', 'taylor')
+ metric: [RMSD]
+ # Normalization ('mean', 'median', 'centered_median', 'none')
+ normalization: [centered_median]
 ua200:
 description: Eastward wind at 200 hPa global.
+ themes:
+ - atmDyn
+ realms:
+ - atmos
 variables:
 ua:
 preprocessor: pp200
 reference_dataset: ERA-Interim
 alternative_dataset: NCEP
 mip: Amon
- field: T3M
+ project: CMIP5
+ exp: historical
+ ensemble: r1i1p1
+ start_year: 2000
+ end_year: 2002
 additional_datasets:
- - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002}
- - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
- - 
{dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002} - - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002} - - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3} - - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - 
{dataset: CESM1-CAM5-1-FV2}
+ - {dataset: CESM1-FASTCHEM}
+ - {dataset: CESM1-WACCM}
+ - {dataset: CMCC-CESM}
+ - {dataset: CMCC-CM}
+ - {dataset: CMCC-CMS}
+ - {dataset: CNRM-CM5}
+ - {dataset: CNRM-CM5-2}
+ - {dataset: CSIRO-Mk3-6-0}
+ - {dataset: EC-EARTH, ensemble: r6i1p1}
+ - {dataset: FGOALS-g2}
+ - {dataset: FIO-ESM}
+ - {dataset: GFDL-CM2p1}
+ - {dataset: GFDL-CM3}
+ - {dataset: GFDL-ESM2G}
+ - {dataset: GFDL-ESM2M}
+ - {dataset: GISS-E2-H, ensemble: r1i1p2}
+ - {dataset: GISS-E2-H-CC}
+ - {dataset: GISS-E2-R, ensemble: r1i1p2}
+ - {dataset: GISS-E2-R-CC}
+ - {dataset: HadCM3}
+ - {dataset: HadGEM2-AO}
+ - {dataset: HadGEM2-CC}
+ - {dataset: HadGEM2-ES}
+ - {dataset: inmcm4}
+ - {dataset: IPSL-CM5A-LR}
+ - {dataset: IPSL-CM5A-MR}
+ - {dataset: IPSL-CM5B-LR}
+ - {dataset: MIROC4h}
+ - {dataset: MIROC5}
+ - {dataset: MIROC-ESM}
+ - {dataset: MIROC-ESM-CHEM}
+ - {dataset: MPI-ESM-LR}
+ - {dataset: MPI-ESM-MR}
+ - {dataset: MPI-ESM-P}
+ - {dataset: MRI-CGCM3}
+ - {dataset: MRI-ESM1}
+ - {dataset: NorESM1-M}
+ - {dataset: NorESM1-ME}
+ - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
+ - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2}
 scripts:
 grading:
 <<: *grading_settings
- metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
- normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
-
+ # Metric ('RMSD', 'BIAS', 'taylor')
+ metric: [RMSD]
+ # Normalization ('mean', 'median', 'centered_median', 'none')
+ normalization: [centered_median]
- ### va: NORTHWARD WIND ###############################################################################################################################
+ ### va: NORTHWARD WIND ######################################################
 va850:
 description: Northward wind at 850 hPa global.
+    themes:
+      - atmDyn
+    realms:
+      - atmos
     variables:
       va:
         preprocessor: pp850
         reference_dataset: ERA-Interim
         alternative_dataset: NCEP
         mip: Amon
-        field: T3M
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2000
+        end_year: 2002
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3}
-      - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2}
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanCM4}
+      - {dataset: CanESM2}
+      - {dataset: CCSM4}
+      - {dataset: CESM1-BGC}
+      - {dataset: CESM1-CAM5}
+      - {dataset: CESM1-FASTCHEM}
+      - {dataset: CESM1-WACCM}
+      - {dataset: CMCC-CESM}
+      - {dataset: CMCC-CM}
+      - {dataset: CMCC-CMS}
+      - {dataset: CNRM-CM5}
+      - {dataset: CNRM-CM5-2}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: EC-EARTH, ensemble: r6i1p1}
+      - {dataset: FGOALS-g2}
+      - {dataset: FIO-ESM}
+      - {dataset: GFDL-CM2p1}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: GISS-E2-H, ensemble: r1i1p2}
+      - {dataset: GISS-E2-H-CC}
+      - {dataset: GISS-E2-R, ensemble: r1i1p2}
+      - {dataset: GISS-E2-R-CC}
+      - {dataset: HadCM3}
+      - {dataset: HadGEM2-AO}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC4h}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: MPI-ESM-P}
+      - {dataset: MRI-CGCM3}
+      - {dataset: MRI-ESM1}
+      - {dataset: NorESM1-M}
+      - {dataset: NorESM1-ME}
+      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
+      - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2}
     scripts:
       grading:
         <<: *grading_settings
-        metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
-        normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]

   va200:
     description: Northward wind at 200 hPa global.
+    themes:
+      - atmDyn
+    realms:
+      - atmos
     variables:
       va:
         preprocessor: pp200
         reference_dataset: ERA-Interim
         alternative_dataset: NCEP
         mip: Amon
-        field: T3M
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2000
+        end_year: 2002
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3}
-      - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2}
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanCM4}
+      - {dataset: CanESM2}
+      - {dataset: CCSM4}
+      - {dataset: CESM1-BGC}
+      - {dataset: CESM1-CAM5}
+      - {dataset: CESM1-FASTCHEM}
+      - {dataset: CESM1-WACCM}
+      - {dataset: CMCC-CESM}
+      - {dataset: CMCC-CM}
+      - {dataset: CMCC-CMS}
+      - {dataset: CNRM-CM5}
+      - {dataset: CNRM-CM5-2}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: EC-EARTH, ensemble: r6i1p1}
+      - {dataset: FGOALS-g2}
+      - {dataset: FIO-ESM}
+      - {dataset: GFDL-CM2p1}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: GISS-E2-H, ensemble: r1i1p2}
+      - {dataset: GISS-E2-H-CC}
+      - {dataset: GISS-E2-R, ensemble: r1i1p2}
+      - {dataset: GISS-E2-R-CC}
+      - {dataset: HadCM3}
+      - {dataset: HadGEM2-AO}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC4h}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: MPI-ESM-P}
+      - {dataset: MRI-CGCM3}
+      - {dataset: MRI-ESM1}
+      - {dataset: NorESM1-M}
+      - {dataset: NorESM1-ME}
+      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
+      - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2}
     scripts:
       grading:
         <<: *grading_settings
-        metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
-        normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
-
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]

-  ### zg: GEOPOTENTIAL HEIGHT ##########################################################################################################################
+  ### zg: GEOPOTENTIAL HEIGHT #################################################

   zg500:
     description: Geopotential height 500 hPa global.
+    themes:
+      - phys
+    realms:
+      - atmos
     variables:
       zg:
         preprocessor: pp500
         reference_dataset: ERA-Interim
         alternative_dataset: NCEP
         mip: Amon
-        field: T3M
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2000
+        end_year: 2002
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3}
-      - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2}
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanCM4}
+      - {dataset: CanESM2}
+      - {dataset: CCSM4}
+      - {dataset: CESM1-BGC}
+      - {dataset: CESM1-CAM5}
+      - {dataset: CESM1-CAM5-1-FV2}
+      - {dataset: CESM1-FASTCHEM}
+      - {dataset: CESM1-WACCM}
+      - {dataset: CMCC-CESM}
+      - {dataset: CMCC-CM}
+      - {dataset: CMCC-CMS}
+      - {dataset: CNRM-CM5}
+      - {dataset: CNRM-CM5-2}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: FGOALS-g2}
+      - {dataset: FGOALS-s2}
+      - {dataset: FIO-ESM}
+      - {dataset: GFDL-CM2p1}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: GISS-E2-H, ensemble: r1i1p2}
+      - {dataset: GISS-E2-H-CC}
+      - {dataset: GISS-E2-R-CC}
+      - {dataset: HadCM3}
+      - {dataset: HadGEM2-AO}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC4h}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: MPI-ESM-P}
+      - {dataset: MRI-CGCM3}
+      - {dataset: NorESM1-M}
+      - {dataset: NorESM1-ME}
+      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
+      - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2}
     scripts:
       grading:
         <<: *grading_settings
-        metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
-        normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
-
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]

-  ### hus: SPECIFIC HUMIDITY ###########################################################################################################################
+  ### hus: SPECIFIC HUMIDITY ##################################################

   hus400:
     description: Specific humidity at 400 hPa global.
+    themes:
+      - phys
+    realms:
+      - atmos
     variables:
       hus:
         preprocessor: pp400
         reference_dataset: AIRS
         alternative_dataset: ERA-Interim
         mip: Amon
-        field: T3M
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2003
+        end_year: 2004
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2003, end_year: 2004}
-      - {dataset: AIRS, project: obs4mips, level: L3, version: RetStd-v5, start_year: 2003, end_year: 2004, tier: 1}
-      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2003, end_year: 2004, tier: 3}
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanESM2}
+      - {dataset: CCSM4}
+      - {dataset: CESM1-BGC}
+      - {dataset: CESM1-CAM5}
+      - {dataset: CESM1-CAM5-1-FV2}
+      - {dataset: CMCC-CM}
+      - {dataset: CMCC-CMS}
+      - {dataset: CNRM-CM5}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: FGOALS-g2}
+      - {dataset: FGOALS-s2}
+      - {dataset: FIO-ESM}
+      - {dataset: GISS-E2-H-CC}
+      - {dataset: GISS-E2-R-CC}
+      - {dataset: GFDL-CM2p1}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: HadGEM2-AO}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC4h}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: MRI-CGCM3}
+      - {dataset: MRI-ESM1}
+      - {dataset: NorESM1-M}
+      - {dataset: NorESM1-ME}
+      - {dataset: AIRS, project: obs4mips, level: L3, version: RetStd-v5, tier: 1}
+      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
     scripts:
       grading:
         <<: *grading_settings
-        metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
-        normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
-
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]

-  ### tas: NEAR-SURFACE TEMPERATURE ####################################################################################################################
+  ### tas: NEAR-SURFACE TEMPERATURE ###########################################

   tas:
     description: Near-surface air temperature
+    themes:
+      - phys
+    realms:
+      - atmos
     variables:
       tas:
         preprocessor: ppNOLEV2
         reference_dataset: ERA-Interim
         alternative_dataset: NCEP
         mip: Amon
-        field: T2Ms
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2000
+        end_year: 2002
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 3}
-      - {dataset: NCEP, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2}
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanCM4}
+      - {dataset: CanESM2}
+      - {dataset: CCSM4}
+      - {dataset: CESM1-BGC}
+      - {dataset: CESM1-CAM5}
+      - {dataset: CESM1-CAM5-1-FV2}
+      - {dataset: CESM1-FASTCHEM}
+      - {dataset: CESM1-WACCM}
+      - {dataset: CMCC-CESM}
+      - {dataset: CMCC-CM}
+      - {dataset: CMCC-CMS}
+      - {dataset: CNRM-CM5}
+      - {dataset: CNRM-CM5-2}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: EC-EARTH, ensemble: r6i1p1}
+      - {dataset: FGOALS-g2}
+      - {dataset: FGOALS-s2}
+      - {dataset: FIO-ESM}
+      - {dataset: GFDL-CM2p1}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: GISS-E2-H, ensemble: r1i1p2}
+      - {dataset: GISS-E2-H-CC}
+      - {dataset: GISS-E2-R, ensemble: r1i1p2}
+      - {dataset: GISS-E2-R-CC}
+      - {dataset: HadCM3}
+      - {dataset: HadGEM2-AO}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC4h}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: MPI-ESM-P}
+      - {dataset: MRI-CGCM3}
+      - {dataset: MRI-ESM1}
+      - {dataset: NorESM1-M}
+      - {dataset: NorESM1-ME}
+      - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3}
+      - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2}
     scripts:
       latlon: &latlon_settings
         script: perfmetrics/main.ncl
-        plot_type: latlon # Plot type ('cycle' [time], 'zonal' [plev, lat], 'latlon' [lat, lon], 'cycle_latlon' [time, lat, lon])
-        time_avg: annualclim # Time average ('opt' argument of time_operations.ncl)
-        region: Global # Selected region ('Global', 'Tropics', 'NH extratropics', 'SH extratropics')
-        plot_diff: true # Draw difference plots
-        t_test: true # Calculate t-test in difference plots
-        conf_level: 0.95 # Confidence level for the t-test
+        # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal')
+        plot_type: latlon
+        # Time average ('opt' argument of time_operations.ncl)
+        time_avg: annualclim
+        # Region ('global', 'trop', 'nhext', 'shext')
+        region: global
+        # Draw difference plots
+        plot_diff: true
+        # Calculate t-test in difference plots
+        t_test: true
+        # Confidence level for the t-test
+        conf_level: 0.95
+        # Contour levels for absolute plot
         abs_levs: [240, 243, 246, 249, 252, 255, 258, 261, 264, 267, 270, 273, 276, 279,
-                   282, 285, 288, 291, 294, 297, 300] # Contour levels for absolute plot
-        diff_levs: [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5] # Contour levels for difference plot
+                   282, 285, 288, 291, 294, 297, 300]
+        # Contour levels for difference plot
+        diff_levs: [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
       grading:
         <<: *grading_settings
-        metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
-        normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
-
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]

-  ### ts: SEA-SURFACE (SKIN) TEMPERATURE ###############################################################################################################
+  ### ts: SEA-SURFACE (SKIN) TEMPERATURE ######################################

   ts:
     description: Sea-surface (skin) temperature
+    themes:
+      - phys
+    realms:
+      - atmos
+      - ocean
     variables:
       ts:
         preprocessor: ppNOLEV1x1
         reference_dataset: ESACCI-SST
         alternative_dataset: HadISST
         mip: Amon
-        field: T2Ms
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2000
+        end_year: 2002
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ESACCI-SST, project: OBS, type: sat, version: L4-GHRSST-SSTdepth-OSTIA-GLOB, start_year: 2000, end_year: 2002, tier: 2}
-      - {dataset: HadISST, project: OBS, type: reanaly, version: 1, start_year: 2000, end_year: 2002, tier: 2}
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanCM4}
+      - {dataset: CanESM2}
+      - {dataset: CCSM4}
+      - {dataset: CESM1-BGC}
+      - {dataset: CESM1-CAM5}
+      - {dataset: CESM1-CAM5-1-FV2}
+      - {dataset: CMCC-CM}
+      - {dataset: CMCC-CMS}
+      - {dataset: CNRM-CM5}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: FGOALS-g2}
+      - {dataset: FIO-ESM}
+      - {dataset: GFDL-CM2p1}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: GISS-E2-H, ensemble: r1i1p2}
+      - {dataset: GISS-E2-H-CC}
+      - {dataset: GISS-E2-R, ensemble: r1i1p2}
+      - {dataset: GISS-E2-R-CC}
+      - {dataset: HadCM3}
+      - {dataset: HadGEM2-AO}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC4h}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: MRI-CGCM3}
+      - {dataset: NorESM1-M}
+      - {dataset: NorESM1-ME}
+      - {dataset: ESACCI-SST, project: OBS, type: sat, version: L4-GHRSST-SSTdepth-OSTIA-GLOB, tier: 2}
+      - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2}
     scripts:
       grading:
         <<: *grading_settings
-        metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
-        normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
-
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]

-  ### pr: PRECIPITATIONS ###############################################################################################################################
+  ### pr: PRECIPITATION #######################################################

   pr:
-    description: Precipitations
+    description: Precipitation
+    themes:
+      - phys
+    realms:
+      - atmos
     variables:
       pr:
         preprocessor: ppNOLEV1
         reference_dataset: GPCP-SG
         mip: Amon
-        field: T2Ms
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2000
+        end_year: 2002
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-FASTCHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-WACCM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-ESM1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, start_year: 2000, end_year: 2002, tier: 1}
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanCM4}
+      - {dataset: CanESM2}
+      - {dataset: CCSM4}
+      - {dataset: CESM1-BGC}
+      - {dataset: CESM1-CAM5}
+      - {dataset: CESM1-CAM5-1-FV2}
+      - {dataset: CESM1-FASTCHEM}
+      - {dataset: CESM1-WACCM}
+      - {dataset: CMCC-CESM}
+      - {dataset: CMCC-CM}
+      - {dataset: CMCC-CMS}
+      - {dataset: CNRM-CM5}
+      - {dataset: CNRM-CM5-2}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: EC-EARTH, ensemble: r6i1p1}
+      - {dataset: FGOALS-g2}
+      - {dataset: FIO-ESM}
+      - {dataset: GFDL-CM2p1}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: GISS-E2-H, ensemble: r1i1p2}
+      - {dataset: GISS-E2-H-CC}
+      - {dataset: GISS-E2-R, ensemble: r1i1p2}
+      - {dataset: GISS-E2-R-CC}
+      - {dataset: HadCM3}
+      - {dataset: HadGEM2-AO}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC4h}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: MPI-ESM-P}
+      - {dataset: MRI-CGCM3}
+      - {dataset: MRI-ESM1}
+      - {dataset: NorESM1-M}
+      - {dataset: NorESM1-ME}
+      - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1}
     scripts:
       grading:
         <<: *grading_settings
-        metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
-        normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]

-  ### clt: TOTAL CLOUD COVER ###########################################################################################################################
-
+  ### clt: TOTAL CLOUD COVER ##################################################

   clt:
     description: Total cloud cover
+    themes:
+      - clouds
+    realms:
+      - atmos
     variables:
       clt:
         preprocessor: ppNOLEV2
         reference_dataset: ESACCI-CLOUD
-        alternative_dataset: PATMOS
+        alternative_dataset: PATMOS-x
         mip: Amon
-        field: T2Ms
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2000
+        end_year: 2002
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-H-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2000, end_year: 2002}
-      - {dataset: GISS-E2-R-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2000, end_year: 2002}
-      - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-fv2.0, start_year: 2000, end_year: 2002, tier: 2}
-      - {dataset: PATMOS, project: OBS, type: sat, version: 1, start_year: 2000, end_year: 2002, tier: 2}
+      - {dataset: ACCESS1-0}
+      - {dataset: ACCESS1-3}
+      - {dataset: bcc-csm1-1}
+      - {dataset: bcc-csm1-1-m}
+      - {dataset: BNU-ESM}
+      - {dataset: CanESM2}
+      - {dataset: CCSM4}
+      - {dataset: CESM1-BGC}
+      - {dataset: CESM1-CAM5}
+      - {dataset: CESM1-CAM5-1-FV2}
+      - {dataset: CMCC-CM}
+      - {dataset: CMCC-CMS}
+      - {dataset: CNRM-CM5}
+      - {dataset: CSIRO-Mk3-6-0}
+      - {dataset: EC-EARTH, ensemble: r6i1p1}
+      - {dataset: FGOALS-g2}
+      - {dataset: FIO-ESM}
+      - {dataset: GFDL-CM3}
+      - {dataset: GFDL-ESM2G}
+      - {dataset: GFDL-ESM2M}
+      - {dataset: GISS-E2-H, ensemble: r1i1p2}
+      - {dataset: GISS-E2-H-CC}
+      - {dataset: GISS-E2-R, ensemble: r1i1p2}
+      - {dataset: GISS-E2-R-CC}
+      - {dataset: HadCM3}
+      - {dataset: HadGEM2-AO}
+      - {dataset: HadGEM2-CC}
+      - {dataset: HadGEM2-ES}
+      - {dataset: inmcm4}
+      - {dataset: IPSL-CM5A-LR}
+      - {dataset: IPSL-CM5A-MR}
+      - {dataset: IPSL-CM5B-LR}
+      - {dataset: MIROC4h}
+      - {dataset: MIROC5}
+      - {dataset: MIROC-ESM}
+      - {dataset: MIROC-ESM-CHEM}
+      - {dataset: MPI-ESM-LR}
+      - {dataset: MPI-ESM-MR}
+      - {dataset: MRI-CGCM3}
+      - {dataset: NorESM1-M}
+      - {dataset: NorESM1-ME}
+      - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-fv3.0, tier: 2}
+      - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, tier: 2}
     scripts:
       latlon:
         <<: *latlon_settings
+        # Add global average to the plot
         show_global_avg: true
-        abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] # Contour levels for absolute plot
-        diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] # Contour levels for difference plot
+        # Contour levels for absolute plot
+        abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
+        # Contour levels for difference plot
+        diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30]
      grading:
         <<: *grading_settings
-        metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor')
-        normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none')
-
+        # Metric ('RMSD', 'BIAS', 'taylor')
+        metric: [RMSD]
+        # Normalization ('mean', 'median', 'centered_median', 'none')
+        normalization: [centered_median]

-  ### rlut: ALL-SKY LONGWAVE RADIATION #################################################################################################################
+  ### rlut: ALL-SKY LONGWAVE RADIATION ########################################

   rlut:
     description: All-sky longwave radiation
+    themes:
+      - phys
+    realms:
+      - atmos
     variables:
       rlut:
         preprocessor: ppNOLEV1
         reference_dataset: CERES-EBAF
         mip: Amon
-        field: T2Ms
+        project: CMIP5
+        exp: historical
+        ensemble: r1i1p1
+        start_year: 2001
+        end_year: 2003
     additional_datasets:
-      - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CanCM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r6i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
-      - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003}
project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2001, end_year: 2003} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2001, end_year: 2003} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2003, tier: 1} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: 
MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} scripts: grading: <<: *grading_settings - metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor') - normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none') - + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] - ### rsut: ALL-SKY SHORTWAVE RADIATION ################################################################################################################ + ### rsut: ALL-SKY SHORTWAVE RADIATION ####################################### rsut: description: All-sky shortwave radiation + themes: + - phys + realms: + - atmos variables: rsut: preprocessor: ppNOLEV1 reference_dataset: CERES-EBAF mip: Amon - field: T2Ms + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2003 additional_datasets: - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-CM2p1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2001, end_year: 2003} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2001, end_year: 2003} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, 
end_year: 2003} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2003, tier: 1} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} scripts: grading: <<: *grading_settings - metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor') - normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none') + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] - ### lwcre: LONGWAVE CLOUD FORCING #################################################################################################################### + ### lwcre: LONGWAVE CLOUD FORCING 
########################################### lwcre: description: Longwave cloud radiative effect + themes: + - clouds + realms: + - atmos variables: lwcre: preprocessor: ppNOLEV1 reference_dataset: CERES-EBAF mip: Amon - field: T2Ms derive: true force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2003 additional_datasets: - - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CMCC-CMS, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: FGOALS-g2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2001, end_year: 2003} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2001, end_year: 2003} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 
2001, end_year: 2003} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2003, tier: 1} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} scripts: latlon: <<: *latlon_settings - abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90 , 100] # Contour levels for absolute plot - diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] # Contour levels for difference plot + # Contour levels for absolute plot + abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] + # Contour levels for difference plot + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] grading: <<: *grading_settings + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] - ### swcre: SHORTWAVE CLOUD FORCING ################################################################################################################### + ### swcre: SHORTWAVE CLOUD FORCING ########################################## swcre: description: Shortwave cloud radiative effect + themes: + - clouds + realms: + - atmos variables: swcre: preprocessor: ppNOLEV1 reference_dataset: CERES-EBAF mip: Amon - field: T2Ms derive: true force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2003 additional_datasets: - - 
{dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: bcc-csm1-1-m, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CCSM4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-BGC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-CAM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CESM1-CAM5-1-FV2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CMCC-CM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: FGOALS-s2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: FIO-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2001, end_year: 2003} - - {dataset: GISS-E2-R, project: CMIP5, exp: historical, ensemble: r1i1p2, start_year: 2001, end_year: 2003} - - {dataset: HadCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-AO, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC4h, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MPI-ESM-LR, 
project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2003} - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2003, tier: 1} + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} scripts: latlon: <<: *latlon_settings - abs_levs: [-100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0] # Contour levels for absolute plot - diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] # Contour levels for difference plot + # Contour levels for absolute plot + abs_levs: [-100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0] + # Contour levels for difference plot + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### od550aer: AEROSOL OPTICAL DEPTH AT 550 nm ############################### + od550aer: + description: Aerosol optical depth at 550 nm + themes: + - aerosols + realms: + - atmos + variables: + od550aer: + preprocessor: ppNOLEV2thr10 + reference_dataset: ESACCI-AEROSOL + alternative_dataset: MODIS + mip: aero + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2003 + end_year: 2004 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: BNU-ESM} + - {dataset: CESM1-CAM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, tier: 3} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 
taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### od870aer: AEROSOL OPTICAL DEPTH AT 870 nm ############################### + od870aer: + description: Aerosol optical depth at 870 nm + themes: + - aerosols + realms: + - atmos + variables: + od870aer: + preprocessor: ppNOLEV1thr10 + reference_dataset: ESACCI-AEROSOL + mip: aero + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2002 + end_year: 2004 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MRI-CGCM3} + - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + scripts: grading: <<: *grading_settings - metric: [RMSD] # Metric ('RMSD', 'BIAS', taylor') - normalization: [centered_median] # Normalization ('mean', 'median', 'centered_median', 'none') + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] -### od550aer: AEROSOL OPTICAL DEPTH AT 550 nm ######################################################################################################## -### od870aer: AEROSOL OPTICAL DEPTH AT 870 nm ######################################################################################################## + ### abs550aer: ABSORPTION OPTICAL DEPTH AT 550 nm ########################### + abs550aer: + description: Absorption optical depth at 550 nm + themes: + - aerosols + realms: + - atmos + variables: + abs550aer: + preprocessor: ppNOLEV1thr10 + reference_dataset: ESACCI-AEROSOL + mip: aero + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2002 + end_year: 2004 + additional_datasets: + - {dataset: CSIRO-Mk3-6-0} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] -### od550lt1aer: FINE MODE AEROSOL OPTICAL DEPTH AT 550 nm ########################################################################################### -### toz: TOTAL COLUMN OZONE ########################################################################################################################## + ### od550lt1aer: FINE MODE AEROSOL OPTICAL DEPTH AT 550 nm ################## + od550lt1aer: + description: Fine mode optical depth at 550 nm + themes: + - aerosols + realms: + - atmos + variables: + od550lt1aer: + preprocessor: ppNOLEV1thr10 + reference_dataset: ESACCI-AEROSOL + mip: aero + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2002 + end_year: 2004 + additional_datasets: + - {dataset: CSIRO-Mk3-6-0} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC5} + - 
{dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MRI-CGCM3} + - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] -### sic: SEA-ICE CONCENTRATION (NH) ################################################################################################################## -### sic: SEA-ICE CONCENTRATION (SH) ################################################################################################################## + ### toz: TOTAL COLUMN OZONE ################################################# + toz: + description: Total column ozone + themes: + - chem + - ghg + realms: + - atmosChem + variables: + toz: + preprocessor: ppNOLEV2thr10 + reference_dataset: ESACCI-OZONE + alternative_dataset: NIWA-BS + mip: Amon + derive: true + force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2002 + end_year: 2004 + additional_datasets: + - {dataset: CESM1-WACCM, ensemble: r2i1p1} + - {dataset: CNRM-CM5} + - {dataset: GFDL-CM3} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: MIROC-ESM-CHEM} + - {dataset: ESACCI-OZONE, project: OBS, type: sat, version: L3, tier: 2} + - {dataset: NIWA-BS, project: OBS, type: sat, version: v3.3, tier: 3} + scripts: + grading_global: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + grading_shpolar: + <<: *grading_settings + # Region + region: shpolar + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] -### dos: SOIL MOISTURE ############################################################################################################################### + ### sic: SEA-ICE CONCENTRATION (NH) ######################################### + ### sic: SEA-ICE CONCENTRATION (SH) ######################################### -### COLLECT METRICS ################################################################################################################################## + ### sm: SOIL MOISTURE ####################################################### + sm: + description: Soil moisture + themes: + - phys + realms: + - land + variables: + sm: + preprocessor: ppNOLEV1thr10 + reference_dataset: ESACCI-SOILMOISTURE + mip: Lmon + derive: true + force_derivation: false + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2002 + end_year: 2004 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadCM3} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: 
ESACCI-SOILMOISTURE, project: OBS, type: sat, version: L3S-SSMV-COMBINED-v4.2, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + ### COLLECT METRICS ######################################################### collect: description: Wrapper to collect and plot previously calculated metrics scripts: RMSD: script: perfmetrics/collect.ncl - ancestors: ['*/grading'] + ancestors: ['*/grading*'] metric: RMSD label_bounds: [-0.5, 0.5] label_scale: 0.1 disp_values: false cm_interval: [2, 241] - sort: true # Sort dataset in alphabetic order (excluding MMM) + # Sort datasets in alphabetical order (excluding MMM) + sort: true + # Sort diagnostics in a specific order (name = 'diagnostic'-'region') + diag_order: ['ta850-global', 'ta200-global', 'ua850-global', + 'ua200-global', 'va850-global', 'va200-global', + 'zg500-global', 'hus400-global', 'tas-global', + 'ts-global', 'pr-global', 'clt-global', 'rlut-global', + 'rsut-global', 'lwcre-global', 'swcre-global', + 'od550aer-global', 'od870aer-global', 'abs550aer-global', + 'od550lt1aer-global', 'toz-global', 'toz-shpolar', + 'sm-global'] taylor: script: perfmetrics/collect.ncl ancestors: ['*/grading'] diff --git a/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml b/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml new file mode 100644 index 0000000000..1441375e5f --- /dev/null +++ b/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml @@ -0,0 +1,1978 @@ +# ESMValTool +# recipe_perfmetrics_CMIP5_4cds.yml +--- +documentation: + description: | + Recipe for plotting the performance metrics for the CMIP5 datasets, + including the standard ECVs as in Gleckler et al., and some additional + variables (like ozone, sea-ice, aerosol...)
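Both the modified recipe above and this new one lean on the same ESMValTool recipe convention: keys shared by every model (project, exp, ensemble, start_year, end_year) are declared once at the variable level, where they act as defaults that each entry under additional_datasets inherits and may override, which is why the dataset lists throughout this diff shrink to one-key mappings. A minimal sketch of the pattern, with an illustrative variable and dataset names only:

    variables:
      tas:
        mip: Amon
        project: CMIP5      # defaults inherited by every dataset below
        exp: historical
        ensemble: r1i1p1
        start_year: 2001
        end_year: 2003
        additional_datasets:
          - {dataset: ACCESS1-0}                    # inherits everything
          - {dataset: GISS-E2-H, ensemble: r1i1p2}  # overrides one default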
+ + authors: + - fran_fr + - righ_ma + - eyri_ve + + maintainer: + - righ_ma + + references: + - gleckler08jgr + + projects: + - esmval + - embrace + - crescendo + - c3s-magic + - cmug + +preprocessors: + pp850: + extract_levels: + levels: 85000 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + pp500: + extract_levels: + levels: 50000 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + pp400: + extract_levels: + levels: 40000 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + pp200: + extract_levels: + levels: 20000 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + pp30: + extract_levels: + levels: 3000 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + pp5: + extract_levels: + levels: 500 + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + ppNOLEV1: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset] + + ppNOLEV2: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + ppNOLEV1thr10: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset] + + ppNOLEV2thr10: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + ppNOLEV1x1: + regrid: + target_grid: 1x1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + ppALL: + extract_levels: + levels: reference_dataset + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + multi_model_statistics: + span: overlap + statistics: [mean, median] + exclude: [reference_dataset, alternative_dataset] + + +diagnostics: + + ### ta: AIR TEMPERATURE ##################################################### + ta850: + description: Air temperature 
at 850 hPa global. + themes: + - phys + realms: + - atmos + variables: + ta: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + cycle: &cycle_settings + script: perfmetrics/main.ncl + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: cycle + # Time average ('opt' argument of time_operations.ncl) + time_avg: monthlyclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Plot standard deviation ('all', 'none', 'ref_model' or dataset name) + plot_stddev: ref_model + # Plot legend in a separate file + legend_outside: true + # Plot style + styleset: CMIP5 + grading: &grading_settings + <<: *cycle_settings + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: cycle_latlon + # Calculate grading + calc_grading: true + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD, taylor] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median, none] + + + ta200: + description: Air temperature at 200 hPa global.
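The &cycle_settings and &grading_settings anchors defined above under ta850 are reused by every following diagnostic through YAML merge keys: <<: *name copies the anchored mapping into the current one, and any key repeated after the merge overrides the copied value, which is how the later diagnostics narrow metric: [RMSD, taylor] down to metric: [RMSD]. A minimal sketch of the mechanism (diag_one and diag_two are placeholder names, not part of the recipe):

    diag_one:
      scripts:
        grading: &grading_settings     # define the anchor once
          script: perfmetrics/main.ncl
          metric: [RMSD, taylor]
    diag_two:
      scripts:
        grading:
          <<: *grading_settings        # merge the anchored mapping...
          metric: [RMSD]               # ...local keys override merged ones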
+ themes: + - phys + realms: + - atmos + variables: + ta: + preprocessor: pp200 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + cycle: + <<: *cycle_settings + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ta30: + description: Air temperature at 30 hPa global. 
+ themes: + - phys + realms: + - atmos + variables: + ta: + preprocessor: pp30 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + cycle: + <<: *cycle_settings + + + ta5: + description: Air temperature at 5 hPa global. 
+ themes: + - phys + realms: + - atmos + variables: + ta: + preprocessor: pp5 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + cycle: + <<: *cycle_settings + + + taZONAL: + description: Air temperature zonal mean + themes: + - phys + realms: + - atmos + variables: + ta: + preprocessor: ppALL + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, 
version: 1, tier: 2} + scripts: + zonal: &zonal_settings + script: perfmetrics/main.ncl + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: zonal + # Time average ('opt' argument of time_operations.ncl) + time_avg: annualclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Draw difference plots + plot_diff: true + # Calculate t-test in difference plots + t_test: true + # Confidence level for the t-test + conf_level: 0.95 + # Mask non-significant values with stippling + stippling: true + # Contour levels for absolute plot + abs_levs: [200, 210, 220, 230, 240, 250, 260, 270, 280, 290, 300] + # Contour levels for difference plot + diff_levs: [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10] + + + ### ua: EASTWARD WIND ####################################################### + ua850: + description: Eastward wind at 850 hPa global. + themes: + - atmDyn + realms: + - atmos + variables: + ua: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ua200: + description: Eastward wind at 200 hPa global. 
+ themes: + - atmDyn + realms: + - atmos + variables: + ua: + preprocessor: pp200 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### va: NORTHWARD WIND ###################################################### + va850: + description: Northward wind at 850 hPa global. 
+ themes: + - atmDyn + realms: + - atmos + variables: + va: + preprocessor: pp850 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + va200: + description: Northward wind at 200 hPa global. 
+ themes: + - atmDyn + realms: + - atmos + variables: + va: + preprocessor: pp200 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### zg: GEOPOTENTIAL HEIGHT ################################################# + zg500: + description: Geopotential height 500 hPa global.
+ themes: + - phys + realms: + - atmos + variables: + zg: + preprocessor: pp500 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### hus: SPECIFIC HUMIDITY ################################################## + hus400: + description: Specific humidity at 400 hPa global.
+ themes: + - phys + realms: + - atmos + variables: + hus: + preprocessor: pp400 + reference_dataset: AIRS + alternative_dataset: ERA-Interim + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2003 + end_year: 2004 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R-CC} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: AIRS, project: obs4mips, level: L3, version: RetStd-v5, tier: 1} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### tas: NEAR-SURFACE TEMPERATURE ########################################### + tas: + description: Near-surface air temperature + themes: + - phys + realms: + - atmos + variables: + tas: + preprocessor: ppNOLEV2 + reference_dataset: ERA-Interim + alternative_dataset: NCEP + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: NCEP, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + latlon:
&latlon_settings + script: perfmetrics/main.ncl + # Plot type ('cycle', 'zonal', 'latlon', 'cycle_latlon', 'cycle_zonal') + plot_type: latlon + # Time average ('opt' argument of time_operations.ncl) + time_avg: annualclim + # Region ('global', 'trop', 'nhext', 'shext') + region: global + # Draw difference plots + plot_diff: true + # Calculate t-test in difference plots + t_test: true + # Confidence level for the t-test + conf_level: 0.95 + # Contour levels for absolute plot + abs_levs: [240, 243, 246, 249, 252, 255, 258, + 261, 264, 267, 270, 273, 276, 279, + 282, 285, 288, 291, 294, 297, 300] + # Contour levels for difference plot + diff_levs: [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5] + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### ts: SEA-SURFACE (SKIN) TEMPERATURE ###################################### + ts: + description: Sea-surface (skin) temperature + themes: + - phys + realms: + - atmos + - ocean + variables: + ts: + preprocessor: ppNOLEV1x1 + reference_dataset: ESACCI-SST + alternative_dataset: HadISST + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ESACCI-SST, project: OBS, type: sat, version: L4-GHRSST-SSTdepth-OSTIA-GLOB, tier: 2} + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### pr: PRECIPITATION ####################################################### + pr: + description: Precipitation + themes: + - phys + realms: + - atmos + variables: + pr: + preprocessor: ppNOLEV1 + reference_dataset: GPCP-SG + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CESM1-FASTCHEM} + - {dataset: CESM1-WACCM} + - {dataset: CMCC-CESM} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5}
+ - {dataset: CNRM-CM5-2} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MPI-ESM-P} + - {dataset: MRI-CGCM3} + - {dataset: MRI-ESM1} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### clt: TOTAL CLOUD COVER ################################################## + clt: + description: Total cloud cover + themes: + - clouds + realms: + - atmos + variables: + clt: + preprocessor: ppNOLEV2 + reference_dataset: ESACCI-CLOUD + alternative_dataset: PATMOS-x + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2000 + end_year: 2002 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-H-CC} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: GISS-E2-R-CC} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-fv3.0, tier: 2} + - {dataset: PATMOS-x, project: OBS, type: sat, version: NOAA, tier: 2} + scripts: + latlon: + <<: *latlon_settings + # Add global average to the plot + show_global_avg: true + # Contour levels for absolute plot + abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] + # Contour levels for difference plot + diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### rlut: ALL-SKY LONGWAVE RADIATION ######################################## + rlut: + description: All-sky longwave radiation + themes: + - phys + realms: + - atmos + variables: + rlut: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF
+ mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2003 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanCM4} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: EC-EARTH, ensemble: r6i1p1} + - {dataset: FGOALS-g2} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + ### rsut: ALL-SKY SHORTWAVE RADIATION ####################################### + rsut: + description: All-sky shortwave radiation + themes: + - phys + realms: + - atmos + variables: + rsut: + preprocessor: ppNOLEV1 + reference_dataset: CERES-EBAF + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 2001 + end_year: 2003 + additional_datasets: + - {dataset: ACCESS1-0} + - {dataset: ACCESS1-3} + - {dataset: bcc-csm1-1} + - {dataset: bcc-csm1-1-m} + - {dataset: BNU-ESM} + - {dataset: CanESM2} + - {dataset: CCSM4} + - {dataset: CESM1-BGC} + - {dataset: CESM1-CAM5} + - {dataset: CESM1-CAM5-1-FV2} + - {dataset: CMCC-CM} + - {dataset: CMCC-CMS} + - {dataset: CNRM-CM5} + - {dataset: CSIRO-Mk3-6-0} + - {dataset: FGOALS-s2} + - {dataset: FIO-ESM} + - {dataset: GFDL-CM2p1} + - {dataset: GFDL-CM3} + - {dataset: GFDL-ESM2G} + - {dataset: GFDL-ESM2M} + - {dataset: GISS-E2-H, ensemble: r1i1p2} + - {dataset: GISS-E2-R, ensemble: r1i1p2} + - {dataset: HadCM3} + - {dataset: HadGEM2-AO} + - {dataset: HadGEM2-CC} + - {dataset: HadGEM2-ES} + - {dataset: inmcm4} + - {dataset: IPSL-CM5A-LR} + - {dataset: IPSL-CM5A-MR} + - {dataset: IPSL-CM5B-LR} + - {dataset: MIROC4h} + - {dataset: MIROC5} + - {dataset: MIROC-ESM} + - {dataset: MIROC-ESM-CHEM} + - {dataset: MPI-ESM-LR} + - {dataset: MPI-ESM-MR} + - {dataset: MRI-CGCM3} + - {dataset: NorESM1-M} + - {dataset: NorESM1-ME} + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + scripts: + grading: + <<: *grading_settings + # Metric ('RMSD', 'BIAS', 'taylor') + metric: [RMSD] + # Normalization ('mean', 'median', 'centered_median', 'none') + normalization: [centered_median] + + + #### lwcre: LONGWAVE CLOUD FORCING ########################################### + #lwcre: + # description: Longwave cloud radiative effect + # themes: + # - clouds + # realms: + # - atmos + # variables: + # lwcre: + # preprocessor: ppNOLEV1 + # reference_dataset: CERES-EBAF + # mip: Amon + # derive: true + # force_derivation: false + # project: CMIP5
+ # exp: historical + # ensemble: r1i1p1 + # start_year: 2001 + # end_year: 2003 + # additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + # - {dataset: BNU-ESM} + # - {dataset: CanESM2} + # - {dataset: CCSM4} + # - {dataset: CESM1-BGC} + # - {dataset: CESM1-CAM5} + # - {dataset: CESM1-CAM5-1-FV2} + # - {dataset: CMCC-CM} + # - {dataset: CMCC-CMS} + # - {dataset: CNRM-CM5} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: FGOALS-g2} + # - {dataset: FGOALS-s2} + # - {dataset: FIO-ESM} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H, ensemble: r1i1p2} + # - {dataset: GISS-E2-R, ensemble: r1i1p2} + # - {dataset: HadCM3} + # - {dataset: HadGEM2-AO} + # - {dataset: HadGEM2-CC} + # - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC4h} + # - {dataset: MIROC5} + # - {dataset: MIROC-ESM} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: MPI-ESM-LR} + # - {dataset: MPI-ESM-MR} + # - {dataset: MRI-CGCM3} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + # - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + # scripts: + # latlon: + # <<: *latlon_settings + # # Contour levels for absolute plot + # abs_levs: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] + # # Contour levels for difference plot + # diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + # grading: + # <<: *grading_settings + + + #### swcre: SHORTWAVE CLOUD FORCING ########################################## + #swcre: + # description: Shortwave cloud radiative effect + # themes: + # - clouds + # realms: + # - atmos + # variables: + # swcre: + # preprocessor: ppNOLEV1 + # reference_dataset: CERES-EBAF + # mip: Amon + # derive: true + # force_derivation: false + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 2001 + # end_year: 2003 + # additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: bcc-csm1-1} + # - {dataset: bcc-csm1-1-m} + # - {dataset: BNU-ESM} + # - {dataset: CanESM2} + # - {dataset: CCSM4} + # - {dataset: CESM1-BGC} + # - {dataset: CESM1-CAM5} + # - {dataset: CESM1-CAM5-1-FV2} + # - {dataset: CMCC-CM} + # - {dataset: CNRM-CM5} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: FGOALS-s2} + # - {dataset: FIO-ESM} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H, ensemble: r1i1p2} + # - {dataset: GISS-E2-R, ensemble: r1i1p2} + # - {dataset: HadCM3} + # - {dataset: HadGEM2-AO} + # - {dataset: HadGEM2-CC} + # - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC4h} + # - {dataset: MIROC5} + # - {dataset: MIROC-ESM} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: MPI-ESM-LR} + # - {dataset: MPI-ESM-MR} + # - {dataset: MRI-CGCM3} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + # - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, tier: 1} + # scripts: + # latlon: + # <<: *latlon_settings + # # Contour levels for absolute plot + # abs_levs: [-100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0] + # # Contour levels for difference plot + # diff_levs: [-30, -25, -20, -15, -10, -5, 0, 5, 10, 15, 20, 25, 30] + # grading: + # <<: *grading_settings + # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD]
+ # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] + + + #### od550aer: AEROSOL OPTICAL DEPTH AT 550 nm ############################### + #od550aer: + # description: Aerosol optical depth at 550 nm + # themes: + # - aerosols + # realms: + # - atmos + # variables: + # od550aer: + # preprocessor: ppNOLEV2thr10 + # reference_dataset: ESACCI-AEROSOL + # alternative_dataset: MODIS + # mip: aero + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 2003 + # end_year: 2004 + # additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: BNU-ESM} + # - {dataset: CESM1-CAM5} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H, ensemble: r1i1p2} + # - {dataset: GISS-E2-R, ensemble: r1i1p2} + # - {dataset: HadGEM2-CC} + # - {dataset: HadGEM2-ES} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC4h} + # - {dataset: MIROC5} + # - {dataset: MIROC-ESM} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: MRI-CGCM3} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + # - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + # - {dataset: MODIS, project: OBS, type: sat, version: MYD08-M3, tier: 3} + # scripts: + # grading: + # <<: *grading_settings + # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD] + # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] + + + #### od870aer: AEROSOL OPTICAL DEPTH AT 870 nm ############################### + #od870aer: + # description: Aerosol optical depth at 870 nm + # themes: + # - aerosols + # realms: + # - atmos + # variables: + # od870aer: + # preprocessor: ppNOLEV1thr10 + # reference_dataset: ESACCI-AEROSOL + # mip: aero + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 2002 + # end_year: 2004 + # additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: HadGEM2-CC} + # - {dataset: HadGEM2-ES} + # - {dataset: MIROC5} + # - {dataset: MIROC-ESM} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: MRI-CGCM3} + # - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + # scripts: + # grading: + # <<: *grading_settings + # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD] + # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] + + + #### abs550aer: ABSORPTION OPTICAL DEPTH AT 550 nm ########################### + #abs550aer: + # description: Absorption optical depth at 550 nm + # themes: + # - aerosols + # realms: + # - atmos + # variables: + # abs550aer: + # preprocessor: ppNOLEV1thr10 + # reference_dataset: ESACCI-AEROSOL + # mip: aero + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 2002 + # end_year: 2004 + # additional_datasets: + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H, ensemble: r1i1p2} + # - {dataset: GISS-E2-R, ensemble: r1i1p2} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC5} + # - {dataset: MIROC-ESM} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + # - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + # scripts: + # grading: + # <<: *grading_settings
+ # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD] + # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] + + + #### od550lt1aer: FINE MODE AEROSOL OPTICAL DEPTH AT 550 nm ################## + #od550lt1aer: + # themes: + # - aerosols + # realms: + # - atmos + # description: Fine mode optical depth at 550 nm + # variables: + # od550lt1aer: + # preprocessor: ppNOLEV1thr10 + # reference_dataset: ESACCI-AEROSOL + # mip: aero + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 2002 + # end_year: 2004 + # additional_datasets: + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: GFDL-CM3} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: GISS-E2-H, ensemble: r1i1p2} + # - {dataset: GISS-E2-R, ensemble: r1i1p2} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC5} + # - {dataset: MIROC-ESM} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: MRI-CGCM3} + # - {dataset: ESACCI-AEROSOL, project: OBS, type: sat, version: SU-v4.21, tier: 2} + # scripts: + # grading: + # <<: *grading_settings + # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD] + # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] + + + #### toz: TOTAL COLUMN OZONE ################################################# + #toz: + # description: Total column ozone + # themes: + # - chem + # - ghg + # realms: + # - atmosChem + # variables: + # toz: + # preprocessor: ppNOLEV2thr10 + # reference_dataset: ESACCI-OZONE + # alternative_dataset: NIWA-BS + # mip: Amon + # derive: true + # force_derivation: false + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 2002 + # end_year: 2004 + # additional_datasets: + # - {dataset: CESM1-WACCM, ensemble: r2i1p1} + # - {dataset: CNRM-CM5} + # - {dataset: GFDL-CM3} + # - {dataset: GISS-E2-H, ensemble: r1i1p2} + # - {dataset: GISS-E2-R, ensemble: r1i1p2} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: ESACCI-OZONE, project: OBS, type: sat, version: L3, tier: 2} + # - {dataset: NIWA-BS, project: OBS, type: sat, version: v3.3, tier: 3} + # scripts: + # grading_global: + # <<: *grading_settings + # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD] + # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] + # grading_antarctic: + # <<: *grading_settings + # # Region + # region: Antarctic + # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD] + # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] + + ### sic: SEA-ICE CONCENTRATION (NH) ######################################### + + ### sic: SEA-ICE CONCENTRATION (SH) ######################################### + + ### sm: SOIL MOISTURE ####################################################### + #sm: + # description: Soil moisture + # themes: + # - phys + # realms: + # - land + # variables: + # sm: + # preprocessor: ppNOLEV1thr10 + # reference_dataset: ESACCI-SOILMOISTURE + # mip: Lmon + # derive: true + # force_derivation: false + # project: CMIP5 + # exp: historical + # ensemble: r1i1p1 + # start_year: 2002 + # end_year: 2004 + # additional_datasets: + # - {dataset: ACCESS1-0} + # - {dataset: ACCESS1-3} + # - {dataset: bcc-csm1-1} + # - {dataset: CanCM4} + # - {dataset: CanESM2} + # - {dataset: CCSM4} + # - {dataset: CESM1-BGC} + # - {dataset: CESM1-CAM5} + # - {dataset: CESM1-FASTCHEM} + # - {dataset: CESM1-WACCM} + # - {dataset: CNRM-CM5} + # - {dataset: CSIRO-Mk3-6-0} + # - {dataset: FGOALS-g2}
+ # - {dataset: FGOALS-s2} + # - {dataset: GFDL-ESM2G} + # - {dataset: GFDL-ESM2M} + # - {dataset: HadCM3} + # - {dataset: HadGEM2-CC} + # - {dataset: HadGEM2-ES} + # - {dataset: inmcm4} + # - {dataset: IPSL-CM5A-LR} + # - {dataset: IPSL-CM5A-MR} + # - {dataset: IPSL-CM5B-LR} + # - {dataset: MIROC4h} + # - {dataset: MIROC5} + # - {dataset: MIROC-ESM} + # - {dataset: MIROC-ESM-CHEM} + # - {dataset: MRI-CGCM3} + # - {dataset: NorESM1-M} + # - {dataset: NorESM1-ME} + # - {dataset: ESACCI-SOILMOISTURE, project: OBS, type: sat, version: L3S-SSMV-COMBINED-v4.2, tier: 2} + # scripts: + # grading: + # <<: *grading_settings + # # Metric ('RMSD', 'BIAS', 'taylor') + # metric: [RMSD] + # # Normalization ('mean', 'median', 'centered_median', 'none') + # normalization: [centered_median] + + + ### COLLECT METRICS ######################################################### + collect: + description: Wrapper to collect and plot previously calculated metrics + scripts: + RMSD: + script: perfmetrics/collect.ncl + ancestors: ['*/grading*'] + metric: RMSD + label_bounds: [-0.5, 0.5] + label_scale: 0.1 + disp_values: false + cm_interval: [2, 241] + # Sort datasets in alphabetical order (excluding MMM) + sort: true + # Sort diagnostics in a specific order (name = 'diagnostic'-'region') + diag_order: ['ta850-global', 'ta200-global', 'ua850-global', + 'ua200-global', 'va850-global', 'va200-global', + 'zg500-global', 'hus400-global', 'tas-global', + 'ts-global', 'pr-global', 'clt-global', 'rlut-global', + 'rsut-global'] + taylor: + script: perfmetrics/collect.ncl + ancestors: ['*/grading'] + metric: taylor diff --git a/esmvaltool/recipes/recipe_quantilebias.yml b/esmvaltool/recipes/recipe_quantilebias.yml new file mode 100644 index 0000000000..0a63ff4484 --- /dev/null +++ b/esmvaltool/recipes/recipe_quantilebias.yml @@ -0,0 +1,86 @@ +# recipe_quantilebias.yml +--- + +documentation: + description: | + Tool for calculating the precipitation quantile bias + + authors: + - arno_en + - hard_jo + + maintainer: + - hard_jo + + references: + - mehran14jgr + + projects: + - c3s-magic + +datasets: + - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r2i1p1, start_year: 1997, end_year: 1997} + - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1, start_year: 1997, end_year: 1997} + +preprocessors: + masking_preprocessor: + mask_landsea: + mask_out: sea + +diagnostics: + quantilebias: + description: Tool for calculating the precipitation quantile bias + variables: + pr: + preprocessor: masking_preprocessor + reference_dataset: "GPCP-SG" + mip: Amon + project: CMIP5 + exp: historical + ensemble: r1i1p1 + start_year: 1997 + end_year: 1997 + additional_datasets: + - {dataset: ACCESS1-0} +# - {dataset: ACCESS1-3} + #- {dataset: bcc-csm1-1} + #- {dataset: bcc-csm1-1-m} + #- {dataset: CanESM2} + #- {dataset: CCSM4} + #- {dataset: CESM1-BGC} + #- {dataset: CESM1-CAM5} + #- {dataset: CESM1-FASTCHEM} + #- {dataset: CESM1-WACCM} + #- {dataset: CMCC-CESM} + #- {dataset: CMCC-CM} + #- {dataset: CMCC-CMS} + #- {dataset: CNRM-CM5} + #- {dataset: CSIRO-Mk3-6-0} + #- {dataset: EC-EARTH31, ensemble: r2i1p1} + #- {dataset: FGOALS-g2} + #- {dataset: FGOALS-s2} + #- {dataset: FIO-ESM} + #- {dataset: GFDL-CM2p1} + #- {dataset: GFDL-CM3} + #- {dataset: GFDL-ESM2G} + #- {dataset: GFDL-ESM2M} + #- {dataset: HadGEM2-AO} + #- {dataset: HadGEM2-CC} + #- {dataset: HadGEM2-ES} + #- {dataset: inmcm4} + #- {dataset: IPSL-CM5A-LR} + #- {dataset: IPSL-CM5A-MR} + #- {dataset: IPSL-CM5B-LR} + #- {dataset: MIROC5}
+ #- {dataset: MIROC-ESM} + #- {dataset: MIROC-ESM-CHEM} + #- {dataset: MPI-ESM-LR} + #- {dataset: MPI-ESM-P} + #- {dataset: MRI-CGCM3} + #- {dataset: NorESM1-M} + #- {dataset: NorESM1-ME} + + scripts: + main: + script: quantilebias/quantilebias.R + perc_lev: 75 diff --git a/esmvaltool/recipes/recipe_rainfarm.yml b/esmvaltool/recipes/recipe_rainfarm.yml new file mode 100644 index 0000000000..07fb7aec4b --- /dev/null +++ b/esmvaltool/recipes/recipe_rainfarm.yml @@ -0,0 +1,51 @@ +# recipe_rainfarm.yml +--- + +documentation: + description: | + Recipe for performing stochastic downscaling of precipitation fields + calling the RainFARM package by J. von Hardenberg (ISAC-CNR) + authors: + - arno_en + - hard_jo + + references: + - donofrio14jh + - rebora06jhm + - terzago18nhess + + projects: + - c3s-magic + +datasets: + - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1997, end_year: 1997} + +preprocessors: + preproc: + extract_region: + # The selected region must have an equal and even number of longitude and + # latitude grid points (e.g., 2x2, 4x4, ...); otherwise it is cut accordingly. + # Warning: downscaling can reach very high resolution, so select a limited area. + start_longitude: 5 + end_longitude: 15 + start_latitude: 40 + end_latitude: 50 + regrid: + target_grid: 1x1 # in degrees, can also be the name of one of the datasets to use the grid from that dataset + scheme: area_weighted # can be linear, nearest, area_weighted, unstructured_nearest +diagnostics: + rainfarm: + description: RainFARM stochastic downscaling of precipitation fields + variables: + pr: + preprocessor: preproc + mip: day + scripts: + rainfarm: + script: rainfarm/rainfarm.R + slope: 1.7 # spatial spectral slope (set to 0 to compute it from the large scales) + nens: 2 # number of ensemble members to be calculated + nf: 8 # subdivisions for downscaling + conserv_glob: false # conserve precipitation over the full domain (choose either glob or smooth; glob has priority) + conserv_smooth: true # conserve precipitation using convolution (if neither is chosen, box conservation is used) + weights_climo: false # orographic weights: set to false or to the full path of a fine-scale precipitation climatology file diff --git a/esmvaltool/recipes/recipe_runoff_et.yml b/esmvaltool/recipes/recipe_runoff_et.yml new file mode 100644 index 0000000000..fa1951517c --- /dev/null +++ b/esmvaltool/recipes/recipe_runoff_et.yml @@ -0,0 +1,46 @@ +# ESMValTool +# recipe_runoff_et.yml +--- +documentation: + description: | + Recipe for plotting runoff and ET diagnostics. The diagnostic calculates water + balance components for different catchments and compares the results against + observations.
Currently, the required catchment mask needs to be downloaded manually + at https://doi.org/10.5281/zenodo.2025776 + + authors: + - hage_st + - loew_al + - stac_to + + maintainer: + - righ_ma + + references: + - duemenil00mpimr + - hagemann13james + - weedon14wrr + + projects: + - embrace + - crescendo + +datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1970, end_year: 2000} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1970, end_year: 2000} + +diagnostics: + catchment_analysis: + description: EMBRACE runoff and land surface evaporation diagnostic + variables: + evspsbl: + mip: Amon + mrro: + mip: Lmon + pr: + mip: Amon + scripts: + catchment_analysis: + script: runoff_et/catchment_analysis.py + catchmentmask: ~/catchmentmask_v1.nc + diff --git a/esmvaltool/recipes/recipe_shapeselect.yml b/esmvaltool/recipes/recipe_shapeselect.yml new file mode 100644 index 0000000000..58db4da176 --- /dev/null +++ b/esmvaltool/recipes/recipe_shapeselect.yml @@ -0,0 +1,30 @@ +--- +documentation: + description: 'Selects grid points belonging to a provided shapefile.' + authors: ['berg_pe'] + projects: ['c3s-magic'] + references: ['acknow_project'] + +datasets: + - {dataset: EC-EARTH, project: CMIP5, mip: Amon, exp: historical, ensemble: r12i1p1, start_year: 1990, end_year: 1999} +# - {dataset: CRU, mip: Amon, project: OBS, type: reanaly, version: 1, start_year: 1990, end_year: 1994, tier: 3} +# - {dataset: bcc-csm1-1-m, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1998, end_year: 1999} + + +diagnostics: + diagnostic1: + description: Extracting data for user defined shapefile. + variables: + tas: + pr: + scripts: + script1: + script: shapeselect/diag_shapeselect.py + # Example shapefiles can be found in: + # esmvaltool/diag_scripts/shapeselect/testdata/ + # Relative paths are relative to 'auxiliary_data_dir' as configured in + # the config-user.yml file. + shapefile: 'Thames.shp' + weighting_method: 'mean_inside' + write_xlsx: true + write_netcdf: true diff --git a/esmvaltool/recipes/recipe_smpi.yml b/esmvaltool/recipes/recipe_smpi.yml new file mode 100644 index 0000000000..26eb5b931a --- /dev/null +++ b/esmvaltool/recipes/recipe_smpi.yml @@ -0,0 +1,368 @@ +# ESMValTool +# recipe_smpi.yml +--- +documentation: + description: | + Recipe for computing Single Model Performance Index. Follows Reichler + and Kim 2008. Considers the following variables: + Sea level pressure, Air Temperature, Zonal Wind Stress, Meridional Wind + Stress, 2m air temperature, Zonal Wind, Meridional Wind, Net surface heat + flux, Precipitation, Specific Humidity, Snow fraction, Sea Surface + Temperature, Sea Ice Fraction and sea surface salinity. 
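For orientation, the aggregation behind a Reichler and Kim (2008)-style index can be sketched in a few lines of Python. This is a hedged illustration only: the array names, shapes, and the exact normalization step are assumptions for clarity, not the perfmetrics/main.ncl implementation that this recipe actually runs.

    import numpy as np

    def smpi(model_clim, obs_clim, obs_var, area_w):
        """Reichler & Kim (2008)-style index, illustrative sketch.

        model_clim: (M, V, N) climatologies for M models, V variables, N cells
        obs_clim:   (V, N) observed climatologies
        obs_var:    (V, N) interannual variance of the observations
        area_w:     (N,) grid-cell area weights
        """
        # Area-weighted, variance-normalized squared error per model and variable
        e2 = np.einsum('n,mvn->mv', area_w, (model_clim - obs_clim) ** 2 / obs_var)
        # Normalize by the multi-model mean error (cf. 'normalization: CMIP5' below)
        i2 = e2 / e2.mean(axis=0)
        # Average over variables: one performance index per model
        return i2.mean(axis=1)

The recipe's smpi_n_bootstrap setting adds an uncertainty estimate by repeating a calculation of this kind on resampled years, which the sketch above omits.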
+ + authors: + - hass_bg + - gier_be + - righ_ma + - eyri_ve + + references: + - rk2008bams + + projects: + - crescendo + - c3s-magic + +datasets: + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CSIRO-Mk3-6-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1} + +preprocessors: + + ppALL: + extract_levels: + levels: reference_dataset + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppNOLEV: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppNOLEVirreg: + regrid: + target_grid: 1x1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + +diagnostics: + + ta: + description: Air temperature zonal mean + themes: + - phys + realms: + - atmos + variables: + ta: &variable_settings + preprocessor: ppALL + reference_dataset: ERA-Interim + mip: Amon + start_year: 1980 + end_year: 2005 + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: &grading_settings + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + va: + description: Meridional Wind + themes: + - phys + realms: + - atmos + variables: + va: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + ua: + description: Zonal Wind + themes: + - phys + realms: + - atmos + variables: + ua: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + hus: + description: Specific humidity + themes: + - phys + realms: + - atmos + variables: + hus: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tas:
description: Near-surface temperature + themes: + - phys + realms: + - atmos + variables: + tas: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + psl: + description: Sea-level pressure + themes: + - phys + realms: + - atmos + variables: + psl: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + pr: + description: Precipitation + themes: + - phys + realms: + - atmos + variables: + pr: + <<: *variable_settings + preprocessor: ppNOLEV + reference_dataset: GPCP-SG + additional_datasets: + - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tos: + description: Sea surface temperature + themes: + - phys + realms: + - ocean + variables: + tos: + <<: *variable_settings + preprocessor: ppNOLEVirreg + reference_dataset: HadISST + mip: Omon + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + sic: + description: Sea ice fraction + themes: + - phys + realms: + - seaIce + variables: + sic: + <<: *variable_settings + preprocessor: ppNOLEVirreg + reference_dataset: HadISST + mip: OImon + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + hfds: + description: Net Surface Heat Flux + themes: + - phys + realms: + - ocean + variables: + hfds: + <<: *variable_settings + preprocessor: ppNOLEVirreg + mip: Omon + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tauu: + description: Zonal Wind Stress + themes: + - phys + realms: + - atmos + variables: + tauu: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tauv: + description: Meridional Wind Stress + themes: + - phys + realms: + - atmos + variables: + tauv: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: 
+ grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + ### COLLECT METRICS ################### + collect: + description: Wrapper to collect and plot previously calculated metrics + scripts: + SMPI: + script: perfmetrics/collect.ncl + ancestors: ['*/grading'] + metric: SMPI diff --git a/esmvaltool/recipes/recipe_smpi_4cds.yml b/esmvaltool/recipes/recipe_smpi_4cds.yml new file mode 100644 index 0000000000..bc486be526 --- /dev/null +++ b/esmvaltool/recipes/recipe_smpi_4cds.yml @@ -0,0 +1,375 @@ +# ESMValTool +# recipe_smpi_4cds.yml +--- +documentation: + description: | + Recipe for computing Single Model Performance Index. Follows Reichler + and Kim 2008. Considers the following variables: + Sea level pressure, Air Temperature, Zonal Wind Stress, Meridional Wind + Stress, 2m air temperature, Zonal Wind, Meridional Wind, Net surface heat + flux, Precipitation, Specific Humidity, Snow fraction, Sea Surface + Temperature, Sea Ice Fraction and sea surface salinity. + + authors: + - hass_bg + - gier_be + - righ_ma + - eyri_ve + + references: + - rk2008bams + + projects: + - crescendo + - c3s-magic + +datasets: + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CMCC-CESM, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: CNRM-CM5-2, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2G, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: GFDL-ESM2M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: MPI-ESM-P, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: NorESM1-ME, project: CMIP5, exp: historical, ensemble: r1i1p1} + + +preprocessors: + + ppALL: + extract_levels: + levels: reference_dataset + scheme: linear + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppNOLEV: + regrid: + target_grid: reference_dataset + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + + ppNOLEVirreg: + regrid: + target_grid: 1x1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.10 + multi_model_statistics: + span: overlap + statistics: [mean] + exclude: [reference_dataset] + +diagnostics: + + ta: + description: Air temperature zonal mean + themes: + - phys + realms: + - atmos + variables: + ta: &variable_settings + preprocessor: ppALL + reference_dataset: ERA-Interim + mip: Amon + start_year: 1980 + end_year: 2005 + additional_datasets: + - 
{dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: &grading_settings + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + va: + description: Meridional Wind + themes: + - phys + realms: + - atmos + variables: + va: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + ua: + description: Zonal Wind + themes: + - phys + realms: + - atmos + variables: + ua: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + hus: + description: Specific humidity + themes: + - phys + realms: + - atmos + variables: + hus: + <<: *variable_settings + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_zonal + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tas: + description: Near-surface temperature + themes: + - phys + realms: + - atmos + variables: + tas: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + psl: + description: Sea-level pressure + themes: + - phys + realms: + - atmos + variables: + psl: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + pr: + description: Precipitation + themes: + - phys + realms: + - atmos + variables: + pr: + <<: *variable_settings + preprocessor: ppNOLEV + reference_dataset: GPCP-SG + additional_datasets: + - {dataset: GPCP-SG, project: obs4mips, level: L3, version: v2.2, tier: 1} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tos: + description: Sea surface temperature + themes: + - phys + realms: + - ocean + variables: + tos: + <<: *variable_settings + preprocessor: ppNOLEVirreg + reference_dataset: HadISST + mip: Omon + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + sic: + description: Sea ice fraction + themes: + - phys + realms: + - seaIce + variables: + sic: + <<:
*variable_settings + preprocessor: ppNOLEVirreg + reference_dataset: HadISST + mip: OImon + additional_datasets: + - {dataset: HadISST, project: OBS, type: reanaly, version: 1, tier: 2} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + +# hfds: +# description: Net Surface Heat Flux +# themes: +# - phys +# realms: +# - ocean +# variables: +# hfds: +# <<: *variable_settings +# preprocessor: ppNOLEVirreg +# mip: Omon +# additional_datasets: +# - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} +# scripts: +# grading: +# script: perfmetrics/main.ncl +# plot_type: cycle_latlon +# time_avg: yearly +# region: global +# calc_grading: true +# metric: [SMPI] +# normalization: CMIP5 +# smpi_n_bootstrap: 100 + + tauu: + description: Zonal Wind Stress + themes: + - phys + realms: + - atmos + variables: + tauu: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + tauv: + description: Meridional Wind Stress + themes: + - phys + realms: + - atmos + variables: + tauv: + <<: *variable_settings + preprocessor: ppNOLEV + additional_datasets: + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + scripts: + grading: + script: perfmetrics/main.ncl + plot_type: cycle_latlon + time_avg: yearly + region: global + calc_grading: true + metric: [SMPI] + normalization: CMIP5 + smpi_n_bootstrap: 100 + + ### COLLECT METRICS ################### + collect: + description: Wrapper to collect and plot previously calculated metrics + scripts: + SMPI: + script: perfmetrics/collect.ncl + ancestors: ['*/grading'] + metric: SMPI diff --git a/esmvaltool/recipes/recipe_spei.yml b/esmvaltool/recipes/recipe_spei.yml new file mode 100644 index 0000000000..e3da38cf90 --- /dev/null +++ b/esmvaltool/recipes/recipe_spei.yml @@ -0,0 +1,50 @@ +--- +documentation: + description: 'Calculates the SPI and SPEI drought indices.' 
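As background for what the R diagnostics below compute, the SPI-type transformation can be sketched in a few lines: accumulate precipitation over a time scale, fit a distribution, and map the fitted probabilities to standard normal deviates. This Python sketch is an assumption for illustration only (the recipe runs droughtindex/diag_spi.r and diag_spei.r); in particular it uses a simple gamma fit and ignores the special handling of zero-precipitation months that full SPI implementations include.

    import numpy as np
    from scipy import stats

    def spi(precip, scale=3):
        """Minimal SPI sketch: accumulate, fit a gamma law, map to z-scores."""
        # Rolling accumulation over `scale` consecutive time steps
        acc = np.convolve(precip, np.ones(scale), mode='valid')
        # Fit a gamma distribution to the positive accumulated amounts
        shape, loc, beta = stats.gamma.fit(acc[acc > 0], floc=0)
        # Probabilities under the fitted law, mapped to standard normal deviates
        cdf = stats.gamma.cdf(acc, shape, loc=loc, scale=beta)
        return stats.norm.ppf(np.clip(cdf, 1e-6, 1 - 1e-6))

SPEI follows the same idea but is applied to the climatic water balance (precipitation minus potential evapotranspiration), which is why the diagnostic below also requests tas.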
+ authors: ['berg_pe'] + projects: ['c3s-magic'] + references: ['acknow_project'] + +datasets: +# - {dataset: CRU, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: ERA-Interim, project: OBS, type: reanaly, version: 1, tier: 3} + - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: ACCESS1-3, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: BNU-ESM, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: CNRM-CM5, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: EC-EARTH, project: CMIP5, exp: historical, ensemble: r12i1p1} +# - {dataset: GFDL-CM3, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: GISS-E2-H, project: CMIP5, exp: historical, ensemble: r6i1p1} +# - {dataset: HadGEM2-CC, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: HadGEM2-ES, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: inmcm4, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: IPSL-CM5A-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: IPSL-CM5B-LR, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: MPI-ESM-MR, project: CMIP5, exp: historical, ensemble: r1i1p1} +# - {dataset: NorESM1-M, project: CMIP5, exp: historical, ensemble: r1i1p1} + +preprocessors: + preprocessor: + regrid: + target_grid: reference_dataset + scheme: linear + +diagnostics: + diagnostic: + description: Calculates the SPI and SPEI indices + variables: + pr: &var + reference_dataset: ERA-Interim + preprocessor: preprocessor + start_year: 2000 + end_year: 2005 + mip: Amon + tas: *var + scripts: + spi: + script: droughtindex/diag_spi.r + ancestors: [pr] + spei: + script: droughtindex/diag_spei.r diff --git a/esmvaltool/recipes/recipe_thermodyn_diagtool.yml b/esmvaltool/recipes/recipe_thermodyn_diagtool.yml new file mode 100644 index 0000000000..257b634a42 --- /dev/null +++ b/esmvaltool/recipes/recipe_thermodyn_diagtool.yml @@ -0,0 +1,86 @@ +# recipe_thermodyn_diagtool.yml + +--- +documentation: + description: | + Recipe for the computation of various aspects associated with + the thermodynamics of the climate system, such as energy and + water mass budgets, meridional enthalpy transports, the Lorenz + Energy Cycle and the material entropy production.
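One of the quantities listed above, the implied meridional enthalpy transport, is commonly obtained by integrating the zonally integrated net energy flux from one pole to the other. The Python sketch below illustrates that standard construction under stated assumptions (a regular lat-lon grid with ascending latitudes and a time-mean net downward flux); it is not the thermodyn_diagnostics.py implementation that this recipe runs.

    import numpy as np

    def implied_transport(net_flux, lat, lon, radius=6.371e6):
        """Implied northward energy transport (W) from a net flux (W m-2).

        net_flux: (nlat, nlon) time-mean net downward energy flux
        lat, lon: 1-D coordinates in degrees, regular and ascending
        """
        phi = np.deg2rad(lat)
        dlam = np.deg2rad(abs(lon[1] - lon[0]))
        dphi = np.deg2rad(abs(lat[1] - lat[0]))
        # Zonally integrated flux per radian of latitude (W rad-1)
        zonal = net_flux.sum(axis=1) * dlam * radius**2 * np.cos(phi)
        # Accumulate from the south pole northward: energy that must be
        # carried poleward across each latitude circle
        return np.cumsum(zonal) * dphi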
+ + authors: + - lemb_va + - kold_ni + + references: + - lembo16climdyn + - lucarini14revgeop + + projects: + - esmval + - trr181 + +datasets: + - {dataset: CanESM2, project: CMIP5, exp: piControl, ensemble: r1i1p1, start_year: 2456, end_year: 2460} + - {dataset: BNU-ESM, project: CMIP5, exp: piControl, ensemble: r1i1p1, start_year: 2002, end_year: 2007} + - {dataset: IPSL-CM5A-MR, project: CMIP5, exp: piControl, ensemble: r1i1p1, start_year: 2044, end_year: 2049} + - {dataset: MIROC5, project: CMIP5, exp: piControl, ensemble: r1i1p1, start_year: 2310, end_year: 2314} + - {dataset: MIROC-ESM-CHEM, project: CMIP5, exp: piControl, ensemble: r1i1p1, start_year: 2001, end_year: 2005} + - {dataset: MPI-ESM-LR, project: CMIP5, exp: piControl, ensemble: r1i1p1, start_year: 2031, end_year: 2035} + - {dataset: MPI-ESM-MR, project: CMIP5, exp: piControl, ensemble: r1i1p1, start_year: 2001, end_year: 2005} + +diagnostics: + Thermodyn_Diag: + description: Thermodynamics diagnostics + variables: + hfls: + mip: Amon + hfss: + mip: Amon + pr: + mip: Amon + ps: + mip: Amon + prsn: + mip: Amon + rlds: + mip: Amon + rlus: + mip: Amon + rlut: + mip: Amon + rsds: + mip: Amon + rsus: + mip: Amon + rsdt: + mip: Amon + rsut: + mip: Amon + ts: + mip: Amon + hus: + mip: Amon + tas: + mip: day + uas: + mip: day + vas: + mip: day + ta: + fx_files: [sftlf] + mip: day + ua: + mip: day + va: + mip: day + wap: + mip: day + scripts: + Thermodyn_Diag: + script: thermodyn_diagtool/thermodyn_diagnostics.py + wat: true + lec: false + entr: true + met: 3 + lsm: true diff --git a/esmvaltool/recipes/recipe_toymodel.yml b/esmvaltool/recipes/recipe_toymodel.yml new file mode 100644 index 0000000000..df9322f5cf --- /dev/null +++ b/esmvaltool/recipes/recipe_toymodel.yml @@ -0,0 +1,54 @@ +# ESMValTool +# recipe_toymodel.yml +--- +documentation: + description: | + Tool for generating synthetic observations based on the model presented + in Weigel et al. (2008) QJRS, with an extension to consider non-stationary + distributions prescribing a linear trend. The toy model allows generating + an artificial forecast based on observations provided as input. + + + authors: + - bell_om + + projects: + - c3s-magic + + references: + - weigel + + +datasets: +# - {dataset: IPSL-CM5A-LR, type: exp, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1999, end_year: 2000} +# - {dataset: MPI-ESM-LR, type: exp, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2020, end_year: 2050} + - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: rcp45, ensemble: r1i1p1, start_year: 2051, end_year: 2060} + +preprocessors: + preproc: + regrid: + target_grid: bcc-csm1-1 + scheme: linear + mask_fillvalues: + threshold_fraction: 0.95 + extract_region: + start_longitude: -40 + end_longitude: 40 + start_latitude: 30 + end_latitude: 50 + + +diagnostics: + toymodel: + description: Generate synthetic observations. + variables: + psl: + preprocessor: preproc + mip: Amon + + scripts: + main: + script: magic_bsc/toymodel.r + beta: 0.7 + number_of_members: 2 diff --git a/esmvaltool/recipes/recipe_validation.yml b/esmvaltool/recipes/recipe_validation.yml new file mode 100644 index 0000000000..b01ec8bb36 --- /dev/null +++ b/esmvaltool/recipes/recipe_validation.yml @@ -0,0 +1,66 @@ +# ESMValTool +--- +documentation: + description: | + Validation of CONTROL and EXPERIMENT datasets.
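The analysis_type options configured further below (lat_lon, zonal_mean, meridional_mean) each reduce a 2-D field along one axis; a minimal sketch of the two mean types, assuming a (nlat, nlon) array and using cosine-latitude weighting for the meridional case (the actual reductions live in validation.py):

    import numpy as np

    def zonal_mean(field):
        """Average over longitude; field has shape (nlat, nlon)."""
        return field.mean(axis=1)

    def meridional_mean(field, lat):
        """Cosine-latitude weighted average over latitude (lat in degrees)."""
        w = np.cos(np.deg2rad(lat))
        return (field * w[:, None]).sum(axis=0) / w.sum()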
+ + authors: + - pred_va + + maintainer: + - pred_va + +datasets: + - {dataset: MPI-ESM-LR, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} + - {dataset: NorESM1-M, project: CMIP5, mip: Amon, exp: historical, ensemble: r1i1p1, start_year: 2001, end_year: 2005} + +preprocessors: + pp_rad: + regrid: + target_grid: 1x1 + scheme: linear + pp_rad_derive_var: + # use this to avoid overwriting variables that are used both + # for derivation and as standalone diagnostic variables + regrid: + target_grid: 1x1 + scheme: linear + +diagnostics: + validation_with_CERES-EBAF: + description: "CMIP5 vs CERES-EBAF" + variables: + rtnt: # TOA TOT net + preprocessor: pp_rad_derive_var + derive: true + force_derivation: false + rsut: # TOA SW up all sky + preprocessor: pp_rad + additional_datasets: + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, start_year: 2001, end_year: 2012, tier: 1} + scripts: + lat_lon: + script: validation.py + title: "" + control_model: MPI-ESM-LR + exper_model: NorESM1-M + observational_datasets: ['CERES-EBAF'] # list of at least one element; comment out if no OBS are wanted + analysis_type: lat_lon # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] + seasonal_analysis: False + zonal_mean: + script: validation.py + title: "" + control_model: MPI-ESM-LR + exper_model: NorESM1-M + observational_datasets: ['CERES-EBAF'] # list of at least one element; comment out if no OBS are wanted + analysis_type: zonal_mean # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] + seasonal_analysis: True + meridional_mean: + script: validation.py + title: "" + control_model: MPI-ESM-LR + exper_model: NorESM1-M + observational_datasets: ['CERES-EBAF'] # list of at least one element; comment out if no OBS are wanted + analysis_type: meridional_mean # use any of: lat_lon, meridional_mean, zonal_mean, vertical_mean[not implemented] + seasonal_analysis: True + diff --git a/esmvaltool/recipes/recipe_williams09climdyn_CREM.yml b/esmvaltool/recipes/recipe_williams09climdyn_CREM.yml new file mode 100644 index 0000000000..eaafb97531 --- /dev/null +++ b/esmvaltool/recipes/recipe_williams09climdyn_CREM.yml @@ -0,0 +1,159 @@ +# ESMValTool +# recipe_williams09climdyn_CREM.yml +--- +documentation: + description: | + Cloud Regime Error Metric (CREM) by Williams and Webb (2009).
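CREM compares how often a model produces each observed ISCCP cloud regime and how well it reproduces the regimes' properties. A hedged sketch of the nearest-centroid assignment step that regime-based metrics of this kind rely on is given below; the feature choice and centroid data are illustrative assumptions, and the published metric involves additional weighting and normalization beyond this step.

    import numpy as np

    def assign_regimes(features, centroids):
        """Assign each daily sample to the nearest cloud-regime centroid.

        features:  (nsamples, nprops), e.g. albedo, cloud-top pressure, cloud cover
        centroids: (nregimes, nprops), observed regime centroids
        """
        # Euclidean distance from every sample to every centroid
        d = np.linalg.norm(features[:, None, :] - centroids[None, :, :], axis=-1)
        return d.argmin(axis=1)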
+ + authors: + - will_ke + - laue_ax + + maintainer: + - laue_ax + + references: + - williams09climdyn + + projects: + - cmug + + +#datasets: +# - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} +# - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} +# - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} +# - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} +# - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + + +preprocessors: + preproc25x25: + regrid: + target_grid: 2.5x2.5 + scheme: linear + + +diagnostics: + + # ********************************************************************** + # Cloud Regime Error Metric (CREM) + # ********************************************************************** + + crem: + description: Cloud Regime Error Metric (CREM) + themes: + - phys + - clouds + realms: + - atmos + - land + - seaIce + variables: + albisccp: &var_settings + preprocessor: preproc25x25 + start_year: 1985 + end_year: 1987 + grid: gr + mip: cfDay + exp: amip + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2} + pctisccp: + <<: *var_settings + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2} + cltisccp: + <<: *var_settings + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2} + rsut: + <<: *var_settings + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2} + rlut: + <<: *var_settings + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: day, ensemble: r1i1p1f2} + rsutcs: + <<: *var_settings + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble:
r1i1p1f2} + rlutcs: + <<: *var_settings + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, mip: CFday, ensemble: r1i1p1f2} + snc: + <<: *var_settings + mip: day + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2} + # snw: + # <<: *var_settings + # mip: day + # additional_datasets: + # - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + # - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + # - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + # - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + # - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + # - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2} + sic: + <<: *var_settings + mip: day + additional_datasets: + - {dataset: CanAM4, project: CMIP5, ensemble: r1i1p1} + - {dataset: CNRM-CM5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MIROC5, project: CMIP5, ensemble: r1i1p1} + - {dataset: MPI-ESM-LR, project: CMIP5, ensemble: r1i1p1} + - {dataset: MRI-CGCM3, project: CMIP5, ensemble: r1i1p1} + siconc: + <<: *var_settings + grid: gn + mip: SIday + exp: historical + additional_datasets: + - {dataset: CNRM-CM6-1, project: CMIP6, exp: historical, ensemble: r1i1p1f2} + scripts: + clim: + script: crem/ww09_esmvaltool.py diff --git a/esmvaltool/recipes/recipe_zmnam.yml b/esmvaltool/recipes/recipe_zmnam.yml new file mode 100644 index 0000000000..1ca0bda913 --- /dev/null +++ b/esmvaltool/recipes/recipe_zmnam.yml @@ -0,0 +1,53 @@ +# recipe_zmnam.yml +--- +documentation: + description: | + Recipe for the zonal mean Northern Annular Mode. + The diagnostics compute the index and the spatial + pattern to assess the simulation of the + stratosphere-troposphere coupling in the Northern Hemisphere. + + authors: + - serv_fe + - hard_jo + - arno_en + - cagn_ch + + maintainer: + - serv_fe + + references: + - baldwin09qjrms + + projects: + - c3s-magic + +datasets: + - {dataset: MPI-ESM-MR, project: CMIP5, mip: day, exp: amip, ensemble: r1i1p1, start_year: 1979, end_year: 2008} + #- {model: MPI-ESM-MR, project: CMIP5, mip: day, exp: historical, ensemble: r1i1p1, start_year: 1979, end_year: 2008} + +preprocessors: + preproc: + extract_levels: + levels: [100000., 50000., 25000., 5000.] + scheme: nearest + regrid: + target_grid: 3x3 + scheme: area_weighted + extract_region: + start_longitude: 0. + end_longitude: 360. + start_latitude: 20. + end_latitude: 90.
+ + +diagnostics: + zmnam: + description: Zonal mean Northern Annular Mode + variables: + zg: + preprocessor: preproc + scripts: + main: + script: zmnam/zmnam.py + title: "Zonal mean NAM diagnostics" diff --git a/esmvaltool/utils/batch-jobs/job_DKRZ-MISTRAL.sh b/esmvaltool/utils/batch-jobs/job_DKRZ-MISTRAL.sh old mode 100644 new mode 100755 index 5ed56af479..125bd54fc7 --- a/esmvaltool/utils/batch-jobs/job_DKRZ-MISTRAL.sh +++ b/esmvaltool/utils/batch-jobs/job_DKRZ-MISTRAL.sh @@ -1,3 +1,4 @@ + #!/bin/bash -e ############################################################################### ### BATCH SCRIPT TO RUN THE ESMVALTOOL AT DKRZ MISTRAL diff --git a/esmvaltool/utils/cmorizers/mip_convert/config-mipconv-user.yml b/esmvaltool/utils/cmorizers/mip_convert/config-mipconv-user.yml new file mode 100644 index 0000000000..93362f92d7 --- /dev/null +++ b/esmvaltool/utils/cmorizers/mip_convert/config-mipconv-user.yml @@ -0,0 +1,22 @@ +############################################################################### +# User's configuration file for the ESMValTool with mip_convert +# For further details see the README document; the sections below are +# mandatory and should be populated with valid entries. +# Author: V. Predoi / UREAD / November 2018 +############################################################################### +--- +# root of the directory where mip_convert rose suites will be run +# make this different from your usual /roses/ dir +ROSES_ROOT: "/home/users/$USER/roses_mipconv" +# root of the directory where mip_convert rose suites will write output +ROSES_OUTPUT: "/home/users/$USER/roses_mipconv_output" +# map dataset name to relevant UM suite +DATASET_TO_SUITE: {"UKESM1-0-LL": "u-ar766a"} +# map variable standard name to stream definition +STREAM_MAP: {"ps": "ap4", "ta": "ap4", "va": "ap4", "ua": "ap5", "mrsos": "ap5", "toz": "apm"} +# root directory where PP data lives +# this directory follows the JASMIN/ARCHER structure; the one given here +# is an actual directory with data +INPUT_DIR: "/group_workspaces/jasmin4/ncas_cms/valeriu/MASS_DATA" +# map streams to realm components +STREAM_COMPONENTS: {"ap4": ["atmos-physics", "land"], "apm": ["atmos-physics"], "ap5": ["land"]} diff --git a/esmvaltool/utils/cmorizers/mip_convert/esmvt_mipconv_setup.py b/esmvaltool/utils/cmorizers/mip_convert/esmvt_mipconv_setup.py new file mode 100644 index 0000000000..bf699f23ee --- /dev/null +++ b/esmvaltool/utils/cmorizers/mip_convert/esmvt_mipconv_setup.py @@ -0,0 +1,544 @@ +""" +Run the first communication between esmvaltool's recipe and mip_convert. + +Description: +------------ + +This script sets up the correct rose suite directories to run mip_convert +on different UM suite data.
You can run this tool in three different ways: + - (with -m --mode option) setup-only: will set up the mip_convert rose + directories only; it will use the -c configuration file for user options; + - (with -m --mode option) setup-run-suites: will set up the mip_convert rose + suites and will go ahead and submit them to cylc via rose suite-run; + - (with -m --mode option) postproc: will symlink newly created netCDF data + into a directory per esmvaltool recipe; note that for now, there is no + DRS-like path set up in that directory; + +Usage: +------ +-c --config-file: [REQUIRED] user specific configuration file; +-r --recipe-file: [REQUIRED] single or multiple (space-sep) recipe files; +-m --mode: [OPTIONAL] running mode (setup-only, setup-run-suites, + postproc), default=setup-only +-l --log-level: [OPTIONAL] log level, default=info + +Environment +----------- +The current JASMIN rose/cylc tools need python2.7; esmvaltool needs python3.x. +So it is impossible at the moment to run this script as an executable from an +esmvaltool environment. Instead, you can run it as a stand-alone tool in a +python 2.7 environment, in two stages: + +[set up mip_convert suites and run them] +python esmvt_mipconv_setup.py -c config.yml -r recipe.yml -m setup-run-suites +[check successful completion of mip_convert suites] +[run the symlinking] +python esmvt_mipconv_setup.py -c config.yml -r recipe.yml -m postproc + +A practical example of running the tool can be found on JASMIN: +/home/users/valeriu/esmvaltool_mip_convert +There you will find the two component shells: run_conversion +and run_symlink, as well as an example of how to set up the configuration file. + +The suite used is now on MOSRS (as of 3 December 2018): u-bd681 +You can use the default location on JASMIN: +DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-bd681" +Alternatively this can be turned off, should you want to check the suite +out from MOSRS and use it locally.
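+ +For orientation, here is a minimal sketch of the user configuration file +passed via -c (the keys are those of the included config-mipconv-user.yml; +the suite name and paths below are illustrative placeholders only): + + ROSES_ROOT: "/home/users/$USER/roses_mipconv" + ROSES_OUTPUT: "/home/users/$USER/roses_mipconv_output" + DATASET_TO_SUITE: {"UKESM1-0-LL": "u-ar766a"} + STREAM_MAP: {"ps": "ap4", "ua": "ap5"} + STREAM_COMPONENTS: {"ap4": ["atmos-physics", "land"], "ap5": ["land"]}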
+ +Contact: +-------- +author: Valeriu Predoi (UREAD, valeriu.predoi@ncas.ac.uk) +""" +import argparse +import datetime +import logging +import os +import sys +import shutil +import subprocess +import socket +from distutils.version import LooseVersion +# configparser has changed names in python 3.x +if LooseVersion(sys.version) < LooseVersion("3.0"): + import ConfigParser +else: + import configparser as ConfigParser +import yaml # noqa + +#################### +# global variables # +#################### + +# the tool uses a specially tailored mip_convert Rose suite +# the location of the suite depends on the host +host_name = socket.gethostname().split('.') +if len(host_name) > 1: + if host_name[1] == 'ceda': + # default location for mip_convert suite on JASMIN: + # previous suite: u-ak283_esmvt; new one u-bd681 + # DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-ak283_esmvt" + DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-bd681" + # note that you can check it out straight from MOSRS with fcm + +# stream mapping; taken from hadsdk.streams +# these are used to set defaults if no overrides are given +STREAM_MAP = { + 'CMIP5': { + '3hr': 'apk', + '6hrPlev': 'apc', + '6hrlev': 'apg', + 'Amon': 'apm', + 'Lmon': 'apm', + 'LImon': 'apm', + 'Oday': 'opa', + 'Omon': 'opm', + 'Oyr': 'opy', + 'CF3hr': 'apk', + 'CFday': 'apa', + 'CFmon': 'apm', + 'CFsubhr': 'ape', + 'day': 'apa' + }, + 'CMIP6': { + '3hr': 'ap8', + '6hrLev': 'ap7', + '6hrPlev': 'ap7', + '6hrPlevPt': 'ap7', + 'AERday': 'ap6', + 'AERhr': 'ap9', + 'AERmon': 'ap4', + 'AERmonZ': 'ap4', + 'Amon': 'ap5', + 'CF3hr': 'ap8', + 'CFday': 'ap6', + 'CFmon': 'ap5', + 'E1hr': 'ap9', + 'E1hrClimMon': 'ap9', + 'E3hr': 'ap8', + 'E3hrPt': 'ap8', + 'E6hrZ': 'ap7', + 'Eday': 'ap6', + 'EdayZ': 'ap6', + 'Efx': 'ancil', + 'Emon': 'ap5', + 'EmonZ': 'ap5', + 'Esubhr': 'ap8', + 'Eyr': 'ap5', + 'LImon': 'ap5', + 'Lmon': 'ap5', + 'Oday': 'ond', + 'Ofx': 'ancil', + 'Omon': 'onm', + 'SIday': 'ind', + 'SImon': 'inm', + 'day': 'ap6', + 'fx': 'ancil', + 'prim1hrpt': 'ap9', + 'prim3hr': 'ap8', + 'prim3hrpt': 'ap8', + 'prim6hr': 'ap7', + 'prim6hrpt': 'ap7', + 'primDay': 'ap6', + 'primMon': 'ap5', + 'primSIday': 'ap6' + } +} + +# set up logging +logger = logging.getLogger(__name__) + +# print the header +HEADER = r""" +______________________________________________________________________ + + ESMValTool + mip_convert: linking mip_convert to ESMValTool +______________________________________________________________________ + +""" + __doc__ + + +def get_args(): + """Define the command line arguments.""" + # parse command line args + parser = argparse.ArgumentParser( + description=HEADER, + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument( + '-c', + '--config-file', + default=os.path.join(os.path.dirname(__file__), 'config-user.yml'), + help='Configuration file') + parser.add_argument( + '-r', + '--recipe-files', + type=str, + nargs='+', + help='Recipe files (list or single file)') + parser.add_argument( + '-m', + '--mode', + default='setup-only', + choices=['setup-only', 'setup-run-suites', 'postproc'], + help='How to run: setup-only: sets up the mip_convert suites only;\n' + + 'or setup-run-suites: sets up the suites and runs them as well;\n' + + 'or postproc: grabs the output from mip_convert and uses it.') + parser.add_argument( + '-l', + '--log-level', + default='info', + choices=['debug', 'info', 'warning', 'error']) + args = parser.parse_args() + return args + + +def _set_logger(logging, out_dir, log_file, log_level): + # set logging for screen and file
output + root_logger = logging.getLogger() + out_fmt = "%(asctime)s %(levelname)-8s %(name)s,%(lineno)s\t%(message)s" + logging.basicConfig( + filename=os.path.join(out_dir, log_file), + filemode='a', + format=out_fmt, + datefmt='%H:%M:%S', + level=logging.DEBUG) + root_logger.setLevel(log_level.upper()) + logfmt = logging.Formatter(out_fmt) + console_handler = logging.StreamHandler() + console_handler.setFormatter(logfmt) + root_logger.addHandler(console_handler) + + +def read_yaml_file(yaml_file): + """Read a yaml file into a dictionary.""" + with open(yaml_file, 'r') as yfile: + loaded_file = yaml.safe_load(yfile) + return loaded_file + + +def map_var_to_stream(diagnostics, stream_map): + """Map variable standard name to stream string.""" + stream_list = [] + for _, diag in diagnostics.items(): + for var in diag['variables']: + stream = stream_map[var] + stream_list.append(stream) + stream_list = list(set(stream_list)) + return stream_list + + +def write_rose_conf(rose_config_template, recipe_file, config_file, log_level): + """Write the new rose conf file per suite.""" + # Build the ConfigParser object + Config = ConfigParser.ConfigParser() + Config.optionxform = str + Config.read(rose_config_template) + recipe_object = read_yaml_file(recipe_file) + conf_file = read_yaml_file(config_file) + datasets = recipe_object['datasets'] + + # check if dataset needs analysis + datasets_to_analyze = [] + for dataset in datasets: + if dataset['dataset'] not in conf_file['DATASET_TO_SUITE']: + logger.warning("Dataset %s has no mapping to suite", + dataset['dataset']) + logger.warning("Assuming data retrieval from elsewhere.") + else: + datasets_to_analyze.append(dataset) + diagnostics = recipe_object['diagnostics'] + active_streams = map_var_to_stream(diagnostics, conf_file['STREAM_MAP']) + + # set stream overrides to None and set components + # also set CYCLING_FREQUENCIES to P1Y overall + stream_overrides = {} + stream_components = {} + cycling_frequencies = {} + for stream in active_streams: + stream_overrides[stream] = 'None' + stream_components[stream] = conf_file['STREAM_COMPONENTS'][stream] + cycling_frequencies[stream] = 'P1Y' + + # set the logger to start outputting + if not os.path.exists(conf_file['ROSES_OUTPUT']): + os.makedirs(conf_file['ROSES_OUTPUT']) + _set_logger(logging, conf_file['ROSES_OUTPUT'], 'rose_suites_setup.log', + log_level) + logger.info(HEADER) + + # store the rose suite locations + rose_suite_locations = [] + + # loop through datasets (different suites for different datasets) + for dataset in datasets_to_analyze: + + # set correct paths + rose_suite = os.path.join( + conf_file['ROSES_ROOT'], + conf_file['DATASET_TO_SUITE'][dataset['dataset']]) + rose_suite_locations.append(rose_suite) + rose_output = os.path.join( + conf_file['ROSES_OUTPUT'], + conf_file['DATASET_TO_SUITE'][dataset['dataset']]) + if os.path.exists(rose_suite): + shutil.rmtree(rose_suite) + if os.path.exists(DEFAULT_SUITE_LOCATION): + shutil.copytree(DEFAULT_SUITE_LOCATION, rose_suite) + else: + logger.error("Default Suite Location not found: %s", + DEFAULT_SUITE_LOCATION) + break + if not os.path.exists(rose_output): + os.makedirs(rose_output) + new_mipconv_config = os.path.join(rose_suite, 'mip_convert_config') + + # start logging + logger.info("Working on dataset: %s", dataset) + logger.info("Mapping dataset to suite: %s", rose_suite) + logger.info("Output and logs written to: %s", rose_output) + logger.info("Creating rose suite directories...") + logger.info("Use rose-suite.conf template %s",
rose_config_template) + logger.info("Use user config file %s", config_file) + + # write the file + Config.set('jinja2:suite.rc', 'INPUT_DIR', + '"' + conf_file['INPUT_DIR'] + '"') + Config.set('jinja2:suite.rc', 'OUTPUT_DIR', '"' + rose_output + '"') + Config.set('jinja2:suite.rc', 'CDDS_DIR', + '"' + DEFAULT_SUITE_LOCATION + '"') + Config.set('jinja2:suite.rc', 'MIP_CONVERT_CONFIG_DIR', + '"' + new_mipconv_config + '"') + Config.set('jinja2:suite.rc', 'ACTIVE_STREAMS', str(active_streams)) + Config.set('jinja2:suite.rc', 'STREAM_TIME_OVERRIDES', + str(stream_overrides)) + Config.set('jinja2:suite.rc', 'FIRST_YEAR', str(dataset['start_year'])) + Config.set('jinja2:suite.rc', 'REF_YEAR', str(dataset['start_year'])) + Config.set('jinja2:suite.rc', 'FINAL_YEAR', str(dataset['end_year'])) + Config.set('jinja2:suite.rc', 'STREAM_COMPONENTS', + str(stream_components)) + Config.set('jinja2:suite.rc', 'CYCLING_FREQUENCIES', + str(cycling_frequencies)) + Config.set( + 'jinja2:suite.rc', 'TARGET_SUITE_NAME', + '"' + conf_file['DATASET_TO_SUITE'][dataset['dataset']] + '"') + with open(os.path.join(rose_suite, 'rose-suite.conf'), 'w') as r_c: + logger.info("Writing rose-suite.conf file %s", + os.path.join(rose_suite, 'rose-suite.conf')) + Config.write(r_c) + + # now that we have the conf file set up, we need to + # edit the mip_convert configuration file with the correct data + for key, values in conf_file['STREAM_COMPONENTS'].items(): + for comp in values: + mipconv_config = os.path.join(new_mipconv_config, + 'mip_convert.cfg.' + comp) + _edit_mip_convert_config(mipconv_config, conf_file, dataset, + key) + + return rose_suite_locations + + +def _edit_mip_convert_config(mipconv_config, conf_file, dataset, stream): + """Edit the mip_convert file for correct runs.""" + # set the correct variables + base_date = str(dataset['start_year']) + '-01-01-00-00-00' + suite_id = conf_file['DATASET_TO_SUITE'][dataset['dataset']] + cdds_dir = os.path.join(DEFAULT_SUITE_LOCATION, 'mip_convert_aux') + + # Build the ConfigParser object + Config = ConfigParser.ConfigParser() + Config.optionxform = str + Config.read(mipconv_config) + + # set the correct fields + Config.set('COMMON', 'cdds_dir', cdds_dir) + Config.set('request', 'base_date', base_date) + Config.set('request', 'suite_id', suite_id) + stream_section = '_'.join(['stream', stream]) + # add the section if not there already + if not Config.has_section(stream_section): + Config.add_section(stream_section) + if 'mip' not in dataset: + # can work without any mip in dataset + # will not take it from diagnostic (will assemble + # all possible mappings instead) + logger.warning("No mip in the recipe dataset section.") + logger.warning("Assigning mapping from default dictionary.") + stream_map_default = STREAM_MAP[dataset['project']] + variables = [] + cmip_types = [] + for key, val in conf_file['STREAM_MAP'].items(): + for key_def, val_def in stream_map_default.items(): + if val == val_def: + cmip_types.append('_'.join([dataset['project'], key_def])) + variables.append(key) + str_variables = ' '.join(list(set([v for v in variables]))) + if variables: + for cmip_type in cmip_types: + Config.set(stream_section, cmip_type, str_variables) + else: + cmip_type = '_'.join([dataset['project'], dataset['mip']]) + all_vars = conf_file['STREAM_MAP'].keys() + str_variables = ' '.join( + [v for v in all_vars if conf_file['STREAM_MAP'][v] == stream]) + Config.set(stream_section, cmip_type, str_variables) + + # write to file + with open(mipconv_config, 'w') as r_c: +
logger.info("Writing mip_convert config file %s", mipconv_config) + Config.write(r_c) + + +def _put_in_env(env_script): + """Put new system vars in environment.""" + logger.info("Setting environment for suite submission...") + + # First make it executable. + chmod_command = ["chmod", "+x", env_script] + proc = subprocess.Popen(chmod_command, stdout=subprocess.PIPE) + proc.communicate() + logger.info("Script %s is now executable.", env_script) + + # set the environment + for line in open(env_script, 'r'): + if line.split("=")[0] == 'export PATH': + logger.info("Appending %s to path...", + line.split("=")[1].strip("\n")) + add_path = line.split("=")[1].strip("\n").strip(":$PATH") + os.environ["PATH"] += os.pathsep + add_path + elif line.split("=")[0] == 'export PYTHONPATH': + logger.info("Exporting %s as PYTHONPATH...", + line.split("=")[1].strip("\n")) + os.environ["PYTHONPATH"] = line.split("=")[1].strip("\n") + + # print and check + logger.info("New path: %s", str(os.environ["PATH"])) + logger.info("mip_convert PYTHONPATH: %s", str(os.environ["PYTHONPATH"])) + proc = subprocess.Popen(["which", "rose"], stdout=subprocess.PIPE) + out, err = proc.communicate() + logger.info("rose: %s %s", out, err) + proc = subprocess.Popen(["which", "mip_convert"], stdout=subprocess.PIPE) + out, err = proc.communicate() + logger.info("mip_convert: %s %s", out, err) + + +def _source_envs(suite): + """Source relevant environments.""" + # source the Met Office rose/cylc environment + # and the suite specific environment + suite_env = os.path.join(suite, 'env_setup_command_line.sh') # suite env + env_file_mo = os.path.join(suite, 'sourcepaths.sh') # metomi env + _put_in_env(suite_env) + _put_in_env(env_file_mo) + + +def _run_suite(suite): + """Run the mip_convert suite.""" + os.chdir(suite) + logger.info("Submitting suite from %s", suite) + proc = subprocess.Popen(["rose", "suite-run"], stdout=subprocess.PIPE) + out, err = proc.communicate() + logger.info("Rose communications: %s %s", str(out), str(err)) + + +def symlink_data(recipe_file, config_file, log_level): + """Grab the mip_converted output and manage it for ESMValTool.""" + # get configuration and recipe + recipe_object = read_yaml_file(recipe_file) + conf_file = read_yaml_file(config_file) + datasets = recipe_object['datasets'] + + # create directory that stores all the output netCDF files + now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S") + new_subdir = '_'.join((recipe_file.strip('.yml'), now)) + sym_output_dir = os.path.join(conf_file['ROSES_OUTPUT'], + 'mip_convert_symlinks', new_subdir) + if not os.path.exists(sym_output_dir): + os.makedirs(sym_output_dir) + + # set the logger to start outputting + _set_logger(logging, conf_file['ROSES_OUTPUT'], 'file_simlink.log', + log_level) + logger.info(HEADER) + + # loop through all datasets to symlink output + for dataset in datasets: + rose_output = os.path.join( + conf_file['ROSES_OUTPUT'], + conf_file['DATASET_TO_SUITE'][dataset['dataset']]) + logger.info("Working on dataset: %s", dataset) + logger.info("Output and logs written to: %s", rose_output) + + # create the dataset dir + dataset_output = os.path.join(sym_output_dir, dataset['dataset']) + if os.path.exists(dataset_output): + shutil.rmtree(dataset_output) + os.makedirs(dataset_output) + + # loop through files + for root, _, files in os.walk(rose_output): + for xfile in files: + real_file = os.path.join(root, xfile) + imag_file = os.path.join(dataset_output, xfile) + + # symlink it if nc file + if real_file.endswith('.nc') and \ + 
xfile.split('_')[2] == dataset['dataset']: + if not os.path.islink(imag_file): + logger.info("File to symlink: %s", real_file) + logger.info("Symlinked file: %s", imag_file) + os.symlink(real_file, imag_file) + else: + logger.info("Symlinked file exists...") + logger.info("Original file: %s", real_file) + logger.info("Symlinked file: %s", imag_file) + + +def main(): + """Run the meat of the code.""" + logger.info("Running main function...") + args = get_args() + rose_config_template = os.path.join( + os.path.dirname(__file__), "rose-suite-template.conf") + + # make sure the file is retrieved nonetheless + if not os.path.isfile(rose_config_template): + logger.info("Fetching rose template config from suite %s", + DEFAULT_SUITE_LOCATION) + rose_config_template = os.path.join(DEFAULT_SUITE_LOCATION, + "rose-suite-template.conf") + + recipe_files = args.recipe_files + config_file = args.config_file + log_level = args.log_level + for recipe_file in recipe_files: + if args.mode == 'setup-only': + # set up the rose suites + write_rose_conf(rose_config_template, recipe_file, config_file, + log_level) + elif args.mode == 'setup-run-suites': + # setup roses + roses = write_rose_conf(rose_config_template, recipe_file, + config_file, log_level) + # set up the environment and submit + for rose in roses: + _source_envs(rose) + _run_suite(rose) + elif args.mode == 'postproc': + symlink_data(recipe_file, config_file, log_level) + + +if __name__ == '__main__': + main() diff --git a/esmvaltool/utils/cmorizers/mip_convert/recipe_mip_convert.yml b/esmvaltool/utils/cmorizers/mip_convert/recipe_mip_convert.yml new file mode 100644 index 0000000000..8d5168a975 --- /dev/null +++ b/esmvaltool/utils/cmorizers/mip_convert/recipe_mip_convert.yml @@ -0,0 +1,51 @@ +#### summary +# Example of ESMValTool recipe that can be used with the mip_convert capability +# Data for this recipe exists in pp format on JASMIN, ready for mip_convert-ion +# The recipe is no different from any typical ESMValTool recipe, but can be used +# for a test run of the mip_convert capability; see the README document and the included +# config-mipconv-user.yml configuration file. +# Author: V.
Predoi (Uni Reading, valeriu.predoi@ncas.ac.uk) +# Date: first draft/November 2018 +########################################################################################################### +--- + +datasets: + - {dataset: UKESM1-0-LL, project: CMIP6, mip: Amon, exp: piControl-spinup, ensemble: r1i1p1f1_gn, start_year: 1850, end_year: 1860} + +preprocessors: + pp_rad: + regrid: + target_grid: 1x1 + scheme: linear + +diagnostics: + validation_mip_convert: + description: "Test with mip convert" + variables: + # mapping of standard_name to stream for CMIP6 + # see the associated config file for input + # "ps": "ap4", "ta": "ap4", "va": "ap4", "ua": "ap5", "mrsos": "ap5", "toz":"apm" + ps: + preprocessor: pp_rad + field: T2Ms + ta: + preprocessor: pp_rad + field: T2Ms + va: + preprocessor: pp_rad + field: T2Ms + ua: + preprocessor: pp_rad + field: T2Ms + toz: + preprocessor: pp_rad + field: T2Ms + scripts: + meridional_mean: + script: validation.py + title: "" + control_model: UKESM1-0-LL + exper_model: UKESM1-0-LL + analysis_type: meridional_mean + seasonal_analysis: True + diff --git a/esmvaltool/utils/cmorizers/mip_convert/rose-suite-template.conf b/esmvaltool/utils/cmorizers/mip_convert/rose-suite-template.conf new file mode 100644 index 0000000000..5562333fed --- /dev/null +++ b/esmvaltool/utils/cmorizers/mip_convert/rose-suite-template.conf @@ -0,0 +1,20 @@ +[jinja2:suite.rc] +ACTIVE_STREAMS = +CONCATENATE = "FALSE" +CYCLING_FREQUENCIES = +DUMMY_RUN = "FALSE" +FINAL_YEAR = +FIRST_YEAR = +REF_YEAR = +INPUT_DIR = +LOCATION = "LOTUS" +MEMORY = "70000" +MIP_CONVERT_CONFIG_DIR = +OUTPUT_DIR = +PARALLEL_TASKS = "20" +NTHREADS_CONCATENATE = "6" +CDDS_DIR = +STREAM_COMPONENTS = +STREAM_TIME_OVERRIDES = +TARGET_SUITE_NAME = +WALL_TIME = "6:00:00" diff --git a/esmvaltool/utils/cmorizers/obs/__init__.py b/esmvaltool/utils/cmorizers/obs/__init__.py new file mode 100644 index 0000000000..6449c7e4ac --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/__init__.py @@ -0,0 +1 @@ +"""Cmorizer modules for Python.""" diff --git a/esmvaltool/utils/cmorizers/obs/cmor_config/CRU.yml b/esmvaltool/utils/cmorizers/obs/cmor_config/CRU.yml new file mode 100644 index 0000000000..1bfb114300 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmor_config/CRU.yml @@ -0,0 +1,23 @@ +--- +# Filename (will be extended by variable name) +filename: 'cru_ts4.02.1901.2017.{raw_name}.dat.nc.gz' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: CRU + version: 'TS4.02' + tier: 2 + modeling_realm: reanaly + project_id: OBS + source: 'https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.02/cruts.1811131722.v4.02/' + reference: 'cru' + comment: '' + +# Variables to cmorize +variables: + tas: + mip: Amon + raw: tmp + pr: + mip: Amon + raw: pre diff --git a/esmvaltool/utils/cmorizers/obs/cmor_config/ESACCI-OC.yml b/esmvaltool/utils/cmorizers/obs/cmor_config/ESACCI-OC.yml new file mode 100644 index 0000000000..770330f165 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmor_config/ESACCI-OC.yml @@ -0,0 +1,23 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: ESACCI-OC + version: 'fv3.1' + tier: 2 + modeling_realm: sat + project_id: OBS + source: 'ftp://oc-cci-data:ELaiWai8ae@oceancolour.org/occci-v3.1/geographic/netcdf/monthly/chlor_a/' + reference: "esacci-oc" + comment: "" + +# Variables to cmorize (here use only filename prefix) +variables: + chl: + mip: Omon + raw: chlor_a + file: ESACCI-OC-L3S-CHLOR_A-MERGED-1M_MONTHLY_4km_GEO_PML_OCx + +# Custom dictionary for this cmorizer 
+custom: + # Rebin original data (4km) averaging at lower resolution (multiple of 2 accepted) + bin_size: 6 diff --git a/esmvaltool/utils/cmorizers/obs/cmor_config/LandFlux-EVAL.yml b/esmvaltool/utils/cmorizers/obs/cmor_config/LandFlux-EVAL.yml new file mode 100644 index 0000000000..35bcc2301d --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmor_config/LandFlux-EVAL.yml @@ -0,0 +1,23 @@ +--- +# Filename +filename: 'LandFluxEVAL.merged.89-05.monthly.all.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: LandFlux-EVAL + version: 'Oct13' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'https://data.iac.ethz.ch/landflux/' + reference: 'landflux-eval' + comment: '' + +# Variables to cmorize +variables: + et: + mip: Lmon + raw: ET_mean + etStderr: + mip: Lmon + raw: ET_sd diff --git a/esmvaltool/utils/cmorizers/obs/cmor_config/Landschuetzer2016.yml b/esmvaltool/utils/cmorizers/obs/cmor_config/Landschuetzer2016.yml new file mode 100644 index 0000000000..c703455ae7 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmor_config/Landschuetzer2016.yml @@ -0,0 +1,28 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: Landschuetzer2016 + version: 'v2016' + tier: 2 + modeling_realm: clim + project_id: CMIP5 + source: 'https://www.nodc.noaa.gov/archive/arc0105/0160558/3.3/data/0-data/' + reference: 'landschuetzer2016' + comment: '' + +# Variables to cmorize +variables: + fgco2: + mip: Omon + # Match CMOR variables with input file one + raw: fgco2_smoothed + # input file name + file: spco2_1982-2015_MPI_SOM-FFN_v2016.nc + spco2: + mip: Omon + raw: spco2_smoothed + file: spco2_1982-2015_MPI_SOM-FFN_v2016.nc + dpco2: + mip: Omon + raw: dco2_smoothed + file: spco2_1982-2015_MPI_SOM-FFN_v2016.nc diff --git a/esmvaltool/utils/cmorizers/obs/cmor_config/MTE.yml b/esmvaltool/utils/cmorizers/obs/cmor_config/MTE.yml new file mode 100644 index 0000000000..144f3b1f19 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmor_config/MTE.yml @@ -0,0 +1,22 @@ +--- +# Filename (will be extended by timestamp to match downloaded file) +filename: 'EnsembleGPP_GL.nc' + +# Common global attributes for Cmorizer output +attributes: + dataset_id: MTE + version: 'May12' + tier: 3 + modeling_realm: reanaly + project_id: OBS + source: 'http://www.bgc-jena.mpg.de/geodb/BGI/Home' + reference: 'mte' + comment: '' + +# Variables to cmorize +variables: + gpp: + mip: Lmon + gppStderr: + mip: Lmon + raw: std diff --git a/esmvaltool/utils/cmorizers/obs/cmor_config/WOA.yml b/esmvaltool/utils/cmorizers/obs/cmor_config/WOA.yml new file mode 100644 index 0000000000..f0d277e45a --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmor_config/WOA.yml @@ -0,0 +1,43 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: WOA + version: '2013v2' + tier: 2 + modeling_realm: clim + project_id: CMIP5 + source: 'https://data.nodc.noaa.gov/woa/WOA13/DATAv2/' + reference: 'woa' + comment: '' + +# Variables to cmorize (here use only filename prefix) +variables: + thetao: + mip: Omon + raw: t_an + file: woa13_decav81B0_t + so: + mip: Omon + raw: s_an + file: woa13_decav81B0_s + o2: + mip: Oyr + raw: o_an + file: woa13_all_o + no3: + mip: Oyr + raw: n_an + file: woa13_all_n + po4: + mip: Oyr + raw: p_an + file: woa13_all_p + si: + mip: Oyr + raw: i_an + file: woa13_all_i + +# Custom dictionary for this cmorizer +custom: + # years to be analyzed + years: [2000, ] diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs.py 
b/esmvaltool/utils/cmorizers/obs/cmorize_obs.py new file mode 100755 index 0000000000..50df18411b --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs.py @@ -0,0 +1,267 @@ +""" +Run the CMORization module as a utility executable. + +This utility allows the user to call and execute CMOR reformatting +scripts (with support for NCL and Python at the moment) that will use +two I/O variables passed by this utility: an input directory as +specified in config-user.yml by the RAWOBS key, and an output dir +created in the form of output_dir/CMOR_DATE_TIME/TierTIER/DATASET. +The user can specify a list of DATASETS that the CMOR reformatting +can be run on by using the -o (--obs-list-cmorize) command line argument. +The CMOR reformatting scripts are to be found in: +esmvaltool/utils/cmorizers/obs +""" +import argparse +import datetime +import importlib +import logging +import os +import subprocess + +from esmvaltool._config import read_config_user_file +from esmvaltool._task import write_ncl_settings + +logger = logging.getLogger(__name__) + +HEADER = r""" +______________________________________________________________________ + _____ ____ __ ____ __ _ _____ _ + | ____/ ___|| \/ \ \ / /_ _| |_ _|__ ___ | | + | _| \___ \| |\/| |\ \ / / _` | | | |/ _ \ / _ \| | + | |___ ___) | | | | \ V / (_| | | | | (_) | (_) | | + |_____|____/|_| |_| \_/ \__,_|_| |_|\___/ \___/|_| +______________________________________________________________________ + +""" + __doc__ + + +def _assemble_datasets(raw_obs, obs_list): + """Get the datasets as a dictionary keyed on Tier.""" + # check for desired datasets only (if any) + # if not, walk all over rawobs dir + # assume a RAWOBS/TierX/DATASET input structure + datasets = {} + + # get all available tiers in source dir + tiers = ['Tier{}'.format(i) for i in range(2, 4)] + tiers = [ + tier for tier in tiers if os.path.exists(os.path.join(raw_obs, tier)) + ] + + # if user specified obs list + if obs_list: + for tier in tiers: + datasets[tier] = [] + for dataset_name in obs_list.split(','): + if os.path.isdir(os.path.join(raw_obs, tier, dataset_name)): + datasets[tier].append(dataset_name) + + # otherwise go through the whole raw_obs dir + else: + for tier in tiers: + datasets[tier] = [] + for dats in os.listdir(os.path.join(raw_obs, tier)): + datasets[tier].append(dats) + + return datasets + + +def _write_ncl_settings(project_info, dataset, run_dir, reformat_script, + log_level): + """Write the information needed by the ncl reformat script.""" + settings = { + 'cmorization_script': reformat_script, + 'input_dir_path': project_info[dataset]['indir'], + 'output_dir_path': project_info[dataset]['outdir'], + 'config_user_info': { + 'log_level': log_level + }, + } + settings_filename = os.path.join(run_dir, dataset, 'settings.ncl') + if not os.path.isdir(os.path.join(run_dir, dataset)): + os.makedirs(os.path.join(run_dir, dataset)) + # write the settings file + write_ncl_settings(settings, settings_filename) + return settings_filename + + +def _run_ncl_script(in_dir, out_dir, run_dir, dataset, reformat_script, + log_level): + """Run the NCL cmorization mechanism.""" + project = {} + project[dataset] = {} + project[dataset]['indir'] = in_dir + project[dataset]['outdir'] = out_dir + settings_file = _write_ncl_settings(project, dataset, run_dir, + reformat_script, log_level) + esmvaltool_root = os.path.dirname( + os.path.dirname(os.path.dirname(os.path.dirname(reformat_script)))) + + # put settings in environment + env = dict(os.environ) + env['settings'] = settings_file +
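# the NCL side reads these back with getenv(); for instance the NCL + # cmorizer scripts load interface.ncl via getenv("esmvaltool_root") +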
env['esmvaltool_root'] = esmvaltool_root + + # call NCL + ncl_call = ['ncl', reformat_script] + logger.info("Executing cmd: %s", ' '.join(ncl_call)) + process = subprocess.Popen(ncl_call, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env) + output, err = process.communicate() + for oline in str(output.decode('utf-8')).split('\n'): + logger.info('[NCL] %s', oline) + if err: + logger.info('[NCL][subprocess.Popen ERROR] %s', err) + + +def _run_pyt_script(in_dir, out_dir, reformat_module): + """Run the Python cmorization mechanism.""" + py_cmor = importlib.import_module(reformat_module) + py_cmor.cmorization(in_dir, out_dir) + + +def execute_cmorize(): + """Run it as executable.""" + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument('-o', + '--obs-list-cmorize', + type=str, + help='List of obs datasets to cmorize. \ + If no list provided: CMORization of \ + all datasets in RAWOBS; \ + -o DATASET1,DATASET2... : \ + for CMORization of select datasets.') + parser.add_argument('-c', + '--config-file', + default=os.path.join(os.path.dirname(__file__), + 'config-user.yml'), + help='Config file') + args = parser.parse_args() + + # get and read config file + config_file = os.path.abspath( + os.path.expandvars(os.path.expanduser(args.config_file))) + + # Read user config file + if not os.path.exists(config_file): + logger.error("Config file %s does not exist", config_file) + + # read the file in + config_user = read_config_user_file(config_file, 'cmorize_obs') + + # set the run dir to hold the settings and log files + run_dir = os.path.join(config_user['output_dir'], 'run') + if not os.path.isdir(run_dir): + os.makedirs(run_dir) + + # set logging for screen and file output + root_logger = logging.getLogger() + out_fmt = "%(asctime)s %(levelname)-8s %(name)s,%(lineno)s\t%(message)s" + logging.basicConfig(filename=os.path.join(run_dir, 'main_log.txt'), + filemode='a', + format=out_fmt, + datefmt='%H:%M:%S', + level=config_user['log_level'].upper()) + root_logger.setLevel(config_user['log_level'].upper()) + logfmt = logging.Formatter(out_fmt) + console_handler = logging.StreamHandler() + console_handler.setFormatter(logfmt) + root_logger.addHandler(console_handler) + + # print header + logger.info(HEADER) + + # run + timestamp1 = datetime.datetime.utcnow() + timestamp_format = "%Y-%m-%d %H:%M:%S" + + logger.info("Starting the CMORization Tool at time: %s UTC", + timestamp1.strftime(timestamp_format)) + + logger.info(70 * "-") + logger.info("input_dir = %s", config_user["rootpath"]["RAWOBS"][0]) + # check if the inputdir actually exists + if not os.path.isdir(config_user["rootpath"]["RAWOBS"][0]): + logger.error("Directory %s does not exist", + config_user["rootpath"]["RAWOBS"][0]) + raise ValueError + logger.info("output_dir = %s", config_user["output_dir"]) + logger.info(70 * "-") + + # call the reformat function + if args.obs_list_cmorize: + obs_list = args.obs_list_cmorize + else: + obs_list = [] + _cmor_reformat(config_user, obs_list) + + # End time timing + timestamp2 = datetime.datetime.utcnow() + logger.info("Ending the CMORization Tool at time: %s UTC", + timestamp2.strftime(timestamp_format)) + logger.info("Time for running the CMORization scripts was: %s", + timestamp2 - timestamp1) + + +def _cmor_reformat(config, obs_list): + """Run the cmorization routine.""" + logger.info("Running the CMORization scripts.") + + # master directory + raw_obs = config["rootpath"]["RAWOBS"][0] + + # set the reformat scripts dir + reformat_scripts = 
os.path.dirname(os.path.abspath(__file__)) + run_dir = os.path.join(config['output_dir'], 'run') + # datasets dictionary keyed on Tier + datasets = _assemble_datasets(raw_obs, obs_list) + logger.info("Processing datasets %s", datasets) + + # loop through tier/datasets to be cmorized + for tier in datasets: + for dataset in datasets[tier]: + reformat_script_root = os.path.join(reformat_scripts, + 'cmorize_obs_' + dataset) + # in-data dir; build out-dir tree + in_data_dir = os.path.join(raw_obs, tier, dataset) + out_data_dir = os.path.join(config['output_dir'], tier, dataset) + if not os.path.isdir(out_data_dir): + os.makedirs(out_data_dir) + + # all operations are done in the working dir now + os.chdir(out_data_dir) + + # figure out what language the script is in + if os.path.isfile(reformat_script_root + '.ncl'): + reformat_script = reformat_script_root + '.ncl' + logger.info("CMORizing dataset %s using NCL script %s", + dataset, reformat_script) + + # call the ncl script + _run_ncl_script( + in_data_dir, + out_data_dir, + run_dir, + dataset, + reformat_script, + config['log_level'], + ) + elif os.path.isfile( + reformat_script_root.replace('-', '_') + '.py'): + py_reformat_script = (reformat_script_root.replace('-', '_') + + '.py') + logger.info("CMORizing dataset %s using Python script %s", + dataset, py_reformat_script) + module_root = 'esmvaltool.utils.cmorizers.obs.cmorize_obs_' + _run_pyt_script(in_data_dir, out_data_dir, + module_root + dataset.replace('-', '_')) + else: + logger.info('Could not find cmorizer for %s', dataset) + + +if __name__ == '__main__': + execute_cmorize() diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_AURA-TES.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_AURA-TES.ncl new file mode 100644 index 0000000000..552e48cf40 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_AURA-TES.ncl @@ -0,0 +1,178 @@ +; ############################################################################# +; ESMValTool CMORizer for AURA-TES data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://search.earthdata.nasa.gov/search?q=TL3O3M +; +; Last access +; 20181208 +; +; Download and processing instructions +; Select the V004 and V005 projects. +; Download the script file for both projects. +; To download the data use: wget -i . +; From the downloaded data, pick only the *.he5 files and put them in +; input_dir_path. +; Data is freely available, but a registration is required. +; +; Modification history +; 20190108-A_righ_ma: adapted to v2. +; 20140129-A_righ_ma: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_AURA-TES.ncl" + + ; Source name + OBSNAME = "AURA-TES" + + ; Tier + TIER = 3 + + ; Gridtype + GRIDTYPE = "Nadir" + + ; Period + YEAR1 = 2005 + YEAR2 = 2011 + + ; Selected variable + VAR = "tro3" + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_" + MIP + + ; Type + TYPE = "sat" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "https://search.earthdata.nasa.gov/search?q=TL3O3M" + REF = "Beer, R., IEEE Trans. Geosci. Rem.
Sens., " + \ + "doi:10.1109/TGRS.2005.863716, 2006" + COMMENT = "" + +end + +begin + + ; Create time coordinate + timec = create_timec(YEAR1, YEAR2) + datec = ut_calendar(timec, 0) + + ; Loop over time + do tt = 0, dimsizes(timec) - 1 + + yy = toint(datec(tt, 0)) + mm = toint(datec(tt, 1)) + + log_info("Processing date " + yy + sprinti("%0.2i", mm)) + + ; Find files + fname = input_dir_path + "TES-Aura_L3-O3-M" + \ + yy + "m" + sprinti("%0.2i", mm) + "_F01_10.he5" + + if (.not.isfilepresent(fname)) then ; alternative name + fname = input_dir_path + "TES-Aura_L3-O3-M" + \ + yy + "m" + sprinti("%0.2i", mm) + "_C01_F01_10.he5" + end if + + if (.not.isfilepresent(fname)) then + log_info("File missing, skipping this date") + continue + end if + + ; Read variable + f = addfile(fname, "r") + str = "O3_" + GRIDTYPE + "Grid" + var = f->$str$ + + ; Reorder + d1 = "XDim_" + GRIDTYPE + "Grid" + d2 = "YDim_" + GRIDTYPE + "Grid" + d3 = "nLevels_" + GRIDTYPE + "Grid" + tmp = var($d1$|:, $d2$|:, $d3$|:) + delete(var) + var = tmp + delete(tmp) + + ; Read vertical coordinate + str = "Pressure_" + GRIDTYPE + "Grid" + press = f->$str$ + var!2 = "plev" + var&plev = todouble(press) + + ; Create array + if (.not.isdefined("outvar")) then + outdim = array_append_record(dimsizes(timec), dimsizes(var), 0) + outvar = new(outdim, float) + outvar@_FillValue = var@_FillValue + end if + + outvar(tt, :, :, :) = var + delete(var) + + end do + + ; Reorder + outvar!0 = "time" + outvar!1 = "lon" + outvar!2 = "lat" + outvar!3 = "plev" + output = outvar(time|:, plev|:, lat|:, lon|:) + output@_FillValue = FILL + delete(outvar) + + ; Assign coordinates + output&time = timec + output&plev = output&plev * 100. ; [hPa] --> [Pa] + output&plev@units = "Pa" + + ; Convert units [mol/mol] --> [1e9] + output = output * 1.e9 + output@units = "1e-9" + + ; Format coordinates + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR, CMOR_TABLE) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR, output, bounds, gAtt) + delete(gAtt) + delete(output) + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-SATELLITE-SOIL-MOISTURE.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-SATELLITE-SOIL-MOISTURE.ncl new file mode 100644 index 0000000000..084ad39244 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-SATELLITE-SOIL-MOISTURE.ncl @@ -0,0 +1,132 @@ +; ############################################################################# +; ESMValTool CMORizer for CDS-SATELLITE-SOIL-MOISTURE data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. 
+; +; Source +; https://cds.climate.copernicus.eu/cdsapp#!/dataset/ +; satellite-soil-moisture?tab=form +; +; Last access +; 20190314 +; +; Download and processing instructions +; Download the data from source as follows: +; Variable: Volumetric surface soil moisture +; Time aggregation: Day average +; All Years, Month and Days +; Format: 'tar.gz' (recommended) +; Type of sensor: Combined passive and active +; Type of record: CDR +; Version: v201812.0.0 +; Decompress the files and put them in a single directory (no subdirectories +; with years) +; +; Modification history +; 20190314-A_crez_ba: adapted from file cmorize_obs_ESACCI-SOILMOISTURE.ncl +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_CDS-SATELLITE-SOIL-MOISTURE.ncl" + + ; Source name + OBSNAME = "CDS-SATELLITE-SOIL-MOISTURE" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 1979 + YEAR2 = 2018 + + ; Selected variable (standard name) + VAR = (/"sm", "smStderr"/) + + ; Name in the raw data + NAME = (/"sm", "sm_uncertainty"/) + + ; MIP + MIP = (/"day", "day"/) + + ; Frequency + FREQ = (/"day", "day"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/custom/CMOR_" + VAR + ".dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "COMBINED-TCDR-v201812.0.0" + + ; Global attributes + SOURCE = "https://cds.climate.copernicus.eu/cdsapp#!/" + \ + "dataset/satellite-soil-moisture" + REF = "" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yy = YEAR1, YEAR2 + + ; Set list of files + files = systemfunc("ls " + input_dir_path + \ + "C3S-SOILMOISTURE-L3S-SSMV-" + \ + "COMBINED-DAILY-" + yy + "????000000-" + \ + "TCDR-v201812.0.0.nc") + f = addfiles(files, "r") + delete(files) + + ; Read data + output = f[:]->$NAME(vv)$ + delete(f) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, yy + "0101", yy + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yy + "01-" + yy + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCH4.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCH4.ncl new file mode 100644 index 0000000000..a402a09300 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCH4.ncl @@ -0,0 +1,115 @@ +; ############################################################################# +; ESMValTool CMORizer for CDS-XCH4 data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. 
+; +; Source +; https://cds.climate.copernicus.eu/cdsapp#!/dataset/ +; satellite-methane?tab=form +; +; Last access +; 20190311 +; +; Download and processing instructions +; Select Processing level "Level 3", variable "Column-average dry-air mole +; fraction of atmospheric methane (XCH4) and related variables", Sensor and +; algorithm "MERGED and OBS4MIPS". +; A registration is required to download the data. +; +; Modification history +; 20190311-A_hass_bg: written. +; +; ############################################################################ +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_CDS-XCH4.ncl" + + ; Source name + OBSNAME = "CDS-XCH4" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 2003 + YEAR2 = 2016 + + ; Selected variable (standard name) + VAR = (/"xch4"/) + + ; Name in the raw data + NAME = (/"xch4"/) + + ; MIP + MIP = (/"Amon"/) + + ; Frequency + FREQ = (/"mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/custom/CMOR_xch4.dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L3" + + ; Global attributes + SOURCE = "https://cds.climate.copernicus.eu/cdsapp#!/dataset/" + \ + "satellite-methane?tab=form" + REF = "" + COMMENT = "" + +end + +begin + + ; Loop over variables + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + ; Read variables + fname = input_dir_path + "OBS_" + OBSNAME + "_sat_L3_" + NAME(vv) + \ + "_200301-201612.nc" + setfileoption("nc", "MissingToFillValue", False) + f = addfile(fname, "r") + output = f->xch4 + + ; Format coordinates + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCO2.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCO2.ncl new file mode 100644 index 0000000000..49f2e5e1da --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CDS-XCO2.ncl @@ -0,0 +1,115 @@ +; ############################################################################# +; ESMValTool CMORizer for CDS-XCO2 data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://cds.climate.copernicus.eu/cdsapp#!/dataset/ +; satellite-carbon-dioxide?tab=form +; +; Last access +; 20190319 +; +; Download and processing instructions +; Select Processing level "Level 3", variable "Column-average dry-air mole +; fraction of atmospheric carbon dioxide (XCO2) and related variables", +; Sensor and algorithm "MERGED and OBS4MIPS". +; A registration is required to download the data. +; +; Modification history +; 20190319-A_hass_bg: written. 
+; +; ############################################################################ +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_CDS-XCO2.ncl" + + ; Source name + OBSNAME = "CDS-XCO2" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 2003 + YEAR2 = 2016 + + ; Selected variable (standard name) + VAR = (/"xco2"/) + + ; Name in the raw data + NAME = (/"xco2"/) + + ; MIP + MIP = (/"Amon"/) + + ; Frequency + FREQ = (/"mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/custom/CMOR_xco2.dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L3" + + ; Global attributes + SOURCE = "https://cds.climate.copernicus.eu/cdsapp#!/dataset/" + \ + "satellite-carbon-dioxide?tab=form" + REF = "" + COMMENT = "" + +end + +begin + + ; Loop over variables + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + ; Read variables + fname = input_dir_path + "OBS_" + OBSNAME + "_sat_L3_" + NAME(vv) + \ + "_200301-201612.nc" + setfileoption("nc", "MissingToFillValue", False) + f = addfile(fname, "r") + output = f->xco2 + + ; Format coordinates + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CERES-SYN1deg.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CERES-SYN1deg.ncl new file mode 100644 index 0000000000..f10e8e1df1 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CERES-SYN1deg.ncl @@ -0,0 +1,195 @@ +; ############################################################################# +; ESMValTool CMORizer for CERES-SYN1deg data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://ceres-tool.larc.nasa.gov/ord-tool/jsp/SYN1degSelection.jsp +; +; Last access +; 20190207 +; +; Download and processing instructions +; Monthly data: +; Expand "Compute TOA Fluxes" and select: +; Shortwave Flux, Allsky and Clearsky +; Longwave Flux, Allsky and Clearsky +; Shortwave Flux Down, Allsky +; Expand "Computed Surface Fluxes" and select: +; Shortwave Flux Up, Allsky and Clearsky +; Shortwave Flux Down, Allsky and Clearsky +; Longwave Flux Up, Allsky and Clearsky +; Longwave Flux Down, Allsky and Clearsky +; then click on "Monthly", "Regional" and "Get data". All fields are saved +; in CERES_SYN1deg-Month_Terra-Aqua-MODIS_Ed3A_Subset_200003-201702.nc +; 3hr data: +; Select the same fields as above, then click on "Daily 3-Hourly" and +; "Get data". All fields are saved in +; CERES_SYN1deg-3H_Terra-Aqua-MODIS_Ed3A_Subset_YYYYMMDD-YYYYMMDD.nc +; Put all files in input_dir_path (no subdirectories with years). +; For orders larger than 2 GB a registration is required. +; +; Modification history +; 20190124-A_righ_ma: written based on v1 different bash scripts by laue_ax +; and eval_ma. 
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_CERES-SYN1deg.ncl" + + ; Source name + OBSNAME = "CERES-SYN1deg" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 2001 + YEAR2 = 2016 + + ; CMOR name + VAR = (/"rsus", "rsds", \ + "rlus", "rlds", \ + "rldscs", "rsut", \ + "rlut", "rsutcs", \ + "rlutcs", "rsdt", \ + "rsutcs", "rsut", \ + "rlutcs", "rlut", \ + "rsuscs", "rsus", \ + "rsdscs", "rsds", \ + "rluscs", "rlus", \ + "rldscs", "rlds"/) + + ; Name in the raw data + NAME = (/"sfc_comp_sw_up_all_mon", "sfc_comp_sw_down_all_mon", \ + "sfc_comp_lw_up_all_mon", "sfc_comp_lw_down_all_mon", \ + "sfc_comp_lw_down_clr_mon", "toa_comp_sw_up_all_mon", \ + "toa_comp_lw_up_all_mon", "toa_comp_sw_up_clr_mon", \ + "toa_comp_lw_up_clr_mon", "toa_comp_sw_down_all_mon", \ + "toa_comp_sw_up_clr_3h", "toa_comp_sw_up_all_3h", \ + "toa_comp_lw_up_clr_3h", "toa_comp_lw_up_all_3h", \ + "sfc_comp_sw_up_clr_3h", "sfc_comp_sw_up_all_3h", \ + "sfc_comp_sw_down_clr_3h", "sfc_comp_sw_down_all_3h", \ + "sfc_comp_lw_up_clr_3h", "sfc_comp_lw_up_all_3h", \ + "sfc_comp_lw_down_clr_3h", "sfc_comp_lw_down_all_3h"/) + + ; MIP + MIP = (/"Amon", "Amon", \ + "Amon", "Amon", \ + "Amon", "Amon", \ + "Amon", "Amon", \ + "Amon", "Amon", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr"/) + + ; Frequency + FREQ = (/"mon", "mon", \ + "mon", "mon", \ + "mon", "mon", \ + "mon", "mon", \ + "mon", "mon", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr", \ + "3hr", "3hr"/) + + ; CMOR table: Amon version is used also for 3hr, since not all variables are + ; available in 3hr (the tables are identical anyway) + CMOR_TABLE = new(dimsizes(MIP), string) + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_Amon" + CMOR_TABLE(ind(VAR.eq."rluscs")) = getenv("esmvaltool_root") + \ + "/cmor/tables/custom/CMOR_rluscs.dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "Ed3A" + + ; Global attributes + SOURCE = "https://ceres-tool.larc.nasa.gov/ord-tool/jsp/SYN1degSelection.jsp" + REF = "Wielicki et al., Bull. Amer. Meteor. 
Soc., " + \ + "doi: 10.1175/1520-0477(1996)077<0853:CATERE>2.0.CO;2, 1996" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yy = YEAR1, YEAR2 + + ; Read file + if (MIP(vv).eq."Amon") then + f = addfile(input_dir_path + "CERES_SYN1deg-Month_Terra-Aqua-" + \ + "MODIS_" + VERSION + "_Subset_200003-201702.nc", "r") + xx = f->$NAME(vv)$ + delete(f) + end if + if (MIP(vv).eq."3hr") then + files = systemfunc("ls " + input_dir_path + "CERES_SYN1deg-3H_" + \ + "Terra-Aqua-MODIS_" + VERSION + "_Subset_*" + \ + yy + "*.nc") + f = addfiles(files, "r") + xx = f[:]->$NAME(vv)$ + delete(f) + delete(files) + end if + + ; Time selection + date = cd_calendar(xx&time, 0) + output = xx(ind(date(:, 0).eq.yy), :, :) + delete(date) + delete(xx) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, yy + "0101", yy + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yy + "01-" + yy + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_CRU.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CRU.py new file mode 100644 index 0000000000..ceac7e0a36 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_CRU.py @@ -0,0 +1,91 @@ +"""ESMValTool CMORizer for CRU data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://crudata.uea.ac.uk/cru/data/hrg/cru_ts_4.02/cruts.1811131722.v4.02/ + +Last access + 20190516 + +Download and processing instructions + Download the following files: + {raw_name}/cru_ts4.02.1901.2017.{raw_name}.dat.nc.gz + where {raw_name} is the name of the desired variable(s). 
+ +""" + +import gzip +import logging +import os +import shutil + +import iris + +import esmvaltool.utils.cmorizers.obs.utilities as utils + +logger = logging.getLogger(__name__) + +CFG = utils.read_cmor_config('CRU.yml') + + +def _clean(filepath): + """Remove unzipped input file.""" + if os.path.isfile(filepath): + os.remove(filepath) + logger.info("Removed cached file %s", filepath) + + +def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + cube = iris.load_cube(filepath, utils.var_name_constraint(raw_var)) + utils.fix_var_metadata(cube, cmor_info) + utils.convert_timeunits(cube, 1950) + utils.fix_coords(cube) + utils.set_global_atts(cube, attrs) + if var in ('tas', ): + utils.add_height2m(cube) + utils.save_variable(cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time']) + + +def _unzip(filepath, out_dir): + """Unzip `*.gz` file.""" + filename = os.path.basename(filepath.replace('.gz', '')) + new_path = os.path.join(out_dir, filename) + with gzip.open(filepath, 'rb') as zip_file: + with open(new_path, 'wb') as new_file: + shutil.copyfileobj(zip_file, new_file) + logger.info("Succefully extracted file to %s", new_path) + return new_path + + +def cmorization(in_dir, out_dir): + """Cmorization func call.""" + glob_attrs = CFG['attributes'] + cmor_table = CFG['cmor_table'] + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + raw_filepath = os.path.join(in_dir, CFG['filename']) + + # Run the cmorization + for (var, var_info) in CFG['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + raw_var = var_info.get('raw', var) + zip_file = os.path.join(in_dir, raw_filepath.format(raw_name=raw_var)) + if not os.path.isfile(zip_file): + logger.debug("Skipping '%s', file '%s' not found", var, zip_file) + continue + logger.info("Found input file '%s'", zip_file) + filepath = _unzip(zip_file, out_dir) + _extract_variable(raw_var, cmor_info, glob_attrs, filepath, out_dir) + _clean(filepath) diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl new file mode 100644 index 0000000000..446e70c8a2 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ERA-Interim.ncl @@ -0,0 +1,373 @@ +; ############################################################################# +; ESMValTool CMORizer for ERA-Interim data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; http://apps.ecmwf.int/datasets/data/interim-full-moda/ +; +; Last access +; 20190205 +; +; Download and processing instructions +; Select "Era Interim Fields": +; Daily: for daily values +; Invariant: for time invariant variables (like land-sea mask) +; Monthly Means of Daily Means: for monthly values +; Monthly Means of Daily Forecast Accumulation: for accumulated variables +; like precipitation or radiation fluxes +; Select "Type of level" (Surface or Pressure levels) +; Download the data on a single variable and single year basis, and save +; them as ERA-Interim___YYYY.nc, where is the ERA-Interim +; variable name and is either monthly or daily. 
Further download +; "land-sea mask" from the "Invariant" data and save it in +; ERA-Interim_lsm.nc. +; It is also possible to download data in an automated way, see: +; https://confluence.ecmwf.int/display/WEBAPI/Access+ECMWF+Public+Datasets +; https://confluence.ecmwf.int/display/WEBAPI/Python+ERA-interim+examples +; A registration is required for downloading the data. +; +; Caveats +; Make sure to select the right steps for accumulated fluxes, see: +; https://confluence.ecmwf.int/pages/viewpage.action?pageId=56658233 +; https://confluence.ecmwf.int/display/CKB/ERA-Interim%3A+monthly+means +; for a detailed explanation. +; The data are updated regularly: recent years are added, but also the past +; years are sometimes corrected. To have a consistent timeseries, it is +; therefore recommended to download the full timeseries and not just add +; new years to a previous version of the data. +; +; Modification history +; 20190311-A_righ_ma: added surface fluxes. +; 20190204-A_righ_ma: adapted to v2. +; 20171023-A_laue_ax: added variables prw, ts +; 20160913-A_laue_ax: added variable tcc +; 20150820-A_laue_ax: added variables lwp, iwp, sftlf +; 20150327-A_righ_ma: merged with ERA-Interim-surf.ncl and +; ERA-Interim-surf_daily.ncl. +; 20140128-A_righ_ma: written. +; +; ############################################################################# + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_ERA-Interim.ncl" + + ; Source name + OBSNAME = "ERA-Interim" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 1979 + YEAR2 = 2018 + + ; Acceleration of gravity [m s-2] + G = 9.80665 + + ; Variable settings as list [VAR, NAME, CONVERSION, MIP, FREQ] + ; VAR: selected variable (standard name) + ; NAME: name in the raw data + ; RAWUNITS: expected units in the input data + ; CONVERSION: corresponding conversion factor to CMOR units + ; MIP: mip + ; FREQ: frequency + VLIST = \ + [/[/"sftlf", "lsm", 100., "fx", "fx"/], \ + [/"pr", "tp", 1.e3, "day", "day"/], \ + [/"psl", "msl", 1., "day", "day"/], \ + [/"tas", "t2m", 1., "day", "day"/], \ + [/"tasmax", "mx2t", 1., "day", "day"/], \ + [/"tasmin", "mn2t", 1., "day", "day"/], \ + [/"zg", "z", 1. / G, "day", "day"/], \ + [/"clivi", "p57.162", 1., "Amon", "mon"/], \ + [/"clt", "tcc", 100., "Amon", "mon"/], \ + [/"clwvi", (/"p56.162", "p57.162"/), 1., "Amon", "mon"/], \ + [/"hfds", (/"ssr", "str", "slhf", "sshf"/), 1., "Omon", "mon"/], \ + [/"hur", "r", 1., "Amon", "mon"/], \ + [/"hus", "q", 1., "Amon", "mon"/], \ + [/"pr", "tp", 1.e3, "Amon", "mon"/], \ + [/"prw", "tcwv", 1., "Amon", "mon"/], \ + [/"ps", "sp", 1., "Amon", "mon"/], \ + [/"psl", "msl", 1., "Amon", "mon"/], \ + [/"ta", "t", 1., "Amon", "mon"/], \ + [/"tas", "t2m", 1., "Amon", "mon"/], \ + [/"tauu", "iews", 1., "Amon", "mon"/], \ + [/"tauv", "inss", 1., "Amon", "mon"/], \ + [/"tos", "sst", 1., "Omon", "mon"/], \ + [/"ts", "skt", 1., "Amon", "mon"/], \ + [/"ua", "u", 1., "Amon", "mon"/], \ + [/"va", "v", 1., "Amon", "mon"/], \ + [/"wap", "w", 1., "Amon", "mon"/], \ + [/"zg", "z", 1. / G, "Amon", "mon"/], \ + [""]/] + + ; Type + TYPE = "reanaly" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "http://apps.ecmwf.int/datasets/data/interim_full_moda/" + REF = "Dee, D. P. et al., Q. J. Roy. Meteor. 
Soc., doi:10.1002/qj.828, 2011" + COMMENT = "" + +end +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Loop over variables + do vv = 0, ListCount(VLIST) - 2 + + ; Setting for the current variable + clist = VLIST[vv] + VAR = clist[0] + NAME = clist[1] + CONVERSION = clist[2] + MIP = clist[3] + FREQ = clist[4] + CMOR_TABLE = \ + getenv("esmvaltool_root") + "/cmor/tables/cmip5/Tables/CMIP5_" + MIP + delete(clist) + + log_info("Processing " + VAR + " (" + MIP + ")") + + l_day = False + l_fx = False + if (isStrSubset(MIP, "mon")) then + freqkey = "monthly" + end if + if (isStrSubset(MIP, "day")) then + freqkey = "daily" + l_day = True + end if + if (isStrSubset(MIP, "fx")) then + l_fx = True + end if + + ; Create timeseries + do yy = YEAR1, YEAR2 + + ; fx variables are time invariant + if (l_fx) then + fname = input_dir_path + "ERA-Interim_" + NAME(0) + ".nc" + f = addfile(fname, "r") + else + fname = input_dir_path + \ + "ERA-Interim_" + NAME(0) + "_" + freqkey + "_" + yy + ".nc" + f = addfile(fname, "r") + end if + + ; Rank of the input data + rank = dimsizes(getfilevardims(f, NAME(0))) + + ; For daily data, split in monthly files, otherwise yearly + if (l_day) then + mend = 12 + else + mend = 1 ; dummy loop, the whole year will be processed + end if + + do mm = 1, mend + + ; Read variable + if (l_day) then + + ; Check time-index + time = f->time + if (any(VAR.eq.(/"tasmin", "tasmax", "pr"/))) then + time = time - 1 ; Shift back 1h to include 12:00 + step=12h + end if + date = cd_calendar(time, 0) + idxt = ind(date(:, 1).eq.mm) + + ; Extract current month + if (rank.eq.4) then + xx = f->$NAME(0)$(idxt, :, :, :) + elseif (rank.eq.3) then + xx = f->$NAME(0)$(idxt, :, :) + else + error_msg("f", DIAG_SCRIPT, "", "invalid rank") + end if + start_date = yy + sprinti("%0.2i", mm) + "01" + end_date = yy + sprinti("%0.2i", mm) + \ + sprinti("%0.2i", days_in_month(yy, mm)) + + ; Shift back 1h to include 12:00 + step=12h + if (any(VAR.eq.(/"tasmin", "tasmax", "pr"/))) then + xx&time = xx&time - 1 + end if + delete(date) + delete(time) + delete(idxt) + + else + + xx = f->$NAME(0)$ + start_date = yy + "0101" + end_date = yy + "1231" + + end if + + ; Unpack variable according to metadata information + output = short2flt(xx) + delete(xx) + + ; Multiple input case + if (dimsizes(NAME).gt.1) then + + if (l_day) then + error_msg("f", DIAG_SCRIPT, "", "multiple input for daily " + \ + "values not implemented") + end if + + do iv = 1, dimsizes(NAME) - 1 + fname2 = input_dir_path + \ + "ERA-Interim_" + NAME(iv) + "_" + freqkey + "_" + yy + ".nc" + f2 = addfile(fname2, "r") + xx2 = f2->$NAME(iv)$ + output = output + short2flt(xx2) + delete(xx2) + delete(f2) + delete(fname2) + end do + + end if + + ; Calculate daily mean + if (l_day) then + + if (VAR.eq."tasmin") then + dmean = calculate_daily_values(output, "min", 0, False) + elseif (VAR.eq."tasmax") then + dmean = calculate_daily_values(output, "max", 0, False) + elseif (VAR.eq."pr") then + dmean = calculate_daily_values(output, "sum", 0, False) + else + dmean = calculate_daily_values(output, "avg", 0, False) + end if + delete(output) + output = dmean + delete(dmean) + + end if + + ; Time-invariant variables + if (l_fx) then + output := output(0, :, :) + rank = 2 + end if + + ; Convert units + output = output * CONVERSION + + ; Special case: accumulated fluxes + if (any(VAR.eq.(/"pr", "hfds"/))) then ; [X] --> [X s-1] + if (l_day) then + 
output = output / 24. / 3600. + else + locy = new(12, integer) + locy = yy + dm = conform(output, days_in_month(locy, ispan(1, 12, 1)), 0) + output = output / 24. / 3600. / dm + delete(locy) + delete(dm) + end if + end if + + ; Format coordinates + if (rank.eq.4) then + output!0 = "time" + output!1 = "plev" + output!2 = "lat" + output!3 = "lon" + output&plev = output&plev * 100 ; [mb] --> [Pa] + elseif (rank.eq.3) then + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + elseif (rank.eq.2) then + output!0 = "lat" + output!1 = "lon" + else + error_msg("f", DIAG_SCRIPT, "", "invalid rank") + end if + format_coords(output, start_date, end_date, FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR, CMOR_TABLE) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + if (l_fx) then + DATESTR = "" + elseif (l_day) then + DATESTR = \ + yy + sprinti("%0.2i", mm) + "-" + yy + sprinti("%0.2i", mm) + else + DATESTR = yy + "01-" + yy + "12" + end if + if (DATESTR.eq."") then + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, MIP, VAR/), "_") + ".nc" + + else + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + end if + + ; Add height coordinate to tas variable (required by the new backend) + if (any(VAR.eq.(/"tas", "tasmin", "tasmax"/))) then + output@coordinates = "height" + end if + + ; Write variable + write_nc(fout, VAR, output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + ; Add height coordinate to tas variable (required by the new backend) + if (any(VAR.eq.(/"tas", "tasmin", "tasmax"/))) then + height = 2.d + height!0 = "ncl_scalar" + height@units = "m" + height@axis = "Z" + height@positive = "up" + height@long_name = "height" + height@standard_name = "height" + w = addfile(fout, "w") + w->height = height + delete(w) + end if + + end do ; month (for daily data, otherwise dummy loop) + + ; No year loop need for fx (time invariant) + if (l_fx) then + break + end if + + end do ; year + + delete(NAME) + + end do ; variable + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-AEROSOL.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-AEROSOL.ncl new file mode 100644 index 0000000000..3ddbe27123 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-AEROSOL.ncl @@ -0,0 +1,168 @@ +; ############################################################################# +; ESMValTool CMORizer for ESACCI-AEROSOL data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/aerosol/data/ +; +; Last access +; 20190124 +; +; Download and processing instructions +; Download the data from: +; ATSR2_SU/L3/v4.21/MONTHLY/ (1997-2002) +; AATSR_SU/L3/v4.21/MONTHLY/ (2003-2011) +; Other years are not considered since they are not complete. +; Put all files in input_dir_path (no subdirectories with years). +; +; Modification history +; 20190124-A_righ_ma: adapted to v2. +; 20160718-A_laue_ax: added AOD550 + AOD870 uncertainties. +; 20160525-A_righ_ma: updated to v4.21 and adding more variables. +; 20150126-A_righ_ma: adding AOD at other wavelengths. +; 20151124-A_righ_ma: switched to monthly raw data (now available). +; 20150424-A_righ_ma: written. 
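The accumulated-flux branch of the ERA-Interim script above converts accumulations into mean fluxes by dividing by the number of seconds in the averaging period. A short numpy sketch of the monthly branch, assuming (as the script does) a raw field accumulated over the full month and already scaled to kg m-2; the values and array names are made up:

import numpy as np

days_in_month = np.array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
tp_monthly = np.full(12, 75.0)  # example: 75 kg m-2 accumulated in each month

# Mirror "output = output / 24. / 3600. / dm" from the NCL code: divide the
# monthly accumulation by the seconds in that month to get kg m-2 s-1.
pr = tp_monthly / (days_in_month * 24.0 * 3600.0)
print(pr[0])  # ~2.8e-05 kg m-2 s-1 for January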
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_ESACCI-AEROSOL.ncl" + + ; Source name + OBSNAME = "ESACCI-AEROSOL" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1997 + YEAR2 = 2011 + + ; Selected variable (standard name) + VAR = (/"od550aer", "od870aer", "od550lt1aer", "abs550aer", \ + "od550aerStderr", "od870aerStderr"/) + + ; Name in the raw data + NAME = (/"AOD550_mean", "AOD870_mean", "FM_AOD550_mean", "AAOD550_mean", \ + "AOD550_uncertainty", "AOD870_uncertainty"/) + + ; MIP + MIP = (/"aero", "aero", "aero", "aero", \ + "aero", "aero"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon", \ + "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/" + \ + (/"cmip5/Tables/CMIP5_aero", \ + "cmip5/Tables/CMIP5_aero", \ + "cmip5/Tables/CMIP5_aero", \ + "cmip5/Tables/CMIP5_aero", \ + "custom/CMOR_od550aerStderr.dat", \ + "custom/CMOR_od870aerStderr.dat"/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "SU-v4.21" + + ; Global attributes + SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/aerosol/data/" + REF = "Popp et al., ESA Aerosol Climate Change Initiative " + \ + "(ESA Aerosol_cci) data: AOD v4.21 via Centre for Environmental " + \ + "Data Analysis, 2016" + COMMENT = "Combined dataset ERS2-ATSR2 (1997-2002) and ENVISAT-AATSR " + \ + "(2003-2011), based on the University of Swansea algorithm " + \ + "(monthly mean L3 data)" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + ; Create timeseries + do yy = YEAR1, YEAR2 + do mm = 1, 12 + + ldate = yy + sprinti("%0.2i", mm) + + ; Read file + fname = systemfunc("ls " + input_dir_path + ldate + "*.nc") + + ; No files found + if (all(ismissing(fname))) then + continue + end if + + ; Extract data + f = addfile(fname, "r") + xx = f->$NAME(vv)$ + + ; Assign to global array + if (.not.isdefined("output")) then + dims = array_append_record(dimsizes(time), dimsizes(xx), 0) + output = new(dims, float) + output!0 = "time" + output&time = time + output!1 = "lat" + output&lat = f->latitude + output!2 = "lon" + output&lon = f->longitude + end if + output(ind(toint(ldate).eq.date), :, :) = (/xx/) + delete(fname) + + end do + end do + + ; Set fill value + output = where(output.eq.-999, output@_FillValue, output) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-CLOUD.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-CLOUD.ncl new file mode 100644 index 0000000000..b65f05dde5 --- /dev/null +++ 
b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-CLOUD.ncl @@ -0,0 +1,196 @@ +; ############################################################################# +; ESMValTool CMORizer for ESACCI-CLOUD data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/CLD_PRODUCTS/v3.0/ +; +; Last access +; 20190201 +; +; Download and processing instructions +; Download the data from: +; L3C/AVHRR-PM/ +; To fill the gap 199409-199501, also download: +; L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ +; 199409-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc +; L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ +; 199410-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc +; L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ +; 199411-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc +; L3C/AVHRR-AM/AVHRR_NOAA-12/1994/ \ +; 199412-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc +; L3C/AVHRR-AM/AVHRR_NOAA-12/1995/ \ +; 199501-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-12-fv3.0.nc +; Put all files under a single directory (no subdirectories with years). +; +; Modification history +; 20190201-A_righ_ma: adapted to v2. +; 20181116-A_laue_ax: changed to use CLOUD-CCI v3.0 data (AVHRR-PM), gaps +; (1994/09 - 1995/01) are filled with AVHRR-AM data +; 20180522-A_righ_ma: changed to use AVHRR-PM data. +; 20160519-A_laue_ax: written (reformat_obs_ESACCI-AEROSOL.ncl). +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_ESACCI-CLOUD.ncl" + + ; Source name + OBSNAME = "ESACCI-CLOUD" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1982 + YEAR2 = 2016 + + ; Selected variable (standard name) + VAR = (/"clt", "cltStderr", "clivi", "clwvi"/) + + ; Name in the raw data + NAME = (/"cfc", "cfc_unc", "iwp_allsky", "lwp_allsky"/) + + ; Conversion factor + CONV = (/100., 1., 0.001, 0.001/) + + ; MIP + MIP = (/"Amon", "Amon", "Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/" + \ + (/"cmip5/Tables/CMIP5_Amon", \ + "custom/CMOR_cltStderr.dat", \ + "cmip5/Tables/CMIP5_Amon", \ + "cmip5/Tables/CMIP5_Amon"/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "AVHRR-fv3.0" + + ; Global attributes + SOURCE = "https://public.satproj.klima.dwd.de/data/ESA_Cloud_CCI/" + \ + "CLD_PRODUCTS/v3.0/" + REF = \ + "Stengel et al., Earth Syst. Sci. 
Data, doi:10.5194/essd-9-881-2017, 2017" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + ; Create timeseries + do yy = YEAR1, YEAR2 + + syear = sprinti("%i", yy) + do mm = 1, 12 + + smonth = sprinti("%0.2i", mm) + + ; Read file + fname = systemfunc("ls " + input_dir_path + syear + smonth + \ + "-ESACCI-L3C_CLOUD-CLD_PRODUCTS-AVHRR_NOAA-*-" + \ + str_sub_str(VERSION, "AVHRR-", "") + ".nc") + + ; No files found + if (all(ismissing(fname))) then + continue + end if + + ; Extract data + f = addfile(fname, "r") + xx = f->$NAME(vv)$ + + ; Convert units + xx = xx * CONV(vv) + + ; lwp is not a CMOR variable, derive as clwvi = lwp + iwp + if (VAR(vv).eq."clwvi") then + + ; Read 2nd variable containing iwp (variable "iwp_allsky") + iwpname = "iwp_allsky" + xx2 = f->$iwpname$ + + ; Convert units + xx2 = xx2 * 0.001 + + ; add iwp to lwp to calculate cmor variable "clwvi" + xx = xx + xx2 + delete(xx2) + end if + + ; Assign to global array + if (.not.isdefined("output")) then + dims = dimsizes(xx) + dims(0) = dimsizes(time) + output = new(dims, float) + output!0 = "time" + output&time = time + output!1 = "lat" + output&lat = f->lat + output!2 = "lon" + output&lon = f->lon + end if + output(ind(toint(yy * 100 + mm).eq.date), :, :) = (/xx/) + + delete(fname) + delete(f) + + end do + end do + + ; Set fill value + output = where(output.eq.-999, output@_FillValue, output) + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-FIRE.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-FIRE.ncl new file mode 100644 index 0000000000..4f3155b00e --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-FIRE.ncl @@ -0,0 +1,162 @@ +; ############################################################################# +; ESMValTool CMORizer for ESACCI-FIRE data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/fire/data/ +; +; Last access +; 20190124 +; +; Download and processing instructions +; Download the data from: +; burned_area/MERIS/grid/v4.1/ +; Put all files in input_dir_path (no subdirectories with years). +; +; Modification history +; 20190124-A_righ_ma: written based on a python script by muel_bn. 
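The ESACCI-CLOUD script above derives clwvi, which CMOR defines as the total (liquid plus ice) condensed water path, by adding iwp to lwp after converting both from g m-2 to kg m-2. A small numpy sketch with made-up values:

import numpy as np

lwp_allsky = np.array([120.0, 80.0])  # liquid water path, g m-2 (made up)
iwp_allsky = np.array([30.0, 10.0])   # ice water path, g m-2 (made up)

# clwvi is the total condensed water path (liquid + ice) in kg m-2, so both
# raw fields are scaled by CONV = 0.001 and summed, as in the NCL code.
clivi = iwp_allsky * 0.001
clwvi = lwp_allsky * 0.001 + clivi
print(clwvi, clivi)  # [0.15 0.09] [0.03 0.01]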
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_ESACCI-FIRE.ncl" + + ; Source name + OBSNAME = "ESACCI-FIRE" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 2005 + YEAR2 = 2011 + + ; Selected variable (standard name) + VAR = "burntArea" + + ; Name in the raw data + NAME = "burned_area" + + ; MIP + MIP = "Lmon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_Lmon" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L4-BA-MERIS-fv4.1" + + ; Global attributes + SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/fire/data/" + REF = "" + COMMENT = "" + +end + +begin + + time = create_timec(YEAR1, YEAR2) + date = cd_calendar(time, 1) + + do yy = YEAR1, YEAR2 + do mm = 1, 12 + + ldate = yy + sprinti("%0.2i", mm) + + files = systemfunc("ls " + input_dir_path + ldate + \ + "??-ESACCI-L4_FIRE-BA-MERIS-fv4.1.nc") + f = addfiles(files, "r") + + xx = f[:]->$NAME$ + + ; Calculate area + if (.not.isdefined("area")) then + deg2rad = acos(-1.0) / 180. + lat = f[0]->lat + lon = f[0]->lon + nlat = dimsizes(lat) + deltax = abs(lon(1) - lon(0)) + lati = new(dimsizes(lat) + 1, float) + lati(0) = max((/(3 * lat(0) - lat(1)) / 2., -90./)) + do ii = 1, dimsizes(lati) - 2 + lati(ii) = 0.5 * (lat(ii - 1) + lat(ii)) + end do + lati(dimsizes(lati) - 1) = \ + min((/(3 * lat(nlat - 1) - lat(nlat - 2)) / 2., 90./)) + area = new((/dimsizes(lat), dimsizes(lon)/), float) + do ii = 0, dimsizes(lat) - 1 + deltay = sin(lati(ii + 1) * deg2rad) - sin(lati(ii) * deg2rad) + area(ii, :) = abs(6371000. ^ 2 * deltay * deltax * deg2rad) + end do + delete([/lat, lon, nlat, deltax, lati, deltay/]) + end if + + ; Calculate fraction + xx = xx / (/conform(xx, area, (/1, 2/))/) + + ; Assign to global array + if (.not.isdefined("output")) then + dims = array_append_record(dimsizes(time), dimsizes(xx(0, :, :)), 0) + output = new(dims, float) + output!0 = "time" + output&time = time + output!1 = "lat" + output&lat = f[0]->lat + output!2 = "lon" + output&lon = f[0]->lon + end if + output(ind(toint(ldate).eq.date), :, :) = dim_sum_n(xx, 0) + + delete(files) + delete(f) + + end do + end do + + ; Convert units [1] --> [%] + output = output * 100. 
+ + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR, CMOR_TABLE) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR, output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-LANDCOVER.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-LANDCOVER.ncl new file mode 100644 index 0000000000..62a051ed72 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-LANDCOVER.ncl @@ -0,0 +1,215 @@ +; ############################################################################# +; ESMValTool CMORizer for ESACCI-LANDCOVER data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/land_cover_maps/ +; +; Last access +; 20190110 +; +; Download and processing instructions +; Download the 3 NetCDF files for 2000, 2005 and 2010. +; Download the CCI-LC Tools from: +; http://maps.elie.ucl.ac.be/CCI/viewer/download/lc-user-tools-3.14.zip +; Unpack and run the CCI-LC Tools on each of the NetCDF files as follows: +; bash lc-user-tools-3.14/bin/aggregate-map.sh \ +; -PgridName=GEOGRAPHIC_LAT_LON -PnumMajorityClasses=1 \ +; -PoutputAccuracy=false -PoutputPFTClasses=true \ +; -PoutputLCCSClasses=false -PnumRows=360 +; Put the resulting processed data in input_dir_path. +; +; Caveat +; The CCI-LC Tools must be applied before running this script. +; The CCI-LC Tools require Java Version 7 or higher. +; The input data are available for a single year and are copied over to +; generate a time series over their time range of validity. +; +; Modification history +; 20190110-A_righ_ma: rewritten in NCL for v2. +; 20160714-A_muel_bn: written. 
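The area computation in the ESACCI-FIRE script above uses the exact spherical formula for a lat-lon cell, area = R^2 * dlon * (sin(lat_north) - sin(lat_south)). A compact numpy sketch with an assumed 0.25-degree grid, including a sanity check against the Earth's surface area:

import numpy as np

res = 0.25                                 # grid spacing in degrees (example)
lat_edges = np.deg2rad(np.arange(-90.0, 90.0 + res, res))
dlon = np.deg2rad(res)
earth_radius = 6371000.0                   # same value as in the NCL code, m

# Cell area per latitude row: R^2 * dlon * (sin(lat_n) - sin(lat_s))
area = earth_radius**2 * dlon * np.abs(np.diff(np.sin(lat_edges)))

# Sanity check: all rows x all longitude columns ~ Earth's surface (5.1e14 m2)
print(area.sum() * (360.0 / res) / 5.1e14)  # ~1.0

Dividing the raw burned area (m2 per cell) by these cell areas gives the fraction that the script then scales to percent.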
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_ESACCI-LANDCOVER.ncl" + + ; Source name + OBSNAME = "ESACCI-LANDCOVER" + + ; Tier + TIER = 2 + + ; Years + YEARS = (/2000, 2005, 2010/) + + ; Variable names + VAR = \ + (/"baresoilFrac", "cropFrac", "grassFrac", "shrubFrac", "treeFrac"/) + + ; Corresponding aggregation classes in the raw data + CLASSES = [/"Bare_Soil", \ + "Managed_Grass", \ + "Natural_Grass", \ + (/"Shrub_Broadleaf_Deciduous", \ + "Shrub_Broadleaf_Evergreen", \ + "Shrub_Needleleaf_Evergreen"/), \ + (/"Tree_Broadleaf_Deciduous", \ + "Tree_Broadleaf_Evergreen", \ + "Tree_Needleleaf_Deciduous", \ + "Tree_Needleleaf_Evergreen"/)/] + + ; MIPs + MIP = (/"Lmon", "Lmon", "Lmon", "Lmon", "Lmon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = \ + getenv("esmvaltool_root") + "/cmor/tables/cmip5/Tables/CMIP5_Lmon" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L4-LCCS-Map-300m-P5Y-aggregated-0.500000Deg" + + ; Global attributes + SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/" + REF = "Defourny et al.. ESA Land Cover Climate Change Initiative " + \ + "(ESA LC_cci) data, 2015" + COMMENT = "" + +end + +begin + + do yy = 0, dimsizes(YEARS) - 1 + + fname = \ + input_dir_path + "ESACCI-LC-" + VERSION + "-" + YEARS(yy) + "-v1.6.1.nc" + + f = addfile(fname, "r") + + ; Create time coordinate + YEAR1 = YEARS(yy) - 2 + YEAR2 = YEARS(yy) + 2 + time = create_timec(YEAR1, YEAR2) + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + ; Set classes to be added up + class = CLASSES[vv] + + ; Save mask before adding up classes + do cc = 0, dimsizes(class) - 1 + qq = f->$class(cc)$ + replace_ieeenan(qq, FILL, 0) + qq@_FillValue = FILL + tmp = ismissing(qq) + delete(qq) + if (cc.eq.0) then + lmask = tmp + else + lmask := lmask .and. 
tmp + end if + delete(tmp) + end do + + ; Add up classes + do cc = 0, dimsizes(class) - 1 + log_info(" adding class " + class(cc)) + tmp = f->$class(cc)$ + replace_ieeenan(tmp, FILL, 0) + tmp@_FillValue = FILL + tmp = where(ismissing(tmp), 0., tmp) + if (cc.eq.0) then + xx = tmp + else + xx = xx + tmp + end if + delete(tmp) + end do + delete(class) + + ; Reapply mask of missing values + xx = where(lmask, xx@_FillValue, xx) + + ; Define output array + output = \ + new((/dimsizes(time), dimsizes(xx&lat), dimsizes(xx&lon)/), float) + output!0 = "time" + output&time = time + output!1 = "lat" + output&lat = xx&lat + output!2 = "lon" + output&lon = xx&lon + output = conform(output, xx, (/1, 2/)) + delete(xx) + + ; Set standard fill value + output@_FillValue = FILL + + ; Convert units + output = output * 100 + output@units = "%" + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Special case for baresoilFrac: add auxiliary coordinate + if (VAR(vv).eq."baresoilFrac") then + output@coordinates = "type" + end if + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + ; Special case for baresoilFrac: add auxiliary coordinate + if (VAR(vv).eq."baresoilFrac") then + type = tochar("bare_ground") + type!0 = "strlen" + type@long_name = "surface type" + type@standard_name = "area_type" + w = addfile(fout, "w") + w->type = type + delete(w) + end if + + end do + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OC.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OC.py new file mode 100644 index 0000000000..f0f0d833b7 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OC.py @@ -0,0 +1,176 @@ +# pylint: disable=invalid-name +"""ESMValTool CMORizer for ESACCI-OC data. + +Tier + +Source + ftp://oceancolour.org/occci-v3.1/geographic/netcdf/monthly/chlor_a/ + user: oc-cci-data + pass: ELaiWai8ae + +Last access + 20190227 + +Download and processing instructions + In case of issues with data download, check also the information provided at + OceanColour webpage https://esa-oceancolour-cci.org/ + Put all files under a single directory (no subdirectories with years) + in ${RAWOBS}/Tier2/ESACCI-OC + +Modification history + 20190227-A_lova_to: written. + +""" + +import logging +import os +import glob +import xarray as xr + +import iris + +from .utilities import (constant_metadata, fix_coords, fix_var_metadata, + read_cmor_config, save_variable, set_global_atts) + +logger = logging.getLogger(__name__) + +# read in CMOR configuration +CFG = read_cmor_config('ESACCI-OC.yml') + + +def _fix_data(cube, var): + """Specific data fixes for different variables.""" + logger.info("Fixing data ...") + with constant_metadata(cube): + if var == 'chl': + cube *= 1.e-06 + return cube + + +def _add_depth_coord(cube): + """Add depth auxiliary coordinate for CMIP5 standard.""" + if not cube.coords('depth'): + depth = 1. 
+ depth_coord = iris.coords.AuxCoord( + depth, + standard_name='depth', + long_name='depth', + var_name='depth', + units='m', + attributes={'positive': 'down'}) + cube.add_aux_coord(depth_coord) + cube.coordinates = 'depth' + + +def extract_variable(var_info, raw_info, out_dir, attrs): + """Extract to all vars.""" + var = var_info.short_name + cubes = iris.load(raw_info['file']) + rawvar = raw_info['name'] + + for cube in cubes: + if cube.var_name == rawvar: + fix_var_metadata(cube, var_info) + fix_coords(cube) + _add_depth_coord(cube) + _fix_data(cube, var) + set_global_atts(cube, attrs) + save_variable( + cube, + var, + out_dir, + attrs, + local_keys=['coordinates'], + unlimited_dimensions=['time'], + ) + + +def merge_data(in_dir, out_dir, raw_info, bins): + """Merge all data into a single (regridded) file.""" + var = raw_info['name'] + do_bin = True if (bins % 2 == 0) & (bins != 0) else False + datafile = sorted(glob.glob(in_dir + '/' + raw_info['file'] + '*.nc')) + for x in datafile: + ds = xr.open_dataset(x) + da = ds[var].sel(lat=slice(None, None, -1)) + # remove inconsistent attributes + for thekeys in [ + 'grid_mapping', 'ancillary_variables', 'parameter_vocab_uri' + ]: + del da.attrs[thekeys] + + if do_bin: + da = da.coarsen(lat=bins, boundary='exact').mean() + da = da.coarsen(lon=bins, boundary='exact').mean() + + if x == datafile[0]: + newda = da + thekeys = [ + 'creator_name', 'creator_url', 'license', 'sensor', + 'processing_level' + ] + dsmeta = dict((y, ds.attrs[y]) for y in thekeys) + if do_bin: + dsmeta['BINNING'] = ' '.join([ + 'Data binned using ', "{}".format(bins), 'by', + "{}".format(bins), 'cells average' + ]) + else: + dsmeta['BINNING'] = "" + continue + + newda = xr.concat((newda, da), dim='time') + + # save to file + ds = newda.to_dataset(name=var) + for x, y in dsmeta.items(): + ds.attrs[x] = y + thekeys = { + 'lat': { + '_FillValue': False + }, + 'lon': { + '_FillValue': False + }, + 'time': { + 'calendar': 'gregorian' + }, + var: { + '_FillValue': 1.e20 + } + } + datafile = os.path.join(out_dir, raw_info['file'] + '_merged.nc') + ds.to_netcdf(datafile, encoding=thekeys, unlimited_dims='time') + + logger.info("Merged data written to: %s", datafile) + + return (datafile, dsmeta['BINNING']) + + +def cmorization(in_dir, out_dir): + """Cmorization func call.""" + cmor_table = CFG['cmor_table'] + glob_attrs = CFG['attributes'] + + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + + # run the cmorization + for var, vals in CFG['variables'].items(): + var_info = cmor_table.get_variable(vals['mip'], var) + glob_attrs['mip'] = vals['mip'] + raw_info = {'name': vals['raw'], 'file': vals['file']} + + # merge yearly data and apply binning + inpfile, addinfo = merge_data(in_dir, out_dir, raw_info, + CFG['custom']['bin_size']) + + logger.info("CMORizing var %s from file %s", var, inpfile) + raw_info['file'] = inpfile + glob_attrs['comment'] = addinfo + glob_attrs['comment'] + extract_variable(var_info, raw_info, out_dir, glob_attrs) + + # Remove temporary input file + os.remove(inpfile) diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OZONE.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OZONE.ncl new file mode 100644 index 0000000000..6b2818e447 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-OZONE.ncl @@ -0,0 +1,171 @@ +; 
############################################################################# +; ESMValTool CMORizer for ESACCI-OZONE data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/ozone/data/ +; +; Last access +; 20190201 +; +; Download and processing instructions +; Download the data from: +; total_columns/l3/merged/v0100/ +; Put all files under a single directory (no subdirectories with years). +; +; Modification history +; 20190201-A_righ_ma: adapted to v2 and replace NaN/inf with FillValue. +; 20160224-A_wenz_sa: written based on reformat_obs_ESACCI-AEROSOL.ncl. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_ESACCI-OZONE.ncl" + + ; Source name + OBSNAME = "ESACCI-OZONE" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = (/1997, 1997, 2007, 2007/) + YEAR2 = (/2010, 2010, 2008, 2008/) + + ; Selected variable (standard name) + VAR = (/"toz", "tozStderr", "tro3prof", "tro3profStderr"/) + + ; Name in the raw data + NAME = (/"atmosphere_mole_content_of_ozone", \ + "atmosphere_mole_content_of_ozone_standard_error", \ + "merged_ozone_vmr", \ + "uncertainty_of_merged_ozone"/) + + ; MIP + MIP = (/"Amon", "Amon", "Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/custom/CMOR_" + VAR + ".dat" + + ; File name + FNAME = (/"ESACCI-OZONE-L3S-TC-MERGED-DLR_1M-_DATE_??-fv0100.nc", \ + "ESACCI-OZONE-L3S-TC-MERGED-DLR_1M-_DATE_??-fv0100.nc", \ + "ESACCI-OZONE-L3-LP-MERGED-MZM-_DATE_-fv0002.nc", \ + "ESACCI-OZONE-L3-LP-MERGED-MZM-_DATE_-fv0002.nc"/) + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L3" + + ; Global attributes + SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/ozone/data/" + REF = "Loyola et al., Int. J. Remote Sens. doi:10.1080/" + \ + "01431160902825016, 2009." 
+  COMMENT = ""
+
+end
+
+begin
+
+  do vv = 0, dimsizes(VAR) - 1
+
+    log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")")
+
+    time = create_timec(YEAR1(vv), YEAR2(vv))
+    date = cd_calendar(time, 1)
+
+    ; Create timeseries
+    do yy = YEAR1(vv), YEAR2(vv)
+      do mm = 1, 12
+
+        ldate = yy + sprinti("%0.2i", mm)
+
+        ; File name
+        fname = systemfunc("ls " + input_dir_path + \
+                           str_sub_str(FNAME(vv), "_DATE_", ldate))
+
+        ; Check
+        if (all(ismissing(fname))) then
+          error_msg("f", DIAG_SCRIPT, "", "no file found for date " + ldate)
+        end if
+
+        ; Extract data
+        f = addfile(fname(0), "r")
+        xx = f->$NAME(vv)$
+        xx@_FillValue = FILL
+        xx@missing_value = xx@_FillValue
+        xx = where(xx.lt.0., xx@_FillValue, xx)
+        xx = where(xx.gt.1e35, xx@_FillValue, xx)  ; get rid of infinity values
+        replace_ieeenan(xx, xx@_FillValue, 0)
+
+        ; Assign to global array
+        dimnames = getvardimnames(xx)
+        if (.not.isdefined("output")) then
+          dims = array_append_record(dimsizes(time), dimsizes(xx), 0)
+          output = new(dims, typeof(xx))
+          output!0 = "time"
+          output&time = time
+          do ii = 0, dimsizes(dimnames) - 1
+            if (dimnames(ii).eq."air_pressure") then
+              output!(ii+1) = "plev"
+              output&plev = f->$dimnames(ii)$
+            elseif (isStrSubset(dimnames(ii), "latitude")) then
+              output!(ii+1) = "lat"
+              output&lat = f->$dimnames(ii)$
+            elseif (dimnames(ii).eq."longitude") then
+              output!(ii+1) = "lon"
+              output&lon = f->$dimnames(ii)$
+            end if
+          end do
+        end if
+        output(ind(toint(ldate).eq.date), :, :) = (/xx/)
+        delete(fname)
+        delete(xx)
+
+      end do
+    end do
+
+    ; Format coordinates
+    format_coords(output, YEAR1(vv) + "0101", YEAR2(vv) + "1231", FREQ(vv))
+
+    ; Set variable attributes
+    tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv))
+    delete(output)
+    output = tmp
+    delete(tmp)
+
+    ; Calculate coordinate bounds
+    bounds = guess_coord_bounds(output, FREQ(vv))
+
+    ; Set global attributes
+    gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT)
+
+    ; Output file
+    DATESTR = YEAR1(vv) + "01-" + YEAR2(vv) + "12"
+    fout = output_dir_path + \
+      str_join((/"OBS", OBSNAME, TYPE, VERSION, \
+                 MIP(vv), VAR(vv), DATESTR/), "_") + ".nc"
+
+    ; Write variable
+    write_nc(fout, VAR(vv), output, bounds, gAtt)
+    delete(gAtt)
+    delete(output)
+    delete(bounds)
+    delete(time)
+    delete(date)
+
+  end do
+
+end
diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SOILMOISTURE.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SOILMOISTURE.ncl
new file mode 100644
index 0000000000..e5655b39e7
--- /dev/null
+++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SOILMOISTURE.ncl
@@ -0,0 +1,173 @@
+; #############################################################################
+; ESMValTool CMORizer for ESACCI-SOILMOISTURE data
+; #############################################################################
+;
+; Tier
+;    Tier 2: other freely-available dataset.
+;
+; Source
+;    ftp://anon-ftp.ceda.ac.uk/neodc/esacci/soil_moisture/data/
+;
+; Last access
+;    20190201
+;
+; Download and processing instructions
+;    Download the data from:
+;      daily_files/COMBINED/v04.2/
+;      ancillary/v04.2/
+;    Put all files under a single directory (no subdirectories with years).
+;
+; Modification history
+;    20190201-A_righ_ma: adapted to v2, use new input data version 4.2.
+;    20160824-A_laue_ax: added processing of volumetric soil moisture
+;                        content (sm, smStderr).
+;    20160721-A_laue_ax: use daily files, added processing of uncertainty.
+;    20150523-A_righ_ma: written.
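The ESACCI-OZONE loop above screens the raw fields before CMORizing: negative values, infinities and NaNs are all mapped to the standard fill value. A minimal numpy sketch of the same sanitisation (FILL stands in for the value set in interface.ncl; the sample numbers are made up):

import numpy as np

FILL = 1.0e20  # stands in for the FILL value defined in interface.ncl
raw = np.array([267.3, -1.0, np.inf, np.nan, 301.2])

clean = raw.copy()
clean[~np.isfinite(clean)] = FILL  # NaN and inf -> _FillValue
clean[clean < 0.0] = FILL          # column ozone cannot be negative
print(clean)  # [2.673e+02 1.000e+20 1.000e+20 1.000e+20 3.012e+02]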
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_ESACCI-SOILMOISTURE.ncl" + + ; Source name + OBSNAME = "ESACCI-SOILMOISTURE" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1979 + YEAR2 = 2016 + + ; Selected variable (standard name) + VAR = (/"sm", "smStderr", "dos", "dosStderr"/) + + ; Name in the raw data + NAME = (/"sm", "sm_uncertainty", "sm", "sm_uncertainty"/) + + ; MIP + MIP = (/"Lmon", "Lmon", "Lmon", "Lmon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/custom/CMOR_" + VAR + ".dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L3S-SSMV-COMBINED-v4.2" + + ; Global attributes + SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/soil_moisture/data/" + REF = "" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yy = YEAR1, YEAR2 + + ; Set list of files + files = systemfunc("ls " + input_dir_path + \ + "ESACCI-SOILMOISTURE-L3S-SSMV-" + \ + "COMBINED-" + yy + "????000000-fv04.2.nc") + f = addfiles(files, "r") + delete(files) + + ; Read data + xx = f[:]->$NAME(vv)$ + if (isatt(xx, "scale_factor")) then + tmp = tofloat(xx * xx@scale_factor) + copy_VarAtts(xx, tmp) + copy_VarCoords(xx, tmp) + delete(xx) + xx = tmp + delete(tmp) + end if + delete(f) + + ; Derive dos using porosity + if (any(VAR(vv).eq.(/"dos", "dosStderr"/))) then + g = addfile(input_dir_path + \ + "/ESACCI-SOILMOISTURE-POROSITY_V01.1.nc", "r") + zz = g->porosity + xx = xx * 100. / conform(xx, zz, (/1, 2/)) + delete(zz) + end if + + ; Add a minor time shift for correct extraction of monthly mean below + xx&time = xx&time + 0.1 + + ; Calculate monthly means + if (isStrSubset(VAR(vv), "Stderr")) then + xx2 = xx + xx2 = xx ^ 2 ; save metadata + tmp = calculate_monthly_values(xx2, "avg", 0, False) + delete(xx) + delete(xx2) + xx = sqrt(tmp) + copy_VarAtts(tmp, xx) + copy_VarCoords(tmp, xx) + delete(tmp) + else + tmp = calculate_monthly_values(xx, "avg", 0, False) + delete(xx) + xx = tmp + delete(tmp) + end if + + ; Append to time-series + if (.not.isdefined("output")) then + output = xx + else + output := array_append_record(output, xx, 0) + end if + delete(xx) + + end do + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SST.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SST.ncl new file mode 100644 index 0000000000..306211a8e4 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_ESACCI-SST.ncl @@ -0,0 +1,176 @@ +; ############################################################################# +; 
ESMValTool CMORizer for ESACCI-SST data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/sst/data/ +; +; Last access +; 20190201 +; +; Download and processing instructions +; Download the data from: +; lt/Analysis/L4/v01.1/ +; Put all files under a single directory (no subdirectories with years). +; +; Modification history +; 20190201-A_righ_ma: adapted to v2. +; 20180529-A_righ_ma: modified to have consistent metadata across multiple +; files and to speed up processing time +; 20160818-A_laue_ax: written +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_ESACCI-SST.ncl" + + ; Source name + OBSNAME = "ESACCI-SST" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1992 + YEAR2 = 2010 + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "L4-GHRSST-SSTdepth-OSTIA-GLOB" + + ; Global attributes + SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/sst/data/" + REF = "Merchant et al., Geosci. Data J., doi:10.1002/gdj3.20, 2014" + COMMENT = "" + +end + +begin + + ; Save date for consistent history attribute + today = systemfunc("date") + + ; Loop over time period + do yy = YEAR1, YEAR2 + do mm = 1, 12 + + mo = sprinti("%0.2i", mm) + + dm = days_in_month(yy, mm) + + ; Select files for this year/month + files = \ + systemfunc("ls " + input_dir_path + yy + mo + "??120000-" + \ + "ESACCI-L4_GHRSST-SSTdepth-OSTIA-GLOB_LT-v02.0-fv01.1.nc") + nfiles = dimsizes(files) + + if (nfiles.ne.dm) then + error_msg("f", DIAG_SCRIPT, "", "incomplete data in " + yy + mo + \ + " (" + dm + " != " + nfiles + " files found)") + end if + + ; Create time-series + f = addfiles(files, "r") + ListSetType(f, "cat") + + ; Read mask + lsmask = f[:]->mask + + ; Read SST + tmp = f[:]->analysed_sst + sst = tmp * tmp@scale_factor + tmp@add_offset + copy_VarCoords(tmp, sst) + delete(tmp) + + ; Read error + tmp = f[:]->analysis_error + err = tmp * tmp@scale_factor + tmp@add_offset + copy_VarCoords(tmp, err) + delete(tmp) + + ; Mask anything that is not open sea water (i.e. 
land, ice, lakes) + sst = where(lsmask.eq.1, sst, sst@_FillValue) + err = where(lsmask.eq.1, err, err@_FillValue) + delete(lsmask) + + ; Calculate time averages + sstavg = sst(0:0, :, :) + sstavg(0, :, :) = dim_avg_n_Wrap(sst, 0) + erravg = err(0:0, :, :) + tmp = err ^ 2 + erravg(0, :, :) = sqrt(dim_avg_n_Wrap(tmp, 0)) + delete(tmp) + delete(sst) + delete(err) + + ; Format time coordinate + sstavg!0 = "time" + sstavg!1 = "lat" + sstavg!2 = "lon" + format_coords(sstavg, yy + mo + "01", yy + mo + dm, FREQ) + erravg!0 = "time" + erravg!1 = "lat" + erravg!2 = "lon" + format_coords(erravg, yy + mo + "01", yy + mo + dm, FREQ) + + ; Format variable metadata + table = CMOR_TABLE + "cmip5/Tables/CMIP5_Amon" + tmp = format_variable(sstavg, "ts", table) + delete(sstavg) + sstavg = tmp + delete(tmp) + + table = CMOR_TABLE + "custom/CMOR_tsStderr.dat" + tmp = format_variable(erravg, "tsStderr", table) + delete(erravg) + erravg = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds_sst = guess_coord_bounds(sstavg, FREQ) + bounds_err = guess_coord_bounds(erravg, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Write output + DATESTR = yy + mo + "-" + yy + mo + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, "ts", DATESTR/), "_") + ".nc" + write_nc(fout, "ts", sstavg, bounds_sst, gAtt) + delete(sstavg) + delete(bounds_sst) + + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, "tsStderr", DATESTR/), "_") + ".nc" + write_nc(fout, "tsStderr", erravg, bounds_err, gAtt) + delete(erravg) + delete(bounds_err) + + delete(gAtt) + delete(files) + delete(f) + + end do + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_GHCN.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_GHCN.ncl new file mode 100644 index 0000000000..aed9b03e76 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_GHCN.ncl @@ -0,0 +1,123 @@ +; ############################################################################# +; ESMValTool CMORizer for GHCN data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; https://www.esrl.noaa.gov/psd/data/gridded/data.ghcngridded.html +; +; Last access +; 20190308 +; +; Download and processing instructions +; Download the dataset "precip.mon.total.nc" (precipitation, total, surface, +; 1900-2015 on a 5x5 grid). +; +; Modification history +; 20190308-A_righ_ma: minor changes to include coordinate boundaries. +; 20190227-A_bock_ls: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_GHCN.ncl" + + ; Source name + OBSNAME = "GHCN" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1900 + YEAR2 = 2014 + + ; Selected variable (standard name) + VAR = "pr" + + ; Name in the raw data + NAME = "precip" + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_Amon" + + ; Type + TYPE = "ground" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "https://www.esrl.noaa.gov/psd/data/gridded/data.ghcngridded.html" + REF = "Jones, P. D., and A. Moberg, J. 
Climate, " + \ + "doi:10.1175/1520-0442(2003)016<0206:HALSSA>2.0.CO;2, 2003" + COMMENT = "" + +end + +begin + + ; Read file + fname = input_dir_path + "precip.mon.total.nc" + f = addfile(fname, "r") + setfileoption("nc", "MissingToFillValue", False) + + ; Read absolute precipitation without last incomplete year + output = f->$NAME$(time|0:1379, lat|:, lon|:) + + ; Calculate days per month + date = cd_calendar(output&time, 0) + dpm = days_in_month(toint(date(:, 0)), toint(date(:, 1))) + dpmc = conform(output, dpm, 0) + + ; Check time range + if (dimsizes(date(:, 0)).ne.12 * (YEAR2 - YEAR1 + 1)) then + error_msg("f", DIAG_SCRIPT, "", "incorrect number of timesteps") + end if + + ; Convert units [mm/month] --> [kg/m2/s] + output = output / (24 * 3600 * dpmc) + + log_info(" Climatology range: " + min(output) + \ + " kg/m2/s to " + max(output) + " kg/m2/s") + + ; Format coordinates + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR, CMOR_TABLE) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + + ; Write temperature time-series + write_nc(fout, VAR, output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT3.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT3.ncl new file mode 100644 index 0000000000..abd1a6b3e9 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT3.ncl @@ -0,0 +1,125 @@ +; ############################################################################# +; ESMValTool CMORizer for HadCRUT3 data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; http://www.metoffice.gov.uk/hadobs/hadcrut3/data/download.html +; +; Last access +; 20190221 +; +; Download and processing instructions +; Download the HadCRUT3v.nc file (variance adjusted dataset). +; +; Caveats +; The HadCRUT3v variance-adjusted dataset for tas is actually the anomaly +; with respect to the period 1958-2001. +; +; Modification history +; 20190221-A_righ_ma: adapted to v2 and renamed to HadCRUT3. +; 20150330-A_righ_ma: updated paths and global attributes. +; 20140311-A_senf_da: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_HadCRUT3.ncl" + + ; Source name + OBSNAME = "HadCRUT3" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1850 + YEAR2 = 2013 + + ; Selected variable (standard name) + VAR = "tasa" + + ; Name in the raw data + NAME = "temp" + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/custom/CMOR_tasa.dat" + + ; Type + TYPE = "ground" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "http://www.metoffice.gov.uk/hadobs/hadcrut3/data/download.html" + REF = "Brohan et al., J. Geophys. 
Res., doi:10.1029/2005JD006548, 2006"
+  COMMENT = "Temperature anomaly with respect to the period 1958-2001"
+
+end
+
+begin
+
+  ; Read file
+  fname = input_dir_path + "HadCRUT3v.nc"
+  f = addfile(fname, "r")
+
+  ; Read variable
+  output = f->temp
+
+  ; Delete level coordinate (dimension 1)
+  tmp = rm_single_dims(output)
+  delete(output)
+  output = tmp
+  delete(tmp)
+
+  ; Extract time period
+  output!0 = "time"
+  date = cd_calendar(output&time, 0)
+  idx = ind(date(:, 0).ge.YEAR1 .and. date(:, 0).le.YEAR2)
+  output := output(idx, :, :)
+
+  ; Format coordinates
+  output!0 = "time"
+  output!1 = "lat"
+  output!2 = "lon"
+  format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ)
+
+  ; Set variable attributes
+  tmp = format_variable(output, VAR, CMOR_TABLE)
+  delete(output)
+  output = tmp
+  delete(tmp)
+
+  ; Calculate coordinate bounds
+  bounds = guess_coord_bounds(output, FREQ)
+
+  ; Set global attributes
+  gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT)
+
+  ; Output file
+  DATESTR = YEAR1 + "01-" + YEAR2 + "12"
+  fout = output_dir_path + \
+    str_join((/"OBS", OBSNAME, TYPE, VERSION, \
+               MIP, VAR, DATESTR/), "_") + ".nc"
+
+  ; Write variable
+  write_nc(fout, VAR, output, bounds, gAtt)
+  delete(gAtt)
+  delete(output)
+  delete(bounds)
+
+end
diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT4.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT4.ncl
new file mode 100644
index 0000000000..07cf11a936
--- /dev/null
+++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadCRUT4.ncl
@@ -0,0 +1,223 @@
+; #############################################################################
+; ESMValTool CMORizer for HadCRUT data
+; #############################################################################
+;
+; Tier
+;    Tier 2: other freely-available dataset.
+;
+; Source
+;    https://crudata.uea.ac.uk/cru/data/temperature/
+;
+; Last access
+;    20190208
+;
+; Download and processing instructions
+;    Download the dataset "HadCRUT4" (median temperature anomalies) and
+;    the dataset "Absolute" (absolute temperatures for the base period
+;    1961-90 on a 5x5 grid).
+;
+; Caveats
+;    In contrast to the HadCRUT3 reformat script which produces temperature
+;    anomalies (relative to the 1961-90 climatology), this script calculates
+;    absolute tas by adding the climatology ("absolute.nc") to the anomalies
+;    ("HadCRUT.4.6.0.0.median.nc"). It creates 3 outputs, one with the
+;    temperature time-series, one with the anomaly time-series, and one with
+;    the temperature climatology (1961-1990).
+;
+; Modification history
+;    20190229-A_righ_ma: added output for anomaly (tasa).
+;    20190208-A_righ_ma: added output for climatology and adapted to v2.
+;    20180222-A_laue_ax: bug fix (added swapping of latitudes if needed).
+;    20160203-A_laue_ax: written. 
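+;
+; A minimal sketch of the recombination described in the caveats above,
+; assuming "anomaly" has ntime = 12 * nyears timesteps and "clim" holds
+; the twelve monthly means of the 1961-1990 climatology:
+;
+;   do yr = 0, ntime / 12 - 1
+;     tas(12 * yr:12 * yr + 11, :, :) = \
+;       anomaly(12 * yr:12 * yr + 11, :, :) + clim
+;   end do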
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_HadCRUT4.ncl" + + ; Source name + OBSNAME = "HadCRUT4" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1850 + YEAR2 = 2018 + + ; Selected variable (standard name) + VAR = "tas" + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE1 = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_Amon" + CMOR_TABLE2 = getenv("esmvaltool_root") + \ + "/cmor/tables/custom/CMOR_tasa.dat" + CMOR_TABLE3 = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_Amon" + + ; Version + VERSION = "1" + + ; Type + TYPE1 = "ground" + TYPE2 = "ground" + TYPE3 = "clim" + + ; Global attributes + SOURCE = "https://crudata.uea.ac.uk/cru/data/temperature/" + REF1 = "Morice et al., J. Geophys. Res., doi:10.1029/2011JD017187, 2012" + REF2 = "Morice et al., J. Geophys. Res., doi:10.1029/2011JD017187, 2012" + REF3 = "Jones et al., Rev. Geophys., doi:10.1029/1999RG900002, 1999" + COMMENT1 = "Temperature time-series calculated from the anomaly " + \ + "time-series by adding the temperature climatology for 1961-1990" + COMMENT2 = "Temperature anomaly with respect to the period 1961-1990" + COMMENT3 = "Climatology 1961-1990" + +end + +begin + + ; Read file + fname1 = input_dir_path + "HadCRUT.4.6.0.0.median.nc" + fname2 = input_dir_path + "absolute.nc" + f1 = addfile(fname1, "r") + setfileoption("nc", "MissingToFillValue", False) + f2 = addfile(fname2, "r") + + ; Read anomaly + anomaly = f1->temperature_anomaly + + ; Read absolute temperature + tmp = f2->tem + clim = tofloat(tmp * tmp@scale_factor) + 273.15 + copy_VarCoords(tmp, clim) + delete(tmp) + + ; Swap latitudes + if (isMonotonic(anomaly&latitude).eq.-1) then + anomaly = anomaly(:, ::-1, :) + end if + + if (isMonotonic(clim&lat).eq.-1) then + clim = clim(:, ::-1, :) + end if + + log_info(" Climatology range: " + min(clim) + \ + " K to " + max(clim) + " K") + log_info(" Anomaly range: " + min(anomaly) + \ + " K to " + max(anomaly) + " K") + + output1 = anomaly + output2 = anomaly + output3 = clim + delete(output3&time) + output3&time = create_timec(1976, 1976) + dims = dimsizes(output1) + + ; Add absolute temperature to anomaly + do yr = 0, dims(0) / 12 - 1 + m1 = yr * 12 + m2 = m1 + 11 + output1(m1:m2, :, :) = where(.not.ismissing(clim), \ + anomaly(m1:m2, :, :) + clim, \ + tofloat(anomaly@_FillValue)) + end do + + ; Format coordinates + output1!0 = "time" + output1!1 = "lat" + output1!2 = "lon" + format_coords(output1, YEAR1 + "0101", YEAR2 + "1231", FREQ) + output2!0 = "time" + output2!1 = "lat" + output2!2 = "lon" + format_coords(output2, YEAR1 + "0101", YEAR2 + "1231", FREQ) + output3!0 = "time" + output3!1 = "lat" + output3!2 = "lon" + format_coords(output3, "19760101", "19761231", FREQ) + + ; Calculate coordinate bounds + bounds1 = guess_coord_bounds(output1, FREQ) + bounds2 = guess_coord_bounds(output2, FREQ) + bounds3 = guess_coord_bounds(output3, FREQ) + + ; Set variable attributes + tmp = format_variable(output1, VAR, CMOR_TABLE1) + delete(output1) + output1 = tmp + delete(tmp) + tmp = format_variable(output2, "tasa", CMOR_TABLE2) + delete(output2) + output2 = tmp + delete(tmp) + tmp = format_variable(output3, VAR, CMOR_TABLE3) + delete(output3) + output3 = tmp + delete(tmp) + + ; Add height coordinate + output1@coordinates = "height" + output3@coordinates = "height" + height = 2.d + 
height!0 = "ncl_scalar" + height@units = "m" + height@axis = "Z" + height@positive = "up" + height@long_name = "height" + height@standard_name = "height" + + ; Set global attributes + gAtt1 = set_global_atts(OBSNAME, TIER, SOURCE, REF1, COMMENT1) + gAtt2 = set_global_atts(OBSNAME, TIER, SOURCE, REF2, COMMENT2) + gatt3 = set_global_atts(OBSNAME, TIER, SOURCE, REF3, COMMENT3) + + ; Write temperature time-series + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE1, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + write_nc(fout, VAR, output1, bounds1, gAtt1) + w = addfile(fout, "w") + w->height = height + delete(w) + delete(gAtt1) + delete(bounds1) + delete(output1) + + ; Write temperature anomaly time-series + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE2, VERSION, \ + MIP, "tasa", DATESTR/), "_") + ".nc" + write_nc(fout, "tasa", output2, bounds2, gAtt2) + w = addfile(fout, "w") + delete(w) + delete(gAtt2) + delete(bounds2) + delete(output2) + + ; Write temperature climatology + DATESTR = "197601-197612" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE3, VERSION, \ + MIP, VAR, DATESTR/), "_") + ".nc" + write_nc(fout, VAR, output3, bounds3, gatt3) + w = addfile(fout, "w") + w->height = height + delete(w) + delete(gatt3) + delete(bounds3) + delete(output3) + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadISST.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadISST.ncl new file mode 100644 index 0000000000..ed9a4a0bc7 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_HadISST.ncl @@ -0,0 +1,135 @@ +; ############################################################################# +; ESMValTool CMORizer for HadISST data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; http://www.metoffice.gov.uk/hadobs/hadisst/data/download.html +; +; Last access +; 20190208 +; +; Download and processing instructions +; Download and unzip "HadISST_ice.nc.gz" and "HadISST_sst.nc.gz". +; +; Modification history +; 20190208-A_hass_bg: adapted to v2. +; 20180530-A_righ_ma: fixed coordinates and metadata. +; 20170217-A_senf_da: modified so that SST fields are also written as 'tos'. +; 20150422-A_laue_ax: written. +; +; ############################################################################ +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_HadISST.ncl" + + ; Source name + OBSNAME = "HadISST" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1870 + YEAR2 = 2017 + + ; Selected variable (standard name) + VAR = (/"ts", "tos", "sic"/) + + ; Name in the raw data + NAME = (/"sst", "sst", "ice"/) + + ; MIP + MIP = (/"Amon", "Omon", "OImon"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_" + MIP + + ; Type + TYPE = "reanaly" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "http://www.metoffice.gov.uk/hadobs/hadisst/data/download.html" + REF = "Rayner et al., J. Geophys. 
Res., doi:10.1029/2002JD002670, 2003"
+  COMMENT = ""
+
+end
+
+begin
+
+  ; Loop over variables
+  do vv = 0, dimsizes(VAR) - 1
+
+    log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")")
+
+    ; Read variables
+    fname = input_dir_path + "HadISST_" + NAME(vv) + ".nc"
+    f = addfile(fname, "r")
+    if (NAME(vv).eq."sst") then
+      output = f->sst
+      output@_FillValue = -1000.
+    end if
+    if (NAME(vv).eq."ice") then
+      output = f->sic
+    end if
+
+    ; Convert units
+    if (isStrSubset(NAME(vv), "sst")) then
+      output = output + 273.15  ; [degC] --> [K]
+      output@units = "K"
+    end if
+    if (isStrSubset(NAME(vv), "ice")) then
+      output = output * 100.  ; [1] --> [%]
+      output@units = "%"
+    end if
+
+    ; Extract time period
+    date = cd_calendar(output&time, 0)
+    idx = ind(date(:, 0).ge.YEAR1 .and. date(:, 0).le.YEAR2)
+    output := output(idx, :, :)
+
+    ; Format coordinates
+    output!0 = "time"
+    output!1 = "lat"
+    output!2 = "lon"
+    format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv))
+
+    ; Set variable attributes
+    tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv))
+    delete(output)
+    output = tmp
+    delete(tmp)
+
+    ; Calculate coordinate bounds
+    bounds = guess_coord_bounds(output, FREQ(vv))
+
+    ; Set global attributes
+    gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT)
+
+    ; Output file
+    DATESTR = YEAR1 + "01-" + YEAR2 + "12"
+    fout = output_dir_path + \
+      str_join((/"OBS", OBSNAME, TYPE, VERSION, \
+                 MIP(vv), VAR(vv), DATESTR/), "_") + ".nc"
+
+    ; Write variable
+    write_nc(fout, VAR(vv), output, bounds, gAtt)
+    delete(gAtt)
+    delete(output)
+    delete(bounds)
+
+  end do
+
+end
diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_LandFlux_EVAL.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs_LandFlux_EVAL.py
new file mode 100644
index 0000000000..6650520a4b
--- /dev/null
+++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_LandFlux_EVAL.py
@@ -0,0 +1,87 @@
+"""ESMValTool CMORizer for LandFlux-EVAL data.
+
+Tier
+    Tier 3: restricted dataset.
+
+Source
+    https://data.iac.ethz.ch/landflux/
+
+Last access
+    20190516
+
+Download and processing instructions
+    Download the following files:
+        LandFluxEVAL.merged.89-05.monthly.all.nc
+    A registration is required for downloading the data (see
+    ). 
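+
+    The raw time axis encodes months as %Y%m.%f floats (a hypothetical
+    value 198901.0 would be January 1989); _fix_time_coord() below maps
+    such values to mid-month dates on a 'days since 1950-1-1' axis, e.g.:
+
+    >>> str(198901.0)[:4], str(198901.0)[4:6]
+    ('1989', '01')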
+ +""" + +import logging +import os +from datetime import datetime + +import iris +import numpy as np +from cf_units import Unit + +import esmvaltool.utils.cmorizers.obs.utilities as utils + +logger = logging.getLogger(__name__) + +CFG = utils.read_cmor_config('LandFlux-EVAL.yml') + + +def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + cube = iris.load_cube(filepath, utils.var_name_constraint(raw_var)) + _fix_time_coord(cube) + utils.fix_var_metadata(cube, cmor_info) + utils.convert_timeunits(cube, 1950) + utils.fix_coords(cube) + utils.set_global_atts(cube, attrs) + utils.save_variable( + cube, var, out_dir, attrs, unlimited_dimensions=['time']) + + +def _fix_time_coord(cube): + """Fix time coordinate (given as month as %Y%m.%f).""" + time_coord = cube.coord('time') + new_units = Unit('days since 1950-1-1 00:00:00', calendar='standard') + + # Function to convert given date to correct number + def _date2num(date_str): + """Convert data given as %Y%m.%f to number.""" + date_str = str(date_str) + year = int(date_str[:4]) + month = int(date_str[4:6]) + day = 15 + date = datetime(year, month, day) + return new_units.date2num(date) + + # Convert time coordinate array and set correct units + time_coord.points = np.vectorize(_date2num)(time_coord.points) + time_coord.units = new_units + time_coord.attributes = {} + + +def cmorization(in_dir, out_dir): + """Cmorization func call.""" + glob_attrs = CFG['attributes'] + cmor_table = CFG['cmor_table'] + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + filepath = os.path.join(in_dir, CFG['filename']) + logger.info("Found input file '%s'", filepath) + + # Run the cmorization + for (var, var_info) in CFG['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + raw_var = var_info.get('raw', var) + _extract_variable(raw_var, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_Landschuetzer2016.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs_Landschuetzer2016.py new file mode 100644 index 0000000000..8c7e85a402 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_Landschuetzer2016.py @@ -0,0 +1,115 @@ +# pylint: disable=invalid-name +"""ESMValTool CMORizer for Landschuetzer2016 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.nodc.noaa.gov/archive/arc0105/0160558/3.3/data/0-data/ + +Last access + 20190308 + +Download and processing instructions + Download the file spco2_1982-2015_MPI_SOM-FFN_v2016.nc + +Modification history + 20190227-A_lova_to: written. + +""" + +import logging +import os +from warnings import catch_warnings, filterwarnings + +import iris +from dask import array as da + +from .utilities import (constant_metadata, fix_coords, fix_var_metadata, + read_cmor_config, save_variable, set_global_atts) + +logger = logging.getLogger(__name__) + +# read in CMOR configuration +CFG = read_cmor_config('Landschuetzer2016.yml') + + +def _fix_data(cube, var): + """Specific data fixes for different variables.""" + logger.info("Fixing data ...") + with constant_metadata(cube) as metadata: + if var == 'fgco2': + # Assume standard year 365_day + cube *= -12.01 / 1000. / (86400. * 365.) 
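+            # (The constant above is, presumably, the molar mass of carbon,
+            # 12.01 g/mol, divided by 1000 and by the seconds in a 365-day
+            # year: it converts a flux in mol C m-2 yr-1 to kg C m-2 s-1,
+            # and the minus sign makes the downward flux positive, as
+            # recorded in the attribute below.)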
+ metadata.attributes['positive'] = 'down' + elif var == 'dpco2': + cube *= -1.0 * 101325. / 1.e06 + elif var == 'spco2': + cube *= 101325. / 1.e06 + return cube + + +# pylint: disable=unused-argument +def _fix_fillvalue(cube, field, filename): + """Create masked array from missing_value.""" + if hasattr(field.cf_data, 'missing_value'): + # fix for bad missing value definition + cube.data = da.ma.masked_equal(cube.core_data(), + field.cf_data.missing_value) + + +def extract_variable(var_info, raw_info, out_dir, attrs): + """Extract to all vars.""" + var = var_info.short_name + with catch_warnings(): + filterwarnings( + action='ignore', + message='Ignoring netCDF variable .* invalid units .*', + category=UserWarning, + module='iris', + ) + cubes = iris.load(raw_info['file'], callback=_fix_fillvalue) + rawvar = raw_info['name'] + + for cube in cubes: + if cube.var_name == rawvar: + fix_var_metadata(cube, var_info) + fix_coords(cube) + _fix_data(cube, var) + set_global_atts(cube, attrs) + save_variable( + cube, + var, + out_dir, + attrs, + local_keys=['positive'], + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir): + """Cmorization func call.""" + cmor_table = CFG['cmor_table'] + glob_attrs = CFG['attributes'] + + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + + # run the cmorization + for var, vals in CFG['variables'].items(): + inpfile = os.path.join(in_dir, vals['file']) + logger.info("CMORizing var %s from file %s", var, inpfile) + var_info = cmor_table.get_variable(vals['mip'], var) + raw_info = {'name': vals['raw'], 'file': inpfile} + glob_attrs['mip'] = vals['mip'] + with catch_warnings(): + filterwarnings( + action='ignore', + message=('WARNING: missing_value not used since it\n' + 'cannot be safely cast to variable data type'), + category=UserWarning, + module='iris', + ) + extract_variable(var_info, raw_info, out_dir, glob_attrs) diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_MODIS.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_MODIS.ncl new file mode 100644 index 0000000000..5bfcfe1bc6 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_MODIS.ncl @@ -0,0 +1,247 @@ +; ############################################################################# +; ESMValTool CMORizer for MODIS data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; https://ladsweb.modaps.eosdis.nasa.gov/search/order +; +; Last access +; 20190209 +; +; Download and processing instructions +; In Products: select "MODIS Aqua", "Collection 6.1" and +; "L3 Atmosphere Product", click on MYD08_M3. +; In Time: select from 2000-01-01 to today. +; In Location: skip, the global domain will be applied. +; In Files: select all. +; Submit the order. +; A registration is required to download the data. +; +; Caveats +; clwvi and clivi data are in-cloud values whereas CMIP5 models provide +; grid-box averages --> multiply MODIS clwvi and clivi values with cloud +; fraction as a first guess +; +; Modification history +; 20180209-A_righ_ma: fixed bug in lwpStderr. +; 20180209-A_hass_bg: adapted to v2. +; 20180810-A_righ_ma: fix minor calendar issue. +; 20180806-A_righ_ma: code cleaning. +; 20170116-A-laue_ax: using cirrus fraction to convert lwp and iwp +; from in-cloud values to gridbox averages +; (test). 
+; 20160408-A-laue_ax: added processing of uncertainties +; removed suffixes for variables names. +; 20151118-A-laue_ax: bugfix: added unit conversion +; clivi, clwvi: g/m2 --> kg/m2 +; clt: frac --> % +; 20150430-eval_ma: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_MODIS.ncl" + + ; Source name + OBSNAME = "MODIS" + + ; Tier + TIER = 3 + + ; Selected variable (standard name) + VAR = (/"clwvi", \ + "clivi", \ + "clt", \ + "lwpStderr", \ + "iwpStderr", \ + "od550aer"/) + + ; Name in the raw data + NAME = (/"Cloud_Water_Path_Liquid_Mean_Mean", \ + "Cloud_Water_Path_Ice_Mean_Mean", \ + "Cloud_Fraction_Mean_Mean", \ + "Cloud_Water_Path_Liquid_Mean_Uncertainty", \ + "Cloud_Water_Path_Ice_Mean_Uncertainty", \ + "AOD_550_Dark_Target_Deep_Blue_Combined_Mean_Mean"/) + + ; MIP + MIP = (/"Amon", "Amon", "Amon", "Amon", "Amon", "aero"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon", "mon", "mon"/) + + ; Version + VERSION = "MYD08_M3" + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/" + \ + (/"cmip5/Tables/CMIP5_Amon", \ + "cmip5/Tables/CMIP5_Amon", \ + "cmip5/Tables/CMIP5_Amon", \ + "custom/CMOR_lwpStderr.dat", \ + "custom/CMOR_iwpStderr.dat", \ + "cmip5/Tables/CMIP5_aero"/) + + ; Type + TYPE = "sat" + + ; Global attributes + SOURCE = "https://ladsweb.modaps.eosdis.nasa.gov/search/order" + REF = "" + COMMENT = "" + +end + +begin + + ; List of files + FILES = systemfunc("ls -1 " + input_dir_path + VERSION + ".A*.hdf") + + do ff = 0, dimsizes(FILES) - 1 + + fin = addfile(FILES(ff), "r") + + ; Get time + infile = systemfunc("basename " + FILES(ff)) + date = yyyyddd_to_yyyymmdd(toint(str_get_cols(infile, 10, 16))) + year = toint(str_get_cols(tostring(date), 0, 3)) + month = toint(str_get_cols(tostring(date), 4, 5)) + dm = days_in_month(year, month) + + ; Loop over variables to fetch from input file + do vv = 0, dimsizes(VAR) - 1 + + invar = fin->$NAME(vv)$ + invar_fv = invar@_FillValue + invar_coords = invar + invar := tofloat(invar) + invar := where(invar.eq.tofloat(invar_fv), \ + default_fillvalue("float"), invar) + + ; Special case clwvi as the sum lwp + iwp + if (VAR(vv).eq."clwvi") then + if (NAME(vv).ne."Cloud_Water_Path_Liquid_Mean_Mean") then + error_msg("f", DIAG_SCRIPT, "", "cannot calculate clwvi") + end if + + ; Read cirrus fraction + ; cfin = fin->Cirrus_Fraction_SWIR_FMean + cfin = fin->Cirrus_Fraction_Infrared_FMean + cif = tofloat(cfin * cfin@scale_factor + cfin@add_offset) + ; liquid fraction is estimated assuming random overlap, i.e. 
+ ; ctot = 1 - (1 - cif) * (1 - lif) + ; --> lif = 1 - (1 - ctot) / (1 - cif) + delete(cfin) + cfin = fin->Cloud_Fraction_Mean_Mean + ctot = tofloat(cfin * cfin@scale_factor + cfin@add_offset) + delete(cfin) + cif = where(cif.gt.0.999, cif@_FillValue, cif) + lif = 1.0 - (1.0 - ctot) / (1.0 - cif) + lif = where(lif.lt.0, 0, lif) + tmpvar = fin->Cloud_Water_Path_Ice_Mean_Mean ; read ice water path + tmpvar_fv = tmpvar@_FillValue + tmpvar := tofloat(tmpvar) + tmpvar := where(tmpvar.eq.tofloat(tmpvar_fv), \ + default_fillvalue("float"), \ + tmpvar) + tmpvar = tmpvar * cif ; convert iwp in-cloud value to gridbox avg + invar = invar * lif ; convert lwp in-cloud value to grid-box avg + invar = invar + tmpvar ; clwvi = lwp + iwp + delete(tmpvar) + delete(lif) + delete(cif) + invar = 0.001 * invar ; [g/m2] --> [kg/m2] + end if + + ; lwp and iwp are in-cloud values + ; convert lwp/iwp to grid-box averages by multiplying with + ; average cloud fraction (not optimum but best we can do at the moment) + if (any((/"clivi", "iwpStderr", "lwpStderr"/) .eq. VAR(vv))) then + + ; Read cirrus fraction (0-1) + ; cfin = fin->Cirrus_Fraction_SWIR_FMean + cfin = fin->Cirrus_Fraction_Infrared_FMean + cf = tofloat(cfin * cfin@scale_factor + cfin@add_offset) + delete(cfin) + if (VAR(vv).eq."lwpStderr") then + cfin = fin->Cloud_Fraction_Mean_Mean + ctot = tofloat(cfin * cfin@scale_factor + cfin@add_offset) + delete(cfin) + cif = where(cf.gt.0.999, cf@_FillValue, cf) + cf = 1.0 - (1.0 - ctot) / (1.0 - cif) + cf = where(cf.lt.0, 0, cf) + delete(cif) + delete(ctot) + end if + invar = invar * cf ; ; "grid-box average" lwp/iwp + delete(cf) + invar = 0.001 * invar ; [g/m2] --> [kg/m2] + end if + + invar@_FillValue = default_fillvalue("float") + copy_VarCoords(invar_coords, invar) + if (isatt(invar_coords, "scale_factor")) then + invar = invar * tofloat(invar_coords@scale_factor) + end if + if (isatt(invar_coords, "add_offset")) then + invar = invar + tofloat(invar_coords@add_offset) + end if + + if (VAR(vv).eq."clt") then + invar = 100.0 * invar ; [1] --> [%] + end if + + ; Create output variable + lat = fin->YDim + lon = fin->XDim + output = new((/1, dimsizes(lat), dimsizes(lon)/), float) + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + output&time = cd_inv_calendar(year, month, 15, 0, 0, 0, TUNITS, 0) + output&lat = lat + output&lon = lon + output(0, :, :) = (/invar/) + delete(invar) + delete(invar_coords) + + ; Format coordinates + format_coords(output, year + sprinti("%0.2i", month) + "01", \ + year + sprinti("%0.2i", month) + dm, FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = \ + year + sprinti("%0.2i", month) + "-" + year + sprinti("%0.2i", month) + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, str_sub_str(VERSION, "_", "-"), \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_MTE.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs_MTE.py new file mode 100644 index 0000000000..3d2b193003 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_MTE.py @@ -0,0 +1,74 @@ +"""ESMValTool CMORizer for MTE data. 
+ +Tier + Tier 3: restricted dataset. + +Source + http://www.bgc-jena.mpg.de/geodb/BGI/Home + +Last access + 20190507 + +Download and processing instructions + Download the following files: + EnsembleGPP_GL.nc + A registration is required for downloading the data. + +""" + +import logging +import os + +import iris + +import esmvaltool.utils.cmorizers.obs.utilities as utils + +logger = logging.getLogger(__name__) + +CFG = utils.read_cmor_config('MTE.yml') + + +def _get_filepath(in_dir, basename): + """Find correct name of file (extend basename with timestamp).""" + all_files = [ + f for f in os.listdir(in_dir) + if os.path.isfile(os.path.join(in_dir, f)) + ] + for filename in all_files: + if filename.endswith(basename): + return os.path.join(in_dir, filename) + raise OSError( + f"Cannot find input file ending with '{basename}' in '{in_dir}'") + + +def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + cube = iris.load_cube(filepath, utils.var_name_constraint(raw_var)) + utils.fix_var_metadata(cube, cmor_info) + utils.convert_timeunits(cube, 1950) + utils.fix_coords(cube) + utils.set_global_atts(cube, attrs) + utils.flip_dim_coord(cube, 'latitude') + utils.save_variable( + cube, var, out_dir, attrs, unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir): + """Cmorization func call.""" + glob_attrs = CFG['attributes'] + cmor_table = CFG['cmor_table'] + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + filepath = _get_filepath(in_dir, CFG['filename']) + logger.info("Found input file '%s'", filepath) + + # Run the cmorization + for (var, var_info) in CFG['variables'].items(): + logger.info("CMORizing variable '%s'", var) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + raw_var = var_info.get('raw', var) + _extract_variable(raw_var, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_NCEP.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_NCEP.ncl new file mode 100644 index 0000000000..39926d7df7 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_NCEP.ncl @@ -0,0 +1,253 @@ +; ############################################################################# +; ESMValTool CMORizer for NCEP data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; http://www.esrl.noaa.gov/psd/data/gridded/data.ncep.reanalysis.html +; +; Last access +; 20190204 +; +; Download and processing instructions +; To facilitate the download, the links to the ftp server are provided. +; Since the filenames are sometimes identical across different +; save the data in two subdirectories in input_dir_path. 
+; Subdirectory pressure/: +; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/pressure/ +; air.mon.mean.nc +; hgt.mon.mean.nc +; rhum.mon.mean.nc +; shum.mon.mean.nc +; uwnd.mon.mean.nc +; vwnd.mon.mean.nc +; omega.mon.mean.nc +; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/pressure/ +; uwnd.????.nc +; vwnd.????.nc +; Subdirectory surface/: +; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/surface/ +; air.mon.mean.nc +; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.derived/surface_gauss/ +; prate.mon.mean.nc +; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/surface_gauss/ +; prate.sft.gauss.????.nc +; ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis.dailyavgs/other_gauss/ +; ulwrf.ntat.gauss.????.nc +; +; Select the section "Pressure" and "Surface" and download the variables +; listed below. Since raw data on pressure levels and for surface have the +; same file and variable name, save the data in two different subdirectories +; "press" and "surf" in input_dir_path. +; Specify the time range of the data as YEAR1-YEAR2 below, considering only +; complete years (Jan to Dec). +; +; Modification history +; 20190204-A_righ_ma: merged with NCEP-daily and adapted to v2. +; 20140128-A_righ_ma: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_NCEP.ncl" + + ; Source name + OBSNAME = "NCEP" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1948 + YEAR2 = 2018 + + ; Selected variable (standard name) + VAR = (/"ta", "zg", "hur", "hus", "ua", \ + "va", "wap", "tas", "pr", \ + "ua", "va", "pr", "rlut"/) + + ; Name in the raw data + NAME = (/"air", "hgt", "rhum", "shum", "uwnd", \ + "vwnd", "omega", "air", "prate", \ + "uwnd", "vwnd", "prate", "ulwrf"/) + + ; Subdirectory + SUBDIR = (/"pressure/", "pressure/", "pressure/", "pressure/", "pressure/", \ + "pressure/", "pressure/", "surface/", "surface/", \ + "pressure/", "pressure/", "surface/", "surface/"/) + + ; Expected units (according to CMOR standard) + EXPUNITS = (/"K", "m", "%", "1", "m/s", \ + "m/s", "Pascal/s", "K", "Kg/m^2/s", \ + "m/s", "m/s", "Kg/m^2/s", "W/m^2"/) + + ; MIP + MIP = (/"Amon", "Amon", "Amon", "Amon", "Amon", \ + "Amon", "Amon", "Amon", "Amon", \ + "day", "day", "day", "day"/) + + ; Frequency + FREQ = (/"mon", "mon", "mon", "mon", "mon", \ + "mon", "mon", "mon", "mon", \ + "day", "day", "day", "day"/) + + ; CMOR tables + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_" + MIP + + ; Type + TYPE = "reanaly" + + ; Version + VERSION = "1" + + ; Global attributes + SOURCE = "http://www.esrl.noaa.gov/psd/data/gridded/" + \ + "data.ncep.reanalysis.html" + REF = "Kalnay et al., B. Am. Meteorol. 
Soc., " + \ + "doi:10.1175/1520-0477(1996)077<0437:TNYRP>2.0.CO;2, 1996" + COMMENT = "" + +end + +begin + + ; Loop over variables + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yy = YEAR1, YEAR2 + + if (isStrSubset(MIP(vv), "mon")) then + fname = input_dir_path + SUBDIR(vv) + NAME(vv) + ".mon.mean.nc" + end if + + if (isStrSubset(MIP(vv), "day")) then + flist = systemfunc("ls " + input_dir_path + SUBDIR(vv) + NAME(vv) + \ + ".*" + yy + ".nc") + fname = flist(0) + delete(flist) + end if + + f = addfile(fname, "r") + tmp = f->$NAME(vv)$ + delete(fname) + delete(f) + fill_start = num(ismissing(tmp)) + + ; Extract time range + tmp&time@calendar = "standard" + date = cd_calendar(tmp&time, 0) + idx = ind(date(:, 0).eq.yy) + if (dimsizes(dimsizes(tmp)).eq.4) then + output = tmp(idx, :, :, :) + else + output = tmp(idx, :, :) + end if + delete(idx) + delete(tmp) + delete(date) + + ; Unpack variable according to metadata information + if (isatt(output, "scale_factor") .or. \ + isatt(output, "add_offset")) then + tmp = output * output@scale_factor + output@add_offset + copy_VarMeta(output, tmp) + delete(output) + output = tmp + delete(tmp) + end if + + ; Convert units + if (any(VAR(vv).eq.(/"ta", "tas"/)) .and. \ + output@units.eq."degC") then + output = output + 273.15 + output@units = "K" + end if + if (VAR(vv).eq."hus" .and. output@units.eq."grams/kg") then + output = output / 1000. + output@units = "1" + end if + if (output@units.ne.EXPUNITS(vv)) then + error_msg("f", DIAG_SCRIPT, "", \ + "possibly wrong input units for " + VAR(vv)) + end if + + rank = dimsizes(dimsizes(output)) + output!0 = "time" + if (rank.eq.4) then + output!1 = "plev" + output!2 = "lat" + output!3 = "lon" + output&plev = output&plev * 100. 
; [mb] --> [Pa] + elseif (rank.eq.3) + output!1 = "lat" + output!2 = "lon" + end if + + ; Format coordinates + format_coords(output, yy + "0101", yy + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Check fill values + fill_end = num(ismissing(output)) + if (fill_start.ne.fill_end) then + error_msg("f", DIAG_SCRIPT, "", \ + "missing values lost during conversion") + end if + delete(fill_start) + delete(fill_end) + + ; Output file + DATESTR = yy + "01-" + yy + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Add height coordinate to tas variable (required by the new backend) + if (VAR(vv).eq."tas") then + output@coordinates = "height" + end if + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + ; Add height coordinate to tas variable (required by the new backend) + if (VAR(vv).eq."tas") then + height = 2.d + height!0 = "ncl_scalar" + height@units = "m" + height@axis = "Z" + height@positive = "up" + height@long_name = "height" + height@standard_name = "height" + w = addfile(fout, "w") + w->height = height + delete(w) + end if + + end do + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_NIWA-BS.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_NIWA-BS.ncl new file mode 100644 index 0000000000..78cbb15b40 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_NIWA-BS.ncl @@ -0,0 +1,117 @@ +; ############################################################################# +; ESMValTool CMORizer for NIWA-BS data +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset. +; +; Source +; http://www.bodekerscientific.com/data/total-column-ozone +; +; Last access +; 20190207 +; +; Download and processing instructions +; To get the access data send an email to datasets@bodekerscientific.com +; Download all files from +; ftp://ftp.bodekerscientific.com/CombinedTCOV3.3/Monthly/Patched/NetCDF/ +; Newer versions may become available over time, but make sure to download +; the patched one. Only complete years should be downloaded. +; +; Modification history +; 20190207-A_righ_ma: renamed to NIWA-BS and adapted to v2. +; 20140528-A_gott_kl: written. +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_NIWA-BS.ncl" + + ; Source name + OBSNAME = "NIWA-BS" + + ; Tier + TIER = 3 + + ; Period + YEAR1 = 1979 + YEAR2 = 2016 + + ; Selected variable (standard name) + VAR = (/"toz", "tozStderr"/) + + ; Name in the raw data + NAME = (/"tco", "tco_uncert"/) + + ; MIP + MIP = (/"Amon", "Amon"/) + + ; Frequency + FREQ = (/"mon", "mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + "/cmor/tables/custom/CMOR_" + \ + VAR + ".dat" + + ; Type + TYPE = "sat" + + ; Version + VERSION = "V3.3" + + ; Global attributes + SOURCE = "http://www.bodekerscientific.com/data/total-column-ozone" + REF = "Bodeker et al., Atmos. Chem. 
Phys., doi:10.5194/acp-5-2603-2005, 2005" + COMMENT = "" + +end + +begin + + files = systemfunc("ls " + input_dir_path + \ + "NIWA-BS_CombinedTCO_" + VERSION + \ + "_????_Monthly_Patched.nc") + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + f = addfiles(files, "r") + output = f[:]->$NAME(vv)$ + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = YEAR1 + "01-" + YEAR2 + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, str_sub_str(VERSION, "V", "v"), \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_PATMOS-x.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_PATMOS-x.ncl new file mode 100644 index 0000000000..0cb2a861b5 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_PATMOS-x.ncl @@ -0,0 +1,223 @@ +; ############################################################################# +; ESMValTool CMORizer for PATMOS-x data +; ############################################################################# +; +; Tier +; Tier 2: other freely-available dataset. +; +; Source +; https://www.ncdc.noaa.gov/cdr/atmospheric/avhrr-cloud-properties-patmos-x +; +; Last access +; 20190210 +; +; Download and processing instructions +; Click on Download and download all the NOAA data, excluding the +; preliminary, e.g. with: +; wget -r --accept '*NOAA*.nc' --reject '*preliminary*' +; Put all files in input_dir_path (no subdirectories with years). +; Select only complete years for both ascending and descending orbit. +; +; Caveats +; The data are processed by calculating the average of the ascending and the +; descending orbit on each day. Multiple files are available for some days, +; in this case the most recent version (NOAA-vv) is chosen. + +; Modification history +; 20190208-A_righ_ma: written. 
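+;
+; A minimal sketch of the daily averaging used below, assuming xasc and
+; xdes hold the two orbits for one day: gaps in each orbit are first
+; filled from the other, so that values seen by only one orbit are not
+; halved in the average:
+;
+;   xasc = where(ismissing(xasc), xdes, xasc)
+;   xdes = where(ismissing(xdes), xasc, xdes)
+;   daily = 0.5 * (xasc + xdes)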
+; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "cmorize_obs_PATMOS-x.ncl" + + ; Source name + OBSNAME = "PATMOS-x" + + ; Tier + TIER = 2 + + ; Period + YEAR1 = 1982 + YEAR2 = 2016 + + ; Selected variable (standard name) + VAR = (/"clt"/) + + ; Name in the raw data + NAME = (/"cloud_fraction"/) + + ; MIP + MIP = (/"Amon"/) + + ; Frequency + FREQ = (/"mon"/) + + ; CMOR table + CMOR_TABLE = getenv("esmvaltool_root") + \ + "/cmor/tables/cmip5/Tables/CMIP5_" + MIP + + ; Type + TYPE = "sat" + + ; Version + VERSION = "NOAA" + + ; Global attributes + SOURCE = "https://www.ncdc.noaa.gov/cdr/atmospheric/avhrr-cloud-" + \ + "properties-patmos-x" + REF = "Heidinger et al., NOAA National Centers for Environmental " + \ + "Information, doi:10.7289/V5348HCK, last access: 10 February 2019" + COMMENT = "" + +end + +begin + + ; Read coordinates + files = systemfunc("ls " + input_dir_path + "patmosx_*" + YEAR1 + "*.nc") + f = addfile(files(0), "r") + tmp = f->latitude + lat = tmp * tmp@scale_factor + tmp@add_offset + nlat = dimsizes(lat) + delete(tmp) + tmp = f->longitude + lon = tmp * tmp@scale_factor + tmp@add_offset + nlon = dimsizes(lon) + delete(tmp) + delete(files) + delete(f) + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yy = YEAR1, YEAR2 + + ; Define output monthly-mean array + output = new((/12, nlat, nlon/), float) + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + output&time = create_timec(yy, yy) + output&lat = lat + output&lon = lon + + do mm = 1, 12 + + ; Number of days + nd = days_in_month(yy, mm) + + ; Define local array + output_temp = new((/nd, nlat, nlon/), float) + + ; Date string for this month + yyyymm = yy + sprinti("%0.2i", mm) + + do dd = 1, nd + + ; Date string for this day + yyyymmdd = yy + sprinti("%0.2i", mm) + sprinti("%0.2i", dd) + + ; Ascending orbit + files_asc = systemfunc("ls " + input_dir_path + \ + "patmosx_v??r??_NOAA-??_asc_d" + \ + yyyymm + "??_c*.nc | grep asc_d" + yyyymmdd) + if (.not.all(ismissing(files_asc))) then + ; Read most recent file + f = addfile(files_asc(dimsizes(files_asc) - 1), "r") + tmp = f->$NAME(vv)$ + xasc = tmp * tmp@scale_factor + tmp@add_offset + delete(tmp) + end if + delete(files_asc) + + ; Descending orbit + files_des = systemfunc("ls " + input_dir_path + \ + "patmosx_v??r??_NOAA-??_des_d" + \ + yyyymm + "??_c*.nc | grep des_d" + yyyymmdd) + if (.not.all(ismissing(files_des))) then + ; Read most recent file + f = addfile(files_des(dimsizes(files_des) - 1), "r") + tmp = f->$NAME(vv)$ + xdes = tmp * tmp@scale_factor + tmp@add_offset + delete(tmp) + end if + delete(files_des) + + ; Skip if no data defined (output_temp will stay missing) + if (.not.isdefined("xasc") .and. 
.not.isdefined("xdes")) then
+            continue
+          end if
+
+          if (.not.isdefined("xasc")) then
+            output_temp(dd - 1, :, :) = (/xdes/)
+            delete(xdes)
+            continue
+          end if
+
+          if (.not.isdefined("xdes")) then
+            output_temp(dd - 1, :, :) = (/xasc/)
+            delete(xasc)
+            continue
+          end if
+
+          ; Replace missing values in one orbit with valid values from the
+          ; other orbit, to avoid propagating missing values while averaging
+          xasc = where(ismissing(xasc), xdes, xasc)
+          xdes = where(ismissing(xdes), xasc, xdes)
+
+          output_temp(dd - 1, :, :) = 0.5 * (xasc + xdes)
+          delete(xasc)
+          delete(xdes)
+
+        end do  ; day
+
+        ; Monthly mean
+        output(mm - 1, :, :) = dim_avg_n(output_temp, 0)
+        delete(output_temp)
+
+      end do  ; month
+
+      if (VAR(vv).eq."clt") then
+        output = 100. * output  ; [1] --> [%]
+      end if
+
+      ; Format coordinates
+      output!0 = "time"
+      output!1 = "lat"
+      output!2 = "lon"
+      format_coords(output, yy + "0101", yy + "1231", FREQ(vv))
+
+      ; Set variable attributes
+      tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv))
+      delete(output)
+      output = tmp
+      delete(tmp)
+
+      ; Calculate coordinate bounds
+      bounds = guess_coord_bounds(output, FREQ(vv))
+
+      ; Set global attributes
+      gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT)
+
+      ; Output file
+      DATESTR = yy + "01-" + yy + "12"
+      fout = output_dir_path + \
+        str_join((/"OBS", OBSNAME, TYPE, VERSION, \
+                   MIP(vv), VAR(vv), DATESTR/), "_") + ".nc"
+
+      ; Write variable
+      write_nc(fout, VAR(vv), output, bounds, gAtt)
+      delete(gAtt)
+      delete(output)
+      delete(bounds)
+
+    end do  ; year
+
+  end do  ; variable
+
+end
diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_UWisc.ncl b/esmvaltool/utils/cmorizers/obs/cmorize_obs_UWisc.ncl
new file mode 100644
index 0000000000..f392273fcd
--- /dev/null
+++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_UWisc.ncl
@@ -0,0 +1,126 @@
+; #############################################################################
+; ESMValTool CMORizer for UWisc data
+; #############################################################################
+;
+; Tier
+;    Tier 3: restricted dataset.
+;
+; Source
+;    Data provided by Ralf Bennartz.
+;
+; Last access
+;    20150415
+;
+; Download and processing instructions
+;    Contact Ralf Bennartz (Earth and Environmental Sciences, Vanderbilt
+;    University, USA).
+;
+; Modification history
+;    20190208-A_righ_ma: adapted to v2.
+;
+; #############################################################################
loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/interface.ncl")
+begin
+
+  ; Script name (for logger)
+  DIAG_SCRIPT = "cmorize_obs_UWisc.ncl"
+
+  ; Source name
+  OBSNAME = "UWisc"
+
+  ; Tier
+  TIER = 3
+
+  ; Period
+  YEAR1 = 1988
+  YEAR2 = 2007
+
+  ; Selected variable (standard name)
+  VAR = (/"lwp", "lwpStderr"/)
+
+  ; Name in the raw data
+  NAME = (/"LWP", "LWP_ERROR"/)
+
+  ; Conversion factor
+  CONVERSION = (/1.e-3, 1.e-3/)
+
+  ; MIP
+  MIP = (/"Amon", "Amon"/)
+
+  ; Frequency
+  FREQ = (/"mon", "mon"/)
+
+  ; CMOR table
+  CMOR_TABLE = getenv("esmvaltool_root") + \
+    "/cmor/tables/custom/CMOR_" + VAR + ".dat"
+
+  ; Type
+  TYPE = "sat"
+
+  ; Version
+  VERSION = "v2"
+
+  ; Global attributes
+  SOURCE = "Data provided by Ralf Bennartz (Vanderbilt University, USA)"
+  REF = "O'Dell et al., J. 
Clim., doi:10.1175/2007JCLI1958.1, 2008" + COMMENT = "" + +end + +begin + + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") + + do yr = YEAR1, YEAR2 + + fname = input_dir_path + "UWisc_LWPMEAN_" + yr + "_v2.nc" + + f = addfile(fname, "r") + output = (/f->$NAME(vv)$/) + + lat = (/f->lat/) + lon = (/f->lon/) + + ; Convert units + output = output * CONVERSION(vv) ; for clivi this will be equal 0 + + ; Format coordinates + output!0 = "time" + output!1 = "lat" + output!2 = "lon" + output&time = create_timec(yr, yr) + output&lat = lat + output&lon = lon + format_coords(output, yr + "0101", yr + "1231", FREQ(vv)) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ(vv)) + + ; Set global attributes + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yr + "01-" + yr + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + end do + + end do + +end diff --git a/esmvaltool/utils/cmorizers/obs/cmorize_obs_WOA.py b/esmvaltool/utils/cmorizers/obs/cmorize_obs_WOA.py new file mode 100644 index 0000000000..e06053b05f --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/cmorize_obs_WOA.py @@ -0,0 +1,95 @@ +# pylint: disable=invalid-name +"""ESMValTool CMORizer for WOA data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://data.nodc.noaa.gov/woa/WOA13/DATAv2/ + +Last access + 20190131 + +Download and processing instructions + Download the following files: + temperature/netcdf/decav81B0/1.00/woa13_decav81B0_t00_01.nc + salinity/netcdf/decav81B0/1.00/woa13_decav81B0_s00_01.nc + oxygen/netcdf/all/1.00/woa13_all_o00_01.nc + nitrate/netcdf/all/1.00/woa13_all_n00_01.nc + phosphate/netcdf/all/1.00/woa13_all_p00_01.nc + silicate/netcdf/all/1.00/woa13_all_i00_01.nc + +Modification history + 20130328-A_lova_to: cmorizer revision + 20190131-A_pred_va: adapted to v2. + 20190131-A_demo_le: written. + +""" + +import logging +import os + +import iris + +from .utilities import (constant_metadata, convert_timeunits, fix_coords, + fix_var_metadata, read_cmor_config, save_variable, + set_global_atts) + +logger = logging.getLogger(__name__) + +# read in CMOR configuration +CFG = read_cmor_config('WOA.yml') + + +def _fix_data(cube, var): + """Specific data fixes for different variables.""" + logger.info("Fixing data ...") + with constant_metadata(cube): + mll_to_mol = ['po4', 'si', 'no3'] + if var in mll_to_mol: + cube /= 1000. # Convert from ml/l to mol/m^3 + elif var == 'thetao': + cube += 273.15 # Convert to Kelvin + elif var == 'o2': + cube *= 44.661 / 1000. 
# Convert from ml/l to mol/m^3 + return cube + + +def extract_variable(var_info, raw_info, out_dir, attrs, year): + """Extract to all vars.""" + var = var_info.short_name + cubes = iris.load(raw_info['file']) + rawvar = raw_info['name'] + + for cube in cubes: + if cube.var_name == rawvar: + fix_var_metadata(cube, var_info) + convert_timeunits(cube, year) + fix_coords(cube) + _fix_data(cube, var) + set_global_atts(cube, attrs) + save_variable( + cube, var, out_dir, attrs, unlimited_dimensions=['time']) + + +def cmorization(in_dir, out_dir): + """Cmorization func call.""" + cmor_table = CFG['cmor_table'] + glob_attrs = CFG['attributes'] + + logger.info("Starting cmorization for Tier%s OBS files: %s", + glob_attrs['tier'], glob_attrs['dataset_id']) + logger.info("Input data from: %s", in_dir) + logger.info("Output will be written to: %s", out_dir) + + # run the cmorization + for var, vals in CFG['variables'].items(): + yr = None + for yr in CFG['custom']['years']: + file_suffix = str(yr)[-2:] + '_' + str(yr + 1)[-2:] + '.nc' + inpfile = os.path.join(in_dir, vals['file'] + file_suffix) + logger.info("CMORizing var %s from file %s", var, inpfile) + var_info = cmor_table.get_variable(vals['mip'], var) + raw_info = {'name': vals['raw'], 'file': inpfile} + glob_attrs['mip'] = vals['mip'] + extract_variable(var_info, raw_info, out_dir, glob_attrs, yr) diff --git a/esmvaltool/utils/cmorizers/obs/interface.ncl b/esmvaltool/utils/cmorizers/obs/interface.ncl new file mode 100644 index 0000000000..16bcabb790 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/interface.ncl @@ -0,0 +1,25 @@ +; ############################################################################# +; INTERFACE TO HANDLE THE COMMUNICATION BETWEEN THE PYTHON WORKFLOW AND NCL +; ############################################################################# +; Load the automatically-generated settings.ncl for the current dataset, +; utility and logging functions. +; +; No functions/procedures shall be further added to this script. +; ############################################################################# + +; Load interface settings +loadscript("$settings") + +; Load logging functions +loadscript(getenv("esmvaltool_root") + "/interface_scripts/logging.ncl") + +; Load utility functions +loadscript(getenv("esmvaltool_root") + "/utils/cmorizers/obs/utilities.ncl") + +; Check trailing slash +if (str_get_cols(input_dir_path, -1, -1).ne."/") then + input_dir_path = input_dir_path + "/" +end if +if (str_get_cols(output_dir_path, -1, -1).ne."/") then + output_dir_path = output_dir_path + "/" +end if diff --git a/esmvaltool/utils/cmorizers/obs/utilities.ncl b/esmvaltool/utils/cmorizers/obs/utilities.ncl new file mode 100644 index 0000000000..09690b96ac --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/utilities.ncl @@ -0,0 +1,1713 @@ +; ############################################################################# +; FUNCTIONS FOR THE reformat_obs_*.ncl SCRIPTS +; ############################################################################# +; General purpose functions called by the reformat_obs_*.ncl scripts. 
+; +; Contents +; function create_timec +; procedure format_time +; procedure format_plev +; procedure format_lev +; procedure format_lat +; procedure format_lon +; procedure format_coords +; function read_cmor +; function format_variable +; function guess_bounds_time +; function guess_bounds_lev +; function guess_bounds_lat +; function guess_bounds_lon +; function guess_coord_bounds +; function set_global_atts +; procedure write_nc +; procedure write_nc_profile +; function set_size_array +; function process_EBAS_data +; +; ############################################################################# + +; Time units +TUNITS = "days since 1950-01-01 00:00:00" + +; CMOR FillValue +FILL = 1.e+20 + +; ############################################################################# +undef("create_timec") +function create_timec(y1:integer, + y2:integer) +; +; Arguments +; y1: start year of the time range. +; y2: end year of the time range. +; +; Return value +; A one-dimensional array of size 12*(y2-y1+1). +; +; Description +; Create a monthly time coordinate for the given time range. +; +; Modification history +; 20140124-A_righ_ma: written. +; +local funcname, scriptname, yy, mm, out +begin + + funcname = "create_timec" + scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + + out = new(12 * (y2 - y1 + 1), double) + do yy = y1, y2 + do mm = 1, 12 + out(12 * (yy - y1) + mm - 1) = \ + cd_inv_calendar(yy, mm, 15, 0, 0, 0, TUNITS, 0) + end do + end do + + return(out) + +end + +; ############################################################################# +undef("format_time") +procedure format_time(var:numeric, + start_date:string, + end_date:string, + frequency:string) +; +; Arguments +; var: input variable +; start_date: start date as YYYYMMDD +; end_date: end date as YYYYMMDD +; frequency: time frequency ("3hr", "6hr", "day", "mon", "yr") +; +; Description +; Check the time range of the time coordinate, set the values depending on +; the given frequency and set the standard CMOR attributes. +; +; Caveats +; +; References +; +; Modification history +; 20190216-A_righ_ma: written. +; +local funcname, scriptname, ctime, ntime, year1, month1, day1, year2, month2, \ + day2, calendar, date, exp_ntime, yy, mm, m1, m2, dd, opt, newtime +begin + + funcname = "format_time" + scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + + ; Check supported frequency + if (all(frequency.ne.(/"3hr", "6hr", "day", "mon", "yr"/))) then + error_msg("f", scriptname, funcname, "unsupported frequency " + frequency) + end if + + ; Read coordinate + ctime = var&time + ntime = dimsizes(ctime) + + ; Check monotonicity + if (ntime.ne.1 .and. isMonotonic(ctime).ne.1) then + error_msg("f", scriptname, funcname, \ + "non-monotonically-increasing time coordinate") + end if + + ; Extract expected start/end date + year1 = toint(str_get_cols(start_date, 0, 3)) + month1 = toint(str_get_cols(start_date, 4, 5)) + day1 = toint(str_get_cols(start_date, 6, 7)) + year2 = toint(str_get_cols(end_date, 0, 3)) + month2 = toint(str_get_cols(end_date, 4, 5)) + day2 = toint(str_get_cols(end_date, 6, 7)) + + ; Set array of zeros with the same dimensionality for cd_inv_calendar + zero = year1 + zero = 0 + + ; Set calendar + if (isatt(ctime, "calendar")) then + calendar = ctime@calendar + + ; Special treatment for proleptic_gregorian calendars + ; (not supported by 'days_in_month(...)' + if (calendar .eq. "proleptic_gregorian") then + if (year1 .lt. 
1582) then + error_msg("w", scriptname, funcname, \ + "changing calendar from proleptic_gregorian " + \ + "to gregorian prior to 1582") + end if + calendar = "gregorian" + end if + + else + calendar = "standard" + end if + + ; Actual date, to be compared with the expected date + date = cd_calendar(ctime, 0) + delete(ctime) + + ; Yearly frequency + if (frequency.eq."yr") then + + ; Check size + exp_ntime = year2 - year1 + 1 + if (ntime.ne.exp_ntime) then + error_msg("f", scriptname, funcname, \ + "incorrect number of timesteps in input data: " + exp_ntime + \ + " expected, " + ntime + " found") + end if + + ; Reset date (1st of July) + do yy = 0, ntime - 1 + date(yy, 0) = year1 + yy + end do + date(:, 1) = 7 + date(:, 2) = 1 + date(:, 3:5) = 0 + + end if + + ; Monthly frequency + if (frequency.eq."mon") then + + ; Check size + if (year1.eq.year2) then + exp_ntime = month2 - month1 + 1 + else + exp_ntime = (12 - month1 + 1) + month2 + \ + 12 * where((year2 - year1 - 1).gt.0, year2 - year1 - 1, 0) + end if + if (ntime.ne.exp_ntime) then + error_msg("f", scriptname, funcname, \ + "incorrect number of timesteps in input data: " + exp_ntime + \ + "expected, " + ntime + " found") + end if + + ; Reset date (middle of the month) + tt = 0 + do yy = year1, year2 + date(tt, 0) = yy + yy@calendar = calendar + m1 = where(yy.eq.year1, month1, 1) + m2 = where(yy.eq.year2, month2, 12) + do mm = m1, m2 + date(tt, 1) = mm + dm = days_in_month(yy, mm) / 2. + 1 + date(tt, 2) = toint(dm) + date(tt, 3) = toint(24 * (dm - toint(dm))) + tt = tt + 1 + delete(dm) + end do + end do + date(:, 4:5) = 0 + + end if + + ; Daily frequency + if (frequency.eq."day") then + + ; Check size + opt = 0 + opt@calendar = calendar + exp_ntime = \ + cd_inv_calendar(year2, month2, day2, zero, zero, zero, TUNITS, opt) - \ + cd_inv_calendar(year1, month1, day1, zero, zero, zero, TUNITS, opt) + 1 + if (ntime.ne.exp_ntime) then + error_msg("f", scriptname, funcname, \ + "incorrect number of timesteps in input data: " + exp_ntime + \ + "expected, " + ntime + " found") + end if + delete(opt) + + ; Reset date (middle of the day) + tt = 0 + do yy = year1, year2 + date(tt, 0) = yy + m1 = where(yy.eq.year1, month1, 1) + m2 = where(yy.eq.year2, month2, 12) + do mm = m1, m2 + date(tt, 1) = mm + do dd = day1, days_in_month(yy, mm) + date(tt, 2) = dd + tt = tt + 1 + end do + end do + end do + date(:, 3) = 12 + date(:, 4:5) = 0 + + end if + + ; 6-hourly frequency (check size only) + if (frequency.eq."6hr") then + + ; Check size + opt = 0 + opt@calendar = calendar + exp_ntime = \ + cd_inv_calendar(year2, month2, day2, zero, zero, zero, TUNITS, opt) - \ + cd_inv_calendar(year1, month1, day1, zero, zero, zero, TUNITS, opt) + 1 + exp_ntime = 4 * exp_ntime + if (ntime.ne.exp_ntime) then + error_msg("f", scriptname, funcname, \ + "incorrect number of timesteps in input data: " + exp_ntime + \ + "expected, " + ntime + " found") + end if + delete(opt) + + end if + + ; 3-hourly frequency (check size only) + if (frequency.eq."3hr") then + + ; Check size + opt = 0 + opt@calendar = calendar + exp_ntime = \ + cd_inv_calendar(year2, month2, day2, zero, zero, zero, TUNITS, opt) - \ + cd_inv_calendar(year1, month1, day1, zero, zero, zero, TUNITS, opt) + 1 + exp_ntime = 8 * exp_ntime + if (ntime.ne.exp_ntime) then + error_msg("f", scriptname, funcname, \ + "incorrect number of timesteps in input data: " + exp_ntime + \ + "expected, " + ntime + " found") + end if + delete(opt) + + end if + + ; Set calendar + opt = 0 + opt@calendar = calendar + + ; Redefine time 
coordinate + coord = cd_inv_calendar(toint(date(:, 0)), toint(date(:, 1)), \ + toint(date(:, 2)), toint(date(:, 3)), \ + toint(date(:, 4)), toint(date(:, 5)), \ + TUNITS, opt) + + ; Set standard attributes + newtime = todouble(coord) ; this also removes attributes + copy_VarCoords(coord, newtime) + newtime@bounds = "time_bnds" + newtime@calendar = calendar + newtime@long_name = "time" + newtime@axis = "T" + newtime@units = TUNITS + newtime@standard_name = "time" + if (isatt(newtime, "_FillValue")) then + delete(newtime@_FillValue) + end if + + ; Reset time coordinate + delete(var&time) + var&time = newtime + +end + +; ############################################################################# +undef("format_plev") +procedure format_plev(var:numeric) +; +; Arguments +; var: input variable +; +; Description +; Check the monotonicity of the plev (pressure) coordinate and set the +; standard CMOR attributes. +; +; Caveats +; +; References +; +; Modification history +; 20190216-A_righ_ma: written. +; +local funcname, scriptname, rank, cplev, newplev +begin + + funcname = "format_plev" + scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + + ; Set rank + rank = dimsizes(dimsizes(var)) + + ; Check monotonicity + if (isMonotonic(var&plev) .eq. 0) then + error_msg("f", scriptname, funcname, "non-monotonic vertical coordinate") + end if + if (isMonotonic(var&plev).eq.1) then ; must be monotonically decreasing + if (rank.eq.4) then + var = var(:, ::-1, :, :) + elseif (rank.eq.3) then + var = var(:, ::-1, :) + elseif (rank.eq.2) then + var = var(:, ::-1) + end if + end if + + ; Read coordinate + cplev = var&plev + + ; Set standard attributes + newplev = todouble(cplev) ; this also removes attributes + copy_VarCoords(cplev, newplev) + newplev@positive = "down" + newplev@long_name = "pressure" + newplev@axis = "Z" + newplev@units = "Pa" + newplev@standard_name = "air_pressure" + if (isatt(newplev, "_FillValue")) then + delete(newplev@_FillValue) + end if + + ; Reset plev coordinate + delete(var&plev) + var&plev = newplev + +end + +; ############################################################################# +undef("format_lev") +procedure format_lev(var:numeric) +; +; Arguments +; var: input variable +; +; Description +; Check the monotonicity of the lev (depth) coordinate and set the +; standard CMOR attributes. +; +; Caveats +; +; References +; +; Modification history +; 20190216-A_righ_ma: written. +; +local funcname, scriptname, rank, clev, newlev +begin + + funcname = "format_lev" + scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + + ; Set rank + rank = dimsizes(dimsizes(var)) + + ; Check monotonicity + if (isMonotonic(var&lev) .eq. 
0) then
+    error_msg("f", scriptname, funcname, "non-monotonic vertical coordinate")
+  end if
+  if (isMonotonic(var&lev).eq.-1) then  ; must be monotonically increasing
+    if (rank.eq.4) then
+      var = var(:, ::-1, :, :)
+    elseif (rank.eq.3) then
+      var = var(:, ::-1, :)
+    elseif (rank.eq.2) then
+      var = var(:, ::-1)
+    end if
+  end if
+
+  ; Read coordinate
+  clev = var&lev
+
+  ; Set standard attributes
+  newlev = todouble(clev)  ; this also removes attributes
+  copy_VarCoords(clev, newlev)
+  newlev@bounds = "lev_bnds"
+  newlev@positive = "down"
+  newlev@long_name = "ocean depth coordinate"
+  newlev@axis = "Z"
+  newlev@units = "m"
+  newlev@standard_name = "depth"
+  if (isatt(newlev, "_FillValue")) then
+    delete(newlev@_FillValue)
+  end if
+
+  ; Reset lev coordinate
+  delete(var&lev)
+  var&lev = newlev
+
+end
+
+; #############################################################################
+undef("format_lat")
+procedure format_lat(var:numeric)
+;
+; Arguments
+;    var: input variable
+;
+; Description
+;    Check the monotonicity of the latitude coordinate (S->N) and set the
+;    standard CMOR attributes.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;    20190216-A_righ_ma: written.
+;
+local funcname, scriptname, rank, dims, dpos, lcheck, clat, newlat
+begin
+
+  funcname = "format_lat"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Set rank
+  rank = dimsizes(dimsizes(var))
+  dims = getvardims(var)
+  dpos = ind(dims.eq."lat")
+
+  ; Check monotonicity
+  if (isMonotonic(var&lat) .eq. 0) then
+    error_msg("f", scriptname, funcname, "non-monotonic latitude coordinate")
+  end if
+  if (isMonotonic(var&lat) .eq. -1) then  ; must be S->N
+    lcheck = False
+    if (rank.eq.4) then
+      if (dpos.eq.2) then
+        var = var(:, :, ::-1, :)
+        lcheck = True
+      end if
+    end if
+    if (rank.eq.3) then
+      if (dpos.eq.1) then
+        var = var(:, ::-1, :)
+        lcheck = True
+      end if
+      if (dpos.eq.2) then
+        var = var(:, :, ::-1)
+        lcheck = True
+      end if
+    end if
+    if (rank.eq.2) then
+      if (dpos.eq.0) then
+        var = var(::-1, :)
+        lcheck = True
+      end if
+    end if
+    if (.not.lcheck) then
+      error_msg("f", scriptname, funcname, "cannot locate latitude position")
+    end if
+  end if
+
+  ; Read coordinate
+  clat = var&lat
+
+  ; Set standard attributes
+  newlat = todouble(clat)  ; this also removes attributes
+  copy_VarCoords(clat, newlat)
+  newlat@bounds = "lat_bnds"
+  newlat@long_name = "latitude"
+  newlat@axis = "Y"
+  newlat@units = "degrees_north"
+  newlat@standard_name = "latitude"
+  if (isatt(newlat, "_FillValue")) then
+    delete(newlat@_FillValue)
+  end if
+
+  ; Reset lat coordinate
+  delete(var&lat)
+  var&lat = newlat
+
+end
+
+; #############################################################################
+undef("format_lon")
+procedure format_lon(var:numeric)
+;
+; Arguments
+;    var: input variable
+;
+; Description
+;    Check the lon coordinate (0:360) and set the standard CMOR attributes.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;    20190216-A_righ_ma: written.
+;
+local funcname, scriptname, clon, newlon
+begin
+
+  funcname = "format_lon"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Check monotonicity
+  if (isMonotonic(var&lon) .eq. 0) then
+    error_msg("f", scriptname, funcname, "non-monotonic longitude coordinate")
+  end if
+
+  ; Check that lon is 0:360
+  if (any(var&lon.lt.0.)) then
+    var = lonFlip(var)
+  end if
+
+  ; Read coordinate
+  clon = var&lon
+
+  ; Set standard attributes
+  newlon = todouble(clon)  ; this also removes attributes
+  copy_VarCoords(clon, newlon)
+  newlon@bounds = "lon_bnds"
+  newlon@long_name = "longitude"
+  newlon@axis = "X"
+  newlon@units = "degrees_east"
+  newlon@standard_name = "longitude"
+  if (isatt(newlon, "_FillValue")) then
+    delete(newlon@_FillValue)
+  end if
+
+  ; Reset lon coordinate
+  delete(var&lon)
+  var&lon = newlon
+
+end
+
+; #############################################################################
+undef("format_coords")
+procedure format_coords(var:numeric,
+                        date1:string,
+                        date2:string,
+                        frequency:string)
+;
+; Arguments
+;    var: input variable with named dimensions and associated coordinates.
+;    date1: start date as YYYYMMDD
+;    date2: end date as YYYYMMDD
+;    frequency: time frequency ("3hr", "6hr", "day", "mon", "yr")
+;
+; Description
+;    Format the coordinates according to the CF/CMOR standard.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;    20190216-A_righ_ma: written.
+;
+begin
+
+  funcname = "format_coords"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Get variable dimensions
+  dnames = getvardims(var)
+
+  ; Loop over dimensions, call formatting procedures for each coordinate
+  do dd = 0, dimsizes(dnames) - 1
+
+    found = False
+
+    if (dnames(dd).eq."time") then
+      format_time(var, date1, date2, frequency)
+      found = True
+    end if
+
+    if (dnames(dd).eq."plev") then
+      format_plev(var)
+      found = True
+    end if
+
+    if (dnames(dd).eq."lev") then
+      format_lev(var)
+      found = True
+    end if
+
+    if (dnames(dd).eq."lat") then
+      format_lat(var)
+      found = True
+    end if
+
+    if (dnames(dd).eq."lon") then
+      format_lon(var)
+      found = True
+    end if
+
+    if (.not.found) then
+      error_msg("f", scriptname, funcname, "cannot format coordinate " + \
+                dnames(dd))
+    end if
+
+  end do
+
+end
+
+; #############################################################################
+undef("read_cmor")
+function read_cmor(name:string,
+                   table:string)
+;
+; Arguments
+;    name: standard variable name.
+;    table: full path to the CMOR table of the variable.
+;
+; Return value
+;    A logical variable with the CMOR table attached as attributes.
+;
+; Description
+;    Read variable attributes from the CMOR tables (cmor/*.cmor).
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;    20190107-A_righ_ma: modify to read standard CMIP5 tables
+;    20130528-A_righ_ma: written.
+;
+local funcname, scriptname, data, idxu, idxd, attn, attv, out
+begin
+
+  funcname = "read_cmor"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Read attributes from cmor table
+  if (.not.fileexists(table)) then
+    error_msg("f", scriptname, funcname, \
+              "cannot find CMOR table " + table)
+  end if
+
+  if (isStrSubset(table, ".json")) then  ; CMIP6 tables
+
+    error_msg("f", scriptname, funcname, \
+              "use of CMIP6 CMOR tables not supported")
+
+  else  ; CMIP5 and custom tables
+
+    data = readAsciiTable(table, 1, "string", 0)
+
+    ; Extract variable block
+    idxu = ind(data(:, 0).eq."variable_entry: " + name)
+    if (any(ismissing(idxu))) then
+      error_msg("f", scriptname, funcname, \
+                "entry for variable " + name + " not found in table " + \
+                table)
+    end if
+    tmp = ind(str_get_field(data(:, 0), 1, ":").eq."variable_entry")
+    if (dimsizes(tmp).gt.1) then
+      next = min(ind(tmp.gt.idxu))
+      if (.not.ismissing(next)) then
+        idxd = tmp(next) - 2
+      else
+        idxd = dimsizes(data(:, 0)) - 1
+      end if
+      data := data(idxu:idxd, 0)
+      delete(idxd)
+    else
+      data := data(:, 0)  ; just 1 variable in this table
+    end if
+    delete(idxu)
+    delete(tmp)
+
+    ; Extract attributes
+    idxu = ind(str_get_field(data, 1, ":").eq."! Variable attributes") + 2
+    idxd = ind(str_get_field(data, 1, ":").eq. \
+               "! Additional variable information") - 2
+    attn = str_squeeze(str_get_field(data(idxu:idxd), 1, ":"))
+    attv = str_squeeze(str_get_field(data(idxu:idxd), 2, ":"))
+
+  end if
+
+  out = True
+  do ii = 0, dimsizes(attn) - 1
+    out@$attn(ii)$ = attv(ii)
+  end do
+
+  return(out)
+
+end
+
+; #############################################################################
+undef("format_variable")
+function format_variable(var:numeric,
+                         name:string,
+                         table:string)
+;
+; Arguments
+;    var: input variable.
+;    name: standard name of the input variable.
+;    table: full path to the CMOR table containing the variable.
+;
+; Return value
+;    An array with the same dimensionality as var.
+;
+; Description
+;    Set standard variable attributes according to the given CMOR table.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;    20190107-A_righ_ma: add extra argument for CMOR table
+;    20161202-A_laue_ax: preserve attribute "coordinates" if present
+;    20130528-A_righ_ma: written.
+;
+local funcname, scriptname, coordattr, out, tmp, att, ii
+begin
+
+  funcname = "format_variable"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Set fill value first
+  if (isatt(var, "_FillValue")) then
+    var = where(var.eq.var@_FillValue, FILL, var)
+  end if
+  var@_FillValue = FILL
+
+  if (isatt(var, "coordinates")) then
+    coordattr = var@coordinates
+  end if
+
+  ; Remove attributes
+  delete_VarAtts(var, -1)
+
+  ; Convert to float
+  if (typeof(var).ne."float") then
+    out = tofloat(var)
+    copy_VarCoords(var, out)
+  else
+    out = var
+  end if
+
+  ; Append attributes
+  out@_FillValue = FILL
+  tmp = read_cmor(name, table)
+  att = getvaratts(tmp)
+  do ii = 0, dimsizes(att) - 1
+    out@$att(dimsizes(att) - 1 - ii)$ = tmp@$att(dimsizes(att) - 1 - ii)$
+  end do
+
+  if (isvar("coordattr")) then
+    out@coordinates = coordattr
+  end if
+
+  return(out)
+
+end
+
+; #############################################################################
+undef("guess_bounds_time")
+function guess_bounds_time(coord[*]:double,
+                           frequency:string)
+;
+; Arguments
+;    coord: input time coordinate.
+; frequency: time frequency ("3hr", "6hr", "day", "mon", "yr") +; +; Return value +; A two dimensional array, with the first dimension of the same size of the +; input coordinate and the second dimension of size 2. +; +; Description +; Calculate the boundaries of the time coordinate given the frequency. +; +; Caveats +; +; References +; +; Modification history +; 20190217-A_righ_ma: written. +; +local funcname, scriptname, date, year, month, day, opt, units, tyear, tmonth +begin + + funcname = "guess_bounds_time" + scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + + ; This function assumes that units of days are used + if (.not.isStrSubset(coord@units, "days since")) then + error_msg("f", scriptname, funcname, "unsupported units " + \ + coord@units + ", expected " + TUNITS) + end if + + ; Define boundaries + bounds = new((/dimsizes(coord), 2/), double) + bounds!0 = "time" + bounds&time = coord + bounds!1 = "bnds" + delete(bounds@_FillValue) + + ; Get date + date = cd_calendar(coord, 0) + year = date(:, 0) + month = date(:, 1) + day = date(:, 2) + + ; Set array of constants with the same dimensionality for cd_inv_calendar + zero = year + zero = 0 + one = year + one = 1 + + ; Settings for calendar + units = coord@units + opt = 0 + copy_VarAtts(coord, opt) + + if (frequency.eq."yr") then + ; 1st day of the year + bounds(:, 0) = \ + (/cd_inv_calendar(year, one, one, zero, zero, zero, units, opt)/) + ; 1st day of the next year + bounds(:, 1) = \ + (/cd_inv_calendar(year + 1, one, one, zero, zero, zero, units, opt)/) + return(bounds) + end if + + if (frequency.eq."mon") then + ; 1st day of the month + bounds(:, 0) = \ + (/cd_inv_calendar(year, month, one, zero, zero, zero, units, opt)/) + ; 1st day of the next month (special case December!) + tmonth = where(month.eq.12, 1, month + 1) + tyear = where(month.eq.12, year + 1, year) + bounds(:, 1) = \ + (/cd_inv_calendar(tyear, tmonth, one, zero, zero, zero, units, opt)/) + return(bounds) + delete([/tyear, tmonth/]) + end if + + if (frequency.eq."day") then + ; Shift of half a day + bounds(:, 0) = coord - 0.5 + bounds(:, 1) = coord + 0.5 + return(bounds) + end if + + if (frequency.eq."6hr") then + ; Shift of 3 hours (day/8) + bounds(:, 0) = coord - 0.125 + bounds(:, 1) = coord + 0.125 + return(bounds) + end if + + if (frequency.eq."3hr") then + ; Shift of 1.5 hours (day/16) + bounds(:, 0) = coord - 0.0625 + bounds(:, 1) = coord + 0.0625 + return(bounds) + end if + + error_msg("f", scriptname, funcname, "unsupported frequency " + frequency) + +end + +; ############################################################################# +undef("guess_bounds_lev") +function guess_bounds_lev(coord[*]:double) +; +; Arguments +; coord: input level coordinate. +; +; Return value +; A two dimensional array, with the first dimension of the same size of the +; input coordinate and the second dimension of size 2. +; +; Description +; Calculate the boundaries of the level coordinate as midpoints between +; the input levels. The first (top) boundary is set to a maximum of zero. +; +; Caveats +; +; References +; +; Modification history +; 20190217-A_righ_ma: written. 
+; +local funcname, scriptname, size, top +begin + + funcname = "guess_bounds_lev" + scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + + bounds = new((/dimsizes(coord), 2/), double) + bounds!0 = "lev" + bounds&lev = coord + bounds!1 = "bnds" + delete(bounds@_FillValue) + + size = dimsizes(coord) + + bounds(1:size - 1, 0) = 0.5 * (coord(0:size - 2) + coord(1:size - 1)) + bounds(0:size - 2, 1) = bounds(1:size - 1, 0) + + ; Set top and bottom separately + top = coord(0) - 0.5 * (coord(1) - coord(0)) + bounds(0, 0) = where(top.ge.0., top, 0.) + bounds(size - 1, 1) = \ + coord(size - 1) + 0.5 * (coord(size - 1) - coord(size - 2)) + + return(bounds) + +end + +; ############################################################################# +undef("guess_bounds_lat") +function guess_bounds_lat(coord[*]:double) +; +; Arguments +; coord: input latitude coordinate. +; +; Return value +; A two dimensional array, with the first dimension of the same size of the +; input coordinate and the second dimension of size 2. +; +; Description +; Calculate the boundaries of the latitude coordinate as midpoints between +; the input values. The first and last boundary is set to 90 S and 90 N, +; respectively. +; +; Caveats +; This function works only with regular grids. +; +; References +; +; Modification history +; 20190217-A_righ_ma: written. +; +local funcname, scriptname +begin + + funcname = "guess_bounds_lat" + scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl" + + bounds = new((/dimsizes(coord), 2/), double) + bounds!0 = "lat" + bounds&lat = coord + bounds!1 = "bnds" + delete(bounds@_FillValue) + + size = dimsizes(coord) + + bounds(1:size - 1, 0) = 0.5 * (coord(0:size - 2) + coord(1:size - 1)) + bounds(0:size - 2, 1) = bounds(1:size - 1, 0) + bounds(0, 0) = -90. + bounds(size - 1, 1) = 90. + + return(bounds) + +end + +; ############################################################################# +undef("guess_bounds_lon") +function guess_bounds_lon(coord[*]:double) +; +; Arguments +; coord: input longitude coordinate. +; +; Return value +; A two dimensional array, with the first dimension of the same size of the +; input coordinate and the second dimension of size 2. +; +; Description +; Calculate the boundaries of the longitude coordinate as midpoints between +; the input values. +; +; Caveats +; This function works only with regular grids. +; +; References +; +; Modification history +; 20190217-A_righ_ma: written. 
+;
+local funcname, scriptname
+begin
+
+  funcname = "guess_bounds_lon"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  bounds = new((/dimsizes(coord), 2/), double)
+  bounds!0 = "lon"
+  bounds&lon = coord
+  bounds!1 = "bnds"
+  delete(bounds@_FillValue)
+
+  size = dimsizes(coord)
+
+  bounds(1:size - 1, 0) = 0.5 * (coord(0:size - 2) + coord(1:size - 1))
+  bounds(0:size - 2, 1) = bounds(1:size - 1, 0)
+  bounds(0, 0) = coord(0) - 0.5 * (coord(1) - coord(0))
+  bounds(size - 1, 1) = \
+    coord(size - 1) + 0.5 * (coord(size - 1) - coord(size - 2))
+
+  return(bounds)
+
+end
+
+; #############################################################################
+undef("guess_coord_bounds")
+function guess_coord_bounds(var:numeric,
+                            frequency:string)
+;
+; Arguments
+;    var: input variable with named dimensions and associated coordinates.
+;    frequency: time frequency ("3hr", "6hr", "day", "mon", "yr", "fx")
+;
+; Return value
+;    A list of coordinate bounds arrays (time_bnds, lev_bnds, lat_bnds,
+;    lon_bnds), to be passed to write_nc.
+;
+; Description
+;    Guess the bounds of each coordinate of the input variable by calling
+;    the corresponding guess_bounds_* function.
+;
+; Modification history
+;    20190217-A_righ_ma: written.
+;
+local funcname, scriptname, time_bnds, plev_bnds, lev_bnds, lat_bnds, lon_bnds
+begin
+
+  funcname = "guess_coord_bounds"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Check supported frequency
+  if (all(frequency.ne.(/"3hr", "6hr", "day", "mon", "yr", "fx"/))) then
+    error_msg("f", scriptname, funcname, "unsupported frequency " + frequency)
+  end if
+
+  ; Get variable dimensions
+  dnames = getvardims(var)
+
+  ; Initialize list
+  bounds_list = NewList("fifo")
+
+  ; Loop over dimensions, guess bounds for each coordinate
+  do dd = 0, dimsizes(dnames) - 1
+
+    if (dnames(dd).eq."time") then
+      time_bnds = guess_bounds_time(var&time, frequency)
+      ListPush(bounds_list, time_bnds)
+      continue
+    end if
+
+    if (dnames(dd).eq."plev") then
+      continue  ; plev bounds are not required
+    end if
+
+    if (dnames(dd).eq."lev") then
+      lev_bnds = guess_bounds_lev(var&lev)
+      ListPush(bounds_list, lev_bnds)
+      continue
+    end if
+
+    if (dnames(dd).eq."lat") then
+      lat_bnds = guess_bounds_lat(var&lat)
+      ListPush(bounds_list, lat_bnds)
+      continue
+    end if
+
+    if (dnames(dd).eq."lon") then
+      lon_bnds = guess_bounds_lon(var&lon)
+      ListPush(bounds_list, lon_bnds)
+      continue
+    end if
+
+    ; all known coordinates are handled above with a continue statement
+    error_msg("f", scriptname, funcname, "cannot guess bounds for " + \
+              "coordinate " + dnames(dd))
+
+  end do
+
+  return(bounds_list)
+
+end
+
+; #############################################################################
+undef("set_global_atts")
+function set_global_atts(obsname:string,
+                         tier:integer,
+                         source:string,
+                         reference:string,
+                         comment:string)
+;
+; Arguments
+;    obsname: name of the observational dataset.
+;    tier: tier number (2 or 3).
+;    source: link to the data source.
+;    reference: reference for the dataset, or leave empty if not available.
+;    comment: additional information if required, or leave empty.
+;
+; Return value
+;    A logical containing the arguments as attributes.
+;
+; Description
+;    Generate the global attribute for the output file by combining user
+;    provided information with default ones (author, host, date, etc.).
+;
+; Modification history
+;    20190202-A_righ_ma: written.
+;
+local funcname, scriptname, global
+begin
+
+  funcname = "set_global_atts"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  global = True
+  global@title = obsname + " data reformatted for the ESMValTool v2.0"
+  global@tier = tier
+  global@source = source
+  if (strlen(str_squeeze(reference)).ne.0) then
+    global@reference = reference
+  else
+    global@reference = "not available"
+  end if
+  if (strlen(str_squeeze(comment)).ne.0) then
+    global@comment = comment
+  end if
+  global@user = systemfunc("echo $USER")
+  global@host = systemfunc("hostname -f")
+  global@history = "Created on " + systemfunc("date")
+  global@conventions = "CF/CMOR"
+
+  return(global)
+
+end
+
+; #############################################################################
+undef("write_nc")
+procedure write_nc(outfile:string,
+                   name:string,
+                   var:numeric,
+                   bounds:list,
+                   gAtt:logical)
+;
+; Arguments
+;    outfile: the name of the file to be written, including its path.
+;    name: the variable standard name.
+;    var: the variable array.
+;    bounds: a list containing the bounds of the variable coordinates.
+;    gAtt: a logical variable, whose attributes are appended as file
+;          attributes.
+;
+; Description
+;    Write the given variable to the given NetCDF file, together with its
+;    coordinates and boundaries, and append the provided global attributes.
+;
+; Modification history
+;    20190218-A_righ_ma: extend with coordinate bounds.
+;    20140123-A_righ_ma: written.
+;
+local funcname, scriptname, w, dim_names, ndims, dim_sizes, dim_types, \
+  dim_unlim, ii
+begin
+
+  funcname = "write_nc"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Open file
+  if (fileexists(outfile)) then
+    system("rm -f " + outfile)
+  end if
+  w = addfile(outfile, "c")
+  setfileoption(w, "DefineMode", True)
+
+  ; Get coordinates
+  dim_names = getvardims(var)
+  ndims = dimsizes(dim_names)
+  dim_sizes = new(ndims, integer)
+  dim_types = new(ndims, string)
+  dim_unlim = new(ndims, logical)
+  do ii = 0, ndims - 1
+    dim_sizes(ii) = dimsizes(var&$dim_names(ii)$)
+    dim_types(ii) = typeof(var&$dim_names(ii)$)
+    dim_unlim(ii) = False
+  end do
+
+  ; Time coordinate must be unlimited
+  if (any(dim_names.eq."time")) then
+    dim_sizes(ind(dim_names.eq."time")) = -1
+    dim_unlim(ind(dim_names.eq."time")) = True
+  end if
+
+  ; Define dimensions
+  filedimdef(w, dim_names, dim_sizes, dim_unlim)
+  filedimdef(w, "bnds", 2, False)
+  do ii = 0, ndims - 1
+    ; Coordinates
+    filevardef(w, dim_names(ii), dim_types(ii), dim_names(ii))
+    filevarattdef(w, dim_names(ii), var&$dim_names(ii)$)
+    ; Bounds (no attributes required)
+    bnds_name = dim_names(ii) + "_bnds"
+    listidx = ListIndexFromName(bounds, bnds_name)
+    if (listidx.ne.-1) then
+      filevardef(w, bnds_name, dim_types(ii), (/dim_names(ii), "bnds"/))
+    end if
+  end do
+
+  ; Define variable
+  filevardef(w, name, "float", dim_names)
+  filevarattdef(w, name, var)
+
+  ; Append global attributes
+  fileattdef(w, gAtt)
+
+  ; Write coordinates and corresponding bounds (if available)
+  setfileoption(w, "DefineMode", False)
+  do ii = 0, ndims - 1
+    w->$dim_names(ii)$ = (/var&$dim_names(ii)$/)
+    bnds_name = dim_names(ii) + "_bnds"
+    listidx = ListIndexFromName(bounds, bnds_name)
+    if (listidx.ne.-1) then
+      bound = bounds[listidx]
+      w->$bnds_name$ = (/bound/)
+      delete(bound)
+    end if
+  end do
+
+  ; Write variable
+  w->$name$ = (/var/)
+
+end
+
+; #############################################################################
+undef("write_nc_profile")
+procedure write_nc_profile(outfile:string,
+                           name:string,
+                           var:numeric,
+                           gAtt:logical)
+;
+; Arguments
+;    outfile: the name of the file to be written, including its path.
+;    name: the variable name.
+;    var: the variable field.
+;    gAtt: a logical variable, whose attributes are appended as file
+;          attributes.
+;
+; Description
+;    Write the given variable to the given NetCDF file, appending also the
+;    provided global attributes.
+;    Designed to write multiple variables for the vertical profiles data.
+;
+; Modification history
+;    20140422-A_righ_ma: written.
+;
+local funcname, scriptname, w, coords, cc, jj, locname, locvar, cname
+begin
+
+  funcname = "write_nc_profile"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Open file
+  if (fileexists(outfile)) then
+    system("rm -f " + outfile)
+  end if
+  w = addfile(outfile, "c")
+  setfileoption(w, "DefineMode", True)
+
+  ; Attach global attributes
+  fileattdef(w, gAtt)
+
+  ; Write dimensions
+  coords = getvardims(var)
+  do cc = 0, dimsizes(coords) - 2  ; skip column
+    cname = coords(cc)
+    filedimdef(w, cname, dimsizes(var&$cname$), False)
+    filevardef(w, cname, typeof(var&$cname$), cname)
+  end do
+
+  ; Write variable
+  do jj = 0, dimsizes(var&column) - 1
+
+    ; Extract given column
+    locname = str_sub_str(name + "_" + var&column(jj), "%", "")
+    if (isdim(var, "case")) then
+      locvar = var(:, :, jj)
+    else
+      locvar = var(:, jj)
+    end if
+    if (var&column(jj).eq."N") then
+      locvar@units = "1"
+    end if
+
+    ; Define dimensions
+    filevardef(w, locname, "float", coords(0:dimsizes(coords) - 2))
+    do cc = 0, dimsizes(coords) - 2
+      cname = coords(cc)
+      filevarattdef(w, cname, locvar&$cname$)
+    end do
+    filevarattdef(w, locname, locvar)
+
+    ; Write
+    setfileoption(w, "DefineMode", False)
+    do cc = 0, dimsizes(coords) - 2
+      cname = coords(cc)
+      w->$cname$ = (/locvar&$cname$/)
+    end do
+    w->$locname$ = (/locvar/)
+    delete(locvar)
+    delete(locname)
+
+  end do
+
+end
+
+; #############################################################################
+undef("set_size_array")
+function set_size_array()
+;
+; Arguments
+;
+; Return value
+;    An array of type double.
+;
+; Description
+;    Set a logarithmic array of sizes to be used for particle size
+;    distribution calculations.
+;
+; Caveats
+;
+; References
+;
+; Modification history
+;    20130528-A_righ_ma: written.
+;
+local funcname, scriptname, minsize, maxsize, nbins, bin, out
+begin
+
+  funcname = "set_size_array"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; Size range (0.5 nm - 10 um)
+  minsize = 0.5e-9
+  maxsize = 10.e-6
+  nbins = 100
+
+  ; Generate array
+  out = new(nbins, double)
+  bin = 10. ^ (log10(maxsize / minsize) / (nbins - 1))
+  out(0) = minsize
+  do ii = 1, nbins - 1
+    out(ii) = out(ii - 1) * bin
+  end do
+
+  return(out)
+
+end
+
+; #############################################################################
+undef("process_EBAS_data")
+function process_EBAS_data(in_vars[*]:string,
+                           in_units[*]:string,
+                           in_matrix[*]:string,
+                           in_compon[*]:string,
+                           in_column[*]:string,
+                           indir[1]:string,
+                           st_code:string,
+                           y1:integer,
+                           y2:integer)
+;
+; Arguments
+;    in_vars: variables standard name.
+;    in_units: variables units in the raw data.
+;    in_matrix: variables matrix in the raw data.
+;    in_compon: variables name in the raw data.
+;    in_column: variables name in the header.
+;    indir: the input directory for raw data.
+;    st_code: the code of the station to be processed (used for
+;             cross-checking).
+;    y1: start year of the considered time interval.
+;    y2: end year of the considered time interval.
+;
+; Return value
+;    A two-dimensional array (time, variable) with the monthly mean time
+;    series for each of the processed variables.
+;
+; Description
+;    Process the data from the EBAS database (e.g., EANET, EMEP).
+;
+; Caveats
+;    For the time coordinate in the input data, only units of days are
+;    currently accepted.
+;
+; Modification history
+;    20150413-A_righ_ma: improved time selection.
+;    20140124-A_righ_ma: written.
+;
+local timec, datec, vID, fID, bn, en, head, hh, cline, syear, smonth, sday, \
+  scode, comp, matr, unit, scale, fills, lline, cols, data_col, flag_col, \
+  start_col, end_col, data, value, flag, start_time, end_time, mm, sidx, \
+  monthind, stday, enday, nd, pt1, pt2, data_arr
+begin
+
+  funcname = "process_EBAS_data"
+  scriptname = "esmvaltool/utils/cmorizers/obs/utilities.ncl"
+
+  ; EBAS flags for valid measurements
+  ; (see http://www.nilu.no/projects/ccc/flags/index.html)
+  validflags = (/798, 797, 782, 781, 780, 771, 770, 741, 740, 680, 679, \
+                 678, 676, 675, 668, 665, 662, 660, 657, 656, 655, 654, \
+                 653, 652, 651, 650, 649, 648, 644, 640, 559, 558, 557, \
+                 556, 555, 532, 531, 521, 499, 498, 476, 475, 470, 458, \
+                 457, 450, 440, 420, 410, 394, 392, 390, 382, 380, 370, \
+                 299, 298, 276, 275, 258, 257, 250, 249, 248, 247, 220, \
+                 211, 210, 191, 190, 189, 188, 187, 186, 185, 147, 120, \
+                 111, 110, 103, 102, 101, 100, 000/)
+
+  ; Create time coordinate
+  timec = create_timec(y1, y2)
+  datec = cd_calendar(timec, -1)
+
+  ; Create output array
+  data_arr = new((/dimsizes(timec), dimsizes(in_vars)/), float)
+  data_arr!0 = "time"
+  data_arr&time = timec
+  data_arr@_FillValue = FILL
+
+  ; Create temporary arrays for time averages and weights
+  temp_arr = new(dimsizes(timec), float)
+  temp_arr!0 = "time"
+  temp_arr&time = timec
+  temp_arr@_FillValue = FILL
+  ndays_arr = new(dimsizes(timec), float)
+  ndays_arr!0 = "time"
+  ndays_arr&time = timec
+  ndays_arr@_FillValue = FILL
+
+  ; Loop over variables
+  do vID = 0, dimsizes(in_vars) - 1
+
+    log_info("  Processing variable " + in_compon(vID))
+
+    ; Initialize
+    temp_arr = 0.
+    ndays_arr = 0.
+
+    ; Read file list
+    cstr = "find " + indir + " -type f -name '" + \
+      st_code + ".*." + in_compon(vID) + "."
+ in_matrix(vID) + "*.nas'" + in_files = systemfunc(cstr) + if (all(ismissing(in_files))) then + delete(in_files) + continue + end if + in_bnames = systemfunc(cstr + " -exec basename {} " + inttochar(92) + ";") + sy = str_get_cols(str_get_field(in_bnames, 2, "."), 0, 5) + delete(cstr) + + sqsort(in_files) + sqsort(in_bnames) + + ; Check for duplicates + if (dimsizes(UNIQ(sy)).ne.dimsizes(sy)) then + log_info("Duplicated data in input files") + do fID = 0, dimsizes(in_files) - 1 + log_info(" " + in_files(fID)) + end do + log_info("Remove duplicated files considering the following criteria") + log_info(" most recent revision date") + log_info(" most complete time coverage") + log_info(" same instrument in different years") + error_msg("f", scriptname, funcname, \ + "rerun this station after removing duplicates") + end if + delete(sy) + + ; Loop over input files + do fID = 0, dimsizes(in_files) - 1 + + log_info(" Reading file " + in_bnames(fID)) + + ; Read header + head = readAsciiHead(in_files(fID), "starttime") + + ; Extract and check starting date + syear = toint(str_get_field(head(6), 1, " ")) + smonth = toint(str_get_field(head(6), 2, " ")) + sday = toint(str_get_field(head(6), 3, " ")) + + ; Get time units + utime = str_sub_str(head(8), "file reference point", "") + if (.not.isStrSubset(utime, "days")) then + error_msg("f", scriptname, funcname, "unexpected time units") + end if + utime = utime + syear + "-" + smonth + "-" + sday + delete(syear) + delete(smonth) + delete(sday) + + ; Use first file units as reference + if (fID.eq.0) then + ref_utime = utime + end if + + ; Check units consistency + do hh = 0, dimsizes(head) - 1 + if (isStrSubset(head(hh), "Unit:")) then + unit = str_squeeze(str_get_field(head(hh), 2, ":")) + if (unit .ne. in_units(vID) .and. unit.ne."ug/m3") then + error_msg("f", scriptname, funcname, \ + "units in the file not as expected " + \ + "(" + unit + " vs. " + in_units(vID) + ")") + end if + delete(unit) + end if + end do + + ; Get specific fill values and scale factors + scale = tofloat(str_get_field(head(10), 2, " ")) + fills = tofloat(str_get_field(head(11), 2, " ")) + + ; Get column names + lline = head(dimsizes(head) - 1) + ncols = str_fields_count(lline, " ") + cols = new(ncols, string) + do cc = 0, ncols - 1 + cols(cc) = str_get_field(lline, cc + 1, " ") + end do + data_col = min(ind(cols.eq.in_column(vID))) + 1 + flag_col = (ind(cols.eq."flag_" + in_column(vID).or.cols.eq."flag")) + 1 + start_col = ind(cols.eq."starttime") + 1 + end_col = ind(cols.eq."endtime") + 1 + delete(cols) + delete(ncols) + + ; Read data + data = readAsciiTable(in_files(fID), 1, "string", dimsizes(head)) + delete(head) + + ; Read data (for the given month) + value = tofloat(str_get_field(data(:, 0), data_col, " ")) + value@_FillValue = -999. 
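+
+      ; Note: the EBAS numflag column encodes one or more 3-digit flags as
+      ; the decimals of a single number (e.g. 0.110 means flag 110), so the
+      ; line below multiplies by 1000 and truncates to extract the first
+      ; (most significant) flag, which is then checked against the list of
+      ; valid flags defined above.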
+ + ; Read flag + flag = toint(1000 * tofloat(str_get_field(data(:, 0), flag_col, " "))) + + ; Filter for valid values + value = where(value.eq.fills, value@_FillValue, value) + value = where(value.lt.0, value@_FillValue, value) + do jj = 0, dimsizes(value) - 1 + if (all(flag(jj).ne.validflags)) then + value(jj) = value@_FillValue + end if + end do + delete(flag) + delete(fills) + + ; Apply scaling + if (scale.ne.1) then + value = value * scale + end if + delete(scale) + + ; Get start and end time + stt = todouble(str_get_field(data(:, 0), start_col, " ")) + stt@units = utime + ent = todouble(str_get_field(data(:, 0), end_col, " ")) + ent@units = utime + delete(data) + + ; Convert to reference time units + stt = cd_convert(stt, ref_utime) + ent = cd_convert(ent, ref_utime) + + ; Create time series + if (fID.eq.0) then + start_time = stt + end_time = ent + var = value + else + tmp = array_append_record(start_time, stt, 0) + delete(start_time) + start_time = tmp + delete(tmp) + tmp = array_append_record(end_time, ent, 0) + delete(end_time) + end_time = tmp + delete(tmp) + tmp = array_append_record(var, value, 0) + delete(var) + var = tmp + delete(tmp) + end if + delete(stt) + delete(ent) + delete(value) + + end do + delete(in_files) + delete(in_bnames) + + ; Check monotonicity + if (isMonotonic(start_time).ne.1) then + error_msg("f", scriptname, funcname, \ + "non-monotonically increasing time-series, possible " + \ + "duplicated data in input") + end if + + ; Calculate monthly mean from the time series + do vv = 0, dimsizes(var) - 1 + + if (ismissing(var(vv))) then + continue + end if + + pstart = start_time(vv) + pend = -1000.d0 + + do while (pend.lt.end_time(vv)) + + wdate = cd_calendar(pstart, -5) + wdatec = cd_calendar(pstart, -1) + + ; Find beginning of next month + if (wdate(0, 1).eq.12) then + wyear = wdate(0, 0) + 1 + wmonth = 1 + else + wyear = wdate(0, 0) + wmonth = wdate(0, 1) + 1 + end if + pend = cd_inv_calendar(wyear, wmonth, 1, 0, 0, 0, ref_utime, 0) + + if (pend.gt.end_time(vv)) then + pend = (/end_time(vv)/) + end if + + didx = ind(wdatec.eq.datec) + if (wdate(0, 0).lt.y1 .or. 
wdate(0, 0).gt.y2) then + pstart = pend + continue + end if + nd = tofloat(pend - pstart) + temp_arr(didx) = temp_arr(didx) + var(vv) * nd + ndays_arr(didx) = ndays_arr(didx) + nd + + ; DEBUG+++ + ; print(start_time(vv) +" "+end_time(vv) + " " + "(" + \ + ; cd_calendar(start_time(vv), -2) + "-" + \ + ; cd_calendar(end_time(vv), -2)+") " + datec(didx) + \ + ; " nd="+nd) + ; DEBUG--- + + pstart = pend + + end do + delete(pstart) + delete(pend) + + end do + + delete(var) + delete(start_time) + delete(end_time) + + ; Calculate monthly mean + temp_arr = where(temp_arr.eq.0, temp_arr@_FillValue, temp_arr) + ndays_arr = where(ndays_arr.eq.0, ndays_arr@_FillValue, ndays_arr) + temp_arr = temp_arr / ndays_arr + + ; Assign to global data array + idx_nm = ind(.not.ismissing(temp_arr)) + if (all(ismissing(idx_nm))) then + delete(idx_nm) + continue + end if + do ii = 0, dimsizes(idx_nm) - 1 + data_arr(idx_nm(ii), vID) = temp_arr(idx_nm(ii)) + end do + delete(idx_nm) + + end do ; variables + + return(data_arr) + +end diff --git a/esmvaltool/utils/cmorizers/obs/utilities.py b/esmvaltool/utils/cmorizers/obs/utilities.py new file mode 100644 index 0000000000..76f822c2b3 --- /dev/null +++ b/esmvaltool/utils/cmorizers/obs/utilities.py @@ -0,0 +1,285 @@ +"""Utils module for Python cmorizers.""" +import datetime +import logging +import os +from contextlib import contextmanager + +import iris +import numpy as np +import yaml +from cf_units import Unit +from dask import array as da + +from esmvaltool import __version__ as version +from esmvaltool._config import get_tag_value +from esmvaltool.cmor.table import CMOR_TABLES + +logger = logging.getLogger(__name__) + + +def add_height2m(cube): + """Add scalar coordinate 'height' with value of 2m.""" + logger.info("Adding height coordinate (2m)") + height_coord = iris.coords.AuxCoord( + 2.0, + var_name='height', + standard_name='height', + long_name='height', + units=Unit('m'), + attributes={'positive': 'up'}) + cube.add_aux_coord(height_coord, ()) + + +@contextmanager +def constant_metadata(cube): + """Do cube math without modifying units etc.""" + metadata = cube.metadata + yield metadata + cube.metadata = metadata + + +def convert_timeunits(cube, start_year): + """Convert time axis from malformed Year 0.""" + # TODO any more weird cases? + if cube.coord('time').units == 'months since 0000-01-01 00:00:00': + real_unit = 'months since {}-01-01 00:00:00'.format(str(start_year)) + elif cube.coord('time').units == 'days since 0000-01-01 00:00:00': + real_unit = 'days since {}-01-01 00:00:00'.format(str(start_year)) + elif cube.coord('time').units == 'days since 1950-1-1': + real_unit = 'days since 1950-1-1 00:00:00' + else: + real_unit = cube.coord('time').units + cube.coord('time').units = real_unit + return cube + + +def fix_coords(cube): + """Fix the time units and values to CMOR standards.""" + # first fix any completely missing coord var names + _fix_dim_coordnames(cube) + # fix individual coords + for cube_coord in cube.coords(): + # fix time + if cube_coord.var_name == 'time': + logger.info("Fixing time...") + cube.coord('time').convert_units( + Unit('days since 1950-1-1 00:00:00', calendar='gregorian')) + _fix_bounds(cube, cube.coord('time')) + + # fix longitude + if cube_coord.var_name == 'lon': + logger.info("Fixing longitude...") + if cube.coord('longitude').points[0] < 0. and \ + cube.coord('longitude').points[-1] < 181.: + cube.coord('longitude').points = \ + cube.coord('longitude').points + 180. 
+                _fix_bounds(cube, cube.coord('longitude'))
+                cube.attributes['geospatial_lon_min'] = 0.
+                cube.attributes['geospatial_lon_max'] = 360.
+                nlon = len(cube.coord('longitude').points)
+                _roll_cube_data(cube, int(nlon / 2), -1)
+
+        # fix latitude
+        if cube_coord.var_name == 'lat':
+            logger.info("Fixing latitude...")
+            _fix_bounds(cube, cube.coord('latitude'))
+
+        # fix depth
+        if cube_coord.var_name == 'lev':
+            logger.info("Fixing depth...")
+            _fix_bounds(cube, cube.coord('depth'))
+
+        # fix air_pressure
+        if cube_coord.var_name == 'air_pressure':
+            logger.info("Fixing air pressure...")
+            _fix_bounds(cube, cube.coord('air_pressure'))
+
+    # remove CS
+    cube.coord('latitude').coord_system = None
+    cube.coord('longitude').coord_system = None
+
+    return cube
+
+
+def fix_var_metadata(cube, var_info):
+    """Fix var metadata from CMOR table."""
+    if var_info.standard_name == '':
+        cube.standard_name = None
+    else:
+        cube.standard_name = var_info.standard_name
+    cube.var_name = var_info.short_name
+    cube.long_name = var_info.long_name
+    _set_units(cube, var_info.units)
+    return cube
+
+
+def flip_dim_coord(cube, coord_name):
+    """Flip (reverse) dimensional coordinate of cube."""
+    logger.info("Flipping dimensional coordinate %s...", coord_name)
+    coord = cube.coord(coord_name, dim_coords=True)
+    coord_idx = cube.coord_dims(coord)[0]
+    coord.points = np.flip(coord.points)
+    coord.bounds = np.flip(coord.bounds, axis=0)
+    cube.data = da.flip(cube.core_data(), axis=coord_idx)
+
+
+def read_cmor_config(cmor_config):
+    """Read the associated dataset-specific config file."""
+    reg_path = os.path.join(
+        os.path.dirname(__file__), 'cmor_config', cmor_config)
+    with open(reg_path, 'r') as file:
+        cfg = yaml.safe_load(file)
+    cfg['cmor_table'] = \
+        CMOR_TABLES[cfg['attributes']['project_id']]
+    # the comment lives in the 'attributes' dict, so check (and default) it
+    # there rather than at the top level of the configuration
+    if 'comment' not in cfg['attributes']:
+        cfg['attributes']['comment'] = ''
+    return cfg
+
+
+def save_variable(cube, var, outdir, attrs, **kwargs):
+    """Saver function."""
+    # CMOR standard
+    cube_time = cube.coord('time')
+    reftime = Unit(cube_time.units.origin, cube_time.units.calendar)
+    dates = reftime.num2date(cube_time.points[[0, -1]])
+    if len(cube_time.points) == 1:
+        year = str(dates[0].year)
+        time_suffix = '-'.join([year + '01', year + '12'])
+    else:
+        date1 = str(dates[0].year) + '%02d' % dates[0].month
+        date2 = str(dates[1].year) + '%02d' % dates[1].month
+        time_suffix = '-'.join([date1, date2])
+
+    file_name = '_'.join([
+        'OBS',
+        attrs['dataset_id'],
+        attrs['modeling_realm'],
+        attrs['version'],
+        attrs['mip'],
+        var,
+        time_suffix,
+    ]) + '.nc'
+    file_path = os.path.join(outdir, file_name)
+    logger.info('Saving: %s', file_path)
+    status = 'lazy' if cube.has_lazy_data() else 'realized'
+    logger.info('Cube has %s data [lazy is preferred]', status)
+    iris.save(cube, file_path, fill_value=1e20, **kwargs)
+
+
+def set_global_atts(cube, attrs):
+    """Complete the cmorized file with global metadata."""
+    logger.info("Setting global metadata...")
+    attrs = dict(attrs)
+    cube.attributes.clear()
+    timestamp = datetime.datetime.utcnow()
+    timestamp_format = "%Y-%m-%d %H:%M:%S"
+    now_time = timestamp.strftime(timestamp_format)
+
+    # Necessary attributes
+    try:
+        glob_dict = {
+            'title': (f"{attrs.pop('dataset_id')} data reformatted for "
+                      f"ESMValTool v{version}"),
+            'version':
+            attrs.pop('version'),
+            'tier':
+            str(attrs.pop('tier')),
+            'source':
+            attrs.pop('source'),
+            'reference':
+            get_tag_value('references', attrs.pop('reference')),
+            'comment':
+            attrs.pop('comment'),
+            'user':
+            os.environ["USER"],
+            'host':
os.environ["HOSTNAME"], + 'history': + f'Created on {now_time}', + 'project_id': + attrs.pop('project_id'), + } + except KeyError: + raise KeyError( + "All CMORized datasets need the global attributes 'dataset_id', " + "'version', 'tier', 'source', 'reference', 'comment' and " + "'project_id' specified in the configuration file") + + # Additional attributes + glob_dict.update(attrs) + cube.attributes = glob_dict + + +def var_name_constraint(var_name): + """:mod:`iris.Constraint` using `var_name` of an :mod:`iris.cube.Cube`.""" + return iris.Constraint(cube_func=lambda c: c.var_name == var_name) + + +def _fix_bounds(cube, dim_coord): + """Reset and fix all bounds.""" + if len(cube.coord(dim_coord).points) > 1: + if cube.coord(dim_coord).has_bounds(): + cube.coord(dim_coord).bounds = None + cube.coord(dim_coord).guess_bounds() + + if cube.coord(dim_coord).has_bounds(): + cube.coord(dim_coord).bounds = da.array( + cube.coord(dim_coord).core_bounds(), dtype='float64') + return cube + + +def _fix_dim_coordnames(cube): + """Perform a check on dim coordinate names.""" + # first check for CMOR standard coord; + for coord in cube.coords(): + # guess the CMOR-standard x, y, z and t axes if not there + coord_type = iris.util.guess_coord_axis(coord) + + if coord_type == 'T': + cube.coord(axis=coord_type).var_name = 'time' + cube.coord(axis=coord_type).attributes = {} + + if coord_type == 'X': + cube.coord(axis=coord_type).var_name = 'lon' + cube.coord(axis=coord_type).standard_name = 'longitude' + cube.coord(axis=coord_type).long_name = 'longitude coordinate' + cube.coord(axis=coord_type).units = Unit('degrees') + cube.coord(axis=coord_type).attributes = {} + + if coord_type == 'Y': + cube.coord(axis=coord_type).var_name = 'lat' + cube.coord(axis=coord_type).standard_name = 'latitude' + cube.coord(axis=coord_type).long_name = 'latitude coordinate' + cube.coord(axis=coord_type).units = Unit('degrees') + cube.coord(axis=coord_type).attributes = {} + + if coord_type == 'Z': + if cube.coord(axis=coord_type).var_name == 'depth': + cube.coord(axis=coord_type).standard_name = 'depth' + cube.coord(axis=coord_type).long_name = \ + 'ocean depth coordinate' + cube.coord(axis=coord_type).var_name = 'lev' + cube.coord(axis=coord_type).attributes['positive'] = 'down' + if cube.coord(axis=coord_type).var_name == 'pressure': + cube.coord(axis=coord_type).standard_name = 'air_pressure' + cube.coord(axis=coord_type).long_name = 'pressure' + cube.coord(axis=coord_type).var_name = 'air_pressure' + cube.coord(axis=coord_type).attributes['positive'] = 'up' + + return cube + + +def _roll_cube_data(cube, shift, axis): + """Roll a cube data on specified axis.""" + cube.data = da.roll(cube.core_data(), shift, axis=axis) + return cube + + +def _set_units(cube, units): + """Set units in compliance with cf_unit.""" + special = {'psu': 1.e-3, 'Sv': '1e6 m3 s-1'} + if units in list(special.keys()): + cube.units = special[units] + else: + cube.units = Unit(units) + return cube diff --git a/esmvaltool/utils/editor-enhancements/find_nclfuncs.csh b/esmvaltool/utils/editor-enhancements/find_nclfuncs.csh index 6f162e6238..e8120a9f3a 100644 --- a/esmvaltool/utils/editor-enhancements/find_nclfuncs.csh +++ b/esmvaltool/utils/editor-enhancements/find_nclfuncs.csh @@ -1,6 +1,6 @@ #!/bin/csh -set list = `find ../../interface_scripts/ ../../diag_scripts/ -type f -name "*.ncl" -exec grep 'undef(' {} \; | awk -F '"' '{print $2}'` +set list = `find ../../interface_scripts/ ../../diag_scripts/ ../cmorizers/obs/ -type f -name "*.ncl" -exec grep 
'undef(' {} \; | awk -F '"' '{print $2}'` echo $#list diff --git a/esmvaltool/utils/editor-enhancements/ncl-ESMValTool.el b/esmvaltool/utils/editor-enhancements/ncl-ESMValTool.el index f92d7f5ebd..126f6fa536 100644 --- a/esmvaltool/utils/editor-enhancements/ncl-ESMValTool.el +++ b/esmvaltool/utils/editor-enhancements/ncl-ESMValTool.el @@ -3,6 +3,9 @@ ;******************************************** ; Updated: Thu Mar 2 17:39:33 MST 2017 ; +; Modified version for ESMValTool v2.0 +; Mattia Righi, DLR (Germany) +; ; Revision 0.34 ; - Updated to include new functions, resources, and ; keywords added in NCL 6.0.0 @@ -124,7 +127,7 @@ ;("\\(\".*?\"\\)" 1 font-lock-string-face ) ;; NCL keywords - ("\\<\\(begin\\|break\\|byte\\|character\\|continue\\|create\\|defaultapp\\|do\\|double\\|else\\|end\\|enumeric\\|external\\|file\\|float\\|function\\|getvalues\\|graphic\\|group\\|if\\|integer\\|int64\\|list\\|load\\|local\\|logical\\|long\\|new\\|_Missing\\|Missing\\|new\\|noparent\\|numeric\\|procedure\\|quit\\|QUIT\\|Quit\\|record\\|return\\|setvalues\\|short\\|snumeric\\|stop\\|string\\|then\\|ubyte\\|uint\\|uint64\\|ulong\\|ushort\\|while\\|\\)\\>" 1 font-lock-keyword-face) + ("\\<\\(begin\\|break\\|byte\\|character\\|continue\\|create\\|defaultapp\\|do\\|double\\|else\\|elseif\\|end\\|enumeric\\|external\\|file\\|float\\|function\\|getvalues\\|graphic\\|group\\|if\\|integer\\|int64\\|list\\|load\\|local\\|logical\\|long\\|new\\|_Missing\\|Missing\\|new\\|noparent\\|numeric\\|procedure\\|quit\\|QUIT\\|Quit\\|record\\|return\\|setvalues\\|short\\|snumeric\\|stop\\|string\\|then\\|ubyte\\|uint\\|uint64\\|ulong\\|ushort\\|while\\|\\)\\>" 1 font-lock-keyword-face) ;; Array definition chars and tests - couldn't get to work in list above... ("\\(\(\/\\)" 1 font-lock-keyword-face ) @@ -234,8 +237,8 @@ 
("\\<\\(res_vpUseSegments\\|res_vpWidthF\\|res_vpXF\\|res_vpYF\\|res_wkAntiAlias\\|res_wkBackgroundColor\\|res_wkBackgroundOpacityF\\|res_wkColorMapLen\\|res_wkColorMap\\|res_wkColorModel\\|res_wkColorModel\\|res_wkDashTableLength\\|res_wkDefGraphicStyleId\\|res_wkDeviceLowerX\\|res_wkDeviceLowerX\\|res_wkDeviceLowerX\\|res_wkDeviceLowerY\\|res_wkDeviceLowerY\\|res_wkDeviceLowerY\\|res_wkDeviceUpperX\\|res_wkDeviceUpperX\\|res_wkDeviceUpperX\\|res_wkDeviceUpperY\\|res_wkDeviceUpperY\\|res_wkDeviceUpperY\\|res_wkFileName\\|res_wkFileName\\|res_wkFillTableLength\\|res_wkForegroundColor\\|res_wkFormat\\|res_wkFormat\\|res_wkFullBackground\\|res_wkFullBackground\\|res_wkGksWorkId\\|res_wkHeight\\|res_wkMarkerTableLength\\|res_wkMetaName\\|res_wkOrientation\\|res_wkOrientation\\|res_wkOrientation\\|res_wkPDFFileName\\|res_wkPDFFormat\\|res_wkPDFResolution\\|res_wkPSFileName\\|res_wkPSFormat\\|res_wkPSResolution\\|res_wkPaperHeightF\\|res_wkPaperHeightF\\|res_wkPaperHeightF\\|res_wkPaperSize\\|res_wkPaperSize\\|res_wkPaperSize\\|res_wkPaperWidthF\\|res_wkPaperWidthF\\|res_wkPaperWidthF\\|res_wkTopLevelViews\\|res_wkViews\\|res_wkVisualType\\|res_wkVisualType\\|res_wkWidth\\|res_wsCurrentSize\\|res_wsMaximumSize\\|res_wsThresholdSize\\|res_xyComputeXMax\\|res_xyComputeXMin\\|res_xyComputeYMax\\|res_xyComputeYMin\\|res_xyCoordData\\|res_xyCoordDataSpec\\|res_xyCurveDrawOrder\\|res_xyDashPattern\\|res_xyDashPatterns\\|res_xyExplicitLabels\\|res_xyExplicitLegendLabels\\|res_xyLabelMode\\|res_xyLineColor\\|res_xyLineColors\\|res_xyLineDashSegLenF\\|res_xyLineLabelConstantSpacingF\\|res_xyLineLabelFont\\|res_xyLineLabelFontAspectF\\|res_xyLineLabelFontColor\\|res_xyLineLabelFontColors\\|res_xyLineLabelFontHeightF\\|res_xyLineLabelFontQuality\\|res_xyLineLabelFontThicknessF\\|res_xyLineLabelFuncCode\\|res_xyLineOpacities\\|res_xyLineOpacityF\\|res_xyLineThicknessF\\|res_xyLineThicknesses\\|res_xyMarkLineMode\\|res_xyMarkLineModes\\|res_xyMarker\\|res_xyMarkerColor\\|res_xyMarkerColors\\|res_xyMarkerOpacities\\|res_xyMarkerOpacityF\\|res_xyMarkerSizeF\\|res_xyMarkerSizes\\|res_xyMarkerThicknessF\\|res_xyMarkerThicknesses\\|res_xyMarkers\\|res_xyMonoDashPattern\\|res_xyMonoLineColor\\|res_xyMonoLineLabelFontColor\\|res_xyMonoLineThickness\\|res_xyMonoMarkLineMode\\|res_xyMonoMarker\\|res_xyMonoMarkerColor\\|res_xyMonoMarkerSize\\|res_xyMonoMarkerThickness\\|res_xyXIrrTensionF\\|res_xyXIrregularPoints\\|res_xyXStyle\\|res_xyYIrrTensionF\\|res_xyYIrregularPoints\\|res_xyYStyle\\|\\)\\>" 1 font-lock-constant-face) - ;; ESMValTool interface and diag scripts - 
("\\<\\(write_header\\|write_diag_header\\|log_info\\|log_debug\\|enter_msg\\|leave_msg\\|error_msg\\|tool_stop\\|exit_if_missing_atts\\|write_filelist\\|write_references\\|inquire_and_save_fileinfo\\|copy_VarCoords_l1\\|check_min_max_datasets\\|tstep\\|get_ncdf_name\\|get_ncdf_dir\\|ncdf_read\\|ncdf_write\\|ncdf_att\\|ncdf_define\\|att2var_default\\|att2var\\|bname\\|basename\\|extract_years\\|extend_var_at\\|copy_CoordNames_n\\|empty_str\\|write_info\\|remove_index\\|set_default_att\\|filter_attrs\\|write_ignore_warnings\\|get_ref_dataset_idx\\|read_data\\|read_fx_data\\|get_start_year\\|get_end_year\\|multi_model_stats\\|dim_stddev_wgt_Wrap\\|time_operations\\|calc_season_index\\|extract_season\\|month_to_season_extended\\|coswgt_areaave\\|coswgt_arearmse\\|coswgt_pattern_cor\\|interannual_variability\\|calculate_metric\\|normalize_metric\\|distrib_stats\\|lognormal_dist\\|roi\\|extract_area\\|gridcell_area\\|map_area\\|area_operations\\|select_region\\|make_latlon2D\\|cdo_remapdis\\|find_destination_grid\\|guestimate_average_grid_area\\|get_lower_limits\\|get_upper_limits\\|is_regional\\|esmf_conserve_wrapper\\|rect2rect_interp\\|plev_lat_interp\\|get_dataset_minus_ref\\|esmf_conserve_wrapper_time\\|regrid_3D_to_rectilinear_grid\\|get_title_suffix\\|remove_attrs\\|plot_two_by_one\\|plot_three_by_one_diff\\|two_by_one\\|three_by_one_diff\\|plot_three_by_one_vector\\|three_by_one_vector\\|plot_multipanel\\|multipanel\\|plot_multipanel_vector\\|multipanel_vector\\|seasonal_plot\\|xy_plot_wrapper\\|ts_line_wrapper\\|get_title_suffix\\|remove_attrs\\|plot_two_by_one\\|plot_three_by_one_diff\\|two_by_one\\|three_by_one_diff\\|plot_three_by_one_vector\\|three_by_one_vector\\|plot_multipanel\\|multipanel\\|plot_multipanel_vector\\|multipanel_vector\\|seasonal_plot\\|xy_plot_wrapper\\|ts_line_wrapper\\|xy_line_overlap\\|unique_labels_min\\|unique_labels_all\\|project_style\\|project_style_GO\\|place_debuginfo\\|place_description\\|gsnColorRange\\|format_units\\|set_log_ticks\\|sort_alphabetically\\|plot_precip_domain\\|precip_domain\\|profile_plev\\|aerosol_profile\\|aerosol_sizedist\\|xy_line\\|timeseries_station\\|cycle_plot\\|errorbar_plot\\|contour_map\\|contour_map_polar\\|contour_map_ce\\|add_markers_to_map\\|taylor_plot\\|legend_lines\\|legend_markers\\|scatterplot\\|scatterplot3D\\|scatterplot_markers\\|month_sel\\|lat_names\\|add_line\\|add_scatt\\|add_legenda\\|calcRegCoeffs\\|genZonalMeans\\|calcMeanAnnCycleMonthly\\|calcMeanAnnCycleAnnual\\|rmMeanAnnCycle\\|apfiltersmooth\\|smoothAnomalies\\|clmMon2clmDayn\\|vector_scalar_map_polar\\|portrait_plot\\|mjo_wave_freq_plot\\|addHorVertLinesCross_extended\\|mjo_cross_spectra_plot\\|mjo_ceof_plot\\|mjo_life_cycle_plot\\|pr_u850_mean_plot\\|mjo_xcor_lag_plot\\|mjo_pr_ua_vari_plot\\|mjo_unvari_eof_plot\\|zonalmean_profile\\|create_legend_lines\\|output_type\\|copy_VarAtt_sel\\|panelling\\|get_plot_dir\\|get_outfile_name\\|get_wks\\|add_markers\\|add_num_markers\\|add_errorbar\\|horizontal_whiskers\\|add_prediction_error\\|contourplot\\|convert_units\\|scalfac\\|scale_units\\|rescale\\|UNIQ\\|union\\|set_inclusive_OR\\|intersection\\|is_array_subset\\|relative_complement\\|set_symmetric_difference\\|\\)\\>" 1 font-lock-type-face) + ;; ESMValTool interface, shared and utility scripts + 
("\\<\\(read_data\\|read_fx_data\\|select_metadata_by_atts\\|select_metadata_by_name\\|metadata_att_as_array\\|bname\\|basename\\|att2var\\|att2var_default\\|get_ncdf_name\\|get_ncdf_dir\\|ncdf_read\\|ncdf_define\\|ncdf_write\\|ncdf_att\\|copy_CoordNames_n\\|extend_var_at\\|remove_index\\|set_default_att\\|empty_str\\|log_info\\|log_debug\\|enter_msg\\|leave_msg\\|error_msg\\|tool_stop\\|exit_if_missing_atts\\|log_provenance\\|taylor_plot\\|contour_map\\|contour_map_polar\\|contour_map_ce\\|add_markers_to_map\\|get_title_suffix\\|remove_attrs\\|plot_two_by_one\\|plot_three_by_one_diff\\|two_by_one\\|three_by_one_diff\\|plot_three_by_one_vector\\|three_by_one_vector\\|plot_multipanel\\|multipanel\\|plot_multipanel_vector\\|multipanel_vector\\|seasonal_plot\\|xy_plot_wrapper\\|ts_line_wrapper\\|pr_u850_mean_plot\\|mjo_xcor_lag_plot\\|mjo_pr_ua_vari_plot\\|mjo_unvari_eof_plot\\|get_title_suffix\\|remove_attrs\\|plot_two_by_one\\|plot_three_by_one_diff\\|two_by_one\\|three_by_one_diff\\|plot_three_by_one_vector\\|three_by_one_vector\\|plot_multipanel\\|multipanel\\|plot_multipanel_vector\\|multipanel_vector\\|seasonal_plot\\|xy_plot_wrapper\\|ts_line_wrapper\\|xy_line_overlap\\|plot_precip_domain\\|precip_domain\\|month_sel\\|lat_names\\|add_line\\|add_scatt\\|add_legenda\\|calcRegCoeffs\\|genZonalMeans\\|calcMeanAnnCycleMonthly\\|calcMeanAnnCycleAnnual\\|rmMeanAnnCycle\\|apfiltersmooth\\|smoothAnomalies\\|clmMon2clmDayn\\|scatterplot\\|scatterplot3D\\|scatterplot_markers\\|zonalmean_profile\\|contourplot\\|portrait_plot\\|circle_plot\\|profile_plev\\|aerosol_profile\\|aerosol_sizedist\\|xy_line\\|xy_line_anom\\|timeseries_station\\|cycle_plot\\|errorbar_plot\\|create_legend_lines\\|output_type\\|copy_VarAtt_sel\\|panelling\\|get_plot_dir\\|get_outfile_name\\|get_wks\\|add_markers\\|add_num_markers\\|add_errorbar\\|horizontal_whiskers\\|add_prediction_error\\|mjo_wave_freq_plot\\|addHorVertLinesCross_extended\\|mjo_cross_spectra_plot\\|mjo_ceof_plot\\|mjo_life_cycle_plot\\|vector_scalar_map_polar\\|project_style\\|place_debuginfo\\|place_description\\|gsnColorRange\\|format_units\\|set_log_ticks\\|sort_alphabetically\\|legend_lines\\|legend_markers\\|roi\\|extract_area\\|gridcell_area\\|map_area\\|area_operations\\|select_region\\|make_latlon2D\\|cdo_remapdis\\|guestimate_average_grid_area\\|get_lower_limits\\|get_upper_limits\\|is_regional\\|esmf_conserve_wrapper\\|rect2rect_interp\\|plev_lat_interp\\|get_dataset_minus_ref\\|esmf_conserve_wrapper_time\\|regrid_3D_to_rectilinear_grid\\|get_start_year\\|get_end_year\\|convert_units\\|UNIQ\\|union\\|set_inclusive_OR\\|intersection\\|is_array_subset\\|relative_complement\\|set_symmetric_difference\\|dim_stddev_wgt_Wrap\\|time_operations\\|calc_season_index\\|extract_season\\|month_to_season_extended\\|coswgt_areaave\\|coswgt_arearmse\\|coswgt_pattern_cor\\|interannual_variability\\|calculate_metric\\|normalize_metric\\|distrib_stats\\|lognormal_dist\\|add_labelbar\\|create_empty_array\\|data_read_in\\|data_read_in_ocean_MOC\\|data_read_in_ice\\|y_axis_check\\|check_custom_climo\\|isfilepresent2\\|table_link_setup\\|set_varAtts\\|create_timec\\|format_time\\|format_plev\\|format_lev\\|format_lat\\|format_lon\\|format_coords\\|read_cmor\\|format_variable\\|guess_bounds_time\\|guess_bounds_lev\\|guess_bounds_lat\\|guess_bounds_lon\\|guess_coord_bounds\\|set_global_atts\\|write_nc\\|write_nc_profile\\|set_size_array\\|process_EBAS_data\\|\\)\\>" 1 font-lock-type-face) ) "words used in ncl-mode highlighting" diff --git 
a/esmvaltool/utils/nclcodestyle/nclcodestyle.py b/esmvaltool/utils/nclcodestyle/nclcodestyle.py
index 4d179bb0fd..455ddd7e33 100644
--- a/esmvaltool/utils/nclcodestyle/nclcodestyle.py
+++ b/esmvaltool/utils/nclcodestyle/nclcodestyle.py
@@ -755,6 +755,8 @@ def whitespace_before_parameters(logical_line, tokens):
             (prev_type == tokenize.NAME or prev_text in '}])') and
             # Syntax "class A (B):" is allowed, but avoid it
             (index < 2 or tokens[index - 2][1] != 'class') and
+            # Syntax "elseif (" is allowed (special case for NCL 6.5.0)
+            (tokens[index - 1][1] != 'elseif') and
             # Allow "return (a.foo for a in range(5))"
             not keyword.iskeyword(prev_text)):
         yield prev_end, "E211 whitespace before '%s'" % text
diff --git a/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py b/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py
new file mode 100644
index 0000000000..ba929fea36
--- /dev/null
+++ b/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py
@@ -0,0 +1,265 @@
+r"""
+Install and run u-bd684 - the esmvaltool rose-cylc suite.
+
+Usage:
+------
+-c --config-file:  [REQUIRED] user specific configuration file;
+-r --recipe-files: [REQUIRED] single or multiple (space-sep) recipe files;
+-d --main-dir:     [OPTIONAL] main run dir name (full path);
+                   defaults to $HOME/ESMVALTOOL_ROSE;
+-s --suite-dir     [OPTIONAL] u-bd684 dir full path; can be set by user;
+                   defaults to $HOME/u-bd684;
+-n --no-submit     [OPTIONAL] if specified, will not submit suite to cylc;
+-l --log-level:    [OPTIONAL] log level, default=info
+
+Example:
+--------
+python esmvt_rose_wrapper.py -c /home/users/valeriu/input/config-user.yml \
+    -r /home/users/valeriu/recipes/recipe1.yml \
+       /home/users/valeriu/recipes/recipe2.yml \
+    -d /home/users/valeriu/esmvat_WRAPPER \
+    -s /home/users/valeriu/u-bd684/ \
+    -n
+
+Base suite:
+-----------
+The base suite to run esmvaltool via rose-cylc is u-bd684; for now (Nov 2018)
+the base suite ships with the esmvaltool package by default; in the near
+future this suite will be included in the Rose repository. Its location
+inside esmvaltool is standardized to:
+
+$ESMVALTOOL/esmvaltool/utils/rose-cylc/
+
+Once rose (the executable) works with Python 3.x, this location will become
+the default and the pipeline will access it independently of the user
+(unless the user specifies -s $SUITE_LOCATION); until then, the user needs
+to keep a copy of the suite in $HOME or point to it via the -s option.
+
+Environment:
+------------
+We will move to a unified and centrally-installed esmvaltool environment;
+until then, the user will have to alter the env_setup script:
+
+u-bd684/app/esmvaltool/env_setup
+
+with the correct pointers to the esmvaltool installation, if desired;
+NOTE that the defaults are working pointers for an install on CEDA-Jasmin.
+
+To be able to submit to cylc, you need to have the /metomi/ suite in PATH
+AND use a python2.7 environment. Use the Jasmin example below for guidance.
+
+Jasmin-example:
+---------------
+This shows how to interact with rose-cylc and run esmvaltool under cylc
+using this script:
+
+export PATH=/apps/contrib/metomi/bin:$PATH
+export PATH=/home/users/valeriu/miniconda2/bin:$PATH
+mkdir esmvaltool_rose
+cd esmvaltool_rose
+cp $esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py .
+[get u-bd684 in $HOME, get your recipes and the config]
+python esmvt_rose_wrapper.py -c config-user.yml \
+-r recipe_autoassess_stratosphere.yml recipe_OceanPhysics.yml \
+-d $HOME/esmvaltool_rose
+
+Note that you need to pass FULL PATHS to cylc, no . or ..
+
+A practical example of running the tool can be found on JASMIN:
+/home/users/valeriu/esmvaltool_rose
+There you will find the run shell script, run_example, as well as an example
+of how to set the configuration file. A copy of u-bd684 is always located
+in /home/users/valeriu/roses/u-bd684.
+
+Contact:
+--------
+author: Valeriu Predoi (UREAD, valeriu.predoi@ncas.ac.uk)
+"""
+import argparse
+import logging
+import os
+import sys
+import subprocess
+import shutil
+from distutils.version import LooseVersion
+
+# configparser was renamed in python 3.x
+if LooseVersion(sys.version) < LooseVersion("3.0"):
+    import ConfigParser
+else:
+    import configparser as ConfigParser
+import yaml  # noqa
+
+
+# set up logging
+logger = logging.getLogger(__name__)
+
+# print the header
+HEADER = r"""
+______________________________________________________________________
+
+          ESMValTool Rose-Cylc Wrapper
+______________________________________________________________________
+
+""" + __doc__
+
+
+def get_args():
+    """Define the command line arguments of the wrapper."""
+    # parse command line args
+    parser = argparse.ArgumentParser(
+        description=HEADER,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument(
+        '-c',
+        '--config-file',
+        default=os.path.join(os.path.dirname(__file__), 'config-user.yml'),
+        help='Configuration file')
+    parser.add_argument(
+        '-r',
+        '--recipe-files',
+        type=str,
+        nargs='+',
+        help='Recipe files (list or single file)')
+    parser.add_argument(
+        '-d',
+        '--main-dir',
+        default=os.path.join(os.environ['HOME'], 'ESMVALTOOL_ROSE'),
+        help='Main analysis directory; defaults to $HOME/ESMVALTOOL_ROSE')
+    parser.add_argument(
+        '-s',
+        '--suite-dir',
+        default=os.path.join(os.environ['HOME'], 'u-bd684'),
+        help='u-bd684 suite directory; defaults to $HOME/u-bd684')
+    parser.add_argument(
+        '-n',
+        '--no-submit',
+        action='store_true',
+        help="Flag to NOT submit the Rose suite.")
+    parser.add_argument(
+        '-l',
+        '--log-level',
+        default='info',
+        choices=['debug', 'info', 'warning', 'error'])
+    args = parser.parse_args()
+    return args
+
+
+def _set_logger(logging, out_dir, log_file, log_level):
+    # set logging for screen and file output
+    root_logger = logging.getLogger()
+    out_fmt = "%(asctime)s %(levelname)-8s %(name)s,%(lineno)s\t%(message)s"
+    logging.basicConfig(
+        filename=os.path.join(out_dir, log_file),
+        filemode='a',
+        format=out_fmt,
+        datefmt='%H:%M:%S',
+        level=logging.DEBUG)
+    root_logger.setLevel(log_level.upper())
+    logfmt = logging.Formatter(out_fmt)
+    console_handler = logging.StreamHandler()
+    console_handler.setFormatter(logfmt)
+    root_logger.addHandler(console_handler)
+
+
+def read_yaml_file(yaml_file):
+    """Read recipe into a dictionary."""
+    with open(yaml_file, 'r') as yfile:
+        loaded_file = yaml.safe_load(yfile)
+    return loaded_file
+
+
+def _setup_work(rose_config_template, recipe_files,
+                config_file, main_dir, default_suite, log_level):
+    """Write the new rose conf file per suite."""
+    # Build the ConfigParser object
+    Config = ConfigParser.ConfigParser()
+    Config.optionxform = str
+    Config.read(rose_config_template)
+
+    # set the main work dir
+    if not os.path.exists(main_dir):
+        os.makedirs(main_dir)
+
+    # assemble work tree: recipes dir and a copy of the user config file
+    if not os.path.exists(os.path.join(main_dir, 'recipes')):
+        os.makedirs(os.path.join(main_dir, 'recipes'))
+    if not os.path.exists(os.path.join(main_dir,
+                                       os.path.basename(config_file))):
+        shutil.copy2(config_file, main_dir)
+    recipes_field = []
+    for recipe in recipe_files:
+        if not os.path.exists(os.path.join(main_dir, 'recipes',
+                                           os.path.basename(recipe))):
+            shutil.copy2(recipe, os.path.join(main_dir, 'recipes'))
+        # use splitext rather than strip('.yml'): strip() removes a set of
+        # characters, not a suffix, and would mangle names ending in y/m/l
+        recipes_field.append(os.path.splitext(os.path.basename(recipe))[0])
+    rose_suite = os.path.join(main_dir, 'u-bd684')
+    if os.path.exists(rose_suite):
+        shutil.rmtree(rose_suite)
+    shutil.copytree(default_suite, rose_suite)
+    out_dir = os.path.join(main_dir, 'output')
+    if not os.path.exists(out_dir):
+        os.makedirs(out_dir)
+
+    # set logging
+    _set_logger(logging, out_dir, 'setup.log', log_level)
+    logger.info(HEADER)
+
+    # start logging
+    logger.info("Main working directory: %s", main_dir)
+    logger.info("Using Rose-Cylc suite base: %s", default_suite)
+    logger.info("Output and logs written to: %s", out_dir)
+    logger.info("Creating rose suite directories...")
+    logger.info("Use rose-suite.conf template %s", rose_config_template)
+    logger.info("Use user config file %s", config_file)
+
+    # write the file
+    Config.set('jinja2:suite.rc', 'INPUT_DIR',
+               '"' + main_dir + '"')
+    Config.set('jinja2:suite.rc', 'OUTPUT_DIR', '"' + out_dir + '"')
+    Config.set('jinja2:suite.rc', 'RECIPES', str(recipes_field))
+    with open(os.path.join(rose_suite, 'rose-suite.conf'), 'w') as r_c:
+        logger.info("Writing rose-suite.conf file %s",
+                    os.path.join(rose_suite, 'rose-suite.conf'))
+        Config.write(r_c)
+
+    return rose_suite
+
+
+def _run_suite(suite):
+    """Run the u-bd684 rose-cylc suite."""
+    os.chdir(suite)
+    logger.info("Submitting suite from %s", suite)
+    proc = subprocess.Popen(["rose", "suite-run"],
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    out, err = proc.communicate()
+    logger.info("Rose communications: %s %s", str(out), str(err))
+
+
+def main():
+    """Run the meat of the code."""
+    logger.info("Running main function...")
+    args = get_args()
+    # rose suite default location
+    if args.suite_dir:
+        default_suite = args.suite_dir
+    rose_config_template = os.path.join(default_suite, "rose-suite.conf")
+
+    # get command line arguments
+    recipe_files = args.recipe_files
+    config_file = args.config_file
+    main_dir = args.main_dir
+    log_level = args.log_level
+
+    # setup rose suite
+    run_rose = _setup_work(rose_config_template, recipe_files,
+                           config_file, main_dir, default_suite, log_level)
+
+    # submit to cylc
+    if not args.no_submit:
+        _run_suite(run_rose)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/language_support.sh b/language_support.sh
new file mode 100644
index 0000000000..17b9a04735
--- /dev/null
+++ b/language_support.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+tools=(R Rscript julia)
+
+for tool in "${tools[@]}"; do
+    if ! command -v "$tool" > /dev/null 2>&1; then
+        echo "Executable $tool not found! Exiting..." >> $PREFIX/.messages.txt
+        exit 1
+    fi
+done
+
+Rscript $PREFIX/lib/python*/site-packages/esmvaltool/install/R/setup.R >> $PREFIX/.messages.txt
+julia $PREFIX/lib/python*/site-packages/esmvaltool/install/Julia/setup.jl >> $PREFIX/.messages.txt
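# A minimal sketch of exercising the post-link script by hand; conda sets
# PREFIX at install time, so substituting $CONDA_PREFIX here is an
# assumption:
#
#   PREFIX="$CONDA_PREFIX" bash language_support.sh; echo $?
#   cat "$CONDA_PREFIX/.messages.txt"
#
# If R, Rscript or julia cannot be found, the script appends a message to
# .messages.txt and exits with status 1; otherwise it runs the R and Julia
# dependency installers and logs their output there.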
diff --git a/meta.yaml b/meta.yaml
index 50a8b22aa5..d09c7a4900 100644
--- a/meta.yaml
+++ b/meta.yaml
@@ -2,10 +2,10 @@
 ---
 
 # Build command:
-# conda build esmvaltool -c conda-forge -c birdhouse
+# conda build . -c conda-forge -c birdhouse
 
 # Package version number
-{% set version = "2.0a1" %}
+{% set version = "2.0a2" %}
 
 package:
   name: esmvaltool
@@ -13,16 +13,20 @@ package:
 
 source:
   # Use these two lines to build a release:
-  git_rev: v{{ version }}
-  git_url: https://github.com/ESMValGroup/ESMValTool.git
+  # git_rev: v{{ version }}
+  # git_url: https://github.com/ESMValGroup/ESMValTool.git
   # Use this line instead of the above to test building without a release:
-  # path: .
+  path: .
 
 build:
   # Increment the build number when building a new conda package of the same
   # esmvaltool version, reset to 0 when building a new version.
-  number: 0
-  script: python setup.py install --single-version-externally-managed --record=/tmp/record.txt
+  number: 1
+  script: |
+    python setup.py install --single-version-externally-managed --record=/tmp/record.txt
+    POST_LINK="${PREFIX}/bin/.esmvaltool-post-link.sh"
+    cp -v ${RECIPE_DIR}/language_support.sh ${POST_LINK};
+    chmod +x ${POST_LINK};
 
 requirements:
   build:
@@ -32,29 +36,47 @@ requirements:
     - setuptools_scm
   run:
     # esmvaltool
-    - python
-    - basemap
-    - iris=1.13
+    - python>=3.6
+    - libunwind  # specifically for Python3.7+
+    - graphviz
+    - iris>=2.2
     - python-stratify
     # Normally installed via pip:
     - cartopy
     - cf_units
     - cython
-    - matplotlib
+    - eofs
+    - esmpy
+    - matplotlib<3
+    - nc-time-axis
     - netCDF4
     - numba
     - numpy
+    - pandas
     - pillow
+    - prov
     - psutil
+    - pydot
     - python-cdo
     - pyyaml
+    - scikit-learn
     - shapely
     - six
+    - vmprof
+    - xarray>=0.12.0
     - yamale  # in birdhouse channel
+    - fiona
+    - xlsxwriter
+    # Command line tools used by diagnostic scripts
+    - cdo
+    - imagemagick
+    - nco
     # Multi language support:
-    - ncl
-    - ncurses=6.1=hfc679d8_1
-    # TODO: add R, julia
+    - ncl>=6.5.0
+    - r-base
+    - r-curl  # Dependency of lintr, but fails to compile because it cannot find libcurl installed from conda.
+    - r-udunits2  # Fails to compile because it cannot find udunits2 installed from conda.
 
 test:
   # TODO: add unit tests? This seems to require installing the tests
   imports:
diff --git a/readthedocs.yml b/readthedocs.yml
deleted file mode 100644
index 7329abf70d..0000000000
--- a/readthedocs.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-conda:
-  file: environment.yml
-
-python:
-  setup_py_install: true
diff --git a/setup.cfg b/setup.cfg
index 54cfc87721..b03861006a 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,3 +3,11 @@ source-dir = doc/sphinx/source
 build-dir = doc/sphinx/build
 all_files = 1
 builder = html
+
+[tool:pytest]
+log_level = DEBUG
+env =
+    MPLBACKEND = Agg
+
+[pydocstyle]
+convention = numpy
diff --git a/setup.py b/setup.py
index 065df25949..580e643452 100755
--- a/setup.py
+++ b/setup.py
@@ -1,12 +1,11 @@
 #!/usr/bin/env python
-"""ESMValTool installation script"""
+"""ESMValTool installation script."""
 # This script only installs dependencies available on PyPI
 #
 # Dependencies that need to be installed some other way (e.g. conda):
 # - ncl
 # - iris
 # - python-stratify
-# - basemap
 
 import os
 import re
@@ -28,22 +27,30 @@
     # Installation dependencies
     # Use with pip install . to install from source
     'install': [
-        'basemap',
         'cartopy',
         'cdo',
         'cf_units',
         'cython',
-        # 'scitools-iris',  # Only iris 2 is on PyPI
-        'matplotlib',
+        'eofs',
+        'fiona',
+        'matplotlib<3',
+        'nc-time-axis',  # needed by iris.plot
         'netCDF4',
         'numba',
         'numpy',
+        'pandas',
         'pillow',
+        'prov[dot]',
         'psutil',
         'pyyaml',
+        'scitools-iris>=2.2',
+        'scikit-learn',
         'shapely',
         'six',
         'stratify',
+        'vmprof',
+        'xarray',
+        'xlsxwriter',
         'yamale',
     ],
     # Test dependencies
@@ -54,8 +61,9 @@
         'mock',
         'nose',
         'pycodestyle',
-        'pytest',
+        'pytest>=3.9',
         'pytest-cov',
+        'pytest-env',
         'pytest-html',
         'pytest-metadata>=1.5.1',
     ],
@@ -68,6 +76,7 @@
         'pydocstyle',
         'pylint',
         'sphinx',
+        'sphinx_rtd_theme',
         'yamllint',
         'yapf',
     ],
@@ -75,7 +84,7 @@
 
 
 def discover_python_files(paths, ignore):
-    """Discover Python files"""
+    """Discover Python files."""
 
     def _ignore(path):
         """Return True if `path` should be ignored, False otherwise."""
@@ -93,7 +102,7 @@ def _ignore(path):
 
 
 class CustomCommand(Command):
-    """Custom Command class"""
+    """Custom Command class."""
 
     def install_deps_temp(self):
         """Try to temporarily install packages needed to run the command."""
@@ -107,13 +116,15 @@ def install_deps_temp(self):
 class RunTests(CustomCommand):
     """Class to run tests and generate reports."""
 
-    user_options = []
+    user_options = [('installation', None,
+                     'Run tests that require installation.')]
 
     def initialize_options(self):
-        """Do nothing"""
+        """Initialize custom options."""
+        self.installation = False
 
     def finalize_options(self):
-        """Do nothing"""
+        """Do nothing."""
 
     def run(self):
         """Run tests and generate a coverage report."""
@@ -123,9 +134,10 @@ def run(self):
 
         version = sys.version_info[0]
         report_dir = 'test-reports/python{}'.format(version)
-        errno = pytest.main([
+        args = [
             'tests',
             'esmvaltool',  # for doctests
+            '--ignore=esmvaltool/cmor/tables/',
             '--doctest-modules',
             '--cov=esmvaltool',
             '--cov-report=term',
@@ -133,7 +145,10 @@ def run(self):
             '--cov-report=xml:{}/coverage.xml'.format(report_dir),
             '--junit-xml={}/report.xml'.format(report_dir),
             '--html={}/report.html'.format(report_dir),
-        ])
+        ]
+        if self.installation:
+            args.append('--installation')
+        errno = pytest.main(args)
         sys.exit(errno)
 
 
@@ -144,10 +159,10 @@ class RunLinter(CustomCommand):
     user_options = []
 
     def initialize_options(self):
-        """Do nothing"""
+        """Do nothing."""
 
     def finalize_options(self):
-        """Do nothing"""
+        """Do nothing."""
 
     def run(self):
         """Run prospector and generate a report."""
@@ -200,8 +215,8 @@ def run(self):
         'Environment :: Console',
         'License :: OSI Approved :: Apache Software License',
         'Programming Language :: Python',
-        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3.6',
+        'Programming Language :: Python :: 3.7',
     ],
     packages=PACKAGES,
     # Include all version controlled files
@@ -215,7 +230,12 @@ def run(self):
     entry_points={
         'console_scripts': [
             'esmvaltool = esmvaltool._main:run',
-            'nclcodestyle = esmvaltool.utils.nclcodestyle.nclcodestyle:_main',
+            'cmorize_obs = esmvaltool.'
+            'utils.cmorizers.obs.cmorize_obs:execute_cmorize',
+            'nclcodestyle = esmvaltool.'
+            'utils.nclcodestyle.nclcodestyle:_main',
+            'mip_convert_setup = esmvaltool.'
+            'utils.cmorizers.mip_convert.esmvt_mipconv_setup:main'
         ],
     },
     cmdclass={
diff --git a/tests/__init__.py b/tests/__init__.py
index a12a85a013..7d9b55fc89 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -2,15 +2,11 @@
 Provides testing capabilities for :mod:`esmvaltool` package.
""" - -from __future__ import absolute_import, division, print_function - +import unittest from functools import wraps import mock import numpy as np -import unittest -import sys class Test(unittest.TestCase): @@ -70,9 +66,6 @@ def patch(self, *args, **kwargs): # Return patch replacement object. return start_result - if sys.version_info[0] == 2: - assertRaisesRegex = unittest.TestCase.assertRaisesRegexp - @wraps(np.testing.assert_array_equal) def assertArrayEqual(self, a, b, err_msg='', verbose=True): np.testing.assert_array_equal(a, b, err_msg=err_msg, verbose=verbose) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000000..5235a69295 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,20 @@ +import pytest + + +def pytest_addoption(parser): + """Add a command line option to skip tests that require installation.""" + parser.addoption( + "--installation", + action="store_true", + default=False, + help="run tests that require installation") + + +def pytest_collection_modifyitems(config, items): + """Select tests to run based on command line options.""" + if config.getoption("--installation"): + return + skip_install = pytest.mark.skip(reason="need --installation option to run") + for item in items: + if "install" in item.keywords: + item.add_marker(skip_install) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py b/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py index 98301e2060..5adc081f73 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py @@ -8,13 +8,13 @@ class TestCo2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='co2', units='J') + self.cube = Cube([1.0], var_name='co2', units='J') self.fix = co2() def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] self.assertEqual(cube.units, Unit('1e-6')) - self.assertEqual(cube.data[0], 1) + self.assertEqual(cube.data[0], 1.0) def test_fix_data(self): cube = self.fix.fix_data(self.cube) @@ -24,11 +24,11 @@ def test_fix_data(self): class Testfgco2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='fgco2', units='J') + self.cube = Cube([1.0], var_name='fgco2', units='J') self.fix = fgco2() def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] self.assertEqual(cube.units, Unit('kg m-2 s-1')) self.assertEqual(cube.data[0], 1) @@ -40,11 +40,11 @@ def test_fix_data(self): class TestCh4(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='ch4', units='J') + self.cube = Cube([1.0], var_name='ch4', units='J') self.fix = ch4() def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] self.assertEqual(cube.units, Unit('1e-9')) self.assertEqual(cube.data[0], 1) @@ -56,11 +56,11 @@ def test_fix_data(self): class Testspco2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='spco2', units='J') + self.cube = Cube([1.0], var_name='spco2', units='J') self.fix = spco2() def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] self.assertEqual(cube.units, Unit('J')) self.assertEqual(cube.data[0], 1) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py b/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py index f5158797f3..5249277e4b 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py +++ 
diff --git a/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py b/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py
index 98301e2060..5adc081f73 100644
--- a/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py
+++ b/tests/integration/cmor/_fixes/CMIP5/test_BNU_ESM.py
@@ -8,13 +8,13 @@
 class TestCo2(unittest.TestCase):
     def setUp(self):
-        self.cube = Cube([1], var_name='co2', units='J')
+        self.cube = Cube([1.0], var_name='co2', units='J')
         self.fix = co2()
 
     def test_fix_metadata(self):
-        cube = self.fix.fix_metadata(self.cube)
+        cube = self.fix.fix_metadata([self.cube])[0]
         self.assertEqual(cube.units, Unit('1e-6'))
-        self.assertEqual(cube.data[0], 1)
+        self.assertEqual(cube.data[0], 1.0)
 
     def test_fix_data(self):
         cube = self.fix.fix_data(self.cube)
@@ -24,11 +24,11 @@
 class Testfgco2(unittest.TestCase):
     def setUp(self):
-        self.cube = Cube([1], var_name='fgco2', units='J')
+        self.cube = Cube([1.0], var_name='fgco2', units='J')
         self.fix = fgco2()
 
     def test_fix_metadata(self):
-        cube = self.fix.fix_metadata(self.cube)
+        cube = self.fix.fix_metadata([self.cube])[0]
         self.assertEqual(cube.units, Unit('kg m-2 s-1'))
         self.assertEqual(cube.data[0], 1)
 
@@ -40,11 +40,11 @@
 class TestCh4(unittest.TestCase):
     def setUp(self):
-        self.cube = Cube([1], var_name='ch4', units='J')
+        self.cube = Cube([1.0], var_name='ch4', units='J')
         self.fix = ch4()
 
     def test_fix_metadata(self):
-        cube = self.fix.fix_metadata(self.cube)
+        cube = self.fix.fix_metadata([self.cube])[0]
         self.assertEqual(cube.units, Unit('1e-9'))
         self.assertEqual(cube.data[0], 1)
 
@@ -56,11 +56,11 @@
 class Testspco2(unittest.TestCase):
     def setUp(self):
-        self.cube = Cube([1], var_name='spco2', units='J')
+        self.cube = Cube([1.0], var_name='spco2', units='J')
         self.fix = spco2()
 
     def test_fix_metadata(self):
-        cube = self.fix.fix_metadata(self.cube)
+        cube = self.fix.fix_metadata([self.cube])[0]
         self.assertEqual(cube.units, Unit('J'))
         self.assertEqual(cube.data[0], 1)
 
diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py b/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py
index f5158797f3..5249277e4b 100644
--- a/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py
+++ b/tests/integration/cmor/_fixes/CMIP5/test_CCSM4.py
@@ -1,6 +1,6 @@
 import unittest
 
-import numpy as np
+import numpy as np
 from iris.coords import DimCoord
 from iris.cube import Cube
 
@@ -12,24 +12,25 @@
 class TestsRlut(unittest.TestCase):
     def setUp(self):
         """Prepare tests"""
-        self.cube = Cube([1, 2], var_name='rlut')
-        self.cube.add_dim_coord(DimCoord([0.50001, 1.499999],
-                                         standard_name='latitude',
-                                         bounds=[[0.00001, 0.999999],
-                                                 [1.00001, 1.999999],
-                                                 ]),
-                                0)
+        self.cube = Cube([1.0, 2.0], var_name='rlut')
+        self.cube.add_dim_coord(
+            DimCoord([0.50001, 1.499999],
+                     standard_name='latitude',
+                     bounds=[
+                         [0.00001, 0.999999],
+                         [1.00001, 1.999999],
+                     ]), 0)
         self.fix = rlut()
 
     def test_fix_metadata(self):
         """Check that latitude values are rounded"""
-        cube = self.fix.fix_metadata(self.cube)
+        cube = self.fix.fix_metadata([self.cube])[0]
         latitude = cube.coord('latitude')
         self.assertTrue(np.all(latitude.points == np.array([0.5000, 1.5000])))
-        self.assertTrue(np.all(latitude.bounds == np.array([[0.0000, 1.0000],
-                                                            [1.0000, 2.0000]
-                                                            ])))
+        self.assertTrue(
+            np.all(latitude.bounds == np.array([[0.0000, 1.0000],
+                                                [1.0000, 2.0000]])))
 
 
 class TestsRlutcs(unittest.TestCase):
@@ -37,24 +38,25 @@ class TestsRlutcs(unittest.TestCase):
     def setUp(self):
         """Prepare tests"""
-        self.cube = Cube([1, 2], var_name='rlut')
-        self.cube.add_dim_coord(DimCoord([0.50001, 1.499999],
-                                         standard_name='latitude',
-                                         bounds=[[0.00001, 0.999999],
-                                                 [1.00001, 1.999999],
-                                                 ]),
-                                0)
+        self.cube = Cube([1.0, 2.0], var_name='rlutcs')
+        self.cube.add_dim_coord(
+            DimCoord([0.50001, 1.499999],
+                     standard_name='latitude',
+                     bounds=[
+                         [0.00001, 0.999999],
+                         [1.00001, 1.999999],
+                     ]), 0)
         self.fix = rlutcs()
 
     def test_fix_metadata(self):
         """Check that latitude values are rounded"""
-        cube = self.fix.fix_metadata(self.cube)
+        cube = self.fix.fix_metadata([self.cube])[0]
         latitude = cube.coord('latitude')
         self.assertTrue(np.all(latitude.points == np.array([0.5000, 1.5000])))
-        self.assertTrue(np.all(latitude.bounds == np.array([[0.0000, 1.0000],
-                                                            [1.0000, 2.0000]
-                                                            ])))
+        self.assertTrue(
+            np.all(latitude.bounds == np.array([[0.0000, 1.0000],
+                                                [1.0000, 2.0000]])))
 
 
 class TestSo(unittest.TestCase):
@@ -62,10 +64,10 @@ class TestSo(unittest.TestCase):
     def setUp(self):
         """Prepare tests"""
-        self.cube = Cube([1, 2], var_name='so', units='1.0')
+        self.cube = Cube([1.0, 2.0], var_name='so', units='1.0')
         self.fix = so()
 
     def test_fix_metadata(self):
         """Checks that units are changed to the correct value"""
-        cube = self.fix.fix_metadata(self.cube)
+        cube = self.fix.fix_metadata([self.cube])[0]
         self.assertEqual('1e3', cube.units.origin)
diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CESM1_BGC.py b/tests/integration/cmor/_fixes/CMIP5/test_CESM1_BGC.py
index 6e28db762d..54521d987a 100644
--- a/tests/integration/cmor/_fixes/CMIP5/test_CESM1_BGC.py
+++ b/tests/integration/cmor/_fixes/CMIP5/test_CESM1_BGC.py
@@ -1,87 +1,22 @@
-import os
-import shutil
-import tempfile
+"""Tests for CESM1-BGC fixes."""
 import unittest
 
-import netCDF4
 from cf_units import Unit
-from iris.coords import DimCoord
 from iris.cube import Cube
 
-from esmvaltool.cmor._fixes.CMIP5.CESM1_BGC import allvars, co2, nbp
-
-
-class TestAll(unittest.TestCase):
-    def setUp(self):
-        self.cube = Cube([1, 2], var_name='co2', units='J')
-        self.cube.add_dim_coord(
-            DimCoord(
-                [0, 1],
-                standard_name='time',
-                units=Unit(
-                    'days since 0001-01-01 00:00:00.0000000 UTC',
-                    calendar='gregorian')),
-            0)
-        self.fix = allvars()
-
-    def test_fix_metadata(self):
-        cube =
self.fix.fix_metadata(self.cube) - - time = cube.coord('time') - self.assertEqual(time.units.origin, 'days since 1850-01-01 00:00:00') - self.assertEqual(time.units.calendar, 'gregorian') - - def test_fix_metadata_good_units(self): - self.cube.coord('time').units = Unit('days since 1950-01-01 00:00:00', - calendar='gregorian') - cube = self.fix.fix_metadata(self.cube) - - time = cube.coord('time') - self.assertEqual(time.units.origin, 'days since 1950-01-01 00:00:00') - self.assertEqual(time.units.calendar, 'gregorian') +from esmvaltool.cmor._fixes.CMIP5.CESM1_BGC import co2 class TestCo2(unittest.TestCase): + """Tests for co2.""" + def setUp(self): - self.cube = Cube([1], var_name='co2', units='J') + """Prepare tests.""" + self.cube = Cube([1.0], var_name='co2', units='J') self.fix = co2() def test_fix_data(self): + """Test fix to set units correctly.""" cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 28.966 / 44.0) self.assertEqual(cube.units, Unit('J')) - - -class TestNbp(unittest.TestCase): - def setUp(self): - """Prepare temp folder for test""" - self.fix = nbp() - self.temp_folder = tempfile.mkdtemp() - - def tearDown(self): - """Prepare temp folder for test""" - shutil.rmtree(self.temp_folder) - - def test_fix_file(self): - """Test fix on nbp files to set correct missing and fill values""" - temp_handler, temp_path = tempfile.mkstemp('.nc', dir=self.temp_folder) - os.close(temp_handler) - output_dir = os.path.join(self.temp_folder, 'fixed') - - dataset = netCDF4.Dataset(temp_path, "w") - dataset.createDimension('dim', 1) - var = dataset.createVariable('nbp', float, dimensions=('dim',), - fill_value=1.0e20) - var.missing_value = 1.0e20 - var[0] = 1.0 - dataset.close() - - new_file = self.fix.fix_file(temp_path, output_dir) - - self.assertNotEqual( - os.path.realpath(temp_path), os.path.realpath(new_file)) - - dataset = netCDF4.Dataset(new_file) - var = dataset.variables['nbp'] - self.assertEqual(var.missing_value, 1.0e33) - self.assertEqual(var._FillValue, 1.0e33) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CNRM_CM5.py b/tests/integration/cmor/_fixes/CMIP5/test_CNRM_CM5.py index 8a7f1497dd..e829f48dac 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_CNRM_CM5.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_CNRM_CM5.py @@ -8,21 +8,21 @@ class TestMsftmyz(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='msftmyz', units='J') + self.cube = Cube([1.0], var_name='msftmyz', units='J') self.fix = msftmyz() def test_fix_data(self): cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1e6) + self.assertEqual(cube.data[0], 1.0e6) self.assertEqual(cube.units, Unit('J')) class TestMsftmyzba(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='msftmyzba', units='J') + self.cube = Cube([1.0], var_name='msftmyzba', units='J') self.fix = msftmyzba() def test_fix_data(self): cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1e6) + self.assertEqual(cube.data[0], 1.0e6) self.assertEqual(cube.units, Unit('J')) diff --git a/tests/integration/cmor/_fixes/CMIP5/test_CanESM2.py b/tests/integration/cmor/_fixes/CMIP5/test_CanESM2.py index 7a22bccb10..dd5dff6850 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_CanESM2.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_CanESM2.py @@ -8,7 +8,7 @@ class TestCanESM2Fgco2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='fgco2', units='J') + self.cube = Cube([1.0], var_name='fgco2', units='J') self.fix = fgco2() def 
test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_EC_EARTH.py b/tests/integration/cmor/_fixes/CMIP5/test_EC_EARTH.py index d612d46956..5ddeb0a380 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_EC_EARTH.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_EC_EARTH.py @@ -8,7 +8,7 @@ class TestSic(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='sic', units='J') + self.cube = Cube([1.0], var_name='sic', units='J') self.fix = sic() def test_fix_data(self): @@ -19,7 +19,7 @@ def test_fix_data(self): class TestSftlf(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='sftlf', units='J') + self.cube = Cube([1.0], var_name='sftlf', units='J') self.fix = sftlf() def test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_FGOALS_g2.py b/tests/integration/cmor/_fixes/CMIP5/test_FGOALS_g2.py index c1df1cf86d..9d4d51e00e 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_FGOALS_g2.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_FGOALS_g2.py @@ -9,17 +9,17 @@ class TestAll(unittest.TestCase): def setUp(self): - self.cube = Cube([1, 2], var_name='co2', units='J') + self.cube = Cube([1.0, 2.0], var_name='co2', units='J') self.cube.add_dim_coord( DimCoord( - [0, 1], + [0.0, 1.0], standard_name='time', units=Unit('days since 0001-01', calendar='gregorian')), 0) self.fix = allvars() def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] time = cube.coord('time') self.assertEqual(time.units.origin, diff --git a/tests/integration/cmor/_fixes/CMIP5/test_FIO_ESM.py b/tests/integration/cmor/_fixes/CMIP5/test_FIO_ESM.py index f6909337eb..a4ed917ce0 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_FIO_ESM.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_FIO_ESM.py @@ -8,7 +8,7 @@ class TestCh4(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='ch4', units='J') + self.cube = Cube([1.0], var_name='ch4', units='J') self.fix = ch4() def test_fix_data(self): @@ -19,7 +19,7 @@ def test_fix_data(self): class TestCo2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='co2', units='J') + self.cube = Cube([1.0], var_name='co2', units='J') self.fix = co2() def test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM2p1.py b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM2p1.py index a2adffe3fb..498752fd08 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM2p1.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM2p1.py @@ -8,7 +8,7 @@ class TestSftof(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='sftof', units='J') + self.cube = Cube([1.0], var_name='sftof', units='J') self.fix = sftof() def test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM3.py b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM3.py index 6021f54e38..c8d42b08d8 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM3.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_CM3.py @@ -8,7 +8,7 @@ class TestSftof(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='sftof', units='J') + self.cube = Cube([1.0], var_name='sftof', units='J') self.fix = sftof() def test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2G.py b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2G.py index f30246c2b0..f0d5011815 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2G.py +++ 
b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2G.py @@ -1,14 +1,61 @@ +"""Tests for fixes of GFDL-ESM2G (CMIP5).""" import unittest +import iris +import mock +import pytest from cf_units import Unit -from iris.cube import Cube -from esmvaltool.cmor._fixes.CMIP5.GFDL_ESM2G import co2 +from esmvaltool.cmor._fixes.CMIP5.GFDL_ESM2G import (_get_and_remove, allvars, + co2, fgco2) + +CUBE_1 = iris.cube.Cube([1.0], long_name='to_be_rm') +CUBE_2 = iris.cube.Cube([1.0], long_name='not_to_be_rm') +CUBES_LISTS = [ + (iris.cube.CubeList([CUBE_1]), iris.cube.CubeList([])), + (iris.cube.CubeList([CUBE_1, CUBE_2]), iris.cube.CubeList([CUBE_2])), + (iris.cube.CubeList([CUBE_2]), iris.cube.CubeList([CUBE_2])), +] + + +@pytest.mark.parametrize('cubes_in,cubes_out', CUBES_LISTS) +def test_get_and_remove(cubes_in, cubes_out): + _get_and_remove(cubes_in, 'to_be_rm') + assert cubes_in is not cubes_out + assert cubes_in == cubes_out + + +CUBES = iris.cube.CubeList([CUBE_1, CUBE_2]) + + +@mock.patch( + 'esmvaltool.cmor._fixes.CMIP5.GFDL_ESM2G._get_and_remove', autospec=True) +def test_allvars(mock_get_and_remove): + fix = allvars() + fix.fix_metadata(CUBES) + assert mock_get_and_remove.call_count == 3 + assert mock_get_and_remove.call_args_list == [ + mock.call(CUBES, 'Start time for average period'), + mock.call(CUBES, 'End time for average period'), + mock.call(CUBES, 'Length of average period'), + ] + + +@mock.patch( + 'esmvaltool.cmor._fixes.CMIP5.GFDL_ESM2G._get_and_remove', autospec=True) +def test_fgco2(mock_get_and_remove): + fix = fgco2() + fix.fix_metadata(CUBES) + assert mock_get_and_remove.call_count == 2 + assert mock_get_and_remove.call_args_list == [ + mock.call(CUBES, 'Latitude of tracer (h) points'), + mock.call(CUBES, 'Longitude of tracer (h) points'), + ] class TestCo2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='co2', units='J') + self.cube = iris.cube.Cube([1.0], var_name='co2', units='J') self.fix = co2() def test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2M.py b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2M.py index b9e587cf21..5e4c924f9c 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2M.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_GFDL_ESM2M.py @@ -4,32 +4,12 @@ from iris.coords import DimCoord from iris.cube import Cube -from esmvaltool.cmor._fixes.CMIP5.GFDL_ESM2M import allvars, co2, sftof - - -class TestAll(unittest.TestCase): - def setUp(self): - self.cube = Cube([1, 2], var_name='co2', units='J') - self.cube.add_dim_coord( - DimCoord( - [0, 1], - standard_name='time', - units=Unit( - 'days since 0001-01-01 00:00:00', calendar='gregorian')), - 0) - self.fix = allvars() - - def test_fix_data(self): - cube = self.fix.fix_metadata(self.cube) - - time = cube.coord('time') - self.assertEqual(time.units.origin, 'days since 1850-01-01 00:00:00') - self.assertEqual(time.units.calendar, 'gregorian') +from esmvaltool.cmor._fixes.CMIP5.GFDL_ESM2M import co2, sftof class TestSftof(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='sftof', units='J') + self.cube = Cube([1.0], var_name='sftof', units='J') self.fix = sftof() def test_fix_data(self): @@ -40,7 +20,7 @@ def test_fix_data(self): class TestCo2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='co2', units='J') + self.cube = Cube([1.0], var_name='co2', units='J') self.fix = co2() def test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_MIROC5.py 
b/tests/integration/cmor/_fixes/CMIP5/test_MIROC5.py index 76c7084385..bf95a03c55 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_MIROC5.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_MIROC5.py @@ -8,7 +8,7 @@ class TestGpp(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='sftof', units='J') + self.cube = Cube([1.0], var_name='sftof', units='J') self.fix = sftof() def test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM.py b/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM.py index 4df62afa21..75d42e68b9 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM.py @@ -10,18 +10,18 @@ class TestCo2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='co2', units='J') + self.cube = Cube([1.0], var_name='co2', units='J') self.fix = co2() def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] self.assertEqual(cube.data[0], 1) self.assertEqual(cube.units, Unit('1e-6')) class TestTro3(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='tro3', units='J') + self.cube = Cube([1.0], var_name='tro3', units='J') self.fix = tro3() def test_fix_data(self): @@ -32,18 +32,18 @@ def test_fix_data(self): class TestGpp(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='gpp', units='J') + self.cube = Cube([1.0], var_name='gpp', units='J') self.fix = gpp() def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] self.assertEqual(cube.data[0], 1) self.assertEqual(cube.units, Unit('g m-2 day-1')) class TestAll(unittest.TestCase): def setUp(self): - self.cube = Cube([[1, 2], [3, 4]], var_name='co2', units='J') + self.cube = Cube([[1.0, 2.0], [3.0, 4.0]], var_name='co2', units='J') self.cube.add_dim_coord( DimCoord( [0, 1], @@ -55,30 +55,15 @@ def setUp(self): self.fix = allvars() - def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) - time = cube.coord('time') - self.assertEqual(time.units.origin, 'days since 1849-01-01 00:00:00') - self.assertEqual(time.units.calendar, 'gregorian') - - def test_fix_metadata_1_1(self): - time = self.cube.coord('time') - time.units = Unit("days since 1-1-1", time.units.calendar) - cube = self.fix.fix_metadata(self.cube) - - time = cube.coord('time') - self.assertEqual(time.units.origin, 'days since 1850-01-01 00:00:00') - self.assertEqual(time.units.calendar, 'gregorian') - def test_fix_metadata_plev(self): time = self.cube.coord('time') time.units = Unit("days since 1-1-1", time.units.calendar) - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] cube.coord('air_pressure') def test_fix_metadata_no_plev(self): self.cube.remove_coord('AR5PL35') - cube = self.fix.fix_metadata(self.cube) + cube = self.fix.fix_metadata([self.cube])[0] with self.assertRaises(CoordinateNotFoundError): cube.coord('air_pressure') diff --git a/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM_CHEM.py b/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM_CHEM.py index 7f80f25786..62149925da 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM_CHEM.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_MIROC_ESM_CHEM.py @@ -8,7 +8,7 @@ class TestTro3(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='tro3', units='J') + self.cube = Cube([1.0], var_name='tro3', units='J') self.fix = tro3() def test_fix_data(self): diff --git 
a/tests/integration/cmor/_fixes/CMIP5/test_MPI_ESM_LR.py b/tests/integration/cmor/_fixes/CMIP5/test_MPI_ESM_LR.py old mode 100755 new mode 100644 index bfbded9573..1809ebb9ac --- a/tests/integration/cmor/_fixes/CMIP5/test_MPI_ESM_LR.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_MPI_ESM_LR.py @@ -8,7 +8,7 @@ class TestPctisccp2(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='pctisccp', units='J') + self.cube = Cube([1.0], var_name='pctisccp', units='J') self.fix = pctisccp() def test_fix_data(self): diff --git a/tests/integration/cmor/_fixes/CMIP5/test_NorESM1_ME.py b/tests/integration/cmor/_fixes/CMIP5/test_NorESM1_ME.py new file mode 100644 index 0000000000..ae1c056eb1 --- /dev/null +++ b/tests/integration/cmor/_fixes/CMIP5/test_NorESM1_ME.py @@ -0,0 +1,65 @@ +"""Tests for fixes of NorESM1-ME (CMIP5).""" +import pytest +import iris +from iris.cube import CubeList + +from esmvaltool.cmor._fixes.CMIP5.NorESM1_ME import tas + +DIM_COORD_SHORT = iris.coords.DimCoord( + [1.0, 2.0, 3.0], + bounds=[[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]], + var_name='dim_coord', +) +DIM_COORD_LONG = iris.coords.DimCoord( + [1.1234567891011, 2.1234567891011, 3.1234567891011], + bounds=[ + [0.51234567891011, 1.51234567891011], + [1.51234567891011, 2.51234567891011], + [2.51234567891011, 3.51234567891011], + ], + var_name='dim_coord', +) +DIM_COORD_ROUNDED = iris.coords.DimCoord( + [1.123456789101, 2.123456789101, 3.123456789101], + bounds=[ + [0.512345678910, 1.512345678910], + [1.512345678910, 2.512345678910], + [2.512345678910, 3.512345678910], + ], + var_name='dim_coord', +) +AUX_COORD = iris.coords.AuxCoord( + [1.1284712947128749498712, 2.12421841274128947982, 3.12787129852141124214], + var_name='aux_coord', +) + +CUBE_IN_SHORT = iris.cube.Cube( + [3.14, 6.28, 9.42], + dim_coords_and_dims=[(DIM_COORD_SHORT, 0)], + aux_coords_and_dims=[(AUX_COORD, 0)], +) +CUBE_IN_LONG = iris.cube.Cube( + [3.14, 6.28, 9.42], + dim_coords_and_dims=[(DIM_COORD_LONG, 0)], + aux_coords_and_dims=[(AUX_COORD, 0)], +) +CUBE_OUT_LONG = iris.cube.Cube( + [3.14, 6.28, 9.42], + dim_coords_and_dims=[(DIM_COORD_ROUNDED, 0)], + aux_coords_and_dims=[(AUX_COORD, 0)], +) + +CUBES_TO_FIX = [ + (CubeList([CUBE_IN_SHORT]), CubeList([CUBE_IN_SHORT])), + (CubeList([CUBE_IN_LONG]), CubeList([CUBE_OUT_LONG])), + (CubeList([CUBE_IN_LONG, CUBE_IN_SHORT]), + CubeList([CUBE_OUT_LONG, CUBE_IN_SHORT])), +] + + +@pytest.mark.parametrize('cubes_in,cubes_out', CUBES_TO_FIX) +def test_tas(cubes_in, cubes_out): + fix = tas() + new_cubes = fix.fix_metadata(cubes_in) + assert new_cubes is cubes_in + assert new_cubes == cubes_out diff --git a/tests/integration/cmor/_fixes/CMIP5/test_inmcm4.py b/tests/integration/cmor/_fixes/CMIP5/test_inmcm4.py index 17f8f87e10..bdd9712568 100644 --- a/tests/integration/cmor/_fixes/CMIP5/test_inmcm4.py +++ b/tests/integration/cmor/_fixes/CMIP5/test_inmcm4.py @@ -1,14 +1,19 @@ +"""Tests for inmcm4 fixes.""" +import os +import shutil +import tempfile import unittest +import iris from cf_units import Unit from iris.cube import Cube -from esmvaltool.cmor._fixes.CMIP5.inmcm4 import gpp, lai +from esmvaltool.cmor._fixes.CMIP5.inmcm4 import gpp, lai, nbp class TestGpp(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='gpp', units='J') + self.cube = Cube([1.0], var_name='gpp', units='J') self.fix = gpp() def test_fix_data(self): @@ -19,10 +24,38 @@ def test_fix_data(self): class TestLai(unittest.TestCase): def setUp(self): - self.cube = Cube([1], var_name='lai', units='J') + self.cube = 
Cube([1.0], var_name='lai', units='J') self.fix = lai() def test_fix_data(self): cube = self.fix.fix_data(self.cube) self.assertEqual(cube.data[0], 1.0 / 100.0) self.assertEqual(cube.units, Unit('J')) + + +class TestNbp(unittest.TestCase): + """Tests for nbp.""" + + def setUp(self): + """Prepare temp folder for test.""" + self.cube = Cube([1.0], var_name='nbp') + self.fix = nbp() + self.temp_folder = tempfile.mkdtemp() + + def tearDown(self): + """Delete temp folder.""" + shutil.rmtree(self.temp_folder) + + def test_fix_file(self): + """Test fix on nbp files to set standard_name.""" + temp_handler, temp_path = tempfile.mkstemp('.nc', dir=self.temp_folder) + os.close(temp_handler) + output_dir = os.path.join(self.temp_folder, 'fixed') + + iris.save(self.cube, temp_path) + new_path = self.fix.fix_file(temp_path, output_dir) + new_cube = iris.load_cube(new_path) + self.assertEqual( + new_cube.standard_name, + 'surface_net_downward_mass_flux_of_carbon_dioxide_' + 'expressed_as_carbon_due_to_all_land_processes') diff --git a/tests/integration/cmor/_fixes/OBS/test_BDBP.py b/tests/integration/cmor/_fixes/OBS/test_BDBP.py deleted file mode 100644 index 8b89e9f8f7..0000000000 --- a/tests/integration/cmor/_fixes/OBS/test_BDBP.py +++ /dev/null @@ -1,40 +0,0 @@ -import unittest - -from iris.coords import DimCoord -from iris.cube import Cube - -from esmvaltool.cmor._fixes.OBS.BDBP import tro3prof - - -class TestTro3prof(unittest.TestCase): - def setUp(self): - self.cube = Cube([1, 2], var_name='tro3prof', units='J') - self.cube.add_dim_coord( - DimCoord([1, 2], standard_name='air_pressure', units='hPa'), 0) - self.fix = tro3prof() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) - self.assertEqual(cube.coord('air_pressure').units.origin, 'Pa') - self.assertEqual(cube.coord('air_pressure').points[0], 100) - self.assertEqual(cube.coord('air_pressure').points[1], 200) - self.assertIsNone(cube.coord('air_pressure').bounds) - - def test_fix_metadata_with_bounds(self): - self.cube = Cube([1, 2], var_name='tro3prof', units='J') - self.cube.add_dim_coord( - DimCoord( - [1, 2], - standard_name='air_pressure', - units='hPa', - bounds=[[0.5, 1.5], [1.5, 2.5]]), 0) - cube = self.fix.fix_metadata(self.cube) - - plev = cube.coord('air_pressure') - self.assertEqual(plev.units.origin, 'Pa') - self.assertEqual(plev.points[0], 100) - self.assertEqual(plev.points[1], 200) - self.assertEqual(plev.bounds[0][0], 50) - self.assertEqual(plev.bounds[0][1], 150) - self.assertEqual(plev.bounds[1][0], 150) - self.assertEqual(plev.bounds[1][1], 250) diff --git a/tests/integration/cmor/_fixes/OBS/test_ESACCI_GHG.py b/tests/integration/cmor/_fixes/OBS/test_ESACCI_GHG.py deleted file mode 100644 index 31dca2313c..0000000000 --- a/tests/integration/cmor/_fixes/OBS/test_ESACCI_GHG.py +++ /dev/null @@ -1,66 +0,0 @@ -import unittest - -from iris.cube import Cube - -from esmvaltool.cmor._fixes.OBS.ESACCI_GHG import ( - xch4Stddev, xch4Stderr, xco2Stddev, xco2Stderr) - - -class Testxco2Stderr(unittest.TestCase): - def setUp(self): - self.cube = Cube([1], var_name='xco2Stderr', units='1') - self.fix = xco2Stderr() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) - self.assertEqual(cube.units.origin, '1.0e-6') - self.assertEqual(cube.data[0], 1) - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1e6) - - -class Testxco2Stddev(unittest.TestCase): - def setUp(self): - self.cube = Cube([1], var_name='xco2Stddev', units='1') - self.fix = 
xco2Stddev() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) - self.assertEqual(cube.units.origin, '1.0e-6') - self.assertEqual(cube.data[0], 1) - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1e6) - - -class Testxch4Stddev(unittest.TestCase): - def setUp(self): - self.cube = Cube([1], var_name='xch4Stddev', units='1') - self.fix = xch4Stddev() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) - self.assertEqual(cube.units.origin, '1.0e-9') - self.assertEqual(cube.data[0], 1) - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1e9) - - -class Testxch4Stderr(unittest.TestCase): - def setUp(self): - self.cube = Cube([1], var_name='xch4Stderr', units='1') - self.fix = xch4Stderr() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) - self.assertEqual(cube.units.origin, '1.0e-9') - self.assertEqual(cube.data[0], 1) - - def test_fix_data(self): - cube = self.fix.fix_data(self.cube) - self.assertEqual(cube.data[0], 1e9) diff --git a/tests/integration/cmor/_fixes/OBS/test_ESACCI_OZONE.py b/tests/integration/cmor/_fixes/OBS/test_ESACCI_OZONE.py deleted file mode 100644 index 9441f5b249..0000000000 --- a/tests/integration/cmor/_fixes/OBS/test_ESACCI_OZONE.py +++ /dev/null @@ -1,41 +0,0 @@ -import unittest - -from iris.coords import DimCoord -from iris.cube import Cube - -from esmvaltool.cmor._fixes.OBS.ESACCI_OZONE import tro3prof - - -class TestTro3prof(unittest.TestCase): - def setUp(self): - self.cube = Cube([1, 2], var_name='tro3prof', units='J') - self.cube.add_dim_coord( - DimCoord([1, 2], standard_name='air_pressure', units='hPa'), 0) - self.fix = tro3prof() - - def test_fix_metadata(self): - cube = self.fix.fix_metadata(self.cube) - plev = cube.coord('air_pressure') - - self.assertEqual(plev.units.origin, 'Pa') - self.assertEqual(plev.points[0], 100) - self.assertEqual(plev.points[1], 200) - - def test_fix_metadata_with_bounds(self): - self.cube = Cube([1, 2], var_name='tro3prof', units='J') - self.cube.add_dim_coord( - DimCoord( - [1, 2], - standard_name='air_pressure', - units='hPa', - bounds=[[0.5, 1.5], [1.5, 2.5]]), 0) - cube = self.fix.fix_metadata(self.cube) - - plev = cube.coord('air_pressure') - self.assertEqual(plev.units.origin, 'Pa') - self.assertEqual(plev.points[0], 100) - self.assertEqual(plev.points[1], 200) - self.assertEqual(plev.bounds[0][0], 50) - self.assertEqual(plev.bounds[0][1], 150) - self.assertEqual(plev.bounds[1][0], 150) - self.assertEqual(plev.bounds[1][1], 250) diff --git a/tests/integration/cmor/_fixes/PRIMAVERA/test_EC_Earth3_HR.py b/tests/integration/cmor/_fixes/PRIMAVERA/test_EC_Earth3_HR.py index 08c06065a4..d0c4a18b68 100644 --- a/tests/integration/cmor/_fixes/PRIMAVERA/test_EC_Earth3_HR.py +++ b/tests/integration/cmor/_fixes/PRIMAVERA/test_EC_Earth3_HR.py @@ -11,12 +11,12 @@ class TestAllVars(unittest.TestCase): """Test for the common fixes for all the variables""" def setUp(self): """Prepare to test""" - self.cube = Cube([[1, 2], [3, 4]], var_name='var') + self.cube = Cube([[1.0, 2.0], [3.0, 4.0]], var_name='var') self.cube.add_dim_coord( - DimCoord([1, 2], standard_name='latitude', + DimCoord([1.0, 2.0], standard_name='latitude', var_name='latitude'), 0) self.cube.add_dim_coord( - DimCoord([1, 2], standard_name='longitude', + DimCoord([1.0, 2.0], standard_name='longitude', var_name='longitude'), 1) self.fix = allvars() @@ -27,6 +27,6 @@ def test_fix_lat_lon_names(self): They 
         should be lat and lon instead of the original latitude and longitude
         """
-        cube = self.fix.fix_metadata(self.cube)
+        cube = self.fix.fix_metadata([self.cube])[0]
         self.assertEqual(cube.coord('latitude').var_name, 'lat')
         self.assertEqual(cube.coord('longitude').var_name, 'lon')
diff --git a/tests/integration/cmor/_fixes/test_fix.py b/tests/integration/cmor/_fixes/test_fix.py
index fbdcacc23d..bf26d5c652 100644
--- a/tests/integration/cmor/_fixes/test_fix.py
+++ b/tests/integration/cmor/_fixes/test_fix.py
@@ -1,7 +1,7 @@
-import unittest
-import tempfile
 import os
 import shutil
+import tempfile
+import unittest
 
 from iris.cube import Cube
 
@@ -27,11 +27,9 @@
     def test_get_fixes_with_replace(self):
         self.assertListEqual(Fix.get_fixes('CMIP5', 'BNU-ESM', 'ch4'), [ch4()])
 
     def test_get_fixes_with_generic(self):
-        from esmvaltool.cmor._fixes.CMIP5.CESM1_BGC import (
-            allvars, co2)
+        from esmvaltool.cmor._fixes.CMIP5.CESM1_BGC import co2
         self.assertListEqual(
-            Fix.get_fixes('CMIP5', 'CESM1-BGC', 'co2'), [allvars(),
-                                                         co2()])
+            Fix.get_fixes('CMIP5', 'CESM1-BGC', 'co2'), [co2()])
 
     def test_get_fix_no_project(self):
         self.assertListEqual(
@@ -64,5 +62,4 @@ def test_fixed_filenam(self):
         output_dir = os.path.join(self.temp_folder, 'fixed')
         os.makedirs(output_dir)
         fixed_filepath = Fix().get_fixed_filepath(output_dir, filepath)
-        self.assertTrue(fixed_filepath,
-                        os.path.join(output_dir, 'file.nc'))
+        self.assertTrue(fixed_filepath, os.path.join(output_dir, 'file.nc'))
diff --git a/tests/integration/cmor/test_table.py b/tests/integration/cmor/test_table.py
index a1b17cf14a..c9e33300a0 100644
--- a/tests/integration/cmor/test_table.py
+++ b/tests/integration/cmor/test_table.py
@@ -3,23 +3,23 @@
 import os
 import unittest
 
-from esmvaltool.cmor.table import CMIP5Info, CMIP6Info
+from esmvaltool.cmor.table import CMIP5Info, CMIP6Info, CustomInfo
 
 
 class TestCMIP6Info(unittest.TestCase):
-    """Test for the CMIP6 info class"""
+    """Test for the CMIP6 info class."""
 
     @classmethod
     def setUpClass(cls):
         """
-        Set up tests
+        Set up tests.
 
         We read CMIP6Info once to keep test times manageable
         """
-        cls.variables_info = CMIP6Info()
+        cls.variables_info = CMIP6Info('cmip6', default=CustomInfo())
 
     def test_custom_tables_location(self):
-        """Test constructor with custom tables location"""
+        """Test constructor with custom tables location."""
         cwd = os.path.dirname(os.path.realpath(__file__))
         cmor_tables_path = os.path.join(cwd, '..', '..', '..', 'esmvaltool',
                                         'cmor', 'tables', 'cmip6')
@@ -27,34 +27,67 @@
         CMIP6Info(cmor_tables_path)
 
     def test_get_variable_tas(self):
-        """Get tas variable"""
+        """Get tas variable."""
         var = self.variables_info.get_variable('Amon', 'tas')
         self.assertEqual(var.short_name, 'tas')
 
     def test_get_variable_from_alias(self):
-        """Get a variable from a known alias"""
+        """Get a variable from a known alias."""
         var = self.variables_info.get_variable('SImon', 'sic')
         self.assertEqual(var.short_name, 'siconc')
 
     def test_get_bad_variable(self):
-        """Get none if a variable is not in the given table"""
+        """Get none if a variable is not in the given table."""
+        self.assertIsNone(self.variables_info.get_variable('Omon', 'tas'))
+
+
+class Testobs4mipsInfo(unittest.TestCase):
+    """Test for the obs4mips info class."""
+
+    @classmethod
+    def setUpClass(cls):
+        """
+        Set up tests.
+
+        We read CMIP6Info once to keep test times manageable
+        """
+        cls.variables_info = CMIP6Info(
+            cmor_tables_path='obs4mips',
+            default=CustomInfo()
+        )
+
+    def test_custom_tables_location(self):
+        """Test constructor with custom tables location."""
+        cwd = os.path.dirname(os.path.realpath(__file__))
+        cmor_tables_path = os.path.join(cwd, '..', '..', '..', 'esmvaltool',
+                                        'cmor', 'tables', 'cmip6')
+        cmor_tables_path = os.path.abspath(cmor_tables_path)
+        CMIP6Info(cmor_tables_path)
+
+    def test_get_variable_ndvistderr(self):
+        """Get ndviStderr variable."""
+        var = self.variables_info.get_variable('monStderr', 'ndviStderr')
+        self.assertEqual(var.short_name, 'ndviStderr')
+
+    def test_get_bad_variable(self):
+        """Get none if a variable is not in the given table."""
         self.assertIsNone(self.variables_info.get_variable('Omon', 'tas'))
 
 
 class TestCMIP5Info(unittest.TestCase):
-    """Test for the CMIP5 info class"""
+    """Test for the CMIP5 info class."""
 
     @classmethod
     def setUpClass(cls):
         """
-        Set up tests
+        Set up tests.
 
         We read CMIP5Info once to keep testing times manageable
         """
-        cls.variables_info = CMIP5Info()
+        cls.variables_info = CMIP5Info('cmip5', default=CustomInfo())
 
     def test_custom_tables_location(self):
-        """Test constructor with custom tables location"""
+        """Test constructor with custom tables location."""
         cwd = os.path.dirname(os.path.realpath(__file__))
         cmor_tables_path = os.path.join(cwd, '..', '..', '..', 'esmvaltool',
                                         'cmor', 'tables', 'cmip5')
@@ -62,10 +95,41 @@
         CMIP5Info(cmor_tables_path)
 
     def test_get_variable_tas(self):
-        """Get tas variable"""
+        """Get tas variable."""
         var = self.variables_info.get_variable('Amon', 'tas')
         self.assertEqual(var.short_name, 'tas')
 
     def test_get_bad_variable(self):
-        """Get none if a variable is not in the given table"""
+        """Get none if a variable is not in the given table."""
         self.assertIsNone(self.variables_info.get_variable('Omon', 'tas'))
+
+
+class TestCustomInfo(unittest.TestCase):
+    """Test for the custom info class."""
+
+    @classmethod
+    def setUpClass(cls):
+        """
+        Set up tests.
+
+        We read CustomInfo once to keep testing times manageable
+        """
+        cls.variables_info = CustomInfo()
+
+    def test_custom_tables_location(self):
+        """Test constructor with custom tables location."""
+        cwd = os.path.dirname(os.path.realpath(__file__))
+        cmor_tables_path = os.path.join(cwd, '..', '..', '..', 'esmvaltool',
+                                        'cmor', 'tables', 'cmip5')
+        cmor_tables_path = os.path.abspath(cmor_tables_path)
+        CustomInfo(cmor_tables_path)
+
+    def test_get_variable_netcre(self):
+        """Get netcre variable."""
+        var = self.variables_info.get_variable('Amon', 'netcre')
+        self.assertEqual(var.short_name, 'netcre')
+
+    def test_get_bad_variable(self):
+        """Get none if a variable is not in the given table."""
+        self.assertIsNone(self.variables_info.get_variable('Omon', 'badvar'))
diff --git a/tests/integration/data_finder.yml b/tests/integration/data_finder.yml
new file mode 100644
index 0000000000..cd718fe2b6
--- /dev/null
+++ b/tests/integration/data_finder.yml
@@ -0,0 +1,238 @@
+---
+
+get_output_file:
+  - variable: &variable
+      variable_group: test
+      short_name: ta
+      dataset: HadGEM2-ES
+      project: CMIP5
+      cmor_table: CMIP5
+      institute: [INPE, MOHC]
+      frequency: mon
+      modeling_realm: [atmos]
+      mip: Amon
+      exp: historical
+      ensemble: r1i1p1
+      start_year: 1960
+      end_year: 1980
+      diagnostic: test_diag
+      preprocessor: test_preproc
+    preproc_dir: this/is/a/path
+    output_file: this/is/a/path/test_diag/test/CMIP5_HadGEM2-ES_Amon_historical_r1i1p1_ta_1960-1980.nc
+
+  - variable:
+      <<: *variable
+      exp: [historical, rcp85]
+    preproc_dir: /test
+    output_file: /test/test_diag/test/CMIP5_HadGEM2-ES_Amon_historical-rcp85_r1i1p1_ta_1960-1980.nc
+
+
+get_input_filelist:
+  - drs: default
+    variable:
+      <<: *variable
+    available_files:
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc
+    found_files:
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc
+
+  - drs: default
+    variable:
+      <<: *variable
+      end_year: 2060
+      exp: [historical, rcp85]
+    available_files:
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc
+      - ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc
+    found_files:
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc
+      - ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc
+
+  - drs: default
+    variable:
+      <<: *variable
+      start_year: 2010
+      end_year: 2100
+    available_files:
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc
+      - ta_Amon_HadGEM2-ES_historical_r1i1p1_198413-200512.nc
+      - ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc
+    found_files: []
+
+  - drs: default
+    variable: *variable
+    found_files: []
+
+  - drs: BADC
+    variable:
+      <<: *variable
+      start_year: 1980
+      end_year: 2002
+    available_files:
+      - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc
+      - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc
+      - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110329/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc
+      - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20120928/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc
+      -
MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20120928/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20120928/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + available_symlinks: + - link_name: MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/latest + target: v20120928 + found_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/latest/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/latest/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + + - drs: DKRZ + variable: + <<: *variable + start_year: 1980 + end_year: 2002 + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + found_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + + - drs: DKRZ + variable: + <<: *variable + exp: [historical, rcp45, rcp85] + start_year: 1980 + end_year: 2100 + available_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - MOHC/HadGEM2-ES/rcp45/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp45_r1i1p1_200601-210012.nc + - MOHC/HadGEM2-ES/rcp85/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + found_files: + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - MOHC/HadGEM2-ES/historical/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - MOHC/HadGEM2-ES/rcp45/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp45_r1i1p1_200601-210012.nc + - MOHC/HadGEM2-ES/rcp85/mon/atmos/Amon/r1i1p1/v20110330/ta/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + + - drs: ETHZ + variable: + <<: *variable + start_year: 1980 + end_year: 2002 + available_files: + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - 
historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + found_files: + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + + - drs: ETHZ + variable: + <<: *variable + start_year: 2000 + end_year: 2100 + available_files: + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_185912-188411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_188412-190911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_190912-193411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_193412-195911.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_195912-198411.nc + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + - rcp85/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_rcp85_r1i1p1_200601-210012.nc + found_files: + - historical/Amon/ta/HadGEM2-ES/r1i1p1/ta_Amon_HadGEM2-ES_historical_r1i1p1_198412-200511.nc + + +get_input_fx_filelist: + - drs: default + variable: + <<: *variable + fx_files: + - areacella + - areacello + - basin + - deptho + - orog + - sftlf + - sftof + - thkcello + - volcello + available_files: + - sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + - areacella_fx_HadGEM2-ES_historical_r0i0p0.nc + - areacello_fx_HadGEM2-ES_historical_r0i0p0.nc + - basin_fx_HadGEM2-ES_historical_r0i0p0.nc + - deptho_fx_HadGEM2-ES_historical_r0i0p0.nc + - orog_fx_HadGEM2-ES_historical_r0i0p0.nc + - sftlf_fx_HadGEM2-ES_historical_r0i0p0.nc + - sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + - thkcello_fx_HadGEM2-ES_historical_r0i0p0.nc + - volcello_fx_HadGEM2-ES_historical_r0i0p0.nc + found_files: + areacella: areacella_fx_HadGEM2-ES_historical_r0i0p0.nc + areacello: areacello_fx_HadGEM2-ES_historical_r0i0p0.nc + basin: basin_fx_HadGEM2-ES_historical_r0i0p0.nc + deptho: deptho_fx_HadGEM2-ES_historical_r0i0p0.nc + orog: orog_fx_HadGEM2-ES_historical_r0i0p0.nc + sftlf: sftlf_fx_HadGEM2-ES_historical_r0i0p0.nc + sftof: sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + thkcello: thkcello_fx_HadGEM2-ES_historical_r0i0p0.nc + volcello: volcello_fx_HadGEM2-ES_historical_r0i0p0.nc + + - drs: default + variable: + <<: *variable + fx_files: + - sftof + found_files: + sftof: null + + - drs: BADC + variable: + <<: *variable + fx_files: + - sftof + available_files: + - MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20120215/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + - MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20130612/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + available_symlinks: + - link_name: MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/latest + target: v20130612 + found_files: + sftof: MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/latest/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + + - drs: DKRZ + variable: + <<: *variable + fx_files: + - sftof + available_files: + - MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20120215/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + - MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20130612/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + found_files: 
+ sftof: MOHC/HadGEM2-ES/historical/fx/ocean/fx/r0i0p0/v20130612/sftof/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + + - drs: ETHZ + variable: + <<: *variable + fx_files: + - sftof + available_files: + - historical/fx/sftof/HadGEM2-ES/r0i0p0/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc + found_files: + sftof: historical/fx/sftof/HadGEM2-ES/r0i0p0/sftof_fx_HadGEM2-ES_historical_r0i0p0.nc diff --git a/tests/unit/preprocessor/_volume_pp/__init__.py b/tests/integration/preprocessor/_derive/__init__.py similarity index 100% rename from tests/unit/preprocessor/_volume_pp/__init__.py rename to tests/integration/preprocessor/_derive/__init__.py diff --git a/tests/integration/preprocessor/_derive/test_interface.py b/tests/integration/preprocessor/_derive/test_interface.py new file mode 100644 index 0000000000..cef8ea93a4 --- /dev/null +++ b/tests/integration/preprocessor/_derive/test_interface.py @@ -0,0 +1,69 @@ +from iris.cube import Cube, CubeList + +from esmvaltool.preprocessor import derive +from esmvaltool.preprocessor._derive import get_required + + +def test_get_required(): + + variables = get_required('alb') + + reference = [ + { + 'short_name': 'rsds', + }, + { + 'short_name': 'rsus', + }, + ] + + assert variables == reference + + +def test_get_required_with_fx(): + + variables = get_required('nbp_grid') + + reference = [{ + 'short_name': 'nbp', + 'fx_files': ['sftlf'], + }] + + assert variables == reference + + +def test_derive_nonstandard_nofx(): + + short_name = 'alb' + long_name = 'albedo at the surface' + units = 1 + standard_name = '' + + rsds = Cube([2.]) + rsds.standard_name = 'surface_downwelling_shortwave_flux_in_air' + + rsus = Cube([1.]) + rsus.standard_name = 'surface_upwelling_shortwave_flux_in_air' + + cubes = CubeList([rsds, rsus]) + + alb = derive(cubes, short_name, long_name, units, standard_name) + + print(alb) + assert alb.var_name == short_name + assert alb.long_name == long_name + assert alb.units == units + assert alb.data == [0.5] + + +def test_derive_noop(): + + alb = Cube([1.]) + alb.var_name = 'alb' + alb.long_name = 'albedo at the surface' + alb.units = 1 + + cube = derive([alb], alb.var_name, alb.long_name, alb.units) + + print(cube) + assert cube is alb diff --git a/tests/integration/preprocessor/_io/test_cleanup.py b/tests/integration/preprocessor/_io/test_cleanup.py index 06a0d87139..c984865c44 100644 --- a/tests/integration/preprocessor/_io/test_cleanup.py +++ b/tests/integration/preprocessor/_io/test_cleanup.py @@ -1,10 +1,8 @@ """Integration tests for :func:`esmvaltool.preprocessor._io.cleanup`""" -from __future__ import absolute_import, division, print_function - -import unittest import os import tempfile +import unittest from esmvaltool.preprocessor import _io diff --git a/tests/integration/preprocessor/_io/test_concatenate.py b/tests/integration/preprocessor/_io/test_concatenate.py index cd90153c02..ab0f0d0752 100644 --- a/tests/integration/preprocessor/_io/test_concatenate.py +++ b/tests/integration/preprocessor/_io/test_concatenate.py @@ -1,41 +1,115 @@ -"""Integration tests for :func:`esmvaltool.preprocessor._io.concatenate`""" - -from __future__ import absolute_import, division, print_function +"""Integration tests for :func:`esmvaltool.preprocessor._io.concatenate`.""" import unittest + import numpy as np -from iris.cube import Cube from iris.coords import DimCoord +from iris.cube import Cube +from iris.exceptions import ConcatenateError from esmvaltool.preprocessor import _io class TestConcatenate(unittest.TestCase): - """Tests for 
:func:`esmvaltool.preprocessor._io.concatenate`""" + """Tests for :func:`esmvaltool.preprocessor._io.concatenate`.""" def setUp(self): + """Start tests.""" coord = DimCoord([1, 2], var_name='coord') second_coord = coord.copy([3, 4]) + third_coord = coord.copy([5, 6]) self.raw_cubes = [] - self.raw_cubes.append(Cube([1, 2], var_name='sample', - dim_coords_and_dims=((coord, 0),))) - self.raw_cubes.append(Cube([3, 4], var_name='sample', - dim_coords_and_dims=((second_coord, 0),))) + self.raw_cubes.append( + Cube([1, 2], var_name='sample', dim_coords_and_dims=((coord, + 0), ))) + self.raw_cubes.append( + Cube([3, 4], + var_name='sample', + dim_coords_and_dims=((second_coord, 0), ))) + self.raw_cubes.append( + Cube([5, 6], + var_name='sample', + dim_coords_and_dims=((third_coord, 0), ))) def test_concatenate(self): - """Test concatenation of two cubes""" + """Test concatenation of two cubes.""" concatenated = _io.concatenate(self.raw_cubes) - self.assertTrue((concatenated.coord('coord').points == - np.array([1, 2, 3, 4])).all()) + self.assertTrue((concatenated.coord('coord').points == np.array( + [1, 2, 3, 4, 5, 6])).all()) def test_fail_with_duplicates(self): - """Test exception raised if two cubes are overlapping""" + """Test exception raised if two cubes are overlapping.""" self.raw_cubes.append(self.raw_cubes[0].copy()) - with self.assertRaises(_io.ConcatenationError): + with self.assertRaises(ConcatenateError): _io.concatenate(self.raw_cubes) def test_fail_metadata_differs(self): - """Test exception raised if two cubes have different metadata""" + """Test exception raised if two cubes have different metadata.""" self.raw_cubes[0].units = 'm' - with self.assertRaises(_io.ConcatenationError): + with self.assertRaises(ConcatenateError): _io.concatenate(self.raw_cubes) + + def test_fix_attributes(self): + """Test fixing attributes for concatenation.""" + identical_attrs = { + 'int': 42, + 'float': 3.1415, + 'bool': True, + 'str': 'Hello, world', + 'list': [1, 1, 2, 3, 5, 8, 13], + 'tuple': (1, 2, 3, 4, 5), + 'dict': { + 1: 'one', + 2: 'two', + 3: 'three' + }, + 'nparray': np.arange(42), + } + differing_attrs = [ + { + 'new_int': 0, + 'new_str': 'hello', + 'new_nparray': np.arange(3), + 'mix': np.arange(2), + }, + { + 'new_int': 1, + 'new_str': 'world', + 'new_list': [1, 1, 2], + 'new_tuple': (0, 1), + 'new_dict': { + 0: 'zero', + }, + 'mix': { + 1: 'one', + }, + }, + { + 'new_str': '!', + 'new_list': [1, 1, 2, 3], + 'new_tuple': (1, 2, 3), + 'new_dict': { + 0: 'zeroo', + 1: 'one', + }, + 'new_nparray': np.arange(2), + 'mix': False, + }, + ] + resulting_attrs = { + 'new_int': '0;1', + 'new_str': 'hello;world;!', + 'new_nparray': '[0 1 2];[0 1]', + 'new_list': '[1, 1, 2];[1, 1, 2, 3]', + 'new_tuple': '(0, 1);(1, 2, 3)', + 'new_dict': "{0: 'zero'};{0: 'zeroo', 1: 'one'}", + 'mix': "[0 1];{1: 'one'};False", + } + resulting_attrs.update(identical_attrs) + + for idx in range(3): + self.raw_cubes[idx].attributes = identical_attrs + self.raw_cubes[idx].attributes.update(differing_attrs[idx]) + _io._fix_cube_attributes(self.raw_cubes) # noqa + for cube in self.raw_cubes: + self.assertTrue(cube.attributes == resulting_attrs) diff --git a/tests/integration/preprocessor/_io/test_load.py b/tests/integration/preprocessor/_io/test_load.py index 6625e2f936..68b7beab8a 100644 --- a/tests/integration/preprocessor/_io/test_load.py +++ b/tests/integration/preprocessor/_io/test_load.py @@ -1,83 +1,82 @@ -"""Integration tests for :func:`esmvaltool.preprocessor._io.concatenate`""" +"""Integration tests for 
:func:`esmvaltool.preprocessor._io.load`.""" -from __future__ import absolute_import, division, print_function - -import unittest import os import tempfile -import numpy as np +import unittest + import iris -from iris.cube import Cube +import numpy as np from iris.coords import DimCoord +from iris.cube import Cube + +from esmvaltool.preprocessor._io import concatenate_callback, load -from esmvaltool.preprocessor import _io + + +def _create_sample_cube(): + coord = DimCoord([1, 2], standard_name='latitude', units='degrees_north') + cube = Cube([1, 2], var_name='sample', dim_coords_and_dims=((coord, 0), )) + return cube class TestLoad(unittest.TestCase): - """Tests for :func:`esmvaltool.preprocessor._io.concatenate`""" + """Tests for :func:`esmvaltool.preprocessor._io.load`.""" def setUp(self): + """Start tests.""" self.temp_files = [] def tearDown(self): + """Finish tests.""" for temp_file in self.temp_files: os.remove(temp_file) - def _create_sample_cube(self): - coord = DimCoord([1, 2], standard_name='latitude', - units='degrees_north') - cube = Cube([1, 2], var_name='sample', - dim_coords_and_dims=((coord, 0),)) - return cube - def _save_cube(self, cube): descriptor, temp_file = tempfile.mkstemp('.nc') os.close(descriptor) iris.save(cube, temp_file) self.temp_files.append(temp_file) + return temp_file - def test_load_multiple(self): - """Test loading multiple files""" - for num in range(2): - cube = self._create_sample_cube() - self._save_cube(cube) + def test_load(self): + """Test loading a file.""" + cube = _create_sample_cube() + temp_file = self._save_cube(cube) - list = _io.load_cubes(self.temp_files, 'filename', None) - cube = list[0] + cubes = load(temp_file) + cube = cubes[0] + self.assertEqual(1, len(cubes)) + self.assertEqual(temp_file, cube.attributes['source_file']) self.assertTrue((cube.data == np.array([1, 2])).all()) - self.assertTrue((cube.coord('latitude').points == - np.array([1, 2])).all()) - self.assertEquals(cube.attributes['_filename'], 'filename') + self.assertTrue((cube.coord('latitude').points == np.array([1, + 2])).all()) - def test_callback_remove_attributtes(self): - """Test callback remove unwanted attributes""" - attributtes = ('history', 'creation_date', 'tracking_id') - for x in range(2): - cube = self._create_sample_cube() - for attr in attributtes: + def test_callback_remove_attributes(self): + """Test callback remove unwanted attributes.""" + attributes = ('history', 'creation_date', 'tracking_id') + for _ in range(2): + cube = _create_sample_cube() + for attr in attributes: cube.attributes[attr] = attr self._save_cube(cube) - - cubes = _io.load_cubes(self.temp_files, 'filename', None, - callback=_io.concatenate_callback) - cube = cubes[0] - self.assertTrue((cube.data == np.array([1, 2])).all()) - self.assertTrue((cube.coord('latitude').points == - np.array([1, 2])).all()) - self.assertEqual(cube.attributes['_filename'], 'filename') - for attr in attributtes: - self.assertTrue(attr not in cube.attributes) + for temp_file in self.temp_files: + cubes = load(temp_file, callback=concatenate_callback) + cube = cubes[0] + self.assertEqual(1, len(cubes)) + self.assertTrue((cube.data == np.array([1, 2])).all()) + self.assertTrue( + (cube.coord('latitude').points == np.array([1, 2])).all()) + for attr in attributes: + self.assertTrue(attr not in cube.attributes) def test_callback_fix_lat_units(self): - """Test callback for fixing units""" - cube = self._create_sample_cube() - self._save_cube(cube) + """Test callback for fixing units.""" + cube = 
_create_sample_cube() + temp_file = self._save_cube(cube) - list = _io.load_cubes(self.temp_files, 'filename', None, - callback=_io.concatenate_callback) - cube = list[0] + cubes = load(temp_file, callback=concatenate_callback) + cube = cubes[0] + self.assertEqual(1, len(cubes)) self.assertTrue((cube.data == np.array([1, 2])).all()) - self.assertTrue((cube.coord('latitude').points == - np.array([1, 2])).all()) - self.assertEquals(cube.attributes['_filename'], 'filename') + self.assertTrue((cube.coord('latitude').points == np.array([1, + 2])).all()) self.assertEquals(cube.coord('latitude').units, 'degrees_north') diff --git a/tests/integration/preprocessor/_io/test_save.py b/tests/integration/preprocessor/_io/test_save.py index 20d8efcfc4..b6f5f1495f 100644 --- a/tests/integration/preprocessor/_io/test_save.py +++ b/tests/integration/preprocessor/_io/test_save.py @@ -1,21 +1,20 @@ -"""Integration tests for :func:`esmvaltool.preprocessor._io.save`""" +"""Integration tests for :func:`esmvaltool.preprocessor.save`""" -from __future__ import absolute_import, division, print_function - -import unittest import os import tempfile -import numpy as np -import netCDF4 +import unittest + import iris -from iris.cube import Cube +import netCDF4 +import numpy as np from iris.coords import DimCoord +from iris.cube import Cube -from esmvaltool.preprocessor import _io +from esmvaltool.preprocessor import save class TestSave(unittest.TestCase): - """Tests for :func:`esmvaltool.preprocessor._io.save`""" + """Tests for :func:`esmvaltool.preprocessor.save`""" def setUp(self): self.temp_files = [] @@ -26,102 +25,97 @@ def tearDown(self): os.remove(temp_file) def _create_sample_cube(self): - lat = DimCoord(np.asarray([1, 2], np.single), - standard_name='latitude', - units='degrees_north') - lon = DimCoord(np.asarray([1, 2], np.single), - standard_name='longitude', - units='degrees_east') - time = DimCoord(np.asarray([1, 2], np.single), - standard_name='time', - units='days since 2000-1-1') - - cube = Cube(np.random.random_sample([2, 2, 2]), - var_name='sample', - units='1', - dim_coords_and_dims=((lat, 0), (lon, 1), (time, 2))) - - descriptor, temp_file = tempfile.mkstemp('.nc') + lat = DimCoord( + np.asarray([1, 2], np.single), + standard_name='latitude', + units='degrees_north') + lon = DimCoord( + np.asarray([1, 2], np.single), + standard_name='longitude', + units='degrees_east') + time = DimCoord( + np.asarray([1, 2], np.single), + standard_name='time', + units='days since 2000-1-1') + + cube = Cube( + np.random.random_sample([2, 2, 2]), + var_name='sample', + units='1', + dim_coords_and_dims=((lat, 0), (lon, 1), (time, 2))) + + descriptor, filename = tempfile.mkstemp('.nc') os.close(descriptor) - cube.attributes['_filename'] = temp_file - self.temp_files.append(temp_file) - return cube + self.temp_files.append(filename) + return cube, filename def test_save(self): """Test save""" - cube = self._create_sample_cube() - paths = _io.save([cube]) - loaded_cube = iris.load_cube(paths[0]) + cube, filename = self._create_sample_cube() + path = save([cube], filename) + loaded_cube = iris.load_cube(path) self._compare_cubes(cube, loaded_cube) def test_save_zlib(self): """Test save""" - cube = self._create_sample_cube() - paths = _io.save([cube], compress=True) - loaded_cube = iris.load_cube(paths[0]) + cube, filename = self._create_sample_cube() + path = save([cube], filename, compress=True) + loaded_cube = iris.load_cube(path) self._compare_cubes(cube, loaded_cube) - handler = netCDF4.Dataset(paths[0], 'r') + handler 
= netCDF4.Dataset(path, 'r') sample_filters = handler.variables['sample'].filters() self.assertTrue(sample_filters['zlib']) self.assertTrue(sample_filters['shuffle']) self.assertEqual(sample_filters['complevel'], 4) handler.close() - def test_save_debug(self): - """Test save on debug mode""" - cube = self._create_sample_cube() - paths = _io.save([cube], debug=True) - loaded_cube = iris.load_cube(paths[0]) - self._compare_cubes(cube, loaded_cube) - def test_fail_without_filename(self): - """Test save fails if _filename is not added""" - cube = self._create_sample_cube() - del cube.attributes['_filename'] - with self.assertRaises(ValueError): - _io.save([cube]) + """Test save fails if filename is not provided.""" + cube, _ = self._create_sample_cube() + with self.assertRaises(TypeError): + save([cube]) def test_save_optimized_map(self): """Test save""" - cube = self._create_sample_cube() - paths = _io.save([cube], optimize_access='map') - loaded_cube = iris.load_cube(paths[0]) + cube, filename = self._create_sample_cube() + path = save([cube], filename, optimize_access='map') + loaded_cube = iris.load_cube(path) self._compare_cubes(cube, loaded_cube) - self._check_chunks(paths, [2, 2, 1]) + self._check_chunks(path, [2, 2, 1]) def test_save_optimized_timeseries(self): """Test save""" - cube = self._create_sample_cube() - paths = _io.save([cube], optimize_access='timeseries') - loaded_cube = iris.load_cube(paths[0]) + cube, filename = self._create_sample_cube() + path = save([cube], filename, optimize_access='timeseries') + loaded_cube = iris.load_cube(path) self._compare_cubes(cube, loaded_cube) - self._check_chunks(paths, [1, 1, 2]) + self._check_chunks(path, [1, 1, 2]) def test_save_optimized_lat(self): """Test save""" - cube = self._create_sample_cube() - paths = _io.save([cube], optimize_access='latitude') - loaded_cube = iris.load_cube(paths[0]) + cube, filename = self._create_sample_cube() + path = save([cube], filename, optimize_access='latitude') + loaded_cube = iris.load_cube(path) self._compare_cubes(cube, loaded_cube) expected_chunks = [2, 1, 1] - self._check_chunks(paths, expected_chunks) + self._check_chunks(path, expected_chunks) - def _check_chunks(self, paths, expected_chunks): - handler = netCDF4.Dataset(paths[0], 'r') + def _check_chunks(self, path, expected_chunks): + handler = netCDF4.Dataset(path, 'r') chunking = handler.variables['sample'].chunking() handler.close() self.assertListEqual(expected_chunks, chunking) def test_save_optimized_lon_time(self): """Test save""" - cube = self._create_sample_cube() - paths = _io.save([cube], optimize_access='longitude time') - loaded_cube = iris.load_cube(paths[0]) + cube, filename = self._create_sample_cube() + path = save([cube], filename, optimize_access='longitude time') + loaded_cube = iris.load_cube(path) self._compare_cubes(cube, loaded_cube) - self._check_chunks(paths, [1, 2, 2]) + self._check_chunks(path, [1, 2, 2]) def _compare_cubes(self, cube, loaded_cube): self.assertTrue((cube.data == loaded_cube.data).all()) for coord in cube.coords(): - self.assertTrue((coord.points == - loaded_cube.coord(coord.name()).points).all()) + self.assertTrue( + (coord.points == loaded_cube.coord(coord.name()).points).all()) diff --git a/tests/integration/preprocessor/_mask/test_mask.py b/tests/integration/preprocessor/_mask/test_mask.py index f5b5d74a0a..47d64e30c7 100644 --- a/tests/integration/preprocessor/_mask/test_mask.py +++ b/tests/integration/preprocessor/_mask/test_mask.py @@ -6,51 +6,53 @@ """ -from __future__ import 
absolute_import, division, print_function - +import os +import tempfile import unittest -import os import iris import numpy as np import tests -from esmvaltool.preprocessor import _mask as mask +from esmvaltool.preprocessor import (PreprocessorFile, mask_fillvalues, + mask_landsea, mask_landseaice) class Test(tests.Test): """Test class""" - def test_mask_landsea(self): - """Test mask_landocean func""" + def setUp(self): + """Assemble a stock cube""" fx_data = np.empty((3, 3)) fx_data[:] = 60. - new_cube_data = np.empty((3, 3)) - new_cube_data[:] = 200. + self.new_cube_data = np.empty((3, 3)) + self.new_cube_data[:] = 200. crd_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - lons = iris.coords.DimCoord( - [0, 1.5, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=crd_sys) - lats = iris.coords.DimCoord( - [0, 1.5, 3], - standard_name='latitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_north', - coord_system=crd_sys) - coords_spec = [(lats, 0), (lons, 1)] - fx_mask = iris.cube.Cube(fx_data, dim_coords_and_dims=coords_spec) - iris.save(fx_mask, 'sftlf_test.nc') - new_cube_land = iris.cube.Cube(new_cube_data, - dim_coords_and_dims=coords_spec) - new_cube_sea = iris.cube.Cube(new_cube_data, - dim_coords_and_dims=coords_spec) + lons = iris.coords.DimCoord([0, 1.5, 3], + standard_name='longitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_east', + coord_system=crd_sys) + lats = iris.coords.DimCoord([0, 1.5, 3], + standard_name='latitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_north', + coord_system=crd_sys) + self.coords_spec = [(lats, 0), (lons, 1)] + self.fx_mask = iris.cube.Cube( + fx_data, dim_coords_and_dims=self.coords_spec) + + def test_mask_landsea(self): + """Test mask_landsea func""" + iris.save(self.fx_mask, 'sftlf_test.nc') + new_cube_land = iris.cube.Cube( + self.new_cube_data, dim_coords_and_dims=self.coords_spec) + new_cube_sea = iris.cube.Cube( + self.new_cube_data, dim_coords_and_dims=self.coords_spec) + # mask with fx files - result_land = mask.mask_landsea(new_cube_land, - ['sftlf_test.nc'], 'land') - result_sea = mask.mask_landsea(new_cube_sea, ['sftlf_test.nc'], 'sea') + result_land = mask_landsea(new_cube_land, ['sftlf_test.nc'], 'land') + result_sea = mask_landsea(new_cube_sea, ['sftlf_test.nc'], 'sea') expected = np.ma.empty((3, 3)) expected.data[:] = 200. expected.mask = np.ones((3, 3), bool) @@ -65,16 +67,77 @@ def test_mask_landsea(self): os.remove('sftlf_test.nc') # mask with shp files - new_cube_land = iris.cube.Cube(new_cube_data, - dim_coords_and_dims=coords_spec) - new_cube_sea = iris.cube.Cube(new_cube_data, - dim_coords_and_dims=coords_spec) + new_cube_land = iris.cube.Cube( + self.new_cube_data, dim_coords_and_dims=self.coords_spec) + new_cube_sea = iris.cube.Cube( + self.new_cube_data, dim_coords_and_dims=self.coords_spec) + # bear in mind all points are in the ocean - result_land = mask.mask_landsea(new_cube_land, None, 'land') + result_land = mask_landsea(new_cube_land, None, 'land') np.ma.set_fill_value(result_land.data, 1e+20) expected.mask = np.zeros((3, 3), bool) self.assertArrayEqual(result_land.data, expected) + def test_mask_landseaice(self): + """Test mask_landseaice func""" + iris.save(self.fx_mask, 'sftgif_test.nc') + new_cube_ice = iris.cube.Cube( + self.new_cube_data, dim_coords_and_dims=self.coords_spec) + result_ice = mask_landseaice(new_cube_ice, ['sftgif_test.nc'], 'ice') + expected = np.ma.empty((3, 3)) + expected.data[:] = 200. 
+ expected.mask = np.ones((3, 3), bool) + np.ma.set_fill_value(result_ice.data, 1e+20) + np.ma.set_fill_value(expected, 1e+20) + self.assertArrayEqual(result_ice.data.mask, expected.mask) + os.remove('sftgif_test.nc') + + def test_mask_fillvalues(self): + """Test the fillvalues mask: func mask_fillvalues""" + data_1 = np.ma.empty((4, 3, 3)) + data_1[:] = 10. + data_2 = np.ma.empty((4, 3, 3)) + data_2[:] = 10. + data_2.mask = np.ones((4, 3, 3), bool) + crd_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) + lons = iris.coords.DimCoord([0, 1.5, 3], + standard_name='longitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_east', + coord_system=crd_sys) + lats = iris.coords.DimCoord([0, 1.5, 3], + standard_name='latitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_north', + coord_system=crd_sys) + times = iris.coords.DimCoord([0, 1.5, 2.5, 3.5], + standard_name='time', + bounds=[[0, 1], [1, 2], [2, 3], [3, 4]], + units='hours') + coords_spec = [(times, 0), (lats, 1), (lons, 2)] + cube_1 = iris.cube.Cube(data_1, dim_coords_and_dims=coords_spec) + cube_2 = iris.cube.Cube(data_2, dim_coords_and_dims=coords_spec) + filename_1 = tempfile.NamedTemporaryFile().name + '.nc' + filename_2 = tempfile.NamedTemporaryFile().name + '.nc' + product_1 = PreprocessorFile( + attributes={'filename': filename_1}, settings={}) + product_1.cubes = [cube_1] + product_2 = PreprocessorFile( + attributes={'filename': filename_2}, settings={}) + product_2.cubes = [cube_2] + results = mask_fillvalues({product_1, product_2}, + 0.95, + min_value=-1.e10, + time_window=1) + result_1, result_2 = None, None + for product in results: + if product.filename == filename_1: + result_1 = product.cubes[0] + if product.filename == filename_2: + result_2 = product.cubes[0] + self.assertArrayEqual(result_2.data.mask, data_2.mask) + self.assertArrayEqual(result_1.data, data_1) + if __name__ == '__main__': unittest.main() diff --git a/tests/integration/preprocessor/_regrid/test_vinterp.py b/tests/integration/preprocessor/_regrid/test_extract_levels.py similarity index 85% rename from tests/integration/preprocessor/_regrid/test_vinterp.py rename to tests/integration/preprocessor/_regrid/test_extract_levels.py index 01e35c1b74..15c5d3150b 100644 --- a/tests/integration/preprocessor/_regrid/test_vinterp.py +++ b/tests/integration/preprocessor/_regrid/test_extract_levels.py @@ -1,18 +1,16 @@ """ -Integration tests for the :func:`esmvaltool.preprocessor.regrid.vinterp` +Integration tests for the :func:`esmvaltool.preprocessor.regrid.extract_levels` function. 
""" -from __future__ import absolute_import, division, print_function - import unittest import iris import numpy as np import tests -from esmvaltool.preprocessor._regrid import _MDI, vinterp +from esmvaltool.preprocessor._regrid import _MDI, extract_levels from tests.unit.preprocessor._regrid import _make_cube, _make_vcoord @@ -38,23 +36,18 @@ def setUp(self): self.shape = list(self.cube.shape) [self.z_dim] = self.cube.coord_dims(coord) - def test_nop(self): - result = vinterp(self.cube, None, None) - self.assertEqual(result, self.cube) - self.assertEqual(id(result), id(self.cube)) - def test_nop__levels_match(self): vcoord = _make_vcoord(self.z) self.assertEqual(self.cube.coord(axis='z', dim_coords=True), vcoord) levels = vcoord.points - result = vinterp(self.cube, levels, 'linear') + result = extract_levels(self.cube, levels, 'linear') self.assertEqual(result, self.cube) self.assertEqual(id(result), id(self.cube)) def test_interpolation__linear(self): levels = [0.5, 1.5] scheme = 'linear' - result = vinterp(self.cube, levels, scheme) + result = extract_levels(self.cube, levels, scheme) expected = np.array([[[[2., 3.], [4., 5.]], [[6., 7.], [8., 9.]]], [[[14., 15.], [16., 17.]], [[18., 19.], [20., 21.]]]]) @@ -65,7 +58,7 @@ def test_interpolation__linear(self): def test_interpolation__nearest(self): levels = [0.49, 1.51] scheme = 'nearest' - result = vinterp(self.cube, levels, scheme) + result = extract_levels(self.cube, levels, scheme) expected = np.array([[[[0., 1.], [2., 3.]], [[8., 9.], [10., 11.]]], [[[12., 13.], [14., 15.]], [[20., 21.], [22., 23.]]]]) @@ -76,7 +69,7 @@ def test_interpolation__nearest(self): def test_interpolation__extrapolated_NaN_filling(self): levels = [-10, 1, 2, 10] scheme = 'nearest' - result = vinterp(self.cube, levels, scheme) + result = extract_levels(self.cube, levels, scheme) expected = np.array( [[[[_MDI, _MDI], [_MDI, _MDI]], [[4., 5.], [6., 7.]], [[8., 9.], [10., 11.]], [[_MDI, _MDI], [_MDI, _MDI]]], @@ -89,7 +82,7 @@ def test_interpolation__extrapolated_NaN_filling(self): def test_interpolation__scalar_collapse(self): level = 1 scheme = 'nearest' - result = vinterp(self.cube, level, scheme) + result = extract_levels(self.cube, level, scheme) expected = np.array([[[4., 5.], [6., 7.]], [[16., 17.], [18., 19.]]]) self.assertArrayEqual(result.data, expected) del self.shape[self.z_dim] diff --git a/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py b/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py index 2e419877ff..56662b204a 100644 --- a/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py +++ b/tests/integration/preprocessor/_regrid/test_get_cmor_levels.py @@ -5,46 +5,44 @@ """ -from __future__ import absolute_import, division, print_function - import unittest -from esmvaltool.preprocessor import _regrid -from esmvaltool.cmor.table import read_cmor_tables from esmvaltool._config import read_config_developer_file +from esmvaltool.cmor.table import read_cmor_tables +from esmvaltool.preprocessor import _regrid class TestGetCmorLevels(unittest.TestCase): - @staticmethod def setUpClass(): """Read cmor tables before testing""" read_cmor_tables(read_config_developer_file()) def test_cmip6_alt40(self): - self.assertListEqual(_regrid.get_cmor_levels('CMIP6', 'alt40'), - [240.0, 720.0, 1200.0, 1680.0, 2160.0, 2640.0, - 3120.0, 3600.0, 4080.0, 4560.0, 5040.0, 5520.0, - 6000.0, 6480.0, 6960.0, 7440.0, 7920.0, 8400.0, - 8880.0, 9360.0, 9840.0, 10320.0, 10800.0, - 11280.0, 11760.0, 12240.0, 12720.0, 13200.0, - 13680.0, 14160.0, 
14640.0, 15120.0, 15600.0, - 16080.0, 16560.0, 17040.0, 17520.0, 18000.0, - 18480.0, 18960.0]) + self.assertListEqual( + _regrid.get_cmor_levels('CMIP6', 'alt40'), [ + 240.0, 720.0, 1200.0, 1680.0, 2160.0, 2640.0, 3120.0, 3600.0, + 4080.0, 4560.0, 5040.0, 5520.0, 6000.0, 6480.0, 6960.0, 7440.0, + 7920.0, 8400.0, 8880.0, 9360.0, 9840.0, 10320.0, 10800.0, + 11280.0, 11760.0, 12240.0, 12720.0, 13200.0, 13680.0, 14160.0, + 14640.0, 15120.0, 15600.0, 16080.0, 16560.0, 17040.0, 17520.0, + 18000.0, 18480.0, 18960.0 + ]) def test_cmip6_p200(self): - self.assertListEqual(_regrid.get_cmor_levels('CMIP6', 'p200'), - [20000.]) + self.assertListEqual( + _regrid.get_cmor_levels('CMIP6', 'p200'), [20000.]) def test_cmip5_alt40(self): - self.assertListEqual(_regrid.get_cmor_levels('CMIP5', 'plevs'), - [100000., 92500., 85000., 70000., 60000., 50000., - 40000., 30000., 25000., 20000., 15000., 10000., - 7000., 5000., 3000., 2000., 1000.]) + self.assertListEqual( + _regrid.get_cmor_levels('CMIP5', 'plevs'), [ + 100000., 92500., 85000., 70000., 60000., 50000., 40000., + 30000., 25000., 20000., 15000., 10000., 7000., 5000., 3000., + 2000., 1000. + ]) def test_cmip5_p500(self): - self.assertListEqual(_regrid.get_cmor_levels('CMIP5', 'p500'), - [50000]) + self.assertListEqual(_regrid.get_cmor_levels('CMIP5', 'p500'), [50000]) def test_not_values_in_coordinate(self): with self.assertRaises(ValueError): diff --git a/tests/integration/preprocessor/_regrid/test_get_file_levels.py b/tests/integration/preprocessor/_regrid/test_get_file_levels.py index b66196b9a2..a394715595 100644 --- a/tests/integration/preprocessor/_regrid/test_get_file_levels.py +++ b/tests/integration/preprocessor/_regrid/test_get_file_levels.py @@ -5,29 +5,30 @@ """ -from __future__ import absolute_import, division, print_function - +import os +import tempfile import unittest + import iris -import iris.cube import iris.coords +import iris.cube import numpy as np -import tempfile -import os from esmvaltool.preprocessor import _regrid class TestGetFileLevels(unittest.TestCase): - def setUp(self): """Prepare the sample file for the test""" self.cube = iris.cube.Cube(np.ones([2, 2, 2]), var_name='var') - self.cube.add_dim_coord(iris.coords.DimCoord(np.arange(0, 2), - var_name='coord'), 0) + self.cube.add_dim_coord( + iris.coords.DimCoord(np.arange(0, 2), var_name='coord'), 0) + self.cube.coord('coord').attributes['positive'] = 'up' + iris.util.guess_coord_axis(self.cube.coord('coord')) descriptor, self.path = tempfile.mkstemp('.nc') os.close(descriptor) + print(self.cube) iris.save(self.cube, self.path) def tearDown(self): @@ -35,9 +36,8 @@ def tearDown(self): os.remove(self.path) def test_get_coord(self): - self.assertListEqual(_regrid.get_reference_levels(self.path, 'coord'), - [0., 1]) - - def test_bad_coord(self): - with self.assertRaises(ValueError): - _regrid.get_reference_levels(self.path, 'bad_coord') + self.assertListEqual( + _regrid.get_reference_levels( + self.path, 'project', 'dataset', 'short_name', 'output_dir'), + [0., 1] + ) diff --git a/tests/integration/preprocessor/_regrid/test_regrid.py b/tests/integration/preprocessor/_regrid/test_regrid.py index 77000128d0..7ab337710f 100644 --- a/tests/integration/preprocessor/_regrid/test_regrid.py +++ b/tests/integration/preprocessor/_regrid/test_regrid.py @@ -4,8 +4,6 @@ """ -from __future__ import absolute_import, division, print_function - import unittest import iris @@ -24,47 +22,57 @@ def setUp(self): self.cube = _make_cube(data) self.cs = 
iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) - def test_nop(self): - result = regrid(self.cube, None, None) - self.assertEqual(result, self.cube) - self.assertEqual(id(result), id(self.cube)) - def test_regrid__linear(self): data = np.empty((1, 1)) - grid = iris.cube.Cube(data) - lons = iris.coords.DimCoord( - [1.5], - standard_name='longitude', - bounds=[[1, 2]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord( - [1.5], - standard_name='latitude', - bounds=[[1, 2]], - units='degrees_north', - coord_system=self.cs) + lons = iris.coords.DimCoord([1.5], + standard_name='longitude', + bounds=[[1, 2]], + units='degrees_east', + coord_system=self.cs) + lats = iris.coords.DimCoord([1.5], + standard_name='latitude', + bounds=[[1, 2]], + units='degrees_north', + coord_system=self.cs) coords_spec = [(lats, 0), (lons, 1)] grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) result = regrid(self.cube, grid, 'linear') expected = np.array([[[1.5]], [[5.5]], [[9.5]]]) self.assertArrayEqual(result.data, expected) + def test_regrid__linear_extrapolate(self): + data = np.empty((3, 3)) + lons = iris.coords.DimCoord([0, 1.5, 3], + standard_name='longitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_east', + coord_system=self.cs) + lats = iris.coords.DimCoord([0, 1.5, 3], + standard_name='latitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_north', + coord_system=self.cs) + coords_spec = [(lats, 0), (lons, 1)] + grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) + result = regrid(self.cube, grid, 'linear_extrapolate') + expected = [[[-3., -1.5, 0.], [0., 1.5, 3.], [3., 4.5, 6.]], + [[1., 2.5, 4.], [4., 5.5, 7.], [7., 8.5, 10.]], + [[5., 6.5, 8.], [8., 9.5, 11.], [11., 12.5, 14.]]] + self.assertArrayEqual(result.data, expected) + def test_regrid__linear_extrapolate_with_mask(self): data = np.empty((3, 3)) grid = iris.cube.Cube(data) - lons = iris.coords.DimCoord( - [0, 1.5, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord( - [0, 1.5, 3], - standard_name='latitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_north', - coord_system=self.cs) + lons = iris.coords.DimCoord([0, 1.5, 3], + standard_name='longitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_east', + coord_system=self.cs) + lats = iris.coords.DimCoord([0, 1.5, 3], + standard_name='latitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_north', + coord_system=self.cs) coords_spec = [(lats, 0), (lons, 1)] grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) result = regrid(self.cube, grid, 'linear') @@ -75,19 +83,16 @@ def test_regrid__linear_extrapolate_with_mask(self): def test_regrid__nearest(self): data = np.empty((1, 1)) - grid = iris.cube.Cube(data) - lons = iris.coords.DimCoord( - [1.6], - standard_name='longitude', - bounds=[[1, 2]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord( - [1.6], - standard_name='latitude', - bounds=[[1, 2]], - units='degrees_north', - coord_system=self.cs) + lons = iris.coords.DimCoord([1.6], + standard_name='longitude', + bounds=[[1, 2]], + units='degrees_east', + coord_system=self.cs) + lats = iris.coords.DimCoord([1.6], + standard_name='latitude', + bounds=[[1, 2]], + units='degrees_north', + coord_system=self.cs) coords_spec = [(lats, 0), (lons, 1)] grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) result = regrid(self.cube, grid, 'nearest') @@ -96,19 +101,16 @@ 
def test_regrid__nearest(self): def test_regrid__nearest_extrapolate_with_mask(self): data = np.empty((3, 3)) - grid = iris.cube.Cube(data) - lons = iris.coords.DimCoord( - [0, 1.6, 3], - standard_name='longitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord( - [0, 1.6, 3], - standard_name='latitude', - bounds=[[0, 1], [1, 2], [2, 3]], - units='degrees_north', - coord_system=self.cs) + lons = iris.coords.DimCoord([0, 1.6, 3], + standard_name='longitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_east', + coord_system=self.cs) + lats = iris.coords.DimCoord([0, 1.6, 3], + standard_name='latitude', + bounds=[[0, 1], [1, 2], [2, 3]], + units='degrees_north', + coord_system=self.cs) coords_spec = [(lats, 0), (lons, 1)] grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) result = regrid(self.cube, grid, 'nearest') @@ -119,19 +121,16 @@ def test_regrid__nearest_extrapolate_with_mask(self): def test_regrid__area_weighted(self): data = np.empty((1, 1)) - grid = iris.cube.Cube(data) - lons = iris.coords.DimCoord( - [1.6], - standard_name='longitude', - bounds=[[1, 2]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord( - [1.6], - standard_name='latitude', - bounds=[[1, 2]], - units='degrees_north', - coord_system=self.cs) + lons = iris.coords.DimCoord([1.6], + standard_name='longitude', + bounds=[[1, 2]], + units='degrees_east', + coord_system=self.cs) + lats = iris.coords.DimCoord([1.6], + standard_name='latitude', + bounds=[[1, 2]], + units='degrees_north', + coord_system=self.cs) coords_spec = [(lats, 0), (lons, 1)] grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) result = regrid(self.cube, grid, 'area_weighted') @@ -140,19 +139,16 @@ def test_regrid__area_weighted(self): def test_regrid__unstructured_nearest(self): data = np.empty((1, 1)) - grid = iris.cube.Cube(data) - lons = iris.coords.DimCoord( - [1.6], - standard_name='longitude', - bounds=[[1, 2]], - units='degrees_east', - coord_system=self.cs) - lats = iris.coords.DimCoord( - [1.6], - standard_name='latitude', - bounds=[[1, 2]], - units='degrees_north', - coord_system=self.cs) + lons = iris.coords.DimCoord([1.6], + standard_name='longitude', + bounds=[[1, 2]], + units='degrees_east', + coord_system=self.cs) + lats = iris.coords.DimCoord([1.6], + standard_name='latitude', + bounds=[[1, 2]], + units='degrees_north', + coord_system=self.cs) coords_spec = [(lats, 0), (lons, 1)] grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec) # Replace 1d spatial coords with 2d spatial coords. 
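
The test_fix_attributes case in the test_concatenate.py hunk above pins down how the new _io._fix_cube_attributes helper reconciles cube attributes before concatenation, but the expected dictionary is dense. A minimal sketch of the behaviour, not part of the patch, assuming (as the expected values in that test indicate) that attributes shared by all cubes are kept unchanged while differing values are stringified and joined with ';' in cube order:

    from iris.cube import Cube

    from esmvaltool.preprocessor._io import _fix_cube_attributes

    # Two cubes whose 'source' attribute differs; iris refuses to
    # concatenate cubes whose attributes are not identical.
    cubes = [
        Cube([0.], attributes={'source': 'run1', 'project': 'CMIP5'}),
        Cube([1.], attributes={'source': 'run2', 'project': 'CMIP5'}),
    ]
    _fix_cube_attributes(cubes)

    # Identical attributes survive unchanged ...
    assert cubes[0].attributes['project'] == 'CMIP5'
    # ... while differing values are merged into one ';'-joined string.
    assert cubes[0].attributes['source'] == 'run1;run2'

Flattening the attributes this way loses a little metadata fidelity, but it turns a hard ConcatenateError into a successful concatenation that still records the merged values.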
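The data_finder.yml fixture added above leans on YAML anchors and merge keys to avoid repeating the dataset description: the first case defines the &variable anchor, and every later case pulls those keys in with <<: *variable before overriding individual entries. A small, self-contained sketch, using an illustrative snippet rather than the real fixture, of how PyYAML expands this when test_data_finder.py below loads the file:

    import yaml

    SNIPPET = '''
    cases:
      - variable: &variable
          short_name: ta
          exp: historical
      - variable:
          <<: *variable
          exp: [historical, rcp85]
    '''

    cases = yaml.safe_load(SNIPPET)['cases']
    # The merge key copies 'short_name' from the anchored mapping ...
    assert cases[1]['variable']['short_name'] == 'ta'
    # ... and explicitly given keys override the anchored values.
    assert cases[1]['variable']['exp'] == ['historical', 'rcp85']

Because the expansion happens entirely inside yaml.safe_load, each parametrized case in the tests that follow arrives as a fully populated dictionary without the fixture spelling out the whole dataset description every time.
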
diff --git a/tests/integration/test_data_finder.py b/tests/integration/test_data_finder.py new file mode 100644 index 0000000000..484fdc99b7 --- /dev/null +++ b/tests/integration/test_data_finder.py @@ -0,0 +1,113 @@ +"""Tests for _data_finder.py.""" +import os +import shutil +import tempfile + +import pytest +import yaml + +import esmvaltool._config +from esmvaltool._data_finder import (get_input_filelist, get_input_fx_filelist, + get_output_file) +from esmvaltool.cmor.table import read_cmor_tables + +# Initialize with standard config developer file +esmvaltool._config.CFG = esmvaltool._config.read_config_developer_file() +# Initialize CMOR tables +read_cmor_tables(esmvaltool._config.CFG) + +# Load test configuration +with open(os.path.join(os.path.dirname(__file__), 'data_finder.yml')) as file: + CONFIG = yaml.safe_load(file) + + +def print_path(path): + """Print path.""" + txt = path + if os.path.isdir(path): + txt += '/' + if os.path.islink(path): + txt += ' -> ' + os.readlink(path) + print(txt) + + +def tree(path): + """Print path, similar to the `tree` command.""" + print_path(path) + for dirpath, dirnames, filenames in os.walk(path): + for dirname in dirnames: + print_path(os.path.join(dirpath, dirname)) + for filename in filenames: + print_path(os.path.join(dirpath, filename)) + + +def create_file(filename): + """Create an empty file.""" + dirname = os.path.dirname(filename) + if not os.path.exists(dirname): + os.makedirs(dirname) + + with open(filename, 'a'): + pass + + +def create_tree(path, filenames=None, symlinks=None): + """Create directory structure and files.""" + for filename in filenames or []: + create_file(os.path.join(path, filename)) + + for symlink in symlinks or []: + link_name = os.path.join(path, symlink['link_name']) + os.symlink(symlink['target'], link_name) + + +@pytest.mark.parametrize('cfg', CONFIG['get_output_file']) +def test_get_output_file(cfg): + """Test getting output name for preprocessed files.""" + output_file = get_output_file(cfg['variable'], cfg['preproc_dir']) + assert output_file == cfg['output_file'] + + +@pytest.fixture +def root(): + """Create a temporary root directory and print its tree on cleanup.""" + dirname = tempfile.mkdtemp() + yield os.path.join(dirname, 'output1') + print("Directory structure was:") + tree(dirname) + shutil.rmtree(dirname) + + +@pytest.mark.parametrize('cfg', CONFIG['get_input_filelist']) +def test_get_input_filelist(root, cfg): + """Test retrieving input filelist.""" + create_tree(root, cfg.get('available_files'), + cfg.get('available_symlinks')) + + # Find files + rootpath = {cfg['variable']['project']: [root]} + drs = {cfg['variable']['project']: cfg['drs']} + input_filelist = get_input_filelist(cfg['variable'], rootpath, drs) + + # Test result + reference = [os.path.join(root, file) for file in cfg['found_files']] + assert sorted(input_filelist) == sorted(reference) + + +@pytest.mark.parametrize('cfg', CONFIG['get_input_fx_filelist']) +def test_get_input_fx_filelist(root, cfg): + """Test retrieving fx filelist.""" + create_tree(root, cfg.get('available_files'), + cfg.get('available_symlinks')) + + # Find files + rootpath = {cfg['variable']['project']: [root]} + drs = {cfg['variable']['project']: cfg['drs']} + fx_files = get_input_fx_filelist(cfg['variable'], rootpath, drs) + + # Test result + reference = { + fx_var: os.path.join(root, filename) if filename else None + for fx_var, filename in cfg['found_files'].items() + } + assert fx_files == reference
diff --git a/tests/integration/test_diagnostic_run.py b/tests/integration/test_diagnostic_run.py
new file mode 100644 index 0000000000..d5043284da --- /dev/null +++ b/tests/integration/test_diagnostic_run.py @@ -0,0 +1,144 @@ +"""Test diagnostic script runs.""" +import contextlib +import sys +from textwrap import dedent + +import pytest +import yaml +from six import text_type + +from esmvaltool._main import run + + +def write_config_user_file(dirname): + config_file = dirname / 'config-user.yml' + cfg = { + 'output_dir': str(dirname / 'output_dir'), + 'rootpath': { + 'default': str(dirname / 'input_dir'), + }, + 'drs': { + 'CMIP5': 'BADC', + }, + 'log_level': 'debug', + } + config_file.write_text(yaml.safe_dump(cfg, encoding=None)) + return str(config_file) + + +@contextlib.contextmanager +def arguments(*args): + backup = sys.argv + sys.argv = list(args) + yield + sys.argv = backup + + +def check(result_file): + """Check the results.""" + result = yaml.safe_load(result_file.read_text()) + + print(result) + + required_keys = { + 'input_files', + 'log_level', + 'plot_dir', + 'run_dir', + 'work_dir', + } + missing = required_keys - set(result) + assert not missing + + +SCRIPTS = { + 'diagnostic.py': + dedent(""" + import yaml + from esmvaltool.diag_scripts.shared import run_diagnostic + + def main(cfg): + with open(cfg['setting_name'], 'w') as file: + yaml.safe_dump(cfg, file) + + if __name__ == '__main__': + with run_diagnostic() as config: + main(config) + """), + 'diagnostic.ncl': + dedent(""" + begin + print("INFO Loading settings from " + getenv("settings")) + loadscript("$settings") + end + print("INFO Writing " + diag_script_info@setting_name) + n = str_get_nl() + result = "run_dir: " + config_user_info@run_dir + n +\ + "work_dir: " + config_user_info@work_dir + n +\ + "plot_dir: " + config_user_info@plot_dir + n +\ + "log_level: " + config_user_info@log_level + n +\ + "input_files: []" + n + + system("echo '" + result + "' > " + diag_script_info@setting_name) + """), + 'diagnostic.R': + dedent(""" + library(yaml) + + args <- commandArgs(trailingOnly = TRUE) + print(paste0("INFO Loading settings from ", args[1])) + settings <- yaml::read_yaml(args[1]) + + print(paste0("INFO Writing settings to ", settings$setting_name)) + yaml::write_yaml(settings, settings$setting_name) + """), + 'diagnostic.jl': + dedent(""" + import YAML + @info "Starting diagnostic script with" ARGS + config_file = ARGS[1] + cfg = YAML.load_file(config_file) + out_file = cfg["setting_name"] + @info "Copying file to" out_file + Base.Filesystem.cp(config_file, out_file) + @info "Done" + """), +} + + +@pytest.mark.install +@pytest.mark.parametrize('script_file, script', SCRIPTS.items()) +def test_diagnostic_run(tmp_path, script_file, script): + + recipe_file = tmp_path / 'recipe_test.yml' + script_file = tmp_path / script_file + result_file = tmp_path / 'result.yml' + + # Write script to file + script_file.write_text(text_type(script)) + + # Create recipe + recipe = dedent(""" + documentation: + description: Recipe with no data. 
+ authors: [ande_bo] + + diagnostics: + diagnostic_name: + scripts: + script_name: + script: {} + setting_name: {} + """.format(script_file, result_file)) + recipe_file.write_text(text_type(recipe)) + + config_user_file = write_config_user_file(tmp_path) + with arguments( + 'esmvaltool', + '-c', + config_user_file, + str(recipe_file), + ): + run() + + check(result_file) diff --git a/tests/integration/test_provenance.py b/tests/integration/test_provenance.py new file mode 100644 index 0000000000..630a7c0517 --- /dev/null +++ b/tests/integration/test_provenance.py @@ -0,0 +1,47 @@ +from prov.constants import PROV_ATTR_GENERATED_ENTITY, PROV_ATTR_USED_ENTITY +from prov.model import ProvDerivation + + +def get_file_record(prov, filename): + records = prov.get_record('file:' + filename) + assert records + return records[0] + + +def check_provenance(product): + prov = product.provenance + + entity = get_file_record(prov, product.filename) + assert entity == product.entity + + check_product_wasderivedfrom(product) + + +def check_product_wasderivedfrom(product): + """Check that product.filename was derived from product._ancestors.""" + print('checking provenance of file', product.filename) + prov = product.provenance + + def get_identifier(filename): + record = get_file_record(prov, filename) + return {record.identifier} + + # Check that the input and output file records exist + identifier = get_identifier(product.filename) + + relations = {r for r in prov.records if isinstance(r, ProvDerivation)} + for ancestor in product._ancestors: + input_identifier = get_identifier(ancestor.filename) + for record in relations: + if input_identifier == record.get_attribute(PROV_ATTR_USED_ENTITY): + assert identifier == record.get_attribute( + PROV_ATTR_GENERATED_ENTITY) + break + else: + assert False + + if not product._ancestors: + assert 'tracking_id' in product.attributes + else: + for ancestor in product._ancestors: + check_product_wasderivedfrom(ancestor) diff --git a/tests/integration/test_recipe.py b/tests/integration/test_recipe.py new file mode 100644 index 0000000000..aeaaedabd5 --- /dev/null +++ b/tests/integration/test_recipe.py @@ -0,0 +1,730 @@ +import os +from pprint import pformat +from textwrap import dedent + +import iris +import pytest +import yaml +from mock import create_autospec +from six import text_type + +import esmvaltool +from esmvaltool._recipe import TASKSEP, read_recipe_file +from esmvaltool._task import DiagnosticTask +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, get_diagnostic_filename, get_plot_filename) +from esmvaltool.preprocessor import DEFAULT_ORDER, PreprocessingTask +from esmvaltool.preprocessor._io import concatenate_callback + +from .test_diagnostic_run import write_config_user_file +from .test_provenance import check_provenance + +MANDATORY_DATASET_KEYS = ( + 'cmor_table', + 'dataset', + 'diagnostic', + 'end_year', + 'filename', + 'frequency', + 'institute', + 'long_name', + 'mip', + 'modeling_realm', + 'preprocessor', + 'project', + 'short_name', + 'standard_name', + 'start_year', + 'units', +) + +MANDATORY_SCRIPT_SETTINGS_KEYS = ( + 'log_level', + 'script', + 'plot_dir', + 'run_dir', + 'work_dir', +) + +DEFAULT_PREPROCESSOR_STEPS = ( + 'cleanup', + 'cmor_check_data', + 'cmor_check_metadata', + 'concatenate', + 'extract_time', + 'fix_data', + 'fix_file', + 'fix_metadata', + 'load', + 'save', +) + + +@pytest.fixture +def config_user(tmp_path): + filename = write_config_user_file(tmp_path) + cfg = esmvaltool._config.read_config_user_file(filename, 
'recipe_test') + cfg['synda_download'] = False + return cfg + + +def create_test_file(filename, tracking_id=None): + dirname = os.path.dirname(filename) + if not os.path.exists(dirname): + os.makedirs(dirname) + + attributes = {} + if tracking_id is not None: + attributes['tracking_id'] = tracking_id + cube = iris.cube.Cube([], attributes=attributes) + + iris.save(cube, filename) + + +@pytest.fixture +def patched_datafinder(tmp_path, monkeypatch): + def tracking_ids(i=0): + while True: + yield i + i += 1 + + tracking_id = tracking_ids() + + def find_files(_, filenames): + # Any occurrence of [something] in filename should have + # been replaced before this function is called. + for filename in filenames: + assert '[' not in filename + + filename = filenames[0] + filename = str(tmp_path / 'input' / filename) + filenames = [] + if filename.endswith('*.nc'): + filename = filename[:-len('*.nc')] + intervals = [ + '1990_1999', + '2000_2009', + '2010_2019', + ] + for interval in intervals: + filenames.append(filename + interval + '.nc') + else: + filenames.append(filename) + + for file in filenames: + create_test_file(file, next(tracking_id)) + return filenames + + monkeypatch.setattr(esmvaltool._data_finder, 'find_files', find_files) + + +DEFAULT_DOCUMENTATION = dedent(""" + documentation: + description: This is a test recipe. + authors: + - ande_bo + references: + - contact_authors + - acknow_project + projects: + - c3s-magic + """) + + +def get_recipe(tempdir, content, cfg): + """Save and load recipe content.""" + recipe_file = tempdir / 'recipe_test.yml' + # Add mandatory documentation section + content = text_type(DEFAULT_DOCUMENTATION + content) + recipe_file.write_text(content) + + recipe = read_recipe_file(str(recipe_file), cfg) + + return recipe + + +def test_simple_recipe(tmp_path, patched_datafinder, config_user): + + content = dedent(""" + datasets: + - dataset: bcc-csm1-1 + + preprocessors: + preprocessor_name: + extract_levels: + levels: 85000 + scheme: nearest + + diagnostics: + diagnostic_name: + additional_datasets: + - dataset: GFDL-ESM2G + variables: + ta: + preprocessor: preprocessor_name + project: CMIP5 + mip: Amon + exp: historical + ensemble: r1i1p1 + start_year: 1999 + end_year: 2002 + additional_datasets: + - dataset: MPI-ESM-LR + scripts: + script_name: + script: examples/diagnostic.py + custom_setting: 1 + """) + + recipe = get_recipe(tmp_path, content, config_user) + raw = yaml.safe_load(content) + # Perform some sanity checks on recipe expansion/normalization + print("Expanded recipe:") + assert len(recipe.diagnostics) == len(raw['diagnostics']) + for diagnostic_name, diagnostic in recipe.diagnostics.items(): + print(pformat(diagnostic)) + source = raw['diagnostics'][diagnostic_name] + + # Check that 'variables' have been read and updated + assert len(diagnostic['preprocessor_output']) == len( + source['variables']) + for variable_name, variables in diagnostic[ + 'preprocessor_output'].items(): + assert len(variables) == 3 + for variable in variables: + for key in MANDATORY_DATASET_KEYS: + assert key in variable and variable[key] + assert variable_name == variable['short_name'] + + # Check that the correct tasks have been created + variables = recipe.diagnostics['diagnostic_name']['preprocessor_output'][ + 'ta'] + tasks = {t for task in recipe.tasks for t in task.flatten()} + preproc_tasks = {t for t in tasks if isinstance(t, PreprocessingTask)} + diagnostic_tasks = {t for t in tasks if isinstance(t, DiagnosticTask)} + + assert len(preproc_tasks) == 1 + for task in 
preproc_tasks: + print("Task", task.name) + assert task.order == list(DEFAULT_ORDER) + for product in task.products: + variable = [ + v for v in variables if v['filename'] == product.filename + ][0] + assert product.attributes == variable + for step in DEFAULT_PREPROCESSOR_STEPS: + assert step in product.settings + assert len(product.files) == 2 + + assert len(diagnostic_tasks) == 1 + for task in diagnostic_tasks: + print("Task", task.name) + assert task.ancestors == list(preproc_tasks) + assert task.script == 'examples/diagnostic.py' + for key in MANDATORY_SCRIPT_SETTINGS_KEYS: + assert key in task.settings and task.settings[key] + assert task.settings['custom_setting'] == 1 + + +def test_default_preprocessor(tmp_path, patched_datafinder, config_user): + + content = dedent(""" + diagnostics: + diagnostic_name: + variables: + chl: + project: CMIP5 + mip: Oyr + exp: historical + start_year: 2000 + end_year: 2005 + ensemble: r1i1p1 + additional_datasets: + - {dataset: CanESM2} + scripts: null + """) + + recipe = get_recipe(tmp_path, content, config_user) + + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + assert len(task.products) == 1 + product = task.products.pop() + preproc_dir = os.path.dirname(product.filename) + assert preproc_dir.startswith(str(tmp_path)) + + fix_dir = os.path.join( + preproc_dir, 'CMIP5_CanESM2_Oyr_historical_r1i1p1_chl_2000-2005_fixed') + defaults = { + 'load': { + 'callback': concatenate_callback, + }, + 'concatenate': {}, + 'fix_file': { + 'project': 'CMIP5', + 'dataset': 'CanESM2', + 'short_name': 'chl', + 'output_dir': fix_dir, + }, + 'fix_data': { + 'project': 'CMIP5', + 'dataset': 'CanESM2', + 'short_name': 'chl', + 'cmor_table': 'CMIP5', + 'mip': 'Oyr', + 'frequency': 'yr', + }, + 'fix_metadata': { + 'project': 'CMIP5', + 'dataset': 'CanESM2', + 'short_name': 'chl', + 'cmor_table': 'CMIP5', + 'mip': 'Oyr', + 'frequency': 'yr', + }, + 'extract_time': { + 'start_year': 2000, + 'end_year': 2006, + 'start_month': 1, + 'end_month': 1, + 'start_day': 1, + 'end_day': 1, + }, + 'cmor_check_metadata': { + 'cmor_table': 'CMIP5', + 'mip': 'Oyr', + 'short_name': 'chl', + 'frequency': 'yr', + }, + 'cmor_check_data': { + 'cmor_table': 'CMIP5', + 'mip': 'Oyr', + 'short_name': 'chl', + 'frequency': 'yr', + }, + 'cleanup': { + 'remove': [fix_dir] + }, + 'save': { + 'compress': False, + 'filename': product.filename, + } + } + assert product.settings == defaults + + +def test_empty_variable(tmp_path, patched_datafinder, config_user): + """Test that it is possible to specify all information in the dataset.""" + content = dedent(""" + diagnostics: + diagnostic_name: + additional_datasets: + - dataset: CanESM2 + project: CMIP5 + mip: Amon + exp: historical + start_year: 2000 + end_year: 2005 + ensemble: r1i1p1 + variables: + pr: + scripts: null + """) + + recipe = get_recipe(tmp_path, content, config_user) + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + assert len(task.products) == 1 + product = task.products.pop() + assert product.attributes['short_name'] == 'pr' + assert product.attributes['dataset'] == 'CanESM2' + + +def test_reference_dataset(tmp_path, patched_datafinder, config_user, + monkeypatch): + + levels = [100] + get_reference_levels = create_autospec( + esmvaltool._recipe.get_reference_levels, return_value=levels) + monkeypatch.setattr(esmvaltool._recipe, 'get_reference_levels', + get_reference_levels) + + content = dedent(""" + preprocessors: + test_from_reference: + regrid: + target_grid: reference_dataset + scheme: linear + extract_levels: 
+ levels: reference_dataset + scheme: linear + test_from_cmor_table: + extract_levels: + levels: + cmor_table: CMIP6 + coordinate: alt16 + scheme: nearest + + diagnostics: + diagnostic_name: + variables: + ta: &var + preprocessor: test_from_reference + project: CMIP5 + mip: Amon + exp: historical + start_year: 2000 + end_year: 2005 + ensemble: r1i1p1 + additional_datasets: + - {dataset: GFDL-CM3} + - {dataset: MPI-ESM-LR} + reference_dataset: MPI-ESM-LR + ch4: + <<: *var + preprocessor: test_from_cmor_table + additional_datasets: + - {dataset: GFDL-CM3} + + scripts: null + """) + + recipe = get_recipe(tmp_path, content, config_user) + + assert len(recipe.tasks) == 2 + + # Check that the reference dataset has been used + task = next(t for t in recipe.tasks + if t.name == 'diagnostic_name' + TASKSEP + 'ta') + assert len(task.products) == 2 + product = next( + p for p in task.products if p.attributes['dataset'] == 'GFDL-CM3') + reference = next( + p for p in task.products if p.attributes['dataset'] == 'MPI-ESM-LR') + + assert product.settings['regrid']['target_grid'] == reference.files[0] + assert product.settings['extract_levels']['levels'] == levels + + fix_dir = os.path.splitext(reference.filename)[0] + '_fixed' + get_reference_levels.assert_called_once_with( + reference.files[0], + 'CMIP5', + 'MPI-ESM-LR', + 'ta', + fix_dir + ) + + assert 'regrid' not in reference.settings + assert 'extract_levels' not in reference.settings + + # Check that levels have been read from CMOR table + task = next(t for t in recipe.tasks + if t.name == 'diagnostic_name' + TASKSEP + 'ch4') + assert len(task.products) == 1 + product = next(iter(task.products)) + assert product.settings['extract_levels']['levels'] == [ + 0, + 250, + 750, + 1250, + 1750, + 2250, + 2750, + 3500, + 4500, + 6000, + 8000, + 10000, + 12000, + 14500, + 16000, + 18000, + ] + + +def test_custom_preproc_order(tmp_path, patched_datafinder, config_user): + + content = dedent(""" + preprocessors: + default: &default + average_region: + coord1: longitude + coord2: latitude + multi_model_statistics: + span: overlap + statistics: [mean ] + custom: + custom_order: true + <<: *default + + diagnostics: + diagnostic_name: + variables: + chl_default: &chl + short_name: chl + preprocessor: default + project: CMIP5 + mip: Oyr + exp: historical + start_year: 2000 + end_year: 2005 + ensemble: r1i1p1 + additional_datasets: + - {dataset: CanESM2} + chl_custom: + <<: *chl + preprocessor: custom + scripts: null + """) + + recipe = get_recipe(tmp_path, content, config_user) + + assert len(recipe.tasks) == 2 + + default = next(t for t in recipe.tasks if tuple(t.order) == DEFAULT_ORDER) + custom = next(t for t in recipe.tasks if tuple(t.order) != DEFAULT_ORDER) + + assert custom.order.index('average_region') < custom.order.index( + 'multi_model_statistics') + assert default.order.index('average_region') > default.order.index( + 'multi_model_statistics') + + +def test_derive(tmp_path, patched_datafinder, config_user): + + content = dedent(""" + diagnostics: + diagnostic_name: + variables: + toz: + project: CMIP5 + mip: Amon + exp: historical + start_year: 2000 + end_year: 2005 + derive: true + force_derivation: true + additional_datasets: + - {dataset: GFDL-CM3, ensemble: r1i1p1} + scripts: null + """) + + recipe = get_recipe(tmp_path, content, config_user) + + # Check generated tasks + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + + assert task.name == 'diagnostic_name' + TASKSEP + 'toz' + assert len(task.ancestors) == 2 + assert 'diagnostic_name' 
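(Editorial aside.) The recipes in `test_reference_dataset` and `test_custom_preproc_order` lean on YAML anchors (`&var`, `&default`) and merge keys (`<<:`). A self-contained sketch of those semantics, with toy keys rather than real recipe keys:

```python
# Toy demonstration of YAML anchors and merge keys as used in the recipes
# above: the merge key copies the anchored mapping, and locally set keys win.
import yaml

TEXT = """
default: &default
  custom_order: false
  scheme: linear
custom:
  <<: *default
  custom_order: true
"""

DATA = yaml.safe_load(TEXT)
assert DATA['custom'] == {'custom_order': True, 'scheme': 'linear'}
assert DATA['default'] == {'custom_order': False, 'scheme': 'linear'}
```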
+ TASKSEP + 'toz_derive_input_ps' in [ + t.name for t in task.ancestors + ] + assert 'diagnostic_name' + TASKSEP + 'toz_derive_input_tro3' in [ + t.name for t in task.ancestors + ] + + # Check product content of tasks + assert len(task.products) == 1 + product = task.products.pop() + assert 'derive' in product.settings + assert product.attributes['short_name'] == 'toz' + assert product.files + + ps_product = next(p for a in task.ancestors for p in a.products + if p.attributes['short_name'] == 'ps') + tro3_product = next(p for a in task.ancestors for p in a.products + if p.attributes['short_name'] == 'tro3') + assert ps_product.filename in product.files + assert tro3_product.filename in product.files + + +def test_derive_not_needed(tmp_path, patched_datafinder, config_user): + + content = dedent(""" + diagnostics: + diagnostic_name: + variables: + toz: + project: CMIP5 + mip: Amon + exp: historical + start_year: 2000 + end_year: 2005 + derive: true + force_derivation: false + additional_datasets: + - {dataset: GFDL-CM3, ensemble: r1i1p1} + scripts: null + """) + + recipe = get_recipe(tmp_path, content, config_user) + + # Check generated tasks + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + + assert task.name == 'diagnostic_name/toz' + assert len(task.ancestors) == 1 + ancestor = [t for t in task.ancestors][0] + assert ancestor.name == 'diagnostic_name/toz_derive_input_toz' + + # Check product content of tasks + assert len(task.products) == 1 + product = task.products.pop() + assert product.attributes['short_name'] == 'toz' + assert 'derive' in product.settings + + assert len(ancestor.products) == 1 + ancestor_product = ancestor.products.pop() + assert ancestor_product.filename in product.files + assert ancestor_product.attributes['short_name'] == 'toz' + assert 'derive' not in ancestor_product.settings + + # Check that fixes are applied just once + fixes = ('fix_file', 'fix_metadata', 'fix_data') + for fix in fixes: + assert fix in ancestor_product.settings + assert fix not in product.settings + + +def test_derive_with_fx(tmp_path, patched_datafinder, config_user): + + content = dedent(""" + diagnostics: + diagnostic_name: + variables: + nbp_grid: + project: CMIP5 + mip: Lmon + exp: historical + start_year: 2000 + end_year: 2005 + derive: true + force_derivation: true + additional_datasets: + - {dataset: GFDL-CM3, ensemble: r1i1p1} + scripts: null + """) + + recipe = get_recipe(tmp_path, content, config_user) + + # Check generated tasks + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + + assert task.name == 'diagnostic_name' + TASKSEP + 'nbp_grid' + assert len(task.ancestors) == 1 + ancestor = [t for t in task.ancestors][0] + assert ancestor.name == 'diagnostic_name/nbp_grid_derive_input_nbp' + + # Check product content of tasks + assert len(task.products) == 1 + product = task.products.pop() + assert 'derive' in product.settings + assert product.attributes['short_name'] == 'nbp_grid' + assert 'fx_files' in product.settings['derive'] + assert 'sftlf' in product.settings['derive']['fx_files'] + assert product.settings['derive']['fx_files']['sftlf'] is not None + + assert len(ancestor.products) == 1 + ancestor_product = ancestor.products.pop() + assert ancestor_product.filename in product.files + assert ancestor_product.attributes['short_name'] == 'nbp' + + +def test_diagnostic_task_provenance(tmp_path, patched_datafinder, config_user): + + script = tmp_path / 'diagnostic.py' + with script.open('w'): + pass + + content = dedent(""" + diagnostics: + diagnostic_name: + 
themes: + - phys + realms: + - atmos + variables: + chl: + project: CMIP5 + mip: Oyr + exp: historical + start_year: 2000 + end_year: 2005 + ensemble: r1i1p1 + additional_datasets: + - dataset: CanESM2 + scripts: + script_name: + script: {script} + """.format(script=script)) + + recipe = get_recipe(tmp_path, content, config_user) + diagnostic_task = recipe.tasks.pop() + + # Simulate Python diagnostic run + cfg = diagnostic_task.settings + input_files = [ + p.filename for a in diagnostic_task.ancestors for p in a.products + ] + record = { + 'caption': 'Test plot', + 'plot_file': get_plot_filename('test', cfg), + 'statistics': ['mean', 'var'], + 'domains': ['trop', 'et'], + 'plot_type': 'zonal', + 'authors': ['ande_bo'], + 'references': ['acknow_project'], + 'ancestors': input_files, + } + + diagnostic_file = get_diagnostic_filename('test', cfg) + create_test_file(diagnostic_file) + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(diagnostic_file, record) + + diagnostic_task._collect_provenance() + # Done simulating diagnostic run + + # Check resulting product + product = diagnostic_task.products.pop() + check_provenance(product) + for key in ('caption', 'plot_file'): + assert product.attributes[key] == record[key] + assert product.entity.get_attribute('attribute:' + + key).pop() == record[key] + + # Check that diagnostic script tags have been added + with open( + os.path.join( + os.path.dirname(esmvaltool.__file__), + 'config-references.yml')) as file: + tags = yaml.safe_load(file) + for key in ('statistics', 'domains', 'authors', 'references'): + assert product.attributes[key] == tuple( + tags[key][k] for k in record[key]) + + # Check that recipe diagnostic tags have been added + src = yaml.safe_load(DEFAULT_DOCUMENTATION + content) + for key in ('realms', 'themes'): + value = src['diagnostics']['diagnostic_name'][key] + assert product.attributes[key] == tuple(tags[key][k] for k in value) + + # Check that recipe tags have been added + recipe_record = product.provenance.get_record('recipe:recipe_test.yml') + assert len(recipe_record) == 1 + for key in ('description', 'references'): + value = src['documentation'][key] + if key == 'references': + value = ', '.join(tags[key][k] for k in value) + assert recipe_record[0].get_attribute('attribute:' + + key).pop() == value + + # Test that provenance was saved to netcdf, xml and svg plot + cube = iris.load(product.filename)[0] + assert 'provenance' in cube.attributes + prefix = os.path.splitext(product.filename)[0] + '_provenance' + assert os.path.exists(prefix + '.xml') + assert os.path.exists(prefix + '.svg') diff --git a/tests/system/data_simulator.py b/tests/system/data_simulator.py index 420077388d..3efb5a79a0 100644 --- a/tests/system/data_simulator.py +++ b/tests/system/data_simulator.py @@ -1,28 +1,34 @@ """Simulate test data for `esmvaltool`.""" -from __future__ import print_function - import os import sys +import tempfile import time import numpy as np -from dummydata.model2 import Model2 -from dummydata.model3 import Model3 from esmvaltool._config import read_config_user_file -from esmvaltool._data_finder import get_input_filename from esmvaltool._recipe import read_recipe_file +def get_input_filename(variable, rootpath, drs): + """Get a valid input filename.""" + # TODO: implement this according to esmvaltool._data_finder.py + # or patch get_input_filelist there. 
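+ # NOTE (editorial sketch, not part of the original patch): the name returned
+ # below points at a file that never exists on disk: NamedTemporaryFile()
+ # creates a temporary file that is deleted once the object is
+ # garbage-collected, and appending '.nc' yields a fresh, uncreated path.
+ # That is fine for a unique placeholder name; if a real on-disk file were
+ # needed, something like
+ # "handle, filename = tempfile.mkstemp(suffix='.nc'); os.close(handle)"
+ # would reserve one instead.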
+ return tempfile.NamedTemporaryFile().name + '.nc' + + def write_data_file(short_name, filename, field, start_year, end_year): """Write a file containing simulated data.""" + from dummydata.model2 import Model2 + from dummydata.model3 import Model3 + if 'T2M' in field: writer = Model2 elif 'T3M' in field: writer = Model3 else: - raise NotImplementedError("Cannot create a model from field {}" - .format(field)) + raise NotImplementedError( + "Cannot create a model from field {}".format(field)) # TODO: Maybe this should be made configurable per diagnostic or model cfg = { @@ -61,8 +67,7 @@ def simulate_input_data(recipe_file, config_user_file=None): 'drs': {}, } - recipe = read_recipe_file( - recipe_file, user_config, initialize_tasks=False) + recipe = read_recipe_file(recipe_file, user_config, initialize_tasks=False) start_time = time.time() @@ -88,8 +93,8 @@ def simulate_input_data(recipe_file, config_user_file=None): end_year=variable['end_year'], ) - print("Simulating data took {:.0f} seconds" - .format(time.time() - start_time)) + print( + "Simulating data took {:.0f} seconds".format(time.time() - start_time)) if __name__ == '__main__': diff --git a/tests/system/esmvaltool_testlib.py b/tests/system/esmvaltool_testlib.py index e681cd71e0..61482fa985 100644 --- a/tests/system/esmvaltool_testlib.py +++ b/tests/system/esmvaltool_testlib.py @@ -1,7 +1,5 @@ """Provide a class for testing esmvaltool.""" -from __future__ import print_function - import glob import os import shutil @@ -84,8 +82,8 @@ def __init__(self, recipe, output_directory, ignore='', **kwargs): Glob patterns of files to be ignored when testing. """ if not _CFG['test']['run']: - raise SkipTest("System tests disabled in {}" - .format(_CFG['configfile'])) + raise SkipTest("System tests disabled in {}".format( + _CFG['configfile'])) self.ignore = (ignore, ) if isinstance(ignore, str) else ignore @@ -94,9 +92,7 @@ def __init__(self, recipe, output_directory, ignore='', **kwargs): # Set recipe path if not os.path.exists(recipe): recipe = os.path.join( - os.path.dirname(script_root), - 'recipes', - recipe) + os.path.dirname(script_root), 'recipes', recipe) self.recipe_file = os.path.abspath(recipe) # Simulate input data? @@ -112,8 +108,9 @@ def __init__(self, recipe, output_directory, ignore='', **kwargs): # If reference data is neither available nor should be generated, skip if not (os.path.exists(reference_dir) or self.create_reference_output): - raise SkipTest("No reference data available for recipe {} in {}" - .format(recipe, _CFG['reference']['output'])) + raise SkipTest( + "No reference data available for recipe {} in {}".format( + recipe, _CFG['reference']['output'])) # Write ESMValTool configuration file self.config_user_file = _create_config_user_file(output_directory) @@ -174,9 +171,10 @@ def _execute(self): output.append(path) if not output: - raise OSError("Output directory not found in location {}. " - "Probably ESMValTool failed to create any output." - .format(output_directory)) + raise OSError( + "Output directory not found in location {}. 
" + "Probably ESMValTool failed to create any output.".format( + output_directory)) if len(output) > 1: print("Warning: found multiple output directories:\n{}\nin output " diff --git a/tests/unit/check_r_code.R b/tests/unit/check_r_code.R new file mode 100644 index 0000000000..bf30b2d045 --- /dev/null +++ b/tests/unit/check_r_code.R @@ -0,0 +1,36 @@ +library(lintr) + +args <- commandArgs(trailingOnly = TRUE) +check_paths <- list("esmvaltool", "tests") + +root_folder <- args[1] +has_errors <- FALSE +linters <- with_defaults( + line_length_linter(79), + # disabled because broken: https://github.com/jimhester/lintr/issues/253 + commas_linter = NULL, + # disabled because broken: https://github.com/jimhester/lintr/issues/27 + object_usage_linter = NULL +) + +for (path in check_paths){ + check_path <- file.path(root_folder, path) + for (file in list.files(check_path, recursive = TRUE, include.dirs = FALSE, + ignore.case = TRUE, pattern = ".*\\.R$")){ + errors <- lint(file.path(check_path, file), linters = linters, + parse_settings = FALSE) + if (!is.null(errors)){ + for (error in errors){ + print(error) + if (error["type"] != "warning"){ + has_errors <- TRUE + } + } + } + } +} + +if (has_errors){ + quit(status = 1) +} +quit(status = 0) diff --git a/tests/unit/cmor/test_cmor_check.py b/tests/unit/cmor/test_cmor_check.py index 77f5ac5c84..010b144d54 100644 --- a/tests/unit/cmor/test_cmor_check.py +++ b/tests/unit/cmor/test_cmor_check.py @@ -2,6 +2,7 @@ import sys import unittest +from io import StringIO import iris import iris.coord_categorisation @@ -112,10 +113,6 @@ def test_report_warning(self): def test_warning_fail_on_error(self): """Test report warning function with fail_on_error""" - if sys.version_info[0] == 2: - from StringIO import StringIO - else: - from io import StringIO checker = CMORCheck(self.cube, self.var_info, fail_on_error=True) stdout = sys.stdout sys.stdout = StringIO() @@ -128,11 +125,18 @@ def test_check(self): """Test checks succeeds for a good cube""" self._check_cube() - def _check_cube(self, automatic_fixes=False): - checker = CMORCheck( - self.cube, self.var_info, automatic_fixes=automatic_fixes) - checker.check_metadata() - checker.check_data() + def _check_cube(self, automatic_fixes=False, frequency=None): + """Apply checks and optionally automatic fixes to self.cube.""" + + def checker(cube): + return CMORCheck( + cube, + self.var_info, + automatic_fixes=automatic_fixes, + frequency=frequency) + + self.cube = checker(self.cube).check_metadata() + self.cube = checker(self.cube).check_data() def test_check_with_month_number(self): """Test checks succeeds for a good cube with month number""" @@ -155,15 +159,15 @@ def test_check_with_year(self): self._check_cube() def test_check_with_unit_conversion(self): - """Test check succeds for a good cube requiring unit converision""" + """Test check succeeds for a good cube requiring unit conversion""" self.cube.units = 'days' self._check_cube() def test_check_with_psu_units(self): - """Test check succeds for a good cube with psu units""" + """Test check succeeds for a good cube with psu units""" self.var_info.units = 'psu' self.cube = self.get_cube(self.var_info) - self._check_cube() + self._check_cube(automatic_fixes=True) def test_check_with_positive(self): """Check variable with positive attribute""" @@ -178,11 +182,11 @@ def test_check_with_no_positive_CMIP5(self): self._check_warnings_on_metadata() def test_check_with_no_positive_CMIP6(self): - """Check CMIP6 variable with no positive attribute report error""" + """Check 
CMIP6 variable with no positive attribute reports a warning.""" self.cube = self.get_cube(self.var_info) self.var_info.positive = 'up' self.var_info.table_type = 'CMIP6' - self._check_fails_in_metadata() + self._check_warnings_on_metadata() def test_invalid_rank(self): """Test check fails in metadata step when rank is not correct""" @@ -199,7 +203,7 @@ def test_rank_with_aux_coords(self): def test_rank_with_scalar_coords(self): """Check succeeds even if a required coordinate is a scalar coord""" self.cube = self.cube.extract( - iris.Constraint(time=self.cube.coord('time').points[0])) + iris.Constraint(time=self.cube.coord('time').cell(0))) self._check_cube() def test_rank_unestructured_grid(self): @@ -269,8 +273,8 @@ def test_non_decreasing_fix(self): cube_points = self.cube.coord('latitude').points reference = numpy.linspace(90, -90, 20, endpoint=True) for index in range(20): - self.assertTrue(iris.util.approx_equal(cube_points[index], - reference[index])) + self.assertTrue( + iris.util.approx_equal(cube_points[index], reference[index])) def test_not_correct_lons(self): """Fail if longitudes are not correct in metadata step""" @@ -337,8 +341,10 @@ def test_bad_time(self): def test_time_automatic_fix(self): """Test automatic fix for time units""" - self.cube.coord('time').units = 'days since 1950-1-1 00:00:00' - self._check_cube(automatic_fixes=True) + self.cube.coord('time').units = 'days since 1860-1-1 00:00:00' + self._check_cube() + assert (self.cube.coord('time').units.origin == + 'days since 1950-1-1 00:00:00') def test_time_automatic_fix_failed(self): """Test automatic fix fail for incompatible time units""" @@ -350,6 +356,11 @@ def test_bad_standard_name(self): self.cube.coord('time').standard_name = 'region' self._check_fails_in_metadata() + def test_bad_out_name(self): + """Fail if coordinates have bad short names at metadata step""" + self.cube.coord('latitude').var_name = 'region' + self._check_fails_in_metadata() + def test_bad_data_units(self): """Fail if data has bad units at metadata step""" self.cube.units = 'hPa' @@ -371,6 +382,17 @@ def test_bad_standard_name_genlevel(self): self.cube.coord('depth').standard_name = None self._check_cube() + def test_frequency_month_not_same_day(self): + """Check monthly data sampled at different days still passes""" + self.cube = self.get_cube(self.var_info, frequency='mon') + time = self.cube.coord('time') + points = numpy.array(time.points) + points[1] = points[1] + 12 + dims = self.cube.coord_dims(time) + self.cube.remove_coord(time) + self.cube.add_dim_coord(time.copy(points), dims) + self._check_cube(frequency='mon') + def test_bad_frequency_day(self): """Fail at metadata if frequency (day) not matches data frequency""" self.cube = self.get_cube(self.var_info, frequency='mon') @@ -400,21 +422,6 @@ def test_frequency_not_supported(self): """Fail at metadata if frequency is not supported""" self._check_fails_in_metadata(frequency='wrong_freq') - # For the moment, we don't have a variable definition with these values - # to test - - def test_data_not_valid_max(self): - """Warning if data is above valid_max in data step""" - self.var_info.valid_max = '10000' - self.cube.data[0] = 100000000000 - self._check_warnings_on_data() - - def test_data_not_valid_min(self): - """Warning if data is below valid_min in data step""" - self.var_info.valid_min = '-100' - self.cube.data[0] = -100000000000 - self._check_warnings_on_data() - def _check_fails_on_data(self): checker = CMORCheck(self.cube, self.var_info) checker.check_metadata() diff --git 
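(Editorial aside.) The `test_warning_fail_on_error` change above drops the Python 2 `StringIO` fallback in favour of an unconditional `from io import StringIO`. A self-contained sketch of the redirect-and-restore stdout-capture idiom that test relies on (the printed message is illustrative):

```python
# Capture everything written to stdout, then restore it; the finally block
# guarantees restoration even if the code under test raises.
import sys
from io import StringIO

captured = StringIO()
old_stdout = sys.stdout
sys.stdout = captured
try:
    print('warning: check failed')
finally:
    sys.stdout = old_stdout

assert 'warning' in captured.getvalue()
```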
a/tests/unit/cmor/test_fix.py b/tests/unit/cmor/test_fix.py index 76191bb895..1e9832ea50 100644 --- a/tests/unit/cmor/test_fix.py +++ b/tests/unit/cmor/test_fix.py @@ -2,9 +2,10 @@ import unittest -from esmvaltool.cmor.fix import fix_file, fix_data, fix_metadata import mock +from esmvaltool.cmor.fix import Fix, fix_data, fix_file, fix_metadata + class TestFixFile(unittest.TestCase): """Fix file tests""" @@ -17,8 +18,9 @@ def setUp(self): def test_fix(self): """Check that the returned fix is applied""" - with mock.patch('esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]): + with mock.patch( + 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', + return_value=[self.mock_fix]): file_returned = fix_file('filename', 'short_name', 'project', 'model', 'output_dir') self.assertNotEqual(file_returned, self.filename) @@ -26,83 +28,123 @@ def test_fix(self): def test_nofix(self): """Check that the same file is returned if no fix is available""" - with mock.patch('esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[]): + with mock.patch( + 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): file_returned = fix_file('filename', 'short_name', 'project', 'model', 'output_dir') self.assertEqual(file_returned, self.filename) -class TestFixMetadata(unittest.TestCase): - """Fix metadata tests""" +class TestGetCube(unittest.TestCase): + """Test get cube by var_name method""" def setUp(self): """Prepare for testing""" + self.cube_1 = mock.Mock() + self.cube_1.var_name = 'cube1' + self.cube_2 = mock.Mock() + self.cube_2.var_name = 'cube2' + self.cubes = [self.cube_1, self.cube_2] + self.fix = Fix() + + def test_get_first_cube(self): + """Test selecting first cube""" + self.assertIs(self.cube_1, + self.fix.get_cube_from_list(self.cubes, "cube1")) + + def test_get_second_cube(self): + """Test selecting second cube.""" + self.assertIs(self.cube_2, + self.fix.get_cube_from_list(self.cubes, "cube2")) + + def test_get_default_raises(self): + """Check that the default raises (Fix is not a cube).""" + with self.assertRaises(Exception): + self.fix.get_cube_from_list(self.cubes) + + def test_get_default(self): + """Check that the default raises (Fix is a cube).""" + self.cube_1.var_name = 'Fix' + self.assertIs(self.cube_1, self.fix.get_cube_from_list(self.cubes)) + + +class TestFixMetadata(unittest.TestCase): + """Fix metadata tests.""" + + def setUp(self): + """Prepare for testing.""" self.cube = mock.Mock() + self.cube.attributes = {'source_file': 'source_file'} self.fixed_cube = mock.Mock() + self.fixed_cube.attributes = {'source_file': 'source_file'} self.mock_fix = mock.Mock() - self.mock_fix.fix_metadata.return_value = self.fixed_cube + self.mock_fix.fix_metadata.return_value = [self.fixed_cube] def test_fix(self): - """Check that the returned fix is applied""" - with mock.patch('esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]): - cube_returned = fix_metadata(self.cube, 'short_name', 'project', - 'model') + """Check that the returned fix is applied.""" + with mock.patch( + 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', + return_value=[self.mock_fix]): + cube_returned = fix_metadata([self.cube], 'short_name', 'project', + 'model')[0] self.assertTrue(cube_returned is not self.cube) self.assertTrue(cube_returned is self.fixed_cube) def test_nofix(self): - """Check that the same cube is returned if no fix is available""" - with mock.patch('esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[]): - cube_returned = fix_metadata(self.cube, 'short_name', 
'project', - 'model') + """Check that the same cube is returned if no fix is available.""" + with mock.patch( + 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): + cube_returned = fix_metadata([self.cube], 'short_name', 'project', + 'model')[0] self.assertTrue(cube_returned is self.cube) self.assertTrue(cube_returned is not self.fixed_cube) def test_cmor_checker_called(self): - """Check that the cmor check is done""" + """Check that the cmor check is done.""" checker = mock.Mock() checker.return_value = mock.Mock() - with mock.patch('esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[]): - with mock.patch('esmvaltool.cmor.fix._get_cmor_checker', - return_value=checker) as get_mock: - fix_metadata(self.cube, 'short_name', 'project', - 'model', 'cmor_table', 'mip') - get_mock.assert_called_once_with(automatic_fixes=True, - fail_on_error=False, - mip='mip', - short_name='short_name', - table='cmor_table') + with mock.patch( + 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): + with mock.patch( + 'esmvaltool.cmor.fix._get_cmor_checker', + return_value=checker) as get_mock: + fix_metadata([self.cube], 'short_name', 'project', 'model', + 'cmor_table', 'mip', 'frequency') + get_mock.assert_called_once_with( + automatic_fixes=True, + fail_on_error=False, + frequency='frequency', + mip='mip', + short_name='short_name', + table='cmor_table') checker.assert_called_once_with(self.cube) checker.return_value.check_metadata.assert_called_once_with() class TestFixData(unittest.TestCase): - """Fix data tests""" + """Fix data tests.""" def setUp(self): - """Prepare for testing""" + """Prepare for testing.""" self.cube = mock.Mock() self.fixed_cube = mock.Mock() self.mock_fix = mock.Mock() self.mock_fix.fix_data.return_value = self.fixed_cube def test_fix(self): - """Check that the returned fix is applied""" - with mock.patch('esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[self.mock_fix]): + """Check that the returned fix is applied.""" + with mock.patch( + 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', + return_value=[self.mock_fix]): cube_returned = fix_data(self.cube, 'short_name', 'project', 'model') self.assertTrue(cube_returned is not self.cube) self.assertTrue(cube_returned is self.fixed_cube) def test_nofix(self): - """Check that the same cube is returned if no fix is available""" - with mock.patch('esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[]): + """Check that the same cube is returned if no fix is available.""" + with mock.patch( + 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): cube_returned = fix_data(self.cube, 'short_name', 'project', 'model') self.assertTrue(cube_returned is self.cube) @@ -112,16 +154,19 @@ def test_cmor_checker_called(self): """Check that the cmor check is done""" checker = mock.Mock() checker.return_value = mock.Mock() - with mock.patch('esmvaltool.cmor._fixes.fix.Fix.get_fixes', - return_value=[]): - with mock.patch('esmvaltool.cmor.fix._get_cmor_checker', - return_value=checker) as get_mock: - fix_data(self.cube, 'short_name', 'project', - 'model', 'cmor_table', 'mip') - get_mock.assert_called_once_with(automatic_fixes=True, - fail_on_error=False, - mip='mip', - short_name='short_name', - table='cmor_table') + with mock.patch( + 'esmvaltool.cmor._fixes.fix.Fix.get_fixes', return_value=[]): + with mock.patch( + 'esmvaltool.cmor.fix._get_cmor_checker', + return_value=checker) as get_mock: + fix_data(self.cube, 'short_name', 'project', 'model', + 'cmor_table', 'mip', 'frequency') + 
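(Editorial aside.) The tests in this file repeatedly patch `Fix.get_fixes` to return a canned list of mock fixes, then assert on how the code under test used them. A self-contained sketch of that pattern with stand-in names (not the real ESMValTool classes; the tests above use the external `mock` package, but the stdlib module behaves the same here):

```python
# Stand-in registry and applier; the real tests patch
# esmvaltool.cmor._fixes.fix.Fix.get_fixes in exactly this way.
from unittest import mock


class Registry:
    @staticmethod
    def get_fixes():
        raise RuntimeError('should be patched out in tests')


def apply_fixes(data):
    # Apply every registered fix in order, threading the result through.
    for fix in Registry.get_fixes():
        data = fix.fix_data(data)
    return data


fix = mock.Mock()
fix.fix_data.return_value = 'fixed'
with mock.patch.object(Registry, 'get_fixes', return_value=[fix]):
    assert apply_fixes('raw') == 'fixed'
fix.fix_data.assert_called_once_with('raw')
```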
get_mock.assert_called_once_with( + automatic_fixes=True, + fail_on_error=False, + frequency='frequency', + mip='mip', + short_name='short_name', + table='cmor_table') checker.assert_called_once_with(self.cube) checker.return_value.check_data.assert_called_once_with() diff --git a/tests/unit/cmor/test_table.py b/tests/unit/cmor/test_table.py index 62ad172ddc..db9b57e5bf 100644 --- a/tests/unit/cmor/test_table.py +++ b/tests/unit/cmor/test_table.py @@ -6,123 +6,123 @@ class TestVariableInfo(unittest.TestCase): - """Variable info tests""" + """Variable info tests.""" def setUp(self): - """Prepare for testing""" + """Prepare for testing.""" self.value = 'value' def test_constructor(self): - """Test basic constructor""" + """Test basic constructor.""" info = VariableInfo('table_type', 'var') self.assertEqual('table_type', info.table_type) self.assertEqual('var', info.short_name) def test_read_empty_dictionary(self): - """Test read empty dict""" + """Test read empty dict.""" info = VariableInfo('table_type', 'var') info.read_json({}) self.assertEqual('', info.standard_name) def test_read_standard_name(self): - """Test standard_name""" + """Test standard_name.""" info = VariableInfo('table_type', 'var') info.read_json({'standard_name': self.value}) self.assertEqual(info.standard_name, self.value) def test_read_long_name(self): - """Test long_name""" + """Test long_name.""" info = VariableInfo('table_type', 'var') info.read_json({'long_name': self.value}) self.assertEqual(info.long_name, self.value) def test_read_units(self): - """Test units""" + """Test units.""" info = VariableInfo('table_type', 'var') info.read_json({'units': self.value}) self.assertEqual(info.units, self.value) def test_read_valid_min(self): - """Test valid_min""" + """Test valid_min.""" info = VariableInfo('table_type', 'var') info.read_json({'valid_min': self.value}) self.assertEqual(info.valid_min, self.value) def test_read_valid_max(self): - """Test valid_max""" + """Test valid_max.""" info = VariableInfo('table_type', 'var') info.read_json({'valid_max': self.value}) self.assertEqual(info.valid_max, self.value) def test_read_positive(self): - """Test positive""" + """Test positive.""" info = VariableInfo('table_type', 'var') info.read_json({'positive': self.value}) self.assertEqual(info.positive, self.value) class TestCoordinateInfo(unittest.TestCase): - """Tests for CoordinataInfo""" + """Tests for CoordinateInfo.""" def setUp(self): - """Prepare for testing""" + """Prepare for testing.""" self.value = 'value' def test_constructor(self): - """Test constructor""" + """Test constructor.""" info = CoordinateInfo('var') self.assertEqual('var', info.name) def test_read_empty_dictionary(self): - """Test empty dict""" + """Test empty dict.""" info = CoordinateInfo('var') info.read_json({}) self.assertEqual('', info.standard_name) def test_read_standard_name(self): - """Test standard_name""" + """Test standard_name.""" info = CoordinateInfo('var') info.read_json({'standard_name': self.value}) self.assertEqual(info.standard_name, self.value) def test_read_var_name(self): - """Test var_name""" + """Test var_name.""" info = CoordinateInfo('var') info.read_json({'var_name': self.value}) self.assertEqual(info.var_name, self.value) def test_read_out_name(self): - """Test out_name""" + """Test out_name.""" info = CoordinateInfo('var') info.read_json({'out_name': self.value}) self.assertEqual(info.out_name, self.value) def test_read_units(self): - """Test units""" + """Test units.""" info = CoordinateInfo('var') info.read_json({'units':
self.value}) self.assertEqual(info.units, self.value) def test_read_valid_min(self): - """Test valid_min""" + """Test valid_min.""" info = CoordinateInfo('var') info.read_json({'valid_min': self.value}) self.assertEqual(info.valid_min, self.value) def test_read_valid_max(self): - """Test valid_max""" + """Test valid_max.""" info = CoordinateInfo('var') info.read_json({'valid_max': self.value}) self.assertEqual(info.valid_max, self.value) def test_read_value(self): - """Test value""" + """Test value.""" info = CoordinateInfo('var') info.read_json({'value': self.value}) self.assertEqual(info.value, self.value) def test_read_requested(self): - """Test requested""" + """Test requested.""" value = ['value1', 'value2'] info = CoordinateInfo('var') info.read_json({'requested': value}) diff --git a/tests/unit/diag_scripts/shared/configs/test_io.yml b/tests/unit/diag_scripts/shared/configs/test_io.yml new file mode 100644 index 0000000000..e4d75b367e --- /dev/null +++ b/tests/unit/diag_scripts/shared/configs/test_io.yml @@ -0,0 +1,188 @@ +_has_necessary_attributes: + - input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: true + - input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: true + - input: + - filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: false + - input: + - dataset: model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: false + - input: + - dataset: model + filename: path/to/model + project: CMIP42 + short_name: var + units: KiB + output: false + - input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + short_name: var + units: KiB + output: false + - input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + units: KiB + output: false + - input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + output: false + - input: + - dataset: model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: false + - input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + short_name: var + units: KiB + output: false + - input: + - dataset: model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + output: false + - input: + - dataset: model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + short_name: var + units: KiB + output: false + - kwargs: + only_var_attrs: true + input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: true + - kwargs: + only_var_attrs: true + input: + - dataset: 
model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + output: true + - kwargs: + only_var_attrs: true + input: + - long_name: 'Loooong name' + short_name: var + units: KiB + - long_name: 'Loooong name' + units: KiB + output: false + - kwargs: + only_var_attrs: true + input: + - long_name: 'Loooong name' + short_name: var + units: KiB + - long_name: 'Loooong name' + short_name: var + units: KiB + output: true + - kwargs: + only_var_attrs: true + input: + - dataset: model + filename: path/to/model + project: CMIP42 + short_name: var + units: KiB + output: false + - kwargs: + only_var_attrs: true + input: + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + units: KiB + - dataset: model + filename: path/to/model + long_name: 'Loooong name' + project: CMIP42 + short_name: var + output: false + - kwargs: + only_var_attrs: true + input: + - {} + output: false diff --git a/tests/unit/diag_scripts/shared/test_io.py b/tests/unit/diag_scripts/shared/test_io.py new file mode 100644 index 0000000000..b8a3608a6f --- /dev/null +++ b/tests/unit/diag_scripts/shared/test_io.py @@ -0,0 +1,465 @@ +"""Tests for the module :mod:`esmvaltool.diag_scripts.shared.io`.""" + +import os +from collections import OrderedDict +from copy import deepcopy + +import iris +import mock +import numpy as np +import pytest +import yaml + +from esmvaltool.diag_scripts.shared import io + +with open(os.path.join(os.path.dirname(__file__), 'configs', + 'test_io.yml')) as file_: + CONFIG = yaml.safe_load(file_) + + +@pytest.mark.parametrize('data', CONFIG['_has_necessary_attributes']) +@mock.patch.object(io, 'logger', autospec=True) +def test_has_necessary_attributes(mock_logger, data): + """Test attribute checks.""" + for log_level in ('debug', 'info', 'warning', 'error', 'exception'): + metadata = data['input'] + kwargs = data.get('kwargs', {}) + has_atts = io._has_necessary_attributes( + metadata, log_level=log_level, **kwargs) + assert has_atts == data['output'] + logger_func = getattr(mock_logger, log_level) + if has_atts: + logger_func.assert_not_called() + else: + logger_func.assert_called() + mock_logger.reset_mock() + + +CFG = { + 'input_files': [ + 'metadata.yml', + 'test_metadata.yml', + 'valid/dir/1', + 'valid/dir/2', + ], + 'other_attr': + 'I am not used!', +} +ROOT_DIR = '/root/to/something' +PATTERNS_FOR_ALL_ANCESTORS = [ + (None, [ + os.path.join(ROOT_DIR, 'test.nc'), + os.path.join(ROOT_DIR, 'egg.yml'), + os.path.join(ROOT_DIR, 'root2', 'x.nc'), + os.path.join(ROOT_DIR, 'root2', 'y.png'), + os.path.join(ROOT_DIR, 'root3', 'egg.nc'), + os.path.join(ROOT_DIR, 'test_1.nc'), + os.path.join(ROOT_DIR, 'test_2.yml'), + os.path.join(ROOT_DIR, 'root4', 'egg.nc'), + ]), + ('*', [ + os.path.join(ROOT_DIR, 'test.nc'), + os.path.join(ROOT_DIR, 'egg.yml'), + os.path.join(ROOT_DIR, 'root2', 'x.nc'), + os.path.join(ROOT_DIR, 'root2', 'y.png'), + os.path.join(ROOT_DIR, 'root3', 'egg.nc'), + os.path.join(ROOT_DIR, 'test_1.nc'), + os.path.join(ROOT_DIR, 'test_2.yml'), + os.path.join(ROOT_DIR, 'root4', 'egg.nc'), + ]), + ('*.nc', [ + os.path.join(ROOT_DIR, 'test.nc'), + os.path.join(ROOT_DIR, 'root2', 'x.nc'), + os.path.join(ROOT_DIR, 'root3', 'egg.nc'), + os.path.join(ROOT_DIR, 'test_1.nc'), + os.path.join(ROOT_DIR, 'root4', 'egg.nc'), + ]), + ('test*', [ + os.path.join(ROOT_DIR, 'test.nc'), + os.path.join(ROOT_DIR, 'test_1.nc'), + os.path.join(ROOT_DIR, 'test_2.yml'), + ]), + ('*.yml', [ + os.path.join(ROOT_DIR, 
'egg.yml'), + os.path.join(ROOT_DIR, 'test_2.yml'), + ]), + ('egg.nc*', [ + os.path.join(ROOT_DIR, 'root3', 'egg.nc'), + os.path.join(ROOT_DIR, 'root4', 'egg.nc'), + ]), +] + + +@pytest.mark.parametrize('pattern,output', PATTERNS_FOR_ALL_ANCESTORS) +@mock.patch('esmvaltool.diag_scripts.shared.io.os.walk', autospec=True) +def test_get_all_ancestor_files(mock_walk, pattern, output): + """Test retrieving of ancestor files.""" + input_dirs = [ + [ + (ROOT_DIR, ['dir', '__pycache__'], ['test.nc', 'egg.yml']), + (os.path.join(ROOT_DIR, 'root2'), ['d'], ['x.nc', 'y.png']), + (os.path.join(ROOT_DIR, 'root3'), [], ['egg.nc']), + ], + [ + (ROOT_DIR, ['dir', '__pycache__'], ['test_1.nc', 'test_2.yml']), + (os.path.join(ROOT_DIR, 'root4'), ['d2'], ['egg.nc']), + ], + ] + mock_walk.side_effect = input_dirs + files = io.get_all_ancestor_files(CFG, pattern=pattern) + assert files == output + + +PATTERNS_FOR_SINGLE_ANCESTOR = [ + ([], None, True), + (['I/am/a/cool/file.nc'], 'I/am/a/cool/file.nc', False), + (['I/am/a/cool/file.nc', 'oh/no/file_2.nc'], 'I/am/a/cool/file.nc', True), +] + + +@pytest.mark.parametrize('files,output,logger', PATTERNS_FOR_SINGLE_ANCESTOR) +@mock.patch.object(io, 'get_all_ancestor_files', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def test_get_ancestor_file(mock_logger, mock_get_all_ancestors, files, output, + logger): + """Test retrieving of single ancestor file.""" + mock_get_all_ancestors.return_value = files + returned_file = io.get_ancestor_file(CFG, pattern='*') + assert returned_file == output + if logger: + mock_logger.warning.assert_called() + else: + mock_logger.warning.assert_not_called() + + +LONG_NAME = 'Loooong name' +SHORT_NAME = 'var' +STANDARD_NAME = 'air_temperature' +UNITS = 'K' + + +@mock.patch.object(io, 'get_all_ancestor_files', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +@mock.patch('esmvaltool.diag_scripts.shared.io.iris.load_cube', autospec=True) +@mock.patch('esmvaltool.diag_scripts.shared.io.os.walk', autospec=True) +def test_netcdf_to_metadata(mock_walk, mock_load_cube, mock_logger, + mock_get_all_ancestors): + """Test cube to metadata.""" + attrs = [ + { + 'dataset': 'model', + 'filename': 'path/to/model1.nc', + 'project': 'CMIP42', + }, + { + 'dataset': 'model', + 'filename': 'path/to/model1.yml', + 'project': 'CMIP42', + }, + { + 'dataset': 'model', + 'filename': 'path/to/model2.nc', + }, + { + 'dataset': 'model', + 'filename': 'path/to/model3.nc', + 'project': 'CMIP42', + }, + ] + var_attrs = [ + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'standard_name': STANDARD_NAME, + 'units': UNITS, + }, + ] + cubes = [ + iris.cube.Cube(0, attributes=attrs[0], **var_attrs[0]), + iris.cube.Cube(0, attributes=attrs[2], **var_attrs[2]), + iris.cube.Cube(0, attributes=attrs[3], **var_attrs[3]), + ] + walk_output = [ + ('path/to', [], ['model1.nc', 'model1.yml']), + ('path/to', ['d'], ['model2.nc', 'model3.nc']), + ] + output = deepcopy([{**attrs[i], **var_attrs[i]} for i in (0, 3)]) + for out in output: + out['short_name'] = out.pop('var_name') + mock_get_all_ancestors.return_value = [a['filename'] for a in attrs] + mock_walk.return_value = walk_output + for root in (None, '*'): + mock_load_cube.side_effect = cubes + metadata = io.netcdf_to_metadata({}, pattern=root, root=root) + assert 
metadata == output + mock_logger.warning.assert_called() + mock_logger.reset_mock() + + +ATTRS = [ + { + 'dataset': 'a', + 'filename': 'path/to/model1.nc', + 'project': 'CMIP42', + }, + { + 'dataset': 'b', + 'filename': 'path/to/model2.nc', + 'project': 'CMIP42', + }, + { + 'dataset': 'c', + 'filename': 'path/to/model3.nc', + }, + { + 'dataset': 'd', + 'filename': 'path/to/model4.nc', + 'project': 'CMIP42', + }, +] +VAR_ATTRS = [ + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'units': UNITS, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + }, + { + 'long_name': LONG_NAME, + 'var_name': SHORT_NAME, + 'standard_name': STANDARD_NAME, + 'units': UNITS, + }, +] +CUBES = [ + iris.cube.Cube(0, attributes=ATTRS[i], **VAR_ATTRS[i]) for i in range(4) +] +OUTPUT = [[CUBES[i], ATTRS[i]['filename']] for i in range(4)] +OUTPUT[2] = None +METADATA_TO_NETDCF = zip(ATTRS, VAR_ATTRS, CUBES, OUTPUT) + + +@pytest.mark.parametrize('attrs,var_attrs,cube,output', METADATA_TO_NETDCF) +@mock.patch.object(io, 'iris_save', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def test_metadata_to_netcdf(mock_logger, mock_save, attrs, var_attrs, cube, + output): + """Test metadata to cube.""" + wrong_name = 'I_am_an_invalid_standard_name' + metadata = deepcopy({**attrs, **var_attrs}) + metadata['short_name'] = metadata.pop('var_name') + if metadata['dataset'] == 'a': + metadata['standard_name'] = wrong_name + io.metadata_to_netcdf(cube, metadata) + if metadata.get('standard_name', '') == wrong_name: + mock_logger.debug.assert_called() + else: + mock_logger.debug.assert_not_called() + if output is None: + mock_logger.warning.assert_called() + assert not mock_save.called + else: + mock_logger.warning.assert_not_called() + assert mock_save.call_args_list == [mock.call(*output)] + + +PATH = 'path/to/super/cube' +VAR_ATTRS_NEW = [ + { + 'long_name': 'I do not have units :(', + 'short_name': 'sad', + }, + { + 'long_name': 'Long name', + 'short_name': 'var', + 'units': '1', + }, + { + 'short_name': SHORT_NAME, + 'long_name': LONG_NAME, + 'standard_name': STANDARD_NAME, + 'units': UNITS, + }, +] +ATTRS_NEW = [ + {}, + {}, + { + 'test': '123', + 'answer': 42, + }, +] +ATTRIBUTES_FOR_1D_CUBE = zip(VAR_ATTRS_NEW, ATTRS_NEW) + + +@pytest.mark.parametrize('var_attrs,attrs', ATTRIBUTES_FOR_1D_CUBE) +@mock.patch.object(io, 'iris_save', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def test_save_1d_data(mock_logger, mock_save, var_attrs, attrs): + """Test saving of 1 dimensional data.""" + coord_name = 'inclination' + data = [ + np.ma.masked_invalid([1.0, np.nan, -1.0]), + np.arange(2.0) + 100.0, + np.ma.masked_invalid([33.0, 22.0, np.nan, np.nan, -77.0]), + ] + coords = [ + iris.coords.DimCoord(np.arange(3.0) - 3.0, long_name=coord_name), + iris.coords.DimCoord(np.arange(2.0) + 2.0, long_name=coord_name), + iris.coords.DimCoord( + np.array([-7.0, -3.0, -2.71, 3.0, 314.15]), long_name=coord_name), + ] + cubes = OrderedDict([ + ('model1', + iris.cube.Cube( + data[0], + var_name='xy', + units='kg', + attributes={'hi': '!'}, + dim_coords_and_dims=[(coords[0], 0)])), + ('model2', + iris.cube.Cube( + data[1], + var_name='zr', + units='1', + attributes={}, + dim_coords_and_dims=[(coords[1], 0)])), + ('model3', + iris.cube.Cube( + data[2], + var_name='wa', + units='unknown', + attributes={'very': 'long cube'}, + dim_coords_and_dims=[(coords[2], 0)])), + ]) + dataset_dim = 
iris.coords.AuxCoord(list(cubes.keys()), long_name='dataset') + dim_1 = coords[0].copy([-7.0, -3.0, -2.71, -2.0, -1.0, 2.0, 3.0, 314.15]) + output_data = np.ma.masked_invalid( + [[np.nan, 1.0, np.nan, np.nan, -1.0, np.nan, np.nan, np.nan], + [np.nan, np.nan, np.nan, np.nan, np.nan, 100.0, 101.0, np.nan], + [33.0, 22.0, np.nan, np.nan, np.nan, np.nan, np.nan, -77.0]]) + output_dims = [(dataset_dim, 0), (dim_1, 1)] + + # Without cubes + io.save_1d_data({}, PATH, coord_name, var_attrs, attrs) + mock_logger.warning.assert_called() + assert not mock_save.called + mock_logger.reset_mock() + mock_save.reset_mock() + + # With cubes + io.save_1d_data(cubes, PATH, coord_name, var_attrs, attrs) + iris_var_attrs = deepcopy(var_attrs) + iris_var_attrs['var_name'] = iris_var_attrs.pop('short_name') + new_cube = iris.cube.Cube( + output_data, + aux_coords_and_dims=output_dims, + attributes=attrs, + **iris_var_attrs) + if 'units' not in var_attrs: + mock_logger.warning.assert_called() + assert not mock_save.called + else: + mock_logger.warning.assert_not_called() + assert mock_save.call_args_list == [mock.call(new_cube, PATH)] + + +CUBELIST = [ + iris.cube.Cube(1), + iris.cube.Cube(2, attributes={ + 'filename': 'a', + 'x': 'y', + }), +] +CUBELIST_OUT = [ + iris.cube.Cube(1, attributes={'filename': PATH}), + iris.cube.Cube(2, attributes={ + 'filename': PATH, + 'x': 'y', + }), +] +CUBES_TO_SAVE = [ + (iris.cube.Cube(0), iris.cube.Cube(0, attributes={'filename': PATH})), + (CUBELIST, CUBELIST_OUT), + (iris.cube.CubeList(CUBELIST), iris.cube.CubeList(CUBELIST_OUT)), +] + + +@pytest.mark.parametrize('source,output', CUBES_TO_SAVE) +@mock.patch('esmvaltool.diag_scripts.shared.io.iris.save', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def test_iris_save(mock_logger, mock_save, source, output): + """Test iris save function.""" + io.iris_save(source, PATH) + assert mock_save.call_args_list == [mock.call(output, PATH)] + mock_logger.info.assert_called_once() + + +AUX_COORDS = [ + None, + None, + iris.coords.AuxCoord([2, 3, 5], long_name='Primes!'), +] +ATTRIBUTES_FOR_SCALAR_CUBE = zip(VAR_ATTRS_NEW, ATTRS_NEW, AUX_COORDS) + + +@pytest.mark.parametrize('var_attrs,attrs,aux_coord', + ATTRIBUTES_FOR_SCALAR_CUBE) +@mock.patch.object(io, 'iris_save', autospec=True) +@mock.patch.object(io, 'logger', autospec=True) +def test_save_scalar_data(mock_logger, mock_save, var_attrs, attrs, aux_coord): + """Test saving of scalar data.""" + data = OrderedDict([ + ('model1', np.nan), + ('model2', 1.0), + ('model3', 3.14), + ]) + dataset_dim = iris.coords.AuxCoord(list(data.keys()), long_name='dataset') + output_data = np.ma.masked_invalid([np.nan, 1.0, 3.14]) + + # Without data + io.save_scalar_data({}, PATH, var_attrs) + mock_logger.warning.assert_called() + assert not mock_save.called + mock_logger.reset_mock() + mock_save.reset_mock() + + # With data + io.save_scalar_data(data, PATH, var_attrs, aux_coord, attrs) + iris_var_attrs = deepcopy(var_attrs) + iris_var_attrs['var_name'] = iris_var_attrs.pop('short_name') + new_cube = iris.cube.Cube( + output_data, + aux_coords_and_dims=[(dataset_dim, 0)], + attributes=attrs, + **iris_var_attrs) + if aux_coord is not None: + new_cube.add_aux_coord(aux_coord, 0) + if 'units' not in var_attrs: + mock_logger.warning.assert_called() + assert not mock_save.called + else: + mock_logger.warning.assert_not_called() + assert mock_save.call_args_list == [mock.call(new_cube, PATH)] diff --git a/tests/unit/diag_scripts/shared/test_iris_helpers.py 
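(Editorial aside.) `test_get_all_ancestor_files` and `test_netcdf_to_metadata` above rely on `side_effect` being an iterable, which makes a mock return successive values on successive calls (one per call; a further call raises `StopIteration`). A minimal illustration with made-up values:

```python
# An iterable side_effect yields one return value per call; this is how
# os.walk and iris.load_cube are fed different results across calls above.
from unittest import mock

walker = mock.Mock(side_effect=[['run1'], ['run2']])
assert walker() == ['run1']  # first call
assert walker() == ['run2']  # second call
```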
b/tests/unit/diag_scripts/shared/test_iris_helpers.py new file mode 100644 index 0000000000..51acfbd53f --- /dev/null +++ b/tests/unit/diag_scripts/shared/test_iris_helpers.py @@ -0,0 +1,389 @@ +"""Tests for the module :mod:`esmvaltool.diag_scripts.shared.iris_helpers`.""" + +import iris +import mock +import numpy as np +import pytest + +from esmvaltool.diag_scripts.shared import iris_helpers as ih + +LONG_NAME = 'x' +DIM_COORD_1 = iris.coords.DimCoord(np.arange(3.0) - 1.0, long_name=LONG_NAME) +AUX_COORD_1 = iris.coords.AuxCoord(np.arange(3.0) - 1.0, long_name=LONG_NAME) +AUX_COORD_2 = iris.coords.AuxCoord([10.0, 20.0, 30.0], long_name='longer') +SMALL_COORD = iris.coords.DimCoord([0.0], long_name=LONG_NAME) +LONG_COORD_1 = iris.coords.AuxCoord([-1.0, 0.0, 1.0, 1.], long_name=LONG_NAME) +LONG_COORD_2 = iris.coords.DimCoord([-1.0, -0.5, 0.0, 1.0], + long_name=LONG_NAME) +WRONG_COORD = iris.coords.DimCoord([-200.0, +200.0], long_name=LONG_NAME) +SCALAR_COORD = iris.coords.AuxCoord(2.71, long_name='e') +DUP_COORD = iris.coords.AuxCoord([-1.0, 0.0, 1.0, 1.0], long_name=LONG_NAME) +CUBE_1 = iris.cube.Cube( + np.ma.masked_invalid([-1.0, np.nan, 2.0]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(DIM_COORD_1, 0)], + aux_coords_and_dims=[(SCALAR_COORD, []), (AUX_COORD_2, 0)]) +CUBE_2 = iris.cube.Cube( + np.ma.masked_invalid([-1.0, np.nan, 2.0]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(DIM_COORD_1, 0)], + aux_coords_and_dims=[(SCALAR_COORD, [])]) +CUBE_3 = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 3.14, np.nan]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(DIM_COORD_1, 0)]) +CUBE_4 = iris.cube.Cube( + np.ma.masked_invalid([1.0, 2.0, 3.0, 3.0]), + var_name='a', + attributes={'1': '2'}, + aux_coords_and_dims=[(SCALAR_COORD, []), (LONG_COORD_1, 0)]) +CUBE_5 = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 3.14, np.nan, np.nan]), + var_name='a', + attributes={'1': '2'}, + aux_coords_and_dims=[(LONG_COORD_1, 0)]) +CUBE_SMALL = iris.cube.Cube([3.14], + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(SMALL_COORD, 0)]) +CUBE_LONG = iris.cube.Cube( + np.ma.masked_invalid([-1.0, np.nan, np.nan, 2.0]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(LONG_COORD_2, 0)], + aux_coords_and_dims=[(SCALAR_COORD, [])]) +CUBE_SMALL_LONG = iris.cube.Cube( + np.ma.masked_invalid([np.nan, np.nan, 3.14, np.nan]), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(LONG_COORD_2, 0)]) +CUBE_WRONG = iris.cube.Cube( + np.arange(2.0), + var_name='a', + attributes={'1': '2'}, + dim_coords_and_dims=[(WRONG_COORD, 0)]) +CUBE_DUP = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 3.14, 2.71, 6.28]), + var_name='a', + attributes={'1': '2'}, + aux_coords_and_dims=[(DUP_COORD, 0)]) +CUBES_TO_TRANSFORM = [ + (DIM_COORD_1, [CUBE_1, CUBE_1], [CUBE_2, CUBE_2]), + (DIM_COORD_1, [CUBE_SMALL, CUBE_1], [CUBE_3, CUBE_2]), + (DIM_COORD_1, [CUBE_WRONG, CUBE_1], ValueError), + (DIM_COORD_1, [CUBE_DUP, CUBE_1], ValueError), + (AUX_COORD_1, [CUBE_1, CUBE_1], [CUBE_2, CUBE_2]), + (AUX_COORD_1, [CUBE_SMALL, CUBE_1], [CUBE_3, CUBE_2]), + (AUX_COORD_1, [CUBE_WRONG, CUBE_1], ValueError), + (AUX_COORD_1, [CUBE_DUP, CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_1, CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_SMALL, CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_WRONG, CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_DUP, CUBE_1], ValueError), + (LONG_COORD_2, [CUBE_1, CUBE_1], [CUBE_LONG, CUBE_LONG]), + (LONG_COORD_2, [CUBE_SMALL, CUBE_1], 
[CUBE_SMALL_LONG, CUBE_LONG]), + (LONG_COORD_2, [CUBE_WRONG, CUBE_1], ValueError), + (LONG_COORD_2, [CUBE_DUP, CUBE_1], ValueError), + (DIM_COORD_1, [CUBE_1], [CUBE_2]), + (DIM_COORD_1, [CUBE_SMALL], [CUBE_3]), + (DIM_COORD_1, [CUBE_WRONG], ValueError), + (DIM_COORD_1, [CUBE_DUP], ValueError), + (AUX_COORD_1, [CUBE_1], [CUBE_2]), + (AUX_COORD_1, [CUBE_SMALL], [CUBE_3]), + (AUX_COORD_1, [CUBE_WRONG], ValueError), + (AUX_COORD_1, [CUBE_DUP], ValueError), + (LONG_COORD_1, [CUBE_1], ValueError), + (LONG_COORD_1, [CUBE_SMALL], ValueError), + (LONG_COORD_1, [CUBE_WRONG], ValueError), + (LONG_COORD_1, [CUBE_DUP], ValueError), + (LONG_COORD_2, [CUBE_1], [CUBE_LONG]), + (LONG_COORD_2, [CUBE_SMALL], [CUBE_SMALL_LONG]), + (LONG_COORD_2, [CUBE_WRONG], ValueError), + (LONG_COORD_2, [CUBE_DUP], ValueError), +] + + +@pytest.mark.parametrize('ref_coord,cubes,output', CUBES_TO_TRANSFORM) +def test_transform_coord_to_ref(ref_coord, cubes, output): + """Test transforming coordinate to reference.""" + # ValueErrors + if isinstance(output, type): + with pytest.raises(output): + new_cubes = ih._transform_coord_to_ref(cubes, ref_coord) + return + + # Working examples + cubes = iris.cube.CubeList(cubes) + output = iris.cube.CubeList(output) + new_cubes = ih._transform_coord_to_ref(cubes, ref_coord) + assert new_cubes == output + + +DIM_COORD_2 = iris.coords.DimCoord(np.arange(3.0) - 1.0, long_name='aaa') +DIM_COORD_3 = iris.coords.DimCoord(np.arange(3.0) + 1.0, long_name=LONG_NAME) +CUBE_6 = iris.cube.Cube( + np.ma.arange(3.0) + 100.0, + var_name='a', + dim_coords_and_dims=[(DIM_COORD_2, 0)]) +CUBE_7 = iris.cube.Cube( + np.ma.arange(3.0) - 100.0, + var_name='a', + dim_coords_and_dims=[(DIM_COORD_3, 0)]) +CUBES_TO_CHECK_COORD = [ + ([CUBE_1, CUBE_1, CUBE_1], DIM_COORD_1.points), + ([CUBE_1], DIM_COORD_1.points), + ([CUBE_1, CUBE_6], iris.exceptions.CoordinateNotFoundError), + ([CUBE_1, CUBE_7], ValueError), +] + + +@pytest.mark.parametrize('cubes,output', CUBES_TO_CHECK_COORD) +def test_check_coordinate(cubes, output): + """Test checking of coordinates.""" + if isinstance(output, type): + with pytest.raises(output): + out = ih.check_coordinate(cubes, LONG_NAME) + else: + out = ih.check_coordinate(cubes, LONG_NAME) + assert np.array_equal(out, output) + + +DICT_1 = {'a': 'b', 'c': 'd'} +DICT_2 = {'short_name': 'x'} +DICT_3 = {'var_name': 'x'} +DICTS_TO_CONVERT = [ + (DICT_1, DICT_1), + (DICT_2, DICT_3), + (DICT_3, DICT_3), + ({ + **DICT_1, + **DICT_2, + }, { + **DICT_1, + **DICT_3, + }), + ({ + **DICT_1, + **DICT_3, + }, { + **DICT_1, + **DICT_3, + }), + ({ + **DICT_1, + **DICT_2, + 'var_name': ':(', + }, { + **DICT_1, + **DICT_3, + }), +] + + +@pytest.mark.parametrize('dict_in,dict_out', DICTS_TO_CONVERT) +@mock.patch.object(ih, 'logger', autospec=True) +def test_convert_to_iris(mock_logger, dict_in, dict_out): + """Test converting metadata dictionary checking of coordinates.""" + new_dict = ih.convert_to_iris(dict_in) + assert new_dict == dict_out + assert new_dict is not dict_in + if 'short_name' in dict_in and 'var_name' in dict_in: + mock_logger.warning.assert_called() + else: + mock_logger.warning.assert_not_called() + + +PROJECT_CONSTRAINTS = [ + (['ONE'], False, [2.0, 6.0], ['a', 'e']), + (['ONE'], True, [3.0, 4.0, 5.0], ['b', 'c', 'd']), + (['ONE', 'THREE'], False, [2.0, 4.0, 6.0], ['a', 'c', 'e']), + (['ONE', 'THREE'], True, [3.0, 5.0], ['b', 'd']), +] + + +@pytest.mark.parametrize('constr,negate,data,points', PROJECT_CONSTRAINTS) +@mock.patch.object(ih, 'logger', autospec=True) +def 
test_iris_project_constraint(mock_logger, constr, negate, data, points): + """Test iris constraint for projects.""" + cfg = { + 'input_data': { + 'p1': { + 'project': 'ONE', + 'dataset': 'a', + }, + 'p2': { + 'project': 'TWO', + 'dataset': 'b', + }, + 'p3': { + 'project': 'THREE', + 'dataset': 'c', + }, + 'p4': { + 'project': 'ONE', + 'dataset': 'e', + }, + }, + 'does_not_matter': 'oh no', + } + dataset_coord = iris.coords.AuxCoord(['a', 'b', 'c', 'd', 'e'], + long_name='dataset') + cube = iris.cube.Cube( + np.arange(5.0) + 2.0, aux_coords_and_dims=[(dataset_coord, 0)]) + new_cube = iris.cube.Cube( + data, + aux_coords_and_dims=[(iris.coords.AuxCoord( + points, long_name='dataset'), 0)]) + constraint = ih.iris_project_constraint(constr, cfg, negate=negate) + assert cube.extract(constraint) == new_cube + mock_logger.warning.assert_not_called() + mock_logger.reset_mock() + cfg['input_data']['p5'] = {'project': 'ONE', 'ohhh': 1} + constraint = ih.iris_project_constraint(constr, cfg, negate=negate) + assert cube.extract(constraint) == new_cube + mock_logger.warning.assert_called_once() + + +ATTRS = [ + { + 'test': 1, + 'oh': 'yeah', + }, + { + 'a2': 'c2', + }, +] +VAR_ATTRS = [ + { + 'var_name': 'var', + 'long_name': 'LOOONG NAME', + }, + { + 'standard_name': 'air_temperature', + 'units': 'K', + }, +] +DATSET_COORD_1 = iris.coords.AuxCoord(['x', 'b', 'c', 'a', 'y', 'z'], + long_name='dataset') +DATSET_COORD_1_SORTED = iris.coords.AuxCoord(['a', 'b', 'c', 'x', 'y', 'z'], + long_name='dataset') +DATSET_COORD_2 = iris.coords.AuxCoord(['t', 'w', 'z', 'b', 'x'], + long_name='dataset') +DATSET_COORD_3 = iris.coords.AuxCoord(['r', 's'], long_name='dataset') +DATSET_COORD_4 = iris.coords.AuxCoord(['c', 'c', 'b', 'a'], + long_name='dataset') +DATSET_COORD_5 = iris.coords.AuxCoord(['b', 'x', 'z'], long_name='dataset') +CUBE_DAT_1 = iris.cube.Cube( + np.arange(6.0) - 2.0, + aux_coords_and_dims=[(DATSET_COORD_1, 0)], + attributes=ATTRS[0], + **VAR_ATTRS[0]) +CUBE_DAT_1_SORTED = iris.cube.Cube([1.0, -1.0, 0.0, -2.0, 2.0, 3.0], + aux_coords_and_dims=[(DATSET_COORD_1_SORTED, + 0)], + attributes=ATTRS[0], + **VAR_ATTRS[0]) +CUBE_DAT_1_OUT = iris.cube.Cube([-1.0, -2.0, 3.0], + aux_coords_and_dims=[(DATSET_COORD_5, 0)], + attributes=ATTRS[0], + **VAR_ATTRS[0]) +CUBE_DAT_2 = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 0.0, np.nan, 3.14, 2.71]), + aux_coords_and_dims=[(DATSET_COORD_2, 0)], + attributes=ATTRS[1], + **VAR_ATTRS[1]) +CUBE_DAT_2_OUT = iris.cube.Cube( + np.ma.masked_invalid([3.14, 2.71, np.nan]), + aux_coords_and_dims=[(DATSET_COORD_5, 0)], + attributes=ATTRS[1], + **VAR_ATTRS[1]) +CUBE_DAT_3 = iris.cube.Cube( + np.arange(2.0), + aux_coords_and_dims=[(DATSET_COORD_3, 0)], + attributes=ATTRS[0], + **VAR_ATTRS[0]) +CUBE_DAT_4 = iris.cube.Cube( + np.ma.masked_invalid([np.nan, 2.0, 3.0, 42.0]), + aux_coords_and_dims=[(DATSET_COORD_4, 0)], + attributes=ATTRS[1], + **VAR_ATTRS[1]) +CUBES_TO_INTERSECT = [ + ([CUBE_DAT_1, CUBE_1], iris.exceptions.CoordinateNotFoundError), + ([CUBE_DAT_1, CUBE_DAT_4], ValueError), + ([CUBE_DAT_1, CUBE_DAT_3], ValueError), + ([CUBE_DAT_1], [CUBE_DAT_1_SORTED]), + ([CUBE_DAT_1, CUBE_DAT_1], [CUBE_DAT_1_SORTED, CUBE_DAT_1_SORTED]), + ([CUBE_DAT_1, CUBE_DAT_2], [CUBE_DAT_1_OUT, CUBE_DAT_2_OUT]), + ([CUBE_DAT_2, CUBE_DAT_1], [CUBE_DAT_2_OUT, CUBE_DAT_1_OUT]), +] + + +@pytest.mark.parametrize('cubes,output', CUBES_TO_INTERSECT) +def test_intersect_dataset_coords(cubes, output): + """Test unifying 1D cubes.""" + # ValueErrors + if isinstance(output, type): + with 
+
+
+@pytest.mark.parametrize('cubes,output', CUBES_TO_INTERSECT)
+def test_intersect_dataset_coords(cubes, output):
+    """Test intersecting dataset coordinates."""
+    # ValueErrors
+    if isinstance(output, type):
+        with pytest.raises(output):
+            new_cubes = ih.intersect_dataset_coordinates(cubes)
+        return
+
+    # Working examples
+    cubes = iris.cube.CubeList(cubes)
+    output = iris.cube.CubeList(output)
+    new_cubes = ih.intersect_dataset_coordinates(cubes)
+    assert new_cubes == output
+
+
+DIM_COORD_4 = DIM_COORD_1.copy([100.0, 150.0, 160.0])
+DIM_COORD_4.rename('time')
+DIM_COORD_LONGEST = DIM_COORD_1.copy([-200.0, -1.0, 0.0, 1.0, 2.0, 3.0, 200.0])
+CUBE_8 = CUBE_1.copy()
+CUBE_8.coord(LONG_NAME).points = np.array([100.0, 150.0, 160.0])
+CUBE_8.coord(LONG_NAME).rename('time')
+CUBE_WRONG_COORD = CUBE_WRONG.copy()
+CUBE_WRONG_COORD.coord(LONG_NAME).rename('wrooong')
+CUBES_TO_UNIFY = [
+    ([CUBE_1, iris.cube.Cube([[1.0]])], LONG_NAME, ValueError),
+    ([CUBE_1, iris.cube.Cube(0.0)], LONG_NAME, ValueError),
+    (
+        [iris.cube.Cube([0.0])],
+        LONG_NAME,
+        iris.exceptions.CoordinateNotFoundError,
+    ),
+    (
+        [CUBE_1, CUBE_WRONG_COORD, CUBE_3],
+        LONG_NAME,
+        iris.exceptions.CoordinateNotFoundError,
+    ),
+    ([CUBE_1, CUBE_4, CUBE_3], LONG_NAME, ValueError),
+    ([CUBE_7, CUBE_1, CUBE_WRONG], LONG_NAME, DIM_COORD_LONGEST),
+    ([CUBE_8], 'time', DIM_COORD_4),
+]
+
+
+@pytest.mark.parametrize('cubes,coord_name,output', CUBES_TO_UNIFY)
+@mock.patch.object(ih, '_transform_coord_to_ref', autospec=True)
+@mock.patch(
+    'esmvaltool.diag_scripts.shared.io.iris.util.unify_time_units',
+    autospec=True)
+def test_unify_1d_cubes(mock_unify_time, mock_transform, cubes, coord_name,
+                        output):
+    """Test unifying 1D cubes."""
+    # ValueErrors
+    if isinstance(output, type):
+        with pytest.raises(output):
+            ih.unify_1d_cubes(cubes, coord_name)
+        return
+
+    # Working examples
+    cubes = iris.cube.CubeList(cubes)
+    ih.unify_1d_cubes(cubes, coord_name)
+    assert mock_transform.call_args_list == [mock.call(cubes, output)]
+    mock_transform.reset_mock()
+    if coord_name == 'time':
+        assert mock_unify_time.call_count == 1
+    else:
+        assert not mock_unify_time.called
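The patched iris.util.unify_time_units is stock iris: for time coordinates it rebases all cubes onto one common epoch, which is why the test above only asserts the call count. A standalone illustration of that behaviour:

import iris
import iris.util
import numpy as np
from cf_units import Unit

time_a = iris.coords.DimCoord(
    [0.0, 1.0], standard_name='time',
    units=Unit('days since 2000-01-01', calendar='gregorian'))
cube_a = iris.cube.Cube(np.zeros(2), dim_coords_and_dims=[(time_a, 0)])
cube_b = cube_a.copy()
cube_b.coord('time').convert_units(
    Unit('hours since 2000-01-02', calendar='gregorian'))
# Before the call the two time coordinates have different units;
# afterwards they share one common epoch.
iris.util.unify_time_units([cube_a, cube_b])
assert cube_a.coord('time').units == cube_b.coord('time').units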
diff --git a/tests/unit/diag_scripts/test_cvdp.py b/tests/unit/diag_scripts/test_cvdp.py
new file mode 100644
index 0000000000..903da9355c
--- /dev/null
+++ b/tests/unit/diag_scripts/test_cvdp.py
@@ -0,0 +1,40 @@
+"""Provides tests for the cvdp diagnostic."""
+
+import os
+
+from esmvaltool.diag_scripts.cvdp.cvdp_wrapper import create_link
+
+
+# def test_setup_driver():
+#     assert False
+#
+def test_create_links(tmpdir_factory):
+    """Test create_link function."""
+    cfg = dict()
+    link_dir = tmpdir_factory.mkdir("link")
+    cfg['run_dir'] = link_dir.dirname
+
+    testfile = tmpdir_factory.mkdir("sub").join("file_2009-2010.nc")
+    testfile.write("Test")
+    filepath = os.path.join(testfile.dirname, testfile.basename)
+
+    link = create_link(cfg, filepath)
+    assert os.path.islink(link)
+
+
+#
+# def test_setup_namelist():
+#     assert False
+#
+# def test_log_functions():
+#     assert False
+#
+# def test_cvdp_available():
+#     assert False
+#
+# def test_nco_available():
+#     assert False
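The assertion above only pins down that create_link returns a path to a symlink derived from cfg['run_dir']. One plausible reading of the wrapper, as a hypothetical sketch (the real cvdp_wrapper.create_link may differ in naming and error handling):

import os

def create_link_sketch(cfg, filepath):
    # Symlink the input file into the diagnostic's run directory and
    # return the link path; all names here are illustrative only.
    link = os.path.join(cfg['run_dir'], os.path.basename(filepath))
    if not os.path.islink(link):
        os.symlink(filepath, link)
    return link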
diff --git a/tests/unit/preprocessor/_area/__init__.py b/tests/unit/preprocessor/_area/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/unit/preprocessor/_area/test_area.py b/tests/unit/preprocessor/_area/test_area.py
new file mode 100644
index 0000000000..3f85825f42
--- /dev/null
+++ b/tests/unit/preprocessor/_area/test_area.py
@@ -0,0 +1,155 @@
+"""Unit tests for the :func:`esmvaltool.preprocessor._area` module."""
+
+import unittest
+
+import iris
+import numpy as np
+from cf_units import Unit
+
+import tests
+from esmvaltool.preprocessor._area import (
+    average_region, extract_named_regions, extract_region)
+
+
+class Test(tests.Test):
+    """Test class for the :func:`esmvaltool.preprocessor._area` module."""
+
+    def setUp(self):
+        """Prepare tests."""
+        self.coord_sys = iris.coord_systems.GeogCS(
+            iris.fileformats.pp.EARTH_RADIUS)
+        data = np.ones((5, 5))
+        lons = iris.coords.DimCoord(
+            [i + .5 for i in range(5)],
+            standard_name='longitude',
+            bounds=[[i, i + 1.] for i in range(5)],  # [0,1] to [4,5]
+            units='degrees_east',
+            coord_system=self.coord_sys)
+        lats = iris.coords.DimCoord([i + .5 for i in range(5)],
+                                    standard_name='latitude',
+                                    bounds=[[i, i + 1.] for i in range(5)],
+                                    units='degrees_north',
+                                    coord_system=self.coord_sys)
+        coords_spec = [(lats, 0), (lons, 1)]
+        self.grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
+
+        ndata = np.ones((6, 6))
+        nlons = iris.coords.DimCoord(
+            [i - 2.5 for i in range(6)],
+            standard_name='longitude',
+            bounds=[[i - 3., i - 2.] for i in range(6)],  # [-3,-2] to [2,3]
+            units='degrees_east',
+            coord_system=self.coord_sys)
+        nlats = iris.coords.DimCoord(
+            [i - 2.5 for i in range(6)],
+            standard_name='latitude',
+            bounds=[[i - 3., i - 2.] for i in range(6)],
+            units='degrees_north',
+            coord_system=self.coord_sys)
+        coords_spec = [(nlats, 0), (nlons, 1)]
+        self.negative_grid = iris.cube.Cube(
+            ndata, dim_coords_and_dims=coords_spec)
+
+    def test_average_region_mean(self):
+        """Test for area average of a 2D field."""
+        result = average_region(self.grid, 'latitude', 'longitude')
+        expected = np.array([1.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_average_region_min(self):
+        """Test for area minimum of a 2D field."""
+        result = average_region(self.grid, 'latitude', 'longitude',
+                                operator='min')
+        expected = np.array([1.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_average_region_max(self):
+        """Test for area maximum of a 2D field."""
+        result = average_region(self.grid, 'latitude', 'longitude',
+                                operator='max')
+        expected = np.array([1.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_average_region_median(self):
+        """Test for area median of a 2D field."""
+        result = average_region(self.grid, 'latitude', 'longitude',
+                                operator='median')
+        expected = np.array([1.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_average_region_std_dev(self):
+        """Test for area standard deviation of a 2D field."""
+        result = average_region(self.grid, 'latitude', 'longitude',
+                                operator='std_dev')
+        expected = np.array([0.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_average_region_variance(self):
+        """Test for area variance of a 2D field."""
+        result = average_region(self.grid, 'latitude', 'longitude',
+                                operator='variance')
+        expected = np.array([0.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_average_region_neg_lon(self):
+        """Test for area average of a 2D field with negative longitudes."""
+        result = average_region(self.negative_grid, 'latitude', 'longitude')
+        expected = np.array([1.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_extract_region(self):
+        """Test for extracting a region from a 2D field."""
+        result = extract_region(self.grid, 1.5, 2.5, 1.5, 2.5)
+        # expected outcome
+        expected = np.ones((2, 2))
+        self.assertArrayEqual(result.data, expected)
+
+    def test_extract_region_neg_lon(self):
+        """Test for extracting a region with a negative longitude field."""
+        result = extract_region(self.negative_grid, -0.5, 0.5, -0.5, 0.5)
+        expected = np.ones((2, 2))
+        self.assertArrayEqual(result.data, expected)
+
+    def test_extract_named_region(self):
+        """Test for extracting a named region."""
+        # Create a cube with regions
+        times = np.array([15., 45., 75.])
+        bounds = np.array([[0., 30.], [30., 60.], [60., 90.]])
+        time = iris.coords.DimCoord(
+            times,
+            bounds=bounds,
+            standard_name='time',
+            units=Unit('days since 1950-01-01', calendar='gregorian'))
+
+        regions = ['region1', 'region2', 'region3']
+        region = iris.coords.AuxCoord(
+            regions,
+            standard_name='region',
+            units='1',
+        )
+
+        data = np.ones((3, 3))
+        region_cube = iris.cube.Cube(
+            data,
+            dim_coords_and_dims=[(time, 0)],
+            aux_coords_and_dims=[(region, 1)])
+
+        # test string region
+        result1 = extract_named_regions(region_cube, 'region1')
+        expected = np.ones((3, ))
+        self.assertArrayEqual(result1.data, expected)
+
+        # test list of regions
+        result2 = extract_named_regions(region_cube, ['region1', 'region2'])
+        expected = np.ones((3, 2))
+        self.assertArrayEqual(result2.data, expected)
+
+        # test for expected failures:
+        with self.assertRaises(ValueError):
+            extract_named_regions(region_cube, 'reg_A')
+        with self.assertRaises(ValueError):
+            extract_named_regions(region_cube, ['region1', 'reg_A'])
+
+
+if __name__ == '__main__':
+    unittest.main()
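A note on the expected values in the average_region tests above: the input field is constant, so for any positive area weights the weighted mean, minimum, maximum, and median all equal 1.0 while the spread operators give 0.0. The arithmetic, checked standalone with NumPy (the random weights are a stand-in for iris area weights):

import numpy as np

data = np.ones((5, 5))
weights = np.random.rand(5, 5) + 0.1  # stand-in for iris area weights
mean = np.average(data, weights=weights)
variance = np.average((data - mean) ** 2, weights=weights)
assert np.isclose(mean, 1.0) and np.isclose(variance, 0.0)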
diff --git a/tests/unit/preprocessor/_area_pp/test_area_pp.py b/tests/unit/preprocessor/_area_pp/test_area_pp.py
deleted file mode 100644
index 75fe78aaa3..0000000000
--- a/tests/unit/preprocessor/_area_pp/test_area_pp.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""Unit tests for the :func:`esmvaltool.preprocessor._area_pp` module."""
-
-from __future__ import absolute_import, division, print_function
-
-import unittest
-
-import iris
-import numpy as np
-
-import tests
-
-from esmvaltool.preprocessor._area_pp import area_slice as extract_region
-from esmvaltool.preprocessor._area_pp import area_average as average_region
-
-
-class Test(tests.Test):
-    """Test class for the :func:`esmvaltool.preprocessor._area_pp` module"""
-
-    def setUp(self):
-        """Prepare tests."""
-        self.coord_sys = iris.coord_systems.GeogCS(
-            iris.fileformats.pp.EARTH_RADIUS)
-        data = np.ones((5, 5))
-        lons = iris.coords.DimCoord(
-            [i + .5 for i in range(5)],
-            standard_name='longitude',
-            bounds=[[i, i + 1.] for i in range(5)],  # [0,1] to [4,5]
-            units='degrees_east',
-            coord_system=self.coord_sys)
-        lats = iris.coords.DimCoord(
-            [i + .5 for i in range(5)],
-            standard_name='latitude',
-            bounds=[[i, i + 1.] for i in range(5)],
-            units='degrees_north',
-            coord_system=self.coord_sys)
-        coords_spec = [(lats, 0), (lons, 1)]
-        self.grid = iris.cube.Cube(data, dim_coords_and_dims=coords_spec)
-
-        ndata = np.ones((6, 6))
-        nlons = iris.coords.DimCoord(
-            [i - 2.5 for i in range(6)],
-            standard_name='longitude',
-            bounds=[[i - 3., i - 2.] for i in range(6)],  # [3,2] to [4,5]
-            units='degrees_east',
-            coord_system=self.coord_sys)
-        nlats = iris.coords.DimCoord(
-            [i - 2.5 for i in range(6)],
-            standard_name='latitude',
-            bounds=[[i - 3., i - 2.] for i in range(6)],
-            units='degrees_north',
-            coord_system=self.coord_sys)
-        coords_spec = [(nlats, 0), (nlons, 1)]
-        self.negative_grid = iris.cube.Cube(ndata,
-                                            dim_coords_and_dims=coords_spec)
-
-    def test_area_average_2d(self):
-        """Test for area average of a 2D field."""
-        result = average_region(self.grid, 'latitude', 'longitude')
-        expected = np.array([1.])
-        self.assertArrayEqual(result.data, expected)
-
-    def test_area_average_negative_longitude(self):
-        """Test for area average of a 2D field."""
-        result = average_region(self.negative_grid, 'latitude', 'longitude')
-        expected = np.array([1.])
-        self.assertArrayEqual(result.data, expected)
-
-    def test_extract_region(self):
-        """Test for extracting a region from a 2D field."""
-        result = extract_region(self.grid, 1.5, 2.5, 1.5, 2.5)
-        # expected outcome
-        expected = np.ones((2, 2))
-        self.assertArrayEqual(result.data, expected)
-
-    def test_extract_region_neg_longitude(self):
-        """Test for extracting a region with a negative longitude field."""
-        result = extract_region(self.negative_grid, -0.5, 0.5, -0.5, 0.5)
-        expected = np.ones((2, 2))
-        self.assertArrayEqual(result.data, expected)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/tests/unit/preprocessor/_derive/test_fgco2_grid.py b/tests/unit/preprocessor/_derive/test_fgco2_grid.py
new file mode 100644
index 0000000000..bb670ecc70
--- /dev/null
+++ b/tests/unit/preprocessor/_derive/test_fgco2_grid.py
@@ -0,0 +1,16 @@
+"""Test derivation of `fgco2_grid`."""
+import mock
+
+import esmvaltool.preprocessor._derive.fgco2_grid as fgco2_grid
+
+CUBES = 'mocked cubes'
+STD_NAME = 'surface_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon'
+
+
+@mock.patch.object(fgco2_grid, 'grid_area_correction', autospec=True)
+def test_fgco2_grid_calculation(mock_grid_area_correction):
+    """Test calculation of `fgco2_grid`."""
+    derived_var = fgco2_grid.DerivedVariable()
+    derived_var.calculate(CUBES)
+    mock_grid_area_correction.assert_called_once_with(
+        CUBES, STD_NAME, ocean_var=True)
diff --git a/tests/unit/preprocessor/_derive/test_nbp_grid.py b/tests/unit/preprocessor/_derive/test_nbp_grid.py
new file mode 100644
index 0000000000..9c7d8d1f73
--- /dev/null
+++ b/tests/unit/preprocessor/_derive/test_nbp_grid.py
@@ -0,0 +1,16 @@
+"""Test derivation of `nbp_grid`."""
+import mock
+
+import esmvaltool.preprocessor._derive.nbp_grid as nbp_grid
+
+CUBES = 'mocked cubes'
+STD_NAME = ('surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_'
+            'carbon_due_to_all_land_processes')
+
+
+@mock.patch.object(nbp_grid, 'grid_area_correction', autospec=True)
+def test_nbp_grid_calculation(mock_grid_area_correction):
+    """Test calculation of `nbp_grid`."""
+    derived_var = nbp_grid.DerivedVariable()
+    derived_var.calculate(CUBES)
+    mock_grid_area_correction.assert_called_once_with(CUBES, STD_NAME)
diff --git a/tests/unit/preprocessor/_derive/test_p_level_widths.py b/tests/unit/preprocessor/_derive/test_p_level_widths.py
index f5245fa457..35ba177a92 100644
--- a/tests/unit/preprocessor/_derive/test_p_level_widths.py
+++ b/tests/unit/preprocessor/_derive/test_p_level_widths.py
@@ -1,81 +1,88 @@
-from __future__ import division, print_function
+"""Tests for toz variable derivation functions."""
 import numpy as np
 import pytest
 
-from esmvaltool.preprocessor._derive import _p_level_widths
+from esmvaltool.preprocessor._derive.toz import _p_level_widths
 
 
 def test_col_is_not_monotonic():
-    sp = 1000
+    """Test for non-monotonic column."""
+    plev = 1000
     top_limit = 5
     col = np.array([1, 2, 3, 2, 1])
-    col = np.insert(col, 0, sp)
+    col = np.insert(col, 0, plev)
     col = np.append(col, top_limit)
     with pytest.raises(ValueError):
         _p_level_widths(col)
 
 
-def test__p_level_widths_keeps_columns_length():
-    sp = 1000
+def test_keeping_column_length():
+    """Test for level widths keeping column length."""
+    plev = 1000
     top_limit = 5
     col = np.array([1000, 900, 800])
-    col = np.insert(col, 0, sp)
+    col = np.insert(col, 0, plev)
    col = np.append(col, top_limit)
     assert len(_p_level_widths(col)) == len(col) - 2
 
 
-def test_lowest_level_is_surface_pressure():
-    sp = 1000
+def test_low_lev_surf_press():
+    """Test for lowest level equal to surface pressure."""
+    plev = 1000
     top_limit = 5
     col = np.array([1000, 900, 800])
-    col = np.insert(col, 0, sp)
+    col = np.insert(col, 0, plev)
     col = np.append(col, top_limit)
     result = np.array([50, 100, 845])
     assert all(_p_level_widths(col) == result)
 
 
-def test_lowest_level_is_above_surface_pressure():
-    sp = 1020
+def test_low_lev_above_surf_press():
+    """Test for lowest level above surface pressure."""
+    plev = 1020
    top_limit = 5
     col = np.array([1000, 900, 800])
-    col = np.insert(col, 0, sp)
+    col = np.insert(col, 0, plev)
     col = np.append(col, top_limit)
     result = np.array([70, 100, 845])
     assert all(_p_level_widths(col) == result)
 
 
-def test_lowest_level_is_below_surface_pressure():
-    sp = 970
+def test_low_lev_below_surf_press():
+    """Test for lowest level below surface pressure."""
+    plev = 970
     top_limit = 5
     col = np.array([np.NaN, 900, 800])
-    col = np.insert(col, 0, sp)
+    col = np.insert(col, 0, plev)
     col = np.append(col, top_limit)
     result = np.array([0, 120, 845])
     assert all(_p_level_widths(col) == result)
 
     col = np.array([np.NaN, np.NaN, 900, 800])
-    col = np.insert(col, 0, sp)
+    col = np.insert(col, 0, plev)
     col = np.append(col, top_limit)
     result = np.array([0, 0, 120, 845])
     assert all(_p_level_widths(col) == result)
 
 
-def test_highest_level_is_top_limit():
-    sp = 1020
+def test_high_level_top_limit():
+    """Test for highest level equal to top limit."""
+    plev = 1020
     top_limit = 5
     col = np.array([1000, 900, 5])
-    col = np.insert(col, 0, sp)
+    col = np.insert(col, 0, plev)
     col = np.append(col, top_limit)
     result = np.array([70, 50 + 895 / 2, 895 / 2])
     assert all(_p_level_widths(col) == result)
 
 
-def test_highest_level_above_top_limit():
-    sp = 1020
+def test_high_level_above_top_limit():
+    """Test for highest level above top limit."""
+    plev = 1020
     top_limit = 5
     col = np.array([1000, 900, 3])
-    col = np.insert(col, 0, sp)
+    col = np.insert(col, 0, plev)
     col = np.append(col, top_limit)
     with pytest.raises(ValueError):
         _p_level_widths(col)
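The seven cases above fully determine the level-width logic: col[0] carries the surface pressure, col[-1] the top limit, interior levels span midpoint to midpoint with the first width clamped to the surface and the last reaching the top limit, and NaN levels (below the surface) get zero width. A hypothetical re-implementation distilled from those expectations (monotonicity and top-limit error checks omitted; not the actual toz module code):

import numpy as np

def p_level_widths_sketch(col):
    """Sketch: widths for col = [surface, level_1, ..., level_n, top]."""
    surface, levels, top = col[0], list(col[1:-1]), col[-1]
    widths = []
    upper = surface  # running upper pressure bound
    for i, lev in enumerate(levels):
        if np.isnan(lev):  # level lies below the surface
            widths.append(0.0)
            continue
        # Lower bound: midpoint to the next level, or the top limit.
        lower = (lev + levels[i + 1]) / 2 if i + 1 < len(levels) else top
        widths.append(upper - lower)
        upper = lower
    return np.array(widths)

# Matches test_low_lev_surf_press above: [50, 100, 845].
assert all(p_level_widths_sketch([1000, 1000, 900, 800, 5]) == [50, 100, 845])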
diff --git a/tests/unit/preprocessor/_derive/test_shared.py b/tests/unit/preprocessor/_derive/test_shared.py
new file mode 100644
index 0000000000..cc6915416a
--- /dev/null
+++ b/tests/unit/preprocessor/_derive/test_shared.py
@@ -0,0 +1,143 @@
+"""Tests for the shared functions of the derive preprocessor."""
+import copy
+
+import numpy as np
+import pytest
+import iris
+from iris.cube import CubeList
+from cf_units import Unit
+
+import esmvaltool.preprocessor._derive._shared as shared
+
+O_NAME = 'sea_surface_temperature'
+L_NAME = 'air_temperature'
+SFTOF_CUBE = iris.cube.Cube(
+    [100.0, 0.0, 50.0, 70.0],
+    var_name='sftof',
+    standard_name='sea_area_fraction',
+    units=Unit('%'),
+)
+SFTLF_CUBE = iris.cube.Cube(
+    [10.0, 0.0, 100.0],
+    var_name='sftlf',
+    standard_name='land_area_fraction',
+    units=Unit('%'),
+)
+O_CUBE_1 = iris.cube.Cube(
+    [1.0, 2.0, -1.0, 2.0],
+    standard_name=O_NAME,
+)
+O_CUBE_2 = iris.cube.Cube(
+    [1.0, -1.0, 3.0],
+    standard_name=O_NAME,
+)
+L_CUBE = iris.cube.Cube(
+    [10.0, 20.0, 0.0],
+    standard_name=L_NAME,
+)
+FRAC_O = np.array([0.0, 1.0, 0.5, 0.3])
+FRAC_L = np.array([0.1, 0.0, 1.0])
+
+GET_LAND_FRACTION = [
+    (CubeList([L_CUBE]), L_NAME, False, None),
+    (CubeList([L_CUBE]), L_NAME, True, None),
+    (CubeList([SFTLF_CUBE, L_CUBE]), L_NAME, False, FRAC_L),
+    (CubeList([SFTLF_CUBE, O_CUBE_1]), O_NAME, False, None),
+    (CubeList([SFTLF_CUBE, O_CUBE_1]), O_NAME, True, None),
+    (CubeList([SFTLF_CUBE, O_CUBE_2]), O_NAME, False, FRAC_L),
+    (CubeList([SFTLF_CUBE, O_CUBE_2]), O_NAME, True, FRAC_L),
+    (CubeList([SFTOF_CUBE, L_CUBE]), L_NAME, False, None),
+    (CubeList([SFTOF_CUBE, L_CUBE]), L_NAME, True, None),
+    (CubeList([SFTOF_CUBE, O_CUBE_1]), O_NAME, False, None),
+    (CubeList([SFTOF_CUBE, O_CUBE_1]), O_NAME, True, FRAC_O),
+    (CubeList([SFTOF_CUBE, O_CUBE_2]), O_NAME, True, None),
+    (CubeList([SFTOF_CUBE, SFTLF_CUBE, O_CUBE_2]), O_NAME, True, FRAC_L),
+    (CubeList([SFTOF_CUBE, SFTLF_CUBE, O_CUBE_2]), O_NAME, False, FRAC_L),
+]
+
+
+@pytest.mark.parametrize('cubes,std_name,ocean_var,out', GET_LAND_FRACTION)
+def test_get_land_fraction(cubes, std_name, ocean_var, out):
+    """Test retrieving the land fraction from a list of cubes."""
+    land_fraction = shared._get_land_fraction(
+        cubes, std_name, derive_from_ocean_fraction=ocean_var)
+    if land_fraction is None or out is None:
+        assert land_fraction is out
+        return
+    land_fraction = np.array(land_fraction)
+    assert np.allclose(land_fraction, out)
+
+
+SHAPES_TO_BROADCAST = [
+    ((), (1, ), True),
+    ((), (10, 10), True),
+    ((1, ), (10, ), True),
+    ((1, ), (10, 10), True),
+    ((2, ), (10, ), False),
+    ((10, ), (), True),
+    ((10, ), (1, ), True),
+    ((10, ), (10, ), True),
+    ((10, ), (10, 10), True),
+    ((10, ), (7, 1), True),
+    ((10, ), (10, 7), False),
+    ((10, ), (7, 1, 10), True),
+    ((10, ), (7, 1, 1), True),
+    ((10, ), (7, 1, 7), False),
+    ((10, ), (7, 10, 7), False),
+    ((10, 1), (1, 1), True),
+    ((10, 1), (1, 100), True),
+    ((10, 1), (10, 7), True),
+    ((10, 12), (10, 1), True),
+    ((10, 12), (), True),
+    ((10, 12), (1, ), True),
+    ((10, 12), (12, ), True),
+    ((10, 12), (1, 1), True),
+    ((10, 12), (1, 12), True),
+    ((10, 12), (10, 10, 1), True),
+    ((10, 12), (10, 12, 1), False),
+    ((10, 12), (10, 12, 12), False),
+    ((10, 12), (10, 10, 12), True),
+]
+
+
+@pytest.mark.parametrize('shape_1,shape_2,out', SHAPES_TO_BROADCAST)
+def test_shape_is_broadcastable(shape_1, shape_2, out):
+    """Test the broadcastability check for two shapes."""
+    is_broadcastable = shared._shape_is_broadcastable(shape_1, shape_2)
+    assert is_broadcastable == out
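SHAPES_TO_BROADCAST is exactly NumPy's broadcasting rule: align shapes at the trailing dimension, require each aligned pair to match or contain a 1, and let missing leading dimensions pass. An equivalent two-line check (a hypothetical stand-in for _shape_is_broadcastable, not necessarily its actual implementation):

def shape_is_broadcastable(shape_1, shape_2):
    # Compare trailing dimensions; zip stops at the shorter shape,
    # so missing leading dimensions are accepted automatically.
    return all(a == b or a == 1 or b == 1
               for a, b in zip(shape_1[::-1], shape_2[::-1]))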
+
+
+O_CUBE_1_OUT = O_CUBE_1.copy([1.0, 0.0, -0.5, 1.4])
+O_CUBE_2_OUT = O_CUBE_2.copy([0.9, -1.0, 0.0])
+O_CUBE_2_OUT_WRONG = O_CUBE_2.copy([0.1, 0.0, 3.0])
+L_CUBE_OUT = L_CUBE.copy([1.0, 0.0, 0.0])
+L_CUBE_OUT_WRONG = L_CUBE.copy([9.0, 20.0, 0.0])
+
+CUBES_GRID_AREA_CORRECTION = [
+    (CubeList([L_CUBE]), L_NAME, False, L_CUBE),
+    (CubeList([L_CUBE]), L_NAME, True, L_CUBE),
+    (CubeList([SFTLF_CUBE, L_CUBE]), L_NAME, False, L_CUBE_OUT),
+    (CubeList([SFTLF_CUBE, L_CUBE]), L_NAME, True, L_CUBE_OUT_WRONG),
+    (CubeList([SFTLF_CUBE, O_CUBE_1]), O_NAME, False, O_CUBE_1),
+    (CubeList([SFTLF_CUBE, O_CUBE_1]), O_NAME, True, O_CUBE_1),
+    (CubeList([SFTLF_CUBE, O_CUBE_2]), O_NAME, False, O_CUBE_2_OUT_WRONG),
+    (CubeList([SFTLF_CUBE, O_CUBE_2]), O_NAME, True, O_CUBE_2_OUT),
+    (CubeList([SFTOF_CUBE, O_CUBE_1]), O_NAME, False, O_CUBE_1),
+    (CubeList([SFTOF_CUBE, O_CUBE_1]), O_NAME, True, O_CUBE_1_OUT),
+    (CubeList([SFTOF_CUBE, O_CUBE_2]), O_NAME, False, O_CUBE_2),
+    (CubeList([SFTOF_CUBE, O_CUBE_2]), O_NAME, True, O_CUBE_2),
+    (CubeList([SFTOF_CUBE, SFTLF_CUBE, O_CUBE_1]), O_NAME, False, O_CUBE_1),
+    (CubeList([SFTOF_CUBE, SFTLF_CUBE, O_CUBE_1]), O_NAME, True, O_CUBE_1_OUT),
+    (CubeList([SFTOF_CUBE, SFTLF_CUBE, O_CUBE_2]), O_NAME, False,
+     O_CUBE_2_OUT_WRONG),
+    (CubeList([SFTOF_CUBE, SFTLF_CUBE, O_CUBE_2]), O_NAME, True, O_CUBE_2_OUT),
+]
+
+
+@pytest.mark.parametrize('cubes,std_name,ocean_var,out',
+                         CUBES_GRID_AREA_CORRECTION)
+def test_grid_area_correction(cubes, std_name, ocean_var, out):
+    """Test grid area correction."""
+    cubes = copy.deepcopy(cubes)
+    cube = shared.grid_area_correction(cubes, std_name, ocean_var=ocean_var)
+    assert cube == out
diff --git a/tests/unit/preprocessor/_mapping/__init__.py b/tests/unit/preprocessor/_mapping/__init__.py
new file mode 100644
index 0000000000..367e65a2b8
--- /dev/null
+++ b/tests/unit/preprocessor/_mapping/__init__.py
@@ -0,0 +1 @@
+"""Unit tests for the :mod:`esmvaltool.preprocessor._mapping` module."""
diff --git a/tests/unit/preprocessor/_mapping/test_mapping.py b/tests/unit/preprocessor/_mapping/test_mapping.py
new file mode 100644
index 0000000000..ba1b9b7118
--- /dev/null
+++ b/tests/unit/preprocessor/_mapping/test_mapping.py
@@ -0,0 +1,255 @@
+"""Unit tests for the esmvaltool.preprocessor._mapping module."""
+import cf_units
+import iris
+import mock
+import numpy as np
+
+import tests
+from esmvaltool.preprocessor._mapping import (get_empty_data, map_slices,
+                                              ref_to_dims_index)
+
+
+class TestHelpers(tests.Test):
+    """Unit tests for all helper methods."""
+
+    def setUp(self):
+        """Set up basic fixtures."""
+        self.coord_system = mock.Mock(return_value=None)
+        self.scalar_coord = mock.sentinel.scalar_coord
+        self.scalar_coord.name = lambda: 'scalar_coord'
+        self.coord = mock.sentinel.coord
+        self.coords = mock.Mock(return_value=[self.scalar_coord, self.coord])
+
+        def coord(name_or_coord):
+            """Return coord for mock cube."""
+            if name_or_coord == 'coord':
+                return self.coord
+            elif name_or_coord == 'scalar_coord':
+                return self.scalar_coord
+            else:
+                raise iris.exceptions.CoordinateNotFoundError('')
+
+        def coord_dims(coord):
+            """Return associated dims for coord in mock cube."""
+            if coord == self.coord:
+                return [0]
+            elif coord == self.scalar_coord:
+                return []
+            else:
+                raise iris.exceptions.CoordinateNotFoundError('')
+
+        self.cube = mock.Mock(
+            spec=iris.cube.Cube,
+            dtype=np.float32,
+            coord_system=self.coord_system,
+            coords=self.coords,
+            coord=coord,
+            coord_dims=coord_dims,
+            ndim=4,
+        )
+
+    def test_get_empty_data(self):
+        """Test creation of empty data."""
+        shape = (3, 3)
+        data = get_empty_data(shape)
+        self.assertIsInstance(data, np.ma.MaskedArray)
+        self.assertEqual(data.shape, shape)
+
+    def test_ref_to_dims_index__int(self):
+        """Test ref_to_dims_index with valid integer."""
+        dims = ref_to_dims_index(self.cube, 0)
+        self.assertEqual([0], dims)
+
+    def test_ref_to_dims_index__invalid_int(self):
+        """Test ref_to_dims_index with invalid integer."""
+        self.assertRaises(ValueError, ref_to_dims_index, self.cube, -1)
+        self.assertRaises(ValueError, ref_to_dims_index, self.cube, 100)
+
+    def test_ref_to_dims_index__scalar_coord(self):
+        """Test ref_to_dims_index with scalar coordinate."""
+        self.assertRaises(ValueError, ref_to_dims_index, self.cube,
+                          'scalar_coord')
+
+    def test_ref_to_dims_index__valid_coordinate_name(self):
+        """Test ref_to_dims_index with valid coordinate name."""
+        dims = ref_to_dims_index(self.cube, 'coord')
+        self.assertEqual([0], dims)
+
+ def test_ref_to_dims_index__invalid_coordinate_name(self): + """Test ref_to_dims_index with invalid coordinate name.""" + self.assertRaises(iris.exceptions.CoordinateNotFoundError, + ref_to_dims_index, self.cube, 'test') + + def test_ref_to_dims_index__invalid_type(self): + """Test ref_to_dims_index with invalid argument.""" + self.assertRaises(ValueError, ref_to_dims_index, self.cube, + mock.sentinel.something) + + +class Test(tests.Test): + """Unit tests for the main mapping method.""" + + # pylint: disable=too-many-instance-attributes + + def setup_coordinates(self): + """Set up coordinates for mock cube.""" + self.time = mock.Mock( + spec=iris.coords.DimCoord, + standard_name='time', + long_name='time', + shape=(3, ), + ) + self.z = mock.Mock( + spec=iris.coords.DimCoord, + standard_name='height', + long_name='height', + shape=(4, ), + ) + self.src_latitude = mock.Mock( + spec=iris.coords.DimCoord, + standard_name='latitude', + long_name='latitude', + shape=(5, ), + points=np.array([1.1, 2.2, 3.3, 4.4, 5.5]), + ) + self.src_longitude = mock.Mock( + spec=iris.coords.DimCoord, + standard_name='longitude', + long_name='longitude', + shape=(6, ), + points=np.array([1.1, 2.2, 3.3, 4.4, 5.5, 6.6]), + ) + self.dst_latitude = mock.Mock( + spec=iris.coords.DimCoord, + standard_name='latitude', + long_name='latitude', + shape=(2, ), + points=np.array([1.1, 2.2]), + ) + self.dst_longitude = mock.Mock( + spec=iris.coords.DimCoord, + standard_name='longitude', + long_name='longitude', + shape=(2, ), + points=np.array([1.1, 2.2]), + ) + + def setUp(self): + """Set up fixtures for mapping test.""" + self.coord_system = mock.Mock(return_value=None) + self.scalar_coord = mock.sentinel.scalar_coord + self.scalar_coord.name = lambda: 'scalar_coord' + self.setup_coordinates() + + def src_coord(name_or_coord): + """Return coord for mock source cube.""" + if name_or_coord in ['latitude', self.src_latitude]: + return self.src_latitude + elif name_or_coord in ['longitude', self.src_longitude]: + return self.src_longitude + elif name_or_coord == 'scalar_coord': + return self.scalar_coord + else: + raise iris.exceptions.CoordinateNotFoundError('') + + def coord_dims(coord): + """Return coord dim for mock cubes.""" + if coord in [self.time, self.dst_latitude]: + return [0] + elif coord in [self.z, self.dst_longitude]: + return [1] + elif coord in [self.src_latitude]: + return [2] + elif coord in [self.src_longitude]: + return [3] + elif coord == self.scalar_coord: + return [] + else: + raise iris.exceptions.CoordinateNotFoundError('') + + def src_coords(*args, **kwargs): + """Return selected coords for source cube.""" + # pylint: disable=unused-argument + # Here, args is ignored. + dim_coords_list = [ + self.time, self.z, self.src_latitude, self.src_longitude + ] + contains_dimension = kwargs.get('contains_dimension', None) + if contains_dimension is not None: + return [dim_coords_list[contains_dimension]] + dim_coords = kwargs.get('dim_coords', None) + if dim_coords: + return dim_coords_list + return [self.scalar_coord] + dim_coords_list + + def src_repr_coords(*args, **kwargs): + """Return selected coords for source representant cube.""" + # pylint: disable=unused-argument + # Here, args is ignored. 
+ dim_coords = [self.src_latitude, self.src_longitude] + if kwargs.get('dim_coords', False): + return dim_coords + if 'contains_dimension' in kwargs: + return dim_coords + return [self.scalar_coord] + dim_coords + + def dst_repr_coords(*args, **kwargs): + """Return selected coords for destination representant cube.""" + # pylint: disable=unused-argument + # Here, args is ignored. + dim_coords = [self.dst_latitude, self.dst_longitude] + if kwargs.get('dim_coords', False): + return dim_coords + return [self.scalar_coord] + dim_coords + + self.src_cube = mock.Mock( + spec=iris.cube.Cube, + dtype=np.float32, + coord_system=self.coord_system, + coords=src_coords, + coord=src_coord, + coord_dims=coord_dims, + ndim=4, + shape=(3, 4, 5, 6), + standard_name='sea_surface_temperature', + long_name='Sea surface temperature', + var_name='tos', + units=cf_units.Unit('K'), + attributes={}, + cell_methods={}, + __getitem__=lambda a, b: mock.sentinel.src_data, + ) + self.src_repr = mock.Mock( + spec=iris.cube.Cube, + dtype=np.float32, + coords=src_repr_coords, + ndim=2, + ) + self.dst_repr = mock.Mock( + spec=iris.cube.Cube, + dtype=np.float32, + coords=dst_repr_coords, + shape=(2, 2), + ) + + @mock.patch('esmvaltool.preprocessor._mapping.get_empty_data') + @mock.patch('iris.cube.Cube') + def test_map_slices(self, mock_cube, mock_get_empty_data): + """Test map_slices.""" + mock_get_empty_data.return_value = mock.sentinel.empty_data + dst = map_slices(self.src_cube, lambda s: np.ones((2, 2)), + self.src_repr, self.dst_repr) + self.assertEqual(dst, mock_cube.return_value) + dim_coords = self.src_cube.coords(dim_coords=True)[:2] \ + + self.dst_repr.coords(dim_coords=True) + dim_coords_and_dims = [(c, i) for i, c in enumerate(dim_coords)] + mock_cube.assert_called_once_with( + data=mock.sentinel.empty_data, + standard_name=self.src_cube.standard_name, + long_name=self.src_cube.long_name, + var_name=self.src_cube.var_name, + units=self.src_cube.units, + attributes=self.src_cube.attributes, + cell_methods=self.src_cube.cell_methods, + dim_coords_and_dims=dim_coords_and_dims, + ) diff --git a/tests/unit/preprocessor/_mask/test_mask.py b/tests/unit/preprocessor/_mask/test_mask.py index 62bf25f307..e2a3e88f67 100644 --- a/tests/unit/preprocessor/_mask/test_mask.py +++ b/tests/unit/preprocessor/_mask/test_mask.py @@ -1,18 +1,14 @@ """Unit test for the :func:`esmvaltool.preprocessor._mask` function""" -from __future__ import absolute_import, division, print_function - import unittest import iris import numpy as np import tests - -from esmvaltool.preprocessor._mask import mask_above_threshold -from esmvaltool.preprocessor._mask import mask_below_threshold -from esmvaltool.preprocessor._mask import mask_inside_range -from esmvaltool.preprocessor._mask import mask_outside_range +from esmvaltool.preprocessor._mask import ( + mask_above_threshold, mask_below_threshold, mask_inside_range, + mask_outside_range) class Test(tests.Test): @@ -22,47 +18,41 @@ def setUp(self): """Prepare tests""" coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) self.data2 = np.array([[0., 1.], [2., 3.]]) - lons2 = iris.coords.DimCoord( - [1.5, 2.5], - standard_name='longitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_east', - coord_system=coord_sys) - lats2 = iris.coords.DimCoord( - [1.5, 2.5], - standard_name='latitude', - bounds=[[1., 2.], [2., 3.]], - units='degrees_north', - coord_system=coord_sys) + lons2 = iris.coords.DimCoord([1.5, 2.5], + standard_name='longitude', + bounds=[[1., 2.], [2., 3.]], + 
units='degrees_east', + coord_system=coord_sys) + lats2 = iris.coords.DimCoord([1.5, 2.5], + standard_name='latitude', + bounds=[[1., 2.], [2., 3.]], + units='degrees_north', + coord_system=coord_sys) coords_spec3 = [(lats2, 0), (lons2, 1)] self.arr = iris.cube.Cube(self.data2, dim_coords_and_dims=coords_spec3) def test_mask_above_threshold(self): """Test to mask above a threshold.""" result = mask_above_threshold(self.arr, 1.5) - expected = np.ma.array(self.data2, - mask=[[False, False], [True, True]]) + expected = np.ma.array(self.data2, mask=[[False, False], [True, True]]) self.assertArrayEqual(result.data, expected) def test_mask_below_threshold(self): """Test to mask below a threshold.""" result = mask_below_threshold(self.arr, 1.5) - expected = np.ma.array(self.data2, - mask=[[True, True], [False, False]]) + expected = np.ma.array(self.data2, mask=[[True, True], [False, False]]) self.assertArrayEqual(result.data, expected) def test_mask_inside_range(self): """Test to mask inside a range.""" result = mask_inside_range(self.arr, 0.5, 2.5) - expected = np.ma.array(self.data2, - mask=[[False, True], [True, False]]) + expected = np.ma.array(self.data2, mask=[[False, True], [True, False]]) self.assertArrayEqual(result.data, expected) def test_mask_outside_range(self): """Test to mask outside a range.""" result = mask_outside_range(self.arr, 0.5, 2.5) - expected = np.ma.array(self.data2, - mask=[[True, False], [False, True]]) + expected = np.ma.array(self.data2, mask=[[True, False], [False, True]]) self.assertArrayEqual(result.data, expected) diff --git a/tests/unit/preprocessor/_regrid/__init__.py b/tests/unit/preprocessor/_regrid/__init__.py index 54c11f137f..0692e0b44b 100644 --- a/tests/unit/preprocessor/_regrid/__init__.py +++ b/tests/unit/preprocessor/_regrid/__init__.py @@ -3,11 +3,9 @@ """ -from __future__ import (absolute_import, division, print_function) - import iris -from iris.coords import AuxCoord, CellMethod, DimCoord import numpy as np +from iris.coords import AuxCoord, CellMethod, DimCoord def _make_vcoord(data, dtype=None): diff --git a/tests/unit/preprocessor/_regrid/test__create_cube.py b/tests/unit/preprocessor/_regrid/test__create_cube.py index a251e129f5..016926ceda 100644 --- a/tests/unit/preprocessor/_regrid/test__create_cube.py +++ b/tests/unit/preprocessor/_regrid/test__create_cube.py @@ -4,8 +4,6 @@ """ -from __future__ import absolute_import, division, print_function - import unittest import numpy as np diff --git a/tests/unit/preprocessor/_regrid/test__stock_cube.py b/tests/unit/preprocessor/_regrid/test__stock_cube.py index d62c6c38f2..cd1b15f47a 100644 --- a/tests/unit/preprocessor/_regrid/test__stock_cube.py +++ b/tests/unit/preprocessor/_regrid/test__stock_cube.py @@ -4,8 +4,6 @@ """ -from __future__ import absolute_import, division, print_function - import unittest import iris @@ -13,20 +11,26 @@ import numpy as np import tests +from esmvaltool.preprocessor._regrid import (_LAT_MAX, _LAT_MIN, _LAT_RANGE, + _LON_MAX, _LON_MIN, _LON_RANGE) from esmvaltool.preprocessor._regrid import _stock_cube as stock_cube -from esmvaltool.preprocessor._regrid import ( - _LAT_MAX, _LAT_MIN, _LAT_RANGE, _LON_MAX, _LON_MIN, _LON_RANGE) class Test(tests.Test): - def _check(self, dx, dy): + def _check(self, dx, dy, lat_off=True, lon_off=True): # Generate the expected stock cube coordinate points. 
dx, dy = float(dx), float(dy) mid_dx, mid_dy = dx / 2, dy / 2 - expected_lat_points = np.linspace(_LAT_MIN + mid_dy, _LAT_MAX - mid_dy, - _LAT_RANGE / dy) - expected_lon_points = np.linspace(_LON_MIN + mid_dx, _LON_MAX - mid_dx, - _LON_RANGE / dx) + if lat_off and lon_off: + expected_lat_points = np.linspace( + _LAT_MIN + mid_dy, _LAT_MAX - mid_dy, _LAT_RANGE / dy) + expected_lon_points = np.linspace( + _LON_MIN + mid_dx, _LON_MAX - mid_dx, _LON_RANGE / dx) + else: + expected_lat_points = np.linspace(_LAT_MIN, _LAT_MAX, + _LAT_RANGE / dy + 1) + expected_lon_points = np.linspace(_LON_MIN, _LON_MAX - dx, + _LON_RANGE / dx) # Check the stock cube coordinates. self.assertEqual(self.mock_DimCoord.call_count, 2) @@ -96,6 +100,16 @@ def test_specs(self): self.assertEqual(result, self.Cube) self._check(*list(map(float, spec.split('x')))) + def test_specs_no_offset(self): + specs = ['0.5x0.5', '1x1', '2.5x2.5', '5x5', '10x10'] + for spec in specs: + result = stock_cube(spec, lat_offset=False, lon_offset=False) + self.assertEqual(result, self.Cube) + self._check( + *list(map(float, spec.split('x'))), + lat_off=False, + lon_off=False) + if __name__ == '__main__': unittest.main() diff --git a/tests/unit/preprocessor/_regrid/test_vinterp.py b/tests/unit/preprocessor/_regrid/test_extract_levels.py similarity index 75% rename from tests/unit/preprocessor/_regrid/test_vinterp.py rename to tests/unit/preprocessor/_regrid/test_extract_levels.py index 3ecd028bac..5f4442357c 100644 --- a/tests/unit/preprocessor/_regrid/test_vinterp.py +++ b/tests/unit/preprocessor/_regrid/test_extract_levels.py @@ -1,6 +1,4 @@ -"""Unit tests for :func:`esmvaltool.preprocessor.regrid.vinterp`.""" - -from __future__ import absolute_import, division, print_function +"""Unit tests for :func:`esmvaltool.preprocessor.regrid.extract_levels`.""" import unittest @@ -10,7 +8,8 @@ from numpy import ma import tests -from esmvaltool.preprocessor._regrid import _MDI, vertical_schemes, vinterp +from esmvaltool.preprocessor._regrid import (_MDI, VERTICAL_SCHEMES, + extract_levels) from tests.unit.preprocessor._regrid import _make_cube, _make_vcoord @@ -26,47 +25,32 @@ def setUp(self): self.mock_create_cube = self.patch( 'esmvaltool.preprocessor._regrid._create_cube', return_value=self.created_cube) - self.vinterp_schemes = ['linear', 'nearest', - 'linear_horizontal_extrapolate_vertical', - 'nearest_horizontal_extrapolate_vertical'] - - def test_nop(self): - cube = mock.sentinel.cube - result = vinterp(cube, None, None) - self.assertEqual(result, cube) - - def test_invalid_levels__None(self): - emsg = 'Target levels must be specified' - with self.assertRaisesRegex(ValueError, emsg): - vinterp(self.cube, None, 'linear') - - def test_invalid_scheme__None(self): - levels = mock.sentinel.levels - emsg = 'A scheme must be specified' - with self.assertRaisesRegex(ValueError, emsg): - vinterp(self.cube, levels, None) + self.schemes = [ + 'linear', 'nearest', 'linear_horizontal_extrapolate_vertical', + 'nearest_horizontal_extrapolate_vertical' + ] def test_invalid_scheme__unknown(self): levels = mock.sentinel.levels scheme = mock.sentinel.scheme emsg = 'Unknown vertical interpolation scheme' with self.assertRaisesRegex(ValueError, emsg): - vinterp(self.cube, levels, scheme) + extract_levels(self.cube, levels, scheme) def test_vertical_schemes(self): - self.assertEqual(set(vertical_schemes), set(self.vinterp_schemes)) + self.assertEqual(set(VERTICAL_SCHEMES), set(self.schemes)) def test_nop__levels_match(self): vcoord = _make_vcoord(self.z, 
dtype=self.dtype) self.assertEqual(self.cube.coord(axis='z', dim_coords=True), vcoord) levels = vcoord.points - result = vinterp(self.cube, levels, 'linear') + result = extract_levels(self.cube, levels, 'linear') self.assertEqual(id(result), id(self.cube)) self.assertEqual(result, self.cube) def test_extraction(self): levels = [0, 2] - result = vinterp(self.cube, levels, 'linear') + result = extract_levels(self.cube, levels, 'linear') data = np.array([0, 1, 4, 5], dtype=self.dtype).reshape(2, 2, 1) expected = _make_cube( data, aux_coord=False, dim_coord=False, dtype=self.dtype) @@ -81,7 +65,7 @@ def test_extraction__failure(self): with mock.patch('iris.cube.Cube.extract', return_value=None): emsg = 'Failed to extract levels' with self.assertRaisesRegex(ValueError, emsg): - vinterp(self.cube, levels, 'linear') + extract_levels(self.cube, levels, 'linear') def test_interpolation(self): new_data = np.array(True) @@ -89,7 +73,7 @@ def test_interpolation(self): scheme = 'linear' with mock.patch( 'stratify.interpolate', return_value=new_data) as mocker: - result = vinterp(self.cube, levels, scheme) + result = extract_levels(self.cube, levels, scheme) self.assertEqual(result, self.created_cube) args, kwargs = mocker.call_args # Check the stratify.interpolate args ... @@ -101,11 +85,9 @@ def test_interpolation(self): self.assertArrayEqual(args[1], src_levels_broadcast) self.assertArrayEqual(args[2], self.cube.data) # Check the stratify.interpolate kwargs ... - self.assertEqual(kwargs, - dict( - axis=0, - interpolation=scheme, - extrapolation='nan')) + self.assertEqual( + kwargs, dict( + axis=0, interpolation=scheme, extrapolation='nan')) args, kwargs = self.mock_create_cube.call_args # Check the _create_cube args ... self.assertEqual(len(args), 3) @@ -121,7 +103,7 @@ def test_interpolation__extrapolated_NaN_filling(self): scheme = 'nearest' with mock.patch( 'stratify.interpolate', return_value=new_data) as mocker: - result = vinterp(self.cube, levels, scheme) + result = extract_levels(self.cube, levels, scheme) self.assertEqual(result, self.created_cube) args, kwargs = mocker.call_args # Check the stratify.interpolate args ... @@ -133,11 +115,9 @@ def test_interpolation__extrapolated_NaN_filling(self): self.assertArrayEqual(args[1], src_levels_broadcast) self.assertArrayEqual(args[2], self.cube.data) # Check the stratify.interpolate kwargs ... - self.assertEqual(kwargs, - dict( - axis=0, - interpolation=scheme, - extrapolation='nan')) + self.assertEqual( + kwargs, dict( + axis=0, interpolation=scheme, extrapolation='nan')) args, kwargs = self.mock_create_cube.call_args # Check the _create_cube args ... self.assertEqual(len(args), 3) @@ -160,7 +140,7 @@ def test_interpolation__masked(self): cube = _make_cube(masked, dtype=self.dtype) with mock.patch( 'stratify.interpolate', return_value=new_data) as mocker: - result = vinterp(cube, levels, scheme) + result = extract_levels(cube, levels, scheme) self.assertEqual(result, self.created_cube) args, kwargs = mocker.call_args # Check the stratify.interpolate args ... @@ -172,13 +152,11 @@ def test_interpolation__masked(self): self.assertArrayEqual(args[1], src_levels_broadcast) self.assertArrayEqual(args[2], cube.data) # Check the stratify.interpolate kwargs ... 
- self.assertEqual(kwargs, - dict( - axis=0, - interpolation=scheme, - extrapolation='nan')) + self.assertEqual( + kwargs, dict( + axis=0, interpolation=scheme, extrapolation='nan')) args, kwargs = self.mock_create_cube.call_args - # in-place for new vinterp with nan's + # in-place for new extract_levels with nan's new_data[np.isnan(new_data)] = _MDI # Check the _create_cube args ... self.assertEqual(len(args), 3) diff --git a/tests/unit/preprocessor/_regrid/test_regrid.py b/tests/unit/preprocessor/_regrid/test_regrid.py index 5c9c349a8c..346850709a 100644 --- a/tests/unit/preprocessor/_regrid/test_regrid.py +++ b/tests/unit/preprocessor/_regrid/test_regrid.py @@ -3,8 +3,6 @@ """ -from __future__ import absolute_import, division, print_function - import unittest import iris @@ -12,17 +10,17 @@ import tests from esmvaltool.preprocessor import regrid -from esmvaltool.preprocessor._regrid import _cache, horizontal_schemes +from esmvaltool.preprocessor._regrid import _CACHE, HORIZONTAL_SCHEMES class Test(tests.Test): def _check(self, tgt_grid, scheme, spec=False): - expected_scheme = horizontal_schemes[scheme] + expected_scheme = HORIZONTAL_SCHEMES[scheme] if spec: spec = tgt_grid - self.assertIn(spec, _cache) - self.assertEqual(_cache[spec], self.tgt_grid) + self.assertIn(spec, _CACHE) + self.assertEqual(_CACHE[spec], self.tgt_grid) self.coord_system.asset_called_once() expected_calls = [ mock.call(axis='x', dim_coords=True), @@ -62,40 +60,28 @@ def setUp(self): self.tgt_grid = mock.Mock( spec=iris.cube.Cube, coord=self.tgt_grid_coord) self.regrid_schemes = [ - 'linear', 'nearest', 'area_weighted', 'unstructured_nearest' + 'linear', 'linear_extrapolate', 'nearest', 'area_weighted', + 'unstructured_nearest' ] + + def _return_mock_stock_cube(spec, lat_offset=True, lon_offset=True): + return self.tgt_grid + self.mock_stock = self.patch( 'esmvaltool.preprocessor._regrid._stock_cube', - side_effect=lambda arg: self.tgt_grid) + side_effect=_return_mock_stock_cube) self.mocks = [ self.coord_system, self.coords, self.regrid, self.src_cube, self.tgt_grid_coord, self.tgt_grid, self.mock_stock ] - def test_nop(self): - cube = mock.sentinel.cube - result = regrid(cube, None, None) - self.assertEqual(result, cube) - - def test_invalid_tgt_grid__None(self): - dummy = mock.sentinel.dummy - emsg = 'A target grid must be specified' - with self.assertRaisesRegex(ValueError, emsg): - regrid(dummy, None, dummy) - def test_invalid_tgt_grid__unknown(self): dummy = mock.sentinel.dummy scheme = 'linear' - emsg = 'Expecting a cube or cell-specification' + emsg = 'Expecting a cube' with self.assertRaisesRegex(ValueError, emsg): regrid(self.src_cube, dummy, scheme) - def test_invalid_scheme__None(self): - dummy = mock.sentinel.dummy - emsg = 'A scheme must be specified' - with self.assertRaisesRegex(ValueError, emsg): - regrid(dummy, dummy, None) - def test_invalid_scheme__unknown(self): dummy = mock.sentinel.dummy emsg = 'Unknown regridding scheme' @@ -103,8 +89,8 @@ def test_invalid_scheme__unknown(self): regrid(dummy, dummy, 'wibble') def test_horizontal_schemes(self): - self.assertEqual(set(horizontal_schemes.keys()), - set(self.regrid_schemes)) + self.assertEqual( + set(HORIZONTAL_SCHEMES.keys()), set(self.regrid_schemes)) def test_regrid__horizontal_schemes(self): for scheme in self.regrid_schemes: @@ -119,7 +105,7 @@ def test_regrid__cell_specification(self): result = regrid(self.src_cube, spec, scheme) self.assertEqual(result, self.regridded_cube) self._check(spec, scheme, spec=True) - 
self.assertEqual(set(_cache.keys()), set(specs)) + self.assertEqual(set(_CACHE.keys()), set(specs)) if __name__ == '__main__': diff --git a/tests/unit/preprocessor/_regrid_esmpy/__init__.py b/tests/unit/preprocessor/_regrid_esmpy/__init__.py new file mode 100644 index 0000000000..fc186bf9ed --- /dev/null +++ b/tests/unit/preprocessor/_regrid_esmpy/__init__.py @@ -0,0 +1 @@ +"""Unit tests for the :mod:`esmvaltool.preprocessor._regrid_esmpy` module.""" diff --git a/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py b/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py new file mode 100644 index 0000000000..bde05f7894 --- /dev/null +++ b/tests/unit/preprocessor/_regrid_esmpy/test_regrid_esmpy.py @@ -0,0 +1,660 @@ +"""Unit tests for the esmvaltool.preprocessor._regrid_esmpy module.""" +import cf_units +import iris +import mock +import numpy as np +from iris.exceptions import CoordinateNotFoundError + +import tests +from esmvaltool.preprocessor._regrid_esmpy import ( + build_regridder, build_regridder_2d, coords_iris_to_esmpy, + cube_to_empty_field, get_grid, get_grid_representant, + get_grid_representants, get_representant, is_lon_circular, regrid) + + +def identity(*args, **kwargs): + """Return args, acting as identity for mocking functions.""" + # pylint: disable=unused-argument + # Here, kwargs will be ignored. + if len(args) == 1: + return args[0] + return args + + +def mock_cube_to_empty_field(cube): + """Return associated field for mock cube.""" + return cube.field + + +class MockGrid(mock.MagicMock): + """Mock ESMF grid.""" + + get_coords = mock.Mock(return_value=mock.MagicMock()) + add_coords = mock.Mock() + add_item = mock.Mock() + get_item = mock.Mock(return_value=mock.MagicMock()) + + +class MockGridItem(mock.Mock): + """Mock ESMF enum for grid items.""" + + MASK = mock.sentinel.gi_mask + + +class MockRegridMethod(mock.Mock): + """Mock ESMF enum for regridding methods.""" + + BILINEAR = mock.sentinel.rm_bilinear + CONSERVE = mock.sentinel.rm_conserve + NEAREST_STOD = mock.sentinel.rm_nearest_stod + + +class MockStaggerLoc(mock.Mock): + """Mock ESMF enum for stagger locations.""" + + CENTER = mock.sentinel.sl_center + CORNER = mock.sentinel.sl_corner + + +class MockUnmappedAction(mock.Mock): + """Mock ESMF enum for unmapped actions.""" + + IGNORE = mock.sentinel.ua_ignore + + +ESMF_REGRID_METHODS = { + 'linear': MockRegridMethod.BILINEAR, + 'area_weighted': MockRegridMethod.CONSERVE, + 'nearest': MockRegridMethod.NEAREST_STOD, +} + +MASK_REGRIDDING_MASK_VALUE = { + mock.sentinel.rm_bilinear: np.array([1]), + mock.sentinel.rm_conserve: np.array([1]), + mock.sentinel.rm_nearest_stod: np.array([]), +} + + +@mock.patch('esmvaltool.preprocessor._regrid_esmpy.MASK_REGRIDDING_MASK_VALUE', + MASK_REGRIDDING_MASK_VALUE) +@mock.patch('esmvaltool.preprocessor._regrid_esmpy.ESMF_REGRID_METHODS', + ESMF_REGRID_METHODS) +@mock.patch('ESMF.Manager', mock.Mock) +@mock.patch('ESMF.GridItem', MockGridItem) +@mock.patch('ESMF.RegridMethod', MockRegridMethod) +@mock.patch('ESMF.StaggerLoc', MockStaggerLoc) +@mock.patch('ESMF.UnmappedAction', MockUnmappedAction) +class TestHelpers(tests.Test): + """Unit tests for helper functions.""" + + # pylint: disable=too-many-instance-attributes, too-many-public-methods + def setUp(self): + """Set up fixtures.""" + # pylint: disable=too-many-locals + lat_1d_pre_bounds = np.linspace(-90, 90, 5) + lat_1d_bounds = np.stack( + [lat_1d_pre_bounds[:-1], lat_1d_pre_bounds[1:]], axis=1) + lat_1d_points = lat_1d_bounds.mean(axis=1) + lon_1d_pre_bounds = 
np.linspace(0, 360, 5) + lon_1d_bounds = np.stack( + [lon_1d_pre_bounds[:-1], lon_1d_pre_bounds[1:]], axis=1) + lon_1d_points = lon_1d_bounds.mean(axis=1) + lon_2d_points, lat_2d_points = np.meshgrid(lon_1d_points, + lat_1d_points) + (lon_2d_pre_bounds, lat_2d_pre_bounds) = np.meshgrid( + lon_1d_pre_bounds, lat_1d_pre_bounds) + lat_2d_bounds = np.stack([ + lat_2d_pre_bounds[:-1, :-1], lat_2d_pre_bounds[:-1, 1:], + lat_2d_pre_bounds[1:, 1:], lat_2d_pre_bounds[1:, :-1] + ], + axis=2) + lon_2d_bounds = np.stack([ + lon_2d_pre_bounds[:-1, :-1], lon_2d_pre_bounds[:-1, 1:], + lon_2d_pre_bounds[1:, 1:], lon_2d_pre_bounds[1:, :-1] + ], + axis=2) + self.lat_1d = mock.Mock( + iris.coords.DimCoord, + standard_name='latitude', + long_name='latitude', + ndim=1, + points=lat_1d_points, + bounds=lat_1d_bounds, + has_bounds=mock.Mock(return_value=True)) + self.lat_1d_no_bounds = mock.Mock( + iris.coords.DimCoord, + standard_name='latitude', + ndim=1, + points=lat_1d_points, + has_bounds=mock.Mock(return_value=False), + bounds=lat_1d_bounds, + guess_bounds=mock.Mock()) + self.lon_1d = mock.Mock( + iris.coords.DimCoord, + standard_name='longitude', + long_name='longitude', + ndim=1, + points=lon_1d_points, + bounds=lon_1d_bounds, + has_bounds=mock.Mock(return_value=True), + circular=True) + self.lon_1d_aux = mock.Mock( + iris.coords.AuxCoord, + standard_name='longitude', + long_name='longitude', + ndim=1, + shape=lon_1d_points.shape, + points=lon_1d_points, + bounds=lon_1d_bounds, + has_bounds=mock.Mock(return_value=True)) + self.lat_2d = mock.Mock( + iris.coords.AuxCoord, + standard_name='latitude', + long_name='latitude', + ndim=2, + points=lat_2d_points, + bounds=lat_2d_bounds, + has_bounds=mock.Mock(return_value=True)) + self.lon_2d = mock.Mock( + iris.coords.AuxCoord, + standard_name='longitude', + long_name='longitude', + ndim=2, + points=lon_2d_points, + bounds=lon_2d_bounds, + has_bounds=mock.Mock(return_value=True)) + self.lon_2d_non_circular = mock.Mock( + iris.coords.AuxCoord, + standard_name='longitude', + ndim=2, + points=lon_2d_points[:, 1:-1], + bounds=lon_2d_bounds[:, 1:-1], + has_bounds=mock.Mock(return_value=True)) + self.lat_3d = mock.Mock( + iris.coords.AuxCoord, + standard_name='latitude', + long_name='latitude', + ndim=3) + self.lon_3d = mock.Mock( + iris.coords.AuxCoord, + standard_name='longitude', + long_name='longitude', + ndim=3) + depth_pre_bounds = np.linspace(0, 5000, 5) + depth_bounds = np.stack([depth_pre_bounds[:-1], depth_pre_bounds[1:]], + axis=1) + depth_points = depth_bounds.mean(axis=1) + self.depth = mock.Mock( + iris.coords.DimCoord, + standard_name='depth', + long_name='depth', + ndim=1, + shape=depth_points.shape, + points=depth_points, + bounds=depth_bounds, + has_bounds=mock.Mock(return_value=True)) + data_shape = lon_2d_points.shape + raw_data = np.arange(np.prod(data_shape)).reshape(data_shape) + mask = np.zeros(data_shape) + mask[:data_shape[0] // 2] = True + self.data = np.ma.masked_array(raw_data, mask) + self.data_3d = np.repeat( + self.data[..., np.newaxis], depth_points.shape[0], axis=-1) + self.expected_esmpy_lat = np.array([[-67.5, -22.5, 22.5, 67.5], + [-67.5, -22.5, 22.5, 67.5], + [-67.5, -22.5, 22.5, 67.5], + [-67.5, -22.5, 22.5, 67.5]]) + self.expected_esmpy_lon = np.array([[45., 45., 45., 45.], + [135., 135., 135., 135.], + [225., 225., 225., 225.], + [315., 315., 315., 315.]]) + self.expected_esmpy_lat_corners = np.array([[-90., -45., 0., 45., 90.], + [-90., -45., 0., 45., 90.], + [-90., -45., 0., 45., 90.], + [-90., -45., 0., 45., 90.], + [-90., 
-45., 0., 45., + 90.]]) + self.expected_esmpy_lon_corners = np.array( + [[0., 0., 0., 0., 0.], [90., 90., 90., 90., 90.], + [180., 180., 180., 180., 180.], [270., 270., 270., 270., 270.], + [360., 360., 360., 360., 360.]]) + self.coords = { + 'latitude': self.lat_2d, + 'longitude': self.lon_2d, + 'depth': self.depth + } + self.coord_dims = { + 'latitude': (0, 1), + 'longitude': (0, 1), + self.lat_2d: (0, 1), + self.lon_2d: (0, 1), + } + + def coord(name=None, axis=None): + """Return selected coordinate for mock cube.""" + if axis == 'Z': + raise CoordinateNotFoundError() + return self.coords[name] + + def coords(dim_coords=None): + """Return coordinates for mock cube.""" + if dim_coords: + return [] + return list(self.coords.values()) + + self.cube = mock.Mock( + spec=iris.cube.Cube, + dtype=np.float32, + long_name='longname', + ndim=2, + shape=self.data.shape, + data=self.data, + coord=coord, + coord_dims=lambda name: self.coord_dims[name], + coords=coords, + ) + self.cube.__getitem__ = mock.Mock(return_value=self.cube) + self.unmasked_cube = mock.Mock( + spec=iris.cube.Cube, + dtype=np.float32, + long_name='longname', + ) + self.coord_dims_3d = { + 'latitude': (1, 2), + 'longitude': (1, 2), + self.lat_2d: (1, 2), + self.lon_2d: (1, 2), + 'depth': (0, ), + self.depth: (0, ), + } + + def coord_3d(name=None, dimensions=None, dim_coords=None, axis=None): + """Return coord for 3d mock cube.""" + # pylint: disable=unused-argument + if axis == 'Z' or dimensions == [0]: + return self.coords['depth'] + return self.coords[name] + + self.cube_3d = mock.Mock( + spec=iris.cube.Cube, + dtype=np.float32, + standard_name=None, + long_name='longname', + var_name='ln', + units=cf_units.Unit('1'), + attributes={}, + cell_methods=[], + ndim=3, + shape=self.data_3d.shape, + data=self.data_3d, + coord=coord_3d, + coord_dims=lambda name: self.coord_dims_3d[name], + ) + self.cube.__getitem__ = mock.Mock(return_value=self.cube) + + def test_coords_iris_to_esmpy_mismatched_dimensions(self): + """Test coord conversion with mismatched dimensions.""" + self.assertRaises(ValueError, coords_iris_to_esmpy, self.lat_1d, + self.lon_2d, True) + + def test_coords_iris_to_esmpy_invalid_dimensions(self): + """Test coord conversion with invalid dimensions.""" + self.assertRaises(NotImplementedError, coords_iris_to_esmpy, + self.lat_3d, self.lon_3d, True) + + def test_coords_iris_to_esmpy_call_guess_bounds(self): + """Test coord conversion with missing bounds.""" + coords_iris_to_esmpy(self.lat_1d_no_bounds, self.lon_1d, True) + self.lat_1d_no_bounds.guess_bounds.assert_called_once() + + def test_coords_iris_to_esmpy_1d_circular(self): + """Test coord conversion with 1d coords and circular longitudes.""" + (esmpy_lat, esmpy_lon, + esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy( + self.lat_1d, self.lon_1d, True) + self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat) + self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon) + self.assertArrayEqual(esmpy_lat_corners, + self.expected_esmpy_lat_corners[:-1]) + self.assertArrayEqual(esmpy_lon_corners, + self.expected_esmpy_lon_corners[:-1]) + + def test_coords_iris_to_esmpy_1d_non_circular(self): + """Test coord conversion with 1d coords and non circular longitudes.""" + (esmpy_lat, esmpy_lon, + esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy( + self.lat_1d, self.lon_1d, False) + self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat) + self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon) + self.assertArrayEqual(esmpy_lat_corners, + 
+
+    def test_coords_iris_to_esmpy_1d_circular(self):
+        """Test coord conversion with 1d coords and circular longitudes."""
+        (esmpy_lat, esmpy_lon,
+         esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy(
+             self.lat_1d, self.lon_1d, True)
+        self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat)
+        self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon)
+        self.assertArrayEqual(esmpy_lat_corners,
+                              self.expected_esmpy_lat_corners[:-1])
+        self.assertArrayEqual(esmpy_lon_corners,
+                              self.expected_esmpy_lon_corners[:-1])
+
+    def test_coords_iris_to_esmpy_1d_non_circular(self):
+        """Test coord conversion with 1d coords and non-circular longitudes."""
+        (esmpy_lat, esmpy_lon,
+         esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy(
+             self.lat_1d, self.lon_1d, False)
+        self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat)
+        self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon)
+        self.assertArrayEqual(esmpy_lat_corners,
+                              self.expected_esmpy_lat_corners)
+        self.assertArrayEqual(esmpy_lon_corners,
+                              self.expected_esmpy_lon_corners)
+
+    def test_coords_iris_to_esmpy_2d_circular(self):
+        """Test coord conversion with 2d coords and circular longitudes."""
+        (esmpy_lat, esmpy_lon,
+         esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy(
+             self.lat_2d, self.lon_2d, True)
+        self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat)
+        self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon)
+        self.assertArrayEqual(esmpy_lat_corners,
+                              self.expected_esmpy_lat_corners[:-1])
+        self.assertArrayEqual(esmpy_lon_corners,
+                              self.expected_esmpy_lon_corners[:-1])
+
+    def test_coords_iris_to_esmpy_2d_non_circular(self):
+        """Test coord conversion with 2d coords and non-circular longitudes."""
+        (esmpy_lat, esmpy_lon,
+         esmpy_lat_corners, esmpy_lon_corners) = coords_iris_to_esmpy(
+             self.lat_2d, self.lon_2d, False)
+        self.assertArrayEqual(esmpy_lat, self.expected_esmpy_lat)
+        self.assertArrayEqual(esmpy_lon, self.expected_esmpy_lon)
+        self.assertArrayEqual(esmpy_lat_corners,
+                              self.expected_esmpy_lat_corners)
+        self.assertArrayEqual(esmpy_lon_corners,
+                              self.expected_esmpy_lon_corners)
+
+    def test_get_grid_circular(self):
+        """Test building of ESMF grid from cube with circular longitude."""
+        expected_get_coords_calls = [
+            mock.call(0),
+            mock.call(1),
+            mock.call(0, staggerloc=mock.sentinel.sl_corner),
+            mock.call(1, staggerloc=mock.sentinel.sl_corner),
+        ]
+        with mock.patch('ESMF.Grid', MockGrid) as mg:
+            mg.get_coords.reset_mock()
+            mg.add_coords.reset_mock()
+            mg.add_item.reset_mock()
+            get_grid(self.expected_esmpy_lat, self.expected_esmpy_lon,
+                     self.expected_esmpy_lat_corners[:-1],
+                     self.expected_esmpy_lon_corners[:-1], True)
+            mg.get_coords.assert_has_calls(expected_get_coords_calls)
+            mg.add_coords.assert_called_once_with([mock.sentinel.sl_corner])
+            mg.add_item.assert_called_once_with(mock.sentinel.gi_mask,
+                                                mock.sentinel.sl_center)
+
+    def test_get_grid_non_circular(self):
+        """Test building of ESMF grid from cube with non-circular longitude."""
+        expected_get_coords_calls = [
+            mock.call(0),
+            mock.call(1),
+            mock.call(0, staggerloc=mock.sentinel.sl_corner),
+            mock.call(1, staggerloc=mock.sentinel.sl_corner),
+        ]
+        with mock.patch('ESMF.Grid', MockGrid) as mg:
+            mg.get_coords.reset_mock()
+            mg.add_coords.reset_mock()
+            mg.add_item.reset_mock()
+            get_grid(self.expected_esmpy_lat, self.expected_esmpy_lon,
+                     self.expected_esmpy_lat_corners,
+                     self.expected_esmpy_lon_corners, False)
+            mg.get_coords.assert_has_calls(expected_get_coords_calls)
+            mg.add_coords.assert_called_once_with([mock.sentinel.sl_corner])
+            mg.add_item.assert_called_once_with(mock.sentinel.gi_mask,
+                                                mock.sentinel.sl_center)
+
+    def test_is_lon_circular_dim_coords_true(self):
+        """Test detection of circular longitudes 1d dim coords."""
+        is_circ = is_lon_circular(self.lon_1d)
+        self.assertTrue(is_circ)
+
+    def test_is_lon_circular_dim_coords_false(self):
+        """Test detection of non-circular longitudes 1d dim coords."""
+        self.lon_1d.circular = False
+        is_circ = is_lon_circular(self.lon_1d)
+        self.assertFalse(is_circ)
+
+    def test_is_lon_circular_1d_aux_coords(self):
+        """Test detection of circular longitudes 1d aux coords."""
+        is_circ = is_lon_circular(self.lon_1d_aux)
+        self.assertTrue(is_circ)
+
+    def test_is_lon_circular_invalid_dimension(self):
+        """Test detection of circular longitudes, invalid coordinates."""
+        self.assertRaises(NotImplementedError, is_lon_circular, self.lon_3d)
+
+    def test_is_lon_circular_invalid_argument(self):
+        """Test detection of circular longitudes, invalid argument."""
+        self.assertRaises(ValueError, is_lon_circular, None)
+
+    def test_is_lon_circular_2d_aux_coords(self):
+        """Test detection of circular longitudes 2d aux coords."""
+        is_circ = is_lon_circular(self.lon_2d)
+        self.assertTrue(is_circ)
+
+    def test_is_lon_circular_2d_aux_coords_non_circ(self):
+        """Test detection of non-circular longitudes 2d aux coords."""
+        is_circ = is_lon_circular(self.lon_2d_non_circular)
+        self.assertFalse(is_circ)
+
+    @mock.patch('ESMF.Grid', MockGrid)
+    @mock.patch('ESMF.Field')
+    def test_cube_to_empty_field(self, mock_field):
+        """Test building of empty field from iris cube."""
+        field = cube_to_empty_field(self.cube)
+        self.assertEqual(mock_field.return_value, field)
+        mock_field.assert_called_once()
+        ckwargs = mock_field.call_args[1]
+        self.assertEqual('longname', ckwargs['name'])
+        self.assertEqual(mock.sentinel.sl_center, ckwargs['staggerloc'])
+
+    def test_get_representant(self):
+        """Test extraction of horizontal representant from iris cube."""
+        horizontal_slice = ['latitude', 'longitude']
+        get_representant(self.cube, horizontal_slice)
+        self.cube.__getitem__.assert_called_once_with((slice(None, None, None),
+                                                       slice(None, None,
+                                                             None)))
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.cube_to_empty_field',
+                mock_cube_to_empty_field)
+    @mock.patch('ESMF.Regrid')
+    def test_build_regridder_2d_unmasked_data(self, mock_regrid):
+        """Test building of 2d regridder for unmasked data."""
+        self.cube.data = self.cube.data.data
+        self.cube.field = mock.Mock()
+        mock.sentinel.dst_rep.field = mock.Mock()
+        build_regridder_2d(self.cube, mock.sentinel.dst_rep,
+                           mock.sentinel.regrid_method, .99)
+        expected_kwargs = {
+            'src_mask_values': np.array([1]),
+            'dst_mask_values': np.array([1]),
+            'regrid_method': mock.sentinel.regrid_method,
+            'srcfield': self.cube.field,
+            'dstfield': mock.sentinel.dst_rep.field,
+            'unmapped_action': mock.sentinel.ua_ignore,
+            'ignore_degenerate': True,
+        }
+        mock_regrid.assert_called_once_with(**expected_kwargs)
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.cube_to_empty_field',
+                mock_cube_to_empty_field)
+    @mock.patch('ESMF.Regrid')
+    def test_build_regridder_2d_masked_data(self, mock_regrid):
+        """Test building of 2d regridder for masked data."""
+        mock_regrid.return_value = mock.Mock(
+            return_value=mock.Mock(data=self.data.T))
+        regrid_method = mock.sentinel.rm_bilinear
+        src_rep = mock.MagicMock(data=self.data)
+        dst_rep = mock.MagicMock()
+        src_rep.field = mock.MagicMock(data=self.data.copy())
+        dst_rep.field = mock.MagicMock()
+        build_regridder_2d(src_rep, dst_rep, regrid_method, .99)
+        expected_calls = [
+            mock.call(
+                src_mask_values=np.array([]),
+                dst_mask_values=np.array([]),
+                srcfield=src_rep.field,
+                dstfield=dst_rep.field,
+                unmapped_action=mock.sentinel.ua_ignore,
+                ignore_degenerate=True,
+                regrid_method=regrid_method),
+            mock.call(
+                src_mask_values=np.array([1]),
+                dst_mask_values=np.array([1]),
+                regrid_method=regrid_method,
+                srcfield=src_rep.field,
+                dstfield=dst_rep.field,
+                unmapped_action=mock.sentinel.ua_ignore,
+                ignore_degenerate=True),
+        ]
+        kwargs = mock_regrid.call_args_list[0][-1]
+        expected_kwargs = expected_calls[0][-1]
+        self.assertEqual(expected_kwargs.keys(), kwargs.keys())
+        array_keys = set(['src_mask_values', 'dst_mask_values'])
+        for key in kwargs.keys():
+            if key in array_keys:
+                self.assertTrue((expected_kwargs[key] == kwargs[key]).all())
+            else:
+                self.assertEqual(expected_kwargs[key], kwargs[key])
+        self.assertTrue(mock_regrid.call_args_list[1] == expected_calls[1])
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.cube_to_empty_field',
+                mock_cube_to_empty_field)
+    @mock.patch('ESMF.Regrid')
+    def test_regridder_2d_unmasked_data(self, mock_regrid):
+        """Test regridder for unmasked 2d data."""
+        field_regridder = mock.Mock(return_value=mock.Mock(data=self.data.T))
+        mock_regrid.return_value = field_regridder
+        regrid_method = mock.sentinel.rm_bilinear
+        src_rep = mock.MagicMock(data=self.data, dtype=np.float32)
+        dst_rep = mock.MagicMock(shape=(4, 4))
+        regridder = build_regridder_2d(src_rep, dst_rep, regrid_method, .99)
+        field_regridder.reset_mock()
+        regridder(src_rep)
+        field_regridder.assert_called_once_with(src_rep.field, dst_rep.field)
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.cube_to_empty_field',
+                mock_cube_to_empty_field)
+    @mock.patch('ESMF.Regrid')
+    def test_regridder_2d_masked_data(self, mock_regrid):
+        """Test regridder for masked 2d data."""
+        field_regridder = mock.Mock(return_value=mock.Mock(data=self.data.T))
+        mock_regrid.return_value = field_regridder
+        regrid_method = mock.sentinel.rm_bilinear
+        src_rep = mock.MagicMock(data=self.data)
+        dst_rep = mock.MagicMock(shape=(4, 4))
+        regridder = build_regridder_2d(src_rep, dst_rep, regrid_method, .99)
+        field_regridder.reset_mock()
+        regridder(self.cube)
+        field_regridder.assert_called_once_with(src_rep.field, dst_rep.field)
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder_3d')
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder_2d')
+    def test_build_regridder_2(self, mock_regridder_2d, mock_regridder_3d):
+        """Test build regridder for 2d data."""
+        # pylint: disable=no-self-use
+        src_rep = mock.Mock(ndim=2)
+        dst_rep = mock.Mock(ndim=2)
+        build_regridder(src_rep, dst_rep, 'nearest')
+        mock_regridder_2d.assert_called_once_with(
+            src_rep, dst_rep, mock.sentinel.rm_nearest_stod, .99)
+        mock_regridder_3d.assert_not_called()
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder_3d')
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder_2d')
+    def test_build_regridder_3(self, mock_regridder_2d, mock_regridder_3d):
+        """Test build regridder for 3d data."""
+        # pylint: disable=no-self-use
+        src_rep = mock.Mock(ndim=3)
+        dst_rep = mock.Mock(ndim=3)
+        build_regridder(src_rep, dst_rep, 'nearest')
+        mock_regridder_3d.assert_called_once_with(
+            src_rep, dst_rep, mock.sentinel.rm_nearest_stod, .99)
+        mock_regridder_2d.assert_not_called()
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_representant')
+    def test_get_grid_representant_2d(self, mock_get_representant):
+        """Test extraction of 2d grid representant from a 2d cube."""
+        mock_get_representant.return_value = mock.sentinel.ret
+        ret = get_grid_representant(self.cube)
+        self.assertEqual(mock.sentinel.ret, ret)
+        mock_get_representant.assert_called_once_with(
+            self.cube, ['latitude', 'longitude'])
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_representant')
+    def test_get_grid_representant_2d_horiz_only(self, mock_get_representant):
+        """Test extraction of forced 2d grid representant from 2d cube."""
+        mock_get_representant.return_value = mock.sentinel.ret
+        ret = get_grid_representant(self.cube, True)
+        self.assertEqual(mock.sentinel.ret, ret)
+        mock_get_representant.assert_called_once_with(
+            self.cube, ['latitude', 'longitude'])
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_representant')
+    def test_get_grid_representant_3d(self, mock_get_representant):
+        """Test extraction of 3d grid representant from a 3d cube."""
+        mock_get_representant.return_value = mock.sentinel.ret
+        ret = get_grid_representant(self.cube_3d)
+        self.assertEqual(mock.sentinel.ret, ret)
+        mock_get_representant.assert_called_once_with(
+            self.cube_3d, [self.depth, 'latitude', 'longitude'])
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_representant')
+    def test_get_grid_representant_3d_horiz_only(self, mock_get_representant):
+        """Test extraction of 2d grid representant from a 3d cube."""
+        mock_get_representant.return_value = mock.sentinel.ret
+        ret = get_grid_representant(self.cube_3d, True)
+        self.assertEqual(mock.sentinel.ret, ret)
+        mock_get_representant.assert_called_once_with(
+            self.cube_3d, ['latitude', 'longitude'])
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_grid_representant',
+                mock.Mock(side_effect=identity))
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_empty_data')
+    @mock.patch('iris.cube.Cube')
+    def test_get_grid_representants_3d_src(self, mock_cube,
+                                           mock_get_empty_data):
+        """Test extraction of grid representants from a 3d cube."""
+        src = self.cube_3d
+        mock_get_empty_data.return_value = mock.sentinel.empty_data
+        src_rep = get_grid_representants(src, self.cube)[0]
+        self.assertEqual(src, src_rep)
+        mock_cube.assert_called_once_with(
+            data=mock.sentinel.empty_data,
+            standard_name=src.standard_name,
+            long_name=src.long_name,
+            var_name=src.var_name,
+            units=src.units,
+            attributes=src.attributes,
+            cell_methods=src.cell_methods,
+            dim_coords_and_dims=[(self.depth, 0)],
+        )
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_grid_representant',
+                mock.Mock(side_effect=identity))
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_empty_data')
+    @mock.patch('iris.cube.Cube')
+    def test_get_grid_representants_2d_src(self, mock_cube,
+                                           mock_get_empty_data):
+        """Test extraction of grid representants from a 2d cube."""
+        src = self.cube
+        mock_get_empty_data.return_value = mock.sentinel.empty_data
+        src_rep = get_grid_representants(src, self.cube)[0]
+        self.assertEqual(src, src_rep)
+        mock_cube.assert_called_once_with(
+            data=mock.sentinel.empty_data,
+            standard_name=src.standard_name,
+            long_name=src.long_name,
+            var_name=src.var_name,
+            units=src.units,
+            attributes=src.attributes,
+            cell_methods=src.cell_methods,
+            dim_coords_and_dims=[],
+        )
+
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.map_slices')
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.build_regridder')
+    @mock.patch('esmvaltool.preprocessor._regrid_esmpy.get_grid_representants',
+                mock.Mock(side_effect=identity))
+    def test_regrid(self, mock_build_regridder, mock_map_slices):
+        """Test full regrid method."""
+        mock_build_regridder.return_value = mock.sentinel.regridder
+        mock_map_slices.return_value = mock.sentinel.regridded
+        regrid(self.cube_3d, self.cube)
+        mock_build_regridder.assert_called_once_with(self.cube_3d, self.cube,
+                                                     'linear')
+        mock_map_slices.assert_called_once_with(
+            self.cube_3d, mock.sentinel.regridder, self.cube_3d, self.cube)
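These tests never touch a real ESMF installation: mock.patch('ESMF.Grid', MockGrid) swaps the class out, and mock.sentinel objects stand in for ESMF constants such as the corner stagger location, so the assertions only verify that arguments are wired through correctly. A minimal self-contained sketch of that pattern (the make_grid helper below is hypothetical, not code from this diff):

    from unittest import mock

    def make_grid(esmf):
        # Production code would pass a stagger-location constant through
        # to the backend; the test only verifies that wiring.
        return esmf.Grid(staggerloc=esmf.StaggerLoc.CORNER)

    fake_esmf = mock.Mock()
    fake_esmf.StaggerLoc.CORNER = mock.sentinel.sl_corner
    make_grid(fake_esmf)
    fake_esmf.Grid.assert_called_once_with(staggerloc=mock.sentinel.sl_corner)

Because mock.sentinel attributes compare equal only to themselves, a test written this way fails if the code under test passes any other constant, without ever importing the heavyweight library.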
diff --git a/tests/unit/preprocessor/_time/__init__.py b/tests/unit/preprocessor/_time/__init__.py
new file mode 100644
index 0000000000..720cc45c94
--- /dev/null
+++ b/tests/unit/preprocessor/_time/__init__.py
@@ -0,0 +1 @@
+"""Test suite for _time module."""
diff --git a/tests/unit/preprocessor/_time/test_time.py b/tests/unit/preprocessor/_time/test_time.py
new file mode 100644
index 0000000000..bcb635d700
--- /dev/null
+++ b/tests/unit/preprocessor/_time/test_time.py
@@ -0,0 +1,332 @@
+"""Unit tests for the :func:`esmvaltool.preprocessor._time` module."""
+
+import unittest
+
+import iris
+import iris.coord_categorisation
+import iris.coords
+import numpy as np
+import pytest
+from cf_units import Unit
+from iris.cube import Cube
+from numpy.testing import assert_array_equal
+
+import tests
+from esmvaltool.preprocessor._time import (annual_mean, extract_month,
+                                           extract_season, extract_time,
+                                           regrid_time, time_average)
+
+
+def _create_sample_cube():
+    cube = Cube(np.arange(1, 25), var_name='co2', units='J')
+    cube.add_dim_coord(
+        iris.coords.DimCoord(
+            np.arange(15., 720., 30.),
+            standard_name='time',
+            units=Unit('days since 1950-01-01 00:00:00', calendar='gregorian'),
+        ),
+        0,
+    )
+    iris.coord_categorisation.add_month_number(cube, 'time')
+    return cube
+
+
+def add_auxiliary_coordinate(cubeList):
+    """Add AuxCoords to cubes in cubeList."""
+    for cube in cubeList:
+        iris.coord_categorisation.add_day_of_month(cube, cube.coord('time'))
+        iris.coord_categorisation.add_day_of_year(cube, cube.coord('time'))
+
+
+class TestExtractMonth(tests.Test):
+    """Tests for extract_month."""
+
+    def setUp(self):
+        """Prepare tests"""
+        self.cube = _create_sample_cube()
+
+    def test_get_january(self):
+        """Test January extraction"""
+        sliced = extract_month(self.cube, 1)
+        print(sliced)
+        assert_array_equal(
+            np.array([1, 1]),
+            sliced.coord('month_number').points)
+
+
+class TestTimeSlice(tests.Test):
+    """Tests for extract_time."""
+
+    def setUp(self):
+        """Prepare tests"""
+        self.cube = _create_sample_cube()
+
+    def test_extract_time(self):
+        """Test extract_time."""
+        sliced = extract_time(self.cube, 1950, 1, 1, 1950, 12, 31)
+        print(sliced)
+        assert_array_equal(
+            np.arange(1, 13, 1),
+            sliced.coord('month_number').points)
+
+    def test_extract_time_no_slice(self):
+        """Test fail of extract_time."""
+        with self.assertRaises(ValueError):
+            extract_time(self.cube, 2200, 1, 1, 2200, 12, 31)
+
+    def test_extract_time_one_time(self):
+        """Test extract_time with one time step."""
+        cube = _create_sample_cube()
+        cube = cube.collapsed('time', iris.analysis.MEAN)
+        sliced = extract_time(cube, 1950, 1, 1, 1952, 12, 31)
+        print(sliced)
+        assert_array_equal(np.array([360.]), sliced.coord('time').points)
+
+    def test_extract_time_no_time(self):
+        """Test extract_time with no time step."""
+        cube = _create_sample_cube()[0]
+        sliced = extract_time(cube, 1950, 1, 1, 1950, 12, 31)
+        print('sliced', sliced, sliced.shape)
+        print('cube', cube, cube.shape)
+        assert cube == sliced
+
+
+class TestExtractSeason(tests.Test):
+    """Tests for extract_season."""
+
+    def setUp(self):
+        """Prepare tests"""
+        self.cube = _create_sample_cube()
+
+    def test_get_djf(self):
+        """Test function for winter"""
+        sliced = extract_season(self.cube, 'djf')
+        print(sliced)
+        assert_array_equal(
+            np.array([1, 2, 12, 1, 2, 12]),
+            sliced.coord('month_number').points)
+
+    def test_get_djf_caps(self):
+        """Test function works when season specified in caps"""
+        sliced = extract_season(self.cube, 'DJF')
+        print(sliced)
+        assert_array_equal(
+            np.array([1, 2, 12, 1, 2, 12]),
+            sliced.coord('month_number').points)
+
+    def test_get_mam(self):
+        """Test function for spring"""
+        sliced = extract_season(self.cube, 'mam')
+        print(sliced)
+        assert_array_equal(
+            np.array([3, 4, 5, 3, 4, 5]),
+            sliced.coord('month_number').points)
+
+    def test_get_jja(self):
+        """Test function for summer"""
+        sliced = extract_season(self.cube, 'jja')
+        print(sliced)
+        assert_array_equal(
+            np.array([6, 7, 8, 6, 7, 8]),
+            sliced.coord('month_number').points)
+
+    def test_get_son(self):
+        """Test function for autumn"""
+        sliced = extract_season(self.cube, 'son')
+        print(sliced)
+        assert_array_equal(
+            np.array([9, 10, 11, 9, 10, 11]),
+            sliced.coord('month_number').points)
+
+
+class TestTimeAverage(tests.Test):
+    """Test class for the :func:`esmvaltool.preprocessor._time` module"""
+
+    def test_time_average(self):
+        """Test for time average of a 1D field."""
+        data = np.ones((3))
+        times = np.array([15., 45., 75.])
+        bounds = np.array([[0., 30.], [30., 60.], [60., 90.]])
+        time = iris.coords.DimCoord(
+            times,
+            bounds=bounds,
+            standard_name='time',
+            units=Unit('days since 1950-01-01', calendar='gregorian'))
+        cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
+
+        result = time_average(cube)
+        expected = np.array([1.])
+        assert_array_equal(result.data, expected)
+
+    def test_time_average_uneven(self):
+        """Test for time average of a 1D field with uneven time boundaries."""
+        data = np.array([1., 5.])
+        times = np.array([5., 25.])
+        bounds = np.array([[0., 1.], [1., 4.]])
+        time = iris.coords.DimCoord(
+            times,
+            bounds=bounds,
+            standard_name='time',
+            units=Unit('days since 1950-01-01', calendar='gregorian'))
+        cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
+
+        result = time_average(cube)
+        expected = np.array([4.])
+        assert_array_equal(result.data, expected)
+
+    def test_time_average_365_day(self):
+        """Test for time avg of a realistic time axis and 365 day calendar"""
+        data = np.ones((6, ))
+        times = np.array([15, 45, 74, 105, 135, 166])
+        bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151],
+                           [151, 181]])
+        time = iris.coords.DimCoord(
+            times,
+            bounds=bounds,
+            standard_name='time',
+            var_name='time',
+            units=Unit('days since 1950-01-01', calendar='365_day'))
+        cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
+
+        result = time_average(cube)
+        expected = np.array([1.])
+        assert_array_equal(result.data, expected)
+
+
+class TestRegridTimeMonthly(tests.Test):
+    """Tests for regrid_time with monthly frequency."""
+
+    def setUp(self):
+        """Prepare tests"""
+        self.cube_1 = _create_sample_cube()
+        self.cube_2 = _create_sample_cube()
+        self.cube_2.data = self.cube_2.data * 2.
+        self.cube_2.remove_coord('time')
+        self.cube_2.add_dim_coord(
+            iris.coords.DimCoord(
+                np.arange(14., 719., 30.),
+                standard_name='time',
+                units=Unit(
+                    'days since 1950-01-01 00:00:00', calendar='360_day'),
+            ),
+            0,
+        )
+        add_auxiliary_coordinate([self.cube_1, self.cube_2])
+
+    def test_regrid_time_mon(self):
+        """Test changes to cubes."""
+        # test monthly
+        newcube_1 = regrid_time(self.cube_1, frequency='mon')
+        newcube_2 = regrid_time(self.cube_2, frequency='mon')
+        # no changes to core data
+        assert_array_equal(newcube_1.data, self.cube_1.data)
+        assert_array_equal(newcube_2.data, self.cube_2.data)
+        # no changes to number of coords and aux_coords
+        assert len(newcube_1.coords()) == len(self.cube_1.coords())
+        assert len(newcube_1.aux_coords) == len(self.cube_1.aux_coords)
+        # the difference of the two cubes should equal the original data
+        expected = self.cube_1.data
+        diff_cube = newcube_2 - newcube_1
+        assert_array_equal(diff_cube.data, expected)
+
+
+class TestRegridTimeDaily(tests.Test):
+    """Tests for regrid_time with daily frequency."""
+
+    def setUp(self):
+        """Prepare tests"""
+        self.cube_1 = _create_sample_cube()
+        self.cube_2 = _create_sample_cube()
+        self.cube_2.data = self.cube_2.data * 2.
+        self.cube_1.remove_coord('time')
+        self.cube_2.remove_coord('time')
+        self.cube_1.add_dim_coord(
+            iris.coords.DimCoord(
+                np.arange(14. * 24. + 6., 38. * 24. + 6., 24.),
+                standard_name='time',
+                units=Unit(
+                    'hours since 1950-01-01 00:00:00', calendar='360_day'),
+            ),
+            0,
+        )
+        self.cube_2.add_dim_coord(
+            iris.coords.DimCoord(
+                np.arange(14. * 24. + 3., 38. * 24. + 3., 24.),
+                standard_name='time',
+                units=Unit(
+                    'hours since 1950-01-01 00:00:00', calendar='360_day'),
+            ),
+            0,
+        )
+        add_auxiliary_coordinate([self.cube_1, self.cube_2])
+
+    def test_regrid_time_day(self):
+        """Test changes to cubes."""
+        # test daily
+        newcube_1 = regrid_time(self.cube_1, frequency='day')
+        newcube_2 = regrid_time(self.cube_2, frequency='day')
+        # no changes to core data
+        self.assertArrayEqual(newcube_1.data, self.cube_1.data)
+        self.assertArrayEqual(newcube_2.data, self.cube_2.data)
+        # no changes to number of coords and aux_coords
+        assert len(newcube_1.coords()) == len(self.cube_1.coords())
+        assert len(newcube_1.aux_coords) == len(self.cube_1.aux_coords)
+        # the difference of the two cubes should equal the original data
+        expected = self.cube_1.data
+        diff_cube = newcube_2 - newcube_1
+        self.assertArrayEqual(diff_cube.data, expected)
+
+
+def make_time_series(number_years=2):
+    """Make a cube with time only dimension."""
+    times = np.array([i * 30 + 15 for i in range(0, 12 * number_years, 1)])
+    bounds = np.array([i * 30 for i in range(0, 12 * number_years + 1, 1)])
+    bounds = np.array(
+        [[bnd, bounds[index + 1]] for index, bnd in enumerate(bounds[:-1])])
+    data = np.ones_like(times)
+    time = iris.coords.DimCoord(
+        times,
+        bounds=bounds,
+        standard_name='time',
+        units=Unit('days since 1950-01-01', calendar='360_day'))
+    cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)])
+    return cube
+
+
+@pytest.mark.parametrize('existing_coord', [True, False])
+def test_annual_average(existing_coord):
+    """Test for annual average."""
+    cube = make_time_series(number_years=2)
+    if existing_coord:
+        iris.coord_categorisation.add_year(cube, 'time')
+
+    result = annual_mean(cube, decadal=False)
+    expected = np.array([1., 1.])
+    assert_array_equal(result.data, expected)
+    expected_time = np.array([180., 540.])
+    assert_array_equal(result.coord('time').points, expected_time)
+
+
+@pytest.mark.parametrize('existing_coord', [True, False])
+def test_decadal_average(existing_coord):
+    """Test for decadal average."""
+    cube = make_time_series(number_years=20)
+    if existing_coord:
+
+        def get_decade(coord, value):
+            """Callback function to get decades from cube."""
+            date = coord.units.num2date(value)
+            return date.year - date.year % 10
+
+        iris.coord_categorisation.add_categorised_coord(
+            cube, 'decade', 'time', get_decade)
+
+    result = annual_mean(cube, decadal=True)
+    expected = np.array([1., 1.])
+    assert_array_equal(result.data, expected)
+    expected_time = np.array([1800., 5400.])
+    assert_array_equal(result.coord('time').points, expected_time)
+
+
+if __name__ == '__main__':
+    unittest.main()
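time_average is expected to weight each value by the width of its time bounds: in test_time_average_uneven the result is (1*1 + 5*3) / (1 + 3) = 4. That arithmetic can be checked independently of iris with plain numpy (a sketch of the expectation, not the preprocessor's implementation):

    import numpy as np

    bounds = np.array([[0., 1.], [1., 4.]])  # interval widths 1 and 3 days
    data = np.array([1., 5.])
    weights = bounds[:, 1] - bounds[:, 0]
    print(np.average(data, weights=weights))  # 4.0, the expected value

The same logic explains why the evenly bounded and 365-day calendar cases simply return 1: constant data stays constant under any positive weighting.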
+0,0 @@ -"""Unit tests for the :func:`esmvaltool.preprocessor._time_pp` module""" - -from __future__ import absolute_import, division, print_function - -import unittest - -import iris -import iris.coord_categorisation -import iris.coords -import numpy as np -from cf_units import Unit -from iris.cube import Cube - -import tests -from esmvaltool.preprocessor._time_area import (extract_month, extract_season, - time_average) - - -def _create_sample_cube(): - cube = Cube(np.arange(1, 25), var_name='co2', units='J') - cube.add_dim_coord( - iris.coords.DimCoord( - np.arange(15., 720., 30.), - standard_name='time', - units=Unit('days since 1950-01-01', calendar='gregorian')), 0) - iris.coord_categorisation.add_month_number(cube, 'time') - return cube - - -class TestExtractMonth(tests.Test): - """Tests for extract_month`.""" - - def setUp(self): - """Prepare tests""" - self.cube = _create_sample_cube() - - def test_get_january(self): - """Test january extraction""" - sliced = extract_month(self.cube, 1) - print(sliced) - self.assertTrue( - (np.array([1, 1]) == sliced.coord('month_number').points).all()) - - -class TestExtractSeason(tests.Test): - """Tests for extract_season""" - - def setUp(self): - """Prepare tests""" - self.cube = _create_sample_cube() - - def test_get_djf(self): - """Test function for winter""" - sliced = extract_season(self.cube, 'djf') - print(sliced) - self.assertTrue( - (np.array([1, 2, 12, 1, 2, - 12]) == sliced.coord('month_number').points).all()) - - def test_get_djf_caps(self): - """Test function works when season specified in caps""" - sliced = extract_season(self.cube, 'DJF') - print(sliced) - self.assertTrue( - (np.array([1, 2, 12, 1, 2, - 12]) == sliced.coord('month_number').points).all()) - - def test_get_mam(self): - """Test function for spring""" - sliced = extract_season(self.cube, 'mam') - print(sliced) - self.assertTrue((np.array( - [3, 4, 5, 3, 4, 5]) == sliced.coord('month_number').points).all()) - - def test_get_jja(self): - """Test function for summer""" - sliced = extract_season(self.cube, 'jja') - print(sliced) - self.assertTrue((np.array( - [6, 7, 8, 6, 7, 8]) == sliced.coord('month_number').points).all()) - - def test_get_son(self): - """Test function for summer""" - sliced = extract_season(self.cube, 'son') - print(sliced) - self.assertTrue( - (np.array([9, 10, 11, 9, 10, - 11]) == sliced.coord('month_number').points).all()) - - -class TestTimeAverage(tests.Test): - """Test class for the :func:`esmvaltool.preprocessor._time_pp` module""" - - def test_time_average(self): - """Test for time average of a 1D field.""" - data = np.ones((3)) - cube = iris.cube.Cube(data) - - times = np.array([15., 45., 75.]) - bounds = np.array([[0., 30.], [30., 60.], [60., 90.]]) - time = iris.coords.DimCoord( - times, - bounds=bounds, - standard_name='time', - units=Unit('days since 1950-01-01', calendar='gregorian')) - cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)]) - - result = time_average(cube) - expected = np.array([1.]) - self.assertArrayEqual(result.data, expected) - - def test_time_average_uneven(self): - """Test for time average of a 1D field with uneven time boundaries.""" - data = np.array([1., 5.]) - cube = iris.cube.Cube(data) - - times = np.array([5., 25.]) - bounds = np.array([[0., 1.], [1., 4.]]) - time = iris.coords.DimCoord( - times, - bounds=bounds, - standard_name='time', - units=Unit('days since 1950-01-01', calendar='gregorian')) - cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)]) - - result = time_average(cube) - expected = 
np.array([4.]) - self.assertArrayEqual(result.data, expected) - - def test_time_average_365_day(self): - """Test for time avg of a realisitc time axis and 365 day calendar""" - data = np.ones((6, )) - cube = iris.cube.Cube(data) - - times = np.array([15, 45, 74, 105, 135, 166]) - bounds = np.array([[0, 31], [31, 59], [59, 90], [90, 120], [120, 151], - [151, 181]]) - time = iris.coords.DimCoord( - times, - bounds=bounds, - standard_name='time', - var_name='time', - units=Unit('days since 1950-01-01', calendar='365_day')) - cube = iris.cube.Cube(data, dim_coords_and_dims=[(time, 0)]) - - result = time_average(cube) - expected = np.array([1.]) - self.assertArrayEqual(result.data, expected) - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/unit/preprocessor/_volume/__init__.py b/tests/unit/preprocessor/_volume/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/preprocessor/_volume/test_volume.py b/tests/unit/preprocessor/_volume/test_volume.py new file mode 100644 index 0000000000..59071b2925 --- /dev/null +++ b/tests/unit/preprocessor/_volume/test_volume.py @@ -0,0 +1,136 @@ +"""Unit test for :func:`esmvaltool.preprocessor._volume`.""" + +import unittest + +import iris +import numpy as np +from cf_units import Unit + +import tests +from esmvaltool.preprocessor._volume import (average_volume, depth_integration, + extract_trajectory, + extract_transect, extract_volume) + + +class Test(tests.Test): + """Test class for _volume_pp""" + + def setUp(self): + """Prepare tests""" + coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS) + data1 = np.ones((3, 2, 2)) + data2 = np.ma.ones((2, 3, 2, 2)) + data3 = np.ma.ones((4, 3, 2, 2)) + mask3 = np.full((4, 3, 2, 2), False) + mask3[0, 0, 0, 0] = True + data3 = np.ma.array(data3, mask=mask3) + + time = iris.coords.DimCoord([15, 45], + standard_name='time', + bounds=[[1., 30.], [30., 60.]], + units=Unit( + 'days since 1950-01-01', + calendar='gregorian')) + time2 = iris.coords.DimCoord([1., 2., 3., 4.], + standard_name='time', + bounds=[ + [0.5, 1.5], + [1.5, 2.5], + [2.5, 3.5], + [3.5, 4.5], + ], + units=Unit( + 'days since 1950-01-01', + calendar='gregorian')) + + zcoord = iris.coords.DimCoord([0.5, 5., 50.], + long_name='zcoord', + bounds=[[0., 2.5], [2.5, 25.], + [25., 250.]], + units='m', + attributes={'positive': 'down'}) + lons2 = iris.coords.DimCoord([1.5, 2.5], + standard_name='longitude', + bounds=[[1., 2.], [2., 3.]], + units='degrees_east', + coord_system=coord_sys) + lats2 = iris.coords.DimCoord([1.5, 2.5], + standard_name='latitude', + bounds=[[1., 2.], [2., 3.]], + units='degrees_north', + coord_system=coord_sys) + + coords_spec3 = [(zcoord, 0), (lats2, 1), (lons2, 2)] + self.grid_3d = iris.cube.Cube(data1, dim_coords_and_dims=coords_spec3) + + coords_spec4 = [(time, 0), (zcoord, 1), (lats2, 2), (lons2, 3)] + self.grid_4d = iris.cube.Cube(data2, dim_coords_and_dims=coords_spec4) + + coords_spec5 = [(time2, 0), (zcoord, 1), (lats2, 2), (lons2, 3)] + self.grid_4d_2 = iris.cube.Cube( + data3, dim_coords_and_dims=coords_spec5) + + # allow iris to figure out the axis='z' coordinate + iris.util.guess_coord_axis(self.grid_3d.coord('zcoord')) + iris.util.guess_coord_axis(self.grid_4d.coord('zcoord')) + iris.util.guess_coord_axis(self.grid_4d_2.coord('zcoord')) + + def test_extract_volume(self): + """Test to extract the top two layers of a 3 layer depth column.""" + result = extract_volume(self.grid_3d, 0., 10.) 
diff --git a/tests/unit/preprocessor/_volume/__init__.py b/tests/unit/preprocessor/_volume/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/unit/preprocessor/_volume/test_volume.py b/tests/unit/preprocessor/_volume/test_volume.py
new file mode 100644
index 0000000000..59071b2925
--- /dev/null
+++ b/tests/unit/preprocessor/_volume/test_volume.py
@@ -0,0 +1,136 @@
+"""Unit test for :func:`esmvaltool.preprocessor._volume`."""
+
+import unittest
+
+import iris
+import numpy as np
+from cf_units import Unit
+
+import tests
+from esmvaltool.preprocessor._volume import (average_volume, depth_integration,
+                                             extract_trajectory,
+                                             extract_transect, extract_volume)
+
+
+class Test(tests.Test):
+    """Test class for _volume"""
+
+    def setUp(self):
+        """Prepare tests"""
+        coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
+        data1 = np.ones((3, 2, 2))
+        data2 = np.ma.ones((2, 3, 2, 2))
+        data3 = np.ma.ones((4, 3, 2, 2))
+        mask3 = np.full((4, 3, 2, 2), False)
+        mask3[0, 0, 0, 0] = True
+        data3 = np.ma.array(data3, mask=mask3)
+
+        time = iris.coords.DimCoord([15, 45],
+                                    standard_name='time',
+                                    bounds=[[1., 30.], [30., 60.]],
+                                    units=Unit(
+                                        'days since 1950-01-01',
+                                        calendar='gregorian'))
+        time2 = iris.coords.DimCoord([1., 2., 3., 4.],
+                                     standard_name='time',
+                                     bounds=[
+                                         [0.5, 1.5],
+                                         [1.5, 2.5],
+                                         [2.5, 3.5],
+                                         [3.5, 4.5],
+                                     ],
+                                     units=Unit(
+                                         'days since 1950-01-01',
+                                         calendar='gregorian'))
+
+        zcoord = iris.coords.DimCoord([0.5, 5., 50.],
+                                      long_name='zcoord',
+                                      bounds=[[0., 2.5], [2.5, 25.],
+                                              [25., 250.]],
+                                      units='m',
+                                      attributes={'positive': 'down'})
+        lons2 = iris.coords.DimCoord([1.5, 2.5],
+                                     standard_name='longitude',
+                                     bounds=[[1., 2.], [2., 3.]],
+                                     units='degrees_east',
+                                     coord_system=coord_sys)
+        lats2 = iris.coords.DimCoord([1.5, 2.5],
+                                     standard_name='latitude',
+                                     bounds=[[1., 2.], [2., 3.]],
+                                     units='degrees_north',
+                                     coord_system=coord_sys)
+
+        coords_spec3 = [(zcoord, 0), (lats2, 1), (lons2, 2)]
+        self.grid_3d = iris.cube.Cube(data1, dim_coords_and_dims=coords_spec3)
+
+        coords_spec4 = [(time, 0), (zcoord, 1), (lats2, 2), (lons2, 3)]
+        self.grid_4d = iris.cube.Cube(data2, dim_coords_and_dims=coords_spec4)
+
+        coords_spec5 = [(time2, 0), (zcoord, 1), (lats2, 2), (lons2, 3)]
+        self.grid_4d_2 = iris.cube.Cube(
+            data3, dim_coords_and_dims=coords_spec5)
+
+        # allow iris to figure out the axis='z' coordinate
+        iris.util.guess_coord_axis(self.grid_3d.coord('zcoord'))
+        iris.util.guess_coord_axis(self.grid_4d.coord('zcoord'))
+        iris.util.guess_coord_axis(self.grid_4d_2.coord('zcoord'))
+
+    def test_extract_volume(self):
+        """Test to extract the top two layers of a 3 layer depth column."""
+        result = extract_volume(self.grid_3d, 0., 10.)
+        expected = np.ones((2, 2, 2))
+        print(result.data, expected.data)
+        self.assertArrayEqual(result.data, expected)
+
+    def test_average_volume(self):
+        """Test to take the volume weighted average of a (2,3,2,2) cube."""
+        result = average_volume(self.grid_4d, 'latitude', 'longitude')
+        expected = np.array([1., 1.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_average_volume_long(self):
+        """
+        Test to take the volume weighted average of a (4,3,2,2) cube.
+
+        The extra time steps are needed, as the volume average calculation
+        uses different methods for small and large cubes.
+        """
+        result = average_volume(self.grid_4d_2, 'latitude', 'longitude')
+        expected = np.array([1., 1., 1., 1.])
+        self.assertArrayEqual(result.data, expected)
+
+    def test_depth_integration_1d(self):
+        """Test to take the depth integration of a 3 layer cube."""
+        result = depth_integration(self.grid_3d[:, 0, 0])
+        expected = np.ones((1, 1)) * 250.
+        print(result.data, expected.data)
+        self.assertArrayEqual(result.data, expected)
+
+    def test_depth_integration_3d(self):
+        """Test to take the depth integration of a 3 layer cube."""
+        result = depth_integration(self.grid_3d)
+        expected = np.ones((2, 2)) * 250.
+        print(result.data, expected.data)
+        self.assertArrayEqual(result.data, expected)
+
+    def test_extract_transect_latitude(self):
+        """Test to extract a transect from a (3, 2, 2) cube."""
+        result = extract_transect(self.grid_3d, latitude=1.5)
+        expected = np.ones((3, 2))
+        self.assertArrayEqual(result.data, expected)
+
+    def test_extract_transect_longitude(self):
+        """Test to extract a transect from a (3, 2, 2) cube."""
+        result = extract_transect(self.grid_3d, longitude=1.5)
+        expected = np.ones((3, 2))
+        self.assertArrayEqual(result.data, expected)
+
+    def test_extract_trajectory(self):
+        """Test to extract a trajectory from a (3, 2, 2) cube."""
+        result = extract_trajectory(self.grid_3d, [1.5, 2.5], [2., 2.], 2)
+        expected = np.ones((3, 2))
+        self.assertArrayEqual(result.data, expected)
+
+
+if __name__ == '__main__':
+    unittest.main()
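The expected value of 250 in the depth-integration tests follows directly from the zcoord bounds: layer thicknesses of 2.5 m, 22.5 m and 225 m, each multiplied by a data value of 1. A quick numpy check of that arithmetic (a sketch of the expectation, not the _volume implementation):

    import numpy as np

    bounds = np.array([[0., 2.5], [2.5, 25.], [25., 250.]])  # zcoord bounds
    data = np.ones(3)                          # one value per depth layer
    thickness = bounds[:, 1] - bounds[:, 0]    # 2.5, 22.5, 225.0 metres
    print((data * thickness).sum())            # 250.0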
diff --git a/tests/unit/preprocessor/_volume_pp/test_volume_pp.py b/tests/unit/preprocessor/_volume_pp/test_volume_pp.py
deleted file mode 100644
index ef2cdfb62b..0000000000
--- a/tests/unit/preprocessor/_volume_pp/test_volume_pp.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""Unit test for the :func:`esmvaltool.preprocessor._volume_pp` function"""
-
-from __future__ import absolute_import, division, print_function
-
-import unittest
-
-import iris
-import numpy as np
-
-import tests
-from esmvaltool.preprocessor._volume_pp import volume_slice
-from esmvaltool.preprocessor._volume_pp import volume_average
-from esmvaltool.preprocessor._volume_pp import depth_integration
-from esmvaltool.preprocessor._volume_pp import extract_transect
-from esmvaltool.preprocessor._volume_pp import extract_trajectory
-
-
-class Test(tests.Test):
-    """Test class for _volume_pp"""
-
-    def setUp(self):
-        """Prepare tests"""
-        coord_sys = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
-        data2 = np.ones((3, 2, 2))
-
-        lons2 = iris.coords.DimCoord(
-            [1.5, 2.5],
-            standard_name='longitude',
-            bounds=[[1., 2.], [2., 3.]],
-            units='degrees_east',
-            coord_system=coord_sys)
-        lats2 = iris.coords.DimCoord(
-            [1.5, 2.5],
-            standard_name='latitude',
-            bounds=[[1., 2.], [2., 3.]],
-            units='degrees_north',
-            coord_system=coord_sys)
-        depth = iris.coords.DimCoord(
-            [0.5, 5., 50.],
-            standard_name='depth',
-            bounds=[[0., 2.5], [2.5, 25.], [25., 250.]],
-            units='m')
-        coords_spec3 = [(depth, 0), (lats2, 1), (lons2, 2)]
-        self.grid_3d = iris.cube.Cube(data2, dim_coords_and_dims=coords_spec3)
-
-    def test_volume_slice(self):
-        """Test to extract the top two layers of a 3 layer depth column."""
-        result = volume_slice(self.grid_3d, 0., 10.)
-        expected = np.ones((2, 2, 2))
-        print(result.data, expected.data)
-        self.assertArrayEqual(result.data, expected)
-
-    def test_volume_average(self):
-        """Test to take the volume weighted average of a (3,2,2) cube."""
-        result = volume_average(self.grid_3d, 'depth', 'latitude', 'longitude')
-        expected = np.array([1.])
-        self.assertArrayEqual(result.data, expected)
-
-    def test_depth_integration_1d(self):
-        """Test to take the depth integration of a 3 layer cube."""
-        result = depth_integration(self.grid_3d[:, 0, 0], 'depth')
-        expected = np.ones((1, 1)) * 250.
-        print(result.data, expected.data)
-        self.assertArrayEqual(result.data, expected)
-
-    def test_depth_integration_3d(self):
-        """Test to take the depth integration of a 3 layer cube."""
-        result = depth_integration(self.grid_3d, 'depth')
-        expected = np.ones((2, 2)) * 250.
-        print(result.data, expected.data)
-        self.assertArrayEqual(result.data, expected)
-
-    def test_extract_transect_latitude(self):
-        """Test to extract a transect from a (3, 2, 2) cube."""
-        result = extract_transect(self.grid_3d, latitude=1.5)
-        expected = np.ones((3, 2))
-        self.assertArrayEqual(result.data, expected)
-
-    def test_extract_transect_longitude(self):
-        """Test to extract a transect from a (3, 2, 2) cube."""
-        result = extract_transect(self.grid_3d, longitude=1.5)
-        expected = np.ones((3, 2))
-        self.assertArrayEqual(result.data, expected)
-
-    def test_extract_trajectory(self):
-        """Test to extract a trajectory from a (3, 2, 2) cube."""
-        result = extract_trajectory(self.grid_3d, [1.5, 2.5], [2., 2.], 2)
-        expected = np.ones((3, 2))
-        self.assertArrayEqual(result.data, expected)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/tests/unit/preprocessor/test_runner.py b/tests/unit/preprocessor/test_runner.py
new file mode 100644
index 0000000000..8814c5fd4a
--- /dev/null
+++ b/tests/unit/preprocessor/test_runner.py
@@ -0,0 +1,16 @@
+from esmvaltool.preprocessor import (DEFAULT_ORDER, MULTI_MODEL_FUNCTIONS,
+                                     _get_itype)
+
+
+def test_first_argument_name():
+    """Check that the input type of all preprocessor functions is valid."""
+    valid_itypes = ('file', 'files', 'cube', 'cubes', 'products')
+    for step in DEFAULT_ORDER:
+        itype = _get_itype(step)
+        assert itype in valid_itypes, (
+            "Invalid preprocessor function definition {}, first argument "
+            "should be one of {} but is {}".format(step, valid_itypes, itype))
+
+
+def test_multi_model_exist():
+    assert MULTI_MODEL_FUNCTIONS.issubset(set(DEFAULT_ORDER))
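test_first_argument_name relies on _get_itype to classify each preprocessor step by the name of its first parameter. One plausible way to read that name off a function, shown here with the stdlib inspect module and a hypothetical step (the real helper may differ in detail):

    import inspect

    def get_itype(func):
        """Return the name of a function's first parameter."""
        return next(iter(inspect.signature(func).parameters))

    def example_step(cube, target_grid=None):  # hypothetical preprocessor step
        return cube

    assert get_itype(example_step) == 'cube'

Deriving the input type from the signature keeps the convention self-enforcing: any new preprocessor function with a misnamed first argument fails this test immediately.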
diff --git a/tests/unit/test_lint.py b/tests/unit/test_lint.py
index 8f484a16ae..b07ae55f25 100644
--- a/tests/unit/test_lint.py
+++ b/tests/unit/test_lint.py
@@ -1,7 +1,6 @@
-""" Lint tests """
-from __future__ import print_function
-
+"""Lint tests."""
 import os
+import subprocess
 import textwrap
 
 import pycodestyle  # formerly known as pep8
@@ -16,7 +15,8 @@ def test_pep8_conformance():
         'tests',
     ]
     exclude_paths = [
-        'esmvaltool/doc',
+        'esmvaltool/doc', 'esmvaltool/diag_scripts/cvdp/cvdp',
+        'esmvaltool/cmor/tables'
     ]
 
     print("PEP8 check of directories: {}\n".format(', '.join(check_paths)))
@@ -33,7 +33,8 @@ def test_pep8_conformance():
     success = style.check_files(check_paths).total_errors == 0
 
     if not success:
-        print(textwrap.dedent("""
+        print(
+            textwrap.dedent("""
            Your Python code does not conform
            to the official Python style guide (PEP8), see
            https://www.python.org/dev/peps/pep-0008
@@ -56,16 +57,27 @@ def test_nclcodestyle():
         'tests',
     ]
 
+    exclude_paths = [
+        'esmvaltool/diag_scripts/cvdp/cvdp',
+    ]
+
     print("Formatting check of NCL code in directories: {}\n".format(
         ', '.join(check_paths)))
 
-    style = nclcodestyle.StyleGuide()
+    # Get paths wrt package root
     package_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
-    check_paths = [os.path.join(package_root, path) for path in check_paths]
+    for paths in (check_paths, exclude_paths):
+        for i, path in enumerate(paths):
+            paths[i] = os.path.join(package_root, path)
+
+    style = nclcodestyle.StyleGuide()
+    style.options.exclude.extend(exclude_paths)
 
     success = style.check_files(check_paths).total_errors == 0
 
     if not success:
-        print(textwrap.dedent("""
+        print(
+            textwrap.dedent("""
            Your NCL code does not follow our formatting standards.
            A list of warning and error messages can be found above,
@@ -75,3 +87,27 @@ def test_nclcodestyle():
            """))
 
     assert success, "Your NCL code does not follow our formatting standards."
+
+
+def test_r_lint(monkeypatch):
+    """Test R lint."""
+    monkeypatch.setenv("LINTR_COMMENT_BOT", "FALSE")
+    package_root = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+    checker = os.path.join(package_root, 'tests', 'unit', 'check_r_code.R')
+    try:
+        output = subprocess.check_output(('Rscript', checker, package_root),
+                                         stderr=subprocess.STDOUT,
+                                         universal_newlines=True)
+        print(output)
+        return
+    except subprocess.CalledProcessError as ex:
+        print(
+            textwrap.dedent("""
+            Your R code does not follow our formatting standards.
+
+            Please fix the following issues:
+            """))
+        print(ex.output)
+
+    assert False,\
+        'Your R code does not follow our formatting standards.'
diff --git a/tests/unit/test_naming.py b/tests/unit/test_naming.py
index 3d570f1233..41971a60a8 100644
--- a/tests/unit/test_naming.py
+++ b/tests/unit/test_naming.py
@@ -57,11 +57,15 @@ def test_no_namelist(self):
 
         This will help us to avoid bad merges with stale branches
         """
+        exclude_paths = ['esmvaltool/diag_scripts/cvdp/cvdp']
+
         for dirpath, dirnames, filenames in os.walk(self.esmvaltool_folder):
             if '.git' in dirpath.split(os.sep):
                 continue
+            if any([item in dirpath for item in exclude_paths]):
+                continue
             self.assertFalse(
                 any('namelist' in name.lower()
                     for name in filenames + dirnames),
-                'Namelist reference found at {}. Please use "recipe" instead'
-                .format(dirpath))
+                'Namelist reference found at {}. Please use "recipe" instead'.
+                format(dirpath))
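test_r_lint disables lintr's GitHub comment bot through an environment variable before shelling out to Rscript, and uses pytest's monkeypatch fixture so the change cannot leak into other tests. A small self-contained illustration of that fixture (hypothetical test, standard pytest API):

    import os

    def test_env_is_scoped(monkeypatch):
        """The variable is set only for the duration of this test."""
        monkeypatch.setenv("LINTR_COMMENT_BOT", "FALSE")
        assert os.environ["LINTR_COMMENT_BOT"] == "FALSE"
        # pytest restores the original environment during teardown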