From 99e01fc0e0f2aa9350709f8d62bf7a2767c1c0b8 Mon Sep 17 00:00:00 2001 From: Teagan King Date: Wed, 21 Feb 2024 14:29:52 -0700 Subject: [PATCH 001/126] preliminary plumber files --- python/ctsm/site_and_regional/plumber_site.py | 119 +++++++++ python/ctsm/site_and_regional/run_plumber.py | 241 ++++++++++++++++++ 2 files changed, 360 insertions(+) create mode 100755 python/ctsm/site_and_regional/plumber_site.py create mode 100755 python/ctsm/site_and_regional/run_plumber.py diff --git a/python/ctsm/site_and_regional/plumber_site.py b/python/ctsm/site_and_regional/plumber_site.py new file mode 100755 index 0000000000..57475c07cd --- /dev/null +++ b/python/ctsm/site_and_regional/plumber_site.py @@ -0,0 +1,119 @@ +""" +This module contains the Plumber2Site class and class functions which are used in run_plumber.py +""" + +# Import libraries +import logging +import os +import sys + +# Get the ctsm util tools and then the cime tools. +_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "python")) +sys.path.insert(1, _CTSM_PYTHON) + +# -- import local classes for this script +# pylint: disable=wrong-import-position +from ctsm.site_and_regional.tower_site import TowerSite + +# pylint: disable=wrong-import-position, import-error, unused-import, wrong-import-order +from ctsm import add_cime_to_path +from ctsm.path_utils import path_to_ctsm_root + +from CIME import build +from CIME.case import Case +from CIME.utils import safe_copy, expect, symlink_force + +logger = logging.getLogger(__name__) + + +# pylint: disable=too-many-instance-attributes +class Plumber2Site(TowerSite): + """ + A class for encapsulating plumber sites. + """ + + def build_base_case( + self, + cesmroot, + output_root, + res, + compset, + user_mods_dirs=None, + overwrite=False, + setup_only=False, + ): + if user_mods_dirs is None: + user_mods_dirs = [ + os.path.join(self.cesmroot, "cime_config", "usermods_dirs", "PLUMBER", self.name) + ] + print("in plumbersite adding usermodsdirs") + print("usermodsdirs: {}".format(user_mods_dirs)) + case_path = super().build_base_case(cesmroot, output_root, res, compset, user_mods_dirs) + + return case_path + + # def get_batch_query(self, case): + # return super().get_batch_query(case) + + # pylint: disable=too-many-statements + def run_case( + self, + base_case_root, + run_type, + prism, + run_length, + user_version, + tower_type=None, + user_mods_dirs=None, + overwrite=False, + setup_only=False, + no_batch=False, + rerun=False, + experiment=False, + ): + """ + Run case. + + Args: + self + base_case_root: str, opt + file path of base case + run_type: str, opt + transient, post_ad, or ad case, default transient + prism: bool, opt # TODO: remove? + if True, use PRISM precipitation, default False + run_length: str, opt + length of run, default '4Y' + user_version: str, opt # TODO: is there an equivalent for PLUMBER? + default 'latest' + overwrite: bool, opt + default False + setup_only: bool, opt + default False; if True, set up but do not run case + no_batch: bool, opt + default False + rerun: bool, opt + default False + experiment: str, opt + name of experiment, default False + """ + user_mods_dirs = [ + os.path.join(self.cesmroot, "cime_config", "usermods_dirs", "PLUMBER", self.name) + ] + tower_type = "PLUMBER" + super().run_case( + base_case_root, run_type, prism, run_length, user_version, tower_type, user_mods_dirs + ) + + def set_ref_case(self, case): + super().set_ref_case(case) + return True ### Check if super returns false, if this will still return True? 
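+    # As written, set_ref_case always returns True: the result of
+    # super().set_ref_case(case) is discarded, so a False from the parent is
+    # masked. A sketch that propagates it instead (assumption: the parent
+    # TowerSite.set_ref_case returns True/False for success/failure):
+    #
+    #     def set_ref_case(self, case):
+    #         return super().set_ref_case(case)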
+ + def modify_user_nl(self, case_root, run_type, rundir, site_lines=None): + # TODO: include any plumber-specific user namelist lines, using this as just an example currently + if site_lines is None: + site_lines = [ + """hist_fincl1 = 'TOTECOSYSC', 'TOTECOSYSN', 'TOTSOMC', 'TOTSOMN', 'TOTVEGC', + 'TOTVEGN', 'TLAI', 'GPP', 'CPOOL', 'NPP', 'TWS', 'H2OSNO',""" + ] + super().modify_user_nl(case_root, run_type, rundir, site_lines) diff --git a/python/ctsm/site_and_regional/run_plumber.py b/python/ctsm/site_and_regional/run_plumber.py new file mode 100755 index 0000000000..5824770976 --- /dev/null +++ b/python/ctsm/site_and_regional/run_plumber.py @@ -0,0 +1,241 @@ +#! /usr/bin/env python3 + +""" +|------------------------------------------------------------------| +|--------------------- Instructions -----------------------------| +|------------------------------------------------------------------| +This is a wrapper script for running CTSM simulation for one or more +plumber sites. + +This script is only for plumber site and we will develop a more general +code later. + +This script first creates and builds a generic base case. +Next, it will clone the base_case for different plumber sites and run +types to reduce the need to build ctsm everytime. + +This script will do the following: + 1) Create a generic base case for cloning. + 2) Make the case for the specific plumber site(s). + 3) Make changes to the case, for: + a. AD spinup + b. post-AD spinup + c. transient + #--------------- + d. SASU or Matrix spinup + 4) Build and submit the case. + +------------------------------------------------------------------- +Instructions for running using conda python environments: + +../../py_env_create +conda activate ctsm_py + +------------------------------------------------------------------- +To see the available options: + ./run_plumber.py --help +------------------------------------------------------------------- +""" +# TODO (NS) +# - [ ] +# - [ ] Case dependency and the ability to check case status +# - [ ] If Case dependency works we don't need finidat given explicilty for post-ad and transient. + +# - [ ] checkout_externals instead of using env varaiable +# - [ ] wget the fields available and run for those available + +# - [ ] Matrix spin-up if (SASU) Eric merged it in +# - [ ] Make sure both AD and SASU are not on at the same time + +# - [ ] Make sure CIME and other dependencies are checked out. + + +# Import libraries +import glob +import logging +import os +import sys +import pandas as pd + +# Get the ctsm util tools and then the cime tools. +_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "python")) +sys.path.insert(1, _CTSM_PYTHON) + +# pylint: disable=wrong-import-position +from ctsm.path_utils import path_to_ctsm_root +from ctsm.download_utils import download_file +from ctsm.site_and_regional.neon_arg_parse import get_parser +from ctsm.site_and_regional.plumber_site import PlumberSite + +# pylint: disable=import-error, wildcard-import, wrong-import-order +from standard_script_setup import * + +logger = logging.getLogger(__name__) + + +def check_plumber_listing(valid_plumber_sites): + """ + A function to download and parse plumber listing file. + """ + listing_file = "listing.csv" + url = "https://storage.neonscience.org/neon-ncar/listing.csv" + # TODO: will there be a parallel plumber listing? 
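+    # Until a PLUMBER-specific listing exists, this reuses the NEON-hosted CSV
+    # and filters it against valid_plumber_sites; if a separate PLUMBER listing
+    # is published later, only `url` above should need to change.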
+ + download_file(url, listing_file) + available_list = parse_plumber_listing(listing_file, valid_plumber_sites) + return available_list + + +def parse_plumber_listing(listing_file, valid_plumber_sites): + """ + A function to parse plumber listing file + and find plumber sites with the dates + where data is available. + + Args: + listing_file (str): downloaded listing file + + Returns: + available_list : + list of plumber_site objects that is found + on the downloaded listing file. + """ + + # pd.set_option("display.max_rows", None, "display.max_columns", None) + + available_list = [] + + listing_df = pd.read_csv(listing_file) + + # check for finidat files for transient run + finidatlist = listing_df[listing_df["object"].str.contains("lnd/ctsm")] + + # -- filter lines with atm/cdep + listing_df = listing_df[listing_df["object"].str.contains("atm/cdeps/")] + + # -- split the object str to extract site name + listing_df = listing_df["object"].str.split("/", expand=True) + + # -- groupby site name + grouped_df = listing_df.groupby(8) + for key, _ in grouped_df: + # -- check if it is a valid plumber site + if any(key in x for x in valid_plumber_sites): + site_name = key + tmp_df = grouped_df.get_group(key) + + # -- filter files only ending with YYYY-MM.nc + tmp_df = tmp_df[tmp_df[9].str.contains(r"\d\d\d\d-\d\d.nc")] + + # -- find all the data versions + # versions = tmp_df[7].unique() + # print ("all versions available for ", site_name,":", *versions) + latest_version = tmp_df[7].iloc[-1] + # print ("latests version available for ", site_name,":", latest_version) + + tmp_df = tmp_df[tmp_df[7].str.contains(latest_version)] + # -- remove .nc from the file names + tmp_df[9] = tmp_df[9].str.replace(".nc", "", regex=False) + + tmp_df2 = tmp_df[9].str.split("-", expand=True) + + # ignore any prefix in file name and just get year + tmp_df2[0] = tmp_df2[0].str.slice(-4) + + # -- figure out start_year and end_year + start_year = tmp_df2[0].iloc[0] + end_year = tmp_df2[0].iloc[-1] + + # -- figure out start_month and end_month + start_month = tmp_df2[1].iloc[0] + end_month = tmp_df2[1].iloc[-1] + + logger.debug("Valid plumber site %s found!", site_name) + logger.debug("File version %s", latest_version) + logger.debug("start_year=%s", start_year) + logger.debug("end_year=%s", end_year) + logger.debug("start_month=%s", start_month) + logger.debug("end_month=%s", end_month) + finidat = None + for line in finidatlist["object"]: + if site_name in line: + finidat = line.split(",")[0].split("/")[-1] + + plumber_site = PlumberSite(site_name, start_year, end_year, start_month, end_month, finidat) + logger.debug(plumber_site) + available_list.append(plumber_site) + + return available_list + + +def main(description): + """ + Determine valid plumber sites. Make an output directory if it does not exist. + Loop through requested sites and run CTSM at that site. 
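+
+    Args:
+        description (str): command description forwarded to the argument parser.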
+ """ + cesmroot = path_to_ctsm_root() + # Get the list of supported plumber sites from usermods + valid_plumber_sites = glob.glob( + os.path.join(cesmroot, "cime_config", "usermods_dirs", "PLUMBER", "[!Fd]*") + ) + valid_plumber_sites = sorted([v.split("/")[-1] for v in valid_plumber_sites]) + + ( + site_list, + output_root, + run_type, + experiment, + prism, + overwrite, + run_length, + base_case_root, + run_from_postad, + setup_only, + no_batch, + rerun, + user_version, + ) = get_parser(sys.argv, description, valid_plumber_sites) + + if output_root: + logger.debug("output_root : %s", output_root) + if not os.path.exists(output_root): + os.makedirs(output_root) + + # -- check plumber listing file for available data: + available_list = check_plumber_listing(valid_plumber_sites) + + # ================================= + # -- all plumber sites can be cloned from one generic case + # -- so no need to define a base_case for every site. + + res = "CLM_USRDAT" + if run_type == "transient": + compset = "IHist1PtClm51Bgc" + else: + compset = "I1PtClm51Bgc" + + # -- Looping over plumber sites + + for plumber_site in available_list: + if plumber_site.name in site_list: + if run_from_postad: + plumber_site.finidat = None + if not base_case_root: + user_mods_dirs = None + base_case_root = plumber_site.build_base_case( + cesmroot, output_root, res, compset, user_mods_dirs, overwrite, setup_only + ) + logger.info("-----------------------------------") + logger.info("Running CTSM for plumber site : %s", plumber_site.name) + plumber_site.run_case( + base_case_root, + run_type, + prism, + run_length, + user_version, + overwrite, + setup_only, + no_batch, + rerun, + experiment, + ) From 7378576920c55ea7c998cda0fff02fd447f71daa Mon Sep 17 00:00:00 2001 From: Teagan King Date: Thu, 22 Feb 2024 12:43:28 -0700 Subject: [PATCH 002/126] include plumber2site values in config_component.xml --- cime_config/config_component.xml | 23 ++++++++++++++++++++ python/ctsm/site_and_regional/run_plumber.py | 4 +++- 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index 281a94a0b2..0f35da293a 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -335,6 +335,29 @@ Version id of Neon data + + char + + + AR-SLu,AT-Neu,AU-ASM,AU-Cow,AU-Cpr,AU-Ctr,AU-Cum,AU-DaP,AU-DaS,AU-Dry,AU-Emr,AU-Gin,AU-GWW,AU-How,AU-Lit, + AU-Otw,AU-Rig,AU-Rob,AU-Sam,AU-Stp,AU-TTE,AU-Tum,AU-Whr,AU-Wrr,AU-Ync,BE-Bra,BE-Lon,BE-Vie,BR-Sa3,BW-Ma1,CA-NS1, + CA-NS2,CA-NS4,CA-NS5,CA-NS6,CA-NS7,CA-Qcu,CA-Qfo,CA-SF1,CA-SF2,CA-SF3,CH-Cha,CH-Dav,CH-Fru,CH-Oe1,CN-Cha,CN-Cng, + CN-Dan,CN-Din,CN-Du2,CN-HaM,CN-Qia,CZ-wet,DE-Bay,DE-Geb,DE-Gri,DE-Hai,DE-Kli,DE-Meh,DE-Obe,DE-Seh,DE-SfN,DE-Tha, + DE-Wet,DK-Fou,DK-Lva,DK-Ris,DK-Sor,DK-ZaH,ES-ES1,ES-ES2,ES-LgS,ES-LMa,ES-VDA,FI-Hyy,FI-Kaa,FI-Lom,FI-Sod,FR-Fon, + FR-Gri,FR-Hes,FR-LBr,FR-Lq1,FR-Lq2,FR-Pue,GF-Guy,HU-Bug,ID-Pag,IE-Ca1,IE-Dri,IT-Amp,IT-BCi,IT-CA1,IT-CA2,IT-CA3, + IT-Col,IT-Cpz,IT-Isp,IT-Lav,IT-LMa,IT-Mal,IT-MBo,IT-Noe,IT-Non,IT-PT1,IT-Ren,IT-Ro1,IT-Ro2,IT-SR2,IT-SRo,JP-SMF, + NL-Ca1,NL-Hor,NL-Loo,PL-wet,PT-Esp,PT-Mi1,PT-Mi2,RU-Che,RU-Fyo,RU-Zot,SD-Dem,SE-Deg,UK-Gri,UK-Ham,UK-PL3,US-AR1, + US-AR2,US-ARM,US-Aud,US-Bar,US-Bkg,US-Blo,US-Bo1,US-Cop,US-FPe,US-GLE,US-Goo,US-Ha1,US-Ho1,US-KS2,US-Los,US-Me2, + US-Me4,US-Me6,US-MMS,US-MOz,US-Myb,US-Ne1,US-Ne2,US-Ne3,US-NR1,US-PFa,US-Prr,US-SP1,US-SP2,US-SP3,US-SRG,US-SRM, + US-Syv,US-Ton,US-Tw4,US-Twt,US-UMB,US-Var,US-WCr,US-Whs,US-Wkg,ZA-Kru,ZM-Mon, + + + 
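+      <!-- Assumed reading of the entry above: PLUMBER2SITE, of type char,
+           with the comma-separated site codes as its valid_values; the
+           description below presumably intends PLUMBER rather than NEON. -->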
run_component_ctsm + env_run.xml + Name of site for NEON tower data + + + ========================================= CLM naming conventions diff --git a/python/ctsm/site_and_regional/run_plumber.py b/python/ctsm/site_and_regional/run_plumber.py index 5824770976..0f148b5074 100755 --- a/python/ctsm/site_and_regional/run_plumber.py +++ b/python/ctsm/site_and_regional/run_plumber.py @@ -161,7 +161,9 @@ def parse_plumber_listing(listing_file, valid_plumber_sites): if site_name in line: finidat = line.split(",")[0].split("/")[-1] - plumber_site = PlumberSite(site_name, start_year, end_year, start_month, end_month, finidat) + plumber_site = PlumberSite( + site_name, start_year, end_year, start_month, end_month, finidat + ) logger.debug(plumber_site) available_list.append(plumber_site) From 9fe6a5a7bd7c90e6e9b97aa9626f96de6f942a0b Mon Sep 17 00:00:00 2001 From: Teagan King Date: Thu, 7 Mar 2024 10:04:45 -0700 Subject: [PATCH 003/126] general setup to run plumber --- python/ctsm/site_and_regional/run_plumber.py | 243 ------------------ .../{run_neon.py => run_tower.py} | 61 ++++- .../{neon_arg_parse.py => tower_arg_parse.py} | 15 +- ..._sys_run_neon.py => test_sys_run_tower.py} | 2 +- ...nit_run_neon.py => test_unit_run_tower.py} | 2 +- ..._parse.py => test_unit_tower_arg_parse.py} | 11 +- .../site_and_regional/{run_neon => run_tower} | 10 +- 7 files changed, 80 insertions(+), 264 deletions(-) delete mode 100755 python/ctsm/site_and_regional/run_plumber.py rename python/ctsm/site_and_regional/{run_neon.py => run_tower.py} (76%) rename python/ctsm/site_and_regional/{neon_arg_parse.py => tower_arg_parse.py} (94%) rename python/ctsm/test/{test_sys_run_neon.py => test_sys_run_tower.py} (96%) rename python/ctsm/test/{test_unit_run_neon.py => test_unit_run_tower.py} (96%) rename python/ctsm/test/{test_unit_neon_arg_parse.py => test_unit_tower_arg_parse.py} (82%) rename tools/site_and_regional/{run_neon => run_tower} (86%) diff --git a/python/ctsm/site_and_regional/run_plumber.py b/python/ctsm/site_and_regional/run_plumber.py deleted file mode 100755 index 0f148b5074..0000000000 --- a/python/ctsm/site_and_regional/run_plumber.py +++ /dev/null @@ -1,243 +0,0 @@ -#! /usr/bin/env python3 - -""" -|------------------------------------------------------------------| -|--------------------- Instructions -----------------------------| -|------------------------------------------------------------------| -This is a wrapper script for running CTSM simulation for one or more -plumber sites. - -This script is only for plumber site and we will develop a more general -code later. - -This script first creates and builds a generic base case. -Next, it will clone the base_case for different plumber sites and run -types to reduce the need to build ctsm everytime. - -This script will do the following: - 1) Create a generic base case for cloning. - 2) Make the case for the specific plumber site(s). - 3) Make changes to the case, for: - a. AD spinup - b. post-AD spinup - c. transient - #--------------- - d. SASU or Matrix spinup - 4) Build and submit the case. 
- -------------------------------------------------------------------- -Instructions for running using conda python environments: - -../../py_env_create -conda activate ctsm_py - -------------------------------------------------------------------- -To see the available options: - ./run_plumber.py --help -------------------------------------------------------------------- -""" -# TODO (NS) -# - [ ] -# - [ ] Case dependency and the ability to check case status -# - [ ] If Case dependency works we don't need finidat given explicilty for post-ad and transient. - -# - [ ] checkout_externals instead of using env varaiable -# - [ ] wget the fields available and run for those available - -# - [ ] Matrix spin-up if (SASU) Eric merged it in -# - [ ] Make sure both AD and SASU are not on at the same time - -# - [ ] Make sure CIME and other dependencies are checked out. - - -# Import libraries -import glob -import logging -import os -import sys -import pandas as pd - -# Get the ctsm util tools and then the cime tools. -_CTSM_PYTHON = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "python")) -sys.path.insert(1, _CTSM_PYTHON) - -# pylint: disable=wrong-import-position -from ctsm.path_utils import path_to_ctsm_root -from ctsm.download_utils import download_file -from ctsm.site_and_regional.neon_arg_parse import get_parser -from ctsm.site_and_regional.plumber_site import PlumberSite - -# pylint: disable=import-error, wildcard-import, wrong-import-order -from standard_script_setup import * - -logger = logging.getLogger(__name__) - - -def check_plumber_listing(valid_plumber_sites): - """ - A function to download and parse plumber listing file. - """ - listing_file = "listing.csv" - url = "https://storage.neonscience.org/neon-ncar/listing.csv" - # TODO: will there be a parallel plumber listing? - - download_file(url, listing_file) - available_list = parse_plumber_listing(listing_file, valid_plumber_sites) - return available_list - - -def parse_plumber_listing(listing_file, valid_plumber_sites): - """ - A function to parse plumber listing file - and find plumber sites with the dates - where data is available. - - Args: - listing_file (str): downloaded listing file - - Returns: - available_list : - list of plumber_site objects that is found - on the downloaded listing file. 
- """ - - # pd.set_option("display.max_rows", None, "display.max_columns", None) - - available_list = [] - - listing_df = pd.read_csv(listing_file) - - # check for finidat files for transient run - finidatlist = listing_df[listing_df["object"].str.contains("lnd/ctsm")] - - # -- filter lines with atm/cdep - listing_df = listing_df[listing_df["object"].str.contains("atm/cdeps/")] - - # -- split the object str to extract site name - listing_df = listing_df["object"].str.split("/", expand=True) - - # -- groupby site name - grouped_df = listing_df.groupby(8) - for key, _ in grouped_df: - # -- check if it is a valid plumber site - if any(key in x for x in valid_plumber_sites): - site_name = key - tmp_df = grouped_df.get_group(key) - - # -- filter files only ending with YYYY-MM.nc - tmp_df = tmp_df[tmp_df[9].str.contains(r"\d\d\d\d-\d\d.nc")] - - # -- find all the data versions - # versions = tmp_df[7].unique() - # print ("all versions available for ", site_name,":", *versions) - latest_version = tmp_df[7].iloc[-1] - # print ("latests version available for ", site_name,":", latest_version) - - tmp_df = tmp_df[tmp_df[7].str.contains(latest_version)] - # -- remove .nc from the file names - tmp_df[9] = tmp_df[9].str.replace(".nc", "", regex=False) - - tmp_df2 = tmp_df[9].str.split("-", expand=True) - - # ignore any prefix in file name and just get year - tmp_df2[0] = tmp_df2[0].str.slice(-4) - - # -- figure out start_year and end_year - start_year = tmp_df2[0].iloc[0] - end_year = tmp_df2[0].iloc[-1] - - # -- figure out start_month and end_month - start_month = tmp_df2[1].iloc[0] - end_month = tmp_df2[1].iloc[-1] - - logger.debug("Valid plumber site %s found!", site_name) - logger.debug("File version %s", latest_version) - logger.debug("start_year=%s", start_year) - logger.debug("end_year=%s", end_year) - logger.debug("start_month=%s", start_month) - logger.debug("end_month=%s", end_month) - finidat = None - for line in finidatlist["object"]: - if site_name in line: - finidat = line.split(",")[0].split("/")[-1] - - plumber_site = PlumberSite( - site_name, start_year, end_year, start_month, end_month, finidat - ) - logger.debug(plumber_site) - available_list.append(plumber_site) - - return available_list - - -def main(description): - """ - Determine valid plumber sites. Make an output directory if it does not exist. - Loop through requested sites and run CTSM at that site. - """ - cesmroot = path_to_ctsm_root() - # Get the list of supported plumber sites from usermods - valid_plumber_sites = glob.glob( - os.path.join(cesmroot, "cime_config", "usermods_dirs", "PLUMBER", "[!Fd]*") - ) - valid_plumber_sites = sorted([v.split("/")[-1] for v in valid_plumber_sites]) - - ( - site_list, - output_root, - run_type, - experiment, - prism, - overwrite, - run_length, - base_case_root, - run_from_postad, - setup_only, - no_batch, - rerun, - user_version, - ) = get_parser(sys.argv, description, valid_plumber_sites) - - if output_root: - logger.debug("output_root : %s", output_root) - if not os.path.exists(output_root): - os.makedirs(output_root) - - # -- check plumber listing file for available data: - available_list = check_plumber_listing(valid_plumber_sites) - - # ================================= - # -- all plumber sites can be cloned from one generic case - # -- so no need to define a base_case for every site. 
- - res = "CLM_USRDAT" - if run_type == "transient": - compset = "IHist1PtClm51Bgc" - else: - compset = "I1PtClm51Bgc" - - # -- Looping over plumber sites - - for plumber_site in available_list: - if plumber_site.name in site_list: - if run_from_postad: - plumber_site.finidat = None - if not base_case_root: - user_mods_dirs = None - base_case_root = plumber_site.build_base_case( - cesmroot, output_root, res, compset, user_mods_dirs, overwrite, setup_only - ) - logger.info("-----------------------------------") - logger.info("Running CTSM for plumber site : %s", plumber_site.name) - plumber_site.run_case( - base_case_root, - run_type, - prism, - run_length, - user_version, - overwrite, - setup_only, - no_batch, - rerun, - experiment, - ) diff --git a/python/ctsm/site_and_regional/run_neon.py b/python/ctsm/site_and_regional/run_tower.py similarity index 76% rename from python/ctsm/site_and_regional/run_neon.py rename to python/ctsm/site_and_regional/run_tower.py index 6d0108bf95..9e33bed462 100755 --- a/python/ctsm/site_and_regional/run_neon.py +++ b/python/ctsm/site_and_regional/run_tower.py @@ -5,18 +5,18 @@ |--------------------- Instructions -----------------------------| |------------------------------------------------------------------| This is a wrapper script for running CTSM simulation for one or more -neon sites. +tower (neon or plumber) sites. -This script is only for neon site and we will develop a more general +This script is only for tower sites and we will develop a more general code later. This script first creates and builds a generic base case. -Next, it will clone the base_case for different neon sites and run +Next, it will clone the base_case for different tower sites and run types to reduce the need to build ctsm everytime. This script will do the following: 1) Create a generic base case for cloning. - 2) Make the case for the specific neon site(s). + 2) Make the case for the specific neon or plumber site(s). 3) Make changes to the case, for: a. AD spinup b. post-AD spinup @@ -33,7 +33,7 @@ ------------------------------------------------------------------- To see the available options: - ./run_neon.py --help + ./run_tower.py --help ------------------------------------------------------------------- """ # TODO (NS) @@ -64,8 +64,9 @@ # pylint: disable=wrong-import-position from ctsm.path_utils import path_to_ctsm_root from ctsm.download_utils import download_file -from ctsm.site_and_regional.neon_arg_parse import get_parser +from python.ctsm.site_and_regional.tower_arg_parse import get_parser from ctsm.site_and_regional.neon_site import NeonSite +from ctsm.site_and_regional.plumber_site import PlumberSite # pylint: disable=import-error, wildcard-import, wrong-import-order from standard_script_setup import * @@ -80,6 +81,8 @@ def check_neon_listing(valid_neon_sites): listing_file = "listing.csv" url = "https://storage.neonscience.org/neon-ncar/listing.csv" + # TODO: will there be a parallel plumber listing? + download_file(url, listing_file) available_list = parse_neon_listing(listing_file, valid_neon_sites) return available_list @@ -161,6 +164,7 @@ def parse_neon_listing(listing_file, valid_neon_sites): finidat = line.split(",")[0].split("/")[-1] neon_site = NeonSite(site_name, start_year, end_year, start_month, end_month, finidat) + # TODO: Create parallel PlumberSite available_list logger.debug(neon_site) available_list.append(neon_site) @@ -169,7 +173,7 @@ def parse_neon_listing(listing_file, valid_neon_sites): def main(description): """ - Determine valid neon sites. 
Make an output directory if it does not exist. + Determine valid tower sites. Make an output directory if it does not exist. Loop through requested sites and run CTSM at that site. """ cesmroot = path_to_ctsm_root() @@ -179,6 +183,14 @@ def main(description): ) valid_neon_sites = sorted([v.split("/")[-1] for v in valid_neon_sites]) + # Get the list of supported plumber sites from usermods + valid_plumber_sites = glob.glob( + os.path.join(cesmroot, "cime_config", "usermods_dirs", "PLUMBER", "[!Fd]*") + ) + valid_plumber_sites = sorted([v.split("/")[-1] for v in valid_plumber_sites]) + + # TODO: or change to be one or other based on tower_type + ( site_list, output_root, @@ -193,7 +205,11 @@ def main(description): no_batch, rerun, user_version, - ) = get_parser(sys.argv, description, valid_neon_sites) + ) = get_parser(sys.argv, description, valid_neon_sites, valid_plumber_sites) + # TODO: make sure get_parser can handle valid_tower_sites + + # TODO: add in a tower_type argument (that has default NEON?) and + # include if statements to differentiate PLUMBER vs NEON if output_root: logger.debug("output_root : %s", output_root) @@ -204,7 +220,7 @@ def main(description): available_list = check_neon_listing(valid_neon_sites) # ================================= - # -- all neon sites can be cloned from one generic case + # -- all tower sites can be cloned from one generic case # -- so no need to define a base_case for every site. res = "CLM_USRDAT" @@ -214,7 +230,6 @@ def main(description): compset = "I1PtClm51Bgc" # -- Looping over neon sites - for neon_site in available_list: if neon_site.name in site_list: if run_from_postad: @@ -238,3 +253,29 @@ def main(description): rerun, experiment, ) + + # -- Looping over plumber sites + # TODO: define available_plumber_list! + # for plumber_site in available_plumber_list: + # if plumber_site.name in site_list: + # if run_from_postad: + # plumber_site.finidat = None + # if not base_case_root: + # user_mods_dirs = None + # base_case_root = plumber_site.build_base_case( + # cesmroot, output_root, res, compset, user_mods_dirs, overwrite, setup_only + # ) + # logger.info("-----------------------------------") + # logger.info("Running CTSM for plumber site : %s", plumber_site.name) + # plumber_site.run_case( + # base_case_root, + # run_type, + # prism, + # run_length, + # user_version, + # overwrite, + # setup_only, + # no_batch, + # rerun, + # experiment, + # ) diff --git a/python/ctsm/site_and_regional/neon_arg_parse.py b/python/ctsm/site_and_regional/tower_arg_parse.py similarity index 94% rename from python/ctsm/site_and_regional/neon_arg_parse.py rename to python/ctsm/site_and_regional/tower_arg_parse.py index 99f184dd62..7c3d9a4201 100644 --- a/python/ctsm/site_and_regional/neon_arg_parse.py +++ b/python/ctsm/site_and_regional/tower_arg_parse.py @@ -1,5 +1,5 @@ """ -Argument parser to use throughout run_neon.py +Argument parser to use throughout run_tower.py """ import argparse @@ -18,7 +18,7 @@ from CIME.utils import setup_standard_logging_options -def get_parser(args, description, valid_neon_sites): +def get_parser(args, description, valid_neon_sites, valid_plumber_sites): """ Get parser object for this script. 
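+
+    Both site lists populate argparse choices: valid_neon_sites feeds
+    --neon-sites, and the new valid_plumber_sites feeds --plumber-sites.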
""" @@ -41,6 +41,17 @@ def get_parser(args, description, valid_neon_sites): nargs="+", ) + parser.add_argument( + "--plumber-sites", + help="plumber site code (eg, AR-SLu)", + action="store", + required=False, + choices=valid_plumber_sites + ["all"], + dest="plumber_sites", + default=["AR-SLu"], + nargs="+", + ) + parser.add_argument( "--base-case", help=""" diff --git a/python/ctsm/test/test_sys_run_neon.py b/python/ctsm/test/test_sys_run_tower.py similarity index 96% rename from python/ctsm/test/test_sys_run_neon.py rename to python/ctsm/test/test_sys_run_tower.py index f4c417ea51..21639596e2 100755 --- a/python/ctsm/test/test_sys_run_neon.py +++ b/python/ctsm/test/test_sys_run_tower.py @@ -12,7 +12,7 @@ import sys from ctsm import unit_testing -from ctsm.site_and_regional.run_neon import main +from python.ctsm.site_and_regional.run_tower import main from ctsm.path_utils import path_to_ctsm_root # Allow test names that pylint doesn't like; otherwise hard to make them diff --git a/python/ctsm/test/test_unit_run_neon.py b/python/ctsm/test/test_unit_run_tower.py similarity index 96% rename from python/ctsm/test/test_unit_run_neon.py rename to python/ctsm/test/test_unit_run_tower.py index a35608e249..a64e045f32 100755 --- a/python/ctsm/test/test_unit_run_neon.py +++ b/python/ctsm/test/test_unit_run_tower.py @@ -18,7 +18,7 @@ # pylint: disable=wrong-import-position from ctsm import unit_testing -from ctsm.site_and_regional.run_neon import check_neon_listing +from python.ctsm.site_and_regional.run_tower import check_neon_listing # pylint: disable=invalid-name diff --git a/python/ctsm/test/test_unit_neon_arg_parse.py b/python/ctsm/test/test_unit_tower_arg_parse.py similarity index 82% rename from python/ctsm/test/test_unit_neon_arg_parse.py rename to python/ctsm/test/test_unit_tower_arg_parse.py index 7bae337709..39c6ec99b0 100755 --- a/python/ctsm/test/test_unit_neon_arg_parse.py +++ b/python/ctsm/test/test_unit_tower_arg_parse.py @@ -19,7 +19,7 @@ # pylint: disable=wrong-import-position from ctsm import unit_testing -from ctsm.site_and_regional.neon_arg_parse import get_parser +from python.ctsm.site_and_regional.tower_arg_parse import get_parser from ctsm.path_utils import path_to_ctsm_root # pylint: disable=invalid-name @@ -61,12 +61,19 @@ def test_function(self): os.path.join(cesmroot, "cime_config", "usermods_dirs", "NEON", "[!d]*") ) valid_neon_sites = sorted([v.split("/")[-1] for v in valid_neon_sites]) - parsed_arguments = get_parser(sys.argv, description, valid_neon_sites) + + valid_plumber_sites = glob.glob( + os.path.join(cesmroot, "cime_config", "usermods_dirs", "PLUMBER", "[!Fd]*") + ) + valid_plumber_sites = sorted([v.split("/")[-1] for v in valid_plumber_sites]) + + parsed_arguments = get_parser(sys.argv, description, valid_neon_sites, valid_plumber_sites) self.assertEqual(parsed_arguments[0][0], "ABBY", "arguments not processed as expected") self.assertEqual(parsed_arguments[3], "test", "arguments not processed as expected") self.assertEqual(parsed_arguments[4], False, "arguments not processed as expected") self.assertEqual(parsed_arguments[2], "ad", "arguments not processed as expected") + #TODO: self.assertEqual(parsed_arguments[x], "SOME PLUMBER-VAL", "arguments not processed as expected") if __name__ == "__main__": diff --git a/tools/site_and_regional/run_neon b/tools/site_and_regional/run_tower similarity index 86% rename from tools/site_and_regional/run_neon rename to tools/site_and_regional/run_tower index ffc3be2af7..096c649302 100755 --- 
a/tools/site_and_regional/run_neon +++ b/tools/site_and_regional/run_tower @@ -1,12 +1,12 @@ #!/usr/bin/env python3 """ This is a just top-level skeleton script that calls -run_neon.py. -The original code (run_neon.py) is located under +run_tower.py. +The original code (run_tower.py) is located under python/ctsm/site_and_regional folder. For full instructions on how to run the code and different options, -please check python/ctsm/site_and_regional/run_neon.py file. +please check python/ctsm/site_and_regional/run_tower.py file. This script first creates and builds a generic base case. Next, it will clone the base_case for different neon sites and run @@ -25,7 +25,7 @@ This script will do the following: ---------------------------------------------------------------- To see all available options for running tower sites: - ./run_neon --help + ./run_tower --help ---------------------------------------------------------------- Instructions for running using conda python environments: ../../py_env_create @@ -42,7 +42,7 @@ _CTSM_PYTHON = os.path.join( sys.path.insert(1, _CTSM_PYTHON) # pylint: disable=import-error, wrong-import-position -from ctsm.site_and_regional.run_neon import main +from ctsm.site_and_regional.run_tower import main if __name__ == "__main__": main(__doc__) From 92f827e692540868e64b976fa4cb6d74855fe761 Mon Sep 17 00:00:00 2001 From: Teagan King Date: Fri, 8 Mar 2024 13:28:22 -0700 Subject: [PATCH 004/126] update cime config --- cime_config/buildnml | 2 +- cime_config/config_component.xml | 4 +- python/ctsm/site_and_regional/run_tower.py | 45 ++++++++++++++++--- python/ctsm/test/test_unit_tower_arg_parse.py | 4 +- 4 files changed, 46 insertions(+), 9 deletions(-) diff --git a/cime_config/buildnml b/cime_config/buildnml index 0521830616..5d6e75272c 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -138,7 +138,7 @@ def buildnml(case, caseroot, compname): clm_usrdat_name = case.get_value("CLM_USRDAT_NAME") clmusr = " -clm_usr_name %s " % clm_usrdat_name # Write warning about initial condition data - if "NEON" in clm_usrdat_name and clm_force_coldstart == "off": + if "NEON" in clm_usrdat_name or "PLUMBER" in clm_usrdat_name and clm_force_coldstart == "off": if ("_transient" in clm_nml_use_case) and ( re.fullmatch(r"\w\w\w\w\.transient", casename) is None or clm_usrdat_name is "NEON.PRISM" diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index 0f35da293a..db2217210a 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -248,7 +248,9 @@ to create_newcase. The default value is UNSET. For NEON cases, this can be set to either NEON or NEON.PRISM, the latter of which would use PRISM precipitation instead of the default NEON precipitation. NEON cases then also - use the variable NEONSITE to specify the exact site. + use the variable NEONSITE to specify the exact site. PLUMBER cases use the variable + PLUMBER2SITE to specify the exact site. + diff --git a/python/ctsm/site_and_regional/run_tower.py b/python/ctsm/site_and_regional/run_tower.py index 9e33bed462..44b9d532e6 100755 --- a/python/ctsm/site_and_regional/run_tower.py +++ b/python/ctsm/site_and_regional/run_tower.py @@ -81,8 +81,6 @@ def check_neon_listing(valid_neon_sites): listing_file = "listing.csv" url = "https://storage.neonscience.org/neon-ncar/listing.csv" - # TODO: will there be a parallel plumber listing? 
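+    # (PLUMBER availability is determined from the usermods-derived site list
+    # in parse_plumber_listing below, rather than from this NEON-hosted CSV.)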
- download_file(url, listing_file) available_list = parse_neon_listing(listing_file, valid_neon_sites) return available_list @@ -164,12 +162,49 @@ def parse_neon_listing(listing_file, valid_neon_sites): finidat = line.split(",")[0].split("/")[-1] neon_site = NeonSite(site_name, start_year, end_year, start_month, end_month, finidat) - # TODO: Create parallel PlumberSite available_list logger.debug(neon_site) available_list.append(neon_site) return available_list +def parse_plumber_listing(valid_plumber_sites): + """ + A function to find plumber sites with the dates + where data is available. + + Returns: + available_list : + list of plumber_site objects that is found + """ + + available_list = [] + + for site_name in valid_plumber_sites: + + # -- figure out start_year and end_year from shell commands + # TODO: do we even need this though if the shell commands accomplish the same thing? + # start_year = tmp_df2[0].iloc[0] + # end_year = tmp_df2[0].iloc[-1] + + # -- figure out start_month and end_month + # start_month = tmp_df2[1].iloc[0] + # end_month = tmp_df2[1].iloc[-1] + + logger.debug("Valid plumber site %s found!", site_name) + # logger.debug("File version %s", latest_version) + # logger.debug("start_year=%s", start_year) + # logger.debug("end_year=%s", end_year) + # logger.debug("start_month=%s", start_month) + # logger.debug("end_month=%s", end_month) + # finidat = None + # for line in finidatlist["object"]: + # if site_name in line: + # finidat = line.split(",")[0].split("/")[-1] + + # plumber_site = Plumber2Site(site_name, start_year, end_year, start_month, end_month, finidat) + #available_list.append(plumber_site) + + return available_list def main(description): """ @@ -189,7 +224,8 @@ def main(description): ) valid_plumber_sites = sorted([v.split("/")[-1] for v in valid_plumber_sites]) - # TODO: or change to be one or other based on tower_type + # TODO: change to use neon or plumber valid sites based on tower_type? + # if left as is, could run both neon and plumber from one command ( site_list, @@ -206,7 +242,6 @@ def main(description): rerun, user_version, ) = get_parser(sys.argv, description, valid_neon_sites, valid_plumber_sites) - # TODO: make sure get_parser can handle valid_tower_sites # TODO: add in a tower_type argument (that has default NEON?) 
and # include if statements to differentiate PLUMBER vs NEON diff --git a/python/ctsm/test/test_unit_tower_arg_parse.py b/python/ctsm/test/test_unit_tower_arg_parse.py index 39c6ec99b0..36408d7ec7 100755 --- a/python/ctsm/test/test_unit_tower_arg_parse.py +++ b/python/ctsm/test/test_unit_tower_arg_parse.py @@ -63,7 +63,7 @@ def test_function(self): valid_neon_sites = sorted([v.split("/")[-1] for v in valid_neon_sites]) valid_plumber_sites = glob.glob( - os.path.join(cesmroot, "cime_config", "usermods_dirs", "PLUMBER", "[!Fd]*") + os.path.join(cesmroot, "cime_config", "usermods_dirs", "PLUMBER", "[!Fd]*") ) valid_plumber_sites = sorted([v.split("/")[-1] for v in valid_plumber_sites]) @@ -73,7 +73,7 @@ def test_function(self): self.assertEqual(parsed_arguments[3], "test", "arguments not processed as expected") self.assertEqual(parsed_arguments[4], False, "arguments not processed as expected") self.assertEqual(parsed_arguments[2], "ad", "arguments not processed as expected") - #TODO: self.assertEqual(parsed_arguments[x], "SOME PLUMBER-VAL", "arguments not processed as expected") + # TODO: self.assertEqual(parsed_arguments[x], "SOME PLUMBER-VAL", "arguments not processed as expected") if __name__ == "__main__": From bddb3432e4fccc9b6245ef65c895e8b84bbaaae1 Mon Sep 17 00:00:00 2001 From: Teagan King Date: Fri, 8 Mar 2024 13:33:59 -0700 Subject: [PATCH 005/126] reformat --- python/ctsm/site_and_regional/run_tower.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/python/ctsm/site_and_regional/run_tower.py b/python/ctsm/site_and_regional/run_tower.py index 44b9d532e6..44a40433b0 100755 --- a/python/ctsm/site_and_regional/run_tower.py +++ b/python/ctsm/site_and_regional/run_tower.py @@ -167,6 +167,7 @@ def parse_neon_listing(listing_file, valid_neon_sites): return available_list + def parse_plumber_listing(valid_plumber_sites): """ A function to find plumber sites with the dates @@ -202,10 +203,11 @@ def parse_plumber_listing(valid_plumber_sites): # finidat = line.split(",")[0].split("/")[-1] # plumber_site = Plumber2Site(site_name, start_year, end_year, start_month, end_month, finidat) - #available_list.append(plumber_site) + # available_list.append(plumber_site) return available_list + def main(description): """ Determine valid tower sites. Make an output directory if it does not exist. 
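
Two loose ends in the PLUMBER plumbing above are worth flagging. First,
parse_plumber_listing still returns an empty available_list (its append is
commented out), so the commented-out PLUMBER loop in main() would have nothing
to run. A minimal interim sketch, assuming each usermods-derived site should be
attempted and that the Plumber2Site constructor tolerates None placeholders for
the dates and finidat:

    def parse_plumber_listing(valid_plumber_sites):
        available_list = []
        for site_name in valid_plumber_sites:
            logger.debug("Valid plumber site %s found!", site_name)
            # Placeholder args until PLUMBER date ranges and finidat files
            # are known (assumption: the constructor accepts None here).
            available_list.append(
                Plumber2Site(site_name, None, None, None, None, None)
            )
        return available_list

Second, plumber_site.py defines class Plumber2Site, while run_tower.py (and the
deleted run_plumber.py) import PlumberSite from it; one of the two names
presumably has to change before the PLUMBER path can run.
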
From af1237e219501a96143e2cb1ee785f6850feed0c Mon Sep 17 00:00:00 2001 From: Teagan King Date: Mon, 25 Mar 2024 16:26:21 -0600 Subject: [PATCH 006/126] remove run_length since overwritten by usermods dirs --- python/ctsm/site_and_regional/neon_site.py | 5 +---- python/ctsm/site_and_regional/run_tower.py | 3 --- python/ctsm/site_and_regional/tower_arg_parse.py | 16 ---------------- python/ctsm/site_and_regional/tower_site.py | 5 ----- 4 files changed, 1 insertion(+), 28 deletions(-) diff --git a/python/ctsm/site_and_regional/neon_site.py b/python/ctsm/site_and_regional/neon_site.py index 75dca35691..50f26b086d 100755 --- a/python/ctsm/site_and_regional/neon_site.py +++ b/python/ctsm/site_and_regional/neon_site.py @@ -61,7 +61,6 @@ def run_case( base_case_root, run_type, prism, - run_length, user_version, tower_type=None, user_mods_dirs=None, @@ -82,8 +81,6 @@ def run_case( transient, post_ad, or ad case, default transient prism: bool, opt if True, use PRISM precipitation, default False - run_length: str, opt - length of run, default '4Y' user_version: str, opt default 'latest' overwrite: bool, opt @@ -102,7 +99,7 @@ def run_case( ] tower_type = "NEON" super().run_case( - base_case_root, run_type, prism, run_length, user_version, tower_type, user_mods_dirs + base_case_root, run_type, prism, user_version, tower_type, user_mods_dirs ) def set_ref_case(self, case): diff --git a/python/ctsm/site_and_regional/run_tower.py b/python/ctsm/site_and_regional/run_tower.py index 44a40433b0..b5b2d4ad18 100755 --- a/python/ctsm/site_and_regional/run_tower.py +++ b/python/ctsm/site_and_regional/run_tower.py @@ -236,7 +236,6 @@ def main(description): experiment, prism, overwrite, - run_length, base_case_root, run_from_postad, setup_only, @@ -282,7 +281,6 @@ def main(description): base_case_root, run_type, prism, - run_length, user_version, overwrite, setup_only, @@ -308,7 +306,6 @@ def main(description): # base_case_root, # run_type, # prism, - # run_length, # user_version, # overwrite, # setup_only, diff --git a/python/ctsm/site_and_regional/tower_arg_parse.py b/python/ctsm/site_and_regional/tower_arg_parse.py index 7c3d9a4201..c2e46ec27f 100644 --- a/python/ctsm/site_and_regional/tower_arg_parse.py +++ b/python/ctsm/site_and_regional/tower_arg_parse.py @@ -207,21 +207,6 @@ def get_parser(args, description, valid_neon_sites, valid_plumber_sites): if "CIME_OUTPUT_ROOT" in args.output_root: args.output_root = None - if args.run_length == "0Y": - if args.run_type == "ad": - run_length = "100Y" - elif args.run_type == "postad": - run_length = "100Y" - else: - # The transient run length is set by cdeps atm buildnml to - # the last date of the available tower data - # this value is not used - run_length = "4Y" - else: - run_length = args.run_length - - run_length = parse_isoduration(run_length) - base_case_root = None if args.base_case_root: base_case_root = os.path.abspath(args.base_case_root) @@ -241,7 +226,6 @@ def get_parser(args, description, valid_neon_sites, valid_plumber_sites): args.experiment, args.prism, args.overwrite, - run_length, base_case_root, args.run_from_postad, args.setup_only, diff --git a/python/ctsm/site_and_regional/tower_site.py b/python/ctsm/site_and_regional/tower_site.py index af3a04e93e..483ab1218d 100644 --- a/python/ctsm/site_and_regional/tower_site.py +++ b/python/ctsm/site_and_regional/tower_site.py @@ -248,7 +248,6 @@ def run_case( base_case_root, run_type, prism, - run_length, user_version, tower_type, user_mods_dirs, @@ -269,8 +268,6 @@ def run_case( transient, post_ad, or ad 
case, default transient prism: bool, opt if True, use PRISM precipitation, default False - run_length: str, opt - length of run, default '4Y' user_version: str, opt default 'latest' overwrite: bool, opt @@ -375,7 +372,6 @@ def run_case( case.set_value("RUN_REFDATE", "0018-01-01") case.set_value("RUN_STARTDATE", "0018-01-01") case.set_value("RESUBMIT", 1) - case.set_value("STOP_N", run_length) else: case.set_value("CLM_FORCE_COLDSTART", "off") @@ -384,7 +380,6 @@ def run_case( if run_type == "postad": self.set_ref_case(case) - case.set_value("STOP_N", run_length) # For transient cases STOP will be set in the user_mod_directory if run_type == "transient": From 0d1d3dc192f29b4b9b476b21ba9449e66fdcf57d Mon Sep 17 00:00:00 2001 From: Teagan King Date: Mon, 25 Mar 2024 16:30:26 -0600 Subject: [PATCH 007/126] remove run_length from plumber_site and update args --- python/ctsm/site_and_regional/neon_site.py | 4 +--- python/ctsm/site_and_regional/plumber_site.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/python/ctsm/site_and_regional/neon_site.py b/python/ctsm/site_and_regional/neon_site.py index 50f26b086d..8fc1680ac4 100755 --- a/python/ctsm/site_and_regional/neon_site.py +++ b/python/ctsm/site_and_regional/neon_site.py @@ -98,9 +98,7 @@ def run_case( os.path.join(self.cesmroot, "cime_config", "usermods_dirs", "NEON", self.name) ] tower_type = "NEON" - super().run_case( - base_case_root, run_type, prism, user_version, tower_type, user_mods_dirs - ) + super().run_case(base_case_root, run_type, prism, user_version, tower_type, user_mods_dirs) def set_ref_case(self, case): super().set_ref_case(case) diff --git a/python/ctsm/site_and_regional/plumber_site.py b/python/ctsm/site_and_regional/plumber_site.py index 57475c07cd..e3597380a8 100755 --- a/python/ctsm/site_and_regional/plumber_site.py +++ b/python/ctsm/site_and_regional/plumber_site.py @@ -61,7 +61,6 @@ def run_case( base_case_root, run_type, prism, - run_length, user_version, tower_type=None, user_mods_dirs=None, @@ -82,8 +81,6 @@ def run_case( transient, post_ad, or ad case, default transient prism: bool, opt # TODO: remove? if True, use PRISM precipitation, default False - run_length: str, opt - length of run, default '4Y' user_version: str, opt # TODO: is there an equivalent for PLUMBER? 
default 'latest' overwrite: bool, opt @@ -102,7 +99,17 @@ def run_case( ] tower_type = "PLUMBER" super().run_case( - base_case_root, run_type, prism, run_length, user_version, tower_type, user_mods_dirs + base_case_root, + run_type, + prism, + user_version, + tower_type, + user_mods_dirs, + overwrite, + setup_only, + no_batch, + rerun, + experiment, ) def set_ref_case(self, case): From 5889ec22dfc08a835c151a344bfdea3921cd1693 Mon Sep 17 00:00:00 2001 From: Teagan King Date: Tue, 26 Mar 2024 12:39:05 -0600 Subject: [PATCH 008/126] update tests; include cases with experiment label in gitignore --- .gitignore | 3 +++ python/ctsm/test/test_sys_run_tower.py | 24 +++++++++++++++++++++++- 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index ec2a3b17f6..a1b4a056a4 100644 --- a/.gitignore +++ b/.gitignore @@ -109,8 +109,11 @@ unit_test_build /tools/site_and_regional/listing.csv /tools/site_and_regional/????/ /tools/site_and_regional/????.ad/ +/tools/site_and_regional/????.*.ad/ /tools/site_and_regional/????.postad/ +/tools/site_and_regional/????.*.postad/ /tools/site_and_regional/????.transient/ +/tools/site_and_regional/????.*.transient/ /tools/site_and_regional/archive/ # build output diff --git a/python/ctsm/test/test_sys_run_tower.py b/python/ctsm/test/test_sys_run_tower.py index 21639596e2..ea92370170 100755 --- a/python/ctsm/test/test_sys_run_tower.py +++ b/python/ctsm/test/test_sys_run_tower.py @@ -41,7 +41,7 @@ def tearDown(self): def test_one_site(self): """ - This test specifies a site to run + This test specifies a site to run a default case with experiment label 'TEST' Run the tool, check that file structure is set up correctly """ @@ -52,6 +52,7 @@ def test_one_site(self): "BART", "--setup-only", "--output-root", + "--experiment TEST", self._tempdir, ] main("") @@ -59,6 +60,27 @@ def test_one_site(self): # assert that BART directories were created during setup self.assertTrue("BART" in glob.glob(self._tempdir + "/BART*")[0]) + def test_ad_site(self): + """ + This test specifies a site to run an 'ad' case for + Run the tool, check that file structure is set up correctly + """ + + # run the run_neon tool + sys.argv = [ + os.path.join(path_to_ctsm_root(), "tools", "site_and_regional", "run_neon"), + "--neon-sites", + "ABBY", + "--setup-only", + "--output-root", + "--run-type ad", + self._tempdir, + ] + main("") + + # assert that ABBY directories were created during setup + self.assertTrue("ABBY" in glob.glob(self._tempdir + "/ABBY*")[0]) + # TODO: Would also be useful to test the following items: # It might be good to ensure the log files are working as expected? # Test running transient, ad and post ad cases. 
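
One caveat on the tests in this patch: argparse receives sys.argv one token per
element, and an option's value must directly follow its flag, so the single
strings "--experiment TEST" and "--run-type ad" (placed between "--output-root"
and self._tempdir) would likely fail to parse before any setup happens. A
sketch of the presumably intended argv for test_one_site:

    sys.argv = [
        os.path.join(path_to_ctsm_root(), "tools", "site_and_regional", "run_tower"),
        "--neon-sites", "BART",
        "--setup-only",
        "--experiment", "TEST",
        "--output-root", self._tempdir,
    ]

test_ad_site would pass "--run-type", "ad" the same way. Note also that both
tests still build sys.argv[0] from the old run_neon name, although the wrapper
script was renamed to run_tower earlier in this series.
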
From d863c13be015ec1624bf74b8eb1b4a993f6cfb49 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 25 Apr 2024 17:51:28 -0600 Subject: [PATCH 009/126] Add 1979 and 1979-2026 ne0np4 files to namelist defaults --- bld/namelist_files/namelist_defaults_ctsm.xml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index ecced7a6e2..e4f4c331d9 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -1428,6 +1428,14 @@ lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_mexicocityMEX_hist_2000_78pfts_c24 lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_1x1_urbanc_alpha_hist_2000_78pfts_c240221.nc + + +lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_SSP2-4.5_1979_78pfts_c240425.nc + +lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTIC.ne30x4_SSP2-4.5_1979_78pfts_c240425.nc + +lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4CONUS.ne30x8_SSP2-4.5_1979_78pfts_c240425.nc + @@ -1498,6 +1506,7 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c >lnd/clm2/surfdata_esmf/NEON/landuse.timeseries_NEON_${NEONSITE}_hist_78pfts_simyr2018-2023_c230931.nc --> + lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240216.nc lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240216.nc +lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTICGRIS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc +lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTIC.ne30x4_SSP2-4.5_1979-2026_78pfts_c240425.nc +lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4CONUS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc + lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP3-7.0_1850-2100_78pfts_c240216.nc From c4debee33f97dc0ac503683e7cbc956c2380c377 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 26 Apr 2024 14:34:10 -0600 Subject: [PATCH 010/126] Replace 3 vr-grid test compsets with ISSP245Clm50BgcCrop compset --- cime_config/testdefs/testlist_clm.xml | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index 05526b3529..918d86c213 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -2034,17 +2034,16 @@ - + - + - + @@ -2059,10 +2058,10 @@ - + - + @@ -2141,12 +2140,11 @@ - - + From 73ac9d72a9a478ed839238d2e73c5fae45f0a24f Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 26 Apr 2024 14:37:07 -0600 Subject: [PATCH 011/126] Remove sim_year_range from new flanduse_timeseries for xml file to work --- bld/namelist_files/namelist_defaults_ctsm.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index e4f4c331d9..ac4f7252d2 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -1580,11 +1580,11 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240216.nc -lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTICGRIS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc -lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTIC.ne30x4_SSP2-4.5_1979-2026_78pfts_c240425.nc 
-lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4CONUS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc From e95f9eaa9bcb4c85d57335fd9e4ae57d5e1ad827 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 26 Apr 2024 17:08:35 -0600 Subject: [PATCH 012/126] Add to Makefile for automatic generation 1979-2026 fsurdat/landuse files --- .../ctsm/toolchain/gen_mksurfdata_jobscript_multi.py | 10 ++++++++++ tools/mksurfdata_esmf/Makefile | 5 +++++ 2 files changed, 15 insertions(+) diff --git a/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py b/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py index 6deb50ebfb..6c38efdd0d 100755 --- a/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py +++ b/python/ctsm/toolchain/gen_mksurfdata_jobscript_multi.py @@ -48,6 +48,7 @@ "crop-global-SSP2-4.5-f19", "crop-global-SSP2-4.5-f10", "crop-global-SSP2-4.5-f45", + "crop-global-SSP2-4.5-ne0np4", "crop-global-SSP2-4.5-ne3", "crop-global-SSP2-4.5-ne16", "crop-global-SSP2-4.5-ne30", @@ -204,6 +205,11 @@ def main(): "ne3": ["ne3np4.pg3"], "ne16": ["ne16np4.pg3"], "ne30": ["ne30np4.pg3", "ne30np4.pg2", "ne30np4"], + "ne0np4": [ + "ne0np4.ARCTICGRIS.ne30x8", + "ne0np4.ARCTIC.ne30x4", + "ne0np4CONUS.ne30x8", + ], "ne120": [ "ne0np4.ARCTICGRIS.ne30x8", "ne0np4.ARCTIC.ne30x4", @@ -333,6 +339,10 @@ def main(): "--start-year 1850 --end-year 2100 --nosurfdata --ssp-rcp SSP2-4.5 --res", "f45", ), + "crop-global-SSP2-4.5-ne0np4": ( + "--start-year 1979 --end-year 2026 --ssp-rcp SSP2-4.5 --res", + "ne0np4", + ), "crop-global-SSP2-4.5-ne3": ( "--start-year 1850 --end-year 2100 --nosurfdata --ssp-rcp SSP2-4.5 --res", "ne3", diff --git a/tools/mksurfdata_esmf/Makefile b/tools/mksurfdata_esmf/Makefile index fc81d48079..00e90efa45 100644 --- a/tools/mksurfdata_esmf/Makefile +++ b/tools/mksurfdata_esmf/Makefile @@ -275,6 +275,7 @@ crop-global-SSP2-4.5 : crop-global-SSP2-4.5-f09 \ crop-global-SSP2-4.5-hcru \ crop-global-SSP2-4.5-ne16 \ crop-global-SSP2-4.5-ne30 \ + crop-global-SSP2-4.5-ne0np4 \ crop-global-SSP2-4.5-C96 \ crop-global-SSP2-4.5-mpasa120 @@ -318,6 +319,10 @@ crop-global-SSP2-4.5-ne30 : FORCE $(MKSURFDATA) --number-of-nodes 9 --scenario $@ --jobscript-file $@.sh --walltime 12:00:00 $(BATCHJOBS) $@.sh +crop-global-SSP2-4.5-ne0np4 : FORCE + $(MKSURFDATA) --number-of-nodes 9 --scenario $@ --jobscript-file $@.sh --walltime 12:00:00 + $(BATCHJOBS) $@.sh + crop-global-SSP2-4.5-C96 : FORCE $(MKSURFDATA) --number-of-nodes 9 --scenario $@ --jobscript-file $@.sh --walltime 12:00:00 $(BATCHJOBS) $@.sh From ee5efecb0630adae98b5839bb206edf14fe01144 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Mon, 29 Apr 2024 17:12:41 -0600 Subject: [PATCH 013/126] Fix the clm6_0 issues identified in #2492 --- bld/namelist_files/namelist_defaults_ctsm.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 147a23f49a..03b5afd1ef 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -116,9 +116,9 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/isotopes/atm_delta_C14_CMIP6_SSP5B_3x1_global_1850-2100_yearly_c181209.nc -.false. .true. .false. +.false. .false. @@ -430,6 +430,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). 
Jordan1991 Sturm1997 +Sturm1997 - -lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_SSP2-4.5_1979_78pfts_c240425.nc - -lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTIC.ne30x4_SSP2-4.5_1979_78pfts_c240425.nc - -lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4CONUS.ne30x8_SSP2-4.5_1979_78pfts_c240425.nc - @@ -1477,11 +1469,11 @@ lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne16np4.pg3_hist_1850_78pfts_c240216.n lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne120np4.pg3_hist_1850_78pfts_c240216.nc -lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_1850_78pfts_c240216.nc +lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTICGRIS.ne30x8_hist_1979_78pfts_c240425.nc -lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTIC.ne30x4_hist_1850_78pfts_c240216.nc +lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4.ARCTIC.ne30x4_hist_1979_78pfts_c240425.nc -lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4CONUS.ne30x8_hist_1850_78pfts_c240216.nc +lnd/clm2/surfdata_esmf/ctsm5.2.0/surfdata_ne0np4CONUS.ne30x8_hist_1979_78pfts_c240425.nc Date: Tue, 30 Apr 2024 17:22:15 -0600 Subject: [PATCH 017/126] Draft modifications to address new compset and new test for 1979-2026 --- bld/namelist_files/namelist_defaults_ctsm.xml | 6 +++--- cime_config/config_component.xml | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 6b98dc1482..2f3a7efd72 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -1572,11 +1572,11 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_C96_SSP2-4.5_1850-2100_78pfts_c240216.nc -lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTICGRIS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc -lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4.ARCTIC.ne30x4_SSP2-4.5_1979-2026_78pfts_c240425.nc -lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_ne0np4CONUS.ne30x8_SSP2-4.5_1979-2026_78pfts_c240425.nc diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index f7adab268f..984ba5ecdf 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -210,6 +210,7 @@ 1850-2100_SSP3-7.0_transient 1850-2100_SSP5-3.4_transient 1850-2100_SSP2-4.5_transient + 1850-2100_SSP2-4.5_transient 1850-2100_SSP1-1.9_transient 1850-2100_SSP4-3.4_transient 1850-2100_SSP4-6.0_transient From 75cdcd24a4c19ec66aa53fc895b17090c35f6a7a Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 30 Apr 2024 17:38:27 -0600 Subject: [PATCH 018/126] Improve a comment in namelist_defaults_ctsm.xml --- bld/namelist_files/namelist_defaults_ctsm.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 2f3a7efd72..0f533a56ff 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -1498,7 +1498,7 @@ lnd/clm2/surfdata_esmf/NEON/surfdata_1x1_NEON_TOOL_hist_78pfts_CMIP6_simyr2000_c >lnd/clm2/surfdata_esmf/NEON/landuse.timeseries_NEON_${NEONSITE}_hist_78pfts_simyr2018-2023_c230931.nc --> - + lnd/clm2/surfdata_esmf/ctsm5.2.0/landuse.timeseries_0.9x1.25_SSP2-4.5_1850-2100_78pfts_c240216.nc Date: Tue, 30 Apr 2024 17:44:17 -0600 Subject: [PATCH 019/126] Request same #tasks in Makefile as when I generated the ne0np4 files 
...which I generated manually one at a time --- tools/mksurfdata_esmf/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/mksurfdata_esmf/Makefile b/tools/mksurfdata_esmf/Makefile index 00e90efa45..d8bacdc5dd 100644 --- a/tools/mksurfdata_esmf/Makefile +++ b/tools/mksurfdata_esmf/Makefile @@ -320,7 +320,7 @@ crop-global-SSP2-4.5-ne30 : FORCE $(BATCHJOBS) $@.sh crop-global-SSP2-4.5-ne0np4 : FORCE - $(MKSURFDATA) --number-of-nodes 9 --scenario $@ --jobscript-file $@.sh --walltime 12:00:00 + $(MKSURFDATA) --number-of-nodes 2 --scenario $@ --jobscript-file $@.sh --walltime 12:00:00 $(BATCHJOBS) $@.sh crop-global-SSP2-4.5-C96 : FORCE From 4c07dcc2a5079a0d30cca0c520632f24925ea789 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 1 May 2024 18:50:43 -0600 Subject: [PATCH 020/126] Add basic script to compare testlist baseline lnd_in files --- bld/unit_testers/cmp_baseline_lnd_in_files.sh | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100755 bld/unit_testers/cmp_baseline_lnd_in_files.sh diff --git a/bld/unit_testers/cmp_baseline_lnd_in_files.sh b/bld/unit_testers/cmp_baseline_lnd_in_files.sh new file mode 100755 index 0000000000..de32cddf76 --- /dev/null +++ b/bld/unit_testers/cmp_baseline_lnd_in_files.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +baseline=$1 +compare=$2 + +cwd=`pwd` +if [ -z "$1" ]; then + echo "Need to enter a baseline directory tag name" + exit 1 +fi +if [ -z "$2" ]; then + echo "Need to enter a comparison directory tag name" + exit 1 +fi + +BASELINE_ROOT=/glade/campaign/cgd/tss/ctsm_baselines +root=$BASELINE_ROOT/$baseline +if ! test -d "$root"; then + echo "Root directory of $root does NOT exist" + exit 1 +fi +comp_root=$BASELINE_ROOT/$compare +if ! test -d "$comp_root"; then + echo "Root comparison directory of $comp_root does NOT exist" + exit 1 +fi +cd $root +filepat="*" +dirnames=($filepat) +if [ "${dirnames[*]}" = "$filepat" ]; then + echo "No directories exist in this directory" + exit 1 +fi +for dir in ${dirnames[*]} +do + echo $dir + base=$dir/CaseDocs/lnd_in + comp=$BASELINE_ROOT/$compare/$dir/CaseDocs/lnd_in + if ! test -f "$base"; then + echo "$base, does NOT exist, skipping" + elif ! test -f "$comp"; then + echo "$comp, does NOT exist, skipping" + else + $cwd/../../cime/CIME/Tools/compare_namelists $base $comp + fi +done From 5192bd1520f93a00f085d31b266e3913f722652e Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 1 May 2024 22:16:32 -0600 Subject: [PATCH 021/126] Fix for #2504 so tests of use-cases are done, and also done over all physics options --- bld/unit_testers/build-namelist_test.pl | 44 +++++++++++++++---------- 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index 7b654337af..74dbcef3f4 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -163,10 +163,10 @@ sub cat_and_create_namelistinfile { # # Figure out number of tests that will run # -my $ntests = 2513; +my $ntests = 3665; if ( defined($opts{'compare'}) ) { - $ntests += 1545; + $ntests += 2313; } plan( tests=>$ntests ); @@ -1421,32 +1421,40 @@ sub cat_and_create_namelistinfile { } print "\n==================================================\n"; -print " Rest all use-cases \n"; +print " Test all use-cases over all physics options\n"; print "==================================================\n"; # Run over all use-cases...
my $list = `$bldnml -use_case list 2>&1 | grep "use case"`; my @usecases; if ( $list =~ /build-namelist : use cases : (.+)$/ ) { - my @usecases = split( / /, $list ); + @usecases = split( / /, $1 ); } else { die "ERROR:: Trouble getting list of use-cases\n"; } -foreach my $usecase ( @usecases ) { - $options = "-use_case $usecase -envxml_dir ."; - &make_env_run(); - eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; - is( $@, '', "options: $options" ); - $cfiles->checkfilesexist( "$options", $mode ); - $cfiles->shownmldiff( "default", "standard" ); - if ( defined($opts{'compare'}) ) { - $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode ); - $cfiles->comparefiles( "$options", $mode, $opts{'compare'} ); - } - if ( defined($opts{'generate'}) ) { - $cfiles->copyfiles( "$options", $mode ); +if ( $#usecases != 15 ) { + print "use-cases = @usecases\n"; + die "ERROR:: Number of use-cases isn't what's expected\n"; +} +foreach my $phys ( "clm4_5", "clm5_0", "clm5_1", "clm6_0" ) { + print "physics = $phys\n"; + foreach my $usecase ( @usecases ) { + print "usecase = $usecase\n"; + $options = "-use_case $usecase -envxml_dir ."; + &make_env_run(); + eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; + is( $@, '', "options: $options" ); + $cfiles->checkfilesexist( "$options", $mode ); + $cfiles->shownmldiff( "default", "standard" ); + if ( defined($opts{'compare'}) ) { + $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode ); + $cfiles->comparefiles( "$options", $mode, $opts{'compare'} ); + } + if ( defined($opts{'generate'}) ) { + $cfiles->copyfiles( "$options", $mode ); + } + &cleanup(); } - &cleanup(); } print "\n==================================================\n"; From 480ae1b5ce1557f1848447bd8981690a25bbeaa1 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Thu, 2 May 2024 00:53:17 -0600 Subject: [PATCH 022/126] Simplify and correct the use-cases, with this the difference between clm5_1 and clm6_0 physics options is only the params_file which is what's expected this finishes out issues in #2492 --- bld/namelist_files/use_cases/1850_control.xml | 45 +++------------ .../use_cases/1850_noanthro_control.xml | 33 ++--------- bld/namelist_files/use_cases/2000_control.xml | 38 +++---------- bld/namelist_files/use_cases/2010_control.xml | 45 +++------------ .../use_cases/20thC_transient.xml | 56 +++++-------------- bld/namelist_files/use_cases/stdurbpt_pd.xml | 8 +-- 6 files changed, 50 insertions(+), 175 deletions(-) diff --git a/bld/namelist_files/use_cases/1850_control.xml b/bld/namelist_files/use_cases/1850_control.xml index 94ee8c5d0d..6ea033629f 100644 --- a/bld/namelist_files/use_cases/1850_control.xml +++ b/bld/namelist_files/use_cases/1850_control.xml @@ -8,47 +8,18 @@ constant -.false. -.false. -.false. +.false. 
-1850 -1850 +1850 +1850 -1850 -1850 +1850 +1850 -1850 -1850 +1850 +1850 -1850 -1850 - -1850 -1850 - -1850 -1850 - -1850 -1850 - -1850 -1850 - -1850 -1850 - -lnd/clm2/ndepdata/fndep_clm_WACCM6_CMIP6piControl001_y21-50avg_1850monthly_0.95x1.25_c180802.nc - -lnd/clm2/ndepdata/fndep_clm_WACCM6_CMIP6piControl001_y21-50avg_1850monthly_0.95x1.25_c180802.nc - -lnd/clm2/ndepdata/fndep_clm_WACCM6_CMIP6piControl001_y21-50avg_1850monthly_0.95x1.25_c180802.nc - -cycle -cycle diff --git a/bld/namelist_files/use_cases/1850_noanthro_control.xml b/bld/namelist_files/use_cases/1850_noanthro_control.xml index 636164a729..d84903f43c 100644 --- a/bld/namelist_files/use_cases/1850_noanthro_control.xml +++ b/bld/namelist_files/use_cases/1850_noanthro_control.xml @@ -10,26 +10,11 @@ .false. -1850 -1850 +1850 +1850 -1850 -1850 - -1850 -1850 - -cycle -cycle - -1925 -1925 - -1925 -1925 - -1925 -1925 +1925 +1925 none nn -1850 -1850 - -1850 -1850 - -1850 -1850 +1850 +1850 NONE diff --git a/bld/namelist_files/use_cases/2000_control.xml b/bld/namelist_files/use_cases/2000_control.xml index f3c4980fc8..2fce7c5cce 100644 --- a/bld/namelist_files/use_cases/2000_control.xml +++ b/bld/namelist_files/use_cases/2000_control.xml @@ -8,37 +8,17 @@ constant -.true. -.false. -.true. -.false. -.false. +.true. +.false. +.false. -2000 -2000 +2000 +2000 -2000 -2000 +2000 +2000 -2000 -2000 - -2000 -2000 - -2000 -2000 - -2000 -2000 - -2000 -2000 - -2000 -2000 - -2000 -2000 +2000 +2000 diff --git a/bld/namelist_files/use_cases/2010_control.xml b/bld/namelist_files/use_cases/2010_control.xml index 9316ecfb7f..d4c2a9c9c0 100644 --- a/bld/namelist_files/use_cases/2010_control.xml +++ b/bld/namelist_files/use_cases/2010_control.xml @@ -8,44 +8,17 @@ constant -.true. -.true. -.false. -.true. -.false. -.false. +.true. +.true. +.false. -2010 -2010 +2010 +2010 -2010 -2010 +2010 +2010 -2010 -2010 - -2010 -2010 - -2010 -2010 - -2010 -2010 - -2010 -2010 - -2010 -2010 - -2010 -2010 - -2010 -2010 - -2010 -2010 +2010 +2010 diff --git a/bld/namelist_files/use_cases/20thC_transient.xml b/bld/namelist_files/use_cases/20thC_transient.xml index d6dd729b35..b88ca3d3b1 100644 --- a/bld/namelist_files/use_cases/20thC_transient.xml +++ b/bld/namelist_files/use_cases/20thC_transient.xml @@ -18,46 +18,20 @@ flanduse_timeseries -.true. -.false. -.true. -.false. -.false. - -1850 -2015 -1850 - -1850 -2015 -1850 - -1850 -2015 -1850 - -1850 -2016 -1850 - -1850 -2016 -1850 - -1850 -2016 -1850 - -1850 -2106 -1850 - -1850 -2106 -1850 - -1850 -2106 -1850 +.true. +.false. +.false. + +1850 +2015 +1850 + +1850 +2016 +1850 + +1850 +2106 +1850 diff --git a/bld/namelist_files/use_cases/stdurbpt_pd.xml b/bld/namelist_files/use_cases/stdurbpt_pd.xml index 65786f32ae..6f5e754ba0 100644 --- a/bld/namelist_files/use_cases/stdurbpt_pd.xml +++ b/bld/namelist_files/use_cases/stdurbpt_pd.xml @@ -18,10 +18,8 @@ 'OFF' -.true. -.false. -.true. -.false. -.false. +.true. +.false. +.false. 
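The invariant this use-case cleanup is after -- that once the defaults are simplified, a clm5_1 run and a clm6_0 run should produce lnd_in files differing only in the params_file -- is exactly the kind of key-by-key comparison the lnd_in tooling above automates. As a rough illustration of such a check (a hedged sketch only, not the CIME compare_namelists tool: it assumes one "key = value" per line and ignores namelist group markers and line continuations):

import sys

def read_namelist(path):
    """Crudely parse 'key = value' lines of a Fortran namelist into a dict."""
    settings = {}
    with open(path) as nml:
        for line in nml:
            line = line.strip()
            # Skip group headers ('&clm_inparm'), terminators ('/'), and comments ('!')
            if "=" in line and not line.startswith(("&", "/", "!")):
                key, _, value = line.partition("=")
                settings[key.strip().lower()] = value.strip()
    return settings

def diff_namelists(path_a, path_b):
    """Report every key whose value differs between the two namelist files."""
    nml_a, nml_b = read_namelist(path_a), read_namelist(path_b)
    for key in sorted(set(nml_a) | set(nml_b)):
        if nml_a.get(key) != nml_b.get(key):
            print(f"{key}: {nml_a.get(key, 'MISSING')} != {nml_b.get(key, 'MISSING')}")

if __name__ == "__main__":
    diff_namelists(sys.argv[1], sys.argv[2])

Run against a clm5_1 lnd_in and a clm6_0 lnd_in generated after this change, the expectation is that only the paramfile setting shows up in the output.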
From 9ba3c23372d197238f4931a66abd52615470779b Mon Sep 17 00:00:00 2001 From: Teagan King Date: Thu, 2 May 2024 10:10:05 -0600 Subject: [PATCH 023/126] revert changes in buildnml and config_component b/c in other PR --- cime_config/buildnml | 2 +- cime_config/config_component.xml | 26 +------------------------- 2 files changed, 2 insertions(+), 26 deletions(-) diff --git a/cime_config/buildnml b/cime_config/buildnml index 5d6e75272c..0521830616 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -138,7 +138,7 @@ def buildnml(case, caseroot, compname): clm_usrdat_name = case.get_value("CLM_USRDAT_NAME") clmusr = " -clm_usr_name %s " % clm_usrdat_name # Write warning about initial condition data - if "NEON" in clm_usrdat_name or "PLUMBER" in clm_usrdat_name and clm_force_coldstart == "off": + if "NEON" in clm_usrdat_name and clm_force_coldstart == "off": if ("_transient" in clm_nml_use_case) and ( re.fullmatch(r"\w\w\w\w\.transient", casename) is None or clm_usrdat_name is "NEON.PRISM" diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index db2217210a..1f8f435187 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -248,8 +248,7 @@ to create_newcase. The default value is UNSET. For NEON cases, this can be set to either NEON or NEON.PRISM, the latter of which would use PRISM precipitation instead of the default NEON precipitation. NEON cases then also - use the variable NEONSITE to specify the exact site. PLUMBER cases use the variable - PLUMBER2SITE to specify the exact site. + use the variable NEONSITE to specify the exact site. @@ -337,29 +336,6 @@ Version id of Neon data - - char - - - AR-SLu,AT-Neu,AU-ASM,AU-Cow,AU-Cpr,AU-Ctr,AU-Cum,AU-DaP,AU-DaS,AU-Dry,AU-Emr,AU-Gin,AU-GWW,AU-How,AU-Lit, - AU-Otw,AU-Rig,AU-Rob,AU-Sam,AU-Stp,AU-TTE,AU-Tum,AU-Whr,AU-Wrr,AU-Ync,BE-Bra,BE-Lon,BE-Vie,BR-Sa3,BW-Ma1,CA-NS1, - CA-NS2,CA-NS4,CA-NS5,CA-NS6,CA-NS7,CA-Qcu,CA-Qfo,CA-SF1,CA-SF2,CA-SF3,CH-Cha,CH-Dav,CH-Fru,CH-Oe1,CN-Cha,CN-Cng, - CN-Dan,CN-Din,CN-Du2,CN-HaM,CN-Qia,CZ-wet,DE-Bay,DE-Geb,DE-Gri,DE-Hai,DE-Kli,DE-Meh,DE-Obe,DE-Seh,DE-SfN,DE-Tha, - DE-Wet,DK-Fou,DK-Lva,DK-Ris,DK-Sor,DK-ZaH,ES-ES1,ES-ES2,ES-LgS,ES-LMa,ES-VDA,FI-Hyy,FI-Kaa,FI-Lom,FI-Sod,FR-Fon, - FR-Gri,FR-Hes,FR-LBr,FR-Lq1,FR-Lq2,FR-Pue,GF-Guy,HU-Bug,ID-Pag,IE-Ca1,IE-Dri,IT-Amp,IT-BCi,IT-CA1,IT-CA2,IT-CA3, - IT-Col,IT-Cpz,IT-Isp,IT-Lav,IT-LMa,IT-Mal,IT-MBo,IT-Noe,IT-Non,IT-PT1,IT-Ren,IT-Ro1,IT-Ro2,IT-SR2,IT-SRo,JP-SMF, - NL-Ca1,NL-Hor,NL-Loo,PL-wet,PT-Esp,PT-Mi1,PT-Mi2,RU-Che,RU-Fyo,RU-Zot,SD-Dem,SE-Deg,UK-Gri,UK-Ham,UK-PL3,US-AR1, - US-AR2,US-ARM,US-Aud,US-Bar,US-Bkg,US-Blo,US-Bo1,US-Cop,US-FPe,US-GLE,US-Goo,US-Ha1,US-Ho1,US-KS2,US-Los,US-Me2, - US-Me4,US-Me6,US-MMS,US-MOz,US-Myb,US-Ne1,US-Ne2,US-Ne3,US-NR1,US-PFa,US-Prr,US-SP1,US-SP2,US-SP3,US-SRG,US-SRM, - US-Syv,US-Ton,US-Tw4,US-Twt,US-UMB,US-Var,US-WCr,US-Whs,US-Wkg,ZA-Kru,ZM-Mon, - - - run_component_ctsm - env_run.xml - Name of site for NEON tower data - - - ========================================= CLM naming conventions From 3401a6875a2a8b9d02950926f794aa3fb3c0d2a2 Mon Sep 17 00:00:00 2001 From: Teagan King Date: Thu, 2 May 2024 10:11:48 -0600 Subject: [PATCH 024/126] fully revert config_component.xml --- cime_config/config_component.xml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index 1f8f435187..16462c938c 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -248,8 +248,7 @@ to 
create_newcase. The default value is UNSET. For NEON cases, this can be set to either NEON or NEON.PRISM, the latter of which would use PRISM precipitation instead of the default NEON precipitation. NEON cases then also - use the variable NEONSITE to specify the exact site. - + use the variable NEONSITE to specify the exact site. From f80e3231db8cea0128277e2a3ff697d7d156b34f Mon Sep 17 00:00:00 2001 From: Teagan King Date: Thu, 2 May 2024 10:27:43 -0600 Subject: [PATCH 025/126] config_component.xml undo --- cime_config/config_component.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index 16462c938c..281a94a0b2 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -248,7 +248,7 @@ to create_newcase. The default value is UNSET. For NEON cases, this can be set to either NEON or NEON.PRISM, the latter of which would use PRISM precipitation instead of the default NEON precipitation. NEON cases then also - use the variable NEONSITE to specify the exact site. + use the variable NEONSITE to specify the exact site. From 37a5c60bb838be21cc958e6fd0869e03e57e77e3 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Thu, 2 May 2024 14:26:53 -0600 Subject: [PATCH 026/126] Update ChangeLog and ChangeSum. --- doc/ChangeLog | 81 +++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 82 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index e5c2466085..e0a5e45cb6 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,85 @@ =============================================================== +Tag name: ctsm5.2.003 +Originator(s): samrabin (Sam Rabin, UCAR/TSS, samrabin@ucar.edu) +Date: Thu May 2 14:06:54 MDT 2024 +One-line Summary: Merge b4b-dev + +Purpose and description of changes +---------------------------------- + +Brings in 4 PRs from b4b-dev to master: +- Regional CTSM Simulations and Capability of Creating Mesh Files (ESCOMP/CTSM#1892; Negin Sobhani and Adrianna Foster) +- Add line about documentation in PR template (ESCOMP/CTSM#2488; Sam Rabin) +- CTSM5.2 2000 fsurdat T42 64x128 file (ESCOMP/CTSM#2495; Sam Levis) +- Move plumber2 scripts to python directory (ESCOMP/CTSM#2505; Teagan King) + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.]
+ +[ ] clm6_0 + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- + +List of CTSM issues fixed (include CTSM Issue # and description): +- Resolves ESCOMP/CTSM#1513: Need a process to subset ESMF mesh files from global ones for regional grids +- Resolves ESCOMP/CTSM#1773: High resolution regional simulations +- Resolves ESCOMP/CTSM#2187: Move new PLUMBER2 scripts to python directory to enable python testing +- Resolves ESCOMP/CTSM#2486: Temporarily add back a T42 dataset for CAM + + +Notes of particular relevance for users +--------------------------------------- + +Changes to documentation: +- Adds documentation for making mesh files + + +Notes of particular relevance for developers: +--------------------------------------------- + +Changes to tests or testing: +- Adds testing for mesh-making Python scripts +- Adds testing for plumber2_surf_wrapper + + +Testing summary: +---------------- + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + +Other details +------------- + +Pull Requests that document the changes (include PR ids): +- ESCOMP/CTSM#2513: Merge b4b-dev 2024-05-02 +- Constituent PRs: + - ESCOMP/CTSM#1892: Regional CTSM Simulations and Capability of Creating Mesh Files (https://github.com/ESCOMP/CTSM/pull/1892) + - ESCOMP/CTSM#2488: Add line about documentation in PR template (https://github.com/ESCOMP/CTSM/pull/2488) + - ESCOMP/CTSM#2495: CTSM5.2 2000 fsurdat T42 64x128 file (https://github.com/ESCOMP/CTSM/pull/2495) + - ESCOMP/CTSM#2505: Move plumber2 scripts to python directory (https://github.com/ESCOMP/CTSM/pull/2505) + +=============================================================== +=============================================================== Tag name: ctsm5.2.002 Originator(s): glemieux (Gregory Lemieux, LBNL, glemieux@lbl.gov) Date: Fri 26 Apr 2024 11:13:46 AM MDT diff --git a/doc/ChangeSum b/doc/ChangeSum index 40033a7d6f..b48e04889d 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.2.003 samrabin 05/02/2024 Merge b4b-dev ctsm5.2.002 glemieux 04/26/2024 FATES default allometry parameter file update ctsm5.2.001 erik 04/22/2024 Merge b4b-dev ctsm5.2.0 many 04/20/2024 New mksurfdata_esmf tool to create new surface datasets that are in place From 39cd37dd5d1da5ea96b2327ecc10ac7f72d37383 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 6 May 2024 13:05:29 -0600 Subject: [PATCH 027/126] Provide scalar cth and cbh in unit tests. Failing. 
--- python/ctsm/test/test_unit_singlept_data_surfdata.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python/ctsm/test/test_unit_singlept_data_surfdata.py b/python/ctsm/test/test_unit_singlept_data_surfdata.py index a29fa05b33..2106799a4b 100755 --- a/python/ctsm/test/test_unit_singlept_data_surfdata.py +++ b/python/ctsm/test/test_unit_singlept_data_surfdata.py @@ -47,8 +47,8 @@ class TestSinglePointCaseSurfaceNoCrop(unittest.TestCase): evenly_split_cropland = False pct_pft = None num_pft = 16 - cth = [0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9] - cbh = [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] + cth = 0.9 + cbh = 0.1 include_nonveg = False uni_snow = True cap_saturation = True @@ -668,8 +668,8 @@ class TestSinglePointCaseSurfaceCrop(unittest.TestCase): evenly_split_cropland = False pct_pft = None num_pft = 78 - cth = [0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9] - cbh = [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] + cth = 0.9 + cbh = 0.1 include_nonveg = False uni_snow = False cap_saturation = False From 486d634d9e930987a61a3609eafaf6c35ffeb133 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 6 May 2024 13:17:17 -0600 Subject: [PATCH 028/126] Ensure cth and cbh are iterable before zipping. Unit tests now pass. --- python/ctsm/site_and_regional/single_point_case.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 86dabe609d..b197b29578 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -27,6 +27,14 @@ LAST_MONTH = 12 +def _ensure_iterable(thing_we_want_iterable, iterable_length): + try: + iter(thing_we_want_iterable) + return thing_we_want_iterable + except TypeError: + return [thing_we_want_iterable] * iterable_length + + class SinglePointCase(BaseCase): """ A class to encapsulate everything for single point cases. 
@@ -415,7 +423,10 @@ def modify_surfdata_atpoint(self, f_orig): # f_mod["PCT_CROP"][:, :] = 0 # -- loop over all dom_pft and pct_pft - zip_pfts = zip(self.dom_pft, self.pct_pft, self.cth, self.cbh) + iterable_length = len(self.dom_pft) + cth_to_zip = _ensure_iterable(self.cth, iterable_length) + cbh_to_zip = _ensure_iterable(self.cbh, iterable_length) + zip_pfts = zip(self.dom_pft, self.pct_pft, cth_to_zip, cbh_to_zip) for dom_pft, pct_pft, cth, cbh in zip_pfts: if cth is not None: f_mod["MONTHLY_HEIGHT_TOP"][:, :, :, dom_pft] = cth From af672fb3f0727bad7490c131499e9c151779c574 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Mon, 6 May 2024 13:53:07 -0600 Subject: [PATCH 029/126] Fix comparison to a baseline directory --- bld/unit_testers/compare_namelists | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/bld/unit_testers/compare_namelists b/bld/unit_testers/compare_namelists index e0c6263555..7a8badc288 100755 --- a/bld/unit_testers/compare_namelists +++ b/bld/unit_testers/compare_namelists @@ -80,9 +80,10 @@ if [ "$PHYSB" = "all" ] && [ "$PHYSA" != "all" ]; then fi if [ "$verbose" = "YES" ]; then echo "Get list of lnd_in.* files"; fi +if [ "$verbose" = "YES" ]; then pwd; fi filepat="lnd_in.*" filenames=($filepat) -if [ "$verbose" = "YES" ]; then echo "filename list = $filenames"; fi +if [ "$verbose" = "YES" ]; then echo "filename list = ${filenames[*]}"; fi if [ "${filenames[*]}" = "$filepat" ]; then echo "No lnd_in files exist in the current directory -- run ./build-namelist_test.pl first" exit 1 @@ -92,9 +93,11 @@ do fileA="./$file" fileB="$baseline/$file" # If physicsA option used and the file matches the physics input - if [[ $fileA =~ "-phys+$PHYSA" ]]; then + if [[ "$fileA" =~ "-phys+$PHYSA" ]] || [ "$PHYSA" = "all" ]; then # Replace physicsB for fileB - fileB=${fileB/-phys+$PHYSA/-phys+$PHYSB} + if [ ! "$PHYSA" = "all" ]; then + fileB=${fileB/-phys+$PHYSA/-phys+$PHYSB} + fi if ! test -f $fileB; then if [ "$verbose" = "YES" ]; then echo "$fileB does NOT exist -- skipping"; fi else @@ -109,4 +112,4 @@ do fi fi done -if [ "$verbose" = "YES" ]; then echo "Successfully ran through lnd_in files to compare"; fi \ No newline at end of file +if [ "$verbose" = "YES" ]; then echo "Successfully ran through lnd_in files to compare"; fi From cfeb9d6301b4ef455762f83262da831fe5e2b129 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 6 May 2024 14:00:33 -0600 Subject: [PATCH 030/126] Move ensure_iterable() to utils.py and add testing. One failure. 
--- .../site_and_regional/single_point_case.py | 14 ++------ python/ctsm/test/test_unit_utils.py | 34 ++++++++++++++++++- python/ctsm/utils.py | 11 ++++++ 3 files changed, 47 insertions(+), 12 deletions(-) diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index b197b29578..2c2aebad52 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -14,7 +14,7 @@ # -- import local classes for this script from ctsm.site_and_regional.base_case import BaseCase, USRDAT_DIR, DatmFiles -from ctsm.utils import add_tag_to_filename +from ctsm.utils import add_tag_to_filename, ensure_iterable logger = logging.getLogger(__name__) @@ -27,14 +27,6 @@ LAST_MONTH = 12 -def _ensure_iterable(thing_we_want_iterable, iterable_length): - try: - iter(thing_we_want_iterable) - return thing_we_want_iterable - except TypeError: - return [thing_we_want_iterable] * iterable_length - - class SinglePointCase(BaseCase): """ A class to encapsulate everything for single point cases. @@ -424,8 +416,8 @@ def modify_surfdata_atpoint(self, f_orig): # -- loop over all dom_pft and pct_pft iterable_length = len(self.dom_pft) - cth_to_zip = _ensure_iterable(self.cth, iterable_length) - cbh_to_zip = _ensure_iterable(self.cbh, iterable_length) + cth_to_zip = ensure_iterable(self.cth, iterable_length) + cbh_to_zip = ensure_iterable(self.cbh, iterable_length) zip_pfts = zip(self.dom_pft, self.pct_pft, cth_to_zip, cbh_to_zip) for dom_pft, pct_pft, cth, cbh in zip_pfts: if cth is not None: diff --git a/python/ctsm/test/test_unit_utils.py b/python/ctsm/test/test_unit_utils.py index 85ba2515dd..cc22401d1e 100755 --- a/python/ctsm/test/test_unit_utils.py +++ b/python/ctsm/test/test_unit_utils.py @@ -9,7 +9,7 @@ import os from ctsm import unit_testing -from ctsm.utils import fill_template_file +from ctsm.utils import fill_template_file, ensure_iterable from ctsm.config_utils import lon_range_0_to_360, _handle_config_value # Allow names that pylint doesn't like, because otherwise I find it hard @@ -328,6 +328,38 @@ def test_handleConfigValue_isListFalse(self): self.assertEqual(val_out, float(val_in)) +class TestUtilsEnsureIterable(unittest.TestCase): + """Tests of utils: ensure_iterable""" + + def test_ensure_iterable_number(self): + """ + Tests that ensure_iterable(NUMBER, 3) produces a list of 3 NUMBERs + """ + val = 724.1987 + self.assertEqual(ensure_iterable(val, 3), [val, val, val]) + + def test_ensure_iterable_none(self): + """ + Tests that ensure_iterable(None, 2) produces a list of 2 Nones + """ + val = None + self.assertEqual(ensure_iterable(val, 2), [val, val]) + + def test_ensure_iterable_already(self): + """ + Tests that ensure_iterable() returns the input if it's already iterable + """ + val = [11, 12] + self.assertEqual(ensure_iterable(val, 2), val) + + def test_ensure_iterable_error_wrong_length(self): + """ + Tests that ensure_iterable() errors if input is iterable but of the wrong length + """ + with self.assertRaisesRegex(SystemExit, "Input is iterable but wrong length"): + ensure_iterable([11, 12], 3) + + if __name__ == "__main__": unit_testing.setup_for_tests() unittest.main() diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 8578ea860c..f7d4e163c6 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -26,6 +26,17 @@ def abort(errmsg): sys.exit("ERROR: {}".format(errmsg)) +def ensure_iterable(thing_we_want_iterable, iterable_length): + """ + Ensure that a variable is 
iterable + """ + try: + iter(thing_we_want_iterable) + return thing_we_want_iterable + except TypeError: + return [thing_we_want_iterable] * iterable_length + + def fill_template_file(path_to_template, path_to_final, substitutions): """Given a template file (based on python's template strings), write a copy of the file with template values filled in. From a696d9551f619d58e307aa09b95239e6e2a6874b Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 6 May 2024 14:05:52 -0600 Subject: [PATCH 031/126] ensure_iterable() now errors if already iterable but wrong length. Tests pass. --- python/ctsm/test/test_unit_utils.py | 2 +- python/ctsm/utils.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/python/ctsm/test/test_unit_utils.py b/python/ctsm/test/test_unit_utils.py index cc22401d1e..aed43cfede 100755 --- a/python/ctsm/test/test_unit_utils.py +++ b/python/ctsm/test/test_unit_utils.py @@ -356,7 +356,7 @@ def test_ensure_iterable_error_wrong_length(self): """ Tests that ensure_iterable() errors if input is iterable but of the wrong length """ - with self.assertRaisesRegex(SystemExit, "Input is iterable but wrong length"): + with self.assertRaisesRegex(ValueError, "Input is iterable but wrong length"): ensure_iterable([11, 12], 3) diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index f7d4e163c6..3f4149c163 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -30,11 +30,18 @@ def ensure_iterable(thing_we_want_iterable, iterable_length): """ Ensure that a variable is iterable """ + already_iterable = True try: iter(thing_we_want_iterable) - return thing_we_want_iterable except TypeError: - return [thing_we_want_iterable] * iterable_length + already_iterable = False + + if not already_iterable: + thing_we_want_iterable = [thing_we_want_iterable] * iterable_length + elif len(thing_we_want_iterable) != iterable_length: + raise ValueError(f"Input is iterable but wrong length") + + return thing_we_want_iterable def fill_template_file(path_to_template, path_to_final, substitutions): From 475831f0fb0e31e97f630eac4e078c886558b61c Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 6 May 2024 14:08:50 -0600 Subject: [PATCH 032/126] Clean up ctsm/utils.py with black and pylint. --- python/ctsm/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/ctsm/utils.py b/python/ctsm/utils.py index 3f4149c163..a5a02a5c9d 100644 --- a/python/ctsm/utils.py +++ b/python/ctsm/utils.py @@ -35,12 +35,12 @@ def ensure_iterable(thing_we_want_iterable, iterable_length): iter(thing_we_want_iterable) except TypeError: already_iterable = False - + if not already_iterable: thing_we_want_iterable = [thing_we_want_iterable] * iterable_length elif len(thing_we_want_iterable) != iterable_length: - raise ValueError(f"Input is iterable but wrong length") - + raise ValueError("Input is iterable but wrong length") + return thing_we_want_iterable From 0819983f37aadf9f7c9dde290dd7f65b4870b3f6 Mon Sep 17 00:00:00 2001 From: Sam Rabin Date: Mon, 6 May 2024 14:09:27 -0600 Subject: [PATCH 033/126] Add previous commit to .git-blame-ignore-revs. 
--- .git-blame-ignore-revs | 1 + 1 file changed, 1 insertion(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index f86a330408..e769d3187c 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -25,6 +25,7 @@ a9d96219902cf609636886c7073a84407f450d9a d866510188d26d51bcd6d37239283db690af7e82 0dcd0a3c1abcaffe5529f8d79a6bc34734b195c7 e096358c832ab292ddfd22dd5878826c7c788968 +475831f0fb0e31e97f630eac4e078c886558b61c # Ran SystemTests and python/ctsm through black python formatter 5364ad66eaceb55dde2d3d598fe4ce37ac83a93c 8056ae649c1b37f5e10aaaac79005d6e3a8b2380 From eec4e79bb867aa671ad52a9c52028f1df00358e1 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 7 May 2024 03:23:41 -0600 Subject: [PATCH 034/126] Update testlist based on what's needed for CAM and CESM test lists --- cime_config/testdefs/testlist_clm.xml | 32 +++++++++++++++++++++------ 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index c2a90507ba..56ba176cfe 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -73,7 +73,7 @@ - + @@ -102,7 +102,7 @@ - + @@ -111,7 +111,7 @@ - + @@ -120,7 +120,7 @@ - + @@ -129,7 +129,7 @@ - + @@ -138,7 +138,7 @@ - + @@ -331,6 +331,24 @@ + + + + + + + + + + + + + + + + + + @@ -685,7 +703,7 @@ - + From e529c2f119f8561a7203f0f3588ded549094e6e6 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 7 May 2024 03:27:31 -0600 Subject: [PATCH 035/126] Add IHistClm60BgcCropG which is needed for CESM testing, change NoAnthro compsets to run with RTM as required for Paleo work, best guess as to what needs science support --- cime_config/config_compsets.xml | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/cime_config/config_compsets.xml b/cime_config/config_compsets.xml index 2de911da86..538329e523 100644 --- a/cime_config/config_compsets.xml +++ b/cime_config/config_compsets.xml @@ -91,6 +91,8 @@ I2000Clm60Sp 2000_DATM%GSWP3v1_CLM60%SP_SICE_SOCN_MOSART_SGLC_SWAV + + @@ -182,6 +184,8 @@ I1850Clm60Sp 1850_DATM%GSWP3v1_CLM60%SP_SICE_SOCN_MOSART_SGLC_SWAV + + @@ -291,22 +295,24 @@ I1850Clm60BgcNoAnthro - 1850_DATM%GSWP3v1_CLM60%BGC-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV + 1850_DATM%GSWP3v1_CLM60%BGC-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV + I1850Clm60SpNoAnthro - 1850_DATM%GSWP3v1_CLM60%SP-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV + 1850_DATM%GSWP3v1_CLM60%SP-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV + I1850Clm50BgcNoAnthro - 1850_DATM%GSWP3v1_CLM50%BGC-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV + 1850_DATM%GSWP3v1_CLM50%BGC-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV I1850Clm50SpNoAnthro - 1850_DATM%GSWP3v1_CLM50%SP-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV + 1850_DATM%GSWP3v1_CLM50%SP-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV @@ -319,12 +325,15 @@ I1850Clm60SpNoAnthro - 1850_DATM%GSWP3v1_CLM60%SP-NOANTHRO_SICE_SOCN_MOSART_SGLC_SWAV + 1850_DATM%GSWP3v1_CLM60%SP-NOANTHRO_SICE_SOCN_RTM_SGLC_SWAV + IHistClm60Sp HIST_DATM%GSWP3v1_CLM60%SP_SICE_SOCN_MOSART_SGLC_SWAV + + @@ -631,6 +640,17 @@ I1850Clm60BgcCropG 1850_DATM%GSWP3v1_CLM60%BGC-CROP_SICE_SOCN_MOSART_CISM2%GRIS-EVOLVE_SWAV + + + + + + + IHistClm60BgcCropG + HIST_DATM%GSWP3v1_CLM60%BGC-CROP_SICE_SOCN_MOSART_CISM2%GRIS-EVOLVE_SWAV + + + From 9b5001d0026e0de26bc0b20608f3924ea0cca214 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Tue, 7 May 2024 17:02:30 -0600 Subject: [PATCH 036/126] Fix #2519 so that the mksurfdata_esmf build can work in a CESM checkout --- tools/mksurfdata_esmf/gen_mksurfdata_build | 11 
++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/tools/mksurfdata_esmf/gen_mksurfdata_build b/tools/mksurfdata_esmf/gen_mksurfdata_build index 974c1929a5..864b5915b9 100755 --- a/tools/mksurfdata_esmf/gen_mksurfdata_build +++ b/tools/mksurfdata_esmf/gen_mksurfdata_build @@ -127,7 +127,16 @@ if [ "$existing_bld" = "No" ]; then if [ "$verbose" != "YES" ]; then options="$options --silent" fi - $cwd/../../cime/CIME/scripts/configure --macros-format CMake --machine $MACH $options + # Try finding configure file assuming a CTSM standalone checkout and then try for a CESM checkout + configure="$cwd/../../cime/CIME/scripts/configure" + if [ ! -f "$configure" ]; then + configure="$cwd/../../../../cime/CIME/scripts/configure" + if [ ! -f "$configure" ]; then + echo "Error $configure file does NOT exist in expected location, either for a standalone checkout or CESM/CAM checkout" + exit 1 + fi + fi + $configure --macros-format CMake --machine $MACH $options if [ $? != 0 ]; then echo "Error doing configure for machine name: $MACH" From 89a4d1d0d02b655a10790fdee043dd65f8c65ab5 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 7 May 2024 17:42:19 -0600 Subject: [PATCH 037/126] Update ctsm_sci tests for 3 VR grids --- bld/namelist_files/namelist_defaults_ctsm.xml | 6 +++--- cime_config/config_component.xml | 2 +- cime_config/testdefs/testlist_clm.xml | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 0f533a56ff..67b21c1259 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -939,12 +939,12 @@ attributes from the config_cache.xml file (with keys converted to upper-case). >hgrid=1.9x2.5 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. -hgrid=ne0np4.ARCTIC.ne30x4 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. -hgrid=ne0np4.ARCTICGRIS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. @@ -956,7 +956,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). -hgrid=ne0np4CONUS.ne30x8 maxpft=17 mask=tx0.1v2 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. 
diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index 984ba5ecdf..be6916ae0c 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -210,7 +210,7 @@ 1850-2100_SSP3-7.0_transient 1850-2100_SSP5-3.4_transient 1850-2100_SSP2-4.5_transient - 1850-2100_SSP2-4.5_transient + 1850-2100_SSP2-4.5_transient 1850-2100_SSP1-1.9_transient 1850-2100_SSP4-3.4_transient 1850-2100_SSP4-6.0_transient diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index 918d86c213..f8c0a1bd79 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -2034,7 +2034,7 @@ - + @@ -2043,7 +2043,7 @@ - + @@ -2061,7 +2061,7 @@ - + From b64ceb6b1f7a5a462842c88861f2c5becc7e8c1e Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 8 May 2024 11:05:37 -0600 Subject: [PATCH 038/126] Change an expected failure "status" from PEND to FAIL --- cime_config/testdefs/ExpectedTestFails.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index fe6a54c221..e346e6e7af 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -31,7 +31,7 @@ - PEND + FAIL #2460 From f33040751bcf1fdb566119f6f91d5c9695e97386 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 8 May 2024 13:55:17 -0600 Subject: [PATCH 039/126] Change comments about clm5_1 to clm6_0 --- cime_config/testdefs/testlist_clm.xml | 32 +++++++++++++-------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index 56ba176cfe..5350b4716b 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -312,14 +312,14 @@ - + - + @@ -328,7 +328,7 @@ - + @@ -337,7 +337,7 @@ - + @@ -346,7 +346,7 @@ - + @@ -586,14 +586,14 @@ - + - + @@ -602,7 +602,7 @@ - + @@ -749,14 +749,14 @@ - + - + @@ -765,7 +765,7 @@ - + @@ -2108,14 +2108,14 @@ - + - + @@ -2124,7 +2124,7 @@ - + @@ -2798,7 +2798,7 @@ - + @@ -2808,7 +2808,7 @@ - + From 524cbc06a30a6820f72c60e9c44d9748977afe48 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 8 May 2024 13:58:33 -0600 Subject: [PATCH 040/126] Revert order of irrigate and add note that it's also set in the use-cases --- bld/namelist_files/namelist_defaults_ctsm.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 25b0c80cf9..6f2f265da4 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -115,10 +115,10 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/isotopes/atm_delta_C14_CMIP6_SSP534os_3x1_global_1850-2100_yearly_c181209.nc lnd/clm2/isotopes/atm_delta_C14_CMIP6_SSP5B_3x1_global_1850-2100_yearly_c181209.nc - + +.false. .true. .false. -.false. .false. 
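The gen_mksurfdata_build fix a few commits above and the commit that follows both deal with the same layout problem: in a standalone CTSM checkout, cime sits at ./cime, two directories above tools/mksurfdata_esmf, while in a CESM or CAM checkout CTSM is itself a component and cime sits four levels up. That bash change probes both locations in order; a minimal Python sketch of the same try-in-order lookup (only the two relative paths are taken from the patch, the rest is illustrative) would be:

import os
import sys

def find_configure(tool_dir):
    """Return cime's 'configure', trying the standalone CTSM layout, then CESM/CAM."""
    candidates = [
        # Standalone CTSM checkout: cime at the top of the checkout
        os.path.join(tool_dir, "..", "..", "cime", "CIME", "scripts", "configure"),
        # CTSM as a component of a CESM/CAM checkout: cime four levels up
        os.path.join(tool_dir, "..", "..", "..", "..", "cime", "CIME", "scripts", "configure"),
    ]
    for path in candidates:
        if os.path.isfile(path):
            return os.path.abspath(path)
    sys.exit("configure not found for either a standalone or a CESM/CAM checkout")

if __name__ == "__main__":
    print(find_configure(os.path.dirname(os.path.abspath(__file__))))

The Python tooling takes the equivalent route through path_to_cime() in ctsm/path_utils.py, which is what the gen_mksurfdata_namelist change in the next commit switches to.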
From b9e947d76e40fbf39c0bdcfbae3de9e14b751b1c Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 8 May 2024 16:58:20 -0600 Subject: [PATCH 041/126] Get mksurfdata_esmf build and namelist build working under a CESM checkout --- python/ctsm/.pylintrc | 2 ++ python/ctsm/toolchain/gen_mksurfdata_namelist.py | 8 ++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/python/ctsm/.pylintrc b/python/ctsm/.pylintrc index db78949288..230f8acba8 100644 --- a/python/ctsm/.pylintrc +++ b/python/ctsm/.pylintrc @@ -436,6 +436,8 @@ good-names=i, ds, m, n, + ni, + nj, l1, l2, ax, diff --git a/python/ctsm/toolchain/gen_mksurfdata_namelist.py b/python/ctsm/toolchain/gen_mksurfdata_namelist.py index 8a953c39df..361616419f 100755 --- a/python/ctsm/toolchain/gen_mksurfdata_namelist.py +++ b/python/ctsm/toolchain/gen_mksurfdata_namelist.py @@ -12,7 +12,7 @@ from datetime import datetime import netCDF4 -from ctsm.path_utils import path_to_ctsm_root +from ctsm.path_utils import path_to_ctsm_root, path_to_cime from ctsm.ctsm_logging import setup_logging_pre_config, add_logging_args, process_logging_args logger = logging.getLogger(__name__) @@ -326,7 +326,7 @@ def main(): ) # determine output mesh - determine_output_mesh(res, force_model_mesh_file, input_path, rawdata_files, tool_path) + determine_output_mesh(res, force_model_mesh_file, input_path, rawdata_files) # Determine num_pft if nocrop_flag: @@ -752,11 +752,11 @@ def handle_transient_run( return landuse_fname, must_run_download_input_data -def determine_output_mesh(res, force_model_mesh_file, input_path, rawdata_files, tool_path): +def determine_output_mesh(res, force_model_mesh_file, input_path, rawdata_files): """ determine output mesh """ - xml_path = os.path.join(tool_path, "../../ccs_config/component_grids_nuopc.xml") + xml_path = os.path.join(path_to_cime(), "../ccs_config/component_grids_nuopc.xml") tree2 = ET.parse(xml_path) root = tree2.getroot() model_mesh = "" From 7d91ac1d991003e9c953f5e1bb903064d789e1bd Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 8 May 2024 17:16:23 -0600 Subject: [PATCH 042/126] Correct clm60 typo in testlist --- cime_config/testdefs/testlist_clm.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index 5350b4716b..75bdf63e19 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -331,22 +331,22 @@ - + - + - + - + From 010a008396e8edab3870270da919a706a543ee71 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 8 May 2024 17:32:03 -0600 Subject: [PATCH 043/126] Add a description and usage and error check command line args --- bld/unit_testers/cmp_baseline_lnd_in_files.sh | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/bld/unit_testers/cmp_baseline_lnd_in_files.sh b/bld/unit_testers/cmp_baseline_lnd_in_files.sh index de32cddf76..ebeeffc72a 100755 --- a/bld/unit_testers/cmp_baseline_lnd_in_files.sh +++ b/bld/unit_testers/cmp_baseline_lnd_in_files.sh @@ -1,15 +1,39 @@ #!/bin/bash +# +# A simple script to compare lnd_in namelists between two baselines on Derecho +# +#---------------------------------------------------------------------- +# Usage subroutine +usage() { + echo "" + echo "***********************************************************************" + echo "usage:" + echo "./cmp_baseline_lnd_in_files.sh " + echo " " + echo "Compares lnd_in files between two baselines on Derecho" + echo 
"***********************************************************************" +} + +#---------------------------------------------------------------------- + +if [ "$#" -ne 2 ]; then + echo "Need to give two baseline directories to compare" + usage + exit 1 +fi baseline=$1 compare=$2 cwd=`pwd` if [ -z "$1" ]; then echo "Need to enter a baseline directory tag name" + usage exit 1 fi if [ -z "$2" ]; then echo "Need to enter a comparison directory tag name" + usage exit 1 fi @@ -22,6 +46,7 @@ fi comp_root=$BASELINE_ROOT/$compare if ! test -d "$comp_root"; then echo "Root comparison directory of $comp_root does NOT exist" + usage exit 1 fi cd $root From fc0261376e1499fe06d6b41adfc3c2270bd373a6 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Wed, 8 May 2024 17:47:52 -0600 Subject: [PATCH 044/126] Adjustments to the testlist Make sure CESM testing compsets with cam6 forcing are being exercised. Update NoAnthro to Clm60. --- cime_config/testdefs/testlist_clm.xml | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index 75bdf63e19..1b825f0d40 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -21,11 +21,10 @@ - + - - + @@ -33,6 +32,15 @@ + + + + + + + + + @@ -141,6 +149,7 @@ + From f8c6a49539c3b1ce86dd2ca491c953b0c8856766 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Thu, 9 May 2024 02:05:20 -0600 Subject: [PATCH 045/126] Add a ctsm_sci test for I1850Clm50BgcCropG to verify that current B1850 will work --- cime_config/testdefs/testlist_clm.xml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index 1b825f0d40..f0dc3ee270 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -576,6 +576,15 @@ + + + + + + + + + From c37f743023dc4bf8fd26b7bd88336cbb7bc61d68 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Thu, 9 May 2024 14:57:59 -0600 Subject: [PATCH 046/126] Add more tests of finidat file settings, a bunch of things fail because of a problem in the use-case changes I made --- bld/unit_testers/build-namelist_test.pl | 164 ++++++++++++++++-------- 1 file changed, 114 insertions(+), 50 deletions(-) diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index 74dbcef3f4..2b65b18f38 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -163,10 +163,10 @@ sub cat_and_create_namelistinfile { # # Figure out number of tests that will run # -my $ntests = 3665; +my $ntests = 3385; if ( defined($opts{'compare'}) ) { - $ntests += 2313; + $ntests += 2097; } plan( tests=>$ntests ); @@ -1424,7 +1424,7 @@ sub cat_and_create_namelistinfile { print " Test all use-cases over all physics options\n"; print "==================================================\n"; -# Run over all use-cases... +# Run over all use-cases for f09 and all physics... 
my $list = `$bldnml -use_case list 2>&1 | grep "use case"`; my @usecases; if ( $list =~ /build-namelist : use cases : (.+)$/ ) { @usecases = split( / /, $1 ); } else { die "ERROR:: Trouble getting list of use-cases\n"; } +my @expect_fails = ( "1850-2100_SSP5-3.4_transient", "1850-2100_SSP4-3.4", "1850-2100_SSP1-1.9_transient", + "1850-2100_SSP4-6.0_transient" ); foreach my $phys ( "clm4_5", "clm5_0", "clm5_1", "clm6_0" ) { print "physics = $phys\n"; foreach my $usecase ( @usecases ) { print "usecase = $usecase\n"; - $options = "-use_case $usecase -envxml_dir ."; + $options = "-res 0.9x1.25 -use_case $usecase -envxml_dir ."; &make_env_run(); eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; - is( $@, '', "options: $options" ); - $cfiles->checkfilesexist( "$options", $mode ); - $cfiles->shownmldiff( "default", "standard" ); - if ( defined($opts{'compare'}) ) { - $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode ); - $cfiles->comparefiles( "$options", $mode, $opts{'compare'} ); + my $expect_fail = 0; + foreach my $failusecase ( @expect_fails ) { + if ( $failusecase eq $usecase ) { + $expect_fail = 1; + } } - if ( defined($opts{'generate'}) ) { - $cfiles->copyfiles( "$options", $mode ); + if ( not $expect_fail ) { + is( $@, '', "options: $options" ); + $cfiles->checkfilesexist( "$options", $mode ); + $cfiles->shownmldiff( "default", "standard" ); + if ( defined($opts{'compare'}) ) { + $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode ); + $cfiles->comparefiles( "$options", $mode, $opts{'compare'} ); + } + if ( defined($opts{'generate'}) ) { + $cfiles->copyfiles( "$options", $mode ); + } + } else { + isnt( $@, '', "options: $options" ); } &cleanup(); } } +print "\n=======================================================================================\n"; +print " Test the separate initial condition files, for ones not tested elsewhere\n"; +print "=========================================================================================\n"; + +my %finidat_files = ( + "f091850Clm45BgcGSW" =>{ phys =>"clm4_5", + atm_forc=>"GSWP3v1", + res => "0.9x1.25", + bgc => "bgc", + use_case => "1850_control", + start_ymd => "18500101", + }, + "f091850Clm45BgcCRU" =>{ phys =>"clm4_5", + atm_forc=>"CRUv7", + res => "0.9x1.25", + bgc => "bgc", + use_case => "1850_control", + start_ymd => "18500101", + }, + "f091850Clm45BgcCAM6" =>{ phys =>"clm4_5", + atm_forc=>"cam6.0", + res => "0.9x1.25", + bgc => "bgc", + use_case => "1850_control", + start_ymd => "18500101", + }, + "f091850Clm50BgcGSW" =>{ phys =>"clm5_0", + atm_forc=>"GSWP3v1", + res => "0.9x1.25", + bgc => "bgc", + use_case => "1850_control", + start_ymd => "18500101", + }, + "f091850Clm50SpGSW" =>{ phys =>"clm5_0", + atm_forc=>"GSWP3v1", + res => "0.9x1.25", + bgc => "sp", + use_case => "1850_control", + start_ymd => "18500101", + }, + "f091850Clm50BgcCRU" =>{ phys =>"clm5_0", + atm_forc=>"CRUv7", + res => "0.9x1.25", + bgc => "bgc", + use_case => "1850_control", + start_ymd => "18500101", + }, + "f091850Clm50SpCRU" =>{ phys =>"clm5_0", + atm_forc=>"CRUv7", + res => "0.9x1.25", + bgc => "sp", + use_case => "1850_control", + start_ymd => "18500101", + }, + "f091850Clm50BgcCAM6" =>{ phys =>"clm5_0", + atm_forc=>"cam6.0", + res => "0.9x1.25", + bgc => "bgc", + use_case => "1850_control", + start_ymd => "18500101", + }, + ); + +foreach my $key ( keys(%finidat_files) ) { + print( "$key\n" ); + my $phys = $finidat_files{$key}{'phys'}; + print "physics = $phys\n"; +
&make_config_cache($phys); + my $usecase = $finidat_files{$key}{'use_case'}; + my $bgc = $finidat_files{$key}{'bgc'}; + my $res = $finidat_files{$key}{'res'}; + my $start_ymd = $finidat_files{$key}{'start_ymd'}; + my $lnd_tuning_mode = "${phys}_" . $finidat_files{$key}{'atm_forc'}; + $options = "-bgc $bgc -res $res -use_case $usecase -envxml_dir . --lnd_tuning_mode $lnd_tuning_mode -namelist '&a start_ymd=$start_ymd/'"; + &make_env_run(); + eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; + is( $@, '', "options: $options" ); + $cfiles->checkfilesexist( "$options", $mode ); + $cfiles->shownmldiff( "default", "standard" ); + if ( defined($opts{'compare'}) ) { + $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode ); + $cfiles->comparefiles( "$options", $mode, $opts{'compare'} ); + } + if ( defined($opts{'generate'}) ) { + $cfiles->copyfiles( "$options", $mode ); + } + &cleanup(); +} + print "\n==================================================\n"; print "Test crop resolutions \n"; print "==================================================\n"; @@ -1519,7 +1620,7 @@ sub cat_and_create_namelistinfile { "20thC_transient", ); foreach my $res ( @glc_res ) { - foreach my $usecase ( @usecases ) { + foreach my $usecase ( @use_cases ) { my $startymd = undef; if ( ($usecase eq "1850_control") || ($usecase eq "20thC_transient") ) { $startymd = 18500101; @@ -1591,43 +1692,6 @@ sub cat_and_create_namelistinfile { # # End loop over versions # -# -# Test ALL SSP's for f09... -# -$phys = "clm6_0"; -$mode = "-phys $phys"; -&make_config_cache($phys); -my $res = "0.9x1.25"; -foreach my $usecase ( "1850-2100_SSP5-8.5_transient", "1850-2100_SSP2-4.5_transient", "1850-2100_SSP1-2.6_transient", "1850-2100_SSP3-7.0_transient" ) { - $options = "-res $res -bgc bgc -crop -use_case $usecase -envxml_dir . -namelist '&a start_ymd=20150101/'"; - &make_env_run(); - eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; - is( $@, '', "$options" ); - $cfiles->checkfilesexist( "$options", $mode ); - $cfiles->shownmldiff( "default", "standard" ); - if ( defined($opts{'compare'}) ) { - $cfiles->doNOTdodiffonfile( "$tempfile", "$options", $mode ); - $cfiles->comparefiles( "$options", $mode, $opts{'compare'} ); - } - if ( defined($opts{'generate'}) ) { - $cfiles->copyfiles( "$options", $mode ); - } - &cleanup(); -} - -# The SSP's that fail because of missing ndep files... -$phys = "clm5_0"; -$mode = "-phys $phys"; -&make_config_cache($phys); -my $res = "0.9x1.25"; -foreach my $usecase ( "1850-2100_SSP5-3.4_transient", "1850-2100_SSP4-3.4", "1850-2100_SSP1-1.9_transient", - "1850-2100_SSP4-6.0_transient" ) { - $options = "-res $res -bgc bgc -crop -use_case $usecase -envxml_dir . 
-namelist '&a start_ymd=20150101/'"; - &make_env_run(); - eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; - isnt( $?, 0, $usecase ); - system( "cat $tempfile" ); -} print "\n==================================================\n"; print "Test clm4.5/clm5.0/clm5_1/clm6_0 resolutions \n"; From f9974947dfebaba2c02bca5c3c87f0a2424b2849 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 9 May 2024 15:14:30 -0600 Subject: [PATCH 047/126] Updated ChangeLog/ChangeSum --- doc/ChangeLog | 105 ++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 106 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index e0a5e45cb6..3307c8df88 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,109 @@ =============================================================== +Tag name: ctsm5.2.004 +Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310) +Date: Thu 09 May 2024 03:10:52 PM MDT +One-line Summary: CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes + +Purpose and description of changes +---------------------------------- + + Includes work by multiple people: + @slevis-lmwg: new fsurdat/landuse files and corresponding infrastructure for + the three so-called VR grids (ne0np4) + @samsrabin: a subset_data fix + @olyson: corrections to a couple of history long-names and updates to + history_fields_fates.rst and history_fields_nofates.rst + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm6_0 + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + Fixes #2487 VR grids + Fixes #2527 subset_data is broken + Fixes #2511 a hist long name is incorrect + Fixes #2506 a hist long name is incorrect + +Notes of particular relevance for users +--------------------------------------- +Changes made to namelist defaults (e.g., changed parameter values): + The so-called VR grids now use 1979 fsurdat files for 1850 compsets + and 1979-2026 landuse for historical compsets. The latter cross over into + SSP years and use SSP2-4.5 for that. + +Changes to the datasets (e.g., parameter, surface or initial files): + Same comment. + +Changes to documentation: + See Purpose and Description above. + +Substantial timing or memory changes: + Not considered. + +Notes of particular relevance for developers: +--------------------------------------------- +Changes to tests or testing: + Updated the ctsm_sci tests for the three VR grids. + +Testing summary: +---------------- + [PASS means all tests PASS; OK means tests PASS other than expected fails.] 
+ + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - PASS + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + any other testing (give details below): + + ctsm_sci + derecho ---- OK + + +Answer changes +-------------- + +Changes answers relative to baseline: NO + + However, note that the ctsm_sci test-suite showed diffs in + - Fates cases as expected since the last available baseline was ctsm_sci-ctsm5.2.0. + - the preexisting VR grid test for 1850 as expected since the fsurdat changed to 1979. + +Other details +------------- +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/2590 + https://github.com/ESCOMP/ctsm/pull/2512 + https://github.com/ESCOMP/ctsm/pull/2528 + +=============================================================== +=============================================================== Tag name: ctsm5.2.003 Originator(s): samrabin (Sam Rabin, UCAR/TSS, samrabin@ucar.edu) Date: Thu May 2 14:06:54 MDT 2024 diff --git a/doc/ChangeSum b/doc/ChangeSum index b48e04889d..17ad670f17 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.2.004 multiple 05/09/2024 CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes ctsm5.2.003 samrabin 05/02/2024 Merge b4b-dev ctsm5.2.002 glemieux 04/26/2024 FATES default allometry parameter file update ctsm5.2.001 erik 04/22/2024 Merge b4b-dev From cf1a297864938fae60288ce742adfb2150e5caf8 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 9 May 2024 17:28:23 -0600 Subject: [PATCH 048/126] Update Externals.cfg to cesm2_3_beta17 --- Externals.cfg | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 185f412cab..80bfb45db8 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -8,7 +8,7 @@ required = True local_path = components/cism protocol = git repo_url = https://github.com/ESCOMP/CISM-wrapper -tag = cismwrap_2_1_99 +tag = cismwrap_2_1_100 externals = Externals_CISM.cfg required = True @@ -29,12 +29,12 @@ required = True [mizuRoute] local_path = components/mizuRoute protocol = git -repo_url = https://github.com/nmizukami/mizuRoute -hash = 34723c2 +repo_url = https://github.com/ESCOMP/mizuRoute +hash = 81c720c required = True [ccs_config] -tag = ccs_config_cesm0.0.92 +tag = ccs_config_cesm0.0.106 protocol = git repo_url = https://github.com/ESMCI/ccs_config_cesm.git local_path = ccs_config @@ -44,18 +44,18 @@ required = True local_path = cime protocol = git repo_url = https://github.com/ESMCI/cime -tag = cime6.0.217_httpsbranch03 +tag = cime6.0.238_httpsbranch01 required = True [cmeps] -tag = cmeps0.14.50 +tag = cmeps0.14.60 protocol = git repo_url = https://github.com/ESCOMP/CMEPS.git local_path = components/cmeps required = True [cdeps] -tag = cdeps1.0.28 +tag = cdeps1.0.33 protocol = git repo_url = https://github.com/ESCOMP/CDEPS.git local_path = components/cdeps From 2aa76c4b3f841d35d465a254285daeb075f9d7cc Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 9 May 2024 17:46:05 -0600 Subject: [PATCH 049/126] Remove mct from Externals.cfg --- Externals.cfg | 7 ------- 1 file changed, 7 deletions(-) diff --git 
a/Externals.cfg b/Externals.cfg index 80bfb45db8..e8a2121df5 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -76,13 +76,6 @@ repo_url = https://github.com/ESCOMP/CESM_share local_path = share required = True -[mct] -tag = MCT_2.11.0 -protocol = git -repo_url = https://github.com/MCSclimate/MCT -local_path = libraries/mct -required = True - [parallelio] tag = pio2_6_2 protocol = git From ccce061987e7dc193aec37e53d7f46fa441d2147 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 9 May 2024 17:49:34 -0600 Subject: [PATCH 050/126] Remove cpl7 from Externals.cfg --- Externals.cfg | 7 ------- 1 file changed, 7 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index e8a2121df5..a8a77a40f1 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -62,13 +62,6 @@ local_path = components/cdeps externals = Externals_CDEPS.cfg required = True -[cpl7] -tag = cpl77.0.7 -protocol = git -repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps -local_path = components/cpl7 -required = True - [share] tag = share1.0.18 protocol = git From 4861125a7a9c81008de5cb218755cd9792db0bb9 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 13:11:58 -0600 Subject: [PATCH 051/126] Remove MCT from README and .config_files.xml --- .config_files.xml | 3 --- README | 5 ++--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/.config_files.xml b/.config_files.xml index d1a592e5db..7f0d6bb11a 100644 --- a/.config_files.xml +++ b/.config_files.xml @@ -19,11 +19,8 @@ $SRCROOT $SRCROOT/components/slim/ - $SRCROOT/components/cpl7/components/data_comps_$COMP_INTERFACE/dlnd $SRCROOT/components/cdeps/dlnd - $SRCROOT/components/cpl7/components/stub_comps_$COMP_INTERFACE/slnd $CIMEROOT/CIME/non_py/src/components/stub_comps_$COMP_INTERFACE/slnd - $SRCROOT/components/cpl7/components/xcpl_comps_$COMP_INTERFACE/xlnd $CIMEROOT/CIME/non_py/src/components/xcpl_comps_$COMP_INTERFACE/xlnd case_comps diff --git a/README b/README index 18cc2b1458..4699fec858 100644 --- a/README +++ b/README @@ -57,7 +57,7 @@ python ------------ Python modules used in tools and testing and automated check Directory structure only for a CTSM checkout: components -------- Other active sub-components needed for CTSM to run (river routing and land-ice models) -libraries --------- CESM libraries: MCT (Model Coupling Toolkit) and PIO (deprecated) +libraries --------- CESM libraries: PIO (deprecated) share ------------- CESM shared code ccs_config -------- CIME configure files (for grids, compsets, and machines) for CESM @@ -68,7 +68,6 @@ components/cdeps -------------------- CESM top level data model shared code (for components/cism --------------------- CESM Community land Ice Sheet Model. components/mosart ------------------- Model for Scale Adaptive River Transport components/rtm ---------------------- CESM River Transport Model. 
-components/cpl7 --------------------- CESM top level driver for MCT driver (deprecated will be removed) Top level documentation ($CTSMROOT): @@ -154,7 +153,7 @@ Source code directory structure: src/biogeochem ---- Biogeochemisty src/main ---------- Main control and high level code -src/cpl ----------- Land model high level caps for NUOPC driver (and MCT and LILAC) +src/cpl ----------- Land model high level caps for NUOPC driver (and LILAC) src/biogeophys ---- Biogeophysics (Hydrology) src/dyn_subgrid --- Dynamic land unit change src/init_interp --- Online interpolation From db147cd616ea8a52e4bc64e88299c8fe76036396 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 13:57:37 -0600 Subject: [PATCH 052/126] Remove MCT from CLMBuildNamelist.pm --- bld/CLMBuildNamelist.pm | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm index fb44023cd5..731589289c 100755 --- a/bld/CLMBuildNamelist.pm +++ b/bld/CLMBuildNamelist.pm @@ -71,7 +71,7 @@ REQUIRED OPTIONS (if read they allow user_nl_clm and CLM_BLDNML_OPTS to expand variables [for example to use \$DIN_LOC_ROOT]) (default current directory) - -lnd_frac "domainfile" Land fraction file (the input domain file) (needed for MCT driver and LILAC) + -lnd_frac "domainfile" Land fraction file (the input domain file) (needed for LILAC) -res "resolution" Specify horizontal grid. Use nlatxnlon for spectral grids; dlatxdlon for fv grids (dlat and dlon are the grid cell size in degrees for latitude and longitude respectively) @@ -83,7 +83,7 @@ REQUIRED OPTIONS (default 2000) -structure "structure" The overall structure being used [ standard | fast ] OPTIONS - -driver "value" CESM driver type you will run with [ mct | nuopc ] + -driver "value" CESM driver type you will run with [ nuopc ] -bgc "value" Build CLM with BGC package [ sp | bgc | fates ] (default is sp). CLM Biogeochemistry mode @@ -1893,10 +1893,10 @@ sub setup_logic_lnd_frac { my ($opts, $nl_flags, $definition, $defaults, $nl, $envxml_ref) = @_; # - # fatmlndfrc is required for the MCT driver (or LILAC), but uneeded for NUOPC + # fatmlndfrc is required for LILAC but uneeded for NUOPC # my $var = "lnd_frac"; - if ( ($opts->{'driver'} eq "mct") || $opts->{'lilac'} ) { + if ( $opts->{'lilac'} ) { if ( defined($opts->{$var}) ) { if ( defined($nl->get_value('fatmlndfrc')) ) { $log->fatal_error("Can NOT set both -lnd_frac option (set via LND_DOMAIN_PATH/LND_DOMAIN_FILE " . 
@@ -3838,9 +3838,7 @@ sub setup_logic_popd_streams { } add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'stream_fldfilename_popdens', 'phys'=>$nl_flags->{'phys'}, 'cnfireson'=>$nl_flags->{'cnfireson'}, 'hgrid'=>"0.5x0.5", 'ssp_rcp'=>$nl_flags->{'ssp_rcp'} ); - # - # TODO (mvertens, 2021-06-22) the following is needed for MCT since a use case enforces this - so for now stream_meshfile_popdens will be added to the mct - # stream namelist but simply not used + if ($opts->{'driver'} eq "nuopc" ) { add_default($opts, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'stream_meshfile_popdens', 'hgrid'=>"0.5x0.5"); my $inputdata_rootdir = $nl_flags->{'inputdata_rootdir'}; @@ -3854,12 +3852,6 @@ sub setup_logic_popd_streams { $val = "e_string( $val ); $nl->set_variable_value($group, $var, $val); } - } else { - my $var = 'stream_meshfile_popdens'; - my $group = $definition->get_group_name($var); - my $val = "none"; - $val = "e_string( $val ); - $nl->set_variable_value($group, $var, $val); } } else { # If bgc is NOT CN/CNDV or fire_method==nofire then make sure none of the popdens settings are set From 68d46b68f92ebe08ce04d61527638d6997275b38 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 14:03:49 -0600 Subject: [PATCH 053/126] Remove MCT from build-namelist_test.pl --- bld/unit_testers/build-namelist_test.pl | 30 ++----------------------- 1 file changed, 2 insertions(+), 28 deletions(-) diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index 7b654337af..12a5f4bd38 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -314,7 +314,7 @@ sub cat_and_create_namelistinfile { print "=================================================================================\n"; my $startfile = "clmrun.clm2.r.1964-05-27-00000.nc"; -foreach my $driver ( "mct", "nuopc" ) { +foreach my $driver ( "nuopc" ) { print " For $driver driver\n\n"; # configuration, structure, irrigate, verbose, clm_demand, ssp_rcp, test, sim_year, use_case foreach my $options ( "-res 0.9x1.25 -configuration nwp", @@ -335,13 +335,7 @@ sub cat_and_create_namelistinfile { my $file = $startfile; &make_env_run(); my $base_options = "-envxml_dir . -driver $driver"; - if ( $driver eq "mct" ) { - $base_options = "$base_options -lnd_frac $DOMFILE"; - # Skip the MCT test for excess ice streams - if ( $options =~ /use_excess_ice_streams=.true./ ) { - next; - } - } else { + if ( $driver eq "nuopc" ) { $base_options = "$base_options -namelist '&a force_send_to_atm = .false./'"; } eval{ system( "$bldnml $base_options $options > $tempfile 2>&1 " ); }; @@ -548,11 +542,6 @@ sub cat_and_create_namelistinfile { GLC_TWO_WAY_COUPLING=>"FALSE", phys=>"clm5_0", }, - "exice stream on, but mct" =>{ options=>"--res 0.9x1.25 --envxml_dir . --driver mct --lnd_frac $DOMFILE ", - namelst=>"use_excess_ice=.true., use_excess_ice_streams=.true.", - GLC_TWO_WAY_COUPLING=>"FALSE", - phys=>"clm5_0", - }, "clm50CNDVwtransient" =>{ options=>" -envxml_dir . 
-use_case 20thC_transient -dynamic_vegetation -res 10x15 -ignore_warnings", namelst=>"", GLC_TWO_WAY_COUPLING=>"FALSE", @@ -898,16 +887,6 @@ sub cat_and_create_namelistinfile { GLC_TWO_WAY_COUPLING=>"FALSE", phys=>"clm5_0", }, - "both lnd_frac and on nml" =>{ options=>"-driver mct -lnd_frac $DOMFILE -envxml_dir .", - namelst=>"fatmlndfrc='frac.nc'", - GLC_TWO_WAY_COUPLING=>"FALSE", - phys=>"clm5_0", - }, - "lnd_frac set to UNSET" =>{ options=>"-driver mct -lnd_frac UNSET -envxml_dir .", - namelst=>"", - GLC_TWO_WAY_COUPLING=>"FALSE", - phys=>"clm6_0", - }, "lnd_frac set but nuopc" =>{ options=>"-driver nuopc -lnd_frac $DOMFILE -envxml_dir .", namelst=>"", GLC_TWO_WAY_COUPLING=>"FALSE", @@ -923,11 +902,6 @@ sub cat_and_create_namelistinfile { GLC_TWO_WAY_COUPLING=>"FALSE", phys=>"clm6_0", }, - "force_send but not nuopc" =>{ options=>"-driver mct -lnd_frac $DOMFILE -envxml_dir .", - namelst=>"force_send_to_atm = .false.", - GLC_TWO_WAY_COUPLING=>"FALSE", - phys=>"clm6_0", - }, "branch but NO nrevsn" =>{ options=>"-clm_start_type branch -envxml_dir .", namelst=>"", GLC_TWO_WAY_COUPLING=>"FALSE", From 190be8bd937ce545ad636c7c339f8b5497968ab6 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Fri, 10 May 2024 14:21:11 -0600 Subject: [PATCH 054/126] Fix the 2010 use case, and get the namelist testing working better --- bld/namelist_files/use_cases/2010_control.xml | 4 ++-- bld/unit_testers/build-namelist_test.pl | 24 ++++++++++++------- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/bld/namelist_files/use_cases/2010_control.xml b/bld/namelist_files/use_cases/2010_control.xml index d4c2a9c9c0..d16177301c 100644 --- a/bld/namelist_files/use_cases/2010_control.xml +++ b/bld/namelist_files/use_cases/2010_control.xml @@ -12,8 +12,8 @@ .true. .false. -2010 -2010 +2010 +2010 2010 2010 diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index 2b65b18f38..25545dcf8a 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -163,10 +163,10 @@ sub cat_and_create_namelistinfile { # # Figure out number of tests that will run # -my $ntests = 3385; +my $ntests = 3313; if ( defined($opts{'compare'}) ) { - $ntests += 2097; + $ntests += 2001; } plan( tests=>$ntests ); @@ -1436,22 +1436,24 @@ sub cat_and_create_namelistinfile { print "use-cases = @usecases\n"; die "ERROR:: Number of use-cases isn't what's expected\n"; } -my @expect_fails = ( "1850-2100_SSP5-3.4_transient", "1850-2100_SSP4-3.4", "1850-2100_SSP1-1.9_transient", - "1850-2100_SSP4-6.0_transient" ); +my @expect_fails = ( "1850-2100_SSP5-3.4_transient", "1850-2100_SSP4-3.4_transient", "2018-PD_transient", "1850-2100_SSP1-1.9_transient", + "1850-2100_SSP4-6.0_transient", "2018_control" ); foreach my $phys ( "clm4_5", "clm5_0", "clm5_1", "clm6_0" ) { print "physics = $phys\n"; + &make_config_cache($phys); foreach my $usecase ( @usecases ) { print "usecase = $usecase\n"; $options = "-res 0.9x1.25 -use_case $usecase -envxml_dir ."; &make_env_run(); - eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; - my $expect_fail = 0; + my $expect_fail = undef; foreach my $failusecase ( @expect_fails ) { if ( $failusecase eq $usecase ) { $expect_fail = 1; + last; } } - if ( not $expect_fail ) { + eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; + if ( ! 
defined($expect_fail) ) { is( $@, '', "options: $options" ); $cfiles->checkfilesexist( "$options", $mode ); $cfiles->shownmldiff( "default", "standard" ); @@ -1463,7 +1465,7 @@ sub cat_and_create_namelistinfile { $cfiles->copyfiles( "$options", $mode ); } } else { - isnt( $@, '', "options: $options" ); + isnt( $@, 0, "options: $options" ); } &cleanup(); } @@ -1546,6 +1548,12 @@ sub cat_and_create_namelistinfile { &make_env_run(); eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; is( $@, '', "options: $options" ); + my $finidat = `grep finidat lnd_in`; + if ( $finidat =~ /initdata_map/ ) { + my $result; + eval( $result = `grep use_init_interp lnd_in` ); + is( $?, 0, "use_init_interp needs to be set here and was not: $result"); + } $cfiles->checkfilesexist( "$options", $mode ); $cfiles->shownmldiff( "default", "standard" ); if ( defined($opts{'compare'}) ) { From f3386bcd5c0ec78d6d2be660aaa7b4c257910030 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 15:58:49 -0600 Subject: [PATCH 055/126] Remove MCT from config_definition_ctsm.xml --- bld/config_files/config_definition_ctsm.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bld/config_files/config_definition_ctsm.xml b/bld/config_files/config_definition_ctsm.xml index dfe6378f17..e6628b1d94 100644 --- a/bld/config_files/config_definition_ctsm.xml +++ b/bld/config_files/config_definition_ctsm.xml @@ -18,11 +18,11 @@ Root directory of CLM source distribution (directory above CLM configure). Component framework interface to use -(Model Coupling Toolkit, or Earth System Modeling Framework) +(Earth System Modeling Framework) Date: Fri, 10 May 2024 16:08:42 -0600 Subject: [PATCH 056/126] Remove MCT from ExpectedTestFails.xml and update README.NUOPC_driver.md --- README.NUOPC_driver.md | 5 +---- cime_config/testdefs/ExpectedTestFails.xml | 7 ------- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/README.NUOPC_driver.md b/README.NUOPC_driver.md index ba0b70c2c0..9eb5d5b2cb 100644 --- a/README.NUOPC_driver.md +++ b/README.NUOPC_driver.md @@ -43,9 +43,6 @@ by having an updated copy in your case directory. ## What if I want to use the MCT driver? -The MCT driver is now deprecated, and will be removed. So at this point we don't -suggest using it anymore. - -For more notes see: +The MCT driver is now removed. For more notes see: https://docs.google.com/presentation/d/1yjiKSEV53JDAJbYxhpY2T9GTxlWFzQAn diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index 8ee60cf4b8..f600444547 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -51,13 +51,6 @@ - - - FAIL - #1887 - - - FAIL From 629961c2afa8d86be3101b90dfe040a8d9603ac2 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 16:11:33 -0600 Subject: [PATCH 057/126] Remove reference to MCT in fsurdat_modifier.py --- python/ctsm/modify_input_files/fsurdat_modifier.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/ctsm/modify_input_files/fsurdat_modifier.py b/python/ctsm/modify_input_files/fsurdat_modifier.py index bd060cb9dc..1a45590872 100644 --- a/python/ctsm/modify_input_files/fsurdat_modifier.py +++ b/python/ctsm/modify_input_files/fsurdat_modifier.py @@ -254,8 +254,8 @@ def modify_optional( """Modify the dataset according to the optional settings""" # Set fsurdat variables in a rectangle that could be global (default).
- # Note that the land/ocean mask gets specified in the domain file for - # MCT or the ocean mesh files for NUOPC. Here the user may specify + # Note that the land/ocean mask gets specified in + # the ocean mesh files. Here the user may specify # fsurdat variables inside a box but cannot change which points will # run as land and which as ocean. if idealized: From fb5b6310be60477a7c4cb67656a68a9c71cb8b6e Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 16:15:09 -0600 Subject: [PATCH 058/126] Remove src/unit_test_stubs/csm_share/CMakeLists.txt --- src/unit_test_stubs/csm_share/CMakeLists.txt | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 src/unit_test_stubs/csm_share/CMakeLists.txt diff --git a/src/unit_test_stubs/csm_share/CMakeLists.txt b/src/unit_test_stubs/csm_share/CMakeLists.txt deleted file mode 100644 index 33ddbfb342..0000000000 --- a/src/unit_test_stubs/csm_share/CMakeLists.txt +++ /dev/null @@ -1,5 +0,0 @@ -list(APPEND share_sources - shr_mpi_mod_stub.F90 - ) - -sourcelist_to_parent(share_sources) From 3d1ab9be7da02a53107786a952582462049611db Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 16:18:43 -0600 Subject: [PATCH 059/126] Remove MCT from test_driver.sh --- test/tools/test_driver.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/tools/test_driver.sh b/test/tools/test_driver.sh index f93301a530..307d1accf8 100755 --- a/test/tools/test_driver.sh +++ b/test/tools/test_driver.sh @@ -463,8 +463,7 @@ if [ ! -d \$CLM_TESTDIR ]; then fi fi -## MCT and PIO build directorys -export MCT_LIBDIR=\$CLM_TESTDIR/mct +## PIO build directory export PIO_LIBDIR=\$CLM_TESTDIR/pio ##set our own environment vars From 371f093e02a7820e8beec823c97dd23b94277c05 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 16:20:13 -0600 Subject: [PATCH 060/126] Remove the src/cpl/mct directory --- src/cpl/mct/ExcessIceStreamType.F90 | 144 ----- src/cpl/mct/FireDataBaseType.F90 | 561 ------------------- src/cpl/mct/SoilMoistureStreamMod.F90 | 418 -------------- src/cpl/mct/UrbanTimeVarType.F90 | 314 ----------- src/cpl/mct/ch4FInundatedStreamType.F90 | 389 ------------- src/cpl/mct/clm_cpl_indices.F90 | 330 ----------- src/cpl/mct/laiStreamMod.F90 | 241 --------- src/cpl/mct/lnd_comp_mct.F90 | 632 ---------------------- src/cpl/mct/lnd_import_export.F90 | 354 ------------ src/cpl/mct/lnd_set_decomp_and_domain.F90 | 352 ------------ src/cpl/mct/ndepStreamMod.F90 | 376 ------------- 11 files changed, 4111 deletions(-) delete mode 100644 src/cpl/mct/ExcessIceStreamType.F90 delete mode 100644 src/cpl/mct/FireDataBaseType.F90 delete mode 100644 src/cpl/mct/SoilMoistureStreamMod.F90 delete mode 100644 src/cpl/mct/UrbanTimeVarType.F90 delete mode 100644 src/cpl/mct/ch4FInundatedStreamType.F90 delete mode 100644 src/cpl/mct/clm_cpl_indices.F90 delete mode 100644 src/cpl/mct/laiStreamMod.F90 delete mode 100644 src/cpl/mct/lnd_comp_mct.F90 delete mode 100644 src/cpl/mct/lnd_import_export.F90 delete mode 100644 src/cpl/mct/lnd_set_decomp_and_domain.F90 delete mode 100644 src/cpl/mct/ndepStreamMod.F90 diff --git a/src/cpl/mct/ExcessIceStreamType.F90 b/src/cpl/mct/ExcessIceStreamType.F90 deleted file mode 100644 index 5c5394233c..0000000000 --- a/src/cpl/mct/ExcessIceStreamType.F90 +++ /dev/null @@ -1,144 +0,0 @@ -module ExcessIceStreamType - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Stub for ExcessIceStreams for the MCT driver. So that MCT can be used - ! 
without excess ice streams. - ! - ! !USES - use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL - use shr_log_mod , only : errMsg => shr_log_errMsg - use spmdMod , only : mpicom, masterproc - use clm_varctl , only : iulog - use abortutils , only : endrun - use decompMod , only : bounds_type - - ! !PUBLIC TYPES: - implicit none - private - - public :: UseExcessIceStreams ! If streams will be used - - type, public :: excessicestream_type - contains - - ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: Init ! Initialize and read data in - procedure, public :: CalcExcessIce ! Calculate excess ice ammount - - ! !PRIVATE MEMBER FUNCTIONS: - procedure, private :: ReadNML ! Read in namelist - - end type excessicestream_type - ! ! PRIVATE DATA: - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - -!============================================================================== -contains -!============================================================================== - - subroutine Init(this, bounds, NLFilename) - ! - ! - ! arguments - implicit none - class(excessicestream_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - - ! - ! local variables - - call this%ReadNML( bounds, NLFileName ) - end subroutine Init - - subroutine CalcExcessIce(this,bounds,exice_bulk_init) - - ! only transfers grid values to columns - implicit none - class(excessicestream_type) :: this - type(bounds_type), intent(in) :: bounds - real(r8) , intent(inout) :: exice_bulk_init(bounds%begc:bounds%endc) - ! - ! !LOCAL VARIABLES: - - end subroutine CalcExcessIce - - logical function UseExcessIceStreams() - ! - ! !DESCRIPTION: - ! Return true if - ! - ! !USES: - ! - ! !ARGUMENTS: - implicit none - ! - ! !LOCAL VARIABLES: - UseExcessIceStreams = .false. -end function UseExcessIceStreams - -subroutine ReadNML(this, bounds, NLFilename) - ! - ! Read the namelist data stream information. - ! - ! Uses: - use shr_nl_mod , only : shr_nl_find_group_name - use shr_log_mod , only : errMsg => shr_log_errMsg - use shr_mpi_mod , only : shr_mpi_bcast - ! - ! arguments - implicit none - class(excessicestream_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! local variables - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - logical :: use_excess_ice_streams = .false. ! logical to turn on use of excess ice streams - character(len=CL) :: stream_fldFileName_exice = ' ' - character(len=CL) :: stream_mapalgo_exice = 'none' - character(len=*), parameter :: namelist_name = 'exice_streams' ! MUST agree with name in namelist and read - character(len=*), parameter :: subName = "('exice_streams::ReadNML')" - !----------------------------------------------------------------------- - - namelist /exice_streams/ & ! MUST agree with namelist_name above - stream_mapalgo_exice, stream_fldFileName_exice, use_excess_ice_streams - !----------------------------------------------------------------------- - ! Default values for namelist - - ! Read excess ice namelist - if (masterproc) then - open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call shr_nl_find_group_name(nu_nml, namelist_name, status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=exice_streams,iostat=nml_error) ! 
MUST agree with namelist_name above - if (nml_error /= 0) then - call endrun(msg=' ERROR reading '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) - end if - else - call endrun(msg=' ERROR finding '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) - end if - close(nu_nml) - endif - - call shr_mpi_bcast(use_excess_ice_streams , mpicom) - - if (masterproc) then - if ( use_excess_ice_streams ) then - call endrun(msg=' ERROR excess ice streams can NOT be on for the MCT driver'//errMsg(sourcefile, __LINE__)) - end if - if ( trim(stream_fldFileName_exice) /= '' ) then - call endrun(msg=' ERROR stream_fldFileName_exice can NOT be set for the MCT driver'//errMsg(sourcefile, __LINE__)) - end if - if ( trim(stream_mapalgo_exice) /= 'none' ) then - call endrun(msg=' ERROR stream_mapalgo_exice can only be none for the MCT driver'//errMsg(sourcefile, __LINE__)) - end if - endif - -end subroutine ReadNML - -end module ExcessIceStreamType diff --git a/src/cpl/mct/FireDataBaseType.F90 b/src/cpl/mct/FireDataBaseType.F90 deleted file mode 100644 index 0ee635b2fa..0000000000 --- a/src/cpl/mct/FireDataBaseType.F90 +++ /dev/null @@ -1,561 +0,0 @@ -module FireDataBaseType - -#include "shr_assert.h" - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! module for handling of fire data - ! - ! !USES: - use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create, shr_strdata_print - use shr_strdata_mod , only : shr_strdata_advance - use shr_log_mod , only : errMsg => shr_log_errMsg - use clm_varctl , only : iulog, inst_name - use spmdMod , only : masterproc, mpicom, comp_id - use fileutils , only : getavu, relavu - use domainMod , only : ldomain - use abortutils , only : endrun - use decompMod , only : bounds_type - use FireMethodType , only : fire_method_type - use lnd_set_decomp_and_domain, only : gsmap_global - use mct_mod - ! - implicit none - private - ! - ! !PUBLIC TYPES: - public :: fire_base_type - - ! - type, abstract, extends(fire_method_type) :: fire_base_type - private - ! !PRIVATE MEMBER DATA: - - real(r8), public, pointer :: forc_lnfm(:) ! Lightning frequency - real(r8), public, pointer :: forc_hdm(:) ! Human population density - - real(r8), public, pointer :: gdp_lf_col(:) ! col global real gdp data (k US$/capita) - real(r8), public, pointer :: peatf_lf_col(:) ! col global peatland fraction data (0-1) - integer , public, pointer :: abm_lf_col(:) ! col global peak month of crop fire emissions - - type(shr_strdata_type) :: sdat_hdm ! Human population density input data stream - type(shr_strdata_type) :: sdat_lnfm ! Lightning input data stream - - contains - ! - ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: FireInit => BaseFireInit ! Initialization of Fire - procedure, public :: BaseFireInit ! Initialization of Fire - procedure(FireReadNML_interface), public, deferred :: FireReadNML ! Read in namelist for Fire - procedure, public :: FireInterp ! Interpolate fire data - procedure(need_lightning_and_popdens_interface), public, deferred :: & - need_lightning_and_popdens ! Returns true if need lightning & popdens - ! - ! !PRIVATE MEMBER FUNCTIONS: - procedure, private :: hdm_init ! position datasets for dynamic human population density - procedure, private :: hdm_interp ! interpolates between two years of human pop. density file data - procedure, private :: lnfm_init ! position datasets for Lightning - procedure, private :: lnfm_interp ! 
interpolates between two years of Lightning file data - procedure, private :: surfdataread ! read fire related data from surface data set - end type fire_base_type - !----------------------------------------------------------------------- - - abstract interface - !----------------------------------------------------------------------- - function need_lightning_and_popdens_interface(this) result(need_lightning_and_popdens) - ! - ! !DESCRIPTION: - ! Returns true if need lightning and popdens, false otherwise - ! - ! USES - import :: fire_base_type - ! - ! !ARGUMENTS: - class(fire_base_type), intent(in) :: this - logical :: need_lightning_and_popdens ! function result - !----------------------------------------------------------------------- - end function need_lightning_and_popdens_interface - end interface - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - -contains - - !----------------------------------------------------------------------- - subroutine FireReadNML_interface( this, NLFilename ) - ! - ! !DESCRIPTION: - ! Read the namelist for Fire - ! - ! !USES: - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - character(len=*), intent(in) :: NLFilename ! Namelist filename - end subroutine FireReadNML_interface - - !----------------------------------------------------------------------- - subroutine BaseFireInit( this, bounds, NLFilename ) - ! - ! !DESCRIPTION: - ! Initialize CN Fire module - ! !USES: - use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename - !----------------------------------------------------------------------- - - if ( this%need_lightning_and_popdens() ) then - ! Allocate lightning forcing data - allocate( this%forc_lnfm(bounds%begg:bounds%endg) ) - this%forc_lnfm(bounds%begg:) = nan - ! Allocate pop dens forcing data - allocate( this%forc_hdm(bounds%begg:bounds%endg) ) - this%forc_hdm(bounds%begg:) = nan - - ! Allocate real gdp data - allocate(this%gdp_lf_col(bounds%begc:bounds%endc)) - ! Allocate peatland fraction data - allocate(this%peatf_lf_col(bounds%begc:bounds%endc)) - ! Allocates peak month of crop fire emissions - allocate(this%abm_lf_col(bounds%begc:bounds%endc)) - - - call this%hdm_init(bounds, NLFilename) - call this%hdm_interp(bounds) - call this%lnfm_init(bounds, NLFilename) - call this%lnfm_interp(bounds) - call this%surfdataread(bounds) - end if - - end subroutine BaseFireInit - - !----------------------------------------------------------------------- - subroutine FireInterp(this,bounds) - ! - ! !DESCRIPTION: - ! Interpolate CN Fire datasets - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - !----------------------------------------------------------------------- - - if ( this%need_lightning_and_popdens() ) then - call this%hdm_interp(bounds) - call this%lnfm_interp(bounds) - end if - - end subroutine FireInterp - - !----------------------------------------------------------------------- - subroutine hdm_init( this, bounds, NLFilename ) - ! - ! !DESCRIPTION: - ! Initialize data stream information for population density. - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use histFileMod , only : hist_addfld1d - ! - ! 
!ARGUMENTS: - implicit none - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! !LOCAL VARIABLES: - integer :: stream_year_first_popdens ! first year in pop. dens. stream to use - integer :: stream_year_last_popdens ! last year in pop. dens. stream to use - integer :: model_year_align_popdens ! align stream_year_first_hdm with - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - type(mct_ggrid) :: dom_clm ! domain information - character(len=CL) :: stream_fldFileName_popdens ! population density streams filename - character(len=CL) :: popdensmapalgo = 'bilinear' ! mapping alogrithm for population density - character(len=CL) :: popdens_tintalgo = 'nearest'! time interpolation alogrithm for population density - character(len=CL) :: stream_meshfile_popdens ! not used - character(*), parameter :: subName = "('hdmdyn_init')" - character(*), parameter :: F00 = "('(hdmdyn_init) ',4a)" - !----------------------------------------------------------------------- - - namelist /popd_streams/ & - stream_year_first_popdens, & - stream_year_last_popdens, & - model_year_align_popdens, & - popdensmapalgo, & - stream_fldFileName_popdens, & - stream_meshfile_popdens , & - popdens_tintalgo - - ! Default values for namelist - stream_year_first_popdens = 1 ! first year in stream to use - stream_year_last_popdens = 1 ! last year in stream to use - model_year_align_popdens = 1 ! align stream_year_first_popdens with this model year - stream_fldFileName_popdens = ' ' - - ! Read popd_streams namelist - if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call find_nlgroup_name(nu_nml, 'popd_streams', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=popd_streams,iostat=nml_error) - if (nml_error /= 0) then - call endrun(msg='ERROR reading popd_streams namelist'//errMsg(sourcefile, __LINE__)) - end if - end if - close(nu_nml) - call relavu( nu_nml ) - endif - - call shr_mpi_bcast(stream_year_first_popdens, mpicom) - call shr_mpi_bcast(stream_year_last_popdens, mpicom) - call shr_mpi_bcast(model_year_align_popdens, mpicom) - call shr_mpi_bcast(stream_fldFileName_popdens, mpicom) - call shr_mpi_bcast(popdens_tintalgo, mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) 'popdens_streams settings:' - write(iulog,*) ' stream_year_first_popdens = ',stream_year_first_popdens - write(iulog,*) ' stream_year_last_popdens = ',stream_year_last_popdens - write(iulog,*) ' model_year_align_popdens = ',model_year_align_popdens - write(iulog,*) ' stream_fldFileName_popdens = ',stream_fldFileName_popdens - write(iulog,*) ' popdens_tintalgo = ',popdens_tintalgo - write(iulog,*) ' ' - endif - - call clm_domain_mct (bounds, dom_clm) - - call shr_strdata_create(this%sdat_hdm,name="clmhdm", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_popdens, & - yearLast=stream_year_last_popdens, & - yearAlign=model_year_align_popdens, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_popdens), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/trim(stream_fldFileName_popdens)/) , & - fldListFile='hdm', & - fldListModel='hdm', & - 
fillalgo='none', & - mapalgo=popdensmapalgo, & - calendar=get_calendar(), & - tintalgo=popdens_tintalgo, & - taxmode='extend' ) - - if (masterproc) then - call shr_strdata_print(this%sdat_hdm,'population density data') - endif - - ! Add history fields - call hist_addfld1d (fname='HDM', units='counts/km^2', & - avgflag='A', long_name='human population density', & - ptr_lnd=this%forc_hdm, default='inactive') - - end subroutine hdm_init - - !----------------------------------------------------------------------- - subroutine hdm_interp( this, bounds) - ! - ! !DESCRIPTION: - ! Interpolate data stream information for population density. - ! - ! !USES: - use clm_time_manager, only : get_curr_date - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: g, ig - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - !----------------------------------------------------------------------- - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(this%sdat_hdm, mcdate, sec, mpicom, 'hdmdyn') - - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - this%forc_hdm(g) = this%sdat_hdm%avs(1)%rAttr(1,ig) - end do - - end subroutine hdm_interp - - !----------------------------------------------------------------------- - subroutine lnfm_init( this, bounds, NLFilename ) - ! - ! !DESCRIPTION: - ! - ! Initialize data stream information for Lightning. - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use histFileMod , only : hist_addfld1d - ! - ! !ARGUMENTS: - implicit none - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename - ! - ! !LOCAL VARIABLES: - integer :: stream_year_first_lightng ! first year in Lightning stream to use - integer :: stream_year_last_lightng ! last year in Lightning stream to use - integer :: model_year_align_lightng ! align stream_year_first_lnfm with - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - type(mct_ggrid) :: dom_clm ! domain information - character(len=CL) :: stream_fldFileName_lightng ! lightning stream filename to read - character(len=CL) :: lightng_tintalgo = 'linear'! time interpolation alogrithm - character(len=CL) :: lightngmapalgo = 'bilinear'! Mapping alogrithm - character(*), parameter :: subName = "('lnfmdyn_init')" - character(*), parameter :: F00 = "('(lnfmdyn_init) ',4a)" - !----------------------------------------------------------------------- - - namelist /light_streams/ & - stream_year_first_lightng, & - stream_year_last_lightng, & - model_year_align_lightng, & - lightngmapalgo, & - stream_fldFileName_lightng, & - lightng_tintalgo - - ! Default values for namelist - stream_year_first_lightng = 1 ! first year in stream to use - stream_year_last_lightng = 1 ! last year in stream to use - model_year_align_lightng = 1 ! align stream_year_first_lnfm with this model year - stream_fldFileName_lightng = ' ' - - ! 
Read light_streams namelist - if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call find_nlgroup_name(nu_nml, 'light_streams', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=light_streams,iostat=nml_error) - if (nml_error /= 0) then - call endrun(msg='ERROR reading light_streams namelist'//errMsg(sourcefile, __LINE__)) - end if - end if - close(nu_nml) - call relavu( nu_nml ) - endif - - call shr_mpi_bcast(stream_year_first_lightng, mpicom) - call shr_mpi_bcast(stream_year_last_lightng, mpicom) - call shr_mpi_bcast(model_year_align_lightng, mpicom) - call shr_mpi_bcast(stream_fldFileName_lightng, mpicom) - call shr_mpi_bcast(lightng_tintalgo, mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) 'light_stream settings:' - write(iulog,*) ' stream_year_first_lightng = ',stream_year_first_lightng - write(iulog,*) ' stream_year_last_lightng = ',stream_year_last_lightng - write(iulog,*) ' model_year_align_lightng = ',model_year_align_lightng - write(iulog,*) ' stream_fldFileName_lightng = ',stream_fldFileName_lightng - write(iulog,*) ' lightng_tintalgo = ',lightng_tintalgo - write(iulog,*) ' ' - endif - - call clm_domain_mct (bounds, dom_clm) - - call shr_strdata_create(this%sdat_lnfm,name="clmlnfm", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_lightng, & - yearLast=stream_year_last_lightng, & - yearAlign=model_year_align_lightng, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_lightng), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/trim(stream_fldFileName_lightng)/), & - fldListFile='lnfm', & - fldListModel='lnfm', & - fillalgo='none', & - tintalgo=lightng_tintalgo, & - mapalgo=lightngmapalgo, & - calendar=get_calendar(), & - taxmode='cycle' ) - - if (masterproc) then - call shr_strdata_print(this%sdat_lnfm,'Lightning data') - endif - - ! Add history fields - call hist_addfld1d (fname='LNFM', units='counts/km^2/hr', & - avgflag='A', long_name='Lightning frequency', & - ptr_lnd=this%forc_lnfm, default='inactive') - - end subroutine lnfm_init - - !----------------------------------------------------------------------- - subroutine lnfm_interp(this, bounds ) - ! - ! !DESCRIPTION: - ! Interpolate data stream information for Lightning. - ! - ! !USES: - use clm_time_manager, only : get_curr_date - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: g, ig - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - !----------------------------------------------------------------------- - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(this%sdat_lnfm, mcdate, sec, mpicom, 'lnfmdyn') - - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - this%forc_lnfm(g) = this%sdat_lnfm%avs(1)%rAttr(1,ig) - end do - - end subroutine lnfm_interp - - !----------------------------------------------------------------------- - subroutine surfdataread(this, bounds) - ! - ! 
!DESCRIPTION: - ! Read surface data set to populate relevant fire-related variables - ! - ! !USES: - use spmdMod , only : masterproc - use clm_varctl , only : nsrest, nsrStartup, fsurdat - use clm_varcon , only : grlnd - use ColumnType , only : col - use fileutils , only : getfil - use ncdio_pio - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: g,c ! indices - type(file_desc_t) :: ncid ! netcdf id - logical :: readvar ! true => variable is on initial dataset - character(len=256) :: locfn ! local filename - real(r8), pointer :: gdp(:) ! global gdp data (needs to be a pointer for use in ncdio) - real(r8), pointer :: peatf(:) ! global peatf data (needs to be a pointer for use in ncdio) - integer, pointer :: abm(:) ! global abm data (needs to be a pointer for use in ncdio) - !----------------------------------------------------------------------- - - ! -------------------------------------------------------------------- - ! Open surface dataset - ! -------------------------------------------------------------------- - - call getfil (fsurdat, locfn, 0) - call ncd_pio_openfile (ncid, locfn, 0) - - ! -------------------------------------------------------------------- - ! Read in GDP data - ! -------------------------------------------------------------------- - - allocate(gdp(bounds%begg:bounds%endg)) - call ncd_io(ncid=ncid, varname='gdp', flag='read', data=gdp, dim1name=grlnd, readvar=readvar) - if (.not. readvar) then - call endrun(msg=' ERROR: gdp NOT on surfdata file'//errMsg(sourcefile, __LINE__)) - end if - do c = bounds%begc, bounds%endc - g = col%gridcell(c) - this%gdp_lf_col(c) = gdp(g) - end do - deallocate(gdp) - - ! -------------------------------------------------------------------- - ! Read in peatf data - ! -------------------------------------------------------------------- - - allocate(peatf(bounds%begg:bounds%endg)) - call ncd_io(ncid=ncid, varname='peatf', flag='read', data=peatf, dim1name=grlnd, readvar=readvar) - if (.not. readvar) then - call endrun(msg=' ERROR: peatf NOT on surfdata file'//errMsg(sourcefile, __LINE__)) - end if - do c = bounds%begc, bounds%endc - g = col%gridcell(c) - this%peatf_lf_col(c) = peatf(g) - end do - deallocate(peatf) - - ! -------------------------------------------------------------------- - ! Read in ABM data - ! -------------------------------------------------------------------- - - allocate(abm(bounds%begg:bounds%endg)) - call ncd_io(ncid=ncid, varname='abm', flag='read', data=abm, dim1name=grlnd, readvar=readvar) - if (.not. readvar) then - call endrun(msg=' ERROR: abm NOT on surfdata file'//errMsg(sourcefile, __LINE__)) - end if - do c = bounds%begc, bounds%endc - g = col%gridcell(c) - this%abm_lf_col(c) = abm(g) - end do - deallocate(abm) - - ! Close file - - call ncd_pio_closefile(ncid) - - if (masterproc) then - write(iulog,*) 'Successfully read fmax, soil color, sand and clay boundary data' - write(iulog,*) - endif - - end subroutine surfdataread - - -end module FireDataBaseType diff --git a/src/cpl/mct/SoilMoistureStreamMod.F90 b/src/cpl/mct/SoilMoistureStreamMod.F90 deleted file mode 100644 index 8b366d6c8e..0000000000 --- a/src/cpl/mct/SoilMoistureStreamMod.F90 +++ /dev/null @@ -1,418 +0,0 @@ -module SoilMoistureStreamMod - - ! ********************************************************************** - ! --------------------------- IMPORTANT NOTE --------------------------- - ! - ! 
In cases using the NUOPC driver/mediator, we use a different version of this module, - ! based on CDEPS, which resides in src/cpl/nuopc/. Changes to the science here should - ! also be made in the similar file in src/cpl/nuopc. Once we start using CDEPS by - ! default, we can remove this version and move the CDEPS-based version into its place. - ! ********************************************************************** - -#include "shr_assert.h" - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Read in soil moisture from data stream - ! - ! !USES: - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create - use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance - use shr_kind_mod , only : r8 => shr_kind_r8 - use shr_kind_mod , only : CL => shr_kind_CL, CXX => shr_kind_CXX - use shr_log_mod , only : errMsg => shr_log_errMsg - use decompMod , only : bounds_type, subgrid_level_column - use abortutils , only : endrun - use clm_varctl , only : iulog, use_soil_moisture_streams, inst_name - use clm_varcon , only : grlnd - use controlMod , only : NLFilename - use domainMod , only : ldomain - use LandunitType , only : lun - use ColumnType , only : col - use SoilStateType , only : soilstate_type - use WaterStateBulkType , only : waterstatebulk_type - use perf_mod , only : t_startf, t_stopf - use spmdMod , only : masterproc, mpicom, comp_id - use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo - use mct_mod - use ncdio_pio - ! - ! !PUBLIC TYPES: - implicit none - private - ! - ! !PUBLIC MEMBER FUNCTIONS: - public :: PrescribedSoilMoistureInit ! position datasets for soil moisture - public :: PrescribedSoilMoistureAdvance ! Advance the soil moisture stream (outside of Open-MP loops) - public :: PrescribedSoilMoistureInterp ! interpolates between two periods of soil moisture data - - ! !PRIVATE MEMBER DATA: - type(shr_strdata_type) :: sdat_soilm ! soil moisture input data stream - integer :: ism ! Soil moisture steram index - integer, allocatable :: g_to_ig(:) ! Array matching gridcell index to data index - logical :: soilm_ignore_data_if_missing ! If should ignore overridding a point with soil moisture data - ! from the streams file, if the streams file shows that point - ! as missing (namelist item) - ! - ! !PRIVATE TYPES: - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - !----------------------------------------------------------------------- - -contains - - !----------------------------------------------------------------------- - ! - ! soil_moisture_init - ! - !----------------------------------------------------------------------- - subroutine PrescribedSoilMoistureInit(bounds) - ! - ! Initialize data stream information for soil moisture. - ! - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use shr_stream_mod , only : shr_stream_file_null - use shr_string_mod , only : shr_string_listCreateField - use clm_varpar , only : nlevsoi - ! - ! !ARGUMENTS: - implicit none - type(bounds_type), intent(in) :: bounds ! bounds - ! - ! !LOCAL VARIABLES: - integer :: i ! index - integer :: stream_year_first_soilm ! first year in Ustar stream to use - integer :: stream_year_last_soilm ! last year in Ustar stream to use - integer :: model_year_align_soilm ! align stream_year_first_soilm with - integer :: nu_nml ! 
unit for namelist file - integer :: nml_error ! namelist i/o error flag - integer :: soilm_offset ! Offset in time for dataset (sec) - type(mct_ggrid) :: dom_clm ! domain information - character(len=CL) :: stream_fldfilename_soilm ! ustar stream filename to read - character(len=CL) :: soilm_tintalgo = 'linear' ! Time interpolation alogrithm - - character(*), parameter :: subName = "('PrescribedSoilMoistureInit')" - character(*), parameter :: F00 = "('(PrescribedSoilMoistureInit) ',4a)" - character(*), parameter :: soilmString = "H2OSOI" ! base string for field string - character(CXX) :: fldList ! field string - !----------------------------------------------------------------------- - ! - ! deal with namelist variables here in init - ! - namelist /soil_moisture_streams/ & - stream_year_first_soilm, & - stream_year_last_soilm, & - model_year_align_soilm, & - soilm_tintalgo, & - soilm_offset, & - soilm_ignore_data_if_missing, & - stream_fldfilename_soilm - - ! Default values for namelist - stream_year_first_soilm = 1 ! first year in stream to use - stream_year_last_soilm = 1 ! last year in stream to use - model_year_align_soilm = 1 ! align stream_year_first_soilm with this model year - stream_fldfilename_soilm = shr_stream_file_null - soilm_offset = 0 - soilm_ignore_data_if_missing = .false. - - ! Read soilm_streams namelist - if (masterproc) then - open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call find_nlgroup_name(nu_nml, 'soil_moisture_streams', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=soil_moisture_streams,iostat=nml_error) - if (nml_error /= 0) then - call endrun(subname // ':: ERROR reading soil_moisture_streams namelist') - end if - else - call endrun(subname // ':: ERROR finding soilm_streams namelist') - end if - close(nu_nml) - endif - - call shr_mpi_bcast(stream_year_first_soilm, mpicom) - call shr_mpi_bcast(stream_year_last_soilm, mpicom) - call shr_mpi_bcast(model_year_align_soilm, mpicom) - call shr_mpi_bcast(stream_fldfilename_soilm, mpicom) - call shr_mpi_bcast(soilm_tintalgo, mpicom) - call shr_mpi_bcast(soilm_offset, mpicom) - call shr_mpi_bcast(soilm_ignore_data_if_missing, mpicom) - - if (masterproc) then - - write(iulog,*) ' ' - write(iulog,*) 'soil_moisture_stream settings:' - write(iulog,*) ' stream_year_first_soilm = ',stream_year_first_soilm - write(iulog,*) ' stream_year_last_soilm = ',stream_year_last_soilm - write(iulog,*) ' model_year_align_soilm = ',model_year_align_soilm - write(iulog,*) ' stream_fldfilename_soilm = ',trim(stream_fldfilename_soilm) - write(iulog,*) ' soilm_tintalgo = ',trim(soilm_tintalgo) - write(iulog,*) ' soilm_offset = ',soilm_offset - if ( soilm_ignore_data_if_missing )then - write(iulog,*) ' Do NOT override a point with streams data if the streams data is missing' - else - write(iulog,*) ' Abort, if you find a model point where the input streams data is set to missing value' - end if - - endif - - call clm_domain_mct (bounds, dom_clm, nlevels=nlevsoi) - - ! 
create the field list for these fields...use in shr_strdata_create - fldList = trim(soilmString) - if (masterproc) write(iulog,*) 'fieldlist: ', trim(fldList) - - call shr_strdata_create(sdat_soilm,name="soil_moisture", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsMap_lnd2Dsoi_gdc2glo, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - nzg=nlevsoi, & - yearFirst=stream_year_first_soilm, & - yearLast=stream_year_last_soilm, & - yearAlign=model_year_align_soilm, & - offset=soilm_offset, & - domFilePath='', & - domFileName=trim(stream_fldFileName_soilm), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domZvarName='levsoi' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/stream_fldFileName_soilm/), & - fldListFile=fldList, & - fldListModel=fldList, & - fillalgo='none', & - mapalgo='none', & - tintalgo=soilm_tintalgo, & - calendar=get_calendar(), & - dtlimit = 15._r8, & - taxmode='cycle' ) - - if (masterproc) then - call shr_strdata_print(sdat_soilm,'soil moisture data') - endif - - end subroutine PrescribedSoilMoistureInit - - - !----------------------------------------------------------------------- - ! - ! PrescribedSoilMoistureAdvance - ! - !----------------------------------------------------------------------- - subroutine PrescribedSoilMoistureAdvance( bounds ) - ! - ! Advanace the prescribed soil moisture stream - ! - ! !USES: - use clm_time_manager, only : get_curr_date - ! - ! !ARGUMENTS: - type(bounds_type) , intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - character(len=CL) :: stream_var_name - integer :: g, ig - integer :: ier ! error code - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - stream_var_name = 'H2OSOI' - - ! Determine variable index - ism = mct_aVect_indexRA(sdat_soilm%avs(1),trim(stream_var_name)) - - call shr_strdata_advance(sdat_soilm, mcdate, sec, mpicom, trim(stream_var_name)) - - ! Map gridcell to AV index - ier = 0 - if ( .not. allocated(g_to_ig) )then - allocate (g_to_ig(bounds%begg:bounds%endg), stat=ier) - if (ier /= 0) then - write(iulog,*) 'Prescribed soil moisture allocation error' - call endrun(msg=errMsg(sourcefile, __LINE__)) - end if - - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - g_to_ig(g) = ig - end do - end if - - end subroutine PrescribedSoilMoistureAdvance - - !----------------------------------------------------------------------- - ! - ! PrescribedSoilMoistureInterp - ! - !----------------------------------------------------------------------- - subroutine PrescribedSoilMoistureInterp(bounds, soilstate_inst, & - waterstatebulk_inst) - ! - ! Assign data stream information for prescribed soil moisture. - ! - ! !USES: - use clm_time_manager, only : get_curr_date - use clm_varpar , only : nlevsoi - use clm_varcon , only : denh2o, denice, watmin, spval - use landunit_varcon , only : istsoil, istcrop - ! - ! !ARGUMENTS: - implicit none - type(bounds_type) , intent(in) :: bounds - type(soilstate_type) , intent(in) :: soilstate_inst - type(waterstatebulk_type) , intent(inout) :: waterstatebulk_inst - ! - ! !LOCAL VARIABLES: - integer :: c, g, j, ig, n - real(r8) :: soilm_liq_frac ! 
liquid fraction of soil moisture - real(r8) :: soilm_ice_frac ! ice fraction of soil moisture - real(r8) :: moisture_increment ! soil moisture adjustment increment - real(r8) :: h2osoi_vol_initial ! initial vwc value - character(*), parameter :: subName = "('PrescribedSoilMoistureInterp')" - - !----------------------------------------------------------------------- - - SHR_ASSERT_FL( (lbound(sdat_soilm%avs(1)%rAttr,1) == ism ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(sdat_soilm%avs(1)%rAttr,1) == ism ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(g_to_ig,1) <= bounds%begg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(g_to_ig,1) >= bounds%endg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(sdat_soilm%avs(1)%rAttr,2) <= g_to_ig(bounds%begg) ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(sdat_soilm%avs(1)%rAttr,2) >= g_to_ig(bounds%endg)+(nlevsoi-1)*size(g_to_ig) ), sourcefile, __LINE__) - associate( & - dz => col%dz , & ! Input: [real(r8) (:,:) ] layer depth (m) - watsat => soilstate_inst%watsat_col , & ! Input: [real(r8) (:,:) ] volumetric soil water at saturation (porosity) - h2osoi_liq => waterstatebulk_inst%h2osoi_liq_col , & ! Input/Output: [real(r8) (:,:) ] liquid water (kg/m2) - h2osoi_ice => waterstatebulk_inst%h2osoi_ice_col , & ! Input/Output: [real(r8) (:,:) ] ice water (kg/m2) - h2osoi_vol => waterstatebulk_inst%h2osoi_vol_col , & ! Output: volumetric soil water (m3/m3) - h2osoi_vol_prs => waterstatebulk_inst%h2osoi_vol_prs_grc & ! Output: prescribed volumetric soil water (m3/m3) - ) - SHR_ASSERT_FL( (lbound(h2osoi_vol,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_vol,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_vol,2) == 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_vol,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(dz,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(dz,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(dz,2) <= 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(dz,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(watsat,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(watsat,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(watsat,2) <= 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(watsat,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_liq,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_liq,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_liq,2) <= 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_liq,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_ice,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_ice,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_ice,2) <= 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_ice,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_vol_prs,1) <= bounds%begg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_vol_prs,1) >= bounds%endg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_vol_prs,2) == 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_vol_prs,2) >= nlevsoi ), sourcefile, __LINE__) - ! - ! Set the prescribed soil moisture read from the file everywhere - ! 
- do g = bounds%begg, bounds%endg - ig = g_to_ig(g) - do j = 1, nlevsoi - - !n = ig + (j-1)*size(g_to_ig) - n = ig + (j-1)*size(g_to_ig) - - h2osoi_vol_prs(g,j) = sdat_soilm%avs(1)%rAttr(ism,n) - - ! If soil moiture is being interpolated in time and the result is - ! large that probably means one of the two data points is missing (set to spval) - if ( h2osoi_vol_prs(g,j) > 10.0_r8 .and. (h2osoi_vol_prs(g,j) /= spval) )then - h2osoi_vol_prs(g,j) = spval - end if - - end do - end do - - do c = bounds%begc, bounds%endc - ! - ! Set variable for each gridcell/column combination - ! - g = col%gridcell(c) - ig = g_to_ig(g) - - ! EBK Jan/2020, also check weights on gridcell (See https://github.com/ESCOMP/CTSM/issues/847) - if ( (lun%itype(col%landunit(c)) == istsoil) .or. (lun%itype(col%landunit(c)) == istcrop) .and. & - (col%wtgcell(c) /= 0._r8) ) then - ! this is a 2d field (gridcell/nlevsoi) ! - do j = 1, nlevsoi - - n = ig + (j-1)*size(g_to_ig) - - ! if soil water is zero, liq/ice fractions cannot be calculated - if((h2osoi_liq(c, j) + h2osoi_ice(c, j)) > 0._r8) then - - ! save original soil moisture value - h2osoi_vol_initial = h2osoi_vol(c,j) - - ! Check if the vegetated land mask from the dataset on the - ! file is different - if ( (h2osoi_vol_prs(g,j) == spval) .and. (h2osoi_vol_initial /= spval) )then - if ( soilm_ignore_data_if_missing )then - cycle - else - write(iulog,*) 'Input soil moisture dataset is not vegetated as expected: gridcell=', & - g, ' active = ', col%active(c) - call endrun(subgrid_index=c, subgrid_level=subgrid_level_column, & - msg = subname // & - ' ERROR:: The input soil moisture stream is NOT vegetated for one of the land points' ) - end if - end if - - ! update volumetric soil moisture from data prescribed from the file - h2osoi_vol(c,j) = h2osoi_vol_prs(g,j) - - - ! calculate liq/ice mass fractions - soilm_liq_frac = h2osoi_liq(c, j) /(h2osoi_liq(c, j) + h2osoi_ice(c, j)) - soilm_ice_frac = h2osoi_ice(c, j) /(h2osoi_liq(c, j) + h2osoi_ice(c, j)) - - ! calculate moisture increment - moisture_increment = h2osoi_vol(c,j) - h2osoi_vol_initial - ! add limitation check - moisture_increment = min((watsat(c,j) - h2osoi_vol_initial),max(-(h2osoi_vol_initial-watmin),moisture_increment)) - - ! update liq/ice water mass due to (volumetric) moisture increment - h2osoi_liq(c,j) = h2osoi_liq(c,j) + (soilm_liq_frac * moisture_increment * dz(c, j) * denh2o) - h2osoi_ice(c,j) = h2osoi_ice(c,j) + (soilm_ice_frac * moisture_increment * dz(c, j) * denice) - - else - call endrun(subgrid_index=c, subgrid_level=subgrid_level_column, & - msg = subname // ':: ERROR h2osoil liquid plus ice is zero') - endif - enddo - endif - end do - - end associate - - end subroutine PrescribedSoilMoistureInterp - -end module SoilMoistureStreamMod diff --git a/src/cpl/mct/UrbanTimeVarType.F90 b/src/cpl/mct/UrbanTimeVarType.F90 deleted file mode 100644 index 805ac47fbf..0000000000 --- a/src/cpl/mct/UrbanTimeVarType.F90 +++ /dev/null @@ -1,314 +0,0 @@ -module UrbanTimeVarType - - !------------------------------------------------------------------------------ - ! !DESCRIPTION: - ! Urban Time Varying Data - ! - ! 
!USES: - use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL - use shr_log_mod , only : errMsg => shr_log_errMsg - use abortutils , only : endrun - use decompMod , only : bounds_type, subgrid_level_landunit - use clm_varctl , only : iulog, inst_name - use landunit_varcon , only : isturb_MIN, isturb_MAX - use clm_varcon , only : spval - use LandunitType , only : lun - use GridcellType , only : grc - use mct_mod - use shr_strdata_mod , only : shr_strdata_type - ! - implicit none - save - private - ! - ! - - ! !PUBLIC TYPE - type, public :: urbantv_type - - real(r8), public, pointer :: t_building_max(:) ! lun maximum internal building air temperature (K) - type(shr_strdata_type) :: sdat_urbantv ! urban time varying input data stream - contains - - ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: Init ! Allocate and initialize urbantv - procedure, public :: urbantv_init ! Initialize urban time varying stream - procedure, public :: urbantv_interp ! Interpolate urban time varying stream - - end type urbantv_type - - !----------------------------------------------------------------------- - character(15), private :: stream_var_name(isturb_MIN:isturb_MAX) - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - -contains - - !----------------------------------------------------------------------- - subroutine Init(this, bounds, NLFilename) - ! - ! Allocate module variables and data structures - ! - ! !USES: - use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) - use histFileMod , only : hist_addfld1d - ! - ! !ARGUMENTS: - class(urbantv_type) :: this - type(bounds_type) , intent(in) :: bounds - character(len=*) , intent(in) :: NLFilename ! Namelist filename - ! - ! !LOCAL VARIABLES: - integer :: begl, endl - !--------------------------------------------------------------------- - - begl = bounds%begl; endl = bounds%endl - - ! Allocate urbantv data structure - - allocate(this%t_building_max (begl:endl)) ; this%t_building_max (:) = nan - - call this%urbantv_init(bounds, NLFilename) - call this%urbantv_interp(bounds) - - ! Add history fields - call hist_addfld1d (fname='TBUILD_MAX', units='K', & - avgflag='A', long_name='prescribed maximum interior building temperature', & - ptr_lunit=this%t_building_max, default='inactive', set_nourb=spval, & - l2g_scale_type='unity') - - - end subroutine Init - - !----------------------------------------------------------------------- - - !----------------------------------------------------------------------- - subroutine urbantv_init(this, bounds, NLFilename) - ! - ! !DESCRIPTION: - ! Initialize data stream information for urban time varying data - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use spmdMod , only : masterproc, mpicom, comp_id - use fileutils , only : getavu, relavu - use shr_mpi_mod , only : shr_mpi_bcast - use shr_string_mod , only : shr_string_listAppend - use shr_strdata_mod , only : shr_strdata_create, shr_strdata_print - use domainMod , only : ldomain - use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) - use landunit_varcon , only : isturb_TBD, isturb_HD, isturb_MD - use lnd_set_decomp_and_domain , only : gsmap_global - ! - ! !ARGUMENTS: - implicit none - class(urbantv_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! 
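urbantv_init reads its stream configuration from the urbantv_streams namelist group. For quick inspection outside the model, the same group can be parsed with the third-party f90nml package; the package, the file name lnd_in, and the presence of the group are assumptions for illustration, not part of this change:

import f90nml  # third-party package; pip install f90nml

nml = f90nml.read("lnd_in")                       # assumed namelist file name
group = nml.get("urbantv_streams", {})            # f90nml lowercases group/key names
print(group.get("stream_fldfilename_urbantv"))    # stream file path
print(group.get("stream_year_first_urbantv", 1))  # default mirrors the Fortran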
!LOCAL VARIABLES:
-    integer            :: begl, endl                 ! landunits
-    integer            :: ifield                     ! field index
-    integer            :: stream_year_first_urbantv  ! first year in urban tv stream to use
-    integer            :: stream_year_last_urbantv   ! last year in urban tv stream to use
-    integer            :: model_year_align_urbantv   ! align stream_year_first_urbantv
-                                                     ! with this model year
-    integer            :: nu_nml                     ! unit for namelist file
-    integer            :: nml_error                  ! namelist i/o error flag
-    type(mct_ggrid)    :: dom_clm                    ! domain information
-    character(len=CL)  :: stream_fldFileName_urbantv ! urban tv streams filename
-    character(len=CL)  :: urbantvmapalgo = 'nn'      ! mapping algorithm for urban AC
-    character(len=CL)  :: urbantv_tintalgo = 'linear' ! time interpolation algorithm
-    character(len=CL)  :: fldList                    ! field string
-    character(*), parameter :: urbantvString = "tbuildmax_" ! base string for field string
-    character(*), parameter :: subName = "('urbantv_init')"
-    character(*), parameter :: F00 = "('(urbantv_init) ',4a)"
-    !-----------------------------------------------------------------------
-    namelist /urbantv_streams/ &
-         stream_year_first_urbantv, &
-         stream_year_last_urbantv, &
-         model_year_align_urbantv, &
-         urbantvmapalgo, &
-         stream_fldFileName_urbantv, &
-         urbantv_tintalgo
-    !-----------------------------------------------------------------------
-
-    begl = bounds%begl; endl = bounds%endl
-
-    ! Default values for namelist
-    stream_year_first_urbantv  = 1      ! first year in stream to use
-    stream_year_last_urbantv   = 1      ! last year in stream to use
-    model_year_align_urbantv   = 1      ! align stream_year_first_urbantv with this model year
-    stream_fldFileName_urbantv = ' '
-
-    ! Read urbantv_streams namelist
-    if (masterproc) then
-       nu_nml = getavu()
-       open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error )
-       call find_nlgroup_name(nu_nml, 'urbantv_streams', status=nml_error)
-       if (nml_error == 0) then
-          read(nu_nml, nml=urbantv_streams,iostat=nml_error)
-          if (nml_error /= 0) then
-             call endrun(msg='ERROR reading urbantv_streams namelist'//errMsg(sourcefile, __LINE__))
-          end if
-       end if
-       close(nu_nml)
-       call relavu( nu_nml )
-    endif
-
-    call shr_mpi_bcast(stream_year_first_urbantv, mpicom)
-    call shr_mpi_bcast(stream_year_last_urbantv, mpicom)
-    call shr_mpi_bcast(model_year_align_urbantv, mpicom)
-    call shr_mpi_bcast(stream_fldFileName_urbantv, mpicom)
-    call shr_mpi_bcast(urbantv_tintalgo, mpicom)
-
-    if (masterproc) then
-       write(iulog,*) ' '
-       write(iulog,*) 'urbantv_streams settings:'
-       write(iulog,*) '  stream_year_first_urbantv  = ',stream_year_first_urbantv
-       write(iulog,*) '  stream_year_last_urbantv   = ',stream_year_last_urbantv
-       write(iulog,*) '  model_year_align_urbantv   = ',model_year_align_urbantv
-       write(iulog,*) '  stream_fldFileName_urbantv = ',stream_fldFileName_urbantv
-       write(iulog,*) '  urbantv_tintalgo           = ',urbantv_tintalgo
-       write(iulog,*) ' '
-    endif
-
-    call clm_domain_mct (bounds, dom_clm)
-
-    !
create the field list for these urbantv fields...use in shr_strdata_create - stream_var_name(:) = "NOT_SET" - stream_var_name(isturb_TBD) = urbantvString//"TBD" - stream_var_name(isturb_HD) = urbantvString//"HD" - stream_var_name(isturb_MD) = urbantvString//"MD" - fldList = "" - do ifield = isturb_MIN, isturb_MAX - call shr_string_listAppend( fldList, stream_var_name(ifield) ) - end do - - call shr_strdata_create(this%sdat_urbantv,name="clmurbantv", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_urbantv, & - yearLast=stream_year_last_urbantv, & - yearAlign=model_year_align_urbantv, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_urbantv), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='LANDMASK', & - filePath='', & - filename=(/trim(stream_fldFileName_urbantv)/) , & - fldListFile=fldList, & - fldListModel=fldList, & - fillalgo='none', & - mapalgo=urbantvmapalgo, & - calendar=get_calendar(), & - tintalgo=urbantv_tintalgo, & - taxmode='extend' ) - - if (masterproc) then - call shr_strdata_print(this%sdat_urbantv,'urban time varying data') - endif - - - end subroutine urbantv_init - - !----------------------------------------------------------------------- - subroutine urbantv_interp(this, bounds) - ! - ! !DESCRIPTION: - ! Interpolate data stream information for urban time varying data. - ! - ! !USES: - use clm_time_manager, only : get_curr_date - use spmdMod , only : mpicom - use shr_strdata_mod , only : shr_strdata_advance - use clm_instur , only : urban_valid - ! - ! !ARGUMENTS: - class(urbantv_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - logical :: found - integer :: l, glun, ig, g, ip - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - integer :: lindx ! landunit index - integer :: gindx ! gridcell index - !----------------------------------------------------------------------- - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(this%sdat_urbantv, mcdate, sec, mpicom, 'urbantvdyn') - - do l = bounds%begl,bounds%endl - if (lun%urbpoi(l)) then - glun = lun%gridcell(l) - ip = mct_aVect_indexRA(this%sdat_urbantv%avs(1),trim(stream_var_name(lun%itype(l)))) - ! - ! Determine vector index corresponding to glun - ! - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - if (g == glun) exit - end do - - this%t_building_max(l) = this%sdat_urbantv%avs(1)%rAttr(ip,ig) - else - this%t_building_max(l) = spval - end if - end do - - found = .false. - do l = bounds%begl,bounds%endl - if (lun%urbpoi(l)) then - glun = lun%gridcell(l) - ! - ! Determine vector index corresponding to glun - ! - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - if (g == glun) exit - end do - - if ( .not. urban_valid(g) .or. (this%t_building_max(l) <= 0._r8)) then - found = .true. 
- gindx = g - lindx = l - exit - end if - end if - end do - if ( found ) then - write(iulog,*)'ERROR: no valid urban data for g= ',gindx - write(iulog,*)'landunit type: ',lun%itype(lindx) - write(iulog,*)'urban_valid: ',urban_valid(gindx) - write(iulog,*)'t_building_max: ',this%t_building_max(lindx) - call endrun(subgrid_index=lindx, subgrid_level=subgrid_level_landunit, & - msg=errmsg(sourcefile, __LINE__)) - end if - - - end subroutine urbantv_interp - - !----------------------------------------------------------------------- - -end module UrbanTimeVarType diff --git a/src/cpl/mct/ch4FInundatedStreamType.F90 b/src/cpl/mct/ch4FInundatedStreamType.F90 deleted file mode 100644 index 3c26f4d109..0000000000 --- a/src/cpl/mct/ch4FInundatedStreamType.F90 +++ /dev/null @@ -1,389 +0,0 @@ -module ch4FInundatedStreamType - -#include "shr_assert.h" - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Contains methods for reading in finundated streams file for methane code. - ! - ! !USES - use shr_kind_mod , only: r8 => shr_kind_r8, CL => shr_kind_cl - use spmdMod , only: mpicom, masterproc - use clm_varctl , only: iulog, inst_name - use abortutils , only: endrun - use decompMod , only: bounds_type - use ch4varcon , only: finundation_mtd - - ! !PUBLIC TYPES: - implicit none - private - save - - type, public :: ch4finundatedstream_type - real(r8), pointer, private :: zwt0_gdc (:) ! col coefficient for determining finundated (m) - real(r8), pointer, private :: f0_gdc (:) ! col maximum inundated fraction for a gridcell (for methane code) - real(r8), pointer, private :: p3_gdc (:) ! col coefficient for determining finundated (m) - real(r8), pointer, private :: fws_slope_gdc (:) ! col slope in fws = slope * tws + intercept (A coefficient) - real(r8), pointer, private :: fws_intercept_gdc (:) ! col slope in fws = slope * tws + intercept (B coefficient) - contains - - ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: Init ! Initialize and read data in - procedure, public :: CalcFinundated ! Calculate finundated based on input streams - procedure, public :: UseStreams ! If streams will be used - - ! !PRIVATE MEMBER FUNCTIONS: - procedure, private :: InitAllocate ! Allocate data - - end type ch4finundatedstream_type - - - ! ! PRIVATE DATA: - - type, private :: streamcontrol_type - character(len=CL) :: stream_fldFileName_ch4finundated ! Filename - character(len=CL) :: ch4finundatedmapalgo ! map algo - character(len=CL) :: fldList ! List of fields to read - contains - procedure, private :: ReadNML ! Read in namelist - end type streamcontrol_type - - type(streamcontrol_type), private :: control ! Stream control data - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - !============================================================================== - -contains - - !============================================================================== - - subroutine Init(this, bounds, NLFilename) - ! - ! Initialize the ch4 finundated stream object - ! - ! 
Uses: - use clm_time_manager , only : get_calendar, get_curr_date - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use shr_nl_mod , only : shr_nl_find_group_name - use shr_mpi_mod , only : shr_mpi_bcast - use ndepStreamMod , only : clm_domain_mct - use domainMod , only : ldomain - use decompMod , only : bounds_type - use mct_mod , only : mct_ggrid, mct_avect_indexra - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create - use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance - use spmdMod , only : comp_id, iam - use ch4varcon , only : finundation_mtd_h2osfc - use ch4varcon , only : finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion - use lnd_set_decomp_and_domain , only : gsmap_global - ! - ! arguments - implicit none - class(ch4finundatedstream_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! local variables - integer :: ig, g ! Indices - type(mct_ggrid) :: dom_clm ! domain information - type(shr_strdata_type) :: sdat ! input data stream - integer :: index_ZWT0 = 0 ! Index of ZWT0 field - integer :: index_F0 = 0 ! Index of F0 field - integer :: index_P3 = 0 ! Index of P3 field - integer :: index_FWS_TWS_A = 0 ! Index of FWS_TWS_A field - integer :: index_FWS_TWS_B = 0 ! Index of FWS_TWS_B field - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - character(len=*), parameter :: stream_name = 'ch4finundated' - character(*), parameter :: subName = "('ch4finundatedstream::Init')" - !----------------------------------------------------------------------- - if ( finundation_mtd /= finundation_mtd_h2osfc )then - call this%InitAllocate( bounds ) - call control%ReadNML( bounds, NLFileName ) - - if ( this%useStreams() )then - call clm_domain_mct (bounds, dom_clm) - - call shr_strdata_create(sdat,name=stream_name, & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=1996, & - yearLast=1996, & - yearAlign=1, & - offset=0, & - domFilePath='', & - domFileName=trim(control%stream_fldFileName_ch4finundated), & - domTvarName='time', & - domXvarName='LONGXY' , & - domYvarName='LATIXY' , & - domAreaName='AREA', & - domMaskName='LANDMASK', & - filePath='', & - filename=(/trim(control%stream_fldFileName_ch4finundated)/), & - fldListFile=control%fldList, & - fldListModel=control%fldList, & - fillalgo='none', & - mapalgo=control%ch4finundatedmapalgo, & - calendar=get_calendar(), & - taxmode='extend' ) - - if (masterproc) then - call shr_strdata_print(sdat,'CLM '//stream_name//' data') - endif - - if( finundation_mtd == finundation_mtd_ZWT_inversion )then - index_ZWT0 = mct_avect_indexra(sdat%avs(1),'ZWT0') - index_F0 = mct_avect_indexra(sdat%avs(1),'F0' ) - index_P3 = mct_avect_indexra(sdat%avs(1),'P3' ) - else if( finundation_mtd == finundation_mtd_TWS_inversion )then - index_FWS_TWS_A = mct_avect_indexra(sdat%avs(1),'FWS_TWS_A') - index_FWS_TWS_B = mct_avect_indexra(sdat%avs(1),'FWS_TWS_B') - end if - - - ! Explicitly set current date to a hardcoded constant value. Otherwise - ! using the real date can cause roundoff differences that are - ! detrected as issues with exact restart. 
EBK M05/20/2017 - !call get_curr_date(year, mon, day, sec) - year = 1996 - mon = 12 - day = 31 - sec = 0 - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(sdat, mcdate, sec, mpicom, 'ch4finundated') - - ! Get the data - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - if ( index_ZWT0 > 0 )then - this%zwt0_gdc(g) = sdat%avs(1)%rAttr(index_ZWT0,ig) - end if - if ( index_F0 > 0 )then - this%f0_gdc(g) = sdat%avs(1)%rAttr(index_F0,ig) - end if - if ( index_P3 > 0 )then - this%p3_gdc(g) = sdat%avs(1)%rAttr(index_P3,ig) - end if - if ( index_FWS_TWS_A > 0 )then - this%fws_slope_gdc(g) = sdat%avs(1)%rAttr(index_FWS_TWS_A,ig) - end if - if ( index_FWS_TWS_B > 0 )then - this%fws_intercept_gdc(g) = sdat%avs(1)%rAttr(index_FWS_TWS_B,ig) - end if - end do - end if - end if - - end subroutine Init - - !----------------------------------------------------------------------- - logical function UseStreams(this) - ! - ! !DESCRIPTION: - ! Return true if - ! - ! !USES: - ! - ! !ARGUMENTS: - implicit none - class(ch4finundatedstream_type) :: this - ! - ! !LOCAL VARIABLES: - if ( trim(control%stream_fldFileName_ch4finundated) == '' )then - UseStreams = .false. - else - UseStreams = .true. - end if - end function UseStreams - - !----------------------------------------------------------------------- - subroutine InitAllocate(this, bounds) - ! - ! !DESCRIPTION: - ! Allocate module variables and data structures - ! - ! !USES: - use shr_infnan_mod, only: nan => shr_infnan_nan, assignment(=) - use ch4varcon , only: finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion - ! - ! !ARGUMENTS: - implicit none - class(ch4finundatedstream_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: begc, endc - integer :: begg, endg - !--------------------------------------------------------------------- - - begc = bounds%begc; endc = bounds%endc - begg = bounds%begg; endg = bounds%endg - - if( finundation_mtd == finundation_mtd_ZWT_inversion )then - allocate(this%zwt0_gdc (begg:endg)) ; this%zwt0_gdc (:) = nan - allocate(this%f0_gdc (begg:endg)) ; this%f0_gdc (:) = nan - allocate(this%p3_gdc (begg:endg)) ; this%p3_gdc (:) = nan - else if( finundation_mtd == finundation_mtd_TWS_inversion )then - allocate(this%fws_slope_gdc (begg:endg)) ; this%fws_slope_gdc (:) = nan - allocate(this%fws_intercept_gdc(begg:endg)) ; this%fws_intercept_gdc(:) = nan - end if - - end subroutine InitAllocate - - !----------------------------------------------------------------------- - subroutine CalcFinundated(this, bounds, num_soilc, filter_soilc, soilhydrology_inst, & - waterdiagnosticbulk_inst, qflx_surf_lag_col, finundated ) - ! - ! !DESCRIPTION: - ! - ! Calculate finundated according to the appropriate methodology - ! - ! !USES: - use ColumnType , only : col - use ch4varcon , only : finundation_mtd_h2osfc, finundation_mtd_ZWT_inversion - use ch4varcon , only : finundation_mtd_TWS_inversion - use clm_varpar , only : nlevsoi - use SoilHydrologyType, only : soilhydrology_type - use WaterDiagnosticBulkType , only : waterdiagnosticbulk_type - ! - ! !ARGUMENTS: - implicit none - class(ch4finundatedstream_type) :: this - type(bounds_type) , intent(in) :: bounds - integer , intent(in) :: num_soilc ! number of column soil points in column filter - integer , intent(in) :: filter_soilc(:) ! 
column filter for soil points - type(soilhydrology_type) , intent(in) :: soilhydrology_inst - type(waterdiagnosticbulk_type) , intent(in) :: waterdiagnosticbulk_inst - real(r8) , intent(in) :: qflx_surf_lag_col(bounds%begc:) !time-lagged surface runoff (mm H2O /s) - real(r8) , intent(inout) :: finundated(bounds%begc:) ! fractional inundated area in soil column (excluding dedicated wetland columns) - ! - ! !LOCAL VARIABLES: - integer :: g, c, fc ! Indices - real(r8) :: zwt_actual ! Total water storage (ZWT) to use either perched or total depending on conditions - - SHR_ASSERT_ALL_FL((ubound(qflx_surf_lag_col) == (/bounds%endc/)), sourcefile, __LINE__) - SHR_ASSERT_ALL_FL((ubound(finundated) == (/bounds%endc/)), sourcefile, __LINE__) - - associate( & - z => col%z , & ! Input: [real(r8) (:,:) ] layer depth (m) (-nlevsno+1:nlevsoi) - zwt => soilhydrology_inst%zwt_col , & ! Input: [real(r8) (:) ] water table depth (m) - zwt_perched => soilhydrology_inst%zwt_perched_col , & ! Input: [real(r8) (:) ] perched water table depth (m) - tws => waterdiagnosticbulk_inst%tws_grc , & ! Input: [real(r8) (:) ] total water storage (kg m-2) - frac_h2osfc => waterdiagnosticbulk_inst%frac_h2osfc_col & ! Input: [real(r8) (:) ] fraction of ground covered by surface water (0 to 1) - ) - - ! Calculate finundated - do fc = 1, num_soilc - c = filter_soilc(fc) - g = col%gridcell(c) - select case( finundation_mtd ) - case ( finundation_mtd_h2osfc ) - finundated(c) = frac_h2osfc(c) - case ( finundation_mtd_ZWT_inversion ) - if (this%zwt0_gdc(g) > 0._r8) then - if (zwt_perched(c) < z(c,nlevsoi)-1.e-5_r8 .and. zwt_perched(c) < zwt(c)) then - zwt_actual = zwt_perched(c) - else - zwt_actual = zwt(c) - end if - finundated(c) = this%f0_gdc(g) * exp(-zwt_actual/this%zwt0_gdc(g)) + this%p3_gdc(g)*qflx_surf_lag_col(c) - else - finundated(c) = this%p3_gdc(g)*qflx_surf_lag_col(c) - end if - case ( finundation_mtd_TWS_inversion ) - finundated(c) = this%fws_slope_gdc(g) * tws(g) + this%fws_intercept_gdc(g) - end select - finundated(c) = min( 1.0_r8, max( 0.0_r8, finundated(c) ) ) - end do - end associate - - end subroutine CalcFinundated - !============================================================================== - - subroutine ReadNML(this, bounds, NLFilename) - ! - ! Read the namelist data stream information. - ! - ! Uses: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use shr_nl_mod , only : shr_nl_find_group_name - use shr_log_mod , only : errMsg => shr_log_errMsg - use shr_mpi_mod , only : shr_mpi_bcast - use fileutils , only : getavu, relavu - use ch4varcon , only : finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion - ! - ! arguments - implicit none - class(streamcontrol_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! local variables - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - character(len=CL) :: stream_fldFileName_ch4finundated = ' ' - character(len=CL) :: ch4finundatedmapalgo = 'bilinear' - character(len=*), parameter :: namelist_name = 'ch4finundated' ! 
MUST agree with name in namelist and read - character(len=*), parameter :: shr_strdata_unset = 'NOT_SET' - character(len=*), parameter :: subName = "('ch4finundated::ReadNML')" - character(len=*), parameter :: F00 = "('(ch4finundated_readnml) ',4a)" - !----------------------------------------------------------------------- - - namelist /ch4finundated/ & ! MUST agree with namelist_name above - ch4finundatedmapalgo, stream_fldFileName_ch4finundated - - ! Default values for namelist - - ! Read ch4finundated namelist - if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call shr_nl_find_group_name(nu_nml, namelist_name, status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=ch4finundated,iostat=nml_error) ! MUST agree with namelist_name above - if (nml_error /= 0) then - call endrun(msg=' ERROR reading '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) - end if - else - call endrun(msg=' ERROR finding '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) - end if - close(nu_nml) - call relavu( nu_nml ) - endif - - call shr_mpi_bcast(stream_fldFileName_ch4finundated, mpicom) - call shr_mpi_bcast(ch4finundatedmapalgo , mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) namelist_name, ' stream settings:' - write(iulog,*) ' stream_fldFileName_ch4finundated = ',stream_fldFileName_ch4finundated - write(iulog,*) ' ch4finundatedmapalgo = ',ch4finundatedmapalgo - write(iulog,*) ' ' - endif - this%stream_fldFileName_ch4finundated = stream_fldFileName_ch4finundated - this%ch4finundatedmapalgo = ch4finundatedmapalgo - if ( finundation_mtd == finundation_mtd_ZWT_inversion )then - this%fldList = "ZWT0:F0:P3" - else if ( finundation_mtd == finundation_mtd_TWS_inversion )then - this%fldList = "FWS_TWS_A:FWS_TWS_B" - else - call endrun(msg=' ERROR do NOT know what list of variables to read for this finundation_mtd type'// & - errMsg(sourcefile, __LINE__)) - end if - - end subroutine ReadNML - -end module ch4FInundatedStreamType diff --git a/src/cpl/mct/clm_cpl_indices.F90 b/src/cpl/mct/clm_cpl_indices.F90 deleted file mode 100644 index 09ed89e92d..0000000000 --- a/src/cpl/mct/clm_cpl_indices.F90 +++ /dev/null @@ -1,330 +0,0 @@ -module clm_cpl_indices - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Module containing the indices for the fields passed between CLM and - ! the driver. Includes the River Transport Model fields (RTM) and the - ! fields needed by the land-ice component (sno). - ! - ! !USES: - - use shr_sys_mod, only : shr_sys_abort - implicit none - - SAVE - private ! By default make data private - ! - ! !PUBLIC MEMBER FUNCTIONS: - public :: clm_cpl_indices_set ! Set the coupler indices - ! - ! !PUBLIC DATA MEMBERS: - ! - integer , public :: glc_nec ! number of elevation classes for glacier_mec landunits - ! (from coupler) - must equal maxpatch_glc from namelist - - ! lnd -> drv (required) - - integer, public ::index_l2x_Flrl_rofsur ! lnd->rtm input liquid surface fluxes - integer, public ::index_l2x_Flrl_rofgwl ! lnd->rtm input liquid gwl fluxes - integer, public ::index_l2x_Flrl_rofsub ! lnd->rtm input liquid subsurface fluxes - integer, public ::index_l2x_Flrl_rofi ! lnd->rtm input frozen fluxes - integer, public ::index_l2x_Flrl_irrig ! irrigation withdrawal - - integer, public ::index_l2x_Sl_t ! temperature - integer, public ::index_l2x_Sl_tref ! 2m reference temperature - integer, public ::index_l2x_Sl_qref ! 
2m reference specific humidity - integer, public ::index_l2x_Sl_avsdr ! albedo: direct , visible - integer, public ::index_l2x_Sl_anidr ! albedo: direct , near-ir - integer, public ::index_l2x_Sl_avsdf ! albedo: diffuse, visible - integer, public ::index_l2x_Sl_anidf ! albedo: diffuse, near-ir - integer, public ::index_l2x_Sl_snowh ! snow height - integer, public ::index_l2x_Sl_u10 ! 10m wind - integer, public ::index_l2x_Sl_ddvel ! dry deposition velocities (optional) - integer, public ::index_l2x_Sl_fv ! friction velocity - integer, public ::index_l2x_Sl_ram1 ! aerodynamical resistance - integer, public ::index_l2x_Sl_soilw ! volumetric soil water - integer, public ::index_l2x_Fall_taux ! wind stress, zonal - integer, public ::index_l2x_Fall_tauy ! wind stress, meridional - integer, public ::index_l2x_Fall_lat ! latent heat flux - integer, public ::index_l2x_Fall_sen ! sensible heat flux - integer, public ::index_l2x_Fall_lwup ! upward longwave heat flux - integer, public ::index_l2x_Fall_evap ! evaporation water flux - integer, public ::index_l2x_Fall_swnet ! heat flux shortwave net - integer, public ::index_l2x_Fall_fco2_lnd ! co2 flux **For testing set to 0 - integer, public ::index_l2x_Fall_flxdst1 ! dust flux size bin 1 - integer, public ::index_l2x_Fall_flxdst2 ! dust flux size bin 2 - integer, public ::index_l2x_Fall_flxdst3 ! dust flux size bin 3 - integer, public ::index_l2x_Fall_flxdst4 ! dust flux size bin 4 - integer, public ::index_l2x_Fall_flxvoc ! MEGAN fluxes - integer, public ::index_l2x_Fall_flxfire ! Fire fluxes - integer, public ::index_l2x_Sl_ztopfire ! Top of fire emissions (m) - - ! In the following, index 0 is bare land, other indices are glc elevation classes - integer, allocatable, public ::index_l2x_Sl_tsrf(:) ! glc MEC temperature - integer, allocatable, public ::index_l2x_Sl_topo(:) ! glc MEC topo height - integer, allocatable, public ::index_l2x_Flgl_qice(:) ! glc MEC ice flux - - integer, public ::index_x2l_Sa_methane - integer, public ::index_l2x_Fall_methane - - integer, public :: nflds_l2x = 0 - - ! drv -> lnd (required) - - integer, public ::index_x2l_Sa_z ! bottom atm level height - integer, public ::index_x2l_Sa_topo ! atm surface height (m) - integer, public ::index_x2l_Sa_u ! bottom atm level zon wind - integer, public ::index_x2l_Sa_v ! bottom atm level mer wind - integer, public ::index_x2l_Sa_ptem ! bottom atm level pot temp - integer, public ::index_x2l_Sa_shum ! bottom atm level spec hum - integer, public ::index_x2l_Sa_pbot ! bottom atm level pressure - integer, public ::index_x2l_Sa_tbot ! bottom atm level temp - integer, public ::index_x2l_Faxa_lwdn ! downward lw heat flux - integer, public ::index_x2l_Faxa_rainc ! prec: liquid "convective" - integer, public ::index_x2l_Faxa_rainl ! prec: liquid "large scale" - integer, public ::index_x2l_Faxa_snowc ! prec: frozen "convective" - integer, public ::index_x2l_Faxa_snowl ! prec: frozen "large scale" - integer, public ::index_x2l_Faxa_swndr ! sw: nir direct downward - integer, public ::index_x2l_Faxa_swvdr ! sw: vis direct downward - integer, public ::index_x2l_Faxa_swndf ! sw: nir diffuse downward - integer, public ::index_x2l_Faxa_swvdf ! sw: vis diffuse downward - integer, public ::index_x2l_Sa_co2prog ! bottom atm level prognostic co2 - integer, public ::index_x2l_Sa_co2diag ! bottom atm level diagnostic co2 - integer, public ::index_x2l_Faxa_bcphidry ! flux: Black Carbon hydrophilic dry deposition - integer, public ::index_x2l_Faxa_bcphodry ! 
flux: Black Carbon hydrophobic dry deposition - integer, public ::index_x2l_Faxa_bcphiwet ! flux: Black Carbon hydrophilic wet deposition - integer, public ::index_x2l_Faxa_ocphidry ! flux: Organic Carbon hydrophilic dry deposition - integer, public ::index_x2l_Faxa_ocphodry ! flux: Organic Carbon hydrophobic dry deposition - integer, public ::index_x2l_Faxa_ocphiwet ! flux: Organic Carbon hydrophilic dry deposition - integer, public ::index_x2l_Faxa_dstwet1 ! flux: Size 1 dust -- wet deposition - integer, public ::index_x2l_Faxa_dstwet2 ! flux: Size 2 dust -- wet deposition - integer, public ::index_x2l_Faxa_dstwet3 ! flux: Size 3 dust -- wet deposition - integer, public ::index_x2l_Faxa_dstwet4 ! flux: Size 4 dust -- wet deposition - integer, public ::index_x2l_Faxa_dstdry1 ! flux: Size 1 dust -- dry deposition - integer, public ::index_x2l_Faxa_dstdry2 ! flux: Size 2 dust -- dry deposition - integer, public ::index_x2l_Faxa_dstdry3 ! flux: Size 3 dust -- dry deposition - integer, public ::index_x2l_Faxa_dstdry4 ! flux: Size 4 dust -- dry deposition - - integer, public ::index_x2l_Faxa_nhx ! flux nhx from atm - integer, public ::index_x2l_Faxa_noy ! flux noy from atm - - integer, public ::index_x2l_Flrr_flood ! rtm->lnd rof flood flux - integer, public ::index_x2l_Flrr_volr ! rtm->lnd rof volr total volume - integer, public ::index_x2l_Flrr_volrmch ! rtm->lnd rof volr main channel volume - - ! In the following, index 0 is bare land, other indices are glc elevation classes - integer, allocatable, public ::index_x2l_Sg_ice_covered(:) ! Fraction of glacier from glc model - integer, allocatable, public ::index_x2l_Sg_topo(:) ! Topo height from glc model - integer, allocatable, public ::index_x2l_Flgg_hflx(:) ! Heat flux from glc model - - integer, public ::index_x2l_Sg_icemask - integer, public ::index_x2l_Sg_icemask_coupled_fluxes - - integer, public :: nflds_x2l = 0 - - !----------------------------------------------------------------------- - -contains - - !----------------------------------------------------------------------- - subroutine clm_cpl_indices_set( ) - ! - ! !DESCRIPTION: - ! Set the coupler indices needed by the land model coupler - ! interface. - ! - ! !USES: - use seq_flds_mod , only: seq_flds_x2l_fields, seq_flds_l2x_fields - use mct_mod , only: mct_aVect, mct_aVect_init, mct_avect_indexra - use mct_mod , only: mct_aVect_clean, mct_avect_nRattr - use shr_drydep_mod , only: drydep_fields_token, n_drydep - use shr_megan_mod , only: shr_megan_fields_token, shr_megan_mechcomps_n - use shr_fire_emis_mod,only: shr_fire_emis_fields_token, shr_fire_emis_ztop_token, shr_fire_emis_mechcomps_n - use clm_varctl , only: ndep_from_cpl - use glc_elevclass_mod, only: glc_get_num_elevation_classes, glc_elevclass_as_string - ! - ! !ARGUMENTS: - implicit none - ! - ! !REVISION HISTORY: - ! Author: Mariana Vertenstein - ! 01/2011, Erik Kluzek: Added protex headers - ! - ! !LOCAL VARIABLES: - type(mct_aVect) :: l2x ! temporary, land to coupler - type(mct_aVect) :: x2l ! temporary, coupler to land - integer :: num - character(len=:), allocatable :: nec_str ! string version of glc elev. class number - character(len=64) :: name - character(len=32) :: subname = 'clm_cpl_indices_set' ! subroutine name - !----------------------------------------------------------------------- - - ! Determine attribute vector indices - - ! 
create temporary attribute vectors - call mct_aVect_init(x2l, rList=seq_flds_x2l_fields, lsize=1) - nflds_x2l = mct_avect_nRattr(x2l) - - call mct_aVect_init(l2x, rList=seq_flds_l2x_fields, lsize=1) - nflds_l2x = mct_avect_nRattr(l2x) - - !------------------------------------------------------------- - ! clm -> drv - !------------------------------------------------------------- - - index_l2x_Flrl_rofsur = mct_avect_indexra(l2x,'Flrl_rofsur') - index_l2x_Flrl_rofgwl = mct_avect_indexra(l2x,'Flrl_rofgwl') - index_l2x_Flrl_rofsub = mct_avect_indexra(l2x,'Flrl_rofsub') - index_l2x_Flrl_rofi = mct_avect_indexra(l2x,'Flrl_rofi') - index_l2x_Flrl_irrig = mct_avect_indexra(l2x,'Flrl_irrig') - - index_l2x_Sl_t = mct_avect_indexra(l2x,'Sl_t') - index_l2x_Sl_snowh = mct_avect_indexra(l2x,'Sl_snowh') - index_l2x_Sl_avsdr = mct_avect_indexra(l2x,'Sl_avsdr') - index_l2x_Sl_anidr = mct_avect_indexra(l2x,'Sl_anidr') - index_l2x_Sl_avsdf = mct_avect_indexra(l2x,'Sl_avsdf') - index_l2x_Sl_anidf = mct_avect_indexra(l2x,'Sl_anidf') - index_l2x_Sl_tref = mct_avect_indexra(l2x,'Sl_tref') - index_l2x_Sl_qref = mct_avect_indexra(l2x,'Sl_qref') - index_l2x_Sl_u10 = mct_avect_indexra(l2x,'Sl_u10') - index_l2x_Sl_ram1 = mct_avect_indexra(l2x,'Sl_ram1') - index_l2x_Sl_fv = mct_avect_indexra(l2x,'Sl_fv') - index_l2x_Sl_soilw = mct_avect_indexra(l2x,'Sl_soilw',perrwith='quiet') - - if ( n_drydep>0 )then - index_l2x_Sl_ddvel = mct_avect_indexra(l2x, trim(drydep_fields_token)) - else - index_l2x_Sl_ddvel = 0 - end if - - index_l2x_Fall_taux = mct_avect_indexra(l2x,'Fall_taux') - index_l2x_Fall_tauy = mct_avect_indexra(l2x,'Fall_tauy') - index_l2x_Fall_lat = mct_avect_indexra(l2x,'Fall_lat') - index_l2x_Fall_sen = mct_avect_indexra(l2x,'Fall_sen') - index_l2x_Fall_lwup = mct_avect_indexra(l2x,'Fall_lwup') - index_l2x_Fall_evap = mct_avect_indexra(l2x,'Fall_evap') - index_l2x_Fall_swnet = mct_avect_indexra(l2x,'Fall_swnet') - index_l2x_Fall_flxdst1 = mct_avect_indexra(l2x,'Fall_flxdst1') - index_l2x_Fall_flxdst2 = mct_avect_indexra(l2x,'Fall_flxdst2') - index_l2x_Fall_flxdst3 = mct_avect_indexra(l2x,'Fall_flxdst3') - index_l2x_Fall_flxdst4 = mct_avect_indexra(l2x,'Fall_flxdst4') - - index_l2x_Fall_fco2_lnd = mct_avect_indexra(l2x,'Fall_fco2_lnd',perrwith='quiet') - - index_l2x_Fall_methane = mct_avect_indexra(l2x,'Fall_methane',perrWith='quiet') - - ! MEGAN fluxes - if (shr_megan_mechcomps_n>0) then - index_l2x_Fall_flxvoc = mct_avect_indexra(l2x,trim(shr_megan_fields_token)) - else - index_l2x_Fall_flxvoc = 0 - endif - - ! Fire fluxes - if (shr_fire_emis_mechcomps_n>0) then - index_l2x_Fall_flxfire = mct_avect_indexra(l2x,trim(shr_fire_emis_fields_token)) - index_l2x_Sl_ztopfire = mct_avect_indexra(l2x,trim(shr_fire_emis_ztop_token)) - else - index_l2x_Fall_flxfire = 0 - index_l2x_Sl_ztopfire = 0 - endif - - !------------------------------------------------------------- - ! 
drv -> clm - !------------------------------------------------------------- - - index_x2l_Sa_z = mct_avect_indexra(x2l,'Sa_z') - index_x2l_Sa_topo = mct_avect_indexra(x2l,'Sa_topo') - index_x2l_Sa_u = mct_avect_indexra(x2l,'Sa_u') - index_x2l_Sa_v = mct_avect_indexra(x2l,'Sa_v') - index_x2l_Sa_ptem = mct_avect_indexra(x2l,'Sa_ptem') - index_x2l_Sa_pbot = mct_avect_indexra(x2l,'Sa_pbot') - index_x2l_Sa_tbot = mct_avect_indexra(x2l,'Sa_tbot') - index_x2l_Sa_shum = mct_avect_indexra(x2l,'Sa_shum') - index_x2l_Sa_co2prog = mct_avect_indexra(x2l,'Sa_co2prog',perrwith='quiet') - index_x2l_Sa_co2diag = mct_avect_indexra(x2l,'Sa_co2diag',perrwith='quiet') - - index_x2l_Sa_methane = mct_avect_indexra(x2l,'Sa_methane',perrWith='quiet') - - index_x2l_Flrr_volr = mct_avect_indexra(x2l,'Flrr_volr') - index_x2l_Flrr_volrmch = mct_avect_indexra(x2l,'Flrr_volrmch') - - index_x2l_Faxa_lwdn = mct_avect_indexra(x2l,'Faxa_lwdn') - index_x2l_Faxa_rainc = mct_avect_indexra(x2l,'Faxa_rainc') - index_x2l_Faxa_rainl = mct_avect_indexra(x2l,'Faxa_rainl') - index_x2l_Faxa_snowc = mct_avect_indexra(x2l,'Faxa_snowc') - index_x2l_Faxa_snowl = mct_avect_indexra(x2l,'Faxa_snowl') - index_x2l_Faxa_swndr = mct_avect_indexra(x2l,'Faxa_swndr') - index_x2l_Faxa_swvdr = mct_avect_indexra(x2l,'Faxa_swvdr') - index_x2l_Faxa_swndf = mct_avect_indexra(x2l,'Faxa_swndf') - index_x2l_Faxa_swvdf = mct_avect_indexra(x2l,'Faxa_swvdf') - index_x2l_Faxa_bcphidry = mct_avect_indexra(x2l,'Faxa_bcphidry') - index_x2l_Faxa_bcphodry = mct_avect_indexra(x2l,'Faxa_bcphodry') - index_x2l_Faxa_bcphiwet = mct_avect_indexra(x2l,'Faxa_bcphiwet') - index_x2l_Faxa_ocphidry = mct_avect_indexra(x2l,'Faxa_ocphidry') - index_x2l_Faxa_ocphodry = mct_avect_indexra(x2l,'Faxa_ocphodry') - index_x2l_Faxa_ocphiwet = mct_avect_indexra(x2l,'Faxa_ocphiwet') - index_x2l_Faxa_dstdry1 = mct_avect_indexra(x2l,'Faxa_dstdry1') - index_x2l_Faxa_dstdry2 = mct_avect_indexra(x2l,'Faxa_dstdry2') - index_x2l_Faxa_dstdry3 = mct_avect_indexra(x2l,'Faxa_dstdry3') - index_x2l_Faxa_dstdry4 = mct_avect_indexra(x2l,'Faxa_dstdry4') - index_x2l_Faxa_dstwet1 = mct_avect_indexra(x2l,'Faxa_dstwet1') - index_x2l_Faxa_dstwet2 = mct_avect_indexra(x2l,'Faxa_dstwet2') - index_x2l_Faxa_dstwet3 = mct_avect_indexra(x2l,'Faxa_dstwet3') - index_x2l_Faxa_dstwet4 = mct_avect_indexra(x2l,'Faxa_dstwet4') - - index_x2l_Faxa_nhx = mct_avect_indexra(x2l,'Faxa_nhx', perrWith='quiet') - index_x2l_Faxa_noy = mct_avect_indexra(x2l,'Faxa_noy', perrWith='quiet') - - if (index_x2l_Faxa_nhx > 0 .and. index_x2l_Faxa_noy > 0) then - ndep_from_cpl = .true. - end if - - index_x2l_Flrr_flood = mct_avect_indexra(x2l,'Flrr_flood') - - !------------------------------------------------------------- - ! glc coupling - !------------------------------------------------------------- - - index_x2l_Sg_icemask = mct_avect_indexra(x2l,'Sg_icemask') - index_x2l_Sg_icemask_coupled_fluxes = mct_avect_indexra(x2l,'Sg_icemask_coupled_fluxes') - - glc_nec = glc_get_num_elevation_classes() - if (glc_nec < 1) then - call shr_sys_abort('ERROR: In CLM4.5 and later, glc_nec must be at least 1.') - end if - - ! Create coupling fields for all glc elevation classes (1:glc_nec) plus bare land - ! (index 0). 
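The allocations and the loop below create one coupling field per glacier elevation class plus bare land. A short Python sketch of the naming; the zero-padded two-digit suffix is an assumption standing in for glc_elevclass_as_string:

glc_nec = 10  # illustrative number of glacier elevation classes

# e.g. Sg_ice_covered00 (bare land, class 0) through Sg_ice_covered10
names = ["Sg_ice_covered" + f"{num:02d}" for num in range(glc_nec + 1)]
print(names[0], names[-1])  # -> Sg_ice_covered00 Sg_ice_covered10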
- allocate(index_l2x_Sl_tsrf(0:glc_nec)) - allocate(index_l2x_Sl_topo(0:glc_nec)) - allocate(index_l2x_Flgl_qice(0:glc_nec)) - allocate(index_x2l_Sg_ice_covered(0:glc_nec)) - allocate(index_x2l_Sg_topo(0:glc_nec)) - allocate(index_x2l_Flgg_hflx(0:glc_nec)) - - do num = 0,glc_nec - nec_str = glc_elevclass_as_string(num) - - name = 'Sg_ice_covered' // nec_str - index_x2l_Sg_ice_covered(num) = mct_avect_indexra(x2l,trim(name)) - name = 'Sg_topo' // nec_str - index_x2l_Sg_topo(num) = mct_avect_indexra(x2l,trim(name)) - name = 'Flgg_hflx' // nec_str - index_x2l_Flgg_hflx(num) = mct_avect_indexra(x2l,trim(name)) - - name = 'Sl_tsrf' // nec_str - index_l2x_Sl_tsrf(num) = mct_avect_indexra(l2x,trim(name)) - name = 'Sl_topo' // nec_str - index_l2x_Sl_topo(num) = mct_avect_indexra(l2x,trim(name)) - name = 'Flgl_qice' // nec_str - index_l2x_Flgl_qice(num) = mct_avect_indexra(l2x,trim(name)) - end do - - call mct_aVect_clean(x2l) - call mct_aVect_clean(l2x) - - end subroutine clm_cpl_indices_set - -!======================================================================= - -end module clm_cpl_indices diff --git a/src/cpl/mct/laiStreamMod.F90 b/src/cpl/mct/laiStreamMod.F90 deleted file mode 100644 index 47d25287b7..0000000000 --- a/src/cpl/mct/laiStreamMod.F90 +++ /dev/null @@ -1,241 +0,0 @@ -module laiStreamMod - -#include "shr_assert.h" - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Read LAI from stream - ! - ! !USES: - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create - use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance - use shr_kind_mod , only : r8=>shr_kind_r8, CL=>shr_kind_CL, CS=>shr_kind_CS, CXX=>shr_kind_CXX - use shr_log_mod , only : errMsg => shr_log_errMsg - use decompMod , only : bounds_type - use abortutils , only : endrun - use clm_varctl , only : iulog, inst_name - use perf_mod , only : t_startf, t_stopf - use spmdMod , only : masterproc, mpicom, comp_id - use ncdio_pio - use mct_mod - ! - ! !PUBLIC TYPES: - implicit none - private - - ! !PUBLIC MEMBER FUNCTIONS: - public :: lai_init ! position datasets for LAI - public :: lai_advance ! Advance the LAI streams (outside of a Open-MP threading loop) - public :: lai_interp ! interpolates between two years of LAI data (when LAI streams - - ! !PRIVATE MEMBER DATA: - integer, allocatable :: g_to_ig(:) ! Array matching gridcell index to data index - type(shr_strdata_type) :: sdat_lai ! LAI input data stream - - character(len=*), parameter :: sourcefile = & - __FILE__ - -!============================================================================== -contains -!============================================================================== - - subroutine lai_init(bounds) - ! - ! Initialize data stream information for LAI. - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use shr_stream_mod , only : shr_stream_file_null - use shr_string_mod , only : shr_string_listCreateField - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use histFileMod , only : hist_addfld1d - use domainMod , only : ldomain - use controlMod , only : NLFilename - use lnd_set_decomp_and_domain , only : gsmap_global - ! - ! !ARGUMENTS: - implicit none - type(bounds_type), intent(in) :: bounds ! bounds - ! - ! !LOCAL VARIABLES: - integer :: stream_year_first_lai ! first year in Lai stream to use - integer :: stream_year_last_lai ! 
last year in Lai stream to use - integer :: model_year_align_lai ! align stream_year_first_lai with - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - type(mct_ggrid) :: dom_clm ! domain information - character(len=CL) :: stream_fldFileName_lai ! lai stream filename to read - character(len=CL) :: lai_mapalgo = 'bilinear' ! Mapping alogrithm - character(len=CL) :: lai_tintalgo = 'linear' ! Time interpolation alogrithm - character(len=CXX) :: fldList ! field string - character(*), parameter :: laiString = "LAI" ! base string for field string - integer , parameter :: numLaiFields = 16 ! number of fields to build field string - character(*), parameter :: subName = "('laidyn_init')" - !----------------------------------------------------------------------- - ! - ! deal with namelist variables here in init - ! - namelist /lai_streams/ & - stream_year_first_lai, & - stream_year_last_lai, & - model_year_align_lai, & - lai_mapalgo, & - stream_fldFileName_lai, & - lai_tintalgo - - ! Default values for namelist - stream_year_first_lai = 1 ! first year in stream to use - stream_year_last_lai = 1 ! last year in stream to use - model_year_align_lai = 1 ! align stream_year_first_lai with this model year - stream_fldFileName_lai = shr_stream_file_null - - ! Read lai_streams namelist - if (masterproc) then - open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call find_nlgroup_name(nu_nml, 'lai_streams', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=lai_streams,iostat=nml_error) - if (nml_error /= 0) then - call endrun(subname // ':: ERROR reading lai_streams namelist') - end if - else - call endrun(subname // ':: ERROR finding lai_streams namelist') - end if - close(nu_nml) - endif - call shr_mpi_bcast(stream_year_first_lai , mpicom) - call shr_mpi_bcast(stream_year_last_lai , mpicom) - call shr_mpi_bcast(model_year_align_lai , mpicom) - call shr_mpi_bcast(stream_fldFileName_lai , mpicom) - call shr_mpi_bcast(lai_tintalgo , mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) 'lai_stream settings:' - write(iulog,*) ' stream_year_first_lai = ',stream_year_first_lai - write(iulog,*) ' stream_year_last_lai = ',stream_year_last_lai - write(iulog,*) ' model_year_align_lai = ',model_year_align_lai - write(iulog,*) ' stream_fldFileName_lai = ',trim(stream_fldFileName_lai) - write(iulog,*) ' lai_tintalgo = ',trim(lai_tintalgo) - endif - - call clm_domain_mct (bounds, dom_clm) - - ! 
create the field list for these lai fields...use in shr_strdata_create - fldList = shr_string_listCreateField( numLaiFields, laiString ) - - call shr_strdata_create(sdat_lai,name="laidyn", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_lai, & - yearLast=stream_year_last_lai, & - yearAlign=model_year_align_lai, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_lai), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/stream_fldFileName_lai/), & - fldListFile=fldList, & - fldListModel=fldList, & - fillalgo='none', & - mapalgo=lai_mapalgo, & - tintalgo=lai_tintalgo, & - calendar=get_calendar(), & - taxmode='cycle' ) - - if (masterproc) then - call shr_strdata_print(sdat_lai,'LAI data') - endif - - end subroutine lai_init - - !============================================================================== - subroutine lai_advance( bounds ) - ! - ! Advance LAI streams - ! - ! !USES: - use clm_time_manager, only : get_curr_date - ! - ! !ARGUMENTS: - implicit none - type(bounds_type) , intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: g, ig ! Indices - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - !----------------------------------------------------------------------- - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(sdat_lai, mcdate, sec, mpicom, 'laidyn') - if ( .not. allocated(g_to_ig) )then - allocate (g_to_ig(bounds%begg:bounds%endg) ) - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - g_to_ig(g) = ig - end do - end if - - end subroutine lai_advance - - !============================================================================== - subroutine lai_interp(bounds, canopystate_inst) - ! - ! Interpolate data stream information for Lai. - ! - ! !USES: - use pftconMod , only : noveg - use CanopyStateType , only : canopystate_type - use PatchType , only : patch - ! - ! !ARGUMENTS: - implicit none - type(bounds_type) , intent(in) :: bounds - type(canopystate_type) , intent(inout) :: canopystate_inst - ! - ! !LOCAL VARIABLES: - integer :: ivt, p, ip, ig - character(len=CL) :: stream_var_name - !----------------------------------------------------------------------- - SHR_ASSERT_FL( (lbound(g_to_ig,1) <= bounds%begg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(g_to_ig,1) >= bounds%endg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(sdat_lai%avs(1)%rAttr,2) <= g_to_ig(bounds%begg) ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(sdat_lai%avs(1)%rAttr,2) >= g_to_ig(bounds%endg) ), sourcefile, __LINE__) - - do p = bounds%begp, bounds%endp - ivt = patch%itype(p) - ! Set lai for each gridcell/patch combination - if (ivt /= noveg) then - ! vegetated pft - write(stream_var_name,"(i6)") ivt - stream_var_name = 'LAI_'//trim(adjustl(stream_var_name)) - ip = mct_aVect_indexRA(sdat_lai%avs(1),trim(stream_var_name)) - ig = g_to_ig(patch%gridcell(p)) - canopystate_inst%tlai_patch(p) = sdat_lai%avs(1)%rAttr(ip,ig) - else - ! 
non-vegetated pft
-            canopystate_inst%tlai_patch(p) = 0._r8
-         endif
-      end do
-
-  end subroutine lai_interp
-
-end module LaiStreamMod
diff --git a/src/cpl/mct/lnd_comp_mct.F90 b/src/cpl/mct/lnd_comp_mct.F90
deleted file mode 100644
index e50602a378..0000000000
--- a/src/cpl/mct/lnd_comp_mct.F90
+++ /dev/null
@@ -1,632 +0,0 @@
-module lnd_comp_mct
-
-  !---------------------------------------------------------------------------
-  ! !DESCRIPTION:
-  ! Interface of the active land model component of CESM, the CLM (Community
-  ! Land Model), with the main CESM driver. This is a thin interface taking
-  ! CESM driver information in MCT (Model Coupling Toolkit) format and
-  ! converting it for use by CLM.
-  !
-  ! !uses:
-  use shr_kind_mod     , only : r8 => shr_kind_r8
-  use shr_sys_mod      , only : shr_sys_flush
-  use shr_log_mod      , only : errMsg => shr_log_errMsg
-  use mct_mod          , only : mct_avect, mct_gsmap, mct_gGrid
-  use decompmod        , only : bounds_type
-  use lnd_import_export, only : lnd_import, lnd_export
-  !
-  ! !public member functions:
-  implicit none
-  private                      ! by default make data private
-  !
-  ! !public member functions:
-  public :: lnd_init_mct       ! clm initialization
-  public :: lnd_run_mct        ! clm run phase
-  public :: lnd_final_mct      ! clm finalization/cleanup
-  !
-  ! !private member functions:
-  private :: lnd_domain_mct    ! set the land model domain information
-  private :: lnd_handle_resume ! handle pause/resume signals from the coupler
-
-  character(len=*), parameter, private :: sourcefile = &
-       __FILE__
-
-!====================================================================================
contains
-!====================================================================================
-
-  subroutine lnd_init_mct( EClock, cdata_l, x2l_l, l2x_l, NLFilename )
-    !
-    ! !DESCRIPTION:
-    ! Initialize the land surface model and obtain relevant arrays back from it
-    ! (i.e. albedos, surface temperature and snow cover over land).
-    !
-    !
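Among other things, lnd_init_mct translates the driver's start type into CLM's restart mode before running initialize1 and initialize2. A compact Python sketch of that dispatch; the strings and integer codes are assumptions standing in for seq_infodata_start_type_* and nsrStartup/nsrContinue/nsrBranch:

# Illustrative stand-ins; the real constants live in seq_infodata_mod and clm_varctl.
NSR_STARTUP, NSR_CONTINUE, NSR_BRANCH = 0, 1, 2

def restart_mode(starttype):
    mapping = {"startup": NSR_STARTUP, "continue": NSR_CONTINUE, "branch": NSR_BRANCH}
    if starttype not in mapping:
        raise RuntimeError("lnd_init_mct ERROR: unknown starttype " + starttype)
    return mapping[starttype]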
!USES: - use shr_kind_mod , only : shr_kind_cl - use abortutils , only : endrun - use clm_time_manager , only : get_nstep, set_timemgr_init - use clm_initializeMod, only : initialize1, initialize2 - use clm_instMod , only : water_inst, lnd2atm_inst, lnd2glc_inst - use clm_varctl , only : finidat, single_column, clm_varctl_set, iulog - use clm_varctl , only : inst_index, inst_suffix, inst_name - use clm_varorb , only : eccen, obliqr, lambm0, mvelpp - use controlMod , only : control_setNL - use decompMod , only : get_proc_bounds - use domainMod , only : ldomain - use shr_file_mod , only : shr_file_setLogUnit, shr_file_setLogLevel - use shr_file_mod , only : shr_file_getLogUnit, shr_file_getLogLevel - use shr_file_mod , only : shr_file_getUnit, shr_file_setIO - use seq_cdata_mod , only : seq_cdata, seq_cdata_setptrs - use seq_timemgr_mod , only : seq_timemgr_EClockGetData - use seq_infodata_mod , only : seq_infodata_type, seq_infodata_GetData, seq_infodata_PutData, & - seq_infodata_start_type_start, seq_infodata_start_type_cont, & - seq_infodata_start_type_brnch - use seq_comm_mct , only : seq_comm_suffix, seq_comm_inst, seq_comm_name - use seq_flds_mod , only : seq_flds_x2l_fields, seq_flds_l2x_fields - use spmdMod , only : masterproc, spmd_init - use clm_varctl , only : nsrStartup, nsrContinue, nsrBranch - use clm_cpl_indices , only : clm_cpl_indices_set - use mct_mod , only : mct_aVect_init, mct_aVect_zero, mct_gsMap, mct_gsMap_init - use decompMod , only : gindex_global - use lnd_set_decomp_and_domain, only : lnd_set_decomp_and_domain_from_surfrd, gsmap_global - use ESMF - ! - ! !ARGUMENTS: - type(ESMF_Clock), intent(inout) :: EClock ! Input synchronization clock - type(seq_cdata), intent(inout) :: cdata_l ! Input land-model driver data - type(mct_aVect), intent(inout) :: x2l_l, l2x_l ! land model import and export states - character(len=*), optional, intent(in) :: NLFilename ! Namelist filename to read - ! - ! !LOCAL VARIABLES: - integer :: LNDID ! Land identifyer - integer :: mpicom_lnd ! MPI communicator - type(mct_gsMap), pointer :: GSMap_lnd ! Land model MCT GS map - type(mct_gGrid), pointer :: dom_l ! Land model domain - type(seq_infodata_type), pointer :: infodata ! CESM driver level info data - integer :: lsize ! size of attribute vector - integer :: gsize ! global size - integer :: g,i,j ! indices - integer :: dtime_sync ! coupling time-step from the input synchronization clock - logical :: exists ! true if file exists - logical :: atm_aero ! Flag if aerosol data sent from atm model - real(r8) :: scmlat ! single-column latitude - real(r8) :: scmlon ! single-column longitude - character(len=SHR_KIND_CL) :: caseid ! case identifier name - character(len=SHR_KIND_CL) :: ctitle ! case description title - character(len=SHR_KIND_CL) :: starttype ! start-type (startup, continue, branch, hybrid) - character(len=SHR_KIND_CL) :: calendar ! calendar type name - character(len=SHR_KIND_CL) :: hostname ! hostname of machine running on - character(len=SHR_KIND_CL) :: version ! Model version - character(len=SHR_KIND_CL) :: username ! user running the model - integer :: nsrest ! clm restart type - integer :: ref_ymd ! reference date (YYYYMMDD) - integer :: ref_tod ! reference time of day (sec) - integer :: start_ymd ! start date (YYYYMMDD) - integer :: start_tod ! start time of day (sec) - logical :: brnch_retain_casename ! flag if should retain the case name on a branch start type - integer :: lbnum ! input to memory diagnostic - integer :: shrlogunit,shrloglev ! 
old values for log unit and log level - type(bounds_type) :: bounds ! bounds - logical :: noland - integer :: ni,nj - real(r8) , parameter :: rundef = -9999999._r8 - character(len=32), parameter :: sub = 'lnd_init_mct' - character(len=*), parameter :: format = "('("//trim(sub)//") :',A)" - !----------------------------------------------------------------------- - - ! Set cdata data - call seq_cdata_setptrs(cdata_l, ID=LNDID, mpicom=mpicom_lnd, & - gsMap=GSMap_lnd, dom=dom_l, infodata=infodata) - - ! Determine attriute vector indices - call clm_cpl_indices_set() - - ! Initialize clm MPI communicator - call spmd_init( mpicom_lnd, LNDID ) - -#if (defined _MEMTRACE) - if(masterproc) then - lbnum=1 - call memmon_dump_fort('memmon.out','lnd_init_mct:start::',lbnum) - endif -#endif - - inst_name = seq_comm_name(LNDID) - inst_index = seq_comm_inst(LNDID) - inst_suffix = seq_comm_suffix(LNDID) - ! Initialize io log unit - - call shr_file_getLogUnit (shrlogunit) - if (masterproc) then - inquire(file='lnd_modelio.nml'//trim(inst_suffix),exist=exists) - if (exists) then - iulog = shr_file_getUnit() - call shr_file_setIO('lnd_modelio.nml'//trim(inst_suffix),iulog) - end if - write(iulog,format) "CLM land model initialization" - else - iulog = shrlogunit - end if - - call shr_file_getLogLevel(shrloglev) - call shr_file_setLogUnit (iulog) - - ! Use infodata to set orbital values - call seq_infodata_GetData( infodata, orb_eccen=eccen, orb_mvelpp=mvelpp, & - orb_lambm0=lambm0, orb_obliqr=obliqr ) - - ! Consistency check on namelist filename - call control_setNL("lnd_in"//trim(inst_suffix)) - - ! Initialize clm - ! initialize1 reads namelists - ! decomp and domain are set in lnd_set_decomp_and_domain_from_surfrd - ! initialize2 performs the rest of initialization - call seq_timemgr_EClockGetData(EClock, & - start_ymd=start_ymd, & - start_tod=start_tod, ref_ymd=ref_ymd, & - ref_tod=ref_tod, & - calendar=calendar, & - dtime=dtime_sync) - if (masterproc) then - write(iulog,*)'dtime = ',dtime_sync - end if - call seq_infodata_GetData(infodata, case_name=caseid, & - case_desc=ctitle, single_column=single_column, & - scmlat=scmlat, scmlon=scmlon, & - brnch_retain_casename=brnch_retain_casename, & - start_type=starttype, model_version=version, & - hostname=hostname, username=username ) - - ! Single Column - if ( single_column .and. (scmlat == rundef .or. scmlon == rundef ) ) then - call endrun(msg=' ERROR:: single column mode on -- but scmlat and scmlon are NOT set'//& - errMsg(sourcefile, __LINE__)) - end if - - ! Note that we assume that CTSM's internal dtime matches the coupling time step. - ! i.e., we currently do NOT allow sub-cycling within a coupling time step. - call set_timemgr_init( calendar_in=calendar, start_ymd_in=start_ymd, start_tod_in=start_tod, & - ref_ymd_in=ref_ymd, ref_tod_in=ref_tod, dtime_in=dtime_sync) - - if ( trim(starttype) == trim(seq_infodata_start_type_start)) then - nsrest = nsrStartup - else if (trim(starttype) == trim(seq_infodata_start_type_cont) ) then - nsrest = nsrContinue - else if (trim(starttype) == trim(seq_infodata_start_type_brnch)) then - nsrest = nsrBranch - else - call endrun( sub//' ERROR: unknown starttype' ) - end if - - ! set default values for run control variables - call clm_varctl_set(caseid_in=caseid, ctitle_in=ctitle, & - brnch_retain_casename_in=brnch_retain_casename, & - single_column_in=single_column, scmlat_in=scmlat, & - scmlon_in=scmlon, nsrest_in=nsrest, version_in=version, & - hostname_in=hostname, username_in=username) - - ! 
-  ! Read namelists
-  call initialize1(dtime=dtime_sync)
-
-  ! Initialize decomposition and domain (ldomain) type
-  call lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj)
-
-  ! If no land then exit out of initialization
-  if ( noland ) then
-
-     call seq_infodata_PutData( infodata, lnd_present =.false.)
-     call seq_infodata_PutData( infodata, lnd_prognostic=.false.)
-
-  else
-
-     ! Determine if aerosol and dust deposition come from atmosphere component
-     call seq_infodata_GetData(infodata, atm_aero=atm_aero )
-     if ( .not. atm_aero )then
-        call endrun( sub//' ERROR: atmosphere model MUST send aerosols to CLM' )
-     end if
-
-     ! Initialize clm gsMap, clm domain and clm attribute vectors
-     call get_proc_bounds( bounds )
-     lsize = bounds%endg - bounds%begg + 1
-     gsize = ldomain%ni * ldomain%nj
-     call mct_gsMap_init( gsMap_lnd, gindex_global, mpicom_lnd, LNDID, lsize, gsize )
-     gsmap_global => gsmap_lnd ! module variable in lnd_set_decomp_and_domain
-     call lnd_domain_mct( bounds, lsize, gsMap_lnd, dom_l )
-     call mct_aVect_init(x2l_l, rList=seq_flds_x2l_fields, lsize=lsize)
-     call mct_aVect_zero(x2l_l)
-     call mct_aVect_init(l2x_l, rList=seq_flds_l2x_fields, lsize=lsize)
-     call mct_aVect_zero(l2x_l)
-
-     ! Finish initializing clm
-     call initialize2(ni,nj)
-
-     ! Create land export state
-     call lnd_export(bounds, water_inst%waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x_l%rattr)
-
-     ! Fill in infodata settings
-     call seq_infodata_PutData(infodata, lnd_prognostic=.true.)
-     call seq_infodata_PutData(infodata, lnd_nx=ldomain%ni, lnd_ny=ldomain%nj)
-     call lnd_handle_resume( cdata_l )
-
-     ! Reset shr logging to original values
-     call shr_file_setLogUnit (shrlogunit)
-     call shr_file_setLogLevel(shrloglev)
-
-#if (defined _MEMTRACE)
-     if(masterproc) then
-        write(iulog,*) TRIM(Sub) // ':end::'
-        lbnum=1
-        call memmon_dump_fort('memmon.out','lnd_int_mct:end::',lbnum)
-        call memmon_reset_addr()
-     endif
-#endif
-  end if
-
-end subroutine lnd_init_mct
-
-!====================================================================================
-subroutine lnd_run_mct(EClock, cdata_l, x2l_l, l2x_l)
-  !
-  ! !DESCRIPTION:
-  ! Run clm model
-  !
-  ! !USES:
-  use shr_kind_mod , only : r8 => shr_kind_r8
-  use clm_instMod , only : water_inst, lnd2atm_inst, atm2lnd_inst, lnd2glc_inst, glc2lnd_inst
-  use clm_driver , only : clm_drv
-  use clm_time_manager, only : get_curr_date, get_nstep, get_curr_calday, get_step_size
-  use clm_time_manager, only : advance_timestep, update_rad_dtime
-  use decompMod , only : get_proc_bounds
-  use abortutils , only : endrun
-  use clm_varctl , only : iulog
-  use clm_varorb , only : eccen, obliqr, lambm0, mvelpp
-  use shr_file_mod , only : shr_file_setLogUnit, shr_file_setLogLevel
-  use shr_file_mod , only : shr_file_getLogUnit, shr_file_getLogLevel
-  use seq_cdata_mod , only : seq_cdata, seq_cdata_setptrs
-  use seq_timemgr_mod , only : seq_timemgr_EClockGetData, seq_timemgr_StopAlarmIsOn
-  use seq_timemgr_mod , only : seq_timemgr_RestartAlarmIsOn, seq_timemgr_EClockDateInSync
-  use seq_infodata_mod, only : seq_infodata_type, seq_infodata_GetData
-  use spmdMod , only : masterproc, mpicom
-  use perf_mod , only : t_startf, t_stopf, t_barrierf
-  use shr_orb_mod , only : shr_orb_decl
-  use ESMF
-  !
-  ! !ARGUMENTS:
-  type(ESMF_Clock) , intent(inout) :: EClock ! Input synchronization clock from driver
-  type(seq_cdata) , intent(inout) :: cdata_l ! Input driver data for land model
-  type(mct_aVect) , intent(inout) :: x2l_l ! Import state to land model
-  type(mct_aVect) , intent(inout) :: l2x_l ! Export state from land model
-  !
-  ! !LOCAL VARIABLES:
-  integer :: ymd_sync ! Sync date (YYYYMMDD)
-  integer :: yr_sync ! Sync current year
-  integer :: mon_sync ! Sync current month
-  integer :: day_sync ! Sync current day
-  integer :: tod_sync ! Sync current time of day (sec)
-  integer :: ymd ! CLM current date (YYYYMMDD)
-  integer :: yr ! CLM current year
-  integer :: mon ! CLM current month
-  integer :: day ! CLM current day
-  integer :: tod ! CLM current time of day (sec)
-  integer :: dtime ! time step increment (sec)
-  integer :: nstep ! time step index
-  logical :: rstwr_sync ! .true. ==> write restart file before returning
-  logical :: rstwr ! .true. ==> write restart file before returning
-  logical :: nlend_sync ! Flag signaling last time-step
-  logical :: nlend ! .true. ==> last time-step
-  logical :: dosend ! true => send data back to driver
-  logical :: doalb ! .true. ==> do albedo calculation on this time step
-  logical :: rof_prognostic ! .true. => running with a prognostic ROF model
-  logical :: glc_present ! .true. => running with a non-stub GLC model
-  real(r8) :: nextsw_cday ! calday from clock of next radiation computation
-  real(r8) :: caldayp1 ! clm calday plus dtime offset
-  integer :: shrlogunit,shrloglev ! old values for share log unit and log level
-  integer :: lbnum ! input to memory diagnostic
-  integer :: g,i,lsize ! counters
-  real(r8) :: calday ! calendar day for nstep
-  real(r8) :: declin ! solar declination angle in radians for nstep
-  real(r8) :: declinp1 ! solar declination angle in radians for nstep+1
-  real(r8) :: eccf ! earth orbit eccentricity factor
-  real(r8) :: recip ! reciprical
-  logical,save :: first_call = .true. ! first call work
-  type(seq_infodata_type),pointer :: infodata ! CESM information from the driver
-  type(mct_gGrid), pointer :: dom_l ! Land model domain data
-  type(bounds_type) :: bounds ! bounds
-  character(len=32) :: rdate ! date char string for restart file names
-  character(len=32), parameter :: sub = "lnd_run_mct"
-  !---------------------------------------------------------------------------
-
-  ! Determine processor bounds
-
-  call get_proc_bounds(bounds)
-
-#if (defined _MEMTRACE)
-  if(masterproc) then
-     lbnum=1
-     call memmon_dump_fort('memmon.out','lnd_run_mct:start::',lbnum)
-  endif
-#endif
-
-  ! Reset shr logging to my log file
-  call shr_file_getLogUnit (shrlogunit)
-  call shr_file_getLogLevel(shrloglev)
-  call shr_file_setLogUnit (iulog)
-
-  ! Determine time of next atmospheric shortwave calculation
-  call seq_cdata_setptrs(cdata_l, infodata=infodata, dom=dom_l)
-  call seq_timemgr_EClockGetData(EClock, &
-       curr_ymd=ymd, curr_tod=tod_sync, &
-       curr_yr=yr_sync, curr_mon=mon_sync, curr_day=day_sync)
-  call seq_infodata_GetData(infodata, nextsw_cday=nextsw_cday )
-
-  dtime = get_step_size()
-
-  ! Handle pause/resume signals from coupler
-  call lnd_handle_resume( cdata_l )
-
-  write(rdate,'(i4.4,"-",i2.2,"-",i2.2,"-",i5.5)') yr_sync,mon_sync,day_sync,tod_sync
-  nlend_sync = seq_timemgr_StopAlarmIsOn( EClock )
-  rstwr_sync = seq_timemgr_RestartAlarmIsOn( EClock )
-
-  ! Determine if we're running with a prognostic ROF model, and if we're running with a
-  ! non-stub GLC model. These won't change throughout the run, but we can't count on
-  ! their being set in initialization, so need to get them in the run method.
-
-  call seq_infodata_GetData( infodata, &
-       rof_prognostic=rof_prognostic, &
-       glc_present=glc_present)
-
-  ! Map MCT to land data type
-  ! Perform downscaling if appropriate
-
-
-  ! Map to clm (only when state and/or fluxes need to be updated)
-
-  call t_startf ('lc_lnd_import')
-  call lnd_import( bounds, &
-       x2l = x2l_l%rattr, &
-       glc_present = glc_present, &
-       atm2lnd_inst = atm2lnd_inst, &
-       glc2lnd_inst = glc2lnd_inst, &
-       wateratm2lndbulk_inst = water_inst%wateratm2lndbulk_inst)
-  call t_stopf ('lc_lnd_import')
-
-  ! Use infodata to set orbital values if updated mid-run
-
-  call seq_infodata_GetData( infodata, orb_eccen=eccen, orb_mvelpp=mvelpp, &
-       orb_lambm0=lambm0, orb_obliqr=obliqr )
-
-  ! Loop over time steps in coupling interval
-
-  dosend = .false.
-  do while(.not. dosend)
-
-     ! Determine if dosend
-     ! When time is not updated at the beginning of the loop - then return only if
-     ! are in sync with clock before time is updated
-     !
-     ! NOTE(wjs, 2020-03-09) I think the do while (.not. dosend) loop only is important
-     ! for the first time step (when we run 2 steps). After that, we now assume that we
-     ! run one time step per coupling interval (based on setting the model's dtime from
-     ! the driver). (According to Mariana Vertenstein, sub-cycling (running multiple
-     ! land model time steps per coupling interval) used to be supported, but hasn't
-     ! been fully supported for a long time.) We may want to rework this logic to make
-     ! this more explicit, or - ideally - get rid of this extra time step at the start
-     ! of the run, at which point I think we could do away with this looping entirely.
-
-     call get_curr_date( yr, mon, day, tod )
-     ymd = yr*10000 + mon*100 + day
-     tod = tod
-     dosend = (seq_timemgr_EClockDateInSync( EClock, ymd, tod))
-
-     ! Determine doalb based on nextsw_cday sent from atm model
-
-     nstep = get_nstep()
-     caldayp1 = get_curr_calday(offset=dtime, reuse_day_365_for_day_366=.true.)
-     if (nstep == 0) then
-        doalb = .false.
-     else if (nstep == 1) then
-        doalb = (abs(nextsw_cday- caldayp1) < 1.e-10_r8)
-     else
-        doalb = (nextsw_cday >= -0.5_r8)
-     end if
-     call update_rad_dtime(doalb)
-
-     ! Determine if time to write restart and stop
-
-     rstwr = .false.
-     if (rstwr_sync .and. dosend) rstwr = .true.
-     nlend = .false.
-     if (nlend_sync .and. dosend) nlend = .true.
-
-     ! Run clm
-
-     call t_barrierf('sync_clm_run1', mpicom)
-     call t_startf ('clm_run')
-     call t_startf ('shr_orb_decl')
-     calday = get_curr_calday(reuse_day_365_for_day_366=.true.)
-     call shr_orb_decl( calday , eccen, mvelpp, lambm0, obliqr, declin , eccf )
-     call shr_orb_decl( nextsw_cday, eccen, mvelpp, lambm0, obliqr, declinp1, eccf )
-     call t_stopf ('shr_orb_decl')
-     call clm_drv(doalb, nextsw_cday, declinp1, declin, rstwr, nlend, rdate, rof_prognostic)
-     call t_stopf ('clm_run')
-
-     ! Create l2x_l export state - add river runoff input to l2x_l if appropriate
-
-     call t_startf ('lc_lnd_export')
-     call lnd_export(bounds, water_inst%waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x_l%rattr)
-     call t_stopf ('lc_lnd_export')
-
-     ! Advance clm time step
-
-     call t_startf ('lc_clm2_adv_timestep')
-     call advance_timestep()
-     call t_stopf ('lc_clm2_adv_timestep')
-
-  end do
-
-  ! Check that internal clock is in sync with master clock
-
-  call get_curr_date( yr, mon, day, tod, offset=-dtime )
-  ymd = yr*10000 + mon*100 + day
-  tod = tod
-  if ( .not. seq_timemgr_EClockDateInSync( EClock, ymd, tod ) )then
-     call seq_timemgr_EclockGetData( EClock, curr_ymd=ymd_sync, curr_tod=tod_sync )
-     write(iulog,*)' clm ymd=',ymd ,' clm tod= ',tod
-     write(iulog,*)'sync ymd=',ymd_sync,' sync tod= ',tod_sync
-     call endrun( sub//":: CLM clock not in sync with Master Sync clock" )
-  end if
-
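The clock-consistency check above packs the model date into a single yyyymmdd integer (ymd = yr*10000 + mon*100 + day) before comparing against the driver clock. A minimal standalone sketch of the packing and unpacking arithmetic, using a hypothetical date:

    program ymd_demo
      implicit none
      integer :: yr, mon, day, ymd
      yr = 2024; mon = 5; day = 10            ! hypothetical model date
      ymd = yr*10000 + mon*100 + day          ! packs to 20240510
      write(*,*) 'packed ymd = ', ymd
      ! recover the components with integer division and modulo
      write(*,*) 'yr  = ', ymd/10000          ! 2024
      write(*,*) 'mon = ', mod(ymd,10000)/100 ! 5
      write(*,*) 'day = ', mod(ymd,100)       ! 10
    end program ymd_demo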
-  ! Reset shr logging to my original values
-
-  call shr_file_setLogUnit (shrlogunit)
-  call shr_file_setLogLevel(shrloglev)
-
-#if (defined _MEMTRACE)
-  if(masterproc) then
-     lbnum=1
-     call memmon_dump_fort('memmon.out','lnd_run_mct:end::',lbnum)
-     call memmon_reset_addr()
-  endif
-#endif
-
-  first_call = .false.
-
-end subroutine lnd_run_mct
-
-!====================================================================================
-subroutine lnd_final_mct( EClock, cdata_l, x2l_l, l2x_l)
-  !
-  ! !DESCRIPTION:
-  ! Finalize land surface model
-
-  use seq_cdata_mod ,only : seq_cdata, seq_cdata_setptrs
-  use seq_timemgr_mod ,only : seq_timemgr_EClockGetData, seq_timemgr_StopAlarmIsOn
-  use seq_timemgr_mod ,only : seq_timemgr_RestartAlarmIsOn, seq_timemgr_EClockDateInSync
-  use esmf
-  !
-  ! !ARGUMENTS:
-  type(ESMF_Clock) , intent(inout) :: EClock ! Input synchronization clock from driver
-  type(seq_cdata) , intent(inout) :: cdata_l ! Input driver data for land model
-  type(mct_aVect) , intent(inout) :: x2l_l ! Import state to land model
-  type(mct_aVect) , intent(inout) :: l2x_l ! Export state from land model
-  !---------------------------------------------------------------------------
-
-  ! fill this in
-end subroutine lnd_final_mct
-
-!====================================================================================
-subroutine lnd_domain_mct( bounds, lsize, gsMap_l, dom_l )
-  !
-  ! !DESCRIPTION:
-  ! Send the land model domain information to the coupler
-  !
-  ! !USES:
-  use clm_varcon , only: re
-  use domainMod , only: ldomain
-  use spmdMod , only: iam
-  use mct_mod , only: mct_gGrid_importIAttr
-  use mct_mod , only: mct_gGrid_importRAttr, mct_gGrid_init, mct_gsMap_orderedPoints
-  use seq_flds_mod, only: seq_flds_dom_coord, seq_flds_dom_other
-  !
-  ! !ARGUMENTS:
-  type(bounds_type), intent(in) :: bounds ! bounds
-  integer , intent(in) :: lsize ! land model domain data size
-  type(mct_gsMap), intent(inout) :: gsMap_l ! Output land model MCT GS map
-  type(mct_ggrid), intent(out) :: dom_l ! Output domain information for land model
-  !
-  ! Local Variables
-  integer :: g,i,j ! index
-  real(r8), pointer :: data(:) ! temporary
-  integer , pointer :: idata(:) ! temporary
-  !---------------------------------------------------------------------------
-  !
-  ! Initialize mct domain type
-  ! lat/lon in degrees, area in radians^2, mask is 1 (land), 0 (non-land)
-  ! Note that in addition land carries around landfrac for the purposes of domain checking
-  !
-  call mct_gGrid_init( GGrid=dom_l, CoordChars=trim(seq_flds_dom_coord), &
-       OtherChars=trim(seq_flds_dom_other), lsize=lsize )
-  !
-  ! Allocate memory
-  !
-  allocate(data(lsize))
-  !
-  ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT
-  !
-  call mct_gsMap_orderedPoints(gsMap_l, iam, idata)
-  call mct_gGrid_importIAttr(dom_l,'GlobGridNum',idata,lsize)
-  !
-  ! Determine domain (numbering scheme is: West to East and South to North to South pole)
-  ! Initialize attribute vector with special value
-  !
-  data(:) = -9999.0_R8
-  call mct_gGrid_importRAttr(dom_l,"lat" ,data,lsize)
-  call mct_gGrid_importRAttr(dom_l,"lon" ,data,lsize)
-  call mct_gGrid_importRAttr(dom_l,"area" ,data,lsize)
-  call mct_gGrid_importRAttr(dom_l,"aream",data,lsize)
-  data(:) = 0.0_R8
-  call mct_gGrid_importRAttr(dom_l,"mask" ,data,lsize)
-  !
-  ! Fill in correct values for domain components
-  ! Note aream will be filled in in the atm-lnd mapper
-  !
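In the per-gridcell loops that follow, the cell area is converted from km**2 (as stored in ldomain%area) to the coupler's radians**2 convention by dividing by re**2, where re is the Earth radius in km. A quick standalone check with approximate, hypothetical numbers (re ~ 6371.22 km; a 1x1-degree cell at the equator is roughly 12364 km**2, which maps to about 3.05e-4 rad**2):

    program area_demo
      implicit none
      integer, parameter :: r8 = selected_real_kind(12)
      real(r8), parameter :: re = 6371.22_r8   ! Earth radius in km (approximate)
      real(r8) :: area_km2, area_rad2
      area_km2  = 12364.0_r8                   ! hypothetical equatorial 1x1-degree cell
      area_rad2 = area_km2 / (re*re)
      write(*,*) 'area in radians**2 = ', area_rad2   ! ~3.05e-4
    end program area_demo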
-  do g = bounds%begg,bounds%endg
-     i = 1 + (g - bounds%begg)
-     data(i) = ldomain%lonc(g)
-  end do
-  call mct_gGrid_importRattr(dom_l,"lon",data,lsize)
-
-  do g = bounds%begg,bounds%endg
-     i = 1 + (g - bounds%begg)
-     data(i) = ldomain%latc(g)
-  end do
-  call mct_gGrid_importRattr(dom_l,"lat",data,lsize)
-
-  do g = bounds%begg,bounds%endg
-     i = 1 + (g - bounds%begg)
-     data(i) = ldomain%area(g)/(re*re)
-  end do
-  call mct_gGrid_importRattr(dom_l,"area",data,lsize)
-
-  do g = bounds%begg,bounds%endg
-     i = 1 + (g - bounds%begg)
-     data(i) = real(ldomain%mask(g), r8)
-  end do
-  call mct_gGrid_importRattr(dom_l,"mask",data,lsize)
-
-  do g = bounds%begg,bounds%endg
-     i = 1 + (g - bounds%begg)
-     data(i) = real(ldomain%frac(g), r8)
-  end do
-  call mct_gGrid_importRattr(dom_l,"frac",data,lsize)
-
-  deallocate(data)
-  deallocate(idata)
-
-end subroutine lnd_domain_mct
-
-!====================================================================================
-subroutine lnd_handle_resume( cdata_l )
-  !
-  ! !DESCRIPTION:
-  ! Handle resume signals for Data Assimilation (DA)
-  !
-  ! !USES:
-  use clm_time_manager , only : update_DA_nstep
-  use seq_cdata_mod , only : seq_cdata, seq_cdata_setptrs
-  implicit none
-  ! !ARGUMENTS:
-  type(seq_cdata), intent(inout) :: cdata_l ! Input land-model driver data
-  ! !LOCAL VARIABLES:
-  logical :: resume_from_data_assim ! flag if we are resuming after data assimulation was done
-  !---------------------------------------------------------------------------
-
-  ! Check to see if restart was modified and we are resuming from data
-  ! assimilation
-  call seq_cdata_setptrs(cdata_l, post_assimilation=resume_from_data_assim)
-  if ( resume_from_data_assim ) call update_DA_nstep()
-
-end subroutine lnd_handle_resume
-
-end module lnd_comp_mct
diff --git a/src/cpl/mct/lnd_import_export.F90 b/src/cpl/mct/lnd_import_export.F90
deleted file mode 100644
index 537abd49d9..0000000000
--- a/src/cpl/mct/lnd_import_export.F90
+++ /dev/null
@@ -1,354 +0,0 @@
-module lnd_import_export
-
-  use shr_kind_mod , only: r8 => shr_kind_r8, cl=>shr_kind_cl
-  use abortutils , only: endrun
-  use decompmod , only: bounds_type, subgrid_level_gridcell
-  use lnd2atmType , only: lnd2atm_type
-  use lnd2glcMod , only: lnd2glc_type
-  use atm2lndType , only: atm2lnd_type
-  use glc2lndMod , only: glc2lnd_type
-  use Waterlnd2atmBulkType , only: waterlnd2atmbulk_type
-  use Wateratm2lndBulkType , only: wateratm2lndbulk_type
-  use clm_cpl_indices
-  use GridcellType , only : grc
-  !
-  implicit none
-  !===============================================================================
-
-contains
-
-  !===============================================================================
-  subroutine lnd_import( bounds, x2l, glc_present, atm2lnd_inst, glc2lnd_inst, wateratm2lndbulk_inst)
-
-    !---------------------------------------------------------------------------
-    ! !DESCRIPTION:
-    ! Convert the input data from the coupler to the land model
-    !
-    ! !USES:
-    use seq_flds_mod , only: seq_flds_x2l_fields
-    use clm_varctl , only: co2_type, co2_ppmv, iulog, use_c13
-    use clm_varctl , only: ndep_from_cpl
-    use clm_varcon , only: c13ratio
-    use domainMod , only: ldomain
-    use lnd_import_export_utils, only : derive_quantities, check_for_errors, check_for_nans
-    !
-    ! !ARGUMENTS:
-    type(bounds_type) , intent(in) :: bounds ! bounds
-    real(r8) , intent(in) :: x2l(:,:) ! driver import state to land model
-    logical , intent(in) :: glc_present ! .true. => running with a non-stub GLC model
-    type(atm2lnd_type) , intent(inout) :: atm2lnd_inst ! clm internal input data type
-    type(glc2lnd_type) , intent(inout) :: glc2lnd_inst ! clm internal input data type
-    type(wateratm2lndbulk_type), intent(inout) :: wateratm2lndbulk_inst ! clm internal input data type
-    !
-    ! !LOCAL VARIABLES:
-    integer :: begg, endg ! bounds
-    integer :: g,i,k,nstep,ier ! indices, number of steps, and error code
-    real(r8) :: qsat_kg_kg ! saturation specific humidity (kg/kg)
-    real(r8) :: forc_pbot ! atmospheric pressure (Pa)
-    real(r8) :: forc_rainc(bounds%begg:bounds%endg) ! rainxy Atm flux mm/s
-    real(r8) :: forc_rainl(bounds%begg:bounds%endg) ! rainxy Atm flux mm/s
-    real(r8) :: forc_snowc(bounds%begg:bounds%endg) ! snowfxy Atm flux mm/s
-    real(r8) :: forc_snowl(bounds%begg:bounds%endg) ! snowfxl Atm flux mm/s
-    real(r8) :: co2_ppmv_diag ! temporary
-    real(r8) :: co2_ppmv_prog ! temporary
-    real(r8) :: co2_ppmv_val ! temporary
-    integer :: co2_type_idx ! integer flag for co2_type options
-    character(len=32) :: fname ! name of field that is NaN
-    character(len=32), parameter :: sub = 'lnd_import'
-
-    !---------------------------------------------------------------------------
-
-    ! Set bounds
-    begg = bounds%begg; endg = bounds%endg
-
-    co2_type_idx = 0
-    if (co2_type == 'prognostic') then
-       co2_type_idx = 1
-    else if (co2_type == 'diagnostic') then
-       co2_type_idx = 2
-    end if
-    if (co2_type == 'prognostic' .and. index_x2l_Sa_co2prog == 0) then
-       call endrun( sub//' ERROR: must have nonzero index_x2l_Sa_co2prog for co2_type equal to prognostic' )
-    else if (co2_type == 'diagnostic' .and. index_x2l_Sa_co2diag == 0) then
-       call endrun( sub//' ERROR: must have nonzero index_x2l_Sa_co2diag for co2_type equal to diagnostic' )
-    end if
-
-    ! Note that the precipitation fluxes received from the coupler
-    ! are in units of kg/s/m^2. To convert these precipitation rates
-    ! in units of mm/sec, one must divide by 1000 kg/m^3 and multiply
-    ! by 1000 mm/m resulting in an overall factor of unity.
-    ! Below the units are therefore given in mm/s.
-
-    do g = begg,endg
-       i = 1 + (g - begg)
-
-       ! Determine flooding input, sign convention is positive downward and
-       ! hierarchy is atm/glc/lnd/rof/ice/ocn. so water sent from rof to land is negative,
-       ! change the sign to indicate addition of water to system.
-
-       wateratm2lndbulk_inst%forc_flood_grc(g) = -x2l(index_x2l_Flrr_flood,i)
-
-       wateratm2lndbulk_inst%volr_grc(g) = x2l(index_x2l_Flrr_volr,i) * (ldomain%area(g) * 1.e6_r8)
-       wateratm2lndbulk_inst%volrmch_grc(g)= x2l(index_x2l_Flrr_volrmch,i) * (ldomain%area(g) * 1.e6_r8)
-
-       ! Determine required receive fields
-
-       atm2lnd_inst%forc_hgt_grc(g) = x2l(index_x2l_Sa_z,i) ! zgcmxy Atm state m
-       atm2lnd_inst%forc_topo_grc(g) = x2l(index_x2l_Sa_topo,i) ! Atm surface height (m)
-       atm2lnd_inst%forc_u_grc(g) = x2l(index_x2l_Sa_u,i) ! forc_uxy Atm state m/s
-       atm2lnd_inst%forc_v_grc(g) = x2l(index_x2l_Sa_v,i) ! forc_vxy Atm state m/s
-       atm2lnd_inst%forc_solad_not_downscaled_grc(g,2) = x2l(index_x2l_Faxa_swndr,i) ! forc_sollxy Atm flux W/m^2
-       atm2lnd_inst%forc_solad_not_downscaled_grc(g,1) = x2l(index_x2l_Faxa_swvdr,i) ! forc_solsxy Atm flux W/m^2
-       atm2lnd_inst%forc_solai_grc(g,2) = x2l(index_x2l_Faxa_swndf,i) ! forc_solldxy Atm flux W/m^2
-       atm2lnd_inst%forc_solai_grc(g,1) = x2l(index_x2l_Faxa_swvdf,i) ! forc_solsdxy Atm flux W/m^2
-
-       atm2lnd_inst%forc_th_not_downscaled_grc(g) = x2l(index_x2l_Sa_ptem,i) ! forc_thxy Atm state K
-       wateratm2lndbulk_inst%forc_q_not_downscaled_grc(g) = x2l(index_x2l_Sa_shum,i) ! forc_qxy Atm state kg/kg
-       atm2lnd_inst%forc_pbot_not_downscaled_grc(g) = x2l(index_x2l_Sa_pbot,i) ! ptcmxy Atm state Pa
-       atm2lnd_inst%forc_t_not_downscaled_grc(g) = x2l(index_x2l_Sa_tbot,i) ! forc_txy Atm state K
-       atm2lnd_inst%forc_lwrad_not_downscaled_grc(g) = x2l(index_x2l_Faxa_lwdn,i) ! flwdsxy Atm flux W/m^2
-
-       forc_rainc(g) = x2l(index_x2l_Faxa_rainc,i) ! mm/s
-       forc_rainl(g) = x2l(index_x2l_Faxa_rainl,i) ! mm/s
-       forc_snowc(g) = x2l(index_x2l_Faxa_snowc,i) ! mm/s
-       forc_snowl(g) = x2l(index_x2l_Faxa_snowl,i) ! mm/s
-
-       ! atmosphere coupling, for prognostic/prescribed aerosols
-       atm2lnd_inst%forc_aer_grc(g,1) = x2l(index_x2l_Faxa_bcphidry,i)
-       atm2lnd_inst%forc_aer_grc(g,2) = x2l(index_x2l_Faxa_bcphodry,i)
-       atm2lnd_inst%forc_aer_grc(g,3) = x2l(index_x2l_Faxa_bcphiwet,i)
-       atm2lnd_inst%forc_aer_grc(g,4) = x2l(index_x2l_Faxa_ocphidry,i)
-       atm2lnd_inst%forc_aer_grc(g,5) = x2l(index_x2l_Faxa_ocphodry,i)
-       atm2lnd_inst%forc_aer_grc(g,6) = x2l(index_x2l_Faxa_ocphiwet,i)
-       atm2lnd_inst%forc_aer_grc(g,7) = x2l(index_x2l_Faxa_dstwet1,i)
-       atm2lnd_inst%forc_aer_grc(g,8) = x2l(index_x2l_Faxa_dstdry1,i)
-       atm2lnd_inst%forc_aer_grc(g,9) = x2l(index_x2l_Faxa_dstwet2,i)
-       atm2lnd_inst%forc_aer_grc(g,10) = x2l(index_x2l_Faxa_dstdry2,i)
-       atm2lnd_inst%forc_aer_grc(g,11) = x2l(index_x2l_Faxa_dstwet3,i)
-       atm2lnd_inst%forc_aer_grc(g,12) = x2l(index_x2l_Faxa_dstdry3,i)
-       atm2lnd_inst%forc_aer_grc(g,13) = x2l(index_x2l_Faxa_dstwet4,i)
-       atm2lnd_inst%forc_aer_grc(g,14) = x2l(index_x2l_Faxa_dstdry4,i)
-
-       if (index_x2l_Sa_methane /= 0) then
-          atm2lnd_inst%forc_pch4_grc(g) = x2l(index_x2l_Sa_methane,i)
-       endif
-
-       !--------------------------
-       ! Check for nans from coupler
-       !--------------------------
-
-       call check_for_nans(x2l(:,i), fname, begg, "x2l")
-
-    end do
-
-    !--------------------------
-    ! Derived quantities for required fields
-    ! and corresponding error checks
-    !--------------------------
-
-    call derive_quantities(bounds, atm2lnd_inst, wateratm2lndbulk_inst, &
-         forc_rainc, forc_rainl, forc_snowc, forc_snowl)
-
-    call check_for_errors(bounds, atm2lnd_inst, wateratm2lndbulk_inst)
-
-    ! Determine derived quantities for optional fields
-    ! Note that the following does unit conversions from ppmv to partial pressures (Pa)
-    ! Note that forc_pbot is in Pa
-
-    do g = begg,endg
-       i = 1 + (g - begg)
-
-       forc_pbot = atm2lnd_inst%forc_pbot_not_downscaled_grc(g)
-
-       ! Determine optional receive fields
-       if (index_x2l_Sa_co2prog /= 0) then
-          co2_ppmv_prog = x2l(index_x2l_Sa_co2prog,i) ! co2 atm state prognostic
-       else
-          co2_ppmv_prog = co2_ppmv
-       end if
-       if (index_x2l_Sa_co2diag /= 0) then
-          co2_ppmv_diag = x2l(index_x2l_Sa_co2diag,i) ! co2 atm state diagnostic
-       else
-          co2_ppmv_diag = co2_ppmv
-       end if
-
-       if (co2_type_idx == 1) then
-          co2_ppmv_val = co2_ppmv_prog
-       else if (co2_type_idx == 2) then
-          co2_ppmv_val = co2_ppmv_diag
-       else
-          co2_ppmv_val = co2_ppmv
-       end if
-       if ( (co2_ppmv_val < 10.0_r8) .or. (co2_ppmv_val > 15000.0_r8) )then
-          call endrun(subgrid_index=g, subgrid_level=subgrid_level_gridcell, &
-               msg = sub//' ERROR: CO2 is outside of an expected range' )
-       end if
-       atm2lnd_inst%forc_pco2_grc(g) = co2_ppmv_val * 1.e-6_r8 * forc_pbot
-       if (use_c13) then
-          atm2lnd_inst%forc_pc13o2_grc(g) = co2_ppmv_val * c13ratio * 1.e-6_r8 * forc_pbot
-       end if
-
-       if (ndep_from_cpl) then
-          ! The coupler is sending ndep in units if kgN/m2/s - and clm uses units of gN/m2/sec - so the
-          ! following conversion needs to happen
-          atm2lnd_inst%forc_ndep_grc(g) = (x2l(index_x2l_Faxa_nhx, i) + x2l(index_x2l_faxa_noy, i))*1000._r8
-       end if
-
-    end do
-
-    call glc2lnd_inst%set_glc2lnd_fields_mct( &
-         bounds = bounds, &
-         glc_present = glc_present, &
-         ! NOTE(wjs, 2017-12-13) the x2l argument doesn't have the typical bounds
-         ! subsetting (bounds%begg:bounds%endg). This mirrors the lack of these bounds in
-         ! the call to lnd_import from lnd_run_mct. This is okay as long as this code is
-         ! outside a clump loop.
-         x2l = x2l, &
-         index_x2l_Sg_ice_covered = index_x2l_Sg_ice_covered, &
-         index_x2l_Sg_topo = index_x2l_Sg_topo, &
-         index_x2l_Flgg_hflx = index_x2l_Flgg_hflx, &
-         index_x2l_Sg_icemask = index_x2l_Sg_icemask, &
-         index_x2l_Sg_icemask_coupled_fluxes = index_x2l_Sg_icemask_coupled_fluxes)
-
-  end subroutine lnd_import
-
-  !===============================================================================
-
-  subroutine lnd_export( bounds, waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x)
-
-    !---------------------------------------------------------------------------
-    ! !DESCRIPTION:
-    ! Convert the data to be sent from the clm model to the coupler
-    !
-    ! !USES:
-    use shr_kind_mod , only : r8 => shr_kind_r8
-    use seq_flds_mod , only : seq_flds_l2x_fields
-    use clm_varctl , only : iulog
-    use shr_drydep_mod , only : n_drydep
-    use shr_megan_mod , only : shr_megan_mechcomps_n
-    use shr_fire_emis_mod , only : shr_fire_emis_mechcomps_n
-    use lnd_import_export_utils, only : check_for_nans
-    !
-    ! !ARGUMENTS:
-    implicit none
-    type(bounds_type) , intent(in) :: bounds ! bounds
-    type(lnd2atm_type), intent(inout) :: lnd2atm_inst ! clm land to atmosphere exchange data type
-    type(lnd2glc_type), intent(inout) :: lnd2glc_inst ! clm land to atmosphere exchange data type
-    type(waterlnd2atmbulk_type), intent(in) :: waterlnd2atmbulk_inst
-    real(r8) , intent(out) :: l2x(:,:)! land to coupler export state on land grid
-    !
-    ! !LOCAL VARIABLES:
-    integer :: begg, endg ! bounds
-    integer :: g,i,k ! indices
-    integer :: ier ! error status
-    integer :: nstep ! time step index
-    integer :: dtime ! time step
-    integer :: num ! counter
-    character(len=32) :: fname ! name of field that is NaN
-    character(len=32), parameter :: sub = 'lnd_export'
-    !---------------------------------------------------------------------------
-
-    ! Set bounds
-    begg = bounds%begg; endg = bounds%endg
-
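Both the import loop above and the export loop below translate a gridcell index g in the processor's global-decomposition range [bounds%begg, bounds%endg] into a 1-based local column i of the MCT attribute vector via i = 1 + (g - begg). A tiny standalone illustration with hypothetical bounds:

    program index_demo
      implicit none
      integer :: begg, endg, g, i
      begg = 101; endg = 105          ! hypothetical processor bounds
      do g = begg, endg
         i = 1 + (g - begg)           ! yields local columns i = 1..5
         write(*,*) 'g =', g, ' -> local i =', i
      end do
    end program index_demo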
-    ! cesm sign convention is that fluxes are positive downward
-
-    l2x(:,:) = 0.0_r8
-
-    do g = begg,endg
-       i = 1 + (g-begg)
-       l2x(index_l2x_Sl_t,i) = lnd2atm_inst%t_rad_grc(g)
-       l2x(index_l2x_Sl_snowh,i) = waterlnd2atmbulk_inst%h2osno_grc(g)
-       l2x(index_l2x_Sl_avsdr,i) = lnd2atm_inst%albd_grc(g,1)
-       l2x(index_l2x_Sl_anidr,i) = lnd2atm_inst%albd_grc(g,2)
-       l2x(index_l2x_Sl_avsdf,i) = lnd2atm_inst%albi_grc(g,1)
-       l2x(index_l2x_Sl_anidf,i) = lnd2atm_inst%albi_grc(g,2)
-       l2x(index_l2x_Sl_tref,i) = lnd2atm_inst%t_ref2m_grc(g)
-       l2x(index_l2x_Sl_qref,i) = waterlnd2atmbulk_inst%q_ref2m_grc(g)
-       l2x(index_l2x_Sl_u10,i) = lnd2atm_inst%u_ref10m_grc(g)
-       l2x(index_l2x_Fall_taux,i) = -lnd2atm_inst%taux_grc(g)
-       l2x(index_l2x_Fall_tauy,i) = -lnd2atm_inst%tauy_grc(g)
-       l2x(index_l2x_Fall_lat,i) = -lnd2atm_inst%eflx_lh_tot_grc(g)
-       l2x(index_l2x_Fall_sen,i) = -lnd2atm_inst%eflx_sh_tot_grc(g)
-       l2x(index_l2x_Fall_lwup,i) = -lnd2atm_inst%eflx_lwrad_out_grc(g)
-       l2x(index_l2x_Fall_evap,i) = -waterlnd2atmbulk_inst%qflx_evap_tot_grc(g)
-       l2x(index_l2x_Fall_swnet,i) = lnd2atm_inst%fsa_grc(g)
-       if (index_l2x_Fall_fco2_lnd /= 0) then
-          l2x(index_l2x_Fall_fco2_lnd,i) = -lnd2atm_inst%net_carbon_exchange_grc(g)
-       end if
-
-       ! Additional fields for DUST, PROGSSLT, dry-deposition and VOC
-       ! These are now standard fields, but the check on the index makes sure the driver handles them
-       if (index_l2x_Sl_ram1 /= 0 ) l2x(index_l2x_Sl_ram1,i) = lnd2atm_inst%ram1_grc(g)
-       if (index_l2x_Sl_fv /= 0 ) l2x(index_l2x_Sl_fv,i) = lnd2atm_inst%fv_grc(g)
-       if (index_l2x_Sl_soilw /= 0 ) l2x(index_l2x_Sl_soilw,i) = waterlnd2atmbulk_inst%h2osoi_vol_grc(g,1)
-       if (index_l2x_Fall_flxdst1 /= 0 ) l2x(index_l2x_Fall_flxdst1,i)= -lnd2atm_inst%flxdst_grc(g,1)
-       if (index_l2x_Fall_flxdst2 /= 0 ) l2x(index_l2x_Fall_flxdst2,i)= -lnd2atm_inst%flxdst_grc(g,2)
-       if (index_l2x_Fall_flxdst3 /= 0 ) l2x(index_l2x_Fall_flxdst3,i)= -lnd2atm_inst%flxdst_grc(g,3)
-       if (index_l2x_Fall_flxdst4 /= 0 ) l2x(index_l2x_Fall_flxdst4,i)= -lnd2atm_inst%flxdst_grc(g,4)
-
-
-       ! for dry dep velocities
-       if (index_l2x_Sl_ddvel /= 0 ) then
-          l2x(index_l2x_Sl_ddvel:index_l2x_Sl_ddvel+n_drydep-1,i) = &
-               lnd2atm_inst%ddvel_grc(g,:n_drydep)
-       end if
-
-       ! for MEGAN VOC emis fluxes
-       if (index_l2x_Fall_flxvoc /= 0 ) then
-          l2x(index_l2x_Fall_flxvoc:index_l2x_Fall_flxvoc+shr_megan_mechcomps_n-1,i) = &
-               -lnd2atm_inst%flxvoc_grc(g,:shr_megan_mechcomps_n)
-       end if
-
-
-       ! for fire emis fluxes
-       if (index_l2x_Fall_flxfire /= 0 ) then
-          l2x(index_l2x_Fall_flxfire:index_l2x_Fall_flxfire+shr_fire_emis_mechcomps_n-1,i) = &
-               -lnd2atm_inst%fireflx_grc(g,:shr_fire_emis_mechcomps_n)
-          l2x(index_l2x_Sl_ztopfire,i) = lnd2atm_inst%fireztop_grc(g)
-       end if
-
-       if (index_l2x_Fall_methane /= 0) then
-          l2x(index_l2x_Fall_methane,i) = -lnd2atm_inst%ch4_surf_flux_tot_grc(g)
-       endif
-
-       ! sign convention is positive downward with
-       ! hierarchy of atm/glc/lnd/rof/ice/ocn.
-       ! I.e. water sent from land to rof is positive
-
-       l2x(index_l2x_Flrl_rofsur,i) = waterlnd2atmbulk_inst%qflx_rofliq_qsur_grc(g)
-
-       ! subsurface runoff is the sum of qflx_drain and qflx_perched_drain
-       l2x(index_l2x_Flrl_rofsub,i) = waterlnd2atmbulk_inst%qflx_rofliq_qsub_grc(g) &
-            + waterlnd2atmbulk_inst%qflx_rofliq_drain_perched_grc(g)
-
-       ! qgwl sent individually to coupler
-       l2x(index_l2x_Flrl_rofgwl,i) = waterlnd2atmbulk_inst%qflx_rofliq_qgwl_grc(g)
-
-       ! ice sent individually to coupler
-       l2x(index_l2x_Flrl_rofi,i) = waterlnd2atmbulk_inst%qflx_rofice_grc(g)
-
-       ! irrigation flux to be removed from main channel storage (negative)
-       l2x(index_l2x_Flrl_irrig,i) = - waterlnd2atmbulk_inst%qirrig_grc(g)
-
-       ! glc coupling
-       ! We could avoid setting these fields if glc_present is .false., if that would
-       ! help with performance. (The downside would be that we wouldn't have these fields
-       ! available for diagnostic purposes or to force a later T compset with dlnd.)
-       do num = 0,glc_nec
-          l2x(index_l2x_Sl_tsrf(num),i) = lnd2glc_inst%tsrf_grc(g,num)
-          l2x(index_l2x_Sl_topo(num),i) = lnd2glc_inst%topo_grc(g,num)
-          l2x(index_l2x_Flgl_qice(num),i) = lnd2glc_inst%qice_grc(g,num)
-       end do
-
-       !--------------------------
-       ! Check for nans to coupler
-       !--------------------------
-
-       call check_for_nans(l2x(:,i), fname, begg, "l2x")
-
-    end do
-
-  end subroutine lnd_export
-
-end module lnd_import_export
diff --git a/src/cpl/mct/lnd_set_decomp_and_domain.F90 b/src/cpl/mct/lnd_set_decomp_and_domain.F90
deleted file mode 100644
index 0a37554313..0000000000
--- a/src/cpl/mct/lnd_set_decomp_and_domain.F90
+++ /dev/null
@@ -1,352 +0,0 @@
-module lnd_set_decomp_and_domain
-
-  use shr_kind_mod , only : r8 => shr_kind_r8
-  use spmdMod , only : masterproc
-  use clm_varctl , only : iulog
-  use mct_mod , only : mct_gsMap
-
-  implicit none
-  private ! except
-
-  ! public member routines
-  public :: lnd_set_decomp_and_domain_from_surfrd
-
-  ! private member routines
-  private :: surfrd_get_globmask ! Reads global land mask (needed for setting domain decomp)
-  private :: surfrd_get_grid ! Read grid/ladnfrac data into domain (after domain decomp)
-
-  ! translation between local and global indices at gridcell level
-  type(mct_gsmap), pointer, public :: gsmap_global
-
-  ! translation between local and global indices at gridcell level for multiple levels
-  ! needed for 3d soil moisture stream
-  type(mct_gsmap), target , public :: gsMap_lnd2Dsoi_gdc2glo
-
-  character(len=*), parameter, private :: sourcefile = &
-       __FILE__
-
-!===============================================================================
-contains
-!===============================================================================
-
-  subroutine lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj)
-
-    ! Initialize ldomain data types
-
-    use clm_varpar , only: nlevsoi
-    use clm_varctl , only: fatmlndfrc, use_soil_moisture_streams
-    use decompInitMod , only: decompInit_lnd
-    use decompMod , only: bounds_type, get_proc_bounds
-    use domainMod , only: ldomain, domain_check
-
-    ! input/output variables
-    logical, intent(out) :: noland
-    integer, intent(out) :: ni, nj ! global grid sizes
-
-    ! local variables
-    integer ,pointer :: amask(:) ! global land mask
-    integer :: begg, endg ! processor bounds
-    type(bounds_type) :: bounds ! bounds
-    character(len=32) :: subname = 'lnd_set_decomp_and_domain_from_surfrd'
-    !-----------------------------------------------------------------------
-
-    ! Read in global land grid and land mask (amask)- needed to set decomposition
-    ! global memory for amask is allocate in surfrd_get_glomask - must be deallocated below
-    if (masterproc) then
-       write(iulog,*) 'Attempting to read global land mask from ',trim(fatmlndfrc)
-    endif
-
-    ! Get global mask, ni and nj
-    call surfrd_get_globmask(filename=fatmlndfrc, mask=amask, ni=ni, nj=nj)
-
-    ! Exit early if no valid land points
-    if ( all(amask == 0) )then
-       if (masterproc) write(iulog,*) trim(subname)//': no valid land points do NOT run clm'
-       noland = .true.
-       return
-    else
-       noland = .false.
-    end if
-
-    ! Determine ctsm gridcell decomposition and processor bounds for gridcells
-    call decompInit_lnd(ni, nj, amask)
-    deallocate(amask)
-    if (use_soil_moisture_streams) call decompInit_lnd3D(ni, nj, nlevsoi)
-
-    ! Initialize bounds for just gridcells
-    ! Remaining bounds (landunits, columns, patches) will be determined
-    ! after the call to decompInit_glcp - so get_proc_bounds is called
-    ! twice and the gridcell information is just filled in twice
-    call get_proc_bounds(bounds)
-
-    ! Get grid cell bounds values
-    begg = bounds%begg
-    endg = bounds%endg
-
-    ! Initialize ldomain data type
-    if (masterproc) then
-       write(iulog,*) 'Attempting to read ldomain from ',trim(fatmlndfrc)
-    endif
-    call surfrd_get_grid(begg, endg, ldomain, fatmlndfrc)
-    if (masterproc) then
-       call domain_check(ldomain)
-    endif
-    ldomain%mask = 1 !!! TODO - is this needed?
-
-  end subroutine lnd_set_decomp_and_domain_from_surfrd
-
-  !-----------------------------------------------------------------------
-  subroutine surfrd_get_globmask(filename, mask, ni, nj)
-
-    ! Read the surface dataset grid related information
-    ! This is used to set the domain decomposition - so global data is read here
-
-    use fileutils , only : getfil
-    use ncdio_pio , only : ncd_io, ncd_pio_openfile, ncd_pio_closefile, ncd_inqfdims, file_desc_t
-    use abortutils , only : endrun
-    use shr_log_mod, only : errMsg => shr_log_errMsg
-
-    ! input/output variables
-    character(len=*), intent(in) :: filename ! grid filename
-    integer , pointer :: mask(:) ! grid mask
-    integer , intent(out) :: ni, nj ! global grid sizes
-
-    ! local variables
-    logical :: isgrid2d
-    integer :: dimid,varid ! netCDF id's
-    integer :: ns ! size of grid on file
-    integer :: n,i,j ! index
-    integer :: ier ! error status
-    type(file_desc_t) :: ncid ! netcdf id
-    character(len=256) :: locfn ! local file name
-    logical :: readvar ! read variable in or not
-    integer , allocatable :: idata2d(:,:)
-    character(len=32) :: subname = 'surfrd_get_globmask' ! subroutine name
-    !-----------------------------------------------------------------------
-
-    if (filename == ' ') then
-       mask(:) = 1
-    else
-       ! Check if file exists
-       if (masterproc) then
-          if (filename == ' ') then
-             write(iulog,*) trim(subname),' ERROR: filename must be specified '
-             call endrun(msg=errMsg(sourcefile, __LINE__))
-          endif
-       end if
-
-       ! Open file
-       call getfil( filename, locfn, 0 )
-       call ncd_pio_openfile (ncid, trim(locfn), 0)
-
-       ! Determine dimensions and if grid file is 2d or 1d
-       call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns)
-       if (masterproc) then
-          write(iulog,*)'lat/lon grid flag (isgrid2d) is ',isgrid2d
-       end if
-       allocate(mask(ns))
-       mask(:) = 1
-       if (isgrid2d) then
-          ! Grid is 2d
-          allocate(idata2d(ni,nj))
-          idata2d(:,:) = 1
-          call ncd_io(ncid=ncid, varname='LANDMASK', data=idata2d, flag='read', readvar=readvar)
-          if (.not. readvar) then
-             call ncd_io(ncid=ncid, varname='mask', data=idata2d, flag='read', readvar=readvar)
-          end if
-          if (readvar) then
-             do j = 1,nj
-                do i = 1,ni
-                   n = (j-1)*ni + i
-                   mask(n) = idata2d(i,j)
-                enddo
-             enddo
-          end if
-          deallocate(idata2d)
-       else
-          ! Grid is not 2d
-          call ncd_io(ncid=ncid, varname='LANDMASK', data=mask, flag='read', readvar=readvar)
-          if (.not. readvar) then
-             call ncd_io(ncid=ncid, varname='mask', data=mask, flag='read', readvar=readvar)
-          end if
-       end if
-       if (.not. readvar) call endrun( msg=' ERROR: landmask not on fatmlndfrc file'//errMsg(sourcefile, __LINE__))
-
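The 2-d branch above flattens LANDMASK(i,j) into a 1-d mask with longitude varying fastest, n = (j-1)*ni + i. As a quick worked example, with ni = 3 columns the point (i=2, j=2) lands at n = (2-1)*3 + 2 = 5; a standalone sketch with hypothetical grid sizes:

    program flatten_demo
      implicit none
      integer, parameter :: ni = 3, nj = 2   ! hypothetical grid dimensions
      integer :: i, j, n
      do j = 1, nj
         do i = 1, ni
            n = (j-1)*ni + i                 ! 1-d index, longitude fastest
            write(*,*) '(', i, ',', j, ') -> n =', n
         end do
      end do
    end program flatten_demo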
-       ! Close file
-       call ncd_pio_closefile(ncid)
-    end if
-
-  end subroutine surfrd_get_globmask
-
-  !-----------------------------------------------------------------------
-  subroutine surfrd_get_grid(begg, endg, ldomain, filename)
-
-    ! Read the surface dataset grid related information:
-    ! This is called after the domain decomposition has been created
-    ! - real latitude of grid cell (degrees)
-    ! - real longitude of grid cell (degrees)
-
-    use clm_varcon , only : spval, re, grlnd
-    use domainMod , only : domain_type, lon1d, lat1d, domain_init
-    use fileutils , only : getfil
-    use abortutils , only : endrun
-    use shr_log_mod , only : errMsg => shr_log_errMsg
-    use ncdio_pio , only : file_desc_t, ncd_pio_openfile, ncd_pio_closefile
-    use ncdio_pio , only : ncd_io, check_var, ncd_inqfdims, check_dim_size
-    use pio
-
-    ! input/output variables
-    integer , intent(in) :: begg, endg
-    type(domain_type) , intent(inout) :: ldomain ! domain to init
-    character(len=*) , intent(in) :: filename ! grid filename
-
-    ! local variables
-    type(file_desc_t) :: ncid ! netcdf id
-    integer :: beg ! local beg index
-    integer :: end ! local end index
-    integer :: ni,nj,ns ! size of grid on file
-    logical :: readvar ! true => variable is on input file
-    logical :: isgrid2d ! true => file is 2d lat/lon
-    logical :: istype_domain ! true => input file is of type domain
-    real(r8), allocatable :: rdata2d(:,:) ! temporary
-    character(len=16) :: vname ! temporary
-    character(len=256) :: locfn ! local file name
-    integer :: n ! indices
-    character(len=32) :: subname = 'surfrd_get_grid' ! subroutine name
-    !-----------------------------------------------------------------------
-
-    if (masterproc) then
-       if (filename == ' ') then
-          write(iulog,*) trim(subname),' ERROR: filename must be specified '
-          call endrun(msg=errMsg(sourcefile, __LINE__))
-       endif
-    end if
-
-    call getfil( filename, locfn, 0 )
-    call ncd_pio_openfile (ncid, trim(locfn), 0)
-
-    ! Determine dimensions
-    call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns)
-
-    ! Determine isgrid2d flag for domain
-    call domain_init(ldomain, isgrid2d=isgrid2d, ni=ni, nj=nj, nbeg=begg, nend=endg)
-
-    ! Determine type of file - old style grid file or new style domain file
-    call check_var(ncid=ncid, varname='xc', readvar=readvar)
-    if (readvar)then
-       istype_domain = .true.
-    else
-       istype_domain = .false.
-    end if
-
-    ! Read in area, lon, lat
-    if (istype_domain) then
-       call ncd_io(ncid=ncid, varname= 'area', flag='read', data=ldomain%area, &
-            dim1name=grlnd, readvar=readvar)
-       ! convert from radians**2 to km**2
-       ldomain%area = ldomain%area * (re**2)
-       if (.not. readvar) call endrun( msg=' ERROR: area NOT on file'//errMsg(sourcefile, __LINE__))
-       call ncd_io(ncid=ncid, varname= 'xc', flag='read', data=ldomain%lonc, &
-            dim1name=grlnd, readvar=readvar)
-       if (.not. readvar) call endrun( msg=' ERROR: xc NOT on file'//errMsg(sourcefile, __LINE__))
-       call ncd_io(ncid=ncid, varname= 'yc', flag='read', data=ldomain%latc, &
-            dim1name=grlnd, readvar=readvar)
-       if (.not. readvar) call endrun( msg=' ERROR: yc NOT on file'//errMsg(sourcefile, __LINE__))
-    else
-       call endrun( msg=" ERROR: can no longer read non domain files" )
-    end if
-
-    if (isgrid2d) then
-       allocate(rdata2d(ni,nj), lon1d(ni), lat1d(nj))
-       if (istype_domain) vname = 'xc'
-       call ncd_io(ncid=ncid, varname=trim(vname), data=rdata2d, flag='read', readvar=readvar)
-       lon1d(:) = rdata2d(:,1)
-       if (istype_domain) vname = 'yc'
-       call ncd_io(ncid=ncid, varname=trim(vname), data=rdata2d, flag='read', readvar=readvar)
-       lat1d(:) = rdata2d(1,:)
-       deallocate(rdata2d)
-    end if
-
-    ! Check lat limited to -90,90
-    if (minval(ldomain%latc) < -90.0_r8 .or. &
-         maxval(ldomain%latc) > 90.0_r8) then
-       write(iulog,*) trim(subname),' WARNING: lat/lon min/max is ', &
-            minval(ldomain%latc),maxval(ldomain%latc)
-    endif
-    if ( any(ldomain%lonc < 0.0_r8) )then
-       call endrun( msg=' ERROR: lonc is negative (see https://github.com/ESCOMP/ctsm/issues/507)' &
-            //errMsg(sourcefile, __LINE__))
-    endif
-    call ncd_io(ncid=ncid, varname='mask', flag='read', data=ldomain%mask, &
-         dim1name=grlnd, readvar=readvar)
-    if (.not. readvar) then
-       call endrun( msg=' ERROR: LANDMASK NOT on fracdata file'//errMsg(sourcefile, __LINE__))
-    end if
-    call ncd_io(ncid=ncid, varname='frac', flag='read', data=ldomain%frac, &
-         dim1name=grlnd, readvar=readvar)
-    if (.not. readvar) then
-       call endrun( msg=' ERROR: LANDFRAC NOT on fracdata file'//errMsg(sourcefile, __LINE__))
-    end if
-
-    call ncd_pio_closefile(ncid)
-
-  end subroutine surfrd_get_grid
-
-  !------------------------------------------------------------------------------
-  subroutine decompInit_lnd3D(lni,lnj,lnk)
-    !
-    ! !DESCRIPTION:
-    ! Create a 3D decomposition gsmap for the global 2D grid with soil levels
-    ! as the 3rd dimesnion.
-    !
-    ! !USES:
-    use decompMod, only : gindex_global, bounds_type, get_proc_bounds
-    use spmdMod , only : comp_id, mpicom
-    use mct_mod , only : mct_gsmap_init
-    !
-    ! !ARGUMENTS:
-    integer , intent(in) :: lni,lnj,lnk ! domain global size
-    !
-    ! !LOCAL VARIABLES:
-    integer :: m,n,k ! indices
-    integer :: begg,endg,lsize,gsize ! used for gsmap init
-    integer :: begg3d,endg3d
-    integer, pointer :: gindex(:) ! global index for gsmap init
-    type(bounds_type) :: bounds
-    !------------------------------------------------------------------------------
-
-    ! Initialize gsmap_lnd2dsoi_gdc2glo
-    call get_proc_bounds(bounds)
-    begg = bounds%begg; endg=bounds%endg
-
-    begg3d = (begg-1)*lnk + 1
-    endg3d = endg*lnk
-    lsize = (endg3d - begg3d + 1 )
-    allocate(gindex(begg3d:endg3d))
-    do k = 1, lnk
-       do n = begg,endg
-          m = (begg-1)*lnk + (k-1)*(endg-begg+1) + (n-begg+1)
-          gindex(m) = gindex_global(n-begg+1) + (k-1)*(lni*lnj)
-       enddo
-    enddo
-    gsize = lni * lnj * lnk
-    call mct_gsMap_init(gsMap_lnd2Dsoi_gdc2glo, gindex, mpicom, comp_id, lsize, gsize)
-
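The gindex construction above stacks the 2-d horizontal decomposition lnk times, once per soil level: locally the entry for level k and gridcell n is offset by (k-1)*(endg-begg+1), while globally each level is offset by (k-1)*(lni*lnj). A standalone worked example with small, hypothetical sizes (begg=1, endg=2, lnk=2, and a 2x2 global grid):

    program gindex_demo
      implicit none
      integer, parameter :: begg = 1, endg = 2, lnk = 2, lni = 2, lnj = 2
      integer :: gindex_global(endg-begg+1) = (/1, 2/)  ! hypothetical 2-d global indices
      integer :: gindex((endg-begg+1)*lnk)
      integer :: k, n, m
      do k = 1, lnk
         do n = begg, endg
            m = (begg-1)*lnk + (k-1)*(endg-begg+1) + (n-begg+1)
            gindex(m) = gindex_global(n-begg+1) + (k-1)*(lni*lnj)
         end do
      end do
      write(*,*) gindex    ! prints 1 2 5 6
    end program gindex_demo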
-    ! Diagnostic output
-
-    if (masterproc) then
-       write(iulog,*)' 3D GSMap'
-       write(iulog,*)' longitude points = ',lni
-       write(iulog,*)' latitude points = ',lnj
-       write(iulog,*)' soil levels = ',lnk
-       write(iulog,*)' gsize = ',gsize
-       write(iulog,*)' lsize = ',lsize
-       write(iulog,*)' bounds(gindex) = ',size(gindex)
-       write(iulog,*)
-    end if
-
-    deallocate(gindex)
-
-  end subroutine decompInit_lnd3D
-
-end module lnd_set_decomp_and_domain
diff --git a/src/cpl/mct/ndepStreamMod.F90 b/src/cpl/mct/ndepStreamMod.F90
deleted file mode 100644
index d26ff7c95e..0000000000
--- a/src/cpl/mct/ndepStreamMod.F90
+++ /dev/null
@@ -1,376 +0,0 @@
-module ndepStreamMod
-
-  !-----------------------------------------------------------------------
-  ! !DESCRIPTION:
-  ! Contains methods for reading in nitrogen deposition data file
-  ! Also includes functions for dynamic ndep file handling and
-  ! interpolation.
-  !
-  ! !USES
-  use shr_kind_mod, only: r8 => shr_kind_r8, CL => shr_kind_cl
-  use shr_strdata_mod, only: shr_strdata_type, shr_strdata_create
-  use shr_strdata_mod, only: shr_strdata_print, shr_strdata_advance
-  use mct_mod , only: mct_ggrid
-  use spmdMod , only: mpicom, masterproc, comp_id, iam
-  use clm_varctl , only: iulog, inst_name
-  use abortutils , only: endrun
-  use decompMod , only: bounds_type
-  use domainMod , only: ldomain
-
-  ! !PUBLIC TYPES:
-  implicit none
-  private
-
-  ! !PUBLIC MEMBER FUNCTIONS:
-  public :: ndep_init ! position datasets for dynamic ndep
-  public :: ndep_interp ! interpolates between two years of ndep file data
-  public :: clm_domain_mct ! Sets up MCT domain for this resolution
-
-  ! !PRIVATE MEMBER FUNCTIONS:
-  private :: check_units ! Check the units and make sure they can be used
-
-  ! ! PRIVATE TYPES
-  type(shr_strdata_type) :: sdat ! input data stream
-  integer :: stream_year_first_ndep ! first year in stream to use
-  integer :: stream_year_last_ndep ! last year in stream to use
-  integer :: model_year_align_ndep ! align stream_year_firstndep with
-  logical :: divide_by_secs_per_yr = .true. ! divide by the number of seconds per year
-
-  character(len=*), parameter, private :: sourcefile = &
-       __FILE__
-  !==============================================================================
-
-contains
-
-  !==============================================================================
-
-  subroutine ndep_init(bounds, NLFilename)
-    !
-    ! Initialize data stream information.
-    !
-    ! Uses:
-    use shr_kind_mod , only : CS => shr_kind_cs
-    use clm_time_manager , only : get_calendar
-    use ncdio_pio , only : pio_subsystem
-    use shr_pio_mod , only : shr_pio_getiotype
-    use shr_nl_mod , only : shr_nl_find_group_name
-    use shr_log_mod , only : errMsg => shr_log_errMsg
-    use shr_mpi_mod , only : shr_mpi_bcast
-    use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo, gsmap_global
-    !
-    ! arguments
-    implicit none
-    type(bounds_type), intent(in) :: bounds
-    character(len=*), intent(in) :: NLFilename ! Namelist filename
-    !
-    ! local variables
-    integer :: nu_nml ! unit for namelist file
-    integer :: nml_error ! namelist i/o error flag
-    type(mct_ggrid) :: dom_clm ! domain information
-    character(len=CL) :: stream_fldFileName_ndep
-    character(len=CL) :: ndepmapalgo = 'bilinear'
-    character(len=CL) :: ndep_tintalgo = 'linear'
-    character(len=CS) :: ndep_taxmode = 'extend'
-    character(len=CL) :: ndep_varlist = 'NDEP_year'
-    character(*), parameter :: shr_strdata_unset = 'NOT_SET'
-    character(*), parameter :: subName = "('ndepdyn_init')"
-    character(*), parameter :: F00 = "('(ndepdyn_init) ',4a)"
-    !-----------------------------------------------------------------------
-
-    namelist /ndepdyn_nml/ &
-         stream_year_first_ndep, &
-         stream_year_last_ndep, &
-         model_year_align_ndep, &
-         ndepmapalgo, ndep_taxmode, &
-         ndep_varlist, &
-         stream_fldFileName_ndep, &
-         ndep_tintalgo
-
-    ! Default values for namelist
-    stream_year_first_ndep = 1 ! first year in stream to use
-    stream_year_last_ndep = 1 ! last year in stream to use
-    model_year_align_ndep = 1 ! align stream_year_first_ndep with this model year
-    stream_fldFileName_ndep = ' '
-
-    ! Read ndepdyn_nml namelist
-    if (masterproc) then
-       open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error )
-       call shr_nl_find_group_name(nu_nml, 'ndepdyn_nml', status=nml_error)
-       if (nml_error == 0) then
-          read(nu_nml, nml=ndepdyn_nml,iostat=nml_error)
-          if (nml_error /= 0) then
-             call endrun(msg=' ERROR reading ndepdyn_nml namelist'//errMsg(sourcefile, __LINE__))
-          end if
-       else
-          call endrun(msg=' ERROR finding ndepdyn_nml namelist'//errMsg(sourcefile, __LINE__))
-       end if
-       close(nu_nml)
-    endif
-
-    call shr_mpi_bcast(stream_year_first_ndep , mpicom)
-    call shr_mpi_bcast(stream_year_last_ndep , mpicom)
-    call shr_mpi_bcast(model_year_align_ndep , mpicom)
-    call shr_mpi_bcast(stream_fldFileName_ndep, mpicom)
-    call shr_mpi_bcast(ndep_varlist , mpicom)
-    call shr_mpi_bcast(ndep_taxmode , mpicom)
-    call shr_mpi_bcast(ndep_tintalgo , mpicom)
-
-    if (masterproc) then
-       write(iulog,*) ' '
-       write(iulog,*) 'ndepdyn stream settings:'
-       write(iulog,*) ' stream_year_first_ndep = ',stream_year_first_ndep
-       write(iulog,*) ' stream_year_last_ndep = ',stream_year_last_ndep
-       write(iulog,*) ' model_year_align_ndep = ',model_year_align_ndep
-       write(iulog,*) ' stream_fldFileName_ndep = ',stream_fldFileName_ndep
-       write(iulog,*) ' ndep_varList = ',ndep_varList
-       write(iulog,*) ' ndep_taxmode = ',ndep_taxmode
-       write(iulog,*) ' ndep_tintalgo = ',ndep_tintalgo
-       write(iulog,*) ' '
-    endif
-    ! Read in units
-    call check_units( stream_fldFileName_ndep, ndep_varList )
-
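check_units (defined further below) accepts deposition in either g(N)/m2/s or g(N)/m2/yr; in the latter case ndep_interp divides by the number of seconds in the year. A standalone worked example for a non-leap year (365 d * 86400 s/d = 31,536,000 s), using a hypothetical deposition rate:

    program ndep_units_demo
      implicit none
      integer, parameter :: r8 = selected_real_kind(12)
      real(r8), parameter :: secspday = 86400._r8
      real(r8) :: ndep_per_yr, ndep_per_s, dayspyr
      dayspyr     = 365._r8      ! hypothetical; the model queries the calendar
      ndep_per_yr = 5.0_r8       ! hypothetical flux in g(N)/m2/yr
      ndep_per_s  = ndep_per_yr / (secspday * dayspyr)
      write(*,*) 'ndep flux = ', ndep_per_s, ' g(N)/m2/s'   ! ~1.586e-7
    end program ndep_units_demo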
-    ! Set domain and create streams
-    call clm_domain_mct (bounds, dom_clm)
-
-    call shr_strdata_create(sdat,name="clmndep", &
-         pio_subsystem=pio_subsystem, &
-         pio_iotype=shr_pio_getiotype(inst_name), &
-         mpicom=mpicom, compid=comp_id, &
-         gsmap=gsmap_global, ggrid=dom_clm, &
-         nxg=ldomain%ni, nyg=ldomain%nj, &
-         yearFirst=stream_year_first_ndep, &
-         yearLast=stream_year_last_ndep, &
-         yearAlign=model_year_align_ndep, &
-         offset=0, &
-         domFilePath='', &
-         domFileName=trim(stream_fldFileName_ndep), &
-         domTvarName='time', &
-         domXvarName='lon' , &
-         domYvarName='lat' , &
-         domAreaName='area', &
-         domMaskName='mask', &
-         filePath='', &
-         filename=(/trim(stream_fldFileName_ndep)/),&
-         fldListFile=ndep_varlist, &
-         fldListModel=ndep_varlist, &
-         fillalgo='none', &
-         mapalgo=ndepmapalgo, &
-         tintalgo=ndep_tintalgo, &
-         calendar=get_calendar(), &
-         taxmode=ndep_taxmode )
-
-
-    if (masterproc) then
-       call shr_strdata_print(sdat,'CLMNDEP data')
-    endif
-
-  end subroutine ndep_init
-  !================================================================
-
-  subroutine check_units( stream_fldFileName_ndep, ndep_varList )
-    !-------------------------------------------------------------------
-    ! Check that units are correct on the file and if need any conversion
-    use ncdio_pio , only : ncd_pio_openfile, ncd_inqvid, ncd_getatt, ncd_pio_closefile, ncd_nowrite
-    use ncdio_pio , only : file_desc_t, var_desc_t
-    use shr_kind_mod , only : CS => shr_kind_cs
-    use shr_log_mod , only : errMsg => shr_log_errMsg
-    use shr_string_mod, only : shr_string_listGetName
-    implicit none
-
-    !-----------------------------------------------------------------------
-    !
-    ! Arguments
-    character(len=*), intent(IN) :: stream_fldFileName_ndep ! ndep filename
-    character(len=*), intent(IN) :: ndep_varList ! ndep variable list to examine
-    !
-    ! Local variables
-    type(file_desc_t) :: ncid ! NetCDF filehandle for ndep file
-    type(var_desc_t) :: vardesc ! variable descriptor
-    integer :: varid ! variable index
-    logical :: readvar ! If variable was read
-    character(len=CS) :: ndepunits! ndep units
-    character(len=CS) :: fname ! ndep field name
-    !-----------------------------------------------------------------------
-    call ncd_pio_openfile( ncid, trim(stream_fldFileName_ndep), ncd_nowrite )
-    call shr_string_listGetName( ndep_varList, 1, fname )
-    call ncd_inqvid( ncid, fname, varid, vardesc, readvar=readvar )
-    if ( readvar ) then
-       call ncd_getatt( ncid, varid, "units", ndepunits )
-    else
-       call endrun(msg=' ERROR finding variable: '//trim(fname)//" in file: "// &
-            trim(stream_fldFileName_ndep)//errMsg(sourcefile, __LINE__))
-    end if
-    call ncd_pio_closefile( ncid )
-
-    ! Now check to make sure they are correct
-    if ( trim(ndepunits) == "g(N)/m2/s" )then
-       divide_by_secs_per_yr = .false.
-    else if ( trim(ndepunits) == "g(N)/m2/yr" )then
-       divide_by_secs_per_yr = .true.
-    else
-       call endrun(msg=' ERROR in units for nitrogen deposition equal to: '//trim(ndepunits)//" not units expected"// &
-            errMsg(sourcefile, __LINE__))
-    end if
-
-  end subroutine check_units
-
-  !================================================================
-  subroutine ndep_interp(bounds, atm2lnd_inst)
-
-    !-----------------------------------------------------------------------
-    use clm_time_manager, only : get_curr_date, get_curr_days_per_year
-    use clm_varcon , only : secspday
-    use atm2lndType , only : atm2lnd_type
-    !
-    ! Arguments
-    type(bounds_type) , intent(in) :: bounds
-    type(atm2lnd_type), intent(inout) :: atm2lnd_inst
-    !
-    ! Local variables
-    integer :: g, ig
-    integer :: year ! year (0, ...) for nstep+1
-    integer :: mon ! month (1, ..., 12) for nstep+1
-    integer :: day ! day of month (1, ..., 31) for nstep+1
-    integer :: sec ! seconds into current date for nstep+1
-    integer :: mcdate ! Current model date (yyyymmdd)
-    integer :: dayspyr ! days per year
-    !-----------------------------------------------------------------------
-
-    call get_curr_date(year, mon, day, sec)
-    mcdate = year*10000 + mon*100 + day
-
-    call shr_strdata_advance(sdat, mcdate, sec, mpicom, 'ndepdyn')
-
-    if ( divide_by_secs_per_yr )then
-       ig = 0
-       dayspyr = get_curr_days_per_year( )
-       do g = bounds%begg,bounds%endg
-          ig = ig+1
-          atm2lnd_inst%forc_ndep_grc(g) = sdat%avs(1)%rAttr(1,ig) / (secspday * dayspyr)
-       end do
-    else
-       ig = 0
-       do g = bounds%begg,bounds%endg
-          ig = ig+1
-          atm2lnd_inst%forc_ndep_grc(g) = sdat%avs(1)%rAttr(1,ig)
-       end do
-    end if
-
-  end subroutine ndep_interp
-
-  !==============================================================================
-  subroutine clm_domain_mct(bounds, dom_clm, nlevels)
-
-    !-------------------------------------------------------------------
-    ! Set domain data type for internal clm grid
-    use clm_varcon , only : re
-    use domainMod , only : ldomain
-    use mct_mod , only : mct_ggrid, mct_gsMap_lsize, mct_gGrid_init
-    use mct_mod , only : mct_gsMap_orderedPoints, mct_gGrid_importIAttr
-    use mct_mod , only : mct_gGrid_importRAttr, mct_gsMap
-    use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo, gsmap_global
-    implicit none
-    !
-    ! arguments
-    type(bounds_type), intent(in) :: bounds
-    type(mct_ggrid), intent(out) :: dom_clm ! Output domain information for land model
-    integer, intent(in), optional :: nlevels ! Number of levels if this is a 3D field
-    !
-    ! local variables
-    integer :: g,i,j,k ! index
-    integer :: lsize ! land model domain data size
-    real(r8), pointer :: data(:) ! temporary
-    integer , pointer :: idata(:) ! temporary
-    integer :: nlevs ! Number of vertical levels
-    type(mct_gsMap), pointer :: gsmap => null() ! MCT GS map
-    !-------------------------------------------------------------------
-    ! SEt number of levels, and get the GS map for either the 2D or 3D grid
-    nlevs = 1
-    if ( present(nlevels) ) nlevs = nlevels
-    if ( nlevs == 1 ) then
-       gsmap => gsmap_global
-    else
-       gsmap => gsMap_lnd2Dsoi_gdc2glo
-    end if
-    !
-    ! Initialize mct domain type
-    ! lat/lon in degrees, area in radians^2, mask is 1 (land), 0 (non-land)
-    ! Note that in addition land carries around landfrac for the purposes of domain checking
-    !
-    lsize = mct_gsMap_lsize(gsmap, mpicom)
-    call mct_gGrid_init( GGrid=dom_clm, &
-         CoordChars='lat:lon:hgt', OtherChars='area:aream:mask:frac', lsize=lsize )
-    !
-    ! Allocate memory
-    !
-    allocate(data(lsize))
-    !
-    ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT
-    !
-    call mct_gsMap_orderedPoints(gsmap, iam, idata)
-    gsmap => null()
-    call mct_gGrid_importIAttr(dom_clm,'GlobGridNum',idata,lsize)
-    !
-    ! Determine domain (numbering scheme is: West to East and South to North to South pole)
-    ! Initialize attribute vector with special value
-    !
-    data(:) = -9999.0_R8
-    call mct_gGrid_importRAttr(dom_clm,"lat" ,data,lsize)
-    call mct_gGrid_importRAttr(dom_clm,"lon" ,data,lsize)
-    call mct_gGrid_importRAttr(dom_clm,"area" ,data,lsize)
-    call mct_gGrid_importRAttr(dom_clm,"aream",data,lsize)
-    data(:) = 0.0_R8
-    call mct_gGrid_importRAttr(dom_clm,"mask" ,data,lsize)
-    !
-    ! Determine bounds
-    !
-    !
-    ! Fill in correct values for domain components
-    ! Note aream will be filled in in the atm-lnd mapper
-    !
-    do k = 1, nlevs
-       do g = bounds%begg,bounds%endg
-          i = 1 + (g - bounds%begg)
-          data(i) = ldomain%lonc(g)
-       end do
-    end do
-    call mct_gGrid_importRattr(dom_clm,"lon",data,lsize)
-
-    do k = 1, nlevs
-       do g = bounds%begg,bounds%endg
-          i = 1 + (g - bounds%begg)
-          data(i) = ldomain%latc(g)
-       end do
-    end do
-    call mct_gGrid_importRattr(dom_clm,"lat",data,lsize)
-
-    do k = 1, nlevs
-       do g = bounds%begg,bounds%endg
-          i = 1 + (g - bounds%begg)
-          data(i) = ldomain%area(g)/(re*re)
-       end do
-    end do
-    call mct_gGrid_importRattr(dom_clm,"area",data,lsize)
-
-    do k = 1, nlevs
-       do g = bounds%begg,bounds%endg
-          i = 1 + (g - bounds%begg)
-          data(i) = real(ldomain%mask(g), r8)
-       end do
-    end do
-    call mct_gGrid_importRattr(dom_clm,"mask",data,lsize)
-
-    do k = 1, nlevs
-       do g = bounds%begg,bounds%endg
-          i = 1 + (g - bounds%begg)
-          data(i) = real(ldomain%frac(g), r8)
-       end do
-    end do
-    call mct_gGrid_importRattr(dom_clm,"frac",data,lsize)
-
-    deallocate(data)
-    deallocate(idata)
-
-  end subroutine clm_domain_mct
-
-end module ndepStreamMod

From 788f4996c12dda3873af80d9875c4a6feda340c9 Mon Sep 17 00:00:00 2001
From: Samuel Levis
Date: Fri, 10 May 2024 16:35:53 -0600
Subject: [PATCH 061/126] Remove a couple more references to MCT

---
 .../customizing-the-clm-configuration.rst | 2 +-
 src/main/clm_varctl.F90                   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
index f8b5fee002..e76a9cc6f0 100644
--- a/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
+++ b/doc/source/users_guide/setting-up-and-running-a-case/customizing-the-clm-configuration.rst
@@ -638,7 +638,7 @@ The output to the above command is as follows:
 -vichydro Turn VIC hydrologic parameterizations : [on | off] (default is off)
 -crop Toggle for prognostic crop model. [on | off] (default is off)
 (can ONLY be turned on when BGC type is CN or CNDV)
--comp_intf Component interface to use (ESMF or MCT) (default MCT)
+-comp_intf Component interface to use (default ESMF, currently no other option)
 -defaults Specify full path to a configuration file which will be used to supply defaults instead of
 the defaults in bld/config_files. This file is used to specify model configuration
 parameters only.
diff --git a/src/main/clm_varctl.F90 b/src/main/clm_varctl.F90
index 7d0b2b55ad..0daedbbb1f 100644
--- a/src/main/clm_varctl.F90
+++ b/src/main/clm_varctl.F90
@@ -115,7 +115,7 @@ module clm_varctl
 character(len=fname_len), public :: fsnowaging = ' ' ! snow aging parameters file name
 character(len=fname_len), public :: fatmlndfrc = ' ' ! lnd frac file on atm grid
-                                                     ! only needed for LILAC and MCT drivers
+                                                     ! only needed for LILAC

 !----------------------------------------------------------
Flag to read ndep rather than obtain it from coupler From cc0e04dafe87cb878a8d3333c7a16d1b41a8c7e0 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 10 May 2024 17:21:55 -0600 Subject: [PATCH 062/126] Update ntests in build-namelist_test.pl --- bld/unit_testers/build-namelist_test.pl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index 12a5f4bd38..ec370749bb 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -163,10 +163,10 @@ sub cat_and_create_namelistinfile { # # Figure out number of tests that will run # -my $ntests = 2513; +my $ntests = 2454; if ( defined($opts{'compare'}) ) { - $ntests += 1545; + $ntests += 1486; } plan( tests=>$ntests ); From 2b8c042973bd383d986c64dda73b11ab9350d2d7 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Fri, 10 May 2024 18:05:52 -0600 Subject: [PATCH 063/126] Fix the tests so expected fails, actually fail --- bld/unit_testers/build-namelist_test.pl | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index 25545dcf8a..b0ddb1e448 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -1480,57 +1480,73 @@ sub cat_and_create_namelistinfile { atm_forc=>"GSWP3v1", res => "0.9x1.25", bgc => "bgc", + crop => "--no-crop", use_case => "1850_control", start_ymd => "18500101", + namelist => "irrigate=T", }, "f091850Clm45BgcCRU" =>{ phys =>"clm4_5", atm_forc=>"CRUv7", res => "0.9x1.25", bgc => "bgc", + crop => "--no-crop", use_case => "1850_control", start_ymd => "18500101", + namelist => "irrigate=T", }, "f091850Clm45BgcCAM6" =>{ phys =>"clm4_5", atm_forc=>"cam6.0", res => "0.9x1.25", bgc => "bgc", + crop => "--crop", use_case => "1850_control", start_ymd => "18500101", + namelist => "irrigate=F", }, "f091850Clm50BgcGSW" =>{ phys =>"clm5_0", atm_forc=>"GSWP3v1", res => "0.9x1.25", bgc => "bgc", + crop => "--crop", use_case => "1850_control", start_ymd => "18500101", + namelist => "irrigate=F", }, "f091850Clm50SpGSW" =>{ phys =>"clm5_0", atm_forc=>"GSWP3v1", res => "0.9x1.25", bgc => "sp", + crop => "--no-crop", use_case => "1850_control", start_ymd => "18500101", + namelist => "irrigate=T", }, "f091850Clm50BgcCRU" =>{ phys =>"clm5_0", atm_forc=>"CRUv7", res => "0.9x1.25", bgc => "bgc", + crop => "--crop", use_case => "1850_control", start_ymd => "18500101", + namelist => "irrigate=F", }, "f091850Clm50SpCRU" =>{ phys =>"clm5_0", atm_forc=>"CRUv7", res => "0.9x1.25", bgc => "sp", + crop => "--no-crop", use_case => "1850_control", start_ymd => "18500101", + namelist => "irrigate=T", }, "f091850Clm50BgcCAM6" =>{ phys =>"clm5_0", atm_forc=>"cam6.0", res => "0.9x1.25", bgc => "bgc", + crop => "--crop", use_case => "1850_control", start_ymd => "18500101", + namelist => "irrigate=F", }, ); @@ -1542,9 +1558,12 @@ sub cat_and_create_namelistinfile { my $usecase = $finidat_files{$key}{'use_case'}; my $bgc = $finidat_files{$key}{'bgc'}; my $res = $finidat_files{$key}{'res'}; + my $crop = $finidat_files{$key}{'crop'}; + my $namelist = $finidat_files{$key}{'namelist'}; my $start_ymd = $finidat_files{$key}{'start_ymd'}; my $lnd_tuning_mode = "${phys}_" . $finidat_files{$key}{'atm_forc'}; - $options = "-bgc $bgc -res $res -use_case $usecase -envxml_dir . 
--lnd_tuning_mode $lnd_tuning_mode -namelist '&a start_ymd=$start_ymd/'"; + $options = "-bgc $bgc -res $res -use_case $usecase -envxml_dir . $crop --lnd_tuning_mode $lnd_tuning_mode " . + "-namelist '&a start_ymd=$start_ymd, $namelist/'"; &make_env_run(); eval{ system( "$bldnml $options > $tempfile 2>&1 " ); }; is( $@, '', "options: $options" ); @@ -1552,7 +1571,7 @@ sub cat_and_create_namelistinfile { if ( $finidat =~ /initdata_map/ ) { my $result; eval( $result = `grep use_init_interp lnd_in` ); - isnt( $#, 0, "use_init_interp needs to be set here and was not: $result") + is ( $result =~ /.true./, 1, "use_init_interp needs to be true here: $result"); } $cfiles->checkfilesexist( "$options", $mode ); $cfiles->shownmldiff( "default", "standard" ); From 8c109657651315ff83b98561108c6ac71a7f6b29 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Fri, 10 May 2024 21:05:33 -0600 Subject: [PATCH 064/126] Set use_init_interp=T for finidat files that are older than ctsm5.2 --- bld/namelist_files/namelist_defaults_ctsm.xml | 70 +++++++++---------- 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 6f2f265da4..c99700289e 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -970,21 +970,21 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.I1850Clm45BgcGs.0901-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc lnd/clm2/initdata_map/clmi.I1850Clm45BgcCruGs.1101-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc lnd/clm2/initdata_map/clmi.B1850Clm45BgcGs.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc @@ -992,7 +992,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc @@ -1016,14 +1016,14 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc lnd/clm2/initdata_map/clmi.I1850Clm50SpCru.1706-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc @@ -1031,7 +1031,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc @@ -1053,7 +1053,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc @@ -1125,7 +1125,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc @@ -1136,14 +1136,14 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc @@ -1151,7 +1151,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc @@ -1159,7 +1159,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). 
lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc @@ -1167,7 +1167,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc @@ -1175,7 +1175,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc @@ -1197,7 +1197,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc @@ -1206,21 +1206,21 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc @@ -1228,7 +1228,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc @@ -1236,7 +1236,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc @@ -1244,7 +1244,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc @@ -1252,7 +1252,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc @@ -1260,7 +1260,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc @@ -1269,21 +1269,21 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr1979_c200806.nc lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.1.9x2.5_gx1v7_simyr1979_c200806.nc @@ -1291,7 +1291,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTIC_ne30x4_mt12_simyr1979_c200806.nc @@ -1299,7 +1299,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.FHISTSp.1979-01-01.ARCTICGRIS_ne30x8_mt12_simyr1979_c200806.nc @@ -1307,7 +1307,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.F2000.2000-01-01.ne120pg3_mt13_simyr2000_c200728.nc @@ -1315,7 +1315,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.BHIST.2000-01-01.0.9x1.25_gx1v7_simyr2000_c200728.nc @@ -1323,7 +1323,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). lnd/clm2/initdata_map/clmi.BHISTSp.2000-01-01.1.9x2.5_gx1v7_simyr2003_c200807.nc @@ -1332,7 +1332,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case). 
lnd/clm2/initdata_map/clmi.FHISTSp.2013-01-01.ne0CONUSne30x8_mt12_simyr2013_c200806.nc From e14303f9ac6bb7a8f36dfebec2ab73d0a6e20151 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Sun, 12 May 2024 20:21:29 -0600 Subject: [PATCH 065/126] Fix irrigate for two use-cases so namelists all agree with the ctsm5.1.dev176 version --- bld/namelist_files/use_cases/2010_control.xml | 6 +++--- bld/namelist_files/use_cases/20thC_transient.xml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bld/namelist_files/use_cases/2010_control.xml b/bld/namelist_files/use_cases/2010_control.xml index d16177301c..2f72624077 100644 --- a/bld/namelist_files/use_cases/2010_control.xml +++ b/bld/namelist_files/use_cases/2010_control.xml @@ -8,9 +8,9 @@ constant -.true. -.true. -.false. +.true. +.false. +.false. 2010 2010 diff --git a/bld/namelist_files/use_cases/20thC_transient.xml b/bld/namelist_files/use_cases/20thC_transient.xml index b88ca3d3b1..6cbf9e0d38 100644 --- a/bld/namelist_files/use_cases/20thC_transient.xml +++ b/bld/namelist_files/use_cases/20thC_transient.xml @@ -20,7 +20,7 @@ .true. .false. -.false. +.false. 1850 2015 From 9fa2841b8ed6ad0d0696969ee3ea9d4b5627aec7 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Mon, 13 May 2024 00:56:57 -0600 Subject: [PATCH 066/126] New finidat files for clm5_1/clm6_0 for 1850 Use ne30np4.pg3_gx1v7 BgcCrop GSWP3v1 finidat file for 1850 clm5_1 and clm6_0. --- bld/namelist_files/namelist_defaults_ctsm.xml | 57 ++++--------------- 1 file changed, 11 insertions(+), 46 deletions(-) diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml index 40ba339d65..fff414d82e 100644 --- a/bld/namelist_files/namelist_defaults_ctsm.xml +++ b/bld/namelist_files/namelist_defaults_ctsm.xml @@ -758,34 +758,13 @@ attributes from the config_cache.xml file (with keys converted to upper-case). >hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. - - -hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. + +hgrid=ne30np4.pg3 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. lnd_tuning_mode=clm6_0_GSWP3v1 -hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. - - - -hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. - - - - -hgrid=0.9x1.25 maxpft=17 mask=gx1v7 use_cn=.false. use_crop=.false. irrigate=.true. glc_nec=10 do_transient_pfts=.false. - - -hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. - - - -hgrid=0.9x1.25 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. +hgrid=ne30np4.pg3 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. lnd_tuning_mode=clm6_0_GSWP3v1 @@ -1049,26 +1028,12 @@ attributes from the config_cache.xml file (with keys converted to upper-case). 
>lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc
-
-lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc
-
-lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm50BgcCrop-ciso.1366-01-01.0.9x1.25_gx1v7_simyr1850_c240223.nc
-
-
-
-lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc
+
+
+lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm60BgcCrop-ciso.1361-01-01.ne30pg3_mg17_c240317.nc
Date: Mon, 13 May 2024 02:44:47 -0600
Subject: [PATCH 070/126] Start on Change files

---
 doc/ChangeLog | 117 ++++++++++++++++++++++++++++++++++++++++++++++++++
 doc/ChangeSum |   1 +
 2 files changed, 118 insertions(+)

diff --git a/doc/ChangeLog b/doc/ChangeLog
index 3307c8df88..172c203e7f 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,121 @@
 ===============================================================
+Tag name: ctsm5.2.005
+Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326)
+Date: Mon 13 May 2024 02:44:33 AM MDT
+One-line Summary: Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems
+
+Purpose and description of changes
+----------------------------------
+
+Fix the clm6_0 defaults that were incorrect in ctsm5.2.0. The use-cases needed to be changed to handle clm6_0.
+Also, clm6_0 wasn't handled in the merge of ctsm5.1.dev174 for the two new settings. Simplified the use-cases, which
+should help prevent these problems in the future. use_init_interp will be set for ctsm5.1 finidat files.
+
+Fix some testing. mksurfdata_esmf and the MKSURFDATAESMF test didn't work in a CESM checkout. build-namelist unit tests
+weren't going over the use-cases. Also started adding some tests to go over finidat files, but didn't yet capture all of them.
+Add some scripts to compare namelists in order to detect namelist issues for physics
+version updates, and more easily see namelist changes in one place (bld/unit_testers/cmp_baseline_lnd_in_files,
+bld/unit_testers/compare_namelists).
+
+Add some tests to detect issues in CESM and CAM testing (add more tests for cam6.0 forcing, add a new IHistClm60BgcCropG
+compset). Add a NoAnthro compset to the ctsm_sci testing. Add an I1850Clm60Sp test for f09 and f19 to ctsm_sci.
+
+Change NoAnthro compsets to use RTM rather than MOSART. Add science support to some clm6_0 compsets where it was obviously warranted.
+
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+ +[X] clm6_0 + +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- + +List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: + Fixes #2492 -- Fix clm6_0 defaults + Fixes #2504 -- build-namelist unit tests for use-cases + Fixes #2519 -- MKSURFDATAESMF for CESM checkout + Fixes #2520 -- B1850 compsets are failing + One tick box in #2403 (new initial conditions) + +Notes of particular relevance for users +--------------------------------------- + +Changes made to namelist defaults (e.g., changed parameter values): + Fixes: snow_thermal_cond_method, irrigate and snicar_snobc_intmix for clm6_0 + +Changes to the datasets (e.g., parameter, surface or initial files): + New initial conditions for clm5_1/clm6_0 for 1850 from ne30pg3_g17 GSWP3v1 forcing for BgcCrop with ciso + +Notes of particular relevance for developers: +--------------------------------------------- + +Caveats for developers (e.g., code that is duplicated that requires double maintenance): + build-namelist unit tester started to add some finidat tests, but only some of them were added to the list + +Changes to tests or testing: Add some tests to detect the CESM test issues + + +Testing summary: regular ctsm_sci +---------------- + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + build-namelist tests (if CLMBuildNamelist.pm has changed): + + derecho - PASS (except + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - PASS (except + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + any other testing (give details below): + + ctsm_sci + derecho ---- OK + +If the tag used for baseline comparisons was NOT the previous tag, note that here: + + +Answer changes +-------------- + +Changes answers relative to baseline: Yes! 
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations: clm6_0 (some clm4_5, see below)
+ - what platforms/compilers: All
+ - nature of change (roundoff; larger than roundoff/same climate; new climate):
+ new climate so it agrees with clm5_1 namelists, and fixes irrigate for clm4_5
+
+Other details
+-------------
+
+Pull Requests that document the changes (include PR ids):
+(https://github.com/ESCOMP/ctsm/pull)
+
+ #2501 -- Fix clm6_0 defaults
+
+===============================================================
+===============================================================
 Tag name: ctsm5.2.004
 Originator(s): slevis (Samuel Levis,UCAR/TSS,303-665-1310)
 Date: Thu 09 May 2024 03:10:52 PM MDT

diff --git a/doc/ChangeSum b/doc/ChangeSum
index 17ad670f17..0edccb3a18 100644
--- a/doc/ChangeSum
+++ b/doc/ChangeSum
@@ -1,5 +1,6 @@
 Tag    Who    Date       Summary
 ============================================================================================================================
+ ctsm5.2.005  erik  05/13/2024  Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems
 ctsm5.2.004   multiple 05/09/2024 CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes
 ctsm5.2.003   samrabin 05/02/2024 Merge b4b-dev
 ctsm5.2.002   glemieux 04/26/2024 FATES default allometry parameter file update

From a09d22376f704ea37c30b19e6978f6fa19a261bf Mon Sep 17 00:00:00 2001
From: Erik Kluzek
Date: Mon, 13 May 2024 13:44:35 -0600
Subject: [PATCH 071/126] Some tests were failing to find the 1850 finidat
 file; more changes to match how the finidat matching is done so that it now
 works

---
 bld/namelist_files/namelist_defaults_ctsm.xml | 25 ++++---------------
 1 file changed, 5 insertions(+), 20 deletions(-)

diff --git a/bld/namelist_files/namelist_defaults_ctsm.xml b/bld/namelist_files/namelist_defaults_ctsm.xml
index 59a0d9ee75..655e97c47c 100644
--- a/bld/namelist_files/namelist_defaults_ctsm.xml
+++ b/bld/namelist_files/namelist_defaults_ctsm.xml
@@ -760,11 +760,11 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
 hgrid=ne30np4.pg3 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. lnd_tuning_mode=clm6_0_GSWP3v1
+>hgrid=ne30np4.pg3 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. lnd_tuning_mode=clm6_0_GSWP3v1
 hgrid=ne30np4.pg3 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. lnd_tuning_mode=clm6_0_GSWP3v1
+>hgrid=ne30np4.pg3 maxpft=79 mask=gx1v7 use_cn=.true. use_crop=.true. irrigate=.false. glc_nec=10 do_transient_pfts=.false. lnd_tuning_mode=clm6_0_GSWP3v1
@@ -1006,27 +1006,12 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
>lnd/clm2/initdata_map/clmi.I1850Clm50SpCru.1706-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc - -lnd/clm2/initdata_map/clmi.I1850Clm50Sp.0181-01-01.0.9x1.25_gx1v7_simyr1850_c200806.nc - - - -lnd/clm2/initdata_map/clmi.B1850Clm50BgcCrop.0161-01-01.0.9x1.25_gx1v7_simyr1850_c200729.nc - - lnd/clm2/initdata_esmf/ctsm5.2/clmi.I1850Clm60BgcCrop-ciso.1361-01-01.ne30pg3_mg17_c240317.nc From 9ecc1bc79a1dd66a138fd398872ef8fa55e09fbf Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Mon, 13 May 2024 16:10:43 -0600 Subject: [PATCH 072/126] Fix a resoplution name in a test --- cime_config/testdefs/testlist_clm.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml index da38a4e27d..9cfba6f5b3 100644 --- a/cime_config/testdefs/testlist_clm.xml +++ b/cime_config/testdefs/testlist_clm.xml @@ -576,7 +576,7 @@ - + From 8bf5f5abd69b4ccfe590b1b42ddebc1a91c8c2c1 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Mon, 13 May 2024 16:32:08 -0600 Subject: [PATCH 073/126] Add test to expected fails --- cime_config/testdefs/ExpectedTestFails.xml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index 8ee60cf4b8..9da502fd54 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -51,6 +51,13 @@ + + + FAIL + #2542 + + + FAIL From 672da6de229e5a1acb166adf33375d772615de44 Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Mon, 13 May 2024 16:46:38 -0600 Subject: [PATCH 074/126] Update change log a bit --- doc/ChangeLog | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/ChangeLog b/doc/ChangeLog index 172c203e7f..de37d4934c 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,7 +1,7 @@ =============================================================== Tag name: ctsm5.2.005 Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326) -Date: Mon 13 May 2024 02:44:33 AM MDT +Date: Mon 13 May 2024 04:46:10 PM MDT One-line Summary: Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems Purpose and description of changes @@ -76,11 +76,11 @@ Testing summary: regular ctsm_sci build-namelist tests (if CLMBuildNamelist.pm has changed): - derecho - PASS (except + derecho - PASS (737 compare different to baseline) python testing (if python code has changed; see instructions in python/README.md; document testing done): - derecho - PASS (except + derecho - PASS regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): From 4715afef8db5cade7e3a2835ee6044bd711b21e3 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 15 May 2024 14:33:34 -0600 Subject: [PATCH 075/126] Bring back src/unit_test_stubs/csm_share/CMakeLists.txt for FUNIT test --- src/unit_test_stubs/csm_share/CMakeLists.txt | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 src/unit_test_stubs/csm_share/CMakeLists.txt diff --git a/src/unit_test_stubs/csm_share/CMakeLists.txt b/src/unit_test_stubs/csm_share/CMakeLists.txt new file mode 100644 index 0000000000..33ddbfb342 --- /dev/null +++ b/src/unit_test_stubs/csm_share/CMakeLists.txt @@ -0,0 +1,5 @@ +list(APPEND share_sources + shr_mpi_mod_stub.F90 + ) + +sourcelist_to_parent(share_sources) From 39dc01e6f31f72246a5c3c781b5c83ab6e2a2598 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 16 May 2024 11:39:30 -0600 Subject: [PATCH 076/126] Revert "Remove the src/cpl/mct directory" This reverts 
commit 371f093e02a7820e8beec823c97dd23b94277c05. Jim Edwards posted the following in my PR: "although you can remove cpl7 you cannot yet remove mct. However I don't expect you to remove any externals in this tag, I will remove them in the next tag." --- src/cpl/mct/ExcessIceStreamType.F90 | 144 +++++ src/cpl/mct/FireDataBaseType.F90 | 561 +++++++++++++++++++ src/cpl/mct/SoilMoistureStreamMod.F90 | 418 ++++++++++++++ src/cpl/mct/UrbanTimeVarType.F90 | 314 +++++++++++ src/cpl/mct/ch4FInundatedStreamType.F90 | 389 +++++++++++++ src/cpl/mct/clm_cpl_indices.F90 | 330 +++++++++++ src/cpl/mct/laiStreamMod.F90 | 241 +++++++++ src/cpl/mct/lnd_comp_mct.F90 | 632 ++++++++++++++++++++++ src/cpl/mct/lnd_import_export.F90 | 354 ++++++++++++ src/cpl/mct/lnd_set_decomp_and_domain.F90 | 352 ++++++++++++ src/cpl/mct/ndepStreamMod.F90 | 376 +++++++++++++ 11 files changed, 4111 insertions(+) create mode 100644 src/cpl/mct/ExcessIceStreamType.F90 create mode 100644 src/cpl/mct/FireDataBaseType.F90 create mode 100644 src/cpl/mct/SoilMoistureStreamMod.F90 create mode 100644 src/cpl/mct/UrbanTimeVarType.F90 create mode 100644 src/cpl/mct/ch4FInundatedStreamType.F90 create mode 100644 src/cpl/mct/clm_cpl_indices.F90 create mode 100644 src/cpl/mct/laiStreamMod.F90 create mode 100644 src/cpl/mct/lnd_comp_mct.F90 create mode 100644 src/cpl/mct/lnd_import_export.F90 create mode 100644 src/cpl/mct/lnd_set_decomp_and_domain.F90 create mode 100644 src/cpl/mct/ndepStreamMod.F90 diff --git a/src/cpl/mct/ExcessIceStreamType.F90 b/src/cpl/mct/ExcessIceStreamType.F90 new file mode 100644 index 0000000000..5c5394233c --- /dev/null +++ b/src/cpl/mct/ExcessIceStreamType.F90 @@ -0,0 +1,144 @@ +module ExcessIceStreamType + + !----------------------------------------------------------------------- + ! !DESCRIPTION: + ! Stub for ExcessIceStreams for the MCT driver. So that MCT can be used + ! without excess ice streams. + ! + ! !USES + use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL + use shr_log_mod , only : errMsg => shr_log_errMsg + use spmdMod , only : mpicom, masterproc + use clm_varctl , only : iulog + use abortutils , only : endrun + use decompMod , only : bounds_type + + ! !PUBLIC TYPES: + implicit none + private + + public :: UseExcessIceStreams ! If streams will be used + + type, public :: excessicestream_type + contains + + ! !PUBLIC MEMBER FUNCTIONS: + procedure, public :: Init ! Initialize and read data in + procedure, public :: CalcExcessIce ! Calculate excess ice ammount + + ! !PRIVATE MEMBER FUNCTIONS: + procedure, private :: ReadNML ! Read in namelist + + end type excessicestream_type + ! ! PRIVATE DATA: + + character(len=*), parameter, private :: sourcefile = & + __FILE__ + +!============================================================================== +contains +!============================================================================== + + subroutine Init(this, bounds, NLFilename) + ! + ! + ! arguments + implicit none + class(excessicestream_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename ! Namelist filename + + ! + ! local variables + + call this%ReadNML( bounds, NLFileName ) + end subroutine Init + + subroutine CalcExcessIce(this,bounds,exice_bulk_init) + + ! only transfers grid values to columns + implicit none + class(excessicestream_type) :: this + type(bounds_type), intent(in) :: bounds + real(r8) , intent(inout) :: exice_bulk_init(bounds%begc:bounds%endc) + ! + ! 
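    ! (Hedged design note: this MCT stub deliberately assigns nothing here,
    !  since UseExcessIceStreams() always returns .false. for this driver;
    !  callers are expected to guard stream-based initialization roughly
    !  along these lines, where "stream" and the call site are illustrative
    !  names, not code from this file:
    !
    !    if ( UseExcessIceStreams() ) then
    !       call stream%CalcExcessIce(bounds, exice_bulk_init)
    !    end if
    !  )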
!LOCAL VARIABLES: + + end subroutine CalcExcessIce + + logical function UseExcessIceStreams() + ! + ! !DESCRIPTION: + ! Return true if + ! + ! !USES: + ! + ! !ARGUMENTS: + implicit none + ! + ! !LOCAL VARIABLES: + UseExcessIceStreams = .false. +end function UseExcessIceStreams + +subroutine ReadNML(this, bounds, NLFilename) + ! + ! Read the namelist data stream information. + ! + ! Uses: + use shr_nl_mod , only : shr_nl_find_group_name + use shr_log_mod , only : errMsg => shr_log_errMsg + use shr_mpi_mod , only : shr_mpi_bcast + ! + ! arguments + implicit none + class(excessicestream_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename ! Namelist filename + ! + ! local variables + integer :: nu_nml ! unit for namelist file + integer :: nml_error ! namelist i/o error flag + logical :: use_excess_ice_streams = .false. ! logical to turn on use of excess ice streams + character(len=CL) :: stream_fldFileName_exice = ' ' + character(len=CL) :: stream_mapalgo_exice = 'none' + character(len=*), parameter :: namelist_name = 'exice_streams' ! MUST agree with name in namelist and read + character(len=*), parameter :: subName = "('exice_streams::ReadNML')" + !----------------------------------------------------------------------- + + namelist /exice_streams/ & ! MUST agree with namelist_name above + stream_mapalgo_exice, stream_fldFileName_exice, use_excess_ice_streams + !----------------------------------------------------------------------- + ! Default values for namelist + + ! Read excess ice namelist + if (masterproc) then + open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) + call shr_nl_find_group_name(nu_nml, namelist_name, status=nml_error) + if (nml_error == 0) then + read(nu_nml, nml=exice_streams,iostat=nml_error) ! MUST agree with namelist_name above + if (nml_error /= 0) then + call endrun(msg=' ERROR reading '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) + end if + else + call endrun(msg=' ERROR finding '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) + end if + close(nu_nml) + endif + + call shr_mpi_bcast(use_excess_ice_streams , mpicom) + + if (masterproc) then + if ( use_excess_ice_streams ) then + call endrun(msg=' ERROR excess ice streams can NOT be on for the MCT driver'//errMsg(sourcefile, __LINE__)) + end if + if ( trim(stream_fldFileName_exice) /= '' ) then + call endrun(msg=' ERROR stream_fldFileName_exice can NOT be set for the MCT driver'//errMsg(sourcefile, __LINE__)) + end if + if ( trim(stream_mapalgo_exice) /= 'none' ) then + call endrun(msg=' ERROR stream_mapalgo_exice can only be none for the MCT driver'//errMsg(sourcefile, __LINE__)) + end if + endif + +end subroutine ReadNML + +end module ExcessIceStreamType diff --git a/src/cpl/mct/FireDataBaseType.F90 b/src/cpl/mct/FireDataBaseType.F90 new file mode 100644 index 0000000000..0ee635b2fa --- /dev/null +++ b/src/cpl/mct/FireDataBaseType.F90 @@ -0,0 +1,561 @@ +module FireDataBaseType + +#include "shr_assert.h" + + !----------------------------------------------------------------------- + ! !DESCRIPTION: + ! module for handling of fire data + ! + ! 
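  ! (Hedged design note: fire_base_type extends the abstract
  !  fire_method_type and stays abstract itself, deferring FireReadNML and
  !  need_lightning_and_popdens to the concrete fire models.  A concrete
  !  model would extend it roughly as sketched here; my_fire_type is an
  !  illustrative name, not a type defined in this file:
  !
  !    type, extends(fire_base_type) :: my_fire_type
  !    contains
  !       procedure, public :: FireReadNML
  !       procedure, public :: need_lightning_and_popdens
  !    end type my_fire_type
  !
  !  With that in place, BaseFireInit can drive the hdm/lnfm streams
  !  generically while each model decides whether it needs them.)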
!USES: + use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL + use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create, shr_strdata_print + use shr_strdata_mod , only : shr_strdata_advance + use shr_log_mod , only : errMsg => shr_log_errMsg + use clm_varctl , only : iulog, inst_name + use spmdMod , only : masterproc, mpicom, comp_id + use fileutils , only : getavu, relavu + use domainMod , only : ldomain + use abortutils , only : endrun + use decompMod , only : bounds_type + use FireMethodType , only : fire_method_type + use lnd_set_decomp_and_domain, only : gsmap_global + use mct_mod + ! + implicit none + private + ! + ! !PUBLIC TYPES: + public :: fire_base_type + + ! + type, abstract, extends(fire_method_type) :: fire_base_type + private + ! !PRIVATE MEMBER DATA: + + real(r8), public, pointer :: forc_lnfm(:) ! Lightning frequency + real(r8), public, pointer :: forc_hdm(:) ! Human population density + + real(r8), public, pointer :: gdp_lf_col(:) ! col global real gdp data (k US$/capita) + real(r8), public, pointer :: peatf_lf_col(:) ! col global peatland fraction data (0-1) + integer , public, pointer :: abm_lf_col(:) ! col global peak month of crop fire emissions + + type(shr_strdata_type) :: sdat_hdm ! Human population density input data stream + type(shr_strdata_type) :: sdat_lnfm ! Lightning input data stream + + contains + ! + ! !PUBLIC MEMBER FUNCTIONS: + procedure, public :: FireInit => BaseFireInit ! Initialization of Fire + procedure, public :: BaseFireInit ! Initialization of Fire + procedure(FireReadNML_interface), public, deferred :: FireReadNML ! Read in namelist for Fire + procedure, public :: FireInterp ! Interpolate fire data + procedure(need_lightning_and_popdens_interface), public, deferred :: & + need_lightning_and_popdens ! Returns true if need lightning & popdens + ! + ! !PRIVATE MEMBER FUNCTIONS: + procedure, private :: hdm_init ! position datasets for dynamic human population density + procedure, private :: hdm_interp ! interpolates between two years of human pop. density file data + procedure, private :: lnfm_init ! position datasets for Lightning + procedure, private :: lnfm_interp ! interpolates between two years of Lightning file data + procedure, private :: surfdataread ! read fire related data from surface data set + end type fire_base_type + !----------------------------------------------------------------------- + + abstract interface + !----------------------------------------------------------------------- + function need_lightning_and_popdens_interface(this) result(need_lightning_and_popdens) + ! + ! !DESCRIPTION: + ! Returns true if need lightning and popdens, false otherwise + ! + ! USES + import :: fire_base_type + ! + ! !ARGUMENTS: + class(fire_base_type), intent(in) :: this + logical :: need_lightning_and_popdens ! function result + !----------------------------------------------------------------------- + end function need_lightning_and_popdens_interface + end interface + + character(len=*), parameter, private :: sourcefile = & + __FILE__ + +contains + + !----------------------------------------------------------------------- + subroutine FireReadNML_interface( this, NLFilename ) + ! + ! !DESCRIPTION: + ! Read the namelist for Fire + ! + ! !USES: + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + character(len=*), intent(in) :: NLFilename ! 
Namelist filename + end subroutine FireReadNML_interface + + !----------------------------------------------------------------------- + subroutine BaseFireInit( this, bounds, NLFilename ) + ! + ! !DESCRIPTION: + ! Initialize CN Fire module + ! !USES: + use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename + !----------------------------------------------------------------------- + + if ( this%need_lightning_and_popdens() ) then + ! Allocate lightning forcing data + allocate( this%forc_lnfm(bounds%begg:bounds%endg) ) + this%forc_lnfm(bounds%begg:) = nan + ! Allocate pop dens forcing data + allocate( this%forc_hdm(bounds%begg:bounds%endg) ) + this%forc_hdm(bounds%begg:) = nan + + ! Allocate real gdp data + allocate(this%gdp_lf_col(bounds%begc:bounds%endc)) + ! Allocate peatland fraction data + allocate(this%peatf_lf_col(bounds%begc:bounds%endc)) + ! Allocates peak month of crop fire emissions + allocate(this%abm_lf_col(bounds%begc:bounds%endc)) + + + call this%hdm_init(bounds, NLFilename) + call this%hdm_interp(bounds) + call this%lnfm_init(bounds, NLFilename) + call this%lnfm_interp(bounds) + call this%surfdataread(bounds) + end if + + end subroutine BaseFireInit + + !----------------------------------------------------------------------- + subroutine FireInterp(this,bounds) + ! + ! !DESCRIPTION: + ! Interpolate CN Fire datasets + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + !----------------------------------------------------------------------- + + if ( this%need_lightning_and_popdens() ) then + call this%hdm_interp(bounds) + call this%lnfm_interp(bounds) + end if + + end subroutine FireInterp + + !----------------------------------------------------------------------- + subroutine hdm_init( this, bounds, NLFilename ) + ! + ! !DESCRIPTION: + ! Initialize data stream information for population density. + ! + ! !USES: + use clm_time_manager , only : get_calendar + use ncdio_pio , only : pio_subsystem + use shr_pio_mod , only : shr_pio_getiotype + use clm_nlUtilsMod , only : find_nlgroup_name + use ndepStreamMod , only : clm_domain_mct + use histFileMod , only : hist_addfld1d + ! + ! !ARGUMENTS: + implicit none + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename ! Namelist filename + ! + ! !LOCAL VARIABLES: + integer :: stream_year_first_popdens ! first year in pop. dens. stream to use + integer :: stream_year_last_popdens ! last year in pop. dens. stream to use + integer :: model_year_align_popdens ! align stream_year_first_hdm with + integer :: nu_nml ! unit for namelist file + integer :: nml_error ! namelist i/o error flag + type(mct_ggrid) :: dom_clm ! domain information + character(len=CL) :: stream_fldFileName_popdens ! population density streams filename + character(len=CL) :: popdensmapalgo = 'bilinear' ! mapping alogrithm for population density + character(len=CL) :: popdens_tintalgo = 'nearest'! time interpolation alogrithm for population density + character(len=CL) :: stream_meshfile_popdens ! 
not used + character(*), parameter :: subName = "('hdmdyn_init')" + character(*), parameter :: F00 = "('(hdmdyn_init) ',4a)" + !----------------------------------------------------------------------- + + namelist /popd_streams/ & + stream_year_first_popdens, & + stream_year_last_popdens, & + model_year_align_popdens, & + popdensmapalgo, & + stream_fldFileName_popdens, & + stream_meshfile_popdens , & + popdens_tintalgo + + ! Default values for namelist + stream_year_first_popdens = 1 ! first year in stream to use + stream_year_last_popdens = 1 ! last year in stream to use + model_year_align_popdens = 1 ! align stream_year_first_popdens with this model year + stream_fldFileName_popdens = ' ' + + ! Read popd_streams namelist + if (masterproc) then + nu_nml = getavu() + open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) + call find_nlgroup_name(nu_nml, 'popd_streams', status=nml_error) + if (nml_error == 0) then + read(nu_nml, nml=popd_streams,iostat=nml_error) + if (nml_error /= 0) then + call endrun(msg='ERROR reading popd_streams namelist'//errMsg(sourcefile, __LINE__)) + end if + end if + close(nu_nml) + call relavu( nu_nml ) + endif + + call shr_mpi_bcast(stream_year_first_popdens, mpicom) + call shr_mpi_bcast(stream_year_last_popdens, mpicom) + call shr_mpi_bcast(model_year_align_popdens, mpicom) + call shr_mpi_bcast(stream_fldFileName_popdens, mpicom) + call shr_mpi_bcast(popdens_tintalgo, mpicom) + + if (masterproc) then + write(iulog,*) ' ' + write(iulog,*) 'popdens_streams settings:' + write(iulog,*) ' stream_year_first_popdens = ',stream_year_first_popdens + write(iulog,*) ' stream_year_last_popdens = ',stream_year_last_popdens + write(iulog,*) ' model_year_align_popdens = ',model_year_align_popdens + write(iulog,*) ' stream_fldFileName_popdens = ',stream_fldFileName_popdens + write(iulog,*) ' popdens_tintalgo = ',popdens_tintalgo + write(iulog,*) ' ' + endif + + call clm_domain_mct (bounds, dom_clm) + + call shr_strdata_create(this%sdat_hdm,name="clmhdm", & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=stream_year_first_popdens, & + yearLast=stream_year_last_popdens, & + yearAlign=model_year_align_popdens, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_fldFileName_popdens), & + domTvarName='time', & + domXvarName='lon' , & + domYvarName='lat' , & + domAreaName='area', & + domMaskName='mask', & + filePath='', & + filename=(/trim(stream_fldFileName_popdens)/) , & + fldListFile='hdm', & + fldListModel='hdm', & + fillalgo='none', & + mapalgo=popdensmapalgo, & + calendar=get_calendar(), & + tintalgo=popdens_tintalgo, & + taxmode='extend' ) + + if (masterproc) then + call shr_strdata_print(this%sdat_hdm,'population density data') + endif + + ! Add history fields + call hist_addfld1d (fname='HDM', units='counts/km^2', & + avgflag='A', long_name='human population density', & + ptr_lnd=this%forc_hdm, default='inactive') + + end subroutine hdm_init + + !----------------------------------------------------------------------- + subroutine hdm_interp( this, bounds) + ! + ! !DESCRIPTION: + ! Interpolate data stream information for population density. + ! + ! !USES: + use clm_time_manager, only : get_curr_date + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + ! + ! !LOCAL VARIABLES: + integer :: g, ig + integer :: year ! year (0, ...) for nstep+1 + integer :: mon ! 
month (1, ..., 12) for nstep+1 + integer :: day ! day of month (1, ..., 31) for nstep+1 + integer :: sec ! seconds into current date for nstep+1 + integer :: mcdate ! Current model date (yyyymmdd) + !----------------------------------------------------------------------- + + call get_curr_date(year, mon, day, sec) + mcdate = year*10000 + mon*100 + day + + call shr_strdata_advance(this%sdat_hdm, mcdate, sec, mpicom, 'hdmdyn') + + ig = 0 + do g = bounds%begg,bounds%endg + ig = ig+1 + this%forc_hdm(g) = this%sdat_hdm%avs(1)%rAttr(1,ig) + end do + + end subroutine hdm_interp + + !----------------------------------------------------------------------- + subroutine lnfm_init( this, bounds, NLFilename ) + ! + ! !DESCRIPTION: + ! + ! Initialize data stream information for Lightning. + ! + ! !USES: + use clm_time_manager , only : get_calendar + use ncdio_pio , only : pio_subsystem + use shr_pio_mod , only : shr_pio_getiotype + use clm_nlUtilsMod , only : find_nlgroup_name + use ndepStreamMod , only : clm_domain_mct + use histFileMod , only : hist_addfld1d + ! + ! !ARGUMENTS: + implicit none + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename + ! + ! !LOCAL VARIABLES: + integer :: stream_year_first_lightng ! first year in Lightning stream to use + integer :: stream_year_last_lightng ! last year in Lightning stream to use + integer :: model_year_align_lightng ! align stream_year_first_lnfm with + integer :: nu_nml ! unit for namelist file + integer :: nml_error ! namelist i/o error flag + type(mct_ggrid) :: dom_clm ! domain information + character(len=CL) :: stream_fldFileName_lightng ! lightning stream filename to read + character(len=CL) :: lightng_tintalgo = 'linear'! time interpolation alogrithm + character(len=CL) :: lightngmapalgo = 'bilinear'! Mapping alogrithm + character(*), parameter :: subName = "('lnfmdyn_init')" + character(*), parameter :: F00 = "('(lnfmdyn_init) ',4a)" + !----------------------------------------------------------------------- + + namelist /light_streams/ & + stream_year_first_lightng, & + stream_year_last_lightng, & + model_year_align_lightng, & + lightngmapalgo, & + stream_fldFileName_lightng, & + lightng_tintalgo + + ! Default values for namelist + stream_year_first_lightng = 1 ! first year in stream to use + stream_year_last_lightng = 1 ! last year in stream to use + model_year_align_lightng = 1 ! align stream_year_first_lnfm with this model year + stream_fldFileName_lightng = ' ' + + ! 
Read light_streams namelist + if (masterproc) then + nu_nml = getavu() + open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) + call find_nlgroup_name(nu_nml, 'light_streams', status=nml_error) + if (nml_error == 0) then + read(nu_nml, nml=light_streams,iostat=nml_error) + if (nml_error /= 0) then + call endrun(msg='ERROR reading light_streams namelist'//errMsg(sourcefile, __LINE__)) + end if + end if + close(nu_nml) + call relavu( nu_nml ) + endif + + call shr_mpi_bcast(stream_year_first_lightng, mpicom) + call shr_mpi_bcast(stream_year_last_lightng, mpicom) + call shr_mpi_bcast(model_year_align_lightng, mpicom) + call shr_mpi_bcast(stream_fldFileName_lightng, mpicom) + call shr_mpi_bcast(lightng_tintalgo, mpicom) + + if (masterproc) then + write(iulog,*) ' ' + write(iulog,*) 'light_stream settings:' + write(iulog,*) ' stream_year_first_lightng = ',stream_year_first_lightng + write(iulog,*) ' stream_year_last_lightng = ',stream_year_last_lightng + write(iulog,*) ' model_year_align_lightng = ',model_year_align_lightng + write(iulog,*) ' stream_fldFileName_lightng = ',stream_fldFileName_lightng + write(iulog,*) ' lightng_tintalgo = ',lightng_tintalgo + write(iulog,*) ' ' + endif + + call clm_domain_mct (bounds, dom_clm) + + call shr_strdata_create(this%sdat_lnfm,name="clmlnfm", & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=stream_year_first_lightng, & + yearLast=stream_year_last_lightng, & + yearAlign=model_year_align_lightng, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_fldFileName_lightng), & + domTvarName='time', & + domXvarName='lon' , & + domYvarName='lat' , & + domAreaName='area', & + domMaskName='mask', & + filePath='', & + filename=(/trim(stream_fldFileName_lightng)/), & + fldListFile='lnfm', & + fldListModel='lnfm', & + fillalgo='none', & + tintalgo=lightng_tintalgo, & + mapalgo=lightngmapalgo, & + calendar=get_calendar(), & + taxmode='cycle' ) + + if (masterproc) then + call shr_strdata_print(this%sdat_lnfm,'Lightning data') + endif + + ! Add history fields + call hist_addfld1d (fname='LNFM', units='counts/km^2/hr', & + avgflag='A', long_name='Lightning frequency', & + ptr_lnd=this%forc_lnfm, default='inactive') + + end subroutine lnfm_init + + !----------------------------------------------------------------------- + subroutine lnfm_interp(this, bounds ) + ! + ! !DESCRIPTION: + ! Interpolate data stream information for Lightning. + ! + ! !USES: + use clm_time_manager, only : get_curr_date + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + ! + ! !LOCAL VARIABLES: + integer :: g, ig + integer :: year ! year (0, ...) for nstep+1 + integer :: mon ! month (1, ..., 12) for nstep+1 + integer :: day ! day of month (1, ..., 31) for nstep+1 + integer :: sec ! seconds into current date for nstep+1 + integer :: mcdate ! Current model date (yyyymmdd) + !----------------------------------------------------------------------- + + call get_curr_date(year, mon, day, sec) + mcdate = year*10000 + mon*100 + day + + call shr_strdata_advance(this%sdat_lnfm, mcdate, sec, mpicom, 'lnfmdyn') + + ig = 0 + do g = bounds%begg,bounds%endg + ig = ig+1 + this%forc_lnfm(g) = this%sdat_lnfm%avs(1)%rAttr(1,ig) + end do + + end subroutine lnfm_interp + + !----------------------------------------------------------------------- + subroutine surfdataread(this, bounds) + ! + ! 
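    ! (The three reads below share one idiom; condensed here as a sketch of
    !  the code that follows, not a verbatim excerpt: allocate a gridcell
    !  buffer, read it from fsurdat with ncd_io, abort if the field is
    !  missing, then copy it to columns through col%gridcell.
    !
    !    allocate(gdp(bounds%begg:bounds%endg))
    !    call ncd_io(ncid=ncid, varname='gdp', flag='read', data=gdp, &
    !                dim1name=grlnd, readvar=readvar)
    !    if (.not. readvar) call endrun(msg=' ERROR: gdp NOT on surfdata file')
    !    do c = bounds%begc, bounds%endc
    !       this%gdp_lf_col(c) = gdp(col%gridcell(c))
    !    end do
    !    deallocate(gdp)
    !
    !  The same steps repeat for 'peatf' and 'abm'.)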
!DESCRIPTION: + ! Read surface data set to populate relevant fire-related variables + ! + ! !USES: + use spmdMod , only : masterproc + use clm_varctl , only : nsrest, nsrStartup, fsurdat + use clm_varcon , only : grlnd + use ColumnType , only : col + use fileutils , only : getfil + use ncdio_pio + ! + ! !ARGUMENTS: + class(fire_base_type) :: this + type(bounds_type), intent(in) :: bounds + ! + ! !LOCAL VARIABLES: + integer :: g,c ! indices + type(file_desc_t) :: ncid ! netcdf id + logical :: readvar ! true => variable is on initial dataset + character(len=256) :: locfn ! local filename + real(r8), pointer :: gdp(:) ! global gdp data (needs to be a pointer for use in ncdio) + real(r8), pointer :: peatf(:) ! global peatf data (needs to be a pointer for use in ncdio) + integer, pointer :: abm(:) ! global abm data (needs to be a pointer for use in ncdio) + !----------------------------------------------------------------------- + + ! -------------------------------------------------------------------- + ! Open surface dataset + ! -------------------------------------------------------------------- + + call getfil (fsurdat, locfn, 0) + call ncd_pio_openfile (ncid, locfn, 0) + + ! -------------------------------------------------------------------- + ! Read in GDP data + ! -------------------------------------------------------------------- + + allocate(gdp(bounds%begg:bounds%endg)) + call ncd_io(ncid=ncid, varname='gdp', flag='read', data=gdp, dim1name=grlnd, readvar=readvar) + if (.not. readvar) then + call endrun(msg=' ERROR: gdp NOT on surfdata file'//errMsg(sourcefile, __LINE__)) + end if + do c = bounds%begc, bounds%endc + g = col%gridcell(c) + this%gdp_lf_col(c) = gdp(g) + end do + deallocate(gdp) + + ! -------------------------------------------------------------------- + ! Read in peatf data + ! -------------------------------------------------------------------- + + allocate(peatf(bounds%begg:bounds%endg)) + call ncd_io(ncid=ncid, varname='peatf', flag='read', data=peatf, dim1name=grlnd, readvar=readvar) + if (.not. readvar) then + call endrun(msg=' ERROR: peatf NOT on surfdata file'//errMsg(sourcefile, __LINE__)) + end if + do c = bounds%begc, bounds%endc + g = col%gridcell(c) + this%peatf_lf_col(c) = peatf(g) + end do + deallocate(peatf) + + ! -------------------------------------------------------------------- + ! Read in ABM data + ! -------------------------------------------------------------------- + + allocate(abm(bounds%begg:bounds%endg)) + call ncd_io(ncid=ncid, varname='abm', flag='read', data=abm, dim1name=grlnd, readvar=readvar) + if (.not. readvar) then + call endrun(msg=' ERROR: abm NOT on surfdata file'//errMsg(sourcefile, __LINE__)) + end if + do c = bounds%begc, bounds%endc + g = col%gridcell(c) + this%abm_lf_col(c) = abm(g) + end do + deallocate(abm) + + ! Close file + + call ncd_pio_closefile(ncid) + + if (masterproc) then + write(iulog,*) 'Successfully read fmax, soil color, sand and clay boundary data' + write(iulog,*) + endif + + end subroutine surfdataread + + +end module FireDataBaseType diff --git a/src/cpl/mct/SoilMoistureStreamMod.F90 b/src/cpl/mct/SoilMoistureStreamMod.F90 new file mode 100644 index 0000000000..8b366d6c8e --- /dev/null +++ b/src/cpl/mct/SoilMoistureStreamMod.F90 @@ -0,0 +1,418 @@ +module SoilMoistureStreamMod + + ! ********************************************************************** + ! --------------------------- IMPORTANT NOTE --------------------------- + ! + ! 
In cases using the NUOPC driver/mediator, we use a different version of this module, + ! based on CDEPS, which resides in src/cpl/nuopc/. Changes to the science here should + ! also be made in the similar file in src/cpl/nuopc. Once we start using CDEPS by + ! default, we can remove this version and move the CDEPS-based version into its place. + ! ********************************************************************** + +#include "shr_assert.h" + + !----------------------------------------------------------------------- + ! !DESCRIPTION: + ! Read in soil moisture from data stream + ! + ! !USES: + use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create + use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance + use shr_kind_mod , only : r8 => shr_kind_r8 + use shr_kind_mod , only : CL => shr_kind_CL, CXX => shr_kind_CXX + use shr_log_mod , only : errMsg => shr_log_errMsg + use decompMod , only : bounds_type, subgrid_level_column + use abortutils , only : endrun + use clm_varctl , only : iulog, use_soil_moisture_streams, inst_name + use clm_varcon , only : grlnd + use controlMod , only : NLFilename + use domainMod , only : ldomain + use LandunitType , only : lun + use ColumnType , only : col + use SoilStateType , only : soilstate_type + use WaterStateBulkType , only : waterstatebulk_type + use perf_mod , only : t_startf, t_stopf + use spmdMod , only : masterproc, mpicom, comp_id + use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo + use mct_mod + use ncdio_pio + ! + ! !PUBLIC TYPES: + implicit none + private + ! + ! !PUBLIC MEMBER FUNCTIONS: + public :: PrescribedSoilMoistureInit ! position datasets for soil moisture + public :: PrescribedSoilMoistureAdvance ! Advance the soil moisture stream (outside of Open-MP loops) + public :: PrescribedSoilMoistureInterp ! interpolates between two periods of soil moisture data + + ! !PRIVATE MEMBER DATA: + type(shr_strdata_type) :: sdat_soilm ! soil moisture input data stream + integer :: ism ! Soil moisture steram index + integer, allocatable :: g_to_ig(:) ! Array matching gridcell index to data index + logical :: soilm_ignore_data_if_missing ! If should ignore overridding a point with soil moisture data + ! from the streams file, if the streams file shows that point + ! as missing (namelist item) + ! + ! !PRIVATE TYPES: + + character(len=*), parameter, private :: sourcefile = & + __FILE__ + !----------------------------------------------------------------------- + +contains + + !----------------------------------------------------------------------- + ! + ! soil_moisture_init + ! + !----------------------------------------------------------------------- + subroutine PrescribedSoilMoistureInit(bounds) + ! + ! Initialize data stream information for soil moisture. + ! + ! + ! !USES: + use clm_time_manager , only : get_calendar + use ncdio_pio , only : pio_subsystem + use shr_pio_mod , only : shr_pio_getiotype + use clm_nlUtilsMod , only : find_nlgroup_name + use ndepStreamMod , only : clm_domain_mct + use shr_stream_mod , only : shr_stream_file_null + use shr_string_mod , only : shr_string_listCreateField + use clm_varpar , only : nlevsoi + ! + ! !ARGUMENTS: + implicit none + type(bounds_type), intent(in) :: bounds ! bounds + ! + ! !LOCAL VARIABLES: + integer :: i ! index + integer :: stream_year_first_soilm ! first year in Ustar stream to use + integer :: stream_year_last_soilm ! last year in Ustar stream to use + integer :: model_year_align_soilm ! align stream_year_first_soilm with + integer :: nu_nml ! 
unit for namelist file + integer :: nml_error ! namelist i/o error flag + integer :: soilm_offset ! Offset in time for dataset (sec) + type(mct_ggrid) :: dom_clm ! domain information + character(len=CL) :: stream_fldfilename_soilm ! ustar stream filename to read + character(len=CL) :: soilm_tintalgo = 'linear' ! Time interpolation alogrithm + + character(*), parameter :: subName = "('PrescribedSoilMoistureInit')" + character(*), parameter :: F00 = "('(PrescribedSoilMoistureInit) ',4a)" + character(*), parameter :: soilmString = "H2OSOI" ! base string for field string + character(CXX) :: fldList ! field string + !----------------------------------------------------------------------- + ! + ! deal with namelist variables here in init + ! + namelist /soil_moisture_streams/ & + stream_year_first_soilm, & + stream_year_last_soilm, & + model_year_align_soilm, & + soilm_tintalgo, & + soilm_offset, & + soilm_ignore_data_if_missing, & + stream_fldfilename_soilm + + ! Default values for namelist + stream_year_first_soilm = 1 ! first year in stream to use + stream_year_last_soilm = 1 ! last year in stream to use + model_year_align_soilm = 1 ! align stream_year_first_soilm with this model year + stream_fldfilename_soilm = shr_stream_file_null + soilm_offset = 0 + soilm_ignore_data_if_missing = .false. + + ! Read soilm_streams namelist + if (masterproc) then + open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) + call find_nlgroup_name(nu_nml, 'soil_moisture_streams', status=nml_error) + if (nml_error == 0) then + read(nu_nml, nml=soil_moisture_streams,iostat=nml_error) + if (nml_error /= 0) then + call endrun(subname // ':: ERROR reading soil_moisture_streams namelist') + end if + else + call endrun(subname // ':: ERROR finding soilm_streams namelist') + end if + close(nu_nml) + endif + + call shr_mpi_bcast(stream_year_first_soilm, mpicom) + call shr_mpi_bcast(stream_year_last_soilm, mpicom) + call shr_mpi_bcast(model_year_align_soilm, mpicom) + call shr_mpi_bcast(stream_fldfilename_soilm, mpicom) + call shr_mpi_bcast(soilm_tintalgo, mpicom) + call shr_mpi_bcast(soilm_offset, mpicom) + call shr_mpi_bcast(soilm_ignore_data_if_missing, mpicom) + + if (masterproc) then + + write(iulog,*) ' ' + write(iulog,*) 'soil_moisture_stream settings:' + write(iulog,*) ' stream_year_first_soilm = ',stream_year_first_soilm + write(iulog,*) ' stream_year_last_soilm = ',stream_year_last_soilm + write(iulog,*) ' model_year_align_soilm = ',model_year_align_soilm + write(iulog,*) ' stream_fldfilename_soilm = ',trim(stream_fldfilename_soilm) + write(iulog,*) ' soilm_tintalgo = ',trim(soilm_tintalgo) + write(iulog,*) ' soilm_offset = ',soilm_offset + if ( soilm_ignore_data_if_missing )then + write(iulog,*) ' Do NOT override a point with streams data if the streams data is missing' + else + write(iulog,*) ' Abort, if you find a model point where the input streams data is set to missing value' + end if + + endif + + call clm_domain_mct (bounds, dom_clm, nlevels=nlevsoi) + + ! 
create the field list for these fields...use in shr_strdata_create + fldList = trim(soilmString) + if (masterproc) write(iulog,*) 'fieldlist: ', trim(fldList) + + call shr_strdata_create(sdat_soilm,name="soil_moisture", & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsMap_lnd2Dsoi_gdc2glo, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + nzg=nlevsoi, & + yearFirst=stream_year_first_soilm, & + yearLast=stream_year_last_soilm, & + yearAlign=model_year_align_soilm, & + offset=soilm_offset, & + domFilePath='', & + domFileName=trim(stream_fldFileName_soilm), & + domTvarName='time', & + domXvarName='lon' , & + domYvarName='lat' , & + domZvarName='levsoi' , & + domAreaName='area', & + domMaskName='mask', & + filePath='', & + filename=(/stream_fldFileName_soilm/), & + fldListFile=fldList, & + fldListModel=fldList, & + fillalgo='none', & + mapalgo='none', & + tintalgo=soilm_tintalgo, & + calendar=get_calendar(), & + dtlimit = 15._r8, & + taxmode='cycle' ) + + if (masterproc) then + call shr_strdata_print(sdat_soilm,'soil moisture data') + endif + + end subroutine PrescribedSoilMoistureInit + + + !----------------------------------------------------------------------- + ! + ! PrescribedSoilMoistureAdvance + ! + !----------------------------------------------------------------------- + subroutine PrescribedSoilMoistureAdvance( bounds ) + ! + ! Advanace the prescribed soil moisture stream + ! + ! !USES: + use clm_time_manager, only : get_curr_date + ! + ! !ARGUMENTS: + type(bounds_type) , intent(in) :: bounds + ! + ! !LOCAL VARIABLES: + character(len=CL) :: stream_var_name + integer :: g, ig + integer :: ier ! error code + integer :: year ! year (0, ...) for nstep+1 + integer :: mon ! month (1, ..., 12) for nstep+1 + integer :: day ! day of month (1, ..., 31) for nstep+1 + integer :: sec ! seconds into current date for nstep+1 + integer :: mcdate ! Current model date (yyyymmdd) + + call get_curr_date(year, mon, day, sec) + mcdate = year*10000 + mon*100 + day + + stream_var_name = 'H2OSOI' + + ! Determine variable index + ism = mct_aVect_indexRA(sdat_soilm%avs(1),trim(stream_var_name)) + + call shr_strdata_advance(sdat_soilm, mcdate, sec, mpicom, trim(stream_var_name)) + + ! Map gridcell to AV index + ier = 0 + if ( .not. allocated(g_to_ig) )then + allocate (g_to_ig(bounds%begg:bounds%endg), stat=ier) + if (ier /= 0) then + write(iulog,*) 'Prescribed soil moisture allocation error' + call endrun(msg=errMsg(sourcefile, __LINE__)) + end if + + ig = 0 + do g = bounds%begg,bounds%endg + ig = ig+1 + g_to_ig(g) = ig + end do + end if + + end subroutine PrescribedSoilMoistureAdvance + + !----------------------------------------------------------------------- + ! + ! PrescribedSoilMoistureInterp + ! + !----------------------------------------------------------------------- + subroutine PrescribedSoilMoistureInterp(bounds, soilstate_inst, & + waterstatebulk_inst) + ! + ! Assign data stream information for prescribed soil moisture. + ! + ! !USES: + use clm_time_manager, only : get_curr_date + use clm_varpar , only : nlevsoi + use clm_varcon , only : denh2o, denice, watmin, spval + use landunit_varcon , only : istsoil, istcrop + ! + ! !ARGUMENTS: + implicit none + type(bounds_type) , intent(in) :: bounds + type(soilstate_type) , intent(in) :: soilstate_inst + type(waterstatebulk_type) , intent(inout) :: waterstatebulk_inst + ! + ! !LOCAL VARIABLES: + integer :: c, g, j, ig, n + real(r8) :: soilm_liq_frac ! 
liquid fraction of soil moisture + real(r8) :: soilm_ice_frac ! ice fraction of soil moisture + real(r8) :: moisture_increment ! soil moisture adjustment increment + real(r8) :: h2osoi_vol_initial ! initial vwc value + character(*), parameter :: subName = "('PrescribedSoilMoistureInterp')" + + !----------------------------------------------------------------------- + + SHR_ASSERT_FL( (lbound(sdat_soilm%avs(1)%rAttr,1) == ism ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(sdat_soilm%avs(1)%rAttr,1) == ism ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(g_to_ig,1) <= bounds%begg ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(g_to_ig,1) >= bounds%endg ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(sdat_soilm%avs(1)%rAttr,2) <= g_to_ig(bounds%begg) ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(sdat_soilm%avs(1)%rAttr,2) >= g_to_ig(bounds%endg)+(nlevsoi-1)*size(g_to_ig) ), sourcefile, __LINE__) + associate( & + dz => col%dz , & ! Input: [real(r8) (:,:) ] layer depth (m) + watsat => soilstate_inst%watsat_col , & ! Input: [real(r8) (:,:) ] volumetric soil water at saturation (porosity) + h2osoi_liq => waterstatebulk_inst%h2osoi_liq_col , & ! Input/Output: [real(r8) (:,:) ] liquid water (kg/m2) + h2osoi_ice => waterstatebulk_inst%h2osoi_ice_col , & ! Input/Output: [real(r8) (:,:) ] ice water (kg/m2) + h2osoi_vol => waterstatebulk_inst%h2osoi_vol_col , & ! Output: volumetric soil water (m3/m3) + h2osoi_vol_prs => waterstatebulk_inst%h2osoi_vol_prs_grc & ! Output: prescribed volumetric soil water (m3/m3) + ) + SHR_ASSERT_FL( (lbound(h2osoi_vol,1) <= bounds%begc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(h2osoi_vol,1) >= bounds%endc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(h2osoi_vol,2) == 1 ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(h2osoi_vol,2) >= nlevsoi ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(dz,1) <= bounds%begc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(dz,1) >= bounds%endc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(dz,2) <= 1 ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(dz,2) >= nlevsoi ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(watsat,1) <= bounds%begc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(watsat,1) >= bounds%endc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(watsat,2) <= 1 ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(watsat,2) >= nlevsoi ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(h2osoi_liq,1) <= bounds%begc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(h2osoi_liq,1) >= bounds%endc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(h2osoi_liq,2) <= 1 ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(h2osoi_liq,2) >= nlevsoi ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(h2osoi_ice,1) <= bounds%begc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(h2osoi_ice,1) >= bounds%endc ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(h2osoi_ice,2) <= 1 ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(h2osoi_ice,2) >= nlevsoi ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(h2osoi_vol_prs,1) <= bounds%begg ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(h2osoi_vol_prs,1) >= bounds%endg ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(h2osoi_vol_prs,2) == 1 ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(h2osoi_vol_prs,2) >= nlevsoi ), sourcefile, __LINE__) + ! + ! Set the prescribed soil moisture read from the file everywhere + ! 
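+    ! The stream stores this 2-d (gridcell x soil level) field flattened to one
+    ! dimension: level j of data column ig lives at n = ig + (j-1)*size(g_to_ig).
+    ! For example (illustrative numbers only): with 100 active gridcells, level
+    ! j=2 of data column ig=5 is element n = 5 + 1*100 = 105.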
+
+    do g = bounds%begg, bounds%endg
+       ig = g_to_ig(g)
+       do j = 1, nlevsoi
+
+          n = ig + (j-1)*size(g_to_ig)
+
+          h2osoi_vol_prs(g,j) = sdat_soilm%avs(1)%rAttr(ism,n)
+
+          ! If soil moisture is being interpolated in time and the result is
+          ! large, that probably means one of the two data points is missing (set to spval)
+          if ( h2osoi_vol_prs(g,j) > 10.0_r8 .and. (h2osoi_vol_prs(g,j) /= spval) )then
+             h2osoi_vol_prs(g,j) = spval
+          end if
+
+       end do
+    end do
+
+    do c = bounds%begc, bounds%endc
+       !
+       ! Set variable for each gridcell/column combination
+       !
+       g = col%gridcell(c)
+       ig = g_to_ig(g)
+
+       ! EBK Jan/2020, also check weights on gridcell (See https://github.com/ESCOMP/CTSM/issues/847)
+       ! Parentheses ensure the weight check applies to both soil and crop landunits
+       if ( ((lun%itype(col%landunit(c)) == istsoil) .or. (lun%itype(col%landunit(c)) == istcrop)) .and. &
+            (col%wtgcell(c) /= 0._r8) ) then
+          ! this is a 2d field (gridcell/nlevsoi)
+          do j = 1, nlevsoi
+
+             n = ig + (j-1)*size(g_to_ig)
+
+             ! if soil water is zero, liq/ice fractions cannot be calculated
+             if((h2osoi_liq(c, j) + h2osoi_ice(c, j)) > 0._r8) then
+
+                ! save original soil moisture value
+                h2osoi_vol_initial = h2osoi_vol(c,j)
+
+                ! Check whether the vegetated land mask from the dataset on the
+                ! file differs from the model's
+                if ( (h2osoi_vol_prs(g,j) == spval) .and. (h2osoi_vol_initial /= spval) )then
+                   if ( soilm_ignore_data_if_missing )then
+                      cycle
+                   else
+                      write(iulog,*) 'Input soil moisture dataset is not vegetated as expected: gridcell=', &
+                                     g, ' active = ', col%active(c)
+                      call endrun(subgrid_index=c, subgrid_level=subgrid_level_column, &
+                                  msg = subname // &
+                                  ' ERROR:: The input soil moisture stream is NOT vegetated for one of the land points' )
+                   end if
+                end if
+
+                ! update volumetric soil moisture from data prescribed from the file
+                h2osoi_vol(c,j) = h2osoi_vol_prs(g,j)
+
+                ! calculate liq/ice mass fractions
+                soilm_liq_frac = h2osoi_liq(c, j) /(h2osoi_liq(c, j) + h2osoi_ice(c, j))
+                soilm_ice_frac = h2osoi_ice(c, j) /(h2osoi_liq(c, j) + h2osoi_ice(c, j))
+
+                ! calculate moisture increment
+                moisture_increment = h2osoi_vol(c,j) - h2osoi_vol_initial
+                ! add limitation check
+                moisture_increment = min((watsat(c,j) - h2osoi_vol_initial),max(-(h2osoi_vol_initial-watmin),moisture_increment))
+
+                ! update liq/ice water mass due to (volumetric) moisture increment
+                h2osoi_liq(c,j) = h2osoi_liq(c,j) + (soilm_liq_frac * moisture_increment * dz(c, j) * denh2o)
+                h2osoi_ice(c,j) = h2osoi_ice(c,j) + (soilm_ice_frac * moisture_increment * dz(c, j) * denice)
+
+             else
+                call endrun(subgrid_index=c, subgrid_level=subgrid_level_column, &
+                            msg = subname // ':: ERROR h2osoi liquid plus ice is zero')
+             endif
+          enddo
+       endif
+    end do
+
+    end associate
+
+  end subroutine PrescribedSoilMoistureInterp
+
+end module SoilMoistureStreamMod
diff --git a/src/cpl/mct/UrbanTimeVarType.F90 b/src/cpl/mct/UrbanTimeVarType.F90
new file mode 100644
index 0000000000..805ac47fbf
--- /dev/null
+++ b/src/cpl/mct/UrbanTimeVarType.F90
@@ -0,0 +1,314 @@
+module UrbanTimeVarType
+
+  !------------------------------------------------------------------------------
+  ! !DESCRIPTION:
+  ! Urban Time Varying Data
+  !
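+  ! (The stream read here provides the tbuildmax_* fields: prescribed maximum
+  ! interior building temperatures for each urban density class; see
+  ! urbantv_init below.)
+  !
+  ! 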
!USES: + use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL + use shr_log_mod , only : errMsg => shr_log_errMsg + use abortutils , only : endrun + use decompMod , only : bounds_type, subgrid_level_landunit + use clm_varctl , only : iulog, inst_name + use landunit_varcon , only : isturb_MIN, isturb_MAX + use clm_varcon , only : spval + use LandunitType , only : lun + use GridcellType , only : grc + use mct_mod + use shr_strdata_mod , only : shr_strdata_type + ! + implicit none + save + private + ! + ! + + ! !PUBLIC TYPE + type, public :: urbantv_type + + real(r8), public, pointer :: t_building_max(:) ! lun maximum internal building air temperature (K) + type(shr_strdata_type) :: sdat_urbantv ! urban time varying input data stream + contains + + ! !PUBLIC MEMBER FUNCTIONS: + procedure, public :: Init ! Allocate and initialize urbantv + procedure, public :: urbantv_init ! Initialize urban time varying stream + procedure, public :: urbantv_interp ! Interpolate urban time varying stream + + end type urbantv_type + + !----------------------------------------------------------------------- + character(15), private :: stream_var_name(isturb_MIN:isturb_MAX) + + character(len=*), parameter, private :: sourcefile = & + __FILE__ + +contains + + !----------------------------------------------------------------------- + subroutine Init(this, bounds, NLFilename) + ! + ! Allocate module variables and data structures + ! + ! !USES: + use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) + use histFileMod , only : hist_addfld1d + ! + ! !ARGUMENTS: + class(urbantv_type) :: this + type(bounds_type) , intent(in) :: bounds + character(len=*) , intent(in) :: NLFilename ! Namelist filename + ! + ! !LOCAL VARIABLES: + integer :: begl, endl + !--------------------------------------------------------------------- + + begl = bounds%begl; endl = bounds%endl + + ! Allocate urbantv data structure + + allocate(this%t_building_max (begl:endl)) ; this%t_building_max (:) = nan + + call this%urbantv_init(bounds, NLFilename) + call this%urbantv_interp(bounds) + + ! Add history fields + call hist_addfld1d (fname='TBUILD_MAX', units='K', & + avgflag='A', long_name='prescribed maximum interior building temperature', & + ptr_lunit=this%t_building_max, default='inactive', set_nourb=spval, & + l2g_scale_type='unity') + + + end subroutine Init + + !----------------------------------------------------------------------- + + !----------------------------------------------------------------------- + subroutine urbantv_init(this, bounds, NLFilename) + ! + ! !DESCRIPTION: + ! Initialize data stream information for urban time varying data + ! + ! !USES: + use clm_time_manager , only : get_calendar + use ncdio_pio , only : pio_subsystem + use shr_pio_mod , only : shr_pio_getiotype + use clm_nlUtilsMod , only : find_nlgroup_name + use ndepStreamMod , only : clm_domain_mct + use spmdMod , only : masterproc, mpicom, comp_id + use fileutils , only : getavu, relavu + use shr_mpi_mod , only : shr_mpi_bcast + use shr_string_mod , only : shr_string_listAppend + use shr_strdata_mod , only : shr_strdata_create, shr_strdata_print + use domainMod , only : ldomain + use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) + use landunit_varcon , only : isturb_TBD, isturb_HD, isturb_MD + use lnd_set_decomp_and_domain , only : gsmap_global + ! + ! !ARGUMENTS: + implicit none + class(urbantv_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename ! Namelist filename + ! + ! 
!LOCAL VARIABLES:
+    integer            :: begl, endl                 ! landunits
+    integer            :: ifield                     ! field index
+    integer            :: stream_year_first_urbantv  ! first year in urban tv stream to use
+    integer            :: stream_year_last_urbantv   ! last year in urban tv stream to use
+    integer            :: model_year_align_urbantv   ! align stream_year_first_urbantv
+                                                     ! with this model year
+    integer            :: nu_nml                     ! unit for namelist file
+    integer            :: nml_error                  ! namelist i/o error flag
+    type(mct_ggrid)    :: dom_clm                    ! domain information
+    character(len=CL)  :: stream_fldFileName_urbantv ! urban tv streams filename
+    character(len=CL)  :: urbantvmapalgo = 'nn'      ! mapping algorithm for urban ac
+    character(len=CL)  :: urbantv_tintalgo = 'linear' ! time interpolation algorithm
+    character(len=CL)  :: fldList                    ! field string
+    character(*), parameter :: urbantvString = "tbuildmax_"  ! base string for field string
+    character(*), parameter :: subName = "('urbantv_init')"
+    character(*), parameter :: F00 = "('(urbantv_init) ',4a)"
+    !-----------------------------------------------------------------------
+    namelist /urbantv_streams/       &
+         stream_year_first_urbantv,  &
+         stream_year_last_urbantv,   &
+         model_year_align_urbantv,   &
+         urbantvmapalgo,             &
+         stream_fldFileName_urbantv, &
+         urbantv_tintalgo
+    !-----------------------------------------------------------------------
+
+    begl = bounds%begl; endl = bounds%endl
+
+    ! Default values for namelist
+    stream_year_first_urbantv  = 1      ! first year in stream to use
+    stream_year_last_urbantv   = 1      ! last year in stream to use
+    model_year_align_urbantv   = 1      ! align stream_year_first_urbantv with this model year
+    stream_fldFileName_urbantv = ' '
+
+    ! Read urbantv_streams namelist
+    if (masterproc) then
+       nu_nml = getavu()
+       open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error )
+       call find_nlgroup_name(nu_nml, 'urbantv_streams', status=nml_error)
+       if (nml_error == 0) then
+          read(nu_nml, nml=urbantv_streams,iostat=nml_error)
+          if (nml_error /= 0) then
+             call endrun(msg='ERROR reading urbantv_streams namelist'//errMsg(sourcefile, __LINE__))
+          end if
+       end if
+       close(nu_nml)
+       call relavu( nu_nml )
+    endif
+
+    call shr_mpi_bcast(stream_year_first_urbantv, mpicom)
+    call shr_mpi_bcast(stream_year_last_urbantv, mpicom)
+    call shr_mpi_bcast(model_year_align_urbantv, mpicom)
+    call shr_mpi_bcast(stream_fldFileName_urbantv, mpicom)
+    call shr_mpi_bcast(urbantv_tintalgo, mpicom)
+
+    if (masterproc) then
+       write(iulog,*) ' '
+       write(iulog,*) 'urbantv_streams settings:'
+       write(iulog,*) '  stream_year_first_urbantv  = ',stream_year_first_urbantv
+       write(iulog,*) '  stream_year_last_urbantv   = ',stream_year_last_urbantv
+       write(iulog,*) '  model_year_align_urbantv   = ',model_year_align_urbantv
+       write(iulog,*) '  stream_fldFileName_urbantv = ',stream_fldFileName_urbantv
+       write(iulog,*) '  urbantv_tintalgo = ',urbantv_tintalgo
+       write(iulog,*) ' '
+    endif
+
+    call clm_domain_mct (bounds, dom_clm)
+
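+    ! For reference, an urbantv_streams group might look like the following
+    ! (values illustrative only, not defaults):
+    !   &urbantv_streams
+    !     stream_year_first_urbantv = 2000
+    !     stream_year_last_urbantv  = 2000
+    !     model_year_align_urbantv  = 1
+    !     stream_fldFileName_urbantv = '/path/to/CLM50_tbuildmax.nc'
+    !     urbantv_tintalgo = 'linear'
+    !   /
+
+    ! 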
create the field list for these urbantv fields...use in shr_strdata_create + stream_var_name(:) = "NOT_SET" + stream_var_name(isturb_TBD) = urbantvString//"TBD" + stream_var_name(isturb_HD) = urbantvString//"HD" + stream_var_name(isturb_MD) = urbantvString//"MD" + fldList = "" + do ifield = isturb_MIN, isturb_MAX + call shr_string_listAppend( fldList, stream_var_name(ifield) ) + end do + + call shr_strdata_create(this%sdat_urbantv,name="clmurbantv", & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=stream_year_first_urbantv, & + yearLast=stream_year_last_urbantv, & + yearAlign=model_year_align_urbantv, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_fldFileName_urbantv), & + domTvarName='time', & + domXvarName='lon' , & + domYvarName='lat' , & + domAreaName='area', & + domMaskName='LANDMASK', & + filePath='', & + filename=(/trim(stream_fldFileName_urbantv)/) , & + fldListFile=fldList, & + fldListModel=fldList, & + fillalgo='none', & + mapalgo=urbantvmapalgo, & + calendar=get_calendar(), & + tintalgo=urbantv_tintalgo, & + taxmode='extend' ) + + if (masterproc) then + call shr_strdata_print(this%sdat_urbantv,'urban time varying data') + endif + + + end subroutine urbantv_init + + !----------------------------------------------------------------------- + subroutine urbantv_interp(this, bounds) + ! + ! !DESCRIPTION: + ! Interpolate data stream information for urban time varying data. + ! + ! !USES: + use clm_time_manager, only : get_curr_date + use spmdMod , only : mpicom + use shr_strdata_mod , only : shr_strdata_advance + use clm_instur , only : urban_valid + ! + ! !ARGUMENTS: + class(urbantv_type) :: this + type(bounds_type), intent(in) :: bounds + ! + ! !LOCAL VARIABLES: + logical :: found + integer :: l, glun, ig, g, ip + integer :: year ! year (0, ...) for nstep+1 + integer :: mon ! month (1, ..., 12) for nstep+1 + integer :: day ! day of month (1, ..., 31) for nstep+1 + integer :: sec ! seconds into current date for nstep+1 + integer :: mcdate ! Current model date (yyyymmdd) + integer :: lindx ! landunit index + integer :: gindx ! gridcell index + !----------------------------------------------------------------------- + + call get_curr_date(year, mon, day, sec) + mcdate = year*10000 + mon*100 + day + + call shr_strdata_advance(this%sdat_urbantv, mcdate, sec, mpicom, 'urbantvdyn') + + do l = bounds%begl,bounds%endl + if (lun%urbpoi(l)) then + glun = lun%gridcell(l) + ip = mct_aVect_indexRA(this%sdat_urbantv%avs(1),trim(stream_var_name(lun%itype(l)))) + ! + ! Determine vector index corresponding to glun + ! + ig = 0 + do g = bounds%begg,bounds%endg + ig = ig+1 + if (g == glun) exit + end do + + this%t_building_max(l) = this%sdat_urbantv%avs(1)%rAttr(ip,ig) + else + this%t_building_max(l) = spval + end if + end do + + found = .false. + do l = bounds%begl,bounds%endl + if (lun%urbpoi(l)) then + glun = lun%gridcell(l) + ! + ! Determine vector index corresponding to glun + ! + ig = 0 + do g = bounds%begg,bounds%endg + ig = ig+1 + if (g == glun) exit + end do + + if ( .not. urban_valid(g) .or. (this%t_building_max(l) <= 0._r8)) then + found = .true. 
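+             ! Remember the offending gridcell and landunit for the error message below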
+ gindx = g + lindx = l + exit + end if + end if + end do + if ( found ) then + write(iulog,*)'ERROR: no valid urban data for g= ',gindx + write(iulog,*)'landunit type: ',lun%itype(lindx) + write(iulog,*)'urban_valid: ',urban_valid(gindx) + write(iulog,*)'t_building_max: ',this%t_building_max(lindx) + call endrun(subgrid_index=lindx, subgrid_level=subgrid_level_landunit, & + msg=errmsg(sourcefile, __LINE__)) + end if + + + end subroutine urbantv_interp + + !----------------------------------------------------------------------- + +end module UrbanTimeVarType diff --git a/src/cpl/mct/ch4FInundatedStreamType.F90 b/src/cpl/mct/ch4FInundatedStreamType.F90 new file mode 100644 index 0000000000..3c26f4d109 --- /dev/null +++ b/src/cpl/mct/ch4FInundatedStreamType.F90 @@ -0,0 +1,389 @@ +module ch4FInundatedStreamType + +#include "shr_assert.h" + + !----------------------------------------------------------------------- + ! !DESCRIPTION: + ! Contains methods for reading in finundated streams file for methane code. + ! + ! !USES + use shr_kind_mod , only: r8 => shr_kind_r8, CL => shr_kind_cl + use spmdMod , only: mpicom, masterproc + use clm_varctl , only: iulog, inst_name + use abortutils , only: endrun + use decompMod , only: bounds_type + use ch4varcon , only: finundation_mtd + + ! !PUBLIC TYPES: + implicit none + private + save + + type, public :: ch4finundatedstream_type + real(r8), pointer, private :: zwt0_gdc (:) ! col coefficient for determining finundated (m) + real(r8), pointer, private :: f0_gdc (:) ! col maximum inundated fraction for a gridcell (for methane code) + real(r8), pointer, private :: p3_gdc (:) ! col coefficient for determining finundated (m) + real(r8), pointer, private :: fws_slope_gdc (:) ! col slope in fws = slope * tws + intercept (A coefficient) + real(r8), pointer, private :: fws_intercept_gdc (:) ! col slope in fws = slope * tws + intercept (B coefficient) + contains + + ! !PUBLIC MEMBER FUNCTIONS: + procedure, public :: Init ! Initialize and read data in + procedure, public :: CalcFinundated ! Calculate finundated based on input streams + procedure, public :: UseStreams ! If streams will be used + + ! !PRIVATE MEMBER FUNCTIONS: + procedure, private :: InitAllocate ! Allocate data + + end type ch4finundatedstream_type + + + ! ! PRIVATE DATA: + + type, private :: streamcontrol_type + character(len=CL) :: stream_fldFileName_ch4finundated ! Filename + character(len=CL) :: ch4finundatedmapalgo ! map algo + character(len=CL) :: fldList ! List of fields to read + contains + procedure, private :: ReadNML ! Read in namelist + end type streamcontrol_type + + type(streamcontrol_type), private :: control ! Stream control data + + character(len=*), parameter, private :: sourcefile = & + __FILE__ + !============================================================================== + +contains + + !============================================================================== + + subroutine Init(this, bounds, NLFilename) + ! + ! Initialize the ch4 finundated stream object + ! + ! 
Uses: + use clm_time_manager , only : get_calendar, get_curr_date + use ncdio_pio , only : pio_subsystem + use shr_pio_mod , only : shr_pio_getiotype + use shr_nl_mod , only : shr_nl_find_group_name + use shr_mpi_mod , only : shr_mpi_bcast + use ndepStreamMod , only : clm_domain_mct + use domainMod , only : ldomain + use decompMod , only : bounds_type + use mct_mod , only : mct_ggrid, mct_avect_indexra + use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create + use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance + use spmdMod , only : comp_id, iam + use ch4varcon , only : finundation_mtd_h2osfc + use ch4varcon , only : finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion + use lnd_set_decomp_and_domain , only : gsmap_global + ! + ! arguments + implicit none + class(ch4finundatedstream_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename ! Namelist filename + ! + ! local variables + integer :: ig, g ! Indices + type(mct_ggrid) :: dom_clm ! domain information + type(shr_strdata_type) :: sdat ! input data stream + integer :: index_ZWT0 = 0 ! Index of ZWT0 field + integer :: index_F0 = 0 ! Index of F0 field + integer :: index_P3 = 0 ! Index of P3 field + integer :: index_FWS_TWS_A = 0 ! Index of FWS_TWS_A field + integer :: index_FWS_TWS_B = 0 ! Index of FWS_TWS_B field + integer :: year ! year (0, ...) for nstep+1 + integer :: mon ! month (1, ..., 12) for nstep+1 + integer :: day ! day of month (1, ..., 31) for nstep+1 + integer :: sec ! seconds into current date for nstep+1 + integer :: mcdate ! Current model date (yyyymmdd) + character(len=*), parameter :: stream_name = 'ch4finundated' + character(*), parameter :: subName = "('ch4finundatedstream::Init')" + !----------------------------------------------------------------------- + if ( finundation_mtd /= finundation_mtd_h2osfc )then + call this%InitAllocate( bounds ) + call control%ReadNML( bounds, NLFileName ) + + if ( this%useStreams() )then + call clm_domain_mct (bounds, dom_clm) + + call shr_strdata_create(sdat,name=stream_name, & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=1996, & + yearLast=1996, & + yearAlign=1, & + offset=0, & + domFilePath='', & + domFileName=trim(control%stream_fldFileName_ch4finundated), & + domTvarName='time', & + domXvarName='LONGXY' , & + domYvarName='LATIXY' , & + domAreaName='AREA', & + domMaskName='LANDMASK', & + filePath='', & + filename=(/trim(control%stream_fldFileName_ch4finundated)/), & + fldListFile=control%fldList, & + fldListModel=control%fldList, & + fillalgo='none', & + mapalgo=control%ch4finundatedmapalgo, & + calendar=get_calendar(), & + taxmode='extend' ) + + if (masterproc) then + call shr_strdata_print(sdat,'CLM '//stream_name//' data') + endif + + if( finundation_mtd == finundation_mtd_ZWT_inversion )then + index_ZWT0 = mct_avect_indexra(sdat%avs(1),'ZWT0') + index_F0 = mct_avect_indexra(sdat%avs(1),'F0' ) + index_P3 = mct_avect_indexra(sdat%avs(1),'P3' ) + else if( finundation_mtd == finundation_mtd_TWS_inversion )then + index_FWS_TWS_A = mct_avect_indexra(sdat%avs(1),'FWS_TWS_A') + index_FWS_TWS_B = mct_avect_indexra(sdat%avs(1),'FWS_TWS_B') + end if + + + ! Explicitly set current date to a hardcoded constant value. Otherwise + ! using the real date can cause roundoff differences that are + ! detrected as issues with exact restart. 
EBK M05/20/2017
+          !call get_curr_date(year, mon, day, sec)
+          year = 1996
+          mon  = 12
+          day  = 31
+          sec  = 0
+          mcdate = year*10000 + mon*100 + day
+
+          call shr_strdata_advance(sdat, mcdate, sec, mpicom, 'ch4finundated')
+
+          ! Get the data
+          ig = 0
+          do g = bounds%begg,bounds%endg
+             ig = ig+1
+             if ( index_ZWT0 > 0 )then
+                this%zwt0_gdc(g) = sdat%avs(1)%rAttr(index_ZWT0,ig)
+             end if
+             if ( index_F0 > 0 )then
+                this%f0_gdc(g) = sdat%avs(1)%rAttr(index_F0,ig)
+             end if
+             if ( index_P3 > 0 )then
+                this%p3_gdc(g) = sdat%avs(1)%rAttr(index_P3,ig)
+             end if
+             if ( index_FWS_TWS_A > 0 )then
+                this%fws_slope_gdc(g) = sdat%avs(1)%rAttr(index_FWS_TWS_A,ig)
+             end if
+             if ( index_FWS_TWS_B > 0 )then
+                this%fws_intercept_gdc(g) = sdat%avs(1)%rAttr(index_FWS_TWS_B,ig)
+             end if
+          end do
+       end if
+    end if
+
+  end subroutine Init
+
+  !-----------------------------------------------------------------------
+  logical function UseStreams(this)
+    !
+    ! !DESCRIPTION:
+    ! Return true if the streams file will be used (i.e., a stream filename was provided)
+    !
+    ! !USES:
+    !
+    ! !ARGUMENTS:
+    implicit none
+    class(ch4finundatedstream_type) :: this
+    !
+    ! !LOCAL VARIABLES:
+    if ( trim(control%stream_fldFileName_ch4finundated) == '' )then
+       UseStreams = .false.
+    else
+       UseStreams = .true.
+    end if
+  end function UseStreams
+
+  !-----------------------------------------------------------------------
+  subroutine InitAllocate(this, bounds)
+    !
+    ! !DESCRIPTION:
+    ! Allocate module variables and data structures
+    !
+    ! !USES:
+    use shr_infnan_mod, only: nan => shr_infnan_nan, assignment(=)
+    use ch4varcon     , only: finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion
+    !
+    ! !ARGUMENTS:
+    implicit none
+    class(ch4finundatedstream_type) :: this
+    type(bounds_type), intent(in) :: bounds
+    !
+    ! !LOCAL VARIABLES:
+    integer :: begc, endc
+    integer :: begg, endg
+    !---------------------------------------------------------------------
+
+    begc = bounds%begc; endc = bounds%endc
+    begg = bounds%begg; endg = bounds%endg
+
+    if( finundation_mtd == finundation_mtd_ZWT_inversion )then
+       allocate(this%zwt0_gdc          (begg:endg)) ; this%zwt0_gdc          (:) = nan
+       allocate(this%f0_gdc            (begg:endg)) ; this%f0_gdc            (:) = nan
+       allocate(this%p3_gdc            (begg:endg)) ; this%p3_gdc            (:) = nan
+    else if( finundation_mtd == finundation_mtd_TWS_inversion )then
+       allocate(this%fws_slope_gdc    (begg:endg)) ; this%fws_slope_gdc    (:) = nan
+       allocate(this%fws_intercept_gdc(begg:endg)) ; this%fws_intercept_gdc(:) = nan
+    end if
+
+  end subroutine InitAllocate
+
+  !-----------------------------------------------------------------------
+  subroutine CalcFinundated(this, bounds, num_soilc, filter_soilc, soilhydrology_inst, &
+       waterdiagnosticbulk_inst, qflx_surf_lag_col, finundated )
+    !
+    ! !DESCRIPTION:
+    !
+    ! Calculate finundated according to the appropriate methodology
+    !
+    ! !USES:
+    use ColumnType       , only : col
+    use ch4varcon        , only : finundation_mtd_h2osfc, finundation_mtd_ZWT_inversion
+    use ch4varcon        , only : finundation_mtd_TWS_inversion
+    use clm_varpar       , only : nlevsoi
+    use SoilHydrologyType, only : soilhydrology_type
+    use WaterDiagnosticBulkType , only : waterdiagnosticbulk_type
+    !
+    ! !ARGUMENTS:
+    implicit none
+    class(ch4finundatedstream_type)             :: this
+    type(bounds_type)              , intent(in) :: bounds
+    integer                        , intent(in) :: num_soilc        ! number of column soil points in column filter
+    integer                        , intent(in) :: filter_soilc(:)  ! 
column filter for soil points + type(soilhydrology_type) , intent(in) :: soilhydrology_inst + type(waterdiagnosticbulk_type) , intent(in) :: waterdiagnosticbulk_inst + real(r8) , intent(in) :: qflx_surf_lag_col(bounds%begc:) !time-lagged surface runoff (mm H2O /s) + real(r8) , intent(inout) :: finundated(bounds%begc:) ! fractional inundated area in soil column (excluding dedicated wetland columns) + ! + ! !LOCAL VARIABLES: + integer :: g, c, fc ! Indices + real(r8) :: zwt_actual ! Total water storage (ZWT) to use either perched or total depending on conditions + + SHR_ASSERT_ALL_FL((ubound(qflx_surf_lag_col) == (/bounds%endc/)), sourcefile, __LINE__) + SHR_ASSERT_ALL_FL((ubound(finundated) == (/bounds%endc/)), sourcefile, __LINE__) + + associate( & + z => col%z , & ! Input: [real(r8) (:,:) ] layer depth (m) (-nlevsno+1:nlevsoi) + zwt => soilhydrology_inst%zwt_col , & ! Input: [real(r8) (:) ] water table depth (m) + zwt_perched => soilhydrology_inst%zwt_perched_col , & ! Input: [real(r8) (:) ] perched water table depth (m) + tws => waterdiagnosticbulk_inst%tws_grc , & ! Input: [real(r8) (:) ] total water storage (kg m-2) + frac_h2osfc => waterdiagnosticbulk_inst%frac_h2osfc_col & ! Input: [real(r8) (:) ] fraction of ground covered by surface water (0 to 1) + ) + + ! Calculate finundated + do fc = 1, num_soilc + c = filter_soilc(fc) + g = col%gridcell(c) + select case( finundation_mtd ) + case ( finundation_mtd_h2osfc ) + finundated(c) = frac_h2osfc(c) + case ( finundation_mtd_ZWT_inversion ) + if (this%zwt0_gdc(g) > 0._r8) then + if (zwt_perched(c) < z(c,nlevsoi)-1.e-5_r8 .and. zwt_perched(c) < zwt(c)) then + zwt_actual = zwt_perched(c) + else + zwt_actual = zwt(c) + end if + finundated(c) = this%f0_gdc(g) * exp(-zwt_actual/this%zwt0_gdc(g)) + this%p3_gdc(g)*qflx_surf_lag_col(c) + else + finundated(c) = this%p3_gdc(g)*qflx_surf_lag_col(c) + end if + case ( finundation_mtd_TWS_inversion ) + finundated(c) = this%fws_slope_gdc(g) * tws(g) + this%fws_intercept_gdc(g) + end select + finundated(c) = min( 1.0_r8, max( 0.0_r8, finundated(c) ) ) + end do + end associate + + end subroutine CalcFinundated + !============================================================================== + + subroutine ReadNML(this, bounds, NLFilename) + ! + ! Read the namelist data stream information. + ! + ! Uses: + use clm_time_manager , only : get_calendar + use ncdio_pio , only : pio_subsystem + use shr_pio_mod , only : shr_pio_getiotype + use shr_nl_mod , only : shr_nl_find_group_name + use shr_log_mod , only : errMsg => shr_log_errMsg + use shr_mpi_mod , only : shr_mpi_bcast + use fileutils , only : getavu, relavu + use ch4varcon , only : finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion + ! + ! arguments + implicit none + class(streamcontrol_type) :: this + type(bounds_type), intent(in) :: bounds + character(len=*), intent(in) :: NLFilename ! Namelist filename + ! + ! local variables + integer :: nu_nml ! unit for namelist file + integer :: nml_error ! namelist i/o error flag + character(len=CL) :: stream_fldFileName_ch4finundated = ' ' + character(len=CL) :: ch4finundatedmapalgo = 'bilinear' + character(len=*), parameter :: namelist_name = 'ch4finundated' ! 
MUST agree with name in namelist and read + character(len=*), parameter :: shr_strdata_unset = 'NOT_SET' + character(len=*), parameter :: subName = "('ch4finundated::ReadNML')" + character(len=*), parameter :: F00 = "('(ch4finundated_readnml) ',4a)" + !----------------------------------------------------------------------- + + namelist /ch4finundated/ & ! MUST agree with namelist_name above + ch4finundatedmapalgo, stream_fldFileName_ch4finundated + + ! Default values for namelist + + ! Read ch4finundated namelist + if (masterproc) then + nu_nml = getavu() + open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) + call shr_nl_find_group_name(nu_nml, namelist_name, status=nml_error) + if (nml_error == 0) then + read(nu_nml, nml=ch4finundated,iostat=nml_error) ! MUST agree with namelist_name above + if (nml_error /= 0) then + call endrun(msg=' ERROR reading '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) + end if + else + call endrun(msg=' ERROR finding '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) + end if + close(nu_nml) + call relavu( nu_nml ) + endif + + call shr_mpi_bcast(stream_fldFileName_ch4finundated, mpicom) + call shr_mpi_bcast(ch4finundatedmapalgo , mpicom) + + if (masterproc) then + write(iulog,*) ' ' + write(iulog,*) namelist_name, ' stream settings:' + write(iulog,*) ' stream_fldFileName_ch4finundated = ',stream_fldFileName_ch4finundated + write(iulog,*) ' ch4finundatedmapalgo = ',ch4finundatedmapalgo + write(iulog,*) ' ' + endif + this%stream_fldFileName_ch4finundated = stream_fldFileName_ch4finundated + this%ch4finundatedmapalgo = ch4finundatedmapalgo + if ( finundation_mtd == finundation_mtd_ZWT_inversion )then + this%fldList = "ZWT0:F0:P3" + else if ( finundation_mtd == finundation_mtd_TWS_inversion )then + this%fldList = "FWS_TWS_A:FWS_TWS_B" + else + call endrun(msg=' ERROR do NOT know what list of variables to read for this finundation_mtd type'// & + errMsg(sourcefile, __LINE__)) + end if + + end subroutine ReadNML + +end module ch4FInundatedStreamType diff --git a/src/cpl/mct/clm_cpl_indices.F90 b/src/cpl/mct/clm_cpl_indices.F90 new file mode 100644 index 0000000000..09ed89e92d --- /dev/null +++ b/src/cpl/mct/clm_cpl_indices.F90 @@ -0,0 +1,330 @@ +module clm_cpl_indices + !----------------------------------------------------------------------- + ! !DESCRIPTION: + ! Module containing the indices for the fields passed between CLM and + ! the driver. Includes the River Transport Model fields (RTM) and the + ! fields needed by the land-ice component (sno). + ! + ! !USES: + + use shr_sys_mod, only : shr_sys_abort + implicit none + + SAVE + private ! By default make data private + ! + ! !PUBLIC MEMBER FUNCTIONS: + public :: clm_cpl_indices_set ! Set the coupler indices + ! + ! !PUBLIC DATA MEMBERS: + ! + integer , public :: glc_nec ! number of elevation classes for glacier_mec landunits + ! (from coupler) - must equal maxpatch_glc from namelist + + ! lnd -> drv (required) + + integer, public ::index_l2x_Flrl_rofsur ! lnd->rtm input liquid surface fluxes + integer, public ::index_l2x_Flrl_rofgwl ! lnd->rtm input liquid gwl fluxes + integer, public ::index_l2x_Flrl_rofsub ! lnd->rtm input liquid subsurface fluxes + integer, public ::index_l2x_Flrl_rofi ! lnd->rtm input frozen fluxes + integer, public ::index_l2x_Flrl_irrig ! irrigation withdrawal + + integer, public ::index_l2x_Sl_t ! temperature + integer, public ::index_l2x_Sl_tref ! 2m reference temperature + integer, public ::index_l2x_Sl_qref ! 
2m reference specific humidity + integer, public ::index_l2x_Sl_avsdr ! albedo: direct , visible + integer, public ::index_l2x_Sl_anidr ! albedo: direct , near-ir + integer, public ::index_l2x_Sl_avsdf ! albedo: diffuse, visible + integer, public ::index_l2x_Sl_anidf ! albedo: diffuse, near-ir + integer, public ::index_l2x_Sl_snowh ! snow height + integer, public ::index_l2x_Sl_u10 ! 10m wind + integer, public ::index_l2x_Sl_ddvel ! dry deposition velocities (optional) + integer, public ::index_l2x_Sl_fv ! friction velocity + integer, public ::index_l2x_Sl_ram1 ! aerodynamical resistance + integer, public ::index_l2x_Sl_soilw ! volumetric soil water + integer, public ::index_l2x_Fall_taux ! wind stress, zonal + integer, public ::index_l2x_Fall_tauy ! wind stress, meridional + integer, public ::index_l2x_Fall_lat ! latent heat flux + integer, public ::index_l2x_Fall_sen ! sensible heat flux + integer, public ::index_l2x_Fall_lwup ! upward longwave heat flux + integer, public ::index_l2x_Fall_evap ! evaporation water flux + integer, public ::index_l2x_Fall_swnet ! heat flux shortwave net + integer, public ::index_l2x_Fall_fco2_lnd ! co2 flux **For testing set to 0 + integer, public ::index_l2x_Fall_flxdst1 ! dust flux size bin 1 + integer, public ::index_l2x_Fall_flxdst2 ! dust flux size bin 2 + integer, public ::index_l2x_Fall_flxdst3 ! dust flux size bin 3 + integer, public ::index_l2x_Fall_flxdst4 ! dust flux size bin 4 + integer, public ::index_l2x_Fall_flxvoc ! MEGAN fluxes + integer, public ::index_l2x_Fall_flxfire ! Fire fluxes + integer, public ::index_l2x_Sl_ztopfire ! Top of fire emissions (m) + + ! In the following, index 0 is bare land, other indices are glc elevation classes + integer, allocatable, public ::index_l2x_Sl_tsrf(:) ! glc MEC temperature + integer, allocatable, public ::index_l2x_Sl_topo(:) ! glc MEC topo height + integer, allocatable, public ::index_l2x_Flgl_qice(:) ! glc MEC ice flux + + integer, public ::index_x2l_Sa_methane + integer, public ::index_l2x_Fall_methane + + integer, public :: nflds_l2x = 0 + + ! drv -> lnd (required) + + integer, public ::index_x2l_Sa_z ! bottom atm level height + integer, public ::index_x2l_Sa_topo ! atm surface height (m) + integer, public ::index_x2l_Sa_u ! bottom atm level zon wind + integer, public ::index_x2l_Sa_v ! bottom atm level mer wind + integer, public ::index_x2l_Sa_ptem ! bottom atm level pot temp + integer, public ::index_x2l_Sa_shum ! bottom atm level spec hum + integer, public ::index_x2l_Sa_pbot ! bottom atm level pressure + integer, public ::index_x2l_Sa_tbot ! bottom atm level temp + integer, public ::index_x2l_Faxa_lwdn ! downward lw heat flux + integer, public ::index_x2l_Faxa_rainc ! prec: liquid "convective" + integer, public ::index_x2l_Faxa_rainl ! prec: liquid "large scale" + integer, public ::index_x2l_Faxa_snowc ! prec: frozen "convective" + integer, public ::index_x2l_Faxa_snowl ! prec: frozen "large scale" + integer, public ::index_x2l_Faxa_swndr ! sw: nir direct downward + integer, public ::index_x2l_Faxa_swvdr ! sw: vis direct downward + integer, public ::index_x2l_Faxa_swndf ! sw: nir diffuse downward + integer, public ::index_x2l_Faxa_swvdf ! sw: vis diffuse downward + integer, public ::index_x2l_Sa_co2prog ! bottom atm level prognostic co2 + integer, public ::index_x2l_Sa_co2diag ! bottom atm level diagnostic co2 + integer, public ::index_x2l_Faxa_bcphidry ! flux: Black Carbon hydrophilic dry deposition + integer, public ::index_x2l_Faxa_bcphodry ! 
flux: Black Carbon hydrophobic dry deposition
+  integer, public ::index_x2l_Faxa_bcphiwet  ! flux: Black Carbon hydrophilic wet deposition
+  integer, public ::index_x2l_Faxa_ocphidry  ! flux: Organic Carbon hydrophilic dry deposition
+  integer, public ::index_x2l_Faxa_ocphodry  ! flux: Organic Carbon hydrophobic dry deposition
+  integer, public ::index_x2l_Faxa_ocphiwet  ! flux: Organic Carbon hydrophilic wet deposition
+  integer, public ::index_x2l_Faxa_dstwet1   ! flux: Size 1 dust -- wet deposition
+  integer, public ::index_x2l_Faxa_dstwet2   ! flux: Size 2 dust -- wet deposition
+  integer, public ::index_x2l_Faxa_dstwet3   ! flux: Size 3 dust -- wet deposition
+  integer, public ::index_x2l_Faxa_dstwet4   ! flux: Size 4 dust -- wet deposition
+  integer, public ::index_x2l_Faxa_dstdry1   ! flux: Size 1 dust -- dry deposition
+  integer, public ::index_x2l_Faxa_dstdry2   ! flux: Size 2 dust -- dry deposition
+  integer, public ::index_x2l_Faxa_dstdry3   ! flux: Size 3 dust -- dry deposition
+  integer, public ::index_x2l_Faxa_dstdry4   ! flux: Size 4 dust -- dry deposition
+
+  integer, public ::index_x2l_Faxa_nhx       ! flux nhx from atm
+  integer, public ::index_x2l_Faxa_noy       ! flux noy from atm
+
+  integer, public ::index_x2l_Flrr_flood     ! rtm->lnd rof flood flux
+  integer, public ::index_x2l_Flrr_volr      ! rtm->lnd rof volr total volume
+  integer, public ::index_x2l_Flrr_volrmch   ! rtm->lnd rof volr main channel volume
+
+  ! In the following, index 0 is bare land, other indices are glc elevation classes
+  integer, allocatable, public ::index_x2l_Sg_ice_covered(:) ! Fraction of glacier from glc model
+  integer, allocatable, public ::index_x2l_Sg_topo(:)        ! Topo height from glc model
+  integer, allocatable, public ::index_x2l_Flgg_hflx(:)      ! Heat flux from glc model
+
+  integer, public ::index_x2l_Sg_icemask
+  integer, public ::index_x2l_Sg_icemask_coupled_fluxes
+
+  integer, public :: nflds_x2l = 0
+
+  !-----------------------------------------------------------------------
+
+contains
+
+  !-----------------------------------------------------------------------
+  subroutine clm_cpl_indices_set( )
+    !
+    ! !DESCRIPTION:
+    ! Set the coupler indices needed by the land model coupler
+    ! interface.
+    !
+    ! !USES:
+    use seq_flds_mod     , only: seq_flds_x2l_fields, seq_flds_l2x_fields
+    use mct_mod          , only: mct_aVect, mct_aVect_init, mct_avect_indexra
+    use mct_mod          , only: mct_aVect_clean, mct_avect_nRattr
+    use shr_drydep_mod   , only: drydep_fields_token, n_drydep
+    use shr_megan_mod    , only: shr_megan_fields_token, shr_megan_mechcomps_n
+    use shr_fire_emis_mod,only: shr_fire_emis_fields_token, shr_fire_emis_ztop_token, shr_fire_emis_mechcomps_n
+    use clm_varctl       , only: ndep_from_cpl
+    use glc_elevclass_mod, only: glc_get_num_elevation_classes, glc_elevclass_as_string
+    !
+    ! !ARGUMENTS:
+    implicit none
+    !
+    ! !REVISION HISTORY:
+    ! Author: Mariana Vertenstein
+    ! 01/2011, Erik Kluzek:         Added protex headers
+    !
+    ! !LOCAL VARIABLES:
+    type(mct_aVect)   :: l2x      ! temporary, land to coupler
+    type(mct_aVect)   :: x2l      ! temporary, coupler to land
+    integer           :: num
+    character(len=:), allocatable :: nec_str  ! string version of glc elev. class number
+    character(len=64) :: name
+    character(len=32) :: subname = 'clm_cpl_indices_set'  ! subroutine name
+    !-----------------------------------------------------------------------
+
+    ! Determine attribute vector indices
+
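+    ! Note: the lookups below that pass perrwith='quiet' do not abort when the
+    ! field is absent from the coupler field list; the optional indices are then
+    ! left non-positive, which is what the '> 0' guards (e.g. for Faxa_nhx and
+    ! Faxa_noy) rely on.
+
+    ! 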
create temporary attribute vectors + call mct_aVect_init(x2l, rList=seq_flds_x2l_fields, lsize=1) + nflds_x2l = mct_avect_nRattr(x2l) + + call mct_aVect_init(l2x, rList=seq_flds_l2x_fields, lsize=1) + nflds_l2x = mct_avect_nRattr(l2x) + + !------------------------------------------------------------- + ! clm -> drv + !------------------------------------------------------------- + + index_l2x_Flrl_rofsur = mct_avect_indexra(l2x,'Flrl_rofsur') + index_l2x_Flrl_rofgwl = mct_avect_indexra(l2x,'Flrl_rofgwl') + index_l2x_Flrl_rofsub = mct_avect_indexra(l2x,'Flrl_rofsub') + index_l2x_Flrl_rofi = mct_avect_indexra(l2x,'Flrl_rofi') + index_l2x_Flrl_irrig = mct_avect_indexra(l2x,'Flrl_irrig') + + index_l2x_Sl_t = mct_avect_indexra(l2x,'Sl_t') + index_l2x_Sl_snowh = mct_avect_indexra(l2x,'Sl_snowh') + index_l2x_Sl_avsdr = mct_avect_indexra(l2x,'Sl_avsdr') + index_l2x_Sl_anidr = mct_avect_indexra(l2x,'Sl_anidr') + index_l2x_Sl_avsdf = mct_avect_indexra(l2x,'Sl_avsdf') + index_l2x_Sl_anidf = mct_avect_indexra(l2x,'Sl_anidf') + index_l2x_Sl_tref = mct_avect_indexra(l2x,'Sl_tref') + index_l2x_Sl_qref = mct_avect_indexra(l2x,'Sl_qref') + index_l2x_Sl_u10 = mct_avect_indexra(l2x,'Sl_u10') + index_l2x_Sl_ram1 = mct_avect_indexra(l2x,'Sl_ram1') + index_l2x_Sl_fv = mct_avect_indexra(l2x,'Sl_fv') + index_l2x_Sl_soilw = mct_avect_indexra(l2x,'Sl_soilw',perrwith='quiet') + + if ( n_drydep>0 )then + index_l2x_Sl_ddvel = mct_avect_indexra(l2x, trim(drydep_fields_token)) + else + index_l2x_Sl_ddvel = 0 + end if + + index_l2x_Fall_taux = mct_avect_indexra(l2x,'Fall_taux') + index_l2x_Fall_tauy = mct_avect_indexra(l2x,'Fall_tauy') + index_l2x_Fall_lat = mct_avect_indexra(l2x,'Fall_lat') + index_l2x_Fall_sen = mct_avect_indexra(l2x,'Fall_sen') + index_l2x_Fall_lwup = mct_avect_indexra(l2x,'Fall_lwup') + index_l2x_Fall_evap = mct_avect_indexra(l2x,'Fall_evap') + index_l2x_Fall_swnet = mct_avect_indexra(l2x,'Fall_swnet') + index_l2x_Fall_flxdst1 = mct_avect_indexra(l2x,'Fall_flxdst1') + index_l2x_Fall_flxdst2 = mct_avect_indexra(l2x,'Fall_flxdst2') + index_l2x_Fall_flxdst3 = mct_avect_indexra(l2x,'Fall_flxdst3') + index_l2x_Fall_flxdst4 = mct_avect_indexra(l2x,'Fall_flxdst4') + + index_l2x_Fall_fco2_lnd = mct_avect_indexra(l2x,'Fall_fco2_lnd',perrwith='quiet') + + index_l2x_Fall_methane = mct_avect_indexra(l2x,'Fall_methane',perrWith='quiet') + + ! MEGAN fluxes + if (shr_megan_mechcomps_n>0) then + index_l2x_Fall_flxvoc = mct_avect_indexra(l2x,trim(shr_megan_fields_token)) + else + index_l2x_Fall_flxvoc = 0 + endif + + ! Fire fluxes + if (shr_fire_emis_mechcomps_n>0) then + index_l2x_Fall_flxfire = mct_avect_indexra(l2x,trim(shr_fire_emis_fields_token)) + index_l2x_Sl_ztopfire = mct_avect_indexra(l2x,trim(shr_fire_emis_ztop_token)) + else + index_l2x_Fall_flxfire = 0 + index_l2x_Sl_ztopfire = 0 + endif + + !------------------------------------------------------------- + ! 
drv -> clm + !------------------------------------------------------------- + + index_x2l_Sa_z = mct_avect_indexra(x2l,'Sa_z') + index_x2l_Sa_topo = mct_avect_indexra(x2l,'Sa_topo') + index_x2l_Sa_u = mct_avect_indexra(x2l,'Sa_u') + index_x2l_Sa_v = mct_avect_indexra(x2l,'Sa_v') + index_x2l_Sa_ptem = mct_avect_indexra(x2l,'Sa_ptem') + index_x2l_Sa_pbot = mct_avect_indexra(x2l,'Sa_pbot') + index_x2l_Sa_tbot = mct_avect_indexra(x2l,'Sa_tbot') + index_x2l_Sa_shum = mct_avect_indexra(x2l,'Sa_shum') + index_x2l_Sa_co2prog = mct_avect_indexra(x2l,'Sa_co2prog',perrwith='quiet') + index_x2l_Sa_co2diag = mct_avect_indexra(x2l,'Sa_co2diag',perrwith='quiet') + + index_x2l_Sa_methane = mct_avect_indexra(x2l,'Sa_methane',perrWith='quiet') + + index_x2l_Flrr_volr = mct_avect_indexra(x2l,'Flrr_volr') + index_x2l_Flrr_volrmch = mct_avect_indexra(x2l,'Flrr_volrmch') + + index_x2l_Faxa_lwdn = mct_avect_indexra(x2l,'Faxa_lwdn') + index_x2l_Faxa_rainc = mct_avect_indexra(x2l,'Faxa_rainc') + index_x2l_Faxa_rainl = mct_avect_indexra(x2l,'Faxa_rainl') + index_x2l_Faxa_snowc = mct_avect_indexra(x2l,'Faxa_snowc') + index_x2l_Faxa_snowl = mct_avect_indexra(x2l,'Faxa_snowl') + index_x2l_Faxa_swndr = mct_avect_indexra(x2l,'Faxa_swndr') + index_x2l_Faxa_swvdr = mct_avect_indexra(x2l,'Faxa_swvdr') + index_x2l_Faxa_swndf = mct_avect_indexra(x2l,'Faxa_swndf') + index_x2l_Faxa_swvdf = mct_avect_indexra(x2l,'Faxa_swvdf') + index_x2l_Faxa_bcphidry = mct_avect_indexra(x2l,'Faxa_bcphidry') + index_x2l_Faxa_bcphodry = mct_avect_indexra(x2l,'Faxa_bcphodry') + index_x2l_Faxa_bcphiwet = mct_avect_indexra(x2l,'Faxa_bcphiwet') + index_x2l_Faxa_ocphidry = mct_avect_indexra(x2l,'Faxa_ocphidry') + index_x2l_Faxa_ocphodry = mct_avect_indexra(x2l,'Faxa_ocphodry') + index_x2l_Faxa_ocphiwet = mct_avect_indexra(x2l,'Faxa_ocphiwet') + index_x2l_Faxa_dstdry1 = mct_avect_indexra(x2l,'Faxa_dstdry1') + index_x2l_Faxa_dstdry2 = mct_avect_indexra(x2l,'Faxa_dstdry2') + index_x2l_Faxa_dstdry3 = mct_avect_indexra(x2l,'Faxa_dstdry3') + index_x2l_Faxa_dstdry4 = mct_avect_indexra(x2l,'Faxa_dstdry4') + index_x2l_Faxa_dstwet1 = mct_avect_indexra(x2l,'Faxa_dstwet1') + index_x2l_Faxa_dstwet2 = mct_avect_indexra(x2l,'Faxa_dstwet2') + index_x2l_Faxa_dstwet3 = mct_avect_indexra(x2l,'Faxa_dstwet3') + index_x2l_Faxa_dstwet4 = mct_avect_indexra(x2l,'Faxa_dstwet4') + + index_x2l_Faxa_nhx = mct_avect_indexra(x2l,'Faxa_nhx', perrWith='quiet') + index_x2l_Faxa_noy = mct_avect_indexra(x2l,'Faxa_noy', perrWith='quiet') + + if (index_x2l_Faxa_nhx > 0 .and. index_x2l_Faxa_noy > 0) then + ndep_from_cpl = .true. + end if + + index_x2l_Flrr_flood = mct_avect_indexra(x2l,'Flrr_flood') + + !------------------------------------------------------------- + ! glc coupling + !------------------------------------------------------------- + + index_x2l_Sg_icemask = mct_avect_indexra(x2l,'Sg_icemask') + index_x2l_Sg_icemask_coupled_fluxes = mct_avect_indexra(x2l,'Sg_icemask_coupled_fluxes') + + glc_nec = glc_get_num_elevation_classes() + if (glc_nec < 1) then + call shr_sys_abort('ERROR: In CLM4.5 and later, glc_nec must be at least 1.') + end if + + ! Create coupling fields for all glc elevation classes (1:glc_nec) plus bare land + ! (index 0). 
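+    ! The per-class field names are built from a base name plus the elevation
+    ! class suffix from glc_elevclass_as_string(); e.g. (illustrative), with
+    ! glc_nec = 10 the coupler provides Sg_ice_covered00 ... Sg_ice_covered10.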
+ allocate(index_l2x_Sl_tsrf(0:glc_nec)) + allocate(index_l2x_Sl_topo(0:glc_nec)) + allocate(index_l2x_Flgl_qice(0:glc_nec)) + allocate(index_x2l_Sg_ice_covered(0:glc_nec)) + allocate(index_x2l_Sg_topo(0:glc_nec)) + allocate(index_x2l_Flgg_hflx(0:glc_nec)) + + do num = 0,glc_nec + nec_str = glc_elevclass_as_string(num) + + name = 'Sg_ice_covered' // nec_str + index_x2l_Sg_ice_covered(num) = mct_avect_indexra(x2l,trim(name)) + name = 'Sg_topo' // nec_str + index_x2l_Sg_topo(num) = mct_avect_indexra(x2l,trim(name)) + name = 'Flgg_hflx' // nec_str + index_x2l_Flgg_hflx(num) = mct_avect_indexra(x2l,trim(name)) + + name = 'Sl_tsrf' // nec_str + index_l2x_Sl_tsrf(num) = mct_avect_indexra(l2x,trim(name)) + name = 'Sl_topo' // nec_str + index_l2x_Sl_topo(num) = mct_avect_indexra(l2x,trim(name)) + name = 'Flgl_qice' // nec_str + index_l2x_Flgl_qice(num) = mct_avect_indexra(l2x,trim(name)) + end do + + call mct_aVect_clean(x2l) + call mct_aVect_clean(l2x) + + end subroutine clm_cpl_indices_set + +!======================================================================= + +end module clm_cpl_indices diff --git a/src/cpl/mct/laiStreamMod.F90 b/src/cpl/mct/laiStreamMod.F90 new file mode 100644 index 0000000000..47d25287b7 --- /dev/null +++ b/src/cpl/mct/laiStreamMod.F90 @@ -0,0 +1,241 @@ +module laiStreamMod + +#include "shr_assert.h" + + !----------------------------------------------------------------------- + ! !DESCRIPTION: + ! Read LAI from stream + ! + ! !USES: + use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create + use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance + use shr_kind_mod , only : r8=>shr_kind_r8, CL=>shr_kind_CL, CS=>shr_kind_CS, CXX=>shr_kind_CXX + use shr_log_mod , only : errMsg => shr_log_errMsg + use decompMod , only : bounds_type + use abortutils , only : endrun + use clm_varctl , only : iulog, inst_name + use perf_mod , only : t_startf, t_stopf + use spmdMod , only : masterproc, mpicom, comp_id + use ncdio_pio + use mct_mod + ! + ! !PUBLIC TYPES: + implicit none + private + + ! !PUBLIC MEMBER FUNCTIONS: + public :: lai_init ! position datasets for LAI + public :: lai_advance ! Advance the LAI streams (outside of a Open-MP threading loop) + public :: lai_interp ! interpolates between two years of LAI data (when LAI streams + + ! !PRIVATE MEMBER DATA: + integer, allocatable :: g_to_ig(:) ! Array matching gridcell index to data index + type(shr_strdata_type) :: sdat_lai ! LAI input data stream + + character(len=*), parameter :: sourcefile = & + __FILE__ + +!============================================================================== +contains +!============================================================================== + + subroutine lai_init(bounds) + ! + ! Initialize data stream information for LAI. + ! + ! !USES: + use clm_time_manager , only : get_calendar + use ncdio_pio , only : pio_subsystem + use shr_pio_mod , only : shr_pio_getiotype + use shr_stream_mod , only : shr_stream_file_null + use shr_string_mod , only : shr_string_listCreateField + use clm_nlUtilsMod , only : find_nlgroup_name + use ndepStreamMod , only : clm_domain_mct + use histFileMod , only : hist_addfld1d + use domainMod , only : ldomain + use controlMod , only : NLFilename + use lnd_set_decomp_and_domain , only : gsmap_global + ! + ! !ARGUMENTS: + implicit none + type(bounds_type), intent(in) :: bounds ! bounds + ! + ! !LOCAL VARIABLES: + integer :: stream_year_first_lai ! first year in Lai stream to use + integer :: stream_year_last_lai ! 
last year in Lai stream to use
+    integer            :: model_year_align_lai       ! align stream_year_first_lai with this model year
+    integer            :: nu_nml                     ! unit for namelist file
+    integer            :: nml_error                  ! namelist i/o error flag
+    type(mct_ggrid)    :: dom_clm                    ! domain information
+    character(len=CL)  :: stream_fldFileName_lai     ! lai stream filename to read
+    character(len=CL)  :: lai_mapalgo = 'bilinear'   ! Mapping algorithm
+    character(len=CL)  :: lai_tintalgo = 'linear'    ! Time interpolation algorithm
+    character(len=CXX) :: fldList                    ! field string
+    character(*), parameter :: laiString = "LAI"     ! base string for field string
+    integer , parameter :: numLaiFields = 16         ! number of fields to build field string
+    character(*), parameter :: subName = "('laidyn_init')"
+    !-----------------------------------------------------------------------
+    !
+    ! deal with namelist variables here in init
+    !
+    namelist /lai_streams/ &
+         stream_year_first_lai,  &
+         stream_year_last_lai,   &
+         model_year_align_lai,   &
+         lai_mapalgo,            &
+         stream_fldFileName_lai, &
+         lai_tintalgo
+
+    ! Default values for namelist
+    stream_year_first_lai  = 1      ! first year in stream to use
+    stream_year_last_lai   = 1      ! last year in stream to use
+    model_year_align_lai   = 1      ! align stream_year_first_lai with this model year
+    stream_fldFileName_lai = shr_stream_file_null
+
+    ! Read lai_streams namelist
+    if (masterproc) then
+       open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error )
+       call find_nlgroup_name(nu_nml, 'lai_streams', status=nml_error)
+       if (nml_error == 0) then
+          read(nu_nml, nml=lai_streams,iostat=nml_error)
+          if (nml_error /= 0) then
+             call endrun(subname // ':: ERROR reading lai_streams namelist')
+          end if
+       else
+          call endrun(subname // ':: ERROR finding lai_streams namelist')
+       end if
+       close(nu_nml)
+    endif
+    call shr_mpi_bcast(stream_year_first_lai  , mpicom)
+    call shr_mpi_bcast(stream_year_last_lai   , mpicom)
+    call shr_mpi_bcast(model_year_align_lai   , mpicom)
+    call shr_mpi_bcast(stream_fldFileName_lai , mpicom)
+    call shr_mpi_bcast(lai_tintalgo           , mpicom)
+
+    if (masterproc) then
+       write(iulog,*) ' '
+       write(iulog,*) 'lai_stream settings:'
+       write(iulog,*) '  stream_year_first_lai  = ',stream_year_first_lai
+       write(iulog,*) '  stream_year_last_lai   = ',stream_year_last_lai
+       write(iulog,*) '  model_year_align_lai   = ',model_year_align_lai
+       write(iulog,*) '  stream_fldFileName_lai = ',trim(stream_fldFileName_lai)
+       write(iulog,*) '  lai_tintalgo = ',trim(lai_tintalgo)
+    endif
+
+    call clm_domain_mct (bounds, dom_clm)
+
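+    ! For reference, a lai_streams group might look like the following (values
+    ! illustrative only, not defaults):
+    !   &lai_streams
+    !     stream_year_first_lai = 2001
+    !     stream_year_last_lai  = 2013
+    !     model_year_align_lai  = 1
+    !     stream_fldFileName_lai = '/path/to/MODIS_pft_lai_0.5x0.5.nc'
+    !     lai_mapalgo  = 'bilinear'
+    !     lai_tintalgo = 'linear'
+    !   /
+
+    ! 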
create the field list for these lai fields...use in shr_strdata_create + fldList = shr_string_listCreateField( numLaiFields, laiString ) + + call shr_strdata_create(sdat_lai,name="laidyn", & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=stream_year_first_lai, & + yearLast=stream_year_last_lai, & + yearAlign=model_year_align_lai, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_fldFileName_lai), & + domTvarName='time', & + domXvarName='lon' , & + domYvarName='lat' , & + domAreaName='area', & + domMaskName='mask', & + filePath='', & + filename=(/stream_fldFileName_lai/), & + fldListFile=fldList, & + fldListModel=fldList, & + fillalgo='none', & + mapalgo=lai_mapalgo, & + tintalgo=lai_tintalgo, & + calendar=get_calendar(), & + taxmode='cycle' ) + + if (masterproc) then + call shr_strdata_print(sdat_lai,'LAI data') + endif + + end subroutine lai_init + + !============================================================================== + subroutine lai_advance( bounds ) + ! + ! Advance LAI streams + ! + ! !USES: + use clm_time_manager, only : get_curr_date + ! + ! !ARGUMENTS: + implicit none + type(bounds_type) , intent(in) :: bounds + ! + ! !LOCAL VARIABLES: + integer :: g, ig ! Indices + integer :: year ! year (0, ...) for nstep+1 + integer :: mon ! month (1, ..., 12) for nstep+1 + integer :: day ! day of month (1, ..., 31) for nstep+1 + integer :: sec ! seconds into current date for nstep+1 + integer :: mcdate ! Current model date (yyyymmdd) + !----------------------------------------------------------------------- + + call get_curr_date(year, mon, day, sec) + mcdate = year*10000 + mon*100 + day + + call shr_strdata_advance(sdat_lai, mcdate, sec, mpicom, 'laidyn') + if ( .not. allocated(g_to_ig) )then + allocate (g_to_ig(bounds%begg:bounds%endg) ) + ig = 0 + do g = bounds%begg,bounds%endg + ig = ig+1 + g_to_ig(g) = ig + end do + end if + + end subroutine lai_advance + + !============================================================================== + subroutine lai_interp(bounds, canopystate_inst) + ! + ! Interpolate data stream information for Lai. + ! + ! !USES: + use pftconMod , only : noveg + use CanopyStateType , only : canopystate_type + use PatchType , only : patch + ! + ! !ARGUMENTS: + implicit none + type(bounds_type) , intent(in) :: bounds + type(canopystate_type) , intent(inout) :: canopystate_inst + ! + ! !LOCAL VARIABLES: + integer :: ivt, p, ip, ig + character(len=CL) :: stream_var_name + !----------------------------------------------------------------------- + SHR_ASSERT_FL( (lbound(g_to_ig,1) <= bounds%begg ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(g_to_ig,1) >= bounds%endg ), sourcefile, __LINE__) + SHR_ASSERT_FL( (lbound(sdat_lai%avs(1)%rAttr,2) <= g_to_ig(bounds%begg) ), sourcefile, __LINE__) + SHR_ASSERT_FL( (ubound(sdat_lai%avs(1)%rAttr,2) >= g_to_ig(bounds%endg) ), sourcefile, __LINE__) + + do p = bounds%begp, bounds%endp + ivt = patch%itype(p) + ! Set lai for each gridcell/patch combination + if (ivt /= noveg) then + ! vegetated pft + write(stream_var_name,"(i6)") ivt + stream_var_name = 'LAI_'//trim(adjustl(stream_var_name)) + ip = mct_aVect_indexRA(sdat_lai%avs(1),trim(stream_var_name)) + ig = g_to_ig(patch%gridcell(p)) + canopystate_inst%tlai_patch(p) = sdat_lai%avs(1)%rAttr(ip,ig) + else + ! 
+            ! non-vegetated pft
+            canopystate_inst%tlai_patch(p) = 0._r8
+         endif
+      end do
+
+   end subroutine lai_interp
+
+end module LaiStreamMod
diff --git a/src/cpl/mct/lnd_comp_mct.F90 b/src/cpl/mct/lnd_comp_mct.F90
new file mode 100644
index 0000000000..e50602a378
--- /dev/null
+++ b/src/cpl/mct/lnd_comp_mct.F90
@@ -0,0 +1,632 @@
+module lnd_comp_mct
+
+  !---------------------------------------------------------------------------
+  ! !DESCRIPTION:
+  ! Interface of the active land model component of CESM, the CLM (Community
+  ! Land Model), with the main CESM driver. This is a thin interface that takes
+  ! CESM driver information in MCT (Model Coupling Toolkit) format and converts
+  ! it for use by CLM.
+  !
+  ! !USES:
+  use shr_kind_mod     , only : r8 => shr_kind_r8
+  use shr_sys_mod      , only : shr_sys_flush
+  use shr_log_mod      , only : errMsg => shr_log_errMsg
+  use mct_mod          , only : mct_avect, mct_gsmap, mct_gGrid
+  use decompMod        , only : bounds_type
+  use lnd_import_export, only : lnd_import, lnd_export
+  !
+  ! !public member functions:
+  implicit none
+  private ! by default make data private
+  !
+  ! !public member functions:
+  public :: lnd_init_mct  ! clm initialization
+  public :: lnd_run_mct   ! clm run phase
+  public :: lnd_final_mct ! clm finalization/cleanup
+  !
+  ! !private member functions:
+  private :: lnd_domain_mct    ! set the land model domain information
+  private :: lnd_handle_resume ! handle pause/resume signals from the coupler
+
+  character(len=*), parameter, private :: sourcefile = &
+       __FILE__
+
+!====================================================================================
+contains
+!====================================================================================
+
+  subroutine lnd_init_mct( EClock, cdata_l, x2l_l, l2x_l, NLFilename )
+    !
+    ! !DESCRIPTION:
+    ! Initialize land surface model and obtain relevant atmospheric model arrays
+    ! back from it (i.e. albedos, surface temperature and snow cover over land).
+    !
+    ! !USES:
+    use shr_kind_mod     , only : shr_kind_cl
+    use abortutils       , only : endrun
+    use clm_time_manager , only : get_nstep, set_timemgr_init
+    use clm_initializeMod, only : initialize1, initialize2
+    use clm_instMod      , only : water_inst, lnd2atm_inst, lnd2glc_inst
+    use clm_varctl       , only : finidat, single_column, clm_varctl_set, iulog
+    use clm_varctl       , only : inst_index, inst_suffix, inst_name
+    use clm_varorb       , only : eccen, obliqr, lambm0, mvelpp
+    use controlMod       , only : control_setNL
+    use decompMod        , only : get_proc_bounds
+    use domainMod        , only : ldomain
+    use shr_file_mod     , only : shr_file_setLogUnit, shr_file_setLogLevel
+    use shr_file_mod     , only : shr_file_getLogUnit, shr_file_getLogLevel
+    use shr_file_mod     , only : shr_file_getUnit, shr_file_setIO
+    use seq_cdata_mod    , only : seq_cdata, seq_cdata_setptrs
+    use seq_timemgr_mod  , only : seq_timemgr_EClockGetData
+    use seq_infodata_mod , only : seq_infodata_type, seq_infodata_GetData, seq_infodata_PutData, &
+                                  seq_infodata_start_type_start, seq_infodata_start_type_cont, &
+                                  seq_infodata_start_type_brnch
+    use seq_comm_mct     , only : seq_comm_suffix, seq_comm_inst, seq_comm_name
+    use seq_flds_mod     , only : seq_flds_x2l_fields, seq_flds_l2x_fields
+    use spmdMod          , only : masterproc, spmd_init
+    use clm_varctl       , only : nsrStartup, nsrContinue, nsrBranch
+    use clm_cpl_indices  , only : clm_cpl_indices_set
+    use mct_mod          , only : mct_aVect_init, mct_aVect_zero, mct_gsMap, mct_gsMap_init
+    use decompMod        , only : gindex_global
+    use lnd_set_decomp_and_domain, only : lnd_set_decomp_and_domain_from_surfrd, gsmap_global
+    use ESMF
+    !
+    ! !ARGUMENTS:
+    type(ESMF_Clock)          , intent(inout) :: EClock       ! Input synchronization clock
+    type(seq_cdata)           , intent(inout) :: cdata_l      ! Input land-model driver data
+    type(mct_aVect)           , intent(inout) :: x2l_l, l2x_l ! land model import and export states
+    character(len=*), optional, intent(in)    :: NLFilename   ! Namelist filename to read
+    !
+    ! !LOCAL VARIABLES:
+    integer                          :: LNDID      ! Land identifier
+    integer                          :: mpicom_lnd ! MPI communicator
+    type(mct_gsMap), pointer         :: GSMap_lnd  ! Land model MCT GS map
+    type(mct_gGrid), pointer         :: dom_l      ! Land model domain
+    type(seq_infodata_type), pointer :: infodata   ! CESM driver level info data
+    integer  :: lsize      ! size of attribute vector
+    integer  :: gsize      ! global size
+    integer  :: g,i,j      ! indices
+    integer  :: dtime_sync ! coupling time-step from the input synchronization clock
+    logical  :: exists     ! true if file exists
+    logical  :: atm_aero   ! Flag if aerosol data sent from atm model
+    real(r8) :: scmlat     ! single-column latitude
+    real(r8) :: scmlon     ! single-column longitude
+    character(len=SHR_KIND_CL) :: caseid    ! case identifier name
+    character(len=SHR_KIND_CL) :: ctitle    ! case description title
+    character(len=SHR_KIND_CL) :: starttype ! start-type (startup, continue, branch, hybrid)
+    character(len=SHR_KIND_CL) :: calendar  ! calendar type name
+    character(len=SHR_KIND_CL) :: hostname  ! hostname of machine running on
+    character(len=SHR_KIND_CL) :: version   ! Model version
+    character(len=SHR_KIND_CL) :: username  ! user running the model
+    integer :: nsrest    ! clm restart type
+    integer :: ref_ymd   ! reference date (YYYYMMDD)
+    integer :: ref_tod   ! reference time of day (sec)
+    integer :: start_ymd ! start date (YYYYMMDD)
+    integer :: start_tod ! start time of day (sec)
+    logical :: brnch_retain_casename ! flag if should retain the case name on a branch start type
+    integer :: lbnum     ! input to memory diagnostic
+    integer :: shrlogunit,shrloglev  ! old values for log unit and log level
+    type(bounds_type) :: bounds      ! bounds
+    logical :: noland
+    integer :: ni,nj
+    real(r8), parameter :: rundef = -9999999._r8
+    character(len=32), parameter :: sub = 'lnd_init_mct'
+    character(len=*),  parameter :: format = "('("//trim(sub)//") :',A)"
+    !-----------------------------------------------------------------------
+
+    ! Set cdata data
+    call seq_cdata_setptrs(cdata_l, ID=LNDID, mpicom=mpicom_lnd, &
+         gsMap=GSMap_lnd, dom=dom_l, infodata=infodata)
+
+    ! Determine attribute vector indices
+    call clm_cpl_indices_set()
+
+    ! Initialize clm MPI communicator
+    call spmd_init( mpicom_lnd, LNDID )
+
+#if (defined _MEMTRACE)
+    if(masterproc) then
+       lbnum=1
+       call memmon_dump_fort('memmon.out','lnd_init_mct:start::',lbnum)
+    endif
+#endif
+
+    inst_name   = seq_comm_name(LNDID)
+    inst_index  = seq_comm_inst(LNDID)
+    inst_suffix = seq_comm_suffix(LNDID)
+
+    ! Initialize io log unit
+    call shr_file_getLogUnit (shrlogunit)
+    if (masterproc) then
+       inquire(file='lnd_modelio.nml'//trim(inst_suffix),exist=exists)
+       if (exists) then
+          iulog = shr_file_getUnit()
+          call shr_file_setIO('lnd_modelio.nml'//trim(inst_suffix),iulog)
+       end if
+       write(iulog,format) "CLM land model initialization"
+    else
+       iulog = shrlogunit
+    end if
+
+    call shr_file_getLogLevel(shrloglev)
+    call shr_file_setLogUnit (iulog)
+
+    ! Use infodata to set orbital values
+    call seq_infodata_GetData( infodata, orb_eccen=eccen, orb_mvelpp=mvelpp, &
+         orb_lambm0=lambm0, orb_obliqr=obliqr )
+
+    ! Consistency check on namelist filename
+    call control_setNL("lnd_in"//trim(inst_suffix))
+
+    ! Initialize clm
+    ! initialize1 reads namelists
+    ! decomp and domain are set in lnd_set_decomp_and_domain_from_surfrd
+    ! initialize2 performs the rest of initialization
+    call seq_timemgr_EClockGetData(EClock, &
+         start_ymd=start_ymd, &
+         start_tod=start_tod, ref_ymd=ref_ymd, &
+         ref_tod=ref_tod, &
+         calendar=calendar, &
+         dtime=dtime_sync)
+    if (masterproc) then
+       write(iulog,*)'dtime = ',dtime_sync
+    end if
+    call seq_infodata_GetData(infodata, case_name=caseid, &
+         case_desc=ctitle, single_column=single_column, &
+         scmlat=scmlat, scmlon=scmlon, &
+         brnch_retain_casename=brnch_retain_casename, &
+         start_type=starttype, model_version=version, &
+         hostname=hostname, username=username )
+
+    ! Single Column
+    if ( single_column .and. (scmlat == rundef .or. scmlon == rundef ) ) then
+       call endrun(msg=' ERROR:: single column mode on -- but scmlat and scmlon are NOT set'//&
+            errMsg(sourcefile, __LINE__))
+    end if
+
+    ! Note that we assume that CTSM's internal dtime matches the coupling time step.
+    ! i.e., we currently do NOT allow sub-cycling within a coupling time step.
+    call set_timemgr_init( calendar_in=calendar, start_ymd_in=start_ymd, start_tod_in=start_tod, &
+         ref_ymd_in=ref_ymd, ref_tod_in=ref_tod, dtime_in=dtime_sync)
+
+    if ( trim(starttype) == trim(seq_infodata_start_type_start)) then
+       nsrest = nsrStartup
+    else if (trim(starttype) == trim(seq_infodata_start_type_cont) ) then
+       nsrest = nsrContinue
+    else if (trim(starttype) == trim(seq_infodata_start_type_brnch)) then
+       nsrest = nsrBranch
+    else
+       call endrun( sub//' ERROR: unknown starttype' )
+    end if
+
+    ! set default values for run control variables
+    call clm_varctl_set(caseid_in=caseid, ctitle_in=ctitle, &
+         brnch_retain_casename_in=brnch_retain_casename, &
+         single_column_in=single_column, scmlat_in=scmlat, &
+         scmlon_in=scmlon, nsrest_in=nsrest, version_in=version, &
+         hostname_in=hostname, username_in=username)
+
+    ! 
Read namelists + call initialize1(dtime=dtime_sync) + + ! Initialize decomposition and domain (ldomain) type + call lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj) + + ! If no land then exit out of initialization + if ( noland ) then + + call seq_infodata_PutData( infodata, lnd_present =.false.) + call seq_infodata_PutData( infodata, lnd_prognostic=.false.) + + else + + ! Determine if aerosol and dust deposition come from atmosphere component + call seq_infodata_GetData(infodata, atm_aero=atm_aero ) + if ( .not. atm_aero )then + call endrun( sub//' ERROR: atmosphere model MUST send aerosols to CLM' ) + end if + + ! Initialize clm gsMap, clm domain and clm attribute vectors + call get_proc_bounds( bounds ) + lsize = bounds%endg - bounds%begg + 1 + gsize = ldomain%ni * ldomain%nj + call mct_gsMap_init( gsMap_lnd, gindex_global, mpicom_lnd, LNDID, lsize, gsize ) + gsmap_global => gsmap_lnd ! module variable in lnd_set_decomp_and_domain + call lnd_domain_mct( bounds, lsize, gsMap_lnd, dom_l ) + call mct_aVect_init(x2l_l, rList=seq_flds_x2l_fields, lsize=lsize) + call mct_aVect_zero(x2l_l) + call mct_aVect_init(l2x_l, rList=seq_flds_l2x_fields, lsize=lsize) + call mct_aVect_zero(l2x_l) + + ! Finish initializing clm + call initialize2(ni,nj) + + ! Create land export state + call lnd_export(bounds, water_inst%waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x_l%rattr) + + ! Fill in infodata settings + call seq_infodata_PutData(infodata, lnd_prognostic=.true.) + call seq_infodata_PutData(infodata, lnd_nx=ldomain%ni, lnd_ny=ldomain%nj) + call lnd_handle_resume( cdata_l ) + + ! Reset shr logging to original values + call shr_file_setLogUnit (shrlogunit) + call shr_file_setLogLevel(shrloglev) + +#if (defined _MEMTRACE) + if(masterproc) then + write(iulog,*) TRIM(Sub) // ':end::' + lbnum=1 + call memmon_dump_fort('memmon.out','lnd_int_mct:end::',lbnum) + call memmon_reset_addr() + endif +#endif + end if + + end subroutine lnd_init_mct + + !==================================================================================== + subroutine lnd_run_mct(EClock, cdata_l, x2l_l, l2x_l) + ! + ! !DESCRIPTION: + ! Run clm model + ! + ! !USES: + use shr_kind_mod , only : r8 => shr_kind_r8 + use clm_instMod , only : water_inst, lnd2atm_inst, atm2lnd_inst, lnd2glc_inst, glc2lnd_inst + use clm_driver , only : clm_drv + use clm_time_manager, only : get_curr_date, get_nstep, get_curr_calday, get_step_size + use clm_time_manager, only : advance_timestep, update_rad_dtime + use decompMod , only : get_proc_bounds + use abortutils , only : endrun + use clm_varctl , only : iulog + use clm_varorb , only : eccen, obliqr, lambm0, mvelpp + use shr_file_mod , only : shr_file_setLogUnit, shr_file_setLogLevel + use shr_file_mod , only : shr_file_getLogUnit, shr_file_getLogLevel + use seq_cdata_mod , only : seq_cdata, seq_cdata_setptrs + use seq_timemgr_mod , only : seq_timemgr_EClockGetData, seq_timemgr_StopAlarmIsOn + use seq_timemgr_mod , only : seq_timemgr_RestartAlarmIsOn, seq_timemgr_EClockDateInSync + use seq_infodata_mod, only : seq_infodata_type, seq_infodata_GetData + use spmdMod , only : masterproc, mpicom + use perf_mod , only : t_startf, t_stopf, t_barrierf + use shr_orb_mod , only : shr_orb_decl + use ESMF + ! + ! !ARGUMENTS: + type(ESMF_Clock) , intent(inout) :: EClock ! Input synchronization clock from driver + type(seq_cdata) , intent(inout) :: cdata_l ! Input driver data for land model + type(mct_aVect) , intent(inout) :: x2l_l ! Import state to land model + type(mct_aVect) , intent(inout) :: l2x_l ! 
Export state from land model
+      !
+      ! !LOCAL VARIABLES:
+      integer :: ymd_sync  ! Sync date (YYYYMMDD)
+      integer :: yr_sync   ! Sync current year
+      integer :: mon_sync  ! Sync current month
+      integer :: day_sync  ! Sync current day
+      integer :: tod_sync  ! Sync current time of day (sec)
+      integer :: ymd       ! CLM current date (YYYYMMDD)
+      integer :: yr        ! CLM current year
+      integer :: mon       ! CLM current month
+      integer :: day       ! CLM current day
+      integer :: tod       ! CLM current time of day (sec)
+      integer :: dtime     ! time step increment (sec)
+      integer :: nstep     ! time step index
+      logical :: rstwr_sync ! .true. ==> write restart file before returning
+      logical :: rstwr      ! .true. ==> write restart file before returning
+      logical :: nlend_sync ! Flag signaling last time-step
+      logical :: nlend      ! .true. ==> last time-step
+      logical :: dosend     ! true => send data back to driver
+      logical :: doalb      ! .true. ==> do albedo calculation on this time step
+      logical :: rof_prognostic ! .true. => running with a prognostic ROF model
+      logical :: glc_present    ! .true. => running with a non-stub GLC model
+      real(r8) :: nextsw_cday   ! calday from clock of next radiation computation
+      real(r8) :: caldayp1      ! clm calday plus dtime offset
+      integer :: shrlogunit,shrloglev ! old values for share log unit and log level
+      integer :: lbnum          ! input to memory diagnostic
+      integer :: g,i,lsize      ! counters
+      real(r8) :: calday        ! calendar day for nstep
+      real(r8) :: declin        ! solar declination angle in radians for nstep
+      real(r8) :: declinp1      ! solar declination angle in radians for nstep+1
+      real(r8) :: eccf          ! earth orbit eccentricity factor
+      real(r8) :: recip         ! reciprocal
+      logical,save :: first_call = .true. ! first call work
+      type(seq_infodata_type),pointer :: infodata ! CESM information from the driver
+      type(mct_gGrid),        pointer :: dom_l    ! Land model domain data
+      type(bounds_type) :: bounds ! bounds
+      character(len=32)  :: rdate ! date char string for restart file names
+      character(len=32), parameter :: sub = "lnd_run_mct"
+      !---------------------------------------------------------------------------
+
+      ! Determine processor bounds
+
+      call get_proc_bounds(bounds)
+
+#if (defined _MEMTRACE)
+      if(masterproc) then
+         lbnum=1
+         call memmon_dump_fort('memmon.out','lnd_run_mct:start::',lbnum)
+      endif
+#endif
+
+      ! Reset shr logging to my log file
+      call shr_file_getLogUnit (shrlogunit)
+      call shr_file_getLogLevel(shrloglev)
+      call shr_file_setLogUnit (iulog)
+
+      ! Determine time of next atmospheric shortwave calculation
+      call seq_cdata_setptrs(cdata_l, infodata=infodata, dom=dom_l)
+      call seq_timemgr_EClockGetData(EClock, &
+           curr_ymd=ymd, curr_tod=tod_sync, &
+           curr_yr=yr_sync, curr_mon=mon_sync, curr_day=day_sync)
+      call seq_infodata_GetData(infodata, nextsw_cday=nextsw_cday )
+
+      dtime = get_step_size()
+
+      ! Handle pause/resume signals from coupler
+      call lnd_handle_resume( cdata_l )
+
+      write(rdate,'(i4.4,"-",i2.2,"-",i2.2,"-",i5.5)') yr_sync,mon_sync,day_sync,tod_sync
+      nlend_sync = seq_timemgr_StopAlarmIsOn( EClock )
+      rstwr_sync = seq_timemgr_RestartAlarmIsOn( EClock )
+
+      ! Determine if we're running with a prognostic ROF model, and if we're running with a
+      ! non-stub GLC model. These won't change throughout the run, but we can't count on
+      ! their being set in initialization, so need to get them in the run method.
+
+      call seq_infodata_GetData( infodata, &
+           rof_prognostic=rof_prognostic, &
+           glc_present=glc_present)
+
+      ! Map MCT to land data type
+      ! Perform downscaling if appropriate
+
+      ! 
Map to clm (only when state and/or fluxes need to be updated) + + call t_startf ('lc_lnd_import') + call lnd_import( bounds, & + x2l = x2l_l%rattr, & + glc_present = glc_present, & + atm2lnd_inst = atm2lnd_inst, & + glc2lnd_inst = glc2lnd_inst, & + wateratm2lndbulk_inst = water_inst%wateratm2lndbulk_inst) + call t_stopf ('lc_lnd_import') + + ! Use infodata to set orbital values if updated mid-run + + call seq_infodata_GetData( infodata, orb_eccen=eccen, orb_mvelpp=mvelpp, & + orb_lambm0=lambm0, orb_obliqr=obliqr ) + + ! Loop over time steps in coupling interval + + dosend = .false. + do while(.not. dosend) + + ! Determine if dosend + ! When time is not updated at the beginning of the loop - then return only if + ! are in sync with clock before time is updated + ! + ! NOTE(wjs, 2020-03-09) I think the do while (.not. dosend) loop only is important + ! for the first time step (when we run 2 steps). After that, we now assume that we + ! run one time step per coupling interval (based on setting the model's dtime from + ! the driver). (According to Mariana Vertenstein, sub-cycling (running multiple + ! land model time steps per coupling interval) used to be supported, but hasn't + ! been fully supported for a long time.) We may want to rework this logic to make + ! this more explicit, or - ideally - get rid of this extra time step at the start + ! of the run, at which point I think we could do away with this looping entirely. + + call get_curr_date( yr, mon, day, tod ) + ymd = yr*10000 + mon*100 + day + tod = tod + dosend = (seq_timemgr_EClockDateInSync( EClock, ymd, tod)) + + ! Determine doalb based on nextsw_cday sent from atm model + + nstep = get_nstep() + caldayp1 = get_curr_calday(offset=dtime, reuse_day_365_for_day_366=.true.) + if (nstep == 0) then + doalb = .false. + else if (nstep == 1) then + doalb = (abs(nextsw_cday- caldayp1) < 1.e-10_r8) + else + doalb = (nextsw_cday >= -0.5_r8) + end if + call update_rad_dtime(doalb) + + ! Determine if time to write restart and stop + + rstwr = .false. + if (rstwr_sync .and. dosend) rstwr = .true. + nlend = .false. + if (nlend_sync .and. dosend) nlend = .true. + + ! Run clm + + call t_barrierf('sync_clm_run1', mpicom) + call t_startf ('clm_run') + call t_startf ('shr_orb_decl') + calday = get_curr_calday(reuse_day_365_for_day_366=.true.) + call shr_orb_decl( calday , eccen, mvelpp, lambm0, obliqr, declin , eccf ) + call shr_orb_decl( nextsw_cday, eccen, mvelpp, lambm0, obliqr, declinp1, eccf ) + call t_stopf ('shr_orb_decl') + call clm_drv(doalb, nextsw_cday, declinp1, declin, rstwr, nlend, rdate, rof_prognostic) + call t_stopf ('clm_run') + + ! Create l2x_l export state - add river runoff input to l2x_l if appropriate + + call t_startf ('lc_lnd_export') + call lnd_export(bounds, water_inst%waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x_l%rattr) + call t_stopf ('lc_lnd_export') + + ! Advance clm time step + + call t_startf ('lc_clm2_adv_timestep') + call advance_timestep() + call t_stopf ('lc_clm2_adv_timestep') + + end do + + ! Check that internal clock is in sync with master clock + + call get_curr_date( yr, mon, day, tod, offset=-dtime ) + ymd = yr*10000 + mon*100 + day + tod = tod + if ( .not. seq_timemgr_EClockDateInSync( EClock, ymd, tod ) )then + call seq_timemgr_EclockGetData( EClock, curr_ymd=ymd_sync, curr_tod=tod_sync ) + write(iulog,*)' clm ymd=',ymd ,' clm tod= ',tod + write(iulog,*)'sync ymd=',ymd_sync,' sync tod= ',tod_sync + call endrun( sub//":: CLM clock not in sync with Master Sync clock" ) + end if + + ! 
Reset shr logging to my original values + + call shr_file_setLogUnit (shrlogunit) + call shr_file_setLogLevel(shrloglev) + +#if (defined _MEMTRACE) + if(masterproc) then + lbnum=1 + call memmon_dump_fort('memmon.out','lnd_run_mct:end::',lbnum) + call memmon_reset_addr() + endif +#endif + + first_call = .false. + + end subroutine lnd_run_mct + + !==================================================================================== + subroutine lnd_final_mct( EClock, cdata_l, x2l_l, l2x_l) + ! + ! !DESCRIPTION: + ! Finalize land surface model + + use seq_cdata_mod ,only : seq_cdata, seq_cdata_setptrs + use seq_timemgr_mod ,only : seq_timemgr_EClockGetData, seq_timemgr_StopAlarmIsOn + use seq_timemgr_mod ,only : seq_timemgr_RestartAlarmIsOn, seq_timemgr_EClockDateInSync + use esmf + ! + ! !ARGUMENTS: + type(ESMF_Clock) , intent(inout) :: EClock ! Input synchronization clock from driver + type(seq_cdata) , intent(inout) :: cdata_l ! Input driver data for land model + type(mct_aVect) , intent(inout) :: x2l_l ! Import state to land model + type(mct_aVect) , intent(inout) :: l2x_l ! Export state from land model + !--------------------------------------------------------------------------- + + ! fill this in + end subroutine lnd_final_mct + + !==================================================================================== + subroutine lnd_domain_mct( bounds, lsize, gsMap_l, dom_l ) + ! + ! !DESCRIPTION: + ! Send the land model domain information to the coupler + ! + ! !USES: + use clm_varcon , only: re + use domainMod , only: ldomain + use spmdMod , only: iam + use mct_mod , only: mct_gGrid_importIAttr + use mct_mod , only: mct_gGrid_importRAttr, mct_gGrid_init, mct_gsMap_orderedPoints + use seq_flds_mod, only: seq_flds_dom_coord, seq_flds_dom_other + ! + ! !ARGUMENTS: + type(bounds_type), intent(in) :: bounds ! bounds + integer , intent(in) :: lsize ! land model domain data size + type(mct_gsMap), intent(inout) :: gsMap_l ! Output land model MCT GS map + type(mct_ggrid), intent(out) :: dom_l ! Output domain information for land model + ! + ! Local Variables + integer :: g,i,j ! index + real(r8), pointer :: data(:) ! temporary + integer , pointer :: idata(:) ! temporary + !--------------------------------------------------------------------------- + ! + ! Initialize mct domain type + ! lat/lon in degrees, area in radians^2, mask is 1 (land), 0 (non-land) + ! Note that in addition land carries around landfrac for the purposes of domain checking + ! + call mct_gGrid_init( GGrid=dom_l, CoordChars=trim(seq_flds_dom_coord), & + OtherChars=trim(seq_flds_dom_other), lsize=lsize ) + ! + ! Allocate memory + ! + allocate(data(lsize)) + ! + ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT + ! + call mct_gsMap_orderedPoints(gsMap_l, iam, idata) + call mct_gGrid_importIAttr(dom_l,'GlobGridNum',idata,lsize) + ! + ! Determine domain (numbering scheme is: West to East and South to North to South pole) + ! Initialize attribute vector with special value + ! + data(:) = -9999.0_R8 + call mct_gGrid_importRAttr(dom_l,"lat" ,data,lsize) + call mct_gGrid_importRAttr(dom_l,"lon" ,data,lsize) + call mct_gGrid_importRAttr(dom_l,"area" ,data,lsize) + call mct_gGrid_importRAttr(dom_l,"aream",data,lsize) + data(:) = 0.0_R8 + call mct_gGrid_importRAttr(dom_l,"mask" ,data,lsize) + ! + ! Fill in correct values for domain components + ! Note aream will be filled in in the atm-lnd mapper + ! 
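+    ! A small worked sketch of the packing below, with hypothetical numbers
+    ! (illustrative only): for a gridcell g with ldomain%area(g) = 1000 km^2
+    ! and re ~= 6371.22 km (the earth radius used in clm_varcon),
+    !   data(i) = 1000./(6371.22*6371.22) ~= 2.46e-5   ! area in radians^2
+    ! where i = 1 + (g - bounds%begg) maps gridcell g to the 1-based
+    ! attribute-vector index.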
+
+    do g = bounds%begg,bounds%endg
+       i = 1 + (g - bounds%begg)
+       data(i) = ldomain%lonc(g)
+    end do
+    call mct_gGrid_importRattr(dom_l,"lon",data,lsize)
+
+    do g = bounds%begg,bounds%endg
+       i = 1 + (g - bounds%begg)
+       data(i) = ldomain%latc(g)
+    end do
+    call mct_gGrid_importRattr(dom_l,"lat",data,lsize)
+
+    do g = bounds%begg,bounds%endg
+       i = 1 + (g - bounds%begg)
+       data(i) = ldomain%area(g)/(re*re)
+    end do
+    call mct_gGrid_importRattr(dom_l,"area",data,lsize)
+
+    do g = bounds%begg,bounds%endg
+       i = 1 + (g - bounds%begg)
+       data(i) = real(ldomain%mask(g), r8)
+    end do
+    call mct_gGrid_importRattr(dom_l,"mask",data,lsize)
+
+    do g = bounds%begg,bounds%endg
+       i = 1 + (g - bounds%begg)
+       data(i) = real(ldomain%frac(g), r8)
+    end do
+    call mct_gGrid_importRattr(dom_l,"frac",data,lsize)
+
+    deallocate(data)
+    deallocate(idata)
+
+  end subroutine lnd_domain_mct
+
+  !====================================================================================
+  subroutine lnd_handle_resume( cdata_l )
+    !
+    ! !DESCRIPTION:
+    ! Handle resume signals for Data Assimilation (DA)
+    !
+    ! !USES:
+    use clm_time_manager , only : update_DA_nstep
+    use seq_cdata_mod    , only : seq_cdata, seq_cdata_setptrs
+    implicit none
+    ! !ARGUMENTS:
+    type(seq_cdata), intent(inout) :: cdata_l ! Input land-model driver data
+    ! !LOCAL VARIABLES:
+    logical :: resume_from_data_assim ! flag if we are resuming after data assimilation was done
+    !---------------------------------------------------------------------------
+
+    ! Check to see if restart was modified and we are resuming from data
+    ! assimilation
+    call seq_cdata_setptrs(cdata_l, post_assimilation=resume_from_data_assim)
+    if ( resume_from_data_assim ) call update_DA_nstep()
+
+  end subroutine lnd_handle_resume
+
+end module lnd_comp_mct
diff --git a/src/cpl/mct/lnd_import_export.F90 b/src/cpl/mct/lnd_import_export.F90
new file mode 100644
index 0000000000..537abd49d9
--- /dev/null
+++ b/src/cpl/mct/lnd_import_export.F90
@@ -0,0 +1,354 @@
+module lnd_import_export
+
+  use shr_kind_mod         , only: r8 => shr_kind_r8, cl=>shr_kind_cl
+  use abortutils           , only: endrun
+  use decompMod            , only: bounds_type, subgrid_level_gridcell
+  use lnd2atmType          , only: lnd2atm_type
+  use lnd2glcMod           , only: lnd2glc_type
+  use atm2lndType          , only: atm2lnd_type
+  use glc2lndMod           , only: glc2lnd_type
+  use Waterlnd2atmBulkType , only: waterlnd2atmbulk_type
+  use Wateratm2lndBulkType , only: wateratm2lndbulk_type
+  use clm_cpl_indices
+  use GridcellType         , only : grc
+  !
+  implicit none
+  !===============================================================================
+
+contains
+
+  !===============================================================================
+  subroutine lnd_import( bounds, x2l, glc_present, atm2lnd_inst, glc2lnd_inst, wateratm2lndbulk_inst)
+
+    !---------------------------------------------------------------------------
+    ! !DESCRIPTION:
+    ! Convert the input data from the coupler to the land model
+    !
+    ! !USES:
+    use seq_flds_mod , only: seq_flds_x2l_fields
+    use clm_varctl   , only: co2_type, co2_ppmv, iulog, use_c13
+    use clm_varctl   , only: ndep_from_cpl
+    use clm_varcon   , only: c13ratio
+    use domainMod    , only: ldomain
+    use lnd_import_export_utils, only : derive_quantities, check_for_errors, check_for_nans
+    !
+    ! !ARGUMENTS:
+    type(bounds_type) , intent(in)    :: bounds      ! bounds
+    real(r8)          , intent(in)    :: x2l(:,:)    ! driver import state to land model
+    logical           , intent(in)    :: glc_present ! .true. 
=> running with a non-stub GLC model + type(atm2lnd_type) , intent(inout) :: atm2lnd_inst ! clm internal input data type + type(glc2lnd_type) , intent(inout) :: glc2lnd_inst ! clm internal input data type + type(wateratm2lndbulk_type), intent(inout) :: wateratm2lndbulk_inst ! clm internal input data type + ! + ! !LOCAL VARIABLES: + integer :: begg, endg ! bounds + integer :: g,i,k,nstep,ier ! indices, number of steps, and error code + real(r8) :: qsat_kg_kg ! saturation specific humidity (kg/kg) + real(r8) :: forc_pbot ! atmospheric pressure (Pa) + real(r8) :: forc_rainc(bounds%begg:bounds%endg) ! rainxy Atm flux mm/s + real(r8) :: forc_rainl(bounds%begg:bounds%endg) ! rainxy Atm flux mm/s + real(r8) :: forc_snowc(bounds%begg:bounds%endg) ! snowfxy Atm flux mm/s + real(r8) :: forc_snowl(bounds%begg:bounds%endg) ! snowfxl Atm flux mm/s + real(r8) :: co2_ppmv_diag ! temporary + real(r8) :: co2_ppmv_prog ! temporary + real(r8) :: co2_ppmv_val ! temporary + integer :: co2_type_idx ! integer flag for co2_type options + character(len=32) :: fname ! name of field that is NaN + character(len=32), parameter :: sub = 'lnd_import' + + !--------------------------------------------------------------------------- + + ! Set bounds + begg = bounds%begg; endg = bounds%endg + + co2_type_idx = 0 + if (co2_type == 'prognostic') then + co2_type_idx = 1 + else if (co2_type == 'diagnostic') then + co2_type_idx = 2 + end if + if (co2_type == 'prognostic' .and. index_x2l_Sa_co2prog == 0) then + call endrun( sub//' ERROR: must have nonzero index_x2l_Sa_co2prog for co2_type equal to prognostic' ) + else if (co2_type == 'diagnostic' .and. index_x2l_Sa_co2diag == 0) then + call endrun( sub//' ERROR: must have nonzero index_x2l_Sa_co2diag for co2_type equal to diagnostic' ) + end if + + ! Note that the precipitation fluxes received from the coupler + ! are in units of kg/s/m^2. To convert these precipitation rates + ! in units of mm/sec, one must divide by 1000 kg/m^3 and multiply + ! by 1000 mm/m resulting in an overall factor of unity. + ! Below the units are therefore given in mm/s. + + do g = begg,endg + i = 1 + (g - begg) + + ! Determine flooding input, sign convention is positive downward and + ! hierarchy is atm/glc/lnd/rof/ice/ocn. so water sent from rof to land is negative, + ! change the sign to indicate addition of water to system. + + wateratm2lndbulk_inst%forc_flood_grc(g) = -x2l(index_x2l_Flrr_flood,i) + + wateratm2lndbulk_inst%volr_grc(g) = x2l(index_x2l_Flrr_volr,i) * (ldomain%area(g) * 1.e6_r8) + wateratm2lndbulk_inst%volrmch_grc(g)= x2l(index_x2l_Flrr_volrmch,i) * (ldomain%area(g) * 1.e6_r8) + + ! Determine required receive fields + + atm2lnd_inst%forc_hgt_grc(g) = x2l(index_x2l_Sa_z,i) ! zgcmxy Atm state m + atm2lnd_inst%forc_topo_grc(g) = x2l(index_x2l_Sa_topo,i) ! Atm surface height (m) + atm2lnd_inst%forc_u_grc(g) = x2l(index_x2l_Sa_u,i) ! forc_uxy Atm state m/s + atm2lnd_inst%forc_v_grc(g) = x2l(index_x2l_Sa_v,i) ! forc_vxy Atm state m/s + atm2lnd_inst%forc_solad_not_downscaled_grc(g,2) = x2l(index_x2l_Faxa_swndr,i) ! forc_sollxy Atm flux W/m^2 + atm2lnd_inst%forc_solad_not_downscaled_grc(g,1) = x2l(index_x2l_Faxa_swvdr,i) ! forc_solsxy Atm flux W/m^2 + atm2lnd_inst%forc_solai_grc(g,2) = x2l(index_x2l_Faxa_swndf,i) ! forc_solldxy Atm flux W/m^2 + atm2lnd_inst%forc_solai_grc(g,1) = x2l(index_x2l_Faxa_swvdf,i) ! forc_solsdxy Atm flux W/m^2 + + atm2lnd_inst%forc_th_not_downscaled_grc(g) = x2l(index_x2l_Sa_ptem,i) ! 
forc_thxy Atm state K
+         wateratm2lndbulk_inst%forc_q_not_downscaled_grc(g) = x2l(index_x2l_Sa_shum,i) ! forc_qxy Atm state kg/kg
+         atm2lnd_inst%forc_pbot_not_downscaled_grc(g)  = x2l(index_x2l_Sa_pbot,i)      ! ptcmxy Atm state Pa
+         atm2lnd_inst%forc_t_not_downscaled_grc(g)     = x2l(index_x2l_Sa_tbot,i)      ! forc_txy Atm state K
+         atm2lnd_inst%forc_lwrad_not_downscaled_grc(g) = x2l(index_x2l_Faxa_lwdn,i)    ! flwdsxy Atm flux W/m^2
+
+         forc_rainc(g) = x2l(index_x2l_Faxa_rainc,i)   ! mm/s
+         forc_rainl(g) = x2l(index_x2l_Faxa_rainl,i)   ! mm/s
+         forc_snowc(g) = x2l(index_x2l_Faxa_snowc,i)   ! mm/s
+         forc_snowl(g) = x2l(index_x2l_Faxa_snowl,i)   ! mm/s
+
+         ! atmosphere coupling, for prognostic/prescribed aerosols
+         atm2lnd_inst%forc_aer_grc(g,1)  = x2l(index_x2l_Faxa_bcphidry,i)
+         atm2lnd_inst%forc_aer_grc(g,2)  = x2l(index_x2l_Faxa_bcphodry,i)
+         atm2lnd_inst%forc_aer_grc(g,3)  = x2l(index_x2l_Faxa_bcphiwet,i)
+         atm2lnd_inst%forc_aer_grc(g,4)  = x2l(index_x2l_Faxa_ocphidry,i)
+         atm2lnd_inst%forc_aer_grc(g,5)  = x2l(index_x2l_Faxa_ocphodry,i)
+         atm2lnd_inst%forc_aer_grc(g,6)  = x2l(index_x2l_Faxa_ocphiwet,i)
+         atm2lnd_inst%forc_aer_grc(g,7)  = x2l(index_x2l_Faxa_dstwet1,i)
+         atm2lnd_inst%forc_aer_grc(g,8)  = x2l(index_x2l_Faxa_dstdry1,i)
+         atm2lnd_inst%forc_aer_grc(g,9)  = x2l(index_x2l_Faxa_dstwet2,i)
+         atm2lnd_inst%forc_aer_grc(g,10) = x2l(index_x2l_Faxa_dstdry2,i)
+         atm2lnd_inst%forc_aer_grc(g,11) = x2l(index_x2l_Faxa_dstwet3,i)
+         atm2lnd_inst%forc_aer_grc(g,12) = x2l(index_x2l_Faxa_dstdry3,i)
+         atm2lnd_inst%forc_aer_grc(g,13) = x2l(index_x2l_Faxa_dstwet4,i)
+         atm2lnd_inst%forc_aer_grc(g,14) = x2l(index_x2l_Faxa_dstdry4,i)
+
+         if (index_x2l_Sa_methane /= 0) then
+            atm2lnd_inst%forc_pch4_grc(g) = x2l(index_x2l_Sa_methane,i)
+         endif
+
+         !--------------------------
+         ! Check for nans from coupler
+         !--------------------------
+
+         call check_for_nans(x2l(:,i), fname, begg, "x2l")
+
+      end do
+
+      !--------------------------
+      ! Derived quantities for required fields
+      ! and corresponding error checks
+      !--------------------------
+
+      call derive_quantities(bounds, atm2lnd_inst, wateratm2lndbulk_inst, &
+           forc_rainc, forc_rainl, forc_snowc, forc_snowl)
+
+      call check_for_errors(bounds, atm2lnd_inst, wateratm2lndbulk_inst)
+
+      ! Determine derived quantities for optional fields
+      ! Note that the following does unit conversions from ppmv to partial pressures (Pa)
+      ! Note that forc_pbot is in Pa
+
+      do g = begg,endg
+         i = 1 + (g - begg)
+
+         forc_pbot = atm2lnd_inst%forc_pbot_not_downscaled_grc(g)
+
+         ! Determine optional receive fields
+         if (index_x2l_Sa_co2prog /= 0) then
+            co2_ppmv_prog = x2l(index_x2l_Sa_co2prog,i)   ! co2 atm state prognostic
+         else
+            co2_ppmv_prog = co2_ppmv
+         end if
+         if (index_x2l_Sa_co2diag /= 0) then
+            co2_ppmv_diag = x2l(index_x2l_Sa_co2diag,i)   ! co2 atm state diagnostic
+         else
+            co2_ppmv_diag = co2_ppmv
+         end if
+
+         if (co2_type_idx == 1) then
+            co2_ppmv_val = co2_ppmv_prog
+         else if (co2_type_idx == 2) then
+            co2_ppmv_val = co2_ppmv_diag
+         else
+            co2_ppmv_val = co2_ppmv
+         end if
+         if ( (co2_ppmv_val < 10.0_r8) .or. (co2_ppmv_val > 15000.0_r8) )then
+            call endrun(subgrid_index=g, subgrid_level=subgrid_level_gridcell, &
+                 msg = sub//' ERROR: CO2 is outside of an expected range' )
+         end if
+         atm2lnd_inst%forc_pco2_grc(g) = co2_ppmv_val * 1.e-6_r8 * forc_pbot
+         if (use_c13) then
+            atm2lnd_inst%forc_pc13o2_grc(g) = co2_ppmv_val * c13ratio * 1.e-6_r8 * forc_pbot
+         end if
+
+         if (ndep_from_cpl) then
+            ! The coupler is sending ndep in units of kgN/m2/s, and clm uses units
+            ! of gN/m2/sec, so the following conversion needs to happen:
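+            ! A worked example of this conversion with hypothetical coupler
+            ! values (not real data): nhx = 2.0e-12 and noy = 1.0e-12 kgN/m2/s
+            ! give
+            !   forc_ndep = (2.0e-12 + 1.0e-12) * 1000. = 3.0e-9 gN/m2/s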
+            atm2lnd_inst%forc_ndep_grc(g) = (x2l(index_x2l_Faxa_nhx, i) + x2l(index_x2l_faxa_noy, i))*1000._r8
+         end if
+
+      end do
+
+      call glc2lnd_inst%set_glc2lnd_fields_mct( &
+           bounds = bounds, &
+           glc_present = glc_present, &
+           ! NOTE(wjs, 2017-12-13) the x2l argument doesn't have the typical bounds
+           ! subsetting (bounds%begg:bounds%endg). This mirrors the lack of these bounds in
+           ! the call to lnd_import from lnd_run_mct. This is okay as long as this code is
+           ! outside a clump loop.
+           x2l = x2l, &
+           index_x2l_Sg_ice_covered = index_x2l_Sg_ice_covered, &
+           index_x2l_Sg_topo = index_x2l_Sg_topo, &
+           index_x2l_Flgg_hflx = index_x2l_Flgg_hflx, &
+           index_x2l_Sg_icemask = index_x2l_Sg_icemask, &
+           index_x2l_Sg_icemask_coupled_fluxes = index_x2l_Sg_icemask_coupled_fluxes)
+
+   end subroutine lnd_import
+
+   !===============================================================================
+
+   subroutine lnd_export( bounds, waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x)
+
+      !---------------------------------------------------------------------------
+      ! !DESCRIPTION:
+      ! Convert the data to be sent from the clm model to the coupler
+      !
+      ! !USES:
+      use shr_kind_mod      , only : r8 => shr_kind_r8
+      use seq_flds_mod      , only : seq_flds_l2x_fields
+      use clm_varctl        , only : iulog
+      use shr_drydep_mod    , only : n_drydep
+      use shr_megan_mod     , only : shr_megan_mechcomps_n
+      use shr_fire_emis_mod , only : shr_fire_emis_mechcomps_n
+      use lnd_import_export_utils, only : check_for_nans
+      !
+      ! !ARGUMENTS:
+      implicit none
+      type(bounds_type)          , intent(in)    :: bounds        ! bounds
+      type(lnd2atm_type)         , intent(inout) :: lnd2atm_inst  ! clm land to atmosphere exchange data type
+      type(lnd2glc_type)         , intent(inout) :: lnd2glc_inst  ! clm land to glacier exchange data type
+      type(waterlnd2atmbulk_type), intent(in)    :: waterlnd2atmbulk_inst
+      real(r8)                   , intent(out)   :: l2x(:,:)      ! land to coupler export state on land grid
+      !
+      ! !LOCAL VARIABLES:
+      integer :: begg, endg ! bounds
+      integer :: g,i,k      ! indices
+      integer :: ier        ! error status
+      integer :: nstep      ! time step index
+      integer :: dtime      ! time step
+      integer :: num        ! counter
+      character(len=32) :: fname ! name of field that is NaN
+      character(len=32), parameter :: sub = 'lnd_export'
+      !---------------------------------------------------------------------------
+
+      ! Set bounds
+      begg = bounds%begg; endg = bounds%endg
+
+      ! 
cesm sign convention is that fluxes are positive downward + + l2x(:,:) = 0.0_r8 + + do g = begg,endg + i = 1 + (g-begg) + l2x(index_l2x_Sl_t,i) = lnd2atm_inst%t_rad_grc(g) + l2x(index_l2x_Sl_snowh,i) = waterlnd2atmbulk_inst%h2osno_grc(g) + l2x(index_l2x_Sl_avsdr,i) = lnd2atm_inst%albd_grc(g,1) + l2x(index_l2x_Sl_anidr,i) = lnd2atm_inst%albd_grc(g,2) + l2x(index_l2x_Sl_avsdf,i) = lnd2atm_inst%albi_grc(g,1) + l2x(index_l2x_Sl_anidf,i) = lnd2atm_inst%albi_grc(g,2) + l2x(index_l2x_Sl_tref,i) = lnd2atm_inst%t_ref2m_grc(g) + l2x(index_l2x_Sl_qref,i) = waterlnd2atmbulk_inst%q_ref2m_grc(g) + l2x(index_l2x_Sl_u10,i) = lnd2atm_inst%u_ref10m_grc(g) + l2x(index_l2x_Fall_taux,i) = -lnd2atm_inst%taux_grc(g) + l2x(index_l2x_Fall_tauy,i) = -lnd2atm_inst%tauy_grc(g) + l2x(index_l2x_Fall_lat,i) = -lnd2atm_inst%eflx_lh_tot_grc(g) + l2x(index_l2x_Fall_sen,i) = -lnd2atm_inst%eflx_sh_tot_grc(g) + l2x(index_l2x_Fall_lwup,i) = -lnd2atm_inst%eflx_lwrad_out_grc(g) + l2x(index_l2x_Fall_evap,i) = -waterlnd2atmbulk_inst%qflx_evap_tot_grc(g) + l2x(index_l2x_Fall_swnet,i) = lnd2atm_inst%fsa_grc(g) + if (index_l2x_Fall_fco2_lnd /= 0) then + l2x(index_l2x_Fall_fco2_lnd,i) = -lnd2atm_inst%net_carbon_exchange_grc(g) + end if + + ! Additional fields for DUST, PROGSSLT, dry-deposition and VOC + ! These are now standard fields, but the check on the index makes sure the driver handles them + if (index_l2x_Sl_ram1 /= 0 ) l2x(index_l2x_Sl_ram1,i) = lnd2atm_inst%ram1_grc(g) + if (index_l2x_Sl_fv /= 0 ) l2x(index_l2x_Sl_fv,i) = lnd2atm_inst%fv_grc(g) + if (index_l2x_Sl_soilw /= 0 ) l2x(index_l2x_Sl_soilw,i) = waterlnd2atmbulk_inst%h2osoi_vol_grc(g,1) + if (index_l2x_Fall_flxdst1 /= 0 ) l2x(index_l2x_Fall_flxdst1,i)= -lnd2atm_inst%flxdst_grc(g,1) + if (index_l2x_Fall_flxdst2 /= 0 ) l2x(index_l2x_Fall_flxdst2,i)= -lnd2atm_inst%flxdst_grc(g,2) + if (index_l2x_Fall_flxdst3 /= 0 ) l2x(index_l2x_Fall_flxdst3,i)= -lnd2atm_inst%flxdst_grc(g,3) + if (index_l2x_Fall_flxdst4 /= 0 ) l2x(index_l2x_Fall_flxdst4,i)= -lnd2atm_inst%flxdst_grc(g,4) + + + ! for dry dep velocities + if (index_l2x_Sl_ddvel /= 0 ) then + l2x(index_l2x_Sl_ddvel:index_l2x_Sl_ddvel+n_drydep-1,i) = & + lnd2atm_inst%ddvel_grc(g,:n_drydep) + end if + + ! for MEGAN VOC emis fluxes + if (index_l2x_Fall_flxvoc /= 0 ) then + l2x(index_l2x_Fall_flxvoc:index_l2x_Fall_flxvoc+shr_megan_mechcomps_n-1,i) = & + -lnd2atm_inst%flxvoc_grc(g,:shr_megan_mechcomps_n) + end if + + + ! for fire emis fluxes + if (index_l2x_Fall_flxfire /= 0 ) then + l2x(index_l2x_Fall_flxfire:index_l2x_Fall_flxfire+shr_fire_emis_mechcomps_n-1,i) = & + -lnd2atm_inst%fireflx_grc(g,:shr_fire_emis_mechcomps_n) + l2x(index_l2x_Sl_ztopfire,i) = lnd2atm_inst%fireztop_grc(g) + end if + + if (index_l2x_Fall_methane /= 0) then + l2x(index_l2x_Fall_methane,i) = -lnd2atm_inst%ch4_surf_flux_tot_grc(g) + endif + + ! sign convention is positive downward with + ! hierarchy of atm/glc/lnd/rof/ice/ocn. + ! I.e. water sent from land to rof is positive + + l2x(index_l2x_Flrl_rofsur,i) = waterlnd2atmbulk_inst%qflx_rofliq_qsur_grc(g) + + ! subsurface runoff is the sum of qflx_drain and qflx_perched_drain + l2x(index_l2x_Flrl_rofsub,i) = waterlnd2atmbulk_inst%qflx_rofliq_qsub_grc(g) & + + waterlnd2atmbulk_inst%qflx_rofliq_drain_perched_grc(g) + + ! qgwl sent individually to coupler + l2x(index_l2x_Flrl_rofgwl,i) = waterlnd2atmbulk_inst%qflx_rofliq_qgwl_grc(g) + + ! ice sent individually to coupler + l2x(index_l2x_Flrl_rofi,i) = waterlnd2atmbulk_inst%qflx_rofice_grc(g) + + ! 
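+         ! Sign-convention sketch for the rof fields above, with an assumed
+         ! magnitude (illustrative only): a surface runoff of 1.0e-5 mm/s
+         ! leaving the land is sent as
+         !   l2x(index_l2x_Flrl_rofsur,i) = +1.0e-5
+         ! while fluxes the land sends upward to the atm (taux, sen, lat, ...)
+         ! are negated above, since the cesm convention is positive downward.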
+         ! irrigation flux to be removed from main channel storage (negative)
+         l2x(index_l2x_Flrl_irrig,i) = - waterlnd2atmbulk_inst%qirrig_grc(g)
+
+         ! glc coupling
+         ! We could avoid setting these fields if glc_present is .false., if that would
+         ! help with performance. (The downside would be that we wouldn't have these fields
+         ! available for diagnostic purposes or to force a later T compset with dlnd.)
+         do num = 0,glc_nec
+            l2x(index_l2x_Sl_tsrf(num),i)   = lnd2glc_inst%tsrf_grc(g,num)
+            l2x(index_l2x_Sl_topo(num),i)   = lnd2glc_inst%topo_grc(g,num)
+            l2x(index_l2x_Flgl_qice(num),i) = lnd2glc_inst%qice_grc(g,num)
+         end do
+
+         !--------------------------
+         ! Check for nans to coupler
+         !--------------------------
+
+         call check_for_nans(l2x(:,i), fname, begg, "l2x")
+
+      end do
+
+   end subroutine lnd_export
+
+end module lnd_import_export
diff --git a/src/cpl/mct/lnd_set_decomp_and_domain.F90 b/src/cpl/mct/lnd_set_decomp_and_domain.F90
new file mode 100644
index 0000000000..0a37554313
--- /dev/null
+++ b/src/cpl/mct/lnd_set_decomp_and_domain.F90
@@ -0,0 +1,352 @@
+module lnd_set_decomp_and_domain
+
+  use shr_kind_mod , only : r8 => shr_kind_r8
+  use spmdMod      , only : masterproc
+  use clm_varctl   , only : iulog
+  use mct_mod      , only : mct_gsMap
+
+  implicit none
+  private ! except
+
+  ! public member routines
+  public :: lnd_set_decomp_and_domain_from_surfrd
+
+  ! private member routines
+  private :: surfrd_get_globmask ! Reads global land mask (needed for setting domain decomp)
+  private :: surfrd_get_grid     ! Read grid/landfrac data into domain (after domain decomp)
+
+  ! translation between local and global indices at gridcell level
+  type(mct_gsmap), pointer, public :: gsmap_global
+
+  ! translation between local and global indices at gridcell level for multiple levels
+  ! needed for 3d soil moisture stream
+  type(mct_gsmap), target , public :: gsMap_lnd2Dsoi_gdc2glo
+
+  character(len=*), parameter, private :: sourcefile = &
+       __FILE__
+
+!===============================================================================
+contains
+!===============================================================================
+
+  subroutine lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj)
+
+    ! Initialize ldomain data types
+
+    use clm_varpar    , only: nlevsoi
+    use clm_varctl    , only: fatmlndfrc, use_soil_moisture_streams
+    use decompInitMod , only: decompInit_lnd
+    use decompMod     , only: bounds_type, get_proc_bounds
+    use domainMod     , only: ldomain, domain_check
+
+    ! input/output variables
+    logical, intent(out) :: noland
+    integer, intent(out) :: ni, nj ! global grid sizes
+
+    ! local variables
+    integer ,pointer  :: amask(:)   ! global land mask
+    integer           :: begg, endg ! processor bounds
+    type(bounds_type) :: bounds     ! bounds
+    character(len=32) :: subname = 'lnd_set_decomp_and_domain_from_surfrd'
+    !-----------------------------------------------------------------------
+
+    ! Read in global land grid and land mask (amask) - needed to set decomposition
+    ! global memory for amask is allocated in surfrd_get_globmask - must be deallocated below
+    if (masterproc) then
+       write(iulog,*) 'Attempting to read global land mask from ',trim(fatmlndfrc)
+    endif
+
+    ! Get global mask, ni and nj
+    call surfrd_get_globmask(filename=fatmlndfrc, mask=amask, ni=ni, nj=nj)
+
+    ! Exit early if no valid land points
+    if ( all(amask == 0) )then
+       if (masterproc) write(iulog,*) trim(subname)//': no valid land points -- do NOT run clm'
+       noland = .true.
+       return
+    else
+       noland = .false.
+    end if
+
+    ! 
Determine ctsm gridcell decomposition and processor bounds for gridcells + call decompInit_lnd(ni, nj, amask) + deallocate(amask) + if (use_soil_moisture_streams) call decompInit_lnd3D(ni, nj, nlevsoi) + + ! Initialize bounds for just gridcells + ! Remaining bounds (landunits, columns, patches) will be determined + ! after the call to decompInit_glcp - so get_proc_bounds is called + ! twice and the gridcell information is just filled in twice + call get_proc_bounds(bounds) + + ! Get grid cell bounds values + begg = bounds%begg + endg = bounds%endg + + ! Initialize ldomain data type + if (masterproc) then + write(iulog,*) 'Attempting to read ldomain from ',trim(fatmlndfrc) + endif + call surfrd_get_grid(begg, endg, ldomain, fatmlndfrc) + if (masterproc) then + call domain_check(ldomain) + endif + ldomain%mask = 1 !!! TODO - is this needed? + + end subroutine lnd_set_decomp_and_domain_from_surfrd + + !----------------------------------------------------------------------- + subroutine surfrd_get_globmask(filename, mask, ni, nj) + + ! Read the surface dataset grid related information + ! This is used to set the domain decomposition - so global data is read here + + use fileutils , only : getfil + use ncdio_pio , only : ncd_io, ncd_pio_openfile, ncd_pio_closefile, ncd_inqfdims, file_desc_t + use abortutils , only : endrun + use shr_log_mod, only : errMsg => shr_log_errMsg + + ! input/output variables + character(len=*), intent(in) :: filename ! grid filename + integer , pointer :: mask(:) ! grid mask + integer , intent(out) :: ni, nj ! global grid sizes + + ! local variables + logical :: isgrid2d + integer :: dimid,varid ! netCDF id's + integer :: ns ! size of grid on file + integer :: n,i,j ! index + integer :: ier ! error status + type(file_desc_t) :: ncid ! netcdf id + character(len=256) :: locfn ! local file name + logical :: readvar ! read variable in or not + integer , allocatable :: idata2d(:,:) + character(len=32) :: subname = 'surfrd_get_globmask' ! subroutine name + !----------------------------------------------------------------------- + + if (filename == ' ') then + mask(:) = 1 + else + ! Check if file exists + if (masterproc) then + if (filename == ' ') then + write(iulog,*) trim(subname),' ERROR: filename must be specified ' + call endrun(msg=errMsg(sourcefile, __LINE__)) + endif + end if + + ! Open file + call getfil( filename, locfn, 0 ) + call ncd_pio_openfile (ncid, trim(locfn), 0) + + ! Determine dimensions and if grid file is 2d or 1d + call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns) + if (masterproc) then + write(iulog,*)'lat/lon grid flag (isgrid2d) is ',isgrid2d + end if + allocate(mask(ns)) + mask(:) = 1 + if (isgrid2d) then + ! Grid is 2d + allocate(idata2d(ni,nj)) + idata2d(:,:) = 1 + call ncd_io(ncid=ncid, varname='LANDMASK', data=idata2d, flag='read', readvar=readvar) + if (.not. readvar) then + call ncd_io(ncid=ncid, varname='mask', data=idata2d, flag='read', readvar=readvar) + end if + if (readvar) then + do j = 1,nj + do i = 1,ni + n = (j-1)*ni + i + mask(n) = idata2d(i,j) + enddo + enddo + end if + deallocate(idata2d) + else + ! Grid is not 2d + call ncd_io(ncid=ncid, varname='LANDMASK', data=mask, flag='read', readvar=readvar) + if (.not. readvar) then + call ncd_io(ncid=ncid, varname='mask', data=mask, flag='read', readvar=readvar) + end if + end if + if (.not. readvar) call endrun( msg=' ERROR: landmask not on fatmlndfrc file'//errMsg(sourcefile, __LINE__)) + + ! 
Close file + call ncd_pio_closefile(ncid) + end if + + end subroutine surfrd_get_globmask + + !----------------------------------------------------------------------- + subroutine surfrd_get_grid(begg, endg, ldomain, filename) + + ! Read the surface dataset grid related information: + ! This is called after the domain decomposition has been created + ! - real latitude of grid cell (degrees) + ! - real longitude of grid cell (degrees) + + use clm_varcon , only : spval, re, grlnd + use domainMod , only : domain_type, lon1d, lat1d, domain_init + use fileutils , only : getfil + use abortutils , only : endrun + use shr_log_mod , only : errMsg => shr_log_errMsg + use ncdio_pio , only : file_desc_t, ncd_pio_openfile, ncd_pio_closefile + use ncdio_pio , only : ncd_io, check_var, ncd_inqfdims, check_dim_size + use pio + + ! input/output variables + integer , intent(in) :: begg, endg + type(domain_type) , intent(inout) :: ldomain ! domain to init + character(len=*) , intent(in) :: filename ! grid filename + + ! local variables + type(file_desc_t) :: ncid ! netcdf id + integer :: beg ! local beg index + integer :: end ! local end index + integer :: ni,nj,ns ! size of grid on file + logical :: readvar ! true => variable is on input file + logical :: isgrid2d ! true => file is 2d lat/lon + logical :: istype_domain ! true => input file is of type domain + real(r8), allocatable :: rdata2d(:,:) ! temporary + character(len=16) :: vname ! temporary + character(len=256) :: locfn ! local file name + integer :: n ! indices + character(len=32) :: subname = 'surfrd_get_grid' ! subroutine name + !----------------------------------------------------------------------- + + if (masterproc) then + if (filename == ' ') then + write(iulog,*) trim(subname),' ERROR: filename must be specified ' + call endrun(msg=errMsg(sourcefile, __LINE__)) + endif + end if + + call getfil( filename, locfn, 0 ) + call ncd_pio_openfile (ncid, trim(locfn), 0) + + ! Determine dimensions + call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns) + + ! Determine isgrid2d flag for domain + call domain_init(ldomain, isgrid2d=isgrid2d, ni=ni, nj=nj, nbeg=begg, nend=endg) + + ! Determine type of file - old style grid file or new style domain file + call check_var(ncid=ncid, varname='xc', readvar=readvar) + if (readvar)then + istype_domain = .true. + else + istype_domain = .false. + end if + + ! Read in area, lon, lat + if (istype_domain) then + call ncd_io(ncid=ncid, varname= 'area', flag='read', data=ldomain%area, & + dim1name=grlnd, readvar=readvar) + ! convert from radians**2 to km**2 + ldomain%area = ldomain%area * (re**2) + if (.not. readvar) call endrun( msg=' ERROR: area NOT on file'//errMsg(sourcefile, __LINE__)) + call ncd_io(ncid=ncid, varname= 'xc', flag='read', data=ldomain%lonc, & + dim1name=grlnd, readvar=readvar) + if (.not. readvar) call endrun( msg=' ERROR: xc NOT on file'//errMsg(sourcefile, __LINE__)) + call ncd_io(ncid=ncid, varname= 'yc', flag='read', data=ldomain%latc, & + dim1name=grlnd, readvar=readvar) + if (.not. 
readvar) call endrun( msg=' ERROR: yc NOT on file'//errMsg(sourcefile, __LINE__))
+    else
+       call endrun( msg=" ERROR: can no longer read non domain files" )
+    end if
+
+    if (isgrid2d) then
+       allocate(rdata2d(ni,nj), lon1d(ni), lat1d(nj))
+       if (istype_domain) vname = 'xc'
+       call ncd_io(ncid=ncid, varname=trim(vname), data=rdata2d, flag='read', readvar=readvar)
+       lon1d(:) = rdata2d(:,1)
+       if (istype_domain) vname = 'yc'
+       call ncd_io(ncid=ncid, varname=trim(vname), data=rdata2d, flag='read', readvar=readvar)
+       lat1d(:) = rdata2d(1,:)
+       deallocate(rdata2d)
+    end if
+
+    ! Check lat limited to -90,90
+    if (minval(ldomain%latc) < -90.0_r8 .or. &
+        maxval(ldomain%latc) >  90.0_r8) then
+       write(iulog,*) trim(subname),' WARNING: lat/lon min/max is ', &
+            minval(ldomain%latc),maxval(ldomain%latc)
+    endif
+    if ( any(ldomain%lonc < 0.0_r8) )then
+       call endrun( msg=' ERROR: lonc is negative (see https://github.com/ESCOMP/ctsm/issues/507)' &
+            //errMsg(sourcefile, __LINE__))
+    endif
+    call ncd_io(ncid=ncid, varname='mask', flag='read', data=ldomain%mask, &
+         dim1name=grlnd, readvar=readvar)
+    if (.not. readvar) then
+       call endrun( msg=' ERROR: LANDMASK NOT on fracdata file'//errMsg(sourcefile, __LINE__))
+    end if
+    call ncd_io(ncid=ncid, varname='frac', flag='read', data=ldomain%frac, &
+         dim1name=grlnd, readvar=readvar)
+    if (.not. readvar) then
+       call endrun( msg=' ERROR: LANDFRAC NOT on fracdata file'//errMsg(sourcefile, __LINE__))
+    end if
+
+    call ncd_pio_closefile(ncid)
+
+  end subroutine surfrd_get_grid
+
+  !------------------------------------------------------------------------------
+  subroutine decompInit_lnd3D(lni,lnj,lnk)
+    !
+    ! !DESCRIPTION:
+    ! Create a 3D decomposition gsmap for the global 2D grid with soil levels
+    ! as the 3rd dimension.
+    !
+    ! !USES:
+    use decompMod, only : gindex_global, bounds_type, get_proc_bounds
+    use spmdMod  , only : comp_id, mpicom
+    use mct_mod  , only : mct_gsmap_init
+    !
+    ! !ARGUMENTS:
+    integer , intent(in) :: lni,lnj,lnk ! domain global size
+    !
+    ! !LOCAL VARIABLES:
+    integer :: m,n,k                 ! indices
+    integer :: begg,endg,lsize,gsize ! used for gsmap init
+    integer :: begg3d,endg3d
+    integer, pointer :: gindex(:)    ! global index for gsmap init
+    type(bounds_type) :: bounds
+    !------------------------------------------------------------------------------
+
+    ! Initialize gsmap_lnd2dsoi_gdc2glo
+    call get_proc_bounds(bounds)
+    begg = bounds%begg; endg = bounds%endg
+
+    begg3d = (begg-1)*lnk + 1
+    endg3d = endg*lnk
+    lsize = (endg3d - begg3d + 1 )
+    allocate(gindex(begg3d:endg3d))
+    do k = 1, lnk
+       do n = begg,endg
+          m = (begg-1)*lnk + (k-1)*(endg-begg+1) + (n-begg+1)
+          gindex(m) = gindex_global(n-begg+1) + (k-1)*(lni*lnj)
+       enddo
+    enddo
+    gsize = lni * lnj * lnk
+    call mct_gsMap_init(gsMap_lnd2Dsoi_gdc2glo, gindex, mpicom, comp_id, lsize, gsize)
+
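+    ! A worked sketch of the 3D index arithmetic above, with hypothetical
+    ! sizes (not a real grid): lni*lnj = 100 global gridcells, lnk = 2 levels,
+    ! a processor owning begg = 11 .. endg = 20, and gindex_global(1) = 41:
+    !   k = 1, n = 11  ->  m = 20 + 0*10 + 1 = 21,  gindex(21) = 41
+    !   k = 2, n = 11  ->  m = 20 + 1*10 + 1 = 31,  gindex(31) = 41 + 100 = 141
+    ! i.e. level k of a gridcell maps to its 2D global index offset by
+    ! (k-1)*lni*lnj.
+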
+    ! Diagnostic output
+
+    if (masterproc) then
+       write(iulog,*)' 3D GSMap'
+       write(iulog,*)'   longitude points = ',lni
+       write(iulog,*)'   latitude points  = ',lnj
+       write(iulog,*)'   soil levels      = ',lnk
+       write(iulog,*)'   gsize            = ',gsize
+       write(iulog,*)'   lsize            = ',lsize
+       write(iulog,*)'   bounds(gindex)   = ',size(gindex)
+       write(iulog,*)
+    end if
+
+    deallocate(gindex)
+
+  end subroutine decompInit_lnd3D
+
+end module lnd_set_decomp_and_domain
diff --git a/src/cpl/mct/ndepStreamMod.F90 b/src/cpl/mct/ndepStreamMod.F90
new file mode 100644
index 0000000000..d26ff7c95e
--- /dev/null
+++ b/src/cpl/mct/ndepStreamMod.F90
@@ -0,0 +1,376 @@
+module ndepStreamMod
+
+   !-----------------------------------------------------------------------
+   ! !DESCRIPTION:
+   ! Contains methods for reading in nitrogen deposition data file
+   ! Also includes functions for dynamic ndep file handling and
+   ! interpolation.
+   !
+   ! !USES:
+   use shr_kind_mod   , only: r8 => shr_kind_r8, CL => shr_kind_cl
+   use shr_strdata_mod, only: shr_strdata_type, shr_strdata_create
+   use shr_strdata_mod, only: shr_strdata_print, shr_strdata_advance
+   use mct_mod        , only: mct_ggrid
+   use spmdMod        , only: mpicom, masterproc, comp_id, iam
+   use clm_varctl     , only: iulog, inst_name
+   use abortutils     , only: endrun
+   use decompMod      , only: bounds_type
+   use domainMod      , only: ldomain
+
+   ! !PUBLIC TYPES:
+   implicit none
+   private
+
+   ! !PUBLIC MEMBER FUNCTIONS:
+   public :: ndep_init      ! position datasets for dynamic ndep
+   public :: ndep_interp    ! interpolates between two years of ndep file data
+   public :: clm_domain_mct ! Sets up MCT domain for this resolution
+
+   ! !PRIVATE MEMBER FUNCTIONS:
+   private :: check_units   ! Check the units and make sure they can be used
+
+   ! ! PRIVATE TYPES
+   type(shr_strdata_type) :: sdat                 ! input data stream
+   integer :: stream_year_first_ndep              ! first year in stream to use
+   integer :: stream_year_last_ndep               ! last year in stream to use
+   integer :: model_year_align_ndep               ! align stream_year_first_ndep with this model year
+   logical :: divide_by_secs_per_yr = .true.      ! divide by the number of seconds per year
+
+   character(len=*), parameter, private :: sourcefile = &
+        __FILE__
+   !==============================================================================
+
+contains
+
+   !==============================================================================
+
+   subroutine ndep_init(bounds, NLFilename)
+      !
+      ! Initialize data stream information.
+      !
+      ! Uses:
+      use shr_kind_mod     , only : CS => shr_kind_cs
+      use clm_time_manager , only : get_calendar
+      use ncdio_pio        , only : pio_subsystem
+      use shr_pio_mod      , only : shr_pio_getiotype
+      use shr_nl_mod       , only : shr_nl_find_group_name
+      use shr_log_mod      , only : errMsg => shr_log_errMsg
+      use shr_mpi_mod      , only : shr_mpi_bcast
+      use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo, gsmap_global
+      !
+      ! arguments
+      implicit none
+      type(bounds_type), intent(in) :: bounds
+      character(len=*),  intent(in) :: NLFilename ! Namelist filename
+      !
+      ! local variables
+      integer :: nu_nml              ! unit for namelist file
+      integer :: nml_error           ! namelist i/o error flag
+      type(mct_ggrid) :: dom_clm ! 
domain information + character(len=CL) :: stream_fldFileName_ndep + character(len=CL) :: ndepmapalgo = 'bilinear' + character(len=CL) :: ndep_tintalgo = 'linear' + character(len=CS) :: ndep_taxmode = 'extend' + character(len=CL) :: ndep_varlist = 'NDEP_year' + character(*), parameter :: shr_strdata_unset = 'NOT_SET' + character(*), parameter :: subName = "('ndepdyn_init')" + character(*), parameter :: F00 = "('(ndepdyn_init) ',4a)" + !----------------------------------------------------------------------- + + namelist /ndepdyn_nml/ & + stream_year_first_ndep, & + stream_year_last_ndep, & + model_year_align_ndep, & + ndepmapalgo, ndep_taxmode, & + ndep_varlist, & + stream_fldFileName_ndep, & + ndep_tintalgo + + ! Default values for namelist + stream_year_first_ndep = 1 ! first year in stream to use + stream_year_last_ndep = 1 ! last year in stream to use + model_year_align_ndep = 1 ! align stream_year_first_ndep with this model year + stream_fldFileName_ndep = ' ' + + ! Read ndepdyn_nml namelist + if (masterproc) then + open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) + call shr_nl_find_group_name(nu_nml, 'ndepdyn_nml', status=nml_error) + if (nml_error == 0) then + read(nu_nml, nml=ndepdyn_nml,iostat=nml_error) + if (nml_error /= 0) then + call endrun(msg=' ERROR reading ndepdyn_nml namelist'//errMsg(sourcefile, __LINE__)) + end if + else + call endrun(msg=' ERROR finding ndepdyn_nml namelist'//errMsg(sourcefile, __LINE__)) + end if + close(nu_nml) + endif + + call shr_mpi_bcast(stream_year_first_ndep , mpicom) + call shr_mpi_bcast(stream_year_last_ndep , mpicom) + call shr_mpi_bcast(model_year_align_ndep , mpicom) + call shr_mpi_bcast(stream_fldFileName_ndep, mpicom) + call shr_mpi_bcast(ndep_varlist , mpicom) + call shr_mpi_bcast(ndep_taxmode , mpicom) + call shr_mpi_bcast(ndep_tintalgo , mpicom) + + if (masterproc) then + write(iulog,*) ' ' + write(iulog,*) 'ndepdyn stream settings:' + write(iulog,*) ' stream_year_first_ndep = ',stream_year_first_ndep + write(iulog,*) ' stream_year_last_ndep = ',stream_year_last_ndep + write(iulog,*) ' model_year_align_ndep = ',model_year_align_ndep + write(iulog,*) ' stream_fldFileName_ndep = ',stream_fldFileName_ndep + write(iulog,*) ' ndep_varList = ',ndep_varList + write(iulog,*) ' ndep_taxmode = ',ndep_taxmode + write(iulog,*) ' ndep_tintalgo = ',ndep_tintalgo + write(iulog,*) ' ' + endif + ! Read in units + call check_units( stream_fldFileName_ndep, ndep_varList ) + + ! 
Set domain and create streams
+   call clm_domain_mct (bounds, dom_clm)
+
+   call shr_strdata_create(sdat,name="clmndep", &
+        pio_subsystem=pio_subsystem, &
+        pio_iotype=shr_pio_getiotype(inst_name), &
+        mpicom=mpicom, compid=comp_id, &
+        gsmap=gsmap_global, ggrid=dom_clm, &
+        nxg=ldomain%ni, nyg=ldomain%nj, &
+        yearFirst=stream_year_first_ndep, &
+        yearLast=stream_year_last_ndep, &
+        yearAlign=model_year_align_ndep, &
+        offset=0, &
+        domFilePath='', &
+        domFileName=trim(stream_fldFileName_ndep), &
+        domTvarName='time', &
+        domXvarName='lon' , &
+        domYvarName='lat' , &
+        domAreaName='area', &
+        domMaskName='mask', &
+        filePath='', &
+        filename=(/trim(stream_fldFileName_ndep)/),&
+        fldListFile=ndep_varlist, &
+        fldListModel=ndep_varlist, &
+        fillalgo='none', &
+        mapalgo=ndepmapalgo, &
+        tintalgo=ndep_tintalgo, &
+        calendar=get_calendar(), &
+        taxmode=ndep_taxmode )
+
+
+   if (masterproc) then
+      call shr_strdata_print(sdat,'CLMNDEP data')
+   endif
+
+  end subroutine ndep_init
+  !================================================================
+
+  subroutine check_units( stream_fldFileName_ndep, ndep_varList )
+    !-------------------------------------------------------------------
+    ! Check that the units on the file are correct, and determine whether any conversion is needed
+    use ncdio_pio     , only : ncd_pio_openfile, ncd_inqvid, ncd_getatt, ncd_pio_closefile, ncd_nowrite
+    use ncdio_pio     , only : file_desc_t, var_desc_t
+    use shr_kind_mod  , only : CS => shr_kind_cs
+    use shr_log_mod   , only : errMsg => shr_log_errMsg
+    use shr_string_mod, only : shr_string_listGetName
+    implicit none
+
+    !-----------------------------------------------------------------------
+    !
+    ! Arguments
+    character(len=*), intent(IN) :: stream_fldFileName_ndep ! ndep filename
+    character(len=*), intent(IN) :: ndep_varList            ! ndep variable list to examine
+    !
+    ! Local variables
+    type(file_desc_t) :: ncid      ! NetCDF filehandle for ndep file
+    type(var_desc_t)  :: vardesc   ! variable descriptor
+    integer           :: varid     ! variable index
+    logical           :: readvar   ! If variable was read
+    character(len=CS) :: ndepunits ! ndep units
+    character(len=CS) :: fname     ! ndep field name
+    !-----------------------------------------------------------------------
+    call ncd_pio_openfile( ncid, trim(stream_fldFileName_ndep), ncd_nowrite )
+    call shr_string_listGetName( ndep_varList, 1, fname )
+    call ncd_inqvid( ncid, fname, varid, vardesc, readvar=readvar )
+    if ( readvar ) then
+       call ncd_getatt( ncid, varid, "units", ndepunits )
+    else
+       call endrun(msg=' ERROR finding variable: '//trim(fname)//" in file: "// &
+                   trim(stream_fldFileName_ndep)//errMsg(sourcefile, __LINE__))
+    end if
+    call ncd_pio_closefile( ncid )
+
+    ! Now check to make sure they are correct
+    if ( trim(ndepunits) == "g(N)/m2/s" )then
+       divide_by_secs_per_yr = .false.
+    else if ( trim(ndepunits) == "g(N)/m2/yr" )then
+       divide_by_secs_per_yr = .true.
+    else
+       call endrun(msg=' ERROR in units for nitrogen deposition equal to: '//trim(ndepunits)//" not units expected"// &
+                   errMsg(sourcefile, __LINE__))
+    end if
+
+  end subroutine check_units
+
+  !================================================================
+  subroutine ndep_interp(bounds, atm2lnd_inst)
+
+    !-----------------------------------------------------------------------
+    use clm_time_manager, only : get_curr_date, get_curr_days_per_year
+    use clm_varcon      , only : secspday
+    use atm2lndType     , only : atm2lnd_type
+    !
+    ! Arguments
+    type(bounds_type) , intent(in)    :: bounds
+    type(atm2lnd_type), intent(inout) :: atm2lnd_inst
+    !
+    ! 
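check_units above only records whether a conversion is needed; the division itself is applied in ndep_interp, whose body follows. A standalone sketch of that conversion for the g(N)/m2/yr case (the flux value is made up):

    program ndep_units_sketch
      implicit none
      integer, parameter  :: r8 = selected_real_kind(12)
      real(r8), parameter :: secspday = 86400._r8   ! seconds per day, as in clm_varcon
      real(r8) :: per_yr, per_s
      integer  :: dayspyr
      dayspyr = 365      ! CTSM asks the time manager; fixed here for the sketch
      per_yr  = 0.5_r8   ! hypothetical deposition in g(N)/m2/yr
      per_s   = per_yr / (secspday * dayspyr)
      print *, 'deposition in g(N)/m2/s = ', per_s
    end program ndep_units_sketch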
Local variables
+    integer :: g, ig
+    integer :: year    ! year (0, ...) for nstep+1
+    integer :: mon     ! month (1, ..., 12) for nstep+1
+    integer :: day     ! day of month (1, ..., 31) for nstep+1
+    integer :: sec     ! seconds into current date for nstep+1
+    integer :: mcdate  ! Current model date (yyyymmdd)
+    integer :: dayspyr ! days per year
+    !-----------------------------------------------------------------------
+
+    call get_curr_date(year, mon, day, sec)
+    mcdate = year*10000 + mon*100 + day
+
+    call shr_strdata_advance(sdat, mcdate, sec, mpicom, 'ndepdyn')
+
+    if ( divide_by_secs_per_yr )then
+       ig = 0
+       dayspyr = get_curr_days_per_year( )
+       do g = bounds%begg,bounds%endg
+          ig = ig+1
+          atm2lnd_inst%forc_ndep_grc(g) = sdat%avs(1)%rAttr(1,ig) / (secspday * dayspyr)
+       end do
+    else
+       ig = 0
+       do g = bounds%begg,bounds%endg
+          ig = ig+1
+          atm2lnd_inst%forc_ndep_grc(g) = sdat%avs(1)%rAttr(1,ig)
+       end do
+    end if
+
+  end subroutine ndep_interp
+
+  !==============================================================================
+  subroutine clm_domain_mct(bounds, dom_clm, nlevels)
+
+    !-------------------------------------------------------------------
+    ! Set domain data type for internal clm grid
+    use clm_varcon , only : re
+    use domainMod  , only : ldomain
+    use mct_mod    , only : mct_ggrid, mct_gsMap_lsize, mct_gGrid_init
+    use mct_mod    , only : mct_gsMap_orderedPoints, mct_gGrid_importIAttr
+    use mct_mod    , only : mct_gGrid_importRAttr, mct_gsMap
+    use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo, gsmap_global
+    implicit none
+    !
+    ! arguments
+    type(bounds_type), intent(in) :: bounds
+    type(mct_ggrid), intent(out)  :: dom_clm ! Output domain information for land model
+    integer, intent(in), optional :: nlevels ! Number of levels if this is a 3D field
+    !
+    ! local variables
+    integer :: g,i,j,k            ! index
+    integer :: lsize              ! land model domain data size
+    real(r8), pointer :: data(:)  ! temporary
+    integer , pointer :: idata(:) ! temporary
+    integer :: nlevs              ! Number of vertical levels
+    type(mct_gsMap), pointer :: gsmap => null() ! MCT GS map
+    !-------------------------------------------------------------------
+    ! Set number of levels, and get the GS map for either the 2D or 3D grid
+    nlevs = 1
+    if ( present(nlevels) ) nlevs = nlevels
+    if ( nlevs == 1 ) then
+       gsmap => gsmap_global
+    else
+       gsmap => gsMap_lnd2Dsoi_gdc2glo
+    end if
+    !
+    ! Initialize mct domain type
+    ! lat/lon in degrees, area in radians^2, mask is 1 (land), 0 (non-land)
+    ! Note that in addition land carries around landfrac for the purposes of domain checking
+    !
+    lsize = mct_gsMap_lsize(gsmap, mpicom)
+    call mct_gGrid_init( GGrid=dom_clm, &
+         CoordChars='lat:lon:hgt', OtherChars='area:aream:mask:frac', lsize=lsize )
+    !
+    ! Allocate memory
+    !
+    allocate(data(lsize))
+    !
+    ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT
+    !
+    call mct_gsMap_orderedPoints(gsmap, iam, idata)
+    gsmap => null()
+    call mct_gGrid_importIAttr(dom_clm,'GlobGridNum',idata,lsize)
+    !
+    ! Determine domain (numbering runs West to East and South to North, starting at the South Pole)
+    ! Initialize attribute vector with special value
+    !
+    data(:) = -9999.0_R8
+    call mct_gGrid_importRAttr(dom_clm,"lat" ,data,lsize)
+    call mct_gGrid_importRAttr(dom_clm,"lon" ,data,lsize)
+    call mct_gGrid_importRAttr(dom_clm,"area" ,data,lsize)
+    call mct_gGrid_importRAttr(dom_clm,"aream",data,lsize)
+    data(:) = 0.0_R8
+    call mct_gGrid_importRAttr(dom_clm,"mask" ,data,lsize)
+    !
+    ! Determine bounds
+    !
+    ! 
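The fill loops that follow all repeat one pattern: copy a field indexed by the processor's gridcell range (bounds%begg to bounds%endg) into a 1-based buffer for the MCT attribute vector. A toy version of that index arithmetic, with invented bounds and latitudes:

    program pack_sketch
      implicit none
      integer, parameter :: begg = 101, endg = 104   ! hypothetical local gridcell bounds
      real    :: latc(begg:endg), buf(endg-begg+1)
      integer :: g, i
      latc = [42.0, 42.5, 43.0, 43.5]                ! hypothetical cell latitudes
      do g = begg, endg
         i = 1 + (g - begg)   ! the same 1-based packing used in clm_domain_mct
         buf(i) = latc(g)
      end do
      print *, buf
    end program pack_sketch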
Fill in correct values for domain components + ! Note aream will be filled in in the atm-lnd mapper + ! + do k = 1, nlevs + do g = bounds%begg,bounds%endg + i = 1 + (g - bounds%begg) + data(i) = ldomain%lonc(g) + end do + end do + call mct_gGrid_importRattr(dom_clm,"lon",data,lsize) + + do k = 1, nlevs + do g = bounds%begg,bounds%endg + i = 1 + (g - bounds%begg) + data(i) = ldomain%latc(g) + end do + end do + call mct_gGrid_importRattr(dom_clm,"lat",data,lsize) + + do k = 1, nlevs + do g = bounds%begg,bounds%endg + i = 1 + (g - bounds%begg) + data(i) = ldomain%area(g)/(re*re) + end do + end do + call mct_gGrid_importRattr(dom_clm,"area",data,lsize) + + do k = 1, nlevs + do g = bounds%begg,bounds%endg + i = 1 + (g - bounds%begg) + data(i) = real(ldomain%mask(g), r8) + end do + end do + call mct_gGrid_importRattr(dom_clm,"mask",data,lsize) + + do k = 1, nlevs + do g = bounds%begg,bounds%endg + i = 1 + (g - bounds%begg) + data(i) = real(ldomain%frac(g), r8) + end do + end do + call mct_gGrid_importRattr(dom_clm,"frac",data,lsize) + + deallocate(data) + deallocate(idata) + + end subroutine clm_domain_mct + +end module ndepStreamMod From 7c295b84189ae3e2e62d369cc8e65fbac80406b5 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 16 May 2024 14:47:23 -0600 Subject: [PATCH 077/126] Remove test/tools/test_driver.sh, unrelated to this PR, good to clean up --- test/tools/test_driver.sh | 721 -------------------------------------- 1 file changed, 721 deletions(-) delete mode 100755 test/tools/test_driver.sh diff --git a/test/tools/test_driver.sh b/test/tools/test_driver.sh deleted file mode 100755 index 307d1accf8..0000000000 --- a/test/tools/test_driver.sh +++ /dev/null @@ -1,721 +0,0 @@ -#!/bin/sh -# -# test_driver.sh: driver script for the offline testing of CLM of tools -# -# interactive usage on all machines: -# -# env ./test_driver.sh -i -# -# valid arguments: -# -i interactive usage -# -d debug usage -- display tests that will run -- but do NOT actually execute them -# -f force batch submission (avoids user prompt) -# -h displays this help message -# -# -# **pass environment variables by preceding above commands -# with 'env var1=setting var2=setting ' -# **more details in the CLM testing user's guide, accessible -# from the CLM developers web page - - -#will attach timestamp onto end of script name to prevent overwriting -cur_time=`date '+%H:%M:%S'` - -hostname=`hostname` -echo $hostname -case $hostname in - - ##Derecho - derecho* | dec*) - submit_script="test_driver_derecho${cur_time}.sh" - -##vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv writing to batch script vvvvvvvvvvvvvvvvvvv -cat > ./${submit_script} << EOF -#!/bin/sh -# - -interactive="YES" -input_file="tests_pretag_derecho_nompi" -c_threads=128 - -export INITMODULES="/glade/u/apps/derecho/23.06/spack/opt/spack/lmod/8.7.20/gcc/7.5.0/pdxb/lmod/lmod/init/sh" -. 
\$INITMODULES - -module --force purge -module load ncarenv -module load craype -module load intel -module load mkl -module load ncarcompilers -module load netcdf -module load nco -module load ncl - -#omp threads -if [ -z "\$CLM_THREADS" ]; then #threads NOT set on command line - export CLM_THREADS=\$c_threads -fi - -# Stop on first failed test -if [ -z "\$CLM_SOFF" ]; then #CLM_SOFF NOT set - export CLM_SOFF=FALSE -fi - -export CESM_MACH="derecho" -export CESM_COMP="intel" - -export NETCDF_DIR=\$NETCDF -export INC_NETCDF=\$NETCDF/include -export LIB_NETCDF=\$NETCDF/lib -export MAKE_CMD="gmake -j " -export CFG_STRING="" -export TOOLS_MAKE_STRING="USER_FC=ifort USER_LINKER=ifort USER_CPPDEFS=-DLINUX" -export MACH_WORKSPACE=\$SCRATCH -export CPRNC_EXE="$CESMDATAROOT/cprnc/cprnc" -dataroot="$CESMDATAROOT/inputdata" -export TOOLSLIBS="" -export REGRID_PROC=1 -export TOOLS_CONF_STRING="--mpilib mpi-serial" - - -echo_arg="" - -EOF -#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ writing to batch script ^^^^^^^^^^^^^^^^^^^ - ;; - - ##cheyenne - cheyenne* | r*i*n*) - submit_script="test_driver_cheyenne${cur_time}.sh" - -#vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv writing to batch script vvvvvvvvvvvvvvvvvvv -at > ./${submit_script} << EOF -!/bin/sh - - -interactive="YES" -input_file="tests_pretag_cheyenne_nompi" -c_threads=36 - - -export INITMODULES="/glade/u/apps/ch/opt/lmod/8.1.7/lmod/lmod/init/sh" -. \$INITMODULES - -module purge -module load ncarenv -module load intel -module load mkl -module load ncarcompilers -module load netcdf - -module load nco -module load ncl - -module load conda - - -##omp threads -if [ -z "\$CLM_THREADS" ]; then #threads NOT set on command line - export CLM_THREADS=\$c_threads -fi - -# Stop on first failed test -if [ -z "\$CLM_SOFF" ]; then #CLM_SOFF NOT set - export CLM_SOFF=FALSE -fi - -export CESM_MACH="cheyenne" -export CESM_COMP="intel" - -export NETCDF_DIR=\$NETCDF -export INC_NETCDF=\$NETCDF/include -export LIB_NETCDF=\$NETCDF/lib -export MAKE_CMD="gmake -j " -export CFG_STRING="" -export TOOLS_MAKE_STRING="USER_FC=ifort USER_LINKER=ifort USER_CPPDEFS=-DLINUX" -export MACH_WORKSPACE="/glade/scratch" -export CPRNC_EXE="$CESMDATAROOT/tools/cime/tools/cprnc/cprnc.cheyenne" -dataroot="$CESMDATAROOT" -export TOOLSLIBS="" -export REGRID_PROC=1 -export TOOLS_CONF_STRING="--mpilib mpi-serial" - - -echo_arg="" - -EOF -##^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ writing to batch script ^^^^^^^^^^^^^^^^^^^ - ;; - - ## DAV cluster - casper* | pronghorn*) - submit_script="test_driver_dav${cur_time}.sh" - -##vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv writing to batch script vvvvvvvvvvvvvvvvvvv -cat > ./${submit_script} << EOF -#!/bin/sh -# - -interactive="YES" -input_file="tests_posttag_dav_mpi" -c_threads=36 - - -export INITMODULES="/glade/u/apps/ch/opt/lmod/8.1.7/lmod/lmod/init/sh" -. 
\$INITMODULES - -module purge -module load ncarenv -module load intel -module load mkl -module load ncarcompilers -module load netcdf -module load openmpi - -module load nco -module load conda -module load ncl - - -##omp threads -if [ -z "\$CLM_THREADS" ]; then #threads NOT set on command line - export CLM_THREADS=\$c_threads -fi - -# Stop on first failed test -if [ -z "\$CLM_SOFF" ]; then #CLM_SOFF NOT set - export CLM_SOFF=FALSE -fi - -export CESM_MACH="cheyenne" -export CESM_COMP="intel" - -export NETCDF_DIR=\$NETCDF -export INC_NETCDF=\$NETCDF/include -export LIB_NETCDF=\$NETCDF/lib -export MAKE_CMD="gmake -j " -export CFG_STRING="" -export TOOLS_MAKE_STRING="USER_FC=ifort USER_LINKER=ifort USER_CPPDEFS=-DLINUX" -export MACH_WORKSPACE="/glade/scratch" -export CPRNC_EXE="$CESMDATAROOT/tools/cime/tools/cprnc/cprnc.cheyenne" -dataroot="$CESMDATAROOT" -export TOOLSLIBS="" -export TOOLS_CONF_STRING="--mpilib mpich" - - -echo_arg="" - -EOF -##^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ writing to batch script ^^^^^^^^^^^^^^^^^^^ - ;; - - ## hobart - hobart* | h*.cgd.ucar.edu) - submit_script="test_driver_hobart_${cur_time}.sh" - export PATH=/cluster/torque/bin:${PATH} - -##vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv writing to batch script vvvvvvvvvvvvvvvvvvv -cat > ./${submit_script} << EOF -#!/bin/sh -# - -# Name of the queue (CHANGE THIS if needed) -#PBS -q long -# Number of nodes (CHANGE THIS if needed) -#PBS -l nodes=1:ppn=24 -# output file base name -#PBS -N test_dr -# Put standard error and standard out in same file -#PBS -j oe -# Export all Environment variables -#PBS -V -# End of options - -if [ -n "\$PBS_JOBID" ]; then #batch job - export JOBID=\`echo \${PBS_JOBID} | cut -f1 -d'.'\` - initdir=\${PBS_O_WORKDIR} -fi - -if [ "\$PBS_ENVIRONMENT" = "PBS_BATCH" ]; then - interactive="NO" - input_file="tests_posttag_hobart" -else - interactive="YES" - input_file="tests_posttag_hobart_nompi" -fi - -##omp threads -if [ -z "\$CLM_THREADS" ]; then #threads NOT set on command line - export CLM_THREADS=2 -fi -export CLM_RESTART_THREADS=1 - -##mpi tasks -export CLM_TASKS=24 -export CLM_RESTART_TASKS=20 - -export P4_GLOBMEMSIZE=500000000 - - -export CESM_MACH="hobart" - -ulimit -s unlimited -ulimit -c unlimited - -export CESM_COMP="intel" -export TOOLS_MAKE_STRING="USER_FC=ifort USER_CC=icc " -export TOOLS_CONF_STRING=" -mpilib mpi-serial" -export CFG_STRING="" -export INITMODULES="/usr/share/Modules/init/sh" - -. 
\$INITMODULES -module purge -module load compiler/intel -module load tool/nco -module load tool/netcdf -module load lang/python - -export NETCDF_DIR=\$NETCDF_PATH -export INC_NETCDF=\${NETCDF_PATH}/include -export LIB_NETCDF=\${NETCDF_PATH}/lib -export MAKE_CMD="gmake -j 5" ##using hyper-threading on hobart -export MACH_WORKSPACE="/scratch/cluster" -export CPRNC_EXE=/fs/cgd/csm/tools/cprnc/cprnc -export DATM_QIAN_DATA_DIR="/project/tss/atm_forcing.datm7.Qian.T62.c080727" -dataroot="/fs/cgd/csm" -export TOOLSSLIBS="" -echo_arg="-e" - -EOF -##^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ writing to batch script ^^^^^^^^^^^^^^^^^^^ - ;; - - ## izumi - izumi* | i*.unified.ucar.edu) - submit_script="test_driver_izumi_${cur_time}.sh" - export PATH=/cluster/torque/bin:${PATH} - -##vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv writing to batch script vvvvvvvvvvvvvvvvvvv -cat > ./${submit_script} << EOF -#!/bin/sh -# - -# Name of the queue (CHANGE THIS if needed) -#PBS -q long -# Number of nodes (CHANGE THIS if needed) -#PBS -l nodes=1:ppn=24 -# output file base name -#PBS -N test_dr -# Put standard error and standard out in same file -#PBS -j oe -# Export all Environment variables -#PBS -V -# End of options - -if [ -n "\$PBS_JOBID" ]; then #batch job - export JOBID=\`echo \${PBS_JOBID} | cut -f1 -d'.'\` - initdir=\${PBS_O_WORKDIR} -fi - -if [ "\$PBS_ENVIRONMENT" = "PBS_BATCH" ]; then - interactive="NO" - input_file="tests_posttag_izumi" -else - interactive="YES" - input_file="tests_posttag_izumi_nompi" -fi - -##omp threads -if [ -z "\$CLM_THREADS" ]; then #threads NOT set on command line - export CLM_THREADS=2 -fi -export CLM_RESTART_THREADS=1 - -##mpi tasks -export CLM_TASKS=24 -export CLM_RESTART_TASKS=20 - -export P4_GLOBMEMSIZE=500000000 - - -export CESM_MACH="izumi" - -ulimit -s unlimited -ulimit -c unlimited - -export CESM_COMP="intel" -export TOOLS_MAKE_STRING="USER_FC=ifort USER_CC=icc " -export TOOLS_CONF_STRING=" -mpilib mpi-serial" -export CFG_STRING="" -export INITMODULES="/usr/share/Modules/init/sh" - -. \$INITMODULES -module purge -module load compiler/intel -module load tool/nco -module load tool/netcdf -module load lang/python - -export NETCDF_DIR=\$NETCDF_PATH -export INC_NETCDF=\${NETCDF_PATH}/include -export LIB_NETCDF=\${NETCDF_PATH}/lib -export MAKE_CMD="gmake -j 5" ##using hyper-threading on izumi -export MACH_WORKSPACE="/scratch/cluster" -export CPRNC_EXE=/fs/cgd/csm/tools/cprnc/cprnc.izumi -export DATM_QIAN_DATA_DIR="/project/tss/atm_forcing.datm7.Qian.T62.c080727" -dataroot="/fs/cgd/csm" -export TOOLSSLIBS="" -echo_arg="-e" - -EOF -##^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ writing to batch script ^^^^^^^^^^^^^^^^^^^ - ;; - - * ) - echo "Only setup to work on: derecho, cheyenne, hobart and izumi" - exit - - -esac - -##vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv writing to batch script vvvvvvvvvvvvvvvvvvv -cat >> ./${submit_script} << EOF - -export CPRNC_OPT="" -if [ -n "\${CLM_JOBID}" ]; then - export JOBID=\${CLM_JOBID} -fi -##check if interactive job - -if [ "\$interactive" = "YES" ]; then - - if [ -z "\${JOBID}" ]; then - export JOBID=\$\$ - fi - echo "test_driver.sh: interactive run - setting JOBID to \$JOBID" - if [ \$0 = "test_driver.sh" ]; then - initdir="." 
- else - initdir=\${0%/*} - fi -else - echo "ERROR: you *always* need to use the interactive option (-i)" - echo " currently doesn't work without it" - exit 3 -fi - -##establish script dir and clm_root -if [ -f \${initdir}/test_driver.sh ]; then - export CLM_SCRIPTDIR=\`cd \${initdir}; pwd \` - export CLM_ROOT=\`cd \${CLM_SCRIPTDIR}/../..; pwd \` - export CTSM_ROOT=\${CLM_ROOT} - if [ -d \${CLM_ROOT}/cime ]; then - export CIME_ROOT=\${CLM_ROOT}/cime - else - export CIME_ROOT=\${CLM_ROOT}/../../cime - fi - if [ ! -d \${CIME_ROOT} ]; then - echo "ERROR: trouble finding the CIME_ROOT directory: \$CIME_ROOT" - exit 3 - fi -else - if [ -n "\${CLM_ROOT}" ] && [ -f \${CLM_ROOT}/test/tools/test_driver.sh ]; then - export CLM_SCRIPTDIR=\`cd \${CLM_ROOT}/test/tools; pwd \` - else - echo "ERROR: unable to determine script directory " - echo " if initiating batch job from directory other than the one containing test_driver.sh, " - echo " you must set the environment variable CLM_ROOT to the full path of directory containing " - echo " . " - exit 3 - fi -fi - -# Setup conda environment -conda activate ctsm_pylib -if [ \$? -ne 0 ]; then - echo "ERROR: Trouble activating the ctsm_pylib conda environment, be sure it's setup with \$CLM_ROOT/py_env_create, then rerun" - exit 4 -fi - -##output files -clm_log=\${initdir}/td.\${JOBID}.log -if [ -f \$clm_log ]; then - rm \$clm_log -fi -clm_status=\${initdir}/td.\${JOBID}.status -if [ -f \$clm_status ]; then - rm \$clm_status -fi - -##setup test work directory -if [ -z "\$CLM_TESTDIR" ]; then - export CLM_TESTDIR=\${MACH_WORKSPACE}/\$LOGNAME/clmTests/test-driver.\${JOBID} - if [ -d \$CLM_TESTDIR ] && [ \$CLM_RETAIN_FILES != "TRUE" ]; then - rm -r \$CLM_TESTDIR - fi -fi -if [ ! -d \$CLM_TESTDIR ]; then - mkdir -p \$CLM_TESTDIR - if [ \$? -ne 0 ]; then - echo "ERROR: unable to create work directory \$CLM_TESTDIR" - exit 4 - fi -fi - -## PIO build directory -export PIO_LIBDIR=\$CLM_TESTDIR/pio - -##set our own environment vars -export CSMDATA=\${dataroot}/inputdata -export DIN_LOC_ROOT=\${CSMDATA} -export MPI_TYPE_MAX=100000 - -##process other env vars possibly coming in -if [ -z "\$CLM_RETAIN_FILES" ]; then - export CLM_RETAIN_FILES=FALSE -fi -if [ -n "\${CLM_INPUT_TESTS}" ]; then - input_file=\$CLM_INPUT_TESTS -else - input_file=\${CLM_SCRIPTDIR}/\${input_file} -fi -if [ ! 
-f \${input_file} ]; then - echo "ERROR: unable to locate input file \${input_file}" - exit 5 -fi - -if [ \$interactive = "YES" ]; then - echo "reading tests from \${input_file}" -else - echo "reading tests from \${input_file}" >> \${clm_log} -fi - -num_tests=\`wc -w < \${input_file}\` -echo "STATUS OF CLM TESTING UNDER JOB \${JOBID}; scheduled to run \$num_tests tests from:" >> \${clm_status} -echo "\$input_file" >> \${clm_status} -echo "" >> \${clm_status} -echo " on machine: $hostname" >> \${clm_status} -if [ -n "${BL_ROOT}" ]; then - echo "tests of baseline will use source code from:" >> \${clm_status} - echo "\$BL_ROOT" >> \${clm_status} -fi -if [ \$interactive = "NO" ]; then - echo "see \${clm_log} for more detailed output" >> \${clm_status} -fi -echo "" >> \${clm_status} - -test_list="" -while read input_line; do - test_list="\${test_list}\${input_line} " -done < \${input_file} - - -##initialize flags, counter -skipped_tests="NO" -pending_tests="NO" -count=0 - -##loop through the tests of input file -for test_id in \${test_list}; do - count=\`expr \$count + 1\` - while [ \${#count} -lt 3 ]; do - count="0\${count}" - done - - master_line=\`grep \$test_id \${CLM_SCRIPTDIR}/input_tests_master\` - status_out="" - for arg in \${master_line}; do - status_out="\${status_out}\${arg} " - done - - if [ -z "\$status_out" ]; then - echo "No test matches \$test_id in \${CLM_SCRIPTDIR}/input_tests_master" - exit 3 - fi - - test_cmd=\${status_out#* } - - status_out="\${count} \${status_out}" - - if [ \$interactive = "YES" ]; then - echo "" - echo "***********************************************************************************" - echo "\${status_out}" - echo "***********************************************************************************" - else - echo "" >> \${clm_log} - echo "***********************************************************************************"\ - >> \${clm_log} - echo "\$status_out" >> \${clm_log} - echo "***********************************************************************************"\ - >> \${clm_log} - fi - - if [ \${#status_out} -gt 94 ]; then - status_out=\`echo "\${status_out}" | cut -c1-100\` - fi - while [ \${#status_out} -lt 97 ]; do - status_out="\${status_out}." - done - - echo \$echo_arg "\$status_out\c" >> \${clm_status} - - if [ \$interactive = "YES" ]; then - \${CLM_SCRIPTDIR}/\${test_cmd} - rc=\$? - else - \${CLM_SCRIPTDIR}/\${test_cmd} >> \${clm_log} 2>&1 - rc=\$? 
- fi - if [ \$rc -eq 0 ]; then - echo "PASS" >> \${clm_status} - elif [ \$rc -eq 255 ]; then - echo "SKIPPED*" >> \${clm_status} - skipped_tests="YES" - elif [ \$rc -eq 254 ]; then - echo "PENDING**" >> \${clm_status} - pending_tests="YES" - else - echo " rc=\$rc FAIL" >> \${clm_status} - if [ "\$CLM_SOFF" = "TRUE" ]; then - echo "stopping on first failure" >> \${clm_status} - echo "stopping on first failure" >> \${clm_log} - exit 6 - fi - fi -done - -echo "end of input" >> \${clm_status} -if [ \$interactive = "YES" ]; then - echo "end of input" -else - echo "end of input" >> \${clm_log} -fi - -if [ \$skipped_tests = "YES" ]; then - echo "* please verify that any skipped tests are not required of your clm commit" >> \${clm_status} -fi -if [ \$pending_tests = "YES" ]; then - echo "** tests that are pending must be checked manually for a successful completion" >> \${clm_status} - if [ \$interactive = "NO" ]; then - echo " see the test's output in \${clm_log} " >> \${clm_status} - echo " for the location of test results" >> \${clm_status} - fi -fi - -if [ "\$interactive" = "YES" ]; then - passInt="test_driver.sh-i" -else - passInt="test_driver.sh" -fi - -../../bld/unit_testers/xFail/wrapClmTests.pl -statusFile "\${clm_status}" -numberOfTests "\${num_tests}" -callingScript "\${passInt}" - -exit 0 - -EOF -##^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ writing to batch script ^^^^^^^^^^^^^^^^^^^ - - -chmod a+x $submit_script -if [ ! -z "$CLM_RETAIN_FILES" ]; then - export CLM_RETAIN_FILES="FALSE" -fi -arg1=${1##*-} -case $arg1 in - [iI]* ) - debug="NO" - interactive="YES" - compile_only="NO" - export debug - export interactive - export compile_only - ./${submit_script} - exit 0 - ;; - - [cC]* ) - debug="NO" - interactive="YES" - compile_only="YES" - export debug - export CLM_RETAIN_FILES="TRUE" - export interactive - export compile_only - export CLM_RETAIN_FILES="TRUE" - ./${submit_script} - exit 0 - ;; - - [dD]* ) - debug="YES" - interactive="YES" - compile_only="NO" - export debug - export interactive - export compile_only - ./${submit_script} - exit 0 - ;; - - [fF]* ) - debug="NO" - interactive="NO" - compile_only="NO" - export debug - export interactive - export compile_only - ;; - - "" ) - echo "" - echo "**********************" - echo "$submit_script has been created and will be submitted to the batch queue..." - echo "(ret) to continue, (a) to abort" - read ans - case $ans in - [aA]* ) - echo "aborting...type ./test_driver.sh -h for help message" - exit 0 - ;; - esac - debug="NO" - interactive="NO" - compile_only="NO" - export debug - export interactive - export compile_only - ;; - - * ) - echo "" - echo "**********************" - echo "usage on derecho, cheyenne, hobart, and izumi: " - echo "./test_driver.sh -i" - echo "" - echo "valid arguments: " - echo "-i interactive usage" - echo "-c compile-only usage (run configure and compile do not run clm)" - echo "-d debug-only usage (run configure and build-namelist do NOT compile or run clm)" - echo "-f force batch submission (avoids user prompt)" - echo "-h displays this help message" - echo "" - echo "**pass environment variables by preceding above commands " - echo " with 'env var1=setting var2=setting '" - echo "" - echo "**********************" - exit 0 - ;; -esac - -echo "submitting..." 
-case $hostname in - #default - * ) - echo "no submission capability on this machine use the interactive option: -i" - exit 0 - ;; - -esac -exit 0 From 0a08723cfe7365a7fb639513232bbeb1c12716c9 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 16 May 2024 16:38:06 -0600 Subject: [PATCH 078/126] Fix for issue #2546 --- cime_config/config_component.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index be6916ae0c..d69bdbc302 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -78,7 +78,7 @@ UNSET - clm5_0_cam6.0,clm5_0_cam7.0,clm5.0_cam5.0,clm5.0_cam4.0,clm5_0_GSWP3v1,clm5_0_CRUv7,clm5_0_QIAN,clm5_0_1PT,clm5_0_NLDAS2,clm5_0_ERA5,clm4_5_CRUv7,clm4_5_GSWP3v1,clm4_5_QIAN,clm4_5_cam6.0,clm4_5_cam7.0,clm4_5_cam5.0,clm4_5_cam4.0,clm4_5_1PT,clm4_5_NLDAS2,clm4_5_ERA5,clm5_1_CRUv7,clm5_1_GSWP3v1,clm5_1_cam6.0,clm5_1_QIAN,clm5_1_1PT,clm5_1_NLDAS2,clm5_1_ERA5,clm6_0_CRUv7,clm6_0_GSWP3v1,clm6_0_cam6.0,clm6_0_cam7.0,clm6_0_cam5.0,clm6_0_cam4.0,clm6_0_QIAN,clm6_0_1PT,clm6_0_NLDAS2,clm6_0_ERA5 + clm5_0_cam6.0,clm5_0_cam7.0,clm5_0_cam5.0,clm5_0_cam4.0,clm5_0_GSWP3v1,clm5_0_CRUv7,clm5_0_QIAN,clm5_0_1PT,clm5_0_NLDAS2,clm5_0_ERA5,clm4_5_CRUv7,clm4_5_GSWP3v1,clm4_5_QIAN,clm4_5_cam6.0,clm4_5_cam7.0,clm4_5_cam5.0,clm4_5_cam4.0,clm4_5_1PT,clm4_5_NLDAS2,clm4_5_ERA5,clm5_1_CRUv7,clm5_1_GSWP3v1,clm5_1_cam6.0,clm5_1_QIAN,clm5_1_1PT,clm5_1_NLDAS2,clm5_1_ERA5,clm6_0_CRUv7,clm6_0_GSWP3v1,clm6_0_cam6.0,clm6_0_cam7.0,clm6_0_cam5.0,clm6_0_cam4.0,clm6_0_QIAN,clm6_0_1PT,clm6_0_NLDAS2,clm6_0_ERA5 From e4544510e23abf8a9a68e8dd143840742eb8b2fc Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 17 May 2024 13:55:46 -0600 Subject: [PATCH 079/126] Add izumi nag debug tests to ExpectedTestFails as per ESCOMP/CMEPS#460 --- cime_config/testdefs/ExpectedTestFails.xml | 85 ++++++++++++++++++++++ 1 file changed, 85 insertions(+) diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index f600444547..09a425ed9e 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -44,6 +44,91 @@ + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + + + FAIL + https://github.com/ESCOMP/CMEPS/pull/460 + + + FAIL From 9397431486d17cfdda6267c6eca77d2f65a888ae Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 17 May 2024 16:46:18 -0600 Subject: [PATCH 080/126] First draft ChangeLog/ChangeSum --- doc/ChangeLog | 106 ++++++++++++++++++++++++++++++++++++++++++++++++++ doc/ChangeSum | 1 + 2 files changed, 107 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index de37d4934c..e4af944b8f 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,110 @@ =============================================================== +Tag name: 
ctsm5.2.006
+Originator(s): slevis (Samuel Levis)
+Date: Fri May 17 16:17:54 MDT 2024
+One-line Summary: Update externals to cesm2_3_beta17
+
+Purpose and description of changes
+----------------------------------
+
+#2493 update externals to beta17
+#2294 remove references to mct but do not remove /src/mct
+
+Changes unrelated to the tag's title:
+#2546 fix error in cam4/cam5 test
+Remove /test/tools/test_driver.sh
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+Fixes #2493 update externals to beta17
+Fixes #2546 fix error in cam4/cam5 test (unrelated)
+
+Notes of particular relevance for users
+---------------------------------------
+Changes to documentation: Remove references to mct and cpl7
+
+Substantial timing or memory changes: Not considered
+
+Notes of particular relevance for developers:
+---------------------------------------------
+Changes to tests or testing:
+I added a long list of izumi nag debug tests to ExpectedTestFails as per
+https://github.com/ESCOMP/CMEPS/pull/460
+
+Testing summary:
+----------------
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+ build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+ derecho -
+
+ python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+ (any machine) -
+
+ regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+ derecho ----- 
+ izumi ------- IN PROGRESS
+
+ any other testing (give details below):
+
+ ctsm_sci
+ derecho ----
+
+If the tag used for baseline comparisons was NOT the previous tag, note that here:
+
+
+Answer changes
+--------------
+
+Changes answers relative to baseline:
+
+ [ If a tag changes answers relative to baseline comparison the
+ following should be filled in (otherwise remove this section).
+ And always remove these three lines and parts that don't apply. ]
+
+ Summarize any changes to answers, i.e.,
+ - what code configurations:
+ - what platforms/compilers:
+ - nature of change (roundoff; larger than roundoff/same climate; new climate):
+
+ If bitwise differences were observed, how did you show they were no worse
+ than roundoff? Roundoff differences mean that one or more lines of code change results
+ only at roundoff level (because order of operation changes for example). Roundoff
+ changes to state fields usually grow to greater than roundoff as the simulation progresses.
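For reviewers who have not applied this criterion before, a small self-contained example (not CTSM code) of how merely reordering additions perturbs a result at roundoff level:

    program roundoff_sketch
      implicit none
      integer, parameter :: r8 = selected_real_kind(12)
      real(r8) :: a, b, c
      a =  1.0e20_r8
      b = -1.0e20_r8
      c =  1.0_r8
      print *, (a + b) + c   ! 1.0: the large terms cancel first
      print *, a + (b + c)   ! 0.0: c is lost below the precision of b
    end program roundoff_sketch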
+ +Other details +------------- +List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): +cism, ccs_config, cime, cmeps, cdeps + +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/2539 + +=============================================================== +=============================================================== Tag name: ctsm5.2.005 Originator(s): erik (Erik Kluzek,UCAR/TSS,303-497-1326) Date: Mon 13 May 2024 04:46:10 PM MDT diff --git a/doc/ChangeSum b/doc/ChangeSum index 0edccb3a18..c73cb19309 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.2.006 slevis 05/17/2024 Update externals to cesm2_3_beta17 ctsm5.2.005 erik 05/13/2024 Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems ctsm5.2.004 multiple 05/09/2024 CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes ctsm5.2.003 samrabin 05/02/2024 Merge b4b-dev From 6b460a21afc98134f330a37f9a6cda837f364aee Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 17 May 2024 18:40:30 -0600 Subject: [PATCH 081/126] Update ntests in build-namelist_test.pl --- bld/unit_testers/build-namelist_test.pl | 2 +- doc/ChangeLog | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bld/unit_testers/build-namelist_test.pl b/bld/unit_testers/build-namelist_test.pl index aa579cf908..53c71ae0e9 100755 --- a/bld/unit_testers/build-namelist_test.pl +++ b/bld/unit_testers/build-namelist_test.pl @@ -163,7 +163,7 @@ sub cat_and_create_namelistinfile { # # Figure out number of tests that will run # -my $ntests = 3313; +my $ntests = 3254; if ( defined($opts{'compare'}) ) { $ntests += 2001; diff --git a/doc/ChangeLog b/doc/ChangeLog index e4af944b8f..5644ed295d 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -11,7 +11,7 @@ Purpose and description of changes #2294 remove references to mct but do not remove /src/mct Changes unrelated to the tag's title: -#2546 fix error in cam4/cam5 test +#2546 fix error in cam4/cam5 test (ekluzek) Remove /test/tools/test_driver.sh Significant changes to scientifically-supported configurations From 7072f7aa8e1e44a5af15b5c82c4634268b5dc50b Mon Sep 17 00:00:00 2001 From: Erik Kluzek Date: Mon, 20 May 2024 11:29:08 -0600 Subject: [PATCH 082/126] Resolve the mizuRoute issue --- Externals.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index a8a77a40f1..cb2622dedf 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -27,10 +27,10 @@ tag = mosart1_0_49 required = True [mizuRoute] -local_path = components/mizuRoute +tag = cesm-coupling.n02_v2.1.2 protocol = git +local_path = components/mizuroute repo_url = https://github.com/ESCOMP/mizuRoute -hash = 81c720c required = True [ccs_config] From ff4d35f2607bb637e37baaca0109d4d189efe0fd Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 10:30:14 -0600 Subject: [PATCH 083/126] Remove set_glc2lnd_fields_mct (not tested, yet) --- src/main/glc2lndMod.F90 | 56 ----------------------------------------- 1 file changed, 56 deletions(-) diff --git a/src/main/glc2lndMod.F90 b/src/main/glc2lndMod.F90 index ecd6818210..2d0dbb5791 100644 --- a/src/main/glc2lndMod.F90 +++ b/src/main/glc2lndMod.F90 @@ -78,7 +78,6 @@ module glc2lndMod ! - set_glc2lnd_fields ! - update_glc2lnd_fracs ! - update_glc2lnd_topo - procedure, public :: set_glc2lnd_fields_mct ! 
set coupling fields sent from glc to lnd procedure, public :: set_glc2lnd_fields_nuopc ! set coupling fields sent from glc to lnd procedure, public :: update_glc2lnd_fracs ! update subgrid fractions based on input from GLC procedure, public :: update_glc2lnd_topo ! update topographic heights @@ -242,61 +241,6 @@ subroutine Clean(this) end subroutine Clean - !----------------------------------------------------------------------- - subroutine set_glc2lnd_fields_mct(this, bounds, glc_present, x2l, & - index_x2l_Sg_ice_covered, index_x2l_Sg_topo, index_x2l_Flgg_hflx, & - index_x2l_Sg_icemask, index_x2l_Sg_icemask_coupled_fluxes) - ! - ! !DESCRIPTION: - ! Set coupling fields sent from glc to lnd - ! - ! If glc_present is true, then the given fields are all assumed to be valid; if - ! glc_present is false, then these fields are ignored. - ! - ! !ARGUMENTS: - class(glc2lnd_type), intent(inout) :: this - type(bounds_type) , intent(in) :: bounds - logical , intent(in) :: glc_present ! true if running with a non-stub glc model - real(r8) , intent(in) :: x2l(:, bounds%begg: ) ! driver import state to land model [field, gridcell] - integer , intent(in) :: index_x2l_Sg_ice_covered( 0: ) ! indices of ice-covered field in x2l, for each elevation class - integer , intent(in) :: index_x2l_Sg_topo( 0: ) ! indices of topo field in x2l, for each elevation class - integer , intent(in) :: index_x2l_Flgg_hflx( 0: ) ! indices of heat flux field in x2l, for each elevation class - integer , intent(in) :: index_x2l_Sg_icemask ! index of icemask field in x2l - integer , intent(in) :: index_x2l_Sg_icemask_coupled_fluxes ! index of icemask_coupled_fluxes field in x2l - ! - ! !LOCAL VARIABLES: - integer :: g - integer :: ice_class - - character(len=*), parameter :: subname = 'set_glc2lnd_fields_mct' - !----------------------------------------------------------------------- - - SHR_ASSERT_FL((ubound(x2l, 2) == bounds%endg), sourcefile, __LINE__) - SHR_ASSERT_ALL_FL((ubound(index_x2l_Sg_ice_covered) == (/maxpatch_glc/)), sourcefile, __LINE__) - SHR_ASSERT_ALL_FL((ubound(index_x2l_Sg_topo) == (/maxpatch_glc/)), sourcefile, __LINE__) - SHR_ASSERT_ALL_FL((ubound(index_x2l_Flgg_hflx) == (/maxpatch_glc/)), sourcefile, __LINE__) - - if (glc_present) then - do g = bounds%begg, bounds%endg - do ice_class = 0, maxpatch_glc - this%frac_grc(g,ice_class) = x2l(index_x2l_Sg_ice_covered(ice_class),g) - this%topo_grc(g,ice_class) = x2l(index_x2l_Sg_topo(ice_class),g) - this%hflx_grc(g,ice_class) = x2l(index_x2l_Flgg_hflx(ice_class),g) - end do - this%icemask_grc(g) = x2l(index_x2l_Sg_icemask,g) - this%icemask_coupled_fluxes_grc(g) = x2l(index_x2l_Sg_icemask_coupled_fluxes,g) - end do - - call this%set_glc2lnd_fields_wrapup(bounds) - else - if (glc_do_dynglacier) then - call endrun(' ERROR: With glc_present false (e.g., a stub glc model), glc_do_dynglacier must be false '// & - errMsg(sourcefile, __LINE__)) - end if - end if - - end subroutine set_glc2lnd_fields_mct - !----------------------------------------------------------------------- subroutine set_glc2lnd_fields_nuopc(this, bounds, glc_present, & frac_grc, topo_grc, hflx_grc, icemask_grc, icemask_coupled_fluxes_grc) From 4d62578a920eae09f482776576a30e9750e1e8e1 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 10:47:56 -0600 Subject: [PATCH 084/126] Remove /src/cpl/mct again; I was right the first time --- src/cpl/mct/ExcessIceStreamType.F90 | 144 ----- src/cpl/mct/FireDataBaseType.F90 | 561 ------------------- src/cpl/mct/SoilMoistureStreamMod.F90 | 418 
-------------- src/cpl/mct/UrbanTimeVarType.F90 | 314 ----------- src/cpl/mct/ch4FInundatedStreamType.F90 | 389 ------------- src/cpl/mct/clm_cpl_indices.F90 | 330 ----------- src/cpl/mct/laiStreamMod.F90 | 241 --------- src/cpl/mct/lnd_comp_mct.F90 | 632 ---------------------- src/cpl/mct/lnd_import_export.F90 | 354 ------------ src/cpl/mct/lnd_set_decomp_and_domain.F90 | 352 ------------ src/cpl/mct/ndepStreamMod.F90 | 376 ------------- 11 files changed, 4111 deletions(-) delete mode 100644 src/cpl/mct/ExcessIceStreamType.F90 delete mode 100644 src/cpl/mct/FireDataBaseType.F90 delete mode 100644 src/cpl/mct/SoilMoistureStreamMod.F90 delete mode 100644 src/cpl/mct/UrbanTimeVarType.F90 delete mode 100644 src/cpl/mct/ch4FInundatedStreamType.F90 delete mode 100644 src/cpl/mct/clm_cpl_indices.F90 delete mode 100644 src/cpl/mct/laiStreamMod.F90 delete mode 100644 src/cpl/mct/lnd_comp_mct.F90 delete mode 100644 src/cpl/mct/lnd_import_export.F90 delete mode 100644 src/cpl/mct/lnd_set_decomp_and_domain.F90 delete mode 100644 src/cpl/mct/ndepStreamMod.F90 diff --git a/src/cpl/mct/ExcessIceStreamType.F90 b/src/cpl/mct/ExcessIceStreamType.F90 deleted file mode 100644 index 5c5394233c..0000000000 --- a/src/cpl/mct/ExcessIceStreamType.F90 +++ /dev/null @@ -1,144 +0,0 @@ -module ExcessIceStreamType - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Stub for ExcessIceStreams for the MCT driver. So that MCT can be used - ! without excess ice streams. - ! - ! !USES - use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL - use shr_log_mod , only : errMsg => shr_log_errMsg - use spmdMod , only : mpicom, masterproc - use clm_varctl , only : iulog - use abortutils , only : endrun - use decompMod , only : bounds_type - - ! !PUBLIC TYPES: - implicit none - private - - public :: UseExcessIceStreams ! If streams will be used - - type, public :: excessicestream_type - contains - - ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: Init ! Initialize and read data in - procedure, public :: CalcExcessIce ! Calculate excess ice ammount - - ! !PRIVATE MEMBER FUNCTIONS: - procedure, private :: ReadNML ! Read in namelist - - end type excessicestream_type - ! ! PRIVATE DATA: - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - -!============================================================================== -contains -!============================================================================== - - subroutine Init(this, bounds, NLFilename) - ! - ! - ! arguments - implicit none - class(excessicestream_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - - ! - ! local variables - - call this%ReadNML( bounds, NLFileName ) - end subroutine Init - - subroutine CalcExcessIce(this,bounds,exice_bulk_init) - - ! only transfers grid values to columns - implicit none - class(excessicestream_type) :: this - type(bounds_type), intent(in) :: bounds - real(r8) , intent(inout) :: exice_bulk_init(bounds%begc:bounds%endc) - ! - ! !LOCAL VARIABLES: - - end subroutine CalcExcessIce - - logical function UseExcessIceStreams() - ! - ! !DESCRIPTION: - ! Return true if - ! - ! !USES: - ! - ! !ARGUMENTS: - implicit none - ! - ! !LOCAL VARIABLES: - UseExcessIceStreams = .false. -end function UseExcessIceStreams - -subroutine ReadNML(this, bounds, NLFilename) - ! - ! Read the namelist data stream information. - ! - ! 
Uses: - use shr_nl_mod , only : shr_nl_find_group_name - use shr_log_mod , only : errMsg => shr_log_errMsg - use shr_mpi_mod , only : shr_mpi_bcast - ! - ! arguments - implicit none - class(excessicestream_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! local variables - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - logical :: use_excess_ice_streams = .false. ! logical to turn on use of excess ice streams - character(len=CL) :: stream_fldFileName_exice = ' ' - character(len=CL) :: stream_mapalgo_exice = 'none' - character(len=*), parameter :: namelist_name = 'exice_streams' ! MUST agree with name in namelist and read - character(len=*), parameter :: subName = "('exice_streams::ReadNML')" - !----------------------------------------------------------------------- - - namelist /exice_streams/ & ! MUST agree with namelist_name above - stream_mapalgo_exice, stream_fldFileName_exice, use_excess_ice_streams - !----------------------------------------------------------------------- - ! Default values for namelist - - ! Read excess ice namelist - if (masterproc) then - open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call shr_nl_find_group_name(nu_nml, namelist_name, status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=exice_streams,iostat=nml_error) ! MUST agree with namelist_name above - if (nml_error /= 0) then - call endrun(msg=' ERROR reading '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) - end if - else - call endrun(msg=' ERROR finding '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) - end if - close(nu_nml) - endif - - call shr_mpi_bcast(use_excess_ice_streams , mpicom) - - if (masterproc) then - if ( use_excess_ice_streams ) then - call endrun(msg=' ERROR excess ice streams can NOT be on for the MCT driver'//errMsg(sourcefile, __LINE__)) - end if - if ( trim(stream_fldFileName_exice) /= '' ) then - call endrun(msg=' ERROR stream_fldFileName_exice can NOT be set for the MCT driver'//errMsg(sourcefile, __LINE__)) - end if - if ( trim(stream_mapalgo_exice) /= 'none' ) then - call endrun(msg=' ERROR stream_mapalgo_exice can only be none for the MCT driver'//errMsg(sourcefile, __LINE__)) - end if - endif - -end subroutine ReadNML - -end module ExcessIceStreamType diff --git a/src/cpl/mct/FireDataBaseType.F90 b/src/cpl/mct/FireDataBaseType.F90 deleted file mode 100644 index 0ee635b2fa..0000000000 --- a/src/cpl/mct/FireDataBaseType.F90 +++ /dev/null @@ -1,561 +0,0 @@ -module FireDataBaseType - -#include "shr_assert.h" - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! module for handling of fire data - ! - ! !USES: - use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create, shr_strdata_print - use shr_strdata_mod , only : shr_strdata_advance - use shr_log_mod , only : errMsg => shr_log_errMsg - use clm_varctl , only : iulog, inst_name - use spmdMod , only : masterproc, mpicom, comp_id - use fileutils , only : getavu, relavu - use domainMod , only : ldomain - use abortutils , only : endrun - use decompMod , only : bounds_type - use FireMethodType , only : fire_method_type - use lnd_set_decomp_and_domain, only : gsmap_global - use mct_mod - ! - implicit none - private - ! - ! !PUBLIC TYPES: - public :: fire_base_type - - ! 
- type, abstract, extends(fire_method_type) :: fire_base_type - private - ! !PRIVATE MEMBER DATA: - - real(r8), public, pointer :: forc_lnfm(:) ! Lightning frequency - real(r8), public, pointer :: forc_hdm(:) ! Human population density - - real(r8), public, pointer :: gdp_lf_col(:) ! col global real gdp data (k US$/capita) - real(r8), public, pointer :: peatf_lf_col(:) ! col global peatland fraction data (0-1) - integer , public, pointer :: abm_lf_col(:) ! col global peak month of crop fire emissions - - type(shr_strdata_type) :: sdat_hdm ! Human population density input data stream - type(shr_strdata_type) :: sdat_lnfm ! Lightning input data stream - - contains - ! - ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: FireInit => BaseFireInit ! Initialization of Fire - procedure, public :: BaseFireInit ! Initialization of Fire - procedure(FireReadNML_interface), public, deferred :: FireReadNML ! Read in namelist for Fire - procedure, public :: FireInterp ! Interpolate fire data - procedure(need_lightning_and_popdens_interface), public, deferred :: & - need_lightning_and_popdens ! Returns true if need lightning & popdens - ! - ! !PRIVATE MEMBER FUNCTIONS: - procedure, private :: hdm_init ! position datasets for dynamic human population density - procedure, private :: hdm_interp ! interpolates between two years of human pop. density file data - procedure, private :: lnfm_init ! position datasets for Lightning - procedure, private :: lnfm_interp ! interpolates between two years of Lightning file data - procedure, private :: surfdataread ! read fire related data from surface data set - end type fire_base_type - !----------------------------------------------------------------------- - - abstract interface - !----------------------------------------------------------------------- - function need_lightning_and_popdens_interface(this) result(need_lightning_and_popdens) - ! - ! !DESCRIPTION: - ! Returns true if need lightning and popdens, false otherwise - ! - ! USES - import :: fire_base_type - ! - ! !ARGUMENTS: - class(fire_base_type), intent(in) :: this - logical :: need_lightning_and_popdens ! function result - !----------------------------------------------------------------------- - end function need_lightning_and_popdens_interface - end interface - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - -contains - - !----------------------------------------------------------------------- - subroutine FireReadNML_interface( this, NLFilename ) - ! - ! !DESCRIPTION: - ! Read the namelist for Fire - ! - ! !USES: - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - character(len=*), intent(in) :: NLFilename ! Namelist filename - end subroutine FireReadNML_interface - - !----------------------------------------------------------------------- - subroutine BaseFireInit( this, bounds, NLFilename ) - ! - ! !DESCRIPTION: - ! Initialize CN Fire module - ! !USES: - use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename - !----------------------------------------------------------------------- - - if ( this%need_lightning_and_popdens() ) then - ! Allocate lightning forcing data - allocate( this%forc_lnfm(bounds%begg:bounds%endg) ) - this%forc_lnfm(bounds%begg:) = nan - ! Allocate pop dens forcing data - allocate( this%forc_hdm(bounds%begg:bounds%endg) ) - this%forc_hdm(bounds%begg:) = nan - - ! 
Allocate real gdp data - allocate(this%gdp_lf_col(bounds%begc:bounds%endc)) - ! Allocate peatland fraction data - allocate(this%peatf_lf_col(bounds%begc:bounds%endc)) - ! Allocates peak month of crop fire emissions - allocate(this%abm_lf_col(bounds%begc:bounds%endc)) - - - call this%hdm_init(bounds, NLFilename) - call this%hdm_interp(bounds) - call this%lnfm_init(bounds, NLFilename) - call this%lnfm_interp(bounds) - call this%surfdataread(bounds) - end if - - end subroutine BaseFireInit - - !----------------------------------------------------------------------- - subroutine FireInterp(this,bounds) - ! - ! !DESCRIPTION: - ! Interpolate CN Fire datasets - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - !----------------------------------------------------------------------- - - if ( this%need_lightning_and_popdens() ) then - call this%hdm_interp(bounds) - call this%lnfm_interp(bounds) - end if - - end subroutine FireInterp - - !----------------------------------------------------------------------- - subroutine hdm_init( this, bounds, NLFilename ) - ! - ! !DESCRIPTION: - ! Initialize data stream information for population density. - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use histFileMod , only : hist_addfld1d - ! - ! !ARGUMENTS: - implicit none - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! !LOCAL VARIABLES: - integer :: stream_year_first_popdens ! first year in pop. dens. stream to use - integer :: stream_year_last_popdens ! last year in pop. dens. stream to use - integer :: model_year_align_popdens ! align stream_year_first_hdm with - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - type(mct_ggrid) :: dom_clm ! domain information - character(len=CL) :: stream_fldFileName_popdens ! population density streams filename - character(len=CL) :: popdensmapalgo = 'bilinear' ! mapping alogrithm for population density - character(len=CL) :: popdens_tintalgo = 'nearest'! time interpolation alogrithm for population density - character(len=CL) :: stream_meshfile_popdens ! not used - character(*), parameter :: subName = "('hdmdyn_init')" - character(*), parameter :: F00 = "('(hdmdyn_init) ',4a)" - !----------------------------------------------------------------------- - - namelist /popd_streams/ & - stream_year_first_popdens, & - stream_year_last_popdens, & - model_year_align_popdens, & - popdensmapalgo, & - stream_fldFileName_popdens, & - stream_meshfile_popdens , & - popdens_tintalgo - - ! Default values for namelist - stream_year_first_popdens = 1 ! first year in stream to use - stream_year_last_popdens = 1 ! last year in stream to use - model_year_align_popdens = 1 ! align stream_year_first_popdens with this model year - stream_fldFileName_popdens = ' ' - - ! 
Read popd_streams namelist - if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call find_nlgroup_name(nu_nml, 'popd_streams', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=popd_streams,iostat=nml_error) - if (nml_error /= 0) then - call endrun(msg='ERROR reading popd_streams namelist'//errMsg(sourcefile, __LINE__)) - end if - end if - close(nu_nml) - call relavu( nu_nml ) - endif - - call shr_mpi_bcast(stream_year_first_popdens, mpicom) - call shr_mpi_bcast(stream_year_last_popdens, mpicom) - call shr_mpi_bcast(model_year_align_popdens, mpicom) - call shr_mpi_bcast(stream_fldFileName_popdens, mpicom) - call shr_mpi_bcast(popdens_tintalgo, mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) 'popdens_streams settings:' - write(iulog,*) ' stream_year_first_popdens = ',stream_year_first_popdens - write(iulog,*) ' stream_year_last_popdens = ',stream_year_last_popdens - write(iulog,*) ' model_year_align_popdens = ',model_year_align_popdens - write(iulog,*) ' stream_fldFileName_popdens = ',stream_fldFileName_popdens - write(iulog,*) ' popdens_tintalgo = ',popdens_tintalgo - write(iulog,*) ' ' - endif - - call clm_domain_mct (bounds, dom_clm) - - call shr_strdata_create(this%sdat_hdm,name="clmhdm", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_popdens, & - yearLast=stream_year_last_popdens, & - yearAlign=model_year_align_popdens, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_popdens), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/trim(stream_fldFileName_popdens)/) , & - fldListFile='hdm', & - fldListModel='hdm', & - fillalgo='none', & - mapalgo=popdensmapalgo, & - calendar=get_calendar(), & - tintalgo=popdens_tintalgo, & - taxmode='extend' ) - - if (masterproc) then - call shr_strdata_print(this%sdat_hdm,'population density data') - endif - - ! Add history fields - call hist_addfld1d (fname='HDM', units='counts/km^2', & - avgflag='A', long_name='human population density', & - ptr_lnd=this%forc_hdm, default='inactive') - - end subroutine hdm_init - - !----------------------------------------------------------------------- - subroutine hdm_interp( this, bounds) - ! - ! !DESCRIPTION: - ! Interpolate data stream information for population density. - ! - ! !USES: - use clm_time_manager, only : get_curr_date - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: g, ig - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! 
Current model date (yyyymmdd) - !----------------------------------------------------------------------- - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(this%sdat_hdm, mcdate, sec, mpicom, 'hdmdyn') - - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - this%forc_hdm(g) = this%sdat_hdm%avs(1)%rAttr(1,ig) - end do - - end subroutine hdm_interp - - !----------------------------------------------------------------------- - subroutine lnfm_init( this, bounds, NLFilename ) - ! - ! !DESCRIPTION: - ! - ! Initialize data stream information for Lightning. - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use histFileMod , only : hist_addfld1d - ! - ! !ARGUMENTS: - implicit none - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename - ! - ! !LOCAL VARIABLES: - integer :: stream_year_first_lightng ! first year in Lightning stream to use - integer :: stream_year_last_lightng ! last year in Lightning stream to use - integer :: model_year_align_lightng ! align stream_year_first_lnfm with - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - type(mct_ggrid) :: dom_clm ! domain information - character(len=CL) :: stream_fldFileName_lightng ! lightning stream filename to read - character(len=CL) :: lightng_tintalgo = 'linear'! time interpolation alogrithm - character(len=CL) :: lightngmapalgo = 'bilinear'! Mapping alogrithm - character(*), parameter :: subName = "('lnfmdyn_init')" - character(*), parameter :: F00 = "('(lnfmdyn_init) ',4a)" - !----------------------------------------------------------------------- - - namelist /light_streams/ & - stream_year_first_lightng, & - stream_year_last_lightng, & - model_year_align_lightng, & - lightngmapalgo, & - stream_fldFileName_lightng, & - lightng_tintalgo - - ! Default values for namelist - stream_year_first_lightng = 1 ! first year in stream to use - stream_year_last_lightng = 1 ! last year in stream to use - model_year_align_lightng = 1 ! align stream_year_first_lnfm with this model year - stream_fldFileName_lightng = ' ' - - ! 
Read light_streams namelist - if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call find_nlgroup_name(nu_nml, 'light_streams', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=light_streams,iostat=nml_error) - if (nml_error /= 0) then - call endrun(msg='ERROR reading light_streams namelist'//errMsg(sourcefile, __LINE__)) - end if - end if - close(nu_nml) - call relavu( nu_nml ) - endif - - call shr_mpi_bcast(stream_year_first_lightng, mpicom) - call shr_mpi_bcast(stream_year_last_lightng, mpicom) - call shr_mpi_bcast(model_year_align_lightng, mpicom) - call shr_mpi_bcast(stream_fldFileName_lightng, mpicom) - call shr_mpi_bcast(lightng_tintalgo, mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) 'light_stream settings:' - write(iulog,*) ' stream_year_first_lightng = ',stream_year_first_lightng - write(iulog,*) ' stream_year_last_lightng = ',stream_year_last_lightng - write(iulog,*) ' model_year_align_lightng = ',model_year_align_lightng - write(iulog,*) ' stream_fldFileName_lightng = ',stream_fldFileName_lightng - write(iulog,*) ' lightng_tintalgo = ',lightng_tintalgo - write(iulog,*) ' ' - endif - - call clm_domain_mct (bounds, dom_clm) - - call shr_strdata_create(this%sdat_lnfm,name="clmlnfm", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_lightng, & - yearLast=stream_year_last_lightng, & - yearAlign=model_year_align_lightng, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_lightng), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/trim(stream_fldFileName_lightng)/), & - fldListFile='lnfm', & - fldListModel='lnfm', & - fillalgo='none', & - tintalgo=lightng_tintalgo, & - mapalgo=lightngmapalgo, & - calendar=get_calendar(), & - taxmode='cycle' ) - - if (masterproc) then - call shr_strdata_print(this%sdat_lnfm,'Lightning data') - endif - - ! Add history fields - call hist_addfld1d (fname='LNFM', units='counts/km^2/hr', & - avgflag='A', long_name='Lightning frequency', & - ptr_lnd=this%forc_lnfm, default='inactive') - - end subroutine lnfm_init - - !----------------------------------------------------------------------- - subroutine lnfm_interp(this, bounds ) - ! - ! !DESCRIPTION: - ! Interpolate data stream information for Lightning. - ! - ! !USES: - use clm_time_manager, only : get_curr_date - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: g, ig - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - !----------------------------------------------------------------------- - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(this%sdat_lnfm, mcdate, sec, mpicom, 'lnfmdyn') - - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - this%forc_lnfm(g) = this%sdat_lnfm%avs(1)%rAttr(1,ig) - end do - - end subroutine lnfm_interp - - !----------------------------------------------------------------------- - subroutine surfdataread(this, bounds) - ! - ! 
!DESCRIPTION: - ! Read surface data set to populate relevant fire-related variables - ! - ! !USES: - use spmdMod , only : masterproc - use clm_varctl , only : nsrest, nsrStartup, fsurdat - use clm_varcon , only : grlnd - use ColumnType , only : col - use fileutils , only : getfil - use ncdio_pio - ! - ! !ARGUMENTS: - class(fire_base_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: g,c ! indices - type(file_desc_t) :: ncid ! netcdf id - logical :: readvar ! true => variable is on initial dataset - character(len=256) :: locfn ! local filename - real(r8), pointer :: gdp(:) ! global gdp data (needs to be a pointer for use in ncdio) - real(r8), pointer :: peatf(:) ! global peatf data (needs to be a pointer for use in ncdio) - integer, pointer :: abm(:) ! global abm data (needs to be a pointer for use in ncdio) - !----------------------------------------------------------------------- - - ! -------------------------------------------------------------------- - ! Open surface dataset - ! -------------------------------------------------------------------- - - call getfil (fsurdat, locfn, 0) - call ncd_pio_openfile (ncid, locfn, 0) - - ! -------------------------------------------------------------------- - ! Read in GDP data - ! -------------------------------------------------------------------- - - allocate(gdp(bounds%begg:bounds%endg)) - call ncd_io(ncid=ncid, varname='gdp', flag='read', data=gdp, dim1name=grlnd, readvar=readvar) - if (.not. readvar) then - call endrun(msg=' ERROR: gdp NOT on surfdata file'//errMsg(sourcefile, __LINE__)) - end if - do c = bounds%begc, bounds%endc - g = col%gridcell(c) - this%gdp_lf_col(c) = gdp(g) - end do - deallocate(gdp) - - ! -------------------------------------------------------------------- - ! Read in peatf data - ! -------------------------------------------------------------------- - - allocate(peatf(bounds%begg:bounds%endg)) - call ncd_io(ncid=ncid, varname='peatf', flag='read', data=peatf, dim1name=grlnd, readvar=readvar) - if (.not. readvar) then - call endrun(msg=' ERROR: peatf NOT on surfdata file'//errMsg(sourcefile, __LINE__)) - end if - do c = bounds%begc, bounds%endc - g = col%gridcell(c) - this%peatf_lf_col(c) = peatf(g) - end do - deallocate(peatf) - - ! -------------------------------------------------------------------- - ! Read in ABM data - ! -------------------------------------------------------------------- - - allocate(abm(bounds%begg:bounds%endg)) - call ncd_io(ncid=ncid, varname='abm', flag='read', data=abm, dim1name=grlnd, readvar=readvar) - if (.not. readvar) then - call endrun(msg=' ERROR: abm NOT on surfdata file'//errMsg(sourcefile, __LINE__)) - end if - do c = bounds%begc, bounds%endc - g = col%gridcell(c) - this%abm_lf_col(c) = abm(g) - end do - deallocate(abm) - - ! Close file - - call ncd_pio_closefile(ncid) - - if (masterproc) then - write(iulog,*) 'Successfully read fire-related surface data: gdp, peatf, and abm' - write(iulog,*) - endif - - end subroutine surfdataread - - -end module FireDataBaseType
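For reference, the popd_streams and light_streams groups read by hdm_init and lnfm_init above come from the CLM namelist file. An illustrative light_streams entry, with placeholder years and file path rather than shipped defaults, might look like:

   &light_streams
     stream_year_first_lightng  = 1995
     stream_year_last_lightng   = 2011
     model_year_align_lightng   = 1995
     lightngmapalgo             = 'bilinear'
     stream_fldfilename_lightng = '/path/to/lightning_frequency_stream.nc'
   /

popd_streams takes the analogous stream_year_first_popdens, stream_year_last_popdens, model_year_align_popdens, and stream_fldfilename_popdens entries.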
diff --git a/src/cpl/mct/SoilMoistureStreamMod.F90 b/src/cpl/mct/SoilMoistureStreamMod.F90 deleted file mode 100644 index 8b366d6c8e..0000000000 --- a/src/cpl/mct/SoilMoistureStreamMod.F90 +++ /dev/null @@ -1,418 +0,0 @@ -module SoilMoistureStreamMod - - ! ********************************************************************** - ! --------------------------- IMPORTANT NOTE --------------------------- - ! - ! In cases using the NUOPC driver/mediator, we use a different version of this module, - ! based on CDEPS, which resides in src/cpl/nuopc/. Changes to the science here should - ! also be made in the similar file in src/cpl/nuopc. Once we start using CDEPS by - ! default, we can remove this version and move the CDEPS-based version into its place. - ! ********************************************************************** - -#include "shr_assert.h" - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Read in soil moisture from data stream - ! - ! !USES: - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create - use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance - use shr_kind_mod , only : r8 => shr_kind_r8 - use shr_kind_mod , only : CL => shr_kind_CL, CXX => shr_kind_CXX - use shr_log_mod , only : errMsg => shr_log_errMsg - use decompMod , only : bounds_type, subgrid_level_column - use abortutils , only : endrun - use clm_varctl , only : iulog, use_soil_moisture_streams, inst_name - use clm_varcon , only : grlnd - use controlMod , only : NLFilename - use domainMod , only : ldomain - use LandunitType , only : lun - use ColumnType , only : col - use SoilStateType , only : soilstate_type - use WaterStateBulkType , only : waterstatebulk_type - use perf_mod , only : t_startf, t_stopf - use spmdMod , only : masterproc, mpicom, comp_id - use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo - use mct_mod - use ncdio_pio - ! - ! !PUBLIC TYPES: - implicit none - private - ! - ! !PUBLIC MEMBER FUNCTIONS: - public :: PrescribedSoilMoistureInit ! position datasets for soil moisture - public :: PrescribedSoilMoistureAdvance ! Advance the soil moisture stream (outside of Open-MP loops) - public :: PrescribedSoilMoistureInterp ! interpolates between two periods of soil moisture data - - ! !PRIVATE MEMBER DATA: - type(shr_strdata_type) :: sdat_soilm ! soil moisture input data stream - integer :: ism ! Soil moisture stream index - integer, allocatable :: g_to_ig(:) ! Array matching gridcell index to data index - logical :: soilm_ignore_data_if_missing ! Whether to ignore overriding a point with soil moisture data - ! from the streams file, if the streams file shows that point - ! as missing (namelist item) - ! - ! !PRIVATE TYPES: - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - !----------------------------------------------------------------------- - -contains - - !----------------------------------------------------------------------- - ! - ! soil_moisture_init - ! - !----------------------------------------------------------------------- - subroutine PrescribedSoilMoistureInit(bounds) - ! - ! Initialize data stream information for soil moisture. - ! - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use shr_stream_mod , only : shr_stream_file_null - use shr_string_mod , only : shr_string_listCreateField - use clm_varpar , only : nlevsoi - ! - ! !ARGUMENTS: - implicit none - type(bounds_type), intent(in) :: bounds ! bounds - ! - ! !LOCAL VARIABLES: - integer :: i ! index - integer :: stream_year_first_soilm ! first year in soil moisture stream to use - integer :: stream_year_last_soilm ! last year in soil moisture stream to use - integer :: model_year_align_soilm ! align stream_year_first_soilm with - integer :: nu_nml ! 
unit for namelist file - integer :: nml_error ! namelist i/o error flag - integer :: soilm_offset ! Offset in time for dataset (sec) - type(mct_ggrid) :: dom_clm ! domain information - character(len=CL) :: stream_fldfilename_soilm ! soil moisture stream filename to read - character(len=CL) :: soilm_tintalgo = 'linear' ! Time interpolation algorithm - - character(*), parameter :: subName = "('PrescribedSoilMoistureInit')" - character(*), parameter :: F00 = "('(PrescribedSoilMoistureInit) ',4a)" - character(*), parameter :: soilmString = "H2OSOI" ! base string for field string - character(CXX) :: fldList ! field string - !----------------------------------------------------------------------- - ! - ! deal with namelist variables here in init - ! - namelist /soil_moisture_streams/ & - stream_year_first_soilm, & - stream_year_last_soilm, & - model_year_align_soilm, & - soilm_tintalgo, & - soilm_offset, & - soilm_ignore_data_if_missing, & - stream_fldfilename_soilm - - ! Default values for namelist - stream_year_first_soilm = 1 ! first year in stream to use - stream_year_last_soilm = 1 ! last year in stream to use - model_year_align_soilm = 1 ! align stream_year_first_soilm with this model year - stream_fldfilename_soilm = shr_stream_file_null - soilm_offset = 0 - soilm_ignore_data_if_missing = .false. - - ! Read soil_moisture_streams namelist - if (masterproc) then - open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call find_nlgroup_name(nu_nml, 'soil_moisture_streams', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=soil_moisture_streams,iostat=nml_error) - if (nml_error /= 0) then - call endrun(subname // ':: ERROR reading soil_moisture_streams namelist') - end if - else - call endrun(subname // ':: ERROR finding soil_moisture_streams namelist') - end if - close(nu_nml) - endif - - call shr_mpi_bcast(stream_year_first_soilm, mpicom) - call shr_mpi_bcast(stream_year_last_soilm, mpicom) - call shr_mpi_bcast(model_year_align_soilm, mpicom) - call shr_mpi_bcast(stream_fldfilename_soilm, mpicom) - call shr_mpi_bcast(soilm_tintalgo, mpicom) - call shr_mpi_bcast(soilm_offset, mpicom) - call shr_mpi_bcast(soilm_ignore_data_if_missing, mpicom) - - if (masterproc) then - - write(iulog,*) ' ' - write(iulog,*) 'soil_moisture_stream settings:' - write(iulog,*) ' stream_year_first_soilm = ',stream_year_first_soilm - write(iulog,*) ' stream_year_last_soilm = ',stream_year_last_soilm - write(iulog,*) ' model_year_align_soilm = ',model_year_align_soilm - write(iulog,*) ' stream_fldfilename_soilm = ',trim(stream_fldfilename_soilm) - write(iulog,*) ' soilm_tintalgo = ',trim(soilm_tintalgo) - write(iulog,*) ' soilm_offset = ',soilm_offset - if ( soilm_ignore_data_if_missing )then - write(iulog,*) ' Do NOT override a point with streams data if the streams data is missing' - else - write(iulog,*) ' Abort, if you find a model point where the input streams data is set to missing value' - end if - - endif - - call clm_domain_mct (bounds, dom_clm, nlevels=nlevsoi) - 
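For reference, an illustrative soil_moisture_streams entry in the CLM namelist (placeholder years and path, not shipped defaults):

   &soil_moisture_streams
     stream_year_first_soilm      = 1997
     stream_year_last_soilm       = 1997
     model_year_align_soilm       = 1997
     soilm_offset                 = 0
     soilm_tintalgo               = 'linear'
     soilm_ignore_data_if_missing = .false.
     stream_fldfilename_soilm     = '/path/to/prescribed_h2osoi_stream.nc'
   /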
- ! create the field list for these fields...use in shr_strdata_create - fldList = trim(soilmString) - if (masterproc) write(iulog,*) 'fieldlist: ', trim(fldList) - - call shr_strdata_create(sdat_soilm,name="soil_moisture", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsMap_lnd2Dsoi_gdc2glo, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - nzg=nlevsoi, & - yearFirst=stream_year_first_soilm, & - yearLast=stream_year_last_soilm, & - yearAlign=model_year_align_soilm, & - offset=soilm_offset, & - domFilePath='', & - domFileName=trim(stream_fldFileName_soilm), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domZvarName='levsoi' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/stream_fldFileName_soilm/), & - fldListFile=fldList, & - fldListModel=fldList, & - fillalgo='none', & - mapalgo='none', & - tintalgo=soilm_tintalgo, & - calendar=get_calendar(), & - dtlimit = 15._r8, & - taxmode='cycle' ) - - if (masterproc) then - call shr_strdata_print(sdat_soilm,'soil moisture data') - endif - - end subroutine PrescribedSoilMoistureInit - - - !----------------------------------------------------------------------- - ! - ! PrescribedSoilMoistureAdvance - ! - !----------------------------------------------------------------------- - subroutine PrescribedSoilMoistureAdvance( bounds ) - ! - ! Advance the prescribed soil moisture stream - ! - ! !USES: - use clm_time_manager, only : get_curr_date - ! - ! !ARGUMENTS: - type(bounds_type) , intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - character(len=CL) :: stream_var_name - integer :: g, ig - integer :: ier ! error code - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - stream_var_name = 'H2OSOI' - - ! Determine variable index - ism = mct_aVect_indexRA(sdat_soilm%avs(1),trim(stream_var_name)) - - call shr_strdata_advance(sdat_soilm, mcdate, sec, mpicom, trim(stream_var_name)) - - ! Map gridcell to AV index - ier = 0 - if ( .not. allocated(g_to_ig) )then - allocate (g_to_ig(bounds%begg:bounds%endg), stat=ier) - if (ier /= 0) then - write(iulog,*) 'Prescribed soil moisture allocation error' - call endrun(msg=errMsg(sourcefile, __LINE__)) - end if - - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - g_to_ig(g) = ig - end do - end if - - end subroutine PrescribedSoilMoistureAdvance - - !----------------------------------------------------------------------- - ! - ! PrescribedSoilMoistureInterp - ! - !----------------------------------------------------------------------- - subroutine PrescribedSoilMoistureInterp(bounds, soilstate_inst, & - waterstatebulk_inst) - ! - ! Assign data stream information for prescribed soil moisture. - ! - ! !USES: - use clm_time_manager, only : get_curr_date - use clm_varpar , only : nlevsoi - use clm_varcon , only : denh2o, denice, watmin, spval - use landunit_varcon , only : istsoil, istcrop - ! - ! !ARGUMENTS: - implicit none - type(bounds_type) , intent(in) :: bounds - type(soilstate_type) , intent(in) :: soilstate_inst - type(waterstatebulk_type) , intent(inout) :: waterstatebulk_inst - ! - ! !LOCAL VARIABLES: - integer :: c, g, j, ig, n - real(r8) :: soilm_liq_frac ! 
liquid fraction of soil moisture - real(r8) :: soilm_ice_frac ! ice fraction of soil moisture - real(r8) :: moisture_increment ! soil moisture adjustment increment - real(r8) :: h2osoi_vol_initial ! initial vwc value - character(*), parameter :: subName = "('PrescribedSoilMoistureInterp')" - - !----------------------------------------------------------------------- - - SHR_ASSERT_FL( (lbound(sdat_soilm%avs(1)%rAttr,1) == ism ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(sdat_soilm%avs(1)%rAttr,1) == ism ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(g_to_ig,1) <= bounds%begg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(g_to_ig,1) >= bounds%endg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(sdat_soilm%avs(1)%rAttr,2) <= g_to_ig(bounds%begg) ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(sdat_soilm%avs(1)%rAttr,2) >= g_to_ig(bounds%endg)+(nlevsoi-1)*size(g_to_ig) ), sourcefile, __LINE__) - associate( & - dz => col%dz , & ! Input: [real(r8) (:,:) ] layer depth (m) - watsat => soilstate_inst%watsat_col , & ! Input: [real(r8) (:,:) ] volumetric soil water at saturation (porosity) - h2osoi_liq => waterstatebulk_inst%h2osoi_liq_col , & ! Input/Output: [real(r8) (:,:) ] liquid water (kg/m2) - h2osoi_ice => waterstatebulk_inst%h2osoi_ice_col , & ! Input/Output: [real(r8) (:,:) ] ice water (kg/m2) - h2osoi_vol => waterstatebulk_inst%h2osoi_vol_col , & ! Output: volumetric soil water (m3/m3) - h2osoi_vol_prs => waterstatebulk_inst%h2osoi_vol_prs_grc & ! Output: prescribed volumetric soil water (m3/m3) - ) - SHR_ASSERT_FL( (lbound(h2osoi_vol,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_vol,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_vol,2) == 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_vol,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(dz,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(dz,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(dz,2) <= 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(dz,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(watsat,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(watsat,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(watsat,2) <= 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(watsat,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_liq,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_liq,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_liq,2) <= 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_liq,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_ice,1) <= bounds%begc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_ice,1) >= bounds%endc ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_ice,2) <= 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_ice,2) >= nlevsoi ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_vol_prs,1) <= bounds%begg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_vol_prs,1) >= bounds%endg ), sourcefile, __LINE__) - SHR_ASSERT_FL( (lbound(h2osoi_vol_prs,2) == 1 ), sourcefile, __LINE__) - SHR_ASSERT_FL( (ubound(h2osoi_vol_prs,2) >= nlevsoi ), sourcefile, __LINE__) - ! - ! Set the prescribed soil moisture read from the file everywhere - ! 
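A note on the index arithmetic in the loop below, which the assertions above encode: the stream attribute vector flattens the 2D (gridcell, soil level) field into one vector, level block by level block, so the value for local gridcell index ig at level j sits at n = ig + (j-1)*size(g_to_ig). A minimal sketch with a hypothetical local decomposition of 3 gridcells:

   ! n = 1,2,3 -> level 1 for local gridcells 1..3
   ! n = 4,5,6 -> level 2 for local gridcells 1..3
   n = ig + (j-1)*size(g_to_ig)
   h2osoi_vol_prs(g,j) = sdat_soilm%avs(1)%rAttr(ism,n)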
- do g = bounds%begg, bounds%endg - ig = g_to_ig(g) - do j = 1, nlevsoi - - !n = ig + (j-1)*size(g_to_ig) - n = ig + (j-1)*size(g_to_ig) - - h2osoi_vol_prs(g,j) = sdat_soilm%avs(1)%rAttr(ism,n) - - ! If soil moisture is being interpolated in time and the result is - ! large, that probably means one of the two data points is missing (set to spval) - if ( h2osoi_vol_prs(g,j) > 10.0_r8 .and. (h2osoi_vol_prs(g,j) /= spval) )then - h2osoi_vol_prs(g,j) = spval - end if - - end do - end do - - do c = bounds%begc, bounds%endc - ! - ! Set variable for each gridcell/column combination - ! - g = col%gridcell(c) - ig = g_to_ig(g) - - ! EBK Jan/2020, also check weights on gridcell (See https://github.com/ESCOMP/CTSM/issues/847) - if ( (lun%itype(col%landunit(c)) == istsoil) .or. (lun%itype(col%landunit(c)) == istcrop) .and. & - (col%wtgcell(c) /= 0._r8) ) then - ! this is a 2d field (gridcell/nlevsoi) ! - do j = 1, nlevsoi - - n = ig + (j-1)*size(g_to_ig) - - ! if soil water is zero, liq/ice fractions cannot be calculated - if((h2osoi_liq(c, j) + h2osoi_ice(c, j)) > 0._r8) then - - ! save original soil moisture value - h2osoi_vol_initial = h2osoi_vol(c,j) - - ! Check if the vegetated land mask from the dataset on the - ! file is different - if ( (h2osoi_vol_prs(g,j) == spval) .and. (h2osoi_vol_initial /= spval) )then - if ( soilm_ignore_data_if_missing )then - cycle - else - write(iulog,*) 'Input soil moisture dataset is not vegetated as expected: gridcell=', & - g, ' active = ', col%active(c) - call endrun(subgrid_index=c, subgrid_level=subgrid_level_column, & - msg = subname // & - ' ERROR:: The input soil moisture stream is NOT vegetated for one of the land points' ) - end if - end if - - ! update volumetric soil moisture from data prescribed from the file - h2osoi_vol(c,j) = h2osoi_vol_prs(g,j) - - - ! calculate liq/ice mass fractions - soilm_liq_frac = h2osoi_liq(c, j) /(h2osoi_liq(c, j) + h2osoi_ice(c, j)) - soilm_ice_frac = h2osoi_ice(c, j) /(h2osoi_liq(c, j) + h2osoi_ice(c, j)) - - ! calculate moisture increment - moisture_increment = h2osoi_vol(c,j) - h2osoi_vol_initial - ! add limitation check - moisture_increment = min((watsat(c,j) - h2osoi_vol_initial),max(-(h2osoi_vol_initial-watmin),moisture_increment)) - - ! update liq/ice water mass due to (volumetric) moisture increment - h2osoi_liq(c,j) = h2osoi_liq(c,j) + (soilm_liq_frac * moisture_increment * dz(c, j) * denh2o) - h2osoi_ice(c,j) = h2osoi_ice(c,j) + (soilm_ice_frac * moisture_increment * dz(c, j) * denice) - - else - call endrun(subgrid_index=c, subgrid_level=subgrid_level_column, & - msg = subname // ':: ERROR h2osoi liquid plus ice is zero') - endif - enddo - endif - end do - - end associate - - end subroutine PrescribedSoilMoistureInterp - -end module SoilMoistureStreamMod diff --git a/src/cpl/mct/UrbanTimeVarType.F90 b/src/cpl/mct/UrbanTimeVarType.F90 deleted file mode 100644 index 805ac47fbf..0000000000 --- a/src/cpl/mct/UrbanTimeVarType.F90 +++ /dev/null @@ -1,314 +0,0 @@ -module UrbanTimeVarType - - !------------------------------------------------------------------------------ - ! !DESCRIPTION: - ! Urban Time Varying Data - ! - ! 
!USES: - use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL - use shr_log_mod , only : errMsg => shr_log_errMsg - use abortutils , only : endrun - use decompMod , only : bounds_type, subgrid_level_landunit - use clm_varctl , only : iulog, inst_name - use landunit_varcon , only : isturb_MIN, isturb_MAX - use clm_varcon , only : spval - use LandunitType , only : lun - use GridcellType , only : grc - use mct_mod - use shr_strdata_mod , only : shr_strdata_type - ! - implicit none - save - private - ! - ! - - ! !PUBLIC TYPE - type, public :: urbantv_type - - real(r8), public, pointer :: t_building_max(:) ! lun maximum internal building air temperature (K) - type(shr_strdata_type) :: sdat_urbantv ! urban time varying input data stream - contains - - ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: Init ! Allocate and initialize urbantv - procedure, public :: urbantv_init ! Initialize urban time varying stream - procedure, public :: urbantv_interp ! Interpolate urban time varying stream - - end type urbantv_type - - !----------------------------------------------------------------------- - character(15), private :: stream_var_name(isturb_MIN:isturb_MAX) - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - -contains - - !----------------------------------------------------------------------- - subroutine Init(this, bounds, NLFilename) - ! - ! Allocate module variables and data structures - ! - ! !USES: - use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) - use histFileMod , only : hist_addfld1d - ! - ! !ARGUMENTS: - class(urbantv_type) :: this - type(bounds_type) , intent(in) :: bounds - character(len=*) , intent(in) :: NLFilename ! Namelist filename - ! - ! !LOCAL VARIABLES: - integer :: begl, endl - !--------------------------------------------------------------------- - - begl = bounds%begl; endl = bounds%endl - - ! Allocate urbantv data structure - - allocate(this%t_building_max (begl:endl)) ; this%t_building_max (:) = nan - - call this%urbantv_init(bounds, NLFilename) - call this%urbantv_interp(bounds) - - ! Add history fields - call hist_addfld1d (fname='TBUILD_MAX', units='K', & - avgflag='A', long_name='prescribed maximum interior building temperature', & - ptr_lunit=this%t_building_max, default='inactive', set_nourb=spval, & - l2g_scale_type='unity') - - - end subroutine Init - - !----------------------------------------------------------------------- - - !----------------------------------------------------------------------- - subroutine urbantv_init(this, bounds, NLFilename) - ! - ! !DESCRIPTION: - ! Initialize data stream information for urban time varying data - ! - ! !USES: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use clm_nlUtilsMod , only : find_nlgroup_name - use ndepStreamMod , only : clm_domain_mct - use spmdMod , only : masterproc, mpicom, comp_id - use fileutils , only : getavu, relavu - use shr_mpi_mod , only : shr_mpi_bcast - use shr_string_mod , only : shr_string_listAppend - use shr_strdata_mod , only : shr_strdata_create, shr_strdata_print - use domainMod , only : ldomain - use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) - use landunit_varcon , only : isturb_TBD, isturb_HD, isturb_MD - use lnd_set_decomp_and_domain , only : gsmap_global - ! - ! !ARGUMENTS: - implicit none - class(urbantv_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! 
!LOCAL VARIABLES: - integer :: begl, endl ! landunits - integer :: ifield ! field index - integer :: stream_year_first_urbantv ! first year in urban tv stream to use - integer :: stream_year_last_urbantv ! last year in urban tv stream to use - integer :: model_year_align_urbantv ! align stream_year_first_urbantv - ! with this model year - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - type(mct_ggrid) :: dom_clm ! domain information - character(len=CL) :: stream_fldFileName_urbantv ! urban tv streams filename - character(len=CL) :: urbantvmapalgo = 'nn' ! mapping algorithm for urban ac - character(len=CL) :: urbantv_tintalgo = 'linear' ! time interpolation algorithm - character(len=CL) :: fldList ! field string - character(*), parameter :: urbantvString = "tbuildmax_" ! base string for field string - character(*), parameter :: subName = "('urbantv_init')" - character(*), parameter :: F00 = "('(urbantv_init) ',4a)" - !----------------------------------------------------------------------- - namelist /urbantv_streams/ & - stream_year_first_urbantv, & - stream_year_last_urbantv, & - model_year_align_urbantv, & - urbantvmapalgo, & - stream_fldFileName_urbantv, & - urbantv_tintalgo - !----------------------------------------------------------------------- - - begl = bounds%begl; endl = bounds%endl - - ! Default values for namelist - stream_year_first_urbantv = 1 ! first year in stream to use - stream_year_last_urbantv = 1 ! last year in stream to use - model_year_align_urbantv = 1 ! align stream_year_first_urbantv with this model year - stream_fldFileName_urbantv = ' ' - - ! Read urbantv_streams namelist - if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call find_nlgroup_name(nu_nml, 'urbantv_streams', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=urbantv_streams,iostat=nml_error) - if (nml_error /= 0) then - call endrun(msg='ERROR reading urbantv_streams namelist'//errMsg(sourcefile, __LINE__)) - end if - end if - close(nu_nml) - call relavu( nu_nml ) - endif - - call shr_mpi_bcast(stream_year_first_urbantv, mpicom) - call shr_mpi_bcast(stream_year_last_urbantv, mpicom) - call shr_mpi_bcast(model_year_align_urbantv, mpicom) - call shr_mpi_bcast(stream_fldFileName_urbantv, mpicom) - call shr_mpi_bcast(urbantv_tintalgo, mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) 'urbantv_streams settings:' - write(iulog,*) ' stream_year_first_urbantv = ',stream_year_first_urbantv - write(iulog,*) ' stream_year_last_urbantv = ',stream_year_last_urbantv - write(iulog,*) ' model_year_align_urbantv = ',model_year_align_urbantv - write(iulog,*) ' stream_fldFileName_urbantv = ',stream_fldFileName_urbantv - write(iulog,*) ' urbantv_tintalgo = ',urbantv_tintalgo - write(iulog,*) ' ' - endif - - call clm_domain_mct (bounds, dom_clm) - 
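An illustrative urbantv_streams namelist entry (placeholder years and path, not shipped defaults):

   &urbantv_streams
     stream_year_first_urbantv  = 1850
     stream_year_last_urbantv   = 2106
     model_year_align_urbantv   = 1850
     urbantvmapalgo             = 'nn'
     stream_fldfilename_urbantv = '/path/to/tbuildmax_stream.nc'
   /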
- ! create the field list for these urbantv fields...use in shr_strdata_create - stream_var_name(:) = "NOT_SET" - stream_var_name(isturb_TBD) = urbantvString//"TBD" - stream_var_name(isturb_HD) = urbantvString//"HD" - stream_var_name(isturb_MD) = urbantvString//"MD" - fldList = "" - do ifield = isturb_MIN, isturb_MAX - call shr_string_listAppend( fldList, stream_var_name(ifield) ) - end do - - call shr_strdata_create(this%sdat_urbantv,name="clmurbantv", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_urbantv, & - yearLast=stream_year_last_urbantv, & - yearAlign=model_year_align_urbantv, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_urbantv), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='LANDMASK', & - filePath='', & - filename=(/trim(stream_fldFileName_urbantv)/) , & - fldListFile=fldList, & - fldListModel=fldList, & - fillalgo='none', & - mapalgo=urbantvmapalgo, & - calendar=get_calendar(), & - tintalgo=urbantv_tintalgo, & - taxmode='extend' ) - - if (masterproc) then - call shr_strdata_print(this%sdat_urbantv,'urban time varying data') - endif - - - end subroutine urbantv_init - - !----------------------------------------------------------------------- - subroutine urbantv_interp(this, bounds) - ! - ! !DESCRIPTION: - ! Interpolate data stream information for urban time varying data. - ! - ! !USES: - use clm_time_manager, only : get_curr_date - use spmdMod , only : mpicom - use shr_strdata_mod , only : shr_strdata_advance - use clm_instur , only : urban_valid - ! - ! !ARGUMENTS: - class(urbantv_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - logical :: found - integer :: l, glun, ig, g, ip - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - integer :: lindx ! landunit index - integer :: gindx ! gridcell index - !----------------------------------------------------------------------- - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(this%sdat_urbantv, mcdate, sec, mpicom, 'urbantvdyn') - - do l = bounds%begl,bounds%endl - if (lun%urbpoi(l)) then - glun = lun%gridcell(l) - ip = mct_aVect_indexRA(this%sdat_urbantv%avs(1),trim(stream_var_name(lun%itype(l)))) - ! - ! Determine vector index corresponding to glun - ! - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - if (g == glun) exit - end do - - this%t_building_max(l) = this%sdat_urbantv%avs(1)%rAttr(ip,ig) - else - this%t_building_max(l) = spval - end if - end do - - found = .false. - do l = bounds%begl,bounds%endl - if (lun%urbpoi(l)) then - glun = lun%gridcell(l) - ! - ! Determine vector index corresponding to glun - ! - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - if (g == glun) exit - end do - - if ( .not. urban_valid(g) .or. (this%t_building_max(l) <= 0._r8)) then - found = .true. 
- gindx = g - lindx = l - exit - end if - end if - end do - if ( found ) then - write(iulog,*)'ERROR: no valid urban data for g= ',gindx - write(iulog,*)'landunit type: ',lun%itype(lindx) - write(iulog,*)'urban_valid: ',urban_valid(gindx) - write(iulog,*)'t_building_max: ',this%t_building_max(lindx) - call endrun(subgrid_index=lindx, subgrid_level=subgrid_level_landunit, & - msg=errmsg(sourcefile, __LINE__)) - end if - - - end subroutine urbantv_interp - - !----------------------------------------------------------------------- - -end module UrbanTimeVarType diff --git a/src/cpl/mct/ch4FInundatedStreamType.F90 b/src/cpl/mct/ch4FInundatedStreamType.F90 deleted file mode 100644 index 3c26f4d109..0000000000 --- a/src/cpl/mct/ch4FInundatedStreamType.F90 +++ /dev/null @@ -1,389 +0,0 @@ -module ch4FInundatedStreamType - -#include "shr_assert.h" - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Contains methods for reading in finundated streams file for methane code. - ! - ! !USES - use shr_kind_mod , only: r8 => shr_kind_r8, CL => shr_kind_cl - use spmdMod , only: mpicom, masterproc - use clm_varctl , only: iulog, inst_name - use abortutils , only: endrun - use decompMod , only: bounds_type - use ch4varcon , only: finundation_mtd - - ! !PUBLIC TYPES: - implicit none - private - save - - type, public :: ch4finundatedstream_type - real(r8), pointer, private :: zwt0_gdc (:) ! col coefficient for determining finundated (m) - real(r8), pointer, private :: f0_gdc (:) ! col maximum inundated fraction for a gridcell (for methane code) - real(r8), pointer, private :: p3_gdc (:) ! col coefficient for determining finundated (m) - real(r8), pointer, private :: fws_slope_gdc (:) ! col slope in fws = slope * tws + intercept (A coefficient) - real(r8), pointer, private :: fws_intercept_gdc (:) ! col slope in fws = slope * tws + intercept (B coefficient) - contains - - ! !PUBLIC MEMBER FUNCTIONS: - procedure, public :: Init ! Initialize and read data in - procedure, public :: CalcFinundated ! Calculate finundated based on input streams - procedure, public :: UseStreams ! If streams will be used - - ! !PRIVATE MEMBER FUNCTIONS: - procedure, private :: InitAllocate ! Allocate data - - end type ch4finundatedstream_type - - - ! ! PRIVATE DATA: - - type, private :: streamcontrol_type - character(len=CL) :: stream_fldFileName_ch4finundated ! Filename - character(len=CL) :: ch4finundatedmapalgo ! map algo - character(len=CL) :: fldList ! List of fields to read - contains - procedure, private :: ReadNML ! Read in namelist - end type streamcontrol_type - - type(streamcontrol_type), private :: control ! Stream control data - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - !============================================================================== - -contains - - !============================================================================== - - subroutine Init(this, bounds, NLFilename) - ! - ! Initialize the ch4 finundated stream object - ! - ! 
Uses: - use clm_time_manager , only : get_calendar, get_curr_date - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use shr_nl_mod , only : shr_nl_find_group_name - use shr_mpi_mod , only : shr_mpi_bcast - use ndepStreamMod , only : clm_domain_mct - use domainMod , only : ldomain - use decompMod , only : bounds_type - use mct_mod , only : mct_ggrid, mct_avect_indexra - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create - use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance - use spmdMod , only : comp_id, iam - use ch4varcon , only : finundation_mtd_h2osfc - use ch4varcon , only : finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion - use lnd_set_decomp_and_domain , only : gsmap_global - ! - ! arguments - implicit none - class(ch4finundatedstream_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! local variables - integer :: ig, g ! Indices - type(mct_ggrid) :: dom_clm ! domain information - type(shr_strdata_type) :: sdat ! input data stream - integer :: index_ZWT0 = 0 ! Index of ZWT0 field - integer :: index_F0 = 0 ! Index of F0 field - integer :: index_P3 = 0 ! Index of P3 field - integer :: index_FWS_TWS_A = 0 ! Index of FWS_TWS_A field - integer :: index_FWS_TWS_B = 0 ! Index of FWS_TWS_B field - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - character(len=*), parameter :: stream_name = 'ch4finundated' - character(*), parameter :: subName = "('ch4finundatedstream::Init')" - !----------------------------------------------------------------------- - if ( finundation_mtd /= finundation_mtd_h2osfc )then - call this%InitAllocate( bounds ) - call control%ReadNML( bounds, NLFileName ) - - if ( this%useStreams() )then - call clm_domain_mct (bounds, dom_clm) - - call shr_strdata_create(sdat,name=stream_name, & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=1996, & - yearLast=1996, & - yearAlign=1, & - offset=0, & - domFilePath='', & - domFileName=trim(control%stream_fldFileName_ch4finundated), & - domTvarName='time', & - domXvarName='LONGXY' , & - domYvarName='LATIXY' , & - domAreaName='AREA', & - domMaskName='LANDMASK', & - filePath='', & - filename=(/trim(control%stream_fldFileName_ch4finundated)/), & - fldListFile=control%fldList, & - fldListModel=control%fldList, & - fillalgo='none', & - mapalgo=control%ch4finundatedmapalgo, & - calendar=get_calendar(), & - taxmode='extend' ) - - if (masterproc) then - call shr_strdata_print(sdat,'CLM '//stream_name//' data') - endif - - if( finundation_mtd == finundation_mtd_ZWT_inversion )then - index_ZWT0 = mct_avect_indexra(sdat%avs(1),'ZWT0') - index_F0 = mct_avect_indexra(sdat%avs(1),'F0' ) - index_P3 = mct_avect_indexra(sdat%avs(1),'P3' ) - else if( finundation_mtd == finundation_mtd_TWS_inversion )then - index_FWS_TWS_A = mct_avect_indexra(sdat%avs(1),'FWS_TWS_A') - index_FWS_TWS_B = mct_avect_indexra(sdat%avs(1),'FWS_TWS_B') - end if - - - ! Explicitly set current date to a hardcoded constant value. Otherwise - ! using the real date can cause roundoff differences that are - detected as issues with exact restart. 
EBK M05/20/2017 - !call get_curr_date(year, mon, day, sec) - year = 1996 - mon = 12 - day = 31 - sec = 0 - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(sdat, mcdate, sec, mpicom, 'ch4finundated') - - ! Get the data - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - if ( index_ZWT0 > 0 )then - this%zwt0_gdc(g) = sdat%avs(1)%rAttr(index_ZWT0,ig) - end if - if ( index_F0 > 0 )then - this%f0_gdc(g) = sdat%avs(1)%rAttr(index_F0,ig) - end if - if ( index_P3 > 0 )then - this%p3_gdc(g) = sdat%avs(1)%rAttr(index_P3,ig) - end if - if ( index_FWS_TWS_A > 0 )then - this%fws_slope_gdc(g) = sdat%avs(1)%rAttr(index_FWS_TWS_A,ig) - end if - if ( index_FWS_TWS_B > 0 )then - this%fws_intercept_gdc(g) = sdat%avs(1)%rAttr(index_FWS_TWS_B,ig) - end if - end do - end if - end if - - end subroutine Init - - !----------------------------------------------------------------------- - logical function UseStreams(this) - ! - ! !DESCRIPTION: - ! Return true if the finundated streams file will be used (i.e., a stream filename was provided) - ! - ! !USES: - ! - ! !ARGUMENTS: - implicit none - class(ch4finundatedstream_type) :: this - ! - ! !LOCAL VARIABLES: - if ( trim(control%stream_fldFileName_ch4finundated) == '' )then - UseStreams = .false. - else - UseStreams = .true. - end if - end function UseStreams - - !----------------------------------------------------------------------- - subroutine InitAllocate(this, bounds) - ! - ! !DESCRIPTION: - ! Allocate module variables and data structures - ! - ! !USES: - use shr_infnan_mod, only: nan => shr_infnan_nan, assignment(=) - use ch4varcon , only: finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion - ! - ! !ARGUMENTS: - implicit none - class(ch4finundatedstream_type) :: this - type(bounds_type), intent(in) :: bounds - ! - ! !LOCAL VARIABLES: - integer :: begc, endc - integer :: begg, endg - !--------------------------------------------------------------------- - - begc = bounds%begc; endc = bounds%endc - begg = bounds%begg; endg = bounds%endg - - if( finundation_mtd == finundation_mtd_ZWT_inversion )then - allocate(this%zwt0_gdc (begg:endg)) ; this%zwt0_gdc (:) = nan - allocate(this%f0_gdc (begg:endg)) ; this%f0_gdc (:) = nan - allocate(this%p3_gdc (begg:endg)) ; this%p3_gdc (:) = nan - else if( finundation_mtd == finundation_mtd_TWS_inversion )then - allocate(this%fws_slope_gdc (begg:endg)) ; this%fws_slope_gdc (:) = nan - allocate(this%fws_intercept_gdc(begg:endg)) ; this%fws_intercept_gdc(:) = nan - end if - - end subroutine InitAllocate - - !----------------------------------------------------------------------- - subroutine CalcFinundated(this, bounds, num_soilc, filter_soilc, soilhydrology_inst, & - waterdiagnosticbulk_inst, qflx_surf_lag_col, finundated ) - ! - ! !DESCRIPTION: - ! - ! Calculate finundated according to the appropriate methodology - ! - ! !USES: - use ColumnType , only : col - use ch4varcon , only : finundation_mtd_h2osfc, finundation_mtd_ZWT_inversion - use ch4varcon , only : finundation_mtd_TWS_inversion - use clm_varpar , only : nlevsoi - use SoilHydrologyType, only : soilhydrology_type - use WaterDiagnosticBulkType , only : waterdiagnosticbulk_type - ! - ! !ARGUMENTS: - implicit none - class(ch4finundatedstream_type) :: this - type(bounds_type) , intent(in) :: bounds - integer , intent(in) :: num_soilc ! number of column soil points in column filter - integer , intent(in) :: filter_soilc(:) ! 
column filter for soil points - type(soilhydrology_type) , intent(in) :: soilhydrology_inst - type(waterdiagnosticbulk_type) , intent(in) :: waterdiagnosticbulk_inst - real(r8) , intent(in) :: qflx_surf_lag_col(bounds%begc:) !time-lagged surface runoff (mm H2O /s) - real(r8) , intent(inout) :: finundated(bounds%begc:) ! fractional inundated area in soil column (excluding dedicated wetland columns) - ! - ! !LOCAL VARIABLES: - integer :: g, c, fc ! Indices - real(r8) :: zwt_actual ! Water table depth (m) to use: either perched or total, depending on conditions - - SHR_ASSERT_ALL_FL((ubound(qflx_surf_lag_col) == (/bounds%endc/)), sourcefile, __LINE__) - SHR_ASSERT_ALL_FL((ubound(finundated) == (/bounds%endc/)), sourcefile, __LINE__) - - associate( & - z => col%z , & ! Input: [real(r8) (:,:) ] layer depth (m) (-nlevsno+1:nlevsoi) - zwt => soilhydrology_inst%zwt_col , & ! Input: [real(r8) (:) ] water table depth (m) - zwt_perched => soilhydrology_inst%zwt_perched_col , & ! Input: [real(r8) (:) ] perched water table depth (m) - tws => waterdiagnosticbulk_inst%tws_grc , & ! Input: [real(r8) (:) ] total water storage (kg m-2) - frac_h2osfc => waterdiagnosticbulk_inst%frac_h2osfc_col & ! Input: [real(r8) (:) ] fraction of ground covered by surface water (0 to 1) - ) - - ! Calculate finundated - do fc = 1, num_soilc - c = filter_soilc(fc) - g = col%gridcell(c) - select case( finundation_mtd ) - case ( finundation_mtd_h2osfc ) - finundated(c) = frac_h2osfc(c) - case ( finundation_mtd_ZWT_inversion ) - if (this%zwt0_gdc(g) > 0._r8) then - if (zwt_perched(c) < z(c,nlevsoi)-1.e-5_r8 .and. zwt_perched(c) < zwt(c)) then - zwt_actual = zwt_perched(c) - else - zwt_actual = zwt(c) - end if - finundated(c) = this%f0_gdc(g) * exp(-zwt_actual/this%zwt0_gdc(g)) + this%p3_gdc(g)*qflx_surf_lag_col(c) - else - finundated(c) = this%p3_gdc(g)*qflx_surf_lag_col(c) - end if - case ( finundation_mtd_TWS_inversion ) - finundated(c) = this%fws_slope_gdc(g) * tws(g) + this%fws_intercept_gdc(g) - end select - finundated(c) = min( 1.0_r8, max( 0.0_r8, finundated(c) ) ) - end do - end associate - - end subroutine CalcFinundated - !============================================================================== - - subroutine ReadNML(this, bounds, NLFilename) - ! - ! Read the namelist data stream information. - ! - ! Uses: - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use shr_nl_mod , only : shr_nl_find_group_name - use shr_log_mod , only : errMsg => shr_log_errMsg - use shr_mpi_mod , only : shr_mpi_bcast - use fileutils , only : getavu, relavu - use ch4varcon , only : finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion - ! - ! arguments - implicit none - class(streamcontrol_type) :: this - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! local variables - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - character(len=CL) :: stream_fldFileName_ch4finundated = ' ' - character(len=CL) :: ch4finundatedmapalgo = 'bilinear' - character(len=*), parameter :: namelist_name = 'ch4finundated' ! 
MUST agree with name in namelist and read - character(len=*), parameter :: shr_strdata_unset = 'NOT_SET' - character(len=*), parameter :: subName = "('ch4finundated::ReadNML')" - character(len=*), parameter :: F00 = "('(ch4finundated_readnml) ',4a)" - !----------------------------------------------------------------------- - - namelist /ch4finundated/ & ! MUST agree with namelist_name above - ch4finundatedmapalgo, stream_fldFileName_ch4finundated - - ! Default values for namelist - - ! Read ch4finundated namelist - if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call shr_nl_find_group_name(nu_nml, namelist_name, status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=ch4finundated,iostat=nml_error) ! MUST agree with namelist_name above - if (nml_error /= 0) then - call endrun(msg=' ERROR reading '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) - end if - else - call endrun(msg=' ERROR finding '//namelist_name//' namelist'//errMsg(sourcefile, __LINE__)) - end if - close(nu_nml) - call relavu( nu_nml ) - endif - - call shr_mpi_bcast(stream_fldFileName_ch4finundated, mpicom) - call shr_mpi_bcast(ch4finundatedmapalgo , mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) namelist_name, ' stream settings:' - write(iulog,*) ' stream_fldFileName_ch4finundated = ',stream_fldFileName_ch4finundated - write(iulog,*) ' ch4finundatedmapalgo = ',ch4finundatedmapalgo - write(iulog,*) ' ' - endif - this%stream_fldFileName_ch4finundated = stream_fldFileName_ch4finundated - this%ch4finundatedmapalgo = ch4finundatedmapalgo - if ( finundation_mtd == finundation_mtd_ZWT_inversion )then - this%fldList = "ZWT0:F0:P3" - else if ( finundation_mtd == finundation_mtd_TWS_inversion )then - this%fldList = "FWS_TWS_A:FWS_TWS_B" - else - call endrun(msg=' ERROR do NOT know what list of variables to read for this finundation_mtd type'// & - errMsg(sourcefile, __LINE__)) - end if - - end subroutine ReadNML - -end module ch4FInundatedStreamType diff --git a/src/cpl/mct/clm_cpl_indices.F90 b/src/cpl/mct/clm_cpl_indices.F90 deleted file mode 100644 index 09ed89e92d..0000000000 --- a/src/cpl/mct/clm_cpl_indices.F90 +++ /dev/null @@ -1,330 +0,0 @@ -module clm_cpl_indices - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Module containing the indices for the fields passed between CLM and - ! the driver. Includes the River Transport Model fields (RTM) and the - ! fields needed by the land-ice component (sno). - ! - ! !USES: - - use shr_sys_mod, only : shr_sys_abort - implicit none - - SAVE - private ! By default make data private - ! - ! !PUBLIC MEMBER FUNCTIONS: - public :: clm_cpl_indices_set ! Set the coupler indices - ! - ! !PUBLIC DATA MEMBERS: - ! - integer , public :: glc_nec ! number of elevation classes for glacier_mec landunits - ! (from coupler) - must equal maxpatch_glc from namelist - - ! lnd -> drv (required) - - integer, public ::index_l2x_Flrl_rofsur ! lnd->rtm input liquid surface fluxes - integer, public ::index_l2x_Flrl_rofgwl ! lnd->rtm input liquid gwl fluxes - integer, public ::index_l2x_Flrl_rofsub ! lnd->rtm input liquid subsurface fluxes - integer, public ::index_l2x_Flrl_rofi ! lnd->rtm input frozen fluxes - integer, public ::index_l2x_Flrl_irrig ! irrigation withdrawal - - integer, public ::index_l2x_Sl_t ! temperature - integer, public ::index_l2x_Sl_tref ! 2m reference temperature - integer, public ::index_l2x_Sl_qref ! 
2m reference specific humidity - integer, public ::index_l2x_Sl_avsdr ! albedo: direct , visible - integer, public ::index_l2x_Sl_anidr ! albedo: direct , near-ir - integer, public ::index_l2x_Sl_avsdf ! albedo: diffuse, visible - integer, public ::index_l2x_Sl_anidf ! albedo: diffuse, near-ir - integer, public ::index_l2x_Sl_snowh ! snow height - integer, public ::index_l2x_Sl_u10 ! 10m wind - integer, public ::index_l2x_Sl_ddvel ! dry deposition velocities (optional) - integer, public ::index_l2x_Sl_fv ! friction velocity - integer, public ::index_l2x_Sl_ram1 ! aerodynamical resistance - integer, public ::index_l2x_Sl_soilw ! volumetric soil water - integer, public ::index_l2x_Fall_taux ! wind stress, zonal - integer, public ::index_l2x_Fall_tauy ! wind stress, meridional - integer, public ::index_l2x_Fall_lat ! latent heat flux - integer, public ::index_l2x_Fall_sen ! sensible heat flux - integer, public ::index_l2x_Fall_lwup ! upward longwave heat flux - integer, public ::index_l2x_Fall_evap ! evaporation water flux - integer, public ::index_l2x_Fall_swnet ! heat flux shortwave net - integer, public ::index_l2x_Fall_fco2_lnd ! co2 flux **For testing set to 0 - integer, public ::index_l2x_Fall_flxdst1 ! dust flux size bin 1 - integer, public ::index_l2x_Fall_flxdst2 ! dust flux size bin 2 - integer, public ::index_l2x_Fall_flxdst3 ! dust flux size bin 3 - integer, public ::index_l2x_Fall_flxdst4 ! dust flux size bin 4 - integer, public ::index_l2x_Fall_flxvoc ! MEGAN fluxes - integer, public ::index_l2x_Fall_flxfire ! Fire fluxes - integer, public ::index_l2x_Sl_ztopfire ! Top of fire emissions (m) - - ! In the following, index 0 is bare land, other indices are glc elevation classes - integer, allocatable, public ::index_l2x_Sl_tsrf(:) ! glc MEC temperature - integer, allocatable, public ::index_l2x_Sl_topo(:) ! glc MEC topo height - integer, allocatable, public ::index_l2x_Flgl_qice(:) ! glc MEC ice flux - - integer, public ::index_x2l_Sa_methane - integer, public ::index_l2x_Fall_methane - - integer, public :: nflds_l2x = 0 - - ! drv -> lnd (required) - - integer, public ::index_x2l_Sa_z ! bottom atm level height - integer, public ::index_x2l_Sa_topo ! atm surface height (m) - integer, public ::index_x2l_Sa_u ! bottom atm level zon wind - integer, public ::index_x2l_Sa_v ! bottom atm level mer wind - integer, public ::index_x2l_Sa_ptem ! bottom atm level pot temp - integer, public ::index_x2l_Sa_shum ! bottom atm level spec hum - integer, public ::index_x2l_Sa_pbot ! bottom atm level pressure - integer, public ::index_x2l_Sa_tbot ! bottom atm level temp - integer, public ::index_x2l_Faxa_lwdn ! downward lw heat flux - integer, public ::index_x2l_Faxa_rainc ! prec: liquid "convective" - integer, public ::index_x2l_Faxa_rainl ! prec: liquid "large scale" - integer, public ::index_x2l_Faxa_snowc ! prec: frozen "convective" - integer, public ::index_x2l_Faxa_snowl ! prec: frozen "large scale" - integer, public ::index_x2l_Faxa_swndr ! sw: nir direct downward - integer, public ::index_x2l_Faxa_swvdr ! sw: vis direct downward - integer, public ::index_x2l_Faxa_swndf ! sw: nir diffuse downward - integer, public ::index_x2l_Faxa_swvdf ! sw: vis diffuse downward - integer, public ::index_x2l_Sa_co2prog ! bottom atm level prognostic co2 - integer, public ::index_x2l_Sa_co2diag ! bottom atm level diagnostic co2 - integer, public ::index_x2l_Faxa_bcphidry ! flux: Black Carbon hydrophilic dry deposition - integer, public ::index_x2l_Faxa_bcphodry ! 
flux: Black Carbon hydrophobic dry deposition - integer, public ::index_x2l_Faxa_bcphiwet ! flux: Black Carbon hydrophilic wet deposition - integer, public ::index_x2l_Faxa_ocphidry ! flux: Organic Carbon hydrophilic dry deposition - integer, public ::index_x2l_Faxa_ocphodry ! flux: Organic Carbon hydrophobic dry deposition - integer, public ::index_x2l_Faxa_ocphiwet ! flux: Organic Carbon hydrophilic wet deposition - integer, public ::index_x2l_Faxa_dstwet1 ! flux: Size 1 dust -- wet deposition - integer, public ::index_x2l_Faxa_dstwet2 ! flux: Size 2 dust -- wet deposition - integer, public ::index_x2l_Faxa_dstwet3 ! flux: Size 3 dust -- wet deposition - integer, public ::index_x2l_Faxa_dstwet4 ! flux: Size 4 dust -- wet deposition - integer, public ::index_x2l_Faxa_dstdry1 ! flux: Size 1 dust -- dry deposition - integer, public ::index_x2l_Faxa_dstdry2 ! flux: Size 2 dust -- dry deposition - integer, public ::index_x2l_Faxa_dstdry3 ! flux: Size 3 dust -- dry deposition - integer, public ::index_x2l_Faxa_dstdry4 ! flux: Size 4 dust -- dry deposition - - integer, public ::index_x2l_Faxa_nhx ! flux nhx from atm - integer, public ::index_x2l_Faxa_noy ! flux noy from atm - - integer, public ::index_x2l_Flrr_flood ! rtm->lnd rof flood flux - integer, public ::index_x2l_Flrr_volr ! rtm->lnd rof volr total volume - integer, public ::index_x2l_Flrr_volrmch ! rtm->lnd rof volr main channel volume - - ! In the following, index 0 is bare land, other indices are glc elevation classes - integer, allocatable, public ::index_x2l_Sg_ice_covered(:) ! Fraction of glacier from glc model - integer, allocatable, public ::index_x2l_Sg_topo(:) ! Topo height from glc model - integer, allocatable, public ::index_x2l_Flgg_hflx(:) ! Heat flux from glc model - - integer, public ::index_x2l_Sg_icemask - integer, public ::index_x2l_Sg_icemask_coupled_fluxes - - integer, public :: nflds_x2l = 0 - - !----------------------------------------------------------------------- - -contains - - !----------------------------------------------------------------------- - subroutine clm_cpl_indices_set( ) - ! - ! !DESCRIPTION: - ! Set the coupler indices needed by the land model coupler - ! interface. - ! - ! !USES: - use seq_flds_mod , only: seq_flds_x2l_fields, seq_flds_l2x_fields - use mct_mod , only: mct_aVect, mct_aVect_init, mct_avect_indexra - use mct_mod , only: mct_aVect_clean, mct_avect_nRattr - use shr_drydep_mod , only: drydep_fields_token, n_drydep - use shr_megan_mod , only: shr_megan_fields_token, shr_megan_mechcomps_n - use shr_fire_emis_mod,only: shr_fire_emis_fields_token, shr_fire_emis_ztop_token, shr_fire_emis_mechcomps_n - use clm_varctl , only: ndep_from_cpl - use glc_elevclass_mod, only: glc_get_num_elevation_classes, glc_elevclass_as_string - ! - ! !ARGUMENTS: - implicit none - ! - ! !REVISION HISTORY: - ! Author: Mariana Vertenstein - ! 01/2011, Erik Kluzek: Added protex headers - ! - ! !LOCAL VARIABLES: - type(mct_aVect) :: l2x ! temporary, land to coupler - type(mct_aVect) :: x2l ! temporary, coupler to land - integer :: num - character(len=:), allocatable :: nec_str ! string version of glc elev. class number - character(len=64) :: name - character(len=32) :: subname = 'clm_cpl_indices_set' ! subroutine name - !----------------------------------------------------------------------- - - ! Determine attribute vector indices - - ! 
-    ! create temporary attribute vectors
-    call mct_aVect_init(x2l, rList=seq_flds_x2l_fields, lsize=1)
-    nflds_x2l = mct_avect_nRattr(x2l)
-
-    call mct_aVect_init(l2x, rList=seq_flds_l2x_fields, lsize=1)
-    nflds_l2x = mct_avect_nRattr(l2x)
-
-    !-------------------------------------------------------------
-    ! clm -> drv
-    !-------------------------------------------------------------
-
-    index_l2x_Flrl_rofsur = mct_avect_indexra(l2x,'Flrl_rofsur')
-    index_l2x_Flrl_rofgwl = mct_avect_indexra(l2x,'Flrl_rofgwl')
-    index_l2x_Flrl_rofsub = mct_avect_indexra(l2x,'Flrl_rofsub')
-    index_l2x_Flrl_rofi   = mct_avect_indexra(l2x,'Flrl_rofi')
-    index_l2x_Flrl_irrig  = mct_avect_indexra(l2x,'Flrl_irrig')
-
-    index_l2x_Sl_t     = mct_avect_indexra(l2x,'Sl_t')
-    index_l2x_Sl_snowh = mct_avect_indexra(l2x,'Sl_snowh')
-    index_l2x_Sl_avsdr = mct_avect_indexra(l2x,'Sl_avsdr')
-    index_l2x_Sl_anidr = mct_avect_indexra(l2x,'Sl_anidr')
-    index_l2x_Sl_avsdf = mct_avect_indexra(l2x,'Sl_avsdf')
-    index_l2x_Sl_anidf = mct_avect_indexra(l2x,'Sl_anidf')
-    index_l2x_Sl_tref  = mct_avect_indexra(l2x,'Sl_tref')
-    index_l2x_Sl_qref  = mct_avect_indexra(l2x,'Sl_qref')
-    index_l2x_Sl_u10   = mct_avect_indexra(l2x,'Sl_u10')
-    index_l2x_Sl_ram1  = mct_avect_indexra(l2x,'Sl_ram1')
-    index_l2x_Sl_fv    = mct_avect_indexra(l2x,'Sl_fv')
-    index_l2x_Sl_soilw = mct_avect_indexra(l2x,'Sl_soilw',perrwith='quiet')
-
-    if ( n_drydep>0 )then
-       index_l2x_Sl_ddvel = mct_avect_indexra(l2x, trim(drydep_fields_token))
-    else
-       index_l2x_Sl_ddvel = 0
-    end if
-
-    index_l2x_Fall_taux    = mct_avect_indexra(l2x,'Fall_taux')
-    index_l2x_Fall_tauy    = mct_avect_indexra(l2x,'Fall_tauy')
-    index_l2x_Fall_lat     = mct_avect_indexra(l2x,'Fall_lat')
-    index_l2x_Fall_sen     = mct_avect_indexra(l2x,'Fall_sen')
-    index_l2x_Fall_lwup    = mct_avect_indexra(l2x,'Fall_lwup')
-    index_l2x_Fall_evap    = mct_avect_indexra(l2x,'Fall_evap')
-    index_l2x_Fall_swnet   = mct_avect_indexra(l2x,'Fall_swnet')
-    index_l2x_Fall_flxdst1 = mct_avect_indexra(l2x,'Fall_flxdst1')
-    index_l2x_Fall_flxdst2 = mct_avect_indexra(l2x,'Fall_flxdst2')
-    index_l2x_Fall_flxdst3 = mct_avect_indexra(l2x,'Fall_flxdst3')
-    index_l2x_Fall_flxdst4 = mct_avect_indexra(l2x,'Fall_flxdst4')
-
-    index_l2x_Fall_fco2_lnd = mct_avect_indexra(l2x,'Fall_fco2_lnd',perrwith='quiet')
-
-    index_l2x_Fall_methane = mct_avect_indexra(l2x,'Fall_methane',perrWith='quiet')
-
-    ! MEGAN fluxes
-    if (shr_megan_mechcomps_n>0) then
-       index_l2x_Fall_flxvoc = mct_avect_indexra(l2x,trim(shr_megan_fields_token))
-    else
-       index_l2x_Fall_flxvoc = 0
-    endif
-
-    ! Fire fluxes
-    if (shr_fire_emis_mechcomps_n>0) then
-       index_l2x_Fall_flxfire = mct_avect_indexra(l2x,trim(shr_fire_emis_fields_token))
-       index_l2x_Sl_ztopfire  = mct_avect_indexra(l2x,trim(shr_fire_emis_ztop_token))
-    else
-       index_l2x_Fall_flxfire = 0
-       index_l2x_Sl_ztopfire  = 0
-    endif
-
-    !-------------------------------------------------------------
-    ! drv -> clm
-    !-------------------------------------------------------------
-
-    index_x2l_Sa_z        = mct_avect_indexra(x2l,'Sa_z')
-    index_x2l_Sa_topo     = mct_avect_indexra(x2l,'Sa_topo')
-    index_x2l_Sa_u        = mct_avect_indexra(x2l,'Sa_u')
-    index_x2l_Sa_v        = mct_avect_indexra(x2l,'Sa_v')
-    index_x2l_Sa_ptem     = mct_avect_indexra(x2l,'Sa_ptem')
-    index_x2l_Sa_pbot     = mct_avect_indexra(x2l,'Sa_pbot')
-    index_x2l_Sa_tbot     = mct_avect_indexra(x2l,'Sa_tbot')
-    index_x2l_Sa_shum     = mct_avect_indexra(x2l,'Sa_shum')
-    index_x2l_Sa_co2prog  = mct_avect_indexra(x2l,'Sa_co2prog',perrwith='quiet')
-    index_x2l_Sa_co2diag  = mct_avect_indexra(x2l,'Sa_co2diag',perrwith='quiet')
-
-    index_x2l_Sa_methane  = mct_avect_indexra(x2l,'Sa_methane',perrWith='quiet')
-
-    index_x2l_Flrr_volr    = mct_avect_indexra(x2l,'Flrr_volr')
-    index_x2l_Flrr_volrmch = mct_avect_indexra(x2l,'Flrr_volrmch')
-
-    index_x2l_Faxa_lwdn     = mct_avect_indexra(x2l,'Faxa_lwdn')
-    index_x2l_Faxa_rainc    = mct_avect_indexra(x2l,'Faxa_rainc')
-    index_x2l_Faxa_rainl    = mct_avect_indexra(x2l,'Faxa_rainl')
-    index_x2l_Faxa_snowc    = mct_avect_indexra(x2l,'Faxa_snowc')
-    index_x2l_Faxa_snowl    = mct_avect_indexra(x2l,'Faxa_snowl')
-    index_x2l_Faxa_swndr    = mct_avect_indexra(x2l,'Faxa_swndr')
-    index_x2l_Faxa_swvdr    = mct_avect_indexra(x2l,'Faxa_swvdr')
-    index_x2l_Faxa_swndf    = mct_avect_indexra(x2l,'Faxa_swndf')
-    index_x2l_Faxa_swvdf    = mct_avect_indexra(x2l,'Faxa_swvdf')
-    index_x2l_Faxa_bcphidry = mct_avect_indexra(x2l,'Faxa_bcphidry')
-    index_x2l_Faxa_bcphodry = mct_avect_indexra(x2l,'Faxa_bcphodry')
-    index_x2l_Faxa_bcphiwet = mct_avect_indexra(x2l,'Faxa_bcphiwet')
-    index_x2l_Faxa_ocphidry = mct_avect_indexra(x2l,'Faxa_ocphidry')
-    index_x2l_Faxa_ocphodry = mct_avect_indexra(x2l,'Faxa_ocphodry')
-    index_x2l_Faxa_ocphiwet = mct_avect_indexra(x2l,'Faxa_ocphiwet')
-    index_x2l_Faxa_dstdry1  = mct_avect_indexra(x2l,'Faxa_dstdry1')
-    index_x2l_Faxa_dstdry2  = mct_avect_indexra(x2l,'Faxa_dstdry2')
-    index_x2l_Faxa_dstdry3  = mct_avect_indexra(x2l,'Faxa_dstdry3')
-    index_x2l_Faxa_dstdry4  = mct_avect_indexra(x2l,'Faxa_dstdry4')
-    index_x2l_Faxa_dstwet1  = mct_avect_indexra(x2l,'Faxa_dstwet1')
-    index_x2l_Faxa_dstwet2  = mct_avect_indexra(x2l,'Faxa_dstwet2')
-    index_x2l_Faxa_dstwet3  = mct_avect_indexra(x2l,'Faxa_dstwet3')
-    index_x2l_Faxa_dstwet4  = mct_avect_indexra(x2l,'Faxa_dstwet4')
-
-    index_x2l_Faxa_nhx = mct_avect_indexra(x2l,'Faxa_nhx', perrWith='quiet')
-    index_x2l_Faxa_noy = mct_avect_indexra(x2l,'Faxa_noy', perrWith='quiet')
-
-    if (index_x2l_Faxa_nhx > 0 .and. index_x2l_Faxa_noy > 0) then
-       ndep_from_cpl = .true.
-    end if
-
-    index_x2l_Flrr_flood = mct_avect_indexra(x2l,'Flrr_flood')
-
-    !-------------------------------------------------------------
-    ! glc coupling
-    !-------------------------------------------------------------
-
-    index_x2l_Sg_icemask = mct_avect_indexra(x2l,'Sg_icemask')
-    index_x2l_Sg_icemask_coupled_fluxes = mct_avect_indexra(x2l,'Sg_icemask_coupled_fluxes')
-
-    glc_nec = glc_get_num_elevation_classes()
-    if (glc_nec < 1) then
-       call shr_sys_abort('ERROR: In CLM4.5 and later, glc_nec must be at least 1.')
-    end if
-
-    ! Create coupling fields for all glc elevation classes (1:glc_nec) plus bare land
-    ! (index 0).
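-    ! For orientation, a sketch of the field names the loop below builds,
-    ! assuming glc_elevclass_as_string() returns a zero-padded two-digit suffix
-    ! (an assumption about that helper) and a hypothetical glc_nec = 2:
-    !   num = 0  ->  'Sg_ice_covered00', 'Sg_topo00', 'Flgg_hflx00'  (bare land)
-    !   num = 1  ->  'Sg_ice_covered01', 'Sg_topo01', 'Flgg_hflx01'
-    !   num = 2  ->  'Sg_ice_covered02', 'Sg_topo02', 'Flgg_hflx02'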
-    allocate(index_l2x_Sl_tsrf(0:glc_nec))
-    allocate(index_l2x_Sl_topo(0:glc_nec))
-    allocate(index_l2x_Flgl_qice(0:glc_nec))
-    allocate(index_x2l_Sg_ice_covered(0:glc_nec))
-    allocate(index_x2l_Sg_topo(0:glc_nec))
-    allocate(index_x2l_Flgg_hflx(0:glc_nec))
-
-    do num = 0,glc_nec
-       nec_str = glc_elevclass_as_string(num)
-
-       name = 'Sg_ice_covered' // nec_str
-       index_x2l_Sg_ice_covered(num) = mct_avect_indexra(x2l,trim(name))
-       name = 'Sg_topo' // nec_str
-       index_x2l_Sg_topo(num) = mct_avect_indexra(x2l,trim(name))
-       name = 'Flgg_hflx' // nec_str
-       index_x2l_Flgg_hflx(num) = mct_avect_indexra(x2l,trim(name))
-
-       name = 'Sl_tsrf' // nec_str
-       index_l2x_Sl_tsrf(num) = mct_avect_indexra(l2x,trim(name))
-       name = 'Sl_topo' // nec_str
-       index_l2x_Sl_topo(num) = mct_avect_indexra(l2x,trim(name))
-       name = 'Flgl_qice' // nec_str
-       index_l2x_Flgl_qice(num) = mct_avect_indexra(l2x,trim(name))
-    end do
-
-    call mct_aVect_clean(x2l)
-    call mct_aVect_clean(l2x)
-
-  end subroutine clm_cpl_indices_set
-
-!=======================================================================
-
-end module clm_cpl_indices
diff --git a/src/cpl/mct/laiStreamMod.F90 b/src/cpl/mct/laiStreamMod.F90
deleted file mode 100644
index 47d25287b7..0000000000
--- a/src/cpl/mct/laiStreamMod.F90
+++ /dev/null
@@ -1,241 +0,0 @@
-module laiStreamMod
-
-#include "shr_assert.h"
-
-  !-----------------------------------------------------------------------
-  ! !DESCRIPTION:
-  ! Read LAI from stream
-  !
-  ! !USES:
-  use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create
-  use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance
-  use shr_kind_mod    , only : r8=>shr_kind_r8, CL=>shr_kind_CL, CS=>shr_kind_CS, CXX=>shr_kind_CXX
-  use shr_log_mod     , only : errMsg => shr_log_errMsg
-  use decompMod       , only : bounds_type
-  use abortutils      , only : endrun
-  use clm_varctl      , only : iulog, inst_name
-  use perf_mod        , only : t_startf, t_stopf
-  use spmdMod         , only : masterproc, mpicom, comp_id
-  use ncdio_pio
-  use mct_mod
-  !
-  ! !PUBLIC TYPES:
-  implicit none
-  private
-
-  ! !PUBLIC MEMBER FUNCTIONS:
-  public :: lai_init    ! position datasets for LAI
-  public :: lai_advance ! Advance the LAI streams (outside of a Open-MP threading loop)
-  public :: lai_interp  ! interpolates between two years of LAI data (when LAI streams
-
-  ! !PRIVATE MEMBER DATA:
-  integer, allocatable   :: g_to_ig(:) ! Array matching gridcell index to data index
-  type(shr_strdata_type) :: sdat_lai   ! LAI input data stream
-
-  character(len=*), parameter :: sourcefile = &
-       __FILE__
-
-!==============================================================================
contains
-!==============================================================================
-
-  subroutine lai_init(bounds)
-    !
-    ! Initialize data stream information for LAI.
-    !
-    ! !USES:
-    use clm_time_manager , only : get_calendar
-    use ncdio_pio        , only : pio_subsystem
-    use shr_pio_mod      , only : shr_pio_getiotype
-    use shr_stream_mod   , only : shr_stream_file_null
-    use shr_string_mod   , only : shr_string_listCreateField
-    use clm_nlUtilsMod   , only : find_nlgroup_name
-    use ndepStreamMod    , only : clm_domain_mct
-    use histFileMod      , only : hist_addfld1d
-    use domainMod        , only : ldomain
-    use controlMod       , only : NLFilename
-    use lnd_set_decomp_and_domain , only : gsmap_global
-    !
-    ! !ARGUMENTS:
-    implicit none
-    type(bounds_type), intent(in) :: bounds ! bounds
-    !
-    ! !LOCAL VARIABLES:
-    integer :: stream_year_first_lai ! first year in Lai stream to use
-    integer :: stream_year_last_lai  ! last year in Lai stream to use
-    integer :: model_year_align_lai  ! align stream_year_first_lai with
-    integer :: nu_nml                ! unit for namelist file
-    integer :: nml_error             ! namelist i/o error flag
-    type(mct_ggrid) :: dom_clm       ! domain information
-    character(len=CL) :: stream_fldFileName_lai    ! lai stream filename to read
-    character(len=CL) :: lai_mapalgo = 'bilinear'  ! Mapping alogrithm
-    character(len=CL) :: lai_tintalgo = 'linear'   ! Time interpolation alogrithm
-    character(len=CXX) :: fldList    ! field string
-    character(*), parameter :: laiString = "LAI"   ! base string for field string
-    integer     , parameter :: numLaiFields = 16   ! number of fields to build field string
-    character(*), parameter :: subName = "('laidyn_init')"
-    !-----------------------------------------------------------------------
-    !
-    ! deal with namelist variables here in init
-    !
-    namelist /lai_streams/ &
-         stream_year_first_lai,  &
-         stream_year_last_lai,   &
-         model_year_align_lai,   &
-         lai_mapalgo,            &
-         stream_fldFileName_lai, &
-         lai_tintalgo
-
-    ! Default values for namelist
-    stream_year_first_lai  = 1 ! first year in stream to use
-    stream_year_last_lai   = 1 ! last year in stream to use
-    model_year_align_lai   = 1 ! align stream_year_first_lai with this model year
-    stream_fldFileName_lai = shr_stream_file_null
-
-    ! Read lai_streams namelist
-    if (masterproc) then
-       open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error )
-       call find_nlgroup_name(nu_nml, 'lai_streams', status=nml_error)
-       if (nml_error == 0) then
-          read(nu_nml, nml=lai_streams,iostat=nml_error)
-          if (nml_error /= 0) then
-             call endrun(subname // ':: ERROR reading lai_streams namelist')
-          end if
-       else
-          call endrun(subname // ':: ERROR finding lai_streams namelist')
-       end if
-       close(nu_nml)
-    endif
-    call shr_mpi_bcast(stream_year_first_lai  , mpicom)
-    call shr_mpi_bcast(stream_year_last_lai   , mpicom)
-    call shr_mpi_bcast(model_year_align_lai   , mpicom)
-    call shr_mpi_bcast(stream_fldFileName_lai , mpicom)
-    call shr_mpi_bcast(lai_tintalgo           , mpicom)
-
-    if (masterproc) then
-       write(iulog,*) ' '
-       write(iulog,*) 'lai_stream settings:'
-       write(iulog,*) '  stream_year_first_lai  = ',stream_year_first_lai
-       write(iulog,*) '  stream_year_last_lai   = ',stream_year_last_lai
-       write(iulog,*) '  model_year_align_lai   = ',model_year_align_lai
-       write(iulog,*) '  stream_fldFileName_lai = ',trim(stream_fldFileName_lai)
-       write(iulog,*) '  lai_tintalgo           = ',trim(lai_tintalgo)
-    endif
-
-    call clm_domain_mct (bounds, dom_clm)
-
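-    ! For reference, a lai_streams group as it might appear in the namelist
-    ! file read above; the group and variable names come from this module, but
-    ! the values and file path are hypothetical:
-    !
-    !   &lai_streams
-    !     stream_year_first_lai  = 2001
-    !     stream_year_last_lai   = 2013
-    !     model_year_align_lai   = 2001
-    !     stream_fldfilename_lai = '/path/to/lai_stream_file.nc'
-    !     lai_tintalgo           = 'linear'
-    !   /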
-    ! create the field list for these lai fields...use in shr_strdata_create
-    fldList = shr_string_listCreateField( numLaiFields, laiString )
-
-    call shr_strdata_create(sdat_lai,name="laidyn",   &
-         pio_subsystem=pio_subsystem,                 &
-         pio_iotype=shr_pio_getiotype(inst_name),     &
-         mpicom=mpicom, compid=comp_id,               &
-         gsmap=gsmap_global, ggrid=dom_clm,           &
-         nxg=ldomain%ni, nyg=ldomain%nj,              &
-         yearFirst=stream_year_first_lai,             &
-         yearLast=stream_year_last_lai,               &
-         yearAlign=model_year_align_lai,              &
-         offset=0,                                    &
-         domFilePath='',                              &
-         domFileName=trim(stream_fldFileName_lai),    &
-         domTvarName='time',                          &
-         domXvarName='lon' ,                          &
-         domYvarName='lat' ,                          &
-         domAreaName='area',                          &
-         domMaskName='mask',                          &
-         filePath='',                                 &
-         filename=(/stream_fldFileName_lai/),         &
-         fldListFile=fldList,                         &
-         fldListModel=fldList,                        &
-         fillalgo='none',                             &
-         mapalgo=lai_mapalgo,                         &
-         tintalgo=lai_tintalgo,                       &
-         calendar=get_calendar(),                     &
-         taxmode='cycle' )
-
-    if (masterproc) then
-       call shr_strdata_print(sdat_lai,'LAI data')
-    endif
-
-  end subroutine lai_init
-
-  !==============================================================================
-  subroutine lai_advance( bounds )
-    !
-    ! Advance LAI streams
-    !
-    ! !USES:
-    use clm_time_manager, only : get_curr_date
-    !
-    ! !ARGUMENTS:
-    implicit none
-    type(bounds_type) , intent(in) :: bounds
-    !
-    ! !LOCAL VARIABLES:
-    integer :: g, ig  ! Indices
-    integer :: year   ! year (0, ...) for nstep+1
-    integer :: mon    ! month (1, ..., 12) for nstep+1
-    integer :: day    ! day of month (1, ..., 31) for nstep+1
-    integer :: sec    ! seconds into current date for nstep+1
-    integer :: mcdate ! Current model date (yyyymmdd)
-    !-----------------------------------------------------------------------
-
-    call get_curr_date(year, mon, day, sec)
-    mcdate = year*10000 + mon*100 + day
-
-    call shr_strdata_advance(sdat_lai, mcdate, sec, mpicom, 'laidyn')
-    if ( .not. allocated(g_to_ig) )then
-       allocate (g_to_ig(bounds%begg:bounds%endg) )
-       ig = 0
-       do g = bounds%begg,bounds%endg
-          ig = ig+1
-          g_to_ig(g) = ig
-       end do
-    end if
-
-  end subroutine lai_advance
-
-  !==============================================================================
-  subroutine lai_interp(bounds, canopystate_inst)
-    !
-    ! Interpolate data stream information for Lai.
-    !
-    ! !USES:
-    use pftconMod       , only : noveg
-    use CanopyStateType , only : canopystate_type
-    use PatchType       , only : patch
-    !
-    ! !ARGUMENTS:
-    implicit none
-    type(bounds_type)      , intent(in)    :: bounds
-    type(canopystate_type) , intent(inout) :: canopystate_inst
-    !
-    ! !LOCAL VARIABLES:
-    integer :: ivt, p, ip, ig
-    character(len=CL) :: stream_var_name
-    !-----------------------------------------------------------------------
-    SHR_ASSERT_FL( (lbound(g_to_ig,1) <= bounds%begg ), sourcefile, __LINE__)
-    SHR_ASSERT_FL( (ubound(g_to_ig,1) >= bounds%endg ), sourcefile, __LINE__)
-    SHR_ASSERT_FL( (lbound(sdat_lai%avs(1)%rAttr,2) <= g_to_ig(bounds%begg) ), sourcefile, __LINE__)
-    SHR_ASSERT_FL( (ubound(sdat_lai%avs(1)%rAttr,2) >= g_to_ig(bounds%endg) ), sourcefile, __LINE__)
-
-    do p = bounds%begp, bounds%endp
-       ivt = patch%itype(p)
-       ! Set lai for each gridcell/patch combination
-       if (ivt /= noveg) then
-          ! vegetated pft
-          write(stream_var_name,"(i6)") ivt
-          stream_var_name = 'LAI_'//trim(adjustl(stream_var_name))
-          ip = mct_aVect_indexRA(sdat_lai%avs(1),trim(stream_var_name))
-          ig = g_to_ig(patch%gridcell(p))
-          canopystate_inst%tlai_patch(p) = sdat_lai%avs(1)%rAttr(ip,ig)
-       else
-          ! non-vegetated pft
-          canopystate_inst%tlai_patch(p) = 0._r8
-       endif
-    end do
-
-  end subroutine lai_interp
-
-end module LaiStreamMod
diff --git a/src/cpl/mct/lnd_comp_mct.F90 b/src/cpl/mct/lnd_comp_mct.F90
deleted file mode 100644
index e50602a378..0000000000
--- a/src/cpl/mct/lnd_comp_mct.F90
+++ /dev/null
@@ -1,632 +0,0 @@
-module lnd_comp_mct
-
-  !---------------------------------------------------------------------------
-  ! !DESCRIPTION:
-  ! Interface of the active land model component of CESM the CLM (Community Land Model)
-  ! with the main CESM driver. This is a thin interface taking CESM driver information
-  ! in MCT (Model Coupling Toolkit) format and converting it to use by CLM.
-  !
-  ! !uses:
-  use shr_kind_mod     , only : r8 => shr_kind_r8
-  use shr_sys_mod      , only : shr_sys_flush
-  use shr_log_mod      , only : errMsg => shr_log_errMsg
-  use mct_mod          , only : mct_avect, mct_gsmap, mct_gGrid
-  use decompmod        , only : bounds_type
-  use lnd_import_export, only : lnd_import, lnd_export
-  !
-  ! !public member functions:
-  implicit none
-  private ! by default make data private
-  !
-  ! !public member functions:
-  public :: lnd_init_mct  ! clm initialization
-  public :: lnd_run_mct   ! clm run phase
-  public :: lnd_final_mct ! clm finalization/cleanup
-  !
-  ! !private member functions:
-  private :: lnd_domain_mct   ! set the land model domain information
-  private :: lnd_handle_resume ! handle pause/resume signals from the coupler
-
-  character(len=*), parameter, private :: sourcefile = &
-       __FILE__
-
-!====================================================================================
contains
-!====================================================================================
-
-  subroutine lnd_init_mct( EClock, cdata_l, x2l_l, l2x_l, NLFilename )
-    !
-    ! !DESCRIPTION:
-    ! Initialize land surface model and obtain relevant atmospheric model arrays
-    ! back from (i.e. albedos, surface temperature and snow cover over land).
-    !
-    ! !USES:
-    use shr_kind_mod     , only : shr_kind_cl
-    use abortutils       , only : endrun
-    use clm_time_manager , only : get_nstep, set_timemgr_init
-    use clm_initializeMod, only : initialize1, initialize2
-    use clm_instMod      , only : water_inst, lnd2atm_inst, lnd2glc_inst
-    use clm_varctl       , only : finidat, single_column, clm_varctl_set, iulog
-    use clm_varctl       , only : inst_index, inst_suffix, inst_name
-    use clm_varorb       , only : eccen, obliqr, lambm0, mvelpp
-    use controlMod       , only : control_setNL
-    use decompMod        , only : get_proc_bounds
-    use domainMod        , only : ldomain
-    use shr_file_mod     , only : shr_file_setLogUnit, shr_file_setLogLevel
-    use shr_file_mod     , only : shr_file_getLogUnit, shr_file_getLogLevel
-    use shr_file_mod     , only : shr_file_getUnit, shr_file_setIO
-    use seq_cdata_mod    , only : seq_cdata, seq_cdata_setptrs
-    use seq_timemgr_mod  , only : seq_timemgr_EClockGetData
-    use seq_infodata_mod , only : seq_infodata_type, seq_infodata_GetData, seq_infodata_PutData, &
-                                  seq_infodata_start_type_start, seq_infodata_start_type_cont,   &
-                                  seq_infodata_start_type_brnch
-    use seq_comm_mct     , only : seq_comm_suffix, seq_comm_inst, seq_comm_name
-    use seq_flds_mod     , only : seq_flds_x2l_fields, seq_flds_l2x_fields
-    use spmdMod          , only : masterproc, spmd_init
-    use clm_varctl       , only : nsrStartup, nsrContinue, nsrBranch
-    use clm_cpl_indices  , only : clm_cpl_indices_set
-    use mct_mod          , only : mct_aVect_init, mct_aVect_zero, mct_gsMap, mct_gsMap_init
-    use decompMod        , only : gindex_global
-    use lnd_set_decomp_and_domain, only : lnd_set_decomp_and_domain_from_surfrd, gsmap_global
-    use ESMF
-    !
-    ! !ARGUMENTS:
-    type(ESMF_Clock)          , intent(inout) :: EClock       ! Input synchronization clock
-    type(seq_cdata)           , intent(inout) :: cdata_l      ! Input land-model driver data
-    type(mct_aVect)           , intent(inout) :: x2l_l, l2x_l ! land model import and export states
-    character(len=*), optional, intent(in)    :: NLFilename   ! Namelist filename to read
-    !
-    ! !LOCAL VARIABLES:
-    integer                  :: LNDID      ! Land identifyer
-    integer                  :: mpicom_lnd ! MPI communicator
-    type(mct_gsMap), pointer :: GSMap_lnd  ! Land model MCT GS map
-    type(mct_gGrid), pointer :: dom_l      ! Land model domain
-    type(seq_infodata_type), pointer :: infodata ! CESM driver level info data
-    integer  :: lsize      ! size of attribute vector
-    integer  :: gsize      ! global size
-    integer  :: g,i,j      ! indices
-    integer  :: dtime_sync ! coupling time-step from the input synchronization clock
-    logical  :: exists     ! true if file exists
-    logical  :: atm_aero   ! Flag if aerosol data sent from atm model
-    real(r8) :: scmlat     ! single-column latitude
-    real(r8) :: scmlon     ! single-column longitude
-    character(len=SHR_KIND_CL) :: caseid    ! case identifier name
-    character(len=SHR_KIND_CL) :: ctitle    ! case description title
-    character(len=SHR_KIND_CL) :: starttype ! start-type (startup, continue, branch, hybrid)
-    character(len=SHR_KIND_CL) :: calendar  ! calendar type name
-    character(len=SHR_KIND_CL) :: hostname  ! hostname of machine running on
-    character(len=SHR_KIND_CL) :: version   ! Model version
-    character(len=SHR_KIND_CL) :: username  ! user running the model
-    integer :: nsrest    ! clm restart type
-    integer :: ref_ymd   ! reference date (YYYYMMDD)
-    integer :: ref_tod   ! reference time of day (sec)
-    integer :: start_ymd ! start date (YYYYMMDD)
-    integer :: start_tod ! start time of day (sec)
-    logical :: brnch_retain_casename ! flag if should retain the case name on a branch start type
-    integer :: lbnum     ! input to memory diagnostic
-    integer :: shrlogunit,shrloglev ! old values for log unit and log level
-    type(bounds_type) :: bounds ! bounds
-    logical  :: noland
-    integer  :: ni,nj
-    real(r8) , parameter :: rundef = -9999999._r8
-    character(len=32), parameter :: sub = 'lnd_init_mct'
-    character(len=*),  parameter :: format = "('("//trim(sub)//") :',A)"
-    !-----------------------------------------------------------------------
-
-    ! Set cdata data
-    call seq_cdata_setptrs(cdata_l, ID=LNDID, mpicom=mpicom_lnd, &
-         gsMap=GSMap_lnd, dom=dom_l, infodata=infodata)
-
-    ! Determine attriute vector indices
-    call clm_cpl_indices_set()
-
-    ! Initialize clm MPI communicator
-    call spmd_init( mpicom_lnd, LNDID )
-
-#if (defined _MEMTRACE)
-    if(masterproc) then
-       lbnum=1
-       call memmon_dump_fort('memmon.out','lnd_init_mct:start::',lbnum)
-    endif
-#endif
-
-    inst_name   = seq_comm_name(LNDID)
-    inst_index  = seq_comm_inst(LNDID)
-    inst_suffix = seq_comm_suffix(LNDID)
-    ! Initialize io log unit
-
-    call shr_file_getLogUnit (shrlogunit)
-    if (masterproc) then
-       inquire(file='lnd_modelio.nml'//trim(inst_suffix),exist=exists)
-       if (exists) then
-          iulog = shr_file_getUnit()
-          call shr_file_setIO('lnd_modelio.nml'//trim(inst_suffix),iulog)
-       end if
-       write(iulog,format) "CLM land model initialization"
-    else
-       iulog = shrlogunit
-    end if
-
-    call shr_file_getLogLevel(shrloglev)
-    call shr_file_setLogUnit (iulog)
-
-    ! Use infodata to set orbital values
-    call seq_infodata_GetData( infodata, orb_eccen=eccen, orb_mvelpp=mvelpp, &
-         orb_lambm0=lambm0, orb_obliqr=obliqr )
-
-    ! Consistency check on namelist filename
-    call control_setNL("lnd_in"//trim(inst_suffix))
-
-    ! Initialize clm
-    ! initialize1 reads namelists
-    ! decomp and domain are set in lnd_set_decomp_and_domain_from_surfrd
-    ! initialize2 performs the rest of initialization
-    call seq_timemgr_EClockGetData(EClock,     &
-         start_ymd=start_ymd,                  &
-         start_tod=start_tod, ref_ymd=ref_ymd, &
-         ref_tod=ref_tod,                      &
-         calendar=calendar,                    &
-         dtime=dtime_sync)
-    if (masterproc) then
-       write(iulog,*)'dtime = ',dtime_sync
-    end if
-    call seq_infodata_GetData(infodata, case_name=caseid, &
-         case_desc=ctitle, single_column=single_column,   &
-         scmlat=scmlat, scmlon=scmlon,                    &
-         brnch_retain_casename=brnch_retain_casename,     &
-         start_type=starttype, model_version=version,     &
-         hostname=hostname, username=username )
-
-    ! Single Column
-    if ( single_column .and. (scmlat == rundef .or. scmlon == rundef ) ) then
-       call endrun(msg=' ERROR:: single column mode on -- but scmlat and scmlon are NOT set'//&
-            errMsg(sourcefile, __LINE__))
-    end if
-
-    ! Note that we assume that CTSM's internal dtime matches the coupling time step.
-    ! i.e., we currently do NOT allow sub-cycling within a coupling time step.
-    call set_timemgr_init( calendar_in=calendar, start_ymd_in=start_ymd, start_tod_in=start_tod, &
-         ref_ymd_in=ref_ymd, ref_tod_in=ref_tod, dtime_in=dtime_sync)
-
-    if ( trim(starttype) == trim(seq_infodata_start_type_start)) then
-       nsrest = nsrStartup
-    else if (trim(starttype) == trim(seq_infodata_start_type_cont) ) then
-       nsrest = nsrContinue
-    else if (trim(starttype) == trim(seq_infodata_start_type_brnch)) then
-       nsrest = nsrBranch
-    else
-       call endrun( sub//' ERROR: unknown starttype' )
-    end if
-
-    ! set default values for run control variables
-    call clm_varctl_set(caseid_in=caseid, ctitle_in=ctitle,      &
-         brnch_retain_casename_in=brnch_retain_casename,         &
-         single_column_in=single_column, scmlat_in=scmlat,       &
-         scmlon_in=scmlon, nsrest_in=nsrest, version_in=version, &
-         hostname_in=hostname, username_in=username)
-
-    ! Read namelists
-    call initialize1(dtime=dtime_sync)
-
-    ! Initialize decomposition and domain (ldomain) type
-    call lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj)
-
-    ! If no land then exit out of initialization
-    if ( noland ) then
-
-       call seq_infodata_PutData( infodata, lnd_present =.false.)
-       call seq_infodata_PutData( infodata, lnd_prognostic=.false.)
-
-    else
-
-       ! Determine if aerosol and dust deposition come from atmosphere component
-       call seq_infodata_GetData(infodata, atm_aero=atm_aero )
-       if ( .not. atm_aero )then
-          call endrun( sub//' ERROR: atmosphere model MUST send aerosols to CLM' )
-       end if
-
-       ! Initialize clm gsMap, clm domain and clm attribute vectors
-       call get_proc_bounds( bounds )
-       lsize = bounds%endg - bounds%begg + 1
-       gsize = ldomain%ni * ldomain%nj
-       call mct_gsMap_init( gsMap_lnd, gindex_global, mpicom_lnd, LNDID, lsize, gsize )
-       gsmap_global => gsmap_lnd ! module variable in lnd_set_decomp_and_domain
-       call lnd_domain_mct( bounds, lsize, gsMap_lnd, dom_l )
-       call mct_aVect_init(x2l_l, rList=seq_flds_x2l_fields, lsize=lsize)
-       call mct_aVect_zero(x2l_l)
-       call mct_aVect_init(l2x_l, rList=seq_flds_l2x_fields, lsize=lsize)
-       call mct_aVect_zero(l2x_l)
-
-       ! Finish initializing clm
-       call initialize2(ni,nj)
-
-       ! Create land export state
-       call lnd_export(bounds, water_inst%waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x_l%rattr)
-
-       ! Fill in infodata settings
-       call seq_infodata_PutData(infodata, lnd_prognostic=.true.)
-       call seq_infodata_PutData(infodata, lnd_nx=ldomain%ni, lnd_ny=ldomain%nj)
-       call lnd_handle_resume( cdata_l )
-
-       ! Reset shr logging to original values
-       call shr_file_setLogUnit (shrlogunit)
-       call shr_file_setLogLevel(shrloglev)
-
-#if (defined _MEMTRACE)
-       if(masterproc) then
-          write(iulog,*) TRIM(Sub) // ':end::'
-          lbnum=1
-          call memmon_dump_fort('memmon.out','lnd_int_mct:end::',lbnum)
-          call memmon_reset_addr()
-       endif
-#endif
-    end if
-
-  end subroutine lnd_init_mct
-
-  !====================================================================================
-  subroutine lnd_run_mct(EClock, cdata_l, x2l_l, l2x_l)
-    !
-    ! !DESCRIPTION:
-    ! Run clm model
-    !
-    ! !USES:
-    use shr_kind_mod    , only : r8 => shr_kind_r8
-    use clm_instMod     , only : water_inst, lnd2atm_inst, atm2lnd_inst, lnd2glc_inst, glc2lnd_inst
-    use clm_driver      , only : clm_drv
-    use clm_time_manager, only : get_curr_date, get_nstep, get_curr_calday, get_step_size
-    use clm_time_manager, only : advance_timestep, update_rad_dtime
-    use decompMod       , only : get_proc_bounds
-    use abortutils      , only : endrun
-    use clm_varctl      , only : iulog
-    use clm_varorb      , only : eccen, obliqr, lambm0, mvelpp
-    use shr_file_mod    , only : shr_file_setLogUnit, shr_file_setLogLevel
-    use shr_file_mod    , only : shr_file_getLogUnit, shr_file_getLogLevel
-    use seq_cdata_mod   , only : seq_cdata, seq_cdata_setptrs
-    use seq_timemgr_mod , only : seq_timemgr_EClockGetData, seq_timemgr_StopAlarmIsOn
-    use seq_timemgr_mod , only : seq_timemgr_RestartAlarmIsOn, seq_timemgr_EClockDateInSync
-    use seq_infodata_mod, only : seq_infodata_type, seq_infodata_GetData
-    use spmdMod         , only : masterproc, mpicom
-    use perf_mod        , only : t_startf, t_stopf, t_barrierf
-    use shr_orb_mod     , only : shr_orb_decl
-    use ESMF
-    !
-    ! !ARGUMENTS:
-    type(ESMF_Clock) , intent(inout) :: EClock  ! Input synchronization clock from driver
-    type(seq_cdata)  , intent(inout) :: cdata_l ! Input driver data for land model
-    type(mct_aVect)  , intent(inout) :: x2l_l   ! Import state to land model
-    type(mct_aVect)  , intent(inout) :: l2x_l   ! Export state from land model
-    !
-    ! !LOCAL VARIABLES:
-    integer  :: ymd_sync   ! Sync date (YYYYMMDD)
-    integer  :: yr_sync    ! Sync current year
-    integer  :: mon_sync   ! Sync current month
-    integer  :: day_sync   ! Sync current day
-    integer  :: tod_sync   ! Sync current time of day (sec)
-    integer  :: ymd        ! CLM current date (YYYYMMDD)
-    integer  :: yr         ! CLM current year
-    integer  :: mon        ! CLM current month
-    integer  :: day        ! CLM current day
-    integer  :: tod        ! CLM current time of day (sec)
-    integer  :: dtime      ! time step increment (sec)
-    integer  :: nstep      ! time step index
-    logical  :: rstwr_sync ! .true. ==> write restart file before returning
-    logical  :: rstwr      ! .true. ==> write restart file before returning
-    logical  :: nlend_sync ! Flag signaling last time-step
-    logical  :: nlend      ! .true. ==> last time-step
-    logical  :: dosend     ! true => send data back to driver
-    logical  :: doalb      ! .true. ==> do albedo calculation on this time step
-    logical  :: rof_prognostic ! .true. => running with a prognostic ROF model
-    logical  :: glc_present    ! .true. => running with a non-stub GLC model
-    real(r8) :: nextsw_cday    ! calday from clock of next radiation computation
-    real(r8) :: caldayp1       ! clm calday plus dtime offset
-    integer  :: shrlogunit,shrloglev ! old values for share log unit and log level
-    integer  :: lbnum      ! input to memory diagnostic
-    integer  :: g,i,lsize  ! counters
-    real(r8) :: calday     ! calendar day for nstep
-    real(r8) :: declin     ! solar declination angle in radians for nstep
-    real(r8) :: declinp1   ! solar declination angle in radians for nstep+1
-    real(r8) :: eccf       ! earth orbit eccentricity factor
-    real(r8) :: recip      ! reciprical
-    logical,save :: first_call = .true. ! first call work
-    type(seq_infodata_type),pointer :: infodata ! CESM information from the driver
-    type(mct_gGrid),        pointer :: dom_l    ! Land model domain data
-    type(bounds_type) :: bounds ! bounds
-    character(len=32)            :: rdate ! date char string for restart file names
-    character(len=32), parameter :: sub = "lnd_run_mct"
-    !---------------------------------------------------------------------------
-
-    ! Determine processor bounds
-
-    call get_proc_bounds(bounds)
-
-#if (defined _MEMTRACE)
-    if(masterproc) then
-       lbnum=1
-       call memmon_dump_fort('memmon.out','lnd_run_mct:start::',lbnum)
-    endif
-#endif
-
-    ! Reset shr logging to my log file
-    call shr_file_getLogUnit (shrlogunit)
-    call shr_file_getLogLevel(shrloglev)
-    call shr_file_setLogUnit (iulog)
-
-    ! Determine time of next atmospheric shortwave calculation
-    call seq_cdata_setptrs(cdata_l, infodata=infodata, dom=dom_l)
-    call seq_timemgr_EClockGetData(EClock,  &
-         curr_ymd=ymd, curr_tod=tod_sync,   &
-         curr_yr=yr_sync, curr_mon=mon_sync, curr_day=day_sync)
-    call seq_infodata_GetData(infodata, nextsw_cday=nextsw_cday )
-
-    dtime = get_step_size()
-
-    ! Handle pause/resume signals from coupler
-    call lnd_handle_resume( cdata_l )
-
-    write(rdate,'(i4.4,"-",i2.2,"-",i2.2,"-",i5.5)') yr_sync,mon_sync,day_sync,tod_sync
-    nlend_sync = seq_timemgr_StopAlarmIsOn( EClock )
-    rstwr_sync = seq_timemgr_RestartAlarmIsOn( EClock )
-
-    ! Determine if we're running with a prognostic ROF model, and if we're running with a
-    ! non-stub GLC model. These won't change throughout the run, but we can't count on
-    ! their being set in initialization, so need to get them in the run method.
-
-    call seq_infodata_GetData( infodata,  &
-         rof_prognostic=rof_prognostic,   &
-         glc_present=glc_present)
-
-    ! Map MCT to land data type
-    ! Perform downscaling if appropriate
-
-
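-    ! Layout note for the import below: an MCT attribute vector's rAttr array
-    ! is indexed as (field, point), so x2l(index_x2l_Sa_tbot, i) is the bottom
-    ! atm temperature at local gridcell i. A hypothetical probe of one value:
-    !
-    !   i = 1                                    ! first local gridcell
-    !   tbot = x2l_l%rattr(index_x2l_Sa_tbot, i) ! K, straight from the coupler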
-    ! Map to clm (only when state and/or fluxes need to be updated)
-
-    call t_startf ('lc_lnd_import')
-    call lnd_import( bounds,            &
-         x2l = x2l_l%rattr,             &
-         glc_present = glc_present,     &
-         atm2lnd_inst = atm2lnd_inst,   &
-         glc2lnd_inst = glc2lnd_inst,   &
-         wateratm2lndbulk_inst = water_inst%wateratm2lndbulk_inst)
-    call t_stopf ('lc_lnd_import')
-
-    ! Use infodata to set orbital values if updated mid-run
-
-    call seq_infodata_GetData( infodata, orb_eccen=eccen, orb_mvelpp=mvelpp, &
-         orb_lambm0=lambm0, orb_obliqr=obliqr )
-
-    ! Loop over time steps in coupling interval
-
-    dosend = .false.
-    do while(.not. dosend)
-
-       ! Determine if dosend
-       ! When time is not updated at the beginning of the loop - then return only if
-       ! are in sync with clock before time is updated
-       !
-       ! NOTE(wjs, 2020-03-09) I think the do while (.not. dosend) loop only is important
-       ! for the first time step (when we run 2 steps). After that, we now assume that we
-       ! run one time step per coupling interval (based on setting the model's dtime from
-       ! the driver). (According to Mariana Vertenstein, sub-cycling (running multiple
-       ! land model time steps per coupling interval) used to be supported, but hasn't
-       ! been fully supported for a long time.) We may want to rework this logic to make
-       ! this more explicit, or - ideally - get rid of this extra time step at the start
-       ! of the run, at which point I think we could do away with this looping entirely.
-
-       call get_curr_date( yr, mon, day, tod )
-       ymd = yr*10000 + mon*100 + day
-       tod = tod
-       dosend = (seq_timemgr_EClockDateInSync( EClock, ymd, tod))
-
-       ! Determine doalb based on nextsw_cday sent from atm model
-
-       nstep = get_nstep()
-       caldayp1 = get_curr_calday(offset=dtime, reuse_day_365_for_day_366=.true.)
-       if (nstep == 0) then
-          doalb = .false.
-       else if (nstep == 1) then
-          doalb = (abs(nextsw_cday- caldayp1) < 1.e-10_r8)
-       else
-          doalb = (nextsw_cday >= -0.5_r8)
-       end if
-       call update_rad_dtime(doalb)
-
-       ! Determine if time to write restart and stop
-
-       rstwr = .false.
-       if (rstwr_sync .and. dosend) rstwr = .true.
-       nlend = .false.
-       if (nlend_sync .and. dosend) nlend = .true.
-
-       ! Run clm
-
-       call t_barrierf('sync_clm_run1', mpicom)
-       call t_startf ('clm_run')
-       call t_startf ('shr_orb_decl')
-       calday = get_curr_calday(reuse_day_365_for_day_366=.true.)
-       call shr_orb_decl( calday     , eccen, mvelpp, lambm0, obliqr, declin  , eccf )
-       call shr_orb_decl( nextsw_cday, eccen, mvelpp, lambm0, obliqr, declinp1, eccf )
-       call t_stopf ('shr_orb_decl')
-       call clm_drv(doalb, nextsw_cday, declinp1, declin, rstwr, nlend, rdate, rof_prognostic)
-       call t_stopf ('clm_run')
-
-       ! Create l2x_l export state - add river runoff input to l2x_l if appropriate
-
-       call t_startf ('lc_lnd_export')
-       call lnd_export(bounds, water_inst%waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x_l%rattr)
-       call t_stopf ('lc_lnd_export')
-
-       ! Advance clm time step
-
-       call t_startf ('lc_clm2_adv_timestep')
-       call advance_timestep()
-       call t_stopf ('lc_clm2_adv_timestep')
-
-    end do
-
-    ! Check that internal clock is in sync with master clock
-
-    call get_curr_date( yr, mon, day, tod, offset=-dtime )
-    ymd = yr*10000 + mon*100 + day
-    tod = tod
-    if ( .not. seq_timemgr_EClockDateInSync( EClock, ymd, tod ) )then
-       call seq_timemgr_EclockGetData( EClock, curr_ymd=ymd_sync, curr_tod=tod_sync )
-       write(iulog,*)' clm ymd=',ymd     ,'  clm tod= ',tod
-       write(iulog,*)'sync ymd=',ymd_sync,' sync tod= ',tod_sync
-       call endrun( sub//":: CLM clock not in sync with Master Sync clock" )
-    end if
-
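-    ! The yyyymmdd encoding used for the sync check above, worked through with
-    ! hypothetical values: yr = 2001, mon = 3, day = 15 gives
-    !   ymd = 2001*10000 + 3*100 + 15 = 20010315
-    ! so two dates are in sync exactly when their encoded integers are equal.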
-    ! Reset shr logging to my original values
-
-    call shr_file_setLogUnit (shrlogunit)
-    call shr_file_setLogLevel(shrloglev)
-
-#if (defined _MEMTRACE)
-    if(masterproc) then
-       lbnum=1
-       call memmon_dump_fort('memmon.out','lnd_run_mct:end::',lbnum)
-       call memmon_reset_addr()
-    endif
-#endif
-
-    first_call = .false.
-
-  end subroutine lnd_run_mct
-
-  !====================================================================================
-  subroutine lnd_final_mct( EClock, cdata_l, x2l_l, l2x_l)
-    !
-    ! !DESCRIPTION:
-    ! Finalize land surface model
-
-    use seq_cdata_mod   ,only : seq_cdata, seq_cdata_setptrs
-    use seq_timemgr_mod ,only : seq_timemgr_EClockGetData, seq_timemgr_StopAlarmIsOn
-    use seq_timemgr_mod ,only : seq_timemgr_RestartAlarmIsOn, seq_timemgr_EClockDateInSync
-    use esmf
-    !
-    ! !ARGUMENTS:
-    type(ESMF_Clock) , intent(inout) :: EClock  ! Input synchronization clock from driver
-    type(seq_cdata)  , intent(inout) :: cdata_l ! Input driver data for land model
-    type(mct_aVect)  , intent(inout) :: x2l_l   ! Import state to land model
-    type(mct_aVect)  , intent(inout) :: l2x_l   ! Export state from land model
-    !---------------------------------------------------------------------------
-
-    ! fill this in
-  end subroutine lnd_final_mct
-
-  !====================================================================================
-  subroutine lnd_domain_mct( bounds, lsize, gsMap_l, dom_l )
-    !
-    ! !DESCRIPTION:
-    ! Send the land model domain information to the coupler
-    !
-    ! !USES:
-    use clm_varcon  , only: re
-    use domainMod   , only: ldomain
-    use spmdMod     , only: iam
-    use mct_mod     , only: mct_gGrid_importIAttr
-    use mct_mod     , only: mct_gGrid_importRAttr, mct_gGrid_init, mct_gsMap_orderedPoints
-    use seq_flds_mod, only: seq_flds_dom_coord, seq_flds_dom_other
-    !
-    ! !ARGUMENTS:
-    type(bounds_type), intent(in)    :: bounds  ! bounds
-    integer          , intent(in)    :: lsize   ! land model domain data size
-    type(mct_gsMap)  , intent(inout) :: gsMap_l ! Output land model MCT GS map
-    type(mct_ggrid)  , intent(out)   :: dom_l   ! Output domain information for land model
-    !
-    ! Local Variables
-    integer :: g,i,j              ! index
-    real(r8), pointer :: data(:)  ! temporary
-    integer , pointer :: idata(:) ! temporary
-    !---------------------------------------------------------------------------
-    !
-    ! Initialize mct domain type
-    ! lat/lon in degrees, area in radians^2, mask is 1 (land), 0 (non-land)
-    ! Note that in addition land carries around landfrac for the purposes of domain checking
-    !
-    call mct_gGrid_init( GGrid=dom_l, CoordChars=trim(seq_flds_dom_coord), &
-         OtherChars=trim(seq_flds_dom_other), lsize=lsize )
-    !
-    ! Allocate memory
-    !
-    allocate(data(lsize))
-    !
-    ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT
-    !
-    call mct_gsMap_orderedPoints(gsMap_l, iam, idata)
-    call mct_gGrid_importIAttr(dom_l,'GlobGridNum',idata,lsize)
-    !
-    ! Determine domain (numbering scheme is: West to East and South to North to South pole)
-    ! Initialize attribute vector with special value
-    !
-    data(:) = -9999.0_R8
-    call mct_gGrid_importRAttr(dom_l,"lat"  ,data,lsize)
-    call mct_gGrid_importRAttr(dom_l,"lon"  ,data,lsize)
-    call mct_gGrid_importRAttr(dom_l,"area" ,data,lsize)
-    call mct_gGrid_importRAttr(dom_l,"aream",data,lsize)
-    data(:) = 0.0_R8
-    call mct_gGrid_importRAttr(dom_l,"mask" ,data,lsize)
-    !
-    ! Fill in correct values for domain components
-    ! Note aream will be filled in in the atm-lnd mapper
-    !
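-    ! Unit check for the area fill below: ldomain%area is in km**2 and the
-    ! coupler expects radians**2 (steradians), hence the division by re**2 with
-    ! re the earth radius in km. With hypothetical values area = 1.2e4 km**2
-    ! and re ~ 6.37e3 km:
-    !   1.2e4 / (6.37e3)**2 ~ 2.96e-4 sr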
-    do g = bounds%begg,bounds%endg
-       i = 1 + (g - bounds%begg)
-       data(i) = ldomain%lonc(g)
-    end do
-    call mct_gGrid_importRattr(dom_l,"lon",data,lsize)
-
-    do g = bounds%begg,bounds%endg
-       i = 1 + (g - bounds%begg)
-       data(i) = ldomain%latc(g)
-    end do
-    call mct_gGrid_importRattr(dom_l,"lat",data,lsize)
-
-    do g = bounds%begg,bounds%endg
-       i = 1 + (g - bounds%begg)
-       data(i) = ldomain%area(g)/(re*re)
-    end do
-    call mct_gGrid_importRattr(dom_l,"area",data,lsize)
-
-    do g = bounds%begg,bounds%endg
-       i = 1 + (g - bounds%begg)
-       data(i) = real(ldomain%mask(g), r8)
-    end do
-    call mct_gGrid_importRattr(dom_l,"mask",data,lsize)
-
-    do g = bounds%begg,bounds%endg
-       i = 1 + (g - bounds%begg)
-       data(i) = real(ldomain%frac(g), r8)
-    end do
-    call mct_gGrid_importRattr(dom_l,"frac",data,lsize)
-
-    deallocate(data)
-    deallocate(idata)
-
-  end subroutine lnd_domain_mct
-
-  !====================================================================================
-  subroutine lnd_handle_resume( cdata_l )
-    !
-    ! !DESCRIPTION:
-    ! Handle resume signals for Data Assimilation (DA)
-    !
-    ! !USES:
-    use clm_time_manager , only : update_DA_nstep
-    use seq_cdata_mod    , only : seq_cdata, seq_cdata_setptrs
-    implicit none
-    ! !ARGUMENTS:
-    type(seq_cdata), intent(inout) :: cdata_l ! Input land-model driver data
-    ! !LOCAL VARIABLES:
-    logical :: resume_from_data_assim ! flag if we are resuming after data assimulation was done
-    !---------------------------------------------------------------------------
-
-    ! Check to see if restart was modified and we are resuming from data
-    ! assimilation
-    call seq_cdata_setptrs(cdata_l, post_assimilation=resume_from_data_assim)
-    if ( resume_from_data_assim ) call update_DA_nstep()
-
-  end subroutine lnd_handle_resume
-
-end module lnd_comp_mct
diff --git a/src/cpl/mct/lnd_import_export.F90 b/src/cpl/mct/lnd_import_export.F90
deleted file mode 100644
index 537abd49d9..0000000000
--- a/src/cpl/mct/lnd_import_export.F90
+++ /dev/null
@@ -1,354 +0,0 @@
-module lnd_import_export
-
-  use shr_kind_mod         , only: r8 => shr_kind_r8, cl=>shr_kind_cl
-  use abortutils           , only: endrun
-  use decompmod            , only: bounds_type, subgrid_level_gridcell
-  use lnd2atmType          , only: lnd2atm_type
-  use lnd2glcMod           , only: lnd2glc_type
-  use atm2lndType          , only: atm2lnd_type
-  use glc2lndMod           , only: glc2lnd_type
-  use Waterlnd2atmBulkType , only: waterlnd2atmbulk_type
-  use Wateratm2lndBulkType , only: wateratm2lndbulk_type
-  use clm_cpl_indices
-  use GridcellType         , only : grc
-  !
-  implicit none
-  !===============================================================================
-
contains
-
-  !===============================================================================
-  subroutine lnd_import( bounds, x2l, glc_present, atm2lnd_inst, glc2lnd_inst, wateratm2lndbulk_inst)
-
-    !---------------------------------------------------------------------------
-    ! !DESCRIPTION:
-    ! Convert the input data from the coupler to the land model
-    !
-    ! !USES:
-    use seq_flds_mod    , only: seq_flds_x2l_fields
-    use clm_varctl      , only: co2_type, co2_ppmv, iulog, use_c13
-    use clm_varctl      , only: ndep_from_cpl
-    use clm_varcon      , only: c13ratio
-    use domainMod       , only: ldomain
-    use lnd_import_export_utils, only : derive_quantities, check_for_errors, check_for_nans
-    !
-    ! !ARGUMENTS:
-    type(bounds_type) , intent(in) :: bounds      ! bounds
-    real(r8)          , intent(in) :: x2l(:,:)    ! driver import state to land model
-    logical           , intent(in) :: glc_present ! .true. => running with a non-stub GLC model
-    type(atm2lnd_type)         , intent(inout) :: atm2lnd_inst          ! clm internal input data type
-    type(glc2lnd_type)         , intent(inout) :: glc2lnd_inst          ! clm internal input data type
-    type(wateratm2lndbulk_type), intent(inout) :: wateratm2lndbulk_inst ! clm internal input data type
-    !
-    ! !LOCAL VARIABLES:
-    integer  :: begg, endg      ! bounds
-    integer  :: g,i,k,nstep,ier ! indices, number of steps, and error code
-    real(r8) :: qsat_kg_kg      ! saturation specific humidity (kg/kg)
-    real(r8) :: forc_pbot       ! atmospheric pressure (Pa)
-    real(r8) :: forc_rainc(bounds%begg:bounds%endg) ! rainxy Atm flux mm/s
-    real(r8) :: forc_rainl(bounds%begg:bounds%endg) ! rainxy Atm flux mm/s
-    real(r8) :: forc_snowc(bounds%begg:bounds%endg) ! snowfxy Atm flux mm/s
-    real(r8) :: forc_snowl(bounds%begg:bounds%endg) ! snowfxl Atm flux mm/s
-    real(r8) :: co2_ppmv_diag   ! temporary
-    real(r8) :: co2_ppmv_prog   ! temporary
-    real(r8) :: co2_ppmv_val    ! temporary
-    integer  :: co2_type_idx    ! integer flag for co2_type options
-    character(len=32) :: fname  ! name of field that is NaN
-    character(len=32), parameter :: sub = 'lnd_import'
-
-    !---------------------------------------------------------------------------
-
-    ! Set bounds
-    begg = bounds%begg; endg = bounds%endg
-
-    co2_type_idx = 0
-    if (co2_type == 'prognostic') then
-       co2_type_idx = 1
-    else if (co2_type == 'diagnostic') then
-       co2_type_idx = 2
-    end if
-    if (co2_type == 'prognostic' .and. index_x2l_Sa_co2prog == 0) then
-       call endrun( sub//' ERROR: must have nonzero index_x2l_Sa_co2prog for co2_type equal to prognostic' )
-    else if (co2_type == 'diagnostic' .and. index_x2l_Sa_co2diag == 0) then
-       call endrun( sub//' ERROR: must have nonzero index_x2l_Sa_co2diag for co2_type equal to diagnostic' )
-    end if
-
-    ! Note that the precipitation fluxes received from the coupler
-    ! are in units of kg/s/m^2. To convert these precipitation rates
-    ! in units of mm/sec, one must divide by 1000 kg/m^3 and multiply
-    ! by 1000 mm/m resulting in an overall factor of unity.
-    ! Below the units are therefore given in mm/s.
-
-    do g = begg,endg
-       i = 1 + (g - begg)
-
-       ! Determine flooding input, sign convention is positive downward and
-       ! hierarchy is atm/glc/lnd/rof/ice/ocn. so water sent from rof to land is negative,
-       ! change the sign to indicate addition of water to system.
-
-       wateratm2lndbulk_inst%forc_flood_grc(g) = -x2l(index_x2l_Flrr_flood,i)
-
-       wateratm2lndbulk_inst%volr_grc(g)    = x2l(index_x2l_Flrr_volr,i) * (ldomain%area(g) * 1.e6_r8)
-       wateratm2lndbulk_inst%volrmch_grc(g) = x2l(index_x2l_Flrr_volrmch,i) * (ldomain%area(g) * 1.e6_r8)
-
-       ! Determine required receive fields
-
-       atm2lnd_inst%forc_hgt_grc(g)  = x2l(index_x2l_Sa_z,i)    ! zgcmxy Atm state m
-       atm2lnd_inst%forc_topo_grc(g) = x2l(index_x2l_Sa_topo,i) ! Atm surface height (m)
-       atm2lnd_inst%forc_u_grc(g)    = x2l(index_x2l_Sa_u,i)    ! forc_uxy Atm state m/s
-       atm2lnd_inst%forc_v_grc(g)    = x2l(index_x2l_Sa_v,i)    ! forc_vxy Atm state m/s
-       atm2lnd_inst%forc_solad_not_downscaled_grc(g,2) = x2l(index_x2l_Faxa_swndr,i) ! forc_sollxy Atm flux W/m^2
-       atm2lnd_inst%forc_solad_not_downscaled_grc(g,1) = x2l(index_x2l_Faxa_swvdr,i) ! forc_solsxy Atm flux W/m^2
-       atm2lnd_inst%forc_solai_grc(g,2) = x2l(index_x2l_Faxa_swndf,i) ! forc_solldxy Atm flux W/m^2
-       atm2lnd_inst%forc_solai_grc(g,1) = x2l(index_x2l_Faxa_swvdf,i) ! forc_solsdxy Atm flux W/m^2
-
-       atm2lnd_inst%forc_th_not_downscaled_grc(g) = x2l(index_x2l_Sa_ptem,i) ! forc_thxy Atm state K
-       wateratm2lndbulk_inst%forc_q_not_downscaled_grc(g) = x2l(index_x2l_Sa_shum,i) ! forc_qxy Atm state kg/kg
-       atm2lnd_inst%forc_pbot_not_downscaled_grc(g)  = x2l(index_x2l_Sa_pbot,i)   ! ptcmxy Atm state Pa
-       atm2lnd_inst%forc_t_not_downscaled_grc(g)     = x2l(index_x2l_Sa_tbot,i)   ! forc_txy Atm state K
-       atm2lnd_inst%forc_lwrad_not_downscaled_grc(g) = x2l(index_x2l_Faxa_lwdn,i) ! flwdsxy Atm flux W/m^2
-
-       forc_rainc(g) = x2l(index_x2l_Faxa_rainc,i) ! mm/s
-       forc_rainl(g) = x2l(index_x2l_Faxa_rainl,i) ! mm/s
-       forc_snowc(g) = x2l(index_x2l_Faxa_snowc,i) ! mm/s
-       forc_snowl(g) = x2l(index_x2l_Faxa_snowl,i) ! mm/s
-
-       ! atmosphere coupling, for prognostic/prescribed aerosols
-       atm2lnd_inst%forc_aer_grc(g,1)  = x2l(index_x2l_Faxa_bcphidry,i)
-       atm2lnd_inst%forc_aer_grc(g,2)  = x2l(index_x2l_Faxa_bcphodry,i)
-       atm2lnd_inst%forc_aer_grc(g,3)  = x2l(index_x2l_Faxa_bcphiwet,i)
-       atm2lnd_inst%forc_aer_grc(g,4)  = x2l(index_x2l_Faxa_ocphidry,i)
-       atm2lnd_inst%forc_aer_grc(g,5)  = x2l(index_x2l_Faxa_ocphodry,i)
-       atm2lnd_inst%forc_aer_grc(g,6)  = x2l(index_x2l_Faxa_ocphiwet,i)
-       atm2lnd_inst%forc_aer_grc(g,7)  = x2l(index_x2l_Faxa_dstwet1,i)
-       atm2lnd_inst%forc_aer_grc(g,8)  = x2l(index_x2l_Faxa_dstdry1,i)
-       atm2lnd_inst%forc_aer_grc(g,9)  = x2l(index_x2l_Faxa_dstwet2,i)
-       atm2lnd_inst%forc_aer_grc(g,10) = x2l(index_x2l_Faxa_dstdry2,i)
-       atm2lnd_inst%forc_aer_grc(g,11) = x2l(index_x2l_Faxa_dstwet3,i)
-       atm2lnd_inst%forc_aer_grc(g,12) = x2l(index_x2l_Faxa_dstdry3,i)
-       atm2lnd_inst%forc_aer_grc(g,13) = x2l(index_x2l_Faxa_dstwet4,i)
-       atm2lnd_inst%forc_aer_grc(g,14) = x2l(index_x2l_Faxa_dstdry4,i)
-
-       if (index_x2l_Sa_methane /= 0) then
-          atm2lnd_inst%forc_pch4_grc(g) = x2l(index_x2l_Sa_methane,i)
-       endif
-
-       !--------------------------
-       ! Check for nans from coupler
-       !--------------------------
-
-       call check_for_nans(x2l(:,i), fname, begg, "x2l")
-
-    end do
-
-    !--------------------------
-    ! Derived quantities for required fields
-    ! and corresponding error checks
-    !--------------------------
-
-    call derive_quantities(bounds, atm2lnd_inst, wateratm2lndbulk_inst, &
-         forc_rainc, forc_rainl, forc_snowc, forc_snowl)
-
-    call check_for_errors(bounds, atm2lnd_inst, wateratm2lndbulk_inst)
-
-    ! Determine derived quantities for optional fields
-    ! Note that the following does unit conversions from ppmv to partial pressures (Pa)
-    ! Note that forc_pbot is in Pa
-
-    do g = begg,endg
-       i = 1 + (g - begg)
-
-       forc_pbot = atm2lnd_inst%forc_pbot_not_downscaled_grc(g)
-
-       ! Determine optional receive fields
-       if (index_x2l_Sa_co2prog /= 0) then
-          co2_ppmv_prog = x2l(index_x2l_Sa_co2prog,i) ! co2 atm state prognostic
-       else
-          co2_ppmv_prog = co2_ppmv
-       end if
-       if (index_x2l_Sa_co2diag /= 0) then
-          co2_ppmv_diag = x2l(index_x2l_Sa_co2diag,i) ! co2 atm state diagnostic
-       else
-          co2_ppmv_diag = co2_ppmv
-       end if
-
-       if (co2_type_idx == 1) then
-          co2_ppmv_val = co2_ppmv_prog
-       else if (co2_type_idx == 2) then
-          co2_ppmv_val = co2_ppmv_diag
-       else
-          co2_ppmv_val = co2_ppmv
-       end if
-       if ( (co2_ppmv_val < 10.0_r8) .or. (co2_ppmv_val > 15000.0_r8) )then
-          call endrun(subgrid_index=g, subgrid_level=subgrid_level_gridcell, &
-               msg = sub//' ERROR: CO2 is outside of an expected range' )
-       end if
-       atm2lnd_inst%forc_pco2_grc(g) = co2_ppmv_val * 1.e-6_r8 * forc_pbot
-       if (use_c13) then
-          atm2lnd_inst%forc_pc13o2_grc(g) = co2_ppmv_val * c13ratio * 1.e-6_r8 * forc_pbot
-       end if
-
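-       ! Worked example of the ppmv -> Pa conversion above, with hypothetical
-       ! values: co2_ppmv_val = 400 ppmv and forc_pbot = 101325 Pa gives
-       !   forc_pco2 = 400 * 1.e-6 * 101325 ~ 40.5 Pa
-       ! i.e. the mole fraction times total pressure.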
-       if (ndep_from_cpl) then
-          ! The coupler is sending ndep in units if kgN/m2/s - and clm uses units of gN/m2/sec - so the
-          ! following conversion needs to happen
-          atm2lnd_inst%forc_ndep_grc(g) = (x2l(index_x2l_Faxa_nhx, i) + x2l(index_x2l_faxa_noy, i))*1000._r8
-       end if
-
-    end do
-
-    call glc2lnd_inst%set_glc2lnd_fields_mct( &
-         bounds = bounds, &
-         glc_present = glc_present, &
-         ! NOTE(wjs, 2017-12-13) the x2l argument doesn't have the typical bounds
-         ! subsetting (bounds%begg:bounds%endg). This mirrors the lack of these bounds in
-         ! the call to lnd_import from lnd_run_mct. This is okay as long as this code is
-         ! outside a clump loop.
-         x2l = x2l, &
-         index_x2l_Sg_ice_covered = index_x2l_Sg_ice_covered, &
-         index_x2l_Sg_topo = index_x2l_Sg_topo, &
-         index_x2l_Flgg_hflx = index_x2l_Flgg_hflx, &
-         index_x2l_Sg_icemask = index_x2l_Sg_icemask, &
-         index_x2l_Sg_icemask_coupled_fluxes = index_x2l_Sg_icemask_coupled_fluxes)
-
-  end subroutine lnd_import
-
-  !===============================================================================
-
-  subroutine lnd_export( bounds, waterlnd2atmbulk_inst, lnd2atm_inst, lnd2glc_inst, l2x)
-
-    !---------------------------------------------------------------------------
-    ! !DESCRIPTION:
-    ! Convert the data to be sent from the clm model to the coupler
-    !
-    ! !USES:
-    use shr_kind_mod           , only : r8 => shr_kind_r8
-    use seq_flds_mod           , only : seq_flds_l2x_fields
-    use clm_varctl             , only : iulog
-    use shr_drydep_mod         , only : n_drydep
-    use shr_megan_mod          , only : shr_megan_mechcomps_n
-    use shr_fire_emis_mod      , only : shr_fire_emis_mechcomps_n
-    use lnd_import_export_utils, only : check_for_nans
-    !
-    ! !ARGUMENTS:
-    implicit none
-    type(bounds_type)  , intent(in)    :: bounds       ! bounds
-    type(lnd2atm_type) , intent(inout) :: lnd2atm_inst ! clm land to atmosphere exchange data type
-    type(lnd2glc_type) , intent(inout) :: lnd2glc_inst ! clm land to atmosphere exchange data type
-    type(waterlnd2atmbulk_type), intent(in) :: waterlnd2atmbulk_inst
-    real(r8)           , intent(out)   :: l2x(:,:) ! land to coupler export state on land grid
-    !
-    ! !LOCAL VARIABLES:
-    integer  :: begg, endg ! bounds
-    integer  :: g,i,k      ! indices
-    integer  :: ier        ! error status
-    integer  :: nstep      ! time step index
-    integer  :: dtime      ! time step
-    integer  :: num        ! counter
-    character(len=32) :: fname ! name of field that is NaN
-    character(len=32), parameter :: sub = 'lnd_export'
-    !---------------------------------------------------------------------------
-
-    ! Set bounds
-    begg = bounds%begg; endg = bounds%endg
-
-    ! cesm sign convention is that fluxes are positive downward
-
-    l2x(:,:) = 0.0_r8
-
-    do g = begg,endg
-       i = 1 + (g-begg)
-       l2x(index_l2x_Sl_t,i)     = lnd2atm_inst%t_rad_grc(g)
-       l2x(index_l2x_Sl_snowh,i) = waterlnd2atmbulk_inst%h2osno_grc(g)
-       l2x(index_l2x_Sl_avsdr,i) = lnd2atm_inst%albd_grc(g,1)
-       l2x(index_l2x_Sl_anidr,i) = lnd2atm_inst%albd_grc(g,2)
-       l2x(index_l2x_Sl_avsdf,i) = lnd2atm_inst%albi_grc(g,1)
-       l2x(index_l2x_Sl_anidf,i) = lnd2atm_inst%albi_grc(g,2)
-       l2x(index_l2x_Sl_tref,i)  = lnd2atm_inst%t_ref2m_grc(g)
-       l2x(index_l2x_Sl_qref,i)  = waterlnd2atmbulk_inst%q_ref2m_grc(g)
-       l2x(index_l2x_Sl_u10,i)   = lnd2atm_inst%u_ref10m_grc(g)
-       l2x(index_l2x_Fall_taux,i)  = -lnd2atm_inst%taux_grc(g)
-       l2x(index_l2x_Fall_tauy,i)  = -lnd2atm_inst%tauy_grc(g)
-       l2x(index_l2x_Fall_lat,i)   = -lnd2atm_inst%eflx_lh_tot_grc(g)
-       l2x(index_l2x_Fall_sen,i)   = -lnd2atm_inst%eflx_sh_tot_grc(g)
-       l2x(index_l2x_Fall_lwup,i)  = -lnd2atm_inst%eflx_lwrad_out_grc(g)
-       l2x(index_l2x_Fall_evap,i)  = -waterlnd2atmbulk_inst%qflx_evap_tot_grc(g)
-       l2x(index_l2x_Fall_swnet,i) = lnd2atm_inst%fsa_grc(g)
-       if (index_l2x_Fall_fco2_lnd /= 0) then
-          l2x(index_l2x_Fall_fco2_lnd,i) = -lnd2atm_inst%net_carbon_exchange_grc(g)
-       end if
-
-       ! Additional fields for DUST, PROGSSLT, dry-deposition and VOC
-       ! These are now standard fields, but the check on the index makes sure the driver handles them
-       if (index_l2x_Sl_ram1      /= 0 )  l2x(index_l2x_Sl_ram1,i)  = lnd2atm_inst%ram1_grc(g)
-       if (index_l2x_Sl_fv        /= 0 )  l2x(index_l2x_Sl_fv,i)    = lnd2atm_inst%fv_grc(g)
-       if (index_l2x_Sl_soilw     /= 0 )  l2x(index_l2x_Sl_soilw,i) = waterlnd2atmbulk_inst%h2osoi_vol_grc(g,1)
-       if (index_l2x_Fall_flxdst1 /= 0 )  l2x(index_l2x_Fall_flxdst1,i)= -lnd2atm_inst%flxdst_grc(g,1)
-       if (index_l2x_Fall_flxdst2 /= 0 )  l2x(index_l2x_Fall_flxdst2,i)= -lnd2atm_inst%flxdst_grc(g,2)
-       if (index_l2x_Fall_flxdst3 /= 0 )  l2x(index_l2x_Fall_flxdst3,i)= -lnd2atm_inst%flxdst_grc(g,3)
-       if (index_l2x_Fall_flxdst4 /= 0 )  l2x(index_l2x_Fall_flxdst4,i)= -lnd2atm_inst%flxdst_grc(g,4)
-
-       ! for dry dep velocities
-       if (index_l2x_Sl_ddvel /= 0 )  then
-          l2x(index_l2x_Sl_ddvel:index_l2x_Sl_ddvel+n_drydep-1,i) = &
-               lnd2atm_inst%ddvel_grc(g,:n_drydep)
-       end if
-
-       ! for MEGAN VOC emis fluxes
-       if (index_l2x_Fall_flxvoc /= 0 ) then
-          l2x(index_l2x_Fall_flxvoc:index_l2x_Fall_flxvoc+shr_megan_mechcomps_n-1,i) = &
-               -lnd2atm_inst%flxvoc_grc(g,:shr_megan_mechcomps_n)
-       end if
-
-       ! for fire emis fluxes
-       if (index_l2x_Fall_flxfire /= 0 ) then
-          l2x(index_l2x_Fall_flxfire:index_l2x_Fall_flxfire+shr_fire_emis_mechcomps_n-1,i) = &
-               -lnd2atm_inst%fireflx_grc(g,:shr_fire_emis_mechcomps_n)
-          l2x(index_l2x_Sl_ztopfire,i) = lnd2atm_inst%fireztop_grc(g)
-       end if
-
-       if (index_l2x_Fall_methane /= 0) then
-          l2x(index_l2x_Fall_methane,i) = -lnd2atm_inst%ch4_surf_flux_tot_grc(g)
-       endif
-
-       ! sign convention is positive downward with
-       ! hierarchy of atm/glc/lnd/rof/ice/ocn.
-       ! I.e. water sent from land to rof is positive
-
-       l2x(index_l2x_Flrl_rofsur,i) = waterlnd2atmbulk_inst%qflx_rofliq_qsur_grc(g)
-
-       ! subsurface runoff is the sum of qflx_drain and qflx_perched_drain
-       l2x(index_l2x_Flrl_rofsub,i) = waterlnd2atmbulk_inst%qflx_rofliq_qsub_grc(g) &
-            + waterlnd2atmbulk_inst%qflx_rofliq_drain_perched_grc(g)
-
-       ! qgwl sent individually to coupler
-       l2x(index_l2x_Flrl_rofgwl,i) = waterlnd2atmbulk_inst%qflx_rofliq_qgwl_grc(g)
-
-       ! ice sent individually to coupler
-       l2x(index_l2x_Flrl_rofi,i) = waterlnd2atmbulk_inst%qflx_rofice_grc(g)
-
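-       ! Sign-convention spot check for the flux exports above (hypothetical
-       ! value): an upward sensible heat flux eflx_sh_tot_grc = +50 W/m^2
-       ! leaves the land, so under the positive-downward coupler convention it
-       ! is sent as l2x(index_l2x_Fall_sen,i) = -50 W/m^2.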
-       ! irrigation flux to be removed from main channel storage (negative)
-       l2x(index_l2x_Flrl_irrig,i) = - waterlnd2atmbulk_inst%qirrig_grc(g)
-
-       ! glc coupling
-       ! We could avoid setting these fields if glc_present is .false., if that would
-       ! help with performance. (The downside would be that we wouldn't have these fields
-       ! available for diagnostic purposes or to force a later T compset with dlnd.)
-       do num = 0,glc_nec
-          l2x(index_l2x_Sl_tsrf(num),i)   = lnd2glc_inst%tsrf_grc(g,num)
-          l2x(index_l2x_Sl_topo(num),i)   = lnd2glc_inst%topo_grc(g,num)
-          l2x(index_l2x_Flgl_qice(num),i) = lnd2glc_inst%qice_grc(g,num)
-       end do
-
-       !--------------------------
-       ! Check for nans to coupler
-       !--------------------------
-
-       call check_for_nans(l2x(:,i), fname, begg, "l2x")
-
-    end do
-
-  end subroutine lnd_export
-
-end module lnd_import_export
diff --git a/src/cpl/mct/lnd_set_decomp_and_domain.F90 b/src/cpl/mct/lnd_set_decomp_and_domain.F90
deleted file mode 100644
index 0a37554313..0000000000
--- a/src/cpl/mct/lnd_set_decomp_and_domain.F90
+++ /dev/null
@@ -1,352 +0,0 @@
-module lnd_set_decomp_and_domain
-
-  use shr_kind_mod , only : r8 => shr_kind_r8
-  use spmdMod      , only : masterproc
-  use clm_varctl   , only : iulog
-  use mct_mod      , only : mct_gsMap
-
-  implicit none
-  private ! except
-
-  ! public member routines
-  public :: lnd_set_decomp_and_domain_from_surfrd
-
-  ! private member routines
-  private :: surfrd_get_globmask ! Reads global land mask (needed for setting domain decomp)
-  private :: surfrd_get_grid     ! Read grid/ladnfrac data into domain (after domain decomp)
-
-  ! translation between local and global indices at gridcell level
-  type(mct_gsmap), pointer, public :: gsmap_global
-
-  ! translation between local and global indices at gridcell level for multiple levels
-  ! needed for 3d soil moisture stream
-  type(mct_gsmap), target , public :: gsMap_lnd2Dsoi_gdc2glo
-
-  character(len=*), parameter, private :: sourcefile = &
-       __FILE__
-
-!===============================================================================
contains
-!===============================================================================
-
-  subroutine lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj)
-
-    ! Initialize ldomain data types
-
-    use clm_varpar    , only: nlevsoi
-    use clm_varctl    , only: fatmlndfrc, use_soil_moisture_streams
-    use decompInitMod , only: decompInit_lnd
-    use decompMod     , only: bounds_type, get_proc_bounds
-    use domainMod     , only: ldomain, domain_check
-
-    ! input/output variables
-    logical, intent(out) :: noland
-    integer, intent(out) :: ni, nj ! global grid sizes
-
-    ! local variables
-    integer ,pointer  :: amask(:)   ! global land mask
-    integer           :: begg, endg ! processor bounds
-    type(bounds_type) :: bounds     ! bounds
-    character(len=32) :: subname = 'lnd_set_decomp_and_domain_from_surfrd'
-    !-----------------------------------------------------------------------
-
-    ! Read in global land grid and land mask (amask)- needed to set decomposition
-    ! global memory for amask is allocate in surfrd_get_glomask - must be deallocated below
-    if (masterproc) then
-       write(iulog,*) 'Attempting to read global land mask from ',trim(fatmlndfrc)
-    endif
-
-    ! Get global mask, ni and nj
-    call surfrd_get_globmask(filename=fatmlndfrc, mask=amask, ni=ni, nj=nj)
-
-    ! Exit early if no valid land points
-    if ( all(amask == 0) )then
-       if (masterproc) write(iulog,*) trim(subname)//': no valid land points do NOT run clm'
-       noland = .true.
-       return
-    else
-       noland = .false.
-    end if
-
Determine ctsm gridcell decomposition and processor bounds for gridcells - call decompInit_lnd(ni, nj, amask) - deallocate(amask) - if (use_soil_moisture_streams) call decompInit_lnd3D(ni, nj, nlevsoi) - - ! Initialize bounds for just gridcells - ! Remaining bounds (landunits, columns, patches) will be determined - ! after the call to decompInit_glcp - so get_proc_bounds is called - ! twice and the gridcell information is just filled in twice - call get_proc_bounds(bounds) - - ! Get grid cell bounds values - begg = bounds%begg - endg = bounds%endg - - ! Initialize ldomain data type - if (masterproc) then - write(iulog,*) 'Attempting to read ldomain from ',trim(fatmlndfrc) - endif - call surfrd_get_grid(begg, endg, ldomain, fatmlndfrc) - if (masterproc) then - call domain_check(ldomain) - endif - ldomain%mask = 1 !!! TODO - is this needed? - - end subroutine lnd_set_decomp_and_domain_from_surfrd - - !----------------------------------------------------------------------- - subroutine surfrd_get_globmask(filename, mask, ni, nj) - - ! Read the surface dataset grid related information - ! This is used to set the domain decomposition - so global data is read here - - use fileutils , only : getfil - use ncdio_pio , only : ncd_io, ncd_pio_openfile, ncd_pio_closefile, ncd_inqfdims, file_desc_t - use abortutils , only : endrun - use shr_log_mod, only : errMsg => shr_log_errMsg - - ! input/output variables - character(len=*), intent(in) :: filename ! grid filename - integer , pointer :: mask(:) ! grid mask - integer , intent(out) :: ni, nj ! global grid sizes - - ! local variables - logical :: isgrid2d - integer :: dimid,varid ! netCDF id's - integer :: ns ! size of grid on file - integer :: n,i,j ! index - integer :: ier ! error status - type(file_desc_t) :: ncid ! netcdf id - character(len=256) :: locfn ! local file name - logical :: readvar ! read variable in or not - integer , allocatable :: idata2d(:,:) - character(len=32) :: subname = 'surfrd_get_globmask' ! subroutine name - !----------------------------------------------------------------------- - - if (filename == ' ') then - mask(:) = 1 - else - ! Check if file exists - if (masterproc) then - if (filename == ' ') then - write(iulog,*) trim(subname),' ERROR: filename must be specified ' - call endrun(msg=errMsg(sourcefile, __LINE__)) - endif - end if - - ! Open file - call getfil( filename, locfn, 0 ) - call ncd_pio_openfile (ncid, trim(locfn), 0) - - ! Determine dimensions and if grid file is 2d or 1d - call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns) - if (masterproc) then - write(iulog,*)'lat/lon grid flag (isgrid2d) is ',isgrid2d - end if - allocate(mask(ns)) - mask(:) = 1 - if (isgrid2d) then - ! Grid is 2d - allocate(idata2d(ni,nj)) - idata2d(:,:) = 1 - call ncd_io(ncid=ncid, varname='LANDMASK', data=idata2d, flag='read', readvar=readvar) - if (.not. readvar) then - call ncd_io(ncid=ncid, varname='mask', data=idata2d, flag='read', readvar=readvar) - end if - if (readvar) then - do j = 1,nj - do i = 1,ni - n = (j-1)*ni + i - mask(n) = idata2d(i,j) - enddo - enddo - end if - deallocate(idata2d) - else - ! Grid is not 2d - call ncd_io(ncid=ncid, varname='LANDMASK', data=mask, flag='read', readvar=readvar) - if (.not. readvar) then - call ncd_io(ncid=ncid, varname='mask', data=mask, flag='read', readvar=readvar) - end if - end if - if (.not. readvar) call endrun( msg=' ERROR: landmask not on fatmlndfrc file'//errMsg(sourcefile, __LINE__)) - - ! 
Close file - call ncd_pio_closefile(ncid) - end if - - end subroutine surfrd_get_globmask - - !----------------------------------------------------------------------- - subroutine surfrd_get_grid(begg, endg, ldomain, filename) - - ! Read the surface dataset grid related information: - ! This is called after the domain decomposition has been created - ! - real latitude of grid cell (degrees) - ! - real longitude of grid cell (degrees) - - use clm_varcon , only : spval, re, grlnd - use domainMod , only : domain_type, lon1d, lat1d, domain_init - use fileutils , only : getfil - use abortutils , only : endrun - use shr_log_mod , only : errMsg => shr_log_errMsg - use ncdio_pio , only : file_desc_t, ncd_pio_openfile, ncd_pio_closefile - use ncdio_pio , only : ncd_io, check_var, ncd_inqfdims, check_dim_size - use pio - - ! input/output variables - integer , intent(in) :: begg, endg - type(domain_type) , intent(inout) :: ldomain ! domain to init - character(len=*) , intent(in) :: filename ! grid filename - - ! local variables - type(file_desc_t) :: ncid ! netcdf id - integer :: beg ! local beg index - integer :: end ! local end index - integer :: ni,nj,ns ! size of grid on file - logical :: readvar ! true => variable is on input file - logical :: isgrid2d ! true => file is 2d lat/lon - logical :: istype_domain ! true => input file is of type domain - real(r8), allocatable :: rdata2d(:,:) ! temporary - character(len=16) :: vname ! temporary - character(len=256) :: locfn ! local file name - integer :: n ! indices - character(len=32) :: subname = 'surfrd_get_grid' ! subroutine name - !----------------------------------------------------------------------- - - if (masterproc) then - if (filename == ' ') then - write(iulog,*) trim(subname),' ERROR: filename must be specified ' - call endrun(msg=errMsg(sourcefile, __LINE__)) - endif - end if - - call getfil( filename, locfn, 0 ) - call ncd_pio_openfile (ncid, trim(locfn), 0) - - ! Determine dimensions - call ncd_inqfdims(ncid, isgrid2d, ni, nj, ns) - - ! Determine isgrid2d flag for domain - call domain_init(ldomain, isgrid2d=isgrid2d, ni=ni, nj=nj, nbeg=begg, nend=endg) - - ! Determine type of file - old style grid file or new style domain file - call check_var(ncid=ncid, varname='xc', readvar=readvar) - if (readvar)then - istype_domain = .true. - else - istype_domain = .false. - end if - - ! Read in area, lon, lat - if (istype_domain) then - call ncd_io(ncid=ncid, varname= 'area', flag='read', data=ldomain%area, & - dim1name=grlnd, readvar=readvar) - ! convert from radians**2 to km**2 - ldomain%area = ldomain%area * (re**2) - if (.not. readvar) call endrun( msg=' ERROR: area NOT on file'//errMsg(sourcefile, __LINE__)) - call ncd_io(ncid=ncid, varname= 'xc', flag='read', data=ldomain%lonc, & - dim1name=grlnd, readvar=readvar) - if (.not. readvar) call endrun( msg=' ERROR: xc NOT on file'//errMsg(sourcefile, __LINE__)) - call ncd_io(ncid=ncid, varname= 'yc', flag='read', data=ldomain%latc, & - dim1name=grlnd, readvar=readvar) - if (.not. 
readvar) call endrun( msg=' ERROR: yc NOT on file'//errMsg(sourcefile, __LINE__)) - else - call endrun( msg=" ERROR: can no longer read non domain files" ) - end if - - if (isgrid2d) then - allocate(rdata2d(ni,nj), lon1d(ni), lat1d(nj)) - if (istype_domain) vname = 'xc' - call ncd_io(ncid=ncid, varname=trim(vname), data=rdata2d, flag='read', readvar=readvar) - lon1d(:) = rdata2d(:,1) - if (istype_domain) vname = 'yc' - call ncd_io(ncid=ncid, varname=trim(vname), data=rdata2d, flag='read', readvar=readvar) - lat1d(:) = rdata2d(1,:) - deallocate(rdata2d) - end if - - ! Check lat limited to -90,90 - if (minval(ldomain%latc) < -90.0_r8 .or. & - maxval(ldomain%latc) > 90.0_r8) then - write(iulog,*) trim(subname),' WARNING: lat/lon min/max is ', & - minval(ldomain%latc),maxval(ldomain%latc) - endif - if ( any(ldomain%lonc < 0.0_r8) )then - call endrun( msg=' ERROR: lonc is negative (see https://github.com/ESCOMP/ctsm/issues/507)' & - //errMsg(sourcefile, __LINE__)) - endif - call ncd_io(ncid=ncid, varname='mask', flag='read', data=ldomain%mask, & - dim1name=grlnd, readvar=readvar) - if (.not. readvar) then - call endrun( msg=' ERROR: LANDMASK NOT on fracdata file'//errMsg(sourcefile, __LINE__)) - end if - call ncd_io(ncid=ncid, varname='frac', flag='read', data=ldomain%frac, & - dim1name=grlnd, readvar=readvar) - if (.not. readvar) then - call endrun( msg=' ERROR: LANDFRAC NOT on fracdata file'//errMsg(sourcefile, __LINE__)) - end if - - call ncd_pio_closefile(ncid) - - end subroutine surfrd_get_grid - - !------------------------------------------------------------------------------ - subroutine decompInit_lnd3D(lni,lnj,lnk) - ! - ! !DESCRIPTION: - ! Create a 3D decomposition gsmap for the global 2D grid with soil levels - ! as the 3rd dimesnion. - ! - ! !USES: - use decompMod, only : gindex_global, bounds_type, get_proc_bounds - use spmdMod , only : comp_id, mpicom - use mct_mod , only : mct_gsmap_init - ! - ! !ARGUMENTS: - integer , intent(in) :: lni,lnj,lnk ! domain global size - ! - ! !LOCAL VARIABLES: - integer :: m,n,k ! indices - integer :: begg,endg,lsize,gsize ! used for gsmap init - integer :: begg3d,endg3d - integer, pointer :: gindex(:) ! global index for gsmap init - type(bounds_type) :: bounds - !------------------------------------------------------------------------------ - - ! Initialize gsmap_lnd2dsoi_gdc2glo - call get_proc_bounds(bounds) - begg = bounds%begg; endg=bounds%endg - - begg3d = (begg-1)*lnk + 1 - endg3d = endg*lnk - lsize = (endg3d - begg3d + 1 ) - allocate(gindex(begg3d:endg3d)) - do k = 1, lnk - do n = begg,endg - m = (begg-1)*lnk + (k-1)*(endg-begg+1) + (n-begg+1) - gindex(m) = gindex_global(n-begg+1) + (k-1)*(lni*lnj) - enddo - enddo - gsize = lni * lnj * lnk - call mct_gsMap_init(gsMap_lnd2Dsoi_gdc2glo, gindex, mpicom, comp_id, lsize, gsize) - - ! 
Diagnostic output - - if (masterproc) then - write(iulog,*)' 3D GSMap' - write(iulog,*)' longitude points = ',lni - write(iulog,*)' latitude points = ',lnj - write(iulog,*)' soil levels = ',lnk - write(iulog,*)' gsize = ',gsize - write(iulog,*)' lsize = ',lsize - write(iulog,*)' bounds(gindex) = ',size(gindex) - write(iulog,*) - end if - - deallocate(gindex) - - end subroutine decompInit_lnd3D - -end module lnd_set_decomp_and_domain diff --git a/src/cpl/mct/ndepStreamMod.F90 b/src/cpl/mct/ndepStreamMod.F90 deleted file mode 100644 index d26ff7c95e..0000000000 --- a/src/cpl/mct/ndepStreamMod.F90 +++ /dev/null @@ -1,376 +0,0 @@ -module ndepStreamMod - - !----------------------------------------------------------------------- - ! !DESCRIPTION: - ! Contains methods for reading in nitrogen deposition data file - ! Also includes functions for dynamic ndep file handling and - ! interpolation. - ! - ! !USES - use shr_kind_mod, only: r8 => shr_kind_r8, CL => shr_kind_cl - use shr_strdata_mod, only: shr_strdata_type, shr_strdata_create - use shr_strdata_mod, only: shr_strdata_print, shr_strdata_advance - use mct_mod , only: mct_ggrid - use spmdMod , only: mpicom, masterproc, comp_id, iam - use clm_varctl , only: iulog, inst_name - use abortutils , only: endrun - use decompMod , only: bounds_type - use domainMod , only: ldomain - - ! !PUBLIC TYPES: - implicit none - private - - ! !PUBLIC MEMBER FUNCTIONS: - public :: ndep_init ! position datasets for dynamic ndep - public :: ndep_interp ! interpolates between two years of ndep file data - public :: clm_domain_mct ! Sets up MCT domain for this resolution - - ! !PRIVATE MEMBER FUNCTIONS: - private :: check_units ! Check the units and make sure they can be used - - ! ! PRIVATE TYPES - type(shr_strdata_type) :: sdat ! input data stream - integer :: stream_year_first_ndep ! first year in stream to use - integer :: stream_year_last_ndep ! last year in stream to use - integer :: model_year_align_ndep ! align stream_year_firstndep with - logical :: divide_by_secs_per_yr = .true. ! divide by the number of seconds per year - - character(len=*), parameter, private :: sourcefile = & - __FILE__ - !============================================================================== - -contains - - !============================================================================== - - subroutine ndep_init(bounds, NLFilename) - ! - ! Initialize data stream information. - ! - ! Uses: - use shr_kind_mod , only : CS => shr_kind_cs - use clm_time_manager , only : get_calendar - use ncdio_pio , only : pio_subsystem - use shr_pio_mod , only : shr_pio_getiotype - use shr_nl_mod , only : shr_nl_find_group_name - use shr_log_mod , only : errMsg => shr_log_errMsg - use shr_mpi_mod , only : shr_mpi_bcast - use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo, gsmap_global - ! - ! arguments - implicit none - type(bounds_type), intent(in) :: bounds - character(len=*), intent(in) :: NLFilename ! Namelist filename - ! - ! local variables - integer :: nu_nml ! unit for namelist file - integer :: nml_error ! namelist i/o error flag - type(mct_ggrid) :: dom_clm ! 
domain information - character(len=CL) :: stream_fldFileName_ndep - character(len=CL) :: ndepmapalgo = 'bilinear' - character(len=CL) :: ndep_tintalgo = 'linear' - character(len=CS) :: ndep_taxmode = 'extend' - character(len=CL) :: ndep_varlist = 'NDEP_year' - character(*), parameter :: shr_strdata_unset = 'NOT_SET' - character(*), parameter :: subName = "('ndepdyn_init')" - character(*), parameter :: F00 = "('(ndepdyn_init) ',4a)" - !----------------------------------------------------------------------- - - namelist /ndepdyn_nml/ & - stream_year_first_ndep, & - stream_year_last_ndep, & - model_year_align_ndep, & - ndepmapalgo, ndep_taxmode, & - ndep_varlist, & - stream_fldFileName_ndep, & - ndep_tintalgo - - ! Default values for namelist - stream_year_first_ndep = 1 ! first year in stream to use - stream_year_last_ndep = 1 ! last year in stream to use - model_year_align_ndep = 1 ! align stream_year_first_ndep with this model year - stream_fldFileName_ndep = ' ' - - ! Read ndepdyn_nml namelist - if (masterproc) then - open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) - call shr_nl_find_group_name(nu_nml, 'ndepdyn_nml', status=nml_error) - if (nml_error == 0) then - read(nu_nml, nml=ndepdyn_nml,iostat=nml_error) - if (nml_error /= 0) then - call endrun(msg=' ERROR reading ndepdyn_nml namelist'//errMsg(sourcefile, __LINE__)) - end if - else - call endrun(msg=' ERROR finding ndepdyn_nml namelist'//errMsg(sourcefile, __LINE__)) - end if - close(nu_nml) - endif - - call shr_mpi_bcast(stream_year_first_ndep , mpicom) - call shr_mpi_bcast(stream_year_last_ndep , mpicom) - call shr_mpi_bcast(model_year_align_ndep , mpicom) - call shr_mpi_bcast(stream_fldFileName_ndep, mpicom) - call shr_mpi_bcast(ndep_varlist , mpicom) - call shr_mpi_bcast(ndep_taxmode , mpicom) - call shr_mpi_bcast(ndep_tintalgo , mpicom) - - if (masterproc) then - write(iulog,*) ' ' - write(iulog,*) 'ndepdyn stream settings:' - write(iulog,*) ' stream_year_first_ndep = ',stream_year_first_ndep - write(iulog,*) ' stream_year_last_ndep = ',stream_year_last_ndep - write(iulog,*) ' model_year_align_ndep = ',model_year_align_ndep - write(iulog,*) ' stream_fldFileName_ndep = ',stream_fldFileName_ndep - write(iulog,*) ' ndep_varList = ',ndep_varList - write(iulog,*) ' ndep_taxmode = ',ndep_taxmode - write(iulog,*) ' ndep_tintalgo = ',ndep_tintalgo - write(iulog,*) ' ' - endif - ! Read in units - call check_units( stream_fldFileName_ndep, ndep_varList ) - - ! 
Set domain and create streams - call clm_domain_mct (bounds, dom_clm) - - call shr_strdata_create(sdat,name="clmndep", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_global, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_ndep, & - yearLast=stream_year_last_ndep, & - yearAlign=model_year_align_ndep, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_ndep), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/trim(stream_fldFileName_ndep)/),& - fldListFile=ndep_varlist, & - fldListModel=ndep_varlist, & - fillalgo='none', & - mapalgo=ndepmapalgo, & - tintalgo=ndep_tintalgo, & - calendar=get_calendar(), & - taxmode=ndep_taxmode ) - - - if (masterproc) then - call shr_strdata_print(sdat,'CLMNDEP data') - endif - - end subroutine ndep_init - !================================================================ - - subroutine check_units( stream_fldFileName_ndep, ndep_varList ) - !------------------------------------------------------------------- - ! Check that units are correct on the file and if need any conversion - use ncdio_pio , only : ncd_pio_openfile, ncd_inqvid, ncd_getatt, ncd_pio_closefile, ncd_nowrite - use ncdio_pio , only : file_desc_t, var_desc_t - use shr_kind_mod , only : CS => shr_kind_cs - use shr_log_mod , only : errMsg => shr_log_errMsg - use shr_string_mod, only : shr_string_listGetName - implicit none - - !----------------------------------------------------------------------- - ! - ! Arguments - character(len=*), intent(IN) :: stream_fldFileName_ndep ! ndep filename - character(len=*), intent(IN) :: ndep_varList ! ndep variable list to examine - ! - ! Local variables - type(file_desc_t) :: ncid ! NetCDF filehandle for ndep file - type(var_desc_t) :: vardesc ! variable descriptor - integer :: varid ! variable index - logical :: readvar ! If variable was read - character(len=CS) :: ndepunits! ndep units - character(len=CS) :: fname ! ndep field name - !----------------------------------------------------------------------- - call ncd_pio_openfile( ncid, trim(stream_fldFileName_ndep), ncd_nowrite ) - call shr_string_listGetName( ndep_varList, 1, fname ) - call ncd_inqvid( ncid, fname, varid, vardesc, readvar=readvar ) - if ( readvar ) then - call ncd_getatt( ncid, varid, "units", ndepunits ) - else - call endrun(msg=' ERROR finding variable: '//trim(fname)//" in file: "// & - trim(stream_fldFileName_ndep)//errMsg(sourcefile, __LINE__)) - end if - call ncd_pio_closefile( ncid ) - - ! Now check to make sure they are correct - if ( trim(ndepunits) == "g(N)/m2/s" )then - divide_by_secs_per_yr = .false. - else if ( trim(ndepunits) == "g(N)/m2/yr" )then - divide_by_secs_per_yr = .true. - else - call endrun(msg=' ERROR in units for nitrogen deposition equal to: '//trim(ndepunits)//" not units expected"// & - errMsg(sourcefile, __LINE__)) - end if - - end subroutine check_units - - !================================================================ - subroutine ndep_interp(bounds, atm2lnd_inst) - - !----------------------------------------------------------------------- - use clm_time_manager, only : get_curr_date, get_curr_days_per_year - use clm_varcon , only : secspday - use atm2lndType , only : atm2lnd_type - ! - ! Arguments - type(bounds_type) , intent(in) :: bounds - type(atm2lnd_type), intent(inout) :: atm2lnd_inst - ! - ! 
Local variables - integer :: g, ig - integer :: year ! year (0, ...) for nstep+1 - integer :: mon ! month (1, ..., 12) for nstep+1 - integer :: day ! day of month (1, ..., 31) for nstep+1 - integer :: sec ! seconds into current date for nstep+1 - integer :: mcdate ! Current model date (yyyymmdd) - integer :: dayspyr ! days per year - !----------------------------------------------------------------------- - - call get_curr_date(year, mon, day, sec) - mcdate = year*10000 + mon*100 + day - - call shr_strdata_advance(sdat, mcdate, sec, mpicom, 'ndepdyn') - - if ( divide_by_secs_per_yr )then - ig = 0 - dayspyr = get_curr_days_per_year( ) - do g = bounds%begg,bounds%endg - ig = ig+1 - atm2lnd_inst%forc_ndep_grc(g) = sdat%avs(1)%rAttr(1,ig) / (secspday * dayspyr) - end do - else - ig = 0 - do g = bounds%begg,bounds%endg - ig = ig+1 - atm2lnd_inst%forc_ndep_grc(g) = sdat%avs(1)%rAttr(1,ig) - end do - end if - - end subroutine ndep_interp - - !============================================================================== - subroutine clm_domain_mct(bounds, dom_clm, nlevels) - - !------------------------------------------------------------------- - ! Set domain data type for internal clm grid - use clm_varcon , only : re - use domainMod , only : ldomain - use mct_mod , only : mct_ggrid, mct_gsMap_lsize, mct_gGrid_init - use mct_mod , only : mct_gsMap_orderedPoints, mct_gGrid_importIAttr - use mct_mod , only : mct_gGrid_importRAttr, mct_gsMap - use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo, gsmap_global - implicit none - ! - ! arguments - type(bounds_type), intent(in) :: bounds - type(mct_ggrid), intent(out) :: dom_clm ! Output domain information for land model - integer, intent(in), optional :: nlevels ! Number of levels if this is a 3D field - ! - ! local variables - integer :: g,i,j,k ! index - integer :: lsize ! land model domain data size - real(r8), pointer :: data(:) ! temporary - integer , pointer :: idata(:) ! temporary - integer :: nlevs ! Number of vertical levels - type(mct_gsMap), pointer :: gsmap => null() ! MCT GS map - !------------------------------------------------------------------- - ! SEt number of levels, and get the GS map for either the 2D or 3D grid - nlevs = 1 - if ( present(nlevels) ) nlevs = nlevels - if ( nlevs == 1 ) then - gsmap => gsmap_global - else - gsmap => gsMap_lnd2Dsoi_gdc2glo - end if - ! - ! Initialize mct domain type - ! lat/lon in degrees, area in radians^2, mask is 1 (land), 0 (non-land) - ! Note that in addition land carries around landfrac for the purposes of domain checking - ! - lsize = mct_gsMap_lsize(gsmap, mpicom) - call mct_gGrid_init( GGrid=dom_clm, & - CoordChars='lat:lon:hgt', OtherChars='area:aream:mask:frac', lsize=lsize ) - ! - ! Allocate memory - ! - allocate(data(lsize)) - ! - ! Determine global gridpoint number attribute, GlobGridNum, which is set automatically by MCT - ! - call mct_gsMap_orderedPoints(gsmap, iam, idata) - gsmap => null() - call mct_gGrid_importIAttr(dom_clm,'GlobGridNum',idata,lsize) - ! - ! Determine domain (numbering scheme is: West to East and South to North to South pole) - ! Initialize attribute vector with special value - ! - data(:) = -9999.0_R8 - call mct_gGrid_importRAttr(dom_clm,"lat" ,data,lsize) - call mct_gGrid_importRAttr(dom_clm,"lon" ,data,lsize) - call mct_gGrid_importRAttr(dom_clm,"area" ,data,lsize) - call mct_gGrid_importRAttr(dom_clm,"aream",data,lsize) - data(:) = 0.0_R8 - call mct_gGrid_importRAttr(dom_clm,"mask" ,data,lsize) - ! - ! Determine bounds - ! - ! 
Fill in correct values for domain components - ! Note aream will be filled in in the atm-lnd mapper - ! - do k = 1, nlevs - do g = bounds%begg,bounds%endg - i = 1 + (g - bounds%begg) - data(i) = ldomain%lonc(g) - end do - end do - call mct_gGrid_importRattr(dom_clm,"lon",data,lsize) - - do k = 1, nlevs - do g = bounds%begg,bounds%endg - i = 1 + (g - bounds%begg) - data(i) = ldomain%latc(g) - end do - end do - call mct_gGrid_importRattr(dom_clm,"lat",data,lsize) - - do k = 1, nlevs - do g = bounds%begg,bounds%endg - i = 1 + (g - bounds%begg) - data(i) = ldomain%area(g)/(re*re) - end do - end do - call mct_gGrid_importRattr(dom_clm,"area",data,lsize) - - do k = 1, nlevs - do g = bounds%begg,bounds%endg - i = 1 + (g - bounds%begg) - data(i) = real(ldomain%mask(g), r8) - end do - end do - call mct_gGrid_importRattr(dom_clm,"mask",data,lsize) - - do k = 1, nlevs - do g = bounds%begg,bounds%endg - i = 1 + (g - bounds%begg) - data(i) = real(ldomain%frac(g), r8) - end do - end do - call mct_gGrid_importRattr(dom_clm,"frac",data,lsize) - - deallocate(data) - deallocate(idata) - - end subroutine clm_domain_mct - -end module ndepStreamMod From 4cbc00d42ba54b07fd1992e7d574b94c29ae939b Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 10:48:35 -0600 Subject: [PATCH 085/126] Remove another mct comment --- src/cpl/nuopc/lnd_import_export.F90 | 1 - 1 file changed, 1 deletion(-) diff --git a/src/cpl/nuopc/lnd_import_export.F90 b/src/cpl/nuopc/lnd_import_export.F90 index 11cc807640..b9966f81e9 100644 --- a/src/cpl/nuopc/lnd_import_export.F90 +++ b/src/cpl/nuopc/lnd_import_export.F90 @@ -248,7 +248,6 @@ subroutine advertise_fields(gcomp, flds_scalar_name, glc_present, cism_evolve, r if (shr_megan_mechcomps_n .ne. megan_nflds) call shr_sys_abort('ERROR: megan field count mismatch') ! CARMA volumetric soil water from land - ! TODO: is the following correct - the CARMA field exchange is very confusing in mct call shr_carma_readnl('drv_flds_in', carma_fields) ! export to atm From 854ff9d438aa24edb644459f31c89494ed4ee983 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 10:50:43 -0600 Subject: [PATCH 086/126] Remove cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb directory --- .../testdefs/testmods_dirs/clm/nuopc_cap_bfb/README | 10 ---------- .../testmods_dirs/clm/nuopc_cap_bfb/include_user_mods | 1 - .../testmods_dirs/clm/nuopc_cap_bfb/user_nl_cpl | 4 ---- 3 files changed, 15 deletions(-) delete mode 100644 cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/README delete mode 100644 cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/include_user_mods delete mode 100644 cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/user_nl_cpl diff --git a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/README b/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/README deleted file mode 100644 index aefd8adee7..0000000000 --- a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/README +++ /dev/null @@ -1,10 +0,0 @@ -This testmod directory currently isn't used in any tests, but is useful -for the following reason: - -According to Mariana Vertenstein: At least at one point, you could get -bit-for-bit answers when comparing the mct and nuopc versions of CTSM in -an I compset with SROF and SGLC, if using the changes in this testmod in -both the mct and nuopc runs. - -So we are keeping this around in case someone wants to reproduce that -comparison. 
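A note on the deleted MCT cap above: lnd_export encodes the CESM sign convention that coupler fluxes are positive downward, so every land-to-atmosphere flux (wind stress, latent and sensible heat, upward longwave, evaporation, CO2, dust, VOC, fire, methane) is negated when packed into l2x, while state fields and net shortwave pass through unchanged. A minimal Python sketch of that packing rule; the field names are illustrative placeholders, not the real index_l2x_* entries:

```python
# Sketch of the l2x packing rule from the deleted MCT lnd_export routine.
# Field names here are illustrative, not the actual index_l2x_* entries.

# Upward fluxes: land -> atm, so negated under "positive downward".
UPWARD_FLUXES = {"taux", "tauy", "lat", "sen", "lwup", "evap", "fco2_lnd"}
# States and net (already downward-positive) fields pass through as-is.
PASS_THROUGH = {"t_rad", "snowh", "tref", "qref", "u10", "swnet"}

def pack_l2x(land_fields: dict) -> dict:
    """Pack land-model fields into a coupler export dict."""
    l2x = {}
    for name, value in land_fields.items():
        if name in UPWARD_FLUXES:
            l2x[name] = -value          # flip sign: positive downward
        elif name in PASS_THROUGH:
            l2x[name] = value
        # fields with no coupler index (index == 0 in the Fortran) are skipped
    return l2x

print(pack_l2x({"sen": 35.0, "swnet": 180.0}))  # {'sen': -35.0, 'swnet': 180.0}
```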
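decompInit_lnd3D in the deleted lnd_set_decomp_and_domain.F90 builds the 3-D soil-moisture gsmap by stacking the 2-D gridcell index map once per soil level, offsetting level k by (k-1)*lni*lnj global points. The same arithmetic in a 0-based Python sketch (the Fortran is 1-based), useful for convincing yourself that every local slot begg3d..endg3d is filled exactly once:

```python
def build_3d_gindex(gindex_global, begg, endg, lni, lnj, lnk):
    """Replicate the deleted decompInit_lnd3D index construction (0-based).

    gindex_global[j] is the global 2-D index of the j-th local gridcell;
    each soil level k adds a full global-grid offset of k * lni * lnj.
    """
    ng = endg - begg + 1                      # local gridcell count
    gindex3d = [None] * (ng * lnk)
    for k in range(lnk):                      # soil levels
        for j in range(ng):                   # local gridcells
            m = k * ng + j                    # local 3-D slot
            gindex3d[m] = gindex_global[j] + k * (lni * lnj)
    assert None not in gindex3d               # every slot filled exactly once
    return gindex3d

# 2 local cells with global 2-D indices 5 and 9 on a 4x3 grid, 2 levels:
print(build_3d_gindex([5, 9], begg=1, endg=2, lni=4, lnj=3, lnk=2))
# -> [5, 9, 17, 21]
```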
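The deleted ndepStreamMod.F90 accepts exactly two unit strings from the stream file: check_units sets divide_by_secs_per_yr for g(N)/m2/yr and clears it for g(N)/m2/s, aborting on anything else, and ndep_interp then divides per-year values by secspday times the calendar's days per year. A hedged sketch of that conversion logic:

```python
SECS_PER_DAY = 86400.0  # clm_varcon's secspday

def ndep_to_per_second(value, units, days_per_year=365.0):
    """Convert a nitrogen-deposition stream value to g(N)/m2/s.

    Mirrors the deleted check_units / ndep_interp pair: only two unit
    strings are accepted, anything else is a fatal error.
    """
    if units == "g(N)/m2/s":
        return value                                   # already per second
    if units == "g(N)/m2/yr":
        return value / (SECS_PER_DAY * days_per_year)  # divide_by_secs_per_yr
    raise ValueError(f"unexpected ndep units: {units!r}")

print(ndep_to_per_second(3.1536, "g(N)/m2/yr"))  # 1e-07 for a 365-day year
```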
diff --git a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/include_user_mods b/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/include_user_mods deleted file mode 100644 index fe0e18cf88..0000000000 --- a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/include_user_mods +++ /dev/null @@ -1 +0,0 @@ -../default diff --git a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/user_nl_cpl b/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/user_nl_cpl deleted file mode 100644 index 6bfefec90b..0000000000 --- a/cime_config/testdefs/testmods_dirs/clm/nuopc_cap_bfb/user_nl_cpl +++ /dev/null @@ -1,4 +0,0 @@ -orb_eccen = 0. -orb_mvelp = 0. -orb_obliq = 0. -orb_mode = "fixed_parameters" From 6b6ea52f5271866c525cef19c480c2c7bbc35031 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 10:55:30 -0600 Subject: [PATCH 087/126] Remove mention of mct in README.NUOPC_driver.md --- README.NUOPC_driver.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/README.NUOPC_driver.md b/README.NUOPC_driver.md index 9eb5d5b2cb..6caf63a9bd 100644 --- a/README.NUOPC_driver.md +++ b/README.NUOPC_driver.md @@ -40,9 +40,3 @@ ESMF_PROFILING_LEVEL --- Verbosity level for ESMF profiling nuopc.runseq is a text file that determines how the driver operates. You can change the operation by having an updated copy in your case directory. - -## What if I want to use the MCT driver? - -The MCT driver is now removed. For more notes see: - -https://docs.google.com/presentation/d/1yjiKSEV53JDAJbYxhpY2T9GTxlWFzQAn From 40367c56210797ae6fe27991be8cb82f9708894d Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 10:56:28 -0600 Subject: [PATCH 088/126] Remove an expected failure the mentions mct in ExpectedTestFails.xml --- cime_config/testdefs/ExpectedTestFails.xml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index dc850d51ab..a29e55cc9e 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -143,13 +143,6 @@ - - - FAIL - #1887 - - - FAIL From e99eaeaba4a502277e4b2f99096b0e130cff46d8 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 11:00:11 -0600 Subject: [PATCH 089/126] Add back mct to Externals.cfg --- Externals.cfg | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/Externals.cfg b/Externals.cfg index cb2622dedf..8ef524bf24 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -69,6 +69,13 @@ repo_url = https://github.com/ESCOMP/CESM_share local_path = share required = True +[mct] +tag = MCT_2.11.0 +protocol = git +repo_url = https://github.com/MCSclimate/MCT +local_path = libraries/mct +required = True + [parallelio] tag = pio2_6_2 protocol = git From fed6805e85ca0c2ccb6eada9946f4c4f634f5e54 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 11:05:10 -0600 Subject: [PATCH 090/126] Remove /test subdirectory --- test/tools/CLM_compare.sh | 39 ----- test/tools/Makefile | 12 -- test/tools/README | 73 ---------- test/tools/README.testnames | 69 --------- test/tools/TBLCFGtools.sh | 120 ---------------- test/tools/TBLscript_tools.sh | 122 ---------------- test/tools/TBLtools.sh | 119 --------------- test/tools/TCBCFGtools.sh | 135 ------------------ test/tools/TCBtools.sh | 121 ---------------- test/tools/TSMCFGtools.sh | 113 --------------- test/tools/TSMscript_tools.sh | 98 ------------- test/tools/TSMtools.sh | 117 --------------- test/tools/config_files/CFGtools__ds | 0 
test/tools/config_files/README | 9 -- test/tools/config_files/tools__do | 1 - test/tools/config_files/tools__ds | 1 - test/tools/config_files/tools__o | 1 - test/tools/config_files/tools__s | 1 - test/tools/gen_test_table.sh | 80 ----------- test/tools/get_cprnc_diffs.sh | 56 -------- test/tools/input_tests_master | 32 ----- test/tools/nl_files/mesh_maker_fv09 | 1 - test/tools/nl_files/mkmapdata_if10 | 1 - test/tools/nl_files/mkmapdata_ne30np4 | 1 - .../nl_files/mkprocdata_ne30_to_f19_I2000 | 1 - test/tools/nl_files/modify_data_YELL | 1 - test/tools/nl_files/run_neon_OSBS | 1 - test/tools/nl_files/run_neon_YELL_PRISM | 1 - test/tools/nl_files/subset_data_KONA | 1 - test/tools/nl_files/subset_data_US-UMB | 1 - test/tools/nl_files/subset_data_YELL | 1 - .../nl_files/subset_data_f09_1x1pt_townshipSD | 1 - .../subset_data_f09_37x288pt_PanBoreal | 1 - .../subset_data_f09_4x9pt_AlaskaTananaValley | 1 - .../subset_data_f09_58x45pt_SouthAmerica | 1 - .../subset_data_f09_90x288pt_1850PanTropics | 1 - test/tools/show_var_diffs.sh | 79 ---------- test/tools/tests_posttag_hobart_nompi | 1 - test/tools/tests_posttag_nompi_regression | 1 - test/tools/tests_pretag_cheyenne_nompi | 3 - test/tools/tests_pretag_derecho_nompi | 9 -- test/tools/tests_pretag_nompi_neon | 8 -- 42 files changed, 1435 deletions(-) delete mode 100755 test/tools/CLM_compare.sh delete mode 100644 test/tools/Makefile delete mode 100644 test/tools/README delete mode 100644 test/tools/README.testnames delete mode 100755 test/tools/TBLCFGtools.sh delete mode 100755 test/tools/TBLscript_tools.sh delete mode 100755 test/tools/TBLtools.sh delete mode 100755 test/tools/TCBCFGtools.sh delete mode 100755 test/tools/TCBtools.sh delete mode 100755 test/tools/TSMCFGtools.sh delete mode 100755 test/tools/TSMscript_tools.sh delete mode 100755 test/tools/TSMtools.sh delete mode 100644 test/tools/config_files/CFGtools__ds delete mode 100644 test/tools/config_files/README delete mode 100644 test/tools/config_files/tools__do delete mode 100644 test/tools/config_files/tools__ds delete mode 100644 test/tools/config_files/tools__o delete mode 100644 test/tools/config_files/tools__s delete mode 100755 test/tools/gen_test_table.sh delete mode 100755 test/tools/get_cprnc_diffs.sh delete mode 100644 test/tools/input_tests_master delete mode 100644 test/tools/nl_files/mesh_maker_fv09 delete mode 100644 test/tools/nl_files/mkmapdata_if10 delete mode 100644 test/tools/nl_files/mkmapdata_ne30np4 delete mode 100644 test/tools/nl_files/mkprocdata_ne30_to_f19_I2000 delete mode 100644 test/tools/nl_files/modify_data_YELL delete mode 100644 test/tools/nl_files/run_neon_OSBS delete mode 100644 test/tools/nl_files/run_neon_YELL_PRISM delete mode 100644 test/tools/nl_files/subset_data_KONA delete mode 100644 test/tools/nl_files/subset_data_US-UMB delete mode 100644 test/tools/nl_files/subset_data_YELL delete mode 100644 test/tools/nl_files/subset_data_f09_1x1pt_townshipSD delete mode 100644 test/tools/nl_files/subset_data_f09_37x288pt_PanBoreal delete mode 100644 test/tools/nl_files/subset_data_f09_4x9pt_AlaskaTananaValley delete mode 100644 test/tools/nl_files/subset_data_f09_58x45pt_SouthAmerica delete mode 100644 test/tools/nl_files/subset_data_f09_90x288pt_1850PanTropics delete mode 100755 test/tools/show_var_diffs.sh delete mode 100644 test/tools/tests_posttag_hobart_nompi delete mode 100644 test/tools/tests_posttag_nompi_regression delete mode 100644 test/tools/tests_pretag_cheyenne_nompi delete mode 100644 test/tools/tests_pretag_derecho_nompi delete mode 100644 
test/tools/tests_pretag_nompi_neon diff --git a/test/tools/CLM_compare.sh b/test/tools/CLM_compare.sh deleted file mode 100755 index 38f547c3ab..0000000000 --- a/test/tools/CLM_compare.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 2 ]; then - echo "CLM_compare.sh: incorrect number of input arguments" - exit 1 -fi - -echo "CLM_compare.sh: comparing $1 " -echo " with $2" - -##note syntax here as stderr and stdout from cprnc command go -##to separate places! -${CPRNC_EXE} ${CPRNC_OPT} $1 $2 2>&1 > cprnc.out -rc=$? -if [ $rc -ne 0 ]; then - echo "CLM_compare.sh: error doing comparison, cprnc error= $rc" - exit 2 -fi - -result_old=`perl -e 'while (my $ll = <>) \ - { if ($ll =~ /(\d+)[^0-9]+compared[^0-9]+(\d+)/) \ - { print "PASS" if $1>0 && $2==0 }}' cprnc.out` -if grep -c "the two files seem to be IDENTICAL" cprnc.out > /dev/null; then - result=PASS -elif grep -c "the two files seem to be DIFFERENT" cprnc.out > /dev/null; then - result=FAIL -else - result=$result_old -fi - -if [ "$result" = "PASS" ]; then - echo "CLM_compare.sh: files are b4b" -else - echo "CLM_compare.sh: files are NOT b4b" - exit 3 -fi - -exit 0 diff --git a/test/tools/Makefile b/test/tools/Makefile deleted file mode 100644 index b5031abdba..0000000000 --- a/test/tools/Makefile +++ /dev/null @@ -1,12 +0,0 @@ -# -# Makefile to build clm testing documentation -# - -# Get list of tests_ files -SOURCES = $(wildcard tests_*) - -all: test_table.html - -test_table.html: $(SOURCES) - gen_test_table.sh - diff --git a/test/tools/README b/test/tools/README deleted file mode 100644 index c545f625b8..0000000000 --- a/test/tools/README +++ /dev/null @@ -1,73 +0,0 @@ -$CTSMROOT/clm/test/tools/README 06/08/2018 - -Scripts for testing the CLM support tools with many different -configurations and run-time options. - -I. MAIN SCRIPTS: - -test_driver.sh - Test the CLM offline tools - -To use... - -./test_driver.sh -i - -on Derecho - -qcmd -l walltime=08:00:00 -- ./test_driver.sh -i >& run.out & - -And to for example to compare to another baseline code (in this case ctsm5.1.dev066, which would need to be cloned at the given -path) ... - -qcmd -l walltime=08:00:00 -- env BL_ROOT=/glade/scratch/erik/ctsm5.1.dev066 ./test_driver.sh -i >& run.out & - -on izumi - -nohup ./test_driver.sh -i >& run.out & - -release tests - -qcmd -l walltime=10:00:00 -- env CLM_INPUT_TESTS=`pwd`/tests_posttag_nompi_regression \ -./test_driver.sh -i >& run_regress.out & - -To run neon-specific tests, please use login nodes: -env CLM_INPUT_TESTS=`pwd`/tests_pretag_nompi_neon ./test_driver.sh -i > & run_neon.out & - - -Intended for use on NCAR machines Derecho, Casper (DAV) and izumi. - -II. 
RUNNING test_driver.sh TOOLS TESTING: - -Basic use: - -./test_driver.sh -i -./test_driver.sh -h # to get help on options - -Important environment variables (just used by test_driver.sh) - -BL_ROOT ---------------- Root directory of CLM baseline code to compare to - (if not set BL test will not be performed) -BL_TESTDIR ------------- Root directory of where to put baseline tests -CLM_INPUT_TESTS -------- Filename of file with list of tests to perform -CLM_TESTDIR ------------ Root directory of where to put most tests -CLM_RETAIN_FILES ------- If set to TRUE -- don't cleanup files after testing -CLM_FC ----------------- Use given compiler -CLM_JOBID -------------- Job identification number to use (rather than process ID) -CLM_THREADS ------------ Number of open-MP threads to use - (by default this is set differently by machine) -CLM_SOFF --------------- If set to TRUE -- stop on first failed test (default FALSE) - -Important files for test_driver tools testing: - -test_driver.sh ------- Main test script for tools -nl_files ------------- Directory with various namelists to test -config_files --------- Directory with various configurations to test -input_tests_master --- Master list of tests -tests_pretag_* ------- Tests for specific machines to do by default before a tag is done -tests_posttag_* ------ Tests for specific machines to do for more extensive testing - after a tag is done -CLM_compare.sh ------- Compares output history files between two cases -T*.sh ---------------- Basic test script to do a specific type of test -gen_test_table.sh ---- Creates HTML table of tests -Makefile ------------- Will build the HTML table of tests - -../../tools/README.testing - Information on how the testing works for the CLM tools diff --git a/test/tools/README.testnames b/test/tools/README.testnames deleted file mode 100644 index 74dbe8e5f3..0000000000 --- a/test/tools/README.testnames +++ /dev/null @@ -1,69 +0,0 @@ -Tests for test_driver are for the CLM tools only. - -Test naming conventions for the test_driver.sh script: - -Test names are: - -xxnmi - -Where: xx is the two-letter test type - sm=smoke, br=branch, er=exact restart, bl=base-line comparision, - cb=configure-build, rp=reproducibility, op=OpenMP threading for tools - -n is the configuration type: - -1 -- unused -2 -- unused -3 -- unused -4 -- unused -5 -- unused -6 -- unused -7 -- unused -8 -- unused -9 -- mesh_maker -0 -- run_neon -a -- modify_data -b -- subset_data -c -- mkprocdata_map -d -- mkmapgrids -e -- unused -f -- unused -g -- unused -h -- unused -i -- tools scripts - -m is the resolution - -0 -- 0.9x1.25 -1 -- 48x96 -5 -- 10x15 -6 -- 5x5_amazon -7 -- 1x1 brazil -8 -- US-UMB -9 -- 4x5 -a -- NEON YELL -b -- NEON KONA -c -- NEON OSBS -d -- SouthAmerica -e -- 1850PanTropics -f -- PanBoreal -g -- AlaskaTananaValley -h -- single point from the 0.9x1.25 grid (Township SD) -y -- 1.9x2.5 with transient 1850-2100 for rcp=2.6 and glacier-MEC on -T -- 1x1_numaIA -Z -- 10x15 with crop on -@ -- ne120np4 -# -- ne30np4 - -i is the specific test (usually this implies...) 
- -1 -- Serial script -2 -- Serial -3 -- OpenMP only -4 -- serial, DEBUG -7 -- OpenMP only second test, DEBUG -8 -- OpenMP only third test, DEBUG -9 -- Serial Script -0 -- Serial Script - - diff --git a/test/tools/TBLCFGtools.sh b/test/tools/TBLCFGtools.sh deleted file mode 100755 index 6276c885e2..0000000000 --- a/test/tools/TBLCFGtools.sh +++ /dev/null @@ -1,120 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 3 ]; then - echo "TBLCFGtools.sh: incorrect number of input arguments" - exit 1 -fi - -if [ -z "$BL_ROOT" ] && [ -z "$BL_TESTDIR" ]; then - echo "TBL.sh: no environment variables set for baseline test - will skip" - exit 255 -fi - -tool=$(basename $1) -test_name=TBLCFGtools.$tool.$2.$3 - -if [ -f ${CLM_TESTDIR}/${test_name}/TestStatus ]; then - if grep -c PASS ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TBLCFGtools.sh: smoke test has already passed; results are in " - echo " ${CLM_TESTDIR}/${test_name}" - exit 0 - elif grep -c GEN ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TBLCFGtools.sh: test already generated" - else - read fail_msg < ${CLM_TESTDIR}/${test_name}/TestStatus - prev_jobid=${fail_msg#*job} - - if [ $JOBID = $prev_jobid ]; then - echo "TBLCFGtools.sh: smoke test has already failed for this job - will not reattempt; " - echo " results are in: ${CLM_TESTDIR}/${test_name}" - exit 2 - else - echo "TBLCFGtools.sh: this smoke test failed under job ${prev_jobid} - moving those results to " - echo " ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid and trying again" - cp -rp ${CLM_TESTDIR}/${test_name} ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid - fi - fi -fi - -rundir=${CLM_TESTDIR}/${test_name} -if [ -d ${rundir} ]; then - rm -r ${rundir} -fi -mkdir -p ${rundir} -if [ $? -ne 0 ]; then - echo "TBLCFGtools.sh: error, unable to create work subdirectory" - exit 3 -fi -cd ${rundir} - -echo "TBLCFGtools.sh: calling TSMCFGtools.sh to run $tool executable" -${CLM_SCRIPTDIR}/TSMCFGtools.sh $1 $2 $3 -rc=$? -if [ $rc -ne 0 ]; then - echo "TBLCFGtools.sh: error from TSMCFGtools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 4 -fi - -if [ -n "${BL_ROOT}" ]; then - if [ -z "$BL_TESTDIR" ]; then - BL_TESTDIR=${CLM_TESTDIR}.bl - fi - echo "TBLCFGtools.sh: generating baseline data from root $BL_ROOT - results in $BL_TESTDIR" - - echo "TBLCFGtools.sh: calling ****baseline**** TSMCFGtools.sh for smoke test" - bl_dir=`/bin/ls -1d ${BL_ROOT}/test/tools` - env CLM_TESTDIR=${BL_TESTDIR} \ - CLM_ROOT=${BL_ROOT} \ - CLM_SCRIPTDIR=$bl_dir \ - $bl_dir/TSMCFGtools.sh $1 $2 $3 - rc=$? - if [ $rc -ne 0 ]; then - echo "TBLCFGtools.sh: error from *baseline* TSMCFGtools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 5 - fi -fi - -echo "TBLCFGtools.sh: starting b4b comparisons " -files_to_compare=`cd ${CLM_TESTDIR}/TSMCFGtools.$tool.$2.$3; ls *.nc` -if [ -z "${files_to_compare}" ] && [ "$debug" != "YES" ]; then - echo "TBLCFGtools.sh: error locating files to compare" - echo "FAIL.job${JOBID}" > TestStatus - exit 6 -fi - -all_comparisons_good="TRUE" -for compare_file in ${files_to_compare}; do - - env CPRNC_OPT="-m" \ - ${CLM_SCRIPTDIR}/CLM_compare.sh \ - ${BL_TESTDIR}/TSMCFGtools.$tool.$2.$3/${compare_file} \ - ${CLM_TESTDIR}/TSMCFGtools.$tool.$2.$3/${compare_file} - rc=$? 
- mv cprnc.out cprnc.${compare_file}.out - if [ $rc -eq 0 ]; then - echo "TBLCFGtools.sh: comparison successful; output in ${rundir}/cprnc.${compare_file}.out" - else - echo "TBLCFGtools.sh: error from CLM_compare.sh= $rc; see ${rundir}/cprnc.${compare_file}.out for details -" - all_comparisons_good="FALSE" - fi -done - -if [ ${all_comparisons_good} = "TRUE" ]; then - echo "TBLCFGtools.sh: baseline test passed" - echo "PASS" > TestStatus - if [ $CLM_RETAIN_FILES != "TRUE" ]; then - echo "TBLCFGtools.sh: removing some unneeded files to save disc space" - rm *.nc - rm *.r* - fi -else - echo "TBLCFGtools.sh: at least one file comparison did not pass" - echo "FAIL.job${JOBID}" > TestStatus - exit 7 -fi - -exit 0 diff --git a/test/tools/TBLscript_tools.sh b/test/tools/TBLscript_tools.sh deleted file mode 100755 index d05492c687..0000000000 --- a/test/tools/TBLscript_tools.sh +++ /dev/null @@ -1,122 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 3 ]; then - echo "TBLscript_tools.sh: incorrect number of input arguments" - exit 1 -fi - -if [ -z "$BL_ROOT" ] && [ -z "$BL_TESTDIR" ]; then - echo "TBLscript_tools.sh: no environment variables set for baseline test - will skip" - exit 255 -fi - -test_name=TBLscript_tools.$1.$2.$3 - -if [ -f ${CLM_TESTDIR}/${test_name}/TestStatus ]; then - if grep -c PASS ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TBLscript_tools.sh: smoke test has already passed; results are in " - echo " ${CLM_TESTDIR}/${test_name}" - exit 0 - elif grep -c GEN ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TBLscript_tools.sh: test already generated" - else - read fail_msg < ${CLM_TESTDIR}/${test_name}/TestStatus - prev_jobid=${fail_msg#*job} - - if [ $JOBID = $prev_jobid ]; then - echo "TBLscript_tools.sh: smoke test has already failed for this job - will not reattempt; " - echo " results are in: ${CLM_TESTDIR}/${test_name}" - exit 2 - else - echo "TBLscript_tools.sh: this smoke test failed under job ${prev_jobid} - moving those results to " - echo " ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid and trying again" - cp -rp ${CLM_TESTDIR}/${test_name} ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid - fi - fi -fi - -rundir=${CLM_TESTDIR}/${test_name} -if [ -d ${rundir} ]; then - rm -r ${rundir} -fi -mkdir -p ${rundir} -if [ $? -ne 0 ]; then - echo "TBLscript_tools.sh: error, unable to create work subdirectory" - exit 3 -fi -cd ${rundir} - -echo "TBLscript_tools.sh: calling TSMscript_tools.sh to run $1 executable" -${CLM_SCRIPTDIR}/TSMscript_tools.sh $1 $2 $3 -rc=$? -if [ $rc -ne 0 ]; then - echo "TBLscript_tools.sh: error from TSMtools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 4 -fi - -if [ -n "${BL_ROOT}" ]; then - if [ -z "$BL_TESTDIR" ]; then - BL_TESTDIR=${CLM_TESTDIR}.bl - fi - echo "TBLscript_tools.sh: generating baseline data from root $BL_ROOT - results in $BL_TESTDIR" - - echo "TBLscript_tools.sh: calling ****baseline**** TSMtools.sh for smoke test" - bl_dir=`/bin/ls -1d ${BL_ROOT}/test/tools` - env CLM_TESTDIR=${BL_TESTDIR} \ - CLM_SCRIPTDIR=$bl_dir \ - CLM_ROOT=$BL_ROOT \ - CTSM_ROOT=$BL_ROOT \ - CIME_ROOT=$BL_ROOT/cime \ - $bl_dir/TSMscript_tools.sh $1 $2 $3 - rc=$? 
- if [ $rc -ne 0 ]; then - echo "TBLscript_tools.sh: error from *baseline* TSMscript_tools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 5 - fi -fi - -echo "TBLscript_tools.sh: starting b4b comparisons " -files_to_compare=`cd ${CLM_TESTDIR}/TSMscript_tools.$1.$2.$3; ls *.nc` -if [ -z "${files_to_compare}" ] && [ "$debug" != "YES" ]; then - echo "TBLscript_tools.sh: error locating files to compare" - echo "FAIL.job${JOBID}" > TestStatus - exit 6 -fi - -all_comparisons_good="TRUE" -for compare_file in ${files_to_compare}; do - - env CPRNC_OPT="-m" \ - ${CLM_SCRIPTDIR}/CLM_compare.sh \ - ${BL_TESTDIR}/TSMscript_tools.$1.$2.$3/${compare_file} \ - ${CLM_TESTDIR}/TSMscript_tools.$1.$2.$3/${compare_file} - rc=$? - mv cprnc.out cprnc.${compare_file}.out - if [ $rc -eq 0 ]; then - echo "TBLscript_tools.sh: comparison successful; output in ${rundir}/cprnc.${compare_file}.out" - else - echo "TBLscript_tools.sh: error from CLM_compare.sh= $rc; see ${rundir}/cprnc.${compare_file}.out for details" - all_comparisons_good="FALSE" - fi -done - -if [ ${all_comparisons_good} = "TRUE" ]; then - echo "TBLscript_tools.sh: baseline test passed" - echo "PASS" > TestStatus - if [ $CLM_RETAIN_FILES != "TRUE" ]; then - echo "TBLscript_tools.sh: removing some unneeded files to save disc space" - rm *.nc - rm *.r* - fi -else - echo "TBLscript_tools.sh: at least one file comparison did not pass" - echo "FAIL.job${JOBID}" > TestStatus - exit 7 -fi - - - -exit 0 diff --git a/test/tools/TBLtools.sh b/test/tools/TBLtools.sh deleted file mode 100755 index 555ea7d1be..0000000000 --- a/test/tools/TBLtools.sh +++ /dev/null @@ -1,119 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 3 ]; then - echo "TBLtools.sh: incorrect number of input arguments" - exit 1 -fi - -if [ -z "$BL_ROOT" ] && [ -z "$BL_TESTDIR" ]; then - echo "TBL.sh: no environment variables set for baseline test - will skip" - exit 255 -fi - -test_name=TBLtools.$1.$2.$3 - -if [ -f ${CLM_TESTDIR}/${test_name}/TestStatus ]; then - if grep -c PASS ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TBLtools.sh: smoke test has already passed; results are in " - echo " ${CLM_TESTDIR}/${test_name}" - exit 0 - elif grep -c GEN ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TBLtools.sh: test already generated" - else - read fail_msg < ${CLM_TESTDIR}/${test_name}/TestStatus - prev_jobid=${fail_msg#*job} - - if [ $JOBID = $prev_jobid ]; then - echo "TBLtools.sh: smoke test has already failed for this job - will not reattempt; " - echo " results are in: ${CLM_TESTDIR}/${test_name}" - exit 2 - else - echo "TBLtools.sh: this smoke test failed under job ${prev_jobid} - moving those results to " - echo " ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid and trying again" - cp -rp ${CLM_TESTDIR}/${test_name} ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid - fi - fi -fi - -rundir=${CLM_TESTDIR}/${test_name} -if [ -d ${rundir} ]; then - rm -r ${rundir} -fi -mkdir -p ${rundir} -if [ $? -ne 0 ]; then - echo "TBLtools.sh: error, unable to create work subdirectory" - exit 3 -fi -cd ${rundir} - -echo "TBLtools.sh: calling TSMtools.sh to run $1 executable" -${CLM_SCRIPTDIR}/TSMtools.sh $1 $2 $3 -rc=$? 
-if [ $rc -ne 0 ]; then - echo "TBLtools.sh: error from TSMtools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 4 -fi - -if [ -n "${BL_ROOT}" ]; then - if [ -z "$BL_TESTDIR" ]; then - BL_TESTDIR=${CLM_TESTDIR}.bl - fi - echo "TBLtools.sh: generating baseline data from root $BL_ROOT - results in $BL_TESTDIR" - - echo "TBLtools.sh: calling ****baseline**** TSMtools.sh for smoke test" - bl_dir=`/bin/ls -1d ${BL_ROOT}/test/tools` - env CLM_TESTDIR=${BL_TESTDIR} \ - CLM_ROOT=${BL_ROOT} \ - CLM_SCRIPTDIR=$bl_dir \ - $bl_dir/TSMtools.sh $1 $2 $3 - rc=$? - if [ $rc -ne 0 ]; then - echo "TBLtools.sh: error from *baseline* TSMtools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 5 - fi -fi - -echo "TBLtools.sh: starting b4b comparisons " -files_to_compare=`cd ${CLM_TESTDIR}/TSMtools.$1.$2.$3; ls *.nc` -if [ -z "${files_to_compare}" ] && [ "$debug" != "YES" ]; then - echo "TBLtools.sh: error locating files to compare" - echo "FAIL.job${JOBID}" > TestStatus - exit 6 -fi - -all_comparisons_good="TRUE" -for compare_file in ${files_to_compare}; do - - env CPRNC_OPT="-m" \ - ${CLM_SCRIPTDIR}/CLM_compare.sh \ - ${BL_TESTDIR}/TSMtools.$1.$2.$3/${compare_file} \ - ${CLM_TESTDIR}/TSMtools.$1.$2.$3/${compare_file} - rc=$? - mv cprnc.out cprnc.${compare_file}.out - if [ $rc -eq 0 ]; then - echo "TBLtools.sh: comparison successful; output in ${rundir}/cprnc.${compare_file}.out" - else - echo "TBLtools.sh: error from CLM_compare.sh= $rc; see ${rundir}/cprnc.${compare_file}.out for details -" - all_comparisons_good="FALSE" - fi -done - -if [ ${all_comparisons_good} = "TRUE" ]; then - echo "TBLtools.sh: baseline test passed" - echo "PASS" > TestStatus - if [ $CLM_RETAIN_FILES != "TRUE" ]; then - echo "TBLtools.sh: removing some unneeded files to save disc space" - rm *.nc - rm *.r* - fi -else - echo "TBLtools.sh: at least one file comparison did not pass" - echo "FAIL.job${JOBID}" > TestStatus - exit 7 -fi - -exit 0 diff --git a/test/tools/TCBCFGtools.sh b/test/tools/TCBCFGtools.sh deleted file mode 100755 index 5c0b015123..0000000000 --- a/test/tools/TCBCFGtools.sh +++ /dev/null @@ -1,135 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 2 ]; then - echo "TCBCFGtools.sh: incorrect number of input arguments" - exit 1 -fi - -tool=$(basename $1) -test_name=TCBCFGtools.$tool.$2 - -if [ -f ${CLM_TESTDIR}/${test_name}/TestStatus ]; then - if grep -c PASS ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TCBCFGtools.sh: build test has already passed; results are in " - echo " ${CLM_TESTDIR}/${test_name}" - exit 0 - elif grep -c GEN ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TCBCFGtools.sh: test already generated" - else - read fail_msg < ${CLM_TESTDIR}/${test_name}/TestStatus - prev_jobid=${fail_msg#*job} - - if [ $JOBID = $prev_jobid ]; then - echo "TCBCFGtools.sh: build test has already failed for this job - will not reattempt; " - echo " results are in: ${CLM_TESTDIR}/${test_name}" - exit 2 - else - echo "TCBCFGtools.sh: this build test failed under job ${prev_jobid} - moving those results to " - echo " ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid and trying again" - cp -rp ${CLM_TESTDIR}/${test_name} ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid - fi - fi -fi - -cfgdir=`ls -1d ${CLM_ROOT}/tools/${1}` -if [ $? -ne 0 ];then - cfgdir=`ls -1d ${CIME_ROOT}/tools/mapping/${1}*` - echo "use: $cfgdir" -fi -blddir=${CLM_TESTDIR}/${test_name}/src -if [ -d ${blddir} ]; then - rm -r ${blddir} -fi -mkdir -p ${blddir} -if [ $? 
-ne 0 ]; then - echo "TCBCFGtools.sh: error, unable to create work subdirectory" - exit 3 -fi -cd ${blddir} - -echo "TCBCFGtools.sh: building $tool executable; output in ${blddir}/test.log" -# -# Copy build files over -# -cp $cfgdir/src/Makefile . -cp $cfgdir/src/Filepath . -# -# Add cfgdir path to beginning of each path in Filepath -# -touch Filepath -while read filepath_arg; do - echo "${cfgdir}/src/${filepath_arg}" >> Filepath -done < ${cfgdir}/src/Filepath - -# -# Figure out configuration -# -if [ ! -f ${CLM_SCRIPTDIR}/config_files/$tool ]; then - echo "TCB.sh: configure options file ${CLM_SCRIPTDIR}/config_files/$tool not found" - echo "FAIL.job${JOBID}" > TestStatus - exit 4 -fi - -##construct string of args to configure -config_string=" " -while read config_arg; do - config_string="${config_string}${config_arg} " -done < ${CLM_SCRIPTDIR}/config_files/$tool - -if [ "$TOOLSLIBS" != "" ]; then - export SLIBS=$TOOLSLIBS -fi -echo "env CIMEROOT=$CLM_ROOT/cime COMPILER=$CESM_COMP $config_string $CLM_ROOT/cime/tools/configure --macros-format Makefile --machine $CESM_MACH $TOOLS_CONF_STRING" -env CIMEROOT=$CLM_ROOT/cime COMPILER=$CESM_COMP $config_string $CLM_ROOT/cime/tools/configure --macros-format Makefile --machine $CESM_MACH $TOOLS_CONF_STRING >> test.log 2>&1 -rc=$? -if [ $rc -ne 0 ]; then - echo "TCBCFGtools.sh: configure failed, error from configure= $rc" - echo "TCBCFGtools.sh: see ${blddir}/test.log for details" - echo "FAIL.job${JOBID}" > TestStatus - exit 5 -fi - -. $INITMODULES -. ./.env_mach_specific.sh - -attempt=1 -still_compiling="TRUE" -while [ $still_compiling = "TRUE" ]; do - - echo "TCBCFGtools.sh: call to make:" - echo " ${MAKE_CMD} USER_CPPDEFS=-DLINUX" - if [ "$debug" != "YES" ]; then - ${MAKE_CMD} USER_CPPDEFS=-DLINUX >> test.log 2>&1 - status="PASS" - rc=$? 
- else - status="GEN" - rc=0 - fi - if [ $rc -eq 0 ]; then - echo "TCBCFGtools.sh: make was successful" - echo "TCBCFGtools.sh: configure and build test passed" - echo "$status" > TestStatus - if [ $CLM_RETAIN_FILES != "TRUE" ]; then - echo "TCBCFGtools.sh: removing some unneeded files to save disc space" - rm *.o - rm *.mod - fi - still_compiling="FALSE" - elif [ $attempt -lt 10 ] && \ - grep -c "LICENSE MANAGER PROBLEM" test.log > /dev/null; then - attempt=`expr $attempt + 1` - echo "TCBCFGtools.sh: encountered License Manager Problem; launching attempt #$attempt" - else - echo "TCBCFGtools.sh: clm build failed, error from make= $rc" - echo "TCBCFGtools.sh: see ${blddir}/test.log for details" - echo "FAIL.job${JOBID}" > TestStatus - exit 6 - fi -done -if [ "$TOOLSLIBS" != "" ]; then - export -n SLIBS -fi - -exit 0 diff --git a/test/tools/TCBtools.sh b/test/tools/TCBtools.sh deleted file mode 100755 index 205b2e9da0..0000000000 --- a/test/tools/TCBtools.sh +++ /dev/null @@ -1,121 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 2 ]; then - echo "TCBtools.sh: incorrect number of input arguments" - exit 1 -fi - -test_name=TCBtools.$1.$2 - -if [ -f ${CLM_TESTDIR}/${test_name}/TestStatus ]; then - if grep -c PASS ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TCBtools.sh: build test has already passed; results are in " - echo " ${CLM_TESTDIR}/${test_name}" - exit 0 - elif grep -c GEN ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TCBtools.sh: test already generated" - else - read fail_msg < ${CLM_TESTDIR}/${test_name}/TestStatus - prev_jobid=${fail_msg#*job} - - if [ $JOBID = $prev_jobid ]; then - echo "TCBtools.sh: build test has already failed for this job - will not reattempt; " - echo " results are in: ${CLM_TESTDIR}/${test_name}" - exit 2 - else - echo "TCBtools.sh: this build test failed under job ${prev_jobid} - moving those results to " - echo " ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid and trying again" - cp -rp ${CLM_TESTDIR}/${test_name} ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid - fi - fi -fi - -cfgdir=`ls -1d ${CLM_ROOT}/tools/$1` -blddir=${CLM_TESTDIR}/${test_name}/src -if [ -d ${blddir} ]; then - rm -r ${blddir} -fi -mkdir -p ${blddir} -if [ $? -ne 0 ]; then - echo "TCBtools.sh: error, unable to create work subdirectory" - exit 3 -fi -cd ${blddir} - -echo "TCBtools.sh: building $1 executable; output in ${blddir}/test.log" -# -# Copy build files over -# -cp $cfgdir/src/Makefile . -cp $cfgdir/src/Srcfiles . -cp $cfgdir/src/Mkdepends . -cp $cfgdir/src/Makefile.common . -# -# Add cfgdir path to beginning of each path in Filepath -# -touch Filepath -while read filepath_arg; do - echo "${cfgdir}/src/${filepath_arg}" >> Filepath -done < ${cfgdir}/src/Filepath - -# -# Figure out configuration -# -if [ ! -f ${CLM_SCRIPTDIR}/config_files/$2 ]; then - echo "TCB.sh: configure options file ${CLM_SCRIPTDIR}/config_files/$2 not found" - echo "FAIL.job${JOBID}" > TestStatus - exit 4 -fi - -##construct string of args to configure -config_string="$TOOLS_MAKE_STRING TOOLROOT=$cfgdir " -while read config_arg; do - config_string="${config_string}${config_arg} " -done < ${CLM_SCRIPTDIR}/config_files/$2 - -attempt=1 -still_compiling="TRUE" -if [ "$TOOLSLIBS" != "" ]; then - export SLIBS=$TOOLSLIBS -fi -while [ $still_compiling = "TRUE" ]; do - - ln -s Macros.make Macros - - echo "TCBtools.sh: call to make:" - echo " ${MAKE_CMD} ${config_string} " - if [ "$debug" != "YES" ]; then - ${MAKE_CMD} ${config_string} >> test.log 2>&1 - status="PASS" - rc=$(( $rc + $? 
)) - else - status="GEN" - rc=0 - fi - if [ $rc -eq 0 ]; then - echo "TCBtools.sh: make was successful" - echo "TCBtools.sh: configure and build test passed" - echo "$status" > TestStatus - if [ $CLM_RETAIN_FILES != "TRUE" ]; then - echo "TCBtools.sh: removing some unneeded files to save disc space" - rm *.o - rm *.mod - fi - still_compiling="FALSE" - elif [ $attempt -lt 10 ] && \ - grep -c "LICENSE MANAGER PROBLEM" test.log > /dev/null; then - attempt=`expr $attempt + 1` - echo "TCBtools.sh: encountered License Manager Problem; launching attempt #$attempt" - else - echo "TCBtools.sh: clm build failed, error from make= $rc" - echo "TCBtools.sh: see ${CLM_TESTDIR}/${test_name}/test.log for details" - echo "FAIL.job${JOBID}" > TestStatus - exit 6 - fi -done -if [ "$TOOLSLIBS" != "" ]; then - export -n SLIBS -fi - -exit 0 diff --git a/test/tools/TSMCFGtools.sh b/test/tools/TSMCFGtools.sh deleted file mode 100755 index b667a4c6ec..0000000000 --- a/test/tools/TSMCFGtools.sh +++ /dev/null @@ -1,113 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 3 ]; then - echo "TSMCFGtools.sh: incorrect number of input arguments" - exit 1 -fi - -tool=$(basename $1) -test_name=TSMCFGtools.$tool.$2.$3 - - -if [ -z "$CLM_RERUN" ]; then - CLM_RERUN="no" -fi - -if [ "$CLM_RERUN" != "yes" ] && [ -f ${CLM_TESTDIR}/${test_name}/TestStatus ]; then - if grep -c PASS ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TSMCFGtools.sh: smoke test has already passed; results are in " - echo " ${CLM_TESTDIR}/${test_name}" - exit 0 - elif grep -c GEN ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TSMCFGtools.sh: test already generated" - else - read fail_msg < ${CLM_TESTDIR}/${test_name}/TestStatus - prev_jobid=${fail_msg#*job} - - if [ $JOBID = $prev_jobid ]; then - echo "TSMCFGtools.sh: smoke test has already failed for this job - will not reattempt; " - echo " results are in: ${CLM_TESTDIR}/${test_name}" - exit 2 - else - echo "TSMCFGtools.sh: this smoke test failed under job ${prev_jobid} - moving those results to " - echo " ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid and trying again" - cp -rp ${CLM_TESTDIR}/${test_name} ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid - fi - fi -fi - -cfgdir=`ls -1d ${CLM_ROOT}/tools/${1}*` -rundir=${CLM_TESTDIR}/${test_name} -if [ -d ${rundir} ]; then - rm -r ${rundir} -fi -mkdir -p ${rundir} -if [ $? -ne 0 ]; then - echo "TSMCFGtools.sh: error, unable to create work subdirectory" - exit 3 -fi -cd ${rundir} - -echo "TSMCFGtools.sh: calling TCBCFGtools.sh to prepare $tool executable" -${CLM_SCRIPTDIR}/TCBCFGtools.sh $1 $2 -rc=$? -if [ $rc -ne 0 ]; then - echo "TSMCFGtools.sh: error from TCBtools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 4 -fi - -echo "TSMCFGtools.sh: running $tool output in ${rundir}/test.log" - -if [ "$2" = "CFGtools__o" ] || [ "$2" = "CFGtools__do" ]; then - toolrun="env OMP_NUM_THREADS=${CLM_THREADS} ${CLM_TESTDIR}/TCBCFGtools.$tool.$2/${tool}*" -else - toolrun="${CLM_TESTDIR}/TCBCFGtools.$tool.$2/${tool}*" -fi - -runfile="${CLM_SCRIPTDIR}/nl_files/$tool.$3" -if [ ! -f "${runfile}" ]; then - echo "TSMCFGtools.sh: error ${runfile} input run file not found" - echo "FAIL.job${JOBID}" > TestStatus - exit 5 -fi - -echo "Run file type = ${3#*.}" -if [ ${3#*.} == "runoptions" ]; then - runopts=`cat ${runfile} | sed -e "s|CSMDATA|$CSMDATA|g"` - echo "$toolrun $runopts" - cp $cfgdir/*.nc . - if [ "$debug" != "YES" ] && [ "$compile_only" != "YES" ]; then - $toolrun $runopts >> test.log 2>&1 - rc=$? 
- status="PASS" - else - echo "Successfully created file" > test.log - status="GEN" - rc=0 - fi -else - echo "$toolrun < ${runfile}" - if [ "$debug" != "YES" ] && [ "$compile_only" != "YES" ]; then - $toolrun < ${runfile} >> test.log 2>&1 - rc=$? - status="PASS" - else - echo "Successfully created file" > test.log - status="GEN" - rc=0 - fi -fi - -if [ $rc -eq 0 ] && grep -ci "Successfully created " test.log > /dev/null; then - echo "TSMCFGtools.sh: smoke test passed" - echo "$status" > TestStatus -else - echo "TSMCFGtools.sh: error running $tool, error= $rc" - echo "TSMCFGtools.sh: see ${CLM_TESTDIR}/${test_name}/test.log for details" - echo "FAIL.job${JOBID}" > TestStatus - exit 6 -fi - -exit 0 diff --git a/test/tools/TSMscript_tools.sh b/test/tools/TSMscript_tools.sh deleted file mode 100755 index 943fec97f2..0000000000 --- a/test/tools/TSMscript_tools.sh +++ /dev/null @@ -1,98 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 3 ]; then - echo "TSMscript_tools.sh: incorrect number of input arguments" - exit 1 -fi - -test_name=TSMscript_tools.$1.$2.$3 - -if [ -f ${CLM_TESTDIR}/${test_name}/TestStatus ]; then - if grep -c PASS ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TSMscript_tools.sh: smoke test has already passed; results are in " - echo " ${CLM_TESTDIR}/${test_name}" - exit 0 - elif grep -c GEN ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TSMscript_tools.sh: test already generated" - else - read fail_msg < ${CLM_TESTDIR}/${test_name}/TestStatus - prev_jobid=${fail_msg#*job} - - if [ $JOBID = $prev_jobid ]; then - echo "TSMscript_tools.sh: smoke test has already failed for this job - will not reattempt; " - echo " results are in: ${CLM_TESTDIR}/${test_name}" - exit 2 - else - echo "TSMscript_tools.sh: this smoke test failed under job ${prev_jobid} - moving those results to " - echo " ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid and trying again" - cp -rp ${CLM_TESTDIR}/${test_name} ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid - fi - fi -fi - -cfgdir=`ls -1d ${CLM_ROOT}/tools/$1` -rundir=${CLM_TESTDIR}/${test_name} -if [ -d ${rundir} ]; then - rm -r ${rundir} -fi -mkdir -p ${rundir} -if [ $? -ne 0 ]; then - echo "TSMscript_tools.sh: error, unable to create work subdirectory" - exit 3 -fi -cd ${rundir} - -optfile=${3%^*} -cfgfile=${3#*^} - -if [ "$optfile" != "$3" ]; then - echo "TSMscript_tools.sh: calling TCBtools.sh to prepare $1 executable" - ${CLM_SCRIPTDIR}/TCBtools.sh $1 $cfgfile - rc=$? - if [ $rc -ne 0 ]; then - echo "TSMscript_tools.sh: error from TCBtools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 4 - fi - tcbtools=${CLM_TESTDIR}/TCBtools.$1.$cfgfile -else - tcbtools="$rundir" -fi - -scopts=`cat ${CLM_SCRIPTDIR}/nl_files/$optfile | sed -e "s|CSMDATA|$CSMDATA|g" | sed -e "s|EXEDIR|$tcbtools|g" | sed -e "s|CFGDIR|$cfgdir|g"` -scopts=`echo $scopts | sed -e "s|CTSM_ROOT|$CTSM_ROOT|g" | sed -e "s|CIME_ROOT|$CIME_ROOT|g"` - -echo "TSMscript_tools.sh: running ${cfgdir}/$2 with $scopts; output in ${rundir}/test.log" - -if [ ! -f "${cfgdir}/$2" ]; then - echo "TSMscript_tools.sh: error ${cfgdir}/$2 input script not found" - echo "FAIL.job${JOBID}" > TestStatus - exit 5 -fi - -if [ "$debug" != "YES" ] && [ "$compile_only" != "YES" ]; then - ${cfgdir}/$2 $scopts >> test.log 2>&1 - rc=$? 
- status="PASS" -else - echo "success" > test.log - status="GEN" - rc=0 -fi - -if [ $rc -eq 0 ] && grep -ci "Successfully " test.log > /dev/null; then - echo "TSMscript_tools.sh: smoke test passed" - echo "$status" > TestStatus - # Copy files from subdirectories up... - # (use hard links rather than symbolic links because 'ln -s' does funny - # things when there are no matching files) - ln */*.nc */*/*.nc . -else - echo "TSMscript_tools.sh: error running $2, error= $rc" - echo "TSMscript_tools.sh: see ${CLM_TESTDIR}/${test_name}/test.log for details" - echo "FAIL.job${JOBID}" > TestStatus - exit 6 -fi - -exit 0 diff --git a/test/tools/TSMtools.sh b/test/tools/TSMtools.sh deleted file mode 100755 index 33a2316973..0000000000 --- a/test/tools/TSMtools.sh +++ /dev/null @@ -1,117 +0,0 @@ -#!/bin/sh -# - -if [ $# -ne 3 ]; then - echo "TSMtools.sh: incorrect number of input arguments" - exit 1 -fi - -test_name=TSMtools.$1.$2.$3 - -if [ -z "$CLM_RERUN" ]; then - CLM_RERUN="no" -fi - -if [ "$CLM_RERUN" != "yes" ] && [ -f ${CLM_TESTDIR}/${test_name}/TestStatus ]; then - if grep -c PASS ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TSMtools.sh: smoke test has already passed; results are in " - echo " ${CLM_TESTDIR}/${test_name}" - exit 0 - elif grep -c GEN ${CLM_TESTDIR}/${test_name}/TestStatus > /dev/null; then - echo "TSMtools.sh: test already generated" - else - read fail_msg < ${CLM_TESTDIR}/${test_name}/TestStatus - prev_jobid=${fail_msg#*job} - - if [ $JOBID = $prev_jobid ]; then - echo "TSMtools.sh: smoke test has already failed for this job - will not reattempt; " - echo " results are in: ${CLM_TESTDIR}/${test_name}" - exit 2 - else - echo "TSMtools.sh: this smoke test failed under job ${prev_jobid} - moving those results to " - echo " ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid and trying again" - cp -rp ${CLM_TESTDIR}/${test_name} ${CLM_TESTDIR}/${test_name}_FAIL.job$prev_jobid - fi - fi -fi - -cfgdir=`ls -1d ${CLM_ROOT}/tools/$1` -rundir=${CLM_TESTDIR}/${test_name} -if [ -d ${rundir} ]; then - rm -r ${rundir} -fi -mkdir -p ${rundir} -if [ $? -ne 0 ]; then - echo "TSMtools.sh: error, unable to create work subdirectory" - exit 3 -fi -cd ${rundir} - -echo "Copy any text files over" -cp $cfgdir/*.txt $rundir - -echo "TSMtools.sh: calling TCBtools.sh to prepare $1 executable" -${CLM_SCRIPTDIR}/TCBtools.sh $1 $2 -rc=$? -if [ $rc -ne 0 ]; then - echo "TSMtools.sh: error from TCBtools.sh= $rc" - echo "FAIL.job${JOBID}" > TestStatus - exit 4 -fi - -echo "TSMtools.sh: running $1; output in ${rundir}/test.log" - -if [ "$3" = "tools__o" ] || [ "$3" = "tools__do" ]; then - toolrun="env OMP_NUM_THREADS=${CLM_THREADS} ${CLM_TESTDIR}/TCBtools.$1.$2/$1" -else - toolrun="${CLM_TESTDIR}/TCBtools.$1.$2/$1" -fi - -runfile="${cfgdir}/$1.$3" - -if [ ! -f "${runfile}" ]; then - runfile="${CLM_SCRIPTDIR}/nl_files/$1.$3" - if [ ! -f "${runfile}" ]; then - echo "TSMtools.sh: error ${runfile} input run file not found" - echo "FAIL.job${JOBID}" > TestStatus - exit 5 - fi -fi - -echo "Run file type = ${3#*.}" -if [ ${3#*.} == "runoptions" ]; then - echo "$toolrun "`cat ${runfile}` - cp $cfgdir/*.nc . - if [ "$debug" != "YES" ] && [ "$compile_only" != "YES" ]; then - $toolrun `cat ${runfile}` >> test.log 2>&1 - rc=$? - status="PASS" - else - echo "Successfully created file" > test.log - status="GEN" - rc=0 - fi -else - echo "$toolrun < ${runfile}" - if [ "$debug" != "YES" ] && [ "$compile_only" != "YES" ]; then - $toolrun < ${runfile} >> test.log 2>&1 - rc=$? 
-      status="PASS"
-   else
-      echo "Successfully created file" > test.log
-      status="GEN"
-      rc=0
-   fi
-fi
-
-if [ $rc -eq 0 ] && grep -ci "Successfully created " test.log > /dev/null; then
-    echo "TSMtools.sh: smoke test passed"
-    echo "$status" > TestStatus
-else
-    echo "TSMtools.sh: error running $1, error= $rc"
-    echo "TSMtools.sh: see ${CLM_TESTDIR}/${test_name}/test.log for details"
-    echo "FAIL.job${JOBID}" > TestStatus
-    exit 6
-fi
-
-exit 0
diff --git a/test/tools/config_files/CFGtools__ds b/test/tools/config_files/CFGtools__ds
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/test/tools/config_files/README b/test/tools/config_files/README
deleted file mode 100644
index bdfe5e0dd0..0000000000
--- a/test/tools/config_files/README
+++ /dev/null
@@ -1,9 +0,0 @@
-_do => debug on, omp only on
-_ds => debug on, serial mode (neither mpi nor omp)
-
-_o => debug off, omp only on
-_s => debug off, serial mode (neither mpi nor omp)
-
-tools__ds => options for tools, debug on, serial mode
-tools__do => options for tools, debug on, omp only on
-tools__o => options for tools, debug off, omp only on
diff --git a/test/tools/config_files/tools__do b/test/tools/config_files/tools__do
deleted file mode 100644
index 7f061ed65d..0000000000
--- a/test/tools/config_files/tools__do
+++ /dev/null
@@ -1 +0,0 @@
-SMP=TRUE OPT=FALSE
diff --git a/test/tools/config_files/tools__ds b/test/tools/config_files/tools__ds
deleted file mode 100644
index cf2d414b28..0000000000
--- a/test/tools/config_files/tools__ds
+++ /dev/null
@@ -1 +0,0 @@
-OPT=FALSE
diff --git a/test/tools/config_files/tools__o b/test/tools/config_files/tools__o
deleted file mode 100644
index 8821e0bc5a..0000000000
--- a/test/tools/config_files/tools__o
+++ /dev/null
@@ -1 +0,0 @@
-SMP=TRUE OPT=TRUE
diff --git a/test/tools/config_files/tools__s b/test/tools/config_files/tools__s
deleted file mode 100644
index 507973f8be..0000000000
--- a/test/tools/config_files/tools__s
+++ /dev/null
@@ -1 +0,0 @@
-OPT=TRUE
diff --git a/test/tools/gen_test_table.sh b/test/tools/gen_test_table.sh
deleted file mode 100755
index 0791ad0447..0000000000
--- a/test/tools/gen_test_table.sh
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/bin/sh
-#
-
-# this script, when executed in the directory containing the test-driver
-# scripts (~/test/system) will loop through the default test
-# lists for pre and post tag testing of clm and create an html file
-# (test_table.html) with the specifics of each test detailed
-
-outfile="./test_table.html"
-
-echo '<?xml version="1.0" encoding="UTF-8"?>' > $outfile
-echo '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">' >> $outfile
-echo '<html xmlns="http://www.w3.org/1999/xhtml">' >> $outfile
-echo '<head>' >> $outfile
-echo '<title>CLM Testing Information Page</title>' >> $outfile
-echo '</head>' >> $outfile
-echo '<body>' >> $outfile
-
-#########################################################################################
-for input_file in `ls tests_*` ; do
-    echo '<table>' >> $outfile
-    echo "<caption>$input_file</caption>" >> $outfile
-    echo "<tr>" >> $outfile
-    echo "<th>test#</th>" >> $outfile
-    echo "<th>testid</th>" >> $outfile
-    echo "<th>test script</th>" >> $outfile
-    echo "<th>arg1</th>" >> $outfile
-    echo "<th>arg2</th>" >> $outfile
-    echo "<th>arg3</th>" >> $outfile
-    echo "</tr>" >> $outfile
-
-    test_list=""
-    while read input_line; do
-       test_list="${test_list}${input_line} "
-    done < ./${input_file}
-
-    count=0
-    ##loop through the tests of input file
-    for test_id in ${test_list}; do
-        echo "<tr>" >> $outfile
-        count=`expr $count + 1`
-        while [ ${#count} -lt 3 ]; do
-            count="0${count}"
-        done
-        echo "<td>$count</td>" >> $outfile
-
-        master_line=`grep $test_id ./input_tests_master`
-        dir=""
-        for arg in ${master_line}; do
-            arg1=${arg%^*}
-            arg2=${arg#*^}
-            if [ -d ../../tools/$arg ]; then
-                dir=$arg
-            elif [ -f ./nl_files/$arg ]; then
-                echo "<td><a href=\"./nl_files/$arg\">$arg</a></td>" >> $outfile
-            elif [ -f ./config_files/$arg ]; then
-                echo "<td><a href=\"./config_files/$arg\">$arg</a></td>" >> $outfile
-            elif [ -f ./nl_files/$arg1 ] && [ -f ./nl_files/$arg2 ]; then
-                echo "<td><a href=\"./nl_files/$arg1\">$arg1</a>^" \
-                     "<a href=\"./nl_files/$arg2\">$arg2</a></td>" >> $outfile
-            elif [ -f ./nl_files/$arg1 ] && [ -f ./config_files/$arg2 ]; then
-                echo "<td><a href=\"./nl_files/$arg1\">$arg1</a>^" \
-                     "<a href=\"./config_files/$arg2\">$arg2</a></td>" >> $outfile
-            elif [ -f ../../tools/$dir/$dir.$arg ]; then
-                echo "<td><a href=\"../../tools/$dir/$dir.$arg\">$arg</a></td>" >> $outfile
-            else
-                echo "<td>$arg</td>" >> $outfile
-            fi
-        done
-        echo '</tr>' >> $outfile
-    done
-    echo '</table>' >> $outfile
-    echo '<pre>' >> $outfile
-    echo ' ' >> $outfile
-    echo '</pre>' >> $outfile
-done
-echo '</body>' >> $outfile
-echo '</html>' >> $outfile
-
-exit 0
diff --git a/test/tools/get_cprnc_diffs.sh b/test/tools/get_cprnc_diffs.sh
deleted file mode 100755
index 360220cb71..0000000000
--- a/test/tools/get_cprnc_diffs.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/bash
-
-# This script extracts lines from the output of cprnc that tell us
-# which variables differ between two files
-#
-# Usage: get_cprnc_diffs filename
-
-# ----------------------------------------------------------------------
-# SET PARAMETERS HERE
-# ----------------------------------------------------------------------
-
-# maximum number of differences to extract from the cprnc output
-maxdiffs=200
-
-# ----------------------------------------------------------------------
-# LOCAL FUNCTIONS DEFINED HERE
-# ----------------------------------------------------------------------
-
-# This function gets differences for one prefix (e.g., "RMS")
-# Usage: get_diffs prefix
-# (also uses $infile and $maxdiffs from the parent script)
-function get_diffs {
-   prefix=$1
-   outfile=${infile}.${prefix}.$$
-   grep "$prefix" $infile > $outfile
-   numlines=`wc -l $outfile | awk '{print $1}'`
-   if [ $numlines -gt $maxdiffs ]; then
-      echo "WARNING: Too many instances of $prefix - only printing last $maxdiffs"
-      tail -$maxdiffs $outfile
-   else
-      cat $outfile
-   fi
-   rm $outfile
-}
-
-# ----------------------------------------------------------------------
-# BEGIN MAIN SCRIPT
-# ----------------------------------------------------------------------
-
-# ----------------------------------------------------------------------
-# Handle command-line arguments
-# ----------------------------------------------------------------------
-
-if [[ $# -ne 1 ]]; then
-   echo "Usage: get_cprnc_diffs filename"
-   exit 1
-fi
-
-infile=$1
-
-# ----------------------------------------------------------------------
-# Do the processing
-# ----------------------------------------------------------------------
-
-get_diffs RMS
-get_diffs FILLDIFF
diff --git a/test/tools/input_tests_master b/test/tools/input_tests_master
deleted file mode 100644
index 7da8c19803..0000000000
--- a/test/tools/input_tests_master
+++ /dev/null
@@ -1,32 +0,0 @@
-
-
-smc#4 TSMscript_tools.sh mkprocdata_map mkprocdata_map_wrap mkprocdata_ne30_to_f19_I2000^tools__ds
-blc#4 TBLscript_tools.sh mkprocdata_map mkprocdata_map_wrap mkprocdata_ne30_to_f19_I2000^tools__ds
-
-sm0c1 TSMscript_tools.sh site_and_regional run_neon.py run_neon_OSBS
-bl0c1 TBLscript_tools.sh site_and_regional run_neon.py run_neon_OSBS
-sm0a1 TSMscript_tools.sh site_and_regional run_neon.py run_neon_YELL_PRISM
-bl0a1 TBLscript_tools.sh site_and_regional run_neon.py run_neon_YELL_PRISM
-
-smba1 TSMscript_tools.sh site_and_regional subset_data subset_data_YELL
-blba1 TBLscript_tools.sh site_and_regional subset_data subset_data_YELL
-smbb1 TSMscript_tools.sh site_and_regional subset_data subset_data_KONA
-blbb1 TBLscript_tools.sh site_and_regional subset_data subset_data_KONA
-smb81 TSMscript_tools.sh site_and_regional subset_data subset_data_US-UMB
-blb81 TBLscript_tools.sh site_and_regional subset_data subset_data_US-UMB
-smbh1 TSMscript_tools.sh site_and_regional subset_data subset_data_f09_1x1pt_townshipSD
-blbh1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_1x1pt_townshipSD
-smbd1 TSMscript_tools.sh site_and_regional subset_data subset_data_f09_58x45pt_SouthAmerica
-blbd1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_58x45pt_SouthAmerica
-smbe1 TSMscript_tools.sh
site_and_regional subset_data subset_data_f09_90x288pt_1850PanTropics -blbe1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_90x288pt_1850PanTropics -smbf1 TSMscript_tools.sh site_and_regional subset_data subset_data_f09_37x288pt_PanBoreal -blbf1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_37x288pt_PanBoreal -smbg1 TSMscript_tools.sh site_and_regional subset_data subset_data_f09_4x9pt_AlaskaTananaValley -blbg1 TBLscript_tools.sh site_and_regional subset_data subset_data_f09_4x9pt_AlaskaTananaValley - -sm901 TSMscript_tools.sh site_and_regional mesh_maker mesh_maker_fv09 -bl901 TBLscript_tools.sh site_and_regional mesh_maker mesh_maker_fv09 - -smaa2 TSMscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL -blaa2 TBLscript_tools.sh site_and_regional modify_singlept_site_neon.py modify_data_YELL diff --git a/test/tools/nl_files/mesh_maker_fv09 b/test/tools/nl_files/mesh_maker_fv09 deleted file mode 100644 index 7de951fee1..0000000000 --- a/test/tools/nl_files/mesh_maker_fv09 +++ /dev/null @@ -1 +0,0 @@ - --input CSMDATA/atm/datm7/domain.lnd.fv0.9x1.25_gx1v6.090309.nc --lat yc --lon xc --overwrite --mask mask --area area --verbose diff --git a/test/tools/nl_files/mkmapdata_if10 b/test/tools/nl_files/mkmapdata_if10 deleted file mode 100644 index f726ea34e7..0000000000 --- a/test/tools/nl_files/mkmapdata_if10 +++ /dev/null @@ -1 +0,0 @@ --r 10x15 --fast --batch diff --git a/test/tools/nl_files/mkmapdata_ne30np4 b/test/tools/nl_files/mkmapdata_ne30np4 deleted file mode 100644 index ae435ac2bc..0000000000 --- a/test/tools/nl_files/mkmapdata_ne30np4 +++ /dev/null @@ -1 +0,0 @@ --r ne30np4 --fast --batch diff --git a/test/tools/nl_files/mkprocdata_ne30_to_f19_I2000 b/test/tools/nl_files/mkprocdata_ne30_to_f19_I2000 deleted file mode 100644 index af85dcf226..0000000000 --- a/test/tools/nl_files/mkprocdata_ne30_to_f19_I2000 +++ /dev/null @@ -1 +0,0 @@ --i CSMDATA/lnd/clm2/test_mkprocdata_map/clm4054_ne30g16_I2000.clm2.h0.2000-01_c170430.nc -o ne30output_onf19grid.nc -m CSMDATA/lnd/clm2/test_mkprocdata_map/map_ne30np4_nomask_to_fv1.9x2.5_nomask_aave_da_c121107.nc -t CSMDATA/lnd/clm2/test_mkprocdata_map/clm4054_f19g16_I2000.clm2.h0.2000-01_c170430.nc -e EXEDIR diff --git a/test/tools/nl_files/modify_data_YELL b/test/tools/nl_files/modify_data_YELL deleted file mode 100644 index 0d180e8bf6..0000000000 --- a/test/tools/nl_files/modify_data_YELL +++ /dev/null @@ -1 +0,0 @@ ---neon_site YELL --surf_dir CSMDATA/lnd/clm2/surfdata_esmf/NEON --out_dir EXEDIR --inputdata-dir CSMDATA diff --git a/test/tools/nl_files/run_neon_OSBS b/test/tools/nl_files/run_neon_OSBS deleted file mode 100644 index 0c274b13ad..0000000000 --- a/test/tools/nl_files/run_neon_OSBS +++ /dev/null @@ -1 +0,0 @@ ---verbose --run-type ad --setup-only --neon-site OSBS diff --git a/test/tools/nl_files/run_neon_YELL_PRISM b/test/tools/nl_files/run_neon_YELL_PRISM deleted file mode 100644 index f5ebdf9fdf..0000000000 --- a/test/tools/nl_files/run_neon_YELL_PRISM +++ /dev/null @@ -1 +0,0 @@ ---verbose --run-type transient --setup-only --neon-site YELL --prism --neon-version v2 --experiment toolstest diff --git a/test/tools/nl_files/subset_data_KONA b/test/tools/nl_files/subset_data_KONA deleted file mode 100644 index 0df59b1b17..0000000000 --- a/test/tools/nl_files/subset_data_KONA +++ /dev/null @@ -1 +0,0 @@ -point --lon 263.38956 --lat 39.1082 --site KONA --dompft 17 19 23 45 --pctpft 28 12 32 28 --crop --create-surface --outdir EXEDIR/KONA_user-mod_and_data --user-mods-dir 
EXEDIR/KONA_user-mod_and_data --verbose --inputdata-dir CSMDATA diff --git a/test/tools/nl_files/subset_data_US-UMB b/test/tools/nl_files/subset_data_US-UMB deleted file mode 100644 index 935b0dc99d..0000000000 --- a/test/tools/nl_files/subset_data_US-UMB +++ /dev/null @@ -1 +0,0 @@ -point --lon 275.28626 --lat 45.5598 --site 1x1_US-UMB --dompft 7 --cap-saturation --uniform-snowpack --create-surface --outdir EXEDIR/US-UMB_user-mod_and_data --user-mods-dir EXEDIR/US-UMB_user-mod_and_data --verbose --inputdata-dir CSMDATA diff --git a/test/tools/nl_files/subset_data_YELL b/test/tools/nl_files/subset_data_YELL deleted file mode 100644 index 0d6960e7f5..0000000000 --- a/test/tools/nl_files/subset_data_YELL +++ /dev/null @@ -1 +0,0 @@ -point --lon 250.45804 --lat 44.95597 --site YELL --dompft 1 --crop --uniform-snowpack --cap-saturation --create-surface --outdir EXEDIR/YELL_user-mod_and_data --user-mods-dir EXEDIR/YELL_user-mod_and_data --silent --inputdata-dir CSMDATA diff --git a/test/tools/nl_files/subset_data_f09_1x1pt_townshipSD b/test/tools/nl_files/subset_data_f09_1x1pt_townshipSD deleted file mode 100644 index aa25c07d1e..0000000000 --- a/test/tools/nl_files/subset_data_f09_1x1pt_townshipSD +++ /dev/null @@ -1 +0,0 @@ -point --lon 257.5 --lat 43.822 --site f09_1x1pt_townshipSD --include-nonveg --crop --create-datm --create-user-mods --datm-syr 2000 --datm-eyr 2000 --create-surface --outdir EXEDIR/f09_US_pt_user-mod_and_data --user-mods-dir EXEDIR/f09_US_pt_user-mod_and_data --verbose --inputdata-dir CSMDATA diff --git a/test/tools/nl_files/subset_data_f09_37x288pt_PanBoreal b/test/tools/nl_files/subset_data_f09_37x288pt_PanBoreal deleted file mode 100644 index 448b5052d6..0000000000 --- a/test/tools/nl_files/subset_data_f09_37x288pt_PanBoreal +++ /dev/null @@ -1 +0,0 @@ -region --lat1 55 --lat2 89.1 --lon1 0 --lon2 360 --create-mesh --create-surface --create-domain --create-user-mods --verbose --overwrite --reg f09_37x288pt_PanBoreal --inputdata-dir CSMDATA diff --git a/test/tools/nl_files/subset_data_f09_4x9pt_AlaskaTananaValley b/test/tools/nl_files/subset_data_f09_4x9pt_AlaskaTananaValley deleted file mode 100644 index 9928d78429..0000000000 --- a/test/tools/nl_files/subset_data_f09_4x9pt_AlaskaTananaValley +++ /dev/null @@ -1 +0,0 @@ -region --lat1 62 --lat2 66 --lon1 -152 --lon2 -141 --create-mesh --create-domain --create-surface --create-user-mods --verbose --overwrite --reg f09_4x9pt_AlaskaTananaValley --inputdata-dir CSMDATA diff --git a/test/tools/nl_files/subset_data_f09_58x45pt_SouthAmerica b/test/tools/nl_files/subset_data_f09_58x45pt_SouthAmerica deleted file mode 100644 index 201dd2c76c..0000000000 --- a/test/tools/nl_files/subset_data_f09_58x45pt_SouthAmerica +++ /dev/null @@ -1 +0,0 @@ -region --lat1 -40 --lat2 15 --lon1 275 --lon2 330 --create-mesh --create-surface --create-user-mods --create-domain --create-landuse --verbose --overwrite --reg f09_58x45_SouthAmerica --inputdata-dir CSMDATA diff --git a/test/tools/nl_files/subset_data_f09_90x288pt_1850PanTropics b/test/tools/nl_files/subset_data_f09_90x288pt_1850PanTropics deleted file mode 100644 index 1c9d5eace9..0000000000 --- a/test/tools/nl_files/subset_data_f09_90x288pt_1850PanTropics +++ /dev/null @@ -1 +0,0 @@ -region --lat1 -55 --lat2 30 --lon1 0 --lon2 360 --crop --create-surface --create-domain --create-mesh --overwrite --reg f09_90x288pt_1850PanTropics --inputdata-dir CSMDATA --cfg-file CTSM_ROOT/tools/mksurfdata_map/default_data_1850.cfg --verbose diff --git a/test/tools/show_var_diffs.sh 
b/test/tools/show_var_diffs.sh deleted file mode 100755 index f462d4ad0c..0000000000 --- a/test/tools/show_var_diffs.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/bash - -# This script processes a log file that was output by test_driver, -# giving lists of all variables with differences in values (those with -# RMS errors), and all variables with differences in fill patterns. -# -# This assumes that the log file contains output like: -# RMS foo -# RMS bar -# FILLDIFF foo -# FILLDIFF bar -# Some characteristics of these output lines are: -# - they begin with a leading space, followed by RMS or FILLDIFF -# - the variable name is in the second column of the line -# -# Note that (as of 4-5-12) the log file only contains output from the -# last file that didn't match, so this could potentially miss -# something -- especially if there are both h0 and h1 files in the -# comparison. - -# Usage: show_var_diffs logfile - -# ---------------------------------------------------------------------- -# LOCAL FUNCTIONS DEFINED HERE -# ---------------------------------------------------------------------- - -# This function shows the differences for one prefix (e.g., "RMS") -# Usage: show_diffs prefix -# (also uses $logfile from the parent script) -# -# Matches lines that start with the regular expression "^ ${prefix}" -# (note that one leading space is expected before the prefix) -# -# Assumes that the variable name is in the second column of matching lines -function show_diffs { - prefix=$1 - - # first determine if there were warnings relating to this prefix - grep "WARNING: Too many instances of ${prefix}" $logfile > /dev/null - if [ $? -eq 0 ]; then # found a warning - echo "WARNING: Some output was truncated; this may not be a complete list" - fi - - # now make a list of all variables matching this prefix - grep "^ ${prefix}" $logfile > $logfile.tmp.$$ - if [ $? 
-eq 0 ]; then - awk '{print $2}' $logfile.tmp.$$ | sort | uniq - else - echo "(no differences)" - fi - - rm $logfile.tmp.$$ -} - -# ---------------------------------------------------------------------- -# BEGIN MAIN SCRIPT -# ---------------------------------------------------------------------- - -# ---------------------------------------------------------------------- -# Handle command-line arguments -# ---------------------------------------------------------------------- - -if [[ $# -ne 1 ]]; then - echo "Usage: show_var_diffs logfile" - exit 1 -fi - -logfile=$1 - -# ---------------------------------------------------------------------- -# Do the processing -# ---------------------------------------------------------------------- - -echo "Variables with differences in values:" -show_diffs "RMS" - -echo "" -echo "Variables with differences in fill patterns:" -show_diffs "FILLDIFF" \ No newline at end of file diff --git a/test/tools/tests_posttag_hobart_nompi b/test/tools/tests_posttag_hobart_nompi deleted file mode 100644 index c185428868..0000000000 --- a/test/tools/tests_posttag_hobart_nompi +++ /dev/null @@ -1 +0,0 @@ -smc#4 blc#4 diff --git a/test/tools/tests_posttag_nompi_regression b/test/tools/tests_posttag_nompi_regression deleted file mode 100644 index c185428868..0000000000 --- a/test/tools/tests_posttag_nompi_regression +++ /dev/null @@ -1 +0,0 @@ -smc#4 blc#4 diff --git a/test/tools/tests_pretag_cheyenne_nompi b/test/tools/tests_pretag_cheyenne_nompi deleted file mode 100644 index e92ffaaaad..0000000000 --- a/test/tools/tests_pretag_cheyenne_nompi +++ /dev/null @@ -1,3 +0,0 @@ -smc#4 blc#4 -smba1 blba1 -smbd1 blbd1 diff --git a/test/tools/tests_pretag_derecho_nompi b/test/tools/tests_pretag_derecho_nompi deleted file mode 100644 index 5fdaf335ae..0000000000 --- a/test/tools/tests_pretag_derecho_nompi +++ /dev/null @@ -1,9 +0,0 @@ -smba1 blba1 -smbd1 blbd1 -sm0a1 bl0a1 -sm0c1 bl0c1 -smaa2 blaa2 -smba1 blba1 -smb81 blb81 -smbc1 blbc1 -smbd1 blbd1 diff --git a/test/tools/tests_pretag_nompi_neon b/test/tools/tests_pretag_nompi_neon deleted file mode 100644 index e5fa27e6c4..0000000000 --- a/test/tools/tests_pretag_nompi_neon +++ /dev/null @@ -1,8 +0,0 @@ -sm0a1 bl0a1 -sm0c1 bl0c1 -smaa2 blaa2 -smba1 blba1 -smbb1 blbb1 -smb81 blb81 -smbc1 blbc1 -smbd1 blbd1 From 69dbee8149945c173c66cfa8670adecccf333d16 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 11:08:53 -0600 Subject: [PATCH 091/126] Remove bld/unit_testers/xFail/wrapClmTests.pl --- bld/unit_testers/xFail/wrapClmTests.pl | 128 ------------------------- 1 file changed, 128 deletions(-) delete mode 100755 bld/unit_testers/xFail/wrapClmTests.pl diff --git a/bld/unit_testers/xFail/wrapClmTests.pl b/bld/unit_testers/xFail/wrapClmTests.pl deleted file mode 100755 index 28238b9d5d..0000000000 --- a/bld/unit_testers/xFail/wrapClmTests.pl +++ /dev/null @@ -1,128 +0,0 @@ -#!/usr/bin/env perl - -#-# ========================================================================================= - -=head1 wrapClmTest.pl - -=head1 Overview - -This is a wrapper script that is called from test_driver.sh for either interactive or batch -tests. It calls the CTOR for the xFail::expectedFail.pm module and also parses the td*.status -file to create a new file with xFails listed. - -It takes the following arguments: - - numberOfTests -> number of tests from test_driver.sh - statusFile -> name of the td..status file - callingScript -> name of script calling this. 
For test_driver.sh it may be one of:
-                      1) test_driver.sh-i for interactive tests
-                      2) test_driver.sh for batch tests
-
-=head1 Notes
-
-This script may be run standalone which is useful for testing purposes.
-
-=cut
-
-#-# =========================================================================================
-
-use strict;
-use Getopt::Long;
-use English;
-use Cwd;
-use Scalar::Util qw(looks_like_number);
-
-my $DEBUG=0;
-
-sub usage {
-    die <<EOF;
-usage: wrapClmTests.pl -numberOfTests <numberOfTests> -statusFile <statusFile> -callingScript <callingScript>
-EOF
-}
-
-my %opts = ( help => 0,
-             numberOfTests => undef,
-             statusFile => undef,
-             callingScript => undef,
-           );
-
-GetOptions(
-    "h|help"          => \$opts{'help'},
-    "numberOfTests=s" => \$opts{'numberOfTests'},
-    "statusFile=s"    => \$opts{'statusFile'},
-    "callingScript=s" => \$opts{'callingScript'},
-) or usage();
-
-# Give usage message.
-usage() if $opts{'help'};
-
-my $statFoo = undef;
-my $nTests = undef;
-my $script= undef;
-
-if (defined($opts{'statusFile'})) {
-   $statFoo = $opts{'statusFile'};
-}
-if (defined($opts{'numberOfTests'})) {
-   $nTests = $opts{'numberOfTests'};
-}
-if (defined($opts{'callingScript'})) {
-   $script = $opts{'callingScript'};
-}
-
-my ( $self ) = @_;
-
-#Figure out where configure directory is and where can use the XML/Lite module from
-my $ProgName;
-($ProgName = $PROGRAM_NAME) =~ s!(.*)/!!;  # name of program
-my $ProgDir = $1;                          # name of directory where program lives
-
-my $cwd = getcwd();  # current working directory
-my $cfgdir;
-
-if ($ProgDir) { $cfgdir = $ProgDir; }
-else { $cfgdir = $cwd; }
-
-#-----------------------------------------------------------------------------------------------
-# Add $cfgdir to the list of paths that Perl searches for modules
-#-----------------------------------------------------------------------------------------------
-my @dirs = ( $cfgdir,
-             "$cfgdir/../",
-             "$cfgdir/../../../../../cime/utils/perl5lib");
-unshift @INC, @dirs;
-my $result = eval "require expectedFail";
-if ( ! defined($result) ) {
-   die <<"EOF";
-** Cannot find perl module "xFail/expectedFail.pm" from directories: @dirs **
-EOF
-}
-
-#_# ====================================
-#_# setup work complete. Now parse file
-#_# ====================================
-
-if ($DEBUG) {
-   print (" wrapClmTests.pl:: calling script $script \n");
-   print (" wrapClmTests.pl:: number of tests $nTests \n");
-   print (" wrapClmTests.pl:: processing $statFoo \n");
-}
-
-#_# compGen not used for CLM batch or interactive tests, but we use "compare" as the default in this case
-my $compGen="compare";
-my $xFail = xFail::expectedFail->new($script,$compGen,$nTests);
-
-$xFail->parseOutputCLM($statFoo);
-
-exit(0);

From 8ef1ff4a3a4c2ca4c44c0d59bd654819b32edd62 Mon Sep 17 00:00:00 2001
From: Samuel Levis
Date: Tue, 21 May 2024 11:19:49 -0600
Subject: [PATCH 092/126] Remove obsolete parts of
 bld/unit_testers/xFail/expectedFail.pm

---
 bld/unit_testers/xFail/expectedFail.pm | 119 +------
 1 file changed, 2 insertions(+), 117 deletions(-)

diff --git a/bld/unit_testers/xFail/expectedFail.pm b/bld/unit_testers/xFail/expectedFail.pm
index 9feaa3e38b..067e3a51d8 100755
--- a/bld/unit_testers/xFail/expectedFail.pm
+++ b/bld/unit_testers/xFail/expectedFail.pm
@@ -5,7 +5,7 @@ Documentation for expectedFail.pm
 =head1 Overview
 
 The module expectedFail.pm supplies the capability of checking if a failed test is expected to fail.
-It is called directly from either test_driver.sh (for batch and interactive tests) or build-namelist_test.pl.
+It is called directly from build-namelist_test.pl.
 Future plans involve integrating this module into cesm tests.
 =head1 Use Case
@@ -112,7 +112,7 @@ sub new {
 
 =head1 parseOutput
 
-parseOutput parsese the output from the build-namelist_test.pl script. It is similar
+parseOutput parses the output from the build-namelist_test.pl script. It is similar
 to, but not interchangable with parseOutputCLM.
 
 The only argument is that of the reference variable that contains the information dumped
@@ -223,119 +223,6 @@ sub parseOutput
 #
 ##############################################################################
 
-=head1 parseOutputCLM
-
-parseOutputCLM parsese the output from the test_driver.sh script. It is similar
-to, but not interchangable with parseOutput.
-
-parseOutputCLM takes one arguments:
-   $statFoo-> the name of the td..status file
-
-returns: nothing
-
-=cut
-
-##############################################################################
-#
-##############################################################################
-sub parseOutputCLM
-{
-
-   my $report;
-   my $testId;
-   my @testName={};
-   my $testReason;
-
-   my ($self, $statFoo) = @_ ;
-
-   open(FOO, "< $statFoo"); # open for input
-   open(FOO_OUT, "> $statFoo.xFail"); # open for input
-
-   my(@reportLines);
-
-   while (<FOO>) {
-
-      my($line) = $_;
-
-      my @outArr=split(/ /,$line);
-      if (looks_like_number(@outArr[0])) {
-
-         $self->{_numericalTestId}++;
-
-         my $num=sprintf("%03d", $self->{_numericalTestId});
-         my $totNum=sprintf("%03d", $self->{_totTests});
-
-         #_# last element has the pass/fail info.
-         chomp(@outArr[-1]);
-         my $repPass=substr(@outArr[-1], -4, 4);
-
-         if ($DEBUG) {
-            print ("xFail::expectedFail::parseOutput @outArr[0] \n");
-            print ("xFail::expectedFail::parseOutput @outArr[1] \n");
-            print ("xFail::expectedFail::parseOutput @outArr[2] \n");
-            print ("xFail::expectedFail::parseOutput @outArr[3] \n");
-            print ("xFail::expectedFail::parseOutput @outArr[4] \n");
-            print ("xFail::expectedFail::parseOutput @outArr[5] \n");
-            print ("xFail::expectedFail::parseOutput @outArr[6] \n");
-            print ("xFail::expectedFail::parseOutput @outArr[-1] \n");
-            print ("xFail::expectedFail::parseOutput $repPass \n");
-         }
-
-         my $size = @outArr-1;
-         if ($DEBUG) {
-            print ("size of line $size \n");
-         }
-         my $endOfDesc=$size-1;
-
-         if ($repPass eq "PASS") {
-            $report=$pass;
-            $testId=@outArr[1];
-            @testName=@outArr[2..$endOfDesc];
-
-            my ($retVal,$xFailText)=$self->_searchExpectedFail($testId);
-
-            my $testReason=$self->_testNowPassing($testId,$retVal,$xFailText);
-
-            #_# print out the test results
-            print FOO_OUT ("$num/$totNum <$report> $testReason \n");
-
-         } else {
-            $testId=@outArr[1];
-            my ($retVal,$xFailText)=$self->_searchExpectedFail($testId);
-
-            if ($DEBUG) {
-               print ("xFail::expectedFail::parseOutput Id $retVal,$xFailText \n");
-            }
-
-            @testName=@outArr[2..$endOfDesc];
-
-            if ($retVal eq "TRUE"){
-               #_# found an expected FAIL (xFAIL)
-               $report=$xfail;
-               $testReason= "";
-            } else {
-               #_# print a regular FAIL
-               $report=$fail;
-               $testReason="";
-            }
-
-            #_# print out the test results
-            print FOO_OUT ("$num/$totNum <$report> $testReason \n");
-
-         }
-
-      } else {
-         print FOO_OUT $line;
-      }
-   }
-   close(FOO);
-   close(FOO_OUT);
-}
-
-##############################################################################
-#
-##############################################################################
-
 =head1 _searchExpectedFail
 
 searches the list of expected fails for a match with testId.
@@ -608,8 +495,6 @@ sub _getTestType my %testTypes = ( "build-namelist_test.pl" => "namelistTest", - "test_driver.sh-i" => "clmInteractive", - "test_driver.sh" => "clmBatch", "clm-cesm.sh" => "cesm" ); From 0c26697a620292a7dcd69f8eefa55a3ee759332f Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 11:22:42 -0600 Subject: [PATCH 093/126] Remove obsolete parts of expectedClmTestFails.xml: DID I rm ENOUGH? --- bld/unit_testers/xFail/expectedClmTestFails.xml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/bld/unit_testers/xFail/expectedClmTestFails.xml b/bld/unit_testers/xFail/expectedClmTestFails.xml index c7cbfee488..380e614ea1 100644 --- a/bld/unit_testers/xFail/expectedClmTestFails.xml +++ b/bld/unit_testers/xFail/expectedClmTestFails.xml @@ -20,20 +20,6 @@ - - - - - - - - goldbach not recognized - goldbach not recognized - goldbach not recognized - - - - From 882425a205ded67450455466ded69d034f5d39fa Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 11:26:11 -0600 Subject: [PATCH 094/126] Remove mention of test_driver_*.sh in .gitignore --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index 1da8072fed..163bfeda2d 100644 --- a/.gitignore +++ b/.gitignore @@ -75,7 +75,6 @@ buildnmlc td.*.status td.*.log td.*.status.xFail -test_driver_*.sh # mksurfdata output surfdata_*.log From 9407ce50522c2a624196fd4b892363cf8505b5ee Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 11:33:48 -0600 Subject: [PATCH 095/126] Remove reference to /test in /doc/.../testing.rst --- doc/source/users_guide/testing/testing.rst | 6 ------ 1 file changed, 6 deletions(-) diff --git a/doc/source/users_guide/testing/testing.rst b/doc/source/users_guide/testing/testing.rst index bad1183fff..69ca1f7263 100644 --- a/doc/source/users_guide/testing/testing.rst +++ b/doc/source/users_guide/testing/testing.rst @@ -13,12 +13,6 @@ CIME Testing scripts We first introduce the test scripts that work for all CESM components. The CIME script **create_test** runs a specific type of test, at a given resolution, for a given compset using a given machine. See `CIME Chapter on Testing `_ for how to use it to run single tests as well as lists of tests. The standard testname for CLM is "aux_clm" for cheyenne with intel and gnu compilers as well as the CGD machine hobart for intel, nag, and pgi compilers. There's also a shorter test list called "clm_short". Also see the `CTSM Wiki on Testing `_. -CTSM Tools Testing -================== - -.. include:: ../../../../test/tools/README - :literal: - CTSM Fortran Unit Tests ======================= From 93c274d7570d8f33002ac0156fc4a977670feef2 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 12:30:38 -0600 Subject: [PATCH 096/126] Update README by removing mention of /test directory --- README | 1 - 1 file changed, 1 deletion(-) diff --git a/README b/README index 4699fec858..af45a14909 100644 --- a/README +++ b/README @@ -47,7 +47,6 @@ doc --------------- Documentation of CTSM. bld --------------- build-namelist scripts for CTSM. src --------------- CTSM Source code. lilac ------------- Lightweight Infrastructure for Land-Atmosphere Coupling (for coupling to a host atmosphere model) -test -------------- CTSM Testing scripts for CTSM offline tools (deprecated) tools ------------- CTSM Offline tools to prepare input datasets and process output. 
cime_config ------- Configuration files of cime for compsets and CTSM settings manage_externals -- Script to manage the external source directories (deprecated) From 802b989d5c408a36bccf07b24f2f8a2724e4ca59 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 21 May 2024 15:11:29 -0600 Subject: [PATCH 097/126] Second draft of ChangeLog/ChangeSum --- doc/ChangeLog | 37 ++++++++++++++++++++----------------- doc/ChangeSum | 2 +- 2 files changed, 21 insertions(+), 18 deletions(-) diff --git a/doc/ChangeLog b/doc/ChangeLog index 5644ed295d..d68ef323ac 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,18 +1,18 @@ =============================================================== Tag name: ctsm5.2.006 Originator(s): slevis (Samuel Levis) -Date: Fri May 17 16:17:54 MDT 2024 -One-line Summary: Update externals to cesm2_3_beta17 +Date: Tue 21 May 2024 02:59:05 PM MDT +One-line Summary: Update externals to cesm2_3_beta17, remove mct, retire /test/tools Purpose and description of changes ---------------------------------- -#2493 update externals to beta17 -#2294 remove references to mct but do not remove /src/mct + #2493 update externals to beta17 + #2294 remove mct but not in Externals.cfg + #2279 Retire the /test/tools framework for CESM test system custom tests that do the same thing -Changes unrelated to the tag's title: -#2546 fix error in cam4/cam5 test (ekluzek) -Remove /test/tools/test_driver.sh + Changes unrelated to the tag's title: + #2546 fix error in cam4/cam5 test (ekluzek) Significant changes to scientifically-supported configurations -------------------------------------------------------------- @@ -36,20 +36,23 @@ Does this tag change answers significantly for any of the following physics conf Bugs fixed ---------- List of CTSM issues fixed (include CTSM Issue # and description) [one per line]: -Fixes #2493 update externals to beta17 -Fixes #2546 fix error in cam4/cam5 test (unrelated) + Fixes #2493 update externals to beta17 + Fixes #2279 Retire the /test/tools framework for CESM test system custom tests that do the same thing + Fixes #2546 fix error in cam4/cam5 test (unrelated) Notes of particular relevance for users --------------------------------------- -Changes to documentation: Remove references to mct and cpl7 +Changes to documentation: + Remove references to mct and cpl7 -Substantial timing or memory changes: Not considered +Substantial timing or memory changes: + Not considered Notes of particular relevance for developers: --------------------------------------------- Changes to tests or testing: -I added a long list of izumi nag debug tests to ExpectedFailures as per -https://github.com/ESCOMP/CMEPS/pull/460 + I added a long list of izumi nag debug tests to ExpectedFailures as per + https://github.com/ESCOMP/CMEPS/pull/460 Testing summary: ---------------- @@ -57,16 +60,16 @@ Testing summary: build-namelist tests (if CLMBuildNamelist.pm has changed): - derecho - + derecho - PASS python testing (if python code has changed; see instructions in python/README.md; document testing done): - (any machine) - + derecho - PASS regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): derecho ----- - izumi ------- IN PROGRESS + izumi ------- OK any other testing (give details below): @@ -98,7 +101,7 @@ Changes answers relative to baseline: Other details ------------- List any externals directories updated (cime, rtm, mosart, cism, fates, etc.): -cism, ccs_config, cime, cmeps, cdeps + cism, ccs_config, cime, cmeps, cdeps Pull 
Requests that document the changes (include PR ids): https://github.com/ESCOMP/ctsm/pull/2539 diff --git a/doc/ChangeSum b/doc/ChangeSum index c73cb19309..7f8cdaeead 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,6 +1,6 @@ Tag Who Date Summary ============================================================================================================================ - ctsm5.2.006 slevis 05/17/2024 Update externals to cesm2_3_beta17 + ctsm5.2.006 slevis 05/21/2024 Update externals to cesm2_3_beta17, remove mct, retire /test/tools ctsm5.2.005 erik 05/13/2024 Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems ctsm5.2.004 multiple 05/09/2024 CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes ctsm5.2.003 samrabin 05/02/2024 Merge b4b-dev From a14ab0838951426162b7af5bb1a1a977cf6353cf Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 23 May 2024 14:53:35 -0600 Subject: [PATCH 098/126] Update cmeps to cmeps0.14.63 in Externals.cfg --- Externals.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals.cfg b/Externals.cfg index 8ef524bf24..70d103a94b 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -48,7 +48,7 @@ tag = cime6.0.238_httpsbranch01 required = True [cmeps] -tag = cmeps0.14.60 +tag = cmeps0.14.63 protocol = git repo_url = https://github.com/ESCOMP/CMEPS.git local_path = components/cmeps From fc53412192c7fea8980b99fbc93324a10c17ba44 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 24 May 2024 13:49:32 -0600 Subject: [PATCH 099/126] Fix for the LILACSMOKE test to pass --- cime_config/SystemTests/lilacsmoke.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cime_config/SystemTests/lilacsmoke.py b/cime_config/SystemTests/lilacsmoke.py index 66a94068da..366d5d1176 100644 --- a/cime_config/SystemTests/lilacsmoke.py +++ b/cime_config/SystemTests/lilacsmoke.py @@ -407,6 +407,7 @@ def run_phase(self): # case.get_mpirun_cmd creates a command that runs the executable given by # case.run_exe. So it's important that (elsewhere in this test script) we create a # link pointing from that to the atm_driver.exe executable. 
+ self._case.load_env(reset=True) lid = new_lid() os.environ["OMP_NUM_THREADS"] = str(self._case.thread_count) cmd = self._case.get_mpirun_cmd(allow_unresolved_envvars=False) From b7a0adfa71fa088718df13411e5c04aa83b8add7 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 24 May 2024 13:54:29 -0600 Subject: [PATCH 100/126] Rm expected fails associated w https://github.com/ESCOMP/CMEPS/pull/460 --- cime_config/testdefs/ExpectedTestFails.xml | 85 ---------------------- 1 file changed, 85 deletions(-) diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index a29e55cc9e..ef6a00993f 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -44,91 +44,6 @@ - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - - - FAIL - https://github.com/ESCOMP/CMEPS/pull/460 - - - FAIL From 9fc13521793969bd15596a9194353c9dda168349 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 24 May 2024 14:28:48 -0600 Subject: [PATCH 101/126] Correct a testname in ExpectedTestFails.xml --- cime_config/testdefs/ExpectedTestFails.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index ef6a00993f..23e481222b 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -51,7 +51,7 @@ - + FAIL #2542 From a7b60fa5539991e0e75fff6c34c1b2f50f3b3e83 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 27 May 2024 17:34:56 -0600 Subject: [PATCH 102/126] first remove manage_externals and update docs --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- Externals.cfg | 94 - Externals_CLM.cfg | 9 - README | 6 +- manage_externals/.dir_locals.el | 12 - manage_externals/.github/ISSUE_TEMPLATE.md | 6 - .../.github/PULL_REQUEST_TEMPLATE.md | 17 - .../.github/workflows/bumpversion.yml | 19 - manage_externals/.github/workflows/tests.yml | 30 - manage_externals/.gitignore | 17 - manage_externals/.travis.yml | 18 - manage_externals/LICENSE.txt | 34 - manage_externals/README.md | 231 -- manage_externals/README_FIRST | 54 - manage_externals/checkout_externals | 36 - manage_externals/manic/__init__.py | 9 - manage_externals/manic/checkout.py | 446 ---- .../manic/externals_description.py | 830 -------- manage_externals/manic/externals_status.py | 164 -- manage_externals/manic/global_constants.py | 18 - manage_externals/manic/repository.py | 98 - manage_externals/manic/repository_factory.py | 30 - manage_externals/manic/repository_git.py | 859 -------- manage_externals/manic/repository_svn.py | 291 --- manage_externals/manic/sourcetree.py | 425 ---- manage_externals/manic/utils.py | 330 --- manage_externals/test/.coveragerc | 7 - manage_externals/test/.gitignore | 7 - manage_externals/test/.pylint.rc | 426 ---- 
manage_externals/test/Makefile | 124 -- manage_externals/test/README.md | 53 - manage_externals/test/doc/.gitignore | 2 - manage_externals/test/doc/Makefile | 20 - manage_externals/test/doc/conf.py | 172 -- manage_externals/test/doc/develop.rst | 202 -- manage_externals/test/doc/index.rst | 22 - manage_externals/test/doc/testing.rst | 123 -- manage_externals/test/repos/README.md | 33 - .../test/repos/container.git/HEAD | 1 - .../test/repos/container.git/config | 6 - .../test/repos/container.git/description | 1 - .../test/repos/container.git/info/exclude | 6 - .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 133 -> 0 bytes .../71/5b8f3e4afe1802a178e1d603af404ba45d59de | Bin 136 -> 0 bytes .../b0/f87705e2b9601cb831878f3d51efa78b910d7b | Bin 89 -> 0 bytes .../f9/e08370a737e941de6f6492e3f427c2ef4c1a03 | Bin 81 -> 0 bytes .../repos/container.git/refs/heads/master | 1 - manage_externals/test/repos/error/readme.txt | 3 - .../test/repos/mixed-cont-ext.git/HEAD | 1 - .../test/repos/mixed-cont-ext.git/config | 6 - .../test/repos/mixed-cont-ext.git/description | 1 - .../repos/mixed-cont-ext.git/info/exclude | 6 - .../00/437ac2000d5f06fb8a572a01a5bbdae98b17cb | Bin 172 -> 0 bytes .../01/97458f2dbe5fcd6bc44fa46983be0a30282379 | Bin 171 -> 0 bytes .../06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 | Bin 136 -> 0 bytes .../14/368b701616a8c53820b610414a4b9a07540cf6 | 1 - .../15/2b57e1cf23721cd17ff681cb9276e3fb9fc091 | 2 - .../1f/01fa46c17b1f38b37e6259f6e9d041bda3144f | Bin 167 -> 0 bytes .../37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 | Bin 89 -> 0 bytes .../38/9a2b876b8965d3c91a3db8d28a483eaf019d5c | Bin 130 -> 0 bytes .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 133 -> 0 bytes .../6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 | Bin 129 -> 0 bytes .../6f/c379457ecb4e576a13c7610ae1fa73f845ee6a | 1 - .../93/a159deb9175bfeb2820a0006ddd92d78131332 | Bin 169 -> 0 bytes .../95/80ecc12f16334ce44e42287d5d46f927bb7b75 | 1 - .../a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd | Bin 130 -> 0 bytes .../e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 | Bin 130 -> 0 bytes .../fd/15a5ad5204356229c60a831d2a8120a43ac901 | 2 - .../mixed-cont-ext.git/refs/heads/master | 1 - .../mixed-cont-ext.git/refs/heads/new-feature | 1 - .../test/repos/simple-ext-fork.git/HEAD | 1 - .../test/repos/simple-ext-fork.git/config | 8 - .../repos/simple-ext-fork.git/description | 1 - .../repos/simple-ext-fork.git/info/exclude | 6 - .../00/fd13e76189f9134b0506b4b8ed3172723b467f | Bin 89 -> 0 bytes .../0b/15e8af3d4615b42314216efeae3fff184046a8 | Bin 89 -> 0 bytes .../0b/67df4e7e8e6e1c6e401542738b352d18744677 | Bin 167 -> 0 bytes .../11/a76e3d9a67313dec7ce1230852ab5c86352c5c | 2 - .../16/5506a7408a482f50493434e13fffeb44af893f | Bin 89 -> 0 bytes .../24/4386e788c9bc608613e127a329c742450a60e4 | Bin 164 -> 0 bytes .../32/7e97d86e941047d809dba58f2804740c6c30cf | Bin 89 -> 0 bytes .../36/418b4e5665956a90725c9a1b5a8e551c5f3d48 | Bin 159 -> 0 bytes .../3d/7099c35404ae6c8640ce263b38bef06e98cc26 | 2 - .../3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b | 2 - .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 133 -> 0 bytes .../4d/837135915ed93eed6fff6b439f284ce317296f | Bin 89 -> 0 bytes .../56/175e017ad38bf3d33d74b6bd7c74624b28466a | Bin 89 -> 0 bytes .../5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae | Bin 93 -> 0 bytes .../67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 | Bin 165 -> 0 bytes .../7b/0bd630ac13865735a1dff3437a137d8ab50663 | Bin 119 -> 0 bytes .../88/cf20868e0cc445f5642a480ed034c71e0d7e9f | 2 - .../8d/2b3b35126224c975d23f109aa1e3cbac452989 | 2 - 
.../9b/75494003deca69527bb64bcaa352e801611dd2 | Bin 138 -> 0 bytes .../a2/2a5da9119328ea6d693f88861457c07e14ac04 | 1 - .../a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 | 3 - .../b9/3737be3ea6b19f6255983748a0a0f4d622f936 | Bin 89 -> 0 bytes .../c5/32bc8fde96fa63103a52057f0baffcc9f00c6b | 1 - .../c5/b315915742133dbdfbeed0753e481b55c1d364 | 1 - .../f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca | 1 - .../repos/simple-ext-fork.git/packed-refs | 5 - .../simple-ext-fork.git/refs/heads/feature2 | 1 - .../refs/tags/abandoned-feature | 1 - .../refs/tags/forked-feature-v1 | 1 - .../test/repos/simple-ext.git/HEAD | 1 - .../test/repos/simple-ext.git/config | 6 - .../test/repos/simple-ext.git/description | 1 - .../test/repos/simple-ext.git/info/exclude | 6 - .../00/fd13e76189f9134b0506b4b8ed3172723b467f | Bin 89 -> 0 bytes .../09/0e1034746b2c865f7b0280813dbf4061a700e8 | Bin 164 -> 0 bytes .../0b/15e8af3d4615b42314216efeae3fff184046a8 | Bin 89 -> 0 bytes .../11/a76e3d9a67313dec7ce1230852ab5c86352c5c | 2 - .../14/2711fdbbcb8034d7cad6bae6801887b12fe61d | Bin 83 -> 0 bytes .../31/dbcd6de441e671a467ef317146539b7ffabb11 | Bin 90 -> 0 bytes .../36/418b4e5665956a90725c9a1b5a8e551c5f3d48 | Bin 159 -> 0 bytes .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 133 -> 0 bytes .../60/7ec299c17dd285c029edc41a0109e49d441380 | Bin 168 -> 0 bytes .../60/b1cc1a38d63a4bcaa1e767262bbe23dbf9f5f5 | 2 - .../63/a99393d1baff97ccef967af30380659867b139 | 1 - .../95/3256da5612fcd9263590a353bc18c6f224e74f | 1 - .../9b/75494003deca69527bb64bcaa352e801611dd2 | Bin 138 -> 0 bytes .../a2/2a5da9119328ea6d693f88861457c07e14ac04 | 1 - .../b7/692b6d391899680da7b9b6fd8af4c413f06fe7 | Bin 137 -> 0 bytes .../c5/b315915742133dbdfbeed0753e481b55c1d364 | 1 - .../d1/163870d19c3dee34fada3a76b785cfa2a8424b | Bin 130 -> 0 bytes .../d8/ed2f33179d751937f8fde2e33921e4827babf4 | Bin 60 -> 0 bytes .../df/312890f93ba4d2c694208599b665c4a08afeff | Bin 89 -> 0 bytes .../repos/simple-ext.git/refs/heads/feature2 | 1 - .../repos/simple-ext.git/refs/heads/feature3 | 1 - .../repos/simple-ext.git/refs/heads/master | 1 - .../test/repos/simple-ext.git/refs/tags/tag1 | 1 - .../test/repos/simple-ext.git/refs/tags/tag2 | 1 - .../test/repos/simple-ext.svn/README.txt | 5 - .../test/repos/simple-ext.svn/conf/authz | 32 - .../repos/simple-ext.svn/conf/hooks-env.tmpl | 19 - .../test/repos/simple-ext.svn/conf/passwd | 8 - .../repos/simple-ext.svn/conf/svnserve.conf | 81 - .../test/repos/simple-ext.svn/db/current | 1 - .../test/repos/simple-ext.svn/db/format | 3 - .../test/repos/simple-ext.svn/db/fs-type | 1 - .../test/repos/simple-ext.svn/db/fsfs.conf | 200 -- .../repos/simple-ext.svn/db/min-unpacked-rev | 1 - .../test/repos/simple-ext.svn/db/rep-cache.db | Bin 8192 -> 0 bytes .../simple-ext.svn/db/rep-cache.db-journal | 0 .../test/repos/simple-ext.svn/db/revprops/0/0 | 5 - .../test/repos/simple-ext.svn/db/revprops/0/1 | 13 - .../test/repos/simple-ext.svn/db/revprops/0/2 | 13 - .../test/repos/simple-ext.svn/db/revprops/0/3 | 13 - .../test/repos/simple-ext.svn/db/revs/0/0 | Bin 253 -> 0 bytes .../test/repos/simple-ext.svn/db/revs/0/1 | Bin 725 -> 0 bytes .../test/repos/simple-ext.svn/db/revs/0/2 | Bin 816 -> 0 bytes .../test/repos/simple-ext.svn/db/revs/0/3 | Bin 769 -> 0 bytes .../test/repos/simple-ext.svn/db/txn-current | 1 - .../repos/simple-ext.svn/db/txn-current-lock | 0 .../test/repos/simple-ext.svn/db/uuid | 2 - .../test/repos/simple-ext.svn/db/write-lock | 0 .../test/repos/simple-ext.svn/format | 1 - .../simple-ext.svn/hooks/post-commit.tmpl | 62 - 
.../repos/simple-ext.svn/hooks/post-lock.tmpl | 64 - .../hooks/post-revprop-change.tmpl | 69 - .../simple-ext.svn/hooks/post-unlock.tmpl | 61 - .../simple-ext.svn/hooks/pre-commit.tmpl | 91 - .../repos/simple-ext.svn/hooks/pre-lock.tmpl | 95 - .../hooks/pre-revprop-change.tmpl | 79 - .../simple-ext.svn/hooks/pre-unlock.tmpl | 87 - .../simple-ext.svn/hooks/start-commit.tmpl | 81 - .../repos/simple-ext.svn/locks/db-logs.lock | 3 - .../test/repos/simple-ext.svn/locks/db.lock | 3 - manage_externals/test/requirements.txt | 5 - manage_externals/test/test_sys_checkout.py | 1871 ----------------- .../test/test_sys_repository_git.py | 238 --- .../test/test_unit_externals_description.py | 478 ----- .../test/test_unit_externals_status.py | 299 --- manage_externals/test/test_unit_repository.py | 208 -- .../test/test_unit_repository_git.py | 811 ------- .../test/test_unit_repository_svn.py | 501 ----- manage_externals/test/test_unit_utils.py | 350 --- 176 files changed, 2 insertions(+), 11238 deletions(-) delete mode 100644 Externals.cfg delete mode 100644 Externals_CLM.cfg delete mode 100644 manage_externals/.dir_locals.el delete mode 100644 manage_externals/.github/ISSUE_TEMPLATE.md delete mode 100644 manage_externals/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 manage_externals/.github/workflows/bumpversion.yml delete mode 100644 manage_externals/.github/workflows/tests.yml delete mode 100644 manage_externals/.gitignore delete mode 100644 manage_externals/.travis.yml delete mode 100644 manage_externals/LICENSE.txt delete mode 100644 manage_externals/README.md delete mode 100644 manage_externals/README_FIRST delete mode 100755 manage_externals/checkout_externals delete mode 100644 manage_externals/manic/__init__.py delete mode 100755 manage_externals/manic/checkout.py delete mode 100644 manage_externals/manic/externals_description.py delete mode 100644 manage_externals/manic/externals_status.py delete mode 100644 manage_externals/manic/global_constants.py delete mode 100644 manage_externals/manic/repository.py delete mode 100644 manage_externals/manic/repository_factory.py delete mode 100644 manage_externals/manic/repository_git.py delete mode 100644 manage_externals/manic/repository_svn.py delete mode 100644 manage_externals/manic/sourcetree.py delete mode 100644 manage_externals/manic/utils.py delete mode 100644 manage_externals/test/.coveragerc delete mode 100644 manage_externals/test/.gitignore delete mode 100644 manage_externals/test/.pylint.rc delete mode 100644 manage_externals/test/Makefile delete mode 100644 manage_externals/test/README.md delete mode 100644 manage_externals/test/doc/.gitignore delete mode 100644 manage_externals/test/doc/Makefile delete mode 100644 manage_externals/test/doc/conf.py delete mode 100644 manage_externals/test/doc/develop.rst delete mode 100644 manage_externals/test/doc/index.rst delete mode 100644 manage_externals/test/doc/testing.rst delete mode 100644 manage_externals/test/repos/README.md delete mode 100644 manage_externals/test/repos/container.git/HEAD delete mode 100644 manage_externals/test/repos/container.git/config delete mode 100644 manage_externals/test/repos/container.git/description delete mode 100644 manage_externals/test/repos/container.git/info/exclude delete mode 100644 manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 delete mode 100644 manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de delete mode 100644 
manage_externals/test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b delete mode 100644 manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 delete mode 100644 manage_externals/test/repos/container.git/refs/heads/master delete mode 100644 manage_externals/test/repos/error/readme.txt delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/HEAD delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/config delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/description delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/info/exclude delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/00/437ac2000d5f06fb8a572a01a5bbdae98b17cb delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/14/368b701616a8c53820b610414a4b9a07540cf6 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/15/2b57e1cf23721cd17ff681cb9276e3fb9fc091 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/HEAD delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/config delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/description delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/info/exclude delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c delete mode 100644 
manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/4d/837135915ed93eed6fff6b439f284ce317296f delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/56/175e017ad38bf3d33d74b6bd7c74624b28466a delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/88/cf20868e0cc445f5642a480ed034c71e0d7e9f delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/8d/2b3b35126224c975d23f109aa1e3cbac452989 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/packed-refs delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 delete mode 100644 manage_externals/test/repos/simple-ext.git/HEAD delete mode 100644 manage_externals/test/repos/simple-ext.git/config delete mode 100644 manage_externals/test/repos/simple-ext.git/description delete mode 100644 manage_externals/test/repos/simple-ext.git/info/exclude delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/09/0e1034746b2c865f7b0280813dbf4061a700e8 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c delete mode 100644 
manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/60/7ec299c17dd285c029edc41a0109e49d441380 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/60/b1cc1a38d63a4bcaa1e767262bbe23dbf9f5f5 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/63/a99393d1baff97ccef967af30380659867b139 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/95/3256da5612fcd9263590a353bc18c6f224e74f delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/d1/163870d19c3dee34fada3a76b785cfa2a8424b delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/heads/feature2 delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/heads/feature3 delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/heads/master delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/tags/tag1 delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/tags/tag2 delete mode 100644 manage_externals/test/repos/simple-ext.svn/README.txt delete mode 100644 manage_externals/test/repos/simple-ext.svn/conf/authz delete mode 100644 manage_externals/test/repos/simple-ext.svn/conf/hooks-env.tmpl delete mode 100644 manage_externals/test/repos/simple-ext.svn/conf/passwd delete mode 100644 manage_externals/test/repos/simple-ext.svn/conf/svnserve.conf delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/current delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/format delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/fs-type delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/fsfs.conf delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/min-unpacked-rev delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/rep-cache.db delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/rep-cache.db-journal delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/revprops/0/0 delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/revprops/0/1 delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/revprops/0/2 delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/revprops/0/3 delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/revs/0/0 delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/revs/0/1 delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/revs/0/2 delete mode 100644 
manage_externals/test/repos/simple-ext.svn/db/revs/0/3 delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/txn-current delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/txn-current-lock delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/uuid delete mode 100644 manage_externals/test/repos/simple-ext.svn/db/write-lock delete mode 100644 manage_externals/test/repos/simple-ext.svn/format delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/post-commit.tmpl delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/post-lock.tmpl delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/post-revprop-change.tmpl delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/post-unlock.tmpl delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/pre-commit.tmpl delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/pre-lock.tmpl delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/pre-revprop-change.tmpl delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/pre-unlock.tmpl delete mode 100755 manage_externals/test/repos/simple-ext.svn/hooks/start-commit.tmpl delete mode 100644 manage_externals/test/repos/simple-ext.svn/locks/db-logs.lock delete mode 100644 manage_externals/test/repos/simple-ext.svn/locks/db.lock delete mode 100644 manage_externals/test/requirements.txt delete mode 100755 manage_externals/test/test_sys_checkout.py delete mode 100644 manage_externals/test/test_sys_repository_git.py delete mode 100644 manage_externals/test/test_unit_externals_description.py delete mode 100644 manage_externals/test/test_unit_externals_status.py delete mode 100644 manage_externals/test/test_unit_repository.py delete mode 100644 manage_externals/test/test_unit_repository_git.py delete mode 100755 manage_externals/test/test_unit_repository_svn.py delete mode 100644 manage_externals/test/test_unit_utils.py diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 7c9512c9c3..c4a381383b 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -16,7 +16,7 @@ Testing performed, if any: (List what testing you did to show your changes worked as expected) (This can be manual testing or running of the different test suites) (Documentation on system testing is here: https://github.com/ESCOMP/ctsm/wiki/System-Testing-Guide) -(aux_clm on cheyenne for intel/gnu and izumi for intel/gnu/nag/pgi is the standard for tags on master) +(aux_clm on derecho for intel/gnu and izumi for intel/gnu/nag/nvhpc is the standard for tags on master) **NOTE: Be sure to check your coding style against the standard (https://github.com/ESCOMP/ctsm/wiki/CTSM-coding-guidelines) and review diff --git a/Externals.cfg b/Externals.cfg deleted file mode 100644 index 70d103a94b..0000000000 --- a/Externals.cfg +++ /dev/null @@ -1,94 +0,0 @@ -[clm] -local_path = . 
-protocol = externals_only -externals = Externals_CLM.cfg -required = True - -[cism] -local_path = components/cism -protocol = git -repo_url = https://github.com/ESCOMP/CISM-wrapper -tag = cismwrap_2_1_100 -externals = Externals_CISM.cfg -required = True - -[rtm] -local_path = components/rtm -protocol = git -repo_url = https://github.com/ESCOMP/RTM -tag = rtm1_0_79 -required = True - -[mosart] -local_path = components/mosart -protocol = git -repo_url = https://github.com/ESCOMP/MOSART -tag = mosart1_0_49 -required = True - -[mizuRoute] -tag = cesm-coupling.n02_v2.1.2 -protocol = git -local_path = components/mizuroute -repo_url = https://github.com/ESCOMP/mizuRoute -required = True - -[ccs_config] -tag = ccs_config_cesm0.0.106 -protocol = git -repo_url = https://github.com/ESMCI/ccs_config_cesm.git -local_path = ccs_config -required = True - -[cime] -local_path = cime -protocol = git -repo_url = https://github.com/ESMCI/cime -tag = cime6.0.238_httpsbranch01 -required = True - -[cmeps] -tag = cmeps0.14.63 -protocol = git -repo_url = https://github.com/ESCOMP/CMEPS.git -local_path = components/cmeps -required = True - -[cdeps] -tag = cdeps1.0.33 -protocol = git -repo_url = https://github.com/ESCOMP/CDEPS.git -local_path = components/cdeps -externals = Externals_CDEPS.cfg -required = True - -[share] -tag = share1.0.18 -protocol = git -repo_url = https://github.com/ESCOMP/CESM_share -local_path = share -required = True - -[mct] -tag = MCT_2.11.0 -protocol = git -repo_url = https://github.com/MCSclimate/MCT -local_path = libraries/mct -required = True - -[parallelio] -tag = pio2_6_2 -protocol = git -repo_url = https://github.com/NCAR/ParallelIO -local_path = libraries/parallelio -required = True - -[doc-builder] -local_path = doc/doc-builder -protocol = git -repo_url = https://github.com/ESMCI/doc-builder -tag = v1.0.8 -required = False - -[externals_description] -schema_version = 1.0.0 diff --git a/Externals_CLM.cfg b/Externals_CLM.cfg deleted file mode 100644 index 866b9ac5b2..0000000000 --- a/Externals_CLM.cfg +++ /dev/null @@ -1,9 +0,0 @@ -[fates] -local_path = src/fates -protocol = git -repo_url = https://github.com/NGEET/fates -tag = sci.1.73.0_api.35.0.0 -required = True - -[externals_description] -schema_version = 1.0.0 diff --git a/README b/README index af45a14909..0f1a38edf3 100644 --- a/README +++ b/README @@ -49,7 +49,7 @@ src --------------- CTSM Source code. lilac ------------- Lightweight Infrastructure for Land-Atmosphere Coupling (for coupling to a host atmosphere model) tools ------------- CTSM Offline tools to prepare input datasets and process output. 
 cime_config ------- Configuration files of cime for compsets and CTSM settings
-manage_externals -- Script to manage the external source directories (deprecated)
+bin/git-fleximod -- Script to manage the external source directories
 py_env_create ----- Script to setup the python environment for CTSM python tools using conda
 python ------------ Python modules used in tools and testing and automated checking of ALL CTSM python scripts
@@ -73,7 +73,6 @@ Top level documentation ($CTSMROOT):
 README ------------------- This file
 README.md ---------------- File that displays on github under https://github.com/ESCOMP/CTSM.git
 README.rst --------------- File that displays under the project in github
-README_EXTERNALS.rst ----- Information on how to work with manage_externals for CTSM (deprecated)
 CODE_OF_CONDUCT.md ------- Code of Conduct for how to work with each other on the CTSM project
 Copyright ---------------- CESM Copyright file
 doc/UpdateChangeLog.pl --- Script to add documentation on a tag to the
@@ -97,9 +96,6 @@ bld/namelist_files/namelist_defaults_ctsm.xml ----- Default values
 Important files in main directories (under $CTSMROOT):
 =============================================================================================
-Externals.cfg --------------- File for management of the main high level external (deprecated)
-Externals_CLM.cfg ----------- File for management of the CTSM specific externals (i.e. FATES)
-
 run_sys_tests --------------- Python script to send the standard CTSM testing off (submits the create_test
                               test suite for several different compilers on the
                               machines we do standard CTSM testing on).
diff --git a/manage_externals/.dir_locals.el b/manage_externals/.dir_locals.el
deleted file mode 100644
index a370490e92..0000000000
--- a/manage_externals/.dir_locals.el
+++ /dev/null
@@ -1,12 +0,0 @@
-; -*- mode: Lisp -*-
-
-((python-mode
- . (
-    ;; fill the paragraph to 80 columns when using M-q
-    (fill-column . 80)
-
-    ;; Use 4 spaces to indent in Python
-    (python-indent-offset . 4)
-    (indent-tabs-mode . nil)
-    )))
-
diff --git a/manage_externals/.github/ISSUE_TEMPLATE.md b/manage_externals/.github/ISSUE_TEMPLATE.md
deleted file mode 100644
index 8ecb2ae64b..0000000000
--- a/manage_externals/.github/ISSUE_TEMPLATE.md
+++ /dev/null
@@ -1,6 +0,0 @@
-### Summary of Issue:
-### Expected behavior and actual behavior:
-### Steps to reproduce the problem (should include model description file(s) or link to public repository):
-### What is the changeset ID of the code, and the machine you are using:
-### Have you modified the code? If so, it must be committed and available for testing:
-### Screen output or log file showing the error message and context:
diff --git a/manage_externals/.github/PULL_REQUEST_TEMPLATE.md b/manage_externals/.github/PULL_REQUEST_TEMPLATE.md
deleted file mode 100644
index b68b1fb5e2..0000000000
--- a/manage_externals/.github/PULL_REQUEST_TEMPLATE.md
+++ /dev/null
@@ -1,17 +0,0 @@
-[ 50 character, one line summary ]
-
-[ Description of the changes in this commit. It should be enough
-  information for someone not following this development to understand.
-  Lines should be wrapped at about 72 characters. ]
-
-User interface changes?: [ No/Yes ]
-[ If yes, describe what changed, and steps taken to ensure backward compatibility ]
-
-Fixes: [Github issue #s] And brief description of each issue.
- -Testing: - test removed: - unit tests: - system tests: - manual testing: - diff --git a/manage_externals/.github/workflows/bumpversion.yml b/manage_externals/.github/workflows/bumpversion.yml deleted file mode 100644 index f4dc9b7ca5..0000000000 --- a/manage_externals/.github/workflows/bumpversion.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Bump version -on: - push: - branches: - - main -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Bump version and push tag - id: tag_version - uses: mathieudutour/github-tag-action@v5.5 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - create_annotated_tag: true - default_bump: patch - dry_run: false - tag_prefix: manic- diff --git a/manage_externals/.github/workflows/tests.yml b/manage_externals/.github/workflows/tests.yml deleted file mode 100644 index dd75b91b49..0000000000 --- a/manage_externals/.github/workflows/tests.yml +++ /dev/null @@ -1,30 +0,0 @@ -# This is a workflow to compile the cmeps source without cime -name: Test Manic - -# Controls when the action will run. Triggers the workflow on push or pull request -# events but only for the master branch -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - test-manic: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Test Manic - run: | - pushd test - git config --global user.email "devnull@example.com" - git config --global user.name "GITHUB tester" - git config --global protocol.file.allow always - make utest - make stest - popd - - - name: Setup tmate session - if: ${{ failure() }} - uses: mxschmitt/action-tmate@v3 diff --git a/manage_externals/.gitignore b/manage_externals/.gitignore deleted file mode 100644 index a71ac0cd75..0000000000 --- a/manage_externals/.gitignore +++ /dev/null @@ -1,17 +0,0 @@ -# directories that are checked out by the tool -cime/ -cime_config/ -components/ - -# generated local files -*.log - -# editor files -*~ -*.bak - -# generated python files -*.pyc - -# test tmp file -test/tmp diff --git a/manage_externals/.travis.yml b/manage_externals/.travis.yml deleted file mode 100644 index d9b24c584d..0000000000 --- a/manage_externals/.travis.yml +++ /dev/null @@ -1,18 +0,0 @@ -language: python -os: linux -python: - - "3.4" - - "3.5" - - "3.6" - - "3.7" - - "3.8" -install: - - pip install -r test/requirements.txt -before_script: - - git --version -script: - - cd test; make test - - cd test; make lint -after_success: - - cd test; make coverage - - cd test; coveralls diff --git a/manage_externals/LICENSE.txt b/manage_externals/LICENSE.txt deleted file mode 100644 index 665ee03fbc..0000000000 --- a/manage_externals/LICENSE.txt +++ /dev/null @@ -1,34 +0,0 @@ -Copyright (c) 2017-2018, University Corporation for Atmospheric Research (UCAR) -All rights reserved. 
- -Developed by: - University Corporation for Atmospheric Research - National Center for Atmospheric Research - https://www2.cesm.ucar.edu/working-groups/sewg - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the "Software"), -to deal with the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom -the Software is furnished to do so, subject to the following conditions: - - - Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimers. - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimers in the documentation - and/or other materials provided with the distribution. - - Neither the names of [Name of Development Group, UCAR], - nor the names of its contributors may be used to endorse or promote - products derived from this Software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/manage_externals/README.md b/manage_externals/README.md deleted file mode 100644 index 9475301b5d..0000000000 --- a/manage_externals/README.md +++ /dev/null @@ -1,231 +0,0 @@ --- AUTOMATICALLY GENERATED FILE. DO NOT EDIT -- - -[![Build Status](https://travis-ci.org/ESMCI/manage_externals.svg?branch=master)](https://travis-ci.org/ESMCI/manage_externals)[![Coverage Status](https://coveralls.io/repos/github/ESMCI/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/ESMCI/manage_externals?branch=master) -``` -usage: checkout_externals [-h] [-e [EXTERNALS]] [-o] [-S] [-v] [--backtrace] - [-d] [--no-logging] - -checkout_externals manages checking out groups of externals from revision -control based on a externals description file. By default only the -required externals are checkout out. - -Operations performed by manage_externals utilities are explicit and -data driven. checkout_externals will always make the working copy *exactly* -match what is in the externals file when modifying the working copy of -a repository. - -If checkout_externals isn't doing what you expected, double check the contents -of the externals description file. - -Running checkout_externals without the '--status' option will always attempt to -synchronize the working copy to exactly match the externals description. - -optional arguments: - -h, --help show this help message and exit - -e [EXTERNALS], --externals [EXTERNALS] - The externals description filename. Default: - Externals.cfg. - -o, --optional By default only the required externals are checked - out. 
This flag will also checkout the optional - externals. - -S, --status Output status of the repositories managed by - checkout_externals. By default only summary - information is provided. Use verbose output to see - details. - -v, --verbose Output additional information to the screen and log - file. This flag can be used up to two times, - increasing the verbosity level each time. - --backtrace DEVELOPER: show exception backtraces as extra - debugging output - -d, --debug DEVELOPER: output additional debugging information to - the screen and log file. - --no-logging DEVELOPER: disable logging. - -``` -NOTE: checkout_externals *MUST* be run from the root of the source tree it -is managing. For example, if you cloned a repository with: - - $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev - -Then the root of the source tree is /path/to/some-project-dev. If you -obtained a sub-project via a checkout of another project: - - $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev - -and you need to checkout the sub-project externals, then the root of the -source tree is /path/to/some-project-dev. Do *NOT* run checkout_externals -from within /path/to/some-project-dev/sub-project - -The root of the source tree will be referred to as `${SRC_ROOT}` below. - -# Supported workflows - - * Checkout all required components from the default externals - description file: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals - - * To update all required components to the current values in the - externals description file, re-run checkout_externals: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals - - If there are *any* modifications to *any* working copy according - to the git or svn 'status' command, checkout_externals - will not update any external repositories. Modifications - include: modified files, added files, removed files, or missing - files. - - To avoid this safety check, edit the externals description file - and comment out the modified external block. - - * Checkout all required components from a user specified externals - description file: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals --externals my-externals.cfg - - * Status summary of the repositories managed by checkout_externals: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals --status - - ./cime - s ./components/cism - ./components/mosart - e-o ./components/rtm - M ./src/fates - e-o ./tools/PTCLM - - where: - * column one indicates the status of the repository in relation - to the externals description file. - * column two indicates whether the working copy has modified files. - * column three shows how the repository is managed, optional or required - - Column one will be one of these values: - * s : out-of-sync : repository is checked out at a different commit - compared with the externals description - * e : empty : directory does not exist - checkout_externals has not been run - * ? 
: unknown : directory exists but .git or .svn directories are missing
-
-  Column two will be one of these values:
-    * M : Modified : modified, added, deleted or missing files
-    *   : blank / space : clean
-    * - : dash : no meaningful state, for empty repositories
-
-  Column three will be one of these values:
-    * o : optional : optional repository
-    *   : blank / space : required repository
-
-  * Detailed git or svn status of the repositories managed by checkout_externals:
-
-        $ cd ${SRC_ROOT}
-        $ ./manage_externals/checkout_externals --status --verbose
-
-# Externals description file
-
-  The externals description contains a list of the external
-  repositories that are used and their version control locations. The
-  file format is the standard ini/cfg configuration file format. Each
-  external is defined by a section containing the component name in
-  square brackets:
-
-  * name (string) : component name, e.g. [cime], [cism], etc.
-
-  Each section has the following keyword-value pairs:
-
-  * required (boolean) : whether the component is a required checkout,
-    'true' or 'false'.
-
-  * local_path (string) : component path *relative* to where
-    checkout_externals is called.
-
-  * protocol (string) : version control protocol that is used to
-    manage the component. Valid values are 'git', 'svn',
-    'externals_only'.
-
-    Switching an external between different protocols is not
-    supported, e.g. from svn to git. To switch protocols, you need to
-    manually move the old working copy to a new location.
-
-    Note: 'externals_only' will only process the external's own
-    external description file without trying to manage a repository
-    for the component. This is used for retrieving externals for
-    standalone components like cam and clm. If the source root of the
-    externals_only component is the same as the main source root, then
-    the local path must be set to '.', the unix current working
-    directory, e.g. 'local_path = .'
-
-  * repo_url (string) : URL for the repository location, examples:
-    * https://svn-ccsm-models.cgd.ucar.edu/glc
-    * git@github.com:esmci/cime.git
-    * /path/to/local/repository
-    * .
-
-    NOTE: To operate on only the local clone and ignore remote
-    repositories, set the url to '.' (the unix current path),
-    i.e. 'repo_url = .' . This can be used to checkout a local branch
-    instead of the upstream branch.
-
-    If a repo url is determined to be a local path (not a network url)
-    then user expansion, e.g. ~/, and environment variable expansion,
-    e.g. $HOME or $REPO_ROOT, will be performed.
-
-    Relative paths are difficult to get correct, especially for mixed
-    use repos. It is advised that local paths expand to absolute paths.
-    If relative paths are used, they should be relative to one level
-    above local_path. If local path is 'src/foo', the relative url
-    should be relative to 'src'.
-
-  * tag (string) : tag to checkout
-
-  * hash (string) : the git hash to checkout. Only applies to git
-    repositories.
-
-  * branch (string) : branch to checkout from the specified
-    repository. Specifying a branch on a remote repository means that
-    checkout_externals will checkout the version of the branch in the remote,
-    not the version in the local repository (if it exists).
-
-    Note: one and only one of tag, branch, hash must be supplied.
-
-  * externals (string) : used to make manage_externals aware of
-    sub-externals required by an external. This is a relative path to
-    the external's root directory. For example, the main externals
-    description has an external checked out at 'src/useful_library'.
-    useful_library requires additional externals to be complete.
-    Those additional externals are managed from the source root by the
-    externals description file pointed to by 'useful_library/sub-externals.cfg'.
-    The main 'externals' field in the top level repo should then point to
-    'sub-externals.cfg'.
-    Note that by default, `checkout_externals` will clone an external's
-    submodules. As a special case, the entry, `externals = None`, will
-    prevent this behavior. For more control over which externals are
-    checked out, create an externals file (and see the `from_submodule`
-    configuration entry below).
-
-  * from_submodule (True / False) : used to pull the repo_url, local_path,
-    and hash properties for this external from the .gitmodules file in
-    this repository. Note that the section name (the entry in square
-    brackets) must match the name in the .gitmodules file.
-    If from_submodule is True, the protocol must be git and no repo_url,
-    local_path, hash, branch, or tag entries are allowed.
-    Default: False
-
-  * sparse (string) : used to control a sparse checkout. This optional
-    entry should point to a filename (path relative to local_path) that
-    contains instructions on which repository paths to include (or
-    exclude) from the working tree.
-    See the "SPARSE CHECKOUT" section of https://git-scm.com/docs/git-read-tree
-    Default: sparse checkout is disabled
-
-  * Lines beginning with '#' or ';' are comments and will be ignored.
-
-# Obtaining this tool, reporting issues, etc.
-
-  The master repository for manage_externals is
-  https://github.com/ESMCI/manage_externals. Any issues with this tool
-  should be reported there.
diff --git a/manage_externals/README_FIRST b/manage_externals/README_FIRST
deleted file mode 100644
index c8a47d7806..0000000000
--- a/manage_externals/README_FIRST
+++ /dev/null
@@ -1,54 +0,0 @@
-CESM is comprised of a number of different components that are
-developed and managed independently. Each component may have
-additional 'external' dependencies and optional parts that are also
-developed and managed independently.
-
-The checkout_externals.py tool manages retrieving and updating the
-components and their externals so you have a complete set of source
-files for the model.
-
-checkout_externals.py relies on a model description file that
-describes what components are needed, where to find them and where to
-put them in the source tree. The default file is called "CESM.xml"
-regardless of whether you are checking out CESM or a standalone
-component.
-
-checkout_externals requires access to git and svn repositories that
-require authentication. checkout_externals may pass through
-authentication requests, but it will not cache them for you. For the
-best and most robust user experience, you should have svn and git
-working without password authentication. See:
-
-  https://help.github.com/articles/connecting-to-github-with-ssh/
-
-  ?svn ref?
-
-NOTE: checkout_externals.py *MUST* be run from the root of the source
-tree it is managing. For example, if you cloned CLM with:
-
-  $ git clone git@github.com/ncar/clm clm-dev
-
-Then the root of the source tree is /path/to/clm-dev. If you obtained
-CLM via an svn checkout of CESM and you need to checkout the CLM
-externals, then the root of the source tree for CLM is:
-
-  /path/to/cesm-dev/components/clm
-
-The root of the source tree will be referred to as ${SRC_ROOT} below.
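The README.md text being deleted above documents the Externals.cfg schema (one section per external, with required, local_path, protocol, repo_url, and exactly one of tag/branch/hash). As a minimal sketch of consuming that format with only Python's standard library, roughly in the spirit of manic's externals_description module: the [cime] values below echo the Externals.cfg removed earlier in this patch, and the script itself is illustrative, not part of the tool.

```python
# Minimal sketch: parse an externals description in the format documented
# above using only the standard library. Illustrative, not manic's API.
from configparser import ConfigParser
from io import StringIO

EXAMPLE_CFG = """
[cime]
local_path = cime
protocol = git
repo_url = https://github.com/ESMCI/cime
tag = cime6.0.238_httpsbranch01
required = True

[externals_description]
schema_version = 1.0.0
"""

config = ConfigParser()
config.read_file(StringIO(EXAMPLE_CFG))

# The schema version lives in its own required section.
print(config.get("externals_description", "schema_version"))  # -> 1.0.0

for name in config.sections():
    if name == "externals_description":
        continue
    # Per the documentation above, exactly one of tag / branch / hash is set.
    version_keys = [k for k in ("tag", "branch", "hash")
                    if config.has_option(name, k)]
    assert len(version_keys) == 1, name
    print(name, config.get(name, "protocol"),
          config.get(name, version_keys[0]))
```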
- -To get started quickly, checkout all required components from the -default model description file: - - $ cd ${SRC_ROOT} - $ ./checkout_cesm/checkout_externals.py - -For additional information about using checkout model, please see: - - ${SRC_ROOT}/checkout_cesm/README - -or run: - - $ cd ${SRC_ROOT} - $ ./checkout_cesm/checkout_externals.py --help - - diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals deleted file mode 100755 index 48bce24010..0000000000 --- a/manage_externals/checkout_externals +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python3 - -"""Main driver wrapper around the manic/checkout utility. - -Tool to assemble external respositories represented in an externals -description file. - -""" -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import sys -import traceback - -import manic - -if sys.hexversion < 0x02070000: - print(70 * '*') - print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0])) - print('It appears that you are running python {0}'.format( - '.'.join(str(x) for x in sys.version_info[0:3]))) - print(70 * '*') - sys.exit(1) - - -if __name__ == '__main__': - ARGS = manic.checkout.commandline_arguments() - try: - RET_STATUS, _ = manic.checkout.main(ARGS) - sys.exit(RET_STATUS) - except Exception as error: # pylint: disable=broad-except - manic.printlog(str(error)) - if ARGS.backtrace: - traceback.print_exc() - sys.exit(1) diff --git a/manage_externals/manic/__init__.py b/manage_externals/manic/__init__.py deleted file mode 100644 index 11badedd3b..0000000000 --- a/manage_externals/manic/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Public API for the manage_externals library -""" - -from manic import checkout -from manic.utils import printlog - -__all__ = [ - 'checkout', 'printlog', -] diff --git a/manage_externals/manic/checkout.py b/manage_externals/manic/checkout.py deleted file mode 100755 index 3f5537adce..0000000000 --- a/manage_externals/manic/checkout.py +++ /dev/null @@ -1,446 +0,0 @@ -#!/usr/bin/env python3 - -""" -Tool to assemble repositories represented in a model-description file. - -If loaded as a module (e.g., in a component's buildcpp), it can be used -to check the validity of existing subdirectories and load missing sources. -""" -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import argparse -import logging -import os -import os.path -import sys - -from manic.externals_description import create_externals_description -from manic.externals_description import read_externals_description_file -from manic.externals_status import check_safe_to_update_repos -from manic.sourcetree import SourceTree -from manic.utils import printlog, fatal_error -from manic.global_constants import VERSION_SEPERATOR, LOG_FILE_NAME - -if sys.hexversion < 0x02070000: - print(70 * '*') - print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0])) - print('It appears that you are running python {0}'.format( - VERSION_SEPERATOR.join(str(x) for x in sys.version_info[0:3]))) - print(70 * '*') - sys.exit(1) - - -# --------------------------------------------------------------------- -# -# User input -# -# --------------------------------------------------------------------- -def commandline_arguments(args=None): - """Process the command line arguments - - Params: args - optional args. Should only be used during systems - testing. 
-
-    Returns: processed command line arguments
-    """
-    description = '''
-
-%(prog)s manages checking out groups of externals from revision
-control based on an externals description file. By default only the
-required externals are checked out.
-
-Running %(prog)s without the '--status' option will always attempt to
-synchronize the working copy to exactly match the externals description.
-'''
-
-    epilog = '''
-```
-NOTE: %(prog)s *MUST* be run from the root of the source tree it
-is managing. For example, if you cloned a repository with:
-
-    $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev
-
-Then the root of the source tree is /path/to/some-project-dev. If you
-obtained a sub-project via a checkout of another project:
-
-    $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev
-
-and you need to checkout the sub-project externals, then the root of the
-source tree remains /path/to/some-project-dev. Do *NOT* run %(prog)s
-from within /path/to/some-project-dev/sub-project
-
-The root of the source tree will be referred to as `${SRC_ROOT}` below.
-
-
-# Supported workflows
-
-  * Checkout all required components from the default externals
-    description file:
-
-        $ cd ${SRC_ROOT}
-        $ ./manage_externals/%(prog)s
-
-  * To update all required components to the current values in the
-    externals description file, re-run %(prog)s:
-
-        $ cd ${SRC_ROOT}
-        $ ./manage_externals/%(prog)s
-
-    If there are *any* modifications to *any* working copy according
-    to the git or svn 'status' command, %(prog)s
-    will not update any external repositories. Modifications
-    include: modified files, added files, removed files, or missing
-    files.
-
-    To avoid this safety check, edit the externals description file
-    and comment out the modified external block.
-
-  * Checkout all required components from a user specified externals
-    description file:
-
-        $ cd ${SRC_ROOT}
-        $ ./manage_externals/%(prog)s --externals my-externals.cfg
-
-  * Status summary of the repositories managed by %(prog)s:
-
-        $ cd ${SRC_ROOT}
-        $ ./manage_externals/%(prog)s --status
-
-              ./cime
-          s   ./components/cism
-              ./components/mosart
-          e-o ./components/rtm
-           M  ./src/fates
-          e-o ./tools/PTCLM
-
-
-    where:
-      * column one indicates the status of the repository in relation
-        to the externals description file.
-      * column two indicates whether the working copy has modified files.
-      * column three shows how the repository is managed, optional or required
-
-    Column one will be one of these values:
-      * s : out-of-sync : repository is checked out at a different commit
-        compared with the externals description
-      * e : empty : directory does not exist - %(prog)s has not been run
-      * ? : unknown : directory exists but .git or .svn directories are missing
-
-    Column two will be one of these values:
-      * M : Modified : modified, added, deleted or missing files
-      *   : blank / space : clean
-      * - : dash : no meaningful state, for empty repositories
-
-    Column three will be one of these values:
-      * o : optional : optional repository
-      *   : blank / space : required repository
-
-  * Detailed git or svn status of the repositories managed by %(prog)s:
-
-        $ cd ${SRC_ROOT}
-        $ ./manage_externals/%(prog)s --status --verbose
-
-# Externals description file
-
-  The externals description contains a list of the external
-  repositories that are used and their version control locations. The
-  file format is the standard ini/cfg configuration file format. Each
-  external is defined by a section containing the component name in
-  square brackets:
-
-  * name (string) : component name, e.g. [cime], [cism], etc.
-
-  Each section has the following keyword-value pairs:
-
-  * required (boolean) : whether the component is a required checkout,
-    'true' or 'false'.
-
-  * local_path (string) : component path *relative* to where
-    %(prog)s is called.
-
-  * protocol (string) : version control protocol that is used to
-    manage the component. Valid values are 'git', 'svn',
-    'externals_only'.
-
-    Switching an external between different protocols is not
-    supported, e.g. from svn to git. To switch protocols, you need to
-    manually move the old working copy to a new location.
-
-    Note: 'externals_only' will only process the external's own
-    external description file without trying to manage a repository
-    for the component. This is used for retrieving externals for
-    standalone components like cam and ctsm which also serve as
-    sub-components within a larger project. If the source root of the
-    externals_only component is the same as the main source root, then
-    the local path must be set to '.', the unix current working
-    directory, e.g. 'local_path = .'
-
-  * repo_url (string) : URL for the repository location, examples:
-    * https://svn-ccsm-models.cgd.ucar.edu/glc
-    * git@github.com:esmci/cime.git
-    * /path/to/local/repository
-    * .
-
-    NOTE: To operate on only the local clone and ignore remote
-    repositories, set the url to '.' (the unix current path),
-    i.e. 'repo_url = .' . This can be used to checkout a local branch
-    instead of the upstream branch.
-
-    If a repo url is determined to be a local path (not a network url)
-    then user expansion, e.g. ~/, and environment variable expansion,
-    e.g. $HOME or $REPO_ROOT, will be performed.
-
-    Relative paths are difficult to get correct, especially for mixed
-    use repos. It is advised that local paths expand to absolute paths.
-    If relative paths are used, they should be relative to one level
-    above local_path. If local path is 'src/foo', the relative url
-    should be relative to 'src'.
-
-  * tag (string) : tag to checkout
-
-  * hash (string) : the git hash to checkout. Only applies to git
-    repositories.
-
-  * branch (string) : branch to checkout from the specified
-    repository. Specifying a branch on a remote repository means that
-    %(prog)s will checkout the version of the branch in the remote,
-    not the version in the local repository (if it exists).
-
-    Note: one and only one of tag, branch, hash must be supplied.
-
-  * externals (string) : used to make manage_externals aware of
-    sub-externals required by an external. This is a relative path to
-    the external's root directory. For example, if LIBX is often used
-    as a sub-external, it might have an externals file (for its
-    externals) called Externals_LIBX.cfg. To use libx as a standalone
-    checkout, it would have another file, Externals.cfg with the
-    following entry:
-
-    [ libx ]
-    local_path = .
-    protocol = externals_only
-    externals = Externals_LIBX.cfg
-    required = True
-
-    Now, %(prog)s will process Externals.cfg and also process
-    Externals_LIBX.cfg as if it was a sub-external.
-
-    Note that by default, checkout_externals will clone an external's
-    submodules. As a special case, the entry, "externals = None", will
-    prevent this behavior. For more control over which externals are
-    checked out, create an externals file (and see the from_submodule
-    configuration entry below).
- - * from_submodule (True / False) : used to pull the repo_url, local_path, - and hash properties for this external from the .gitmodules file in - this repository. Note that the section name (the entry in square - brackets) must match the name in the .gitmodules file. - If from_submodule is True, the protocol must be git and no repo_url, - local_path, hash, branch, or tag entries are allowed. - Default: False - - * sparse (string) : used to control a sparse checkout. This optional - entry should point to a filename (path relative to local_path) that - contains instructions on which repository paths to include (or - exclude) from the working tree. - See the "SPARSE CHECKOUT" section of https://git-scm.com/docs/git-read-tree - Default: sparse checkout is disabled - - * Lines beginning with '#' or ';' are comments and will be ignored. - -# Obtaining this tool, reporting issues, etc. - - The master repository for manage_externals is - https://github.com/ESMCI/manage_externals. Any issues with this tool - should be reported there. - -# Troubleshooting - -Operations performed by manage_externals utilities are explicit and -data driven. %(prog)s will always attempt to make the working copy -*exactly* match what is in the externals file when modifying the -working copy of a repository. - -If %(prog)s is not doing what you expected, double check the contents -of the externals description file or examine the output of -./manage_externals/%(prog)s --status - -''' - - parser = argparse.ArgumentParser( - description=description, epilog=epilog, - formatter_class=argparse.RawDescriptionHelpFormatter) - - # - # user options - # - parser.add_argument("components", nargs="*", - help="Specific component(s) to checkout. By default, " - "all required externals are checked out.") - - parser.add_argument('-e', '--externals', nargs='?', - default='Externals.cfg', - help='The externals description filename. ' - 'Default: %(default)s.') - - parser.add_argument('-x', '--exclude', nargs='*', - help='Component(s) listed in the externals file which should be ignored.') - - parser.add_argument('-o', '--optional', action='store_true', default=False, - help='By default only the required externals ' - 'are checked out. This flag will also checkout the ' - 'optional externals.') - - parser.add_argument('-S', '--status', action='store_true', default=False, - help='Output the status of the repositories managed by ' - '%(prog)s. By default only summary information ' - 'is provided. Use the verbose option to see details.') - - parser.add_argument('-v', '--verbose', action='count', default=0, - help='Output additional information to ' - 'the screen and log file. This flag can be ' - 'used up to two times, increasing the ' - 'verbosity level each time.') - - parser.add_argument('--svn-ignore-ancestry', action='store_true', default=False, - help='By default, subversion will abort if a component is ' - 'already checked out and there is no common ancestry with ' - 'the new URL. This flag passes the "--ignore-ancestry" flag ' - 'to the svn switch call. 
(This is not recommended unless ' - 'you are sure about what you are doing.)') - - # - # developer options - # - parser.add_argument('--backtrace', action='store_true', - help='DEVELOPER: show exception backtraces as extra ' - 'debugging output') - - parser.add_argument('-d', '--debug', action='store_true', default=False, - help='DEVELOPER: output additional debugging ' - 'information to the screen and log file.') - - logging_group = parser.add_mutually_exclusive_group() - - logging_group.add_argument('--logging', dest='do_logging', - action='store_true', - help='DEVELOPER: enable logging.') - logging_group.add_argument('--no-logging', dest='do_logging', - action='store_false', default=False, - help='DEVELOPER: disable logging ' - '(this is the default)') - - if args: - options = parser.parse_args(args) - else: - options = parser.parse_args() - return options - -def _dirty_local_repo_msg(program_name, config_file): - return """The external repositories labeled with 'M' above are not in a clean state. -The following are four options for how to proceed: -(1) Go into each external that is not in a clean state and issue either a 'git status' or - an 'svn status' command (depending on whether the external is managed by git or - svn). Either revert or commit your changes so that all externals are in a clean - state. (To revert changes in git, follow the instructions given when you run 'git - status'.) (Note, though, that it is okay to have untracked files in your working - directory.) Then rerun {program_name}. -(2) Alternatively, you do not have to rely on {program_name}. Instead, you can manually - update out-of-sync externals (labeled with 's' above) as described in the - configuration file {config_file}. (For example, run 'git fetch' and 'git checkout' - commands to checkout the appropriate tags for each external, as given in - {config_file}.) -(3) You can also use {program_name} to manage most, but not all externals: You can specify - one or more externals to ignore using the '-x' or '--exclude' argument to - {program_name}. Excluding externals labeled with 'M' will allow {program_name} to - update the other, non-excluded externals. -(4) As a last resort, if you are confident that there is no work that needs to be saved - from a given external, you can remove that external (via "rm -rf [directory]") and - then rerun the {program_name} tool. This option is mainly useful as a workaround for - issues with this tool (such as https://github.com/ESMCI/manage_externals/issues/157). -The external repositories labeled with '?' above are not under version -control using the expected protocol. If you are sure you want to switch -protocols, and you don't have any work you need to save from this -directory, then run "rm -rf [directory]" before rerunning the -{program_name} tool. -""".format(program_name=program_name, config_file=config_file) -# --------------------------------------------------------------------- -# -# main -# -# --------------------------------------------------------------------- -def main(args): - """ - Function to call when module is called from the command line. - Parse externals file and load required repositories or all repositories if - the --all option is passed. - - Returns a tuple (overall_status, tree_status). overall_status is 0 - on success, non-zero on failure. tree_status is a dict mapping local path - to ExternalStatus -- if no checkout is happening. If checkout is happening, tree_status - is None. 
- """ - if args.do_logging: - logging.basicConfig(filename=LOG_FILE_NAME, - format='%(levelname)s : %(asctime)s : %(message)s', - datefmt='%Y-%m-%d %H:%M:%S', - level=logging.DEBUG) - - program_name = os.path.basename(sys.argv[0]) - logging.info('Beginning of %s', program_name) - - load_all = False - if args.optional: - load_all = True - - root_dir = os.path.abspath(os.getcwd()) - model_data = read_externals_description_file(root_dir, args.externals) - ext_description = create_externals_description( - model_data, components=args.components, exclude=args.exclude) - - for comp in args.components: - if comp not in ext_description.keys(): - # Note we can't print out the list of found externals because - # they were filtered in create_externals_description above. - fatal_error( - "No component {} found in {}".format( - comp, args.externals)) - - source_tree = SourceTree(root_dir, ext_description, svn_ignore_ancestry=args.svn_ignore_ancestry) - if args.components: - components_str = 'specified components' - else: - components_str = 'required & optional components' - printlog('Checking local status of ' + components_str + ': ', end='') - tree_status = source_tree.status(print_progress=True) - printlog('') - - if args.status: - # user requested status-only - for comp in sorted(tree_status): - tree_status[comp].log_status_message(args.verbose) - else: - # checkout / update the external repositories. - safe_to_update = check_safe_to_update_repos(tree_status) - if not safe_to_update: - # print status - for comp in sorted(tree_status): - tree_status[comp].log_status_message(args.verbose) - # exit gracefully - printlog('-' * 70) - printlog(_dirty_local_repo_msg(program_name, args.externals)) - printlog('-' * 70) - else: - if not args.components: - source_tree.checkout(args.verbose, load_all) - for comp in args.components: - source_tree.checkout(args.verbose, load_all, load_comp=comp) - printlog('') - # New tree status is unknown, don't return anything. - tree_status = None - - logging.info('%s completed without exceptions.', program_name) - # NOTE(bja, 2017-11) tree status is used by the systems tests - return 0, tree_status diff --git a/manage_externals/manic/externals_description.py b/manage_externals/manic/externals_description.py deleted file mode 100644 index 546e7fdcb4..0000000000 --- a/manage_externals/manic/externals_description.py +++ /dev/null @@ -1,830 +0,0 @@ -#!/usr/bin/env python3 - -"""Model description - -Model description is the representation of the various externals -included in the model. It processes in input data structure, and -converts it into a standard interface that is used by the rest of the -system. - -To maintain backward compatibility, externals description files should -follow semantic versioning rules, http://semver.org/ - - - -""" -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import logging -import os -import os.path -import re - -# ConfigParser in python2 was renamed to configparser in python3. -# In python2, ConfigParser returns byte strings, str, instead of unicode. -# We need unicode to be compatible with xml and json parser and python3. 
-try: - # python2 - from ConfigParser import SafeConfigParser as config_parser - from ConfigParser import MissingSectionHeaderError - from ConfigParser import NoSectionError, NoOptionError - - USE_PYTHON2 = True - - def config_string_cleaner(text): - """convert strings into unicode - """ - return text.decode('utf-8') -except ImportError: - # python3 - from configparser import ConfigParser as config_parser - from configparser import MissingSectionHeaderError - from configparser import NoSectionError, NoOptionError - - USE_PYTHON2 = False - - def config_string_cleaner(text): - """Python3 already uses unicode strings, so just return the string - without modification. - - """ - return text - -from .utils import printlog, fatal_error, str_to_bool, expand_local_url -from .utils import execute_subprocess -from .global_constants import EMPTY_STR, PPRINTER, VERSION_SEPERATOR - -# -# Globals -# -DESCRIPTION_SECTION = 'externals_description' -VERSION_ITEM = 'schema_version' - - -def read_externals_description_file(root_dir, file_name): - """Read a file containing an externals description and - create its internal representation. - - """ - root_dir = os.path.abspath(root_dir) - msg = 'In directory : {0}'.format(root_dir) - logging.info(msg) - printlog('Processing externals description file : {0} ({1})'.format(file_name, - root_dir)) - - file_path = os.path.join(root_dir, file_name) - if not os.path.exists(file_name): - if file_name.lower() == "none": - msg = ('INTERNAL ERROR: Attempt to read externals file ' - 'from {0} when not configured'.format(file_path)) - else: - msg = ('ERROR: Model description file, "{0}", does not ' - 'exist at path:\n {1}\nDid you run from the root of ' - 'the source tree?'.format(file_name, file_path)) - - fatal_error(msg) - - externals_description = None - if file_name == ExternalsDescription.GIT_SUBMODULES_FILENAME: - externals_description = _read_gitmodules_file(root_dir, file_name) - else: - try: - config = config_parser() - config.read(file_path) - externals_description = config - except MissingSectionHeaderError: - # not a cfg file - pass - - if externals_description is None: - msg = 'Unknown file format!' - fatal_error(msg) - - return externals_description - -class LstripReader(object): - "LstripReader formats .gitmodules files to be acceptable for configparser" - def __init__(self, filename): - with open(filename, 'r') as infile: - lines = infile.readlines() - self._lines = list() - self._num_lines = len(lines) - self._index = 0 - for line in lines: - self._lines.append(line.lstrip()) - - def readlines(self): - """Return all the lines from this object's file""" - return self._lines - - def readline(self, size=-1): - """Format and return the next line or raise StopIteration""" - try: - line = self.next() - except StopIteration: - line = '' - - if (size > 0) and (len(line) < size): - return line[0:size] - - return line - - def __iter__(self): - """Begin an iteration""" - self._index = 0 - return self - - def next(self): - """Return the next line or raise StopIteration""" - if self._index >= self._num_lines: - raise StopIteration - - self._index = self._index + 1 - return self._lines[self._index - 1] - - def __next__(self): - return self.next() - -def git_submodule_status(repo_dir): - """Run the git submodule status command to obtain submodule hashes. 
- """ - # This function is here instead of GitRepository to avoid a dependency loop - cmd = 'git -C {repo_dir} submodule status'.format( - repo_dir=repo_dir).split() - git_output = execute_subprocess(cmd, output_to_caller=True) - submodules = {} - submods = git_output.split('\n') - for submod in submods: - if submod: - status = submod[0] - items = submod[1:].split(' ') - if len(items) > 2: - tag = items[2] - else: - tag = None - - submodules[items[1]] = {'hash':items[0], 'status':status, 'tag':tag} - - return submodules - -def parse_submodules_desc_section(section_items, file_path): - """Find the path and url for this submodule description""" - path = None - url = None - for item in section_items: - name = item[0].strip().lower() - if name == 'path': - path = item[1].strip() - elif name == 'url': - url = item[1].strip() - elif name == 'branch': - # We do not care about branch since we have a hash - silently ignore - pass - else: - msg = 'WARNING: Ignoring unknown {} property, in {}' - msg = msg.format(item[0], file_path) # fool pylint - logging.warning(msg) - - return path, url - -def _read_gitmodules_file(root_dir, file_name): - # pylint: disable=deprecated-method - # Disabling this check because the method is only used for python2 - # pylint: disable=too-many-locals - # pylint: disable=too-many-branches - # pylint: disable=too-many-statements - """Read a .gitmodules file and convert it to be compatible with an - externals description. - """ - root_dir = os.path.abspath(root_dir) - msg = 'In directory : {0}'.format(root_dir) - logging.info(msg) - - file_path = os.path.join(root_dir, file_name) - if not os.path.exists(file_name): - msg = ('ERROR: submodules description file, "{0}", does not ' - 'exist in dir:\n {1}'.format(file_name, root_dir)) - fatal_error(msg) - - submodules_description = None - externals_description = None - try: - config = config_parser() - if USE_PYTHON2: - config.readfp(LstripReader(file_path), filename=file_name) - else: - config.read_file(LstripReader(file_path), source=file_name) - - submodules_description = config - except MissingSectionHeaderError: - # not a cfg file - pass - - if submodules_description is None: - msg = 'Unknown file format!' 
-        fatal_error(msg)
-    else:
-        # Convert the submodules description to an externals description
-        externals_description = config_parser()
-        # We need to grab all the commit hashes for this repo
-        submods = git_submodule_status(root_dir)
-        for section in submodules_description.sections():
-            if section[0:9] == 'submodule':
-                sec_name = section[9:].strip(' "')
-                externals_description.add_section(sec_name)
-                section_items = submodules_description.items(section)
-                path, url = parse_submodules_desc_section(section_items,
-                                                          file_path)
-
-                if path is None:
-                    msg = 'Submodule {} missing path'.format(sec_name)
-                    fatal_error(msg)
-
-                if url is None:
-                    msg = 'Submodule {} missing url'.format(sec_name)
-                    fatal_error(msg)
-
-                externals_description.set(sec_name,
-                                          ExternalsDescription.PATH, path)
-                externals_description.set(sec_name,
-                                          ExternalsDescription.PROTOCOL, 'git')
-                externals_description.set(sec_name,
-                                          ExternalsDescription.REPO_URL, url)
-                externals_description.set(sec_name,
-                                          ExternalsDescription.REQUIRED, 'True')
-                if sec_name in submods:
-                    submod_name = sec_name
-                else:
-                    # The section name does not have to match the path
-                    submod_name = path
-
-                if submod_name in submods:
-                    git_hash = submods[submod_name]['hash']
-                    externals_description.set(sec_name,
-                                              ExternalsDescription.HASH,
-                                              git_hash)
-                else:
-                    emsg = "submodule status has no section, '{}'"
-                    emsg += "\nCheck section names in externals config file"
-                    fatal_error(emsg.format(submod_name))
-
-    # Required items
-    externals_description.add_section(DESCRIPTION_SECTION)
-    externals_description.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.0')
-
-    return externals_description
-
-def create_externals_description(
-        model_data, model_format='cfg', components=None, exclude=None, parent_repo=None):
-    """Create an externals description object from the provided data
-
-    components: list of component names to include, None to include all. If a
-    name isn't found, it is silently omitted from the return value.
-    exclude: list of component names to skip.
-    """
-    externals_description = None
-    if model_format == 'dict':
-        externals_description = ExternalsDescriptionDict(
-            model_data, components=components, exclude=exclude)
-    elif model_format == 'cfg':
-        major, _, _ = get_cfg_schema_version(model_data)
-        if major == 1:
-            externals_description = ExternalsDescriptionConfigV1(
-                model_data, components=components, exclude=exclude, parent_repo=parent_repo)
-        else:
-            msg = ('Externals description file has unsupported schema '
-                   'version "{0}".'.format(major))
-            fatal_error(msg)
-    else:
-        msg = 'Unknown model data format "{0}"'.format(model_format)
-        fatal_error(msg)
-    return externals_description
-
-
-def get_cfg_schema_version(model_cfg):
-    """Extract the major, minor, patch version of the config file schema
-
-    Params:
-    model_cfg - config parser object containing the externals description data
-
-    Returns:
-    major = integer major version
-    minor = integer minor version
-    patch = integer patch version
-    """
-    semver_str = ''
-    try:
-        semver_str = model_cfg.get(DESCRIPTION_SECTION, VERSION_ITEM)
-    except (NoSectionError, NoOptionError):
-        msg = ('externals description file must have the required '
-               'section: "{0}" and item "{1}"'.format(DESCRIPTION_SECTION,
-                                                      VERSION_ITEM))
-        fatal_error(msg)
-
-    # NOTE(bja, 2017-11) Assume we don't care about the
-    # build/pre-release metadata for now!
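
[Editor's note: the code that follows implements that note; a quick illustration of the intended behavior, with a made-up version string:]

    import re
    # pre-release ('-alpha') and build ('+001') metadata are discarded before
    # the dotted version is split into its integer fields
    assert re.split(r'[-+]', '1.0.2-alpha+001')[0] == '1.0.2'
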
-    version_list = re.split(r'[-+]', semver_str)
-    version_str = version_list[0]
-    version = version_str.split(VERSION_SEPERATOR)
-    try:
-        major = int(version[0].strip())
-        minor = int(version[1].strip())
-        patch = int(version[2].strip())
-    except ValueError:
-        msg = ('Config file schema version must have integer digits for '
-               'major, minor and patch versions. '
-               'Received "{0}"'.format(version_str))
-        fatal_error(msg)
-    return major, minor, patch
-
-
-class ExternalsDescription(dict):
-    """Base externals description class that is independent of the user input
-    format. Different input formats can all be converted to this
-    representation to provide a consistent representation for the
-    rest of the objects in the system.
-
-    NOTE(bja, 2018-03): do NOT define _schema_major etc at the class
-    level in the base class. The nested/recursive nature of externals
-    means different schema versions may be present in a single run!
-
-    All inheriting classes must overwrite:
-        self._schema_major and self._input_major
-        self._schema_minor and self._input_minor
-        self._schema_patch and self._input_patch
-
-    where _schema_x is the supported schema, _input_x is the user
-    input value.
-
-    """
-    # keywords defining the interface into the externals description data; these
-    # are brought together by the schema below.
-    EXTERNALS = 'externals'  # path to externals file.
-    BRANCH = 'branch'
-    SUBMODULE = 'from_submodule'
-    HASH = 'hash'
-    NAME = 'name'
-    PATH = 'local_path'
-    PROTOCOL = 'protocol'
-    REPO = 'repo'
-    REPO_URL = 'repo_url'
-    REQUIRED = 'required'
-    TAG = 'tag'
-    SPARSE = 'sparse'
-
-    PROTOCOL_EXTERNALS_ONLY = 'externals_only'
-    PROTOCOL_GIT = 'git'
-    PROTOCOL_SVN = 'svn'
-    GIT_SUBMODULES_FILENAME = '.gitmodules'
-    KNOWN_PRROTOCOLS = [PROTOCOL_GIT, PROTOCOL_SVN, PROTOCOL_EXTERNALS_ONLY]
-
-    # v1 xml keywords
-    _V1_TREE_PATH = 'TREE_PATH'
-    _V1_ROOT = 'ROOT'
-    _V1_TAG = 'TAG'
-    _V1_BRANCH = 'BRANCH'
-    _V1_REQ_SOURCE = 'REQ_SOURCE'
-
-    # Dictionary keys are component names. The corresponding values are laid out
-    # according to this schema.
-    _source_schema = {REQUIRED: True,
-                      PATH: 'string',
-                      EXTERNALS: 'string',
-                      SUBMODULE : True,
-                      REPO: {PROTOCOL: 'string',
-                             REPO_URL: 'string',
-                             TAG: 'string',
-                             BRANCH: 'string',
-                             HASH: 'string',
-                             SPARSE: 'string',
-                             }
-                      }
-
-    def __init__(self, parent_repo=None):
-        """Convert the xml into a standardized dict that can be used to
-        construct the source objects
-
-        """
-        dict.__init__(self)
-
-        self._schema_major = None
-        self._schema_minor = None
-        self._schema_patch = None
-        self._input_major = None
-        self._input_minor = None
-        self._input_patch = None
-        self._parent_repo = parent_repo
-
-    def _verify_schema_version(self):
-        """Use semantic versioning rules to verify we can process this schema.
-
-        """
-        known = '{0}.{1}.{2}'.format(self._schema_major,
-                                     self._schema_minor,
-                                     self._schema_patch)
-        received = '{0}.{1}.{2}'.format(self._input_major,
-                                        self._input_minor,
-                                        self._input_patch)
-
-        if self._input_major != self._schema_major:
-            # should never get here, the factory should handle this correctly!
-            msg = ('DEV_ERROR: version "{0}" parser received '
-                   'version "{1}" input.'.format(known, received))
-            fatal_error(msg)
-
-        if self._input_minor > self._schema_minor:
-            msg = ('Incompatible schema version:\n'
-                   '  User supplied schema version "{0}" is too new."\n'
-                   '  Can only process version "{1}" files and '
-                   'older.'.format(received, known))
-            fatal_error(msg)
-
-        if self._input_patch > self._schema_patch:
-            # NOTE(bja, 2018-03) ignoring for now... 
Not clear what - # conditions the test is needed. - pass - - def _check_user_input(self): - """Run a series of checks to attempt to validate the user input and - detect errors as soon as possible. - - NOTE(bja, 2018-03) These checks are called *after* the file is - read. That means the schema check can not occur here. - - Note: the order is important. check_optional will create - optional with null data. run check_data first to ensure - required data was provided correctly by the user. - - """ - self._check_data() - self._check_optional() - self._validate() - - def _check_data(self): - # pylint: disable=too-many-branches,too-many-statements - """Check user supplied data is valid where possible. - """ - for ext_name in self.keys(): - if (self[ext_name][self.REPO][self.PROTOCOL] - not in self.KNOWN_PRROTOCOLS): - msg = 'Unknown repository protocol "{0}" in "{1}".'.format( - self[ext_name][self.REPO][self.PROTOCOL], ext_name) - fatal_error(msg) - - if (self[ext_name][self.REPO][self.PROTOCOL] == - self.PROTOCOL_SVN): - if self.HASH in self[ext_name][self.REPO]: - msg = ('In repo description for "{0}". svn repositories ' - 'may not include the "hash" keyword.'.format( - ext_name)) - fatal_error(msg) - - if ((self[ext_name][self.REPO][self.PROTOCOL] != self.PROTOCOL_GIT) - and (self.SUBMODULE in self[ext_name])): - msg = ('self.SUBMODULE is only supported with {0} protocol, ' - '"{1}" is defined as an {2} repository') - fatal_error(msg.format(self.PROTOCOL_GIT, ext_name, - self[ext_name][self.REPO][self.PROTOCOL])) - - if (self[ext_name][self.REPO][self.PROTOCOL] != - self.PROTOCOL_EXTERNALS_ONLY): - ref_count = 0 - found_refs = '' - if self.TAG in self[ext_name][self.REPO]: - ref_count += 1 - found_refs = '"{0} = {1}", {2}'.format( - self.TAG, self[ext_name][self.REPO][self.TAG], - found_refs) - if self.BRANCH in self[ext_name][self.REPO]: - ref_count += 1 - found_refs = '"{0} = {1}", {2}'.format( - self.BRANCH, self[ext_name][self.REPO][self.BRANCH], - found_refs) - if self.HASH in self[ext_name][self.REPO]: - ref_count += 1 - found_refs = '"{0} = {1}", {2}'.format( - self.HASH, self[ext_name][self.REPO][self.HASH], - found_refs) - if (self.SUBMODULE in self[ext_name] and - self[ext_name][self.SUBMODULE]): - ref_count += 1 - found_refs = '"{0} = {1}", {2}'.format( - self.SUBMODULE, - self[ext_name][self.SUBMODULE], found_refs) - - if ref_count > 1: - msg = 'Model description is over specified! ' - if self.SUBMODULE in self[ext_name]: - msg += ('from_submodule is not compatible with ' - '"tag", "branch", or "hash" ') - else: - msg += (' Only one of "tag", "branch", or "hash" ' - 'may be specified ') - - msg += 'for repo description of "{0}".'.format(ext_name) - msg = '{0}\nFound: {1}'.format(msg, found_refs) - fatal_error(msg) - elif ref_count < 1: - msg = ('Model description is under specified! One of ' - '"tag", "branch", or "hash" must be specified for ' - 'repo description of "{0}"'.format(ext_name)) - fatal_error(msg) - - if (self.REPO_URL not in self[ext_name][self.REPO] and - (self.SUBMODULE not in self[ext_name] or - not self[ext_name][self.SUBMODULE])): - msg = ('Model description is under specified! Must have ' - '"repo_url" in repo ' - 'description for "{0}"'.format(ext_name)) - fatal_error(msg) - - if (self.SUBMODULE in self[ext_name] and - self[ext_name][self.SUBMODULE]): - if self.REPO_URL in self[ext_name][self.REPO]: - msg = ('Model description is over specified! 
'
-                               'from_submodule keyword is not compatible '
-                               'with {0} keyword for'.format(self.REPO_URL))
-                    msg = '{0} repo description of "{1}"'.format(msg,
-                                                                 ext_name)
-                    fatal_error(msg)
-
-                if self.PATH in self[ext_name]:
-                    msg = ('Model description is over specified! '
-                           'from_submodule keyword is not compatible with '
-                           '{0} keyword for'.format(self.PATH))
-                    msg = '{0} repo description of "{1}"'.format(msg,
-                                                                 ext_name)
-                    fatal_error(msg)
-
-            if self.REPO_URL in self[ext_name][self.REPO]:
-                url = expand_local_url(
-                    self[ext_name][self.REPO][self.REPO_URL], ext_name)
-                self[ext_name][self.REPO][self.REPO_URL] = url
-
-    def _check_optional(self):
-        # pylint: disable=too-many-branches
-        """Some fields like externals, repo:tag repo:branch are
-        (conditionally) optional. We don't want the user to be
-        required to enter them in every externals description file, but
-        still want to validate the input. Check conditions and add
-        default values if appropriate.
-
-        """
-        submod_desc = None  # Only load submodules info once
-        for field in self:
-            # truly optional
-            if self.EXTERNALS not in self[field]:
-                self[field][self.EXTERNALS] = EMPTY_STR
-
-            # git and svn repos must have tags and branches for validation purposes.
-            if self.TAG not in self[field][self.REPO]:
-                self[field][self.REPO][self.TAG] = EMPTY_STR
-            if self.BRANCH not in self[field][self.REPO]:
-                self[field][self.REPO][self.BRANCH] = EMPTY_STR
-            if self.HASH not in self[field][self.REPO]:
-                self[field][self.REPO][self.HASH] = EMPTY_STR
-            if self.REPO_URL not in self[field][self.REPO]:
-                self[field][self.REPO][self.REPO_URL] = EMPTY_STR
-            if self.SPARSE not in self[field][self.REPO]:
-                self[field][self.REPO][self.SPARSE] = EMPTY_STR
-
-            # from_submodule has a complex relationship with other fields
-            if self.SUBMODULE in self[field]:
-                # User wants to use submodule information, is it available?
-                if self._parent_repo is None:
-                    # No parent == no submodule information
-                    PPRINTER.pprint(self[field])
-                    msg = 'No parent submodule for "{0}"'.format(field)
-                    fatal_error(msg)
-                elif self._parent_repo.protocol() != self.PROTOCOL_GIT:
-                    PPRINTER.pprint(self[field])
-                    msg = 'Parent protocol, "{0}", does not support submodules'
-                    fatal_error(msg.format(self._parent_repo.protocol()))
-                else:
-                    args = self._repo_config_from_submodule(field, submod_desc)
-                    repo_url, repo_path, ref_hash, submod_desc = args
-
-                    if repo_url is None:
-                        msg = ('Cannot checkout "{0}" as a submodule, '
-                               'repo not found in {1} file')
-                        fatal_error(msg.format(field,
-                                               self.GIT_SUBMODULES_FILENAME))
-                    # Fill in submodule fields
-                    self[field][self.REPO][self.REPO_URL] = repo_url
-                    self[field][self.REPO][self.HASH] = ref_hash
-                    self[field][self.PATH] = repo_path
-
-                if self[field][self.SUBMODULE]:
-                    # We should get everything from the parent submodule
-                    # configuration.
-                    pass
-                # No else (from _submodule = False is the default)
-            else:
-                # Add the default value (not using submodule information)
-                self[field][self.SUBMODULE] = False
-
-    def _repo_config_from_submodule(self, field, submod_desc):
-        """Find the external config information for a repository from
-        its submodule configuration information.
-        """
-        if submod_desc is None:
-            repo_path = os.getcwd()  # Is this always correct?
-            submod_file = self._parent_repo.submodules_file(repo_path=repo_path)
-            if submod_file is None:
-                msg = ('Cannot checkout "{0}" from submodule information\n'
-                       '    Parent repo, "{1}" does not have submodules')
-                fatal_error(msg.format(field, self._parent_repo.name()))
-
-            printlog(
-                'Processing submodules description file : {0} ({1})'.format(
-                    submod_file, repo_path))
-            submod_model_data = _read_gitmodules_file(repo_path, submod_file)
-            submod_desc = create_externals_description(submod_model_data)
-
-        # Can we find our external?
-        repo_url = None
-        repo_path = None
-        ref_hash = None
-        for ext_field in submod_desc:
-            if field == ext_field:
-                ext = submod_desc[ext_field]
-                repo_url = ext[self.REPO][self.REPO_URL]
-                repo_path = ext[self.PATH]
-                ref_hash = ext[self.REPO][self.HASH]
-                break
-
-        return repo_url, repo_path, ref_hash, submod_desc
-
-    def _validate(self):
-        """Validate that the parsed externals description contains all necessary
-        fields.
-
-        """
-        def print_compare_difference(data_a, data_b, loc_a, loc_b):
-            """Look through the data structures and print the differences.
-
-            """
-            for item in data_a:
-                if item in data_b:
-                    if not isinstance(data_b[item], type(data_a[item])):
-                        printlog("    {item}: {loc} = {val} ({val_type})".format(
-                            item=item, loc=loc_a, val=data_a[item],
-                            val_type=type(data_a[item])))
-                        printlog("    {item}  {loc} = {val} ({val_type})".format(
-                            item=' ' * len(item), loc=loc_b, val=data_b[item],
-                            val_type=type(data_b[item])))
-                else:
-                    printlog("    {item}: {loc} = {val} ({val_type})".format(
-                        item=item, loc=loc_a, val=data_a[item],
-                        val_type=type(data_a[item])))
-                    printlog("    {item}  {loc} missing".format(
-                        item=' ' * len(item), loc=loc_b))
-
-        def validate_data_struct(schema, data):
-            """Compare a data structure against a schema and validate all required
-            fields are present.
-
-            """
-            is_valid = False
-            in_ref = True
-            valid = True
-            if isinstance(schema, dict) and isinstance(data, dict):
-                # Both are dicts, recursively verify that all fields
-                # in schema are present in the data.
-                for key in schema:
-                    in_ref = in_ref and (key in data)
-                    if in_ref:
-                        valid = valid and (
-                            validate_data_struct(schema[key], data[key]))
-
-                is_valid = in_ref and valid
-            else:
-                # non-recursive structure. verify data and schema have
-                # the same type.
-                is_valid = isinstance(data, type(schema))
-
-            if not is_valid:
-                printlog("  Unmatched schema and input:")
-                if isinstance(schema, dict):
-                    print_compare_difference(schema, data, 'schema', 'input')
-                    print_compare_difference(data, schema, 'input', 'schema')
-                else:
-                    printlog("    schema = {0} ({1})".format(
-                        schema, type(schema)))
-                    printlog("    input = {0} ({1})".format(data, type(data)))
-
-            return is_valid
-
-        for field in self:
-            valid = validate_data_struct(self._source_schema, self[field])
-            if not valid:
-                PPRINTER.pprint(self._source_schema)
-                PPRINTER.pprint(self[field])
-                msg = 'ERROR: source for "{0}" did not validate'.format(field)
-                fatal_error(msg)
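
[Editor's note: for reference, a minimal entry that satisfies _source_schema above; all values are illustrative, and _check_optional (earlier in this file) is what fills in the empty defaults:]

    one_external = {
        'required': True,
        'local_path': 'components/cism',   # where the external is checked out
        'externals': '',                   # no nested externals file
        'from_submodule': False,
        'repo': {'protocol': 'git',
                 'repo_url': 'https://github.com/ESCOMP/cism-wrapper',
                 'tag': 'cismwrap_2_1_95',  # exactly one of tag/branch/hash is non-empty
                 'branch': '',
                 'hash': '',
                 'sparse': ''},
    }
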
-
-
-class ExternalsDescriptionDict(ExternalsDescription):
-    """Create an externals description object from a dictionary using the API
-    representations. Primarily used to simplify creating model
-    description files for unit testing.
-
-    """
-
-    def __init__(self, model_data, components=None, exclude=None):
-        """Parse a native dictionary into an externals description.
-        """
-        ExternalsDescription.__init__(self)
-        self._schema_major = 1
-        self._schema_minor = 0
-        self._schema_patch = 0
-        self._input_major = 1
-        self._input_minor = 0
-        self._input_patch = 0
-        self._verify_schema_version()
-        if components:
-            for key in list(model_data.keys()):
-                if key not in components:
-                    del model_data[key]
-
-        if exclude:
-            for key in list(model_data.keys()):
-                if key in exclude:
-                    del model_data[key]
-
-        self.update(model_data)
-        self._check_user_input()
-
-
-class ExternalsDescriptionConfigV1(ExternalsDescription):
-    """Create an externals description object from a config_parser object,
-    schema version 1.
-
-    """
-
-    def __init__(self, model_data, components=None, exclude=None, parent_repo=None):
-        """Convert the config data into a standardized dict that can be used to
-        construct the source objects
-
-        components: list of component names to include, None to include all.
-        exclude: list of component names to skip.
-        """
-        ExternalsDescription.__init__(self, parent_repo=parent_repo)
-        self._schema_major = 1
-        self._schema_minor = 1
-        self._schema_patch = 0
-        self._input_major, self._input_minor, self._input_patch = \
-            get_cfg_schema_version(model_data)
-        self._verify_schema_version()
-        self._remove_metadata(model_data)
-        self._parse_cfg(model_data, components=components, exclude=exclude)
-        self._check_user_input()
-
-    @staticmethod
-    def _remove_metadata(model_data):
-        """Remove the metadata section from the model configuration file so
-        that it is simpler to look through the file and construct the
-        externals description.
-
-        """
-        model_data.remove_section(DESCRIPTION_SECTION)
-
-    def _parse_cfg(self, cfg_data, components=None, exclude=None):
-        """Parse a config_parser object into an externals description.
-
-        components: list of component names to include, None to include all.
-        exclude: list of component names to skip.
-        """
-        def list_to_dict(input_list, convert_to_lower_case=True):
-            """Convert a list of key-value pairs into a dictionary.
-            """
-            output_dict = {}
-            for item in input_list:
-                key = config_string_cleaner(item[0].strip())
-                value = config_string_cleaner(item[1].strip())
-                if convert_to_lower_case:
-                    key = key.lower()
-                output_dict[key] = value
-            return output_dict
-
-        for section in cfg_data.sections():
-            name = config_string_cleaner(section.lower().strip())
-            if (components and name not in components) or (exclude and name in exclude):
-                continue
-            self[name] = {}
-            self[name].update(list_to_dict(cfg_data.items(section)))
-            self[name][self.REPO] = {}
-            loop_keys = self[name].copy().keys()
-            for item in loop_keys:
-                if item in self._source_schema:
-                    if isinstance(self._source_schema[item], bool):
-                        self[name][item] = str_to_bool(self[name][item])
-                elif item in self._source_schema[self.REPO]:
-                    self[name][self.REPO][item] = self[name][item]
-                    del self[name][item]
-                else:
-                    msg = ('Invalid input: "{sect}" contains unknown '
-                           'item "{item}".'.format(sect=name, item=item))
-                    fatal_error(msg)
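
[Editor's note: to make the transformation above concrete, a hypothetical Externals.cfg section and the nested dict _parse_cfg builds from it, before _check_optional fills in defaults:]

    # an Externals.cfg section such as
    #     [cism]
    #     local_path = components/cism
    #     protocol = git
    #     repo_url = https://github.com/ESCOMP/cism-wrapper
    #     tag = cismwrap_2_1_95
    #     required = True
    # becomes (repo-level items are moved under the 'repo' key):
    expected = {'required': True,  # converted by str_to_bool
                'local_path': 'components/cism',
                'repo': {'protocol': 'git',
                         'repo_url': 'https://github.com/ESCOMP/cism-wrapper',
                         'tag': 'cismwrap_2_1_95'}}
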
diff --git a/manage_externals/manic/externals_status.py b/manage_externals/manic/externals_status.py
deleted file mode 100644
index 6bc29e9732..0000000000
--- a/manage_externals/manic/externals_status.py
+++ /dev/null
@@ -1,164 +0,0 @@
-"""ExternalStatus
-
-Class to store status and state information about repositories and
-create a string representation.
-
-"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-from .global_constants import EMPTY_STR
-from .utils import printlog, indent_string
-from .global_constants import VERBOSITY_VERBOSE, VERBOSITY_DUMP
-
-
-class ExternalStatus(object):
-    """Class to represent the status of a given source repository or tree.
-
-    Individual repositories determine their own status in the
-    Repository objects. This object is just responsible for storing the
-    information and passing it up to a higher level for reporting or
-    global decisions.
-
-    There are two states of concern:
-
-    * If the repository is in-sync with the externals description file.
-
-    * If the repository working copy is clean and there are no pending
-    transactions (e.g. add, remove, rename, untracked files).
-
-    """
-    # sync_state and clean_state can be one of the following:
-    DEFAULT = '-'  # not set yet (sync_state). clean_state can be this if sync_state is EMPTY.
-    UNKNOWN = '?'
-    EMPTY = 'e'
-    MODEL_MODIFIED = 's'  # repo version != externals (sync_state only)
-    DIRTY = 'M'  # repo is dirty (clean_state only)
-    STATUS_OK = ' '  # repo is clean (clean_state) or matches externals version (sync_state)
-    STATUS_ERROR = '!'
-
-    # source_type can be one of the following:
-    OPTIONAL = 'o'
-    STANDALONE = 's'
-    MANAGED = ' '
-
-    def __init__(self):
-        self.sync_state = self.DEFAULT
-        self.clean_state = self.DEFAULT
-        self.source_type = self.DEFAULT
-        self.path = EMPTY_STR
-        self.current_version = EMPTY_STR
-        self.expected_version = EMPTY_STR
-        self.status_output = EMPTY_STR
-
-    def log_status_message(self, verbosity):
-        """Write status message to the screen and log file
-        """
-        printlog(self._default_status_message())
-        if verbosity >= VERBOSITY_VERBOSE:
-            printlog(self._verbose_status_message())
-        if verbosity >= VERBOSITY_DUMP:
-            printlog(self._dump_status_message())
-
-    def __repr__(self):
-        return self._default_status_message()
-
-    def _default_status_message(self):
-        """Return the default terse status message string
-        """
-        return '{sync}{clean}{src_type} {path}'.format(
-            sync=self.sync_state, clean=self.clean_state,
-            src_type=self.source_type, path=self.path)
-
-    def _verbose_status_message(self):
-        """Return the verbose status message string
-        """
-        clean_str = self.DEFAULT
-        if self.clean_state == self.STATUS_OK:
-            clean_str = 'clean sandbox'
-        elif self.clean_state == self.DIRTY:
-            clean_str = 'modified sandbox'
-
-        sync_str = 'on {0}'.format(self.current_version)
-        if self.sync_state != self.STATUS_OK:
-            sync_str = '{current} --> {expected}'.format(
-                current=self.current_version, expected=self.expected_version)
-        return ' {clean}, {sync}'.format(clean=clean_str, sync=sync_str)
-
-    def _dump_status_message(self):
-        """Return the dump status message string
-        """
-        return indent_string(self.status_output, 12)
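
[Editor's note: as a reading aid for the terse format above; the values below are illustrative:]

    # sync_state 's' (MODEL_MODIFIED), clean_state 'M' (DIRTY),
    # source_type ' ' (MANAGED) render as:
    print('{0}{1}{2} {3}'.format('s', 'M', ' ', 'components/cism'))
    # -> "sM  components/cism": wrong version checked out, with local edits
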
-
-    def safe_to_update(self):
-        """Report if it is safe to update a repository. Safe is defined as:
-
-        * If a repository is empty, it is safe to update.
-
-        * If a repository exists and has a clean working copy state
-        with no pending transactions.
-
-        """
-        safe_to_update = False
-        repo_exists = self.exists()
-        if not repo_exists:
-            safe_to_update = True
-        else:
-            # If the repo exists, it must be in ok or modified
-            # sync_state. Any other sync_state at this point
-            # represents a logic error that should have been handled
-            # before now!
-            sync_safe = ((self.sync_state == ExternalStatus.STATUS_OK) or
                         (self.sync_state == ExternalStatus.MODEL_MODIFIED))
-            if sync_safe:
-                # The clean_state must be STATUS_OK to update. Otherwise we
-                # are dirty or there was a missed error previously.
-                if self.clean_state == ExternalStatus.STATUS_OK:
-                    safe_to_update = True
-        return safe_to_update
-
-    def exists(self):
-        """Determine if the repo exists. This is indicated by:
-
-        * sync_state is not EMPTY
-
-        * if the sync_state is empty, then the valid states for
-        clean_state are default, empty or unknown. Anything else
-        and there was probably an internal logic error.
-
-        NOTE(bja, 2017-10) For the moment we are considering a
-        sync_state of default or unknown to require user intervention,
-        but we may want to relax this convention. This is probably a
-        result of a network error or internal logic error but more
-        testing is needed.
-
-        """
-        is_empty = (self.sync_state == ExternalStatus.EMPTY)
-        clean_valid = ((self.clean_state == ExternalStatus.DEFAULT) or
-                       (self.clean_state == ExternalStatus.EMPTY) or
-                       (self.clean_state == ExternalStatus.UNKNOWN))
-
-        if is_empty and clean_valid:
-            exists = False
-        else:
-            exists = True
-        return exists
-
-
-def check_safe_to_update_repos(tree_status):
-    """Check if *ALL* repositories are in a safe state to update. We don't
-    want to do a partial update of the repositories then die, leaving
-    the model in an inconsistent state.
-
-    Note: if there is an update to do, the repositories will by
-    definition be out of sync with the externals description, so we
-    can't use that as criteria for updating.
-
-    """
-    safe_to_update = True
-    for comp in tree_status:
-        stat = tree_status[comp]
-        safe_to_update &= stat.safe_to_update()
-
-    return safe_to_update
diff --git a/manage_externals/manic/global_constants.py b/manage_externals/manic/global_constants.py
deleted file mode 100644
index 0e91cffc90..0000000000
--- a/manage_externals/manic/global_constants.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""Globals shared across modules
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import pprint
-
-EMPTY_STR = ''
-LOCAL_PATH_INDICATOR = '.'
-VERSION_SEPERATOR = '.'
-LOG_FILE_NAME = 'manage_externals.log'
-PPRINTER = pprint.PrettyPrinter(indent=4)
-
-VERBOSITY_DEFAULT = 0
-VERBOSITY_VERBOSE = 1
-VERBOSITY_DUMP = 2
diff --git a/manage_externals/manic/repository.py b/manage_externals/manic/repository.py
deleted file mode 100644
index ea4230fb7b..0000000000
--- a/manage_externals/manic/repository.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""Base class representation of a repository
-"""
-
-from .externals_description import ExternalsDescription
-from .utils import fatal_error
-from .global_constants import EMPTY_STR
-
-
-class Repository(object):
-    """
-    Class to represent and operate on a repository description.
-    """
-
-    def __init__(self, component_name, repo):
-        """
-        Parse repo externals description
-        """
-        self._name = component_name
-        self._protocol = repo[ExternalsDescription.PROTOCOL]
-        self._tag = repo[ExternalsDescription.TAG]
-        self._branch = repo[ExternalsDescription.BRANCH]
-        self._hash = repo[ExternalsDescription.HASH]
-        self._url = repo[ExternalsDescription.REPO_URL]
-        self._sparse = repo[ExternalsDescription.SPARSE]
-
-        if self._url is EMPTY_STR:
-            fatal_error('repo must have a URL')
-
-        if ((self._tag is EMPTY_STR) and (self._branch is EMPTY_STR) and
-                (self._hash is EMPTY_STR)):
-            fatal_error('{0} repo must have a branch, tag or hash element'.format(self._name))
-
-        ref_count = 0
-        if self._tag is not EMPTY_STR:
-            ref_count += 1
-        if self._branch is not EMPTY_STR:
-            ref_count += 1
-        if self._hash is not EMPTY_STR:
-            ref_count += 1
-        if ref_count != 1:
-            fatal_error('repo {0} must have exactly one of '
-                        'tag, branch or hash.'.format(self._name))
-
-    def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive):  # pylint: disable=unused-argument
-        """
-        If the repo destination directory exists, ensure it is correct (from
-        correct URL, correct branch or tag), and possibly update the source.
-        If the repo destination directory does not exist, checkout the correct
-        branch or tag.
-        NB: <recursive> is included as an argument for compatibility with
-        git functionality (repository_git.py)
-        """
-        msg = ('DEV_ERROR: checkout method must be implemented in all '
-               'repository classes! {0}'.format(self.__class__.__name__))
-        fatal_error(msg)
-
-    def status(self, stat, repo_dir_path):  # pylint: disable=unused-argument
-        """Report the status of the repo
-
-        """
-        msg = ('DEV_ERROR: status method must be implemented in all '
-               'repository classes! {0}'.format(self.__class__.__name__))
-        fatal_error(msg)
-
-    def submodules_file(self, repo_path=None):
-        # pylint: disable=no-self-use,unused-argument
-        """Stub for use by non-git VC systems"""
-        return None
-
-    def url(self):
-        """Public access of repo url.
-        """
-        return self._url
-
-    def tag(self):
-        """Public access of repo tag
-        """
-        return self._tag
-
-    def branch(self):
-        """Public access of repo branch.
-        """
-        return self._branch
-
-    def hash(self):
-        """Public access of repo hash.
-        """
-        return self._hash
-
-    def name(self):
-        """Public access of repo name.
-        """
-        return self._name
-
-    def protocol(self):
-        """Public access of repo protocol.
-        """
-        return self._protocol
diff --git a/manage_externals/manic/repository_factory.py b/manage_externals/manic/repository_factory.py
deleted file mode 100644
index 18c73ffc4b..0000000000
--- a/manage_externals/manic/repository_factory.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""Factory for creating and initializing the appropriate repository class
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-from .repository_git import GitRepository
-from .repository_svn import SvnRepository
-from .externals_description import ExternalsDescription
-from .utils import fatal_error
-
-
-def create_repository(component_name, repo_info, svn_ignore_ancestry=False):
-    """Determine what type of repository we have, i.e. git or svn, and
-    create the appropriate object.
-
-    Can return None (e.g. if protocol is 'externals_only').
- """ - protocol = repo_info[ExternalsDescription.PROTOCOL].lower() - if protocol == 'git': - repo = GitRepository(component_name, repo_info) - elif protocol == 'svn': - repo = SvnRepository(component_name, repo_info, ignore_ancestry=svn_ignore_ancestry) - elif protocol == 'externals_only': - repo = None - else: - msg = 'Unknown repo protocol "{0}"'.format(protocol) - fatal_error(msg) - return repo diff --git a/manage_externals/manic/repository_git.py b/manage_externals/manic/repository_git.py deleted file mode 100644 index aab1a468a8..0000000000 --- a/manage_externals/manic/repository_git.py +++ /dev/null @@ -1,859 +0,0 @@ -"""Class for interacting with git repositories -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import copy -import os -import sys - -from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR -from .global_constants import VERBOSITY_VERBOSE -from .repository import Repository -from .externals_status import ExternalStatus -from .externals_description import ExternalsDescription, git_submodule_status -from .utils import expand_local_url, split_remote_url, is_remote_url -from .utils import fatal_error, printlog -from .utils import execute_subprocess - - -class GitRepository(Repository): - """Class to represent and operate on a repository description. - - For testing purpose, all system calls to git should: - - * be isolated in separate functions with no application logic - * of the form: - - cmd = 'git -C {dirname} ...'.format(dirname=dirname).split() - - value = execute_subprocess(cmd, output_to_caller={T|F}, - status_to_caller={T|F}) - - return value - * be static methods (not rely on self) - * name as _git_subcommand_args(user_args) - - This convention allows easy unit testing of the repository logic - by mocking the specific calls to return predefined results. - - """ - - def __init__(self, component_name, repo): - """ - repo: ExternalsDescription. - """ - Repository.__init__(self, component_name, repo) - self._gitmodules = None - self._submods = None - - # ---------------------------------------------------------------- - # - # Public API, defined by Repository - # - # ---------------------------------------------------------------- - def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive): - """ - If the repo destination directory exists, ensure it is correct (from - correct URL, correct branch or tag), and possibly update the source. - If the repo destination directory does not exist, checkout the correct - branch or tag. - """ - repo_dir_path = os.path.join(base_dir_path, repo_dir_name) - repo_dir_exists = os.path.exists(repo_dir_path) - if (repo_dir_exists and not os.listdir( - repo_dir_path)) or not repo_dir_exists: - self._clone_repo(base_dir_path, repo_dir_name, verbosity) - self._checkout_ref(repo_dir_path, verbosity, recursive) - gmpath = os.path.join(repo_dir_path, - ExternalsDescription.GIT_SUBMODULES_FILENAME) - if os.path.exists(gmpath): - self._gitmodules = gmpath - self._submods = git_submodule_status(repo_dir_path) - else: - self._gitmodules = None - self._submods = None - - def status(self, stat, repo_dir_path): - """ - If the repo destination directory exists, ensure it is correct (from - correct URL, correct branch or tag), and possibly update the source. - If the repo destination directory does not exist, checkout the correct - branch or tag. 
-        """
-        self._check_sync(stat, repo_dir_path)
-        if os.path.exists(repo_dir_path):
-            self._status_summary(stat, repo_dir_path)
-
-    def submodules_file(self, repo_path=None):
-        if repo_path is not None:
-            gmpath = os.path.join(repo_path,
-                                  ExternalsDescription.GIT_SUBMODULES_FILENAME)
-            if os.path.exists(gmpath):
-                self._gitmodules = gmpath
-                self._submods = git_submodule_status(repo_path)
-
-        return self._gitmodules
-
-    # ----------------------------------------------------------------
-    #
-    # Internal work functions
-    #
-    # ----------------------------------------------------------------
-    def _clone_repo(self, base_dir_path, repo_dir_name, verbosity):
-        """Clones repo_dir_name into base_dir_path.
-        """
-        self._git_clone(self._url, os.path.join(base_dir_path, repo_dir_name),
-                        verbosity=verbosity)
-
-    def _current_ref(self, dirname):
-        """Determine the *name* associated with HEAD at dirname.
-
-        If we're on a tag, then returns the tag name; otherwise, returns
-        the current hash. Returns an empty string if no reference can be
-        determined (e.g., if we're not actually in a git repository).
-
-        If we're on a branch, then the branch name is also included in
-        the returned string (in addition to the tag / hash).
-        """
-        ref_found = False
-
-        # If we're exactly at a tag, use that as the current ref
-        tag_found, tag_name = self._git_current_tag(dirname)
-        if tag_found:
-            current_ref = tag_name
-            ref_found = True
-
-        if not ref_found:
-            # Otherwise, use current hash as the current ref
-            hash_found, hash_name = self._git_current_hash(dirname)
-            if hash_found:
-                current_ref = hash_name
-                ref_found = True
-
-        if ref_found:
-            # If we're on a branch, include branch name in current ref
-            branch_found, branch_name = self._git_current_branch(dirname)
-            if branch_found:
-                current_ref = "{} (branch {})".format(current_ref, branch_name)
-        else:
-            # If we still can't find a ref, return empty string. This
-            # can happen if we're not actually in a git repo
-            current_ref = ''
-
-        return current_ref
-
-    def _check_sync(self, stat, repo_dir_path):
-        """Determine whether a git repository is in-sync with the model
-        description.
-
-        Because repos can have multiple remotes, the only criteria is
-        whether the branch or tag is the same.
-
-        """
-        if not os.path.exists(repo_dir_path):
-            # NOTE(bja, 2017-10) condition should have been determined
-            # by _Source() object and should never be here!
-            stat.sync_state = ExternalStatus.STATUS_ERROR
-        else:
-            git_dir = os.path.join(repo_dir_path, '.git')
-            if not os.path.exists(git_dir):
-                # NOTE(bja, 2017-10) directory exists, but no git repo
-                # info.... Can't test with subprocess git command
-                # because git will move up directory tree until it
-                # finds the parent repo git dir!
-                stat.sync_state = ExternalStatus.UNKNOWN
-            else:
-                self._check_sync_logic(stat, repo_dir_path)
-
-    def _check_sync_logic(self, stat, repo_dir_path):
-        """Compare the underlying hashes of the currently checked-out ref and the
-        expected ref.
-
-        Output: sets the sync_state as well as the current and
-        expected ref in the input status object.
-
-        """
-        def compare_refs(current_ref, expected_ref):
-            """Compare the current and expected ref.
- - """ - if current_ref == expected_ref: - status = ExternalStatus.STATUS_OK - else: - status = ExternalStatus.MODEL_MODIFIED - return status - - # get the full hash of the current commit - _, current_ref = self._git_current_hash(repo_dir_path) - - if self._branch: - if self._url == LOCAL_PATH_INDICATOR: - expected_ref = self._branch - else: - remote_name = self._remote_name_for_url(self._url, - repo_dir_path) - if not remote_name: - # git doesn't know about this remote. by definition - # this is a modified state. - expected_ref = "unknown_remote/{0}".format(self._branch) - else: - expected_ref = "{0}/{1}".format(remote_name, self._branch) - elif self._hash: - expected_ref = self._hash - elif self._tag: - expected_ref = self._tag - else: - msg = 'In repo "{0}": none of branch, hash or tag are set'.format( - self._name) - fatal_error(msg) - - # record the *names* of the current and expected branches - stat.current_version = self._current_ref(repo_dir_path) - stat.expected_version = copy.deepcopy(expected_ref) - - if current_ref == EMPTY_STR: - stat.sync_state = ExternalStatus.UNKNOWN - else: - # get the underlying hash of the expected ref - revparse_status, expected_ref_hash = self._git_revparse_commit( - expected_ref, repo_dir_path) - if revparse_status: - # We failed to get the hash associated with - # expected_ref. Maybe we should assign this to some special - # status, but for now we're just calling this out-of-sync to - # remain consistent with how this worked before. - stat.sync_state = ExternalStatus.MODEL_MODIFIED - else: - # compare the underlying hashes - stat.sync_state = compare_refs(current_ref, expected_ref_hash) - - @classmethod - def _remote_name_for_url(cls, remote_url, dirname): - """Return the remote name matching remote_url (or None) - - """ - git_output = cls._git_remote_verbose(dirname) - git_output = git_output.splitlines() - for line in git_output: - data = line.strip() - if not data: - continue - data = data.split() - name = data[0].strip() - url = data[1].strip() - if remote_url == url: - return name - return None - - def _create_remote_name(self): - """The url specified in the externals description file was not known - to git. We need to add it, which means adding a unique and - safe name.... - - The assigned name needs to be safe for git to use, e.g. can't - look like a path 'foo/bar' and work with both remote and local paths. - - Remote paths include but are not limited to: git, ssh, https, - github, gitlab, bitbucket, custom server, etc. - - Local paths can be relative or absolute. They may contain - shell variables, e.g. ${REPO_ROOT}/repo_name, or username - expansion, i.e. ~/ or ~someuser/. - - Relative paths must be at least one layer of redirection, i.e. - container/../ext_repo, but may be many layers deep, e.g. - container/../../../../../ext_repo - - NOTE(bja, 2017-11) - - The base name below may not be unique, for example if the - user has local paths like: - - /path/to/my/repos/nice_repo - /path/to/other/repos/nice_repo - - But the current implementation should cover most common - use cases for remotes and still provide usable names. - - """ - url = copy.deepcopy(self._url) - if is_remote_url(url): - url = split_remote_url(url) - else: - url = expand_local_url(url, self._name) - url = url.split('/') - repo_name = url[-1] - base_name = url[-2] - # repo name should nominally already be something that git can - # deal with. We need to remove other possibly troublesome - # punctuation, e.g. /, $, from the base name. 
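
[Editor's note: a worked instance of the naming scheme described above; the URL is hypothetical, and the split_remote_url behavior shown is an assumption based on its use here:]

    # 'https://github.com/ESCOMP/cism-wrapper' -> 'github.com/ESCOMP/cism-wrapper'
    parts = 'github.com/ESCOMP/cism-wrapper'.split('/')  # protocol already stripped
    remote_name = '{0}_{1}'.format(parts[-2], parts[-1])
    assert remote_name == 'ESCOMP_cism-wrapper'
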
-        unsafe_characters = '!@#$%^&*()[]{}\\/,;~'
-        for unsafe in unsafe_characters:
-            base_name = base_name.replace(unsafe, '')
-        remote_name = "{0}_{1}".format(base_name, repo_name)
-        return remote_name
-
-    def _checkout_ref(self, repo_dir, verbosity, submodules):
-        """Checkout the user supplied reference
-        if <submodules> is True, recursively initialize and update
-        the repo's submodules
-        """
-        # import pdb; pdb.set_trace()
-        if self._url.strip() == LOCAL_PATH_INDICATOR:
-            self._checkout_local_ref(verbosity, submodules, repo_dir)
-        else:
-            self._checkout_external_ref(verbosity, submodules, repo_dir)
-
-        if self._sparse:
-            self._sparse_checkout(repo_dir, verbosity)
-
-
-    def _checkout_local_ref(self, verbosity, submodules, dirname):
-        """Checkout the reference considering the local repo only. Do not
-        fetch any additional remotes or specify the remote when
-        checking out the ref.
-        if <submodules> is True, recursively initialize and update
-        the repo's submodules
-        """
-        if self._tag:
-            ref = self._tag
-        elif self._branch:
-            ref = self._branch
-        else:
-            ref = self._hash
-
-        self._check_for_valid_ref(ref, remote_name=None,
-                                  dirname=dirname)
-        self._git_checkout_ref(ref, verbosity, submodules, dirname)
-
-    def _checkout_external_ref(self, verbosity, submodules, dirname):
-        """Checkout the reference from a remote repository into dirname.
-        if <submodules> is True, recursively initialize and update
-        the repo's submodules.
-        Note that this results in a 'detached HEAD' state if checking out
-        a branch, because we check out the remote branch rather than the
-        local. See https://github.com/ESMCI/manage_externals/issues/34 for
-        more discussion.
-        """
-        if self._tag:
-            ref = self._tag
-        elif self._branch:
-            ref = self._branch
-        else:
-            ref = self._hash
-
-        remote_name = self._remote_name_for_url(self._url, dirname)
-        if not remote_name:
-            remote_name = self._create_remote_name()
-            self._git_remote_add(remote_name, self._url, dirname)
-        self._git_fetch(remote_name, dirname)
-
-        # NOTE(bja, 2018-03) we need to send separate ref and remote
-        # name to check_for_valid_ref, but the combined name to
-        # checkout_ref!
-        self._check_for_valid_ref(ref, remote_name, dirname)
-
-        if self._branch:
-            # Prepend remote name to branch. This means we avoid various
-            # special cases if the local branch is not tracking the remote or
-            # cannot be trivially fast-forwarded to match; but, it also
-            # means we end up in a 'detached HEAD' state.
-            ref = '{0}/{1}'.format(remote_name, ref)
-        self._git_checkout_ref(ref, verbosity, submodules, dirname)
-
-    def _sparse_checkout(self, repo_dir, verbosity):
-        """Use git read-tree to thin the working tree."""
-        cmd = ['cp', os.path.join(repo_dir, self._sparse),
-               os.path.join(repo_dir,
-                            '.git/info/sparse-checkout')]
-        if verbosity >= VERBOSITY_VERBOSE:
-            printlog('    {0}'.format(' '.join(cmd)))
-        execute_subprocess(cmd)
-        self._git_sparse_checkout(verbosity, repo_dir)
-
-    def _check_for_valid_ref(self, ref, remote_name, dirname):
-        """Try some basic sanity checks on the user supplied reference so we
-        can provide a more useful error message than a CalledProcessError...
-
-        remote_name can be None
-        """
-        is_tag = self._ref_is_tag(ref, dirname)
-        is_branch = self._ref_is_branch(ref, remote_name, dirname)
-        is_hash = self._ref_is_hash(ref, dirname)
-        is_valid = is_tag or is_branch or is_hash
-        if not is_valid:
-            msg = ('In repo "{0}": reference "{1}" does not appear to be a '
-                   'valid tag, branch or hash! Please verify the reference '
-                   'name (e.g. 
spelling), is available from: {2} '.format(
-                       self._name, ref, self._url))
-            fatal_error(msg)
-
-        if is_tag:
-            is_unique_tag, msg = self._is_unique_tag(ref, remote_name,
-                                                     dirname)
-            if not is_unique_tag:
-                msg = ('In repo "{0}": tag "{1}" {2}'.format(
-                    self._name, self._tag, msg))
-                fatal_error(msg)
-
-        return is_valid
-
-    def _is_unique_tag(self, ref, remote_name, dirname):
-        """Verify that a reference is a valid tag and is unique (not a branch)
-
-        Tags may be tag names, or SHA id's. It is also possible that a
-        branch and tag have the same name.
-
-        Note: values returned by git_showref_* and git_revparse are
-        shell return codes, which are zero for success, non-zero for
-        error!
-
-        """
-        is_tag = self._ref_is_tag(ref, dirname)
-        is_branch = self._ref_is_branch(ref, remote_name, dirname)
-        is_hash = self._ref_is_hash(ref, dirname)
-
-        msg = ''
-        is_unique_tag = False
-        if is_tag and not is_branch:
-            # unique tag
-            msg = 'is ok'
-            is_unique_tag = True
-        elif is_tag and is_branch:
-            msg = ('is both a branch and a tag. git may checkout the branch '
-                   'instead of the tag depending on your version of git.')
-            is_unique_tag = False
-        elif not is_tag and is_branch:
-            msg = ('is a branch, and not a tag. If you intended to checkout '
-                   'a branch, please change the externals description to be '
-                   'a branch. If you intended to checkout a tag, it does not '
-                   'exist. Please check the name.')
-            is_unique_tag = False
-        else:  # not is_tag and not is_branch:
-            if is_hash:
-                # probably a sha1 or HEAD, etc, we call it a tag
-                msg = 'is ok'
-                is_unique_tag = True
-            else:
-                # undetermined state.
-                msg = ('does not appear to be a valid tag, branch or hash! '
-                       'Please check the name and repository.')
-                is_unique_tag = False
-
-        return is_unique_tag, msg
-
-    def _ref_is_tag(self, ref, dirname):
-        """Verify that a reference is a valid tag according to git.
-
-        Note: values returned by git_showref_* and git_revparse are
-        shell return codes, which are zero for success, non-zero for
-        error!
-        """
-        is_tag = False
-        value = self._git_showref_tag(ref, dirname)
-        if value == 0:
-            is_tag = True
-        return is_tag
-
-    def _ref_is_branch(self, ref, remote_name, dirname):
-        """Verify if a ref is any kind of branch (local, tracked remote,
-        untracked remote).
-
-        remote_name can be None.
-        """
-        local_branch = False
-        remote_branch = False
-        if remote_name:
-            remote_branch = self._ref_is_remote_branch(ref, remote_name,
-                                                       dirname)
-        local_branch = self._ref_is_local_branch(ref, dirname)
-
-        is_branch = False
-        if local_branch or remote_branch:
-            is_branch = True
-        return is_branch
-
-    def _ref_is_local_branch(self, ref, dirname):
-        """Verify that a reference is a valid branch according to git.
-
-        show-ref branch returns local branches that have been
-        previously checked out. It will not necessarily pick up
-        untracked remote branches.
-
-        Note: values returned by git_showref_* and git_revparse are
-        shell return codes, which are zero for success, non-zero for
-        error!
-
-        """
-        is_branch = False
-        value = self._git_showref_branch(ref, dirname)
-        if value == 0:
-            is_branch = True
-        return is_branch
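
[Editor's note: the _ref_is_* checks in this block all reduce to shell return codes from the _git_* helpers further down; a summary of the mapping, with commands abbreviated:]

    # git show-ref --quiet --verify refs/tags/<ref>     exits 0  => <ref> is a tag
    # git show-ref --quiet --verify refs/heads/<ref>    exits 0  => local branch
    # git ls-remote --exit-code --heads <remote> <ref>  exits 0  => remote branch
    # git rev-parse --quiet --verify <ref>^{commit}     exits 0  => resolvable commit
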
-
-    def _ref_is_remote_branch(self, ref, remote_name, dirname):
-        """Verify that a reference is a valid branch according to git.
-
-        show-ref branch returns local branches that have been
-        previously checked out. It will not necessarily pick up
-        untracked remote branches.
-
-        Note: values returned by git_showref_* and git_revparse are
-        shell return codes, which are zero for success, non-zero for
-        error!
-
-        """
-        is_branch = False
-        value = self._git_lsremote_branch(ref, remote_name, dirname)
-        if value == 0:
-            is_branch = True
-        return is_branch
-
-    def _ref_is_commit(self, ref, dirname):
-        """Verify that a reference is a valid commit according to git.
-
-        This could be a tag, branch, sha1 id, HEAD and potentially others...
-
-        Note: values returned by git_showref_* and git_revparse are
-        shell return codes, which are zero for success, non-zero for
-        error!
-        """
-        is_commit = False
-        value, _ = self._git_revparse_commit(ref, dirname)
-        if value == 0:
-            is_commit = True
-        return is_commit
-
-    def _ref_is_hash(self, ref, dirname):
-        """Verify that a reference is a valid hash according to git.
-
-        Git doesn't seem to provide an exact way to determine if user
-        supplied reference is an actual hash. So we verify that the
-        ref is a valid commit and return the underlying commit
-        hash. Then check that the commit hash begins with the user
-        supplied string.
-
-        Note: values returned by git_showref_* and git_revparse are
-        shell return codes, which are zero for success, non-zero for
-        error!
-
-        """
-        is_hash = False
-        status, git_output = self._git_revparse_commit(ref, dirname)
-        if status == 0:
-            if git_output.strip().startswith(ref):
-                is_hash = True
-        return is_hash
-
-    def _status_summary(self, stat, repo_dir_path):
-        """Determine the clean/dirty status of a git repository
-
-        """
-        git_output = self._git_status_porcelain_v1z(repo_dir_path)
-        is_dirty = self._status_v1z_is_dirty(git_output)
-        if is_dirty:
-            stat.clean_state = ExternalStatus.DIRTY
-        else:
-            stat.clean_state = ExternalStatus.STATUS_OK
-
-        # Now save the verbose status output in case the user wants to
-        # see it.
-        stat.status_output = self._git_status_verbose(repo_dir_path)
-
-    @staticmethod
-    def _status_v1z_is_dirty(git_output):
-        """Parse the git status output from --porcelain=v1 -z and determine if
-        the repo status is clean or dirty. Dirty means:
-
-        * modified files
-        * missing files
-        * added files
-        * removed
-        * renamed
-        * unmerged
-
-        Whether untracked files are considered depends on how the status
-        command was run (i.e., whether it was run with the '-u' option).
-
-        NOTE: Based on the above definition, the porcelain status
-        should be an empty string to be considered 'clean'. Of course
-        this assumes we only get an empty string from a status
-        command on a clean checkout, and not some error
-        condition... Could also use 'git diff --quiet'.
-
-        """
-        is_dirty = False
-        if git_output:
-            is_dirty = True
-        return is_dirty
-
-    # ----------------------------------------------------------------
-    #
-    # system call to git for information gathering
-    #
-    # ----------------------------------------------------------------
-    @staticmethod
-    def _git_current_hash(dirname):
-        """Return the full hash of the currently checked-out version.
-
-        Returns a tuple, (hash_found, hash), where hash_found is a
-        logical specifying whether a hash was found for HEAD (False
-        could mean we're not in a git repository at all). (If hash_found
-        is False, then hash is ''.)
-        """
-        status, git_output = GitRepository._git_revparse_commit("HEAD",
-                                                                dirname)
-        hash_found = not status
-        if not hash_found:
-            git_output = ''
-        return hash_found, git_output
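
[Editor's note: a small illustration of the _status_v1z_is_dirty test above; the porcelain v1 -z output shown is a made-up single-entry example:]

    git_output = ' M src/main.F90\x00'  # one modified, tracked file
    assert bool(git_output)             # non-empty porcelain output == dirty sandbox
    assert not bool('')                 # empty output == clean
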
-
-    @staticmethod
-    def _git_current_remote_branch(dirname):
-        """Determines the name of the current remote branch, if any.
-
-        if dirname is None, uses the cwd.
-
-        Returns a tuple, (branch_found, branch_name), where branch_found
-        is a bool specifying whether a branch name was found for
-        HEAD. (If branch_found is False, then branch_name is '').
-        branch_name is in the format '$remote/$branch', e.g. 'origin/foo'.
-        """
-        branch_found = False
-        branch_name = ''
-
-        cmd = 'git -C {dirname} log -n 1 --pretty=%d HEAD'.format(
-            dirname=dirname).split()
-        status, git_output = execute_subprocess(cmd,
-                                                output_to_caller=True,
-                                                status_to_caller=True)
-        branch_found = 'HEAD,' in git_output
-        if branch_found:
-            # git_output is of the form " (HEAD, origin/blah)"
-            branch_name = git_output.split(',')[1].strip()[:-1]
-        return branch_found, branch_name
-
-    @staticmethod
-    def _git_current_branch(dirname):
-        """Determines the name of the current local branch.
-
-        Returns a tuple, (branch_found, branch_name), where branch_found
-        is a bool specifying whether a branch name was found for
-        HEAD. (If branch_found is False, then branch_name is ''.)
-        Note that currently we check out the remote branch rather than
-        the local, so this command does not return the just-checked-out
-        branch. See _git_current_remote_branch.
-        """
-        cmd = 'git -C {dirname} symbolic-ref --short -q HEAD'.format(
-            dirname=dirname).split()
-        status, git_output = execute_subprocess(cmd,
-                                                output_to_caller=True,
-                                                status_to_caller=True)
-        branch_found = not status
-        if branch_found:
-            git_output = git_output.strip()
-        else:
-            git_output = ''
-        return branch_found, git_output
-
-    @staticmethod
-    def _git_current_tag(dirname):
-        """Determines the name of the tag corresponding to HEAD (if any).
-
-        if dirname is None, uses the cwd.
-
-        Returns a tuple, (tag_found, tag_name), where tag_found is a
-        bool specifying whether we found a tag name corresponding to
-        HEAD. (If tag_found is False, then tag_name is ''.)
-        """
-        cmd = 'git -C {dirname} describe --exact-match --tags HEAD'.format(
-            dirname=dirname).split()
-        status, git_output = execute_subprocess(cmd,
-                                                output_to_caller=True,
-                                                status_to_caller=True)
-        tag_found = not status
-        if tag_found:
-            git_output = git_output.strip()
-        else:
-            git_output = ''
-        return tag_found, git_output
-
-    @staticmethod
-    def _git_showref_tag(ref, dirname):
-        """Run git show-ref to check if the user supplied ref is a tag.
-
-        could also use git rev-parse --quiet --verify tagname^{tag}
-        """
-        cmd = ('git -C {dirname} show-ref --quiet --verify refs/tags/{ref}'
-               .format(dirname=dirname, ref=ref).split())
-        status = execute_subprocess(cmd, status_to_caller=True)
-        return status
-
-    @staticmethod
-    def _git_showref_branch(ref, dirname):
-        """Run git show-ref to check if the user supplied ref is a local or
-        tracked remote branch.
-
-        """
-        cmd = ('git -C {dirname} show-ref --quiet --verify refs/heads/{ref}'
-               .format(dirname=dirname, ref=ref).split())
-        status = execute_subprocess(cmd, status_to_caller=True)
-        return status
-
-    @staticmethod
-    def _git_lsremote_branch(ref, remote_name, dirname):
-        """Run git ls-remote to check if the user supplied ref is a remote
-        branch that is not being tracked
-
-        """
-        cmd = ('git -C {dirname} ls-remote --exit-code --heads '
-               '{remote_name} {ref}').format(
-                   dirname=dirname, remote_name=remote_name, ref=ref).split()
-        status, output = execute_subprocess(cmd, status_to_caller=True, output_to_caller=True)
-        if not status and not f"refs/heads/{ref}" in output:
-            # In this case the ref is contained in the branch name but is not the complete branch name
-            return -1
-        return status
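 
[Editor's note: for context on the helper below, the '^{commit}' suffix peels an annotated tag (or other ref) to the commit it points at, so branches, tags, and abbreviated hashes all resolve to a full SHA-1. A standalone sketch, assuming a checkout exists at 'extern/demo' and 'v1.0' is a hypothetical tag:]

    import subprocess
    ret = subprocess.run(
        ['git', '-C', 'extern/demo', 'rev-parse', '--quiet', '--verify',
         'v1.0^{commit}'],
        capture_output=True, text=True)
    # ret.returncode == 0 and ret.stdout holds the full commit hash when v1.0 exists
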
-
-    @staticmethod
-    def _git_revparse_commit(ref, dirname):
-        """Run git rev-parse to detect if a reference is a SHA, HEAD or other
-        valid commit.
-
-        """
-        cmd = ('git -C {dirname} rev-parse --quiet --verify {ref}^{commit}'
-               .format(dirname=dirname, ref=ref, commit='{commit}').split())
-        status, git_output = execute_subprocess(cmd, status_to_caller=True,
-                                                output_to_caller=True)
-        git_output = git_output.strip()
-        return status, git_output
-
-    @staticmethod
-    def _git_status_porcelain_v1z(dirname):
-        """Run git status to obtain repository information.
-
-        This is run with '--untracked=no' to ignore untracked files.
-
-        This is the machine-portable format that is guaranteed not to change
-        between git versions or *user configuration*.
-
-        """
-        cmd = ('git -C {dirname} status --untracked-files=no --porcelain -z'
-               .format(dirname=dirname)).split()
-        git_output = execute_subprocess(cmd, output_to_caller=True)
-        return git_output
-
-    @staticmethod
-    def _git_status_verbose(dirname):
-        """Run the git status command to obtain repository information.
-        """
-        cmd = 'git -C {dirname} status'.format(dirname=dirname).split()
-        git_output = execute_subprocess(cmd, output_to_caller=True)
-        return git_output
-
-    @staticmethod
-    def _git_remote_verbose(dirname):
-        """Run the git remote command to obtain repository information.
-
-        Returned string is of the form:
-        myfork  git@github.com:johnpaulalex/manage_externals_jp.git (fetch)
-        myfork  git@github.com:johnpaulalex/manage_externals_jp.git (push)
-        """
-        cmd = 'git -C {dirname} remote --verbose'.format(
-            dirname=dirname).split()
-        return execute_subprocess(cmd, output_to_caller=True)
-
-    @staticmethod
-    def has_submodules(repo_dir_path):
-        """Return True iff the repository at <repo_dir_path> has a
-        '.gitmodules' file
-        """
-        fname = os.path.join(repo_dir_path,
-                             ExternalsDescription.GIT_SUBMODULES_FILENAME)
-
-        return os.path.exists(fname)
-
-    # ----------------------------------------------------------------
-    #
-    # system call to git for side effects modifying the working tree
-    #
-    # ----------------------------------------------------------------
-    @staticmethod
-    def _git_clone(url, repo_dir_name, verbosity):
-        """Clones url into repo_dir_name.
-        """
-        cmd = 'git clone --quiet {url} {repo_dir_name}'.format(
-            url=url, repo_dir_name=repo_dir_name).split()
-        if verbosity >= VERBOSITY_VERBOSE:
-            printlog('    {0}'.format(' '.join(cmd)))
-        execute_subprocess(cmd)
-
-    @staticmethod
-    def _git_remote_add(name, url, dirname):
-        """Run the git remote command for the side effect of adding a remote
-        """
-        cmd = 'git -C {dirname} remote add {name} {url}'.format(
-            dirname=dirname, name=name, url=url).split()
-        execute_subprocess(cmd)
-
-    @staticmethod
-    def _git_fetch(remote_name, dirname):
-        """Run the git fetch command for the side effect of updating the repo
-        """
-        cmd = 'git -C {dirname} fetch --quiet --tags {remote_name}'.format(
-            dirname=dirname, remote_name=remote_name).split()
-        execute_subprocess(cmd)
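
[Editor's note: taken together with _remote_name_for_url and _checkout_external_ref earlier in this file, the side-effect helpers in this block amount to the following command sequence for a branch external; the remote name is illustrative:]

    # git -C <dir> remote add ESCOMP_cism-wrapper <url>   (only when url is unknown)
    # git -C <dir> fetch --quiet --tags ESCOMP_cism-wrapper
    # git -C <dir> checkout --quiet ESCOMP_cism-wrapper/<branch>   (detached HEAD)
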
- - """ - cmd = 'git -C {dirname} checkout --quiet {ref}'.format( - dirname=dirname, ref=ref).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - if submodules: - GitRepository._git_update_submodules(verbosity, dirname) - - @staticmethod - def _git_sparse_checkout(verbosity, dirname): - """Configure repo via read-tree.""" - cmd = 'git -C {dirname} config core.sparsecheckout true'.format( - dirname=dirname).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - cmd = 'git -C {dirname} read-tree -mu HEAD'.format( - dirname=dirname).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - - @staticmethod - def _git_update_submodules(verbosity, dirname): - """Run git submodule update for the side effect of updating this - repo's submodules. - """ - # due to https://vielmetti.typepad.com/logbook/2022/10/git-security-fixes-lead-to-fatal-transport-file-not-allowed-error-in-ci-systems-cve-2022-39253.html - # submodules from file doesn't work without overriding the protocol, this is done - # for testing submodule support but should not be done in practice - file_protocol = "" - if 'unittest' in sys.modules.keys(): - file_protocol = "-c protocol.file.allow=always" - - # First, verify that we have a .gitmodules file - if os.path.exists( - os.path.join(dirname, - ExternalsDescription.GIT_SUBMODULES_FILENAME)): - cmd = ('git {file_protocol} -C {dirname} submodule update --init --recursive' - .format(file_protocol=file_protocol, dirname=dirname)).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - - execute_subprocess(cmd) diff --git a/manage_externals/manic/repository_svn.py b/manage_externals/manic/repository_svn.py deleted file mode 100644 index 32a71184b4..0000000000 --- a/manage_externals/manic/repository_svn.py +++ /dev/null @@ -1,291 +0,0 @@ -"""Class for interacting with svn repositories -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import re -import xml.etree.ElementTree as ET - -from .global_constants import EMPTY_STR, VERBOSITY_VERBOSE -from .repository import Repository -from .externals_status import ExternalStatus -from .utils import fatal_error, indent_string, printlog -from .utils import execute_subprocess - - -class SvnRepository(Repository): - """ - Class to represent and operate on a repository description. - - For testing purpose, all system calls to svn should: - - * be isolated in separate functions with no application logic - * of the form: - - cmd = ['svn', ...] - - value = execute_subprocess(cmd, output_to_caller={T|F}, - status_to_caller={T|F}) - - return value - * be static methods (not rely on self) - * name as _svn_subcommand_args(user_args) - - This convention allows easy unit testing of the repository logic - by mocking the specific calls to return predefined results. - - """ - RE_URLLINE = re.compile(r'^URL:') - - def __init__(self, component_name, repo, ignore_ancestry=False): - """ - Parse repo (a XML element). 
- """ - Repository.__init__(self, component_name, repo) - if 'github.com' in self._url: - msg = "SVN access to github.com is no longer supported" - fatal_error(msg) - self._ignore_ancestry = ignore_ancestry - if self._url.endswith('/'): - # there is already a '/' separator in the URL; no need to add another - url_sep = '' - else: - url_sep = '/' - if self._branch: - self._url = self._url + url_sep + self._branch - elif self._tag: - self._url = self._url + url_sep + self._tag - else: - msg = "DEV_ERROR in svn repository. Shouldn't be here!" - fatal_error(msg) - - # ---------------------------------------------------------------- - # - # Public API, defined by Repository - # - # ---------------------------------------------------------------- - def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive): # pylint: disable=unused-argument - """Checkout or update the working copy - - If the repo destination directory exists, switch the sandbox to - match the externals description. - - If the repo destination directory does not exist, checkout the - correct branch or tag. - NB: is include as an argument for compatibility with - git functionality (repository_git.py) - - """ - repo_dir_path = os.path.join(base_dir_path, repo_dir_name) - if os.path.exists(repo_dir_path): - cwd = os.getcwd() - os.chdir(repo_dir_path) - self._svn_switch(self._url, self._ignore_ancestry, verbosity) - # svn switch can lead to a conflict state, but it gives a - # return code of 0. So now we need to make sure that we're - # in a clean (non-conflict) state. - self._abort_if_dirty(repo_dir_path, - "Expected clean state following switch") - os.chdir(cwd) - else: - self._svn_checkout(self._url, repo_dir_path, verbosity) - - def status(self, stat, repo_dir_path): - """ - Check and report the status of the repository - """ - self._check_sync(stat, repo_dir_path) - if os.path.exists(repo_dir_path): - self._status_summary(stat, repo_dir_path) - - # ---------------------------------------------------------------- - # - # Internal work functions - # - # ---------------------------------------------------------------- - def _check_sync(self, stat, repo_dir_path): - """Check to see if repository directory exists and is at the expected - url. Return: status object - - """ - if not os.path.exists(repo_dir_path): - # NOTE(bja, 2017-10) this state should have been handled by - # the source object and we never get here! - stat.sync_state = ExternalStatus.STATUS_ERROR - else: - svn_output = self._svn_info(repo_dir_path) - if not svn_output: - # directory exists, but info returned nothing. .svn - # directory removed or incomplete checkout? - stat.sync_state = ExternalStatus.UNKNOWN - else: - stat.sync_state, stat.current_version = \ - self._check_url(svn_output, self._url) - stat.expected_version = '/'.join(self._url.split('/')[3:]) - - def _abort_if_dirty(self, repo_dir_path, message): - """Check if the repo is in a dirty state; if so, abort with a - helpful message. - - """ - - stat = ExternalStatus() - self._status_summary(stat, repo_dir_path) - if stat.clean_state != ExternalStatus.STATUS_OK: - status = self._svn_status_verbose(repo_dir_path) - status = indent_string(status, 4) - errmsg = """In directory - {cwd} - -svn status now shows: -{status} - -ERROR: {message} - -One possible cause of this problem is that there may have been untracked -files in your working directory that had the same name as tracked files -in the new revision. 
- -To recover: Clean up the above directory (resolving conflicts, etc.), -then rerun checkout_externals. -""".format(cwd=repo_dir_path, message=message, status=status) - - fatal_error(errmsg) - - @staticmethod - def _check_url(svn_output, expected_url): - """Determine the svn url from svn info output and return whether it - matches the expected value. - - """ - url = None - for line in svn_output.splitlines(): - if SvnRepository.RE_URLLINE.match(line): - url = line.split(': ')[1].strip() - break - if not url: - status = ExternalStatus.UNKNOWN - elif url == expected_url: - status = ExternalStatus.STATUS_OK - else: - status = ExternalStatus.MODEL_MODIFIED - - if url: - current_version = '/'.join(url.split('/')[3:]) - else: - current_version = EMPTY_STR - - return status, current_version - - def _status_summary(self, stat, repo_dir_path): - """Report whether the svn repository is in-sync with the model - description and whether the sandbox is clean or dirty. - - """ - svn_output = self._svn_status_xml(repo_dir_path) - is_dirty = self.xml_status_is_dirty(svn_output) - if is_dirty: - stat.clean_state = ExternalStatus.DIRTY - else: - stat.clean_state = ExternalStatus.STATUS_OK - - # Now save the verbose status output in case the user wants to - # see it. - stat.status_output = self._svn_status_verbose(repo_dir_path) - - @staticmethod - def xml_status_is_dirty(svn_output): - """Parse svn status xml output and determine if the working copy is - clean or dirty. Dirty is defined as: - - * modified files - * added files - * deleted files - * missing files - - Unversioned files do not affect the clean/dirty status. - - 'external' is also an acceptable state - - """ - # pylint: disable=invalid-name - SVN_EXTERNAL = 'external' - SVN_UNVERSIONED = 'unversioned' - # pylint: enable=invalid-name - - is_dirty = False - try: - xml_status = ET.fromstring(svn_output) - except BaseException: - fatal_error( - "SVN returned invalid XML message {}".format(svn_output)) - xml_target = xml_status.find('./target') - entries = xml_target.findall('./entry') - for entry in entries: - status = entry.find('./wc-status') - item = status.get('item') - if item == SVN_EXTERNAL: - continue - if item == SVN_UNVERSIONED: - continue - is_dirty = True - break - return is_dirty - - # ---------------------------------------------------------------- - # - # system call to svn for information gathering - # - # ---------------------------------------------------------------- - @staticmethod - def _svn_info(repo_dir_path): - """Return results of svn info command - """ - cmd = ['svn', 'info', repo_dir_path] - output = execute_subprocess(cmd, output_to_caller=True) - return output - - @staticmethod - def _svn_status_verbose(repo_dir_path): - """Capture the full svn status output - """ - cmd = ['svn', 'status', repo_dir_path] - svn_output = execute_subprocess(cmd, output_to_caller=True) - return svn_output - - @staticmethod - def _svn_status_xml(repo_dir_path): - """ - Get status of the subversion sandbox in repo_dir - """ - cmd = ['svn', 'status', '--xml', repo_dir_path] - svn_output = execute_subprocess(cmd, output_to_caller=True) - return svn_output - - # ---------------------------------------------------------------- - # - # system call to svn for side effects modifying the working tree - # - # ---------------------------------------------------------------- - @staticmethod - def _svn_checkout(url, repo_dir_path, verbosity): - """ - Checkout a subversion repository (url) into repo_dir_path.
- """ - cmd = ['svn', 'checkout', '--quiet', url, repo_dir_path] - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - - @staticmethod - def _svn_switch(url, ignore_ancestry, verbosity): - """ - Switch branches for in an svn sandbox - """ - cmd = ['svn', 'switch', '--quiet'] - if ignore_ancestry: - cmd.append('--ignore-ancestry') - cmd.append(url) - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) diff --git a/manage_externals/manic/sourcetree.py b/manage_externals/manic/sourcetree.py deleted file mode 100644 index cf2a5b7569..0000000000 --- a/manage_externals/manic/sourcetree.py +++ /dev/null @@ -1,425 +0,0 @@ -""" -Classes to represent an externals config file (SourceTree) and the components -within it (_External). -""" - -import errno -import logging -import os - -from .externals_description import ExternalsDescription -from .externals_description import read_externals_description_file -from .externals_description import create_externals_description -from .repository_factory import create_repository -from .repository_git import GitRepository -from .externals_status import ExternalStatus -from .utils import fatal_error, printlog -from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR -from .global_constants import VERBOSITY_VERBOSE - -class _External(object): - """ - A single component hosted in an external repository (and any children). - - The component may or may not be checked-out upon construction. - """ - # pylint: disable=R0902 - - def __init__(self, root_dir, name, local_path, required, subexternals_path, - repo, svn_ignore_ancestry, subexternal_sourcetree): - """Create a single external component (checked out or not). - - Input: - root_dir : string - the (checked-out) parent repo's root dir. - local_path : string - this external's (checked-out) subdir relative - to root_dir, e.g. "components/mom" - repo: Repository - the repo object for this external. Can be None (e.g. if this external just refers to another external file). - - name : string - name of this external (as named by the parent - reference). May or may not correspond to something in the path. - - ext_description : dict - source ExternalsDescription object - - svn_ignore_ancestry : bool - use --ignore-externals with svn switch - - subexternals_path: string - path to sub-externals config file, if any. Relative to local_path, or special value 'none'. - subexternal_sourcetree: SourceTree - corresponding to subexternals_path, if subexternals_path exists (it might not, if it is not checked out yet). - """ - self._name = name - self._required = required - - self._stat = None # Populated in status() - - self._local_path = local_path - # _repo_dir_path : full repository directory, e.g. - # "/components/mom" - repo_dir = os.path.join(root_dir, local_path) - self._repo_dir_path = os.path.abspath(repo_dir) - # _base_dir_path : base directory *containing* the repository, e.g. - # "/components" - self._base_dir_path = os.path.dirname(self._repo_dir_path) - # _repo_dir_name : base_dir_path + repo_dir_name = repo_dir_path - # e.g., "mom" - self._repo_dir_name = os.path.basename(self._repo_dir_path) - self._repo = repo - - # Does this component have subcomponents aka an externals config? 
- self._subexternals_path = subexternals_path - self._subexternal_sourcetree = subexternal_sourcetree - - - def get_name(self): - """ - Return the external object's name - """ - return self._name - - def get_local_path(self): - """ - Return the external object's path - """ - return self._local_path - - def get_repo_dir_path(self): - return self._repo_dir_path - - def get_subexternals_path(self): - return self._subexternals_path - - def get_repo(self): - return self._repo - - def status(self, force=False, print_progress=False): - """ - Returns status of this component and all subcomponents. - - Returns a dict mapping our local path (not component name!) to an - ExternalStatus object. Any subcomponents will have their own top-level - path keys. Note the return value includes entries for this and all - subcomponents regardless of whether they are locally installed or not. - - Side-effect: If self._stat is empty or force is True, calculates _stat. - """ - calc_stat = force or not self._stat - - if calc_stat: - self._stat = ExternalStatus() - self._stat.path = self.get_local_path() - if not self._required: - self._stat.source_type = ExternalStatus.OPTIONAL - elif self._local_path == LOCAL_PATH_INDICATOR: - # LOCAL_PATH_INDICATOR, '.' paths, are standalone - # component directories that are not managed by - # checkout_subexternals. - self._stat.source_type = ExternalStatus.STANDALONE - else: - # managed by checkout_subexternals - self._stat.source_type = ExternalStatus.MANAGED - - subcomponent_stats = {} - if not os.path.exists(self._repo_dir_path): - if calc_stat: - # No local repository. - self._stat.sync_state = ExternalStatus.EMPTY - msg = ('status check: repository directory for "{0}" does not ' - 'exist.'.format(self._name)) - logging.info(msg) - self._stat.current_version = 'not checked out' - # NOTE(bja, 2018-01) directory doesn't exist, so we cannot - # use repo to determine the expected version. We just take - # a best-guess based on the assumption that only tag or - # branch should be set, but not both. - if not self._repo: - self._stat.expected_version = 'unknown' - else: - self._stat.expected_version = self._repo.tag() + self._repo.branch() - else: - # Merge local repository state (e.g. clean/dirty) into self._stat. - if calc_stat and self._repo: - self._repo.status(self._stat, self._repo_dir_path) - - # Status of subcomponents, if any. - if self._subexternals_path and self._subexternal_sourcetree: - cwd = os.getcwd() - # SourceTree.status() expects to be called from the correct - # root directory. - os.chdir(self._repo_dir_path) - subcomponent_stats = self._subexternal_sourcetree.status(self._local_path, force=force, print_progress=print_progress) - os.chdir(cwd) - - # Merge our status + subcomponent statuses into one return dict keyed - # by component path. - all_stats = {} - # don't add the root component because we don't manage it - # and can't provide useful info about it. - if self._local_path != LOCAL_PATH_INDICATOR: - # store the stats under the local_path, not comp name so - # it will be sorted correctly - all_stats[self._stat.path] = self._stat - - if subcomponent_stats: - all_stats.update(subcomponent_stats) - - return all_stats - - def checkout(self, verbosity): - """ - If the repo destination directory exists, ensure it is correct (from - the correct URL, on the correct branch or tag), and possibly update it. - If the repo destination directory does not exist, checkout the correct - branch or tag. - Does not check out sub-externals, see SourceTree.checkout().
- """ - # Make sure we are in correct location - if not os.path.exists(self._repo_dir_path): - # repository directory doesn't exist. Need to check it - # out, and for that we need the base_dir_path to exist - try: - os.makedirs(self._base_dir_path) - except OSError as error: - if error.errno != errno.EEXIST: - msg = 'Could not create directory "{0}"'.format( - self._base_dir_path) - fatal_error(msg) - - if not self._stat: - self.status() - assert self._stat - - if self._stat.source_type != ExternalStatus.STANDALONE: - if verbosity >= VERBOSITY_VERBOSE: - # NOTE(bja, 2018-01) probably do not want to pass - # verbosity in this case, because if (verbosity == - # VERBOSITY_DUMP), then the previous status output would - # also be dumped, adding noise to the output. - self._stat.log_status_message(VERBOSITY_VERBOSE) - - if self._repo: - if self._stat.sync_state == ExternalStatus.STATUS_OK: - # If we're already in sync, avoid showing verbose output - # from the checkout command, unless the verbosity level - # is 2 or more. - checkout_verbosity = verbosity - 1 - else: - checkout_verbosity = verbosity - - self._repo.checkout(self._base_dir_path, self._repo_dir_name, - checkout_verbosity, self.clone_recursive()) - - def replace_subexternal_sourcetree(self, sourcetree): - self._subexternal_sourcetree = sourcetree - - def clone_recursive(self): - 'Return True iff any .gitmodules files should be processed' - # Try recursive .gitmodules unless there is an externals entry - recursive = not self._subexternals_path - - return recursive - - -class SourceTree(object): - """ - SourceTree represents a group of managed externals. - - Those externals may not be checked out locally yet, they might only - have Repository objects pointing to their respective repositories. - """ - - @classmethod - def from_externals_file(cls, parent_repo_dir_path, parent_repo, - externals_path): - """Creates a SourceTree representing the given externals file. - - Looks up a git submodules file as an optional backup if there is no - externals file specified. - - Returns None if there is no externals file (i.e. it's None or 'none'), - or if the externals file hasn't been checked out yet. - - parent_repo_dir_path: parent repo root dir - parent_repo: parent repo. - externals_path: path to externals file, relative to parent_repo_dir_path. - """ - if not os.path.exists(parent_repo_dir_path): - # NOTE(bja, 2017-10) repository has not been checked out - # yet, can't process the externals file. Assume we are - # checking status before code is checkoud out and this - # will be handled correctly later. - return None - - if externals_path.lower() == 'none': - # With explicit 'none', do not look for git submodules file. - return None - - cwd = os.getcwd() - os.chdir(parent_repo_dir_path) - - if not externals_path: - if GitRepository.has_submodules(parent_repo_dir_path): - externals_path = ExternalsDescription.GIT_SUBMODULES_FILENAME - else: - return None - - if not os.path.exists(externals_path): - # NOTE(bja, 2017-10) this check is redundant with the one - # in read_externals_description_file! - msg = ('Externals description file "{0}" ' - 'does not exist! In directory: {1}'.format( - externals_path, parent_repo_dir_path)) - fatal_error(msg) - - externals_root = parent_repo_dir_path - # model_data is a dict-like object which mirrors the file format. 
- model_data = read_externals_description_file(externals_root, - externals_path) - # ext_description is another dict-like object (see ExternalsDescription) - ext_description = create_externals_description(model_data, - parent_repo=parent_repo) - externals_sourcetree = SourceTree(externals_root, ext_description) - os.chdir(cwd) - return externals_sourcetree - - def __init__(self, root_dir, ext_description, svn_ignore_ancestry=False): - """ - Build a SourceTree object from an ExternalsDescription. - - root_dir: the (checked-out) parent repo root dir. - """ - self._root_dir = os.path.abspath(root_dir) - self._all_components = {} # component_name -> _External - self._required_compnames = [] - for comp, desc in ext_description.items(): - local_path = desc[ExternalsDescription.PATH] - required = desc[ExternalsDescription.REQUIRED] - repo_info = desc[ExternalsDescription.REPO] - subexternals_path = desc[ExternalsDescription.EXTERNALS] - - repo = create_repository(comp, - repo_info, - svn_ignore_ancestry=svn_ignore_ancestry) - - sourcetree = None - # Treat a .gitmodules file as a backup externals config - if not subexternals_path: - parent_repo_dir_path = os.path.abspath(os.path.join(root_dir, - local_path)) - if GitRepository.has_submodules(parent_repo_dir_path): - subexternals_path = ExternalsDescription.GIT_SUBMODULES_FILENAME - - # Might return None (if the subexternal isn't checked out yet, or subexternal is None or 'none') - subexternal_sourcetree = SourceTree.from_externals_file( - os.path.join(self._root_dir, local_path), - repo, - subexternals_path) - src = _External(self._root_dir, comp, local_path, required, - subexternals_path, repo, svn_ignore_ancestry, - subexternal_sourcetree) - - self._all_components[comp] = src - if required: - self._required_compnames.append(comp) - - def status(self, relative_path_base=LOCAL_PATH_INDICATOR, - force=False, print_progress=False): - """Return a dictionary of local path->ExternalStatus. - - Notes about the returned dictionary: - * It is keyed by local path (e.g. 'components/mom'), not by - component name (e.g. 'mom'). - * It contains top-level keys for all traversed components, whether - discovered by recursion or top-level. - * It contains entries for all components regardless of whether they - are locally installed or not, or required or optional. - """ - load_comps = self._all_components.keys() - - summary = {} # Holds merged statuses from all components. - for comp in load_comps: - if print_progress: - printlog('{0}, '.format(comp), end='') - stat = self._all_components[comp].status(force=force, - print_progress=print_progress) - - # Returned status dictionary is keyed by local path; prepend - # relative_path_base if not already there. - stat_final = {} - for name in stat.keys(): - if stat[name].path.startswith(relative_path_base): - stat_final[name] = stat[name] - else: - modified_path = os.path.join(relative_path_base, - stat[name].path) - stat_final[modified_path] = stat[name] - stat_final[modified_path].path = modified_path - summary.update(stat_final) - - return summary - - def _find_installed_optional_components(self): - """Returns a list of installed optional component names, if any.""" - installed_comps = [] - for comp_name, ext in self._all_components.items(): - if comp_name in self._required_compnames: - continue - # Note that in practice we expect this status to be cached. - path_to_stat = ext.status() - - # If any part of this component exists locally, consider it - # installed and therefore eligible for updating.
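- # (sync_state holds one of the ExternalStatus constants; EMPTY means - # nothing is checked out locally, so any non-EMPTY entry, whether in - # sync or modified, counts as locally present.)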
- if any(s.sync_state != ExternalStatus.EMPTY - for s in path_to_stat.values()): - installed_comps.append(comp_name) - return installed_comps - - def checkout(self, verbosity, load_all, load_comp=None): - """ - Checkout or update indicated components into the configured subdirs. - - If load_all is True, checkout all externals (required + optional), recursively. - If load_all is False and load_comp is set, checkout load_comp (and any required subexternals, plus any optional subexternals that are already checked out, recursively). - If load_all is False and load_comp is None, checkout all required externals, plus any optionals that are already checked out, recursively. - """ - if load_all: - tmp_comps = self._all_components.keys() - elif load_comp is not None: - tmp_comps = [load_comp] - else: - local_optional_compnames = self._find_installed_optional_components() - tmp_comps = self._required_compnames + local_optional_compnames - if local_optional_compnames: - printlog('Found locally installed optional components: ' + - ', '.join(local_optional_compnames)) - bad_compnames = set(local_optional_compnames) - set(self._all_components.keys()) - if bad_compnames: - printlog('Internal error: found locally installed components that are not in the global list of all components: ' + ','.join(bad_compnames)) - - if verbosity >= VERBOSITY_VERBOSE: - printlog('Checking out externals: ') - else: - printlog('Checking out externals: ', end='') - - # Sort by path so that if paths are nested the - # parent repo is checked out first. - load_comps = sorted(tmp_comps, key=lambda comp: self._all_components[comp].get_local_path()) - - # checkout. - for comp_name in load_comps: - if verbosity < VERBOSITY_VERBOSE: - printlog('{0}, '.format(comp_name), end='') - else: - # verbose output handled by the _External object, just - # output a newline - printlog(EMPTY_STR) - c = self._all_components[comp_name] - # Does not recurse. - c.checkout(verbosity) - # Recursively check out subexternals, if any. Returns None - # if there's no subexternals path. - component_subexternal_sourcetree = SourceTree.from_externals_file( - c.get_repo_dir_path(), - c.get_repo(), - c.get_subexternals_path()) - c.replace_subexternal_sourcetree(component_subexternal_sourcetree) - if component_subexternal_sourcetree: - component_subexternal_sourcetree.checkout(verbosity, load_all) - printlog('') diff --git a/manage_externals/manic/utils.py b/manage_externals/manic/utils.py deleted file mode 100644 index 9c63ffe65e..0000000000 --- a/manage_externals/manic/utils.py +++ /dev/null @@ -1,330 +0,0 @@ -#!/usr/bin/env python3 -""" -Common public utilities for manic package - -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import logging -import os -import subprocess -import sys -from threading import Timer - -from .global_constants import LOCAL_PATH_INDICATOR - -# --------------------------------------------------------------------- -# -# screen and logging output and functions to massage text for output -# -# --------------------------------------------------------------------- - - -def log_process_output(output): - """Log each line of process output at debug level so it can be - filtered if necessary. By default, output is a single string, and - logging.debug(output) will only put the log info heading on the first - line. This makes it hard to filter with grep.
- - """ - output = output.split('\n') - for line in output: - logging.debug(line) - - -def printlog(msg, **kwargs): - """Wrapper script around print to ensure that everything printed to - the screen also gets logged. - - """ - logging.info(msg) - if kwargs: - print(msg, **kwargs) - else: - print(msg) - sys.stdout.flush() - - -def last_n_lines(the_string, n_lines, truncation_message=None): - """Returns the last n lines of the given string - - Args: - the_string: str - n_lines: int - truncation_message: str, optional - - Returns a string containing the last n lines of the_string - - If truncation_message is provided, the returned string begins with - the given message if and only if the string is greater than n lines - to begin with. - """ - - lines = the_string.splitlines(True) - if len(lines) <= n_lines: - return_val = the_string - else: - lines_subset = lines[-n_lines:] - str_truncated = ''.join(lines_subset) - if truncation_message: - str_truncated = truncation_message + '\n' + str_truncated - return_val = str_truncated - - return return_val - - -def indent_string(the_string, indent_level): - """Indents the given string by a given number of spaces - - Args: - the_string: str - indent_level: int - - Returns a new string that is the same as the_string, except that - each line is indented by 'indent_level' spaces. - - In python3, this can be done with textwrap.indent. - """ - - lines = the_string.splitlines(True) - padding = ' ' * indent_level - lines_indented = [padding + line for line in lines] - return ''.join(lines_indented) - -# --------------------------------------------------------------------- -# -# error handling -# -# --------------------------------------------------------------------- - - -def fatal_error(message): - """ - Error output function - """ - logging.error(message) - raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) - - -# --------------------------------------------------------------------- -# -# Data conversion / manipulation -# -# --------------------------------------------------------------------- -def str_to_bool(bool_str): - """Convert a sting representation of as boolean into a true boolean. - - Conversion should be case insensitive. - """ - value = None - str_lower = bool_str.lower() - if str_lower in ('true', 't'): - value = True - elif str_lower in ('false', 'f'): - value = False - if value is None: - msg = ('ERROR: invalid boolean string value "{0}". ' - 'Must be "true" or "false"'.format(bool_str)) - fatal_error(msg) - return value - - -REMOTE_PREFIXES = ['http://', 'https://', 'ssh://', 'git@'] - - -def is_remote_url(url): - """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. - - """ - remote_url = False - for prefix in REMOTE_PREFIXES: - if url.startswith(prefix): - remote_url = True - return remote_url - - -def split_remote_url(url): - """check if the user provided a local file path or a - remote. If remote, try to strip off protocol info. - - """ - remote_url = is_remote_url(url) - if not remote_url: - return url - - for prefix in REMOTE_PREFIXES: - url = url.replace(prefix, '') - - if '@' in url: - url = url.split('@')[1] - - if ':' in url: - url = url.split(':')[1] - - return url - - -def expand_local_url(url, field): - """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. - - Note: local paths of LOCAL_PATH_INDICATOR have special meaning and - represent local copy only, don't work with the remotes. 
- - """ - remote_url = is_remote_url(url) - if not remote_url: - if url.strip() == LOCAL_PATH_INDICATOR: - pass - else: - url = os.path.expandvars(url) - url = os.path.expanduser(url) - if not os.path.isabs(url): - msg = ('WARNING: Externals description for "{0}" contains a ' - 'url that is not remote and does not expand to an ' - 'absolute path. Version control operations may ' - 'fail.\n\nurl={1}'.format(field, url)) - printlog(msg) - else: - url = os.path.normpath(url) - return url - - -# --------------------------------------------------------------------- -# -# subprocess -# -# --------------------------------------------------------------------- - -# Give the user a helpful message if we detect that a command seems to -# be hanging. -_HANGING_SEC = 300 - - -def _hanging_msg(working_directory, command): - print(""" - -Command '{command}' -from directory {working_directory} -has taken {hanging_sec} seconds. It may be hanging. - -The command will continue to run, but you may want to abort -manage_externals with ^C and investigate. A possible cause of hangs is -when svn or git require authentication to access a private -repository. On some systems, svn and git requests for authentication -information will not be displayed to the user. In this case, the program -will appear to hang. Ensure you can run svn and git manually and access -all repositories without entering your authentication information. - -""".format(command=command, - working_directory=working_directory, - hanging_sec=_HANGING_SEC)) - - -def execute_subprocess(commands, status_to_caller=False, - output_to_caller=False): - """Wrapper around subprocess.check_output to handle common - exceptions. - - check_output runs a command with arguments and waits - for it to complete. - - check_output raises an exception on a nonzero return code. if - status_to_caller is true, execute_subprocess returns the subprocess - return code, otherwise execute_subprocess treats non-zero return - status as an error and raises an exception. - - """ - cwd = os.getcwd() - msg = 'In directory: {0}\nexecute_subprocess running command:'.format(cwd) - logging.info(msg) - commands_str = ' '.join(commands) - logging.info(commands_str) - return_to_caller = status_to_caller or output_to_caller - status = -1 - output = '' - hanging_timer = Timer(_HANGING_SEC, _hanging_msg, - kwargs={"working_directory": cwd, - "command": commands_str}) - hanging_timer.start() - try: - output = subprocess.check_output(commands, stderr=subprocess.STDOUT, - universal_newlines=True) - log_process_output(output) - status = 0 - except OSError as error: - msg = failed_command_msg( - 'Command execution failed. Does the executable exist?', - commands) - logging.error(error) - fatal_error(msg) - except ValueError as error: - msg = failed_command_msg( - 'DEV_ERROR: Invalid arguments trying to run subprocess', - commands) - logging.error(error) - fatal_error(msg) - except subprocess.CalledProcessError as error: - # Only report the error if we are NOT returning to the - # caller. If we are returning to the caller, then it may be a - # simple status check. If returning, it is the callers - # responsibility determine if an error occurred and handle it - # appropriately. 
- if not return_to_caller: - msg_context = ('Process did not run successfully; ' - 'returned status {0}'.format(error.returncode)) - msg = failed_command_msg(msg_context, commands, - output=error.output) - logging.error(error) - logging.error(msg) - log_process_output(error.output) - fatal_error(msg) - status = error.returncode - finally: - hanging_timer.cancel() - - if status_to_caller and output_to_caller: - ret_value = (status, output) - elif status_to_caller: - ret_value = status - elif output_to_caller: - ret_value = output - else: - ret_value = None - - return ret_value - - -def failed_command_msg(msg_context, command, output=None): - """Template for consistent error messages from subprocess calls. - - If 'output' is given, it should provide the output from the failed - command - """ - - if output: - output_truncated = last_n_lines(output, 20, - truncation_message='[... Output truncated for brevity ...]') - errmsg = ('Failed with output:\n' + - indent_string(output_truncated, 4) + - '\nERROR: ') - else: - errmsg = '' - - command_str = ' '.join(command) - errmsg += """In directory - {cwd} -{context}: - {command} -""".format(cwd=os.getcwd(), context=msg_context, command=command_str) - - if output: - errmsg += 'See above for output from failed command.\n' - - return errmsg diff --git a/manage_externals/test/.coveragerc b/manage_externals/test/.coveragerc deleted file mode 100644 index 8b681888b8..0000000000 --- a/manage_externals/test/.coveragerc +++ /dev/null @@ -1,7 +0,0 @@ -[run] -branch = True -omit = test_unit_*.py - test_sys_*.py - /usr/* - .local/* - */site-packages/* \ No newline at end of file diff --git a/manage_externals/test/.gitignore b/manage_externals/test/.gitignore deleted file mode 100644 index dd5795998f..0000000000 --- a/manage_externals/test/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -# virtual environments -env_python* - -# python code coverage tool output -.coverage -htmlcov - diff --git a/manage_externals/test/.pylint.rc b/manage_externals/test/.pylint.rc deleted file mode 100644 index 64abd03e42..0000000000 --- a/manage_externals/test/.pylint.rc +++ /dev/null @@ -1,426 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-whitelist= - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=.git,.svn,env2 - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. -jobs=1 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Disable the message, report, category or checker with the given id(s). 
You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=bad-continuation,useless-object-inheritance - - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable= - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -msg-template={msg_id}:{line:3d},{column:2d}: {msg} ({symbol}) - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages -#reports=yes - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - - -[BASIC] - -# Naming hint for argument names -argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct argument names -argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Naming hint for attribute names -attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct attribute names -attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Regular expression matching correct class attribute names -class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression matching correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression matching correct constant names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. 
-docstring-min-length=-1 - -# Naming hint for function names -function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct function names -function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - -# Regular expression matching correct inline iteration names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Naming hint for method names -method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct method names -method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression matching correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -property-classes=abc.abstractproperty - -# Naming hint for variable names -variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct variable names -variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module -max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. 
-spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_,_cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. 
-redefining-builtins-modules=six.moves,future.builtins - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict,_fields,_replace,_source,_make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of statements in function / method body -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,TERMIOS,Bastion,rexec - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. 
Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/manage_externals/test/Makefile b/manage_externals/test/Makefile deleted file mode 100644 index 293e360757..0000000000 --- a/manage_externals/test/Makefile +++ /dev/null @@ -1,124 +0,0 @@ -python = not-set -verbose = not-set -debug = not-set - -ifneq ($(python), not-set) -PYTHON=$(python) -else -PYTHON=python -endif - -# we need the python path to point one level up to access the package -# and executables -PYPATH=PYTHONPATH=..: - -# common args for running tests -TEST_ARGS=-m unittest discover - -ifeq ($(debug), not-set) - ifeq ($(verbose), not-set) - # summary only output - TEST_ARGS+=--buffer - else - # show individual test summary - TEST_ARGS+=--buffer --verbose - endif -else - # show detailed test output - TEST_ARGS+=--verbose -endif - - -# auto reformat the code -AUTOPEP8=autopep8 -AUTOPEP8_ARGS=--aggressive --in-place - -# run lint -PYLINT=pylint -PYLINT_ARGS=-j 2 --rcfile=.pylint.rc - -# code coverage -COVERAGE=coverage -COVERAGE_ARGS=--rcfile=.coveragerc - -# source files -SRC = \ - ../checkout_externals \ - ../manic/*.py - -CHECKOUT_EXE = ../checkout_externals - -TEST_DIR = . - -README = ../README.md - -# -# testing -# -.PHONY : utest -utest : FORCE - $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_unit_*.py' - -.PHONY : stest -stest : FORCE - $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_sys_*.py' - -.PHONY : test -test : utest stest - -# -# documentation -# -.PHONY : readme -readme : $(CHECKOUT_EXE) - printf "%s\n\n" "-- AUTOMATICALLY GENERATED FILE. DO NOT EDIT --" > $(README) - printf "%s" '[![Build Status](https://travis-ci.org/ESMCI/manage_externals.svg?branch=master)](https://travis-ci.org/ESMCI/manage_externals)' >> $(README) - printf "%s" '[![Coverage Status](https://coveralls.io/repos/github/ESMCI/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/ESMCI/manage_externals?branch=master)' >> $(README) - printf "\n%s\n" '```' >> $(README) - $(CHECKOUT_EXE) --help >> $(README) - -# -# coding standards -# -.PHONY : style -style : FORCE - $(AUTOPEP8) $(AUTOPEP8_ARGS) --recursive $(SRC) $(TEST_DIR)/test_*.py - -.PHONY : lint -lint : FORCE - $(PYLINT) $(PYLINT_ARGS) $(SRC) $(TEST_DIR)/test_*.py - -.PHONY : stylint -stylint : style lint - -.PHONY : coverage -# Need to use a single coverage run with a single pattern rather than -# using two separate commands with separate patterns for test_unit_*.py -# and test_sys_*.py: The latter clobbers some results from the first -# run, even if we use the --append flag to 'coverage run'. -coverage : FORCE - $(PYPATH) $(COVERAGE) erase - $(PYPATH) $(COVERAGE) run $(COVERAGE_ARGS) $(TEST_ARGS) --pattern 'test_*.py' - $(PYPATH) $(COVERAGE) html - -# -# virtual environment creation -# -.PHONY : env -env : FORCE - $(PYPATH) virtualenv --python $(PYTHON) $@_$(PYTHON) - . 
$@_$(PYTHON)/bin/activate; pip install -r requirements.txt - -# -# utilites -# -.PHONY : clean -clean : FORCE - -rm -rf *~ *.pyc tmp fake htmlcov - -.PHONY : clobber -clobber : clean - -rm -rf env_* - -FORCE : - diff --git a/manage_externals/test/README.md b/manage_externals/test/README.md deleted file mode 100644 index 1e8f2eaa77..0000000000 --- a/manage_externals/test/README.md +++ /dev/null @@ -1,53 +0,0 @@ -# Testing for checkout_externals - -## Unit tests - -```SH - cd checkout_externals/test - make utest -``` - -## System tests - -```SH - cd checkout_externals/test - make stest -``` - -Example to run a single test: -```SH - cd checkout_externals - python -m unittest test.test_sys_checkout.TestSysCheckout.test_container_simple_required -``` - -## Static analysis - -checkout_externals is difficult to test thoroughly because it relies -on git and svn, and svn requires a live network connection and -repository. Static analysis will help catch bugs in code paths that -are not being executed, but it requires conforming to community -standards and best practices. autopep8 and pylint should be run -regularly for automatic code formatting and linting. - -```SH - cd checkout_externals/test - make lint -``` - -The canonical formatting for the code is whatever autopep8 -generates. All issues identified by pylint should be addressed. - - -## Code coverage - -All changes to the code should include maintaining existing tests and -writing new tests for new or changed functionality. To ensure test -coverage, run the code coverage tool: - -```SH - cd checkout_externals/test - make coverage - open -a Firefox.app htmlcov/index.html -``` - - diff --git a/manage_externals/test/doc/.gitignore b/manage_externals/test/doc/.gitignore deleted file mode 100644 index d4e11e5ea0..0000000000 --- a/manage_externals/test/doc/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -_build - diff --git a/manage_externals/test/doc/Makefile b/manage_externals/test/doc/Makefile deleted file mode 100644 index 18f4d5bf99..0000000000 --- a/manage_externals/test/doc/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -SPHINXPROJ = ManageExternals -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/manage_externals/test/doc/conf.py b/manage_externals/test/doc/conf.py deleted file mode 100644 index 469c0b0dc5..0000000000 --- a/manage_externals/test/doc/conf.py +++ /dev/null @@ -1,172 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Manage Externals documentation build configuration file, created by -# sphinx-quickstart on Wed Nov 29 10:53:25 2017. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. 
If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = ['sphinx.ext.autodoc', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Manage Externals' -copyright = u'2017, CSEG at NCAR' -author = u'CSEG at NCAR' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = u'1.0.0' -# The full version, including alpha/beta/rc tags. -release = u'1.0.0' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - '**': [ - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', - ] -} - - -# -- Options for HTMLHelp output ------------------------------------------ - -# Output file base name for HTML help builder. -htmlhelp_basename = 'ManageExternalsdoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). 
-    #
-    # 'papersize': 'letterpaper',
-
-    # The font size ('10pt', '11pt' or '12pt').
-    #
-    # 'pointsize': '10pt',
-
-    # Additional stuff for the LaTeX preamble.
-    #
-    # 'preamble': '',
-
-    # Latex figure (float) alignment
-    #
-    # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-#  author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (master_doc, 'ManageExternals.tex', u'Manage Externals Documentation',
-     u'CSEG at NCAR', 'manual'),
-]
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (master_doc, 'manageexternals', u'Manage Externals Documentation',
-     [author], 1)
-]
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-#  dir menu entry, description, category)
-texinfo_documents = [
-    (master_doc, 'ManageExternals', u'Manage Externals Documentation',
-     author, 'ManageExternals', 'One line description of project.',
-     'Miscellaneous'),
-]
-
-
-
diff --git a/manage_externals/test/doc/develop.rst b/manage_externals/test/doc/develop.rst
deleted file mode 100644
index b817b7b093..0000000000
--- a/manage_externals/test/doc/develop.rst
+++ /dev/null
@@ -1,202 +0,0 @@
-Developer Guidelines
-====================
-
-The manage externals utilities are a lightweight replacement for svn
-externals that will work with git repositories pulling in a mixture of
-git and svn dependencies.
-
-Given an externals description and a working copy:
-
-* *checkout_externals* attempts to make the working copy agree with the
-  externals description
-
-* *generate_externals* attempts to make the externals description agree
-  with the working copy.
-
-For these operations the utilities should:
-
-* operate consistently across git and svn
-
-* operate simply with minimal user complexity
-
-* operate robustly across a wide range of repository states
-
-* provide explicit error messages when a problem occurs
-
-* leave the working copy in a valid state
-
-The utilities in manage externals are **NOT** generic wrappers around
-revision control operations or a replacement for common tasks. Users
-are expected to:
-
-* create branches prior to starting development
-
-* add remotes and push changes
-
-* create tags
-
-* delete branches
-
-These types of tasks are often highly workflow dependent (e.g. branch
-naming conventions may vary between repositories), have the potential
-to destroy user data, introduce significant code complexity and 'edge
-cases' that are extremely difficult to detect and test, and often
-require subtle decision making, especially if a problem occurs.
-
-Users who want to automate these types of tasks are encouraged to create
-their own tools. The externals description files are explicitly versioned
-and the internal APIs are intended to be stable for these purposes.
-
-Core Design Principles
-----------------------
-
-1. Users can, and are actively encouraged to, modify the externals
-   directories using revision control outside of manage_externals
-   tools. You can't make any assumptions about the state of the
-   working copy. Examples: adding a remote, creating a branch,
-   switching to a branch, deleting the directory entirely.
-
-2. Given that the user can do anything, the manage externals library
-   cannot preserve state between calls. The only information it can
-   rely on is what it expects based on the content of the externals
-   description file, and what the actual state of the directory tree
-   is.
-
-3. Do *not* do anything that will possibly destroy user data!
-
-   a. Do not remove files from the file system. We are operating on
-      user supplied input. If you don't call 'rm', you can't
-      accidentally remove the user's data. Thinking of calling
-      ``shutil.rmtree(user_input)``? What if the user accidentally
-      specified user_input such that it resolves to their home
-      directory.... Yeah. Don't go there.
-
-   b. Rely on git and svn to do their job as much as possible. Don't
-      duplicate functionality. Examples:
-
-      i. We require the working copies to be 'clean' as reported by
-         ``git status`` and ``svn status``. What if there are misc
-         editor files floating around that prevent an update? Use the
-         git and svn ignore functionality so they are not
-         reported. Don't try to remove them from manage_externals or
-         determine if they are 'safe' to ignore.
-
-      ii. Do not use '--force'. Ever. This is a sign you are doing
-          something dangerous; it may not be what the user
-          wants. Remember, they are encouraged to modify their repo.
-
-4. There are often multiple ways to obtain a particular piece of
-   information from git. Scraping screen output is brittle and
-   generally not considered a stable API across different versions of
-   git. Given a choice between:
-
-   a. a lower level git 'plumbing' command that processes a
-      specific request and returns a success/failure status.
-
-   b. a high level git command that produces a bunch of output
-      that must be processed.
-
-   We always prefer the former. It almost always involves
-   writing and maintaining less code and is more likely to be
-   stable.
-
-5. Backward compatibility is critical. We have *nested*
-   repositories. They are trivially easy to change versions. They may
-   have very different versions of the top level manage_externals. The
-   ability to read and work with old model description files is
-   critical to avoid problems for users. We also have automated tools
-   (testdb) that must generate and read external description
-   files. Backward compatibility will make staging changes vastly
-   simpler.
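
To make principle 4 concrete, here is a minimal sketch (not code from this
repository, and assuming Python 3.7+ for ``subprocess.run``) of asking git a
specific question through a scriptable interface instead of scraping
human-oriented output:

.. code-block:: python

    import subprocess

    def current_commit(repo_dir):
        # 'rev-parse' answers one specific question with one machine-readable
        # value, so the call is stable across git versions.
        result = subprocess.run(['git', 'rev-parse', 'HEAD'], cwd=repo_dir,
                                capture_output=True, text=True, check=True)
        return result.stdout.strip()

    def working_copy_is_clean(repo_dir):
        # '--porcelain' is git's promised stable, scriptable output format;
        # empty output means no modified or untracked files.
        result = subprocess.run(['git', 'status', '--porcelain'], cwd=repo_dir,
                                capture_output=True, text=True, check=True)
        return result.stdout.strip() == ''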
-
-Model Users
------------
-
-Consider the needs of the following model users when developing manage_externals:
-
-* Users who will check out the code once, and never change versions.
-
-* Users who will check out the code once, then work for several years
-  without updating before trying to update or request integration.
-
-* Users who develop code but do not use revision control beyond the
-  initial checkout. If they have modified or untracked files in the
-  repo, they may be irreplaceable. Don't destroy user data.
-
-* Intermediate users who are working with multiple repos or branches
-  on a regular basis. They may only use manage_externals weekly or
-  monthly. Keep the user interface and documentation simple and
-  explicit. The more command line options they have to remember or
-  look up, the more frustrated they get.
-
-* Software engineers who use the tools multiple times a day. It should
-  get out of their way.
-
-User Interface
---------------
-
-Basic operation for the most standard use cases should be kept as
-simple as possible. Many users will only rarely run the manage
-utilities. Even advanced users don't like reading a lot of help
-documentation or struggling to remember commands and piece together
-what they need to run. Having many command line options, even if not
-needed, is extremely frustrating and overwhelming for most users. A few
-simple, explicitly named commands are better than a single command
-with many options.
-
-How will users get help if something goes wrong? This is a custom,
-one-off solution. Searching the internet for manage_externals will
-only return the user doc for this project at best. There isn't likely
-to be a stackoverflow question or blog post where someone else already
-answered a user's question. And very few people outside this community
-will be able to provide help if something goes wrong. The sooner we
-kick users out of these utilities and into standard version control
-tools, the better off they are going to be if they run into a problem.
-
-Repositories
-------------
-
-There are three basic types of repositories that must be considered:
-
-* container repositories - repositories that are always top level
-  repositories, and have a group of externals that must be managed.
-
-* simple repositories - repositories that are externals to another
-  repository, and do not have any of their own externals that will be
-  managed.
-
-* mixed use repositories - repositories that can act as a top level
-  container repository or as an external to a top level
-  container. They may also have their own sub-externals that are
-  required. They may have different externals needs depending on
-  whether they are top level or not.
-
-Repositories must be able to check out and switch to both branches and
-tags.
-
-Development
-===========
-
-The functionality to manage externals is broken into a library of core
-functionality and applications built with the library.
-
-The core library is called 'manic', a pseudo-homophone of (man)age
-(ex)ternals that is: short, pronounceable and spell-checkable. It is
-also no more or less meaningful to an unfamiliar user than a random
-jumble of letters forming an acronym.
-
-The core architecture of manic is:
-
-* externals description - an abstract description of an external,
-  including how to obtain it, where to obtain it, and where it goes in
-  the working tree.
-
-* externals - the software object representing an external.
-
-* source trees - collections of externals.
-
-* repository wrappers - object oriented wrappers around repository
-  operations, so the higher level management of the source tree and
-  externals does not have to be concerned with how a particular
-  external is obtained and managed.
-
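To make the layering concrete, the sketch below (illustrative only, not the
library's actual data structures; the field names mirror the externals
description config format, while the values are made up) shows the kind of
information an externals description carries for a single external, and why a
repository wrapper can hide protocol details from the source tree logic:

.. code-block:: python

    # One external, roughly as an externals description might capture it.
    external = {
        'local_path': 'externals/simp_tag',           # where it goes in the working tree
        'protocol': 'git',                            # selects the repository wrapper
        'repo_url': '/path/to/repos/simple-ext.git',  # where to obtain it
        'tag': 'tag1',                                # which version to obtain
        'required': True,
    }
    # The source tree iterates over entries like this and delegates all
    # git- or svn-specific work to the wrapper selected by 'protocol'.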
diff --git a/manage_externals/test/doc/index.rst b/manage_externals/test/doc/index.rst
deleted file mode 100644
index 9ab287ad8c..0000000000
--- a/manage_externals/test/doc/index.rst
+++ /dev/null
@@ -1,22 +0,0 @@
-.. Manage Externals documentation master file, created by
-   sphinx-quickstart on Wed Nov 29 10:53:25 2017.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-Welcome to Manage Externals's documentation!
-============================================
-
-.. toctree::
-   :maxdepth: 2
-   :caption: Contents:
-
-
-   develop.rst
-   testing.rst
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
diff --git a/manage_externals/test/doc/testing.rst b/manage_externals/test/doc/testing.rst
deleted file mode 100644
index 623f0e431c..0000000000
--- a/manage_externals/test/doc/testing.rst
+++ /dev/null
@@ -1,123 +0,0 @@
-Testing
-=======
-
-The manage_externals package has an automated test suite. All pull
-requests are expected to pass 100% of the automated tests, as well as
-be pep8 and lint 'clean' and maintain an approximately constant (at a
-minimum) level of code coverage.
-
-Quick Start
------------
-
-Do nothing approach
-~~~~~~~~~~~~~~~~~~~
-
-When you create a pull request on GitHub, Travis-CI continuous
-integration testing will run the test suite in both python2 and
-python3. Test results, lint results, and code coverage results are
-available online.
-
-Do something approach
-~~~~~~~~~~~~~~~~~~~~~
-
-In the test directory, run:
-
-.. code-block:: shell

-    make env
-    make lint
-    make test
-    make coverage
-
-
-Automated Testing
------------------
-
-The manage_externals manic library and executables are developed to be
-python2 and python3 compatible using only the standard library. The
-test suites meet the same requirements. But additional tools are
-required to provide lint and code coverage metrics and generate
-documentation. The requirements are maintained in the requirements.txt
-file, and can be automatically installed into an isolated environment
-via the Makefile.
-
-Bootstrap requirements:
-
-* python2 - version 2.7.x or later
-
-* python3 - version 3.6 tested; other versions may work
-
-* pip and virtualenv for python2 and python3
-
-Note: all make rules can be of the form ``make python=pythonX rule``
-or ``make rule``, depending on whether you want to use the default
-system python or specify a specific version.
-
-The Makefile in the test directory has the following rules:
-
-* ``make python=pythonX env`` - create a python virtual environment
-  for python2 or python3 and install all required packages. These
-  packages are required to run lint or coverage.
-
-* ``make style`` - runs autopep8
-
-* ``make lint`` - runs autopep8 and pylint
-
-* ``make test`` - run the full test suite
-
-* ``make utest`` - run just the unit tests
-
-* ``make stest`` - run just the system integration tests
-
-* ``make coverage`` - run the full test suite through the code
-  coverage tool and generate an html report.
-
-* ``make readme`` - automatically generate the README files.
-
-* ``make clean`` - remove editor and pyc files
-
-* ``make clobber`` - remove all generated test files, including
-  virtual environments, coverage reports, and temporary test
-  repository directories.
-
-Unit Tests
-----------
-
-Unit tests are probably not 'true unit tests' for the pedantic, but
-are pragmatic unit tests. They cover small practical code blocks:
-functions, class methods, and groups of functions and class methods.
-
-System Integration Tests
-------------------------
-
-NOTE(bja, 2017-11) The system integration tests currently do not include svn repositories.
-
-The manage_externals package is extremely tedious and error-prone to test manually.
-
-Combinations that must be tested to ensure basic functionality are:
-
-* container repository pulling in simple externals
-
-* container repository pulling in mixed externals with sub-externals.
- -* mixed repository acting as a container, pulling in simple externals and sub-externals - -Automatic system tests are handled the same way manual testing is done: - -* clone a test repository - -* create an externals description file for the test - -* run the executable with the desired args - -* check the results - -* potentially modify the repo (checkout a different branch) - -* rerun and test - -* etc - -The automated system stores small test repositories in the main repo -by adding them as bare repositories. These repos are cloned via a -subprocess call to git and manipulated during the tests. diff --git a/manage_externals/test/repos/README.md b/manage_externals/test/repos/README.md deleted file mode 100644 index 026b684ea3..0000000000 --- a/manage_externals/test/repos/README.md +++ /dev/null @@ -1,33 +0,0 @@ -Git and svn repositories for testing git and svn-related behavior. For usage and terminology notes, see test/test_sys_checkout.py. - -For git repos: To list files and view file contents at HEAD: -``` -cd -git ls-tree --full-tree -r --name-only HEAD -git cat-file -p HEAD: -``` - -File contents at a glance: -``` -container.git/ - readme.txt - -simple-ext.git/ - (has branches: feature2, feature3) - (has tags: tag1, tag2) - readme.txt - simple_subdir/subdir_file.txt - -simple-ext-fork.git/ - (has tags: abandoned-feature, forked-feature-v1, tag1) - (has branch: feature2) - readme.txt - -mixed-cont-ext.git/ - (has branch: new-feature) - readme.txt - sub-externals.cfg ('simp_branch' section refers to 'feature2' branch in simple-ext.git/ repo) - -error/ - (no git repo here, just a readme.txt in the clear) -``` diff --git a/manage_externals/test/repos/container.git/HEAD b/manage_externals/test/repos/container.git/HEAD deleted file mode 100644 index cb089cd89a..0000000000 --- a/manage_externals/test/repos/container.git/HEAD +++ /dev/null @@ -1 +0,0 @@ -ref: refs/heads/master diff --git a/manage_externals/test/repos/container.git/config b/manage_externals/test/repos/container.git/config deleted file mode 100644 index e6da231579..0000000000 --- a/manage_externals/test/repos/container.git/config +++ /dev/null @@ -1,6 +0,0 @@ -[core] - repositoryformatversion = 0 - filemode = true - bare = true - ignorecase = true - precomposeunicode = true diff --git a/manage_externals/test/repos/container.git/description b/manage_externals/test/repos/container.git/description deleted file mode 100644 index 498b267a8c..0000000000 --- a/manage_externals/test/repos/container.git/description +++ /dev/null @@ -1 +0,0 @@ -Unnamed repository; edit this file 'description' to name the repository. diff --git a/manage_externals/test/repos/container.git/info/exclude b/manage_externals/test/repos/container.git/info/exclude deleted file mode 100644 index a5196d1be8..0000000000 --- a/manage_externals/test/repos/container.git/info/exclude +++ /dev/null @@ -1,6 +0,0 @@ -# git ls-files --others --exclude-from=.git/info/exclude -# Lines that start with '#' are comments. 
-# For a project mostly in C, the following would be a good set of -# exclude patterns (uncomment them if you want to use them): -# *.[oa] -# *~ diff --git a/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 deleted file mode 100644 index f65234e17f32800b1be0aa9908cc706458b14605..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 133 zcmV;00DAv;0acB$4#OY}L_6~pma=t7)Fr=DfpLNr2P1F?mVSF_r3_t8x_fuJAR6GY zuD1yyS3=Xu)WDKA@Ra});Xx7fWf1zv2~1TS@=422pQw4`eHcB9X3EwU=O)-GQ}s5s nqUZ%S7HaN3i|$`ck;m7Sz6S{Y_}`UoN%K{iOGozsJ+C?sZtFeC diff --git a/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de b/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de deleted file mode 100644 index 9759965b1ba440f1899216c1c82c0780fb65f46e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 136 zcmV;30C)d*0hNtQ3c@fDKwak)a{8g?ULFizQ5yOj!O$#BY{3QX>9e{j4e8<)

AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn~QNUpn5)Pnq=ii~6DWK2pp8O#dS+Wke_L diff --git a/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 b/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 deleted file mode 100644 index 460fd7781917e095c826e8bc77ad53d943f199aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 81 zcmV-X0IvUd0R_Ry4S+BV1VG+Yu@&$_q5vvMU;#^(9XS?9_smrFie(;Fw=7}|1e56wgzpa&}fBkqfO*k&i_)dY`l?1hv=p}Fj<2Ge{uRcq{saZ z%j{g@HZ3wNvQv&lo|o_6gr*rieLQOSK`~u|R`NhFUI)68@B`BlpbA~$UTB9Ga*~zx a%Jelj*-|I)LF@ttC5adD0subgY(|R<&Qf{+ diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379 b/manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379 deleted file mode 100644 index 032f4b1ca6bf0d25f1f9f419b1e7ab2aae1ef6c8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 171 zcmV;c095~Y0Zomu4uUWgMV;SQFt~~^I5;?f2d%r6C&MNz$f6Pi}^^zp3SC&knSt>TGbz78}9=ZOL8&?Fv(cG!`VtgKgN ZY{1E$27wP^7dQxMoWuzLd;nlTMfbC)Q$zp& diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 b/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 deleted file mode 100644 index 13d15a96a5071e98f0ba0cfbbdb2992c03990151..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 136 zcmV;30C)d*0hNtG4#FT106p`H{eaDGEd>%|)SJ(su+=pM4B|mwZ+(Kd$t05rB_(M< zmNu<2!_Lge2B#67kHO(Q1a!#· -MP…tæÇM¯0v&ù>î°KciåÇüÇ8V; \ No newline at end of file diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f b/manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f deleted file mode 100644 index 7bacde68db5f1201015d4532aba9551660b05399..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 167 zcmV;Y09gNc0hNxy4Z<)C0C{H$F90%4+_Vxxz$T7kLnR0(O(n*sumP`oo$loMcuWmC z-)~w~gsCf*eiQX=*_sZfntbAHl&dTZ&5gE zmqjc(UfS(h;i3i3C0B(5e{oub>rV4>ggxy?ABf1q79*mQ-&@oFEO*Ws<|S?Qy{d)p VGuU)ju(jTFZd1AL+y`g^OR&}EOOOBn diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 b/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 deleted file mode 100644 index 8c6b04837ae4456cc5dc53ea7572610e6635d0d8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn`tC9-|*xG$A9N diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c b/manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c deleted file mode 100644 index 1a35b74d479fdfb4bf24bcf199663fbb52036eee..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 130 zcmV-|0Db>>0V^p=O;s>7GGs6`FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3=S(WPsHSWJ^Nk8w52YBee>u{sG;Ra}~+n_5wlT9lWV kQ>>Sqmd-GFVdTA?;?e&$HE}Vp-My(>AuMbJ03PHp2Cniq;{X5v diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 deleted file mode 100644 index 
f65234e17f32800b1be0aa9908cc706458b14605..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 133 zcmV;00DAv;0acB$4#OY}L_6~pma=t7)Fr=DfpLNr2P1F?mVSF_r3_t8x_fuJAR6GY zuD1yyS3=Xu)WDKA@Ra});Xx7fWf1zv2~1TS@=422pQw4`eHcB9X3EwU=O)-GQ}s5s nqUZ%S7HaN3i|$`ck;m7Sz6S{Y_}`UoN%K{iOGozsJ+C?sZtFeC diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 b/manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 deleted file mode 100644 index 6b2146cae4080fe2369401ecf5009fd9612c363c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 129 zcmV-{0Dk{?0V^p=O;s>7GGs6`FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3=S(WPsHSWJ^Nk8w52YBee>u{sG;Ra}~+n_5wlT9lWV jQ>>Sqmd+qz)?FYbw&JLT!Zra%FYj6GAw1sz`R^`7StK`- diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a b/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a deleted file mode 100644 index 852a051139..0000000000 --- a/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a +++ /dev/null @@ -1 +0,0 @@ -x•ANÄ09çsãÄÊŽ;‘~2±ÛÊJÄ^MÆ,Ï'ì8õ¥«ÔÚ¾_•ÆyyR3ØlmvˆÆ•PB°Œ˜FCñ¼Î>»y¸± *Ùbla’«-n^]D§¥,Ùx»fvÖû2p×­ }¢ÒGÍzå¿xï‰å‚ÜßÈNvq~Z¢¡Òc›âÔ èÇyäç+QåT¤íÔt;]ŠC:ÝA¹Õg¥¿AÚ( XA÷G‰®µ*=i\†_øÀ^' \ No newline at end of file diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332 b/manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332 deleted file mode 100644 index 682d799898667fc1b506c6daece665c1af824fc1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 169 zcmV;a09OBa0X2=i4uUWgg`MwHFu01a>EM8d!9g*|M#xPmH`1igdRvT%@!c&N$Mf@@ z(`wU3>1MmAof<6C(>I`v6dJAYeYA@l%k@73%f=gNbntJ=1Cup4@hq3GQ+7Tcu*$C$ z?z1w-GQSj97De^`@|sp*JpN(#NilT+t9T-4S&VZ28ie!20Ci{*k3u`_$Vpb#D>F9W XWKV;@2eAt}0BM}W2>^TmrSn6;Se#N% diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 b/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 deleted file mode 100644 index 33c9f6cdf1..0000000000 --- a/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 +++ /dev/null @@ -1 +0,0 @@ -x•ŽKNÄ0Yç½cÅÈŸLlK7é´Ÿ5#{ä´ŽO˜°z›ªÒ“¶mW%Ó“v€8¹³äÈÁ&¶eFö²òìÙ±$/¦äéÆUÉžÝz°RœÎJ¶¡”%ZY“ |YS“ìÄC/­Ó'*}ÔÜA¯ü7ïC¸ŸÇÛ‘²ÉÏ‹1‘^L0f’Ç7Åÿ¬©cì übå/ª¼Jo5½-Å®;íî Üê³Ò…¿AÚH:XA÷D×Z:ïÚ‡èè8M¿¸^æ \ No newline at end of file diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd b/manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd deleted file mode 100644 index 73e7cbfbc8e106cee027f798dcb163ec6c5d21e6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 130 zcmV-|0Db>>0V^p=O;s>7GGs6`FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3=S(WPsHSWJ^Nk8w52YBee>u{sG;Ra}~+n_5wlT9lWV kQ>>Sqmd?O9-L+qLU;NqZBmPS=oA+@UXed_#01>J$$h2KJZU6uP diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 b/manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 deleted file mode 100644 index 189ed85bb3c8b8642ae353d29a759f67040b5786..0000000000000000000000000000000000000000 
GIT binary patch literal 0 HcmV?d00001 literal 130 zcmV-|0Db>>0V^p=O;s>7GGs6`FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3=S(WPsHSWJ^Nk8w52YBee>u{sG;Ra}~+n_5wlT9lWV kQ>>Sqmd?Q7Ty=.p¢ˆA -!ìÜ  w4ݵ¡¸Qªé€Øú=©Ã¤á¨ÏZ9ü0„þûkÌ éžG)* \ No newline at end of file diff --git a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master b/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master deleted file mode 100644 index 1e0eef1ea3..0000000000 --- a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master +++ /dev/null @@ -1 +0,0 @@ -6fc379457ecb4e576a13c7610ae1fa73f845ee6a diff --git a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature b/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature deleted file mode 100644 index 607e80d1bc..0000000000 --- a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature +++ /dev/null @@ -1 +0,0 @@ -9580ecc12f16334ce44e42287d5d46f927bb7b75 diff --git a/manage_externals/test/repos/simple-ext-fork.git/HEAD b/manage_externals/test/repos/simple-ext-fork.git/HEAD deleted file mode 100644 index cb089cd89a..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/HEAD +++ /dev/null @@ -1 +0,0 @@ -ref: refs/heads/master diff --git a/manage_externals/test/repos/simple-ext-fork.git/config b/manage_externals/test/repos/simple-ext-fork.git/config deleted file mode 100644 index 04eba17870..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/config +++ /dev/null @@ -1,8 +0,0 @@ -[core] - repositoryformatversion = 0 - filemode = true - bare = true - ignorecase = true - precomposeunicode = true -[remote "origin"] - url = /Users/andreb/projects/ncar/git-conversion/checkout-model-dev/cesm-demo-externals/manage_externals/test/repos/simple-ext.git diff --git a/manage_externals/test/repos/simple-ext-fork.git/description b/manage_externals/test/repos/simple-ext-fork.git/description deleted file mode 100644 index 498b267a8c..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/description +++ /dev/null @@ -1 +0,0 @@ -Unnamed repository; edit this file 'description' to name the repository. diff --git a/manage_externals/test/repos/simple-ext-fork.git/info/exclude b/manage_externals/test/repos/simple-ext-fork.git/info/exclude deleted file mode 100644 index a5196d1be8..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/info/exclude +++ /dev/null @@ -1,6 +0,0 @@ -# git ls-files --others --exclude-from=.git/info/exclude -# Lines that start with '#' are comments. 
-# For a project mostly in C, the following would be a good set of -# exclude patterns (uncomment them if you want to use them): -# *.[oa] -# *~ diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f deleted file mode 100644 index ae28c037e5e8773bab7a7f9b6b050a01c3c8402a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznAV=y!@Ff%bx&`ZxO$xP47FG^)_lzn&Ekz!U-;cU~)E`&5u^pl|A>?=DrCt|Zp*KGhtORPb%uc6q&p;{~x`YAHy z#2GbEv6YQH#`fOIuH1gSE*yL=Ojyh~{nIdqe*nnpf*T V&^Fln@|2-4tBgli^9u#mM`!{nPaFUM diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c b/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c deleted file mode 100644 index 564e7bba63..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c +++ /dev/null @@ -1,2 +0,0 @@ -x%ŒK -Â0@]çse&ßDÔ›L’!´˜¶„l¼½).¼Åãu.@Æ_ö¸Jê0ÇàìlM–Ä~v:ÄèmLÌÆi™åY*/ŸÛè@ŽpòÞ W ˆJ¥&Üå¿ø)´*Í \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f b/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f deleted file mode 100644 index 0d738af68b021dcd9918c8f2047aa4fff55bf6e4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznO)_H(Z zem6QZm^^8RnmiI`ubHzgrPye+FKRN0H9F;O5%17>8Q`NMJ?ehWT|!t)2i0Np3Z=u$N9svC-|`;J-!jY5fUp SfzGuJhQeX2oy8Y4sYkDN{z{Sn diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf b/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf deleted file mode 100644 index 0999f0d4b9b4297e5677a96f3c9677bf408ee8d9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lzniemt(y-3DP$mtIvOOf diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 b/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 deleted file mode 100644 index 9da8434f65ef3bfdb57cb8117e312a56663a31a2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 159 zcmV;Q0AT-k0hNwh3c@fD0R7G>_5#Z8=Ft>H)JyoiX*NFFNQn2h9>Kq1U|^;?&-V_@ zcGH_GU?Q(kip?&NPmV1)rl3VdZ7GGKLl-2Pw=`WkjA`(0bci¹`ý}0…M”؇BÚÁs0/µâ¿}öï:: \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b b/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b deleted file mode 100644 index 9a31c7ef2e..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b +++ /dev/null @@ -1,2 +0,0 @@ -x•ŽKnÃ0 ³Ö)x”,ÊI½EÑŸ´–A¹Ü#t7o€ŒìÛ¶vp.žzS…ÁšÆƒ&oÑ„©d¦8¹xLd@™Ì‹›ÖCð6f¯% -œpt$‰m&ŽJd…¦¡øhøÝ—½Á—VxÔÒ®ùÉpŸ7^/²o7°d­K1ÂGDsØ#¯ë¿æ{o?Z 7®²€,\g½˜AV=y!@Ff%bx&`ZxO$xP47FG^)_lznAV=y!@Ff%bx&`ZxO$xP47FG^)_lznvGy0&Z${j?E8>6rD10GHRYE2d diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae 
b/manage_externals/test/repos/simple-ext-fork.git/objects/5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae deleted file mode 100644 index 25488b7bfe52fd0d530e20393b752815d9aaf16f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 93 zcmV-j0HXhR0S(JB4ue1p1i;kyiv0l8%LNPZurX=iP=VtPL2T>`g? zkh3=;83|{%kTn0{lH8#Nev_`XVPmImRbRpwOIgehnBL{IWwXg diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 b/manage_externals/test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 deleted file mode 100644 index d3dfe31113715fe07ea6833f0c2a25e868ac20b2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 165 zcmV;W09yZe0hNwR4#F@DL|Nw)z5pm6r*$QSfIWwB8k=t$6s6+&lq0Ykjo#?ZSf=UT zz+~D012)4Gj)~xM%ugTv-b1AFi TQ|c4S3@Y4~D&BknM3zUWvn5b3 diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 b/manage_externals/test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 deleted file mode 100644 index 0a2ec0494bc1600144cb54b61a6d7b43c7f3e806..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 119 zcmV--0Eqv10X50d4FVw$MNz-0;#IJTYiz*^YyjkKAhHY@MpwI+#E{&tb3>7U^YwDN zr`$2}=y`92Fm{8oNzW$w#gQ$c3ivT<^#zfQHTwFÁ©¹£rPkÖSèkJ´^ë \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 b/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 deleted file mode 100644 index d8ba654548..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 +++ /dev/null @@ -1,3 +0,0 @@ -xUÌ[ -Â0…aŸ³ŠÙ@%Is+ˆ¨;™¤c/˜DÂq÷VðÅ×Ã>Æ ”w‡WJ Ú˜>8ò!¤!&'ƒS=)í±×CòF+ÑI2‚ßO‚Ts^Xðn`Ä2ÖBcw'ä­Ñw¨Á -\ËØNqÝ›F—)ãò8îç3(«¬Œ2:é¥ÿü0x-<×!6,i ª9 \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 b/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 deleted file mode 100644 index 9b40a0afa00b93a318cd503d3b29db1162978b03..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznD—}ÂD>£Nƒv“{ŠZ¼M˜I…¥?jƒ‹Ìpžs8ÄgøÓ½„qÚ¥ZŽ€qo j†­f­ÕJ×{]þÕµÓ¥®¥Om/¨3Ü$ô¥‰Q_@ÞH© \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/packed-refs b/manage_externals/test/repos/simple-ext-fork.git/packed-refs deleted file mode 100644 index b8f9e86308..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/packed-refs +++ /dev/null @@ -1,5 +0,0 @@ -# pack-refs with: peeled fully-peeled sorted -36418b4e5665956a90725c9a1b5a8e551c5f3d48 refs/heads/feature2 -9b75494003deca69527bb64bcaa352e801611dd2 refs/heads/master -11a76e3d9a67313dec7ce1230852ab5c86352c5c refs/tags/tag1 -^9b75494003deca69527bb64bcaa352e801611dd2 diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 b/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 deleted file mode 100644 index d223b0362d..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 +++ /dev/null @@ -1 +0,0 @@ -f268d4e56d067da9bd1d85e55bdc40a8bd2b0bca diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature 
deleted file mode 100644 index 8a18bf08e9..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature +++ /dev/null @@ -1 +0,0 @@ -a42fe9144f5707bc1e9515ce1b44681f7aba6f95 diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 deleted file mode 100644 index 2764b552d5..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 +++ /dev/null @@ -1 +0,0 @@ -8d2b3b35126224c975d23f109aa1e3cbac452989 diff --git a/manage_externals/test/repos/simple-ext.git/HEAD b/manage_externals/test/repos/simple-ext.git/HEAD deleted file mode 100644 index cb089cd89a..0000000000 --- a/manage_externals/test/repos/simple-ext.git/HEAD +++ /dev/null @@ -1 +0,0 @@ -ref: refs/heads/master diff --git a/manage_externals/test/repos/simple-ext.git/config b/manage_externals/test/repos/simple-ext.git/config deleted file mode 100644 index e6da231579..0000000000 --- a/manage_externals/test/repos/simple-ext.git/config +++ /dev/null @@ -1,6 +0,0 @@ -[core] - repositoryformatversion = 0 - filemode = true - bare = true - ignorecase = true - precomposeunicode = true diff --git a/manage_externals/test/repos/simple-ext.git/description b/manage_externals/test/repos/simple-ext.git/description deleted file mode 100644 index 498b267a8c..0000000000 --- a/manage_externals/test/repos/simple-ext.git/description +++ /dev/null @@ -1 +0,0 @@ -Unnamed repository; edit this file 'description' to name the repository. diff --git a/manage_externals/test/repos/simple-ext.git/info/exclude b/manage_externals/test/repos/simple-ext.git/info/exclude deleted file mode 100644 index a5196d1be8..0000000000 --- a/manage_externals/test/repos/simple-ext.git/info/exclude +++ /dev/null @@ -1,6 +0,0 @@ -# git ls-files --others --exclude-from=.git/info/exclude -# Lines that start with '#' are comments. 
-# For a project mostly in C, the following would be a good set of -# exclude patterns (uncomment them if you want to use them): -# *.[oa] -# *~ diff --git a/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f deleted file mode 100644 index ae28c037e5e8773bab7a7f9b6b050a01c3c8402a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznbW_*ltIGSP}@rN;eRaRvTe4jec)&9#mV ztc{ztsDi^RDN|POQ7IsM3R)Zn^fb6Ap%fNDG*4c1YCyeUO2}@P$+4Hjj2b9dvLb3- zmJ-WQ2E*@mn-@6i1g9x43VXTpcO0*k$48gudH@`(^)|-1gKbZJZ&teIHT_#Om*271 ST(#ZC=?eOIX=gtC)=0=UK}@j# diff --git a/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 b/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 deleted file mode 100644 index 32d6896e3cb813edde3e4f0d0ca2d21963c2f1b0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznåY*/ŸÛè@ŽpòÞ W ˆJ¥&Üå¿ø)´*Í \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d b/manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d deleted file mode 100644 index acaf7889b47c54ee0dea121c73d505ca14ad369b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 83 zcmV-Z0IdIb0ZYosPg1ZjWC+Q~ELKR%%t=)!&d4v#Nl{3x$Sf{V$jnnnRLILO%1z8s pNX|%2&dx6_QAh$}pz8eG%#xDS6o{JQg2bZYRJa;FE&z4gA7ySEC>H<# diff --git a/manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11 b/manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11 deleted file mode 100644 index 0f0db6797fe19372f1d2122ebe8aa5361df07c61..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 90 zcmV-g0HyzU0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn_5#Z8=Ft>H)JyoiX*NFFNQn2h9>Kq1U|^;?&-V_@ zcGH_GU?Q(kip?&NPmV1)rl3VdZ7GGKLl-2Pw=`WkjA`(0bciMsIi;$9n_k z!>-M$Ac)DAYy~^^qUu9WLY{J}xkT>CQ3)XSxÁ©¹£rPkÖSèkJ´^ë \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7 b/manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7 deleted file mode 100644 index 1b3b2724425492336f2816dc6ba4d818474f3c78..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 137 zcmV;40CxX)0Tql%4#F@D06FIs{s71hZqrBz@d!Q;98>0V^p=O;s>7G-EI{FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3KlvCEtNF?@%PbVOT{Nm)vLb%0Bl_``r7C@umAu6 diff --git a/manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4 b/manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4 deleted file mode 100644 index f08ae820c9c89927f9898c5646134f7c519a6b04..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0V^p=O;s>4W-v4`Ff%bxC@xJ($t;Rb%gjmDE2$`95K$NWyZdy5$@Np$ Sc0Fs5Xy2&+OcnsW_!K#a0~pW% diff --git a/manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff b/manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff deleted file mode 100644 index 
4018ea5914ee89b76d88fc282b6c98d80e4aaccd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznQTy(n&njm5*>#d|ehiI!uT zo%TC>#d3w1Jtzo300Izz00bZa0SG_<0uX?}pA`5U@~wkvm49vLDta+zt1RRiHZQ=L;CU@@4?#yhXLbg~~mc>fT`3nX?Ls&t(q_@;kd&|6q zPmnj!N-"j%`;)7F<&!^&p5-&NVNc`P`LVZ4M1vhI123LfW679Jk&yS}h4J?+gU zS3MZfY8^$e`Aiw@d{Ry%1dI`+SBZs%>uw7^VT{$-o>k4Xyk=x|K`9*$$d+#CX^Zv$ zf4Az(2{}^ad;78;-W-sx-=Fn>9+LNu&)3o0-}x~i0yI}C8O(!53mF)Lc9yx|g^G|c ej~cmPzoZI_YZ6{Gdr_88^Nk1hnf(AO7WJ=&dj!fDn0uL2;xcFgCGi};7P%MLA>_hL1!bWwMgk<=JI{-``-8Sx~2ler?X@>qvZhS z>jPT>)6KB%uk~`LVLed!QU%IMrh0nGt~zC~p7r~M2xW}B&Vh{`_(=}ATKsQ!Fzyy7 zc4uq7<>SMvwZ?-a) z)0B>oW87dZf4*6*J;dJdmgjkw&ZC`k2j2VA6K~^ji#5jLtJ2Wv5xP@BFE&4IU%WYq zems5i{p{Q%din14uL~NZ-LD^~H)6E)d2he;%)5-9fBCR^<=fA>YX}J*yI_JaqN6CX rO2(X~(lQ>UT$426lExq+3DoM8C~b*QiI&DOO{6l~rdCtJNj(1>+FjA$ diff --git a/manage_externals/test/repos/simple-ext.svn/db/revs/0/2 b/manage_externals/test/repos/simple-ext.svn/db/revs/0/2 deleted file mode 100644 index 99a14cf4b7f76ee26eba753930dd53704627e1f5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 816 zcmZvZJ!lj`6vt;bpA3nm2sW<~jBwfc+L_sFOu`))jUhw{0gGh577vfyac(dHi+~?R zh?R}CT|}_e+9r*qV5N;MStC{G3)m;lNk zCm~lv(?C<68Ar-%YL%o`uYzC#rkyZdVR9kD9Q-!%}%S;O#unUwLYMU_)@exaSbTJeGis^rnsBF zpQ$A6r{xuTL@SSJeWQcholp+qxI~k*M^FFKr;u_G6ohjsvpQAEc$GQHnKxDmExDoA zqy#EZIZ5|4d3k=%Gb>dAc}$Wl&XYP%bKGo< zTmjWejYqZH7n7Q%0^8yGT2lbUjl12Rz=7w$2rz4bdULXUmfC*4ic;URV zT5%(!r#bby^on`qSY{66?ef&aVF)RMbPpw2dC8>a+6zOarB*P`g_V_4QdROm3)2?cKX5He8X8kD zRTa09dUI>+4($Ie?{8&&x)?7E=6hH1%;vc$isp`@N5_6ekGpR>52FX2McUt&KhAE3 z8yY={3bg&={dRN#qtCB?emH(VI)R>Tecg&qWAyFm%XhPPH2SpjWe4}>(DxsApLgH< zK6?@&t~A)(Gf_HKN~e|J&g9-2E__xI=82{tXa=RNLR*{|XCxC^W?VX{y(YOLf>dnb E4; /dev/null || exit 1 - -# Check that the author of this commit has the rights to perform -# the commit on the files and directories being modified. -commit-access-control.pl "$REPOS" "$TXN" commit-access-control.cfg || exit 1 - -# All checks passed, so allow the commit. -exit 0 diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/pre-lock.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/pre-lock.tmpl deleted file mode 100755 index 148582a689..0000000000 --- a/manage_externals/test/repos/simple-ext.svn/hooks/pre-lock.tmpl +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/sh - -# PRE-LOCK HOOK -# -# The pre-lock hook is invoked before an exclusive lock is -# created. Subversion runs this hook by invoking a program -# (script, executable, binary, etc.) named 'pre-lock' (for which -# this file is a template), with the following ordered arguments: -# -# [1] REPOS-PATH (the path to this repository) -# [2] PATH (the path in the repository about to be locked) -# [3] USER (the user creating the lock) -# [4] COMMENT (the comment of the lock) -# [5] STEAL-LOCK (1 if the user is trying to steal the lock, else 0) -# -# If the hook program outputs anything on stdout, the output string will -# be used as the lock token for this lock operation. If you choose to use -# this feature, you must guarantee the tokens generated are unique across -# the repository each time. -# -# If the hook program exits with success, the lock is created; but -# if it exits with failure (non-zero), the lock action is aborted -# and STDERR is returned to the client. -# -# The default working directory for the invocation is undefined, so -# the program should set one explicitly if it cares. 
-# -# On a Unix system, the normal procedure is to have 'pre-lock' -# invoke other programs to do the real work, though it may do the -# work itself too. -# -# Note that 'pre-lock' must be executable by the user(s) who will -# invoke it (typically the user httpd runs as), and that user must -# have filesystem-level permission to access the repository. -# -# On a Windows system, you should name the hook program -# 'pre-lock.bat' or 'pre-lock.exe', -# but the basic idea is the same. -# -# The hook program runs in an empty environment, unless the server is -# explicitly configured otherwise. For example, a common problem is for -# the PATH environment variable to not be set to its usual value, so -# that subprograms fail to launch unless invoked via absolute path. -# If you're having unexpected problems with a hook program, the -# culprit may be unusual (or missing) environment variables. -# -# CAUTION: -# For security reasons, you MUST always properly quote arguments when -# you use them, as those arguments could contain whitespace or other -# problematic characters. Additionally, you should delimit the list -# of options with "--" before passing the arguments, so malicious -# clients cannot bootleg unexpected options to the commands your -# script aims to execute. -# For similar reasons, you should also add a trailing @ to URLs which -# are passed to SVN commands accepting URLs with peg revisions. -# -# Here is an example hook script, for a Unix /bin/sh interpreter. -# For more examples and pre-written hooks, see those in -# the Subversion repository at -# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and -# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ - - -REPOS="$1" -PATH="$2" -USER="$3" -COMMENT="$4" -STEAL="$5" - -# If a lock exists and is owned by a different person, don't allow it -# to be stolen (e.g., with 'svn lock --force ...'). - -# (Maybe this script could send email to the lock owner?) -SVNLOOK=/opt/homebrew/Cellar/subversion/1.14.2_1/bin/svnlook -GREP=/bin/grep -SED=/bin/sed - -LOCK_OWNER=`$SVNLOOK lock "$REPOS" "$PATH" | \ - $GREP '^Owner: ' | $SED 's/Owner: //'` - -# If we get no result from svnlook, there's no lock, allow the lock to -# happen: -if [ "$LOCK_OWNER" = "" ]; then - exit 0 -fi - -# If the person locking matches the lock's owner, allow the lock to -# happen: -if [ "$LOCK_OWNER" = "$USER" ]; then - exit 0 -fi - -# Otherwise, we've got an owner mismatch, so return failure: -echo "Error: $PATH already locked by ${LOCK_OWNER}." 1>&2 -exit 1 diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/pre-revprop-change.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/pre-revprop-change.tmpl deleted file mode 100755 index 8b065d7c79..0000000000 --- a/manage_externals/test/repos/simple-ext.svn/hooks/pre-revprop-change.tmpl +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/sh - -# PRE-REVPROP-CHANGE HOOK -# -# The pre-revprop-change hook is invoked before a revision property -# is added, modified or deleted. Subversion runs this hook by invoking -# a program (script, executable, binary, etc.) 
named 'pre-revprop-change' -# (for which this file is a template), with the following ordered -# arguments: -# -# [1] REPOS-PATH (the path to this repository) -# [2] REV (the revision being tweaked) -# [3] USER (the username of the person tweaking the property) -# [4] PROPNAME (the property being set on the revision) -# [5] ACTION (the property is being 'A'dded, 'M'odified, or 'D'eleted) -# -# [STDIN] PROPVAL ** the new property value is passed via STDIN. -# -# If the hook program exits with success, the propchange happens; but -# if it exits with failure (non-zero), the propchange doesn't happen. -# The hook program can use the 'svnlook' utility to examine the -# existing value of the revision property. -# -# WARNING: unlike other hooks, this hook MUST exist for revision -# properties to be changed. If the hook does not exist, Subversion -# will behave as if the hook were present, but failed. The reason -# for this is that revision properties are UNVERSIONED, meaning that -# a successful propchange is destructive; the old value is gone -# forever. We recommend the hook back up the old value somewhere. -# -# The default working directory for the invocation is undefined, so -# the program should set one explicitly if it cares. -# -# On a Unix system, the normal procedure is to have 'pre-revprop-change' -# invoke other programs to do the real work, though it may do the -# work itself too. -# -# Note that 'pre-revprop-change' must be executable by the user(s) who will -# invoke it (typically the user httpd runs as), and that user must -# have filesystem-level permission to access the repository. -# -# On a Windows system, you should name the hook program -# 'pre-revprop-change.bat' or 'pre-revprop-change.exe', -# but the basic idea is the same. -# -# The hook program runs in an empty environment, unless the server is -# explicitly configured otherwise. For example, a common problem is for -# the PATH environment variable to not be set to its usual value, so -# that subprograms fail to launch unless invoked via absolute path. -# If you're having unexpected problems with a hook program, the -# culprit may be unusual (or missing) environment variables. -# -# CAUTION: -# For security reasons, you MUST always properly quote arguments when -# you use them, as those arguments could contain whitespace or other -# problematic characters. Additionally, you should delimit the list -# of options with "--" before passing the arguments, so malicious -# clients cannot bootleg unexpected options to the commands your -# script aims to execute. -# For similar reasons, you should also add a trailing @ to URLs which -# are passed to SVN commands accepting URLs with peg revisions. -# -# Here is an example hook script, for a Unix /bin/sh interpreter. 
-# For more examples and pre-written hooks, see those in -# the Subversion repository at -# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and -# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ - - -REPOS="$1" -REV="$2" -USER="$3" -PROPNAME="$4" -ACTION="$5" - -if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi - -echo "Changing revision properties other than svn:log is prohibited" >&2 -exit 1 diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/pre-unlock.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/pre-unlock.tmpl deleted file mode 100755 index 9ba99d071b..0000000000 --- a/manage_externals/test/repos/simple-ext.svn/hooks/pre-unlock.tmpl +++ /dev/null @@ -1,87 +0,0 @@ -#!/bin/sh - -# PRE-UNLOCK HOOK -# -# The pre-unlock hook is invoked before an exclusive lock is -# destroyed. Subversion runs this hook by invoking a program -# (script, executable, binary, etc.) named 'pre-unlock' (for which -# this file is a template), with the following ordered arguments: -# -# [1] REPOS-PATH (the path to this repository) -# [2] PATH (the path in the repository about to be unlocked) -# [3] USER (the user destroying the lock) -# [4] TOKEN (the lock token to be destroyed) -# [5] BREAK-UNLOCK (1 if the user is breaking the lock, else 0) -# -# If the hook program exits with success, the lock is destroyed; but -# if it exits with failure (non-zero), the unlock action is aborted -# and STDERR is returned to the client. -# -# The default working directory for the invocation is undefined, so -# the program should set one explicitly if it cares. -# -# On a Unix system, the normal procedure is to have 'pre-unlock' -# invoke other programs to do the real work, though it may do the -# work itself too. -# -# Note that 'pre-unlock' must be executable by the user(s) who will -# invoke it (typically the user httpd runs as), and that user must -# have filesystem-level permission to access the repository. -# -# On a Windows system, you should name the hook program -# 'pre-unlock.bat' or 'pre-unlock.exe', -# but the basic idea is the same. -# -# The hook program runs in an empty environment, unless the server is -# explicitly configured otherwise. For example, a common problem is for -# the PATH environment variable to not be set to its usual value, so -# that subprograms fail to launch unless invoked via absolute path. -# If you're having unexpected problems with a hook program, the -# culprit may be unusual (or missing) environment variables. -# -# CAUTION: -# For security reasons, you MUST always properly quote arguments when -# you use them, as those arguments could contain whitespace or other -# problematic characters. Additionally, you should delimit the list -# of options with "--" before passing the arguments, so malicious -# clients cannot bootleg unexpected options to the commands your -# script aims to execute. -# For similar reasons, you should also add a trailing @ to URLs which -# are passed to SVN commands accepting URLs with peg revisions. -# -# Here is an example hook script, for a Unix /bin/sh interpreter. -# For more examples and pre-written hooks, see those in -# the Subversion repository at -# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and -# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/ - - -REPOS="$1" -PATH="$2" -USER="$3" -TOKEN="$4" -BREAK="$5" - -# If a lock is owned by a different person, don't allow it be broken. 
-# (Maybe this script could send email to the lock owner?) - -SVNLOOK=/opt/homebrew/Cellar/subversion/1.14.2_1/bin/svnlook -GREP=/bin/grep -SED=/bin/sed - -LOCK_OWNER=`$SVNLOOK lock "$REPOS" "$PATH" | \ - $GREP '^Owner: ' | $SED 's/Owner: //'` - -# If we get no result from svnlook, there's no lock, return success: -if [ "$LOCK_OWNER" = "" ]; then - exit 0 -fi - -# If the person unlocking matches the lock's owner, return success: -if [ "$LOCK_OWNER" = "$USER" ]; then - exit 0 -fi - -# Otherwise, we've got an owner mismatch, so return failure: -echo "Error: $PATH locked by ${LOCK_OWNER}." 1>&2 -exit 1 diff --git a/manage_externals/test/repos/simple-ext.svn/hooks/start-commit.tmpl b/manage_externals/test/repos/simple-ext.svn/hooks/start-commit.tmpl deleted file mode 100755 index 1395e8315a..0000000000 --- a/manage_externals/test/repos/simple-ext.svn/hooks/start-commit.tmpl +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/sh - -# START-COMMIT HOOK -# -# The start-commit hook is invoked immediately after a Subversion txn is -# created and populated with initial revprops in the process of doing a -# commit. Subversion runs this hook by invoking a program (script, -# executable, binary, etc.) named 'start-commit' (for which this file -# is a template) with the following ordered arguments: -# -# [1] REPOS-PATH (the path to this repository) -# [2] USER (the authenticated user attempting to commit) -# [3] CAPABILITIES (a colon-separated list of capabilities reported -# by the client; see note below) -# [4] TXN-NAME (the name of the commit txn just created) -# -# Note: The CAPABILITIES parameter is new in Subversion 1.5, and 1.5 -# clients will typically report at least the "mergeinfo" capability. -# If there are other capabilities, then the list is colon-separated, -# e.g.: "mergeinfo:some-other-capability" (the order is undefined). -# -# The list is self-reported by the client. Therefore, you should not -# make security assumptions based on the capabilities list, nor should -# you assume that clients reliably report every capability they have. -# -# Note: The TXN-NAME parameter is new in Subversion 1.8. Prior to version -# 1.8, the start-commit hook was invoked before the commit txn was even -# created, so the ability to inspect the commit txn and its metadata from -# within the start-commit hook was not possible. -# -# If the hook program exits with success, the commit continues; but -# if it exits with failure (non-zero), the commit is stopped before -# a Subversion txn is created, and STDERR is returned to the client. -# -# The default working directory for the invocation is undefined, so -# the program should set one explicitly if it cares. -# -# On a Unix system, the normal procedure is to have 'start-commit' -# invoke other programs to do the real work, though it may do the -# work itself too. -# -# Note that 'start-commit' must be executable by the user(s) who will -# invoke it (typically the user httpd runs as), and that user must -# have filesystem-level permission to access the repository. -# -# On a Windows system, you should name the hook program -# 'start-commit.bat' or 'start-commit.exe', -# but the basic idea is the same. -# -# The hook program runs in an empty environment, unless the server is -# explicitly configured otherwise. For example, a common problem is for -# the PATH environment variable to not be set to its usual value, so -# that subprograms fail to launch unless invoked via absolute path. 
-# If you're having unexpected problems with a hook program, the
-# culprit may be unusual (or missing) environment variables.
-#
-# CAUTION:
-# For security reasons, you MUST always properly quote arguments when
-# you use them, as those arguments could contain whitespace or other
-# problematic characters. Additionally, you should delimit the list
-# of options with "--" before passing the arguments, so malicious
-# clients cannot bootleg unexpected options to the commands your
-# script aims to execute.
-# For similar reasons, you should also add a trailing @ to URLs which
-# are passed to SVN commands accepting URLs with peg revisions.
-#
-# Here is an example hook script, for a Unix /bin/sh interpreter.
-# For more examples and pre-written hooks, see those in
-# the Subversion repository at
-# http://svn.apache.org/repos/asf/subversion/trunk/tools/hook-scripts/ and
-# http://svn.apache.org/repos/asf/subversion/trunk/contrib/hook-scripts/
-
-
-REPOS="$1"
-USER="$2"
-
-commit-allower.pl --repository "$REPOS" --user "$USER" || exit 1
-special-auth-check.py --user "$USER" --auth-level 3 || exit 1
-
-# All checks passed, so allow the commit.
-exit 0
diff --git a/manage_externals/test/repos/simple-ext.svn/locks/db-logs.lock b/manage_externals/test/repos/simple-ext.svn/locks/db-logs.lock
deleted file mode 100644
index 20dd6369be..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/locks/db-logs.lock
+++ /dev/null
@@ -1,3 +0,0 @@
-This file is not used by Subversion 1.3.x or later.
-However, its existence is required for compatibility with
-Subversion 1.2.x or earlier.
diff --git a/manage_externals/test/repos/simple-ext.svn/locks/db.lock b/manage_externals/test/repos/simple-ext.svn/locks/db.lock
deleted file mode 100644
index 20dd6369be..0000000000
--- a/manage_externals/test/repos/simple-ext.svn/locks/db.lock
+++ /dev/null
@@ -1,3 +0,0 @@
-This file is not used by Subversion 1.3.x or later.
-However, its existence is required for compatibility with
-Subversion 1.2.x or earlier.
diff --git a/manage_externals/test/requirements.txt b/manage_externals/test/requirements.txt
deleted file mode 100644
index d66f6f1e67..0000000000
--- a/manage_externals/test/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-pylint>=1.7.0
-autopep8>=1.3.0
-coverage>=4.4.0
-coveralls>=1.2.0
-sphinx>=1.6.0
diff --git a/manage_externals/test/test_sys_checkout.py b/manage_externals/test/test_sys_checkout.py
deleted file mode 100755
index 664160dc99..0000000000
--- a/manage_externals/test/test_sys_checkout.py
+++ /dev/null
@@ -1,1871 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Terminology:
-  * 'container': a repo that has externals
-  * 'simple': a repo that has no externals, but is referenced as an external by another repo.
-  * 'mixed': a repo that both has externals and is referenced as an external by another repo.
-
-  * 'clean': the local repo matches the version in the externals and has no local modifications.
-  * 'empty': the external isn't checked out at all.
-
-Note: this script assumes the path to the manic and
-checkout_externals modules is already in the python path. This is
-usually handled by the makefile. If you call it directly, you may need
-to adjust your path.
-
-NOTE(bja, 2017-11) If a test fails, we want to keep the repo for that
-test. But the tests will keep running, so we need a unique name. Also,
-tearDown is always called after each test. I haven't figured out how
-to determine if an assertion failed and whether it is safe to clean up
-the test repos.
-
-So the solution is:
-
-* assign a unique id to each test repo.
-
-* never clean up during the run.
-
-* Erase any existing repos at the beginning of the module in
-setUpModule.
-"""
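# An illustrative sketch (not from the original test suite): the
# 'container'/'simple' terminology above maps onto the externals description
# files these tests generate on the fly. A hypothetical minimal config for a
# container pulling in one simple external could look like the following; the
# section name matches TAG_SECTION below, while the paths are made up:
#
#     [simp_tag]
#     local_path = externals/simp_tag
#     protocol = git
#     repo_url = /path/to/repos/simple-ext.git
#     tag = tag1
#     required = True
#
#     [externals_description]
#     schema_version = 1.0.0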
-
-So the solution is:
-
-* assign a unique id to each test repo.
-
-* never clean up during the run.
-
-* Erase any existing repos at the beginning of the module in
-setUpModule.
-"""
-
-# NOTE(bja, 2017-11) pylint complains that the module is too big, but
-# I'm still working on how to break up the tests and still have the
-# temporary directory be preserved....
-# pylint: disable=too-many-lines
-
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import logging
-import os
-import os.path
-import shutil
-import unittest
-
-from manic.externals_description import ExternalsDescription
-from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM
-from manic.externals_description import git_submodule_status
-from manic.externals_status import ExternalStatus
-from manic.repository_git import GitRepository
-from manic.utils import printlog, execute_subprocess
-from manic.global_constants import LOCAL_PATH_INDICATOR, VERBOSITY_DEFAULT
-from manic.global_constants import LOG_FILE_NAME
-from manic import checkout
-
-# ConfigParser was renamed in python3 to configparser. In python2,
-# ConfigParser returns byte strings, str, instead of unicode. We need
-# unicode to be compatible with the xml and json parsers and python3.
-try:
-    # python2
-    from ConfigParser import SafeConfigParser as config_parser
-except ImportError:
-    # python3
-    from configparser import ConfigParser as config_parser
-
-# ---------------------------------------------------------------------
-#
-# Global constants
-#
-# ---------------------------------------------------------------------
-
-
-# Module-wide root directory for all the per-test subdirs we'll create on
-# the fly (which are placed under wherever $CWD is when the test runs).
-# Set by setUpModule().
-module_tmp_root_dir = None
-TMP_REPO_DIR_NAME = 'tmp'  # subdir under $CWD
-
-# subdir under test/ that holds all of our checked-in repositories (which we
-# will clone for these tests).
-BARE_REPO_ROOT_NAME = 'repos'
-
-# Environment var referenced by checked-in externals file in mixed-cont-ext.git,
-# which should be pointed to the fully-resolved BARE_REPO_ROOT_NAME directory.
-# We explicitly clear this after every test, via tearDown().
-MIXED_CONT_EXT_ROOT_ENV_VAR = 'MANIC_TEST_BARE_REPO_ROOT'
-
-# Subdirs under bare repo root, each holding a repository. For more info
-# on the contents of these repositories, see test/repos/README.md. In these
-# tests the 'parent' repos are cloned as a starting point, whereas the 'child'
-# repos are checked out when the tests run checkout_externals.
-CONTAINER_REPO = 'container.git'  # Parent repo
-SIMPLE_REPO = 'simple-ext.git'  # Child repo
-SIMPLE_FORK_REPO = 'simple-ext-fork.git'  # Child repo
-MIXED_REPO = 'mixed-cont-ext.git'  # Both parent and child
-SVN_TEST_REPO = 'simple-ext.svn'  # Subversion repository
-
-# Standard (arbitrary) external names for test configs
-TAG_SECTION = 'simp_tag'
-BRANCH_SECTION = 'simp_branch'
-HASH_SECTION = 'simp_hash'
-
-# All the configs we construct check out their externals into these local paths.
-EXTERNALS_PATH = 'externals'
-SUB_EXTERNALS_PATH = 'src'  # For mixed test repos,
-
-# For testing behavior with '.' instead of an explicit path.
-SIMPLE_LOCAL_ONLY_NAME = '.'
-
-# Externals files.
-CFG_NAME = 'externals.cfg'  # We construct this on a per-test basis.
-CFG_SUB_NAME = 'sub-externals.cfg'  # Already exists in mixed-cont-ext repo.
-
-# Arbitrary text file in all the test repos.
-README_NAME = 'readme.txt'
-
-# Branch that exists in both the simple and simple-fork repos.
-REMOTE_BRANCH_FEATURE2 = 'feature2'
-
-# Disable too-many-public-methods error
-# pylint: disable=R0904
-
-def setUpModule():  # pylint: disable=C0103
-    """Setup for all tests in this module. It is called once per module!
-    """
-    logging.basicConfig(filename=LOG_FILE_NAME,
-                        format='%(levelname)s : %(asctime)s : %(message)s',
-                        datefmt='%Y-%m-%d %H:%M:%S',
-                        level=logging.DEBUG)
-    repo_root = os.path.join(os.getcwd(), TMP_REPO_DIR_NAME)
-    repo_root = os.path.abspath(repo_root)
-    # delete if it exists from previous runs
-    try:
-        shutil.rmtree(repo_root)
-    except BaseException:
-        pass
-    # create clean dir for this run
-    os.mkdir(repo_root)
-
-    # Make available to all tests in this file.
-    global module_tmp_root_dir
-    assert module_tmp_root_dir is None, module_tmp_root_dir
-    module_tmp_root_dir = repo_root
-
-
-class RepoUtils(object):
-    """Convenience methods for interacting with git repos."""
-    @staticmethod
-    def create_branch(repo_base_dir, external_name, branch, with_commit=False):
-        """Create branch and optionally (with_commit) add a single commit.
-        """
-        # pylint: disable=R0913
-        cwd = os.getcwd()
-        repo_root = os.path.join(repo_base_dir, EXTERNALS_PATH, external_name)
-        os.chdir(repo_root)
-        cmd = ['git', 'checkout', '-b', branch, ]
-        execute_subprocess(cmd)
-        if with_commit:
-            msg = 'start work on {0}'.format(branch)
-            with open(README_NAME, 'a') as handle:
-                handle.write(msg)
-            cmd = ['git', 'add', README_NAME, ]
-            execute_subprocess(cmd)
-            cmd = ['git', 'commit', '-m', msg, ]
-            execute_subprocess(cmd)
-        os.chdir(cwd)
-
-    @staticmethod
-    def create_commit(repo_base_dir, external_name):
-        """Make a commit to the given external.
-
-        This is used to test sync state changes from local commits on
-        detached heads and tracking branches.
-        """
-        cwd = os.getcwd()
-        repo_root = os.path.join(repo_base_dir, EXTERNALS_PATH, external_name)
-        os.chdir(repo_root)
-
-        msg = 'work on great new feature!'
-        with open(README_NAME, 'a') as handle:
-            handle.write(msg)
-        cmd = ['git', 'add', README_NAME, ]
-        execute_subprocess(cmd)
-        cmd = ['git', 'commit', '-m', msg, ]
-        execute_subprocess(cmd)
-        os.chdir(cwd)
-
-    @staticmethod
-    def clone_test_repo(bare_root, test_id, parent_repo_name, dest_dir_in):
-        """Clone the repo at <bare_root>/<parent_repo_name> into dest_dir_in,
-        or into a local per-test subdir if dest_dir_in is None.
-
-        Returns the output dir.
-        """
-        parent_repo_dir = os.path.join(bare_root, parent_repo_name)
-        if dest_dir_in is None:
-            # create unique subdir for this test
-            test_dir_name = test_id
-            print("Test repository name: {0}".format(test_dir_name))
-            dest_dir = os.path.join(module_tmp_root_dir, test_dir_name)
-        else:
-            dest_dir = dest_dir_in
-
-        # pylint: disable=W0212
-        GitRepository._git_clone(parent_repo_dir, dest_dir, VERBOSITY_DEFAULT)
-        return dest_dir
-
-    @staticmethod
-    def add_file_to_repo(under_test_dir, filename, tracked):
-        """Add a file to the repository so we can put it into a dirty state
-
-        """
-        cwd = os.getcwd()
-        os.chdir(under_test_dir)
-        with open(filename, 'w') as tmp:
-            tmp.write('Hello, world!')
-
-        if tracked:
-            # NOTE(bja, 2018-01) brittle hack to obtain repo dir and
-            # file name
-            path_data = filename.split('/')
-            repo_dir = os.path.join(path_data[0], path_data[1])
-            os.chdir(repo_dir)
-            tracked_file = path_data[2]
-            cmd = ['git', 'add', tracked_file]
-            execute_subprocess(cmd)
-
-        os.chdir(cwd)
-
-class GenerateExternalsDescriptionCfgV1(object):
-    """Building blocks to create ExternalsDescriptionCfgV1 files.
-
-    Basic usage: create_config(), then multiple create_*() calls, then
-    write_config(). Optionally after that: write_with_*().
-    """
-
-    def __init__(self, bare_root):
-        self._schema_version = '1.1.0'
-        self._config = None
-
-        # directory where we have test repositories (which we will clone for
-        # tests)
-        self._bare_root = bare_root
-
-    def write_config(self, dest_dir, filename=CFG_NAME):
-        """Write self._config to disk
-
-        """
-        dest_path = os.path.join(dest_dir, filename)
-        with open(dest_path, 'w') as configfile:
-            self._config.write(configfile)
-
-    def create_config(self):
-        """Create a config object and add the required metadata section
-
-        """
-        self._config = config_parser()
-        self.create_metadata()
-
-    def create_metadata(self):
-        """Create the metadata section of the config file
-        """
-        self._config.add_section(DESCRIPTION_SECTION)
-
-        self._config.set(DESCRIPTION_SECTION, VERSION_ITEM,
-                         self._schema_version)
-
-    def url_for_repo_path(self, repo_path, repo_path_abs=None):
-        if repo_path_abs is not None:
-            return repo_path_abs
-        else:
-            return os.path.join(self._bare_root, repo_path)
-
-    def create_section(self, repo_path, name, tag='', branch='',
-                       ref_hash='', required=True, path=EXTERNALS_PATH,
-                       sub_externals='', repo_path_abs=None, from_submodule=False,
-                       sparse='', nested=False):
-        # pylint: disable=too-many-branches
-        """Create a config ExternalsDescription section with the given name.
-
-        Autofills some items and handles some optional items.
-
-        repo_path_abs overrides repo_path (which is relative to the bare repo).
-        path is a subdir under repo_path to check out to.
-        """
-        # pylint: disable=R0913
-        self._config.add_section(name)
-        if not from_submodule:
-            if nested:
-                self._config.set(name, ExternalsDescription.PATH, path)
-            else:
-                self._config.set(name, ExternalsDescription.PATH,
-                                 os.path.join(path, name))
-
-        self._config.set(name, ExternalsDescription.PROTOCOL,
-                         ExternalsDescription.PROTOCOL_GIT)
-
-        # from_submodule is incompatible with some other options, turn them off
-        if (from_submodule and
-                ((repo_path_abs is not None) or tag or ref_hash or branch)):
-            printlog('create_section: "from_submodule" is incompatible with '
-                     '"repo_url", "tag", "hash", and "branch" options;\n'
-                     'Ignoring those options for {}'.format(name))
-            repo_url = None
-            tag = ''
-            ref_hash = ''
-            branch = ''
-
-        repo_url = self.url_for_repo_path(repo_path, repo_path_abs)
-
-        if not from_submodule:
-            self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
-
-        self._config.set(name, ExternalsDescription.REQUIRED, str(required))
-
-        if tag:
-            self._config.set(name, ExternalsDescription.TAG, tag)
-
-        if branch:
-            self._config.set(name, ExternalsDescription.BRANCH, branch)
-
-        if ref_hash:
-            self._config.set(name, ExternalsDescription.HASH, ref_hash)
-
-        if sub_externals:
-            self._config.set(name, ExternalsDescription.EXTERNALS,
-                             sub_externals)
-
-        if sparse:
-            self._config.set(name, ExternalsDescription.SPARSE, sparse)
-
-        if from_submodule:
-            self._config.set(name, ExternalsDescription.SUBMODULE, "True")
-
-    def create_section_reference_to_subexternal(self, name):
-        """Just a reference to another externals file.
- - """ - # pylint: disable=R0913 - self._config.add_section(name) - self._config.set(name, ExternalsDescription.PATH, LOCAL_PATH_INDICATOR) - - self._config.set(name, ExternalsDescription.PROTOCOL, - ExternalsDescription.PROTOCOL_EXTERNALS_ONLY) - - self._config.set(name, ExternalsDescription.REPO_URL, - LOCAL_PATH_INDICATOR) - - self._config.set(name, ExternalsDescription.REQUIRED, str(True)) - - self._config.set(name, ExternalsDescription.EXTERNALS, CFG_SUB_NAME) - - def create_svn_external(self, name, url, tag='', branch=''): - """Create a config section for an svn repository. - - """ - self._config.add_section(name) - self._config.set(name, ExternalsDescription.PATH, - os.path.join(EXTERNALS_PATH, name)) - - self._config.set(name, ExternalsDescription.PROTOCOL, - ExternalsDescription.PROTOCOL_SVN) - - self._config.set(name, ExternalsDescription.REPO_URL, url) - - self._config.set(name, ExternalsDescription.REQUIRED, str(True)) - - if tag: - self._config.set(name, ExternalsDescription.TAG, tag) - - if branch: - self._config.set(name, ExternalsDescription.BRANCH, branch) - - def write_with_git_branch(self, dest_dir, name, branch, new_remote_repo_path=None): - """Update fields in our config and write it to disk. - - name is the key of the ExternalsDescription in self._config to update. - """ - # pylint: disable=R0913 - self._config.set(name, ExternalsDescription.BRANCH, branch) - - if new_remote_repo_path: - if new_remote_repo_path == SIMPLE_LOCAL_ONLY_NAME: - repo_url = SIMPLE_LOCAL_ONLY_NAME - else: - repo_url = os.path.join(self._bare_root, new_remote_repo_path) - self._config.set(name, ExternalsDescription.REPO_URL, repo_url) - - try: - # remove the tag if it existed - self._config.remove_option(name, ExternalsDescription.TAG) - except BaseException: - pass - - self.write_config(dest_dir) - - def write_with_svn_branch(self, dest_dir, name, branch): - """Update a repository branch, and potentially the remote. - """ - # pylint: disable=R0913 - self._config.set(name, ExternalsDescription.BRANCH, branch) - - try: - # remove the tag if it existed - self._config.remove_option(name, ExternalsDescription.TAG) - except BaseException: - pass - - self.write_config(dest_dir) - - def write_with_tag_and_remote_repo(self, dest_dir, name, tag, new_remote_repo_path, - remove_branch=True): - """Update a repository tag and the remote. - - NOTE(bja, 2017-11) remove_branch=False should result in an - overspecified external with both a branch and tag. This is - used for error condition testing. 
-
-        """
-        # pylint: disable=R0913
-        self._config.set(name, ExternalsDescription.TAG, tag)
-
-        if new_remote_repo_path:
-            repo_url = os.path.join(self._bare_root, new_remote_repo_path)
-            self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
-
-        try:
-            # remove the branch if it existed
-            if remove_branch:
-                self._config.remove_option(name, ExternalsDescription.BRANCH)
-        except BaseException:
-            pass
-
-        self.write_config(dest_dir)
-
-    def write_without_branch_tag(self, dest_dir, name):
-        """Remove any branch and tag settings from the given section.
-        """
-        # pylint: disable=R0913
-        try:
-            # remove the branch if it existed
-            self._config.remove_option(name, ExternalsDescription.BRANCH)
-        except BaseException:
-            pass
-
-        try:
-            # remove the tag if it existed
-            self._config.remove_option(name, ExternalsDescription.TAG)
-        except BaseException:
-            pass
-
-        self.write_config(dest_dir)
-
-    def write_without_repo_url(self, dest_dir, name):
-        """Remove the repo_url setting from the given section.
-        """
-        # pylint: disable=R0913
-        try:
-            # remove the repo url if it existed
-            self._config.remove_option(name, ExternalsDescription.REPO_URL)
-        except BaseException:
-            pass
-
-        self.write_config(dest_dir)
-
-    def write_with_protocol(self, dest_dir, name, protocol, repo_path=None):
-        """Update a repository protocol, and potentially the remote
-        """
-        # pylint: disable=R0913
-        self._config.set(name, ExternalsDescription.PROTOCOL, protocol)
-
-        if repo_path:
-            repo_url = os.path.join(self._bare_root, repo_path)
-            self._config.set(name, ExternalsDescription.REPO_URL, repo_url)
-
-        self.write_config(dest_dir)
-
-
-def _execute_checkout_in_dir(dirname, args, debug_env=''):
-    """Execute the checkout command in the appropriate repo dir with the
-    specified additional args.
-
-    args should be a list of strings.
-    debug_env should be a string of the form 'FOO=bar' or the empty string.
-
-    Note that we are calling the command line processing and main
-    routines and not using a subprocess call so that we get code
-    coverage results! Note this means that environment variables are passed
-    to checkout_externals via os.environ; debug_env is just used to aid
-    manual reproducibility of a given call.
-
-    Returns (overall_status, tree_status),
-    where overall_status is 0 for success, nonzero otherwise,
-    and tree_status is set if --status was passed in, None otherwise.
-
-    Note this command executes the checkout command; it doesn't
-    necessarily do any checking out (e.g. if --status is passed in).
-    """
-    cwd = os.getcwd()
-
-    # Construct a command line for reproducibility; this command is not
-    # actually executed in the test.
-    os.chdir(dirname)
-    cmdline = ['--externals', CFG_NAME, ]
-    cmdline += args
-    manual_cmd = ('Running equivalent of:\n'
-                  'pushd {dirname}; '
-                  '{debug_env} /path/to/checkout_externals {args}'.format(
-                      dirname=dirname, debug_env=debug_env,
-                      args=' '.join(cmdline)))
-    printlog(manual_cmd)
-    options = checkout.commandline_arguments(cmdline)
-    overall_status, tree_status = checkout.main(options)
-    os.chdir(cwd)
-    return overall_status, tree_status
-
-class BaseTestSysCheckout(unittest.TestCase):
-    """Base class of reusable systems level test setup for
-    checkout_externals
-
-    """
-    # NOTE(bja, 2017-11) pylint complains about long method names, but
-    # it is hard to differentiate tests without making them more
-    # cryptic.
- # pylint: disable=invalid-name - - # Command-line args for checkout_externals, used in execute_checkout_in_dir() - status_args = ['--status'] - checkout_args = [] - optional_args = ['--optional'] - verbose_args = ['--status', '--verbose'] - - def setUp(self): - """Setup for all individual checkout_externals tests - """ - # directory we want to return to after the test system and - # checkout_externals are done cd'ing all over the place. - self._return_dir = os.getcwd() - - self._test_id = self.id().split('.')[-1] - - # find root - if os.path.exists(os.path.join(os.getcwd(), 'checkout_externals')): - root_dir = os.path.abspath(os.getcwd()) - else: - # maybe we are in a subdir, search up - root_dir = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) - while os.path.basename(root_dir): - if os.path.exists(os.path.join(root_dir, 'checkout_externals')): - break - root_dir = os.path.dirname(root_dir) - - if not os.path.exists(os.path.join(root_dir, 'checkout_externals')): - raise RuntimeError('Cannot find checkout_externals') - - # path to the executable - self._checkout = os.path.join(root_dir, 'checkout_externals') - - # directory where we have test repositories (which we will clone for - # tests) - self._bare_root = os.path.abspath( - os.path.join(root_dir, 'test', BARE_REPO_ROOT_NAME)) - - # set the input file generator - self._generator = GenerateExternalsDescriptionCfgV1(self._bare_root) - # set the input file generator for secondary externals - self._sub_generator = GenerateExternalsDescriptionCfgV1(self._bare_root) - - def tearDown(self): - """Tear down for individual tests - """ - # return to our common starting point - os.chdir(self._return_dir) - - # (in case this was set) Don't pollute environment of other tests. - os.environ.pop(MIXED_CONT_EXT_ROOT_ENV_VAR, - None) # Don't care if key wasn't set. 
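-
-    # For orientation: the externals description that self._generator writes
-    # to externals.cfg (via create_config() plus, e.g., one
-    # create_section(SIMPLE_REPO, TAG_SECTION, tag='tag1')) looks roughly
-    # like the sketch below; the repo_url shown is a placeholder for the
-    # resolved bare repo root, and exact key names follow the
-    # ExternalsDescription constants:
-    #
-    #   [externals_description]
-    #   schema_version = 1.1.0
-    #
-    #   [simp_tag]
-    #   local_path = externals/simp_tag
-    #   protocol = git
-    #   repo_url = /path/to/test/repos/simple-ext.git
-    #   required = True
-    #   tag = tag1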
-
-    def clone_test_repo(self, parent_repo_name, dest_dir_in=None):
-        """Clones repo under self._bare_root"""
-        return RepoUtils.clone_test_repo(self._bare_root, self._test_id,
-                                         parent_repo_name, dest_dir_in)
-
-    def execute_checkout_in_dir(self, dirname, args, debug_env=''):
-        overall_status, tree_status = _execute_checkout_in_dir(dirname, args,
-                                                               debug_env=debug_env)
-        self.assertEqual(overall_status, 0)
-        return tree_status
-
-    def execute_checkout_with_status(self, dirname, args, debug_env=''):
-        """Calls checkout a second time to get status if needed."""
-        tree_status = self.execute_checkout_in_dir(
-            dirname, args, debug_env=debug_env)
-        if tree_status is None:
-            tree_status = self.execute_checkout_in_dir(dirname,
-                                                       self.status_args,
-                                                       debug_env=debug_env)
-            self.assertNotEqual(tree_status, None)
-        return tree_status
-
-    def _check_sync_clean(self, ext_status, expected_sync_state,
-                          expected_clean_state):
-        self.assertEqual(ext_status.sync_state, expected_sync_state)
-        self.assertEqual(ext_status.clean_state, expected_clean_state)
-
-    @staticmethod
-    def _external_path(section_name, base_path=EXTERNALS_PATH):
-        return './{0}/{1}'.format(base_path, section_name)
-
-    def _check_file_exists(self, repo_dir, pathname):
-        "Check that <pathname> exists in <repo_dir>"
-        self.assertTrue(os.path.exists(os.path.join(repo_dir, pathname)))
-
-    def _check_file_absent(self, repo_dir, pathname):
-        "Check that <pathname> does not exist in <repo_dir>"
-        self.assertFalse(os.path.exists(os.path.join(repo_dir, pathname)))
-
-
-class TestSysCheckout(BaseTestSysCheckout):
-    """Run systems level tests of checkout_externals
-    """
-    # NOTE(bja, 2017-11) pylint complains about long method names, but
-    # it is hard to differentiate tests without making them more
-    # cryptic.
-    # pylint: disable=invalid-name
-
-    # ----------------------------------------------------------------
-    #
-    # Run systems tests
-    #
-    # ----------------------------------------------------------------
-    def test_required_bytag(self):
-        """Check out a required external pointing to a git tag."""
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
-                                       tag='tag1')
-        self._generator.write_config(cloned_repo_dir)
-
-        # externals start out 'empty' aka not checked out.
-        tree = self.execute_checkout_in_dir(cloned_repo_dir,
-                                            self.status_args)
-        local_path_rel = self._external_path(TAG_SECTION)
-        self._check_sync_clean(tree[local_path_rel],
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-        local_path_abs = os.path.join(cloned_repo_dir, local_path_rel)
-        self.assertFalse(os.path.exists(local_path_abs))
-
-        # after checkout, the external is 'clean' aka at the correct version.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_sync_clean(tree[local_path_rel],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-        # Actually checked out the desired repo.
-        self.assertEqual('origin', GitRepository._remote_name_for_url(
-            # Which url to look up
-            self._generator.url_for_repo_path(SIMPLE_REPO),
-            # Which directory has the local checked-out repo.
-            dirname=local_path_abs))
-
-        # Actually checked out the desired tag.
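-        # (_git_current_tag returns a (found, name) pair; only the name is
-        # checked here.)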
-        (tag_found, tag_name) = GitRepository._git_current_tag(local_path_abs)
-        self.assertEqual(tag_name, 'tag1')
-
-        # Check existence of some simp_tag files
-        tag_path = os.path.join('externals', TAG_SECTION)
-        self._check_file_exists(cloned_repo_dir,
-                                os.path.join(tag_path, README_NAME))
-        # Subrepo should not exist (not referenced by configs).
-        self._check_file_absent(cloned_repo_dir, os.path.join(tag_path,
-                                                              'simple_subdir',
-                                                              'subdir_file.txt'))
-
-    def test_required_bybranch(self):
-        """Check out a required external pointing to a git branch."""
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # externals start out 'empty' aka not checked out.
-        tree = self.execute_checkout_in_dir(cloned_repo_dir,
-                                            self.status_args)
-        local_path_rel = self._external_path(BRANCH_SECTION)
-        self._check_sync_clean(tree[local_path_rel],
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-        local_path_abs = os.path.join(cloned_repo_dir, local_path_rel)
-        self.assertFalse(os.path.exists(local_path_abs))
-
-        # after checkout, the external is 'clean' aka at the correct version.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_sync_clean(tree[local_path_rel],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self.assertTrue(os.path.exists(local_path_abs))
-
-        # Actually checked out the desired repo.
-        self.assertEqual('origin', GitRepository._remote_name_for_url(
-            # Which url to look up
-            self._generator.url_for_repo_path(SIMPLE_REPO),
-            # Which directory has the local checked-out repo.
-            dirname=local_path_abs))
-
-        # Actually checked out the desired branch.
-        (branch_found, branch_name) = GitRepository._git_current_remote_branch(
-            local_path_abs)
-        self.assertEqual(branch_name, 'origin/' + REMOTE_BRANCH_FEATURE2)
-
-    def test_required_byhash(self):
-        """Check out a required external pointing to a git hash."""
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, HASH_SECTION,
-                                       ref_hash='60b1cc1a38d63')
-        self._generator.write_config(cloned_repo_dir)
-
-        # externals start out 'empty' aka not checked out.
-        tree = self.execute_checkout_in_dir(cloned_repo_dir,
-                                            self.status_args)
-        local_path_rel = self._external_path(HASH_SECTION)
-        self._check_sync_clean(tree[local_path_rel],
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-        local_path_abs = os.path.join(cloned_repo_dir, local_path_rel)
-        self.assertFalse(os.path.exists(local_path_abs))
-
-        # after checkout, the externals are 'clean' aka at their correct version.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_sync_clean(tree[local_path_rel],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-        # Actually checked out the desired repo.
-        self.assertEqual('origin', GitRepository._remote_name_for_url(
-            # Which url to look up
-            self._generator.url_for_repo_path(SIMPLE_REPO),
-            # Which directory has the local checked-out repo.
-            dirname=local_path_abs))
-
-        # Actually checked out the desired hash.
-        (hash_found, hash_name) = GitRepository._git_current_hash(
-            local_path_abs)
-        self.assertTrue(hash_name.startswith('60b1cc1a38d63'),
-                        msg=hash_name)
-
-    def test_container_nested_required(self):
-        """Verify that a container with nested subrepos generates the correct initial status.
-        Tests over all possible permutations
-        """
-        # Output subdirs for each of the externals, to test that one external can be
-        # checked out in a subdir of another.
-        NESTED_SUBDIR = ['./fred', './fred/wilma', './fred/wilma/barney']
-
-        # Assert that each type of external (e.g. tag vs branch) can be at any parent level
-        # (e.g. child/parent/grandparent).
-        orders = [[0, 1, 2], [1, 2, 0], [2, 0, 1],
-                  [0, 2, 1], [2, 1, 0], [1, 0, 2]]
-        for n, order in enumerate(orders):
-            dest_dir = os.path.join(module_tmp_root_dir, self._test_id,
-                                    "test"+str(n))
-            cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO,
-                                                   dest_dir_in=dest_dir)
-            self._generator.create_config()
-            # We happen to check out each section via a different reference (tag/branch/hash) but
-            # those don't really matter, we just need to check out three repos into a nested set of
-            # directories.
-            self._generator.create_section(
-                SIMPLE_REPO, TAG_SECTION, nested=True,
-                tag='tag1', path=NESTED_SUBDIR[order[0]])
-            self._generator.create_section(
-                SIMPLE_REPO, BRANCH_SECTION, nested=True,
-                branch=REMOTE_BRANCH_FEATURE2, path=NESTED_SUBDIR[order[1]])
-            self._generator.create_section(
-                SIMPLE_REPO, HASH_SECTION, nested=True,
-                ref_hash='60b1cc1a38d63', path=NESTED_SUBDIR[order[2]])
-            self._generator.write_config(cloned_repo_dir)
-
-            # all externals start out 'empty' aka not checked out.
-            tree = self.execute_checkout_in_dir(cloned_repo_dir,
-                                                self.status_args)
-            self._check_sync_clean(tree[NESTED_SUBDIR[order[0]]],
-                                   ExternalStatus.EMPTY,
-                                   ExternalStatus.DEFAULT)
-            self._check_sync_clean(tree[NESTED_SUBDIR[order[1]]],
-                                   ExternalStatus.EMPTY,
-                                   ExternalStatus.DEFAULT)
-            self._check_sync_clean(tree[NESTED_SUBDIR[order[2]]],
-                                   ExternalStatus.EMPTY,
-                                   ExternalStatus.DEFAULT)
-
-            # after checkout, all the repos are 'clean'.
-            tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                     self.checkout_args)
-            self._check_sync_clean(tree[NESTED_SUBDIR[order[0]]],
-                                   ExternalStatus.STATUS_OK,
-                                   ExternalStatus.STATUS_OK)
-            self._check_sync_clean(tree[NESTED_SUBDIR[order[1]]],
-                                   ExternalStatus.STATUS_OK,
-                                   ExternalStatus.STATUS_OK)
-            self._check_sync_clean(tree[NESTED_SUBDIR[order[2]]],
-                                   ExternalStatus.STATUS_OK,
-                                   ExternalStatus.STATUS_OK)
-
-    def test_container_simple_optional(self):
-        """Verify that a container with an optional simple subrepo generates
-        the correct initial status.
-
-        """
-        # create repo and externals config.
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, 'simp_req',
-                                       tag='tag1')
-
-        self._generator.create_section(SIMPLE_REPO, 'simp_opt',
-                                       tag='tag1', required=False)
-
-        self._generator.write_config(cloned_repo_dir)
-
-        # all externals start out 'empty' aka not checked out.
-        tree = self.execute_checkout_in_dir(cloned_repo_dir,
-                                            self.status_args)
-        req_status = tree[self._external_path('simp_req')]
-        self._check_sync_clean(req_status,
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-        self.assertEqual(req_status.source_type, ExternalStatus.MANAGED)
-
-        opt_status = tree[self._external_path('simp_opt')]
-        self._check_sync_clean(opt_status,
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-        self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL)
-
-        # after checkout, required external is clean, optional is still empty.
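-        # (the plain checkout args do not include --optional, so simp_opt is
-        # left unchanged here.)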
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        req_status = tree[self._external_path('simp_req')]
-        self._check_sync_clean(req_status,
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self.assertEqual(req_status.source_type, ExternalStatus.MANAGED)
-
-        opt_status = tree[self._external_path('simp_opt')]
-        self._check_sync_clean(opt_status,
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-        self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL)
-
-        # after checking out optionals, the optional external is also clean.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.optional_args)
-        req_status = tree[self._external_path('simp_req')]
-        self._check_sync_clean(req_status,
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self.assertEqual(req_status.source_type, ExternalStatus.MANAGED)
-
-        opt_status = tree[self._external_path('simp_opt')]
-        self._check_sync_clean(opt_status,
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL)
-
-    def test_container_simple_verbose(self):
-        """Verify that verbose status matches non-verbose.
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
-                                       tag='tag1')
-        self._generator.write_config(cloned_repo_dir)
-
-        # after checkout, all externals should be 'clean'.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-        # 'Verbose' status should tell the same story.
-        tree = self.execute_checkout_in_dir(cloned_repo_dir,
-                                            self.verbose_args)
-        self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-    def test_container_simple_dirty(self):
-        """Verify that a container with a new tracked file is marked dirty.
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
-                                       tag='tag1')
-        self._generator.write_config(cloned_repo_dir)
-
-        # checkout, should start out clean.
-        tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args)
-        self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-        # add a tracked file to the simp_tag external, should be dirty.
-        RepoUtils.add_file_to_repo(cloned_repo_dir,
-                                   'externals/{0}/tmp.txt'.format(TAG_SECTION),
-                                   tracked=True)
-        tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args)
-        self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.DIRTY)
-
-        # Re-checkout; simp_tag should still be dirty.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.DIRTY)
-
-    def test_container_simple_untracked(self):
-        """Verify that a container with a simple subrepo and an untracked file
-        is not considered 'dirty' and will attempt an update.
-
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
-                                       tag='tag1')
-        self._generator.write_config(cloned_repo_dir)
-
-        # checkout, should start out clean.
- tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # add an untracked file to the simp_tag external, should stay clean. - RepoUtils.add_file_to_repo(cloned_repo_dir, - 'externals/{0}/tmp.txt'.format(TAG_SECTION), - tracked=False) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # After checkout, the external should still be 'clean'. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_simple_detached_sync(self): - """Verify that a container with simple subrepos generates the correct - out of sync status when making commits from a detached head - state. - - For more info about 'detached head' state: https://www.cloudbees.com/blog/git-detached-head - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, - tag='tag1') - - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - - self._generator.create_section(SIMPLE_REPO, 'simp_hash', - ref_hash='60b1cc1a38d63') - - self._generator.write_config(cloned_repo_dir) - - # externals start out 'empty' aka not checked out. - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - - # checkout - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # Commit on top of the tag and hash (creating the detached head state in those two - # externals' repos) - # The branch commit does not create the detached head state, but here for completeness. - RepoUtils.create_commit(cloned_repo_dir, TAG_SECTION) - RepoUtils.create_commit(cloned_repo_dir, HASH_SECTION) - RepoUtils.create_commit(cloned_repo_dir, BRANCH_SECTION) - - # sync status of all three should be 'modified' (uncommitted changes) - # clean status is 'ok' (matches externals version) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - - # after checkout, all externals should be totally clean (no uncommitted changes, - # and matches externals version). 
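-        # (re-running checkout moves each external back to the version pinned
-        # in the externals description.)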
- tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_remote_branch(self): - """Verify that a container with remote branch change works - - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # initial checkout - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # update the branch external to point to a different remote with the same branch, - # then simp_branch should be out of sync - self._generator.write_with_git_branch(cloned_repo_dir, - name=BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2, - new_remote_repo_path=SIMPLE_FORK_REPO) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - - # checkout new externals, now simp_branch should be clean. - tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_remote_tag_same_branch(self): - """Verify that a container with remote tag change works. The new tag - should not be in the original repo, only the new remote - fork. The new tag is automatically fetched because it is on - the branch. - - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # initial checkout - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # update the config file to point to a different remote with - # the new tag replacing the old branch. Tag MUST NOT be in the original - # repo! status of simp_branch should then be out of sync - self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION, - tag='forked-feature-v1', - new_remote_repo_path=SIMPLE_FORK_REPO) - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.status_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - - # checkout new externals, then should be synced. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_remote_tag_fetch_all(self): - """Verify that a container with remote tag change works. The new tag - should not be in the original repo, only the new remote - fork. It should also not be on a branch that will be fetched, - and therefore not fetched by default with 'git fetch'. 
It will
-        only be retrieved by 'git fetch --tags'
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # initial checkout
-        self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-        # update the config file to point to a different remote with
-        # the new tag instead of the old branch. Tag MUST NOT be in the original
-        # repo! status of simp_branch should then be out of sync.
-        self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION,
-                                                       tag='abandoned-feature',
-                                                       new_remote_repo_path=SIMPLE_FORK_REPO)
-        tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args)
-        self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
-                               ExternalStatus.MODEL_MODIFIED,
-                               ExternalStatus.STATUS_OK)
-
-        # checkout new externals, should be clean again.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-    def test_container_preserve_dot(self):
-        """Verify that after initial checkout, modifying an external git repo
-        url to '.' and the current branch will leave it unchanged.
-
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # initial checkout
-        self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-        # update the config file to point to a different remote with
-        # the same branch.
-        self._generator.write_with_git_branch(cloned_repo_dir, name=BRANCH_SECTION,
-                                              branch=REMOTE_BRANCH_FEATURE2,
-                                              new_remote_repo_path=SIMPLE_FORK_REPO)
-        # after checkout, should be clean again.
-        tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args)
-        self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-        # update branch to point to a new branch that only exists in
-        # the local fork
-        RepoUtils.create_branch(cloned_repo_dir, external_name=BRANCH_SECTION,
-                                branch='private-feature', with_commit=True)
-        self._generator.write_with_git_branch(cloned_repo_dir, name=BRANCH_SECTION,
-                                              branch='private-feature',
-                                              new_remote_repo_path=SIMPLE_LOCAL_ONLY_NAME)
-        # after checkout, should be clean again.
-        tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args)
-        self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-    def test_container_mixed_subrepo(self):
-        """Verify container with mixed subrepo.
-
-        The mixed subrepo has a sub-externals file with different
-        sub-externals on different branches.
-
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-
-        self._generator.create_config()
-        self._generator.create_section(MIXED_REPO, 'mixed_req',
-                                       branch='master', sub_externals=CFG_SUB_NAME)
-        self._generator.write_config(cloned_repo_dir)
-
-        # The subrepo has a repo_url that uses this environment variable.
-        # It'll be cleared in tearDown().
-        os.environ[MIXED_CONT_EXT_ROOT_ENV_VAR] = self._bare_root
-        debug_env = MIXED_CONT_EXT_ROOT_ENV_VAR + '=' + self._bare_root
-
-        # initial checkout: all requireds are clean, and optional is empty.
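-        # (debug_env only feeds the reproducibility message printed by
-        # _execute_checkout_in_dir; the real setting goes through os.environ.)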
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args,
-                                                 debug_env=debug_env)
-        mixed_req_path = self._external_path('mixed_req')
-        self._check_sync_clean(tree[mixed_req_path],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        sub_ext_base_path = "{0}/{1}/{2}".format(EXTERNALS_PATH, 'mixed_req', SUB_EXTERNALS_PATH)
-        # The already-checked-in subexternals file has a 'simp_branch' section
-        self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-        # update the mixed-use external to point to a different branch;
-        # status should become out of sync for mixed_req, but sub-externals
-        # are still in sync
-        self._generator.write_with_git_branch(cloned_repo_dir, name='mixed_req',
-                                              branch='new-feature',
-                                              new_remote_repo_path=MIXED_REPO)
-        tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args,
-                                            debug_env=debug_env)
-        self._check_sync_clean(tree[mixed_req_path],
-                               ExternalStatus.MODEL_MODIFIED,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-        # run the checkout. Now the mixed use external and its sub-externals should be clean.
-        tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args,
-                                                 debug_env=debug_env)
-        self._check_sync_clean(tree[mixed_req_path],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-    def test_container_component(self):
-        """Verify that optional component checkout works
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-
-        # create the top level externals file
-        self._generator.create_config()
-        # Optional external, by tag.
-        self._generator.create_section(SIMPLE_REPO, 'simp_opt',
-                                       tag='tag1', required=False)
-
-        # Required external, by branch.
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
                                       branch=REMOTE_BRANCH_FEATURE2)
-
-        # Required external, by hash.
-        self._generator.create_section(SIMPLE_REPO, HASH_SECTION,
-                                       ref_hash='60b1cc1a38d63')
-        self._generator.write_config(cloned_repo_dir)
-
-        # initial checkout, first try a nonexistent component argument noref
-        checkout_args = ['simp_opt', 'noref']
-        checkout_args.extend(self.checkout_args)
-
-        with self.assertRaises(RuntimeError):
-            self.execute_checkout_in_dir(cloned_repo_dir, checkout_args)
-
-        # Now explicitly check out one optional component.
-        # Explicitly listed component (opt) should be present, the other two not.
-        checkout_args = ['simp_opt']
-        checkout_args.extend(self.checkout_args)
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 checkout_args)
-        self._check_sync_clean(tree[self._external_path('simp_opt')],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-        self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-
-        # Check out a second component, this one required.
-        # Explicitly listed component (branch) should be present, the still-unlisted one (hash) not.
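-        # (components accumulate across invocations: simp_opt remains checked
-        # out from the previous call.)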
-        checkout_args.append(BRANCH_SECTION)
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 checkout_args)
-        self._check_sync_clean(tree[self._external_path('simp_opt')],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-
-
-    def test_container_exclude_component(self):
-        """Verify that exclude component checkout works
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
-                                       tag='tag1')
-
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-
-        self._generator.create_section(SIMPLE_REPO, 'simp_hash',
-                                       ref_hash='60b1cc1a38d63')
-
-        self._generator.write_config(cloned_repo_dir)
-
-        # initial checkout should result in all externals being clean except excluded TAG_SECTION.
-        checkout_args = ['--exclude', TAG_SECTION]
-        checkout_args.extend(self.checkout_args)
-        tree = self.execute_checkout_with_status(cloned_repo_dir, checkout_args)
-        self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
-                               ExternalStatus.EMPTY,
-                               ExternalStatus.DEFAULT)
-        self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._external_path(HASH_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-    def test_subexternal(self):
-        """Verify that an externals file can be brought in as a reference.
-
-        """
-        cloned_repo_dir = self.clone_test_repo(MIXED_REPO)
-
-        self._generator.create_config()
-        self._generator.create_section_reference_to_subexternal('mixed_base')
-        self._generator.write_config(cloned_repo_dir)
-
-        # The subrepo has a repo_url that uses this environment variable.
-        # It'll be cleared in tearDown().
-        os.environ[MIXED_CONT_EXT_ROOT_ENV_VAR] = self._bare_root
-        debug_env = MIXED_CONT_EXT_ROOT_ENV_VAR + '=' + self._bare_root
-
-        # After checkout, confirm requireds are clean and the referenced
-        # subexternal's contents are also clean.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args,
-                                                 debug_env=debug_env)
-
-        self._check_sync_clean(
-            tree[self._external_path(BRANCH_SECTION, base_path=SUB_EXTERNALS_PATH)],
-            ExternalStatus.STATUS_OK,
-            ExternalStatus.STATUS_OK)
-
-    def test_container_sparse(self):
-        """Verify that a 'full' container with a simple subrepo
-        can run a sparse checkout and generate the correct initial status.
-
-        """
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-
-        # Create a file to list filenames to checkout.
-        sparse_filename = 'sparse_checkout'
-        with open(os.path.join(cloned_repo_dir, sparse_filename), 'w') as sfile:
-            sfile.write(README_NAME)
-
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, TAG_SECTION,
-                                       tag='tag2')
-
-        # Same tag as above, but with a sparse file too.
-        sparse_relpath = '../../' + sparse_filename
-        self._generator.create_section(SIMPLE_REPO, 'simp_sparse',
-                                       tag='tag2', sparse=sparse_relpath)
-
-        self._generator.write_config(cloned_repo_dir)
-
-        # initial checkout, confirm requireds are clean.
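-        # (simp_sparse should contain only the file listed in the sparse
-        # checkout file, i.e. readme.txt.)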
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._external_path('simp_sparse')],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-        # Check existence of some files - full set in TAG_SECTION, and sparse set
-        # in 'simp_sparse'.
-        subrepo_path = os.path.join('externals', TAG_SECTION)
-        self._check_file_exists(cloned_repo_dir,
-                                os.path.join(subrepo_path, README_NAME))
-        self._check_file_exists(cloned_repo_dir, os.path.join(subrepo_path,
-                                                              'simple_subdir',
-                                                              'subdir_file.txt'))
-        subrepo_path = os.path.join('externals', 'simp_sparse')
-        self._check_file_exists(cloned_repo_dir,
-                                os.path.join(subrepo_path, README_NAME))
-        self._check_file_absent(cloned_repo_dir, os.path.join(subrepo_path,
-                                                              'simple_subdir',
-                                                              'subdir_file.txt'))
-
-class TestSysCheckoutSVN(BaseTestSysCheckout):
-    """Run systems level tests of checkout_externals accessing svn repositories
-
-    SVN tests - these tests use the svn repository interface.
-    """
-
-    @staticmethod
-    def _svn_branch_name():
-        return './{0}/svn_branch'.format(EXTERNALS_PATH)
-
-    @staticmethod
-    def _svn_tag_name():
-        return './{0}/svn_tag'.format(EXTERNALS_PATH)
-
-    def _svn_test_repo_url(self):
-        return 'file://' + os.path.join(self._bare_root, SVN_TEST_REPO)
-
-    def _check_tag_branch_svn_tag_clean(self, tree):
-        self._check_sync_clean(tree[self._external_path(TAG_SECTION)],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._svn_branch_name()],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-        self._check_sync_clean(tree[self._svn_tag_name()],
-                               ExternalStatus.STATUS_OK,
-                               ExternalStatus.STATUS_OK)
-
-    def _have_svn_access(self):
-        """Check if we have svn access so we can enable tests that use svn.
-
-        """
-        have_svn = False
-        cmd = ['svn', 'ls', self._svn_test_repo_url(), ]
-        try:
-            execute_subprocess(cmd)
-            have_svn = True
-        except BaseException:
-            pass
-        return have_svn
-
-    def _skip_if_no_svn_access(self):
-        """Helper (not a decorator) to skip svn tests when svn isn't available
-        """
-        have_svn = self._have_svn_access()
-        if not have_svn:
-            raise unittest.SkipTest("No svn access")
-
-    def test_container_simple_svn(self):
-        """Verify that a container repo can pull in an svn branch and svn tag.
-
-        """
-        self._skip_if_no_svn_access()
-        # create repo
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-
-        self._generator.create_config()
-        # Git repo.
-        self._generator.create_section(SIMPLE_REPO, TAG_SECTION, tag='tag1')
-
-        # Svn repos.
-        self._generator.create_svn_external('svn_branch', self._svn_test_repo_url(), branch='trunk')
-        self._generator.create_svn_external('svn_tag', self._svn_test_repo_url(), tag='tags/cesm2.0.beta07')
-
-        self._generator.write_config(cloned_repo_dir)
-
-        # checkout, make sure all sections are clean.
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_tag_branch_svn_tag_clean(tree)
-
-        # update description file to make the tag into a branch and
-        # trigger a switch
-        self._generator.write_with_svn_branch(cloned_repo_dir, 'svn_tag',
-                                              'trunk')
-
-        # checkout, again the results should be clean.
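-        # (the svn_tag working copy is switched from the tag to trunk in
-        # place.)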
-        tree = self.execute_checkout_with_status(cloned_repo_dir,
-                                                 self.checkout_args)
-        self._check_tag_branch_svn_tag_clean(tree)
-
-        # add an untracked file to the repo
-        tracked = False
-        RepoUtils.add_file_to_repo(cloned_repo_dir,
-                                   'externals/svn_branch/tmp.txt', tracked)
-
-        # run a no-op checkout.
-        self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-        # update description file to make the branch into a tag and
-        # trigger a modified sync status
-        self._generator.write_with_svn_branch(cloned_repo_dir, 'svn_tag',
-                                              'tags/cesm2.0.beta07')
-
-        self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-        # verify status is still clean and unmodified, even though the last
-        # checkout modified the working dir state.
-        tree = self.execute_checkout_in_dir(cloned_repo_dir,
-                                            self.verbose_args)
-        self._check_tag_branch_svn_tag_clean(tree)
-
-class TestSubrepoCheckout(BaseTestSysCheckout):
-    # Need to store information at setUp time for checking
-    # pylint: disable=too-many-instance-attributes
-    """Run tests to ensure proper handling of repos with submodules.
-
-    By default, submodules in git repositories are checked out. A git
-    repository checked out as a submodule is treated as if it was
-    listed in an external with the same properties as in the source
-    .gitmodules file.
-    """
-
-    def setUp(self):
-        """Setup for all submodule checkout tests
-        Create a repo with two submodule repositories.
-        """
-
-        # Run the basic setup
-        super().setUp()
-        # create test repo
-        # We need to do this here (rather than have a static repo) because
-        # git submodules do not allow for variables in .gitmodules files
-        self._test_repo_name = 'test_repo_with_submodules'
-        self._bare_branch_name = 'subrepo_branch'
-        self._config_branch_name = 'subrepo_config_branch'
-        self._container_extern_name = 'externals_container.cfg'
-        self._my_test_dir = os.path.join(module_tmp_root_dir, self._test_id)
-        self._repo_dir = os.path.join(self._my_test_dir, self._test_repo_name)
-        self._checkout_dir = 'repo_with_submodules'
-        check_dir = self.clone_test_repo(CONTAINER_REPO,
-                                         dest_dir_in=self._repo_dir)
-        self.assertTrue(self._repo_dir == check_dir)
-        # Add the submodules
-        cwd = os.getcwd()
-        fork_repo_dir = os.path.join(self._bare_root, SIMPLE_FORK_REPO)
-        simple_repo_dir = os.path.join(self._bare_root, SIMPLE_REPO)
-        self._simple_ext_fork_name = os.path.splitext(SIMPLE_FORK_REPO)[0]
-        self._simple_ext_name = os.path.join('sourc',
-                                             os.path.splitext(SIMPLE_REPO)[0])
-        os.chdir(self._repo_dir)
-        # Add a branch with a subrepo
-        cmd = ['git', 'branch', self._bare_branch_name, 'master']
-        execute_subprocess(cmd)
-        cmd = ['git', 'checkout', self._bare_branch_name]
-        execute_subprocess(cmd)
-        cmd = ['git', '-c', 'protocol.file.allow=always', 'submodule', 'add', fork_repo_dir]
-        execute_subprocess(cmd)
-        cmd = ['git', 'commit', '-am', "'Added simple-ext-fork as a submodule'"]
-        execute_subprocess(cmd)
-        # Save the fork repo hash for comparison
-        os.chdir(self._simple_ext_fork_name)
-        self._fork_hash_check = self.get_git_hash()
-        os.chdir(self._repo_dir)
-        # Now, create a branch to test from_submodule
-        cmd = ['git', 'branch',
-               self._config_branch_name, self._bare_branch_name]
-        execute_subprocess(cmd)
-        cmd = ['git', 'checkout', self._config_branch_name]
-        execute_subprocess(cmd)
-        cmd = ['git', '-c', 'protocol.file.allow=always', 'submodule', 'add', '--name', SIMPLE_REPO,
-               simple_repo_dir, self._simple_ext_name]
-        execute_subprocess(cmd)
-        # Checkout feature2
-        os.chdir(self._simple_ext_name)
-        cmd = ['git', 'branch', 'feature2', 'origin/feature2']
-        execute_subprocess(cmd)
-        cmd = ['git', 'checkout', 'feature2']
-        execute_subprocess(cmd)
-        # Save the simple repo hash for comparison
-        self._simple_hash_check = self.get_git_hash()
-        os.chdir(self._repo_dir)
-        self.write_externals_config(filename=self._container_extern_name,
-                                    dest_dir=self._repo_dir, from_submodule=True)
-        cmd = ['git', 'add', self._container_extern_name]
-        execute_subprocess(cmd)
-        cmd = ['git', 'commit', '-am', "'Added simple-ext as a submodule'"]
-        execute_subprocess(cmd)
-        # Reset to master
-        cmd = ['git', 'checkout', 'master']
-        execute_subprocess(cmd)
-        os.chdir(cwd)
-
-    @staticmethod
-    def get_git_hash(revision="HEAD"):
-        """Return the hash for <revision>"""
-        cmd = ['git', 'rev-parse', revision]
-        git_out = execute_subprocess(cmd, output_to_caller=True)
-        return git_out.strip()
-
-    def write_externals_config(self, name='', dest_dir=None,
-                               filename=CFG_NAME,
-                               branch_name=None, sub_externals=None,
-                               from_submodule=False):
-        # pylint: disable=too-many-arguments
-        """Create a container externals file with only simple externals.
-
-        """
-        self._generator.create_config()
-
-        if dest_dir is None:
-            dest_dir = self._my_test_dir
-
-        if from_submodule:
-            self._generator.create_section(SIMPLE_FORK_REPO,
-                                           self._simple_ext_fork_name,
-                                           from_submodule=True)
-            self._generator.create_section(SIMPLE_REPO,
-                                           self._simple_ext_name,
-                                           branch='feature3', path='',
-                                           from_submodule=False)
-        else:
-            if branch_name is None:
-                branch_name = 'master'
-
-            self._generator.create_section(self._test_repo_name,
-                                           self._checkout_dir,
-                                           branch=branch_name,
-                                           path=name, sub_externals=sub_externals,
-                                           repo_path_abs=self._repo_dir)
-
-        self._generator.write_config(dest_dir, filename=filename)
-
-    def idempotence_check(self, checkout_dir):
-        """Verify that calling checkout_externals and
-        checkout_externals --status does not cause errors"""
-        cwd = os.getcwd()
-        os.chdir(checkout_dir)
-        self.execute_checkout_in_dir(self._my_test_dir,
-                                     self.checkout_args)
-        self.execute_checkout_in_dir(self._my_test_dir,
-                                     self.status_args)
-        os.chdir(cwd)
-
-    def test_submodule_checkout_bare(self):
-        """Verify that a git repo with a submodule is properly checked out.
-        This test is for when there is no 'externals' keyword in the
-        parent repo.
-        Correct behavior is that the submodule is checked out using
-        normal git submodule behavior.
-        """
-        simple_ext_fork_tag = "(tag1)"
-        simple_ext_fork_status = " "
-        self.write_externals_config(branch_name=self._bare_branch_name)
-        self.execute_checkout_in_dir(self._my_test_dir,
-                                     self.checkout_args)
-        cwd = os.getcwd()
-        checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir)
-        fork_file = os.path.join(checkout_dir,
-                                 self._simple_ext_fork_name, "readme.txt")
-        self.assertTrue(os.path.exists(fork_file))
-
-        submods = git_submodule_status(checkout_dir)
-        print('checking status of', checkout_dir, ':', submods)
-        self.assertEqual(len(submods.keys()), 1)
-        self.assertTrue(self._simple_ext_fork_name in submods)
-        submod = submods[self._simple_ext_fork_name]
-        self.assertTrue('hash' in submod)
-        self.assertEqual(submod['hash'], self._fork_hash_check)
-        self.assertTrue('status' in submod)
-        self.assertEqual(submod['status'], simple_ext_fork_status)
-        self.assertTrue('tag' in submod)
-        self.assertEqual(submod['tag'], simple_ext_fork_tag)
-        self.idempotence_check(checkout_dir)
-
-    def test_submodule_checkout_none(self):
-        """Verify that a git repo with submodule is properly checked out
-        This test is for when 'externals=None' is in the parent repo's
-        externals cfg file.
-        Correct behavior is that the submodule is not checked out.
-        """
-        self.write_externals_config(branch_name=self._bare_branch_name,
-                                    sub_externals="none")
-        self.execute_checkout_in_dir(self._my_test_dir,
-                                     self.checkout_args)
-        cwd = os.getcwd()
-        checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir)
-        fork_file = os.path.join(checkout_dir,
-                                 self._simple_ext_fork_name, "readme.txt")
-        self.assertFalse(os.path.exists(fork_file))
-        os.chdir(cwd)
-        self.idempotence_check(checkout_dir)
-
-    def test_submodule_checkout_config(self):  # pylint: disable=too-many-locals
-        """Verify that a git repo with submodule is properly checked out
-        This test is for when the 'from_submodule' keyword is used in the
-        parent repo.
-        Correct behavior is that the components are checked out by
-        checkout_externals itself rather than as git submodules.
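-
-        (git's 'git submodule status' marks an uninitialized submodule
-        with a leading '-', which is why the expected status below is
-        "-" and the expected tag is None.)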
-        """
-        tag_check = None  # Not checked out as submodule
-        status_check = "-"  # Not checked out as submodule
-        self.write_externals_config(branch_name=self._config_branch_name,
-                                    sub_externals=self._container_extern_name)
-        self.execute_checkout_in_dir(self._my_test_dir,
-                                     self.checkout_args)
-        cwd = os.getcwd()
-        checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir)
-        fork_file = os.path.join(checkout_dir,
-                                 self._simple_ext_fork_name, "readme.txt")
-        self.assertTrue(os.path.exists(fork_file))
-        os.chdir(checkout_dir)
-        # Check submodule status
-        submods = git_submodule_status(checkout_dir)
-        self.assertEqual(len(submods.keys()), 2)
-        self.assertTrue(self._simple_ext_fork_name in submods)
-        submod = submods[self._simple_ext_fork_name]
-        self.assertTrue('hash' in submod)
-        self.assertEqual(submod['hash'], self._fork_hash_check)
-        self.assertTrue('status' in submod)
-        self.assertEqual(submod['status'], status_check)
-        self.assertTrue('tag' in submod)
-        self.assertEqual(submod['tag'], tag_check)
-        self.assertTrue(self._simple_ext_name in submods)
-        submod = submods[self._simple_ext_name]
-        self.assertTrue('hash' in submod)
-        self.assertEqual(submod['hash'], self._simple_hash_check)
-        self.assertTrue('status' in submod)
-        self.assertEqual(submod['status'], status_check)
-        self.assertTrue('tag' in submod)
-        self.assertEqual(submod['tag'], tag_check)
-        # Check fork repo status
-        os.chdir(self._simple_ext_fork_name)
-        self.assertEqual(self.get_git_hash(), self._fork_hash_check)
-        os.chdir(checkout_dir)
-        os.chdir(self._simple_ext_name)
-        hash_check = self.get_git_hash('origin/feature3')
-        self.assertEqual(self.get_git_hash(), hash_check)
-        os.chdir(cwd)
-        self.idempotence_check(checkout_dir)
-
-class TestSysCheckoutErrors(BaseTestSysCheckout):
-    """Run system-level tests of error conditions in checkout_externals
-
-    Error conditions - these tests are designed to trigger specific
-    error conditions and ensure that they are handled as runtime
-    errors (with hopefully useful error messages) instead of the
-    default internal message that won't mean anything to the
-    user, e.g. key error, called process error, etc.
-
-    These are not 'expected failures'. They pass when a
-    RuntimeError is raised, and fail if any other error is raised (or no
-    error is raised).
-
-    """
-
-    # NOTE(bja, 2017-11) pylint complains about long method names, but
-    # it is hard to differentiate tests without making them more
-    # cryptic.
-    # pylint: disable=invalid-name
-
-    def test_error_unknown_protocol(self):
-        """Verify that a runtime error is raised when the user specified repo
-        protocol is not known.
-
-        """
-        # create repo
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to use a protocol that does not exist
-        self._generator.write_with_protocol(cloned_repo_dir, BRANCH_SECTION,
-                                            'this-protocol-does-not-exist')
-
-        with self.assertRaises(RuntimeError):
-            self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-    def test_error_switch_protocol(self):
-        """Verify that a runtime error is raised when the user switches
-        protocols, git to svn.
-
-        TODO(bja, 2017-11) This correctly results in an error, but it
-        isn't a helpful error message.
-
-        """
-        # create repo
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to switch the protocol from git to svn
-        self._generator.write_with_protocol(cloned_repo_dir, BRANCH_SECTION, 'svn')
-        with self.assertRaises(RuntimeError):
-            self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-    def test_error_unknown_tag(self):
-        """Verify that a runtime error is raised when the user specified tag
-        does not exist.
-
-        """
-        # create repo
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to point to a different remote with
-        # the tag instead of branch. Tag MUST NOT be in the original
-        # repo!
-        self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION,
-                                                       tag='this-tag-does-not-exist',
-                                                       new_remote_repo_path=SIMPLE_REPO)
-
-        with self.assertRaises(RuntimeError):
-            self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-    def test_error_overspecify_tag_branch(self):
-        """Verify that a runtime error is raised when the user specified both
-        a tag and a branch
-
-        """
-        # create repo
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to add a tag while keeping the
-        # branch, so the external is overspecified
-        self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION,
-                                                       tag='this-tag-does-not-exist',
-                                                       new_remote_repo_path=SIMPLE_REPO,
-                                                       remove_branch=False)
-
-        with self.assertRaises(RuntimeError):
-            self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-    def test_error_underspecify_tag_branch(self):
-        """Verify that a runtime error is raised when the user specified
-        neither a tag nor a branch
-
-        """
-        # create repo
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to remove both the tag and the
-        # branch, so the external is underspecified
-        self._generator.write_without_branch_tag(cloned_repo_dir, BRANCH_SECTION)
-
-        with self.assertRaises(RuntimeError):
-            self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args)
-
-    def test_error_missing_url(self):
-        """Verify that a runtime error is raised when the user does not
-        specify a repo url.
-
-        """
-        # create repo
-        cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO)
-        self._generator.create_config()
-        self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION,
-                                       branch=REMOTE_BRANCH_FEATURE2)
-        self._generator.write_config(cloned_repo_dir)
-
-        # update the config file to remove the repo url entirely
- self._generator.write_without_repo_url(cloned_repo_dir, - BRANCH_SECTION) - - with self.assertRaises(RuntimeError): - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_sys_repository_git.py b/manage_externals/test/test_sys_repository_git.py deleted file mode 100644 index 7e5fb5020d..0000000000 --- a/manage_externals/test/test_sys_repository_git.py +++ /dev/null @@ -1,238 +0,0 @@ -#!/usr/bin/env python3 - -"""Tests of some of the functionality in repository_git.py that actually -interacts with git repositories. - -We're calling these "system" tests because we expect them to be a lot -slower than most of the unit tests. - -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import shutil -import tempfile -import unittest - -from manic.repository_git import GitRepository -from manic.externals_description import ExternalsDescription -from manic.externals_description import ExternalsDescriptionDict -from manic.utils import execute_subprocess - -# NOTE(wjs, 2018-04-09) I find a mix of camel case and underscores to be -# more readable for unit test names, so I'm disabling pylint's naming -# convention check -# pylint: disable=C0103 - -# Allow access to protected members -# pylint: disable=W0212 - - -class GitTestCase(unittest.TestCase): - """Adds some git-specific unit test functionality on top of TestCase""" - - def assertIsHash(self, maybe_hash): - """Assert that the string given by maybe_hash really does look - like a git hash. - """ - - # Ensure it is non-empty - self.assertTrue(maybe_hash, msg="maybe_hash is empty") - - # Ensure it has a single string - self.assertEqual(1, len(maybe_hash.split()), - msg="maybe_hash has multiple strings: {}".format(maybe_hash)) - - # Ensure that the only characters in the string are ones allowed - # in hashes - allowed_chars_set = set('0123456789abcdef') - self.assertTrue(set(maybe_hash) <= allowed_chars_set, - msg="maybe_hash has non-hash characters: {}".format(maybe_hash)) - - -class TestGitTestCase(GitTestCase): - """Tests GitTestCase""" - - def test_assertIsHash_true(self): - """Ensure that assertIsHash passes for something that looks - like a hash""" - self.assertIsHash('abc123') - - def test_assertIsHash_empty(self): - """Ensure that assertIsHash raises an AssertionError for an - empty string""" - with self.assertRaises(AssertionError): - self.assertIsHash('') - - def test_assertIsHash_multipleStrings(self): - """Ensure that assertIsHash raises an AssertionError when - given multiple strings""" - with self.assertRaises(AssertionError): - self.assertIsHash('abc123 def456') - - def test_assertIsHash_badChar(self): - """Ensure that assertIsHash raises an AssertionError when given a - string that has a character that doesn't belong in a hash - """ - with self.assertRaises(AssertionError): - self.assertIsHash('abc123g') - - -class TestGitRepositoryGitCommands(GitTestCase): - """Test some git commands in RepositoryGit - - It's silly that we need to create a repository in order to test - these git commands. Much or all of the git functionality that is - currently in repository_git.py should eventually be moved to a - separate module that is solely responsible for wrapping git - commands; that would allow us to test it independently of this - repository class. 
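-
-    Each test below builds a throwaway repository in a tempdir via the
-    helper methods (make_cwd_git_repo, add_cwd_git_commit, ...) and then
-    exercises a single _git_* query against it.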
- """ - - # ======================================================================== - # Test helper functions - # ======================================================================== - - def setUp(self): - # directory we want to return to after the test system and - # checkout_externals are done cd'ing all over the place. - self._return_dir = os.getcwd() - - self._tmpdir = tempfile.mkdtemp() - os.chdir(self._tmpdir) - - self._name = 'component' - rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: - '/path/to/local/repo', - ExternalsDescription.TAG: - 'tag1', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'junk', - ExternalsDescription.EXTERNALS: '', - ExternalsDescription.REPO: rdata, - }, - } - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = GitRepository('test', repo) - - def tearDown(self): - # return to our common starting point - os.chdir(self._return_dir) - - shutil.rmtree(self._tmpdir, ignore_errors=True) - - @staticmethod - def make_cwd_git_repo(): - """Turn the current directory into an empty git repository""" - execute_subprocess(['git', 'init']) - - @staticmethod - def add_cwd_git_commit(): - """Add a git commit in the current directory""" - with open('README', 'a') as myfile: - myfile.write('more info') - execute_subprocess(['git', 'add', 'README']) - execute_subprocess(['git', 'commit', '-m', 'my commit message']) - - @staticmethod - def checkout_cwd_git_branch(branchname): - """Checkout a new branch in the current directory""" - execute_subprocess(['git', 'checkout', '-b', branchname]) - - @staticmethod - def make_cwd_git_tag(tagname): - """Make a lightweight tag at the current commit""" - execute_subprocess(['git', 'tag', '-m', 'making a tag', tagname]) - - @staticmethod - def checkout_cwd_ref(refname): - """Checkout the given refname in the current directory""" - execute_subprocess(['git', 'checkout', refname]) - - # ======================================================================== - # Begin actual tests - # ======================================================================== - - def test_currentHash_returnsHash(self): - """Ensure that the _git_current_hash function returns a hash""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - hash_found, myhash = self._repo._git_current_hash(os.getcwd()) - self.assertTrue(hash_found) - self.assertIsHash(myhash) - - def test_currentHash_outsideGitRepo(self): - """Ensure that the _git_current_hash function returns False when - outside a git repository""" - hash_found, myhash = self._repo._git_current_hash(os.getcwd()) - self.assertFalse(hash_found) - self.assertEqual('', myhash) - - def test_currentBranch_onBranch(self): - """Ensure that the _git_current_branch function returns the name - of the branch""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - self.checkout_cwd_git_branch('foo') - branch_found, mybranch = self._repo._git_current_branch(os.getcwd()) - self.assertTrue(branch_found) - self.assertEqual('foo', mybranch) - - def test_currentBranch_notOnBranch(self): - """Ensure that the _git_current_branch function returns False - when not on a branch""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - self.make_cwd_git_tag('mytag') - self.checkout_cwd_ref('mytag') - branch_found, mybranch = self._repo._git_current_branch(os.getcwd()) - self.assertFalse(branch_found) - self.assertEqual('', mybranch) - - def test_currentBranch_outsideGitRepo(self): - 
"""Ensure that the _git_current_branch function returns False - when outside a git repository""" - branch_found, mybranch = self._repo._git_current_branch(os.getcwd()) - self.assertFalse(branch_found) - self.assertEqual('', mybranch) - - def test_currentTag_onTag(self): - """Ensure that the _git_current_tag function returns the name of - the tag""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - self.make_cwd_git_tag('some_tag') - tag_found, mytag = self._repo._git_current_tag(os.getcwd()) - self.assertTrue(tag_found) - self.assertEqual('some_tag', mytag) - - def test_currentTag_notOnTag(self): - """Ensure tha the _git_current_tag function returns False when - not on a tag""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - self.make_cwd_git_tag('some_tag') - self.add_cwd_git_commit() - tag_found, mytag = self._repo._git_current_tag(os.getcwd()) - self.assertFalse(tag_found) - self.assertEqual('', mytag) - - def test_currentTag_outsideGitRepo(self): - """Ensure that the _git_current_tag function returns False when - outside a git repository""" - tag_found, mytag = self._repo._git_current_tag(os.getcwd()) - self.assertFalse(tag_found) - self.assertEqual('', mytag) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_unit_externals_description.py b/manage_externals/test/test_unit_externals_description.py deleted file mode 100644 index 30e5288499..0000000000 --- a/manage_externals/test/test_unit_externals_description.py +++ /dev/null @@ -1,478 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for checkout_externals - -Note: this script assume the path to the checkout_externals.py module is -already in the python path. - -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import os.path -import shutil -import unittest - -try: - # python2 - from ConfigParser import SafeConfigParser as config_parser - - def config_string_cleaner(text): - """convert strings into unicode - """ - return text.decode('utf-8') -except ImportError: - # python3 - from configparser import ConfigParser as config_parser - - def config_string_cleaner(text): - """Python3 already uses unicode strings, so just return the string - without modification. - - """ - return text - -from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM -from manic.externals_description import ExternalsDescription -from manic.externals_description import ExternalsDescriptionDict -from manic.externals_description import ExternalsDescriptionConfigV1 -from manic.externals_description import get_cfg_schema_version -from manic.externals_description import read_externals_description_file -from manic.externals_description import create_externals_description - -from manic.global_constants import EMPTY_STR - - -class TestCfgSchemaVersion(unittest.TestCase): - """Test that schema identification for the externals description - returns the correct results. - - """ - - def setUp(self): - """Reusable config object - """ - self._config = config_parser() - self._config.add_section('section1') - self._config.set('section1', 'keword', 'value') - - self._config.add_section(DESCRIPTION_SECTION) - - def test_schema_version_valid(self): - """Test that schema identification returns the correct version for a - valid tag. 
-
-        """
-        version_str = '2.1.3'
-        self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, version_str)
-        major, minor, patch = get_cfg_schema_version(self._config)
-        expected_major = 2
-        expected_minor = 1
-        expected_patch = 3
-        self.assertEqual(expected_major, major)
-        self.assertEqual(expected_minor, minor)
-        self.assertEqual(expected_patch, patch)
-
-    def test_schema_section_missing(self):
-        """Test that an error is raised if the schema section is missing
-        from the input file.
-
-        """
-        self._config.remove_section(DESCRIPTION_SECTION)
-        with self.assertRaises(RuntimeError):
-            get_cfg_schema_version(self._config)
-
-    def test_schema_version_missing(self):
-        """Test that an externals description file without a version raises a
-        runtime error.
-
-        """
-        # Note: the default setup method shouldn't include a version
-        # keyword, but remove it just to be future proof....
-        self._config.remove_option(DESCRIPTION_SECTION, VERSION_ITEM)
-        with self.assertRaises(RuntimeError):
-            get_cfg_schema_version(self._config)
-
-    def test_schema_version_not_int(self):
-        """Test that an externals description file with a version that doesn't
-        decompose to integer major, minor and patch versions raises a
-        runtime error.
-
-        """
-        self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, 'unknown')
-        with self.assertRaises(RuntimeError):
-            get_cfg_schema_version(self._config)
-
-
-class TestModelDescriptionConfigV1(unittest.TestCase):
-    """Test that parsing a config/ini file produces a correct dictionary
-    for the externals description.
-
-    """
-    # pylint: disable=R0902
-
-    def setUp(self):
-        """Boiler plate construction of config data for multiple components.
-        """
-        self._comp1_name = 'comp1'
-        self._comp1_path = 'path/to/comp1'
-        self._comp1_protocol = 'svn'
-        self._comp1_url = 'https://svn.somewhere.com/path/of/comp1'
-        self._comp1_tag = 'a_nice_tag_v1'
-        self._comp1_is_required = 'True'
-        self._comp1_externals = ''
-
-        self._comp2_name = 'comp2'
-        self._comp2_path = 'path/to/comp2'
-        self._comp2_protocol = 'git'
-        self._comp2_url = '/local/clone/of/comp2'
-        self._comp2_branch = 'a_very_nice_branch'
-        self._comp2_is_required = 'False'
-        self._comp2_externals = 'path/to/comp2.cfg'
-
-    def _setup_comp1(self, config):
-        """Boiler plate construction of the config section for component 1
-        """
-        config.add_section(self._comp1_name)
-        config.set(self._comp1_name, 'local_path', self._comp1_path)
-        config.set(self._comp1_name, 'protocol', self._comp1_protocol)
-        config.set(self._comp1_name, 'repo_url', self._comp1_url)
-        config.set(self._comp1_name, 'tag', self._comp1_tag)
-        config.set(self._comp1_name, 'required', self._comp1_is_required)
-
-    def _setup_comp2(self, config):
-        """Boiler plate construction of the config section for component 2
-        """
-        config.add_section(self._comp2_name)
-        config.set(self._comp2_name, 'local_path', self._comp2_path)
-        config.set(self._comp2_name, 'protocol', self._comp2_protocol)
-        config.set(self._comp2_name, 'repo_url', self._comp2_url)
-        config.set(self._comp2_name, 'branch', self._comp2_branch)
-        config.set(self._comp2_name, 'required', self._comp2_is_required)
-        config.set(self._comp2_name, 'externals', self._comp2_externals)
-
-    @staticmethod
-    def _setup_externals_description(config):
-        """Add the required externals description section
-        """
-
-        config.add_section(DESCRIPTION_SECTION)
-        config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.1')
-
-    def _check_comp1(self, model):
-        """Test that component one was constructed correctly.
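-
-        Given the setUp values, the section being verified corresponds
-        to this config (an illustrative rendering, not literal test
-        output):
-
-            [comp1]
-            local_path = path/to/comp1
-            protocol = svn
-            repo_url = https://svn.somewhere.com/path/of/comp1
-            tag = a_nice_tag_v1
-            required = True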
-        """
-        self.assertTrue(self._comp1_name in model)
-        comp1 = model[self._comp1_name]
-        self.assertEqual(comp1[ExternalsDescription.PATH], self._comp1_path)
-        self.assertTrue(comp1[ExternalsDescription.REQUIRED])
-        repo = comp1[ExternalsDescription.REPO]
-        self.assertEqual(repo[ExternalsDescription.PROTOCOL],
-                         self._comp1_protocol)
-        self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp1_url)
-        self.assertEqual(repo[ExternalsDescription.TAG], self._comp1_tag)
-        self.assertEqual(EMPTY_STR, comp1[ExternalsDescription.EXTERNALS])
-
-    def _check_comp2(self, model):
-        """Test that component two was constructed correctly.
-        """
-        self.assertTrue(self._comp2_name in model)
-        comp2 = model[self._comp2_name]
-        self.assertEqual(comp2[ExternalsDescription.PATH], self._comp2_path)
-        self.assertFalse(comp2[ExternalsDescription.REQUIRED])
-        repo = comp2[ExternalsDescription.REPO]
-        self.assertEqual(repo[ExternalsDescription.PROTOCOL],
-                         self._comp2_protocol)
-        self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp2_url)
-        self.assertEqual(repo[ExternalsDescription.BRANCH], self._comp2_branch)
-        self.assertEqual(self._comp2_externals,
-                         comp2[ExternalsDescription.EXTERNALS])
-
-    def test_one_tag_required(self):
-        """Test that a component source with a tag is correctly parsed.
-        """
-        config = config_parser()
-        self._setup_comp1(config)
-        self._setup_externals_description(config)
-        model = ExternalsDescriptionConfigV1(config)
-        print(model)
-        self._check_comp1(model)
-
-    def test_one_branch_externals(self):
-        """Test that a component source with a branch is correctly parsed.
-        """
-        config = config_parser()
-        self._setup_comp2(config)
-        self._setup_externals_description(config)
-        model = ExternalsDescriptionConfigV1(config)
-        print(model)
-        self._check_comp2(model)
-
-    def test_two_sources(self):
-        """Test that multiple component sources are correctly parsed.
-        """
-        config = config_parser()
-        self._setup_comp1(config)
-        self._setup_comp2(config)
-        self._setup_externals_description(config)
-        model = ExternalsDescriptionConfigV1(config)
-        print(model)
-        self._check_comp1(model)
-        self._check_comp2(model)
-
-    def test_cfg_v1_reject_unknown_item(self):
-        """Test that a v1 description object will reject unknown items
-        """
-        config = config_parser()
-        self._setup_comp1(config)
-        self._setup_externals_description(config)
-        config.set(self._comp1_name, 'junk', 'foobar')
-        with self.assertRaises(RuntimeError):
-            ExternalsDescriptionConfigV1(config)
-
-    def test_cfg_v1_reject_v2(self):
-        """Test that a v1 description object won't try to parse a v2 file.
-        """
-        config = config_parser()
-        self._setup_comp1(config)
-        self._setup_externals_description(config)
-        config.set(DESCRIPTION_SECTION, VERSION_ITEM, '2.0.1')
-        with self.assertRaises(RuntimeError):
-            ExternalsDescriptionConfigV1(config)
-
-    def test_cfg_v1_reject_v1_too_new(self):
-        """Test that a v1 description object rejects a v1 file whose minor
-        version is newer than it understands.
-        """
-        config = config_parser()
-        self._setup_comp1(config)
-        self._setup_externals_description(config)
-        config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.100.0')
-        with self.assertRaises(RuntimeError):
-            ExternalsDescriptionConfigV1(config)
-
-
-class TestReadExternalsDescription(unittest.TestCase):
-    """Test the application logic of read_externals_description_file
-    """
-    TMP_FAKE_DIR = 'fake'
-
-    def setUp(self):
-        """Setup directory for tests
-        """
-        if not os.path.exists(self.TMP_FAKE_DIR):
-            os.makedirs(self.TMP_FAKE_DIR)
-
-    def tearDown(self):
-        """Cleanup tmp stuff on the file system
-        """
-        if os.path.exists(self.TMP_FAKE_DIR):
-            shutil.rmtree(self.TMP_FAKE_DIR)
-
-    def test_no_file_error(self):
-        """Test that a runtime error is raised when the file does not exist
-
-        """
-        root_dir = os.getcwd()
-        filename = 'this-file-should-not-exist'
-        with self.assertRaises(RuntimeError):
-            read_externals_description_file(root_dir, filename)
-
-    def test_no_dir_error(self):
-        """Test that a runtime error is raised when the directory does not
-        exist
-
-        """
-        root_dir = '/path/to/some/repo'
-        filename = 'externals.cfg'
-        with self.assertRaises(RuntimeError):
-            read_externals_description_file(root_dir, filename)
-
-    def test_no_invalid_error(self):
-        """Test that a runtime error is raised when the file format is invalid
-
-        """
-        root_dir = os.getcwd()
-        filename = 'externals.cfg'
-        file_path = os.path.join(root_dir, filename)
-        file_path = os.path.abspath(file_path)
-        contents = """
-
-invalid file format
-"""
-        with open(file_path, 'w') as fhandle:
-            fhandle.write(contents)
-        with self.assertRaises(RuntimeError):
-            read_externals_description_file(root_dir, filename)
-        os.remove(file_path)
-
-
-class TestCreateExternalsDescription(unittest.TestCase):
-    """Test the application logic of create_externals_description
-    """
-
-    def setUp(self):
-        """Create config object used as basis for all tests
-        """
-        self._config = config_parser()
-        self._gmconfig = config_parser()
-        self.setup_config()
-
-    def setup_config(self):
-        """Boiler plate construction of config data for a single external
-        """
-        # Create a standard externals config with a single external
-        name = 'test'
-        self._config.add_section(name)
-        self._config.set(name, ExternalsDescription.PATH, 'externals')
-        self._config.set(name, ExternalsDescription.PROTOCOL, 'git')
-        self._config.set(name, ExternalsDescription.REPO_URL, '/path/to/repo')
-        self._config.set(name, ExternalsDescription.TAG, 'test_tag')
-        self._config.set(name, ExternalsDescription.REQUIRED, 'True')
-
-        self._config.add_section(DESCRIPTION_SECTION)
-        self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.0')
-
-        # Create a .gitmodules test
-        name = 'submodule "gitmodules_test"'
-        self._gmconfig.add_section(name)
-        self._gmconfig.set(name, "path", 'externals/test')
-        self._gmconfig.set(name, "url", '/path/to/repo')
-        # NOTE(goldy, 2019-03) Should test other possible keywords such as
-        # fetchRecurseSubmodules, ignore, and shallow
-
-    @staticmethod
-    def setup_dict_config():
-        """Create the full container dictionary with simple and mixed use
-        externals
-
-        """
-        rdatat = {ExternalsDescription.PROTOCOL: 'git',
-                  ExternalsDescription.REPO_URL: 'simple-ext.git',
-                  ExternalsDescription.TAG: 'tag1'}
-        rdatab = {ExternalsDescription.PROTOCOL: 'git',
-                  ExternalsDescription.REPO_URL: 'simple-ext.git',
-                  ExternalsDescription.BRANCH: 'feature2'}
-        rdatam = {ExternalsDescription.PROTOCOL: 'git',
-                  ExternalsDescription.REPO_URL: 'mixed-cont-ext.git',
-                  ExternalsDescription.BRANCH: 'master'}
-        desc = {'simp_tag': {ExternalsDescription.REQUIRED: True,
-                             ExternalsDescription.PATH: 'simp_tag',
-                             ExternalsDescription.EXTERNALS: EMPTY_STR,
-                             ExternalsDescription.REPO: rdatat},
-                'simp_branch': {ExternalsDescription.REQUIRED: True,
-                                ExternalsDescription.PATH: 'simp_branch',
-                                ExternalsDescription.EXTERNALS: EMPTY_STR,
-                                ExternalsDescription.REPO: rdatab},
-                'simp_opt': {ExternalsDescription.REQUIRED: False,
-                             ExternalsDescription.PATH: 'simp_opt',
-                             ExternalsDescription.EXTERNALS: EMPTY_STR,
-                             ExternalsDescription.REPO: rdatat},
-                'mixed_req': {ExternalsDescription.REQUIRED: True,
-                              ExternalsDescription.PATH: 'mixed_req',
-                              ExternalsDescription.EXTERNALS: 'sub-ext.cfg',
-                              ExternalsDescription.REPO: rdatam}}
-
-        return desc
-
-    def test_cfg_v1_ok(self):
-        """Test that a correct cfg v1 object is created by create_externals_description
-
-        """
-        self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.3')
-        ext = create_externals_description(self._config, model_format='cfg')
-        self.assertIsInstance(ext, ExternalsDescriptionConfigV1)
-
-    def test_cfg_v1_unknown_version(self):
-        """Test that a config file with an unknown schema version is rejected by
-        create_externals_description.
-
-        """
-        self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '100.0.3')
-        with self.assertRaises(RuntimeError):
-            create_externals_description(self._config, model_format='cfg')
-
-    def test_dict(self):
-        """Test that a correct dictionary-based externals description object is
-        created by create_externals_description.
-
-        """
-        rdata = {ExternalsDescription.PROTOCOL: 'git',
-                 ExternalsDescription.REPO_URL: '/path/to/repo',
-                 ExternalsDescription.TAG: 'tagv1',
-                 }
-
-        desc = {
-            'test': {
-                ExternalsDescription.REQUIRED: False,
-                ExternalsDescription.PATH: '../fake',
-                ExternalsDescription.EXTERNALS: EMPTY_STR,
-                ExternalsDescription.REPO: rdata, },
-        }
-
-        ext = create_externals_description(desc, model_format='dict')
-        self.assertIsInstance(ext, ExternalsDescriptionDict)
-
-    def test_cfg_component_dict(self):
-        """Verify that create_externals_description works with a dictionary
-        """
-        # create the top level externals file
-        desc = self.setup_dict_config()
-        # Check external with all repos
-        external = create_externals_description(desc, model_format='dict')
-        self.assertIsInstance(external, ExternalsDescriptionDict)
-        self.assertTrue('simp_tag' in external)
-        self.assertTrue('simp_branch' in external)
-        self.assertTrue('simp_opt' in external)
-        self.assertTrue('mixed_req' in external)
-
-    def test_cfg_exclude_component_dict(self):
-        """Verify that excluding components at checkout works with a dictionary
-        """
-        # create the top level externals file
-        desc = self.setup_dict_config()
-        # Test an excluded repo
-        external = create_externals_description(desc, model_format='dict',
-                                                exclude=['simp_tag',
-                                                         'simp_opt'])
-        self.assertIsInstance(external, ExternalsDescriptionDict)
-        self.assertFalse('simp_tag' in external)
-        self.assertTrue('simp_branch' in external)
-        self.assertFalse('simp_opt' in external)
-        self.assertTrue('mixed_req' in external)
-
-    def test_cfg_opt_component_dict(self):
-        """Verify that selecting only named components works with a dictionary
-        """
-        # create the top level externals file
-        desc = self.setup_dict_config()
-        # Test an explicit include-list of repos
-        external = create_externals_description(desc, model_format='dict',
-                                                components=['simp_tag',
-                                                            'simp_opt'])
-        self.assertIsInstance(external, ExternalsDescriptionDict)
-        self.assertTrue('simp_tag' in external)
-        self.assertFalse('simp_branch' in external)
-        self.assertTrue('simp_opt' in external)
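-        # 'components' is an include-list: externals not named in it
-        # (here simp_branch and mixed_req) are dropped from the description.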
-        self.assertFalse('mixed_req' in external)
-
-    def test_cfg_unknown_version(self):
-        """Test that a runtime error is raised when an unknown file version is
-        received
-
-        """
-        self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '123.456.789')
-        with self.assertRaises(RuntimeError):
-            create_externals_description(self._config, model_format='cfg')
-
-    def test_cfg_unknown_format(self):
-        """Test that a runtime error is raised when an unknown format string is
-        received
-
-        """
-        with self.assertRaises(RuntimeError):
-            create_externals_description(self._config, model_format='unknown')
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/manage_externals/test/test_unit_externals_status.py b/manage_externals/test/test_unit_externals_status.py
deleted file mode 100644
index f019514e9e..0000000000
--- a/manage_externals/test/test_unit_externals_status.py
+++ /dev/null
@@ -1,299 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for the manic external status reporting module.
-
-Note: this script assumes the path to the manic package is already in
-the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import unittest
-
-from manic.externals_status import ExternalStatus
-
-
-class TestStatusObject(unittest.TestCase):
-    """Verify that the Status object behaves as expected.
-    """
-
-    def test_exists_empty_all(self):
-        """If the repository sync-state is empty (doesn't exist), and there is no
-        clean state, then it is considered not to exist.
-
-        """
-        stat = ExternalStatus()
-        stat.sync_state = ExternalStatus.EMPTY
-        stat.clean_state = ExternalStatus.DEFAULT
-        exists = stat.exists()
-        self.assertFalse(exists)
-
-        stat.clean_state = ExternalStatus.EMPTY
-        exists = stat.exists()
-        self.assertFalse(exists)
-
-        stat.clean_state = ExternalStatus.UNKNOWN
-        exists = stat.exists()
-        self.assertFalse(exists)
-
-        # this state represents an internal logic error in how the
-        # repo status was determined.
-        stat.clean_state = ExternalStatus.STATUS_OK
-        exists = stat.exists()
-        self.assertTrue(exists)
-
-        # this state represents an internal logic error in how the
-        # repo status was determined.
-        stat.clean_state = ExternalStatus.DIRTY
-        exists = stat.exists()
-        self.assertTrue(exists)
-
-    def test_exists_default_all(self):
-        """If the repository sync-state is default, then it is considered to exist
-        regardless of clean state.
-
-        """
-        stat = ExternalStatus()
-        stat.sync_state = ExternalStatus.DEFAULT
-        stat.clean_state = ExternalStatus.DEFAULT
-        exists = stat.exists()
-        self.assertTrue(exists)
-
-        stat.clean_state = ExternalStatus.EMPTY
-        exists = stat.exists()
-        self.assertTrue(exists)
-
-        stat.clean_state = ExternalStatus.UNKNOWN
-        exists = stat.exists()
-        self.assertTrue(exists)
-
-        stat.clean_state = ExternalStatus.STATUS_OK
-        exists = stat.exists()
-        self.assertTrue(exists)
-
-        stat.clean_state = ExternalStatus.DIRTY
-        exists = stat.exists()
-        self.assertTrue(exists)
-
-    def test_exists_unknown_all(self):
-        """If the repository sync-state is unknown, then it is considered to exist
-        regardless of clean state.
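-        (Taken together with the EMPTY case above, this pins down exists():
-        it is False only when sync_state is EMPTY and clean_state carries
-        no real status.)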
- - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.UNKNOWN - stat.clean_state = ExternalStatus.DEFAULT - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.EMPTY - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.UNKNOWN - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.STATUS_OK - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.DIRTY - exists = stat.exists() - self.assertTrue(exists) - - def test_exists_modified_all(self): - """If the repository sync-state is modified, then it is considered to exist - regardless of clean state. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.MODEL_MODIFIED - stat.clean_state = ExternalStatus.DEFAULT - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.EMPTY - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.UNKNOWN - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.STATUS_OK - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.DIRTY - exists = stat.exists() - self.assertTrue(exists) - - def test_exists_ok_all(self): - """If the repository sync-state is ok, then it is considered to exist - regardless of clean state. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.STATUS_OK - stat.clean_state = ExternalStatus.DEFAULT - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.EMPTY - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.UNKNOWN - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.STATUS_OK - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.DIRTY - exists = stat.exists() - self.assertTrue(exists) - - def test_update_ok_all(self): - """If the repository in-sync is ok, then it is safe to - update only if clean state is ok - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.STATUS_OK - stat.clean_state = ExternalStatus.DEFAULT - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.EMPTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.UNKNOWN - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.STATUS_OK - safe_to_update = stat.safe_to_update() - self.assertTrue(safe_to_update) - - stat.clean_state = ExternalStatus.DIRTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - def test_update_modified_all(self): - """If the repository in-sync is modified, then it is safe to - update only if clean state is ok - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.MODEL_MODIFIED - stat.clean_state = ExternalStatus.DEFAULT - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.EMPTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.UNKNOWN - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.STATUS_OK - safe_to_update = stat.safe_to_update() - self.assertTrue(safe_to_update) - - stat.clean_state = ExternalStatus.DIRTY - safe_to_update = 
stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-    def test_update_unknown_all(self):
-        """If the repository in-sync is unknown, then it is not safe to
-        update, regardless of the clean state.
-
-        """
-        stat = ExternalStatus()
-        stat.sync_state = ExternalStatus.UNKNOWN
-        stat.clean_state = ExternalStatus.DEFAULT
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.EMPTY
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.UNKNOWN
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.STATUS_OK
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.DIRTY
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-    def test_update_default_all(self):
-        """If the repository in-sync is default, then it is not safe to
-        update, regardless of the clean state.
-
-        """
-        stat = ExternalStatus()
-        stat.sync_state = ExternalStatus.DEFAULT
-        stat.clean_state = ExternalStatus.DEFAULT
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.EMPTY
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.UNKNOWN
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.STATUS_OK
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.DIRTY
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-    def test_update_empty_all(self):
-        """If the repository in-sync is empty, then it is not safe to
-        update, regardless of the clean state.
-
-        """
-        stat = ExternalStatus()
-        stat.sync_state = ExternalStatus.EMPTY
-        stat.clean_state = ExternalStatus.DEFAULT
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.EMPTY
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.UNKNOWN
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.STATUS_OK
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-        stat.clean_state = ExternalStatus.DIRTY
-        safe_to_update = stat.safe_to_update()
-        self.assertFalse(safe_to_update)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/manage_externals/test/test_unit_repository.py b/manage_externals/test/test_unit_repository.py
deleted file mode 100644
index 1b93861834..0000000000
--- a/manage_externals/test/test_unit_repository.py
+++ /dev/null
@@ -1,208 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Note: this script assumes the path to the checkout_externals.py module is
-already in the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import unittest
-
-from manic.repository_factory import create_repository
-from manic.repository_git import GitRepository
-from manic.repository_svn import SvnRepository
-from manic.repository import Repository
-from manic.externals_description import ExternalsDescription
-from manic.global_constants import EMPTY_STR
-
-
-class TestCreateRepositoryDict(unittest.TestCase):
-    """Test the create_repository functionality to ensure it returns the
-    proper type of repository and errors for unknown repository
-    types.
-
-    """
-
-    def setUp(self):
-        """Common data needed for all tests in this class
-        """
-        self._name = 'test_name'
-        self._repo = {ExternalsDescription.PROTOCOL: None,
-                      ExternalsDescription.REPO_URL: 'junk_root',
-                      ExternalsDescription.TAG: 'junk_tag',
-                      ExternalsDescription.BRANCH: EMPTY_STR,
-                      ExternalsDescription.HASH: EMPTY_STR,
-                      ExternalsDescription.SPARSE: EMPTY_STR, }
-
-    def test_create_repo_git(self):
-        """Verify that several possible names for the 'git' protocol
-        create git repository objects.
-
-        """
-        protocols = ['git', 'GIT', 'Git', ]
-        for protocol in protocols:
-            self._repo[ExternalsDescription.PROTOCOL] = protocol
-            repo = create_repository(self._name, self._repo)
-            self.assertIsInstance(repo, GitRepository)
-
-    def test_create_repo_svn(self):
-        """Verify that several possible names for the 'svn' protocol
-        create svn repository objects.
-        """
-        protocols = ['svn', 'SVN', 'Svn', ]
-        for protocol in protocols:
-            self._repo[ExternalsDescription.PROTOCOL] = protocol
-            repo = create_repository(self._name, self._repo)
-            self.assertIsInstance(repo, SvnRepository)
-
-    def test_create_repo_externals_only(self):
-        """Verify that an externals only repo returns None.
-        """
-        protocols = ['externals_only', ]
-        for protocol in protocols:
-            self._repo[ExternalsDescription.PROTOCOL] = protocol
-            repo = create_repository(self._name, self._repo)
-            self.assertEqual(None, repo)
-
-    def test_create_repo_unsupported(self):
-        """Verify that an unsupported protocol generates a runtime error.
-        """
-        protocols = ['not_a_supported_protocol', ]
-        for protocol in protocols:
-            self._repo[ExternalsDescription.PROTOCOL] = protocol
-            with self.assertRaises(RuntimeError):
-                create_repository(self._name, self._repo)
-
-
-class TestRepository(unittest.TestCase):
-    """Test the externals description processing used to create the Repository
-    base class shared by protocol specific repository classes.
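-
-    A repository must be pinned by exactly one of tag, branch, or hash;
-    the tests below cover each single-reference case and verify that
-    over- or under-specified combinations raise RuntimeError.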
-
-    """
-
-    def test_tag(self):
-        """Test creation of a repository object with a tag
-        """
-        name = 'test_repo'
-        protocol = 'test_protocol'
-        url = 'test_url'
-        tag = 'test_tag'
-        repo_info = {ExternalsDescription.PROTOCOL: protocol,
-                     ExternalsDescription.REPO_URL: url,
-                     ExternalsDescription.TAG: tag,
-                     ExternalsDescription.BRANCH: EMPTY_STR,
-                     ExternalsDescription.HASH: EMPTY_STR,
-                     ExternalsDescription.SPARSE: EMPTY_STR, }
-        repo = Repository(name, repo_info)
-        print(repo.__dict__)
-        self.assertEqual(repo.tag(), tag)
-        self.assertEqual(repo.url(), url)
-
-    def test_branch(self):
-        """Test creation of a repository object with a branch
-        """
-        name = 'test_repo'
-        protocol = 'test_protocol'
-        url = 'test_url'
-        branch = 'test_branch'
-        repo_info = {ExternalsDescription.PROTOCOL: protocol,
-                     ExternalsDescription.REPO_URL: url,
-                     ExternalsDescription.BRANCH: branch,
-                     ExternalsDescription.TAG: EMPTY_STR,
-                     ExternalsDescription.HASH: EMPTY_STR,
-                     ExternalsDescription.SPARSE: EMPTY_STR, }
-        repo = Repository(name, repo_info)
-        print(repo.__dict__)
-        self.assertEqual(repo.branch(), branch)
-        self.assertEqual(repo.url(), url)
-
-    def test_hash(self):
-        """Test creation of a repository object with a hash
-        """
-        name = 'test_repo'
-        protocol = 'test_protocol'
-        url = 'test_url'
-        ref = 'deadc0de'
-        sparse = EMPTY_STR
-        repo_info = {ExternalsDescription.PROTOCOL: protocol,
-                     ExternalsDescription.REPO_URL: url,
-                     ExternalsDescription.BRANCH: EMPTY_STR,
-                     ExternalsDescription.TAG: EMPTY_STR,
-                     ExternalsDescription.HASH: ref,
-                     ExternalsDescription.SPARSE: sparse, }
-        repo = Repository(name, repo_info)
-        print(repo.__dict__)
-        self.assertEqual(repo.hash(), ref)
-        self.assertEqual(repo.url(), url)
-
-    def test_tag_branch(self):
-        """Test that creating a repository object with both a tag and a branch
-        raises a runtime error.
-
-        """
-        name = 'test_repo'
-        protocol = 'test_protocol'
-        url = 'test_url'
-        branch = 'test_branch'
-        tag = 'test_tag'
-        ref = EMPTY_STR
-        sparse = EMPTY_STR
-        repo_info = {ExternalsDescription.PROTOCOL: protocol,
-                     ExternalsDescription.REPO_URL: url,
-                     ExternalsDescription.BRANCH: branch,
-                     ExternalsDescription.TAG: tag,
-                     ExternalsDescription.HASH: ref,
-                     ExternalsDescription.SPARSE: sparse, }
-        with self.assertRaises(RuntimeError):
-            Repository(name, repo_info)
-
-    def test_tag_branch_hash(self):
-        """Test that creating a repository object with a tag, branch and hash
-        raises a runtime error.
-
-        """
-        name = 'test_repo'
-        protocol = 'test_protocol'
-        url = 'test_url'
-        branch = 'test_branch'
-        tag = 'test_tag'
-        ref = 'deadc0de'
-        sparse = EMPTY_STR
-        repo_info = {ExternalsDescription.PROTOCOL: protocol,
-                     ExternalsDescription.REPO_URL: url,
-                     ExternalsDescription.BRANCH: branch,
-                     ExternalsDescription.TAG: tag,
-                     ExternalsDescription.HASH: ref,
-                     ExternalsDescription.SPARSE: sparse, }
-        with self.assertRaises(RuntimeError):
-            Repository(name, repo_info)
-
-    def test_no_tag_no_branch(self):
-        """Test that creating a repository object without a tag or branch
-        raises a runtime error.
- - """ - name = 'test_repo' - protocol = 'test_protocol' - url = 'test_url' - branch = EMPTY_STR - tag = EMPTY_STR - ref = EMPTY_STR - sparse = EMPTY_STR - repo_info = {ExternalsDescription.PROTOCOL: protocol, - ExternalsDescription.REPO_URL: url, - ExternalsDescription.BRANCH: branch, - ExternalsDescription.TAG: tag, - ExternalsDescription.HASH: ref, - ExternalsDescription.SPARSE: sparse, } - with self.assertRaises(RuntimeError): - Repository(name, repo_info) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_unit_repository_git.py b/manage_externals/test/test_unit_repository_git.py deleted file mode 100644 index 1c01098acf..0000000000 --- a/manage_externals/test/test_unit_repository_git.py +++ /dev/null @@ -1,811 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for checkout_externals - -Note: this script assume the path to the checkout_externals.py module is -already in the python path. - -""" -# pylint: disable=too-many-lines,protected-access - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import shutil -import unittest - -from manic.repository_git import GitRepository -from manic.externals_status import ExternalStatus -from manic.externals_description import ExternalsDescription -from manic.externals_description import ExternalsDescriptionDict -from manic.global_constants import EMPTY_STR - -# NOTE(bja, 2017-11) order is important here. origin should be a -# subset of other to trap errors on processing remotes! -GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM = ''' -upstream /path/to/other/repo (fetch) -upstream /path/to/other/repo (push) -other /path/to/local/repo2 (fetch) -other /path/to/local/repo2 (push) -origin /path/to/local/repo (fetch) -origin /path/to/local/repo (push) -''' - - -class TestGitRepositoryCurrentRef(unittest.TestCase): - """test the current_ref command on a git repository - """ - - def setUp(self): - self._name = 'component' - rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: - '/path/to/local/repo', - ExternalsDescription.TAG: - 'tag1', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'junk', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdata, - }, - } - - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = GitRepository('test', repo) - - # - # mock methods replacing git system calls - # - @staticmethod - def _git_current_branch(branch_found, branch_name): - """Return a function that takes the place of - repo._git_current_branch, which returns the given output.""" - def my_git_current_branch(dirname): - """mock function that can take the place of repo._git_current_branch""" - return branch_found, branch_name - return my_git_current_branch - - @staticmethod - def _git_current_tag(tag_found, tag_name): - """Return a function that takes the place of - repo._git_current_tag, which returns the given output.""" - def my_git_current_tag(dirname): - """mock function that can take the place of repo._git_current_tag""" - return tag_found, tag_name - return my_git_current_tag - - @staticmethod - def _git_current_hash(hash_found, hash_name): - """Return a function that takes the place of - repo._git_current_hash, which returns the given output.""" - def my_git_current_hash(dirname): - """mock function that can take the place of repo._git_current_hash""" - return hash_found, hash_name - return 
my_git_current_hash - - # ------------------------------------------------------------------------ - # Begin tests - # ------------------------------------------------------------------------ - - def test_ref_branch(self): - """Test that we correctly identify we are on a branch - """ - self._repo._git_current_branch = self._git_current_branch( - True, 'feature3') - self._repo._git_current_tag = self._git_current_tag(True, 'foo_tag') - self._repo._git_current_hash = self._git_current_hash(True, 'abc123') - expected = 'foo_tag (branch feature3)' - result = self._repo._current_ref(os.getcwd()) - self.assertEqual(result, expected) - - def test_ref_detached_tag(self): - """Test that we correctly identify that the ref is detached at a tag - """ - self._repo._git_current_branch = self._git_current_branch(False, '') - self._repo._git_current_tag = self._git_current_tag(True, 'foo_tag') - self._repo._git_current_hash = self._git_current_hash(True, 'abc123') - expected = 'foo_tag' - result = self._repo._current_ref(os.getcwd()) - self.assertEqual(result, expected) - - def test_ref_detached_hash(self): - """Test that we can identify ref is detached at a hash - - """ - self._repo._git_current_branch = self._git_current_branch(False, '') - self._repo._git_current_tag = self._git_current_tag(False, '') - self._repo._git_current_hash = self._git_current_hash(True, 'abc123') - expected = 'abc123' - result = self._repo._current_ref(os.getcwd()) - self.assertEqual(result, expected) - - def test_ref_none(self): - """Test that we correctly identify that we're not in a git repo. - """ - self._repo._git_current_branch = self._git_current_branch(False, '') - self._repo._git_current_tag = self._git_current_tag(False, '') - self._repo._git_current_hash = self._git_current_hash(False, '') - result = self._repo._current_ref(os.getcwd()) - self.assertEqual(result, EMPTY_STR) - - -class TestGitRepositoryCheckSync(unittest.TestCase): - """Test whether the GitRepository _check_sync_logic functionality is - correct. - - Note: there are a lot of combinations of state: - - - external description - tag, branch - - - working copy - - doesn't exist (not checked out) - - exists, no git info - incorrect protocol, e.g. svn, or tarball? - - exists, git info - - as expected: - - different from expected: - - detached tag, - - detached hash, - - detached branch (compare remote and branch), - - tracking branch (compare remote and branch), - - same remote - - different remote - - untracked branch - - Test list: - - doesn't exist - - exists no git info - - - num_external * (working copy expected + num_working copy different) - - total tests = 16 - - """ - - # NOTE(bja, 2017-11) pylint complains about long method names, but - # it is hard to differentiate tests without making them more - # cryptic. Also complains about too many public methods, but it - # doesn't really make sense to break this up. 
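-    # The tests below never shell out to git: _git_current_hash and
-    # _git_revparse_commit are replaced with stubs, and each test
-    # asserts only sync_state (clean_state is left at DEFAULT).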
-    # pylint: disable=invalid-name,too-many-public-methods
-
-    TMP_FAKE_DIR = 'fake'
-    TMP_FAKE_GIT_DIR = os.path.join(TMP_FAKE_DIR, '.git')
-
-    def setUp(self):
-        """Setup reusable git repository object
-        """
-        self._name = 'component'
-        rdata = {ExternalsDescription.PROTOCOL: 'git',
-                 ExternalsDescription.REPO_URL:
-                 '/path/to/local/repo',
-                 ExternalsDescription.TAG: 'tag1',
-                 }
-
-        data = {self._name:
-                {
-                    ExternalsDescription.REQUIRED: False,
-                    ExternalsDescription.PATH: self.TMP_FAKE_DIR,
-                    ExternalsDescription.EXTERNALS: EMPTY_STR,
-                    ExternalsDescription.REPO: rdata,
-                },
-                }
-
-        model = ExternalsDescriptionDict(data)
-        repo = model[self._name][ExternalsDescription.REPO]
-        self._repo = GitRepository('test', repo)
-        # The unit tests here don't care about the result of
-        # _current_ref, but we replace it here so that we don't need to
-        # worry about calling a possibly slow and possibly
-        # error-producing command (since _current_ref calls various git
-        # functions):
-        self._repo._current_ref = self._current_ref_empty
-        self._create_tmp_git_dir()
-
-        # We have to override this class method rather than the self._repo
-        # instance method because it is called via
-        # GitRepository._remote_name_for_url, which is itself a
-        # @classmethod that calls cls._git_remote_verbose().
-        self._original_git_remote_verbose = GitRepository._git_remote_verbose
-        GitRepository._git_remote_verbose = self._git_remote_origin_upstream
-
-    def tearDown(self):
-        """Cleanup tmp stuff on the file system
-        """
-        self._remove_tmp_git_dir()
-
-        GitRepository._git_remote_verbose = self._original_git_remote_verbose
-
-    def _create_tmp_git_dir(self):
-        """Create a temporary fake git directory for testing purposes.
-        """
-        if not os.path.exists(self.TMP_FAKE_GIT_DIR):
-            os.makedirs(self.TMP_FAKE_GIT_DIR)
-
-    def _remove_tmp_git_dir(self):
-        """Remove the temporary fake git directory
-        """
-        if os.path.exists(self.TMP_FAKE_DIR):
-            shutil.rmtree(self.TMP_FAKE_DIR)
-
-    #
-    # mock methods replacing git system calls
-    #
-    @staticmethod
-    def _current_ref_empty(dirname):
-        """Return an empty string.
-
-        Drop-in for GitRepository._current_ref
-        """
-        return EMPTY_STR
-
-    @staticmethod
-    def _git_remote_origin_upstream(dirname):
-        """Return an info string that is a checkout hash.
-
-        Drop-in for GitRepository._git_remote_verbose.
-        """
-        return GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM
-
-    @staticmethod
-    def _git_current_hash(myhash):
-        """Return a function that takes the place of repo._git_current_hash,
-        which returns the given hash
-        """
-        def my_git_current_hash(dirname):
-            """mock function that can take the place of repo._git_current_hash"""
-            return 0, myhash
-        return my_git_current_hash
-
-    def _git_revparse_commit(self, expected_ref, mystatus, myhash):
-        """Return a function that takes the place of
-        repo._git_revparse_commit, which returns a tuple:
-        (mystatus, myhash).
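-
-        Usage sketch (mirroring the tests below):
-
-            repo._git_revparse_commit = self._git_revparse_commit('tag1', 0, 'abc123')
-            # repo._git_revparse_commit('tag1', dirname) now returns (0, 'abc123')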
- - Expects the passed-in ref to equal expected_ref - - status = 0 implies success, non-zero implies failure - """ - def my_git_revparse_commit(ref, dirname): - """mock function that can take the place of repo._git_revparse_commit""" - self.assertEqual(expected_ref, ref) - return mystatus, myhash - return my_git_revparse_commit - - # ---------------------------------------------------------------- - # - # Tests where working copy doesn't exist or is invalid - # - # ---------------------------------------------------------------- - def test_sync_dir_not_exist(self): - """Test that a directory that doesn't exist returns an error status - - Note: the Repository classes should be prevented from ever - working on an empty directory by the _Source object. - - """ - stat = ExternalStatus() - self._repo._check_sync(stat, 'invalid_directory_name') - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR) - # check_dir should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_dir_exist_no_git_info(self): - """Test that a non-existent git repo returns an unknown status - """ - stat = ExternalStatus() - self._repo._tag = 'tag1' - self._repo._git_current_hash = self._git_current_hash('') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'tag1', 1, '') - self._repo._check_sync(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - # ------------------------------------------------------------------------ - # - # Tests where version in configuration file is not a valid reference - # - # ------------------------------------------------------------------------ - - def test_sync_invalid_reference(self): - """Test that an invalid reference returns out-of-sync - """ - stat = ExternalStatus() - self._repo._tag = 'tag1' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'tag1', 1, '') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - # ---------------------------------------------------------------- - # - # Tests where external description specifies a tag - # - # ---------------------------------------------------------------- - def test_sync_tag_on_same_hash(self): - """Test expect tag on same hash --> status ok - - """ - stat = ExternalStatus() - self._repo._tag = 'tag1' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'tag1', 0, 'abc123') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_tag_on_different_hash(self): - """Test expect tag on a different hash --> status modified - - """ - stat = ExternalStatus() - self._repo._tag = 'tag1' - self._repo._git_current_hash = self._git_current_hash('def456') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'tag1', 0, 'abc123') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, 
ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - # ---------------------------------------------------------------- - # - # Tests where external description specifies a hash - # - # ---------------------------------------------------------------- - def test_sync_hash_on_same_hash(self): - """Test expect hash on same hash --> status ok - - """ - stat = ExternalStatus() - self._repo._tag = '' - self._repo._hash = 'abc' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'abc', 0, 'abc123') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_hash_on_different_hash(self): - """Test expect hash on a different hash --> status modified - - """ - stat = ExternalStatus() - self._repo._tag = '' - self._repo._hash = 'abc' - self._repo._git_current_hash = self._git_current_hash('def456') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'abc', 0, 'abc123') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - # ---------------------------------------------------------------- - # - # Tests where external description specifies a branch - # - # ---------------------------------------------------------------- - def test_sync_branch_on_same_hash(self): - """Test expect branch on same hash --> status ok - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('origin/feature-2', 0, 'abc123')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_branch_on_diff_hash(self): - """Test expect branch on diff hash --> status modified - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('origin/feature-2', 0, 'def456')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_branch_diff_remote(self): - """Test _remote_name_for_url with a different remote - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._url = '/path/to/other/repo' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('upstream/feature-2', 0, 'def456')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - # The test passes if _git_revparse_commit is called with the - # expected argument - - def test_sync_branch_diff_remote2(self): - """Test _remote_name_for_url 
with a different remote - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._url = '/path/to/local/repo2' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('other/feature-2', 0, 'def789')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - # The test passes if _git_revparse_commit is called with the - # expected argument - - def test_sync_branch_on_unknown_remote(self): - """Test expect branch, but remote is unknown --> status modified - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._url = '/path/to/unknown/repo' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('unknown_remote/feature-2', 1, '')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_branch_on_untracked_local(self): - """Test expect branch, on untracked branch in local repo --> status ok - - Setting the externals description to '.' indicates that the - user only wants to consider the current local repo state - without fetching from remotes. This is required to preserve - the current branch of a repository during an update. - - """ - stat = ExternalStatus() - self._repo._branch = 'feature3' - self._repo._tag = '' - self._repo._url = '.' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('feature3', 0, 'abc123')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - -class TestGitStatusPorcelain(unittest.TestCase): - """Test parsing of output from git status --porcelain=v1 -z - """ - # pylint: disable=C0103 - GIT_STATUS_PORCELAIN_V1_ALL = ( - r' D INSTALL\0MM Makefile\0M README.md\0R cmakelists.txt\0' - r'CMakeLists.txt\0D commit-message-template.txt\0A stuff.txt\0' - r'?? junk.txt') - - GIT_STATUS_PORCELAIN_CLEAN = r'' - - def test_porcelain_status_dirty(self): - """Verify that git status output is considered dirty when there are - listed files. - - """ - git_output = self.GIT_STATUS_PORCELAIN_V1_ALL - is_dirty = GitRepository._status_v1z_is_dirty(git_output) - self.assertTrue(is_dirty) - - def test_porcelain_status_clean(self): - """Verify that git status output is considered clean when there are no - listed files. 
-
-        """
-        git_output = self.GIT_STATUS_PORCELAIN_CLEAN
-        is_dirty = GitRepository._status_v1z_is_dirty(git_output)
-        self.assertFalse(is_dirty)
-
-
-class TestGitCreateRemoteName(unittest.TestCase):
-    """Test the create_remote_name method on the GitRepository class
-    """
-
-    def setUp(self):
-        """Common infrastructure for testing _create_remote_name
-        """
-        self._rdata = {ExternalsDescription.PROTOCOL: 'git',
-                       ExternalsDescription.REPO_URL:
-                       'empty',
-                       ExternalsDescription.TAG:
-                       'very_useful_tag',
-                       ExternalsDescription.BRANCH: EMPTY_STR,
-                       ExternalsDescription.HASH: EMPTY_STR,
-                       ExternalsDescription.SPARSE: EMPTY_STR, }
-        self._repo = GitRepository('test', self._rdata)
-
-    def test_remote_git_proto(self):
-        """Test remote with git protocol
-        """
-        self._repo._url = 'git@git.github.com:very_nice_org/useful_repo'
-        remote_name = self._repo._create_remote_name()
-        self.assertEqual(remote_name, 'very_nice_org_useful_repo')
-
-    def test_remote_https_proto(self):
-        """Test remote with https protocol
-        """
-        self._repo._url = 'https://www.github.com/very_nice_org/useful_repo'
-        remote_name = self._repo._create_remote_name()
-        self.assertEqual(remote_name, 'very_nice_org_useful_repo')
-
-    def test_remote_local_abs(self):
-        """Test remote with a local absolute path
-        """
-        self._repo._url = '/path/to/local/repositories/useful_repo'
-        remote_name = self._repo._create_remote_name()
-        self.assertEqual(remote_name, 'repositories_useful_repo')
-
-    def test_remote_local_rel(self):
-        """Test remote with a local relative path
-        """
-        os.environ['TEST_VAR'] = '/my/path/to/repos'
-        self._repo._url = '${TEST_VAR}/../../useful_repo'
-        remote_name = self._repo._create_remote_name()
-        self.assertEqual(remote_name, 'path_useful_repo')
-        del os.environ['TEST_VAR']
-
-
-class TestVerifyTag(unittest.TestCase):
-    """Test logic verifying that a tag exists and is unique
-
-    """
-
-    def setUp(self):
-        """Setup reusable git repository object
-        """
-        self._name = 'component'
-        rdata = {ExternalsDescription.PROTOCOL: 'git',
-                 ExternalsDescription.REPO_URL:
-                 '/path/to/local/repo',
-                 ExternalsDescription.TAG: 'tag1',
-                 }
-
-        data = {self._name:
-                {
-                    ExternalsDescription.REQUIRED: False,
-                    ExternalsDescription.PATH: 'tmp',
-                    ExternalsDescription.EXTERNALS: EMPTY_STR,
-                    ExternalsDescription.REPO: rdata,
-                },
-                }
-
-        model = ExternalsDescriptionDict(data)
-        repo = model[self._name][ExternalsDescription.REPO]
-        self._repo = GitRepository('test', repo)
-
-    @staticmethod
-    def _shell_true(*args, **kwargs):
-        return 0
-
-    @staticmethod
-    def _shell_false(*args, **kwargs):
-        return 1
-
-    @staticmethod
-    def _mock_revparse_commit(ref, dirname):
-        _ = ref
-        return (TestValidRef._shell_true, '97ebc0e0deadc0de')
-
-    @staticmethod
-    def _mock_revparse_commit_false(ref, dirname):
-        _ = ref
-        return (TestValidRef._shell_false, '97ebc0e0deadc0de')
-
-    def test_tag_not_tag_branch_commit(self):
-        """Verify a non-tag returns false
-        """
-        self._repo._git_showref_tag = self._shell_false
-        self._repo._git_showref_branch = self._shell_false
-        self._repo._git_lsremote_branch = self._shell_false
-        self._repo._git_revparse_commit = self._mock_revparse_commit_false
-        self._repo._tag = 'something'
-        remote_name = 'origin'
-        received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
-                                                os.getcwd())
-        self.assertFalse(received)
-
-    def test_tag_not_tag(self):
-        """Verify a non-tag, untracked remote returns false
-        """
-        self._repo._git_showref_tag = self._shell_false
-        self._repo._git_showref_branch = self._shell_true
-        self._repo._git_lsremote_branch = self._shell_true
-        self._repo._git_revparse_commit = self._mock_revparse_commit_false
-        self._repo._tag = 'tag1'
-        remote_name = 'origin'
-        received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
-                                                os.getcwd())
-        self.assertFalse(received)
-
-    def test_tag_indeterminant(self):
-        """Verify an indeterminate tag/branch returns false
-        """
-        self._repo._git_showref_tag = self._shell_true
-        self._repo._git_showref_branch = self._shell_true
-        self._repo._git_lsremote_branch = self._shell_true
-        self._repo._git_revparse_commit = self._mock_revparse_commit
-        self._repo._tag = 'something'
-        remote_name = 'origin'
-        received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
-                                                os.getcwd())
-        self.assertFalse(received)
-
-    def test_tag_is_unique(self):
-        """Verify a unique tag match returns true
-        """
-        self._repo._git_showref_tag = self._shell_true
-        self._repo._git_showref_branch = self._shell_false
-        self._repo._git_lsremote_branch = self._shell_false
-        self._repo._git_revparse_commit = self._mock_revparse_commit
-        self._repo._tag = 'tag1'
-        remote_name = 'origin'
-        received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
-                                                os.getcwd())
-        self.assertTrue(received)
-
-    def test_tag_is_not_hash(self):
-        """Verify a commit hash is not classified as a tag
-        """
-        self._repo._git_showref_tag = self._shell_false
-        self._repo._git_showref_branch = self._shell_false
-        self._repo._git_lsremote_branch = self._shell_false
-        self._repo._git_revparse_commit = self._mock_revparse_commit
-        self._repo._tag = '97ebc0e0'
-        remote_name = 'origin'
-        received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
-                                                os.getcwd())
-        self.assertFalse(received)
-
-    def test_hash_is_commit(self):
-        """Verify a commit hash is not classified as a tag
-        """
-        self._repo._git_showref_tag = self._shell_false
-        self._repo._git_showref_branch = self._shell_false
-        self._repo._git_lsremote_branch = self._shell_false
-        self._repo._git_revparse_commit = self._mock_revparse_commit
-        self._repo._tag = '97ebc0e0'
-        remote_name = 'origin'
-        received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name,
-                                                os.getcwd())
-        self.assertFalse(received)
-
-
-class TestValidRef(unittest.TestCase):
-    """Test logic verifying that a reference is a valid tag, branch or sha1
-
-    """
-
-    def setUp(self):
-        """Setup reusable git repository object
-        """
-        self._name = 'component'
-        rdata = {ExternalsDescription.PROTOCOL: 'git',
-                 ExternalsDescription.REPO_URL:
-                 '/path/to/local/repo',
-                 ExternalsDescription.TAG: 'tag1',
-                 }
-
-        data = {self._name:
-                {
-                    ExternalsDescription.REQUIRED: False,
-                    ExternalsDescription.PATH: 'tmp',
-                    ExternalsDescription.EXTERNALS: EMPTY_STR,
-                    ExternalsDescription.REPO: rdata,
-                },
-                }
-
-        model = ExternalsDescriptionDict(data)
-        repo = model[self._name][ExternalsDescription.REPO]
-        self._repo = GitRepository('test', repo)
-
-    @staticmethod
-    def _shell_true(url, remote=None):
-        _ = url
-        _ = remote
-        return 0
-
-    @staticmethod
-    def _shell_false(url, remote=None):
-        _ = url
-        _ = remote
-        return 1
-
-    @staticmethod
-    def _mock_revparse_commit_false(ref, dirname):
-        _ = ref
-        return (TestValidRef._shell_false, '')
-
-    @staticmethod
-    def _mock_revparse_commit_true(ref, dirname):
-        _ = ref
-        _ = dirname
-        return (TestValidRef._shell_true, '')
-
-    def test_valid_ref_is_invalid(self):
-        """Verify an invalid reference raises an exception
-        """
-        self._repo._git_showref_tag = self._shell_false
-        self._repo._git_showref_branch = self._shell_false
-        self._repo._git_lsremote_branch = self._shell_false
-        self._repo._git_revparse_commit = self._mock_revparse_commit_false
-        self._repo._tag = 'invalid_ref'
-        with self.assertRaises(RuntimeError):
-            self._repo._check_for_valid_ref(self._repo._tag,
-                                            remote_name=None,
-                                            dirname=os.getcwd())
-
-    def test_valid_tag(self):
-        """Verify a valid tag returns true
-        """
-        self._repo._git_showref_tag = self._shell_true
-        self._repo._git_showref_branch = self._shell_false
-        self._repo._git_lsremote_branch = self._shell_false
-        self._repo._git_revparse_commit = self._mock_revparse_commit_true
-        self._repo._tag = 'tag1'
-        received = self._repo._check_for_valid_ref(self._repo._tag,
-                                                   remote_name=None,
-                                                   dirname=os.getcwd())
-        self.assertTrue(received)
-
-    def test_valid_branch(self):
-        """Verify a valid branch returns true
-        """
-        self._repo._git_showref_tag = self._shell_false
-        self._repo._git_showref_branch = self._shell_true
-        self._repo._git_lsremote_branch = self._shell_false
-        self._repo._git_revparse_commit = self._mock_revparse_commit_true
-        self._repo._tag = 'tag1'
-        received = self._repo._check_for_valid_ref(self._repo._tag,
-                                                   remote_name=None,
-                                                   dirname=os.getcwd())
-        self.assertTrue(received)
-
-    def test_valid_hash(self):
-        """Verify a valid hash returns true
-        """
-        def _mock_revparse_commit_true(ref, dirname):
-            _ = ref
-            return (0, '56cc0b539426eb26810af9e')
-
-        self._repo._git_showref_tag = self._shell_false
-        self._repo._git_showref_branch = self._shell_false
-        self._repo._git_lsremote_branch = self._shell_false
-        self._repo._git_revparse_commit = _mock_revparse_commit_true
-        self._repo._hash = '56cc0b5394'
-        received = self._repo._check_for_valid_ref(self._repo._hash,
-                                                   remote_name=None,
-                                                   dirname=os.getcwd())
-        self.assertTrue(received)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/manage_externals/test/test_unit_repository_svn.py b/manage_externals/test/test_unit_repository_svn.py
deleted file mode 100755
index d9309df7f6..0000000000
--- a/manage_externals/test/test_unit_repository_svn.py
+++ /dev/null
@@ -1,501 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Note: this script assumes the path to the checkout_externals.py module is
-already in the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import unittest
-
-from manic.repository_svn import SvnRepository
-from manic.externals_status import ExternalStatus
-from manic.externals_description import ExternalsDescription
-from manic.externals_description import ExternalsDescriptionDict
-from manic.global_constants import EMPTY_STR
-
-# pylint: disable=W0212
-
-SVN_INFO_MOSART = """Path: components/mosart
-Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/mosart
-URL: https://svn-ccsm-models.cgd.ucar.edu/mosart/trunk_tags/mosart1_0_26
-Relative URL: ^/mosart/trunk_tags/mosart1_0_26
-Repository Root: https://svn-ccsm-models.cgd.ucar.edu
-Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5
-Revision: 86711
-Node Kind: directory
-Schedule: normal
-Last Changed Author: erik
-Last Changed Rev: 86031
-Last Changed Date: 2017-07-07 12:28:10 -0600 (Fri, 07 Jul 2017)
-"""
-SVN_INFO_CISM = """
-Path: components/cism
-Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/cism
-URL: https://svn-ccsm-models.cgd.ucar.edu/glc/trunk_tags/cism2_1_37
-Relative URL: ^/glc/trunk_tags/cism2_1_37
-Repository Root: https://svn-ccsm-models.cgd.ucar.edu
-Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5
-Revision: 86711
-Node Kind: directory
-Schedule: normal
-Last Changed Author: sacks
-Last Changed Rev: 85704
-Last Changed Date: 2017-06-15 05:59:28 -0600 (Thu, 15 Jun 2017)
-"""
-
-
-class TestSvnRepositoryCheckURL(unittest.TestCase):
-    """Verify that the svn_check_url function is working as expected.
-    """
-
-    def setUp(self):
-        """Setup reusable svn repository object
-        """
-        self._name = 'component'
-        rdata = {ExternalsDescription.PROTOCOL: 'svn',
-                 ExternalsDescription.REPO_URL:
-                 'https://svn-ccsm-models.cgd.ucar.edu',
-                 ExternalsDescription.TAG:
-                 'mosart/trunk_tags/mosart1_0_26',
-                 }
-
-        data = {self._name:
-                {
-                    ExternalsDescription.REQUIRED: False,
-                    ExternalsDescription.PATH: 'junk',
-                    ExternalsDescription.EXTERNALS: '',
-                    ExternalsDescription.REPO: rdata,
-                },
-                }
-
-        model = ExternalsDescriptionDict(data)
-        repo = model[self._name][ExternalsDescription.REPO]
-        self._repo = SvnRepository('test', repo)
-
-    def test_check_url_same(self):
-        """Test that we correctly identify the correct URL.
-        """
-        svn_output = SVN_INFO_MOSART
-        expected_url = self._repo.url()
-        result, current_version = \
-            self._repo._check_url(svn_output, expected_url)
-        self.assertEqual(result, ExternalStatus.STATUS_OK)
-        self.assertEqual(current_version, 'mosart/trunk_tags/mosart1_0_26')
-
-    def test_check_url_different(self):
-        """Test that we correctly reject an incorrect URL.
-        """
-        svn_output = SVN_INFO_CISM
-        expected_url = self._repo.url()
-        result, current_version = \
-            self._repo._check_url(svn_output, expected_url)
-        self.assertEqual(result, ExternalStatus.MODEL_MODIFIED)
-        self.assertEqual(current_version, 'glc/trunk_tags/cism2_1_37')
-
-    def test_check_url_none(self):
-        """Test that we can handle an empty string for output, e.g. not an svn
-        repo.
-
-        """
-        svn_output = EMPTY_STR
-        expected_url = self._repo.url()
-        result, current_version = \
-            self._repo._check_url(svn_output, expected_url)
-        self.assertEqual(result, ExternalStatus.UNKNOWN)
-        self.assertEqual(current_version, '')
-
-
-class TestSvnRepositoryCheckSync(unittest.TestCase):
-    """Test whether the SvnRepository svn_check_sync functionality is
-    correct.
-
-    """
-
-    def setUp(self):
-        """Setup reusable svn repository object
-        """
-        self._name = "component"
-        rdata = {ExternalsDescription.PROTOCOL: 'svn',
-                 ExternalsDescription.REPO_URL:
-                 'https://svn-ccsm-models.cgd.ucar.edu/',
-                 ExternalsDescription.TAG:
-                 'mosart/trunk_tags/mosart1_0_26',
-                 }
-
-        data = {self._name:
-                {
-                    ExternalsDescription.REQUIRED: False,
-                    ExternalsDescription.PATH: 'junk',
-                    ExternalsDescription.EXTERNALS: EMPTY_STR,
-                    ExternalsDescription.REPO: rdata,
-                },
-                }
-
-        model = ExternalsDescriptionDict(data)
-        repo = model[self._name][ExternalsDescription.REPO]
-        self._repo = SvnRepository('test', repo)
-
-    @staticmethod
-    def _svn_info_empty(*_):
-        """Return an empty info string. Simulates svn info failing.
-        """
-        return ''
-
-    @staticmethod
-    def _svn_info_synced(*_):
-        """Return an info string that is synced with the setUp data
-        """
-        return SVN_INFO_MOSART
-
-    @staticmethod
-    def _svn_info_modified(*_):
-        """Return an info string that is modified from the setUp data
-        """
-        return SVN_INFO_CISM
-
-    def test_repo_dir_not_exist(self):
-        """Test that a directory that doesn't exist returns an error status
-
-        Note: the Repository classes should be prevented from ever
-        working on an empty directory by the _Source object.
-
-        """
-        stat = ExternalStatus()
-        self._repo._check_sync(stat, 'junk')
-        self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR)
-        # check_dir should only modify the sync_state, not clean_state
-        self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
-    def test_repo_dir_exist_no_svn_info(self):
-        """Test that an empty info string returns an unknown status
-        """
-        stat = ExternalStatus()
-        # Now we over-ride the _svn_info method on the repo to return
-        # a known value without requiring access to svn.
-        self._repo._svn_info = self._svn_info_empty
-        self._repo._check_sync(stat, '.')
-        self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN)
-        # check_dir should only modify the sync_state, not clean_state
-        self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
-    def test_repo_dir_synced(self):
-        """Test that a valid info string that is synced to the repo in the
-        externals description returns an ok status.
-
-        """
-        stat = ExternalStatus()
-        # Now we over-ride the _svn_info method on the repo to return
-        # a known value without requiring access to svn.
-        self._repo._svn_info = self._svn_info_synced
-        self._repo._check_sync(stat, '.')
-        self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK)
-        # check_dir should only modify the sync_state, not clean_state
-        self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT)
-
-    def test_repo_dir_modified(self):
-        """Test that a valid svn info string that is out of sync with the
-        externals description returns a modified status.
-
-        """
-        stat = ExternalStatus()
-        # Now we over-ride the _svn_info method on the repo to return
-        # a known value without requiring access to svn.
- self._repo._svn_info = self._svn_info_modified - self._repo._check_sync(stat, '.') - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_dir should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - -class TestSVNStatusXML(unittest.TestCase): - """Test parsing of svn status xml output - """ - SVN_STATUS_XML_DIRTY_ALL = ''' - - - - - -sacks -2017-06-15T11:59:00.355419Z - - - - - - -sacks -2013-02-07T16:17:56.412878Z - - - - - - -sacks -2017-05-01T16:48:27.893741Z - - - - - - - - - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_MISSING = ''' - - - - - -sacks -2017-06-15T11:59:00.355419Z - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_MODIFIED = ''' - - - - - -sacks -2013-02-07T16:17:56.412878Z - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_DELETED = ''' - - - - - -sacks -2017-05-01T16:48:27.893741Z - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_UNVERSION = ''' - - - - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_ADDED = ''' - - - - - - - - - - - -''' - - SVN_STATUS_XML_CLEAN = ''' - - - - - - - - - - - -''' - - def test_xml_status_dirty_missing(self): - """Verify that svn status output is consindered dirty when there is a - missing file. - - """ - svn_output = self.SVN_STATUS_XML_DIRTY_MISSING - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_modified(self): - """Verify that svn status output is consindered dirty when there is a - modified file. - """ - svn_output = self.SVN_STATUS_XML_DIRTY_MODIFIED - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_deleted(self): - """Verify that svn status output is consindered dirty when there is a - deleted file. - """ - svn_output = self.SVN_STATUS_XML_DIRTY_DELETED - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_unversion(self): - """Verify that svn status output ignores unversioned files when making - the clean/dirty decision. - - """ - svn_output = self.SVN_STATUS_XML_DIRTY_UNVERSION - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertFalse(is_dirty) - - def test_xml_status_dirty_added(self): - """Verify that svn status output is consindered dirty when there is a - added file. - """ - svn_output = self.SVN_STATUS_XML_DIRTY_ADDED - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_all(self): - """Verify that svn status output is consindered dirty when there are - multiple dirty files.. - - """ - svn_output = self.SVN_STATUS_XML_DIRTY_ALL - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_clean(self): - """Verify that svn status output is consindered clean when there are - no 'dirty' files. This means accepting untracked and externals. 
-
-        """
-        svn_output = self.SVN_STATUS_XML_CLEAN
-        is_dirty = SvnRepository.xml_status_is_dirty(
-            svn_output)
-        self.assertFalse(is_dirty)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/manage_externals/test/test_unit_utils.py b/manage_externals/test/test_unit_utils.py
deleted file mode 100644
index 80e1636649..0000000000
--- a/manage_externals/test/test_unit_utils.py
+++ /dev/null
@@ -1,350 +0,0 @@
-#!/usr/bin/env python3
-
-"""Unit test driver for checkout_externals
-
-Note: this script assumes the path to the checkout_externals.py module is
-already in the python path.
-
-"""
-
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import os
-import unittest
-
-from manic.utils import last_n_lines, indent_string
-from manic.utils import str_to_bool, execute_subprocess
-from manic.utils import is_remote_url, split_remote_url, expand_local_url
-
-
-class TestExecuteSubprocess(unittest.TestCase):
-    """Test the application logic of execute_subprocess wrapper
-    """
-
-    def test_exesub_return_stat_err(self):
-        """Test that execute_subprocess returns a status code when the caller
-        requests one and the executed subprocess fails.
-
-        """
-        cmd = ['false']
-        status = execute_subprocess(cmd, status_to_caller=True)
-        self.assertEqual(status, 1)
-
-    def test_exesub_return_stat_ok(self):
-        """Test that execute_subprocess returns a status code when the caller
-        requests one and the executed subprocess succeeds.
-
-        """
-        cmd = ['true']
-        status = execute_subprocess(cmd, status_to_caller=True)
-        self.assertEqual(status, 0)
-
-    def test_exesub_except_stat_err(self):
-        """Test that execute_subprocess raises an exception on error when the
-        caller doesn't request the return code
-
-        """
-        cmd = ['false']
-        with self.assertRaises(RuntimeError):
-            execute_subprocess(cmd, status_to_caller=False)
-
-
-class TestLastNLines(unittest.TestCase):
-    """Test the last_n_lines function.
-
-    """
-
-    def test_last_n_lines_short(self):
-        """With a message of <= n lines, the result of last_n_lines should
-        just be the original message.
-
-        """
-        mystr = """three
-line
-string
-"""
-
-        mystr_truncated = last_n_lines(
-            mystr, 3, truncation_message='[truncated]')
-        self.assertEqual(mystr, mystr_truncated)
-
-    def test_last_n_lines_long(self):
-        """With a message of > n lines, the result of last_n_lines should
-        be a truncated string.
-
-        """
-        mystr = """a
-big
-five
-line
-string
-"""
-        expected = """[truncated]
-five
-line
-string
-"""
-
-        mystr_truncated = last_n_lines(
-            mystr, 3, truncation_message='[truncated]')
-        self.assertEqual(expected, mystr_truncated)
-
-
-class TestIndentStr(unittest.TestCase):
-    """Test the indent_string function.
-
-    """
-
-    def test_indent_string_singleline(self):
-        """Test the indent_string function with a single-line string
-
-        """
-        mystr = 'foo'
-        result = indent_string(mystr, 4)
-        expected = '    foo'
-        self.assertEqual(expected, result)
-
-    def test_indent_string_multiline(self):
-        """Test the indent_string function with a multi-line string
-
-        """
-        mystr = """hello
-hi
-goodbye
-"""
-        result = indent_string(mystr, 2)
-        expected = """  hello
-  hi
-  goodbye
-"""
-        self.assertEqual(expected, result)
-
-
-class TestStrToBool(unittest.TestCase):
-    """Test the string to boolean conversion routine.
-
-    """
-
-    def test_case_insensitive_true(self):
-        """Verify that case insensitive variants of 'true' return the True
-        boolean.
-
-        """
-        values = ['true', 'TRUE', 'True', 'tRuE', 't', 'T', ]
-        for value in values:
-            received = str_to_bool(value)
-            self.assertTrue(received)
-
-    def test_case_insensitive_false(self):
-        """Verify that case insensitive variants of 'false' return the False
-        boolean.
-
-        """
-        values = ['false', 'FALSE', 'False', 'fAlSe', 'f', 'F', ]
-        for value in values:
-            received = str_to_bool(value)
-            self.assertFalse(received)
-
-    def test_invalid_str_error(self):
-        """Verify that a non-true/false string generates a runtime error.
-        """
-        values = ['not_true_or_false', 'A', '1', '0',
-                  'false_is_not_true', 'true_is_not_false']
-        for value in values:
-            with self.assertRaises(RuntimeError):
-                str_to_bool(value)
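The three cases above fully specify the conversion routine's contract. A minimal sketch consistent with them (hypothetical; the real str_to_bool lives in manic.utils and may differ in detail):

```python
def str_to_bool_sketch(value):
    """Editorial sketch of a case-insensitive string-to-boolean conversion."""
    lowered = value.lower()
    if lowered in ('true', 't'):
        return True
    if lowered in ('false', 'f'):
        return False
    # anything else, including '1' and '0', is an error per the tests above
    raise RuntimeError('invalid boolean string: {0}'.format(value))
```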
-
-
-class TestIsRemoteURL(unittest.TestCase):
-    """Crude url checking to determine if a url is local or remote.
-
-    """
-
-    def test_url_remote_git(self):
-        """verify that a remote git url is identified.
-        """
-        url = 'git@somewhere'
-        is_remote = is_remote_url(url)
-        self.assertTrue(is_remote)
-
-    def test_url_remote_ssh(self):
-        """verify that a remote ssh url is identified.
-        """
-        url = 'ssh://user@somewhere'
-        is_remote = is_remote_url(url)
-        self.assertTrue(is_remote)
-
-    def test_url_remote_http(self):
-        """verify that a remote http url is identified.
-        """
-        url = 'http://somewhere'
-        is_remote = is_remote_url(url)
-        self.assertTrue(is_remote)
-
-    def test_url_remote_https(self):
-        """verify that a remote https url is identified.
-        """
-        url = 'https://somewhere'
-        is_remote = is_remote_url(url)
-        self.assertTrue(is_remote)
-
-    def test_url_local_user(self):
-        """verify that a local path with '~/path/to/repo' gets rejected
-
-        """
-        url = '~/path/to/repo'
-        is_remote = is_remote_url(url)
-        self.assertFalse(is_remote)
-
-    def test_url_local_var_curly(self):
-        """verify that a local path with env var '${HOME}' gets rejected
-        """
-        url = '${HOME}/path/to/repo'
-        is_remote = is_remote_url(url)
-        self.assertFalse(is_remote)
-
-    def test_url_local_var(self):
-        """verify that a local path with an env var '$HOME' gets rejected
-        """
-        url = '$HOME/path/to/repo'
-        is_remote = is_remote_url(url)
-        self.assertFalse(is_remote)
-
-    def test_url_local_abs(self):
-        """verify that a local abs path gets rejected
-        """
-        url = '/path/to/repo'
-        is_remote = is_remote_url(url)
-        self.assertFalse(is_remote)
-
-    def test_url_local_rel(self):
-        """verify that a local relative path gets rejected
-        """
-        url = '../../path/to/repo'
-        is_remote = is_remote_url(url)
-        self.assertFalse(is_remote)
-
-
-class TestSplitRemoteURL(unittest.TestCase):
-    """Test the split_remote_url function.
-
-    """
-
-    def test_url_remote_git(self):
-        """verify that a remote git url is split correctly.
-        """
-        url = 'git@somewhere.com:org/repo'
-        received = split_remote_url(url)
-        self.assertEqual(received, "org/repo")
-
-    def test_url_remote_ssh(self):
-        """verify that a remote ssh url is split correctly.
-        """
-        url = 'ssh://user@somewhere.com/path/to/repo'
-        received = split_remote_url(url)
-        self.assertEqual(received, 'somewhere.com/path/to/repo')
-
-    def test_url_remote_http(self):
-        """verify that a remote http url is split correctly.
-        """
-        url = 'http://somewhere.org/path/to/repo'
-        received = split_remote_url(url)
-        self.assertEqual(received, 'somewhere.org/path/to/repo')
-
-    def test_url_remote_https(self):
-        """verify that a remote https url is split correctly.
-        """
-        url = 'https://somewhere.gov/path/to/repo'
-        received = split_remote_url(url)
-        self.assertEqual(received, 'somewhere.gov/path/to/repo')
-
-    def test_url_local_url_unchanged(self):
-        """verify that a local path is unchanged
-
-        """
-        url = '/path/to/repo'
-        received = split_remote_url(url)
-        self.assertEqual(received, url)
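Taken together, the two classes above pin down both the remote/local classification and the splitting behavior. A minimal sketch consistent with these tests (hypothetical; the real functions live in manic.utils and may differ in detail):

```python
def is_remote_url_sketch(url):
    """Crudely classify a url as remote (git/ssh/http/https) or local."""
    return url.startswith(('git@', 'ssh://', 'http://', 'https://'))


def split_remote_url_sketch(url):
    """Strip the protocol (and any user@ prefix) from a remote url.

    Local urls are returned unchanged.
    """
    if not is_remote_url_sketch(url):
        return url
    if url.startswith('git@'):
        # 'git@somewhere.com:org/repo' -> 'org/repo'
        return url.split(':', 1)[1]
    # 'ssh://user@somewhere.com/path/to/repo' -> 'somewhere.com/path/to/repo'
    return url.split('://', 1)[1].split('@')[-1]
```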
-
-
-class TestExpandLocalURL(unittest.TestCase):
-    """Test the expand_local_url function.
-
-    Remote urls should be unmodified.
-
-    Local urls should undergo user and variable expansion.
-
-    """
-
-    def test_url_local_user1(self):
-        """verify that a local path with '~/path/to/repo' gets expanded to an
-        absolute path.
-
-        NOTE(bja, 2017-11) we can't test for something like:
-        '~user/path/to/repo' because the user has to be in the local
-        machine password directory and we don't know a user name that
-        is valid on every system....?
-
-        """
-        field = 'test'
-        url = '~/path/to/repo'
-        received = expand_local_url(url, field)
-        print(received)
-        self.assertTrue(os.path.isabs(received))
-
-    def test_url_local_expand_curly(self):
-        """verify that a local path with '${HOME}' gets expanded to an absolute path.
-        """
-        field = 'test'
-        url = '${HOME}/path/to/repo'
-        received = expand_local_url(url, field)
-        self.assertTrue(os.path.isabs(received))
-
-    def test_url_local_expand_var(self):
-        """verify that a local path with '$HOME' gets expanded to an absolute path.
-        """
-        field = 'test'
-        url = '$HOME/path/to/repo'
-        received = expand_local_url(url, field)
-        self.assertTrue(os.path.isabs(received))
-
-    def test_url_local_env_missing(self):
-        """verify that a local path with an env var that is missing gets left as-is
-
-        """
-        field = 'test'
-        url = '$TMP_VAR/path/to/repo'
-        received = expand_local_url(url, field)
-        print(received)
-        self.assertEqual(received, url)
-
-    def test_url_local_expand_env(self):
-        """verify that a local path with another env var gets expanded to an
-        absolute path.
-
-        """
-        field = 'test'
-        os.environ['TMP_VAR'] = '/some/absolute'
-        url = '$TMP_VAR/path/to/repo'
-        received = expand_local_url(url, field)
-        del os.environ['TMP_VAR']
-        print(received)
-        self.assertTrue(os.path.isabs(received))
-        self.assertEqual(received, '/some/absolute/path/to/repo')
-
-    def test_url_local_normalize_rel(self):
-        """verify that relative path segments ('..') in a local path get
-        normalized away in the expanded path.
- - """ - field = 'test' - url = '/this/is/a/long/../path/to/a/repo' - received = expand_local_url(url, field) - print(received) - self.assertEqual(received, '/this/is/a/path/to/a/repo') - - -if __name__ == '__main__': - unittest.main() From 0f7d429f0071995504ef41a91215724c957e2fb6 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 27 May 2024 17:40:56 -0600 Subject: [PATCH 103/126] add submodules --- .gitignore | 11 ----------- .gitmodules | 39 +++++++++++++++++++++++++++++++++++++++ ccs_config | 1 + cime | 1 + components/cdeps | 1 + components/cism | 1 + components/cmeps | 1 + components/mizuRoute | 1 + components/mosart | 1 + components/rtm | 1 + doc/doc-builder | 1 + libraries/mct | 1 + libraries/parallelio | 1 + share | 1 + src/fates | 1 + 15 files changed, 52 insertions(+), 11 deletions(-) create mode 100644 .gitmodules create mode 160000 ccs_config create mode 160000 cime create mode 160000 components/cdeps create mode 160000 components/cism create mode 160000 components/cmeps create mode 160000 components/mizuRoute create mode 160000 components/mosart create mode 160000 components/rtm create mode 160000 doc/doc-builder create mode 160000 libraries/mct create mode 160000 libraries/parallelio create mode 160000 share create mode 160000 src/fates diff --git a/.gitignore b/.gitignore index 163bfeda2d..a083a7853f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,14 +1,3 @@ -# directories checked out by manage_externals, and other files created -# by manage_externals -manage_externals.log -ccs_config -/src/fates/ -/cime/ -/components/ -/libraries/ -/share/ -/doc/doc-builder/ - # ignore svn directories **/.svn/** .svn/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..329ed2e748 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,39 @@ +[submodule "fates"] + path = src/fates + url = https://github.com/NGEET/fates +[submodule "cism"] + path = components/cism + url = https://github.com/ESCOMP/CISM-wrapper +[submodule "mosart"] + path = components/mosart + url = https://github.com/ESCOMP/MOSART +[submodule "rtm"] + path = components/rtm + url = https://github.com/ESCOMP/RTM +[submodule "mizuRoute"] + path = components/mizuRoute + url = https://github.com/ESCOMP/mizuRoute +[submodule "ccs_config"] + path = ccs_config + url = https://github.com/ESMCI/ccs_config_cesm.git +[submodule "cime"] + path = cime + url = https://github.com/ESMCI/cime +[submodule "cmeps"] + path = components/cmeps + url = https://github.com/ESCOMP/CMEPS.git +[submodule "cdeps"] + path = components/cdeps + url = https://github.com/ESCOMP/CDEPS.git +[submodule "share"] + path = share + url = https://github.com/ESCOMP/CESM_share +[submodule "mct"] + path = libraries/mct + url = https://github.com/MCSclimate/MCT +[submodule "parallelio"] + path = libraries/parallelio + url = https://github.com/NCAR/ParallelIO +[submodule "doc-builder"] + path = doc/doc-builder + url = https://github.com/ESMCI/doc-builder diff --git a/ccs_config b/ccs_config new file mode 160000 index 0000000000..f90e10502c --- /dev/null +++ b/ccs_config @@ -0,0 +1 @@ +Subproject commit f90e10502c7246c2b45e373b7dd5e0da6cba0828 diff --git a/cime b/cime new file mode 160000 index 0000000000..422ddaa770 --- /dev/null +++ b/cime @@ -0,0 +1 @@ +Subproject commit 422ddaa770a3cea6e83a60c9700ebce77acaceed diff --git a/components/cdeps b/components/cdeps new file mode 160000 index 0000000000..7a522c828c --- /dev/null +++ b/components/cdeps @@ -0,0 +1 @@ +Subproject commit 7a522c828c32dc35777992653f281ec525509c4a diff --git a/components/cism 
b/components/cism new file mode 160000 index 0000000000..c05dd5c4fc --- /dev/null +++ b/components/cism @@ -0,0 +1 @@ +Subproject commit c05dd5c4fc85327e76523aaea9cfe1e388748928 diff --git a/components/cmeps b/components/cmeps new file mode 160000 index 0000000000..6384ff4e4a --- /dev/null +++ b/components/cmeps @@ -0,0 +1 @@ +Subproject commit 6384ff4e4a6bc82a678f9419a43ffbd5d53ac209 diff --git a/components/mizuRoute b/components/mizuRoute new file mode 160000 index 0000000000..0a62ee6185 --- /dev/null +++ b/components/mizuRoute @@ -0,0 +1 @@ +Subproject commit 0a62ee6185bdc9b99cd88ed5f15ea850602c43a2 diff --git a/components/mosart b/components/mosart new file mode 160000 index 0000000000..1c22a8c547 --- /dev/null +++ b/components/mosart @@ -0,0 +1 @@ +Subproject commit 1c22a8c5478b714cf5b7c54b3e5cf7bad09fd9b2 diff --git a/components/rtm b/components/rtm new file mode 160000 index 0000000000..88503adbc2 --- /dev/null +++ b/components/rtm @@ -0,0 +1 @@ +Subproject commit 88503adbc275fb2ccbb6b598e460deaeb140e515 diff --git a/doc/doc-builder b/doc/doc-builder new file mode 160000 index 0000000000..ab9bc93dd0 --- /dev/null +++ b/doc/doc-builder @@ -0,0 +1 @@ +Subproject commit ab9bc93dd09d0173f8097c7a18c7d061c1cd3b79 diff --git a/libraries/mct b/libraries/mct new file mode 160000 index 0000000000..e36024c5dd --- /dev/null +++ b/libraries/mct @@ -0,0 +1 @@ +Subproject commit e36024c5ddf482625ae6bd9474eff7d8f393f87c diff --git a/libraries/parallelio b/libraries/parallelio new file mode 160000 index 0000000000..6539ef05ae --- /dev/null +++ b/libraries/parallelio @@ -0,0 +1 @@ +Subproject commit 6539ef05ae7584ec570a56fdab9f7dfb336c2b80 diff --git a/share b/share new file mode 160000 index 0000000000..4b9dc4871a --- /dev/null +++ b/share @@ -0,0 +1 @@ +Subproject commit 4b9dc4871a259f00f35bb47708d876cb7dcdf75c diff --git a/src/fates b/src/fates new file mode 160000 index 0000000000..f0185f7c70 --- /dev/null +++ b/src/fates @@ -0,0 +1 @@ +Subproject commit f0185f7c7033fa69c80d1ddb07cbcbf1f8be1adc From aa544736634b4e095b691c2d5168dbd3c8de9cef Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 27 May 2024 17:43:00 -0600 Subject: [PATCH 104/126] update submodules --- .gitmodules | 105 ++- .../git-fleximod/.github/workflows/pre-commit | 13 + .../.github/workflows/pytest.yaml | 77 ++ .lib/git-fleximod/.pre-commit-config.yaml | 18 + .lib/git-fleximod/License | 20 + .lib/git-fleximod/README.md | 110 +++ .lib/git-fleximod/doc/Makefile | 20 + .lib/git-fleximod/doc/conf.py | 26 + .lib/git-fleximod/doc/index.rst | 24 + .lib/git-fleximod/doc/make.bat | 35 + .lib/git-fleximod/escomp_install | 25 + .lib/git-fleximod/git_fleximod/__init__.py | 0 .lib/git-fleximod/git_fleximod/cli.py | 119 +++ .../git-fleximod/git_fleximod/git_fleximod.py | 601 +++++++++++++++ .../git-fleximod/git_fleximod/gitinterface.py | 79 ++ .lib/git-fleximod/git_fleximod/gitmodules.py | 97 +++ .../git-fleximod/git_fleximod/lstripreader.py | 43 ++ .lib/git-fleximod/git_fleximod/metoflexi.py | 236 ++++++ .lib/git-fleximod/git_fleximod/utils.py | 365 +++++++++ .lib/git-fleximod/poetry.lock | 693 ++++++++++++++++++ .lib/git-fleximod/pyproject.toml | 41 ++ .lib/git-fleximod/tbump.toml | 43 ++ .lib/git-fleximod/tests/__init__.py | 3 + .lib/git-fleximod/tests/conftest.py | 138 ++++ .lib/git-fleximod/tests/test_a_import.py | 8 + .lib/git-fleximod/tests/test_b_update.py | 26 + .lib/git-fleximod/tests/test_c_required.py | 30 + .lib/git-fleximod/tests/test_d_complex.py | 67 ++ bin/git-fleximod | 8 + ccs_config | 2 +- components/cdeps | 2 +- 
components/mizuRoute | 2 +- components/mosart | 2 +- libraries/mct | 2 +- libraries/parallelio | 2 +- src/fates | 2 +- 36 files changed, 3050 insertions(+), 34 deletions(-) create mode 100644 .lib/git-fleximod/.github/workflows/pre-commit create mode 100644 .lib/git-fleximod/.github/workflows/pytest.yaml create mode 100644 .lib/git-fleximod/.pre-commit-config.yaml create mode 100644 .lib/git-fleximod/License create mode 100644 .lib/git-fleximod/README.md create mode 100644 .lib/git-fleximod/doc/Makefile create mode 100644 .lib/git-fleximod/doc/conf.py create mode 100644 .lib/git-fleximod/doc/index.rst create mode 100644 .lib/git-fleximod/doc/make.bat create mode 100644 .lib/git-fleximod/escomp_install create mode 100644 .lib/git-fleximod/git_fleximod/__init__.py create mode 100644 .lib/git-fleximod/git_fleximod/cli.py create mode 100755 .lib/git-fleximod/git_fleximod/git_fleximod.py create mode 100644 .lib/git-fleximod/git_fleximod/gitinterface.py create mode 100644 .lib/git-fleximod/git_fleximod/gitmodules.py create mode 100644 .lib/git-fleximod/git_fleximod/lstripreader.py create mode 100755 .lib/git-fleximod/git_fleximod/metoflexi.py create mode 100644 .lib/git-fleximod/git_fleximod/utils.py create mode 100644 .lib/git-fleximod/poetry.lock create mode 100644 .lib/git-fleximod/pyproject.toml create mode 100644 .lib/git-fleximod/tbump.toml create mode 100644 .lib/git-fleximod/tests/__init__.py create mode 100644 .lib/git-fleximod/tests/conftest.py create mode 100644 .lib/git-fleximod/tests/test_a_import.py create mode 100644 .lib/git-fleximod/tests/test_b_update.py create mode 100644 .lib/git-fleximod/tests/test_c_required.py create mode 100644 .lib/git-fleximod/tests/test_d_complex.py create mode 100755 bin/git-fleximod diff --git a/.gitmodules b/.gitmodules index 329ed2e748..699ad22969 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,39 +1,90 @@ [submodule "fates"] - path = src/fates - url = https://github.com/NGEET/fates +path = src/fates +url = https://github.com/NGEET/fates +fxtag = sci.1.73.0_api.35.0.0 +fxrequired = AlwaysRequired +fxDONOTUSEurl = https://github.com/NCAR/fates-release + [submodule "cism"] - path = components/cism - url = https://github.com/ESCOMP/CISM-wrapper -[submodule "mosart"] - path = components/mosart - url = https://github.com/ESCOMP/MOSART +path = components/cism +url = https://github.com/ESCOMP/CISM-wrapper +fxtag = cismwrap_2_2_001 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/CISM-wrapper + [submodule "rtm"] - path = components/rtm - url = https://github.com/ESCOMP/RTM +path = components/rtm +url = https://github.com/ESCOMP/RTM +fxtag = rtm1_0_79 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/RTM + +[submodule "mosart"] +path = components/mosart +url = https://github.com/ESCOMP/MOSART +fxtag = mosart1_0_49 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/MOSART + [submodule "mizuRoute"] - path = components/mizuRoute - url = https://github.com/ESCOMP/mizuRoute +path = components/mizuRoute +url = https://github.com/ESCOMP/mizuRoute +fxtag = cesm-coupling.n02_v2.1.2 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/mizuRoute + [submodule "ccs_config"] - path = ccs_config - url = https://github.com/ESMCI/ccs_config_cesm.git +path = ccs_config +url = https://github.com/ESMCI/ccs_config_cesm.git +fxtag = ccs_config_cesm0.0.106 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESMCI/ccs_config_cesm.git + [submodule "cime"] - path = cime - url 
= https://github.com/ESMCI/cime
+path = cime
+url = https://github.com/ESMCI/cime
+fxtag = cime6.0.246
+fxrequired = ToplevelRequired
+fxDONOTUSEurl = https://github.com/ESMCI/cime
+
 [submodule "cmeps"]
- path = components/cmeps
- url = https://github.com/ESCOMP/CMEPS.git
+path = components/cmeps
+url = https://github.com/ESCOMP/CMEPS.git
+fxtag = cmeps0.14.63
+fxrequired = ToplevelRequired
+fxDONOTUSEurl = https://github.com/ESCOMP/CMEPS.git
+
 [submodule "cdeps"]
- path = components/cdeps
- url = https://github.com/ESCOMP/CDEPS.git
+path = components/cdeps
+url = https://github.com/ESCOMP/CDEPS.git
+fxtag = cdeps1.0.34
+fxrequired = ToplevelRequired
+fxDONOTUSEurl = https://github.com/ESCOMP/CDEPS.git
+
 [submodule "share"]
- path = share
- url = https://github.com/ESCOMP/CESM_share
+path = share
+url = https://github.com/ESCOMP/CESM_share
+fxtag = share1.0.19
+fxrequired = ToplevelRequired
+fxDONOTUSEurl = https://github.com/ESCOMP/CESM_share
+
 [submodule "mct"]
- path = libraries/mct
- url = https://github.com/MCSclimate/MCT
+path = libraries/mct
+url = https://github.com/MCSclimate/MCT
+fxtag = MCT_2.11.0
+fxrequired = ToplevelRequired
+fxDONOTUSEurl = https://github.com/MCSclimate/MCT
+
 [submodule "parallelio"]
- path = libraries/parallelio
- url = https://github.com/NCAR/ParallelIO
+path = libraries/parallelio
+url = https://github.com/NCAR/ParallelIO
+fxtag = pio2_6_2
+fxrequired = ToplevelRequired
+fxDONOTUSEurl = https://github.com/NCAR/ParallelIO
+
 [submodule "doc-builder"]
- path = doc/doc-builder
- url = https://github.com/ESMCI/doc-builder
+path = doc/doc-builder
+url = https://github.com/ESMCI/doc-builder
+fxtag = v1.0.8
+fxrequired = ToplevelOptional
+fxDONOTUSEurl = https://github.com/ESMCI/doc-builder
diff --git a/.lib/git-fleximod/.github/workflows/pre-commit b/.lib/git-fleximod/.github/workflows/pre-commit
new file mode 100644
index 0000000000..1a6ad0082a
--- /dev/null
+++ b/.lib/git-fleximod/.github/workflows/pre-commit
@@ -0,0 +1,13 @@
+name: pre-commit
+on:
+  pull_request:
+  push:
+    branches: [main]
+
+jobs:
+  pre-commit:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v3
+    - uses: actions/setup-python@v3
+    - uses: pre-commit/action@v3.0.0
diff --git a/.lib/git-fleximod/.github/workflows/pytest.yaml b/.lib/git-fleximod/.github/workflows/pytest.yaml
new file mode 100644
index 0000000000..0868dd9a33
--- /dev/null
+++ b/.lib/git-fleximod/.github/workflows/pytest.yaml
@@ -0,0 +1,77 @@
+# Run this job on pushes to `main`, and for pull requests. If you don't specify
+# `branches: [main]`, then this action runs _twice_ on pull requests, which is
+# annoying.
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+    branches: [main]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      # If you wanted to use multiple Python versions, you'd have to specify a matrix in the job and
+      # reference the matrix Python version here.
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.9'
+
+      # Cache the installation of Poetry itself, e.g. the next step. This prevents the workflow
+      # from installing Poetry every time, which can be slow. Note the use of the Poetry version
+      # number in the cache key, and the "-0" suffix: this allows you to invalidate the cache
+      # manually if/when you want to upgrade Poetry, or if something goes wrong. This could be
+      # mildly cleaner by using an environment variable, but I don't really care.
+      - name: cache poetry install
+        uses: actions/cache@v4
+        with:
+          path: ~/.local
+          key: poetry-1.7.1
+
+      # Install Poetry. You could do this manually, or there are several actions that do this.
+      # `snok/install-poetry` seems to be minimal yet complete, and really just calls out to
+      # Poetry's default install script, which feels correct. I pin the Poetry version here
+      # because Poetry does occasionally change APIs between versions and I don't want my
+      # actions to break if it does.
+      #
+      # The key configuration value here is `virtualenvs-in-project: true`: this creates the
+      # venv as a `.venv` in your testing directory, which allows the next step to easily
+      # cache it.
+      - uses: snok/install-poetry@v1
+        with:
+          version: 1.7.1
+          virtualenvs-create: true
+          virtualenvs-in-project: true
+
+      # Cache your dependencies (i.e. all the stuff in your `pyproject.toml`). Note the cache
+      # key: if you're using multiple Python versions, or multiple OSes, you'd need to include
+      # them in the cache key. I'm not, so it can be simple and just depend on the poetry.lock.
+      - name: cache deps
+        id: cache-deps
+        uses: actions/cache@v4
+        with:
+          path: .venv
+          key: pydeps-${{ hashFiles('**/poetry.lock') }}
+
+      # Install dependencies. `--no-root` means "install all dependencies but not the project
+      # itself", which is what you want to avoid caching _your_ code. The `if` statement
+      # ensures this only runs on a cache miss.
+      - run: poetry install --no-interaction --no-root
+        if: steps.cache-deps.outputs.cache-hit != 'true'
+
+      # Now install _your_ project. This isn't necessary for many types of projects -- particularly
+      # things like Django apps don't need this. But it's a good idea since it fully exercises the
+      # pyproject.toml and makes sure that if you add things like console-scripts at some point
+      # they'll be installed and working.
+      - run: poetry install --no-interaction
+
+      # And finally run tests. I'm using pytest and all my pytest config is in my `pyproject.toml`
+      # so this line is super-simple. But it could be as complex as you need.
+      - run: |
+          git config --global user.name "${GITHUB_ACTOR}"
+          git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com"
+          poetry run pytest
+
diff --git a/.lib/git-fleximod/.pre-commit-config.yaml b/.lib/git-fleximod/.pre-commit-config.yaml
new file mode 100644
index 0000000000..2f6089da72
--- /dev/null
+++ b/.lib/git-fleximod/.pre-commit-config.yaml
@@ -0,0 +1,18 @@
+exclude: ^utils/.*$
+
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.0.1
+    hooks:
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+  - repo: https://github.com/psf/black
+    rev: 22.3.0
+    hooks:
+      - id: black
+  - repo: https://github.com/PyCQA/pylint
+    rev: v2.11.1
+    hooks:
+      - id: pylint
+        args:
+          - --disable=I,C,R,logging-not-lazy,wildcard-import,unused-wildcard-import,fixme,broad-except,bare-except,eval-used,exec-used,global-statement,logging-format-interpolation,no-name-in-module,arguments-renamed,unspecified-encoding,protected-access,import-error,no-member
diff --git a/.lib/git-fleximod/License b/.lib/git-fleximod/License
new file mode 100644
index 0000000000..2c6fe768c2
--- /dev/null
+++ b/.lib/git-fleximod/License
@@ -0,0 +1,20 @@
+Copyright 2024 National Center for Atmospheric Research (NCAR)
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.lib/git-fleximod/README.md b/.lib/git-fleximod/README.md
new file mode 100644
index 0000000000..d1ef632f28
--- /dev/null
+++ b/.lib/git-fleximod/README.md
@@ -0,0 +1,110 @@
+# git-fleximod
+
+Flexible, Enhanced Submodule Management for Git
+
+## Overview
+
+Git-fleximod is a Python-based tool that extends Git's submodule and sparse checkout capabilities, offering additional features for managing submodules in a more flexible and efficient way.
+
+## Installation
+
+#TODO Install using pip:
+# pip install git-fleximod
+  If you choose to locate git-fleximod in your path, you can access it via the command: git fleximod
+
+## Usage
+
+  Basic Usage:
+    git fleximod <command> [options]
+  Available Commands:
+    status: Display the status of submodules.
+    update: Update submodules to the tag indicated in .gitmodules variable fxtag.
+    test: Make sure that fxtags and submodule hashes are consistent,
+          make sure that official urls (as defined by fxDONOTUSEurl) are set, and
+          make sure that fxtags are defined for all submodules
+  Additional Options:
+    See git fleximod --help for more details.
+
+## Supported .gitmodules Variables
+
+  fxtag: Specify a specific tag or branch to checkout for a submodule.
+  fxrequired: Mark a submodule's checkout behavior, with allowed values:
+    - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module).
+    - ToplevelOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module).
+    - AlwaysRequired: Always required (always checked out).
+    - AlwaysOptional: Always optional (checked out with --optional flag).
+  fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths.
+  fxDONOTUSEurl: This is the url used in the test subcommand to ensure that protected branches do not point to forks.
+    **NOTE** the fxDONOTUSEurl variable is only used to identify the official project repository and should not be
+    changed by users. Use the url variable to change to a fork if desired.
+
+## Sparse Checkouts
+
+  To enable sparse checkout for a submodule, set the fxsparse variable
+  in the .gitmodules file to the path of a file containing the desired
+  sparse checkout paths. Git-fleximod will automatically configure
+  sparse checkout based on this file when applicable commands are run.
+  See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set)
+  for details on the format of this file.
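As a concrete illustration of the fxsparse file format: it uses git's full-pattern sparse-checkout syntax, i.e. gitignore-style patterns selecting what to keep. A minimal, purely hypothetical example (the directory name is made up, not taken from any real submodule):

```
/*
!/large_unused_dir/
```

This would check out everything in the submodule except the hypothetical large_unused_dir directory.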
+  fxrequired: Mark a submodule's checkout behavior, with allowed values:
+    - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module).
+    - ToplevelOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module).
+    - AlwaysRequired: Always required (always checked out).
+    - AlwaysOptional: Always optional (checked out with --optional flag).
+  fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths.
+  fxDONOTUSEurl: This is the URL used by the test subcommand to ensure that protected branches do not point to forks.
+    **NOTE** the fxDONOTUSEurl variable is only used to identify the official project repository and should not be
+    changed by users. Use the url variable to change to a fork if desired.
+
+## Sparse Checkouts
+
+  To enable sparse checkout for a submodule, set the fxsparse variable
+  in the .gitmodules file to the path of a file containing the desired
+  sparse checkout paths. Git-fleximod will automatically configure
+  sparse checkout based on this file when applicable commands are run.
+  See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set)
+  for details on the format of this file.
+
+## Tests
+
+  The git fleximod test action is designed to be used by, for example, GitHub workflows
+  to ensure that protected branches are consistent with respect to submodule hashes and fleximod fxtags.
+
+## Examples
+
+Here are some common usage examples:
+
+Update all submodules, including optional ones:
+```bash
+  git fleximod update --optional
+```
+
+Update a specific submodule to the fxtag indicated in .gitmodules:
+
+```bash
+  git fleximod update submodule-name
+```
+Example .gitmodules entry:
+```ini
+  [submodule "cosp2"]
+    path = src/physics/cosp2/src
+    url = https://github.com/CFMIP/COSPv2.0
+    fxsparse = ../.cosp_sparse_checkout
+    fxrequired = AlwaysRequired
+    fxtag = v2.1.4cesm
+```
+Explanation:
+
+This entry indicates that the submodule named cosp2 at tag v2.1.4cesm
+should be checked out into the directory src/physics/cosp2/src
+relative to the .gitmodules directory. It should be checked out from
+the URL https://github.com/CFMIP/COSPv2.0 and use sparse checkout as
+described in the file ../.cosp_sparse_checkout, relative to the
+submodule path. It should be checked out any time this .gitmodules
+entry is read.
+
+Additional example:
+```ini
+  [submodule "cime"]
+    path = cime
+    url = https://github.com/jedwards4b/cime
+    fxrequired = ToplevelRequired
+    fxtag = cime6.0.198_rme01
+```
+
+Explanation:
+
+This entry indicates that the submodule cime should be checked out
+into a directory named cime at tag cime6.0.198_rme01 from the URL
+https://github.com/jedwards4b/cime. This should only be done if
+the .gitmodules file is at the top level of the repository clone.
+
+## Contributing
+
+We welcome contributions! Please see the CONTRIBUTING.md file for guidelines.
+
+## License
+
+Git-fleximod is released under the MIT License.
diff --git a/.lib/git-fleximod/doc/Makefile b/.lib/git-fleximod/doc/Makefile
new file mode 100644
index 0000000000..d4bb2cbb9e
--- /dev/null
+++ b/.lib/git-fleximod/doc/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
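+# For reference: a target such as "make html" is forwarded to Sphinx by the
+# catch-all rule below and renders the docs into $(BUILDDIR)/html.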
+help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/.lib/git-fleximod/doc/conf.py b/.lib/git-fleximod/doc/conf.py new file mode 100644 index 0000000000..423099eec9 --- /dev/null +++ b/.lib/git-fleximod/doc/conf.py @@ -0,0 +1,26 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "git-fleximod" +author = "Jim Edwards " +release = "0.4.0" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = ["sphinx_argparse_cli"] + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "alabaster" +html_static_path = ["_static"] diff --git a/.lib/git-fleximod/doc/index.rst b/.lib/git-fleximod/doc/index.rst new file mode 100644 index 0000000000..0f9c1a7f7e --- /dev/null +++ b/.lib/git-fleximod/doc/index.rst @@ -0,0 +1,24 @@ +.. git-fleximod documentation master file, created by + sphinx-quickstart on Sat Feb 3 12:02:22 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to git-fleximod's documentation! +======================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: +.. module:: sphinxcontrib.autoprogram +.. sphinx_argparse_cli:: + :module: git_fleximod.cli + :func: get_parser + :prog: git-fleximod + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/.lib/git-fleximod/doc/make.bat b/.lib/git-fleximod/doc/make.bat new file mode 100644 index 0000000000..32bb24529f --- /dev/null +++ b/.lib/git-fleximod/doc/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+echo.If you don't have Sphinx installed, grab it from
+echo.https://www.sphinx-doc.org/
+exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/.lib/git-fleximod/escomp_install b/.lib/git-fleximod/escomp_install
new file mode 100644
index 0000000000..ae782e72a4
--- /dev/null
+++ b/.lib/git-fleximod/escomp_install
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# updates git-fleximod in an ESCOMP model
+# this script should be run from the model root directory; it expects
+# git-fleximod to already be installed, with the script in bin
+# and the classes in lib/python/site-packages
+import sys
+import shutil
+import os
+
+from glob import iglob
+
+fleximod_root = sys.argv[1]
+fleximod_path = os.path.join(fleximod_root,"src","git-fleximod")
+if os.path.isfile(fleximod_path):
+    with open(fleximod_path,"r") as f:
+        fleximod = f.readlines()
+    with open(os.path.join(".","bin","git-fleximod"),"w") as f:
+        for line in fleximod:
+            f.write(line)
+            if "import argparse" in line:
+                f.write('\nsys.path.append(os.path.join(os.path.dirname(__file__),"..","lib","python","site-packages"))\n\n')
+
+    for file in iglob(os.path.join(fleximod_root, "src", "fleximod", "*.py")):
+        shutil.copy(file,
+                    os.path.join("lib","python","site-packages","fleximod",os.path.basename(file)))
diff --git a/.lib/git-fleximod/git_fleximod/__init__.py b/.lib/git-fleximod/git_fleximod/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/.lib/git-fleximod/git_fleximod/cli.py b/.lib/git-fleximod/git_fleximod/cli.py
new file mode 100644
index 0000000000..1fb959dad0
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/cli.py
@@ -0,0 +1,119 @@
+from pathlib import Path
+import argparse
+
+__version__ = "0.7.4"
+
+def find_root_dir(filename=".git"):
+    d = Path.cwd()
+    root = Path(d.root)
+    while d != root:
+        attempt = d / filename
+        if attempt.is_dir():
+            return attempt
+        d = d.parent
+    return None
+
+
+def get_parser():
+    description = """
+    %(prog)s manages checking out groups of git submodules with additional support for Earth System Models
+    """
+    parser = argparse.ArgumentParser(
+        description=description, formatter_class=argparse.RawDescriptionHelpFormatter
+    )
+
+    #
+    # user options
+    #
+    choices = ["update", "status", "test"]
+    parser.add_argument(
+        "action",
+        choices=choices,
+        default="update",
+        help=f"Subcommand of git-fleximod, choices are {choices[:-1]}",
+    )
+
+    parser.add_argument(
+        "components",
+        nargs="*",
+        help="Specific component(s) to check out. By default, "
+        "all required submodules are checked out.",
+    )
+
+    parser.add_argument(
+        "-C",
+        "--path",
+        default=find_root_dir(),
+        help="Toplevel repository directory. Defaults to top git directory relative to current.",
+    )
+
+    parser.add_argument(
+        "-g",
+        "--gitmodules",
+        nargs="?",
+        default=".gitmodules",
+        help="The submodule description filename. Default: %(default)s.",
+    )
+
+    parser.add_argument(
+        "-x",
+        "--exclude",
+        nargs="*",
+        help="Component(s) listed in the gitmodules file which should be ignored.",
+    )
+    parser.add_argument(
+        "-f",
+        "--force",
+        action="store_true",
+        default=False,
+        help="Override cautions and update or checkout over locally modified repository.",
+    )
+
+    parser.add_argument(
+        "-o",
+        "--optional",
+        action="store_true",
+        default=False,
+        help="By default only the required submodules "
+        "are checked out. This flag will also check out the "
+        "optional submodules relative to the toplevel directory.",
+    )
+
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        default=0,
+        help="Output additional information to "
+        "the screen and log file. This flag can be "
+        "used up to two times, increasing the "
+        "verbosity level each time.",
+    )
+
+    parser.add_argument(
+        "-V",
+        "--version",
+        action="version",
+        version=f"%(prog)s {__version__}",
+        help="Print version and exit.",
+    )
+
+    #
+    # developer options
+    #
+    parser.add_argument(
+        "--backtrace",
+        action="store_true",
+        help="DEVELOPER: show exception backtraces as extra "
+        "debugging output",
+    )
+
+    parser.add_argument(
+        "-d",
+        "--debug",
+        action="store_true",
+        default=False,
+        help="DEVELOPER: output additional debugging "
+        "information to the screen and log file.",
+    )
+
+    return parser
diff --git a/.lib/git-fleximod/git_fleximod/git_fleximod.py b/.lib/git-fleximod/git_fleximod/git_fleximod.py
new file mode 100755
index 0000000000..103cc82a50
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/git_fleximod.py
@@ -0,0 +1,601 @@
+#!/usr/bin/env python
+import sys
+
+MIN_PYTHON = (3, 7)
+if sys.version_info < MIN_PYTHON:
+    sys.exit("Python %s.%s or later is required." % MIN_PYTHON)
+
+import os
+import shutil
+import logging
+import textwrap
+from git_fleximod import utils
+from git_fleximod import cli
+from git_fleximod.gitinterface import GitInterface
+from git_fleximod.gitmodules import GitModules
+from configparser import NoOptionError
+
+# logger variable is global
+logger = None
+
+
+def fxrequired_allowed_values():
+    return ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"]
+
+
+def commandline_arguments(args=None):
+    parser = cli.get_parser()
+
+    if args:
+        options = parser.parse_args(args)
+    else:
+        options = parser.parse_args()
+
+    # explicitly listing a component overrides the optional flag
+    if options.optional or options.components:
+        fxrequired = [
+            "ToplevelRequired",
+            "ToplevelOptional",
+            "AlwaysRequired",
+            "AlwaysOptional",
+        ]
+    else:
+        fxrequired = ["ToplevelRequired", "AlwaysRequired"]
+
+    action = options.action
+    if not action:
+        action = "update"
+    handlers = [logging.StreamHandler()]
+
+    if options.debug:
+        try:
+            open("fleximod.log", "w")
+        except PermissionError:
+            sys.exit("ABORT: Could not write file fleximod.log")
+        level = logging.DEBUG
+        handlers.append(logging.FileHandler("fleximod.log"))
+    elif options.verbose:
+        level = logging.INFO
+    else:
+        level = logging.WARNING
+    # Configure the root logger
+    logging.basicConfig(
+        level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers
+    )
+
+    if hasattr(options, "version"):
+        exit()
+
+    return (
+        options.path,
+        options.gitmodules,
+        fxrequired,
+        options.components,
+        options.exclude,
+        options.force,
+        action,
+    )
+
+
+def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"):
+    """
+    This function performs a sparse checkout of a git submodule. It does so by first creating the .git/info/sparse-checkout file
+    in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again.
+    Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important
+    because the submodule may have a large number of files, and checking out the entire submodule only to remove it again would waste
+    time and disk space.
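+
+    For reference, the sparse-checkout paths file uses git's "full pattern set"
+    (gitignore-style patterns). A hypothetical example file containing:
+        /src/
+        !/src/tests/
+    would check out src/ while excluding src/tests/.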
+
+    Parameters:
+    root_dir (str): The root directory for the git operation.
+    name (str): The name of the submodule.
+    url (str): The URL of the submodule.
+    path (str): The path to the submodule.
+    sparsefile (str): The path to the sparse-checkout paths file for the submodule.
+    tag (str, optional): The tag to checkout. Defaults to "master".
+
+    Returns:
+    None
+    """
+    logger.info("Called sparse_checkout for {}".format(name))
+    rgit = GitInterface(root_dir, logger)
+    superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree")
+    if superroot:
+        gitroot = superroot.strip()
+    else:
+        gitroot = root_dir.strip()
+    assert os.path.isdir(os.path.join(gitroot, ".git"))
+    # first create the module directory
+    if not os.path.isdir(os.path.join(root_dir, path)):
+        os.makedirs(os.path.join(root_dir, path))
+
+    # initialize a new git repo and set the sparse checkout flag
+    sprep_repo = os.path.join(root_dir, path)
+    sprepo_git = GitInterface(sprep_repo, logger)
+    if os.path.exists(os.path.join(sprep_repo, ".git")):
+        try:
+            logger.info("Submodule {} found".format(name))
+            chk = sprepo_git.config_get_value("core", "sparseCheckout")
+            if chk == "true":
+                logger.info("Sparse submodule {} already checked out".format(name))
+                return
+        except NoOptionError:
+            logger.debug("Sparse submodule {} not present".format(name))
+        except Exception as e:
+            utils.fatal_error("Unexpected error {} occurred.".format(e))
+
+    sprepo_git.config_set_value("core", "sparseCheckout", "true")
+
+    # set the repository remote
+
+    logger.info("Setting remote origin in {}/{}".format(root_dir, path))
+    status = sprepo_git.git_operation("remote", "-v")
+    if url not in status:
+        sprepo_git.git_operation("remote", "add", "origin", url)
+
+    topgit = os.path.join(gitroot, ".git")
+
+    if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")):
+        with open(os.path.join(root_dir, ".git")) as f:
+            gitpath = os.path.relpath(
+                os.path.join(root_dir, f.read().split()[1]),
+                start=os.path.join(root_dir, path),
+            )
+        topgit = os.path.join(gitpath, "modules")
+    else:
+        topgit = os.path.relpath(
+            os.path.join(root_dir, ".git", "modules"),
+            start=os.path.join(root_dir, path),
+        )
+
+    with utils.pushd(sprep_repo):
+        if not os.path.isdir(topgit):
+            os.makedirs(topgit)
+    topgit += os.sep + name
+
+    if os.path.isdir(os.path.join(root_dir, path, ".git")):
+        with utils.pushd(sprep_repo):
+            shutil.move(".git", topgit)
+            with open(".git", "w") as f:
+                f.write("gitdir: " + os.path.relpath(topgit))
+            # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo)))
+            gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout"))
+        if os.path.isfile(gitsparse):
+            logger.warning(
+                "submodule {} is already initialized {}".format(name, topgit)
+            )
+            return
+
+        with utils.pushd(sprep_repo):
+            shutil.copy(sparsefile, gitsparse)
+
+    # Finally checkout the repo
+    sprepo_git.git_operation("fetch", "origin", "--tags")
+    sprepo_git.git_operation("checkout", tag)
+
+    print(f"Successfully checked out {name:>20} at {tag}")
+    rgit.config_set_value(f'submodule "{name}"', "active", "true")
+    rgit.config_set_value(f'submodule "{name}"', "url", url)
+
+
+def single_submodule_checkout(
+    root, name, path, url=None, tag=None, force=False, optional=False
+):
+    """
+    This function checks out a single git submodule.
+
+    Parameters:
+    root (str): The root directory for the git operation.
+    name (str): The name of the submodule.
+    path (str): The path to the submodule.
+    url (str, optional): The URL of the submodule. Defaults to None.
+    tag (str, optional): The tag to checkout. Defaults to None.
+    force (bool, optional): If set to True, forces the checkout operation. Defaults to False.
+    optional (bool, optional): If set to True, the submodule is considered optional. Defaults to False.
+
+    Returns:
+    None
+    """
+    # function implementation...
+    git = GitInterface(root, logger)
+    repodir = os.path.join(root, path)
+    logger.info("Checkout {} into {}/{}".format(name, root, path))
+    # if url is provided update to the new url
+    tmpurl = None
+    repo_exists = False
+    if os.path.exists(os.path.join(repodir, ".git")):
+        logger.info("Submodule {} already checked out".format(name))
+        repo_exists = True
+    # Look for a .gitmodules file in the newly checked-out repo
+    if not repo_exists and url:
+        # ssh urls cause problems for those who don't have git accounts with ssh keys defined,
+        # but cime has one since e3sm prefers ssh to https; because the .gitmodules file was
+        # opened with a GitModules object we don't need to worry about restoring the file here,
+        # it will be done by the GitModules class
+        if url.startswith("git@"):
+            tmpurl = url
+            url = url.replace("git@github.com:", "https://github.com/")
+            git.git_operation("clone", url, path)
+            smgit = GitInterface(repodir, logger)
+            if not tag:
+                tag = smgit.git_operation("describe", "--tags", "--always").rstrip()
+            smgit.git_operation("checkout", tag)
+            # Now need to move the .git dir to the submodule location
+            rootdotgit = os.path.join(root, ".git")
+            if os.path.isfile(rootdotgit):
+                with open(rootdotgit) as f:
+                    line = f.readline()
+                    if line.startswith("gitdir: "):
+                        rootdotgit = line[8:].rstrip()
+
+            newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name))
+            if os.path.exists(newpath):
+                shutil.rmtree(os.path.join(repodir, ".git"))
+            else:
+                shutil.move(os.path.join(repodir, ".git"), newpath)
+
+            with open(os.path.join(repodir, ".git"), "w") as f:
+                f.write("gitdir: " + os.path.relpath(newpath, start=repodir))
+
+    if not os.path.exists(repodir):
+        parent = os.path.dirname(repodir)
+        if not os.path.isdir(parent):
+            os.makedirs(parent)
+        git.git_operation("submodule", "add", "--name", name, "--", url, path)
+
+    if not repo_exists or not tmpurl:
+        git.git_operation("submodule", "update", "--init", "--", path)
+
+    if os.path.exists(os.path.join(repodir, ".gitmodules")):
+        # recursively handle this checkout
+        print(f"Recursively checking out submodules of {name}")
+        gitmodules = GitModules(logger, confpath=repodir)
+        requiredlist = ["AlwaysRequired"]
+        if optional:
+            requiredlist.append("AlwaysOptional")
+        submodules_checkout(gitmodules, repodir, requiredlist, force=force)
+    if not os.path.exists(os.path.join(repodir, ".git")):
+        utils.fatal_error(
+            f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}"
+        )
+
+    if tmpurl:
+        print(git.git_operation("restore", ".gitmodules"))
+
+    return
+
+
+def submodules_status(gitmodules, root_dir, toplevel=False):
+    testfails = 0
+    localmods = 0
+    needsupdate = 0
+    for name in gitmodules.sections():
+        path = gitmodules.get(name, "path")
+        tag = gitmodules.get(name, "fxtag")
+        required = gitmodules.get(name, "fxrequired")
+        level = required and "Toplevel" in required
+        if not path:
+            utils.fatal_error("No path found in .gitmodules for {}".format(name))
+        newpath = os.path.join(root_dir, path)
+        logger.debug("newpath is {}".format(newpath))
+        if not os.path.exists(os.path.join(newpath, ".git")):
+            rootgit = GitInterface(root_dir, logger)
+            # submodule commands use path, not name
+            url = gitmodules.get(name, "url")
+            url = url.replace("git@github.com:", "https://github.com/")
+            tags = rootgit.git_operation("ls-remote", "--tags", url)
+            atag = None
+            needsupdate += 1
+            if not toplevel and level:
+                continue
+            for htag in tags.split("\n"):
+                if tag and tag in htag:
+                    atag = (htag.split()[1])[10:]
+                    break
+            if tag and tag == atag:
+                print(f"e {name:>20} not checked out, aligned at tag {tag}")
+            elif tag:
+                ahash = rootgit.git_operation(
+                    "submodule", "status", "{}".format(path)
+                ).rstrip()
+                ahash = ahash[1 : len(tag) + 1]
+                if tag == ahash:
+                    print(f"e {name:>20} not checked out, aligned at hash {ahash}")
+                else:
+                    print(
+                        f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}"
+                    )
+                    testfails += 1
+            else:
+                print(f"e {name:>20} has no fxtag defined in .gitmodules")
+                testfails += 1
+        else:
+            with utils.pushd(newpath):
+                git = GitInterface(newpath, logger)
+                atag = git.git_operation("describe", "--tags", "--always").rstrip()
+                ahash = git.git_operation("status").partition("\n")[0].split()[-1]
+                if tag and atag == tag:
+                    print(f" {name:>20} at tag {tag}")
+                elif tag and ahash[: len(tag)] == tag:
+                    print(f" {name:>20} at hash {ahash}")
+                elif atag == ahash:
+                    print(f" {name:>20} at hash {ahash}")
+                elif tag:
+                    print(
+                        f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}"
+                    )
+                    testfails += 1
+                    needsupdate += 1
+                else:
+                    print(
+                        f"e {name:>20} has no fxtag defined in .gitmodules, module at {atag}"
+                    )
+                    testfails += 1
+
+                status = git.git_operation("status", "--ignore-submodules")
+                if "nothing to commit" not in status:
+                    localmods = localmods + 1
+                    print("M" + textwrap.indent(status, " "))
+
+    return testfails, localmods, needsupdate
+
+
+def submodules_update(gitmodules, root_dir, requiredlist, force):
+    _, localmods, needsupdate = submodules_status(gitmodules, root_dir)
+
+    if localmods and not force:
+        local_mods_output()
+        return
+    if needsupdate == 0:
+        return
+
+    for name in gitmodules.sections():
+        fxtag = gitmodules.get(name, "fxtag")
+        path = gitmodules.get(name, "path")
+        url = gitmodules.get(name, "url")
+        logger.info(
+            "name={} path={} url={} fxtag={} requiredlist={}".format(
+                name, os.path.join(root_dir, path), url, fxtag, requiredlist
+            )
+        )
+        # if not os.path.exists(os.path.join(root_dir,path, ".git")):
+        fxrequired = gitmodules.get(name, "fxrequired")
+        assert fxrequired in fxrequired_allowed_values()
+        rgit = GitInterface(root_dir, logger)
+        superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree")
+
+        fxsparse = gitmodules.get(name, "fxsparse")
+
+        if (
+            fxrequired
+            and (superroot and "Toplevel" in fxrequired)
+            or fxrequired not in requiredlist
+        ):
+            if "ToplevelOptional" == fxrequired:
+                print("Skipping optional component {}".format(name))
+            continue
+        if fxsparse:
+            logger.debug(
+                "Calling submodule_sparse_checkout({}, {}, {}, {}, {}, {})".format(
+                    root_dir, name, url, path, fxsparse, fxtag
+                )
+            )
+            submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag)
+        else:
+            logger.info(
+                "Calling submodule_checkout({},{},{},{})".format(
+                    root_dir, name, path, url
+                )
+            )
+
+            single_submodule_checkout(
+                root_dir,
+                name,
+                path,
+                url=url,
+                tag=fxtag,
+                force=force,
+                optional=("AlwaysOptional" in requiredlist),
+            )
+
+        if os.path.exists(os.path.join(path, ".git")):
+            submoddir = os.path.join(root_dir, path)
+            with utils.pushd(submoddir):
+                git = GitInterface(submoddir, logger)
+                # first make sure the url is correct
+                upstream = git.git_operation("ls-remote", "--get-url").rstrip()
+                newremote = "origin"
+                if upstream != url:
+                    # TODO - this needs to be a unique name
+                    remotes = git.git_operation("remote", "-v")
+                    if url in remotes:
+                        for line in remotes:
+                            if url in line and "fetch" in line:
+                                newremote = line.split()[0]
+                                break
+                    else:
+                        i = 0
+                        while newremote in remotes:
+                            i = i + 1
+                            newremote = f"newremote.{i:02d}"
+                        git.git_operation("remote", "add", newremote, url)
+
+                tags = git.git_operation("tag", "-l")
+                if fxtag and fxtag not in tags:
+                    git.git_operation("fetch", newremote, "--tags")
+                atag = git.git_operation("describe", "--tags", "--always").rstrip()
+                if fxtag and fxtag != atag:
+                    try:
+                        git.git_operation("checkout", fxtag)
+                        print(f"{name:>20} updated to {fxtag}")
+                    except Exception as error:
+                        print(error)
+                elif not fxtag:
+                    print(f"No fxtag found for submodule {name:>20}")
+                else:
+                    print(f"{name:>20} up to date.")
+
+
+def local_mods_output():
+    text = """\
+    The submodules labeled with 'M' above are not in a clean state.
+    The following are options for how to proceed:
+    (1) Go into each submodule which is not in a clean state and issue a 'git status'.
+        Either revert or commit your changes so that the submodule is in a clean state.
+    (2) Use the --force option to git-fleximod.
+    (3) Name the particular submodules to update on the git-fleximod command line.
+    (4) As a last resort you can remove the submodule (via 'rm -fr [directory]'),
+        then rerun git-fleximod update.
+"""
+    print(text)
+
+
+# checkout is done by update if required, so this function may be deprecated
+def submodules_checkout(gitmodules, root_dir, requiredlist, force=False):
+    """
+    This function checks out all git submodules based on the provided parameters.
+
+    Parameters:
+    gitmodules (ConfigParser): The gitmodules configuration.
+    root_dir (str): The root directory for the git operation.
+    requiredlist (list): The list of required modules.
+    force (bool, optional): If set to True, forces the checkout operation. Defaults to False.
+
+    Returns:
+    None
+    """
+    # function implementation...
+    print("")
+    _, localmods, needsupdate = submodules_status(gitmodules, root_dir)
+    if localmods and not force:
+        local_mods_output()
+        return
+    if not needsupdate:
+        return
+    for name in gitmodules.sections():
+        fxrequired = gitmodules.get(name, "fxrequired")
+        fxsparse = gitmodules.get(name, "fxsparse")
+        fxtag = gitmodules.get(name, "fxtag")
+        path = gitmodules.get(name, "path")
+        url = gitmodules.get(name, "url")
+        if fxrequired and fxrequired not in requiredlist:
+            if "Optional" in fxrequired:
+                print("Skipping optional component {}".format(name))
+            continue
+
+        if fxsparse:
+            logger.debug(
+                "Calling submodule_sparse_checkout({}, {}, {}, {}, {}, {})".format(
+                    root_dir, name, url, path, fxsparse, fxtag
+                )
+            )
+            submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag)
+        else:
+            logger.debug(
+                "Calling submodule_checkout({},{},{})".format(root_dir, name, path)
+            )
+            single_submodule_checkout(
+                root_dir,
+                name,
+                path,
+                url=url,
+                tag=fxtag,
+                force=force,
+                optional="AlwaysOptional" in requiredlist,
+            )
+
+
+def submodules_test(gitmodules, root_dir):
+    """
+    This function tests the git submodules based on the provided parameters.
+
+    It first checks that fxtags are present and in sync with submodule hashes.
+    Then it ensures that urls are consistent with fxurls (not forks and not ssh)
+    and that sparse checkout files exist.
+
+    Parameters:
+    gitmodules (ConfigParser): The gitmodules configuration.
+    root_dir (str): The root directory for the git operation.
+
+    Returns:
+        int: The total count of test failures, local modifications, and
+        submodules needing an update (zero means fully consistent).
+    """
+    # First check that fxtags are present and in sync with submodule hashes
+    testfails, localmods, needsupdate = submodules_status(gitmodules, root_dir)
+    print("")
+    # Then make sure that urls are consistent with fxurls (not forks and not ssh)
+    # and that sparse checkout files exist
+    for name in gitmodules.sections():
+        url = gitmodules.get(name, "url")
+        fxurl = gitmodules.get(name, "fxDONOTUSEurl")
+        fxsparse = gitmodules.get(name, "fxsparse")
+        path = gitmodules.get(name, "path")
+        fxurl = fxurl[:-4] if fxurl and fxurl.endswith(".git") else fxurl
+        url = url[:-4] if url.endswith(".git") else url
+        if not fxurl or url.lower() != fxurl.lower():
+            print(f"{name:>20} url {url} not in sync with required {fxurl}")
+            testfails += 1
+        if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)):
+            print(f"{name:>20} sparse checkout file {fxsparse} not found")
+            testfails += 1
+    return testfails + localmods + needsupdate
+
+
+def main():
+    (
+        root_dir,
+        file_name,
+        fxrequired,
+        includelist,
+        excludelist,
+        force,
+        action,
+    ) = commandline_arguments()
+    # Get a logger for the package
+    global logger
+    logger = logging.getLogger(__name__)
+
+    logger.info("action is {}".format(action))
+
+    if not os.path.isfile(os.path.join(root_dir, file_name)):
+        file_path = utils.find_upwards(root_dir, file_name)
+
+        if file_path is None:
+            utils.fatal_error(
+                "No {} found in {} or any of its parents".format(file_name, root_dir)
+            )
+
+        root_dir = os.path.dirname(file_path)
+    logger.info(
+        "root_dir is {} includelist={} excludelist={}".format(
+            root_dir, includelist, excludelist
+        )
+    )
+    gitmodules = GitModules(
+        logger,
+        confpath=root_dir,
+        conffile=file_name,
+        includelist=includelist,
+        excludelist=excludelist,
+    )
+    if not gitmodules.sections():
+        sys.exit("No submodule components found")
+    retval = 0
+    if action == "update":
+        submodules_update(gitmodules, root_dir, fxrequired, force)
+    elif action == "status":
+        tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True)
+        if tfails + lmods + updates > 0:
+            print(
+                f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n"
+            )
+            if lmods > 0:
+                local_mods_output()
+    elif action == "test":
+        retval = submodules_test(gitmodules, root_dir)
+    else:
+        utils.fatal_error(f"unrecognized action request {action}")
+    return retval
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/.lib/git-fleximod/git_fleximod/gitinterface.py b/.lib/git-fleximod/git_fleximod/gitinterface.py
new file mode 100644
index 0000000000..93ae38ecde
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/gitinterface.py
@@ -0,0 +1,79 @@
+import os
+import sys
+from . 
import utils +from pathlib import Path + +class GitInterface: + def __init__(self, repo_path, logger): + logger.debug("Initialize GitInterface for {}".format(repo_path)) + if isinstance(repo_path, str): + self.repo_path = Path(repo_path).resolve() + elif isinstance(repo_path, Path): + self.repo_path = repo_path.resolve() + else: + raise TypeError("repo_path must be a str or Path object") + self.logger = logger + try: + import git + + self._use_module = True + try: + self.repo = git.Repo(str(self.repo_path)) # Initialize GitPython repo + except git.exc.InvalidGitRepositoryError: + self.git = git + self._init_git_repo() + msg = "Using GitPython interface to git" + except ImportError: + self._use_module = False + if not (self.repo_path / ".git").exists(): + self._init_git_repo() + msg = "Using shell interface to git" + self.logger.info(msg) + + def _git_command(self, operation, *args): + self.logger.info(operation) + if self._use_module and operation != "submodule": + try: + return getattr(self.repo.git, operation)(*args) + except Exception as e: + sys.exit(e) + else: + return ["git", "-C", str(self.repo_path), operation] + list(args) + + def _init_git_repo(self): + if self._use_module: + self.repo = self.git.Repo.init(str(self.repo_path)) + else: + command = ("git", "-C", str(self.repo_path), "init") + utils.execute_subprocess(command) + + # pylint: disable=unused-argument + def git_operation(self, operation, *args, **kwargs): + command = self._git_command(operation, *args) + self.logger.info(command) + if isinstance(command, list): + try: + return utils.execute_subprocess(command, output_to_caller=True) + except Exception as e: + sys.exit(e) + else: + return command + + def config_get_value(self, section, name): + if self._use_module: + config = self.repo.config_reader() + return config.get_value(section, name) + else: + cmd = ("git", "-C", str(self.repo_path), "config", "--get", f"{section}.{name}") + output = utils.execute_subprocess(cmd, output_to_caller=True) + return output.strip() + + def config_set_value(self, section, name, value): + if self._use_module: + with self.repo.config_writer() as writer: + writer.set_value(section, name, value) + writer.release() # Ensure changes are saved + else: + cmd = ("git", "-C", str(self.repo_path), "config", f"{section}.{name}", value) + self.logger.info(cmd) + utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/.lib/git-fleximod/git_fleximod/gitmodules.py b/.lib/git-fleximod/git_fleximod/gitmodules.py new file mode 100644 index 0000000000..68c82d066f --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/gitmodules.py @@ -0,0 +1,97 @@ +import shutil +from pathlib import Path +from configparser import RawConfigParser, ConfigParser +from .lstripreader import LstripReader + + +class GitModules(RawConfigParser): + def __init__( + self, + logger, + confpath=Path.cwd(), + conffile=".gitmodules", + includelist=None, + excludelist=None, + ): + """ + confpath: Path to the directory containing the .gitmodules file (defaults to the current working directory). + conffile: Name of the configuration file (defaults to .gitmodules). + includelist: Optional list of submodules to include. + excludelist: Optional list of submodules to exclude. 
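+
+        Illustrative usage (the path is hypothetical):
+            gm = GitModules(logger, confpath="/path/to/clone")
+            for name in gm.sections():
+                print(name, gm.get(name, "url"))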
+ """ + self.logger = logger + self.logger.debug( + "Creating a GitModules object {} {} {} {}".format( + confpath, conffile, includelist, excludelist + ) + ) + super().__init__() + self.conf_file = (Path(confpath) / Path(conffile)) + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=conffile) + self.includelist = includelist + self.excludelist = excludelist + self.isdirty = False + + def reload(self): + self.clear() + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=self.conf_file) + + + def set(self, name, option, value): + """ + Sets a configuration value for a specific submodule: + Ensures the appropriate section exists for the submodule. + Calls the parent class's set method to store the value. + """ + self.isdirty = True + self.logger.debug("set called {} {} {}".format(name, option, value)) + section = f'submodule "{name}"' + if not self.has_section(section): + self.add_section(section) + super().set(section, option, str(value)) + + # pylint: disable=redefined-builtin, arguments-differ + def get(self, name, option, raw=False, vars=None, fallback=None): + """ + Retrieves a configuration value for a specific submodule: + Uses the parent class's get method to access the value. + Handles potential errors if the section or option doesn't exist. + """ + self.logger.debug("get called {} {}".format(name, option)) + section = f'submodule "{name}"' + try: + return ConfigParser.get( + self, section, option, raw=raw, vars=vars, fallback=fallback + ) + except ConfigParser.NoOptionError: + return None + + def save(self): + if self.isdirty: + self.logger.info("Writing {}".format(self.conf_file)) + with open(self.conf_file, "w") as fd: + self.write(fd) + self.isdirty = False + + def __del__(self): + self.save() + + def sections(self): + """Strip the submodule part out of section and just use the name""" + self.logger.debug("calling GitModules sections iterator") + names = [] + for section in ConfigParser.sections(self): + name = section[11:-1] + if self.includelist and name not in self.includelist: + continue + if self.excludelist and name in self.excludelist: + continue + names.append(name) + return names + + def items(self, name, raw=False, vars=None): + self.logger.debug("calling GitModules items for {}".format(name)) + section = f'submodule "{name}"' + return ConfigParser.items(section, raw=raw, vars=vars) diff --git a/.lib/git-fleximod/git_fleximod/lstripreader.py b/.lib/git-fleximod/git_fleximod/lstripreader.py new file mode 100644 index 0000000000..01d5580ee8 --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/lstripreader.py @@ -0,0 +1,43 @@ +class LstripReader(object): + "LstripReader formats .gitmodules files to be acceptable for configparser" + + def __init__(self, filename): + with open(filename, "r") as infile: + lines = infile.readlines() + self._lines = list() + self._num_lines = len(lines) + self._index = 0 + for line in lines: + self._lines.append(line.lstrip()) + + def readlines(self): + """Return all the lines from this object's file""" + return self._lines + + def readline(self, size=-1): + """Format and return the next line or raise StopIteration""" + try: + line = self.next() + except StopIteration: + line = "" + + if (size > 0) and (len(line) < size): + return line[0:size] + + return line + + def __iter__(self): + """Begin an iteration""" + self._index = 0 + return self + + def next(self): + """Return the next line or raise StopIteration""" + if self._index >= self._num_lines: + raise StopIteration + + 
+        self._index = self._index + 1
+        return self._lines[self._index - 1]
+
+    def __next__(self):
+        return self.next()
diff --git a/.lib/git-fleximod/git_fleximod/metoflexi.py b/.lib/git-fleximod/git_fleximod/metoflexi.py
new file mode 100755
index 0000000000..cc347db2dd
--- /dev/null
+++ b/.lib/git-fleximod/git_fleximod/metoflexi.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+from configparser import ConfigParser
+import sys
+import shutil
+from pathlib import Path
+import argparse
+import logging
+from git_fleximod.gitinterface import GitInterface
+from git_fleximod.gitmodules import GitModules
+from git_fleximod import utils
+
+logger = None
+
+def find_root_dir(filename=".git"):
+    d = Path.cwd()
+    root = Path(d.root)
+    while d != root:
+        attempt = d / filename
+        if attempt.is_dir():
+            return d
+        d = d.parent
+    return None
+
+
+def get_parser():
+    description = """
+    %(prog)s manages checking out groups of git submodules with additional support for Earth System Models
+    """
+    parser = argparse.ArgumentParser(
+        description=description, formatter_class=argparse.RawDescriptionHelpFormatter
+    )
+
+    parser.add_argument('-e', '--externals', nargs='?',
+                        default='Externals.cfg',
+                        help='The externals description filename. '
+                        'Default: %(default)s.')
+
+    parser.add_argument(
+        "-C",
+        "--path",
+        default=find_root_dir(),
+        help="Toplevel repository directory. Defaults to top git directory relative to current.",
+    )
+
+    parser.add_argument(
+        "-g",
+        "--gitmodules",
+        nargs="?",
+        default=".gitmodules",
+        help="The submodule description filename. Default: %(default)s.",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        default=0,
+        help="Output additional information to "
+        "the screen and log file. This flag can be "
+        "used up to two times, increasing the "
+        "verbosity level each time.",
+    )
+    parser.add_argument(
+        "-d",
+        "--debug",
+        action="store_true",
+        default=False,
+        help="DEVELOPER: output additional debugging "
+        "information to the screen and log file.",
+    )
+
+    return parser
+
+def commandline_arguments(args=None):
+    parser = get_parser()
+
+    options = parser.parse_args(args)
+    handlers = [logging.StreamHandler()]
+
+    if options.debug:
+        try:
+            open("fleximod.log", "w")
+        except PermissionError:
+            sys.exit("ABORT: Could not write file fleximod.log")
+        level = logging.DEBUG
+        handlers.append(logging.FileHandler("fleximod.log"))
+    elif options.verbose:
+        level = logging.INFO
+    else:
+        level = logging.WARNING
+    # Configure the root logger
+    logging.basicConfig(
+        level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers
+    )
+
+    return(
+        options.path,
+        options.gitmodules,
+        options.externals
+    )
+
+class ExternalRepoTranslator:
+    """
+    Translates external repositories configured in an INI-style externals file.
+    """
+
+    def __init__(self, rootpath, gitmodules, externals):
+        self.rootpath = rootpath
+        if gitmodules:
+            self.gitmodules = GitModules(logger, confpath=rootpath)
+        self.externals = (rootpath / Path(externals)).resolve()
+        print(f"Translating {self.externals}")
+        self.git = GitInterface(rootpath, logger)
+
+#    def __del__(self):
+#        if (self.rootpath / "save.gitignore"):
+
+
+    def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, protocol):
+        """
+        Translates a single repository based on configuration details.
+
+        Args:
+            section (str): The name of the external/submodule section being translated.
+            tag (str): The tag to use for the external repository.
+            url (str): The URL of the external repository.
+            path (str): The relative path within the main repository for the external repository.
+            efile (str): Path to a nested externals description file, if any.
+            hash_ (str): The commit hash to checkout (if applicable).
+            sparse (str): Path to a sparse-checkout paths file (if applicable).
+            protocol (str): The protocol to use (e.g., 'git', 'externals_only').
+        """
+        assert protocol != "svn", "SVN protocol is not currently supported"
+        print(f"Translating repository {section}")
+        if efile:
+            file_path = Path(path) / Path(efile)
+            newroot = (self.rootpath / file_path).parent.resolve()
+            if not newroot.exists():
+                newroot.mkdir(parents=True)
+            logger.info("Newroot is {}".format(newroot))
+            newt = ExternalRepoTranslator(newroot, ".gitmodules", efile)
+            newt.translate_repo()
+        if protocol == "externals_only":
+            if tag:
+                self.gitmodules.set(section, "fxtag", tag)
+            if hash_:
+                self.gitmodules.set(section, "fxtag", hash_)
+
+            self.gitmodules.set(section, "fxDONOTUSEurl", url)
+            if sparse:
+                self.gitmodules.set(section, "fxsparse", sparse)
+            self.gitmodules.set(section, "fxrequired", "ToplevelRequired")
+        else:
+            newpath = (self.rootpath / Path(path))
+            if newpath.exists():
+                shutil.rmtree(newpath)
+            logger.info("Creating directory {}".format(newpath))
+            newpath.mkdir(parents=True)
+            if tag:
+                logger.info("cloning {}".format(section))
+                try:
+                    self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path)
+                except Exception:
+                    self.git.git_operation("clone", url, path)
+                    with utils.pushd(newpath):
+                        ngit = GitInterface(newpath, logger)
+                        ngit.git_operation("checkout", tag)
+            if hash_:
+                self.git.git_operation("clone", url, path)
+                git = GitInterface(newpath, logger)
+                git.git_operation("fetch", "origin")
+                git.git_operation("checkout", hash_)
+            if sparse:
+                print("setting as sparse submodule {}".format(section))
+                sparsefile = (newpath / Path(sparse))
+                newfile = (newpath / ".git" / "info" / "sparse-checkout")
+                print(f"sparsefile {sparsefile} newfile {newfile}")
+                shutil.copy(sparsefile, newfile)
+
+            logger.info("adding submodule {}".format(section))
+            self.gitmodules.save()
+            self.git.git_operation("submodule", "add", "-f", "--name", section, url, path)
+            self.git.git_operation("submodule", "absorbgitdirs")
+            self.gitmodules.reload()
+            if tag:
+                self.gitmodules.set(section, "fxtag", tag)
+            if hash_:
+                self.gitmodules.set(section, "fxtag", hash_)
+
+            self.gitmodules.set(section, "fxDONOTUSEurl", url)
+            if sparse:
+                self.gitmodules.set(section, "fxsparse", sparse)
+            self.gitmodules.set(section, "fxrequired", "ToplevelRequired")
+
+
+    def translate_repo(self):
+        """
+        Translates all external repositories defined in this object's
+        externals description file (self.externals), turning each INI
+        section into the corresponding .gitmodules entries.
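+
+        For illustration, a hypothetical Externals.cfg section using only the
+        keys read below (tag, repo_url, local_path, externals, hash, sparse,
+        protocol):
+            [cime]
+            local_path = cime
+            repo_url = https://github.com/jedwards4b/cime
+            tag = cime6.0.198_rme01
+            protocol = git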
+ """ + econfig = ConfigParser() + econfig.read((self.rootpath / Path(self.externals))) + + for section in econfig.sections(): + if section == "externals_description": + logger.info("skipping section {}".format(section)) + return + logger.info("Translating section {}".format(section)) + tag = econfig.get(section, "tag", raw=False, fallback=None) + url = econfig.get(section, "repo_url", raw=False, fallback=None) + path = econfig.get(section, "local_path", raw=False, fallback=None) + efile = econfig.get(section, "externals", raw=False, fallback=None) + hash_ = econfig.get(section, "hash", raw=False, fallback=None) + sparse = econfig.get(section, "sparse", raw=False, fallback=None) + protocol = econfig.get(section, "protocol", raw=False, fallback=None) + + self.translate_single_repo(section, tag, url, path, efile, hash_, sparse, protocol) + + + +def _main(): + rootpath, gitmodules, externals = commandline_arguments() + global logger + logger = logging.getLogger(__name__) + with utils.pushd(rootpath): + t = ExternalRepoTranslator(Path(rootpath), gitmodules, externals) + logger.info("Translating {}".format(rootpath)) + t.translate_repo() + + +if __name__ == "__main__": + sys.exit(_main()) diff --git a/.lib/git-fleximod/git_fleximod/utils.py b/.lib/git-fleximod/git_fleximod/utils.py new file mode 100644 index 0000000000..7cc1de38cc --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/utils.py @@ -0,0 +1,365 @@ +#!/usr/bin/env python3 +""" +Common public utilities for manic package + +""" + +import logging +import os +import subprocess +import sys +from threading import Timer +from pathlib import Path + +LOCAL_PATH_INDICATOR = "." +# --------------------------------------------------------------------- +# +# functions to massage text for output and other useful utilities +# +# --------------------------------------------------------------------- +from contextlib import contextmanager + + +@contextmanager +def pushd(new_dir): + """context for chdir. usage: with pushd(new_dir)""" + previous_dir = os.getcwd() + os.chdir(new_dir) + try: + yield + finally: + os.chdir(previous_dir) + + +def log_process_output(output): + """Log each line of process output at debug level so it can be + filtered if necessary. By default, output is a single string, and + logging.debug(output) will only put log info heading on the first + line. This makes it hard to filter with grep. + + """ + output = output.split("\n") + for line in output: + logging.debug(line) + + +def printlog(msg, **kwargs): + """Wrapper script around print to ensure that everything printed to + the screen also gets logged. + + """ + logging.info(msg) + if kwargs: + print(msg, **kwargs) + else: + print(msg) + sys.stdout.flush() + + +def find_upwards(root_dir, filename): + """Find a file in root dir or any of it's parents""" + d = Path(root_dir) + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.exists(): + return attempt + d = d.parent + return None + + +def last_n_lines(the_string, n_lines, truncation_message=None): + """Returns the last n lines of the given string + + Args: + the_string: str + n_lines: int + truncation_message: str, optional + + Returns a string containing the last n lines of the_string + + If truncation_message is provided, the returned string begins with + the given message if and only if the string is greater than n lines + to begin with. 
+ """ + + lines = the_string.splitlines(True) + if len(lines) <= n_lines: + return_val = the_string + else: + lines_subset = lines[-n_lines:] + str_truncated = "".join(lines_subset) + if truncation_message: + str_truncated = truncation_message + "\n" + str_truncated + return_val = str_truncated + + return return_val + + +def indent_string(the_string, indent_level): + """Indents the given string by a given number of spaces + + Args: + the_string: str + indent_level: int + + Returns a new string that is the same as the_string, except that + each line is indented by 'indent_level' spaces. + + In python3, this can be done with textwrap.indent. + """ + + lines = the_string.splitlines(True) + padding = " " * indent_level + lines_indented = [padding + line for line in lines] + return "".join(lines_indented) + + +# --------------------------------------------------------------------- +# +# error handling +# +# --------------------------------------------------------------------- + + +def fatal_error(message): + """ + Error output function + """ + logging.error(message) + raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) + + +# --------------------------------------------------------------------- +# +# Data conversion / manipulation +# +# --------------------------------------------------------------------- +def str_to_bool(bool_str): + """Convert a sting representation of as boolean into a true boolean. + + Conversion should be case insensitive. + """ + value = None + str_lower = bool_str.lower() + if str_lower in ("true", "t"): + value = True + elif str_lower in ("false", "f"): + value = False + if value is None: + msg = ( + 'ERROR: invalid boolean string value "{0}". ' + 'Must be "true" or "false"'.format(bool_str) + ) + fatal_error(msg) + return value + + +REMOTE_PREFIXES = ["http://", "https://", "ssh://", "git@"] + + +def is_remote_url(url): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + """ + remote_url = False + for prefix in REMOTE_PREFIXES: + if url.startswith(prefix): + remote_url = True + return remote_url + + +def split_remote_url(url): + """check if the user provided a local file path or a + remote. If remote, try to strip off protocol info. + + """ + remote_url = is_remote_url(url) + if not remote_url: + return url + + for prefix in REMOTE_PREFIXES: + url = url.replace(prefix, "") + + if "@" in url: + url = url.split("@")[1] + + if ":" in url: + url = url.split(":")[1] + + return url + + +def expand_local_url(url, field): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + Note: local paths of LOCAL_PATH_INDICATOR have special meaning and + represent local copy only, don't work with the remotes. + + """ + remote_url = is_remote_url(url) + if not remote_url: + if url.strip() == LOCAL_PATH_INDICATOR: + pass + else: + url = os.path.expandvars(url) + url = os.path.expanduser(url) + if not os.path.isabs(url): + msg = ( + 'WARNING: Externals description for "{0}" contains a ' + "url that is not remote and does not expand to an " + "absolute path. 
+                    "fail.\n\nurl={1}".format(field, url)
+                )
+                printlog(msg)
+        else:
+            url = os.path.normpath(url)
+    return url
+
+
+# ---------------------------------------------------------------------
+#
+# subprocess
+#
+# ---------------------------------------------------------------------
+
+# Give the user a helpful message if we detect that a command seems to
+# be hanging.
+_HANGING_SEC = 300
+
+
+def _hanging_msg(working_directory, command):
+    print(
+        """
+
+Command '{command}'
+from directory {working_directory}
+has taken {hanging_sec} seconds. It may be hanging.
+
+The command will continue to run, but you may want to abort
+manage_externals with ^C and investigate. A possible cause of hangs is
+when svn or git require authentication to access a private
+repository. On some systems, svn and git requests for authentication
+information will not be displayed to the user. In this case, the program
+will appear to hang. Ensure you can run svn and git manually and access
+all repositories without entering your authentication information.
+
+""".format(
+            command=command,
+            working_directory=working_directory,
+            hanging_sec=_HANGING_SEC,
+        )
+    )
+
+
+def execute_subprocess(commands, status_to_caller=False, output_to_caller=False):
+    """Wrapper around subprocess.check_output to handle common
+    exceptions.
+
+    check_output runs a command with arguments and waits
+    for it to complete.
+
+    check_output raises an exception on a nonzero return code. If
+    status_to_caller is true, execute_subprocess returns the subprocess
+    return code; otherwise execute_subprocess treats a non-zero return
+    status as an error and raises an exception.
+
+    """
+    cwd = os.getcwd()
+    msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd)
+    logging.info(msg)
+    commands_str = " ".join(str(element) for element in commands)
+    logging.info(commands_str)
+    return_to_caller = status_to_caller or output_to_caller
+    status = -1
+    output = ""
+    hanging_timer = Timer(
+        _HANGING_SEC,
+        _hanging_msg,
+        kwargs={"working_directory": cwd, "command": commands_str},
+    )
+    hanging_timer.start()
+    try:
+        output = subprocess.check_output(
+            commands, stderr=subprocess.STDOUT, universal_newlines=True
+        )
+        log_process_output(output)
+        status = 0
+    except OSError as error:
+        msg = failed_command_msg(
+            "Command execution failed. Does the executable exist?", commands
+        )
+        logging.error(error)
+        fatal_error(msg)
+    except ValueError as error:
+        msg = failed_command_msg(
+            "DEV_ERROR: Invalid arguments trying to run subprocess", commands
+        )
+        logging.error(error)
+        fatal_error(msg)
+    except subprocess.CalledProcessError as error:
+        # Only report the error if we are NOT returning to the
+        # caller. If we are returning to the caller, then it may be a
+        # simple status check. If returning, it is the caller's
+        # responsibility to determine whether an error occurred and to
+        # handle it appropriately.
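+        # (Note) even when returning to the caller, the nonzero status is
+        # still recorded below via error.returncode, so a pure status check
+        # can observe it.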
+ if not return_to_caller: + msg_context = ( + "Process did not run successfully; " + "returned status {0}".format(error.returncode) + ) + msg = failed_command_msg(msg_context, commands, output=error.output) + logging.error(error) + logging.error(msg) + log_process_output(error.output) + fatal_error(msg) + status = error.returncode + finally: + hanging_timer.cancel() + + if status_to_caller and output_to_caller: + ret_value = (status, output) + elif status_to_caller: + ret_value = status + elif output_to_caller: + ret_value = output + else: + ret_value = None + + return ret_value + + +def failed_command_msg(msg_context, command, output=None): + """Template for consistent error messages from subprocess calls. + + If 'output' is given, it should provide the output from the failed + command + """ + + if output: + output_truncated = last_n_lines( + output, 20, truncation_message="[... Output truncated for brevity ...]" + ) + errmsg = ( + "Failed with output:\n" + indent_string(output_truncated, 4) + "\nERROR: " + ) + else: + errmsg = "" + + command_str = " ".join(command) + errmsg += """In directory + {cwd} +{context}: + {command} +""".format( + cwd=os.getcwd(), context=msg_context, command=command_str + ) + + if output: + errmsg += "See above for output from failed command.\n" + + return errmsg diff --git a/.lib/git-fleximod/poetry.lock b/.lib/git-fleximod/poetry.lock new file mode 100644 index 0000000000..b59ed3942c --- /dev/null +++ b/.lib/git-fleximod/poetry.lock @@ -0,0 +1,693 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "docutils" +version = "0.19" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fsspec" +version = "2023.12.2" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = 
"sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.41" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = 
"sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = 
"sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pyfakefs" +version = "5.3.5" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfakefs-5.3.5-py3-none-any.whl", hash = "sha256:751015c1de94e1390128c82b48cdedc3f088bbdbe4bc713c79d02a27f0f61e69"}, + {file = "pyfakefs-5.3.5.tar.gz", hash = "sha256:7cdc500b35a214cb7a614e1940543acc6650e69a94ac76e30f33c9373bd9cf90"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.0.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.3.0,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "5.3.0" +description = "Python documentation generator" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" +requests = ">=2.5.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "2.2.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wheel" +version = "0.42.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "25ee2ae1d74abedde3a6637a60d4a3095ea5cf9731960875741bbc2ba84a475d" diff --git a/.lib/git-fleximod/pyproject.toml b/.lib/git-fleximod/pyproject.toml new file mode 100644 index 0000000000..2484552e4f --- /dev/null +++ b/.lib/git-fleximod/pyproject.toml @@ -0,0 +1,41 @@ +[tool.poetry] +name = "git-fleximod" +version = "0.7.4" +description = "Extended support for git-submodule and git-sparse-checkout" +authors = ["Jim Edwards "] +maintainers = ["Jim Edwards "] +license = "MIT" +readme = "README.md" +homepage = "https://github.com/jedwards4b/git-fleximod" +keywords = ["git", "submodule", "sparse-checkout"] +packages = [ +{ include = "git_fleximod"}, +{ include = "doc"}, +] + +[tool.poetry.scripts] +git-fleximod = "git_fleximod.git_fleximod:main" +me2flexi = "git_fleximod.metoflexi:_main" +fsspec = "fsspec.fuse:main" + +[tool.poetry.dependencies] +python = "^3.8" +GitPython = "^3.1.0" +sphinx = "^5.0.0" +fsspec = "^2023.12.2" +wheel = "^0.42.0" +pytest = "^8.0.0" +pyfakefs = "^5.3.5" + +[tool.poetry.urls] +"Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" + +[tool.pytest.ini_options] +markers = [ + "skip_after_first: only run on first iteration" +] + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + diff --git a/.lib/git-fleximod/tbump.toml b/.lib/git-fleximod/tbump.toml new file mode 100644 index 0000000000..d4b8eaee11 --- /dev/null +++ b/.lib/git-fleximod/tbump.toml @@ -0,0 +1,43 @@ +# Uncomment this if your project is hosted on GitHub: +github_url = "https://github.com/jedwards4b/git-fleximod/" + +[version] +current = "0.7.4" + +# Example of a semver regexp. 
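+# (e.g. current = "0.7.4" is captured as major = 0, minor = 7, patch = 4)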
+# Make sure this matches current_version before +# using tbump +regex = ''' + (?P<major>\d+) + \. + (?P<minor>\d+) + \. + (?P<patch>\d+) + ''' + +[git] +message_template = "Bump to {new_version}" +tag_template = "v{new_version}" + +# For each file to patch, add a [[file]] config +# section containing the path of the file, relative to the +# tbump.toml location. +[[file]] +src = "git_fleximod/cli.py" + +[[file]] +src = "pyproject.toml" + +# You can specify a list of commands to +# run after the files have been patched +# and before the git commit is made + +# [[before_commit]] +# name = "check changelog" +# cmd = "grep -q {new_version} Changelog.rst" + +# Or run some commands after the git tag and the branch +# have been pushed: +# [[after_push]] +# name = "publish" +# cmd = "./publish.sh" diff --git a/.lib/git-fleximod/tests/__init__.py b/.lib/git-fleximod/tests/__init__.py new file mode 100644 index 0000000000..4d4c66c78e --- /dev/null +++ b/.lib/git-fleximod/tests/__init__.py @@ -0,0 +1,3 @@ +import sys, os + +sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, "src")) diff --git a/.lib/git-fleximod/tests/conftest.py b/.lib/git-fleximod/tests/conftest.py new file mode 100644 index 0000000000..942a0efb97 --- /dev/null +++ b/.lib/git-fleximod/tests/conftest.py @@ -0,0 +1,138 @@ +import pytest +from git_fleximod.gitinterface import GitInterface +import os +import subprocess +import logging +from pathlib import Path + +@pytest.fixture(scope='session') +def logger(): + logging.basicConfig( + level=logging.INFO, format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.StreamHandler()] + ) + logger = logging.getLogger(__name__) + return logger + +all_repos=[ + {"subrepo_path": "modules/test", + "submodule_name": "test_submodule", + "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status2" : "test_submodule at tag MPIserial_2.4.0", + "status3" : "test_submodule at tag MPIserial_2.4.0", + "status4" : "test_submodule at tag MPIserial_2.4.0", + "gitmodules_content" : """ + [submodule "test_submodule"] + path = modules/test + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.4.0 + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = ToplevelRequired +"""}, + {"subrepo_path": "modules/test_optional", + "submodule_name": "test_optional", + "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status2" : "test_optional at tag MPIserial_2.4.0", + "status3" : "test_optional not checked out, aligned at tag MPIserial_2.4.0", + "status4" : "test_optional at tag MPIserial_2.4.0", + "gitmodules_content": """ + [submodule "test_optional"] + path = modules/test_optional + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.4.0 + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = ToplevelOptional +"""}, + {"subrepo_path": "modules/test_alwaysoptional", + "submodule_name": "test_alwaysoptional", + "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c", + "status2" : "test_alwaysoptional at hash e5cf35c", + "status3" : "test_alwaysoptional not checked out, out of sync at tag MPIserial_2.3.0", + "status4" : "test_alwaysoptional at hash e5cf35c", + "gitmodules_content": """ + [submodule "test_alwaysoptional"] + path = modules/test_alwaysoptional + url = https://github.com/ESMCI/mpi-serial.git + fxtag = e5cf35c + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = 
AlwaysOptional +"""}, + {"subrepo_path": "modules/test_sparse", + "submodule_name": "test_sparse", + "status1" : "test_sparse at tag MPIserial_2.5.0", + "status2" : "test_sparse at tag MPIserial_2.5.0", + "status3" : "test_sparse at tag MPIserial_2.5.0", + "status4" : "test_sparse at tag MPIserial_2.5.0", + "gitmodules_content": """ + [submodule "test_sparse"] + path = modules/test_sparse + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.5.0 + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = AlwaysRequired + fxsparse = ../.sparse_file_list +"""}, +] +@pytest.fixture(params=all_repos) + +def shared_repos(request): + return request.param + +@pytest.fixture +def get_all_repos(): + return all_repos + +def write_sparse_checkout_file(fp): + sparse_content = """m4 +""" + fp.write_text(sparse_content) + +@pytest.fixture +def test_repo(shared_repos, tmp_path, logger): + subrepo_path = shared_repos["subrepo_path"] + submodule_name = shared_repos["submodule_name"] + test_dir = tmp_path / "testrepo" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + assert test_dir.joinpath(".git").is_dir() + (test_dir / "modules").mkdir() + if "sparse" in submodule_name: + (test_dir / subrepo_path).mkdir() + # Add the sparse checkout file + write_sparse_checkout_file(test_dir / "modules" / ".sparse_file_list") + gitp.git_operation("add","modules/.sparse_file_list") + else: + gitp = GitInterface(str(test_dir), logger) + gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) + assert test_dir.joinpath(".gitmodules").is_file() + gitp.git_operation("add",subrepo_path) + gitp.git_operation("commit","-a","-m","\"add submod\"") + test_dir2 = tmp_path / "testrepo2" + gitp.git_operation("clone",test_dir,test_dir2) + return test_dir2 + + +@pytest.fixture +def complex_repo(tmp_path, logger): + test_dir = tmp_path / "testcomplex" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2") + gitp.git_operation("fetch", "origin", "main") + gitp.git_operation("checkout", "main") + return test_dir + +@pytest.fixture +def git_fleximod(): + def _run_fleximod(path, args, input=None): + cmd = ["git", "fleximod"] + args.split() + result = subprocess.run(cmd, cwd=path, input=input, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + text=True) + if result.returncode: + print(result.stdout) + print(result.stderr) + return result + return _run_fleximod + diff --git a/.lib/git-fleximod/tests/test_a_import.py b/.lib/git-fleximod/tests/test_a_import.py new file mode 100644 index 0000000000..d5ca878de5 --- /dev/null +++ b/.lib/git-fleximod/tests/test_a_import.py @@ -0,0 +1,8 @@ +# pylint: disable=unused-import +from git_fleximod import cli +from git_fleximod import utils +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules + +def test_import(): + print("here") diff --git a/.lib/git-fleximod/tests/test_b_update.py b/.lib/git-fleximod/tests/test_b_update.py new file mode 100644 index 0000000000..159f1cfae0 --- /dev/null +++ b/.lib/git-fleximod/tests/test_b_update.py @@ -0,0 +1,26 @@ +import pytest +from pathlib import Path + +def test_basic_checkout(git_fleximod, test_repo, shared_repos): + # Prepare a simple .gitmodules + gm = shared_repos['gitmodules_content'] + file_path = (test_repo / ".gitmodules") + repo_name = 
shared_repos["submodule_name"] + repo_path = shared_repos["subrepo_path"] + + file_path.write_text(gm) + + # Run the command + result = git_fleximod(test_repo, f"update {repo_name}") + + # Assertions + assert result.returncode == 0 + assert Path(test_repo / repo_path).exists() # Did the submodule directory get created? + if "sparse" in repo_name: + assert Path(test_repo / f"{repo_path}/m4").exists() # Did the submodule sparse directory get created? + assert not Path(test_repo / f"{repo_path}/README").exists() # Did only the submodule sparse directory get created? + + status = git_fleximod(test_repo, f"status {repo_name}") + + assert shared_repos["status2"] in status.stdout + diff --git a/.lib/git-fleximod/tests/test_c_required.py b/.lib/git-fleximod/tests/test_c_required.py new file mode 100644 index 0000000000..89ab8d294d --- /dev/null +++ b/.lib/git-fleximod/tests/test_c_required.py @@ -0,0 +1,30 @@ +import pytest +from pathlib import Path + +def test_required(git_fleximod, test_repo, shared_repos): + file_path = (test_repo / ".gitmodules") + gm = shared_repos["gitmodules_content"] + repo_name = shared_repos["submodule_name"] + if file_path.exists(): + with file_path.open("r") as f: + gitmodules_content = f.read() + # add the entry if it does not exist + if repo_name not in gitmodules_content: + file_path.write_text(gitmodules_content+gm) + # or if it is incomplete + elif gm not in gitmodules_content: + file_path.write_text(gm) + else: + file_path.write_text(gm) + result = git_fleximod(test_repo, "update") + assert result.returncode == 0 + status = git_fleximod(test_repo, f"status {repo_name}") + assert shared_repos["status3"] in status.stdout + status = git_fleximod(test_repo, f"update --optional") + assert result.returncode == 0 + status = git_fleximod(test_repo, f"status {repo_name}") + assert shared_repos["status4"] in status.stdout + status = git_fleximod(test_repo, f"update {repo_name}") + assert result.returncode == 0 + status = git_fleximod(test_repo, f"status {repo_name}") + assert shared_repos["status4"] in status.stdout diff --git a/.lib/git-fleximod/tests/test_d_complex.py b/.lib/git-fleximod/tests/test_d_complex.py new file mode 100644 index 0000000000..fdce516274 --- /dev/null +++ b/.lib/git-fleximod/tests/test_d_complex.py @@ -0,0 +1,67 @@ +import pytest +from pathlib import Path +from git_fleximod.gitinterface import GitInterface + +def test_complex_checkout(git_fleximod, complex_repo, logger): + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout) + assert("Complex not checked out, aligned at tag testtag01" in status.stdout) + assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) + + # This should checkout and update test_submodule and complex_sub + result = git_fleximod(complex_repo, "update") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + + # now check the complex_sub + root = (complex_repo / "modules" / "complex") + assert(not (root / "libraries" / "gptl" / 
".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial" / ".git").exists()) + assert(not (root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + # update a single optional submodule + + result = git_fleximod(complex_repo, "update ToplevelOptional") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) + + + # Finally update optional + result = git_fleximod(complex_repo, "update --optional") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout) + + # now check the complex_sub + root = (complex_repo / "modules" / "complex" ) + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + diff --git a/bin/git-fleximod b/bin/git-fleximod new file mode 100755 index 0000000000..f69ede1c22 --- /dev/null +++ b/bin/git-fleximod @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +import sys +import os +sys.path.insert(0,os.path.abspath(os.path.join(os.path.dirname(__file__),"..",".lib","git-fleximod"))) +from git_fleximod.git_fleximod import main + +if __name__ == '__main__': + sys.exit(main()) diff --git a/ccs_config b/ccs_config index f90e10502c..2ff978f92a 160000 --- a/ccs_config +++ b/ccs_config @@ -1 +1 @@ -Subproject commit f90e10502c7246c2b45e373b7dd5e0da6cba0828 +Subproject commit 2ff978f92a5ac9a6ab243e5c14d06a7e2d2f5799 diff --git a/components/cdeps b/components/cdeps index 7a522c828c..7476950699 160000 --- a/components/cdeps +++ b/components/cdeps @@ -1 +1 @@ -Subproject commit 7a522c828c32dc35777992653f281ec525509c4a +Subproject commit 7476950699909813d1938a34bd8d71bf5bfbf1e9 diff --git a/components/mizuRoute b/components/mizuRoute index 0a62ee6185..81c720c7ee 160000 --- a/components/mizuRoute +++ b/components/mizuRoute @@ -1 +1 @@ -Subproject commit 0a62ee6185bdc9b99cd88ed5f15ea850602c43a2 +Subproject commit 81c720c7ee51f9c69f2934f696078c42f4493565 diff --git a/components/mosart b/components/mosart index 1c22a8c547..8c682b1b7f 160000 --- a/components/mosart +++ b/components/mosart @@ -1 +1 @@ -Subproject commit 1c22a8c5478b714cf5b7c54b3e5cf7bad09fd9b2 +Subproject commit 8c682b1b7f15d146816de302e0d359da3e957056 diff --git a/libraries/mct b/libraries/mct index e36024c5dd..82b0071e69 160000 --- a/libraries/mct +++ b/libraries/mct @@ -1 +1 @@ -Subproject commit 
e36024c5ddf482625ae6bd9474eff7d8f393f87c +Subproject commit 82b0071e69d14330b75d23b0bc68543ebea9aadc diff --git a/libraries/parallelio b/libraries/parallelio index 6539ef05ae..f52ade0756 160000 --- a/libraries/parallelio +++ b/libraries/parallelio @@ -1 +1 @@ -Subproject commit 6539ef05ae7584ec570a56fdab9f7dfb336c2b80 +Subproject commit f52ade075619b32fa141993b5665b0fe099befc2 diff --git a/src/fates b/src/fates index f0185f7c70..adfa664806 160000 --- a/src/fates +++ b/src/fates @@ -1 +1 @@ -Subproject commit f0185f7c7033fa69c80d1ddb07cbcbf1f8be1adc +Subproject commit adfa6648063408d443f5cae671bd37f713d3e3e8 From 798d43c0fe673557b98dd7fd1eb614b205641db0 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 28 May 2024 08:39:13 -0600 Subject: [PATCH 105/126] Add comment in lilacsmoke.py as per code review --- cime_config/SystemTests/lilacsmoke.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cime_config/SystemTests/lilacsmoke.py b/cime_config/SystemTests/lilacsmoke.py index 366d5d1176..1287301ba2 100644 --- a/cime_config/SystemTests/lilacsmoke.py +++ b/cime_config/SystemTests/lilacsmoke.py @@ -407,6 +407,10 @@ def run_phase(self): # case.get_mpirun_cmd creates a command that runs the executable given by # case.run_exe. So it's important that (elsewhere in this test script) we create a # link pointing from that to the atm_driver.exe executable. + # + # 2024/5/28 slevis: We added the load_env here to replace the + # behavior of the PBS -V directive that was removed from + # /ccs_config/machines/config_batch.xml self._case.load_env(reset=True) lid = new_lid() os.environ["OMP_NUM_THREADS"] = str(self._case.thread_count) From aaeae950ca98d3292e3a02eb95862e69e7783c40 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 28 May 2024 08:54:58 -0600 Subject: [PATCH 106/126] Updated ChangeLog and ChangeSum --- doc/ChangeLog | 26 +++----------------------- doc/ChangeSum | 2 +- 2 files changed, 4 insertions(+), 24 deletions(-) diff --git a/doc/ChangeLog b/doc/ChangeLog index d68ef323ac..e15c554f8f 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,7 +1,7 @@ =============================================================== Tag name: ctsm5.2.006 Originator(s): slevis (Samuel Levis) -Date: Tue 21 May 2024 02:59:05 PM MDT +Date: Tue 28 May 2024 08:45:50 AM MDT One-line Summary: Update externals to cesm2_3_beta17, remove mct, retire /test/tools Purpose and description of changes @@ -48,12 +48,6 @@ Changes to documentation: Substantial timing or memory changes: Not considered -Notes of particular relevance for developers: ---------------------------------------------- -Changes to tests or testing: - I added a long list of izumi nag debug tests to ExpectedFailures as per - https://github.com/ESCOMP/CMEPS/pull/460 - Testing summary: ---------------- [PASS means all tests PASS; OK means tests PASS other than expected fails.] @@ -68,7 +62,7 @@ Testing summary: regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): - derecho ----- + derecho ----- OK izumi ------- OK any other testing (give details below): @@ -82,21 +76,7 @@ If the tag used for baseline comparisons was NOT the previous tag, note that her Answer changes -------------- -Changes answers relative to baseline: - - [ If a tag changes answers relative to baseline comparison the - following should be filled in (otherwise remove this section). - And always remove these three lines and parts that don't apply. 
] - - Summarize any changes to answers, i.e., - - what code configurations: - - what platforms/compilers: - - nature of change (roundoff; larger than roundoff/same climate; new climate): - - If bitwise differences were observed, how did you show they were no worse - than roundoff? Roundoff differences means one or more lines of code change results - only by roundoff level (because order of operation changes for example). Roundoff - changes to state fields usually grow to greater than roundoff as the simulation progresses. +Changes answers relative to baseline: NO Other details ------------- diff --git a/doc/ChangeSum b/doc/ChangeSum index 7f8cdaeead..77bc8a426d 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,6 +1,6 @@ Tag Who Date Summary ============================================================================================================================ - ctsm5.2.006 slevis 05/21/2024 Update externals to cesm2_3_beta17, remove mct, retire /test/tools + ctsm5.2.006 slevis 05/28/2024 Update externals to cesm2_3_beta17, remove mct, retire /test/tools ctsm5.2.005 erik 05/13/2024 Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems ctsm5.2.004 multiple 05/09/2024 CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes ctsm5.2.003 samrabin 05/02/2024 Merge b4b-dev From 62d8ad004e896037f04a626ac7c4fceee9ecf6b0 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 28 May 2024 09:03:35 -0600 Subject: [PATCH 107/126] Update share in Externals.cfg --- Externals.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals.cfg b/Externals.cfg index 70d103a94b..dac119c47c 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -63,7 +63,7 @@ externals = Externals_CDEPS.cfg required = True [share] -tag = share1.0.18 +tag = share1.0.19 protocol = git repo_url = https://github.com/ESCOMP/CESM_share local_path = share From 26c1222668be7b2be11936d666be76916be1b751 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Tue, 28 May 2024 15:18:22 -0600 Subject: [PATCH 108/126] Update ChangeLog with latest info about tests --- doc/ChangeLog | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/ChangeLog b/doc/ChangeLog index e15c554f8f..b2f14a2c01 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,7 +1,7 @@ =============================================================== Tag name: ctsm5.2.006 Originator(s): slevis (Samuel Levis) -Date: Tue 28 May 2024 08:45:50 AM MDT +Date: Tue 28 May 2024 03:14:18 PM MDT One-line Summary: Update externals to cesm2_3_beta17, remove mct, retire /test/tools Purpose and description of changes @@ -68,15 +68,15 @@ Testing summary: any other testing (give details below): ctsm_sci - derecho ---- - -If the tag used for baseline comparisons was NOT the previous tag, note that here: + derecho ---- OK Answer changes -------------- Changes answers relative to baseline: NO + Except two derecho_nvhpc tests due to an update to the nvhpc compiler + as documented in the Pull Request listed a few lines down. 
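Tag bumps like the share1.0.18 → share1.0.19 change committed above are purely mechanical, so they lend themselves to scripting. A minimal sketch using Python's configparser, assuming the INI-style Externals.cfg layout shown in that diff (bump_tag is a hypothetical helper, not a CTSM tool):

```python
import configparser

def bump_tag(cfg_path, section, new_tag):
    """Rewrite one external's pinned tag, e.g. share1.0.18 -> share1.0.19."""
    cfg = configparser.ConfigParser()
    cfg.read(cfg_path)
    cfg[section]["tag"] = new_tag
    with open(cfg_path, "w", encoding="utf-8") as handle:
        cfg.write(handle)  # NOTE: configparser drops comments on rewrite

# Reproduces the change in the patch above.
bump_tag("Externals.cfg", "share", "share1.0.19")
```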
Other details ------------- From 51079bfb92ca2d75a71cfee5679bbce40314eaae Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 29 May 2024 10:17:10 -0600 Subject: [PATCH 109/126] update run_sys_tests.py for fleximod --- python/ctsm/run_sys_tests.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/python/ctsm/run_sys_tests.py b/python/ctsm/run_sys_tests.py index de93081504..ecea2342a7 100644 --- a/python/ctsm/run_sys_tests.py +++ b/python/ctsm/run_sys_tests.py @@ -576,12 +576,12 @@ def _record_git_status(testroot, retry, dry_run): if git_status.count("\n") == 1: # Only line in git status is the branch info output += "(clean sandbox)\n" - manic = os.path.join("manage_externals", "checkout_externals") - manage_externals_status = subprocess.check_output( - [manic, "--status", "--verbose"], cwd=ctsm_root, universal_newlines=True + fleximod = os.path.join("bin", "git-fleximod") + fleximod_status = subprocess.check_output( + [fleximod, "status"], cwd=ctsm_root, universal_newlines=True ) - output += 72 * "-" + "\n" + "manage_externals status:" + "\n" - output += manage_externals_status + output += 72 * "-" + "\n" + "git-fleximod status:" + "\n" + output += fleximod_status output += 72 * "-" + "\n" print(output) From 6b550b1aa536fed46d197580614226e16691d1f8 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 29 May 2024 14:03:29 -0600 Subject: [PATCH 110/126] Replace checkout_externals with git-fleximod in documentation --- .gitignore | 6 ++-- README | 4 +-- README_EXTERNALS.rst | 32 +++++++++---------- bld/CLMBuildNamelist.pm | 2 +- doc/README.CHECKLIST.master_tags | 6 ++-- doc/build_docs | 2 +- doc/design/python_script_user_interface.rst | 2 +- .../obtaining-and-building-ctsm.rst | 6 ++-- doc/source/lilac/specific-atm-models/wrf.rst | 2 +- python/ctsm/site_and_regional/run_neon.py | 2 +- tools/mksurfdata_esmf/README.md | 2 +- 11 files changed, 33 insertions(+), 33 deletions(-) diff --git a/.gitignore b/.gitignore index 1da8072fed..6c8111525a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ -# directories checked out by manage_externals, and other files created -# by manage_externals -manage_externals.log +# directories checked out by git-fleximod, and other files created +# by git-fleximod +git-fleximod.log ccs_config /src/fates/ /cime/ diff --git a/README b/README index 18cc2b1458..97daae2d5a 100644 --- a/README +++ b/README @@ -50,7 +50,7 @@ lilac ------------- Lightweight Infrastructure for Land-Atmosphere Coupling (for test -------------- CTSM Testing scripts for CTSM offline tools (deprecated) tools ------------- CTSM Offline tools to prepare input datasets and process output. 
cime_config ------- Configuration files of cime for compsets and CTSM settings -manage_externals -- Script to manage the external source directories (deprecated) +git-fleximod ------ Script to manage the external source directories py_env_create ----- Script to setup the python environment for CTSM python tools using conda python ------------ Python modules used in tools and testing and automated checking of ALL CTSM python scirpts @@ -75,7 +75,7 @@ Top level documentation ($CTSMROOT): README ------------------- This file README.md ---------------- File that displays on github under https::/github.com/ESCOMP/CTSM.git README.rst --------------- File that displays under the project in github -README_EXTERNALS.rst ----- Information on how to work with manage_externals for CTSM (deprecated) +README_EXTERNALS.rst ----- Information on how to work with git-fleximod for CTSM CODE_OF_CONDUCT.md ------- Code of Conduct for how to work with each other on the CTSM project Copyright ---------------- CESM Copyright file doc/UpdateChangeLog.pl --- Script to add documentation on a tag to the diff --git a/README_EXTERNALS.rst b/README_EXTERNALS.rst index ed7a068991..22c56ee6f6 100644 --- a/README_EXTERNALS.rst +++ b/README_EXTERNALS.rst @@ -18,11 +18,11 @@ To obtain the CTSM code you need to do the following: This will create a directory ``my_ctsm_sandbox/`` in your current working directory. -#. Run the script **manage_externals/checkout_externals**. :: +#. Run **./bin/git-fleximod update**. :: - ./manage_externals/checkout_externals + ./bin/git-fleximod update - The **checkout_externals** script is a package manager that will + **git-fleximod** is a package manager that will populate the ctsm directory with the relevant versions of each of the components along with the CIME infrastructure code. @@ -30,21 +30,21 @@ At this point you have a working version of CTSM. To see full details of how to set up a case, compile and run, see the CIME documentation at http://esmci.github.io/cime/ . -More details on checkout_externals ----------------------------------- +More details on git-fleximod +---------------------------- The file **Externals.cfg** in your top-level CTSM directory tells -**checkout_externals** which tag/branch of each component should be +**git-fleximod** which tag/branch of each component should be brought in to generate your sandbox. **Externals_CLM.cfg** is used similarly to point to the correct version of FATES (and possibly other CTSM-specific externals in the future); the below instructions referring to **Externals.cfg** also apply to modifying **Externals_CLM.cfg**. -NOTE: checkout_externals will always attempt +NOTE: git-fleximod will always attempt to make the working copy exactly match the externals description. If you manually modify an external without updating Externals.cfg, e.g. switch -to a different tag, then rerunning checkout_externals will switch you +to a different tag, then rerunning git-fleximod will switch you back to the external described in Externals.cfg. See below documentation `Customizing your CTSM sandbox`_ for more details. -**You need to rerun checkout_externals whenever Externals.cfg has +**You need to rerun git-fleximod whenever Externals.cfg has changed** (unless you have already manually updated the relevant external(s) to have the correct branch/tag checked out). 
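One way to tell whether such a rerun is needed after switching refs is to ask git whether the externals description itself changed. A minimal sketch, assuming a CTSM sandbox and a previously checked-out tag (externals_changed is an invented name, not part of the repository):

```python
import subprocess

def externals_changed(old_ref, new_ref="HEAD"):
    """True if Externals.cfg differs between two refs (rerun git-fleximod)."""
    out = subprocess.run(
        ["git", "diff", "--name-only", old_ref, new_ref, "--", "Externals.cfg"],
        capture_output=True, text=True, check=True,
    )
    return bool(out.stdout.strip())

if externals_changed("ctsm5.2.005"):
    print("Externals.cfg changed; rerun ./bin/git-fleximod update")
```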
Common times when this is needed are: @@ -54,17 +54,17 @@ when this is needed are: * After merging some other CTSM branch/tag into your currently checked-out branch -**checkout_externals** must be run from the root of the source +**git-fleximod** must be run from the root of the source tree. For example, if you cloned CTSM with:: git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox -then you must run **checkout_externals** from +then you must run **git-fleximod** from ``/path/to/my_ctsm_sandbox``. -To see more details of **checkout_externals**, issue :: +To see more details of **git-fleximod**, issue :: - ./manage_externals/checkout_externals --help + ./bin/git-fleximod --help Customizing your CTSM sandbox ============================= @@ -80,7 +80,7 @@ checked out ctsm1.0.0 but really wanted to have ctsm1.1.0; you would simply do the following:: git checkout ctsm1.1.0 - ./manage_externals/checkout_externals + ./bin/git-fleximod update You should **not** use this method if you have made any source code changes, or if you have any ongoing CTSM cases that were created from @@ -107,11 +107,11 @@ Each entry specifies either a tag, a hash or a branch. To point to a new tag: #. Checkout the new component(s):: - ./manage_externals/checkout_externals + ./bin/git-fleximod update To point to a hash, the process is the same, except also change ``tag = ...`` to ``hash = ...``. -To point to a branch, use ``branch = ...``. Pointing to a branch means that, each time you run ``manage_externals/checkout_externals`` you will get the current latest version of that branch. This can be convenient for in-progress development work, but should not be used when you need a stable version for scientific simulations. There are a number of gotchas with this workflow, so in general you should default to pointing to fixed hashes. (For CTSM master, we require a fixed hash or, usually, a tag.) +To point to a branch, use ``branch = ...``. Pointing to a branch means that, each time you run ``./bin/git-fleximod update`` you will get the current latest version of that branch. This can be convenient for in-progress development work, but should not be used when you need a stable version for scientific simulations. There are a number of gotchas with this workflow, so in general you should default to pointing to fixed hashes. (For CTSM master, we require a fixed hash or, usually, a tag.) Keep in mind that changing individual components from a tag may result in an invalid model (won't compile, won't run, not scientifically diff --git a/bld/CLMBuildNamelist.pm b/bld/CLMBuildNamelist.pm index fb44023cd5..1371ea5c06 100755 --- a/bld/CLMBuildNamelist.pm +++ b/bld/CLMBuildNamelist.pm @@ -366,7 +366,7 @@ sub check_for_perl_utils { } else { die <<"EOF"; ** Cannot find the root of the cime directory enter it using the -cimeroot option - Did you run the checkout_externals scripts? + Did you run ./bin/git-fleximod update? EOF } } diff --git a/doc/README.CHECKLIST.master_tags b/doc/README.CHECKLIST.master_tags index ed7794130b..0da52acaaa 100644 --- a/doc/README.CHECKLIST.master_tags +++ b/doc/README.CHECKLIST.master_tags @@ -14,9 +14,9 @@ https://github.com/ESCOMP/ctsm/wiki/CTSM-development-workflow 2a -- run 'git pull' to pull in the latest version from GitHub 2b -- run 'git status' and/or 'git diff' to make sure you don't have any uncommitted local changes - 2c -- run './manage_externals/checkout_externals -S' to make sure all externals are - updated and don't have any uncommitted changes. 
(If any are marked with 's' in - the first column, run ./manage_externals/checkout_externals to update them.) + 2c -- run './bin/git-fleximod status' to make sure all externals are + updated and don't have any uncommitted changes. If any are marked with 's' in + the first column, run './bin/git-fleximod update'. (3) Do all testing on your fork/feature-branch diff --git a/doc/build_docs b/doc/build_docs index a8e8099b60..45c7099ec5 100755 --- a/doc/build_docs +++ b/doc/build_docs @@ -6,5 +6,5 @@ if [ -f doc-builder/build_docs ]; then echo "Running: ./doc-builder/build_docs $@" ./doc-builder/build_docs "$@" else - echo "Obtain doc-builder by running ./manage_externals/checkout_externals -o from the top-level" + echo "Obtain doc-builder by running './bin/git-fleximod update --optional' from the top-level" fi diff --git a/doc/design/python_script_user_interface.rst b/doc/design/python_script_user_interface.rst index 87c1b3e8fc..d639117810 100644 --- a/doc/design/python_script_user_interface.rst +++ b/doc/design/python_script_user_interface.rst @@ -66,7 +66,7 @@ For logical flags, use a flag without an argument -- ``--feature`` for the case We try to follow the guide at the top of `Python's logging howto `_. In particular, print statements should be used for "console output for ordinary usage of a command line script or program"; ``logger.info`` or ``logger.debug`` should be used to "report events that occur during normal operation of a program (e.g. for status monitoring or fault investigation)", etc. -The distinction between when to use print and when to use logging can admittedly be a bit subjective, as it comes down to the question of whether the given output is part of the fundamental operation of the script – i.e., part of what the script is designed to do is to give this output. For example, ``run_sys_tests`` prints a variety of information when it starts, particularly concerning the git and manage_externals status of the current repository. The rationale for using ``print`` statements for this is that we designed ``run_sys_tests`` to replace some of the repetitive items that we did whenever running the system tests. One of these items was running ``git status`` and ``./manage_externals/checkout_externals -S -v`` to check that the repository is in a clean state. Thus, in this case, our view is that the output from these commands is part of the fundamental purpose of ``run_sys_tests``: it is something we always want to see, and we feel that it is important for anyone running the system tests to review, and thus ``print`` statements are appropriate here. +The distinction between when to use print and when to use logging can admittedly be a bit subjective, as it comes down to the question of whether the given output is part of the fundamental operation of the script – i.e., part of what the script is designed to do is to give this output. For example, ``run_sys_tests`` prints a variety of information when it starts, particularly concerning the git and git-fleximod status of the current repository. The rationale for using ``print`` statements for this is that we designed ``run_sys_tests`` to replace some of the repetitive items that we did whenever running the system tests. One of these items was running ``git status`` and ``./bin/git-fleximod status --verbose`` to check that the repository is in a clean state. 
Thus, in this case, our view is that the output from these commands is part of the fundamental purpose of ``run_sys_tests``: it is something we always want to see, and we feel that it is important for anyone running the system tests to review, and thus ``print`` statements are appropriate here. In general, ``print`` statements should be used sparingly, just for output that is important for the user to see. That said, users of CTSM scripts often expect more output than you would see from a typical Unix tool (where the philosophy is that there should be no output if everything worked correctly). Some examples of things that users of CTSM scripts typically want to see are: diff --git a/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst b/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst index c0e510c017..ef24e695b9 100644 --- a/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst +++ b/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst @@ -26,7 +26,7 @@ Obtain CTSM by running:: git clone https://github.com/ESCOMP/CTSM.git cd CTSM - ./manage_externals/checkout_externals + ./bin/git-fleximod update Then build CTSM and its dependencies. On a machine that has been ported to CIME, the command will look like this (example given for NCAR's ``cheyenne`` machine):: @@ -101,11 +101,11 @@ above`) can be obtained with:: git clone https://github.com/ESCOMP/CTSM.git cd CTSM - ./manage_externals/checkout_externals + ./bin/git-fleximod update By default, this will put you on the ``master`` branch of CTSM, which is the main development branch. You can checkout a different branch or tag using ``git checkout``; -**be sure to rerun** ``./manage_externals/checkout_externals`` **after doing so.** +**be sure to rerun** ``./bin/git-fleximod update`` **after doing so.** For more details, see https://github.com/ESCOMP/CTSM/wiki/Quick-start-to-CTSM-development-with-git diff --git a/doc/source/lilac/specific-atm-models/wrf.rst b/doc/source/lilac/specific-atm-models/wrf.rst index 5d104778ec..ad85fee777 100644 --- a/doc/source/lilac/specific-atm-models/wrf.rst +++ b/doc/source/lilac/specific-atm-models/wrf.rst @@ -44,7 +44,7 @@ Clone the CTSM repository:: git clone https://github.com/ESCOMP/CTSM.git cd CTSM - ./manage_externals/checkout_externals + ./bin/git-fleximod update .. _build-CTSM-and-dependencies: diff --git a/python/ctsm/site_and_regional/run_neon.py b/python/ctsm/site_and_regional/run_neon.py index 4b0df2722d..3acbf435b1 100755 --- a/python/ctsm/site_and_regional/run_neon.py +++ b/python/ctsm/site_and_regional/run_neon.py @@ -41,7 +41,7 @@ # - [ ] Case dependency and the ability to check case status # - [ ] If Case dependency works we don't need finidat given explicilty for post-ad and transient. 
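The print-versus-logging convention that python_script_user_interface.rst spells out above condenses to a short pattern. A sketch of that pattern (illustrative only; this is not code taken from run_sys_tests):

```python
import logging

logger = logging.getLogger(__name__)

def report_repo_state(git_status_text):
    # Part of the script's fundamental output: always print it.
    print(72 * "-")
    print("git status:")
    print(git_status_text)
    # Events that occur during normal operation: route through the logger.
    logger.info("repository state reported")
    logger.debug("raw status output was %d characters", len(git_status_text))
```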
-# - [ ] checkout_externals instead of using env varaiable +# - [ ] "./bin/git-fleximod update" instead of using env variable # - [ ] wget the fields available and run for those available # - [ ] Matrix spin-up if (SASU) Eric merged it in diff --git a/tools/mksurfdata_esmf/README.md b/tools/mksurfdata_esmf/README.md index 11cb69c681..3d19c88ff0 100644 --- a/tools/mksurfdata_esmf/README.md +++ b/tools/mksurfdata_esmf/README.md @@ -93,7 +93,7 @@ https://github.com/ESCOMP/CTSM/issues/2341 ``` shell # Assuming pwd is the tools/mksurfdata_esmf directory - ./manage_externals/checkout_externals # Assuming at the top level of the CTSM/CESM checkout + ./bin/git-fleximod update # Assuming at the top level of the CTSM/CESM checkout ``` This will bring in CIME and ccs_config which are required for building. From 612962280d766d86497ae5747c10d8af33623121 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 29 May 2024 14:32:40 -0600 Subject: [PATCH 111/126] Restore .gitignore as it was --- .gitignore | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 6c8111525a..1da8072fed 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ -# directories checked out by git-fleximod, and other files created -# by git-fleximod -git-fleximod.log +# directories checked out by manage_externals, and other files created +# by manage_externals +manage_externals.log ccs_config /src/fates/ /cime/ From 5a577cdf701d028393bacc12bfde1749c9a9a2d8 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 29 May 2024 14:34:26 -0600 Subject: [PATCH 112/126] Delete README_EXTERNALS.rst to be like #2559 --- README_EXTERNALS.rst | 132 ------------------------------------------- 1 file changed, 132 deletions(-) delete mode 100644 README_EXTERNALS.rst diff --git a/README_EXTERNALS.rst b/README_EXTERNALS.rst deleted file mode 100644 index 22c56ee6f6..0000000000 --- a/README_EXTERNALS.rst +++ /dev/null @@ -1,132 +0,0 @@ -Obtaining the full model code and associated scripting infrastructure -===================================================================== - -[!CAUTION] -This is deprecated and will be replaced with git submodules. See -https://github.com/ESCOMP/CTSM/pull/2443 - - -CTSM is released via GitHub. You will need some familiarity with git in order -to modify the code and commit these changes. However, to simply checkout and run the -code, no git knowledge is required other than what is documented in the following steps. - -To obtain the CTSM code you need to do the following: - -#. Clone the repository. :: - - git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox - - This will create a directory ``my_ctsm_sandbox/`` in your current working directory. - -#. Run **./bin/git-fleximod update**. :: - - ./bin/git-fleximod update - - **git-fleximod** is a package manager that will - populate the ctsm directory with the relevant versions of each of the - components along with the CIME infrastructure code. - -At this point you have a working version of CTSM. - -To see full details of how to set up a case, compile and run, see the CIME documentation at http://esmci.github.io/cime/ . - -More details on git-fleximod ----------------------------- - -The file **Externals.cfg** in your top-level CTSM directory tells -**git-fleximod** which tag/branch of each component should be -brought in to generate your sandbox. 
**Externals_CLM.cfg** is used similarly to point to the correct version of FATES (and possibly other CTSM-specific externals in the future); the below instructions referring to **Externals.cfg** also apply to modifying **Externals_CLM.cfg**. - -NOTE: git-fleximod will always attempt -to make the working copy exactly match the externals description. If -you manually modify an external without updating Externals.cfg, e.g. switch -to a different tag, then rerunning git-fleximod will switch you -back to the external described in Externals.cfg. See below -documentation `Customizing your CTSM sandbox`_ for more details. - -**You need to rerun git-fleximod whenever Externals.cfg has -changed** (unless you have already manually updated the relevant -external(s) to have the correct branch/tag checked out). Common times -when this is needed are: - -* After checking out a new CTSM branch/tag - -* After merging some other CTSM branch/tag into your currently - checked-out branch - -**git-fleximod** must be run from the root of the source -tree. For example, if you cloned CTSM with:: - - git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox - -then you must run **git-fleximod** from -``/path/to/my_ctsm_sandbox``. - -To see more details of **git-fleximod**, issue :: - - ./bin/git-fleximod --help - -Customizing your CTSM sandbox -============================= - -There are several use cases to consider when you want to customize or modify your CTSM sandbox. - -Switching to a different CTSM branch or tag -------------------------------------------- - -If you have already checked out a branch or tag and **HAVE NOT MADE ANY -MODIFICATIONS** it is simple to change your sandbox. Say that you -checked out ctsm1.0.0 but really wanted to have ctsm1.1.0; -you would simply do the following:: - - git checkout ctsm1.1.0 - ./bin/git-fleximod update - -You should **not** use this method if you have made any source code -changes, or if you have any ongoing CTSM cases that were created from -this sandbox. In these cases, it is often easiest to do a second **git -clone**. - -Pointing to a different version of a component ----------------------------------------------- - -Each entry in **Externals.cfg** has the following form (we use CIME as an -example below):: - - [cime] - local_path = cime - protocol = git - repo_url = https://github.com/CESM-Development/cime - tag = cime5.4.0-alpha.20 - required = True - -Each entry specifies either a tag, a hash or a branch. To point to a new tag: - -#. Modify the relevant entry/entries in **Externals.cfg** (e.g., changing - ``cime5.4.0-alpha.20`` to ``cime5.4.0-alpha.21`` above) - -#. Checkout the new component(s):: - - ./bin/git-fleximod update - -To point to a hash, the process is the same, except also change ``tag = ...`` to ``hash = ...``. - -To point to a branch, use ``branch = ...``. Pointing to a branch means that, each time you run ``./bin/git-fleximod update`` you will get the current latest version of that branch. This can be convenient for in-progress development work, but should not be used when you need a stable version for scientific simulations. There are a number of gotchas with this workflow, so in general you should default to pointing to fixed hashes. (For CTSM master, we require a fixed hash or, usually, a tag.) - -Keep in mind that changing individual components from a tag may result -in an invalid model (won't compile, won't run, not scientifically -meaningful) and is unsupported. 
- -Committing your change to Externals.cfg -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -After making this change, it's a good idea to commit the change in your -local CTSM git repository. First create a branch in your local -repository, then commit it. (Unlike with subversion, branches are stored -locally unless you explicitly push them up to GitHub. Feel free to -create whatever local branches you'd like.) For example:: - - git checkout -b my_ctsm_branch - git add Externals.cfg - git commit -m "Update CIME to cime5.4.0-alpha.20" - From 61c97a13ebf81bb28f4bbac7aa1072c9f544c85b Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 29 May 2024 15:27:22 -0600 Subject: [PATCH 113/126] Add dummy ./manage_externals/checkout_externals with ERROR message --- manage_externals/checkout_externals | 3 +++ 1 file changed, 3 insertions(+) create mode 100755 manage_externals/checkout_externals diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals new file mode 100755 index 0000000000..5f848f5da9 --- /dev/null +++ b/manage_externals/checkout_externals @@ -0,0 +1,3 @@ +echo "ERROR: Instead of ./manage_externals/checkout_externals" +echo "please type './bin/git-fleximod update'" +echo "For additional information, please type './bin/git-fleximod --help'" From 1ed3f1967ed4564e2809841ca8a99b09a479299f Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Wed, 29 May 2024 16:46:46 -0600 Subject: [PATCH 114/126] Replace cheyenne with derecho in a .rst file --- .../obtaining-and-building-ctsm.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst b/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst index ef24e695b9..907f63f211 100644 --- a/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst +++ b/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst @@ -29,9 +29,9 @@ Obtain CTSM by running:: ./bin/git-fleximod update Then build CTSM and its dependencies. On a machine that has been ported to CIME, the -command will look like this (example given for NCAR's ``cheyenne`` machine):: +command will look like this (example given for NCAR's ``derecho`` machine):: - ./lilac/build_ctsm /glade/scratch/$USER/ctsm_build_dir --machine cheyenne --compiler intel + ./lilac/build_ctsm /glade/scratch/$USER/ctsm_build_dir --machine derecho --compiler intel and then, before building the atmosphere model:: @@ -145,13 +145,13 @@ the `CIME documentation`_. Building on a CIME-supported machine ------------------------------------ -If you are using a machine that has been ported to CIME_ (for example, NCAR's ``cheyenne`` +If you are using a machine that has been ported to CIME_ (for example, NCAR's ``derecho`` machine), then you do not need to specify much information to ``build_ctsm``. In addition, in this case, CIME will load the appropriate modules and set the appropriate environment variables at build time, so you do not need to do anything to set up your environment ahead of time. **Building CTSM with LILAC requires ESMF. ESMF is currently an optional CIME dependency, so many CIME-ported machines do not provide information on an ESMF -installation. NCAR's cheyenne machine DOES provide ESMF, but for other machines, you may +installation. 
NCAR's derecho machine DOES provide ESMF, but for other machines, you may need to add this to your CIME port.** To build CTSM and its dependencies in this case, run:: @@ -231,7 +231,7 @@ Example usage for a Mac (a simple case) is:: ./lilac/build_ctsm ~/ctsm_build_dir --os Darwin --compiler gnu --netcdf-path /usr/local --esmf-mkfile-path /Users/sacks/ESMF/esmf8.0.0/lib/libO/Darwin.gfortranclang.64.mpich3.default/esmf.mk --max-mpitasks-per-node 4 --no-pnetcdf -Example usage for NCAR's ``cheyenne`` machine (a more complex case) is:: +Example usage for NCAR's ``derecho`` machine (a more complex case) is:: module purge module load ncarenv/1.3 python/3.7.9 cmake intel/19.1.1 esmf_libs mkl From 4f9db007afa9b3e1705c7bad78c2cc71714d9cd6 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 30 May 2024 11:51:36 -0600 Subject: [PATCH 115/126] Add "bin/" before git-fleximod in README documentation --- README | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README b/README index 97daae2d5a..07d9c7b13d 100644 --- a/README +++ b/README @@ -50,7 +50,7 @@ lilac ------------- Lightweight Infrastructure for Land-Atmosphere Coupling (for test -------------- CTSM Testing scripts for CTSM offline tools (deprecated) tools ------------- CTSM Offline tools to prepare input datasets and process output. cime_config ------- Configuration files of cime for compsets and CTSM settings -git-fleximod ------ Script to manage the external source directories +bin/git-fleximod -- Script to manage the external source directories py_env_create ----- Script to setup the python environment for CTSM python tools using conda python ------------ Python modules used in tools and testing and automated checking of ALL CTSM python scirpts From 8ac86a1f21704e952b5dbb753f51cb4ddcfd47f4 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 30 May 2024 12:10:04 -0600 Subject: [PATCH 116/126] Replacing README_EXTERNALS.rst with README_GITFLEXIMOD.rst --- README_GITFLEXIMOD.rst | 127 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 127 insertions(+) create mode 100644 README_GITFLEXIMOD.rst diff --git a/README_GITFLEXIMOD.rst b/README_GITFLEXIMOD.rst new file mode 100644 index 0000000000..f91ef30fae --- /dev/null +++ b/README_GITFLEXIMOD.rst @@ -0,0 +1,127 @@ +Obtaining the full model code and associated scripting infrastructure +===================================================================== + +CTSM is released via GitHub. You will need some familiarity with git in order +to modify the code and commit these changes. However, to simply checkout and run the +code, no git knowledge is required other than what is documented in the following steps. + +To obtain the CTSM code you need to do the following: + +#. Clone the repository. :: + + git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox + + This will create a directory ``my_ctsm_sandbox/`` in your current working directory. + +#. Run **./bin/git-fleximod update**. :: + + ./bin/git-fleximod update + + **git-fleximod** is a package manager that will + populate the ctsm directory with the relevant versions of each of the + components along with the CIME infrastructure code. + +At this point you have a working version of CTSM. + +To see full details of how to set up a case, compile and run, see the CIME documentation at http://esmci.github.io/cime/ . 
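The two-step recipe the new README gives (clone, then ./bin/git-fleximod update) is also easy to drive from Python when creating sandboxes in bulk. A minimal sketch, assuming git is on PATH (fresh_sandbox is an invented name):

```python
import subprocess

def fresh_sandbox(url="https://github.com/escomp/ctsm.git",
                  sandbox="my_ctsm_sandbox"):
    subprocess.run(["git", "clone", url, sandbox], check=True)
    # Populate the externals at the versions pinned by this checkout.
    subprocess.run(["./bin/git-fleximod", "update"], cwd=sandbox, check=True)

fresh_sandbox()
```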
+ +More details on git-fleximod +---------------------------- + +The file **Externals.cfg** in your top-level CTSM directory tells +**git-fleximod** which tag/branch of each component should be +brought in to generate your sandbox. **Externals_CLM.cfg** is used similarly to point to the correct version of FATES (and possibly other CTSM-specific externals in the future); the below instructions referring to **Externals.cfg** also apply to modifying **Externals_CLM.cfg**. + +NOTE: git-fleximod will always attempt +to make the working copy exactly match the externals description. If +you manually modify an external without updating Externals.cfg, e.g. switch +to a different tag, then rerunning git-fleximod will switch you +back to the external described in Externals.cfg. See below +documentation `Customizing your CTSM sandbox`_ for more details. + +**You need to rerun git-fleximod whenever Externals.cfg has +changed** (unless you have already manually updated the relevant +external(s) to have the correct branch/tag checked out). Common times +when this is needed are: + +* After checking out a new CTSM branch/tag + +* After merging some other CTSM branch/tag into your currently + checked-out branch + +**./bin/git-fleximod** must be run from the root of the source +tree. For example, if you cloned CTSM with:: + + git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox + +then you must run **./bin/git-fleximod** from +``/path/to/my_ctsm_sandbox``. + +To see more details of **git-fleximod**, issue :: + + ./bin/git-fleximod --help + +Customizing your CTSM sandbox +============================= + +There are several use cases to consider when you want to customize or modify your CTSM sandbox. + +Switching to a different CTSM branch or tag +------------------------------------------- + +If you have already checked out a branch or tag and **HAVE NOT MADE ANY +MODIFICATIONS** it is simple to change your sandbox. Say that you +checked out ctsm1.0.0 but really wanted to have ctsm1.1.0; +you would simply do the following:: + + git checkout ctsm1.1.0 + ./bin/git-fleximod + +You should **not** use this method if you have made any source code +changes, or if you have any ongoing CTSM cases that were created from +this sandbox. In these cases, it is often easiest to do a second **git +clone**. + +Pointing to a different version of a component +---------------------------------------------- + +Each entry in **Externals.cfg** has the following form (we use CIME as an +example below):: + + [cime] + local_path = cime + protocol = git + repo_url = https://github.com/CESM-Development/cime + tag = cime5.4.0-alpha.20 + required = True + +Each entry specifies either a tag, a hash or a branch. To point to a new tag: + +#. Modify the relevant entry/entries in **Externals.cfg** (e.g., changing + ``cime5.4.0-alpha.20`` to ``cime5.4.0-alpha.21`` above) + +#. Checkout the new component(s):: + + ./bin/git-fleximod + +To point to a hash, the process is the same, except also change ``tag = ...`` to ``hash = ...``. + +To point to a branch, use ``branch = ...``. Pointing to a branch means that, each time you run ``./bin/git-fleximod`` you will get the current latest version of that branch. This can be convenient for in-progress development work, but should not be used when you need a stable version for scientific simulations. There are a number of gotchas with this workflow, so in general you should default to pointing to fixed hashes. (For CTSM master, we require a fixed hash or, usually, a tag.) 
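That prefer-fixed-hashes guidance is easy to audit mechanically. A minimal sketch, assuming the INI-style Externals.cfg layout quoted above (floating_externals is a hypothetical checker, not a shipped utility):

```python
import configparser

def floating_externals(cfg_path="Externals.cfg"):
    """List externals that track a branch instead of pinning a tag or hash."""
    cfg = configparser.ConfigParser()
    cfg.read(cfg_path)
    return [name for name in cfg.sections() if "branch" in cfg[name]]

for name in floating_externals():
    print(f"WARNING: '{name}' tracks a branch; pin a tag or hash instead")
```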
+ +Keep in mind that changing individual components from a tag may result +in an invalid model (won't compile, won't run, not scientifically +meaningful) and is unsupported. + +Committing your change to Externals.cfg +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +After making this change, it's a good idea to commit the change in your +local CTSM git repository. First create a branch in your local +repository, then commit it. (Unlike with subversion, branches are stored +locally unless you explicitly push them up to GitHub. Feel free to +create whatever local branches you'd like.) For example:: + + git checkout -b my_ctsm_branch + git add Externals.cfg + git commit -m "Update CIME to cime5.4.0-alpha.20" + From 03762a081fd3f283c4ff1b9336fe14b7f71a051a Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 30 May 2024 16:08:27 -0600 Subject: [PATCH 117/126] Updates to README_GITFLEXIMOD.rst as per Jim's review --- README_GITFLEXIMOD.rst | 68 +++++++++++++++++------------------------- 1 file changed, 28 insertions(+), 40 deletions(-) diff --git a/README_GITFLEXIMOD.rst b/README_GITFLEXIMOD.rst index f91ef30fae..ab8c412cee 100644 --- a/README_GITFLEXIMOD.rst +++ b/README_GITFLEXIMOD.rst @@ -15,11 +15,15 @@ To obtain the CTSM code you need to do the following: #. Run **./bin/git-fleximod update**. :: + cd my_ctsm_sandbox ./bin/git-fleximod update + ./bin/git-fleximod --help # for a user's guide **git-fleximod** is a package manager that will populate the ctsm directory with the relevant versions of each of the components along with the CIME infrastructure code. + Additional documentation for git-fleximod appears here: + https://github.com/ESMCI/git-fleximod?tab=readme-ov-file#git-fleximod At this point you have a working version of CTSM. @@ -28,18 +32,18 @@ To see full details of how to set up a case, compile and run, see the CIME docum More details on git-fleximod ---------------------------- -The file **Externals.cfg** in your top-level CTSM directory tells +The file **.gitmodules** in your top-level CTSM directory tells **git-fleximod** which tag/branch of each component should be -brought in to generate your sandbox. **Externals_CLM.cfg** is used similarly to point to the correct version of FATES (and possibly other CTSM-specific externals in the future); the below instructions referring to **Externals.cfg** also apply to modifying **Externals_CLM.cfg**. +brought in to generate your sandbox. -NOTE: git-fleximod will always attempt -to make the working copy exactly match the externals description. If -you manually modify an external without updating Externals.cfg, e.g. switch -to a different tag, then rerunning git-fleximod will switch you -back to the external described in Externals.cfg. See below -documentation `Customizing your CTSM sandbox`_ for more details. +NOTE: If you manually modify an external without updating .gitmodules, +e.g. switch to a different tag, then rerunning git-fleximod will warn you of +local changes you need to resolve. +git-fleximod will not change a modified external back to what is specified in +.gitmodules without the --force option. +See below documentation `Customizing your CTSM sandbox`_ for more details. -**You need to rerun git-fleximod whenever Externals.cfg has +**You need to rerun git-fleximod whenever .gitmodules has changed** (unless you have already manually updated the relevant external(s) to have the correct branch/tag checked out). 
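Because .gitmodules uses git-config syntax rather than the old INI layout, git itself is the safest parser for the fx* keys. A sketch of reading one submodule's pin (illustrative; git-fleximod ships its own gitmodules.py for this):

```python
import subprocess

def fxtag(name, gitmodules=".gitmodules"):
    """Return the fxtag pin for one submodule, e.g. fxtag("cime")."""
    out = subprocess.run(
        ["git", "config", "--file", gitmodules, f"submodule.{name}.fxtag"],
        capture_output=True, text=True, check=True,
    )
    return out.stdout.strip()

print(fxtag("cime"))  # e.g. "cime6.0.246" with the entry shown below
```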
Common times when this is needed are: @@ -49,18 +53,6 @@ when this is needed are: * After merging some other CTSM branch/tag into your currently checked-out branch -**./bin/git-fleximod** must be run from the root of the source -tree. For example, if you cloned CTSM with:: - - git clone https://github.com/escomp/ctsm.git my_ctsm_sandbox - -then you must run **./bin/git-fleximod** from -``/path/to/my_ctsm_sandbox``. - -To see more details of **git-fleximod**, issue :: - - ./bin/git-fleximod --help - Customizing your CTSM sandbox ============================= @@ -75,7 +67,7 @@ checked out ctsm1.0.0 but really wanted to have ctsm1.1.0; you would simply do the following:: git checkout ctsm1.1.0 - ./bin/git-fleximod + ./bin/git-fleximod update You should **not** use this method if you have made any source code changes, or if you have any ongoing CTSM cases that were created from @@ -85,35 +77,31 @@ clone**. Pointing to a different version of a component ---------------------------------------------- -Each entry in **Externals.cfg** has the following form (we use CIME as an +Each entry in **.gitmodules** has the following form (we use CIME as an example below):: - [cime] - local_path = cime - protocol = git - repo_url = https://github.com/CESM-Development/cime - tag = cime5.4.0-alpha.20 - required = True + [submodule "cime"] + path = cime + url = https://github.com/ESMCI/cime + fxtag = cime6.0.246 + fxrequired = ToplevelRequired + fxDONOTUSEurl = https://github.com/ESMCI/cime -Each entry specifies either a tag, a hash or a branch. To point to a new tag: +Each entry specifies either a tag or a hash. To point to a new tag or hash: -#. Modify the relevant entry/entries in **Externals.cfg** (e.g., changing - ``cime5.4.0-alpha.20`` to ``cime5.4.0-alpha.21`` above) +#. Modify the relevant entry/entries in **.gitmodules** (e.g., changing + ``cime6.0.246`` to ``cime6.0.247`` above) #. Checkout the new component(s):: - ./bin/git-fleximod - -To point to a hash, the process is the same, except also change ``tag = ...`` to ``hash = ...``. - -To point to a branch, use ``branch = ...``. Pointing to a branch means that, each time you run ``./bin/git-fleximod`` you will get the current latest version of that branch. This can be convenient for in-progress development work, but should not be used when you need a stable version for scientific simulations. There are a number of gotchas with this workflow, so in general you should default to pointing to fixed hashes. (For CTSM master, we require a fixed hash or, usually, a tag.) + ./bin/git-fleximod update Keep in mind that changing individual components from a tag may result in an invalid model (won't compile, won't run, not scientifically meaningful) and is unsupported. -Committing your change to Externals.cfg -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Committing your change to .gitmodules +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ After making this change, it's a good idea to commit the change in your local CTSM git repository. First create a branch in your local @@ -122,6 +110,6 @@ locally unless you explicitly push them up to GitHub. Feel free to create whatever local branches you'd like.) 
For example:: git checkout -b my_ctsm_branch - git add Externals.cfg + git add .gitmodules git commit -m "Update CIME to cime5.4.0-alpha.20" From 6fa0255660c848b4f49603511b1da79750b686c8 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Thu, 30 May 2024 16:09:26 -0600 Subject: [PATCH 118/126] Jim's suggestions for doc/.../lilac/.../obtaining-and-building-ctsm.rst --- .../obtaining-and-building-ctsm.rst | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst b/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst index 907f63f211..fcd8235b62 100644 --- a/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst +++ b/doc/source/lilac/obtaining-building-and-running/obtaining-and-building-ctsm.rst @@ -91,7 +91,7 @@ Building CTSM requires: - ESMF version 8 or later - - **ESMF is not needed in general for CTSM, but is needed for LILAC** + - **ESMF is needed for LILAC (and for CESM3 and later)** Obtaining CTSM ============== @@ -149,10 +149,11 @@ If you are using a machine that has been ported to CIME_ (for example, NCAR's `` machine), then you do not need to specify much information to ``build_ctsm``. In addition, in this case, CIME will load the appropriate modules and set the appropriate environment variables at build time, so you do not need to do anything to set up your environment -ahead of time. **Building CTSM with LILAC requires ESMF. ESMF is currently an optional -CIME dependency, so many CIME-ported machines do not provide information on an ESMF -installation. NCAR's derecho machine DOES provide ESMF, but for other machines, you may -need to add this to your CIME port.** +ahead of time. **Building CTSM with LILAC requires ESMF. ESMF is an optional +CIME dependency before CESM3 and not optional for CESM3. +NCAR's derecho machine DOES provide ESMF. For other machines, you may +need to add this to your CIME port. 
Please see esmf.org for download and build
+instructions.**
 
 To build CTSM and its dependencies in this case, run::
 
From ae93d22bfee601a9fbe4561cedbd77945ab23bbf Mon Sep 17 00:00:00 2001
From: Samuel Levis
Date: Thu, 30 May 2024 16:42:18 -0600
Subject: [PATCH 119/126] Remove cheyenne from testlist_clm.xml

---
 cime_config/testdefs/testlist_clm.xml | 651 +-------------------------
 1 file changed, 1 insertion(+), 650 deletions(-)

diff --git a/cime_config/testdefs/testlist_clm.xml b/cime_config/testdefs/testlist_clm.xml
index 9cfba6f5b3..1ac2334ba8 100644
--- a/cime_config/testdefs/testlist_clm.xml
+++ b/cime_config/testdefs/testlist_clm.xml
[hunks @@ -43,8 +43,6 @@ through @@ -3650,7 +3002,6 @@: roughly 650 deleted lines of cheyenne test entries plus 1 inserted line; the XML element content of these hunks was lost in extraction, so only the hunk markers survived.]

From b25a02d904bf8cd010fb400b4b0181361f077d83 Mon Sep 17 00:00:00 2001
From: Jim Edwards
Date: Fri, 31 May 2024 10:20:07 -0600
Subject: [PATCH 120/126] fix subtree add

---
 .../git-fleximod/.github/workflows/pre-commit |  13 -
 .../.github/workflows/pytest.yaml             |  77 --
 .lib/git-fleximod/.pre-commit-config.yaml     |  18 -
 .lib/git-fleximod/License                     |  20 -
 .lib/git-fleximod/README.md                   | 110 ---
 .lib/git-fleximod/doc/Makefile                |  20 -
 .lib/git-fleximod/doc/conf.py                 |  26 -
 .lib/git-fleximod/doc/index.rst               |  24 -
 .lib/git-fleximod/doc/make.bat                |  35 -
 .lib/git-fleximod/escomp_install              |  25 -
 .lib/git-fleximod/git_fleximod/__init__.py    |   0
 .lib/git-fleximod/git_fleximod/cli.py         | 119 ---
 .../git-fleximod/git_fleximod/git_fleximod.py | 601 ---------------
 .../git-fleximod/git_fleximod/gitinterface.py |  79 --
 .lib/git-fleximod/git_fleximod/gitmodules.py  |  97 ---
 .../git-fleximod/git_fleximod/lstripreader.py |  43 --
 .lib/git-fleximod/git_fleximod/metoflexi.py   | 236 ------
 .lib/git-fleximod/git_fleximod/utils.py       | 365 ---------
 .lib/git-fleximod/poetry.lock                 | 693 ------------------
 .lib/git-fleximod/pyproject.toml              |  41 --
 .lib/git-fleximod/tbump.toml                  |  43 --
 .lib/git-fleximod/tests/__init__.py           |   3 -
 .lib/git-fleximod/tests/conftest.py           | 138 ----
.lib/git-fleximod/tests/test_a_import.py | 8 - .lib/git-fleximod/tests/test_b_update.py | 26 - .lib/git-fleximod/tests/test_c_required.py | 30 - .lib/git-fleximod/tests/test_d_complex.py | 67 -- 27 files changed, 2957 deletions(-) delete mode 100644 .lib/git-fleximod/.github/workflows/pre-commit delete mode 100644 .lib/git-fleximod/.github/workflows/pytest.yaml delete mode 100644 .lib/git-fleximod/.pre-commit-config.yaml delete mode 100644 .lib/git-fleximod/License delete mode 100644 .lib/git-fleximod/README.md delete mode 100644 .lib/git-fleximod/doc/Makefile delete mode 100644 .lib/git-fleximod/doc/conf.py delete mode 100644 .lib/git-fleximod/doc/index.rst delete mode 100644 .lib/git-fleximod/doc/make.bat delete mode 100644 .lib/git-fleximod/escomp_install delete mode 100644 .lib/git-fleximod/git_fleximod/__init__.py delete mode 100644 .lib/git-fleximod/git_fleximod/cli.py delete mode 100755 .lib/git-fleximod/git_fleximod/git_fleximod.py delete mode 100644 .lib/git-fleximod/git_fleximod/gitinterface.py delete mode 100644 .lib/git-fleximod/git_fleximod/gitmodules.py delete mode 100644 .lib/git-fleximod/git_fleximod/lstripreader.py delete mode 100755 .lib/git-fleximod/git_fleximod/metoflexi.py delete mode 100644 .lib/git-fleximod/git_fleximod/utils.py delete mode 100644 .lib/git-fleximod/poetry.lock delete mode 100644 .lib/git-fleximod/pyproject.toml delete mode 100644 .lib/git-fleximod/tbump.toml delete mode 100644 .lib/git-fleximod/tests/__init__.py delete mode 100644 .lib/git-fleximod/tests/conftest.py delete mode 100644 .lib/git-fleximod/tests/test_a_import.py delete mode 100644 .lib/git-fleximod/tests/test_b_update.py delete mode 100644 .lib/git-fleximod/tests/test_c_required.py delete mode 100644 .lib/git-fleximod/tests/test_d_complex.py diff --git a/.lib/git-fleximod/.github/workflows/pre-commit b/.lib/git-fleximod/.github/workflows/pre-commit deleted file mode 100644 index 1a6ad0082a..0000000000 --- a/.lib/git-fleximod/.github/workflows/pre-commit +++ /dev/null @@ -1,13 +0,0 @@ -name: pre-commit -on: - pull_request: - push: - branches: [main] - -jobs: - pre-commit: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 - - uses: pre-commit/action@v3.0.0 diff --git a/.lib/git-fleximod/.github/workflows/pytest.yaml b/.lib/git-fleximod/.github/workflows/pytest.yaml deleted file mode 100644 index 0868dd9a33..0000000000 --- a/.lib/git-fleximod/.github/workflows/pytest.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# Run this job on pushes to `main`, and for pull requests. If you don't specify -# `branches: [main], then this actions runs _twice_ on pull requests, which is -# annoying. - -on: - push: - branches: [main] - pull_request: - branches: [main] - -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - # If you wanted to use multiple Python versions, you'd have specify a matrix in the job and - # reference the matrixe python version here. - - uses: actions/setup-python@v5 - with: - python-version: '3.9' - - # Cache the installation of Poetry itself, e.g. the next step. This prevents the workflow - # from installing Poetry every time, which can be slow. Note the use of the Poetry version - # number in the cache key, and the "-0" suffix: this allows you to invalidate the cache - # manually if/when you want to upgrade Poetry, or if something goes wrong. This could be - # mildly cleaner by using an environment variable, but I don't really care. 
- - name: cache poetry install - uses: actions/cache@v4 - with: - path: ~/.local - key: poetry-1.7.1 - - # Install Poetry. You could do this manually, or there are several actions that do this. - # `snok/install-poetry` seems to be minimal yet complete, and really just calls out to - # Poetry's default install script, which feels correct. I pin the Poetry version here - # because Poetry does occasionally change APIs between versions and I don't want my - # actions to break if it does. - # - # The key configuration value here is `virtualenvs-in-project: true`: this creates the - # venv as a `.venv` in your testing directory, which allows the next step to easily - # cache it. - - uses: snok/install-poetry@v1 - with: - version: 1.7.1 - virtualenvs-create: true - virtualenvs-in-project: true - - # Cache your dependencies (i.e. all the stuff in your `pyproject.toml`). Note the cache - # key: if you're using multiple Python versions, or multiple OSes, you'd need to include - # them in the cache key. I'm not, so it can be simple and just depend on the poetry.lock. - - name: cache deps - id: cache-deps - uses: actions/cache@v4 - with: - path: .venv - key: pydeps-${{ hashFiles('**/poetry.lock') }} - - # Install dependencies. `--no-root` means "install all dependencies but not the project - # itself", which is what you want to avoid caching _your_ code. The `if` statement - # ensures this only runs on a cache miss. - - run: poetry install --no-interaction --no-root - if: steps.cache-deps.outputs.cache-hit != 'true' - - # Now install _your_ project. This isn't necessary for many types of projects -- particularly - # things like Django apps don't need this. But it's a good idea since it fully-exercises the - # pyproject.toml and makes that if you add things like console-scripts at some point that - # they'll be installed and working. - - run: poetry install --no-interaction - - # And finally run tests. I'm using pytest and all my pytest config is in my `pyproject.toml` - # so this line is super-simple. But it could be as complex as you need. 
-      - run: |
-          git config --global user.name "${GITHUB_ACTOR}"
-          git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com"
-          poetry run pytest
-
diff --git a/.lib/git-fleximod/.pre-commit-config.yaml b/.lib/git-fleximod/.pre-commit-config.yaml
deleted file mode 100644
index 2f6089da72..0000000000
--- a/.lib/git-fleximod/.pre-commit-config.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-exclude: ^utils/.*$
-
-repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.0.1
-    hooks:
-      - id: end-of-file-fixer
-      - id: trailing-whitespace
-  - repo: https://github.com/psf/black
-    rev: 22.3.0
-    hooks:
-      - id: black
-  - repo: https://github.com/PyCQA/pylint
-    rev: v2.11.1
-    hooks:
-      - id: pylint
-        args:
-          - --disable=I,C,R,logging-not-lazy,wildcard-import,unused-wildcard-import,fixme,broad-except,bare-except,eval-used,exec-used,global-statement,logging-format-interpolation,no-name-in-module,arguments-renamed,unspecified-encoding,protected-access,import-error,no-member
diff --git a/.lib/git-fleximod/License b/.lib/git-fleximod/License
deleted file mode 100644
index 2c6fe768c2..0000000000
--- a/.lib/git-fleximod/License
+++ /dev/null
@@ -1,20 +0,0 @@
-Copyright 2024 National Center for Atmospheric Research (NCAR)
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-“Software”), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.lib/git-fleximod/README.md b/.lib/git-fleximod/README.md
deleted file mode 100644
index d1ef632f28..0000000000
--- a/.lib/git-fleximod/README.md
+++ /dev/null
@@ -1,110 +0,0 @@
-# git-fleximod
-
-Flexible, Enhanced Submodule Management for Git
-
-## Overview
-
-Git-fleximod is a Python-based tool that extends Git's submodule and sparse checkout capabilities, offering additional features for managing submodules in a more flexible and efficient way.
-
-## Installation
-
-#TODO Install using pip:
-# pip install git-fleximod
-  If you choose to locate git-fleximod in your path, you can access it via the command: git fleximod
-
-## Usage
-
-  Basic Usage:
-    git fleximod [options]
-  Available Commands:
-    status: Display the status of submodules.
-    update: Update submodules to the tag indicated in the .gitmodules variable fxtag.
-    test: Make sure that fxtags and submodule hashes are consistent,
-          make sure that official urls (as defined by fxDONOTUSEurl) are set,
-          make sure that fxtags are defined for all submodules.
-  Additional Options:
-    See git fleximod --help for more details.
-
-## Supported .gitmodules Variables
-
-  fxtag: Specify a specific tag or branch to checkout for a submodule.
-  fxrequired: Mark a submodule's checkout behavior, with allowed values:
-    - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module).
-    - ToplevelOptional: Top-level and optional (checked out with the --optional flag if this is the Toplevel module).
-    - AlwaysRequired: Always required (always checked out).
-    - AlwaysOptional: Always optional (checked out with the --optional flag).
-  fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths.
-  fxDONOTUSEurl: This is the url used in the test subcommand to assure that protected branches do not point to forks.
-  **NOTE**: the fxDONOTUSEurl variable is only used to identify the official project repository and should not be
-  changed by users. Use the url variable to change to a fork if desired.
-
-## Sparse Checkouts
-
-  To enable sparse checkout for a submodule, set the fxsparse variable
-  in the .gitmodules file to the path of a file containing the desired
-  sparse checkout paths. Git-fleximod will automatically configure
-  sparse checkout based on this file when applicable commands are run.
-  See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set)
-  for details on the format of this file.
-
-## Tests
-
-  The git fleximod test action is designed to be used by, for example, GitHub workflows
-  to assure that protected branches are consistent with respect to submodule hashes and fleximod fxtags.
-
-## Examples
-
-Here are some common usage examples:
-
-Update all submodules, including optional ones:
-```bash
-  git fleximod update --optional
-```
-
-Updating a specific submodule to the fxtag indicated in .gitmodules:
-
-```bash
-  git fleximod update submodule-name
-```
-Example .gitmodules entry:
-```ini
-  [submodule "cosp2"]
-    path = src/physics/cosp2/src
-    url = https://github.com/CFMIP/COSPv2.0
-    fxsparse = ../.cosp_sparse_checkout
-    fxrequired = AlwaysRequired
-    fxtag = v2.1.4cesm
-```
-Explanation:
-
-This entry indicates that the submodule named cosp2 at tag v2.1.4cesm
-should be checked out into the directory src/physics/cosp2/src
-relative to the .gitmodules directory. It should be checked out from
-the URL https://github.com/CFMIP/COSPv2.0 and use sparse checkout as
-described in the file ../.cosp_sparse_checkout relative to the path
-directory. It should be checked out anytime this .gitmodules entry is
-read.
-
-Additional example:
-```ini
-  [submodule "cime"]
-    path = cime
-    url = https://github.com/jedwards4b/cime
-    fxrequired = ToplevelRequired
-    fxtag = cime6.0.198_rme01
-```
-
-Explanation:
-
-This entry indicates that the submodule cime should be checked out
-into a directory named cime at tag cime6.0.198_rme01 from the URL
-https://github.com/jedwards4b/cime. This should only be done if
-the .gitmodules file is at the top level of the repository clone.
-
-## Contributing
-
-We welcome contributions! Please see the CONTRIBUTING.md file for guidelines.
-
-## License
-
-Git-fleximod is released under the MIT License.
diff --git a/.lib/git-fleximod/doc/Makefile b/.lib/git-fleximod/doc/Makefile
deleted file mode 100644
index d4bb2cbb9e..0000000000
--- a/.lib/git-fleximod/doc/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS    ?=
-SPHINXBUILD   ?= sphinx-build
-SOURCEDIR     = .
-BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/.lib/git-fleximod/doc/conf.py b/.lib/git-fleximod/doc/conf.py deleted file mode 100644 index 423099eec9..0000000000 --- a/.lib/git-fleximod/doc/conf.py +++ /dev/null @@ -1,26 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# For the full list of built-in configuration values, see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Project information ----------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information - -project = "git-fleximod" -author = "Jim Edwards " -release = "0.4.0" - -# -- General configuration --------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration - -extensions = ["sphinx_argparse_cli"] - -templates_path = ["_templates"] -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - - -# -- Options for HTML output ------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output - -html_theme = "alabaster" -html_static_path = ["_static"] diff --git a/.lib/git-fleximod/doc/index.rst b/.lib/git-fleximod/doc/index.rst deleted file mode 100644 index 0f9c1a7f7e..0000000000 --- a/.lib/git-fleximod/doc/index.rst +++ /dev/null @@ -1,24 +0,0 @@ -.. git-fleximod documentation master file, created by - sphinx-quickstart on Sat Feb 3 12:02:22 2024. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to git-fleximod's documentation! -======================================== - -.. toctree:: - :maxdepth: 2 - :caption: Contents: -.. module:: sphinxcontrib.autoprogram -.. sphinx_argparse_cli:: - :module: git_fleximod.cli - :func: get_parser - :prog: git-fleximod - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/.lib/git-fleximod/doc/make.bat b/.lib/git-fleximod/doc/make.bat deleted file mode 100644 index 32bb24529f..0000000000 --- a/.lib/git-fleximod/doc/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=. -set BUILDDIR=_build - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. 
- echo.If you don't have Sphinx installed, grab it from - echo.https://www.sphinx-doc.org/ - exit /b 1 -) - -if "%1" == "" goto help - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/.lib/git-fleximod/escomp_install b/.lib/git-fleximod/escomp_install deleted file mode 100644 index ae782e72a4..0000000000 --- a/.lib/git-fleximod/escomp_install +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python -# updates git-fleximod in an ESCOMP model -# this script should be run from the model root directory, it expects -# git-fleximod to already be installed with the script in bin -# and the classes in lib/python/site-packages -import sys -import shutil -import os - -from glob import iglob - -fleximod_root = sys.argv[1] -fleximod_path = os.path.join(fleximod_root,"src","git-fleximod") -if os.path.isfile(fleximod_path): - with open(fleximod_path,"r") as f: - fleximod = f.readlines() - with open(os.path.join(".","bin","git-fleximod"),"w") as f: - for line in fleximod: - f.write(line) - if "import argparse" in line: - f.write('\nsys.path.append(os.path.join(os.path.dirname(__file__),"..","lib","python","site-packages"))\n\n') - - for file in iglob(os.path.join(fleximod_root, "src", "fleximod", "*.py")): - shutil.copy(file, - os.path.join("lib","python","site-packages","fleximod",os.path.basename(file))) diff --git a/.lib/git-fleximod/git_fleximod/__init__.py b/.lib/git-fleximod/git_fleximod/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/.lib/git-fleximod/git_fleximod/cli.py b/.lib/git-fleximod/git_fleximod/cli.py deleted file mode 100644 index 1fb959dad0..0000000000 --- a/.lib/git-fleximod/git_fleximod/cli.py +++ /dev/null @@ -1,119 +0,0 @@ -from pathlib import Path -import argparse - -__version__ = "0.7.4" - -def find_root_dir(filename=".git"): - d = Path.cwd() - root = Path(d.root) - while d != root: - attempt = d / filename - if attempt.is_dir(): - return attempt - d = d.parent - return None - - -def get_parser(): - description = """ - %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models - """ - parser = argparse.ArgumentParser( - description=description, formatter_class=argparse.RawDescriptionHelpFormatter - ) - - # - # user options - # - choices = ["update", "status", "test"] - parser.add_argument( - "action", - choices=choices, - default="update", - help=f"Subcommand of git-fleximod, choices are {choices[:-1]}", - ) - - parser.add_argument( - "components", - nargs="*", - help="Specific component(s) to checkout. By default, " - "all required submodules are checked out.", - ) - - parser.add_argument( - "-C", - "--path", - default=find_root_dir(), - help="Toplevel repository directory. Defaults to top git directory relative to current.", - ) - - parser.add_argument( - "-g", - "--gitmodules", - nargs="?", - default=".gitmodules", - help="The submodule description filename. " "Default: %(default)s.", - ) - - parser.add_argument( - "-x", - "--exclude", - nargs="*", - help="Component(s) listed in the gitmodules file which should be ignored.", - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - help="Override cautions and update or checkout over locally modified repository.", - ) - - parser.add_argument( - "-o", - "--optional", - action="store_true", - default=False, - help="By default only the required submodules " - "are checked out. 
This flag will also checkout the " - "optional submodules relative to the toplevel directory.", - ) - - parser.add_argument( - "-v", - "--verbose", - action="count", - default=0, - help="Output additional information to " - "the screen and log file. This flag can be " - "used up to two times, increasing the " - "verbosity level each time.", - ) - - parser.add_argument( - "-V", - "--version", - action="version", - version=f"%(prog)s {__version__}", - help="Print version and exit.", - ) - - # - # developer options - # - parser.add_argument( - "--backtrace", - action="store_true", - help="DEVELOPER: show exception backtraces as extra " "debugging output", - ) - - parser.add_argument( - "-d", - "--debug", - action="store_true", - default=False, - help="DEVELOPER: output additional debugging " - "information to the screen and log file.", - ) - - return parser diff --git a/.lib/git-fleximod/git_fleximod/git_fleximod.py b/.lib/git-fleximod/git_fleximod/git_fleximod.py deleted file mode 100755 index 103cc82a50..0000000000 --- a/.lib/git-fleximod/git_fleximod/git_fleximod.py +++ /dev/null @@ -1,601 +0,0 @@ -#!/usr/bin/env python -import sys - -MIN_PYTHON = (3, 7) -if sys.version_info < MIN_PYTHON: - sys.exit("Python %s.%s or later is required." % MIN_PYTHON) - -import os -import shutil -import logging -import textwrap -from git_fleximod import utils -from git_fleximod import cli -from git_fleximod.gitinterface import GitInterface -from git_fleximod.gitmodules import GitModules -from configparser import NoOptionError - -# logger variable is global -logger = None - - -def fxrequired_allowed_values(): - return ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"] - - -def commandline_arguments(args=None): - parser = cli.get_parser() - - if args: - options = parser.parse_args(args) - else: - options = parser.parse_args() - - # explicitly listing a component overrides the optional flag - if options.optional or options.components: - fxrequired = [ - "ToplevelRequired", - "ToplevelOptional", - "AlwaysRequired", - "AlwaysOptional", - ] - else: - fxrequired = ["ToplevelRequired", "AlwaysRequired"] - - action = options.action - if not action: - action = "update" - handlers = [logging.StreamHandler()] - - if options.debug: - try: - open("fleximod.log", "w") - except PermissionError: - sys.exit("ABORT: Could not write file fleximod.log") - level = logging.DEBUG - handlers.append(logging.FileHandler("fleximod.log")) - elif options.verbose: - level = logging.INFO - else: - level = logging.WARNING - # Configure the root logger - logging.basicConfig( - level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers - ) - - if hasattr(options, "version"): - exit() - - return ( - options.path, - options.gitmodules, - fxrequired, - options.components, - options.exclude, - options.force, - action, - ) - - -def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): - """ - This function performs a sparse checkout of a git submodule. It does so by first creating the .git/info/sparse-checkout fileq - in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again. - Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important - because the submodule may have a large number of files and checking out the entire submodule and then removing it would be time - and disk space consuming. 
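The ordering the docstring above describes, writing `.git/info/sparse-checkout` before any fetch or checkout, is the whole point of this function. As a minimal, self-contained sketch of that ordering (hypothetical `repo_dir`, `url`, `patterns`, and `tag`; plain `subprocess` in place of the `GitInterface` wrapper the real code uses):

```python
import os
import subprocess

def sparse_checkout_sketch(repo_dir, url, patterns, tag):
    """Sketch: configure sparse checkout *before* fetching, so files
    outside `patterns` are never materialized in the working tree."""
    os.makedirs(repo_dir, exist_ok=True)
    subprocess.run(["git", "init"], cwd=repo_dir, check=True)
    subprocess.run(["git", "config", "core.sparseCheckout", "true"],
                   cwd=repo_dir, check=True)
    # Write the sparse patterns first; this is what limits the checkout.
    sparse_file = os.path.join(repo_dir, ".git", "info", "sparse-checkout")
    with open(sparse_file, "w") as f:
        f.write("\n".join(patterns) + "\n")
    subprocess.run(["git", "remote", "add", "origin", url], cwd=repo_dir, check=True)
    subprocess.run(["git", "fetch", "origin", "--tags"], cwd=repo_dir, check=True)
    subprocess.run(["git", "checkout", tag], cwd=repo_dir, check=True)
```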
- - Parameters: - root_dir (str): The root directory for the git operation. - name (str): The name of the submodule. - url (str): The URL of the submodule. - path (str): The path to the submodule. - sparsefile (str): The sparse file for the submodule. - tag (str, optional): The tag to checkout. Defaults to "master". - - Returns: - None - """ - logger.info("Called sparse_checkout for {}".format(name)) - rgit = GitInterface(root_dir, logger) - superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") - if superroot: - gitroot = superroot.strip() - else: - gitroot = root_dir.strip() - assert os.path.isdir(os.path.join(gitroot, ".git")) - # first create the module directory - if not os.path.isdir(os.path.join(root_dir, path)): - os.makedirs(os.path.join(root_dir, path)) - - # initialize a new git repo and set the sparse checkout flag - sprep_repo = os.path.join(root_dir, path) - sprepo_git = GitInterface(sprep_repo, logger) - if os.path.exists(os.path.join(sprep_repo, ".git")): - try: - logger.info("Submodule {} found".format(name)) - chk = sprepo_git.config_get_value("core", "sparseCheckout") - if chk == "true": - logger.info("Sparse submodule {} already checked out".format(name)) - return - except NoOptionError: - logger.debug("Sparse submodule {} not present".format(name)) - except Exception as e: - utils.fatal_error("Unexpected error {} occured.".format(e)) - - sprepo_git.config_set_value("core", "sparseCheckout", "true") - - # set the repository remote - - logger.info("Setting remote origin in {}/{}".format(root_dir, path)) - status = sprepo_git.git_operation("remote", "-v") - if url not in status: - sprepo_git.git_operation("remote", "add", "origin", url) - - topgit = os.path.join(gitroot, ".git") - - if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")): - with open(os.path.join(root_dir, ".git")) as f: - gitpath = os.path.relpath( - os.path.join(root_dir, f.read().split()[1]), - start=os.path.join(root_dir, path), - ) - topgit = os.path.join(gitpath, "modules") - else: - topgit = os.path.relpath( - os.path.join(root_dir, ".git", "modules"), - start=os.path.join(root_dir, path), - ) - - with utils.pushd(sprep_repo): - if not os.path.isdir(topgit): - os.makedirs(topgit) - topgit += os.sep + name - - if os.path.isdir(os.path.join(root_dir, path, ".git")): - with utils.pushd(sprep_repo): - shutil.move(".git", topgit) - with open(".git", "w") as f: - f.write("gitdir: " + os.path.relpath(topgit)) - # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo))) - gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) - if os.path.isfile(gitsparse): - logger.warning( - "submodule {} is already initialized {}".format(name, topgit) - ) - return - - with utils.pushd(sprep_repo): - shutil.copy(sparsefile, gitsparse) - - # Finally checkout the repo - sprepo_git.git_operation("fetch", "origin", "--tags") - sprepo_git.git_operation("checkout", tag) - - print(f"Successfully checked out {name:>20} at {tag}") - rgit.config_set_value(f'submodule "{name}"', "active", "true") - rgit.config_set_value(f'submodule "{name}"', "url", url) - - -def single_submodule_checkout( - root, name, path, url=None, tag=None, force=False, optional=False -): - """ - This function checks out a single git submodule. - - Parameters: - root (str): The root directory for the git operation. - name (str): The name of the submodule. - path (str): The path to the submodule. - url (str, optional): The URL of the submodule. Defaults to None. 
- tag (str, optional): The tag to checkout. Defaults to None. - force (bool, optional): If set to True, forces the checkout operation. Defaults to False. - optional (bool, optional): If set to True, the submodule is considered optional. Defaults to False. - - Returns: - None - """ - # function implementation... - git = GitInterface(root, logger) - repodir = os.path.join(root, path) - logger.info("Checkout {} into {}/{}".format(name, root, path)) - # if url is provided update to the new url - tmpurl = None - repo_exists = False - if os.path.exists(os.path.join(repodir, ".git")): - logger.info("Submodule {} already checked out".format(name)) - repo_exists = True - # Look for a .gitmodules file in the newly checkedout repo - if not repo_exists and url: - # ssh urls cause problems for those who dont have git accounts with ssh keys defined - # but cime has one since e3sm prefers ssh to https, because the .gitmodules file was - # opened with a GitModules object we don't need to worry about restoring the file here - # it will be done by the GitModules class - if url.startswith("git@"): - tmpurl = url - url = url.replace("git@github.com:", "https://github.com/") - git.git_operation("clone", url, path) - smgit = GitInterface(repodir, logger) - if not tag: - tag = smgit.git_operation("describe", "--tags", "--always").rstrip() - smgit.git_operation("checkout", tag) - # Now need to move the .git dir to the submodule location - rootdotgit = os.path.join(root, ".git") - if os.path.isfile(rootdotgit): - with open(rootdotgit) as f: - line = f.readline() - if line.startswith("gitdir: "): - rootdotgit = line[8:].rstrip() - - newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name)) - if os.path.exists(newpath): - shutil.rmtree(os.path.join(repodir, ".git")) - else: - shutil.move(os.path.join(repodir, ".git"), newpath) - - with open(os.path.join(repodir, ".git"), "w") as f: - f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) - - if not os.path.exists(repodir): - parent = os.path.dirname(repodir) - if not os.path.isdir(parent): - os.makedirs(parent) - git.git_operation("submodule", "add", "--name", name, "--", url, path) - - if not repo_exists or not tmpurl: - git.git_operation("submodule", "update", "--init", "--", path) - - if os.path.exists(os.path.join(repodir, ".gitmodules")): - # recursively handle this checkout - print(f"Recursively checking out submodules of {name}") - gitmodules = GitModules(logger, confpath=repodir) - requiredlist = ["AlwaysRequired"] - if optional: - requiredlist.append("AlwaysOptional") - submodules_checkout(gitmodules, repodir, requiredlist, force=force) - if not os.path.exists(os.path.join(repodir, ".git")): - utils.fatal_error( - f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}" - ) - - if tmpurl: - print(git.git_operation("restore", ".gitmodules")) - - return - - -def submodules_status(gitmodules, root_dir, toplevel=False): - testfails = 0 - localmods = 0 - needsupdate = 0 - for name in gitmodules.sections(): - path = gitmodules.get(name, "path") - tag = gitmodules.get(name, "fxtag") - required = gitmodules.get(name, "fxrequired") - level = required and "Toplevel" in required - if not path: - utils.fatal_error("No path found in .gitmodules for {}".format(name)) - newpath = os.path.join(root_dir, path) - logger.debug("newpath is {}".format(newpath)) - if not os.path.exists(os.path.join(newpath, ".git")): - rootgit = GitInterface(root_dir, logger) - # submodule commands use path, not name - url = gitmodules.get(name, "url") - url = 
url.replace("git@github.com:", "https://github.com/") - tags = rootgit.git_operation("ls-remote", "--tags", url) - atag = None - needsupdate += 1 - if not toplevel and level: - continue - for htag in tags.split("\n"): - if tag and tag in htag: - atag = (htag.split()[1])[10:] - break - if tag and tag == atag: - print(f"e {name:>20} not checked out, aligned at tag {tag}") - elif tag: - ahash = rootgit.git_operation( - "submodule", "status", "{}".format(path) - ).rstrip() - ahash = ahash[1 : len(tag) + 1] - if tag == ahash: - print(f"e {name:>20} not checked out, aligned at hash {ahash}") - else: - print( - f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}" - ) - testfails += 1 - else: - print(f"e {name:>20} has no fxtag defined in .gitmodules") - testfails += 1 - else: - with utils.pushd(newpath): - git = GitInterface(newpath, logger) - atag = git.git_operation("describe", "--tags", "--always").rstrip() - ahash = git.git_operation("status").partition("\n")[0].split()[-1] - if tag and atag == tag: - print(f" {name:>20} at tag {tag}") - elif tag and ahash[: len(tag)] == tag: - print(f" {name:>20} at hash {ahash}") - elif atag == ahash: - print(f" {name:>20} at hash {ahash}") - elif tag: - print( - f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}" - ) - testfails += 1 - needsupdate += 1 - else: - print( - f"e {name:>20} has no fxtag defined in .gitmodules, module at {atag}" - ) - testfails += 1 - - status = git.git_operation("status", "--ignore-submodules") - if "nothing to commit" not in status: - localmods = localmods + 1 - print("M" + textwrap.indent(status, " ")) - - return testfails, localmods, needsupdate - - -def submodules_update(gitmodules, root_dir, requiredlist, force): - _, localmods, needsupdate = submodules_status(gitmodules, root_dir) - - if localmods and not force: - local_mods_output() - return - if needsupdate == 0: - return - - for name in gitmodules.sections(): - fxtag = gitmodules.get(name, "fxtag") - path = gitmodules.get(name, "path") - url = gitmodules.get(name, "url") - logger.info( - "name={} path={} url={} fxtag={} requiredlist={}".format( - name, os.path.join(root_dir, path), url, fxtag, requiredlist - ) - ) - # if not os.path.exists(os.path.join(root_dir,path, ".git")): - fxrequired = gitmodules.get(name, "fxrequired") - assert fxrequired in fxrequired_allowed_values() - rgit = GitInterface(root_dir, logger) - superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") - - fxsparse = gitmodules.get(name, "fxsparse") - - if ( - fxrequired - and (superroot and "Toplevel" in fxrequired) - or fxrequired not in requiredlist - ): - if "ToplevelOptional" == fxrequired: - print("Skipping optional component {}".format(name)) - continue - if fxsparse: - logger.debug( - "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( - root_dir, name, url, path, fxsparse, fxtag - ) - ) - submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) - else: - logger.info( - "Calling submodule_checkout({},{},{},{})".format( - root_dir, name, path, url - ) - ) - - single_submodule_checkout( - root_dir, - name, - path, - url=url, - tag=fxtag, - force=force, - optional=("AlwaysOptional" in requiredlist), - ) - - if os.path.exists(os.path.join(path, ".git")): - submoddir = os.path.join(root_dir, path) - with utils.pushd(submoddir): - git = GitInterface(submoddir, logger) - # first make sure the url is correct - upstream = git.git_operation("ls-remote", "--get-url").rstrip() - newremote = "origin" - if 
upstream != url: - # TODO - this needs to be a unique name - remotes = git.git_operation("remote", "-v") - if url in remotes: - for line in remotes: - if url in line and "fetch" in line: - newremote = line.split()[0] - break - else: - i = 0 - while newremote in remotes: - i = i + 1 - newremote = f"newremote.{i:02d}" - git.git_operation("remote", "add", newremote, url) - - tags = git.git_operation("tag", "-l") - if fxtag and fxtag not in tags: - git.git_operation("fetch", newremote, "--tags") - atag = git.git_operation("describe", "--tags", "--always").rstrip() - if fxtag and fxtag != atag: - try: - git.git_operation("checkout", fxtag) - print(f"{name:>20} updated to {fxtag}") - except Exception as error: - print(error) - elif not fxtag: - print(f"No fxtag found for submodule {name:>20}") - else: - print(f"{name:>20} up to date.") - - -def local_mods_output(): - text = """\ - The submodules labeled with 'M' above are not in a clean state. - The following are options for how to proceed: - (1) Go into each submodule which is not in a clean state and issue a 'git status' - Either revert or commit your changes so that the submodule is in a clean state. - (2) use the --force option to git-fleximod - (3) you can name the particular submodules to update using the git-fleximod command line - (4) As a last resort you can remove the submodule (via 'rm -fr [directory]') - then rerun git-fleximod update. -""" - print(text) - - -# checkout is done by update if required so this function may be depricated -def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): - """ - This function checks out all git submodules based on the provided parameters. - - Parameters: - gitmodules (ConfigParser): The gitmodules configuration. - root_dir (str): The root directory for the git operation. - requiredlist (list): The list of required modules. - force (bool, optional): If set to True, forces the checkout operation. Defaults to False. - - Returns: - None - """ - # function implementation... - print("") - _, localmods, needsupdate = submodules_status(gitmodules, root_dir) - if localmods and not force: - local_mods_output() - return - if not needsupdate: - return - for name in gitmodules.sections(): - fxrequired = gitmodules.get(name, "fxrequired") - fxsparse = gitmodules.get(name, "fxsparse") - fxtag = gitmodules.get(name, "fxtag") - path = gitmodules.get(name, "path") - url = gitmodules.get(name, "url") - if fxrequired and fxrequired not in requiredlist: - if "Optional" in fxrequired: - print("Skipping optional component {}".format(name)) - continue - - if fxsparse: - logger.debug( - "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( - root_dir, name, url, path, fxsparse, fxtag - ) - ) - submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) - else: - logger.debug( - "Calling submodule_checkout({},{},{})".format(root_dir, name, path) - ) - single_submodule_checkout( - root_dir, - name, - path, - url=url, - tag=fxtag, - force=force, - optional="AlwaysOptional" in requiredlist, - ) - - -def submodules_test(gitmodules, root_dir): - """ - This function tests the git submodules based on the provided parameters. - - It first checks that fxtags are present and in sync with submodule hashes. - Then it ensures that urls are consistent with fxurls (not forks and not ssh) - and that sparse checkout files exist. - - Parameters: - gitmodules (ConfigParser): The gitmodules configuration. - root_dir (str): The root directory for the git operation. 
- - Returns: - int: The number of test failures. - """ - # First check that fxtags are present and in sync with submodule hashes - testfails, localmods, needsupdate = submodules_status(gitmodules, root_dir) - print("") - # Then make sure that urls are consistant with fxurls (not forks and not ssh) - # and that sparse checkout files exist - for name in gitmodules.sections(): - url = gitmodules.get(name, "url") - fxurl = gitmodules.get(name, "fxDONOTMODIFYurl") - fxsparse = gitmodules.get(name, "fxsparse") - path = gitmodules.get(name, "path") - fxurl = fxurl[:-4] if fxurl.endswith(".git") else fxurl - url = url[:-4] if url.endswith(".git") else url - if not fxurl or url.lower() != fxurl.lower(): - print(f"{name:>20} url {url} not in sync with required {fxurl}") - testfails += 1 - if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): - print(f"{name:>20} sparse checkout file {fxsparse} not found") - testfails += 1 - return testfails + localmods + needsupdate - - -def main(): - ( - root_dir, - file_name, - fxrequired, - includelist, - excludelist, - force, - action, - ) = commandline_arguments() - # Get a logger for the package - global logger - logger = logging.getLogger(__name__) - - logger.info("action is {}".format(action)) - - if not os.path.isfile(os.path.join(root_dir, file_name)): - file_path = utils.find_upwards(root_dir, file_name) - - if file_path is None: - utils.fatal_error( - "No {} found in {} or any of it's parents".format(file_name, root_dir) - ) - - root_dir = os.path.dirname(file_path) - logger.info( - "root_dir is {} includelist={} excludelist={}".format( - root_dir, includelist, excludelist - ) - ) - gitmodules = GitModules( - logger, - confpath=root_dir, - conffile=file_name, - includelist=includelist, - excludelist=excludelist, - ) - if not gitmodules.sections(): - sys.exit("No submodule components found") - retval = 0 - if action == "update": - submodules_update(gitmodules, root_dir, fxrequired, force) - elif action == "status": - tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True) - if tfails + lmods + updates > 0: - print( - f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n" - ) - if lmods > 0: - local_mods_output() - elif action == "test": - retval = submodules_test(gitmodules, root_dir) - else: - utils.fatal_error(f"unrecognized action request {action}") - return retval - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/.lib/git-fleximod/git_fleximod/gitinterface.py b/.lib/git-fleximod/git_fleximod/gitinterface.py deleted file mode 100644 index 93ae38ecde..0000000000 --- a/.lib/git-fleximod/git_fleximod/gitinterface.py +++ /dev/null @@ -1,79 +0,0 @@ -import os -import sys -from . 
import utils -from pathlib import Path - -class GitInterface: - def __init__(self, repo_path, logger): - logger.debug("Initialize GitInterface for {}".format(repo_path)) - if isinstance(repo_path, str): - self.repo_path = Path(repo_path).resolve() - elif isinstance(repo_path, Path): - self.repo_path = repo_path.resolve() - else: - raise TypeError("repo_path must be a str or Path object") - self.logger = logger - try: - import git - - self._use_module = True - try: - self.repo = git.Repo(str(self.repo_path)) # Initialize GitPython repo - except git.exc.InvalidGitRepositoryError: - self.git = git - self._init_git_repo() - msg = "Using GitPython interface to git" - except ImportError: - self._use_module = False - if not (self.repo_path / ".git").exists(): - self._init_git_repo() - msg = "Using shell interface to git" - self.logger.info(msg) - - def _git_command(self, operation, *args): - self.logger.info(operation) - if self._use_module and operation != "submodule": - try: - return getattr(self.repo.git, operation)(*args) - except Exception as e: - sys.exit(e) - else: - return ["git", "-C", str(self.repo_path), operation] + list(args) - - def _init_git_repo(self): - if self._use_module: - self.repo = self.git.Repo.init(str(self.repo_path)) - else: - command = ("git", "-C", str(self.repo_path), "init") - utils.execute_subprocess(command) - - # pylint: disable=unused-argument - def git_operation(self, operation, *args, **kwargs): - command = self._git_command(operation, *args) - self.logger.info(command) - if isinstance(command, list): - try: - return utils.execute_subprocess(command, output_to_caller=True) - except Exception as e: - sys.exit(e) - else: - return command - - def config_get_value(self, section, name): - if self._use_module: - config = self.repo.config_reader() - return config.get_value(section, name) - else: - cmd = ("git", "-C", str(self.repo_path), "config", "--get", f"{section}.{name}") - output = utils.execute_subprocess(cmd, output_to_caller=True) - return output.strip() - - def config_set_value(self, section, name, value): - if self._use_module: - with self.repo.config_writer() as writer: - writer.set_value(section, name, value) - writer.release() # Ensure changes are saved - else: - cmd = ("git", "-C", str(self.repo_path), "config", f"{section}.{name}", value) - self.logger.info(cmd) - utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/.lib/git-fleximod/git_fleximod/gitmodules.py b/.lib/git-fleximod/git_fleximod/gitmodules.py deleted file mode 100644 index 68c82d066f..0000000000 --- a/.lib/git-fleximod/git_fleximod/gitmodules.py +++ /dev/null @@ -1,97 +0,0 @@ -import shutil -from pathlib import Path -from configparser import RawConfigParser, ConfigParser -from .lstripreader import LstripReader - - -class GitModules(RawConfigParser): - def __init__( - self, - logger, - confpath=Path.cwd(), - conffile=".gitmodules", - includelist=None, - excludelist=None, - ): - """ - confpath: Path to the directory containing the .gitmodules file (defaults to the current working directory). - conffile: Name of the configuration file (defaults to .gitmodules). - includelist: Optional list of submodules to include. - excludelist: Optional list of submodules to exclude. 
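For orientation, a hedged sketch of how this class is typically driven, based only on the methods defined below (the repository path is hypothetical):

```python
# Usage sketch for GitModules, assuming a checkout at /path/to/repo.
import logging
from git_fleximod.gitmodules import GitModules

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("fleximod-demo")

modules = GitModules(logger, confpath="/path/to/repo")  # hypothetical path
for name in modules.sections():        # names with the 'submodule "..."' wrapper stripped
    path = modules.get(name, "path")
    fxtag = modules.get(name, "fxtag")  # falls back to None when the option is unset
    print(f"{name}: path={path} fxtag={fxtag}")
```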
- """ - self.logger = logger - self.logger.debug( - "Creating a GitModules object {} {} {} {}".format( - confpath, conffile, includelist, excludelist - ) - ) - super().__init__() - self.conf_file = (Path(confpath) / Path(conffile)) - if self.conf_file.exists(): - self.read_file(LstripReader(str(self.conf_file)), source=conffile) - self.includelist = includelist - self.excludelist = excludelist - self.isdirty = False - - def reload(self): - self.clear() - if self.conf_file.exists(): - self.read_file(LstripReader(str(self.conf_file)), source=self.conf_file) - - - def set(self, name, option, value): - """ - Sets a configuration value for a specific submodule: - Ensures the appropriate section exists for the submodule. - Calls the parent class's set method to store the value. - """ - self.isdirty = True - self.logger.debug("set called {} {} {}".format(name, option, value)) - section = f'submodule "{name}"' - if not self.has_section(section): - self.add_section(section) - super().set(section, option, str(value)) - - # pylint: disable=redefined-builtin, arguments-differ - def get(self, name, option, raw=False, vars=None, fallback=None): - """ - Retrieves a configuration value for a specific submodule: - Uses the parent class's get method to access the value. - Handles potential errors if the section or option doesn't exist. - """ - self.logger.debug("get called {} {}".format(name, option)) - section = f'submodule "{name}"' - try: - return ConfigParser.get( - self, section, option, raw=raw, vars=vars, fallback=fallback - ) - except ConfigParser.NoOptionError: - return None - - def save(self): - if self.isdirty: - self.logger.info("Writing {}".format(self.conf_file)) - with open(self.conf_file, "w") as fd: - self.write(fd) - self.isdirty = False - - def __del__(self): - self.save() - - def sections(self): - """Strip the submodule part out of section and just use the name""" - self.logger.debug("calling GitModules sections iterator") - names = [] - for section in ConfigParser.sections(self): - name = section[11:-1] - if self.includelist and name not in self.includelist: - continue - if self.excludelist and name in self.excludelist: - continue - names.append(name) - return names - - def items(self, name, raw=False, vars=None): - self.logger.debug("calling GitModules items for {}".format(name)) - section = f'submodule "{name}"' - return ConfigParser.items(section, raw=raw, vars=vars) diff --git a/.lib/git-fleximod/git_fleximod/lstripreader.py b/.lib/git-fleximod/git_fleximod/lstripreader.py deleted file mode 100644 index 01d5580ee8..0000000000 --- a/.lib/git-fleximod/git_fleximod/lstripreader.py +++ /dev/null @@ -1,43 +0,0 @@ -class LstripReader(object): - "LstripReader formats .gitmodules files to be acceptable for configparser" - - def __init__(self, filename): - with open(filename, "r") as infile: - lines = infile.readlines() - self._lines = list() - self._num_lines = len(lines) - self._index = 0 - for line in lines: - self._lines.append(line.lstrip()) - - def readlines(self): - """Return all the lines from this object's file""" - return self._lines - - def readline(self, size=-1): - """Format and return the next line or raise StopIteration""" - try: - line = self.next() - except StopIteration: - line = "" - - if (size > 0) and (len(line) < size): - return line[0:size] - - return line - - def __iter__(self): - """Begin an iteration""" - self._index = 0 - return self - - def next(self): - """Return the next line or raise StopIteration""" - if self._index >= self._num_lines: - raise StopIteration - 
- self._index = self._index + 1 - return self._lines[self._index - 1] - - def __next__(self): - return self.next() diff --git a/.lib/git-fleximod/git_fleximod/metoflexi.py b/.lib/git-fleximod/git_fleximod/metoflexi.py deleted file mode 100755 index cc347db2dd..0000000000 --- a/.lib/git-fleximod/git_fleximod/metoflexi.py +++ /dev/null @@ -1,236 +0,0 @@ -#!/usr/bin/env python -from configparser import ConfigParser -import sys -import shutil -from pathlib import Path -import argparse -import logging -from git_fleximod.gitinterface import GitInterface -from git_fleximod.gitmodules import GitModules -from git_fleximod import utils - -logger = None - -def find_root_dir(filename=".git"): - d = Path.cwd() - root = Path(d.root) - while d != root: - attempt = d / filename - if attempt.is_dir(): - return d - d = d.parent - return None - - -def get_parser(): - description = """ - %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models - """ - parser = argparse.ArgumentParser( - description=description, formatter_class=argparse.RawDescriptionHelpFormatter - ) - - parser.add_argument('-e', '--externals', nargs='?', - default='Externals.cfg', - help='The externals description filename. ' - 'Default: %(default)s.') - - parser.add_argument( - "-C", - "--path", - default=find_root_dir(), - help="Toplevel repository directory. Defaults to top git directory relative to current.", - ) - - parser.add_argument( - "-g", - "--gitmodules", - nargs="?", - default=".gitmodules", - help="The submodule description filename. " "Default: %(default)s.", - ) - parser.add_argument( - "-v", - "--verbose", - action="count", - default=0, - help="Output additional information to " - "the screen and log file. This flag can be " - "used up to two times, increasing the " - "verbosity level each time.", - ) - parser.add_argument( - "-d", - "--debug", - action="store_true", - default=False, - help="DEVELOPER: output additional debugging " - "information to the screen and log file.", - ) - - return parser - -def commandline_arguments(args=None): - parser = get_parser() - - options = parser.parse_args(args) - handlers = [logging.StreamHandler()] - - if options.debug: - try: - open("fleximod.log", "w") - except PermissionError: - sys.exit("ABORT: Could not write file fleximod.log") - level = logging.DEBUG - handlers.append(logging.FileHandler("fleximod.log")) - elif options.verbose: - level = logging.INFO - else: - level = logging.WARNING - # Configure the root logger - logging.basicConfig( - level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers - ) - - return( - options.path, - options.gitmodules, - options.externals - ) - -class ExternalRepoTranslator: - """ - Translates external repositories configured in an INI-style externals file. - """ - - def __init__(self, rootpath, gitmodules, externals): - self.rootpath = rootpath - if gitmodules: - self.gitmodules = GitModules(logger, confpath=rootpath) - self.externals = (rootpath / Path(externals)).resolve() - print(f"Translating {self.externals}") - self.git = GitInterface(rootpath, logger) - -# def __del__(self): -# if (self.rootpath / "save.gitignore"): - - - def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, protocol): - """ - Translates a single repository based on configuration details. - - Args: - rootpath (str): Root path of the main repository. - gitmodules (str): Path to the .gitmodules file. - tag (str): The tag to use for the external repository. 
- url (str): The URL of the external repository. - path (str): The relative path within the main repository for the external repository. - efile (str): The external file or file containing submodules. - hash_ (str): The commit hash to checkout (if applicable). - sparse (str): Boolean indicating whether to use sparse checkout (if applicable). - protocol (str): The protocol to use (e.g., 'git', 'http'). - """ - assert protocol != "svn", "SVN protocol is not currently supported" - print(f"Translating repository {section}") - if efile: - file_path = Path(path) / Path(efile) - newroot = (self.rootpath / file_path).parent.resolve() - if not newroot.exists(): - newroot.mkdir(parents=True) - logger.info("Newroot is {}".format(newroot)) - newt = ExternalRepoTranslator(newroot, ".gitmodules", efile) - newt.translate_repo() - if protocol == "externals_only": - if tag: - self.gitmodules.set(section, "fxtag", tag) - if hash_: - self.gitmodules.set(section, "fxtag", hash_) - - self.gitmodules.set(section, "fxDONOTUSEurl", url) - if sparse: - self.gitmodules.set(section, "fxsparse", sparse) - self.gitmodules.set(section, "fxrequired", "ToplevelRequired") - else: - newpath = (self.rootpath / Path(path)) - if newpath.exists(): - shutil.rmtree(newpath) - logger.info("Creating directory {}".format(newpath)) - newpath.mkdir(parents=True) - if tag: - logger.info("cloning {}".format(section)) - try: - self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path) - except: - self.git.git_operation("clone", url, path) - with utils.pushd(newpath): - ngit = GitInterface(newpath, logger) - ngit.git_operation("checkout", tag) - if hash_: - self.git.git_operation("clone", url, path) - git = GitInterface(newpath, logger) - git.git_operation("fetch", "origin") - git.git_operation("checkout", hash_) - if sparse: - print("setting as sparse submodule {}".format(section)) - sparsefile = (newpath / Path(sparse)) - newfile = (newpath / ".git" / "info" / "sparse-checkout") - print(f"sparsefile {sparsefile} newfile {newfile}") - shutil.copy(sparsefile, newfile) - - logger.info("adding submodule {}".format(section)) - self.gitmodules.save() - self.git.git_operation("submodule", "add", "-f", "--name", section, url, path) - self.git.git_operation("submodule","absorbgitdirs") - self.gitmodules.reload() - if tag: - self.gitmodules.set(section, "fxtag", tag) - if hash_: - self.gitmodules.set(section, "fxtag", hash_) - - self.gitmodules.set(section, "fxDONOTUSEurl", url) - if sparse: - self.gitmodules.set(section, "fxsparse", sparse) - self.gitmodules.set(section, "fxrequired", "ToplevelRequired") - - - def translate_repo(self): - """ - Translates external repositories defined within an external file. - - Args: - rootpath (str): Root path of the main repository. - gitmodules (str): Path to the .gitmodules file. - external_file (str): The path to the external file containing repository definitions. 
- """ - econfig = ConfigParser() - econfig.read((self.rootpath / Path(self.externals))) - - for section in econfig.sections(): - if section == "externals_description": - logger.info("skipping section {}".format(section)) - return - logger.info("Translating section {}".format(section)) - tag = econfig.get(section, "tag", raw=False, fallback=None) - url = econfig.get(section, "repo_url", raw=False, fallback=None) - path = econfig.get(section, "local_path", raw=False, fallback=None) - efile = econfig.get(section, "externals", raw=False, fallback=None) - hash_ = econfig.get(section, "hash", raw=False, fallback=None) - sparse = econfig.get(section, "sparse", raw=False, fallback=None) - protocol = econfig.get(section, "protocol", raw=False, fallback=None) - - self.translate_single_repo(section, tag, url, path, efile, hash_, sparse, protocol) - - - -def _main(): - rootpath, gitmodules, externals = commandline_arguments() - global logger - logger = logging.getLogger(__name__) - with utils.pushd(rootpath): - t = ExternalRepoTranslator(Path(rootpath), gitmodules, externals) - logger.info("Translating {}".format(rootpath)) - t.translate_repo() - - -if __name__ == "__main__": - sys.exit(_main()) diff --git a/.lib/git-fleximod/git_fleximod/utils.py b/.lib/git-fleximod/git_fleximod/utils.py deleted file mode 100644 index 7cc1de38cc..0000000000 --- a/.lib/git-fleximod/git_fleximod/utils.py +++ /dev/null @@ -1,365 +0,0 @@ -#!/usr/bin/env python3 -""" -Common public utilities for manic package - -""" - -import logging -import os -import subprocess -import sys -from threading import Timer -from pathlib import Path - -LOCAL_PATH_INDICATOR = "." -# --------------------------------------------------------------------- -# -# functions to massage text for output and other useful utilities -# -# --------------------------------------------------------------------- -from contextlib import contextmanager - - -@contextmanager -def pushd(new_dir): - """context for chdir. usage: with pushd(new_dir)""" - previous_dir = os.getcwd() - os.chdir(new_dir) - try: - yield - finally: - os.chdir(previous_dir) - - -def log_process_output(output): - """Log each line of process output at debug level so it can be - filtered if necessary. By default, output is a single string, and - logging.debug(output) will only put log info heading on the first - line. This makes it hard to filter with grep. - - """ - output = output.split("\n") - for line in output: - logging.debug(line) - - -def printlog(msg, **kwargs): - """Wrapper script around print to ensure that everything printed to - the screen also gets logged. - - """ - logging.info(msg) - if kwargs: - print(msg, **kwargs) - else: - print(msg) - sys.stdout.flush() - - -def find_upwards(root_dir, filename): - """Find a file in root dir or any of it's parents""" - d = Path(root_dir) - root = Path(d.root) - while d != root: - attempt = d / filename - if attempt.exists(): - return attempt - d = d.parent - return None - - -def last_n_lines(the_string, n_lines, truncation_message=None): - """Returns the last n lines of the given string - - Args: - the_string: str - n_lines: int - truncation_message: str, optional - - Returns a string containing the last n lines of the_string - - If truncation_message is provided, the returned string begins with - the given message if and only if the string is greater than n lines - to begin with. 
- """ - - lines = the_string.splitlines(True) - if len(lines) <= n_lines: - return_val = the_string - else: - lines_subset = lines[-n_lines:] - str_truncated = "".join(lines_subset) - if truncation_message: - str_truncated = truncation_message + "\n" + str_truncated - return_val = str_truncated - - return return_val - - -def indent_string(the_string, indent_level): - """Indents the given string by a given number of spaces - - Args: - the_string: str - indent_level: int - - Returns a new string that is the same as the_string, except that - each line is indented by 'indent_level' spaces. - - In python3, this can be done with textwrap.indent. - """ - - lines = the_string.splitlines(True) - padding = " " * indent_level - lines_indented = [padding + line for line in lines] - return "".join(lines_indented) - - -# --------------------------------------------------------------------- -# -# error handling -# -# --------------------------------------------------------------------- - - -def fatal_error(message): - """ - Error output function - """ - logging.error(message) - raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) - - -# --------------------------------------------------------------------- -# -# Data conversion / manipulation -# -# --------------------------------------------------------------------- -def str_to_bool(bool_str): - """Convert a sting representation of as boolean into a true boolean. - - Conversion should be case insensitive. - """ - value = None - str_lower = bool_str.lower() - if str_lower in ("true", "t"): - value = True - elif str_lower in ("false", "f"): - value = False - if value is None: - msg = ( - 'ERROR: invalid boolean string value "{0}". ' - 'Must be "true" or "false"'.format(bool_str) - ) - fatal_error(msg) - return value - - -REMOTE_PREFIXES = ["http://", "https://", "ssh://", "git@"] - - -def is_remote_url(url): - """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. - - """ - remote_url = False - for prefix in REMOTE_PREFIXES: - if url.startswith(prefix): - remote_url = True - return remote_url - - -def split_remote_url(url): - """check if the user provided a local file path or a - remote. If remote, try to strip off protocol info. - - """ - remote_url = is_remote_url(url) - if not remote_url: - return url - - for prefix in REMOTE_PREFIXES: - url = url.replace(prefix, "") - - if "@" in url: - url = url.split("@")[1] - - if ":" in url: - url = url.split(":")[1] - - return url - - -def expand_local_url(url, field): - """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. - - Note: local paths of LOCAL_PATH_INDICATOR have special meaning and - represent local copy only, don't work with the remotes. - - """ - remote_url = is_remote_url(url) - if not remote_url: - if url.strip() == LOCAL_PATH_INDICATOR: - pass - else: - url = os.path.expandvars(url) - url = os.path.expanduser(url) - if not os.path.isabs(url): - msg = ( - 'WARNING: Externals description for "{0}" contains a ' - "url that is not remote and does not expand to an " - "absolute path. 
Version control operations may " - "fail.\n\nurl={1}".format(field, url) - ) - printlog(msg) - else: - url = os.path.normpath(url) - return url - - -# --------------------------------------------------------------------- -# -# subprocess -# -# --------------------------------------------------------------------- - -# Give the user a helpful message if we detect that a command seems to -# be hanging. -_HANGING_SEC = 300 - - -def _hanging_msg(working_directory, command): - print( - """ - -Command '{command}' -from directory {working_directory} -has taken {hanging_sec} seconds. It may be hanging. - -The command will continue to run, but you may want to abort -manage_externals with ^C and investigate. A possible cause of hangs is -when svn or git require authentication to access a private -repository. On some systems, svn and git requests for authentication -information will not be displayed to the user. In this case, the program -will appear to hang. Ensure you can run svn and git manually and access -all repositories without entering your authentication information. - -""".format( - command=command, - working_directory=working_directory, - hanging_sec=_HANGING_SEC, - ) - ) - - -def execute_subprocess(commands, status_to_caller=False, output_to_caller=False): - """Wrapper around subprocess.check_output to handle common - exceptions. - - check_output runs a command with arguments and waits - for it to complete. - - check_output raises an exception on a nonzero return code. if - status_to_caller is true, execute_subprocess returns the subprocess - return code, otherwise execute_subprocess treats non-zero return - status as an error and raises an exception. - - """ - cwd = os.getcwd() - msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd) - logging.info(msg) - commands_str = " ".join(str(element) for element in commands) - logging.info(commands_str) - return_to_caller = status_to_caller or output_to_caller - status = -1 - output = "" - hanging_timer = Timer( - _HANGING_SEC, - _hanging_msg, - kwargs={"working_directory": cwd, "command": commands_str}, - ) - hanging_timer.start() - try: - output = subprocess.check_output( - commands, stderr=subprocess.STDOUT, universal_newlines=True - ) - log_process_output(output) - status = 0 - except OSError as error: - msg = failed_command_msg( - "Command execution failed. Does the executable exist?", commands - ) - logging.error(error) - fatal_error(msg) - except ValueError as error: - msg = failed_command_msg( - "DEV_ERROR: Invalid arguments trying to run subprocess", commands - ) - logging.error(error) - fatal_error(msg) - except subprocess.CalledProcessError as error: - # Only report the error if we are NOT returning to the - # caller. If we are returning to the caller, then it may be a - # simple status check. If returning, it is the callers - # responsibility determine if an error occurred and handle it - # appropriately. 
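The status_to_caller/output_to_caller contract described in the docstring above means callers choose between letting a failure raise and inspecting the result themselves. A hedged sketch of both call styles (the git commands are illustrative):

```python
# Default call: a nonzero exit status is treated as fatal and raises
# (via fatal_error) with the captured output attached.
execute_subprocess(["git", "--version"])

# Opt-in call: get the return code and output back and decide yourself.
status, output = execute_subprocess(
    ["git", "status", "--porcelain"],
    status_to_caller=True,
    output_to_caller=True,
)
if status != 0:
    print("git status failed:\n" + output)
```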
- if not return_to_caller: - msg_context = ( - "Process did not run successfully; " - "returned status {0}".format(error.returncode) - ) - msg = failed_command_msg(msg_context, commands, output=error.output) - logging.error(error) - logging.error(msg) - log_process_output(error.output) - fatal_error(msg) - status = error.returncode - finally: - hanging_timer.cancel() - - if status_to_caller and output_to_caller: - ret_value = (status, output) - elif status_to_caller: - ret_value = status - elif output_to_caller: - ret_value = output - else: - ret_value = None - - return ret_value - - -def failed_command_msg(msg_context, command, output=None): - """Template for consistent error messages from subprocess calls. - - If 'output' is given, it should provide the output from the failed - command - """ - - if output: - output_truncated = last_n_lines( - output, 20, truncation_message="[... Output truncated for brevity ...]" - ) - errmsg = ( - "Failed with output:\n" + indent_string(output_truncated, 4) + "\nERROR: " - ) - else: - errmsg = "" - - command_str = " ".join(command) - errmsg += """In directory - {cwd} -{context}: - {command} -""".format( - cwd=os.getcwd(), context=msg_context, command=command_str - ) - - if output: - errmsg += "See above for output from failed command.\n" - - return errmsg diff --git a/.lib/git-fleximod/poetry.lock b/.lib/git-fleximod/poetry.lock deleted file mode 100644 index b59ed3942c..0000000000 --- a/.lib/git-fleximod/poetry.lock +++ /dev/null @@ -1,693 +0,0 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. - -[[package]] -name = "alabaster" -version = "0.7.13" -description = "A configurable sidebar-enabled Sphinx theme" -optional = false -python-versions = ">=3.6" -files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, -] - -[[package]] -name = "babel" -version = "2.14.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, -] - -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "docutils" -version = "0.19" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, - {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "fsspec" -version = "2023.12.2" -description = "File-system specification" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, - {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, -] - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -devel = ["pytest", "pytest-cov"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -tqdm = ["tqdm"] - -[[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = 
"sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.41" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, - {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.0.1" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "jinja2" -version = "3.1.3" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = 
"sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = 
"sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pyfakefs" -version = "5.3.5" -description = "pyfakefs implements a fake file system that mocks the Python file system modules." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyfakefs-5.3.5-py3-none-any.whl", hash = "sha256:751015c1de94e1390128c82b48cdedc3f088bbdbe4bc713c79d02a27f0f61e69"}, - {file = "pyfakefs-5.3.5.tar.gz", hash = "sha256:7cdc500b35a214cb7a614e1940543acc6650e69a94ac76e30f33c9373bd9cf90"}, -] - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pytest" -version = "8.0.0" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, - {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "sphinx" -version = "5.3.0" -description = "Python documentation generator" -optional = false -python-versions = ">=3.6" -files = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, -] - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" -imagesize = ">=1.3" -importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" -snowballstemmer = ">=2.0" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.4" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, - {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.1" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, - {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "urllib3" -version = "2.2.0" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "wheel" -version = "0.42.0" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, - {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - -[[package]] -name = "zipp" -version = "3.17.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.8" -content-hash = "25ee2ae1d74abedde3a6637a60d4a3095ea5cf9731960875741bbc2ba84a475d" diff --git a/.lib/git-fleximod/pyproject.toml b/.lib/git-fleximod/pyproject.toml deleted file mode 100644 index 2484552e4f..0000000000 --- a/.lib/git-fleximod/pyproject.toml +++ /dev/null @@ -1,41 +0,0 @@ -[tool.poetry] -name = "git-fleximod" -version = "0.7.4" -description = "Extended support for git-submodule and git-sparse-checkout" -authors = ["Jim Edwards "] -maintainers = ["Jim Edwards "] -license = "MIT" -readme = "README.md" -homepage = "https://github.com/jedwards4b/git-fleximod" -keywords = ["git", "submodule", "sparse-checkout"] -packages = [ -{ include = "git_fleximod"}, -{ include = "doc"}, -] - -[tool.poetry.scripts] -git-fleximod = "git_fleximod.git_fleximod:main" -me2flexi = "git_fleximod.metoflexi:_main" -fsspec = "fsspec.fuse:main" - -[tool.poetry.dependencies] -python = "^3.8" -GitPython = "^3.1.0" -sphinx = "^5.0.0" -fsspec = "^2023.12.2" -wheel = "^0.42.0" -pytest = "^8.0.0" -pyfakefs = "^5.3.5" - -[tool.poetry.urls] -"Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" - -[tool.pytest.ini_options] -markers = [ - "skip_after_first: only run on first iteration" -] - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" - diff --git a/.lib/git-fleximod/tbump.toml b/.lib/git-fleximod/tbump.toml deleted file mode 100644 index d4b8eaee11..0000000000 --- a/.lib/git-fleximod/tbump.toml +++ /dev/null @@ -1,43 +0,0 @@ -# Uncomment this if your project is hosted on GitHub: -github_url = "https://github.com/jedwards4b/git-fleximod/" - -[version] -current = "0.7.4" - -# Example of a semver regexp. 
-# Make sure this matches current_version before -# using tbump -regex = ''' - (?P\d+) - \. - (?P\d+) - \. - (?P\d+) - ''' - -[git] -message_template = "Bump to {new_version}" -tag_template = "v{new_version}" - -# For each file to patch, add a [[file]] config -# section containing the path of the file, relative to the -# tbump.toml location. -[[file]] -src = "git_fleximod/cli.py" - -[[file]] -src = "pyproject.toml" - -# You can specify a list of commands to -# run after the files have been patched -# and before the git commit is made - -# [[before_commit]] -# name = "check changelog" -# cmd = "grep -q {new_version} Changelog.rst" - -# Or run some commands after the git tag and the branch -# have been pushed: -# [[after_push]] -# name = "publish" -# cmd = "./publish.sh" diff --git a/.lib/git-fleximod/tests/__init__.py b/.lib/git-fleximod/tests/__init__.py deleted file mode 100644 index 4d4c66c78e..0000000000 --- a/.lib/git-fleximod/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -import sys, os - -sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, "src")) diff --git a/.lib/git-fleximod/tests/conftest.py b/.lib/git-fleximod/tests/conftest.py deleted file mode 100644 index 942a0efb97..0000000000 --- a/.lib/git-fleximod/tests/conftest.py +++ /dev/null @@ -1,138 +0,0 @@ -import pytest -from git_fleximod.gitinterface import GitInterface -import os -import subprocess -import logging -from pathlib import Path - -@pytest.fixture(scope='session') -def logger(): - logging.basicConfig( - level=logging.INFO, format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.StreamHandler()] - ) - logger = logging.getLogger(__name__) - return logger - -all_repos=[ - {"subrepo_path": "modules/test", - "submodule_name": "test_submodule", - "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", - "status2" : "test_submodule at tag MPIserial_2.4.0", - "status3" : "test_submodule at tag MPIserial_2.4.0", - "status4" : "test_submodule at tag MPIserial_2.4.0", - "gitmodules_content" : """ - [submodule "test_submodule"] - path = modules/test - url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.4.0 - fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = ToplevelRequired -"""}, - {"subrepo_path": "modules/test_optional", - "submodule_name": "test_optional", - "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", - "status2" : "test_optional at tag MPIserial_2.4.0", - "status3" : "test_optional not checked out, aligned at tag MPIserial_2.4.0", - "status4" : "test_optional at tag MPIserial_2.4.0", - "gitmodules_content": """ - [submodule "test_optional"] - path = modules/test_optional - url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.4.0 - fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = ToplevelOptional -"""}, - {"subrepo_path": "modules/test_alwaysoptional", - "submodule_name": "test_alwaysoptional", - "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c", - "status2" : "test_alwaysoptional at hash e5cf35c", - "status3" : "test_alwaysoptional not checked out, out of sync at tag MPIserial_2.3.0", - "status4" : "test_alwaysoptional at hash e5cf35c", - "gitmodules_content": """ - [submodule "test_alwaysoptional"] - path = modules/test_alwaysoptional - url = https://github.com/ESMCI/mpi-serial.git - fxtag = e5cf35c - fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = 
AlwaysOptional -"""}, - {"subrepo_path": "modules/test_sparse", - "submodule_name": "test_sparse", - "status1" : "test_sparse at tag MPIserial_2.5.0", - "status2" : "test_sparse at tag MPIserial_2.5.0", - "status3" : "test_sparse at tag MPIserial_2.5.0", - "status4" : "test_sparse at tag MPIserial_2.5.0", - "gitmodules_content": """ - [submodule "test_sparse"] - path = modules/test_sparse - url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.5.0 - fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = AlwaysRequired - fxsparse = ../.sparse_file_list -"""}, -] -@pytest.fixture(params=all_repos) - -def shared_repos(request): - return request.param - -@pytest.fixture -def get_all_repos(): - return all_repos - -def write_sparse_checkout_file(fp): - sparse_content = """m4 -""" - fp.write_text(sparse_content) - -@pytest.fixture -def test_repo(shared_repos, tmp_path, logger): - subrepo_path = shared_repos["subrepo_path"] - submodule_name = shared_repos["submodule_name"] - test_dir = tmp_path / "testrepo" - test_dir.mkdir() - str_path = str(test_dir) - gitp = GitInterface(str_path, logger) - assert test_dir.joinpath(".git").is_dir() - (test_dir / "modules").mkdir() - if "sparse" in submodule_name: - (test_dir / subrepo_path).mkdir() - # Add the sparse checkout file - write_sparse_checkout_file(test_dir / "modules" / ".sparse_file_list") - gitp.git_operation("add","modules/.sparse_file_list") - else: - gitp = GitInterface(str(test_dir), logger) - gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) - assert test_dir.joinpath(".gitmodules").is_file() - gitp.git_operation("add",subrepo_path) - gitp.git_operation("commit","-a","-m","\"add submod\"") - test_dir2 = tmp_path / "testrepo2" - gitp.git_operation("clone",test_dir,test_dir2) - return test_dir2 - - -@pytest.fixture -def complex_repo(tmp_path, logger): - test_dir = tmp_path / "testcomplex" - test_dir.mkdir() - str_path = str(test_dir) - gitp = GitInterface(str_path, logger) - gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2") - gitp.git_operation("fetch", "origin", "main") - gitp.git_operation("checkout", "main") - return test_dir - -@pytest.fixture -def git_fleximod(): - def _run_fleximod(path, args, input=None): - cmd = ["git", "fleximod"] + args.split() - result = subprocess.run(cmd, cwd=path, input=input, - stdout=subprocess.PIPE, stderr=subprocess.PIPE, - text=True) - if result.returncode: - print(result.stdout) - print(result.stderr) - return result - return _run_fleximod - diff --git a/.lib/git-fleximod/tests/test_a_import.py b/.lib/git-fleximod/tests/test_a_import.py deleted file mode 100644 index d5ca878de5..0000000000 --- a/.lib/git-fleximod/tests/test_a_import.py +++ /dev/null @@ -1,8 +0,0 @@ -# pylint: disable=unused-import -from git_fleximod import cli -from git_fleximod import utils -from git_fleximod.gitinterface import GitInterface -from git_fleximod.gitmodules import GitModules - -def test_import(): - print("here") diff --git a/.lib/git-fleximod/tests/test_b_update.py b/.lib/git-fleximod/tests/test_b_update.py deleted file mode 100644 index 159f1cfae0..0000000000 --- a/.lib/git-fleximod/tests/test_b_update.py +++ /dev/null @@ -1,26 +0,0 @@ -import pytest -from pathlib import Path - -def test_basic_checkout(git_fleximod, test_repo, shared_repos): - # Prepare a simple .gitmodules - gm = shared_repos['gitmodules_content'] - file_path = (test_repo / ".gitmodules") - repo_name 
= shared_repos["submodule_name"] - repo_path = shared_repos["subrepo_path"] - - file_path.write_text(gm) - - # Run the command - result = git_fleximod(test_repo, f"update {repo_name}") - - # Assertions - assert result.returncode == 0 - assert Path(test_repo / repo_path).exists() # Did the submodule directory get created? - if "sparse" in repo_name: - assert Path(test_repo / f"{repo_path}/m4").exists() # Did the submodule sparse directory get created? - assert not Path(test_repo / f"{repo_path}/README").exists() # Did only the submodule sparse directory get created? - - status = git_fleximod(test_repo, f"status {repo_name}") - - assert shared_repos["status2"] in status.stdout - diff --git a/.lib/git-fleximod/tests/test_c_required.py b/.lib/git-fleximod/tests/test_c_required.py deleted file mode 100644 index 89ab8d294d..0000000000 --- a/.lib/git-fleximod/tests/test_c_required.py +++ /dev/null @@ -1,30 +0,0 @@ -import pytest -from pathlib import Path - -def test_required(git_fleximod, test_repo, shared_repos): - file_path = (test_repo / ".gitmodules") - gm = shared_repos["gitmodules_content"] - repo_name = shared_repos["submodule_name"] - if file_path.exists(): - with file_path.open("r") as f: - gitmodules_content = f.read() - # add the entry if it does not exist - if repo_name not in gitmodules_content: - file_path.write_text(gitmodules_content+gm) - # or if it is incomplete - elif gm not in gitmodules_content: - file_path.write_text(gm) - else: - file_path.write_text(gm) - result = git_fleximod(test_repo, "update") - assert result.returncode == 0 - status = git_fleximod(test_repo, f"status {repo_name}") - assert shared_repos["status3"] in status.stdout - status = git_fleximod(test_repo, f"update --optional") - assert result.returncode == 0 - status = git_fleximod(test_repo, f"status {repo_name}") - assert shared_repos["status4"] in status.stdout - status = git_fleximod(test_repo, f"update {repo_name}") - assert result.returncode == 0 - status = git_fleximod(test_repo, f"status {repo_name}") - assert shared_repos["status4"] in status.stdout diff --git a/.lib/git-fleximod/tests/test_d_complex.py b/.lib/git-fleximod/tests/test_d_complex.py deleted file mode 100644 index fdce516274..0000000000 --- a/.lib/git-fleximod/tests/test_d_complex.py +++ /dev/null @@ -1,67 +0,0 @@ -import pytest -from pathlib import Path -from git_fleximod.gitinterface import GitInterface - -def test_complex_checkout(git_fleximod, complex_repo, logger): - status = git_fleximod(complex_repo, "status") - assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) - assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout) - assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout) - assert("Complex not checked out, aligned at tag testtag01" in status.stdout) - assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) - - # This should checkout and update test_submodule and complex_sub - result = git_fleximod(complex_repo, "update") - assert result.returncode == 0 - - status = git_fleximod(complex_repo, "status") - assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) - assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) - assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) - assert("Complex at tag testtag01" in status.stdout) - - # now check the complex_sub - root = (complex_repo / "modules" / "complex") - assert(not (root / "libraries" / "gptl" / 
".git").exists()) - assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) - assert((root / "modules" / "mpi-serial" / ".git").exists()) - assert(not (root / "modules" / "mpi-serial2" / ".git").exists()) - assert((root / "modules" / "mpi-sparse" / ".git").exists()) - assert((root / "modules" / "mpi-sparse" / "m4").exists()) - assert(not (root / "modules" / "mpi-sparse" / "README").exists()) - - # update a single optional submodule - - result = git_fleximod(complex_repo, "update ToplevelOptional") - assert result.returncode == 0 - - status = git_fleximod(complex_repo, "status") - assert("ToplevelOptional at tag v5.3.2" in status.stdout) - assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) - assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) - assert("Complex at tag testtag01" in status.stdout) - assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) - - - # Finally update optional - result = git_fleximod(complex_repo, "update --optional") - assert result.returncode == 0 - - status = git_fleximod(complex_repo, "status") - assert("ToplevelOptional at tag v5.3.2" in status.stdout) - assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) - assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) - assert("Complex at tag testtag01" in status.stdout) - assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout) - - # now check the complex_sub - root = (complex_repo / "modules" / "complex" ) - assert(not (root / "libraries" / "gptl" / ".git").exists()) - assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) - assert((root / "modules" / "mpi-serial" / ".git").exists()) - assert((root / "modules" / "mpi-serial2" / ".git").exists()) - assert((root / "modules" / "mpi-sparse" / ".git").exists()) - assert((root / "modules" / "mpi-sparse" / "m4").exists()) - assert(not (root / "modules" / "mpi-sparse" / "README").exists()) - - From eb94e4d298564d52197d9cf632d73ffad74c5f61 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 31 May 2024 10:20:12 -0600 Subject: [PATCH 121/126] Squashed '.lib/git-fleximod/' content from commit a354b0528 git-subtree-dir: .lib/git-fleximod git-subtree-split: a354b0528228ac89ce103b3b42b2a203fe495ba5 --- .github/workflows/pre-commit | 13 + .github/workflows/pytest.yaml | 77 ++++ .pre-commit-config.yaml | 18 + License | 20 + README.md | 108 ++++++ doc/Makefile | 20 + doc/conf.py | 26 ++ doc/index.rst | 24 ++ doc/make.bat | 35 ++ escomp_install | 25 ++ git_fleximod/__init__.py | 0 git_fleximod/cli.py | 129 +++++++ git_fleximod/git_fleximod.py | 605 +++++++++++++++++++++++++++++ git_fleximod/gitinterface.py | 79 ++++ git_fleximod/gitmodules.py | 97 +++++ git_fleximod/lstripreader.py | 43 +++ git_fleximod/metoflexi.py | 236 ++++++++++++ git_fleximod/utils.py | 365 ++++++++++++++++++ poetry.lock | 693 ++++++++++++++++++++++++++++++++++ pyproject.toml | 41 ++ tbump.toml | 43 +++ tests/__init__.py | 3 + tests/conftest.py | 138 +++++++ tests/test_a_import.py | 8 + tests/test_b_update.py | 26 ++ tests/test_c_required.py | 30 ++ tests/test_d_complex.py | 67 ++++ 27 files changed, 2969 insertions(+) create mode 100644 .github/workflows/pre-commit create mode 100644 .github/workflows/pytest.yaml create mode 100644 .pre-commit-config.yaml create mode 100644 License create mode 100644 README.md create mode 100644 doc/Makefile create mode 100644 doc/conf.py create mode 100644 doc/index.rst create mode 100644 doc/make.bat create mode 100644 escomp_install create mode 100644 
git_fleximod/__init__.py create mode 100644 git_fleximod/cli.py create mode 100755 git_fleximod/git_fleximod.py create mode 100644 git_fleximod/gitinterface.py create mode 100644 git_fleximod/gitmodules.py create mode 100644 git_fleximod/lstripreader.py create mode 100755 git_fleximod/metoflexi.py create mode 100644 git_fleximod/utils.py create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 tbump.toml create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_a_import.py create mode 100644 tests/test_b_update.py create mode 100644 tests/test_c_required.py create mode 100644 tests/test_d_complex.py diff --git a/.github/workflows/pre-commit b/.github/workflows/pre-commit new file mode 100644 index 0000000000..1a6ad0082a --- /dev/null +++ b/.github/workflows/pre-commit @@ -0,0 +1,13 @@ +name: pre-commit +on: + pull_request: + push: + branches: [main] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/pytest.yaml b/.github/workflows/pytest.yaml new file mode 100644 index 0000000000..0868dd9a33 --- /dev/null +++ b/.github/workflows/pytest.yaml @@ -0,0 +1,77 @@ +# Run this job on pushes to `main`, and for pull requests. If you don't specify +# `branches: [main]`, then this action runs _twice_ on pull requests, which is +# annoying. + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + # If you wanted to use multiple Python versions, you'd have to specify a matrix in the job and + # reference the matrix Python version here. + - uses: actions/setup-python@v5 + with: + python-version: '3.9' + + # Cache the installation of Poetry itself, i.e. the next step. This prevents the workflow + # from installing Poetry every time, which can be slow. Note the use of the Poetry version + # number in the cache key, and the "-0" suffix: this allows you to invalidate the cache + # manually if/when you want to upgrade Poetry, or if something goes wrong. This could be + # mildly cleaner by using an environment variable, but I don't really care. + - name: cache poetry install + uses: actions/cache@v4 + with: + path: ~/.local + key: poetry-1.7.1 + + # Install Poetry. You could do this manually, or there are several actions that do this. + # `snok/install-poetry` seems to be minimal yet complete, and really just calls out to + # Poetry's default install script, which feels correct. I pin the Poetry version here + # because Poetry does occasionally change APIs between versions and I don't want my + # actions to break if it does. + # + # The key configuration value here is `virtualenvs-in-project: true`: this creates the + # venv as a `.venv` in your testing directory, which allows the next step to easily + # cache it. + - uses: snok/install-poetry@v1 + with: + version: 1.7.1 + virtualenvs-create: true + virtualenvs-in-project: true + + # Cache your dependencies (i.e. all the stuff in your `pyproject.toml`). Note the cache + # key: if you're using multiple Python versions, or multiple OSes, you'd need to include + # them in the cache key. I'm not, so it can be simple and just depend on the poetry.lock. + - name: cache deps + id: cache-deps + uses: actions/cache@v4 + with: + path: .venv + key: pydeps-${{ hashFiles('**/poetry.lock') }} + + # Install dependencies. 
`--no-root` means "install all dependencies but not the project + # itself", which is what you want to avoid caching _your_ code. The `if` statement + # ensures this only runs on a cache miss. + - run: poetry install --no-interaction --no-root + if: steps.cache-deps.outputs.cache-hit != 'true' + + # Now install _your_ project. This isn't necessary for many types of projects -- particularly + # things like Django apps don't need this. But it's a good idea since it fully-exercises the + # pyproject.toml and makes sure that if you add things like console-scripts at some point, + # they'll be installed and working. + - run: poetry install --no-interaction + + # And finally run tests. I'm using pytest and all my pytest config is in my `pyproject.toml` + # so this line is super-simple. But it could be as complex as you need. + - run: | + git config --global user.name "${GITHUB_ACTOR}" + git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com" + poetry run pytest + diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..2f6089da72 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +exclude: ^utils/.*$ + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/pylint + rev: v2.11.1 + hooks: + - id: pylint + args: + - --disable=I,C,R,logging-not-lazy,wildcard-import,unused-wildcard-import,fixme,broad-except,bare-except,eval-used,exec-used,global-statement,logging-format-interpolation,no-name-in-module,arguments-renamed,unspecified-encoding,protected-access,import-error,no-member diff --git a/License b/License new file mode 100644 index 0000000000..88bc22515e --- /dev/null +++ b/License @@ -0,0 +1,20 @@ +Copyright 2024 NSF National Center for Atmospheric Sciences (NCAR) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +“Software”), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000..53917da400 --- /dev/null +++ b/README.md @@ -0,0 +1,108 @@ +# git-fleximod + +Flexible, Enhanced Submodule Management for Git + +## Overview + +Git-fleximod is a Python-based tool that extends Git's submodule and sparse checkout capabilities, offering additional features for managing submodules in a more flexible and efficient way.
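+ +For example, the two everyday commands (covered in detail under Usage and Examples below) are: +```bash +# check out all required submodules at the tags pinned in .gitmodules +git fleximod update +# report which submodules are missing, modified, or out of sync +git fleximod status +```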
+ +## Installation + + If git-fleximod is located in your path, you can access it via the command: git fleximod + +## Usage + + Basic Usage: + git fleximod [options] + Available Commands: + status: Display the status of submodules. + update: Update submodules to the tag indicated in .gitmodules variable fxtag. + test: Make sure that fxtags and submodule hashes are consistent, + make sure that official urls (as defined by fxDONOTUSEurl) are set + make sure that fxtags are defined for all submodules + Additional Options: + See git fleximod --help for more details. + +## Supported .gitmodules Variables + + fxtag: Specify a specific tag or branch to checkout for a submodule. + fxrequired: Mark a submodule's checkout behavior, with allowed values: + - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module). + - ToplevelOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module). + - AlwaysRequired: Always required (always checked out). + - AlwaysOptional: Always optional (checked out with --optional flag). + fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. + fxDONOTUSEurl: This is the url used in the test subcommand to ensure that protected branches do not point to forks + **NOTE** the fxDONOTUSEurl variable is only used to identify the official project repository and should not be + changed by users. Use the url variable to change to a fork if desired. + +## Sparse Checkouts + + To enable sparse checkout for a submodule, set the fxsparse variable + in the .gitmodules file to the path of a file containing the desired + sparse checkout paths. Git-fleximod will automatically configure + sparse checkout based on this file when applicable commands are run. + See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set) + for details on the format of this file. + +## Tests + + The git fleximod test action is designed to be used by, for example, github workflows + to ensure that protected branches are consistent with respect to submodule hashes and fleximod fxtags + +## Examples + +Here are some common usage examples: +Update all submodules, including optional ones: ```bash + git fleximod update --optional ``` + Updating a specific submodule to the fxtag indicated in .gitmodules: + ```bash + git fleximod update submodule-name ``` Example .gitmodules entry: ```ini, toml + [submodule "cosp2"] + path = src/physics/cosp2/src + url = https://github.com/CFMIP/COSPv2.0 + fxsparse = ../.cosp_sparse_checkout + fxrequired = AlwaysRequired + fxtag = v2.1.4cesm ``` Explanation: + This entry indicates that the submodule named cosp2 at tag v2.1.4cesm should be checked out into the directory src/physics/cosp2/src relative to the .gitmodules directory. It should be checked out from the URL https://github.com/CFMIP/COSPv2.0 and use sparse checkout as described in the file ../.cosp_sparse_checkout relative to the path directory. It should be checked out anytime this .gitmodules entry is read. + Additional example: ```ini, toml + [submodule "cime"] + path = cime + url = https://github.com/jedwards4b/cime + fxrequired = ToplevelRequired + fxtag = cime6.0.198_rme01 ``` + Explanation: + This entry indicates that the submodule cime should be checked out into a directory named cime at tag cime6.0.198_rme01 from the URL https://github.com/jedwards4b/cime. 
This should only be done if +the .gitmodules file is at the top level of the repository clone. + +## Contributing + +We welcome contributions! Please see the CONTRIBUTING.md file for guidelines. + +## License + +Git-fleximod is released under the MIT License. diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 0000000000..d4bb2cbb9e --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/doc/conf.py b/doc/conf.py new file mode 100644 index 0000000000..423099eec9 --- /dev/null +++ b/doc/conf.py @@ -0,0 +1,26 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "git-fleximod" +author = "Jim Edwards " +release = "0.4.0" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = ["sphinx_argparse_cli"] + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "alabaster" +html_static_path = ["_static"] diff --git a/doc/index.rst b/doc/index.rst new file mode 100644 index 0000000000..0f9c1a7f7e --- /dev/null +++ b/doc/index.rst @@ -0,0 +1,24 @@ +.. git-fleximod documentation master file, created by + sphinx-quickstart on Sat Feb 3 12:02:22 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to git-fleximod's documentation! +======================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: +.. module:: sphinxcontrib.autoprogram +.. sphinx_argparse_cli:: + :module: git_fleximod.cli + :func: get_parser + :prog: git-fleximod + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/doc/make.bat b/doc/make.bat new file mode 100644 index 0000000000..32bb24529f --- /dev/null +++ b/doc/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/escomp_install b/escomp_install new file mode 100644 index 0000000000..ae782e72a4 --- /dev/null +++ b/escomp_install @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# updates git-fleximod in an ESCOMP model +# this script should be run from the model root directory, it expects +# git-fleximod to already be installed with the script in bin +# and the classes in lib/python/site-packages +import sys +import shutil +import os + +from glob import iglob + +fleximod_root = sys.argv[1] +fleximod_path = os.path.join(fleximod_root,"src","git-fleximod") +if os.path.isfile(fleximod_path): + with open(fleximod_path,"r") as f: + fleximod = f.readlines() + with open(os.path.join(".","bin","git-fleximod"),"w") as f: + for line in fleximod: + f.write(line) + if "import argparse" in line: + f.write('\nsys.path.append(os.path.join(os.path.dirname(__file__),"..","lib","python","site-packages"))\n\n') + + for file in iglob(os.path.join(fleximod_root, "src", "fleximod", "*.py")): + shutil.copy(file, + os.path.join("lib","python","site-packages","fleximod",os.path.basename(file))) diff --git a/git_fleximod/__init__.py b/git_fleximod/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py new file mode 100644 index 0000000000..4c3fb1a8f9 --- /dev/null +++ b/git_fleximod/cli.py @@ -0,0 +1,129 @@ +from pathlib import Path +import argparse +from git_fleximod import utils + +__version__ = "0.7.5" + +def find_root_dir(filename=".gitmodules"): + """ finds the highest directory in tree + which contains a file called filename """ + d = Path.cwd() + root = Path(d.root) + dirlist = [] + dl = d + while dl != root: + dirlist.append(dl) + dl = dl.parent + dirlist.append(root) + dirlist.reverse() + + for dl in dirlist: + attempt = dl / filename + if attempt.is_file(): + return str(dl) + utils.fatal_error("No .gitmodules found in directory tree") + + +def get_parser(): + description = """ + %(prog)s manages checking out groups of git submodules with additional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + # + # user options + # + choices = ["update", "status", "test"] + parser.add_argument( + "action", + choices=choices, + default="update", + help=f"Subcommand of git-fleximod, choices are {choices}", + ) + + parser.add_argument( + "components", + nargs="*", + help="Specific component(s) to checkout. By default, " + "all required submodules are checked out.", + ) + + parser.add_argument( + "-C", + "--path", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. 
" "Default: %(default)s.", + ) + + parser.add_argument( + "-x", + "--exclude", + nargs="*", + help="Component(s) listed in the gitmodules file which should be ignored.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help="Override cautions and update or checkout over locally modified repository.", + ) + + parser.add_argument( + "-o", + "--optional", + action="store_true", + default=False, + help="By default only the required submodules " + "are checked out. This flag will also checkout the " + "optional submodules relative to the toplevel directory.", + ) + + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + + parser.add_argument( + "-V", + "--version", + action="version", + version=f"%(prog)s {__version__}", + help="Print version and exit.", + ) + + # + # developer options + # + parser.add_argument( + "--backtrace", + action="store_true", + help="DEVELOPER: show exception backtraces as extra " "debugging output", + ) + + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + return parser diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py new file mode 100755 index 0000000000..f080513a52 --- /dev/null +++ b/git_fleximod/git_fleximod.py @@ -0,0 +1,605 @@ +#!/usr/bin/env python +import sys + +MIN_PYTHON = (3, 7) +if sys.version_info < MIN_PYTHON: + sys.exit("Python %s.%s or later is required." % MIN_PYTHON) + +import os +import shutil +import logging +import textwrap +from git_fleximod import utils +from git_fleximod import cli +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules +from configparser import NoOptionError + +# logger variable is global +logger = None + + +def fxrequired_allowed_values(): + return ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"] + + +def commandline_arguments(args=None): + parser = cli.get_parser() + + if args: + options = parser.parse_args(args) + else: + options = parser.parse_args() + + # explicitly listing a component overrides the optional flag + if options.optional or options.components: + fxrequired = [ + "ToplevelRequired", + "ToplevelOptional", + "AlwaysRequired", + "AlwaysOptional", + ] + else: + fxrequired = ["ToplevelRequired", "AlwaysRequired"] + + action = options.action + if not action: + action = "update" + handlers = [logging.StreamHandler()] + + if options.debug: + try: + open("fleximod.log", "w") + except PermissionError: + sys.exit("ABORT: Could not write file fleximod.log") + level = logging.DEBUG + handlers.append(logging.FileHandler("fleximod.log")) + elif options.verbose: + level = logging.INFO + else: + level = logging.WARNING + # Configure the root logger + logging.basicConfig( + level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers + ) + + if hasattr(options, "version"): + exit() + + return ( + options.path, + options.gitmodules, + fxrequired, + options.components, + options.exclude, + options.force, + action, + ) + + +def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): + """ + This function performs a sparse checkout of a git submodule. 
It does so by first creating the .git/info/sparse-checkout file + in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again. + Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important + because the submodule may have a large number of files and checking out the entire submodule and then removing it would be time + and disk space consuming. + + Parameters: + root_dir (str): The root directory for the git operation. + name (str): The name of the submodule. + url (str): The URL of the submodule. + path (str): The path to the submodule. + sparsefile (str): The sparse file for the submodule. + tag (str, optional): The tag to checkout. Defaults to "master". + + Returns: + None + """ + logger.info("Called sparse_checkout for {}".format(name)) + rgit = GitInterface(root_dir, logger) + superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") + if superroot: + gitroot = superroot.strip() + else: + gitroot = root_dir.strip() + assert os.path.isdir(os.path.join(gitroot, ".git")) + # first create the module directory + if not os.path.isdir(os.path.join(root_dir, path)): + os.makedirs(os.path.join(root_dir, path)) + + # initialize a new git repo and set the sparse checkout flag + sprep_repo = os.path.join(root_dir, path) + sprepo_git = GitInterface(sprep_repo, logger) + if os.path.exists(os.path.join(sprep_repo, ".git")): + try: + logger.info("Submodule {} found".format(name)) + chk = sprepo_git.config_get_value("core", "sparseCheckout") + if chk == "true": + logger.info("Sparse submodule {} already checked out".format(name)) + return + except NoOptionError: + logger.debug("Sparse submodule {} not present".format(name)) + except Exception as e: + utils.fatal_error("Unexpected error {} occurred.".format(e)) + + sprepo_git.config_set_value("core", "sparseCheckout", "true") + + # set the repository remote + + logger.info("Setting remote origin in {}/{}".format(root_dir, path)) + status = sprepo_git.git_operation("remote", "-v") + if url not in status: + sprepo_git.git_operation("remote", "add", "origin", url) + + topgit = os.path.join(gitroot, ".git") + + if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")): + with open(os.path.join(root_dir, ".git")) as f: + gitpath = os.path.relpath( + os.path.join(root_dir, f.read().split()[1]), + start=os.path.join(root_dir, path), + ) + topgit = os.path.join(gitpath, "modules") + else: + topgit = os.path.relpath( + os.path.join(root_dir, ".git", "modules"), + start=os.path.join(root_dir, path), + ) + + with utils.pushd(sprep_repo): + if not os.path.isdir(topgit): + os.makedirs(topgit) + topgit += os.sep + name + + if os.path.isdir(os.path.join(root_dir, path, ".git")): + with utils.pushd(sprep_repo): + shutil.move(".git", topgit) + with open(".git", "w") as f: + f.write("gitdir: " + os.path.relpath(topgit)) + # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo))) + gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) + if os.path.isfile(gitsparse): + logger.warning( + "submodule {} is already initialized {}".format(name, topgit) + ) + return + + with utils.pushd(sprep_repo): + shutil.copy(sparsefile, gitsparse) + + # Finally checkout the repo + sprepo_git.git_operation("fetch", "origin", "--tags") + sprepo_git.git_operation("checkout", tag) + + print(f"Successfully checked out {name:>20} at {tag}") + rgit.config_set_value(f'submodule "{name}"', 
"active", "true") + rgit.config_set_value(f'submodule "{name}"', "url", url) + + +def single_submodule_checkout( + root, name, path, url=None, tag=None, force=False, optional=False +): + """ + This function checks out a single git submodule. + + Parameters: + root (str): The root directory for the git operation. + name (str): The name of the submodule. + path (str): The path to the submodule. + url (str, optional): The URL of the submodule. Defaults to None. + tag (str, optional): The tag to checkout. Defaults to None. + force (bool, optional): If set to True, forces the checkout operation. Defaults to False. + optional (bool, optional): If set to True, the submodule is considered optional. Defaults to False. + + Returns: + None + """ + # function implementation... + git = GitInterface(root, logger) + repodir = os.path.join(root, path) + logger.info("Checkout {} into {}/{}".format(name, root, path)) + # if url is provided update to the new url + tmpurl = None + repo_exists = False + if os.path.exists(os.path.join(repodir, ".git")): + logger.info("Submodule {} already checked out".format(name)) + repo_exists = True + # Look for a .gitmodules file in the newly checkedout repo + if not repo_exists and url: + # ssh urls cause problems for those who dont have git accounts with ssh keys defined + # but cime has one since e3sm prefers ssh to https, because the .gitmodules file was + # opened with a GitModules object we don't need to worry about restoring the file here + # it will be done by the GitModules class + if url.startswith("git@"): + tmpurl = url + url = url.replace("git@github.com:", "https://github.com/") + git.git_operation("clone", url, path) + smgit = GitInterface(repodir, logger) + if not tag: + tag = smgit.git_operation("describe", "--tags", "--always").rstrip() + smgit.git_operation("checkout", tag) + # Now need to move the .git dir to the submodule location + rootdotgit = os.path.join(root, ".git") + if os.path.isfile(rootdotgit): + with open(rootdotgit) as f: + line = f.readline() + if line.startswith("gitdir: "): + rootdotgit = line[8:].rstrip() + + newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name)) + if os.path.exists(newpath): + shutil.rmtree(os.path.join(repodir, ".git")) + else: + shutil.move(os.path.join(repodir, ".git"), newpath) + + with open(os.path.join(repodir, ".git"), "w") as f: + f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) + + if not os.path.exists(repodir): + parent = os.path.dirname(repodir) + if not os.path.isdir(parent): + os.makedirs(parent) + git.git_operation("submodule", "add", "--name", name, "--", url, path) + + if not repo_exists or not tmpurl: + git.git_operation("submodule", "update", "--init", "--", path) + + if os.path.exists(os.path.join(repodir, ".gitmodules")): + # recursively handle this checkout + print(f"Recursively checking out submodules of {name}") + gitmodules = GitModules(logger, confpath=repodir) + requiredlist = ["AlwaysRequired"] + if optional: + requiredlist.append("AlwaysOptional") + submodules_checkout(gitmodules, repodir, requiredlist, force=force) + if not os.path.exists(os.path.join(repodir, ".git")): + utils.fatal_error( + f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}" + ) + + if tmpurl: + print(git.git_operation("restore", ".gitmodules")) + + return + + +def submodules_status(gitmodules, root_dir, toplevel=False): + testfails = 0 + localmods = 0 + needsupdate = 0 + for name in gitmodules.sections(): + path = gitmodules.get(name, "path") + tag = gitmodules.get(name, 
"fxtag") + required = gitmodules.get(name, "fxrequired") + level = required and "Toplevel" in required + if not path: + utils.fatal_error("No path found in .gitmodules for {}".format(name)) + newpath = os.path.join(root_dir, path) + logger.debug("newpath is {}".format(newpath)) + if not os.path.exists(os.path.join(newpath, ".git")): + rootgit = GitInterface(root_dir, logger) + # submodule commands use path, not name + url = gitmodules.get(name, "url") + url = url.replace("git@github.com:", "https://github.com/") + tags = rootgit.git_operation("ls-remote", "--tags", url) + atag = None + needsupdate += 1 + if not toplevel and level: + continue + for htag in tags.split("\n"): + if tag and tag in htag: + atag = (htag.split()[1])[10:] + break + if tag and tag == atag: + print(f"e {name:>20} not checked out, aligned at tag {tag}") + elif tag: + ahash = rootgit.git_operation( + "submodule", "status", "{}".format(path) + ).rstrip() + ahash = ahash[1 : len(tag) + 1] + if tag == ahash: + print(f"e {name:>20} not checked out, aligned at hash {ahash}") + else: + print( + f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}" + ) + testfails += 1 + else: + print(f"e {name:>20} has no fxtag defined in .gitmodules") + testfails += 1 + else: + with utils.pushd(newpath): + git = GitInterface(newpath, logger) + atag = git.git_operation("describe", "--tags", "--always").rstrip() + part = git.git_operation("status").partition("\n")[0] + # fake hash to initialize + ahash = "xxxx" + if part: + ahash = part.split()[-1] + if tag and atag == tag: + print(f" {name:>20} at tag {tag}") + elif tag and ahash[: len(tag)] == tag: + print(f" {name:>20} at hash {ahash}") + elif atag == ahash: + print(f" {name:>20} at hash {ahash}") + elif tag: + print( + f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}" + ) + testfails += 1 + needsupdate += 1 + else: + print( + f"e {name:>20} has no fxtag defined in .gitmodules, module at {atag}" + ) + testfails += 1 + + status = git.git_operation("status", "--ignore-submodules") + if "nothing to commit" not in status: + localmods = localmods + 1 + print("M" + textwrap.indent(status, " ")) + + return testfails, localmods, needsupdate + + +def submodules_update(gitmodules, root_dir, requiredlist, force): + _, localmods, needsupdate = submodules_status(gitmodules, root_dir) + + if localmods and not force: + local_mods_output() + return + if needsupdate == 0: + return + + for name in gitmodules.sections(): + fxtag = gitmodules.get(name, "fxtag") + path = gitmodules.get(name, "path") + url = gitmodules.get(name, "url") + logger.info( + "name={} path={} url={} fxtag={} requiredlist={}".format( + name, os.path.join(root_dir, path), url, fxtag, requiredlist + ) + ) + # if not os.path.exists(os.path.join(root_dir,path, ".git")): + fxrequired = gitmodules.get(name, "fxrequired") + assert fxrequired in fxrequired_allowed_values() + rgit = GitInterface(root_dir, logger) + superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") + + fxsparse = gitmodules.get(name, "fxsparse") + + if ( + fxrequired + and (superroot and "Toplevel" in fxrequired) + or fxrequired not in requiredlist + ): + if "ToplevelOptional" == fxrequired: + print("Skipping optional component {}".format(name)) + continue + if fxsparse: + logger.debug( + "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( + root_dir, name, url, path, fxsparse, fxtag + ) + ) + submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) + else: + logger.info( 
+ "Calling submodule_checkout({},{},{},{})".format( + root_dir, name, path, url + ) + ) + + single_submodule_checkout( + root_dir, + name, + path, + url=url, + tag=fxtag, + force=force, + optional=("AlwaysOptional" in requiredlist), + ) + + if os.path.exists(os.path.join(path, ".git")): + submoddir = os.path.join(root_dir, path) + with utils.pushd(submoddir): + git = GitInterface(submoddir, logger) + # first make sure the url is correct + upstream = git.git_operation("ls-remote", "--get-url").rstrip() + newremote = "origin" + if upstream != url: + # TODO - this needs to be a unique name + remotes = git.git_operation("remote", "-v") + if url in remotes: + for line in remotes: + if url in line and "fetch" in line: + newremote = line.split()[0] + break + else: + i = 0 + while newremote in remotes: + i = i + 1 + newremote = f"newremote.{i:02d}" + git.git_operation("remote", "add", newremote, url) + + tags = git.git_operation("tag", "-l") + if fxtag and fxtag not in tags: + git.git_operation("fetch", newremote, "--tags") + atag = git.git_operation("describe", "--tags", "--always").rstrip() + if fxtag and fxtag != atag: + try: + git.git_operation("checkout", fxtag) + print(f"{name:>20} updated to {fxtag}") + except Exception as error: + print(error) + elif not fxtag: + print(f"No fxtag found for submodule {name:>20}") + else: + print(f"{name:>20} up to date.") + + +def local_mods_output(): + text = """\ + The submodules labeled with 'M' above are not in a clean state. + The following are options for how to proceed: + (1) Go into each submodule which is not in a clean state and issue a 'git status' + Either revert or commit your changes so that the submodule is in a clean state. + (2) use the --force option to git-fleximod + (3) you can name the particular submodules to update using the git-fleximod command line + (4) As a last resort you can remove the submodule (via 'rm -fr [directory]') + then rerun git-fleximod update. +""" + print(text) + + +# checkout is done by update if required so this function may be depricated +def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): + """ + This function checks out all git submodules based on the provided parameters. + + Parameters: + gitmodules (ConfigParser): The gitmodules configuration. + root_dir (str): The root directory for the git operation. + requiredlist (list): The list of required modules. + force (bool, optional): If set to True, forces the checkout operation. Defaults to False. + + Returns: + None + """ + # function implementation... 
+ print("") + _, localmods, needsupdate = submodules_status(gitmodules, root_dir) + if localmods and not force: + local_mods_output() + return + if not needsupdate: + return + for name in gitmodules.sections(): + fxrequired = gitmodules.get(name, "fxrequired") + fxsparse = gitmodules.get(name, "fxsparse") + fxtag = gitmodules.get(name, "fxtag") + path = gitmodules.get(name, "path") + url = gitmodules.get(name, "url") + if fxrequired and fxrequired not in requiredlist: + if "Optional" in fxrequired: + print("Skipping optional component {}".format(name)) + continue + + if fxsparse: + logger.debug( + "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( + root_dir, name, url, path, fxsparse, fxtag + ) + ) + submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) + else: + logger.debug( + "Calling submodule_checkout({},{},{})".format(root_dir, name, path) + ) + single_submodule_checkout( + root_dir, + name, + path, + url=url, + tag=fxtag, + force=force, + optional="AlwaysOptional" in requiredlist, + ) + + +def submodules_test(gitmodules, root_dir): + """ + This function tests the git submodules based on the provided parameters. + + It first checks that fxtags are present and in sync with submodule hashes. + Then it ensures that urls are consistent with fxurls (not forks and not ssh) + and that sparse checkout files exist. + + Parameters: + gitmodules (ConfigParser): The gitmodules configuration. + root_dir (str): The root directory for the git operation. + + Returns: + int: The number of test failures. + """ + # First check that fxtags are present and in sync with submodule hashes + testfails, localmods, needsupdate = submodules_status(gitmodules, root_dir) + print("") + # Then make sure that urls are consistant with fxurls (not forks and not ssh) + # and that sparse checkout files exist + for name in gitmodules.sections(): + url = gitmodules.get(name, "url") + fxurl = gitmodules.get(name, "fxDONOTMODIFYurl") + fxsparse = gitmodules.get(name, "fxsparse") + path = gitmodules.get(name, "path") + fxurl = fxurl[:-4] if fxurl.endswith(".git") else fxurl + url = url[:-4] if url.endswith(".git") else url + if not fxurl or url.lower() != fxurl.lower(): + print(f"{name:>20} url {url} not in sync with required {fxurl}") + testfails += 1 + if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): + print(f"{name:>20} sparse checkout file {fxsparse} not found") + testfails += 1 + return testfails + localmods + needsupdate + + +def main(): + ( + root_dir, + file_name, + fxrequired, + includelist, + excludelist, + force, + action, + ) = commandline_arguments() + # Get a logger for the package + global logger + logger = logging.getLogger(__name__) + + logger.info("action is {} root_dir={} file_name={}".format(action, root_dir, file_name)) + + if not os.path.isfile(os.path.join(root_dir, file_name)): + file_path = utils.find_upwards(root_dir, file_name) + + if file_path is None: + utils.fatal_error( + "No {} found in {} or any of it's parents".format(file_name, root_dir) + ) + + root_dir = os.path.dirname(file_path) + logger.info( + "root_dir is {} includelist={} excludelist={}".format( + root_dir, includelist, excludelist + ) + ) + gitmodules = GitModules( + logger, + confpath=root_dir, + conffile=file_name, + includelist=includelist, + excludelist=excludelist, + ) + if not gitmodules.sections(): + sys.exit("No submodule components found") + retval = 0 + if action == "update": + submodules_update(gitmodules, root_dir, fxrequired, force) + elif action == 
"status": + tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True) + if tfails + lmods + updates > 0: + print( + f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n" + ) + if lmods > 0: + local_mods_output() + elif action == "test": + retval = submodules_test(gitmodules, root_dir) + else: + utils.fatal_error(f"unrecognized action request {action}") + return retval + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/git_fleximod/gitinterface.py b/git_fleximod/gitinterface.py new file mode 100644 index 0000000000..93ae38ecde --- /dev/null +++ b/git_fleximod/gitinterface.py @@ -0,0 +1,79 @@ +import os +import sys +from . import utils +from pathlib import Path + +class GitInterface: + def __init__(self, repo_path, logger): + logger.debug("Initialize GitInterface for {}".format(repo_path)) + if isinstance(repo_path, str): + self.repo_path = Path(repo_path).resolve() + elif isinstance(repo_path, Path): + self.repo_path = repo_path.resolve() + else: + raise TypeError("repo_path must be a str or Path object") + self.logger = logger + try: + import git + + self._use_module = True + try: + self.repo = git.Repo(str(self.repo_path)) # Initialize GitPython repo + except git.exc.InvalidGitRepositoryError: + self.git = git + self._init_git_repo() + msg = "Using GitPython interface to git" + except ImportError: + self._use_module = False + if not (self.repo_path / ".git").exists(): + self._init_git_repo() + msg = "Using shell interface to git" + self.logger.info(msg) + + def _git_command(self, operation, *args): + self.logger.info(operation) + if self._use_module and operation != "submodule": + try: + return getattr(self.repo.git, operation)(*args) + except Exception as e: + sys.exit(e) + else: + return ["git", "-C", str(self.repo_path), operation] + list(args) + + def _init_git_repo(self): + if self._use_module: + self.repo = self.git.Repo.init(str(self.repo_path)) + else: + command = ("git", "-C", str(self.repo_path), "init") + utils.execute_subprocess(command) + + # pylint: disable=unused-argument + def git_operation(self, operation, *args, **kwargs): + command = self._git_command(operation, *args) + self.logger.info(command) + if isinstance(command, list): + try: + return utils.execute_subprocess(command, output_to_caller=True) + except Exception as e: + sys.exit(e) + else: + return command + + def config_get_value(self, section, name): + if self._use_module: + config = self.repo.config_reader() + return config.get_value(section, name) + else: + cmd = ("git", "-C", str(self.repo_path), "config", "--get", f"{section}.{name}") + output = utils.execute_subprocess(cmd, output_to_caller=True) + return output.strip() + + def config_set_value(self, section, name, value): + if self._use_module: + with self.repo.config_writer() as writer: + writer.set_value(section, name, value) + writer.release() # Ensure changes are saved + else: + cmd = ("git", "-C", str(self.repo_path), "config", f"{section}.{name}", value) + self.logger.info(cmd) + utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/git_fleximod/gitmodules.py b/git_fleximod/gitmodules.py new file mode 100644 index 0000000000..68c82d066f --- /dev/null +++ b/git_fleximod/gitmodules.py @@ -0,0 +1,97 @@ +import shutil +from pathlib import Path +from configparser import RawConfigParser, ConfigParser +from .lstripreader import LstripReader + + +class GitModules(RawConfigParser): + def __init__( + self, + logger, + confpath=Path.cwd(), + conffile=".gitmodules", + includelist=None, + 
excludelist=None, + ): + """ + confpath: Path to the directory containing the .gitmodules file (defaults to the current working directory). + conffile: Name of the configuration file (defaults to .gitmodules). + includelist: Optional list of submodules to include. + excludelist: Optional list of submodules to exclude. + """ + self.logger = logger + self.logger.debug( + "Creating a GitModules object {} {} {} {}".format( + confpath, conffile, includelist, excludelist + ) + ) + super().__init__() + self.conf_file = (Path(confpath) / Path(conffile)) + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=conffile) + self.includelist = includelist + self.excludelist = excludelist + self.isdirty = False + + def reload(self): + self.clear() + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=self.conf_file) + + + def set(self, name, option, value): + """ + Sets a configuration value for a specific submodule: + Ensures the appropriate section exists for the submodule. + Calls the parent class's set method to store the value. + """ + self.isdirty = True + self.logger.debug("set called {} {} {}".format(name, option, value)) + section = f'submodule "{name}"' + if not self.has_section(section): + self.add_section(section) + super().set(section, option, str(value)) + + # pylint: disable=redefined-builtin, arguments-differ + def get(self, name, option, raw=False, vars=None, fallback=None): + """ + Retrieves a configuration value for a specific submodule: + Uses the parent class's get method to access the value. + Handles potential errors if the section or option doesn't exist. + """ + self.logger.debug("get called {} {}".format(name, option)) + section = f'submodule "{name}"' + try: + return ConfigParser.get( + self, section, option, raw=raw, vars=vars, fallback=fallback + ) + except NoOptionError: + return None + + def save(self): + if self.isdirty: + self.logger.info("Writing {}".format(self.conf_file)) + with open(self.conf_file, "w") as fd: + self.write(fd) + self.isdirty = False + + def __del__(self): + self.save() + + def sections(self): + """Strip the submodule part out of section and just use the name""" + self.logger.debug("calling GitModules sections iterator") + names = [] + for section in ConfigParser.sections(self): + name = section[11:-1] + if self.includelist and name not in self.includelist: + continue + if self.excludelist and name in self.excludelist: + continue + names.append(name) + return names + + def items(self, name, raw=False, vars=None): + self.logger.debug("calling GitModules items for {}".format(name)) + section = f'submodule "{name}"' + return ConfigParser.items(self, section, raw=raw, vars=vars) diff --git a/git_fleximod/lstripreader.py b/git_fleximod/lstripreader.py new file mode 100644 index 0000000000..01d5580ee8 --- /dev/null +++ b/git_fleximod/lstripreader.py @@ -0,0 +1,43 @@ +class LstripReader(object): + "LstripReader formats .gitmodules files to be acceptable for configparser" + + def __init__(self, filename): + with open(filename, "r") as infile: + lines = infile.readlines() + self._lines = list() + self._num_lines = len(lines) + self._index = 0 + for line in lines: + self._lines.append(line.lstrip()) + + def readlines(self): + """Return all the lines from this object's file""" + return self._lines + + def readline(self, size=-1): + """Format and return the next line or raise StopIteration""" + try: + line = self.next() + except StopIteration: + line = "" + + if (size > 0) and (len(line) < 
size): + return line[0:size] + + return line + + def __iter__(self): + """Begin an iteration""" + self._index = 0 + return self + + def next(self): + """Return the next line or raise StopIteration""" + if self._index >= self._num_lines: + raise StopIteration + + self._index = self._index + 1 + return self._lines[self._index - 1] + + def __next__(self): + return self.next() diff --git a/git_fleximod/metoflexi.py b/git_fleximod/metoflexi.py new file mode 100755 index 0000000000..cc347db2dd --- /dev/null +++ b/git_fleximod/metoflexi.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python +from configparser import ConfigParser +import sys +import shutil +from pathlib import Path +import argparse +import logging +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules +from git_fleximod import utils + +logger = None + +def find_root_dir(filename=".git"): + d = Path.cwd() + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.is_dir(): + return d + d = d.parent + return None + + +def get_parser(): + description = """ + %(prog)s manages checking out groups of git submodules with additional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + parser.add_argument('-e', '--externals', nargs='?', + default='Externals.cfg', + help='The externals description filename. ' + 'Default: %(default)s.') + + parser.add_argument( + "-C", + "--path", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. " "Default: %(default)s.", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + return parser + +def commandline_arguments(args=None): + parser = get_parser() + + options = parser.parse_args(args) + handlers = [logging.StreamHandler()] + + if options.debug: + try: + open("fleximod.log", "w") + except PermissionError: + sys.exit("ABORT: Could not write file fleximod.log") + level = logging.DEBUG + handlers.append(logging.FileHandler("fleximod.log")) + elif options.verbose: + level = logging.INFO + else: + level = logging.WARNING + # Configure the root logger + logging.basicConfig( + level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers + ) + + return( + options.path, + options.gitmodules, + options.externals + ) + +class ExternalRepoTranslator: + """ + Translates external repositories configured in an INI-style externals file. + """ + + def __init__(self, rootpath, gitmodules, externals): + self.rootpath = rootpath + if gitmodules: + self.gitmodules = GitModules(logger, confpath=rootpath) + self.externals = (rootpath / Path(externals)).resolve() + print(f"Translating {self.externals}") + self.git = GitInterface(rootpath, logger) + +# def __del__(self): +# if (self.rootpath / "save.gitignore"): + + + def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, protocol): + """ + Translates a single repository based on configuration details. 
+ + Args: + rootpath (str): Root path of the main repository. + gitmodules (str): Path to the .gitmodules file. + tag (str): The tag to use for the external repository. + url (str): The URL of the external repository. + path (str): The relative path within the main repository for the external repository. + efile (str): The external file or file containing submodules. + hash_ (str): The commit hash to checkout (if applicable). + sparse (str): Boolean indicating whether to use sparse checkout (if applicable). + protocol (str): The protocol to use (e.g., 'git', 'http'). + """ + assert protocol != "svn", "SVN protocol is not currently supported" + print(f"Translating repository {section}") + if efile: + file_path = Path(path) / Path(efile) + newroot = (self.rootpath / file_path).parent.resolve() + if not newroot.exists(): + newroot.mkdir(parents=True) + logger.info("Newroot is {}".format(newroot)) + newt = ExternalRepoTranslator(newroot, ".gitmodules", efile) + newt.translate_repo() + if protocol == "externals_only": + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) + + self.gitmodules.set(section, "fxDONOTUSEurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + else: + newpath = (self.rootpath / Path(path)) + if newpath.exists(): + shutil.rmtree(newpath) + logger.info("Creating directory {}".format(newpath)) + newpath.mkdir(parents=True) + if tag: + logger.info("cloning {}".format(section)) + try: + self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path) + except: + self.git.git_operation("clone", url, path) + with utils.pushd(newpath): + ngit = GitInterface(newpath, logger) + ngit.git_operation("checkout", tag) + if hash_: + self.git.git_operation("clone", url, path) + git = GitInterface(newpath, logger) + git.git_operation("fetch", "origin") + git.git_operation("checkout", hash_) + if sparse: + print("setting as sparse submodule {}".format(section)) + sparsefile = (newpath / Path(sparse)) + newfile = (newpath / ".git" / "info" / "sparse-checkout") + print(f"sparsefile {sparsefile} newfile {newfile}") + shutil.copy(sparsefile, newfile) + + logger.info("adding submodule {}".format(section)) + self.gitmodules.save() + self.git.git_operation("submodule", "add", "-f", "--name", section, url, path) + self.git.git_operation("submodule","absorbgitdirs") + self.gitmodules.reload() + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) + + self.gitmodules.set(section, "fxDONOTUSEurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + + + def translate_repo(self): + """ + Translates external repositories defined within an external file. + + Args: + rootpath (str): Root path of the main repository. + gitmodules (str): Path to the .gitmodules file. + external_file (str): The path to the external file containing repository definitions. 
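+ + Note: the section named "externals_description" holds metadata about + the externals file itself, so it is skipped rather than translated.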
+ """ + econfig = ConfigParser() + econfig.read((self.rootpath / Path(self.externals))) + + for section in econfig.sections(): + if section == "externals_description": + logger.info("skipping section {}".format(section)) + return + logger.info("Translating section {}".format(section)) + tag = econfig.get(section, "tag", raw=False, fallback=None) + url = econfig.get(section, "repo_url", raw=False, fallback=None) + path = econfig.get(section, "local_path", raw=False, fallback=None) + efile = econfig.get(section, "externals", raw=False, fallback=None) + hash_ = econfig.get(section, "hash", raw=False, fallback=None) + sparse = econfig.get(section, "sparse", raw=False, fallback=None) + protocol = econfig.get(section, "protocol", raw=False, fallback=None) + + self.translate_single_repo(section, tag, url, path, efile, hash_, sparse, protocol) + + + +def _main(): + rootpath, gitmodules, externals = commandline_arguments() + global logger + logger = logging.getLogger(__name__) + with utils.pushd(rootpath): + t = ExternalRepoTranslator(Path(rootpath), gitmodules, externals) + logger.info("Translating {}".format(rootpath)) + t.translate_repo() + + +if __name__ == "__main__": + sys.exit(_main()) diff --git a/git_fleximod/utils.py b/git_fleximod/utils.py new file mode 100644 index 0000000000..7cc1de38cc --- /dev/null +++ b/git_fleximod/utils.py @@ -0,0 +1,365 @@ +#!/usr/bin/env python3 +""" +Common public utilities for manic package + +""" + +import logging +import os +import subprocess +import sys +from threading import Timer +from pathlib import Path + +LOCAL_PATH_INDICATOR = "." +# --------------------------------------------------------------------- +# +# functions to massage text for output and other useful utilities +# +# --------------------------------------------------------------------- +from contextlib import contextmanager + + +@contextmanager +def pushd(new_dir): + """context for chdir. usage: with pushd(new_dir)""" + previous_dir = os.getcwd() + os.chdir(new_dir) + try: + yield + finally: + os.chdir(previous_dir) + + +def log_process_output(output): + """Log each line of process output at debug level so it can be + filtered if necessary. By default, output is a single string, and + logging.debug(output) will only put log info heading on the first + line. This makes it hard to filter with grep. + + """ + output = output.split("\n") + for line in output: + logging.debug(line) + + +def printlog(msg, **kwargs): + """Wrapper script around print to ensure that everything printed to + the screen also gets logged. + + """ + logging.info(msg) + if kwargs: + print(msg, **kwargs) + else: + print(msg) + sys.stdout.flush() + + +def find_upwards(root_dir, filename): + """Find a file in root dir or any of it's parents""" + d = Path(root_dir) + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.exists(): + return attempt + d = d.parent + return None + + +def last_n_lines(the_string, n_lines, truncation_message=None): + """Returns the last n lines of the given string + + Args: + the_string: str + n_lines: int + truncation_message: str, optional + + Returns a string containing the last n lines of the_string + + If truncation_message is provided, the returned string begins with + the given message if and only if the string is greater than n lines + to begin with. 
+ """ + + lines = the_string.splitlines(True) + if len(lines) <= n_lines: + return_val = the_string + else: + lines_subset = lines[-n_lines:] + str_truncated = "".join(lines_subset) + if truncation_message: + str_truncated = truncation_message + "\n" + str_truncated + return_val = str_truncated + + return return_val + + +def indent_string(the_string, indent_level): + """Indents the given string by a given number of spaces + + Args: + the_string: str + indent_level: int + + Returns a new string that is the same as the_string, except that + each line is indented by 'indent_level' spaces. + + In python3, this can be done with textwrap.indent. + """ + + lines = the_string.splitlines(True) + padding = " " * indent_level + lines_indented = [padding + line for line in lines] + return "".join(lines_indented) + + +# --------------------------------------------------------------------- +# +# error handling +# +# --------------------------------------------------------------------- + + +def fatal_error(message): + """ + Error output function + """ + logging.error(message) + raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) + + +# --------------------------------------------------------------------- +# +# Data conversion / manipulation +# +# --------------------------------------------------------------------- +def str_to_bool(bool_str): + """Convert a sting representation of as boolean into a true boolean. + + Conversion should be case insensitive. + """ + value = None + str_lower = bool_str.lower() + if str_lower in ("true", "t"): + value = True + elif str_lower in ("false", "f"): + value = False + if value is None: + msg = ( + 'ERROR: invalid boolean string value "{0}". ' + 'Must be "true" or "false"'.format(bool_str) + ) + fatal_error(msg) + return value + + +REMOTE_PREFIXES = ["http://", "https://", "ssh://", "git@"] + + +def is_remote_url(url): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + """ + remote_url = False + for prefix in REMOTE_PREFIXES: + if url.startswith(prefix): + remote_url = True + return remote_url + + +def split_remote_url(url): + """check if the user provided a local file path or a + remote. If remote, try to strip off protocol info. + + """ + remote_url = is_remote_url(url) + if not remote_url: + return url + + for prefix in REMOTE_PREFIXES: + url = url.replace(prefix, "") + + if "@" in url: + url = url.split("@")[1] + + if ":" in url: + url = url.split(":")[1] + + return url + + +def expand_local_url(url, field): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + Note: local paths of LOCAL_PATH_INDICATOR have special meaning and + represent local copy only, don't work with the remotes. + + """ + remote_url = is_remote_url(url) + if not remote_url: + if url.strip() == LOCAL_PATH_INDICATOR: + pass + else: + url = os.path.expandvars(url) + url = os.path.expanduser(url) + if not os.path.isabs(url): + msg = ( + 'WARNING: Externals description for "{0}" contains a ' + "url that is not remote and does not expand to an " + "absolute path. 
+
+
+def expand_local_url(url, field):
+ """check if the user provided a local file path instead of a
+ remote. If so, it must be expanded to an absolute
+ path.
+
+ Note: local paths of LOCAL_PATH_INDICATOR have special meaning and
+ represent local copy only, don't work with the remotes.
+
+ """
+ remote_url = is_remote_url(url)
+ if not remote_url:
+ if url.strip() == LOCAL_PATH_INDICATOR:
+ pass
+ else:
+ url = os.path.expandvars(url)
+ url = os.path.expanduser(url)
+ if not os.path.isabs(url):
+ msg = (
+ 'WARNING: Externals description for "{0}" contains a '
+ "url that is not remote and does not expand to an "
+ "absolute path. Version control operations may "
+ "fail.\n\nurl={1}".format(field, url)
+ )
+ printlog(msg)
+ else:
+ url = os.path.normpath(url)
+ return url
+
+
+# ---------------------------------------------------------------------
+#
+# subprocess
+#
+# ---------------------------------------------------------------------
+
+# Give the user a helpful message if we detect that a command seems to
+# be hanging.
+_HANGING_SEC = 300
+
+
+def _hanging_msg(working_directory, command):
+ print(
+ """
+
+Command '{command}'
+from directory {working_directory}
+has taken {hanging_sec} seconds. It may be hanging.
+
+The command will continue to run, but you may want to abort
+git-fleximod with ^C and investigate. A possible cause of hangs is
+when svn or git require authentication to access a private
+repository. On some systems, svn and git requests for authentication
+information will not be displayed to the user. In this case, the program
+will appear to hang. Ensure you can run svn and git manually and access
+all repositories without entering your authentication information.
+
+""".format(
+ command=command,
+ working_directory=working_directory,
+ hanging_sec=_HANGING_SEC,
+ )
+ )
+
+
+def execute_subprocess(commands, status_to_caller=False, output_to_caller=False):
+ """Wrapper around subprocess.check_output to handle common
+ exceptions.
+
+ check_output runs a command with arguments and waits
+ for it to complete.
+
+ check_output raises an exception on a nonzero return code. If
+ status_to_caller is true, execute_subprocess returns the subprocess
+ return code, otherwise execute_subprocess treats non-zero return
+ status as an error and raises an exception.
+
+ """
+ cwd = os.getcwd()
+ msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd)
+ logging.info(msg)
+ commands_str = " ".join(str(element) for element in commands)
+ logging.info(commands_str)
+ return_to_caller = status_to_caller or output_to_caller
+ status = -1
+ output = ""
+ hanging_timer = Timer(
+ _HANGING_SEC,
+ _hanging_msg,
+ kwargs={"working_directory": cwd, "command": commands_str},
+ )
+ hanging_timer.start()
+ try:
+ output = subprocess.check_output(
+ commands, stderr=subprocess.STDOUT, universal_newlines=True
+ )
+ log_process_output(output)
+ status = 0
+ except OSError as error:
+ msg = failed_command_msg(
+ "Command execution failed. Does the executable exist?", commands
+ )
+ logging.error(error)
+ fatal_error(msg)
+ except ValueError as error:
+ msg = failed_command_msg(
+ "DEV_ERROR: Invalid arguments trying to run subprocess", commands
+ )
+ logging.error(error)
+ fatal_error(msg)
+ except subprocess.CalledProcessError as error:
+ # Only report the error if we are NOT returning to the
+ # caller. If we are returning to the caller, then it may be a
+ # simple status check. If returning, it is the caller's
+ # responsibility to determine if an error occurred and handle it
+ # appropriately.
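+ # (When returning to the caller, the nonzero return code is
+ # captured below in 'status' and handed back instead of raising.)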
+ if not return_to_caller: + msg_context = ( + "Process did not run successfully; " + "returned status {0}".format(error.returncode) + ) + msg = failed_command_msg(msg_context, commands, output=error.output) + logging.error(error) + logging.error(msg) + log_process_output(error.output) + fatal_error(msg) + status = error.returncode + finally: + hanging_timer.cancel() + + if status_to_caller and output_to_caller: + ret_value = (status, output) + elif status_to_caller: + ret_value = status + elif output_to_caller: + ret_value = output + else: + ret_value = None + + return ret_value + + +def failed_command_msg(msg_context, command, output=None): + """Template for consistent error messages from subprocess calls. + + If 'output' is given, it should provide the output from the failed + command + """ + + if output: + output_truncated = last_n_lines( + output, 20, truncation_message="[... Output truncated for brevity ...]" + ) + errmsg = ( + "Failed with output:\n" + indent_string(output_truncated, 4) + "\nERROR: " + ) + else: + errmsg = "" + + command_str = " ".join(command) + errmsg += """In directory + {cwd} +{context}: + {command} +""".format( + cwd=os.getcwd(), context=msg_context, command=command_str + ) + + if output: + errmsg += "See above for output from failed command.\n" + + return errmsg diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000000..b59ed3942c --- /dev/null +++ b/poetry.lock @@ -0,0 +1,693 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "docutils" +version = "0.19" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fsspec" +version = "2023.12.2" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = 
"sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.41" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = 
"sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = 
"sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pyfakefs" +version = "5.3.5" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfakefs-5.3.5-py3-none-any.whl", hash = "sha256:751015c1de94e1390128c82b48cdedc3f088bbdbe4bc713c79d02a27f0f61e69"}, + {file = "pyfakefs-5.3.5.tar.gz", hash = "sha256:7cdc500b35a214cb7a614e1940543acc6650e69a94ac76e30f33c9373bd9cf90"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.0.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.3.0,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "5.3.0" +description = "Python documentation generator" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" +requests = ">=2.5.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "2.2.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"},
+ {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "wheel"
+version = "0.42.0"
+description = "A built-package format for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"},
+ {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"},
+]
+
+[package.extras]
+test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
+
+[[package]]
+name = "zipp"
+version = "3.17.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
+ {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.8"
+content-hash = "25ee2ae1d74abedde3a6637a60d4a3095ea5cf9731960875741bbc2ba84a475d"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000..ac1684ea0f
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,41 @@
+[tool.poetry]
+name = "git-fleximod"
+version = "0.7.5"
+description = "Extended support for git-submodule and git-sparse-checkout"
+authors = ["Jim Edwards "]
+maintainers = ["Jim Edwards "]
+license = "MIT"
+readme = "README.md"
+homepage = "https://github.com/jedwards4b/git-fleximod"
+keywords = ["git", "submodule", "sparse-checkout"]
+packages = [
+{ include = "git_fleximod"},
+{ include = "doc"},
+]
+
+[tool.poetry.scripts]
+git-fleximod = "git_fleximod.git_fleximod:main"
+me2flexi = "git_fleximod.metoflexi:_main"
+fsspec = "fsspec.fuse:main"
+
+[tool.poetry.dependencies]
+python = "^3.8"
+GitPython = "^3.1.0"
+sphinx = "^5.0.0"
+fsspec = "^2023.12.2"
+wheel = "^0.42.0"
+pytest = "^8.0.0"
+pyfakefs = "^5.3.5"
+
+[tool.poetry.urls]
+"Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues"
+
+[tool.pytest.ini_options]
+markers = [
+ "skip_after_first: only run on first iteration"
+]
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
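+# Illustrative use of the entry points defined above (example session
+# only; "update" and "status" are the subcommands exercised by the
+# tests in this patch):
+#   git fleximod status
+#   git fleximod update --optional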
diff --git a/tbump.toml b/tbump.toml
new file mode 100644
index 0000000000..e644fc4d24
--- /dev/null
+++ b/tbump.toml
@@ -0,0 +1,43 @@
+# Uncomment this if your project is hosted on GitHub:
+github_url = "https://github.com/jedwards4b/git-fleximod/"
+
+[version]
+current = "0.7.5"
+
+# Example of a semver regexp.
+# Make sure this matches current_version before
+# using tbump
+regex = '''
+ (?P<major>\d+)
+ \.
+ (?P<minor>\d+)
+ \.
+ (?P<patch>\d+)
+ '''
+
+[git]
+message_template = "Bump to {new_version}"
+tag_template = "v{new_version}"
+
+# For each file to patch, add a [[file]] config
+# section containing the path of the file, relative to the
+# tbump.toml location.
+[[file]]
+src = "git_fleximod/cli.py"
+
+[[file]]
+src = "pyproject.toml"
+
+# You can specify a list of commands to
+# run after the files have been patched
+# and before the git commit is made
+
+# [[before_commit]]
+# name = "check changelog"
+# cmd = "grep -q {new_version} Changelog.rst"
+
+# Or run some commands after the git tag and the branch
+# have been pushed:
+# [[after_push]]
+# name = "publish"
+# cmd = "./publish.sh"
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000000..4d4c66c78e
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,3 @@
+import sys, os
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, "src"))
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000000..942a0efb97
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,138 @@
+import pytest
+from git_fleximod.gitinterface import GitInterface
+import os
+import subprocess
+import logging
+from pathlib import Path
+
+@pytest.fixture(scope='session')
+def logger():
+ logging.basicConfig(
+ level=logging.INFO, format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.StreamHandler()]
+ )
+ logger = logging.getLogger(__name__)
+ return logger
+
+all_repos=[
+ {"subrepo_path": "modules/test",
+ "submodule_name": "test_submodule",
+ "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0",
+ "status2" : "test_submodule at tag MPIserial_2.4.0",
+ "status3" : "test_submodule at tag MPIserial_2.4.0",
+ "status4" : "test_submodule at tag MPIserial_2.4.0",
+ "gitmodules_content" : """
+ [submodule "test_submodule"]
+ path = modules/test
+ url = https://github.com/ESMCI/mpi-serial.git
+ fxtag = MPIserial_2.4.0
+ fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+ fxrequired = ToplevelRequired
+"""},
+ {"subrepo_path": "modules/test_optional",
+ "submodule_name": "test_optional",
+ "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0",
+ "status2" : "test_optional at tag MPIserial_2.4.0",
+ "status3" : "test_optional not checked out, aligned at tag MPIserial_2.4.0",
+ "status4" : "test_optional at tag MPIserial_2.4.0",
+ "gitmodules_content": """
+ [submodule "test_optional"]
+ path = modules/test_optional
+ url = https://github.com/ESMCI/mpi-serial.git
+ fxtag = MPIserial_2.4.0
+ fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+ fxrequired = ToplevelOptional
+"""},
+ {"subrepo_path": "modules/test_alwaysoptional",
+ "submodule_name": "test_alwaysoptional",
+ "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c",
+ "status2" : "test_alwaysoptional at hash e5cf35c",
+ "status3" : "test_alwaysoptional not checked out, out of sync at tag MPIserial_2.3.0",
+ "status4" : "test_alwaysoptional at hash e5cf35c",
+ "gitmodules_content": """
+ [submodule "test_alwaysoptional"]
+ path = modules/test_alwaysoptional
+ url = https://github.com/ESMCI/mpi-serial.git
+ fxtag = e5cf35c
+ fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+ fxrequired = AlwaysOptional
+"""},
+ {"subrepo_path": "modules/test_sparse",
+ "submodule_name": "test_sparse",
+ "status1" : "test_sparse at tag MPIserial_2.5.0",
+ "status2" : "test_sparse at tag MPIserial_2.5.0",
+ "status3" :
"test_sparse at tag MPIserial_2.5.0", + "status4" : "test_sparse at tag MPIserial_2.5.0", + "gitmodules_content": """ + [submodule "test_sparse"] + path = modules/test_sparse + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.5.0 + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = AlwaysRequired + fxsparse = ../.sparse_file_list +"""}, +] +@pytest.fixture(params=all_repos) + +def shared_repos(request): + return request.param + +@pytest.fixture +def get_all_repos(): + return all_repos + +def write_sparse_checkout_file(fp): + sparse_content = """m4 +""" + fp.write_text(sparse_content) + +@pytest.fixture +def test_repo(shared_repos, tmp_path, logger): + subrepo_path = shared_repos["subrepo_path"] + submodule_name = shared_repos["submodule_name"] + test_dir = tmp_path / "testrepo" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + assert test_dir.joinpath(".git").is_dir() + (test_dir / "modules").mkdir() + if "sparse" in submodule_name: + (test_dir / subrepo_path).mkdir() + # Add the sparse checkout file + write_sparse_checkout_file(test_dir / "modules" / ".sparse_file_list") + gitp.git_operation("add","modules/.sparse_file_list") + else: + gitp = GitInterface(str(test_dir), logger) + gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) + assert test_dir.joinpath(".gitmodules").is_file() + gitp.git_operation("add",subrepo_path) + gitp.git_operation("commit","-a","-m","\"add submod\"") + test_dir2 = tmp_path / "testrepo2" + gitp.git_operation("clone",test_dir,test_dir2) + return test_dir2 + + +@pytest.fixture +def complex_repo(tmp_path, logger): + test_dir = tmp_path / "testcomplex" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2") + gitp.git_operation("fetch", "origin", "main") + gitp.git_operation("checkout", "main") + return test_dir + +@pytest.fixture +def git_fleximod(): + def _run_fleximod(path, args, input=None): + cmd = ["git", "fleximod"] + args.split() + result = subprocess.run(cmd, cwd=path, input=input, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + text=True) + if result.returncode: + print(result.stdout) + print(result.stderr) + return result + return _run_fleximod + diff --git a/tests/test_a_import.py b/tests/test_a_import.py new file mode 100644 index 0000000000..d5ca878de5 --- /dev/null +++ b/tests/test_a_import.py @@ -0,0 +1,8 @@ +# pylint: disable=unused-import +from git_fleximod import cli +from git_fleximod import utils +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules + +def test_import(): + print("here") diff --git a/tests/test_b_update.py b/tests/test_b_update.py new file mode 100644 index 0000000000..159f1cfae0 --- /dev/null +++ b/tests/test_b_update.py @@ -0,0 +1,26 @@ +import pytest +from pathlib import Path + +def test_basic_checkout(git_fleximod, test_repo, shared_repos): + # Prepare a simple .gitmodules + gm = shared_repos['gitmodules_content'] + file_path = (test_repo / ".gitmodules") + repo_name = shared_repos["submodule_name"] + repo_path = shared_repos["subrepo_path"] + + file_path.write_text(gm) + + # Run the command + result = git_fleximod(test_repo, f"update {repo_name}") + + # Assertions + assert result.returncode == 0 + assert Path(test_repo / repo_path).exists() # Did the submodule directory get created? 
+    if "sparse" in repo_name:
+        assert Path(test_repo / f"{repo_path}/m4").exists()  # Did the submodule sparse directory get created?
+        assert not Path(test_repo / f"{repo_path}/README").exists()  # Was only the sparse-listed content checked out?
+
+    status = git_fleximod(test_repo, f"status {repo_name}")
+
+    assert shared_repos["status2"] in status.stdout
+
diff --git a/tests/test_c_required.py b/tests/test_c_required.py
new file mode 100644
index 0000000000..89ab8d294d
--- /dev/null
+++ b/tests/test_c_required.py
@@ -0,0 +1,30 @@
+import pytest
+from pathlib import Path
+
+def test_required(git_fleximod, test_repo, shared_repos):
+    file_path = (test_repo / ".gitmodules")
+    gm = shared_repos["gitmodules_content"]
+    repo_name = shared_repos["submodule_name"]
+    if file_path.exists():
+        with file_path.open("r") as f:
+            gitmodules_content = f.read()
+        # add the entry if it does not exist
+        if repo_name not in gitmodules_content:
+            file_path.write_text(gitmodules_content + gm)
+        # or if it is incomplete
+        elif gm not in gitmodules_content:
+            file_path.write_text(gm)
+    else:
+        file_path.write_text(gm)
+    result = git_fleximod(test_repo, "update")
+    assert result.returncode == 0
+    status = git_fleximod(test_repo, f"status {repo_name}")
+    assert shared_repos["status3"] in status.stdout
+    result = git_fleximod(test_repo, "update --optional")
+    assert result.returncode == 0
+    status = git_fleximod(test_repo, f"status {repo_name}")
+    assert shared_repos["status4"] in status.stdout
+    result = git_fleximod(test_repo, f"update {repo_name}")
+    assert result.returncode == 0
+    status = git_fleximod(test_repo, f"status {repo_name}")
+    assert shared_repos["status4"] in status.stdout
diff --git a/tests/test_d_complex.py b/tests/test_d_complex.py
new file mode 100644
index 0000000000..fdce516274
--- /dev/null
+++ b/tests/test_d_complex.py
@@ -0,0 +1,67 @@
+import pytest
+from pathlib import Path
+from git_fleximod.gitinterface import GitInterface
+
+def test_complex_checkout(git_fleximod, complex_repo, logger):
+    status = git_fleximod(complex_repo, "status")
+    assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout)
+    assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout)
+    assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout)
+    assert("Complex not checked out, aligned at tag testtag01" in status.stdout)
+    assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout)
+
+    # This should checkout and update test_submodule and complex_sub
+    result = git_fleximod(complex_repo, "update")
+    assert result.returncode == 0
+
+    status = git_fleximod(complex_repo, "status")
+    assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout)
+    assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
+    assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
+    assert("Complex at tag testtag01" in status.stdout)
+
+    # now check the complex_sub
+    root = (complex_repo / "modules" / "complex")
+    assert(not (root / "libraries" / "gptl" / ".git").exists())
+    assert(not (root / "libraries" / "mpi-serial" / ".git").exists())
+    assert((root / "modules" / "mpi-serial" / ".git").exists())
+    assert(not (root / "modules" / "mpi-serial2" / ".git").exists())
+    assert((root / "modules" / "mpi-sparse" / ".git").exists())
+    assert((root / "modules" / "mpi-sparse" / "m4").exists())
+    assert(not (root / "modules" / "mpi-sparse" / "README").exists())
+
+    # update a single optional submodule
+
+    result = git_fleximod(complex_repo, "update ToplevelOptional")
+    assert result.returncode == 0
+
+    status = git_fleximod(complex_repo, "status")
+    assert("ToplevelOptional at tag v5.3.2" in status.stdout)
+    assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
+    assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
+    assert("Complex at tag testtag01" in status.stdout)
+    assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout)
+
+
+    # Finally update optional
+    result = git_fleximod(complex_repo, "update --optional")
+    assert result.returncode == 0
+
+    status = git_fleximod(complex_repo, "status")
+    assert("ToplevelOptional at tag v5.3.2" in status.stdout)
+    assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
+    assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
+    assert("Complex at tag testtag01" in status.stdout)
+    assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout)
+
+    # now check the complex_sub
+    root = (complex_repo / "modules" / "complex" )
+    assert(not (root / "libraries" / "gptl" / ".git").exists())
+    assert(not (root / "libraries" / "mpi-serial" / ".git").exists())
+    assert((root / "modules" / "mpi-serial" / ".git").exists())
+    assert((root / "modules" / "mpi-serial2" / ".git").exists())
+    assert((root / "modules" / "mpi-sparse" / ".git").exists())
+    assert((root / "modules" / "mpi-sparse" / "m4").exists())
+    assert(not (root / "modules" / "mpi-sparse" / "README").exists())
+
+

From c834c30292c96bf27b63a78c0ce01be66c14f64d Mon Sep 17 00:00:00 2001
From: Samuel Levis
Date: Fri, 31 May 2024 11:28:29 -0600
Subject: [PATCH 122/126] Draft of ChangeLog/ChangeSum

---
 doc/ChangeLog | 96 +++++++++++++++++++++++++++++++++++++++++++++++++++
 doc/ChangeSum |  1 +
 2 files changed, 97 insertions(+)

diff --git a/doc/ChangeLog b/doc/ChangeLog
index b2f14a2c01..ee2e861be7 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,4 +1,100 @@
 ===============================================================
+Tag name: ctsm5.2.007
+Originator(s): jedwards4b (Jim Edwards, UCAR/CGD) slevis (Sam Levis, UCAR/CGD)
+Date: Fri 31 May 2024 11:24:29 AM MDT
+One-line Summary: Rm manage_externals and update documentation accordingly
+
+Purpose and description of changes
+----------------------------------
+#2443 Jim replaced this PR with the next one
+#2559 Remove manage externals
+#2564 Replace checkout_externals with git-fleximod in documentation
+
+Significant changes to scientifically-supported configurations
+--------------------------------------------------------------
+
+Does this tag change answers significantly for any of the following physics configurations?
+(Details of any changes will be given in the "Answer changes" section below.)
+
+ [Put an [X] in the box for any configuration with significant answer changes.]
+
+[ ] clm6_0
+
+[ ] clm5_1
+
+[ ] clm5_0
+
+[ ] ctsm5_0-nwp
+
+[ ] clm4_5
+
+
+Bugs fixed
+----------
+List of CTSM issues fixed (include CTSM Issue # and description) [one per line]:
+Fixes #2537 Remove manage_externals stuff from run_sys_tests
+Fixes #2536 Update documentation for git-fleximod
+Fixes #2467 Remove references to cheyenne from the testlist
+
+Notes of particular relevance for users
+---------------------------------------
+Caveats for users (e.g., need to interpolate initial conditions):
+  ./manage_externals/checkout_externals
+  is replaced with
+  ./bin/git-fleximod
+
+Changes to documentation:
+  Updated the documentation to refer to git-fleximod instead of manage_externals/checkout_externals
+
+Testing summary:
+----------------
+
+ [PASS means all tests PASS; OK means tests PASS other than expected fails.]
+
+  build-namelist tests (if CLMBuildNamelist.pm has changed):
+
+    derecho - PASS
+
+  python testing (if python code has changed; see instructions in python/README.md; document testing done):
+
+    derecho - PASS
+
+ [If python code has changed and you are NOT running aux_clm (e.g., because the only changes are in python
+  code) then also run the clm_pymods test suite; this is a small subset of aux_clm that runs the system
+  tests impacted by python changes. The best way to do this, if you expect no changes from the last tag in
+  either model output or namelists, is: create sym links pointing to the last tag's baseline directory,
+  named with the upcoming tag; then run the clm_pymods test suite comparing against these baselines but NOT
+  doing their own baseline generation. If you are already running the full aux_clm then you do NOT need to
+  separately run the clm_pymods test suite, and you can remove the following line.]
+
+    clm_pymods test suite on derecho -
+
+  regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing):
+
+    derecho ----- OK
+    izumi ------- OK
+
+  any other testing (give details below):
+
+  ctsm_sci
+    derecho ---- OK
+
+Answer changes
+--------------
+Changes answers relative to baseline: No
+
+Other details
+-------------
+List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+  JIM: What's the answer here?
+ +Pull Requests that document the changes (include PR ids): + https://github.com/ESCOMP/ctsm/pull/2443 + https://github.com/ESCOMP/ctsm/pull/2559 + https://github.com/ESCOMP/ctsm/pull/2564 + +=============================================================== +=============================================================== Tag name: ctsm5.2.006 Originator(s): slevis (Samuel Levis) Date: Tue 28 May 2024 03:14:18 PM MDT diff --git a/doc/ChangeSum b/doc/ChangeSum index 77bc8a426d..b3f1632571 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.2.007 multiple 05/31/2024 Rm manage_externals and update documentation accordingly ctsm5.2.006 slevis 05/28/2024 Update externals to cesm2_3_beta17, remove mct, retire /test/tools ctsm5.2.005 erik 05/13/2024 Fix clm6_0 defaults and CESM testing issues, add tests to detect these problems ctsm5.2.004 multiple 05/09/2024 CTSM5.2 1979 fsurdat and 1979-2026 landuse ne0np4 files + two fixes From 9a82a3fb531de7f1d31578f51091b700bccc3651 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 31 May 2024 13:54:53 -0600 Subject: [PATCH 123/126] Replace manage_externals with git-fleximod in an error message --- .lib/git-fleximod/git_fleximod/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.lib/git-fleximod/git_fleximod/utils.py b/.lib/git-fleximod/git_fleximod/utils.py index 7cc1de38cc..64a2e70d98 100644 --- a/.lib/git-fleximod/git_fleximod/utils.py +++ b/.lib/git-fleximod/git_fleximod/utils.py @@ -241,7 +241,7 @@ def _hanging_msg(working_directory, command): has taken {hanging_sec} seconds. It may be hanging. The command will continue to run, but you may want to abort -manage_externals with ^C and investigate. A possible cause of hangs is +git-fleximod with ^C and investigate. A possible cause of hangs is when svn or git require authentication to access a private repository. On some systems, svn and git requests for authentication information will not be displayed to the user. In this case, the program From c9020ec5320013121b84e4abc03bc0cf5bab2133 Mon Sep 17 00:00:00 2001 From: Samuel Levis Date: Fri, 31 May 2024 13:57:53 -0600 Subject: [PATCH 124/126] Replace "externals directories" with "git submodules" in ChangeLogs --- doc/.ChangeLog_template | 2 +- doc/ChangeLog | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/doc/.ChangeLog_template b/doc/.ChangeLog_template index f6a6dbd2f8..60a7f49288 100644 --- a/doc/.ChangeLog_template +++ b/doc/.ChangeLog_template @@ -157,7 +157,7 @@ Other details ------------- [Remove any lines that don't apply. Remove entire section if nothing applies.] 
-List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
+List any git submodules updated (cime, rtm, mosart, cism, fates, etc.):
 
 Pull Requests that document the changes (include PR ids):
 (https://github.com/ESCOMP/ctsm/pull)
diff --git a/doc/ChangeLog b/doc/ChangeLog
index ee2e861be7..8b6bb8f0d2 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -1,7 +1,7 @@
 ===============================================================
 Tag name: ctsm5.2.007
 Originator(s): jedwards4b (Jim Edwards, UCAR/CGD) slevis (Sam Levis, UCAR/CGD)
-Date: Fri 31 May 2024 11:24:29 AM MDT
+Date: Fri 31 May 2024 01:49:29 PM MDT
 One-line Summary: Rm manage_externals and update documentation accordingly
 
 Purpose and description of changes
@@ -85,8 +85,11 @@ Changes answers relative to baseline: No
 
 Other details
 -------------
-List any externals directories updated (cime, rtm, mosart, cism, fates, etc.):
-  JIM: What's the answer here?
+List any git submodules updated (cime, rtm, mosart, cism, fates, etc.):
+  Comparing .gitmodules against Externals.cfg and Externals_CLM.cfg:
+  - cismwrap_2_2_001 from cismwrap_2_1_100
+  - cime6.0.246 from cime6.0.238_httpsbranch01
+  - cdeps1.0.34 from cdeps1.0.33
 
 Pull Requests that document the changes (include PR ids):
   https://github.com/ESCOMP/ctsm/pull/2443

From a0f834f3d3e80388ebc87cb5d4fed15563cb5caa Mon Sep 17 00:00:00 2001
From: Teagan King
Date: Tue, 25 Jun 2024 09:21:45 -0600
Subject: [PATCH 125/126] update plumber for consistency and fix import

---
 python/ctsm/site_and_regional/run_tower.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/ctsm/site_and_regional/run_tower.py b/python/ctsm/site_and_regional/run_tower.py
index b5b2d4ad18..093e6a01fb 100755
--- a/python/ctsm/site_and_regional/run_tower.py
+++ b/python/ctsm/site_and_regional/run_tower.py
@@ -64,9 +64,9 @@
 # pylint: disable=wrong-import-position
 from ctsm.path_utils import path_to_ctsm_root
 from ctsm.download_utils import download_file
-from python.ctsm.site_and_regional.tower_arg_parse import get_parser
+from ctsm.site_and_regional.tower_arg_parse import get_parser
 from ctsm.site_and_regional.neon_site import NeonSite
-from ctsm.site_and_regional.plumber_site import PlumberSite
+from ctsm.site_and_regional.plumber_site import Plumber2Site
 
 # pylint: disable=import-error, wildcard-import, wrong-import-order
 from standard_script_setup import *

From 76f7220e0d14592d83af710926147044c9139129 Mon Sep 17 00:00:00 2001
From: Teagan King
Date: Tue, 25 Jun 2024 15:12:17 -0600
Subject: [PATCH 126/126] updates to run_tower

---
 python/ctsm/site_and_regional/run_tower.py | 80 ++++++++++------------
 1 file changed, 36 insertions(+), 44 deletions(-)

diff --git a/python/ctsm/site_and_regional/run_tower.py b/python/ctsm/site_and_regional/run_tower.py
index 1f0a3192cb..8cac7f6bfc 100755
--- a/python/ctsm/site_and_regional/run_tower.py
+++ b/python/ctsm/site_and_regional/run_tower.py
@@ -168,7 +168,7 @@ def parse_neon_listing(listing_file, valid_neon_sites):
 
     return available_list
 
-def parse_plumber_listing(valid_plumber_sites):
+def check_plumber_data(valid_plumber_sites):
     """
     A function to find plumber sites with the dates
     where data is available.
@@ -182,28 +182,17 @@
 
     for site_name in valid_plumber_sites:
 
-        # -- figure out start_year and end_year from shell commands
-        # TODO: do we even need this though if the shell commands accomplish the same thing?
-        # start_year = tmp_df2[0].iloc[0]
-        # end_year = tmp_df2[0].iloc[-1]
-
-        # -- figure out start_month and end_month
-        # start_month = tmp_df2[1].iloc[0]
-        # end_month = tmp_df2[1].iloc[-1]
+        # TODO: figure out start_year and end_year from shell commands
+        start_year = "DUMMY_START_YEAR"
+        end_year = "DUMMY_END_YEAR"
+        start_month = "DUMMY_START_MONTH"
+        end_month = "DUMMY_END_MONTH"
 
         logger.debug("Valid plumber site %s found!", site_name)
-        # logger.debug("File version %s", latest_version)
-        # logger.debug("start_year=%s", start_year)
-        # logger.debug("end_year=%s", end_year)
-        # logger.debug("start_month=%s", start_month)
-        # logger.debug("end_month=%s", end_month)
 
-        # finidat = None
-        # for line in finidatlist["object"]:
-        #     if site_name in line:
-        #         finidat = line.split(",")[0].split("/")[-1]
-
-        # plumber_site = Plumber2Site(site_name, start_year, end_year, start_month, end_month, finidat)
-        # available_list.append(plumber_site)
+        finidat = None  # TODO: may need to update?
+
+        plumber_site = Plumber2Site(site_name, start_year, end_year, start_month, end_month, finidat)
+        available_list.append(plumber_site)
 
     return available_list
 
@@ -292,27 +281,30 @@
         experiment=experiment,
     )
 
+    # -- check for available plumber data:
+    available_plumber_list = check_plumber_data(valid_plumber_sites)
+
     # -- Looping over plumber sites
-    # TODO: define available_plumber_list!
-    # for plumber_site in available_plumber_list:
-    #     if plumber_site.name in site_list:
-    #         if run_from_postad:
-    #             plumber_site.finidat = None
-    #         if not base_case_root:
-    #             user_mods_dirs = None
-    #             base_case_root = plumber_site.build_base_case(
-    #                 cesmroot, output_root, res, compset, user_mods_dirs, overwrite, setup_only
-    #             )
-    #         logger.info("-----------------------------------")
-    #         logger.info("Running CTSM for plumber site : %s", plumber_site.name)
-    #         plumber_site.run_case(
-    #             base_case_root,
-    #             run_type,
-    #             prism,
-    #             user_version,
-    #             overwrite,
-    #             setup_only,
-    #             no_batch,
-    #             rerun,
-    #             experiment,
-    #         )
+    for plumber_site in available_plumber_list:
+        if plumber_site.name in site_list:
+            if run_from_postad:
+                plumber_site.finidat = None
+            if not base_case_root:
+                user_mods_dirs = None
+                base_case_root = plumber_site.build_base_case(
+                    cesmroot, output_root, res, compset, user_mods_dirs, overwrite, setup_only
+                )
+            logger.info("-----------------------------------")
+            logger.info("Running CTSM for plumber site : %s", plumber_site.name)
+            plumber_site.run_case(
+                base_case_root,
+                run_type,
+                prism,
+                run_length,  # assumed available from the argument parser; run_case() requires it
+                user_version,
+                overwrite,
+                setup_only,
+                no_batch,
+                rerun,
+                experiment,
+            )
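
The DUMMY_* placeholders in check_plumber_data() still need real values for start_year/end_year and
start_month/end_month. Below is a minimal sketch of one way they could be derived from the data itself
instead of from shell commands. It assumes only that the PLUMBER forcing files sit in a local directory
and embed a YYYY-YYYY span in their file names; that layout, the helper name guess_plumber_year_range,
and the example file name are illustrative assumptions, not the verified PLUMBER2 convention.

    import glob
    import os
    import re

    def guess_plumber_year_range(data_dir, site_name):
        """Guess (start_year, end_year) for one site from its forcing file names.

        Assumes file names contain the site name plus a YYYY-YYYY span,
        e.g. AU-Tum_2002-2017_Met.nc (a hypothetical pattern); returns
        None when no file matches.
        """
        span = re.compile(r"(\d{4})-(\d{4})")
        ranges = []
        # Any file in data_dir that mentions the site is a candidate.
        for path in glob.glob(os.path.join(data_dir, "*" + site_name + "*")):
            match = span.search(os.path.basename(path))
            if match:
                ranges.append((int(match.group(1)), int(match.group(2))))
        if not ranges:
            return None
        # Report the widest span covered by any candidate file.
        return min(start for start, _ in ranges), max(end for _, end in ranges)

check_plumber_data() could call this once per site and keep the DUMMY defaults whenever it returns None,
so sites with no local data would behave exactly as they do now.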