Fix/post angola #143

Merged (21 commits) on Sep 20, 2022
1 change: 1 addition & 0 deletions .gitignore
@@ -3,3 +3,4 @@ notebooks/.ipynb_checkpoints/
/data/stash_select
notebooks/data_v2/
*.pyc
.idea
215 changes: 121 additions & 94 deletions notebooks/worksheet1.ipynb

Large diffs are not rendered by default.

48 changes: 27 additions & 21 deletions notebooks/worksheet2.ipynb
@@ -115,15 +115,16 @@
"source": [
"# Provide the names of the directories where the netCDF model files are stored\n",
"DATADIR = 'data_v2/'\n",
"DOMAIN = 'EAS-22'\n",
"\n",
"# Load and concatenate the HadGEM2-ES model cube data\n",
"infile = os.path.join(DATADIR, 'EAS-22/pr_EAS-22_MOHC-HadGEM2-ES_historical_r1i1p1_GERICS-REMO2015_v1_mon_*.nc')\n",
"infile = os.path.join(DATADIR, DOMAIN, 'pr_'+DOMAIN+'_MOHC-HadGEM2-ES_historical_r1i1p1_GERICS-REMO2015_v1_mon_*.nc')\n",
"cubes = iris.load(infile)\n",
"equalise_attributes(cubes)\n",
"hadgem2 = cubes.concatenate_cube()\n",
"\n",
"# Load and concatenate the MPI-ESM-LR model cube data\n",
"infile = os.path.join(DATADIR, 'EAS-22/pr_EAS-22_MPI-M-MPI-ESM-LR_historical_r1i1p1_GERICS-REMO2015_v1_mon_*.nc')\n",
"infile = os.path.join(DATADIR, DOMAIN, '/pr_'+DOMAIN+'_MPI-M-MPI-ESM-LR_historical_r1i1p1_GERICS-REMO2015_v1_mon_*.nc')\n",
"cubes = iris.load(infile)\n",
"equalise_attributes(cubes)\n",
"mpiesm = cubes.concatenate_cube()\n",
@@ -328,10 +329,10 @@
"outputs": [],
"source": [
"# print the unit\n",
"print('The current unit for data is: {}'.format(hadgem2.units))\n",
"print('The current unit for data is: ' + hadgem2.units)\n",
"# print the summary statistic (maximum monthly precipitation)\n",
"maxpr = np.max(hadgem2.data)\n",
"print('This is an example rainfall rate (kg m-2 s-1) prior to conversion: {:f}'.format(maxpr))"
"print('This is an example rainfall rate (kg m-2 s-1) prior to conversion: ' + maxpr)"
]
},
{
@@ -343,10 +344,10 @@
"# Convert units to kg m-2 day-1 (same as multiplying by 86400 seconds)\n",
"hadgem2.convert_units('kg m-2 day-1')\n",
"# Print cube.units to view new units for precipitation\n",
"print(f'The new rainfall units are: {hadgem2.units}')\n",
"print('The new rainfall units are: ' + hadgem2.units)\n",
"maxpr = np.max(hadgem2.data)\n",
"# print the summary statistic (maximum monthly precipitation) after the unit conversion\n",
"print(f'This is the same rainfall rate but now in (kg m-2 day-1): {maxpr:f}')"
"print('This is the same rainfall rate but now in (kg m-2 day-1): ' + maxpr)"
]
},
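
As a hedged aside on the printing changes above: cube units and numpy values are not strings, so plain '+' concatenation raises a TypeError; wrapping them in str() or using an f-string keeps the messages working. A short sketch with the cube from the previous cell:

```python
import numpy as np

print('The current unit for data is: ' + str(hadgem2.units))
maxpr = np.max(hadgem2.data)
print('Example rainfall rate (kg m-2 s-1) prior to conversion: ' + str(maxpr))

# Convert the flux to a daily total (equivalent to multiplying by 86400 seconds)
hadgem2.convert_units('kg m-2 day-1')
print(f'The new rainfall units are: {hadgem2.units}')
print(f'The same maximum rainfall rate in kg m-2 day-1: {np.max(hadgem2.data):f}')
```
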
{
@@ -366,16 +367,16 @@
"hadgem2.units = 'mm day-1'\n",
"\n",
"# Save the new cube as a new netCDF file\n",
"OUTDIR = 'data_v2/EAS-22'\n",
"HISTDIR = os.path.join(DATADIR, DOMAIN, 'historical')\n",
"\n",
"# Check to see if this directory exists, if not create it\n",
"if not os.path.isdir(OUTDIR):\n",
"if not os.path.isdir(HISTDIR):\n",
" # Make directory\n",
" os.mkdir(OUTDIR)\n",
" os.mkdir(HISTDIR)\n",
" # Set directory permissions \n",
" os.chmod(OUTDIR, 0o776)\n",
" os.chmod(HISTDIR, 0o776)\n",
"\n",
"outfile = os.path.join(OUTDIR, 'hadgem2-es.mon.1986_2005.GERICS-REMO2015.pr.mmday-1.nc')\n",
"outfile = os.path.join(HISTDIR, 'hadgem2-es.mon.1986_2005.GERICS-REMO2015.pr.mmday-1.nc')\n",
"iris.save(hadgem2, outfile)"
]
},
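
A sketch of the output-directory handling with the new HISTDIR constant; os.makedirs with exist_ok=True is an alternative to the isdir/mkdir pair that also creates any missing parent directories (os.mkdir only creates the final path component):

```python
import os
import iris

HISTDIR = os.path.join(DATADIR, DOMAIN, 'historical')

# Create the directory tree if it does not exist yet
os.makedirs(HISTDIR, exist_ok=True)
os.chmod(HISTDIR, 0o776)  # group-writable, matching the notebook

outfile = os.path.join(HISTDIR, 'hadgem2-es.mon.1986_2005.GERICS-REMO2015.pr.mmday-1.nc')
iris.save(hadgem2, outfile)
```
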
@@ -402,7 +403,7 @@
"\n",
"\n",
"# Save the new cube as a new netCDF file using the `outfile` filename we've provided below!\n",
"outfile = os.path.join(OUTDIR, 'mpi-esm-lr.mon.1986_2005.GERICS-REMO2015.pr.mmday-1.nc')\n",
"outfile = os.path.join(HISTDIR, 'mpi-esm-lr.mon.1986_2005.GERICS-REMO2015.pr.mmday-1.nc')\n",
"\n"
]
},
@@ -425,7 +426,7 @@
"outputs": [],
"source": [
"# Set up directory for the climatology\n",
"CLIMDIR = 'data_v2/EAS-22/climatology'\n",
"CLIMDIR = os.path.join(DATADIR, DOMAIN, 'climatology')\n",
"\n",
"# Check to see if this directory exists, if not create it\n",
"if not os.path.isdir(CLIMDIR):\n",
@@ -445,7 +446,7 @@
"source": [
"# Loop through two model runs\n",
"for gcmid in ['hadgem2-es', 'mpi-esm-lr']:\n",
" infile = os.path.join(OUTDIR, gcmid + '.mon.1986_2005.GERICS-REMO2015.pr.mmday-1.nc')\n",
" infile = os.path.join(HISTDIR, gcmid + '.mon.1986_2005.GERICS-REMO2015.pr.mmday-1.nc')\n",
"\n",
" # Load the data\n",
" data = iris.load_cube(infile)\n",
@@ -461,7 +462,7 @@
"\n",
" # The cube 'data_ond' contains data from October-December for all years. \n",
" # The command below calculates the mean over all years.\n",
" seasonal_mean = data_ond.aggregated_by(['seasons'], iris.analysis.MEAN)\n",
" seasonal_mean = data_ond.collapsed('time', iris.analysis.MEAN)\n",
" \n",
" # Save the OND seasonal mean as a netCDF\n",
" outfile = os.path.join(CLIMDIR, gcmid + '.OND.mean.1986_2005.pr.mmday-1.nc')\n",
@@ -541,7 +542,7 @@
"chirps_ond = chirps.extract(iris.Constraint(seasons='ond'))\n",
"\n",
"# Now calculate the climatological mean for this season\n",
"seasonal_mean = chirps_ond.aggregated_by(['seasons'], iris.analysis.MEAN)\n",
"seasonal_mean = chirps_ond.collapsed('time', iris.analysis.MEAN)\n",
"\n",
"# save the seasonal mean cube as a NetCDF file\n",
"outfile = os.path.join(CLIMDIR, 'chirps.OND.mean.1986_2005.pr.mmday-1.nc')\n",
@@ -655,23 +656,23 @@
"# Note this is where cube slicing is needed as you can only plot 2-coordinate\n",
"# dimensions with qplt.contourf, so here we have selected time[0] as there is only\n",
"# one timestep (the baseline 1986-2005 mean)\n",
"qplt.contourf(hadgem_cube[0], levels=levels, cmap=cm.RdBu, extend='max')\n",
"qplt.contourf(hadgem_cube, levels=levels, cmap=cm.RdBu, extend='max')\n",
" \n",
"\n",
"plt.title('HadGEM2-ES model') # plots a title for the plot\n",
"ax = plt.gca() # gca function that returns the current axes\n",
"ax.coastlines() # adds coastlines defined by the axes of the plot\n",
"\n",
"plt.subplot(1, 3, 2) # Create a new subplot for the model data; 1 row x 3 columns, 2nd plot\n",
"qplt.contourf(mpi_cube[0], levels=levels, cmap=cm.RdBu, extend='max')\n",
"qplt.contourf(mpi_cube, levels=levels, cmap=cm.RdBu, extend='max')\n",
"\n",
"plt.title('MPI-ESM-LR model') # plots a title for the plot\n",
"ax = plt.gca() # gca function that returns the current axes\n",
"ax.coastlines() # adds coastlines defined by the axes of the plot\n",
"\n",
"plt.subplot(1, 3, 3) # Create a new subplot for the observed data 1 row x 3 columns, 3rd plot\n",
" # This plot will be centred and below the two model plots\n",
"qplt.contourf(obs_cube[0], levels=levels, cmap=cm.RdBu, extend='max')\n",
"qplt.contourf(obs_cube, levels=levels, cmap=cm.RdBu, extend='max')\n",
"\n",
"plt.title('CHIRPS obs') # plots a title for the plot\n",
"ax = plt.gca() # gca function that returns the current axes\n",
@@ -746,9 +747,9 @@
"metadata": {
"instance_type": "ml.t3.medium",
"kernelspec": {
"display_name": "pyprecis-environment",
"display_name": "Python 3.6.10 ('pyprecis-environment')",
"language": "python",
"name": "pyprecis-environment"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -762,6 +763,11 @@
"pygments_lexer": "ipython3",
"version": "3.6.10"
},
"vscode": {
"interpreter": {
"hash": "30fc2c9368d5cc76c4fad6f63328738b6cf0a0091c34cff8da6b063602b9d1b0"
}
},
"widgets": {
"state": {},
"version": "1.1.2"
41 changes: 24 additions & 17 deletions notebooks/worksheet3.ipynb
@@ -68,15 +68,16 @@
"\n",
"# Provide the names of the directories where the netCDF model files are stored\n",
"DATADIR = 'data_v2/'\n",
"DOMAIN = 'EAS-22'\n",
"\n",
"# CHIRPS dir\n",
"CHIRPSDIR = os.path.join(DATADIR, 'CHIRPS/')\n",
"\n",
"# Directory name where processed data is stored \n",
"HISTDIR = 'data_v2/EAS-22/historical'\n",
"HISTDIR = os.path.join(DATADIR, DOMAIN, 'historical')\n",
"\n",
"# Directory name where climtology data is stored \n",
"CLIMDIR = 'data_v2/EAS-22/climatology'"
"CLIMDIR = os.path.join(DATADIR, DOMAIN, 'climatology')"
]
},
{
@@ -192,8 +193,8 @@
"\n",
"pole_lat = hadgem2_cube.coord_system().grid_north_pole_latitude\n",
"pole_lon = hadgem2_cube.coord_system().grid_north_pole_longitude\n",
"print(f'Pole Latitude: {pole_lat}')\n",
"print(f'Pole Longitude: {pole_lon}')"
"print('Pole Latitude: ' + pole_lat)\n",
"print('Pole Longitude: ' + pole_lon)"
]
},
{
@@ -212,8 +213,8 @@
"lon=np.array([99.5, 100.5])\n",
"lat=np.array([13.5, 14.5])\n",
"rotated_lons, rotated_lats = iris.analysis.cartography.rotate_pole(lon, lat, pole_lon, pole_lat)\n",
"print(f'Rotated Longitudes: {rotated_lons[0]:.2f}, {rotated_lons[1]:.2f}')\n",
"print(f'Rotated Latitudes: {rotated_lats[0]:.2f}, {rotated_lats[1]:.2f}')"
"print('Rotated Longitudes: ' + round(rotated_lons[0], 2) + ' , ' + round(rotated_lons[1], 2))\n",
"print('Rotated Latitudes: ' + round(rotated_lats[0], 2) + ' , ' + round(rotated_lats[1], 2))"
]
},
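
A sketch of the coordinate-rotation step shown above: the rotated-pole position is read from the model cube's coordinate system and used to transform true longitude/latitude points into the rotated frame; f-strings (or explicit str()) are the robust way to print the resulting numpy floats.

```python
import numpy as np
import iris.analysis.cartography

pole_lat = hadgem2_cube.coord_system().grid_north_pole_latitude
pole_lon = hadgem2_cube.coord_system().grid_north_pole_longitude
print(f'Pole Latitude: {pole_lat}')
print(f'Pole Longitude: {pole_lon}')

# True coordinates of two points of interest
lon = np.array([99.5, 100.5])
lat = np.array([13.5, 14.5])

# Transform them into the rotated-pole frame used by the EAS-22 domain
rotated_lons, rotated_lats = iris.analysis.cartography.rotate_pole(lon, lat, pole_lon, pole_lat)
print(f'Rotated Longitudes: {rotated_lons[0]:.2f}, {rotated_lons[1]:.2f}')
print(f'Rotated Latitudes: {rotated_lats[0]:.2f}, {rotated_lats[1]:.2f}')
```
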
{
@@ -247,7 +248,7 @@
" # save the constrained cube\n",
" outfile = os.path.join(CLIMDIR, gcm + '.mon.1986_2005.GERICS-REMO2015.pr.mmday-1.BK.nc')\n",
" iris.save(data_BK, outfile)\n",
" print(f'Saved: {outfile}')"
" print('Saved: ' + outfile)"
]
},
{
@@ -588,14 +589,14 @@
"mpi_regrid = mpi_model_cube.regrid(obs_cube_sub, iris.analysis.Nearest(extrapolation_mode='mask'))\n",
"\n",
"# Save HadGEM2 output\n",
"outpath = os.path.join(CLIMDIR, 'hadgem2-es.OND.mean.1986_2005.pr.mmday-1.rg.nc')\n",
"iris.save(hadgem2_regrid, outpath)\n",
"print('Saved: {}'.format(outfile))\n",
"outfile = os.path.join(CLIMDIR, 'hadgem2-es.OND.mean.1986_2005.pr.mmday-1.rg.nc')\n",
"iris.save(hadgem2_regrid, outfile)\n",
"print('Saved: ' + outfile)\n",
"\n",
"# Save MPI-ESM output\n",
"outpath = os.path.join(CLIMDIR, 'mpi-esm-lr.OND.mean.1986_2005.pr.mmday-1.rg.nc')\n",
"iris.save(mpi_regrid, outpath)\n",
"print('Saved: {}'.format(outfile))"
"outfile = os.path.join(CLIMDIR, 'mpi-esm-lr.OND.mean.1986_2005.pr.mmday-1.rg.nc')\n",
"iris.save(mpi_regrid, outfile)\n",
"print('Saved: ' + outfile)"
]
},
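
A sketch of the regridding cell, with the HadGEM2-ES source cube name assumed by analogy with mpi_model_cube: each model climatology is put onto the CHIRPS grid with nearest-neighbour regridding (target points outside the source grid are masked), and a single outfile variable is reused so the printed path always matches the file that was written.

```python
import os
import iris
import iris.analysis

scheme = iris.analysis.Nearest(extrapolation_mode='mask')

# hadgem2_model_cube is assumed to be the HadGEM2-ES OND climatology loaded earlier
hadgem2_regrid = hadgem2_model_cube.regrid(obs_cube_sub, scheme)
mpi_regrid = mpi_model_cube.regrid(obs_cube_sub, scheme)

for name, cube in [('hadgem2-es', hadgem2_regrid), ('mpi-esm-lr', mpi_regrid)]:
    outfile = os.path.join(CLIMDIR, name + '.OND.mean.1986_2005.pr.mmday-1.rg.nc')
    iris.save(cube, outfile)
    print('Saved: ' + outfile)
```
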
{
@@ -794,10 +795,10 @@
"'''\n",
"# Some helpful data locations\n",
"DATADIR = 'data_v2'\n",
"DOMAIN = 'EAS-22'\n",
"CHIRPSDIR = os.path.join(DATADIR, 'CHIRPS')\n",
"CRUDIR = os.path.join(DATADIR, 'CRU')\n",
"CLIMDIR = 'data_v2/EAS-22/climatology'\n",
"MODELDIR = os.path.join(DATADIR, 'cordex/EAS-22')\n",
"CLIMDIR = os.path.join(DATADIR, DOMAIN, 'climatology')\n",
"MODELDIR = os.path.join(DATADIR, 'cordex', DOMAIN)\n",
"\n",
"# Some helpful model variables\n",
"GCMS = ['hadgem2-es', 'mpi-esm-lr']\n",
@@ -1028,9 +1030,9 @@
"metadata": {
"instance_type": "ml.t3.medium",
"kernelspec": {
"display_name": "pyprecis-environment",
"display_name": "Python 3.6.10 ('pyprecis-environment')",
"language": "python",
"name": "pyprecis-environment"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -1044,6 +1046,11 @@
"pygments_lexer": "ipython3",
"version": "3.6.10"
},
"vscode": {
"interpreter": {
"hash": "30fc2c9368d5cc76c4fad6f63328738b6cf0a0091c34cff8da6b063602b9d1b0"
}
},
"widgets": {
"state": {},
"version": "1.1.2"
38 changes: 22 additions & 16 deletions notebooks/worksheet4.ipynb
@@ -69,10 +69,11 @@
"import numpy.ma as ma\n",
"\n",
"# Some helpful data locations\n",
"DATADIR = 'data_v2/EAS-22/'\n",
"CLIMDIR = os.path.join(DATADIR, 'climatology')\n",
"HISTDIR = os.path.join(DATADIR, 'historical')\n",
"FUTRDIR = os.path.join(DATADIR, 'future')\n",
"DATADIR = 'data_v2'\n",
"DOMAIN = 'EAS-22'\n",
"CLIMDIR = os.path.join(DATADIR, DOMAIN, 'climatology')\n",
"HISTDIR = os.path.join(DATADIR, DOMAIN, 'historical')\n",
"FUTRDIR = os.path.join(DATADIR, DOMAIN, 'rcp85')\n",
"GCMIDS = ['hadgem2-es', 'mpi-esm-lr']"
]
},
@@ -103,12 +104,12 @@
" data_ond = data.extract(iris.Constraint(seasons='ond'))\n",
"\n",
" # Now calculate the mean over the OND season\n",
" ond_mean = data_ond.aggregated_by(['seasons'], iris.analysis.MEAN)\n",
" ond_mean = data_ond.collapsed('time', iris.analysis.MEAN)\n",
"\n",
" # save the OND mean as a netCDF\n",
" outfile = os.path.join(CLIMDIR, gcmid + '.OND.mean.2041_2060.GERICS-REMO2015.pr.mmday-1.nc')\n",
" iris.save(ond_mean, outfile)\n",
" print('Saved: {}'.format(outfile))"
" print('Saved: ' + outfile)"
]
},
{
@@ -187,7 +188,7 @@
" # Save the resulting cube\n",
" outfile = os.path.join(CLIMDIR, gcmid + '.OND.mean.diff.GERICS-REMO2015.pr.mmday-1.nc')\n",
" iris.save(diff, outfile)\n",
" print('Saved {}'.format(outfile))\n",
" print('Saved ' + outfile)\n",
" # Find the percentage change\n",
" pcent_change = iris.analysis.maths.multiply(iris.analysis.maths.divide(diff, OND_baseline), 100)\n",
" # remember to change the title and units to reflect the data processing\n",
Expand All @@ -196,7 +197,7 @@
" # And save this too\n",
" outfile = os.path.join(CLIMDIR, gcmid + '.OND.mean.diffperc.GERICS-REMO2015.pr.mmday-1.nc')\n",
" iris.save(pcent_change, outfile)\n",
" print('Saved {}'.format(outfile))"
" print('Saved ' + outfile)"
]
},
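
A sketch of the percentage-change step in the cell above, using the diff and OND_baseline cubes from the same cell; the rename and the '%' units are illustrative values for the "change the title and units" comment in the notebook.

```python
import os
import iris
import iris.analysis.maths

# diff = future OND mean minus the 1986-2005 OND baseline, computed earlier in the cell
pcent_change = iris.analysis.maths.multiply(iris.analysis.maths.divide(diff, OND_baseline), 100)

# Update the metadata so it reflects the derived quantity (assumed name and units)
pcent_change.rename('precipitation percentage change')
pcent_change.units = '%'

outfile = os.path.join(CLIMDIR, gcmid + '.OND.mean.diffperc.GERICS-REMO2015.pr.mmday-1.nc')
iris.save(pcent_change, outfile)
print('Saved ' + outfile)
```
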
{
Expand All @@ -218,7 +219,7 @@
"source": [
"# HINT: Your filenames should have the format: \n",
"# gcmid + '.OND.mean.' + time_periods[period] + '.GERICS-REMO2015.tm.C.nc'\n",
"time_periods = {'historical':'1986_2005', 'future':'2041_2060'}\n",
"time_periods = {'historical':'1986_2005', 'rcp85':'2041_2060'}\n",
"\n",
"for gcmid in GCMIDS:\n",
" for period in time_periods.keys():\n",
Expand Down Expand Up @@ -261,7 +262,7 @@
" # Save\n",
" outfile = os.path.join(CLIMDIR, gcmid + '.OND.mean.diff.GERICS-REMO2015.tm.C.nc')\n",
" iris.save(diff, outfile)\n",
" print('Saved: {}'.format(outfile))"
" print('Saved: ' + outfile)"
]
},
{
Expand Down Expand Up @@ -367,8 +368,8 @@
"source": [
"# Read in the land-sea mask. \n",
"# The cube data array has a land fraction associated with it which we'll use to mask out ocean points.\n",
"land_fraction_file = 'sftlf_EAS-22_MOHC-HadGEM2-ES_historical_r0i0p0_GERICS-REMO2015_v1_fx_r0i0p0.nc'\n",
"land_fraction = iris.load_cube(DATADIR + land_fraction_file)\n",
"land_fraction_file = os.path.join(DATADIR, DOMAIN, 'sftlf_EAS-22_MOHC-HadGEM2-ES_historical_r0i0p0_GERICS-REMO2015_v1_fx_r0i0p0.nc')\n",
"land_fraction = iris.load_cube(land_fraction_file)\n",
"\n",
"# convert this to a binary (i.e. 1 or 0 mask)\n",
"land_sea_mask = land_fraction.copy()\n",
@@ -421,7 +422,7 @@
" # Save the area averaged monthly future anomalies (time series)\n",
" outpath = os.path.join(CLIMDIR, gcmid + '.mon.2041_2060.anom.series.GERICS-REMO2015.pr.mmday-1.nc')\n",
" iris.save(diff, outpath)\n",
" print('Saved: {}'.format(outpath))"
" print('Saved: ' + outpath)"
]
},
{
@@ -476,7 +477,7 @@
"iplt.plot(time, mpiesm, label = 'MPI-ESM-LR')\n",
"plt.legend()\n",
"plt.suptitle('2041-2060 Precipitation anomaly (relative to 1986-2005)')\n",
"plt.ylabel(f'Precipitation change ({hadgem2es.units}')\n",
"plt.ylabel('Precipitation change (' + hadgem2es.units +')')\n",
"plt.xlabel('Years')\n",
"plt.show()"
]
@@ -603,9 +604,9 @@
"metadata": {
"instance_type": "ml.t3.medium",
"kernelspec": {
"display_name": "pyprecis-environment",
"display_name": "Python 3.6.10 ('pyprecis-environment')",
"language": "python",
"name": "pyprecis-environment"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -619,6 +620,11 @@
"pygments_lexer": "ipython3",
"version": "3.6.10"
},
"vscode": {
"interpreter": {
"hash": "30fc2c9368d5cc76c4fad6f63328738b6cf0a0091c34cff8da6b063602b9d1b0"
}
},
"widgets": {
"state": {},
"version": "1.1.2"