Skip to content

Commit

Permalink
fixed logging, attributes, removed instantaneous values from daily and monthly files
Browse files Browse the repository at this point in the history
  • Loading branch information
BaptisteVandecrux committed Jun 15, 2024
1 parent c6f4233 commit a903538
Show file tree
Hide file tree
Showing 5 changed files with 22 additions and 9 deletions.
1 change: 1 addition & 0 deletions src/pypromice/process/get_l2tol3.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import pypromice
from pypromice.process.L2toL3 import toL3
from pypromice.process.write import prepare_and_write
logger = logging.getLogger(__name__)

def parse_arguments_l2tol3(debug_args=None):
parser = ArgumentParser(description="AWS L3 script for the processing L3 "+
Expand Down
3 changes: 2 additions & 1 deletion src/pypromice/process/join_l2.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
#!/usr/bin/env python
import os, unittest
import logging, sys, os, unittest
import pandas as pd
import xarray as xr
from argparse import ArgumentParser
from pypromice.process.utilities import addMeta, roundValues
from pypromice.process.write import prepare_and_write
from pypromice.process.L1toL2 import correctPrecip
logger = logging.getLogger(__name__)

def parse_arguments_join():
parser = ArgumentParser(description="AWS L2 joiner for merging together two L2 products, for example an L2 RAW and L2 TX data product. An hourly, daily and monthly L2 data product is outputted to the defined output path")
Expand Down
17 changes: 10 additions & 7 deletions src/pypromice/process/join_l3.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import numpy as np
import pandas as pd
import xarray as xr
logger = logging.getLogger(__name__)

def parse_arguments_joinl3(debug_args=None):
parser = ArgumentParser(description="AWS L3 script for the processing L3 data from L2 and merging the L3 data with its historical site. An hourly, daily and monthly L3 data product is outputted to the defined output path")
Expand Down Expand Up @@ -194,7 +195,6 @@ def join_l3():
station_dict = build_station_dict(args.config_folder)

l3m = xr.Dataset()
l3m.attrs['level'] = 'L3'
for stid in station_dict[args.site]:
logger.info(stid)

Expand All @@ -208,7 +208,7 @@ def join_l3():
if os.path.isfile(filepath):
is_gcnet = True
if not is_promice and not is_gcnet:
logger.info(stid, 'not found either in', args.folder_l3, 'or', args.folder_gcnet)
logger.info(stid+' not found either in '+args.folder_l3+' or '+args.folder_gcnet)
continue

l3, _ = loadArr(filepath)
Expand Down Expand Up @@ -246,17 +246,20 @@ def join_l3():
l3 = l3.drop(v)
else:
l3m[v] = ('time', l3m.t_u.data*np.nan)
logger.info('Unused variables in older dataset:',list_dropped)
logger.info('Unused variables in older dataset: '+' '.join(list_dropped))

# saving attributes of station under an attribute called $stid
l3m = l3m.assign_attrs({stid : l3.attrs.copy()})
st_attrs = l3m.attrs.get('stations_attributes', {})
st_attrs[stid] = l3.attrs.copy()
l3m.attrs["stations_attributes"] = st_attrs

# then stripping attributes
attrs_list = list(l3.attrs.keys())
for k in attrs_list:
del l3.attrs[k]

l3m.attrs[stid]['first_timestamp'] = l3.time.isel(time=0).dt.strftime( date_format='%Y-%m-%d %H:%M:%S').item()
l3m.attrs[stid]['last_timestamp'] = l3m.time.isel(time=0).dt.strftime( date_format='%Y-%m-%d %H:%M:%S').item()
l3m.attrs['stations_attributes'][stid]['first_timestamp'] = l3.time.isel(time=0).dt.strftime( date_format='%Y-%m-%d %H:%M:%S').item()
l3m.attrs['stations_attributes'][stid]['last_timestamp'] = l3m.time.isel(time=0).dt.strftime( date_format='%Y-%m-%d %H:%M:%S').item()

# merging by time block
l3m = xr.concat((l3.sel(
Expand All @@ -268,7 +271,7 @@ def join_l3():
# Assign site id
l3m.attrs['site_id'] = args.site
l3m.attrs['stations'] = station_dict[args.site]

l3m.attrs['level'] = 'L3'
if args.outpath is not None:
prepare_and_write(l3m, args.outpath, args.variables, args.metadata, '60min')
prepare_and_write(l3m, args.outpath, args.variables, args.metadata, '1D')
Expand Down
2 changes: 1 addition & 1 deletion src/pypromice/process/resample.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def resample_dataset(ds_h, t):
df_d = ds_h.to_dataframe().resample(t).mean()

# recalculating wind direction from averaged directional wind speeds
for var in ['wdir_u','wdir_l','wdir_i']:
for var in ['wdir_u','wdir_l']:
if var in df_d.columns:
if ('wspd_x_'+var.split('_')[1] in df_d.columns) & ('wspd_x_'+var.split('_')[1] in df_d.columns):
df_d[var] = _calcWindDir(df_d['wspd_x_'+var.split('_')[1]],
Expand Down
8 changes: 8 additions & 0 deletions src/pypromice/process/write.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,9 +79,17 @@ def prepare_and_write(dataset, outpath, vars_df=None, meta_dict=None, time='60mi
out_csv = os.path.join(outdir, name+'_hour.csv')
out_nc = os.path.join(outdir, name+'_hour.nc')
elif t == 86400:
# removing instantaneous values from daily and monthly files
for v in col_names:
if ('_i' in v) and ('_i_' not in v):
col_names.remove(v)
out_csv = os.path.join(outdir, name+'_day.csv')
out_nc = os.path.join(outdir, name+'_day.nc')
else:
# removing instantaneous values from daily and monthly files
for v in col_names:
if ('_i' in v) and ('_i_' not in v):
col_names.remove(v)
out_csv = os.path.join(outdir, name+'_month.csv')
out_nc = os.path.join(outdir, name+'_month.nc')
if not os.path.isdir(outdir):
Expand Down

0 comments on commit a903538

Please sign in to comment.