Skip to content

Commit

Permalink
GNSS station checks
Browse files Browse the repository at this point in the history
  • Loading branch information
rzinke committed Sep 11, 2024
1 parent 0782d4a commit 740a8ff
Showing 1 changed file with 94 additions and 36 deletions.
130 changes: 94 additions & 36 deletions methods/secular/Secular_Requirement_Validation.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -496,6 +496,15 @@
"With this formulation, we can obtain InSAR and GNSS velocity estimates and their formal uncertainties (including in areas where zero deformation is expected). The default InSAR (and GNSS) velocity fit in MintPy is to estimate a mean linear velocity $(v)$ in the equation. To estimate a secular deformation rate, one can also estimate additional parameters for annual and semi-annual terms to account for seasonal periodic fluctuations in position. Below, we define a set of basis functions that MintPy will invert for, for both InSAR and GNSS data."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"<div class=\"alert alert-warning\">\n",
"If the time series spans less than one year, then annual and semi-annual functions cannot be reliably fit to noisy data. In that case, the periodic functions will not be included in these analyses.\n",
"</div>"
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -511,7 +520,18 @@
" 'polyline': [],\n",
" 'exp': {},\n",
" 'log': {}\n",
"}"
"}\n",
"\n",
"# Determine time span to check whether fitting of periodic functions is appropriate\n",
"ts_metadata = readfile.read_attribute(timeseries_filename)\n",
"start_date = dt.strptime(ts_metadata['START_DATE'], '%Y%m%d')\n",
"end_date = dt.strptime(ts_metadata['END_DATE'], '%Y%m%d')\n",
"ts_timespan = (end_date - start_date).days\n",
"\n",
"if ts_timespan < 365:\n",
" print('Time span is {:d} days'.format(ts_timespan))\n",
" ts_functions['periodic'] = []\n",
" print('No periodic functions will be fit')\n"
]
},
{
Expand Down Expand Up @@ -664,6 +684,13 @@
"### 4.4. Get GNSS Position Time Series <a id='gps_ts'></a>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We download the GNSS station data, and evaluate the station data quality based on data completeness and scatter of residuals."
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -672,56 +699,89 @@
},
"outputs": [],
"source": [
"use_stn = [] #stations to keep\n",
"bad_stn = [] #stations to toss\n",
"use_lats = [] \n",
"use_lons = []\n",
"# Empty dicts and lists to store GNSS data\n",
"gnss_stns = {}\n",
"bad_stns = {}\n",
"\n",
"for site_name in site_names:\n",
" gnss_stn = gnss.get_gnss_class(gnss_source)(site = site_name)\n",
" gnss_stn.open(print_msg=False)\n",
"\n",
"for counter, stn in enumerate(site_names):\n",
" gnss_obj = gnss.get_gnss_class(gnss_source)(site = stn, data_dir = os.path.join(mintpy_dir,'GNSS'))\n",
" gnss_obj.open(print_msg=False)\n",
" \n",
" # count number of dates in time range\n",
" dates = gnss_obj.dates\n",
" dates = gnss_stn.dates\n",
" range_days = (end_date_gnss - start_date_gnss).days\n",
" gnss_count = np.histogram(dates, bins=[start_date_gnss, end_date_gnss])\n",
" gnss_count = int(gnss_count[0][0])\n",
"\n",
" # for this quick screening check of data quality, we use constant incidence and azimuth angles \n",
" # get standard deviation of residuals to linear fit\n",
" disp_los = ut.enu2los(gnss_obj.dis_e, gnss_obj.dis_n, gnss_obj.dis_u, inc_angle, az_angle)\n",
" disp_los = ut.enu2los(gnss_stn.dis_e, gnss_stn.dis_n, gnss_stn.dis_u, inc_angle, az_angle)\n",
" disp_detrended = signal.detrend(disp_los)\n",
" stn_stdv = np.std(disp_detrended)\n",
" \n",
"\n",
" # select GNSS stations based on data completeness and scatter of residuals\n",
" disp_detrended = signal.detrend(disp_los)\n",
" if range_days*gnss_completeness_threshold <= gnss_count:\n",
" if stn_stdv > gnss_residual_stdev_threshold:\n",
" bad_stn.append(stn)\n",
" bad_stns[site_name] = gnss_stn\n",
" else:\n",
" use_stn.append(stn)\n",
" use_lats.append(site_lats[counter])\n",
" use_lons.append(site_lons[counter])\n",
" gnss_stns[site_name] = gnss_stn\n",
" else:\n",
" bad_stn.append(stn)\n",
"\n",
"site_names = use_stn\n",
"site_lats = use_lats\n",
"site_lons = use_lons\n",
" bad_stns[site_name] = gnss_stn\n",
"\n",
"# [optional] manually remove additional stations\n",
"gnss_to_remove=[]\n",
"\n",
"for i, gnss_site in enumerate(gnss_to_remove):\n",
" if gnss_site in site_names:\n",
" site_names.remove(gnss_site)\n",
" if gnss_site not in bad_stn:\n",
" bad_stn.append(gnss_site)\n",
"for site_name in gnss_to_remove:\n",
" bad_stns[site_name] = gnss_stns[site_name]\n",
" del gnss_stns[site_name]\n",
"\n",
"# Final list of site names\n",
"site_names = [*gnss_stns]\n",
"bad_site_names = [*bad_stns]\n",
"\n",
"print(\"\\nFinal list of {} stations used in analysis:\".format(len(site_names)))\n",
"print(site_names)\n",
"print(\"List of {} stations removed from analysis\".format(len(bad_stn)))\n",
"print(bad_stn)"
"print(\"List of {} stations removed from analysis\".format(len(bad_site_names)))\n",
"print(bad_site_names)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot stations in map view\n",
"# NOTE(review): assumes `insar_velocities` and the map bounds W, E, S, N were\n",
"# computed in earlier cells — confirm they are defined before running this cell.\n",
"\n",
"# Color-scale limits for the LOS velocity map [mm/year]\n",
"vmin, vmax = -25, 25\n",
"cmap = plt.get_cmap('RdBu_r')\n",
"\n",
"# Base map: InSAR LOS velocity field with a symmetric diverging colormap\n",
"fig, ax = plt.subplots(figsize=[18, 5.5])\n",
"cax = ax.imshow(insar_velocities, cmap=cmap, vmin=vmin, vmax=vmax, interpolation='nearest', extent=(W, E, S, N))\n",
"cbar = fig.colorbar(cax, ax=ax)\n",
"cbar.set_label('LOS velocity [mm/year]')\n",
"\n",
"# Plot valid stations\n",
"for i, gnss_stn in enumerate(gnss_stns.values()):\n",
"    # Retrieve station information\n",
"    # (populates gnss_stn.site_lat / site_lon, which are read below)\n",
"    gnss_stn.get_site_lat_lon()\n",
"\n",
"    # Plot station\n",
"    # Only the first station is labeled so the legend shows one entry per group\n",
"    ax.scatter(gnss_stn.site_lon, gnss_stn.site_lat, s=8**2, c='k',\n",
"               label=('valid' if i == 0 else None))\n",
"    ax.annotate(gnss_stn.site, (gnss_stn.site_lon, gnss_stn.site_lat))\n",
"\n",
"# Plot rejected stations\n",
"# Rejected stations are drawn as open 'X' markers to distinguish them from\n",
"# the filled circles of the valid stations\n",
"for i, gnss_stn in enumerate(bad_stns.values()):\n",
"    # Retrieve station information\n",
"    gnss_stn.get_site_lat_lon()\n",
"\n",
"    # Plot station\n",
"    ax.scatter(gnss_stn.site_lon, gnss_stn.site_lat, s=8**2,\n",
"               marker='X', facecolor='none', edgecolor='k',\n",
"               label=('rejected' if i == 0 else None))\n",
"    ax.annotate(gnss_stn.site, (gnss_stn.site_lon, gnss_stn.site_lat))\n",
"\n",
"ax.legend()\n"
]
},
{
Expand Down Expand Up @@ -749,14 +809,12 @@
"outputs": [],
"source": [
"# Empty dicts and lists to store GNSS data\n",
"gnss_stns = {}\n",
"gnss_velocities = []\n",
"gnss_velocity_errors = []\n",
"\n",
"# Loop through valid GNSS stations\n",
"for i, site_name in enumerate(site_names):\n",
"for i, gnss_stn in enumerate(gnss_stns.values()):\n",
" # Retrieve station information\n",
" gnss_stn = gnss.get_gnss_class(gnss_source)(site = site_name)\n",
" gnss_stn.get_los_displacement(insar_metadata,\n",
" start_date=start_date,\n",
" end_date=end_date)\n",
Expand All @@ -768,9 +826,6 @@
" # Outlier detection and removal\n",
" remove_gnss_outliers(gnss_stn, 'LOS', model=ts_functions,\n",
" threshold=3, max_iter=2, verbose=False)\n",
"\n",
" # Record filtered station to dictionary\n",
" gnss_stns[site_name] = gnss_stn\n",
" \n",
" # Model outlier-removed time-series\n",
" dis_los_hat, m_hat, mhat_se = model_gnss_timeseries(gnss_stn, 'LOS', ts_functions)\n",
Expand Down Expand Up @@ -845,7 +900,10 @@
"source": [
"# reference GNSS stations to GNSS reference site\n",
"print(\"Using GNSS reference station: \", sitedata['sites'][site]['gps_ref_site_name'])\n",
"ref_site_ind = site_names.index(sitedata['sites'][site]['gps_ref_site_name'])\n",
"if sitedata['sites'][site]['gps_ref_site_name'] == 'auto':\n",
" ref_site_ind = 0\n",
"else:\n",
" ref_site_ind = site_names.index(sitedata['sites'][site]['gps_ref_site_name'])\n",
"gnss_velocities = gnss_velocities - gnss_velocities[ref_site_ind]\n",
"\n",
"# reference InSAR to GNSS reference site\n",
Expand Down

0 comments on commit 740a8ff

Please sign in to comment.