From 6b46787a5aa9167534e0f6fb8a0691104ac04647 Mon Sep 17 00:00:00 2001 From: Alexey Pechnikov Date: Sat, 9 Sep 2023 17:06:18 +0700 Subject: [PATCH] Rename SBAS class to Stack because for now it allows SBAS, PSI and other analyses. --- pygmtsar/pygmtsar/IO.py | 70 +++++++++---------- pygmtsar/pygmtsar/S1.py | 12 ++-- pygmtsar/pygmtsar/{SBAS.py => Stack.py} | 30 ++++---- .../pygmtsar/{SBAS_base.py => Stack_base.py} | 20 +++--- .../pygmtsar/{SBAS_dem.py => Stack_dem.py} | 25 +++---- .../{SBAS_detrend.py => Stack_detrend.py} | 26 +++---- .../{SBAS_geocode.py => Stack_geocode.py} | 18 ++--- .../{SBAS_incidence.py => Stack_incidence.py} | 48 ++++++------- .../pygmtsar/{SBAS_intf.py => Stack_intf.py} | 10 +-- .../{SBAS_landmask.py => Stack_landmask.py} | 20 +++--- .../{SBAS_merge.py => Stack_merge.py} | 10 +-- ..._merge_gmtsar.py => Stack_merge_gmtsar.py} | 4 +- .../{SBAS_orbits.py => Stack_orbits.py} | 8 +-- .../pygmtsar/{SBAS_prm.py => Stack_prm.py} | 4 +- pygmtsar/pygmtsar/{SBAS_ps.py => Stack_ps.py} | 14 ++-- .../{SBAS_reframe.py => Stack_reframe.py} | 18 ++--- ...rame_gmtsar.py => Stack_reframe_gmtsar.py} | 4 +- .../pygmtsar/{SBAS_sbas.py => Stack_sbas.py} | 16 ++--- .../{SBAS_stack.py => Stack_stack.py} | 10 +-- .../pygmtsar/{SBAS_stl.py => Stack_stl.py} | 16 ++--- .../{SBAS_tidal.py => Stack_tidal.py} | 28 ++++---- .../pygmtsar/{SBAS_topo.py => Stack_topo.py} | 8 +-- .../{SBAS_trans.py => Stack_trans.py} | 4 +- .../{SBAS_trans_inv.py => Stack_trans_inv.py} | 4 +- .../{SBAS_unwrap.py => Stack_unwrap.py} | 18 ++--- ...nwrap_snaphu.py => Stack_unwrap_snaphu.py} | 4 +- pygmtsar/pygmtsar/__init__.py | 2 +- pygmtsar/pygmtsar/datagrid.py | 22 +++--- 28 files changed, 237 insertions(+), 236 deletions(-) rename pygmtsar/pygmtsar/{SBAS.py => Stack.py} (72%) rename pygmtsar/pygmtsar/{SBAS_base.py => Stack_base.py} (92%) rename pygmtsar/pygmtsar/{SBAS_dem.py => Stack_dem.py} (93%) rename pygmtsar/pygmtsar/{SBAS_detrend.py => Stack_detrend.py} (94%) rename 
pygmtsar/pygmtsar/{SBAS_geocode.py => Stack_geocode.py} (97%) rename pygmtsar/pygmtsar/{SBAS_incidence.py => Stack_incidence.py} (90%) rename pygmtsar/pygmtsar/{SBAS_intf.py => Stack_intf.py} (96%) rename pygmtsar/pygmtsar/{SBAS_landmask.py => Stack_landmask.py} (90%) rename pygmtsar/pygmtsar/{SBAS_merge.py => Stack_merge.py} (97%) rename pygmtsar/pygmtsar/{SBAS_merge_gmtsar.py => Stack_merge_gmtsar.py} (96%) rename pygmtsar/pygmtsar/{SBAS_orbits.py => Stack_orbits.py} (88%) rename pygmtsar/pygmtsar/{SBAS_prm.py => Stack_prm.py} (96%) rename pygmtsar/pygmtsar/{SBAS_ps.py => Stack_ps.py} (95%) rename pygmtsar/pygmtsar/{SBAS_reframe.py => Stack_reframe.py} (93%) rename pygmtsar/pygmtsar/{SBAS_reframe_gmtsar.py => Stack_reframe_gmtsar.py} (98%) rename pygmtsar/pygmtsar/{SBAS_sbas.py => Stack_sbas.py} (97%) rename pygmtsar/pygmtsar/{SBAS_stack.py => Stack_stack.py} (98%) rename pygmtsar/pygmtsar/{SBAS_stl.py => Stack_stl.py} (96%) rename pygmtsar/pygmtsar/{SBAS_tidal.py => Stack_tidal.py} (92%) rename pygmtsar/pygmtsar/{SBAS_topo.py => Stack_topo.py} (95%) rename pygmtsar/pygmtsar/{SBAS_trans.py => Stack_trans.py} (99%) rename pygmtsar/pygmtsar/{SBAS_trans_inv.py => Stack_trans_inv.py} (99%) rename pygmtsar/pygmtsar/{SBAS_unwrap.py => Stack_unwrap.py} (94%) rename pygmtsar/pygmtsar/{SBAS_unwrap_snaphu.py => Stack_unwrap_snaphu.py} (98%) diff --git a/pygmtsar/pygmtsar/IO.py b/pygmtsar/pygmtsar/IO.py index 6dbffa75..91d97a4f 100644 --- a/pygmtsar/pygmtsar/IO.py +++ b/pygmtsar/pygmtsar/IO.py @@ -17,7 +17,7 @@ class IO(datagrid): def dump(self, to_path=None): """ - Dump SBAS object state to a pickle file (SBAS.pickle in the processing directory by default). + Dump Stack object state to a pickle file (Stack.pickle in the processing directory by default). 
Parameters ---------- @@ -31,34 +31,34 @@ def dump(self, to_path=None): Examples -------- Dump the current state to the default dump file in the processing directory: - sbas.dump() + stack.dump() Notes ----- - This method serializes the state of the SBAS object and saves it to a pickle file. The pickle file can be used to - restore the SBAS object with its processed data and configuration. By default, the dump file is named "SBAS.pickle" + This method serializes the state of the Stack object and saves it to a pickle file. The pickle file can be used to + restore the Stack object with its processed data and configuration. By default, the dump file is named "Stack.pickle" and is saved in the processing directory. An alternative file path can be provided using the `to_path` parameter. """ import pickle import os if to_path is None: - sbas_pickle = os.path.join(self.basedir, 'SBAS.pickle') + stack_pickle = os.path.join(self.basedir, 'Stack.pickle') else: if os.path.isdir(to_path): - sbas_pickle = os.path.join(to_path, 'SBAS.pickle') + stack_pickle = os.path.join(to_path, 'Stack.pickle') else: - sbas_pickle = to_path + stack_pickle = to_path - print (f'NOTE: save state to file {sbas_pickle}') - pickle.dump(self, open(sbas_pickle, 'wb')) + print (f'NOTE: save state to file {stack_pickle}') + pickle.dump(self, open(stack_pickle, 'wb')) return @staticmethod def restore(from_path): """ - Restore SBAS object state from a pickle file (SBAS.pickle in the processing directory by default). + Restore Stack object state from a pickle file (Stack.pickle in the processing directory by default). Parameters ---------- @@ -67,36 +67,36 @@ def restore(from_path): Returns ------- - SBAS - The restored SBAS object. + Stack + The restored Stack object. Examples -------- Restore the current state from the default dump file in the processing directory: - SBAS.restore() + Stack.restore() Notes ----- - This static method restores the state of an SBAS object from a pickle file. 
The pickle file should contain the - serialized state of the SBAS object, including its processed data and configuration. By default, the method assumes - the input file is named "SBAS.pickle" and is located in the processing directory. An alternative file path can be - provided using the `from_path` parameter. The method returns the restored SBAS object. + This static method restores the state of a Stack object from a pickle file. The pickle file should contain the + serialized state of the Stack object, including its processed data and configuration. By default, the method assumes + the input file is named "Stack.pickle" and is located in the processing directory. An alternative file path can be + provided using the `from_path` parameter. The method returns the restored Stack object. """ import pickle import os if os.path.isdir(from_path): - sbas_pickle = os.path.join(from_path, 'SBAS.pickle') + stack_pickle = os.path.join(from_path, 'Stack.pickle') else: - sbas_pickle = from_path + stack_pickle = from_path - print (f'NOTE: load state from file {sbas_pickle}') - return pickle.load(open(sbas_pickle, 'rb')) + print (f'NOTE: load state from file {stack_pickle}') + return pickle.load(open(stack_pickle, 'rb')) def backup(self, backup_dir, copy=False, debug=False): """ - Backup framed SBAS scenes, orbits, DEM, and landmask files to build a minimal reproducible dataset. + Backup framed Stack scenes, orbits, DEM, and landmask files to build a minimal reproducible dataset. 
Parameters ---------- @@ -115,20 +115,20 @@ def backup(self, backup_dir, copy=False, debug=False): Examples -------- Backup the files to the specified directory: - sbas.backup('backup') + stack.backup('backup') Open the backup for the reproducible run by defining it as a new data directory: - sbas = SBAS('backup', 'backup/DEM_WGS84.nc', 'raw') + stack = Stack('backup', 'backup/DEM_WGS84.nc', 'raw') Notes ----- - This method backs up the framed SBAS scenes, orbits, DEM, and landmask files to a specified backup directory. + This method backs up the framed Stack scenes, orbits, DEM, and landmask files to a specified backup directory. It provides a way to create a minimal reproducible dataset by preserving the necessary files for processing. The method creates the backup directory if it does not exist. By default, the method moves the scene and orbit files to the backup directory, effectively removing them from the work directory. The DEM and landmask files are always copied to the backup directory. If the `copy` parameter is set to True, the scene and orbit files will be copied instead of moved. Use caution when setting `copy` to True as it can result in duplicated files and consume - additional storage space. The method also updates the SBAS object's dataframe to mark the removed files as empty. + additional storage space. The method also updates the Stack object's dataframe to mark the removed files as empty. 
""" import os import shutil @@ -137,7 +137,7 @@ def backup(self, backup_dir, copy=False, debug=False): # this optional file is dumped state, copy it if exists # auto-generated file can't be a symlink but user-defined symlink target should be copied - filename = os.path.join(self.basedir, 'SBAS.pickle') + filename = os.path.join(self.basedir, 'Stack.pickle') if os.path.exists(filename): if debug: print ('DEBUG: copy', filename, backup_dir) @@ -270,9 +270,9 @@ def load_pairs(self, name='phase', subswath=None): def open_grid(self, name, subswath=None, add_subswath=True, chunksize=None): """ - sbas.open_grid('intf_ll2ra') - sbas.open_grid('intf_ra2ll') - sbas.open_grid('intfweight') + stack.open_grid('intf_ll2ra') + stack.open_grid('intf_ra2ll') + stack.open_grid('intfweight') """ import xarray as xr @@ -343,7 +343,7 @@ def save_grid(self, data, name, subswath=None, caption='Saving 2D grid', chunksi def open_stack(self, pairs, name, subswath=None, add_subswath=True, chunksize=None): """ - sbas.open_stack(baseline_pairs,'phasefilt') + stack.open_stack(baseline_pairs,'phasefilt') """ import xarray as xr import pandas as pd @@ -437,7 +437,7 @@ def open_stack_slc(self, dates=None, subswath=None, intensity=False, dfact=2.5e- def open_stack_geotif(self, dates=None, subswath=None, intensity=False, chunksize=None): """ - tiffs = sbas.open_stack_geotif(['2022-06-16', '2022-06-28'], intensity=True) + tiffs = stack.open_stack_geotif(['2022-06-16', '2022-06-28'], intensity=True) """ import xarray as xr import rioxarray as rio @@ -477,7 +477,7 @@ def open_model(self, name, chunksize=None): Opens an xarray 3D Dataset from a NetCDF file and re-chunks it based on the specified chunksize. This function takes the name of the model to be opened, reads the NetCDF file, and re-chunks - the dataset according to the provided chunksize or the default value from the 'sbas' object. + the dataset according to the provided chunksize or the default value from the 'stack' object. 
The 'date' dimension is always chunked with a size of 1. Parameters @@ -486,7 +486,7 @@ def open_model(self, name, chunksize=None): The name of the model file to be opened. chunksize : int, optional The chunk size to be used for dimensions other than 'date'. If not provided, the default - chunk size from the 'sbas' object will be used. + chunk size from the 'stack' object will be used. Returns ------- @@ -537,7 +537,7 @@ def save_model(self, model, name=None, caption='Saving 3D datacube', chunksize=N The model to be saved. chunksize : int, optional The chunk size to be used for dimensions other than 'date'. If not provided, the default - chunk size from the 'sbas' object will be used. + chunk size from the 'stack' object will be used. caption: str The text caption for the saving progress bar. diff --git a/pygmtsar/pygmtsar/S1.py b/pygmtsar/pygmtsar/S1.py index 2fe84760..4b47f92b 100644 --- a/pygmtsar/pygmtsar/S1.py +++ b/pygmtsar/pygmtsar/S1.py @@ -13,7 +13,7 @@ class S1(): @staticmethod def scan_slc(datadir, orbit=None, mission=None, subswath=None, polarization=None): """ - Initialize an instance of the SBAS class. + Initialize an instance of the Stack class. 
Parameters ---------- @@ -34,11 +34,11 @@ def scan_slc(datadir, orbit=None, mission=None, subswath=None, polarization=None Examples -------- - Initialize an SBAS object with the data directory 'data' and the base directory 'raw': - sbas = S1('data', basedir='raw') + Initialize a Stack object with the data directory 'data' and the base directory 'raw': + stack = S1('data', basedir='raw') - Initialize an SBAS object with the data directory 'data', DEM filename 'data/DEM_WGS84.nc', and the base directory 'raw': - sbas = SBAS('data', 'data/DEM_WGS84.nc', 'raw') + Initialize a Stack object with the data directory 'data', DEM filename 'data/DEM_WGS84.nc', and the base directory 'raw': + stack = Stack('data', 'data/DEM_WGS84.nc', 'raw') """ import os import shutil @@ -217,7 +217,7 @@ def geoloc2bursts(metapath): raise ValueError('ERROR: Two or more scenes required') daily_scenes = df.groupby(['date', 'subswath'])['datetime'].count().values.max() if daily_scenes > 1: - print ('NOTE: Found multiple scenes for a single day, use function SBAS.reframe() to stitch the scenes') + print ('NOTE: Found multiple scenes for a single day, use function Stack.reframe() to stitch the scenes') return df diff --git a/pygmtsar/pygmtsar/SBAS.py b/pygmtsar/pygmtsar/Stack.py similarity index 72% rename from pygmtsar/pygmtsar/SBAS.py rename to pygmtsar/pygmtsar/Stack.py index f3817ec1..5ac6378a 100755 --- a/pygmtsar/pygmtsar/SBAS.py +++ b/pygmtsar/pygmtsar/Stack.py @@ -7,15 +7,21 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_ps import SBAS_ps +from .Stack_ps import Stack_ps from .S1 import S1 from .PRM import PRM -class SBAS(SBAS_ps): +class Stack(Stack_ps): - def __init__(self, basedir, scenes, reference=None, dem_filename=None, landmask_filename=None, drop_if_exists=False): + df = None + basedir = None + reference = None + dem_filename = None + landmask_filename = None + + 
def __init__(self, basedir, drop_if_exists=False): """ - Initialize an instance of the SBAS class. + Initialize an instance of the Stack class. Parameters ---------- @@ -30,11 +36,11 @@ def __init__(self, basedir, scenes, reference=None, dem_filename=None, landmask_ Examples -------- - Initialize an SBAS object with the data directory 'data' and the base directory 'raw': - sbas = SBAS('data', basedir='raw') + Initialize a Stack object with the data directory 'data' and the base directory 'raw': + stack = Stack('data', basedir='raw') - Initialize an SBAS object with the data directory 'data', DEM filename 'data/DEM_WGS84.nc', and the base directory 'raw': - sbas = SBAS('data', 'data/DEM_WGS84.nc', 'raw') + Initialize a Stack object with the data directory 'data', DEM filename 'data/DEM_WGS84.nc', and the base directory 'raw': + stack = Stack('data', 'data/DEM_WGS84.nc', 'raw') """ import os import shutil @@ -48,12 +54,12 @@ def __init__(self, basedir, scenes, reference=None, dem_filename=None, landmask_ os.makedirs(basedir) self.basedir = basedir + def set_scenes(self, scenes): self.df = scenes - if reference is None: - print (f'NOTE: reference scene is not defined, use {scenes.index[0]}. You can change it like SBAS.set_reference("2022-01-20")') + if self.reference is None: + print (f'NOTE: auto set reference scene {scenes.index[0]}. 
You can change it like Stack.set_reference("2022-01-20")') self.reference = self.df.index[0] - self.set_dem(dem_filename) - self.set_landmask(landmask_filename) + return self # def make_gaussian_filter(self, range_dec, azi_dec, wavelength, debug=False): # """ diff --git a/pygmtsar/pygmtsar/SBAS_base.py b/pygmtsar/pygmtsar/Stack_base.py similarity index 92% rename from pygmtsar/pygmtsar/SBAS_base.py rename to pygmtsar/pygmtsar/Stack_base.py index 84d2da7a..07e99360 100644 --- a/pygmtsar/pygmtsar/SBAS_base.py +++ b/pygmtsar/pygmtsar/Stack_base.py @@ -11,23 +11,23 @@ from .tqdm_joblib import tqdm_joblib from .tqdm_dask import tqdm_dask -class SBAS_base(tqdm_joblib, IO): +class Stack_base(tqdm_joblib, IO): def __repr__(self): return 'Object %s %d items\n%r' % (self.__class__.__name__, len(self.df), self.df) def to_dataframe(self): """ - Return a Pandas DataFrame for all SBAS scenes. + Return a Pandas DataFrame for all Stack scenes. Returns ------- pandas.DataFrame - The DataFrame containing SBAS scenes. + The DataFrame containing Stack scenes. Examples -------- - df = sbas.to_dataframe() + df = stack.to_dataframe() """ return self.df @@ -47,7 +47,7 @@ def multistem_stem(self, subswath, dt=None): def set_reference(self, reference): """ - Define reference scene for SBAS object. + Define reference scene for Stack object. Parameters ---------- @@ -56,16 +56,16 @@ def set_reference(self, reference): Returns ------- - SBAS - Modified instance of the SBAS class. + Stack + Modified instance of the Stack class. 
Examples -------- Set the reference scene to '2022-01-20': - sbas.set_reference('2022-01-20') + stack.set_reference('2022-01-20') """ if reference is None: - print ('NOTE: reference scene is None, SBAS.set_reference() command is ignored') + print ('NOTE: reference scene is None, Stack.set_reference() command is ignored') return self if not reference in self.df.index: raise Exception('Reference scene not found') @@ -150,7 +150,7 @@ def get_subswath(self, subswath=None): assert subswath is None or subswath in subswaths, f'ERROR: subswath {subswath} not found' if subswath is not None: return subswath - assert len(subswaths)==1, f'ERROR: multiple subswaths {subswaths} found, merge them first using SBAS.merge_parallel()' + assert len(subswaths)==1, f'ERROR: multiple subswaths {subswaths} found, merge them first using Stack.merge_parallel()' # define subswath return subswaths[0] diff --git a/pygmtsar/pygmtsar/SBAS_dem.py b/pygmtsar/pygmtsar/Stack_dem.py similarity index 93% rename from pygmtsar/pygmtsar/SBAS_dem.py rename to pygmtsar/pygmtsar/Stack_dem.py index ffbecb6f..b5a868e2 100644 --- a/pygmtsar/pygmtsar/SBAS_dem.py +++ b/pygmtsar/pygmtsar/Stack_dem.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_reframe import SBAS_reframe +from .Stack_reframe import Stack_reframe from .PRM import PRM -class SBAS_dem(SBAS_reframe): +class Stack_dem(Stack_reframe): def set_dem(self, dem_filename): """ @@ -29,10 +29,7 @@ def set_dem(self, dem_filename): Examples -------- Set the DEM filename: - sbas = sbas.set_dem('data/DEM_WGS84.nc') - - Alternatively, the same result can be achieved during SBAS initialization: - sbas = SBAS(..., dem_filename='data/DEM_WGS84.nc') + stack = stack.set_dem('data/DEM_WGS84.nc') Notes ----- @@ -79,10 +76,10 @@ def get_dem(self, subswath=None, geoloc=False, buffer_degrees=0.02): Examples -------- Get DEM for all the 
processed subswaths: - topo_ll = sbas.get_dem() + topo_ll = stack.get_dem() Get DEM for a single subswath IW1: - topo_ll = sbas.get_dem(1) + topo_ll = stack.get_dem(1) Notes ----- @@ -152,19 +149,19 @@ def download_dem(self, backend=None, product='SRTM1', resolution_meters=None, me Examples -------- Download STRM1 DEM with a resolution of 30 meters and convert it to the default 60-meter grid: - sbas.download_dem() + stack.download_dem() Download STRM1 DEM with a resolution of 30 meters and convert it to a 60-meter grid: - sbas.download_dem(resolution_meters=60) + stack.download_dem(resolution_meters=60) Download STRM3 DEM with a resolution of 90 meters and convert it to a 120-meter grid: - sbas.download_dem(product='STRM3', resolution_meters=120) + stack.download_dem(product='STRM3', resolution_meters=120) Load and crop from local NetCDF file: - sbas.download_dem(product='GEBCO_2020/GEBCO_2020.nc') + stack.download_dem(product='GEBCO_2020/GEBCO_2020.nc') Load and crop from local GeoTIF file: - sbas.download_dem(product='GEBCO_2019.tif') + stack.download_dem(product='GEBCO_2019.tif') Notes ----- @@ -181,7 +178,7 @@ def download_dem(self, backend=None, product='SRTM1', resolution_meters=None, me from tqdm.auto import tqdm if self.dem_filename is not None: - print ('NOTE: DEM exists, ignore the command. Use SBAS.set_dem(None) to allow new DEM downloading') + print ('NOTE: DEM exists, ignore the command. 
Use Stack.set_dem(None) to allow new DEM downloading') return if backend is not None: diff --git a/pygmtsar/pygmtsar/SBAS_detrend.py b/pygmtsar/pygmtsar/Stack_detrend.py similarity index 94% rename from pygmtsar/pygmtsar/SBAS_detrend.py rename to pygmtsar/pygmtsar/Stack_detrend.py index 3dd170a4..3adff54a 100644 --- a/pygmtsar/pygmtsar/SBAS_detrend.py +++ b/pygmtsar/pygmtsar/Stack_detrend.py @@ -7,9 +7,9 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_unwrap import SBAS_unwrap +from .Stack_unwrap import Stack_unwrap -class SBAS_detrend(SBAS_unwrap): +class Stack_detrend(Stack_unwrap): # # def detrend_parallel(self, pairs=None, chunksize=None, n_jobs=-1, interactive=False, **kwargs): # """ @@ -37,12 +37,12 @@ class SBAS_detrend(SBAS_unwrap): # Examples # -------- # Detrend plain and topography and read the results: -# sbas.detrend_parallel(pairs) -# detrended = sbas.open_grids(pairs, 'detrend') +# stack.detrend_parallel(pairs) +# detrended = stack.open_grids(pairs, 'detrend') # # Detrend ionospheric effects and solid Earth's tides on large areas using Gaussian filtering # and detrend plain and topography after that: -# sbas.detrend_parallel(pairs, wavelength=12000) +# stack.detrend_parallel(pairs, wavelength=12000) # # Notes # ----- @@ -108,7 +108,7 @@ class SBAS_detrend(SBAS_unwrap): # Examples # -------- # Simplest detrending: -# unwrap_detrended = sbas.detrend(pair.values[0] if isinstance(pairs, pd.DataFrame) else pair[0]) +# unwrap_detrended = stack.detrend(pair.values[0] if isinstance(pairs, pd.DataFrame) else pair[0]) # # Detrend unwrapped interferogram in radar coordinates, see for details: # - [GitHub Issue 98](https://github.com/gmtsar/gmtsar/issues/98) @@ -257,17 +257,17 @@ def stack_gaussian2d(self, grid, wavelength, truncate=3.0, resolution_meters=90, Examples -------- Detrend ionospheric effects and solid Earth's tides on a large area and 
save to disk: - sbas.gaussian_parallel(slcs, wavelength=400) + stack.gaussian_parallel(slcs, wavelength=400) For band-pass filtering apply the function twice and save to disk: - model = sbas.gaussian_parallel(slcs, wavelength=400, interactive=True) \ - - sbas.gaussian_parallel(slcs, wavelength=2000, interactive=True) - sbas.save_model(model, caption='Gaussian Band-Pass filtering') + model = stack.gaussian_parallel(slcs, wavelength=400, interactive=True) \ + - stack.gaussian_parallel(slcs, wavelength=2000, interactive=True) + stack.save_model(model, caption='Gaussian Band-Pass filtering') Detrend and return lazy xarray dataarray: - sbas.gaussian_parallel(slcs, wavelength=400, interactive=True) + stack.gaussian_parallel(slcs, wavelength=400, interactive=True) For band-pass filtering apply the function twice: - sbas.gaussian_parallel(slcs, wavelength=400, interactive=True) \ - - sbas.gaussian_parallel(slcs, wavelength=2000, interactive=True) + stack.gaussian_parallel(slcs, wavelength=400, interactive=True) \ + - stack.gaussian_parallel(slcs, wavelength=2000, interactive=True) """ import xarray as xr diff --git a/pygmtsar/pygmtsar/SBAS_geocode.py b/pygmtsar/pygmtsar/Stack_geocode.py similarity index 97% rename from pygmtsar/pygmtsar/SBAS_geocode.py rename to pygmtsar/pygmtsar/Stack_geocode.py index b06cfc23..c0f4c311 100644 --- a/pygmtsar/pygmtsar/SBAS_geocode.py +++ b/pygmtsar/pygmtsar/Stack_geocode.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_sbas import SBAS_sbas +from .Stack_sbas import Stack_sbas from .tqdm_dask import tqdm_dask -class SBAS_geocode(SBAS_sbas): +class Stack_geocode(Stack_sbas): def geocode_parallel(self, coarsen=4, **kwargs): """ @@ -29,7 +29,7 @@ def geocode_parallel(self, coarsen=4, **kwargs): Examples -------- - sbas.topo_parallel() + stack.topo_parallel() Notes ----- @@ -140,7 +140,7 @@ def ll2ra(self, data, 
z_offset=None): # self.topo_parallel(coarsen=coarsen) # # # build geographic coordinates transformation matrix for landmask and other grids -# sbas.intf_ll2ra_matrix(data, subswath, chunksize=chunksize) +# stack.intf_ll2ra_matrix(data, subswath, chunksize=chunksize) # # build radar coordinates transformation matrix for the interferograms grid stack # self.intf_ra2ll_matrix(data, subswath, chunksize=chunksize) ########################################################################################## @@ -167,9 +167,9 @@ def ra2ll(self, data, subswath, autoscale=True, chunksize=None): Examples -------- Geocode 3D unwrapped phase grid stack: - unwraps_ll = sbas.intf_ra2ll(sbas.open_grids(pairs, 'unwrap')) + unwraps_ll = stack.intf_ra2ll(stack.open_grids(pairs, 'unwrap')) # or use "geocode" option for open_grids() instead: - unwraps_ll = sbas.open_grids(pairs, 'unwrap', geocode=True) + unwraps_ll = stack.open_grids(pairs, 'unwrap', geocode=True) """ import dask import xarray as xr @@ -244,7 +244,7 @@ def intf_block(lats_block, lons_block, stackval=None): #print ('step_y', step_y, 'step_x', step_x) assert step_y>=1 and step_x>=1, f'Transforming grid spacing (grid_dy, grid_dx) is smaller \ than transform matrix spacing (trans_dy, trans_dx), \ - call SBAS.topo_ra_parallel() or SBAS.geocode_parallel() with less coarsing' + call Stack.topo_ra_parallel() or Stack.geocode_parallel() with less coarsing' # decimate the full trans grid to the required spacing if autoscale and (step_y>1 or step_x>1): # define the equally spacing geographic coordinates grid @@ -439,8 +439,8 @@ def intf_ll2ra(self, grids, chunksize=None): Examples -------- Inverse geocode 3D unwrapped phase grids stack: - unwraps_ll = sbas.open_grids(pairs, 'unwrap', geocode=True) - unwraps = sbas.intf_ll2ra(unwraps_ll) + unwraps_ll = stack.open_grids(pairs, 'unwrap', geocode=True) + unwraps = stack.intf_ll2ra(unwraps_ll) """ import dask import xarray as xr diff --git a/pygmtsar/pygmtsar/SBAS_incidence.py 
b/pygmtsar/pygmtsar/Stack_incidence.py similarity index 90% rename from pygmtsar/pygmtsar/SBAS_incidence.py rename to pygmtsar/pygmtsar/Stack_incidence.py index c1992dd8..0632975f 100644 --- a/pygmtsar/pygmtsar/SBAS_incidence.py +++ b/pygmtsar/pygmtsar/Stack_incidence.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_geocode import SBAS_geocode +from .Stack_geocode import Stack_geocode from .tqdm_dask import tqdm_dask -class SBAS_incidence(SBAS_geocode): +class Stack_incidence(Stack_geocode): def los_projection(self, data): """ @@ -29,7 +29,7 @@ def los_projection(self, data): Examples ------- Calculate tidal LOS projection measured in meter [m]: - los_projection_mm = sbas.los_projection(tidal) + los_projection_mm = stack.los_projection(tidal) # Expected input # xarray.Dataset # Dimensions: @@ -51,7 +51,7 @@ def los_projection(self, data): # ... Calculate plate velocity LOS projection in millimeter [mm]: - sbas.los_projection([22.67, 13.36, 0]) + stack.los_projection([22.67, 13.36, 0]) # Expected output: # NOTE: estimation using central point satellite look vector # array([-15.57419278]) @@ -96,7 +96,7 @@ def los_projection(self, data): # Examples # ------- # Calculate tidal LOS projection: -# los_projection_mm = 1000*sbas.los_projection(tidal) +# los_projection_mm = 1000*stack.los_projection(tidal) # # Expected input # # lon lat dx dy dz # # date @@ -109,12 +109,12 @@ def los_projection(self, data): # # ... # # Using list or tuple as input: -# los_projection_mm = 1000*sbas.los_projection([tidal.dx, tidal.dy, tidal.dz], lon, lat) +# los_projection_mm = 1000*stack.los_projection([tidal.dx, tidal.dy, tidal.dz], lon, lat) # # Expected output: # # [55.34030452, -56.55791618, ...] 
# # Using numpy.ndarray as input: -# los_projection_mm = 1000*sbas.los_projection(np.column_stack([tidal.dx, tidal.dy, tidal.dz])) +# los_projection_mm = 1000*stack.los_projection(np.column_stack([tidal.dx, tidal.dy, tidal.dz])) # # Expected output (with central point satellite look vector estimation): # # [54.72536278, -57.87347137, ...] # @@ -184,7 +184,7 @@ def los_projection(self, data): # Examples # ------- # Calculate tidal LOS projection: -# los_projection_mm = 1000*sbas.los_projection(tidal) +# los_projection_mm = 1000*stack.los_projection(tidal) # # Expected input # # lon lat dx dy dz # # date @@ -197,12 +197,12 @@ def los_projection(self, data): # # ... # # Using list or tuple as input: -# los_projection_mm = 1000*sbas.los_projection([tidal.dx, tidal.dy, tidal.dz], lon, lat) +# los_projection_mm = 1000*stack.los_projection([tidal.dx, tidal.dy, tidal.dz], lon, lat) # # Expected output: # # [55.34030452, -56.55791618, ...] # # Using numpy.ndarray as input: -# los_projection_mm = 1000*sbas.los_projection(np.column_stack([tidal.dx, tidal.dy, tidal.dz])) +# los_projection_mm = 1000*stack.los_projection(np.column_stack([tidal.dx, tidal.dy, tidal.dz])) # # Expected output (with central point satellite look vector estimation): # # [54.72536278, -57.87347137, ...] 
# @@ -251,7 +251,7 @@ def get_sat_look(self, chunksize=None): Examples -------- Get satellite look vectors: - sat_look_ll = sbas.get_sat_look() + sat_look_ll = stack.get_sat_look() Notes ----- @@ -279,17 +279,17 @@ def los_displacement_mm(self, data): Examples -------- Calculate LOS displacement for unwrapped phase grids in radar coordinates: - unwraps_ra = sbas.open_grids(pairs, 'unwrap') - los_disp_ra = sbas.los_displacement_mm(unwraps_ra) + unwraps_ra = stack.open_grids(pairs, 'unwrap') + los_disp_ra = stack.los_displacement_mm(unwraps_ra) # or the same code in one line - los_disp_ra = sbas.open_grids(pairs, 'unwrap', func=sbas.los_displacement_mm) + los_disp_ra = stack.open_grids(pairs, 'unwrap', func=stack.los_displacement_mm) # Note: here "func" argument for open_grids() function reduces the code to a single command. Calculate LOS displacement for detrended unwrapped phase grids in geographic coordinates: - detrend_ll = sbas.open_grids(pairs, 'detrend', geocode=True) - los_disp_ll = sbas.los_displacement_mm(detrend_ll) + detrend_ll = stack.open_grids(pairs, 'detrend', geocode=True) + los_disp_ll = stack.los_displacement_mm(detrend_ll) # or the same code in one line - los_disp_ll = sbas.open_grids(pairs, 'detrend', geocode=True, func=sbas.los_displacement_mm) + los_disp_ll = stack.open_grids(pairs, 'detrend', geocode=True, func=stack.los_displacement_mm) # Note: here "func" argument for open_grids() function reduces the code to a single command. 
""" import xarray as xr @@ -318,7 +318,7 @@ def incidence_angle(self): Examples -------- Compute the incidence angle grid: - inc_angle_ll = sbas.incidence_angle() + inc_angle_ll = stack.incidence_angle() Notes ----- @@ -351,11 +351,11 @@ def vertical_displacement_mm(self, unwraps): Examples -------- Calculate vertical displacement for unwrapped phase grids in geographic coordinates: - unwraps_ll = sbas.open_grids(pairs, 'unwrap', geocode=True) - vert_disp_mm = sbas.vertical_displacement_mm(unwraps_ll) + unwraps_ll = stack.open_grids(pairs, 'unwrap', geocode=True) + vert_disp_mm = stack.vertical_displacement_mm(unwraps_ll) Calculate vertical displacement for detrended unwrapped phase grids in geographic coordinates: - vert_disp_mm = sbas.open_grids(pairs, 'detrend', geocode=True, func=sbas.vertical_displacement_mm) + vert_disp_mm = stack.open_grids(pairs, 'detrend', geocode=True, func=stack.vertical_displacement_mm) # Note: here "func" argument for open_grids() function reduces the code to a single command. """ import numpy as np @@ -383,11 +383,11 @@ def eastwest_displacement_mm(self, unwraps): Examples -------- Calculate East-West displacement for unwrapped phase grids in geographic coordinates: - unwraps_ll = sbas.open_grids(pairs, 'unwrap', geocode=True) - ew_disp_mm = sbas.eastwest_displacement_mm(unwraps_ll) + unwraps_ll = stack.open_grids(pairs, 'unwrap', geocode=True) + ew_disp_mm = stack.eastwest_displacement_mm(unwraps_ll) Calculate East-West displacement for detrended unwrapped phase grids in geographic coordinates: - ew_disp_mm = sbas.open_grids(pairs, 'detrend', geocode=True, func=sbas.eastwest_displacement_mm) + ew_disp_mm = stack.open_grids(pairs, 'detrend', geocode=True, func=stack.eastwest_displacement_mm) # Note: here "func" argument for open_grids() function reduces the code to a single command. 
""" import numpy as np diff --git a/pygmtsar/pygmtsar/SBAS_intf.py b/pygmtsar/pygmtsar/Stack_intf.py similarity index 96% rename from pygmtsar/pygmtsar/SBAS_intf.py rename to pygmtsar/pygmtsar/Stack_intf.py index e69117fa..d59a557f 100644 --- a/pygmtsar/pygmtsar/SBAS_intf.py +++ b/pygmtsar/pygmtsar/Stack_intf.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_topo import SBAS_topo +from .Stack_topo import Stack_topo from .tqdm_dask import tqdm_dask -class SBAS_intf(SBAS_topo): +class Stack_intf(Stack_topo): def intf(self, subswath, pair, **kwargs): """ @@ -57,7 +57,7 @@ def intf(self, subswath, pair, **kwargs): del kwargs['topo_file'] else: topo_file = self.get_filename('topo', subswath) - #print ('SBAS intf kwargs', kwargs) + #print ('Stack intf kwargs', kwargs) prm_ref.intf(prm_rep, basedir=self.basedir, topo_fromfile = topo_file, @@ -87,9 +87,9 @@ def intf_parallel(self, pairs, weight=None, n_jobs=-1, chunksize=None, **kwargs) Examples -------- For default 60m DEM resolution and other default parameters use command below: - pairs = [sbas.to_dataframe().index.unique()] + pairs = [stack.to_dataframe().index.unique()] decimator = lambda dataarray: dataarray.coarsen({'y': 4, 'x': 4}, boundary='trim').mean() - sbas.intf_parallel(pairs, func=decimator) + stack.intf_parallel(pairs, func=decimator) """ import xarray as xr import pandas as pd diff --git a/pygmtsar/pygmtsar/SBAS_landmask.py b/pygmtsar/pygmtsar/Stack_landmask.py similarity index 90% rename from pygmtsar/pygmtsar/SBAS_landmask.py rename to pygmtsar/pygmtsar/Stack_landmask.py index 19643bd0..20b162a5 100644 --- a/pygmtsar/pygmtsar/SBAS_landmask.py +++ b/pygmtsar/pygmtsar/Stack_landmask.py @@ -7,9 +7,9 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_merge import 
SBAS_merge +from .Stack_merge import Stack_merge -class SBAS_landmask(SBAS_merge): +class Stack_landmask(Stack_merge): def set_landmask(self, landmask_filename): """ @@ -22,14 +22,12 @@ def set_landmask(self, landmask_filename): Examples -------- - sbas = sbas.set_landmask('data/landmask.nc') - Also, the same result is possible on SBAS initialization: - sbas = SBAS(..., landmask_filename='data/landmask.nc') + stack = stack.set_landmask('data/landmask.nc') Returns ------- - self : SBAS - The SBAS object with the updated landmask file path. + self : Stack + The Stack object with the updated landmask file path. """ import os if landmask_filename is not None: @@ -56,10 +54,10 @@ def get_landmask(self, inverse_geocode=False): Examples -------- Get land mask in geographic coordinates: - landmask_ll = sbas.get_landmask() + landmask_ll = stack.get_landmask() Get land mask in radar coordinates: - landmask_ra = sbas.get_landmask(inverse_geocode=True) + landmask_ra = stack.get_landmask(inverse_geocode=True) Notes ----- @@ -108,7 +106,7 @@ def download_landmask(self, backend=None, debug=False): Examples -------- - sbas = sbas.download_landmask() + stack.download_landmask() Notes ----- @@ -121,7 +119,7 @@ def download_landmask(self, backend=None, debug=False): arcsec_degree = 0.000833333333333/3 if self.landmask_filename is not None: - print ('NOTE: landmask exists, ignore the command. Use SBAS.set_landmask(None) to allow new landmask downloading') + print ('NOTE: landmask exists, ignore the command. 
Use Stack.set_landmask(None) to allow new landmask downloading') return if backend is not None: diff --git a/pygmtsar/pygmtsar/SBAS_merge.py b/pygmtsar/pygmtsar/Stack_merge.py similarity index 97% rename from pygmtsar/pygmtsar/SBAS_merge.py rename to pygmtsar/pygmtsar/Stack_merge.py index 5ab5ddce..069f2d60 100644 --- a/pygmtsar/pygmtsar/SBAS_merge.py +++ b/pygmtsar/pygmtsar/Stack_merge.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_merge_gmtsar import SBAS_merge_gmtsar +from .Stack_merge_gmtsar import Stack_merge_gmtsar from .PRM import PRM -class SBAS_merge(SBAS_merge_gmtsar): +class Stack_merge(Stack_merge_gmtsar): def merge(self, grid, debug=False): """ @@ -168,13 +168,13 @@ def merge_parallel(self, pairs, intfs = ['phasefilt', 'corr'], grids=['adi'], n_ Examples -------- - sbas.merge_parallel(pairs) + stack.merge_parallel(pairs) Notes ----- This method performs parallel merging of the interferograms for the specified pairs and grids. It utilizes joblib.Parallel for efficient parallel processing. The merged interferograms are stored in the 'df' attribute - of the SBAS object, which is a GeoDataFrame containing information about the original or merged interferograms. + of the Stack object, which is a GeoDataFrame containing information about the original or merged interferograms. 
""" from tqdm.auto import tqdm import joblib @@ -182,7 +182,7 @@ def merge_parallel(self, pairs, intfs = ['phasefilt', 'corr'], grids=['adi'], n_ import geopandas as gpd # merging is not applicable to a single subswath - # for this case coordinate transformation matrices already built in SBAS.intf_parallel() + # for this case coordinate transformation matrices already built in Stack.intf_parallel() subswaths = self.get_subswaths() if len(subswaths) == 1: return diff --git a/pygmtsar/pygmtsar/SBAS_merge_gmtsar.py b/pygmtsar/pygmtsar/Stack_merge_gmtsar.py similarity index 96% rename from pygmtsar/pygmtsar/SBAS_merge_gmtsar.py rename to pygmtsar/pygmtsar/Stack_merge_gmtsar.py index e66f51f4..3a4bd5de 100644 --- a/pygmtsar/pygmtsar/SBAS_merge_gmtsar.py +++ b/pygmtsar/pygmtsar/Stack_merge_gmtsar.py @@ -7,9 +7,9 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_intf import SBAS_intf +from .Stack_intf import Stack_intf -class SBAS_merge_gmtsar(SBAS_intf): +class Stack_merge_gmtsar(Stack_intf): # stem_tofile + '.PRM' generating def merge_swath(self, conf, grid_tofile, stem_tofile, debug=False): diff --git a/pygmtsar/pygmtsar/SBAS_orbits.py b/pygmtsar/pygmtsar/Stack_orbits.py similarity index 88% rename from pygmtsar/pygmtsar/SBAS_orbits.py rename to pygmtsar/pygmtsar/Stack_orbits.py index d974cf52..5e221712 100644 --- a/pygmtsar/pygmtsar/SBAS_orbits.py +++ b/pygmtsar/pygmtsar/Stack_orbits.py @@ -7,15 +7,15 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_prm import SBAS_prm +from .Stack_prm import Stack_prm -class SBAS_orbits(SBAS_prm): +class Stack_orbits(Stack_prm): # for precision orbit there is only single orbit per day # for approximate orbit 2 and maybe more orbits per day are possible # so check orbit file for for each subswath def 
download_orbits(self): """ - Download missed orbits for all the SBAS scenes. + Download missed orbits for all the Stack scenes. Returns ------- @@ -24,7 +24,7 @@ def download_orbits(self): Examples -------- - sbas.download_orbits() + stack.download_orbits() """ from eof.download import download_eofs diff --git a/pygmtsar/pygmtsar/SBAS_prm.py b/pygmtsar/pygmtsar/Stack_prm.py similarity index 96% rename from pygmtsar/pygmtsar/SBAS_prm.py rename to pygmtsar/pygmtsar/Stack_prm.py index 6c1ed556..b2ca1e74 100644 --- a/pygmtsar/pygmtsar/SBAS_prm.py +++ b/pygmtsar/pygmtsar/Stack_prm.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_base import SBAS_base +from .Stack_base import Stack_base from .PRM import PRM -class SBAS_prm(SBAS_base): +class Stack_prm(Stack_base): def PRM(self, subswath=None, date=None, multi=True, singleswath=False): """ diff --git a/pygmtsar/pygmtsar/SBAS_ps.py b/pygmtsar/pygmtsar/Stack_ps.py similarity index 95% rename from pygmtsar/pygmtsar/SBAS_ps.py rename to pygmtsar/pygmtsar/Stack_ps.py index f97e57b5..9d064091 100644 --- a/pygmtsar/pygmtsar/SBAS_ps.py +++ b/pygmtsar/pygmtsar/Stack_ps.py @@ -7,21 +7,21 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_stl import SBAS_stl +from .Stack_stl import Stack_stl from .tqdm_dask import tqdm_dask -class SBAS_ps(SBAS_stl): +class Stack_ps(Stack_stl): def get_ps(self, subswath=None, chunksize=None): return self.open_grid('ps', subswath, chunksize=chunksize) #from pygmtsar import tqdm_dask - #SBAS.ps_parallel = ps_parallel - #sbas.ps_parallel(interactive=True) - #sbas.ps_parallel() - #adi = sbas.open_grids(None, 'ps') + #Stack.ps_parallel = ps_parallel + #stack.ps_parallel(interactive=True) + #stack.ps_parallel() + #adi = stack.open_grids(None, 'ps') #adi - 
#ps_decimator = sbas.pixel_decimator(resolution_meters=60, grid=adi, debug=True) + #ps_decimator = stack.pixel_decimator(resolution_meters=60, grid=adi, debug=True) #adi_dec = adi.coarsen({'y': 4, 'x': 16}, boundary='trim').min() #adi_dec # define PS candidates using Amplitude Dispersion Index (ADI) diff --git a/pygmtsar/pygmtsar/SBAS_reframe.py b/pygmtsar/pygmtsar/Stack_reframe.py similarity index 93% rename from pygmtsar/pygmtsar/SBAS_reframe.py rename to pygmtsar/pygmtsar/Stack_reframe.py index ef10a33c..c7f75f34 100644 --- a/pygmtsar/pygmtsar/SBAS_reframe.py +++ b/pygmtsar/pygmtsar/Stack_reframe.py @@ -7,18 +7,18 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_reframe_gmtsar import SBAS_reframe_gmtsar +from .Stack_reframe_gmtsar import Stack_reframe_gmtsar from .S1 import S1 from .PRM import PRM -class SBAS_reframe(SBAS_reframe_gmtsar): +class Stack_reframe(Stack_reframe_gmtsar): def get_pins(self, subswath=None): - print ('The function is obsolete, and it does nothing. Use SBAS.reframe_parallel(geometry=...) to crop bursts.') + print ('The function is obsolete, and it does nothing. Use Stack.reframe_parallel(geometry=...) to crop bursts.') return [] def set_pins(self, *args): - print ('The function is obsolete, and it does nothing. Use SBAS.reframe_parallel(geometry=...) to crop bursts.') + print ('The function is obsolete, and it does nothing. Use Stack.reframe_parallel(geometry=...) 
to crop bursts.') return def reframe(self, subswath, date, geometry=None, debug=False): @@ -43,7 +43,7 @@ def reframe(self, subswath, date, geometry=None, debug=False): Examples -------- - df = sbas.reframe(1, '2023-05-20') + df = stack.reframe(1, '2023-05-20') """ import geopandas as gpd import numpy as np @@ -158,17 +158,17 @@ def reframe_parallel(self, geometry=None, n_jobs=-1, **kwargs): Examples -------- Without defined geometry the command is silently skipped: - sbas.reframe_parallel() + stack.reframe_parallel() Define a line partially covering two bursts: - sbas.reframe_parallel(geometry=LineString([Point(25.3, 35.0), Point(25, 35.2)])) + stack.reframe_parallel(geometry=LineString([Point(25.3, 35.0), Point(25, 35.2)])) Read the geometry from GeoJSON file and convert to WGS84 coordinates: AOI = gpd.GeoDataFrame().from_file('AOI.json').to_crs(4326) - sbas.reframe_parallel(geometry=AOI) + stack.reframe_parallel(geometry=AOI) TODO: Define a point on a selected burst (this option is not available now): - sbas.reframe_parallel(geometry=Point(25.3, 35)) + stack.reframe_parallel(geometry=Point(25.3, 35)) """ from tqdm.auto import tqdm import joblib diff --git a/pygmtsar/pygmtsar/SBAS_reframe_gmtsar.py b/pygmtsar/pygmtsar/Stack_reframe_gmtsar.py similarity index 98% rename from pygmtsar/pygmtsar/SBAS_reframe_gmtsar.py rename to pygmtsar/pygmtsar/Stack_reframe_gmtsar.py index e55a6052..1e618958 100644 --- a/pygmtsar/pygmtsar/SBAS_reframe_gmtsar.py +++ b/pygmtsar/pygmtsar/Stack_reframe_gmtsar.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_orbits import SBAS_orbits +from .Stack_orbits import Stack_orbits from .PRM import PRM -class SBAS_reframe_gmtsar(SBAS_orbits): +class Stack_reframe_gmtsar(Stack_orbits): def ext_orb_s1a(self, subswath, stem, date=None, debug=False): """ diff --git a/pygmtsar/pygmtsar/SBAS_sbas.py 
b/pygmtsar/pygmtsar/Stack_sbas.py similarity index 97% rename from pygmtsar/pygmtsar/SBAS_sbas.py rename to pygmtsar/pygmtsar/Stack_sbas.py index 0dbb681b..734a8669 100644 --- a/pygmtsar/pygmtsar/SBAS_sbas.py +++ b/pygmtsar/pygmtsar/Stack_sbas.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_detrend import SBAS_detrend +from .Stack_detrend import Stack_detrend from .PRM import PRM -class SBAS_sbas(SBAS_detrend): +class Stack_sbas(Stack_detrend): @staticmethod def lstsq(x, w, matrix): @@ -74,7 +74,7 @@ def lstsq(x, w, matrix): except Exception as e: # typically, this error handled: # LinAlgError: SVD did not converge in Linear Least Squares - print ('SBAS.lstsq notice:', str(e)) + print ('Stack.lstsq notice:', str(e)) return np.nan * np.zeros(matrix.shape[1]) #print ('model', model) # mask produced cumsum zeroes by NaNs where model[0] is the timeseries values @@ -144,11 +144,11 @@ def stack_lstsq(self, data=None, weight=None, chunksize=None, interactive=False, Examples: ----- - sbas.lstsq_parallel(unwraps_detrend, interactive=False) - sbas.lstsq_parallel(unwraps_detrend, corrs, interactive=False) - sbas.lstsq_parallel([unwraps_detrend, corrs], interactive=False) - sbas.lstsq_parallel((unwraps_detrend, corrs), interactive=False) - sbas.lstsq_parallel((unwraps_detrend, corrs.mean(['y', 'x'])), interactive=False) + stack.lstsq_parallel(unwraps_detrend, interactive=False) + stack.lstsq_parallel(unwraps_detrend, corrs, interactive=False) + stack.lstsq_parallel([unwraps_detrend, corrs], interactive=False) + stack.lstsq_parallel((unwraps_detrend, corrs), interactive=False) + stack.lstsq_parallel((unwraps_detrend, corrs.mean(['y', 'x'])), interactive=False) Notes ----- diff --git a/pygmtsar/pygmtsar/SBAS_stack.py b/pygmtsar/pygmtsar/Stack_stack.py similarity index 98% rename from pygmtsar/pygmtsar/SBAS_stack.py rename to 
pygmtsar/pygmtsar/Stack_stack.py index 43e45f08..eae621dc 100644 --- a/pygmtsar/pygmtsar/SBAS_stack.py +++ b/pygmtsar/pygmtsar/Stack_stack.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_dem import SBAS_dem +from .Stack_dem import Stack_dem from .PRM import PRM -class SBAS_stack(SBAS_dem): +class Stack_stack(Stack_dem): def offset2shift(self, xyz, rmax, amax, method='linear'): """ @@ -100,7 +100,7 @@ def stack_ref(self, subswath, debug=False): Examples -------- - sbas.stack_ref(subswath=2, debug=True) + stack.stack_ref(subswath=2, debug=True) """ import xarray as xr import numpy as np @@ -149,7 +149,7 @@ def stack_rep(self, subswath, date=None, degrees=12.0/3600, debug=False): Examples -------- - sbas.stack_rep(subswath=2, date='2023-05-01', degrees=15.0/3600, debug=True) + stack.stack_rep(subswath=2, date='2023-05-01', degrees=15.0/3600, debug=True) """ import xarray as xr import numpy as np @@ -314,7 +314,7 @@ def stack_parallel(self, dates=None, n_jobs=-1, **kwargs): Examples -------- - sbas.stack_parallel() + stack.stack_parallel() """ from tqdm.auto import tqdm import joblib diff --git a/pygmtsar/pygmtsar/SBAS_stl.py b/pygmtsar/pygmtsar/Stack_stl.py similarity index 96% rename from pygmtsar/pygmtsar/SBAS_stl.py rename to pygmtsar/pygmtsar/Stack_stl.py index e3738e65..eef56a58 100644 --- a/pygmtsar/pygmtsar/SBAS_stl.py +++ b/pygmtsar/pygmtsar/Stack_stl.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_tidal import SBAS_tidal +from .Stack_tidal import Stack_tidal from .tqdm_dask import tqdm_dask -class SBAS_stl(SBAS_tidal): +class Stack_stl(Stack_tidal): @staticmethod def stl(ts, dt, dt_periodic, periods, robust=False): @@ -76,14 +76,14 @@ def stack_stl(self, data, freq='W', periods=52, robust=False, 
chunksize=None, in The function performs the following steps: 1. Convert the 'date' coordinate to valid dates. 2. Unify date intervals to a specified frequency (e.g., weekly) for a mix of time intervals. - 3. Apply the sbas_stl function in parallel using xarray's apply_ufunc and Dask. + 3. Apply the Stack.stl function in parallel using xarray's apply_ufunc and Dask. 4. Rename the output date dimension to match the original irregular date dimension. 5. Return the STL decomposition results as an xarray Dataset. Parameters ---------- - self : SBAS - Instance of the SBAS class. + self : Stack + Instance of the Stack class. dates : numpy.ndarray Array of datetime64 values corresponding to the input time series data. data : xarray.DataArray @@ -108,8 +108,8 @@ def stack_stl(self, data, freq='W', periods=52, robust=False, chunksize=None, in Examples -------- Use on (date,lat,lon) and (date,y,x) grids to return the results or store them on disk: - sbas.stl_parallel(disp, interactive=True) - sbas.stl_parallel(disp) + stack.stl_parallel(disp, interactive=True) + stack.stl_parallel(disp) See Also -------- @@ -181,7 +181,7 @@ def stl_block(lats, lons): # transform to separate variables coords = {'date': dt_weekly.values, dim1: data[dim1], dim2: data[dim2]} - # transform to separate variables variables returned from SBAS.stl() function + # transform to separate variables variables returned from Stack.stl() function varnames = ['trend', 'seasonal', 'resid'] keys_vars = {varname: xr.DataArray(models[varidx], coords=coords) for (varidx, varname) in enumerate(varnames)} model = xr.Dataset({**keys_vars}) diff --git a/pygmtsar/pygmtsar/SBAS_tidal.py b/pygmtsar/pygmtsar/Stack_tidal.py similarity index 92% rename from pygmtsar/pygmtsar/SBAS_tidal.py rename to pygmtsar/pygmtsar/Stack_tidal.py index bfa3b670..0ff26abd 100644 --- a/pygmtsar/pygmtsar/SBAS_tidal.py +++ b/pygmtsar/pygmtsar/Stack_tidal.py @@ -7,9 +7,9 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # 
---------------------------------------------------------------------------- -from .SBAS_incidence import SBAS_incidence +from .Stack_incidence import Stack_incidence -class SBAS_tidal(SBAS_incidence): +class Stack_tidal(Stack_incidence): def get_tidal(self, subswath=None, chunksize=None): return self.open_grid('tidal', subswath=subswath, chunksize=chunksize) @@ -94,28 +94,28 @@ def tidal_parallel(self, pairs, coarsen=32, chunksize=None, interactive=False): # -------- # Compute the tidal correction for a single pair of coordinates: # -# coords = sbas.solid_tide(sbas.df.index, coords=[13.40076, 47.40143]) +# coords = stack.solid_tide(stack.df.index, coords=[13.40076, 47.40143]) # # Compute the tidal correction for multiple pairs of coordinates: # -# coords = sbas.solid_tide(sbas.df.index, coords=[[13.40076, 47.40143], [13.40076, 47.40143]]) +# coords = stack.solid_tide(stack.df.index, coords=[[13.40076, 47.40143], [13.40076, 47.40143]]) # # Compute the tidal correction for point geodataframe: -# coords = sbas.solid_tide(sbas.df.index, AOI) +# coords = stack.solid_tide(stack.df.index, AOI) # # Compute the tidal correction for a single record point geodataframe: -# coords = sbas.solid_tide(sbas.df.index, AOI.head(1)) +# coords = stack.solid_tide(stack.df.index, AOI.head(1)) # # Output: # -# >>> sbas.solid_tide(sbas.df.index[:3], coords=[lon, lat]) +# >>> stack.solid_tide(stack.df.index[:3], coords=[lon, lat]) # lon lat dx dy dz # date # 2022-06-16 13.400758 47.401431 -0.066918 -0.004765 0.016200 # 2022-06-28 13.400758 47.401431 -0.033571 0.012279 -0.099899 # 2022-07-10 13.400758 47.401431 -0.000806 -0.007983 -0.150675 # -# >>> sbas.solid_tide(sbas.df.index[:3], coords=[[lon, lat], [lon+1, lat+1]]) +# >>> stack.solid_tide(stack.df.index[:3], coords=[[lon, lat], [lon+1, lat+1]]) # lon lat dx dy dz # date # 2022-06-16 13.400758 47.401431 -0.066918 -0.004765 0.016200 @@ -196,28 +196,28 @@ def solid_tide2(self, dates, data, debug=False): -------- Compute the tidal 
correction for a single pair of coordinates: - coords = sbas.solid_tide(sbas.df.index, coords=[13.40076, 47.40143]) + coords = stack.solid_tide(stack.df.index, coords=[13.40076, 47.40143]) Compute the tidal correction for multiple pairs of coordinates: - coords = sbas.solid_tide(sbas.df.index, coords=[[13.40076, 47.40143], [13.40076, 47.40143]]) + coords = stack.solid_tide(stack.df.index, coords=[[13.40076, 47.40143], [13.40076, 47.40143]]) Compute the tidal correction for point geodataframe: - coords = sbas.solid_tide(sbas.df.index, AOI) + coords = stack.solid_tide(stack.df.index, AOI) Compute the tidal correction for a single record point geodataframe: - coords = sbas.solid_tide(sbas.df.index, AOI.head(1)) + coords = stack.solid_tide(stack.df.index, AOI.head(1)) Output: - >>> sbas.solid_tide(sbas.df.index[:3], coords=[lon, lat]) + >>> stack.solid_tide(stack.df.index[:3], coords=[lon, lat]) lon lat dx dy dz date 2022-06-16 13.400758 47.401431 -0.066918 -0.004765 0.016200 2022-06-28 13.400758 47.401431 -0.033571 0.012279 -0.099899 2022-07-10 13.400758 47.401431 -0.000806 -0.007983 -0.150675 - >>> sbas.solid_tide(sbas.df.index[:3], coords=[[lon, lat], [lon+1, lat+1]]) + >>> stack.solid_tide(stack.df.index[:3], coords=[[lon, lat], [lon+1, lat+1]]) lon lat dx dy dz date 2022-06-16 13.400758 47.401431 -0.066918 -0.004765 0.016200 diff --git a/pygmtsar/pygmtsar/SBAS_topo.py b/pygmtsar/pygmtsar/Stack_topo.py similarity index 95% rename from pygmtsar/pygmtsar/SBAS_topo.py rename to pygmtsar/pygmtsar/Stack_topo.py index ad303d01..1ad05653 100644 --- a/pygmtsar/pygmtsar/SBAS_topo.py +++ b/pygmtsar/pygmtsar/Stack_topo.py @@ -7,11 +7,11 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_trans_inv import SBAS_trans_inv +from .Stack_trans_inv import Stack_trans_inv from .PRM import PRM from .tqdm_dask import tqdm_dask -class SBAS_topo(SBAS_trans_inv): +class 
Stack_topo(Stack_trans_inv): def topo(self, subswath, chunksize=None, interactive=False): """ @@ -35,7 +35,7 @@ def topo(self, subswath, chunksize=None, interactive=False): topo = self.get_trans_inv(subswath).ele[1:,1:].rename('topo') if interactive: - # do not flip vertically because it's returned as is without SBAS.get_topo() function + # do not flip vertically because it's returned as is without Stack.get_topo() function return topo # flip vertically for GMTSAR compatibility reasons topo = xr.DataArray(dask.array.flipud(topo), coords=topo.coords, name=topo.name) @@ -56,7 +56,7 @@ def get_topo(self, subswath=None, chunksize=None): Examples -------- Get DEM for all the processed subswaths: - topo = sbas.get_topo() + topo = stack.get_topo() Notes ----- diff --git a/pygmtsar/pygmtsar/SBAS_trans.py b/pygmtsar/pygmtsar/Stack_trans.py similarity index 99% rename from pygmtsar/pygmtsar/SBAS_trans.py rename to pygmtsar/pygmtsar/Stack_trans.py index b6bdb4e9..c1aab380 100644 --- a/pygmtsar/pygmtsar/SBAS_trans.py +++ b/pygmtsar/pygmtsar/Stack_trans.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_stack import SBAS_stack +from .Stack_stack import Stack_stack from .tqdm_dask import tqdm_dask -class SBAS_trans(SBAS_stack): +class Stack_trans(Stack_stack): def define_trans_grid(self, subswath, coarsen): import numpy as np diff --git a/pygmtsar/pygmtsar/SBAS_trans_inv.py b/pygmtsar/pygmtsar/Stack_trans_inv.py similarity index 99% rename from pygmtsar/pygmtsar/SBAS_trans_inv.py rename to pygmtsar/pygmtsar/Stack_trans_inv.py index 3319731c..bc9383f1 100644 --- a/pygmtsar/pygmtsar/SBAS_trans_inv.py +++ b/pygmtsar/pygmtsar/Stack_trans_inv.py @@ -7,10 +7,10 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_trans import SBAS_trans +from 
.Stack_trans import Stack_trans from .tqdm_dask import tqdm_dask -class SBAS_trans_inv(SBAS_trans): +class Stack_trans_inv(Stack_trans): def get_trans_inv(self, subswath=None, chunksize=None): """ diff --git a/pygmtsar/pygmtsar/SBAS_unwrap.py b/pygmtsar/pygmtsar/Stack_unwrap.py similarity index 94% rename from pygmtsar/pygmtsar/SBAS_unwrap.py rename to pygmtsar/pygmtsar/Stack_unwrap.py index 0fd35b79..a46a5c28 100644 --- a/pygmtsar/pygmtsar/SBAS_unwrap.py +++ b/pygmtsar/pygmtsar/Stack_unwrap.py @@ -7,9 +7,9 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_unwrap_snaphu import SBAS_unwrap_snaphu +from .Stack_unwrap_snaphu import Stack_unwrap_snaphu -class SBAS_unwrap(SBAS_unwrap_snaphu): +class Stack_unwrap(Stack_unwrap_snaphu): @staticmethod def unwrap1d(data_pairs, matrix): @@ -203,23 +203,23 @@ def stack_snaphu(self): # Examples # -------- # Simplest unwrapping: -# sbas.unwrap_parallel(pairs) +# stack.unwrap_parallel(pairs) # # Filter low-coherence areas with common correlation threshold 0.75: -# sbas.unwrap_parallel(pairs, threshold=0.075) +# stack.unwrap_parallel(pairs, threshold=0.075) # # Unwrap with coherence threshold 0.075 and fill NODATA gaps: -# interpolator = lambda corr, unwrap: sbas.nearest_grid(unwrap).where(corr>=0) -# sbas.unwrap_parallel(pairs, threshold=0.075, func=interpolator) +# interpolator = lambda corr, unwrap: stack.nearest_grid(unwrap).where(corr>=0) +# stack.unwrap_parallel(pairs, threshold=0.075, func=interpolator) # # Unwrap with coherence threshold 0.075 and apply land mask: # cleaner = lambda corr, unwrap: xr.where(corr>=0.075, unwrap, np.nan) -# sbas.unwrap_parallel(pairs, threshold=0.075, mask=landmask_ra, func=cleaner) +# stack.unwrap_parallel(pairs, threshold=0.075, mask=landmask_ra, func=cleaner) # # Unwrap with coherence threshold 0.075 and use SNAPHU tiling for faster processing and smaller RAM usage: # cleaner = 
lambda corr, unwrap: xr.where(corr>=0.075, unwrap, np.nan) -# conf = sbas.PRM().snaphu_config(NTILEROW=1, NTILECOL=2, ROWOVRLP=200, COLOVRLP=200) -# sbas.unwrap_parallel(pairs, n_jobs=1, threshold=0.075, func=cleaner, conf=conf) +# conf = stack.PRM().snaphu_config(NTILEROW=1, NTILECOL=2, ROWOVRLP=200, COLOVRLP=200) +# stack.unwrap_parallel(pairs, n_jobs=1, threshold=0.075, func=cleaner, conf=conf) # # Notes # ----- diff --git a/pygmtsar/pygmtsar/SBAS_unwrap_snaphu.py b/pygmtsar/pygmtsar/Stack_unwrap_snaphu.py similarity index 98% rename from pygmtsar/pygmtsar/SBAS_unwrap_snaphu.py rename to pygmtsar/pygmtsar/Stack_unwrap_snaphu.py index 4580ce00..af13240a 100644 --- a/pygmtsar/pygmtsar/SBAS_unwrap_snaphu.py +++ b/pygmtsar/pygmtsar/Stack_unwrap_snaphu.py @@ -7,9 +7,9 @@ # # Licensed under the BSD 3-Clause License (see LICENSE for details) # ---------------------------------------------------------------------------- -from .SBAS_landmask import SBAS_landmask +from .Stack_landmask import Stack_landmask -class SBAS_unwrap_snaphu(SBAS_landmask): +class Stack_unwrap_snaphu(Stack_landmask): # -s for SMOOTH mode and -d for DEFO mode when DEFOMAX_CYCLE should be defined in the configuration # DEFO mode (-d) and DEFOMAX_CYCLE=0 is equal to SMOOTH mode (-s) diff --git a/pygmtsar/pygmtsar/__init__.py b/pygmtsar/pygmtsar/__init__.py index 90954571..c7511383 100644 --- a/pygmtsar/pygmtsar/__init__.py +++ b/pygmtsar/pygmtsar/__init__.py @@ -15,6 +15,6 @@ # top level module classes from .PRM import PRM from .S1 import S1 -from .SBAS import SBAS +from .Stack import Stack # export to VTK format from .NCubeVTK import NCubeVTK diff --git a/pygmtsar/pygmtsar/datagrid.py b/pygmtsar/pygmtsar/datagrid.py index 7bde2e50..1f5a023a 100644 --- a/pygmtsar/pygmtsar/datagrid.py +++ b/pygmtsar/pygmtsar/datagrid.py @@ -143,7 +143,7 @@ def as_geo(self, da): Examples -------- Convert a raster to geospatial and mask it using a Shapely vector geometry: - sbas.as_geo(grid).rio.clip([geometry]) + 
stack.as_geo(grid).rio.clip([geometry]) Notes ----- @@ -211,7 +211,7 @@ def cropna(das): Examples -------- Crop the valid extent of a raster: - sbas.cropna(grid) + stack.cropna(grid) Notes ----- @@ -340,7 +340,7 @@ def nearest_grid(self, in_grid, search_radius_pixels=None): in_grid : xarray.DataArray The input 2D grid to be interpolated. search_radius_pixels : int, optional - The interpolation distance in pixels. If not provided, the default is set to the chunksize of the SBAS object. + The interpolation distance in pixels. If not provided, the default is set to the chunksize of the Stack object. Returns ------- @@ -350,13 +350,13 @@ def nearest_grid(self, in_grid, search_radius_pixels=None): Examples -------- Fill gaps in the specified grid using nearest neighbor interpolation: - sbas.nearest_grid(grid) + stack.nearest_grid(grid) Notes ----- This method performs nearest neighbor interpolation on a 2D grid. It replaces the NaN values in the input grid with the nearest non-NaN values. The interpolation is performed within a specified search radius in pixels. - If a search radius is not provided, the default search radius is set to the chunksize of the SBAS object. + If a search radius is not provided, the default search radius is set to the chunksize of the Stack object. 
""" from scipy.spatial import cKDTree import xarray as xr @@ -456,15 +456,15 @@ def pixel_size(self, grid=(1, 4), average=True): Examples -------- Get the default average ground pixel size: - sbas.pixel_size() + stack.pixel_size() >>> (14.0, 15.7) Get the default ground pixel size per subswath: - sbas.pixel_size(average=False) + stack.pixel_size(average=False) >>> [(14.0, 16.7), (14.0, 14.7)] Get the ground pixel size for an unwrapped phase grid with a decimation of {'y': 2, 'x': 2}: - sbas.pixel_size(unwraps) + stack.pixel_size(unwraps) >>> (27.9, 29.5) Notes @@ -513,13 +513,13 @@ def pixel_decimator(self, resolution_meters=60, grid=(1, 4), func='mean', debug= Returns ------- callable - Post-processing function for SBAS.ints() and SBAS.intf_parallel(). + Post-processing function for Stack.ints() and Stack.intf_parallel(). Examples -------- Decimate computed interferograms to default DEM resolution 60 meters: - decimator = sbas.pixel_decimator() - sbas.intf_parallel(pairs, func=decimator) + decimator = stack.pixel_decimator() + stack.intf_parallel(pairs, func=decimator) """ import numpy as np import dask