Skip to content

Commit

Permalink
Rename SBAS class to Stack because for now it allows SBAS, PSI and ot…
Browse files Browse the repository at this point in the history
…her analyses.
  • Loading branch information
AlexeyPechnikov committed Sep 9, 2023
1 parent c8e8128 commit 6b46787
Show file tree
Hide file tree
Showing 28 changed files with 237 additions and 236 deletions.
70 changes: 35 additions & 35 deletions pygmtsar/pygmtsar/IO.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ class IO(datagrid):

def dump(self, to_path=None):
"""
Dump SBAS object state to a pickle file (SBAS.pickle in the processing directory by default).
Dump Stack object state to a pickle file (Stack.pickle in the processing directory by default).
Parameters
----------
Expand All @@ -31,34 +31,34 @@ def dump(self, to_path=None):
Examples
--------
Dump the current state to the default dump file in the processing directory:
sbas.dump()
stack.dump()
Notes
-----
This method serializes the state of the SBAS object and saves it to a pickle file. The pickle file can be used to
restore the SBAS object with its processed data and configuration. By default, the dump file is named "SBAS.pickle"
This method serializes the state of the Stack object and saves it to a pickle file. The pickle file can be used to
restore the Stack object with its processed data and configuration. By default, the dump file is named "Stack.pickle"
and is saved in the processing directory. An alternative file path can be provided using the `to_path` parameter.
"""
import pickle
import os

if to_path is None:
sbas_pickle = os.path.join(self.basedir, 'SBAS.pickle')
stack_pickle = os.path.join(self.basedir, 'Stack.pickle')
else:
if os.path.isdir(to_path):
sbas_pickle = os.path.join(to_path, 'SBAS.pickle')
stack_pickle = os.path.join(to_path, 'Stack.pickle')
else:
sbas_pickle = to_path
stack_pickle = to_path

print (f'NOTE: save state to file {sbas_pickle}')
pickle.dump(self, open(sbas_pickle, 'wb'))
print (f'NOTE: save state to file {stack_pickle}')
pickle.dump(self, open(stack_pickle, 'wb'))

return

@staticmethod
def restore(from_path):
"""
Restore SBAS object state from a pickle file (SBAS.pickle in the processing directory by default).
Restore Stack object state from a pickle file (Stack.pickle in the processing directory by default).
Parameters
----------
Expand All @@ -67,36 +67,36 @@ def restore(from_path):
Returns
-------
SBAS
The restored SBAS object.
Stack
The restored Stack object.
Examples
--------
Restore the current state from the default dump file in the processing directory:
SBAS.restore()
Stack.restore()
Notes
-----
This static method restores the state of an SBAS object from a pickle file. The pickle file should contain the
serialized state of the SBAS object, including its processed data and configuration. By default, the method assumes
the input file is named "SBAS.pickle" and is located in the processing directory. An alternative file path can be
provided using the `from_path` parameter. The method returns the restored SBAS object.
This static method restores the state of a Stack object from a pickle file. The pickle file should contain the
serialized state of the Stack object, including its processed data and configuration. By default, the method assumes
the input file is named "Stack.pickle" and is located in the processing directory. An alternative file path can be
provided using the `from_path` parameter. The method returns the restored Stack object.
"""
import pickle
import os

if os.path.isdir(from_path):
sbas_pickle = os.path.join(from_path, 'SBAS.pickle')
stack_pickle = os.path.join(from_path, 'Stack.pickle')
else:
sbas_pickle = from_path
stack_pickle = from_path

print (f'NOTE: load state from file {sbas_pickle}')
return pickle.load(open(sbas_pickle, 'rb'))
print (f'NOTE: load state from file {stack_pickle}')
return pickle.load(open(stack_pickle, 'rb'))


def backup(self, backup_dir, copy=False, debug=False):
"""
Backup framed SBAS scenes, orbits, DEM, and landmask files to build a minimal reproducible dataset.
Backup framed Stack scenes, orbits, DEM, and landmask files to build a minimal reproducible dataset.
Parameters
----------
Expand All @@ -115,20 +115,20 @@ def backup(self, backup_dir, copy=False, debug=False):
Examples
--------
Backup the files to the specified directory:
sbas.backup('backup')
stack.backup('backup')
Open the backup for the reproducible run by defining it as a new data directory:
sbas = SBAS('backup', 'backup/DEM_WGS84.nc', 'raw')
stack = Stack('backup', 'backup/DEM_WGS84.nc', 'raw')
Notes
-----
This method backs up the framed SBAS scenes, orbits, DEM, and landmask files to a specified backup directory.
This method backs up the framed Stack scenes, orbits, DEM, and landmask files to a specified backup directory.
It provides a way to create a minimal reproducible dataset by preserving the necessary files for processing.
The method creates the backup directory if it does not exist. By default, the method moves the scene and orbit files
to the backup directory, effectively removing them from the work directory. The DEM and landmask files are always
copied to the backup directory. If the `copy` parameter is set to True, the scene and orbit files will be copied
instead of moved. Use caution when setting `copy` to True as it can result in duplicated files and consume
additional storage space. The method also updates the SBAS object's dataframe to mark the removed files as empty.
additional storage space. The method also updates the Stack object's dataframe to mark the removed files as empty.
"""
import os
import shutil
Expand All @@ -137,7 +137,7 @@ def backup(self, backup_dir, copy=False, debug=False):

# this optional file is dumped state, copy it if exists
# auto-generated file can't be a symlink but user-defined symlink target should be copied
filename = os.path.join(self.basedir, 'SBAS.pickle')
filename = os.path.join(self.basedir, 'Stack.pickle')
if os.path.exists(filename):
if debug:
print ('DEBUG: copy', filename, backup_dir)
Expand Down Expand Up @@ -270,9 +270,9 @@ def load_pairs(self, name='phase', subswath=None):

def open_grid(self, name, subswath=None, add_subswath=True, chunksize=None):
"""
sbas.open_grid('intf_ll2ra')
sbas.open_grid('intf_ra2ll')
sbas.open_grid('intfweight')
stack.open_grid('intf_ll2ra')
stack.open_grid('intf_ra2ll')
stack.open_grid('intfweight')
"""
import xarray as xr

Expand Down Expand Up @@ -343,7 +343,7 @@ def save_grid(self, data, name, subswath=None, caption='Saving 2D grid', chunksi

def open_stack(self, pairs, name, subswath=None, add_subswath=True, chunksize=None):
"""
sbas.open_stack(baseline_pairs,'phasefilt')
stack.open_stack(baseline_pairs,'phasefilt')
"""
import xarray as xr
import pandas as pd
Expand Down Expand Up @@ -437,7 +437,7 @@ def open_stack_slc(self, dates=None, subswath=None, intensity=False, dfact=2.5e-

def open_stack_geotif(self, dates=None, subswath=None, intensity=False, chunksize=None):
"""
tiffs = sbas.open_stack_geotif(['2022-06-16', '2022-06-28'], intensity=True)
tiffs = stack.open_stack_geotif(['2022-06-16', '2022-06-28'], intensity=True)
"""
import xarray as xr
import rioxarray as rio
Expand Down Expand Up @@ -477,7 +477,7 @@ def open_model(self, name, chunksize=None):
Opens an xarray 3D Dataset from a NetCDF file and re-chunks it based on the specified chunksize.
This function takes the name of the model to be opened, reads the NetCDF file, and re-chunks
the dataset according to the provided chunksize or the default value from the 'sbas' object.
the dataset according to the provided chunksize or the default value from the 'stack' object.
The 'date' dimension is always chunked with a size of 1.
Parameters
Expand All @@ -486,7 +486,7 @@ def open_model(self, name, chunksize=None):
The name of the model file to be opened.
chunksize : int, optional
The chunk size to be used for dimensions other than 'date'. If not provided, the default
chunk size from the 'sbas' object will be used.
chunk size from the 'stack' object will be used.
Returns
-------
Expand Down Expand Up @@ -537,7 +537,7 @@ def save_model(self, model, name=None, caption='Saving 3D datacube', chunksize=N
The model to be saved.
chunksize : int, optional
The chunk size to be used for dimensions other than 'date'. If not provided, the default
chunk size from the 'sbas' object will be used.
chunk size from the 'stack' object will be used.
caption: str
The text caption for the saving progress bar.
Expand Down
12 changes: 6 additions & 6 deletions pygmtsar/pygmtsar/S1.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ class S1():
@staticmethod
def scan_slc(datadir, orbit=None, mission=None, subswath=None, polarization=None):
"""
Initialize an instance of the SBAS class.
Initialize an instance of the Stack class.
Parameters
----------
Expand All @@ -34,11 +34,11 @@ def scan_slc(datadir, orbit=None, mission=None, subswath=None, polarization=None
Examples
--------
Initialize an SBAS object with the data directory 'data' and the base directory 'raw':
sbas = S1('data', basedir='raw')
Initialize a Stack object with the data directory 'data' and the base directory 'raw':
stack = S1('data', basedir='raw')
Initialize an SBAS object with the data directory 'data', DEM filename 'data/DEM_WGS84.nc', and the base directory 'raw':
sbas = SBAS('data', 'data/DEM_WGS84.nc', 'raw')
Initialize a Stack object with the data directory 'data', DEM filename 'data/DEM_WGS84.nc', and the base directory 'raw':
stack = Stack('data', 'data/DEM_WGS84.nc', 'raw')
"""
import os
import shutil
Expand Down Expand Up @@ -217,7 +217,7 @@ def geoloc2bursts(metapath):
raise ValueError('ERROR: Two or more scenes required')
daily_scenes = df.groupby(['date', 'subswath'])['datetime'].count().values.max()
if daily_scenes > 1:
print ('NOTE: Found multiple scenes for a single day, use function SBAS.reframe() to stitch the scenes')
print ('NOTE: Found multiple scenes for a single day, use function Stack.reframe() to stitch the scenes')

return df

Expand Down
30 changes: 18 additions & 12 deletions pygmtsar/pygmtsar/SBAS.py → pygmtsar/pygmtsar/Stack.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,21 @@
#
# Licensed under the BSD 3-Clause License (see LICENSE for details)
# ----------------------------------------------------------------------------
from .SBAS_ps import SBAS_ps
from .Stack_ps import Stack_ps
from .S1 import S1
from .PRM import PRM

class SBAS(SBAS_ps):
class Stack(Stack_ps):

def __init__(self, basedir, scenes, reference=None, dem_filename=None, landmask_filename=None, drop_if_exists=False):
df = None
basedir = None
reference = None
dem_filename = None
landmask_filename = None

def __init__(self, basedir, drop_if_exists=False):
"""
Initialize an instance of the SBAS class.
Initialize an instance of the Stack class.
Parameters
----------
Expand All @@ -30,11 +36,11 @@ def __init__(self, basedir, scenes, reference=None, dem_filename=None, landmask_
Examples
--------
Initialize an SBAS object with the data directory 'data' and the base directory 'raw':
sbas = SBAS('data', basedir='raw')
Initialize a Stack object with the data directory 'data' and the base directory 'raw':
stack = Stack('data', basedir='raw')
Initialize an SBAS object with the data directory 'data', DEM filename 'data/DEM_WGS84.nc', and the base directory 'raw':
sbas = SBAS('data', 'data/DEM_WGS84.nc', 'raw')
Initialize a Stack object with the data directory 'data', DEM filename 'data/DEM_WGS84.nc', and the base directory 'raw':
stack = Stack('data', 'data/DEM_WGS84.nc', 'raw')
"""
import os
import shutil
Expand All @@ -48,12 +54,12 @@ def __init__(self, basedir, scenes, reference=None, dem_filename=None, landmask_
os.makedirs(basedir)
self.basedir = basedir

def set_scenes(self, scenes):
self.df = scenes
if reference is None:
print (f'NOTE: reference scene is not defined, use {scenes.index[0]}. You can change it like SBAS.set_reference("2022-01-20")')
if self.reference is None:
print (f'NOTE: auto set reference scene {scenes.index[0]}. You can change it like Stack.set_reference("2022-01-20")')
self.reference = self.df.index[0]
self.set_dem(dem_filename)
self.set_landmask(landmask_filename)
return self

# def make_gaussian_filter(self, range_dec, azi_dec, wavelength, debug=False):
# """
Expand Down
20 changes: 10 additions & 10 deletions pygmtsar/pygmtsar/SBAS_base.py → pygmtsar/pygmtsar/Stack_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,23 +11,23 @@
from .tqdm_joblib import tqdm_joblib
from .tqdm_dask import tqdm_dask

class SBAS_base(tqdm_joblib, IO):
class Stack_base(tqdm_joblib, IO):

def __repr__(self):
return 'Object %s %d items\n%r' % (self.__class__.__name__, len(self.df), self.df)

def to_dataframe(self):
"""
Return a Pandas DataFrame for all SBAS scenes.
Return a Pandas DataFrame for all Stack scenes.
Returns
-------
pandas.DataFrame
The DataFrame containing SBAS scenes.
The DataFrame containing Stack scenes.
Examples
--------
df = sbas.to_dataframe()
df = stack.to_dataframe()
"""
return self.df

Expand All @@ -47,7 +47,7 @@ def multistem_stem(self, subswath, dt=None):

def set_reference(self, reference):
"""
Define reference scene for SBAS object.
Define reference scene for Stack object.
Parameters
----------
Expand All @@ -56,16 +56,16 @@ def set_reference(self, reference):
Returns
-------
SBAS
Modified instance of the SBAS class.
Stack
Modified instance of the Stack class.
Examples
--------
Set the reference scene to '2022-01-20':
sbas.set_reference('2022-01-20')
stack.set_reference('2022-01-20')
"""
if reference is None:
print ('NOTE: reference scene is None, SBAS.set_reference() command is ignored')
print ('NOTE: reference scene is None, Stack.set_reference() command is ignored')
return self
if not reference in self.df.index:
raise Exception('Reference scene not found')
Expand Down Expand Up @@ -150,7 +150,7 @@ def get_subswath(self, subswath=None):
assert subswath is None or subswath in subswaths, f'ERROR: subswath {subswath} not found'
if subswath is not None:
return subswath
assert len(subswaths)==1, f'ERROR: multiple subswaths {subswaths} found, merge them first using SBAS.merge_parallel()'
assert len(subswaths)==1, f'ERROR: multiple subswaths {subswaths} found, merge them first using Stack.merge_parallel()'
# define subswath
return subswaths[0]

Expand Down
Loading

0 comments on commit 6b46787

Please sign in to comment.