diff --git a/README.md b/README.md index f1d1111..21b4d89 100644 --- a/README.md +++ b/README.md @@ -45,6 +45,25 @@ Please cite the following reference if you use this pipeline in a scientific pub * [Vigan, 2020, ASCL, ascl:2009.002](https://ui.adsabs.harvard.edu/abs/2020ascl.soft09002V/abstract) +Or simply use the following Bibtex entry: + +``` +@MISC{2020ascl.soft09002V, + author = {{Vigan}, Arthur}, + title = "{vlt-sphere: Automatic VLT/SPHERE data reduction and analysis}", + keywords = {Software}, + year = 2020, + month = sep, + doi = {10.5281/zenodo.6563998}, + eid = {ascl:2009.002}, + pages = {ascl:2009.002}, +archivePrefix = {ascl}, + eprint = {2009.002}, + adsurl = {https://ui.adsabs.harvard.edu/abs/2020ascl.soft09002V}, + adsnote = {Provided by the SAO/NASA Astrophysics Data System} +} +``` + Moreover, the development of the SPHERE instrument has demanded a tremendous effort from many scientists, who have devoted several years of their life to design, build, test and commission the instrument. To recognize this work, we kindly ask you to cite the relevant instrumental papers in your scientific work. 
The reference papers for the instrument and its observing mode are: SPHERE: @@ -73,7 +92,8 @@ With important contributions from: * Dino Mesa (INAF/OAPD): IFS pre-processing * ESO personnel in general: SPHERE pipeline and calibration procedures -And bug reports with suggested fix from: +And small improvements, or bug reports with suggested fix from: * Wolfgang Brandner (@Rumpelstil) * Tomas Stolker (@tomasstolker) * Karthikeyan Singaravelan (@tirkarthi) + * Chen Xie (@ChenXie-astro) diff --git a/examples/ifs_reduction.py b/examples/ifs_reduction.py index af42a5d..2a1c384 100644 --- a/examples/ifs_reduction.py +++ b/examples/ifs_reduction.py @@ -1,32 +1,49 @@ import sphere.IFS as IFS -##################################################################### -# Starting in the present version of the pipeline, the default # -# -1.75° true North offset is automatically added to the derotation # -# angles. The offset value can be modified in the configuration of # -# the reduction: # -# # -# >>> reduction.config[\'cal_true_north\'] = xxx # -# # -# To avoid any issues, make sure to: # -# * either reprocess data previously processed with version <1.4 # -# * or take into account the offset in your astrometric analysis # -##################################################################### +############################################################################### +# Since version 1.4 of the pipeline, the default -1.75° true North offset is # +# automatically added to the derotation angles. 
The offset value can be # +# modified in the configuration of the reduction: # +# # +# >>> reduction.config[\'cal_true_north\'] = xxx # +# # +# To avoid any issues, make sure to: # +# * either reprocess data previously processed with version <1.4 # +# * or take into account the offset in your astrometric analysis # +############################################################################### ####################################################@ # full reduction # #%% init reduction -reduction = IFS.Reduction('/Users/avigan/data/sphere-test-target/IFS/', log_level='info') +reduction = IFS.Reduction('/Users/avigan/data/sphere-test-target/IFS/', + clean_start=True, + log_level='info', + user_config=None) + +############################################################################### +# It is possible to provide a default INI configuration file to set some (or # +# all) of the reduction parameters to a default value different from the ones # +# hard-coded in the sphere package. This is done with the keyword: # +# user_config='~/path/to/the/file/config.ini' # +# The increasing priority for setting reduction parameters is the following: # +# 0- default values hard-coded in the sphere package # +# 1- values contained in the file pointed by the user_config keyword, if a # +# file path is provided and exists # +# 2- values contained in a reduction_config.json file left in the reduction # +# directory by a previous reduction # +# 3- values manually set by the user (see examples below) # +############################################################################### #%% configuration reduction.config['preproc_collapse_science'] = True reduction.config['preproc_collapse_type'] = 'coadd' reduction.config['preproc_coadd_value'] = 2 +reduction.config['combine_center_selection'] = 'first' reduction.config['center_high_pass_waffle'] = True reduction.config['clean'] = False -reduction.show_config() +print(reduction.config) #%% reduction reduction.full_reduction() @@ -36,7 
+53,10 @@ # #%% init reduction -reduction = IFS.Reduction('/Users/avigan/data/sphere-test-target/IFS/', log_level='info') +reduction = IFS.Reduction('/Users/avigan/data/sphere-test-target/IFS/', + clean_start=True, + log_level='info', + user_config=None) #%% sorting reduction.sort_files() @@ -61,8 +81,8 @@ reduction.sph_ifs_wavelength_recalibration(high_pass=True, offset=(-3, 0), plot=True) reduction.sph_ifs_star_center(high_pass_psf=False, high_pass_waffle=True, offset=(-3, 0), plot=True) reduction.sph_ifs_combine_data(cpix=True, psf_dim=80, science_dim=200, correct_anamorphism=True, - shift_method='interp', manual_center=None, coarse_centering=False, - save_scaled=False) + shift_method='interp', manual_center=None, center_selection='time', + coarse_centering=False, save_scaled=False) #%% cleaning -reduction.sph_ifs_clean(delete_raw=False, delete_products=False) +reduction.sph_ifs_clean(delete_raw=False, delete_products=False, delete_config=False) diff --git a/examples/irdis_imaging_reduction.py b/examples/irdis_imaging_reduction.py index aa8ef10..cfea6bf 100644 --- a/examples/irdis_imaging_reduction.py +++ b/examples/irdis_imaging_reduction.py @@ -1,34 +1,51 @@ import sphere.IRDIS as IRDIS -##################################################################### -# Starting in the present version of the pipeline, the default # -# -1.75° true North offset is automatically added to the derotation # -# angles. 
The offset value can be modified in the configuration of # -# the reduction: # -# # -# >>> reduction.config[\'cal_true_north\'] = xxx # -# # -# To avoid any issues, make sure to: # -# * either reprocess data previously processed with version <1.4 # -# * or take into account the offset in your astrometric analysis # -##################################################################### +############################################################################### +# Since version 1.4 of the pipeline, the default -1.75° true North offset is # +# automatically added to the derotation angles. The offset value can be # +# modified in the configuration of the reduction: # +# # +# >>> reduction.config[\'cal_true_north\'] = xxx # +# # +# To avoid any issues, make sure to: # +# * either reprocess data previously processed with version <1.4 # +# * or take into account the offset in your astrometric analysis # +############################################################################### ####################################################@ # full reduction # #%% init reduction -reduction = IRDIS.ImagingReduction('/Users/avigan/data/sphere-test-target/IRD/DBI/', log_level='info') +reduction = IRDIS.ImagingReduction('/Users/avigan/data/sphere-test-target/IRD/DBI/', + clean_start=True, + log_level='info', + user_config=None) + +############################################################################### +# It is possible to provide a default INI configuration file to set some (or # +# all) of the reduction parameters to a default value different from the ones # +# hard-coded in the sphere package. 
This is done with the keyword: # +# user_config='~/path/to/the/file/config.ini' # +# The increasing priority for setting reduction parameters is the following: # +# 0- default values hard-coded in the sphere package # +# 1- values contained in the file pointed by the user_config keyword, if a # +# file path is provided and exists # +# 2- values contained in a reduction_config.json file left in the reduction # +# directory by a previous reduction # +# 3- values manually set by the user (see examples below) # +############################################################################### #%% configuration reduction.config['combine_psf_dim'] = 80 reduction.config['combine_science_dim'] = 400 reduction.config['combine_shift_method'] = 'fft' +reduction.config['combine_center_selection'] = 'first' reduction.config['preproc_collapse_science'] = True reduction.config['preproc_collapse_type'] = 'mean' reduction.config['center_high_pass_waffle'] = True reduction.config['clean'] = False -reduction.show_config() +print(reduction.config) #%% reduction reduction.full_reduction() @@ -38,7 +55,10 @@ # #%% init reduction -reduction = IRDIS.ImagingReduction('/Users/avigan/data/sphere-test-target/IRD/DBI/', log_level='info') +reduction = IRDIS.ImagingReduction('/Users/avigan/data/sphere-test-target/IRD/DBI/', + clean_start=True, + log_level='info', + user_config=None) #%% sorting reduction.sort_files() @@ -57,8 +77,8 @@ #%% high-level science processing reduction.sph_ird_star_center(high_pass_psf=True, high_pass_waffle=False, offset=(0, 0), plot=True) reduction.sph_ird_combine_data(cpix=True, psf_dim=80, science_dim=200, correct_anamorphism=True, - shift_method='interp', manual_center=None, coarse_centering=False, - save_scaled=False) + shift_method='interp', manual_center=None, center_selection='time', + coarse_centering=False, save_scaled=False) #%% cleaning -reduction.sph_ird_clean(delete_raw=False, delete_products=False) +reduction.sph_ird_clean(delete_raw=False, 
delete_products=False, delete_config=False) diff --git a/examples/irdis_spectro_reduction.py b/examples/irdis_spectro_reduction.py index 5fabc48..4111a33 100644 --- a/examples/irdis_spectro_reduction.py +++ b/examples/irdis_spectro_reduction.py @@ -5,12 +5,30 @@ # #%% init reduction -reduction = IRDIS.SpectroReduction('/Users/avigan/data/sphere-test-target/IRD/LSS/', log_level='info') +reduction = IRDIS.SpectroReduction('/Users/avigan/data/sphere-test-target/IRD/LSS/', + clean_start=True, + log_level='info', + user_config=None) + +############################################################################### +# It is possible to provide a default INI configuration file to set some (or # +# all) of the reduction parameters to a default value different from the ones # +# hard-coded in the sphere package. This is done with the keyword: # +# user_config='~/path/to/the/file/config.ini' # +# The increasing priority for setting reduction parameters is the following: # +# 0- default values hard-coded in the sphere package # +# 1- values contained in the file pointed by the user_config keyword, if a # +# file path is provided and exists # +# 2- values contained in a reduction_config.json file left in the reduction # +# directory by a previous reduction # +# 3- values manually set by the user (see examples below) # +############################################################################### #%% configuration reduction.config['combine_science_dim'] = 300 +reduction.config['combine_center_selection'] = 'first' reduction.config['clean'] = False -reduction.show_config() +print(reduction.config) #%% reduction reduction.full_reduction() @@ -20,7 +38,10 @@ # #%% init reduction -reduction = IRDIS.SpectroReduction('/Users/avigan/data/sphere-test-target/IRD/LSS/', log_level='info') +reduction = IRDIS.SpectroReduction('/Users/avigan/data/sphere-test-target/IRD/LSS/', + clean_start=True, + log_level='info', + user_config=None) #%% sorting reduction.sort_files() @@ -42,7 +63,8 @@ 
reduction.sph_ird_wavelength_recalibration(fit_scaling=True, plot=True) reduction.sph_ird_combine_data(cpix=True, psf_dim=80, science_dim=300, correct_mrs_chromatism=True, split_posang=True, - shift_method='fft', manual_center=None, coarse_centering=False) + shift_method='fft', manual_center=None, center_selection='time', + coarse_centering=False) #%% cleaning -reduction.sph_ird_clean(delete_raw=False, delete_products=False) +reduction.sph_ird_clean(delete_raw=False, delete_products=False, delete_config=False) diff --git a/examples/sparta_reduction.py b/examples/sparta_reduction.py index 88caede..4dbc447 100644 --- a/examples/sparta_reduction.py +++ b/examples/sparta_reduction.py @@ -5,7 +5,24 @@ # #%% init reduction -reduction = SPARTA.Reduction('/Users/avigan/data/sphere-test-target/SPARTA/', log_level='info') +reduction = SPARTA.Reduction('/Users/avigan/data/sphere-test-target/SPARTA/', + clean_start=True, + log_level='info', + user_config=None) + +############################################################################### +# It is possible to provide a default INI configuration file to set some (or # +# all) of the reduction parameters to a default value different from the ones # +# hard-coded in the sphere package. 
This is done with the keyword: # +# user_config='~/path/to/the/file/config.ini' # +# The increasing priority for setting reduction parameters is the following: # +# 0- default values hard-coded in the sphere package # +# 1- values contained in the file pointed by the user_config keyword, if a # +# file path is provided and exists # +# 2- values contained in a reduction_config.json file left in the reduction # +# directory by a previous reduction # +# 3- values manually set by the user (see examples below) # +############################################################################### #%% configuration reduction.config['misc_plot'] = True @@ -19,7 +36,10 @@ # #%% init reduction -reduction = SPARTA.Reduction('/Users/avigan/data/sphere-test-target/SPARTA/', log_level='info') +reduction = SPARTA.Reduction('/Users/avigan/data/sphere-test-target/SPARTA/', + clean_start=True, + log_level='info', + user_config=None) #%% sorting reduction.sort_files() diff --git a/examples/sphere_dataset.py b/examples/sphere_dataset.py index 771ffb4..c5d473a 100644 --- a/examples/sphere_dataset.py +++ b/examples/sphere_dataset.py @@ -1,7 +1,8 @@ import sphere.SPHERE as SPHERE #%% init data set -ds = SPHERE.Dataset('/Users/avigan/data/sphere-test-target/', log_level='info') +ds = SPHERE.Dataset('/Users/avigan/data/sphere-test-target/', + log_level='info') print('IRDIS reductions:') for red in ds.IRDIS_reductions: diff --git a/setup.py b/setup.py index 3dd52dd..3176787 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ # setup setup( name='vlt-sphere', - version='1.5.1', + version='1.6', description='Reduction and analysis code for the VLT/SPHERE instrument', long_description=long_description, long_description_content_type='text/markdown', diff --git a/sphere/IFS.py b/sphere/IFS.py index cf6622c..c999805 100644 --- a/sphere/IFS.py +++ b/sphere/IFS.py @@ -22,8 +22,8 @@ import sphere.utils as utils import sphere.utils.imutils as imutils import sphere.utils.aperture as aperture -import
sphere.transmission as transmission -import sphere.toolbox as toolbox +import sphere.utils.toolbox as toolbox +import sphere.utils.transmission as transmission _log = logging.getLogger(__name__) @@ -393,7 +393,7 @@ class Reduction(object): # Constructor ################################################## - def __new__(cls, path, log_level='info', sphere_handler=None): + def __new__(cls, path, clean_start=True, log_level='info', user_config=None, sphere_handler=None): ''' Custom instantiation for the class @@ -407,9 +407,17 @@ def __new__(cls, path, log_level='info', sphere_handler=None): path : str Path to the directory containing the dataset - level : {'debug', 'info', 'warning', 'error', 'critical'} + clean_start : bool + Remove all results from previous reductions for a clean start. + Default is True + + log_level : {'debug', 'info', 'warning', 'error', 'critical'} The log level of the handler + user_config : str + Path to a user-provided configuration. Default is None, i.e. the + reduction will use the package default configuration parameters + sphere_handler : log handler Higher-level SPHERE.Dataset log handler ''' @@ -424,7 +432,7 @@ def __new__(cls, path, log_level='info', sphere_handler=None): # zeroth-order reduction validation raw = path / 'raw' if not raw.exists(): - _log.error('No raw/ subdirectory. {0} is not a valid reduction path'.format(path)) + _log.error(f'No raw/ subdirectory. 
{path} is not a valid reduction path') return None else: reduction = super(Reduction, cls).__new__(cls) @@ -460,52 +468,69 @@ def __new__(cls, path, log_level='info', sphere_handler=None): reduction._logger = logger - reduction._logger.info('Creating IFS reduction at path {}'.format(path)) + reduction._logger.info(f'Creating IFS reduction at path {path}') # # v1.4 - True North correction change # - reduction._logger.warning('#################################################################') - reduction._logger.warning('Starting in the present version of the pipeline, the default ') - reduction._logger.warning('-1.75° true North offset is automatically added to the derotation') - reduction._logger.warning('angles. The offset value can be modified in the configuration of ') - reduction._logger.warning('the reduction: ') - reduction._logger.warning(' ') - reduction._logger.warning(' >>> reduction.config[\'cal_true_north\'] = xxx ') - reduction._logger.warning(' ') - reduction._logger.warning('To avoid any issues, make sure to: ') - reduction._logger.warning(' * either reprocess data previously processed with version <1.4 ') - reduction._logger.warning(' * or take into account the offset in your astrometric analysis ') - reduction._logger.warning('#################################################################') + reduction._logger.warning('##################################################################') + reduction._logger.warning('Since version 1.4 of the pipeline, the default -1.75° true North ') + reduction._logger.warning('offset is automatically added to the derotation angles. 
The offset') + reduction._logger.warning('value can be modified in the configuration of the reduction: ') + reduction._logger.warning(' ') + reduction._logger.warning(' >>> reduction.config[\'cal_true_north\'] = xxx ') + reduction._logger.warning(' ') + reduction._logger.warning('To avoid any issues, make sure to: ') + reduction._logger.warning(' * either reprocess data previously processed with version <1.4 ') + reduction._logger.warning(' * or take into account the offset in your astrometric analysis ') + reduction._logger.warning('##################################################################') + + # + # clean start + # + if clean_start: + reduction._logger.info('Erase outputs of previous reduction for a clean start') + reduction._path.remove(delete_raw=False, delete_products=True, logger=reduction._logger) + config_file = reduction._path.root / 'reduction_config.ini' + if config_file.exists(): + config_file.unlink() # # configuration # reduction._logger.debug('> read default configuration') configfile = f'{Path(sphere.__file__).parent}/instruments/{reduction._instrument}.ini' - config = configparser.ConfigParser() + cfgparser = configparser.ConfigParser() reduction._logger.debug('Read configuration') - config.read(configfile) + cfgparser.read(configfile) # instrument - reduction._pixel = float(config.get('instrument', 'pixel')) - reduction._nwave = int(config.get('instrument', 'nwave')) + reduction._pixel = float(cfgparser.get('instrument', 'pixel')) + reduction._nwave = int(cfgparser.get('instrument', 'nwave')) # calibration - reduction._wave_cal_lasers = np.array(eval(config.get('calibration', 'wave_cal_lasers'))) - reduction._default_center = np.array(eval(config.get('calibration', 'default_center'))) - reduction._orientation_offset = eval(config.get('calibration', 'orientation_offset')) + reduction._wave_cal_lasers = np.array(eval(cfgparser.get('calibration', 'wave_cal_lasers'))) + reduction._default_center = np.array(eval(cfgparser.get('calibration', 
'default_center'))) + reduction._orientation_offset = eval(cfgparser.get('calibration', 'orientation_offset')) # reduction parameters - reduction._config = dict(config.items('reduction')) - for key, value in reduction._config.items(): + cfg = {} + items = dict(cfgparser.items('reduction')) + for key, value in items.items(): try: val = eval(value) except NameError: val = value - reduction._config[key] = val + cfg[key] = val + reduction._config = utils.Configuration(reduction._path, reduction._logger, cfg) + # load user-provided default configuration parameters + if user_config: + user_config = Path(user_config).expanduser() + + reduction._config.load_from_file(user_config) + # # reduction adn recipes status # @@ -528,7 +553,7 @@ def __new__(cls, path, log_level='info', sphere_handler=None): ################################################## def __repr__(self): - return ''.format(self._instrument, self._mode, self._path, self.loglevel) + return f'' def __format__(self): return self.__repr__() @@ -593,56 +618,6 @@ def mode(self): # Generic class methods ################################################## - def show_config(self): - ''' - Shows the reduction configuration - ''' - - # dictionary - dico = self.config - - # misc parameters - print() - print('{0:<30s}{1}'.format('Parameter', 'Value')) - print('-'*35) - keys = [key for key in dico if key.startswith('misc')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # calibrations - print('-'*35) - keys = [key for key in dico if key.startswith('cal')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # pre-processing - print('-'*35) - keys = [key for key in dico if key.startswith('preproc')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # centring - print('-'*35) - keys = [key for key in dico if key.startswith('center')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # combining - print('-'*35) - keys = [key for key in dico if 
key.startswith('combine')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # clean - print('-'*35) - keys = [key for key in dico if key.startswith('clean')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - print('-'*35) - - print() - - def init_reduction(self): ''' Sort files and frames, perform sanity check @@ -717,6 +692,7 @@ def process_science(self): science_dim=config['combine_science_dim'], correct_anamorphism=config['combine_correct_anamorphism'], manual_center=config['combine_manual_center'], + center_selection=config['combine_center_selection'], coarse_centering=config['combine_coarse_centering'], shift_method=config['combine_shift_method'], save_scaled=config['combine_save_scaled']) @@ -733,7 +709,8 @@ def clean(self): if config['clean']: self.sph_ifs_clean(delete_raw=config['clean_delete_raw'], - delete_products=config['clean_delete_products']) + delete_products=config['clean_delete_products'], + delete_config=config['clean_delete_config']) def full_reduction(self): @@ -776,6 +753,9 @@ def _read_info(self): # path path = self.path + # load existing configuration + self.config.load() + # files info fname = path.preproc / 'files.csv' if fname.exists(): @@ -849,52 +829,52 @@ def _read_info(self): # additional checks to update recipe execution if frames_info is not None: wave_file = files_info[np.logical_not(files_info['PROCESSED']) & (files_info['DPR TYPE'] == 'WAVE,LAMP')] - done = (path.preproc / '{}_preproc.fits'.format(wave_file.index[0])).exists() + done = (path.preproc / f'{wave_file.index[0]}_preproc.fits').exists() if done: self._update_recipe_status('sph_ifs_preprocess_wave', sphere.SUCCESS) - self._logger.debug('> sph_ifs_preprocess_wave status = {}'.format(done)) + self._logger.debug(f'> sph_ifs_preprocess_wave status = {done}') done = (path.preproc / 'wavelength_default.fits').exists() if done: self._update_recipe_status('sph_ifs_cal_wave', sphere.SUCCESS) - self._logger.debug('> sph_ifs_cal_wave status = 
{}'.format(done)) + self._logger.debug(f'> sph_ifs_cal_wave status = {done}') done = (path.preproc / 'wavelength_recalibrated.fits').exists() if done: self._update_recipe_status('sph_ifs_wavelength_recalibration', sphere.SUCCESS) - self._logger.debug('> sph_ifs_wavelength_recalibration status = {}'.format(done)) + self._logger.debug(f'> sph_ifs_wavelength_recalibration status = {done}') if frames_info_preproc is not None: done = True files = frames_info_preproc.index for file, idx in files: - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - file = list(path.preproc.glob('{}.fits'.format(fname))) + fname = f'{file}_DIT{idx:03d}_preproc' + file = list(path.preproc.glob(f'{fname}.fits')) done = done and (len(file) == 1) if done: self._update_recipe_status('sph_ifs_preprocess_science', sphere.SUCCESS) - self._logger.debug('> sph_ifs_preprocess_science status = {}'.format(done)) + self._logger.debug(f'> sph_ifs_preprocess_science status = {done}') done = True files = frames_info_preproc.index for file, idx in files: - fname = '{0}_DIT{1:03d}_preproc_?????'.format(file, idx) - file = list(path.preproc.glob('{}.fits'.format(fname))) + fname = f'{file}_DIT{idx:03d}_preproc_?????' 
+ file = list(path.preproc.glob(f'{fname}.fits')) done = done and (len(file) == 1) if done: self._update_recipe_status('sph_ifs_science_cubes', sphere.SUCCESS) - self._logger.debug('> sph_ifs_science_cubes status = {}'.format(done)) + self._logger.debug(f'> sph_ifs_science_cubes status = {done}') done = True files = frames_info_preproc[(frames_info_preproc['DPR TYPE'] == 'OBJECT,FLUX') | (frames_info_preproc['DPR TYPE'] == 'OBJECT,CENTER')].index for file, idx in files: - fname = '{0}_DIT{1:03d}_preproc_centers'.format(file, idx) - file = list(path.preproc.glob('{}.fits'.format(fname))) + fname = f'{file}_DIT{idx:03d}_preproc_centers' + file = list(path.preproc.glob(f'{fname}.fits')) done = done and (len(file) == 1) if done: self._update_recipe_status('sph_ifs_star_center', sphere.SUCCESS) - self._logger.debug('> sph_ifs_star_center status = {}'.format(done)) + self._logger.debug(f'> sph_ifs_star_center status = {done}') # reduction status self._status = sphere.INCOMPLETE @@ -947,7 +927,7 @@ def sort_files(self): self._status = sphere.FATAL return - self._logger.info(' * found {0} raw FITS files'.format(len(files))) + self._logger.info(f' * found {len(files)} raw FITS files') # read list of keywords self._logger.debug('> read keyword list') @@ -974,7 +954,7 @@ def sort_files(self): self._logger.debug('> read FITS keywords') for f in files: - hdu = fits.open(path.raw / '{}.fits'.format(f)) + hdu = fits.open(path.raw / f'{f}.fits') hdr = hdu[0].header for k, sk in zip(keywords, keywords_short): @@ -1001,7 +981,7 @@ def sort_files(self): # check instruments instru = files_info['SEQ ARM'].unique() if len(instru) != 1: - self._logger.critical('Sequence is mixing different instruments: {0}'.format(instru)) + self._logger.critical(f'Sequence is mixing different instruments: {instru}') self._update_recipe_status('sort_files', sphere.ERROR) self._status = sphere.FATAL return @@ -1109,7 +1089,7 @@ def sort_frames(self): ra_drot_h = np.floor(ra_drot/1e4) ra_drot_m = 
np.floor((ra_drot - ra_drot_h*1e4)/1e2) ra_drot_s = ra_drot - ra_drot_h*1e4 - ra_drot_m*1e2 - RA = '{:02.0f}:{:02.0f}:{:02.3f}'.format(ra_drot_h, ra_drot_m, ra_drot_s) + RA = f'{ra_drot_h:02.0f}:{ra_drot_m:02.0f}:{ra_drot_s:02.3f}' dec_drot = cinfo['INS4 DROT2 DEC'][0] sign = np.sign(dec_drot) @@ -1118,33 +1098,34 @@ def sort_frames(self): dec_drot_m = np.floor((udec_drot - dec_drot_d*1e4)/1e2) dec_drot_s = udec_drot - dec_drot_d*1e4 - dec_drot_m*1e2 dec_drot_d *= sign - DEC = '{:02.0f}:{:02.0f}:{:02.2f}'.format(dec_drot_d, dec_drot_m, dec_drot_s) + DEC = f'{dec_drot_d:02.0f}:{dec_drot_m:02.0f}:{dec_drot_s:02.2f}' pa_start = cinfo['PARANG'][0] pa_end = cinfo['PARANG'][-1] - posang = cinfo['INS4 DROT2 POSANG'].unique() - + posang = cinfo['INS4 DROT2 POSANG'].unique() + posangs = [f'{p:.2f}°' for p in posang] + date = str(cinfo['DATE'][0])[0:10] - self._logger.info(' * Programme ID: {0}'.format(cinfo['OBS PROG ID'][0])) - self._logger.info(' * OB name: {0}'.format(cinfo['OBS NAME'][0])) - self._logger.info(' * OB ID: {0}'.format(cinfo['OBS ID'][0])) - self._logger.info(' * Object: {0}'.format(cinfo['OBJECT'][0])) - self._logger.info(' * RA / DEC: {0} / {1}'.format(RA, DEC)) - self._logger.info(' * Date: {0}'.format(date)) - self._logger.info(' * Instrument: {0}'.format(cinfo['SEQ ARM'][0])) - self._logger.info(' * Derotator: {0}'.format(cinfo['INS4 DROT2 MODE'][0])) - self._logger.info(' * VIS WFS mode: {0}'.format(cinfo['AOS VISWFS MODE'][0])) - self._logger.info(' * IR WFS mode: {0}'.format(cinfo['AOS IRWFS MODE'][0])) - self._logger.info(' * Coronagraph: {0}'.format(cinfo['INS COMB ICOR'][0])) - self._logger.info(' * Mode: {0}'.format(cinfo['INS1 MODE'][0])) - self._logger.info(' * Filter: {0}'.format(cinfo['INS2 COMB IFS'][0])) - self._logger.info(' * DIT: {0:.2f} sec'.format(cinfo['DET SEQ1 DIT'][0])) - self._logger.info(' * NDIT: {0:.0f}'.format(cinfo['DET NDIT'][0])) - self._logger.info(' * Texp: {0:.2f} min'.format(cinfo['DET SEQ1 DIT'].sum()/60)) - 
self._logger.info(' * PA: {0:.2f}° ==> {1:.2f}° = {2:.2f}°'.format(pa_start, pa_end, np.abs(pa_end-pa_start))) - self._logger.info(' * POSANG: {0}'.format(', '.join(['{:.2f}°'.format(p) for p in posang]))) + self._logger.info(f" * Programme ID: {cinfo['OBS PROG ID'][0]}") + self._logger.info(f" * OB name: {cinfo['OBS NAME'][0]}") + self._logger.info(f" * OB ID: {cinfo['OBS ID'][0]}") + self._logger.info(f" * Object: {cinfo['OBJECT'][0]}") + self._logger.info(f' * RA / DEC: {RA} / {DEC}') + self._logger.info(f' * Date: {date}') + self._logger.info(f" * Instrument: {cinfo['SEQ ARM'][0]}") + self._logger.info(f" * Derotator: {cinfo['INS4 DROT2 MODE'][0]}") + self._logger.info(f" * VIS WFS mode: {cinfo['AOS VISWFS MODE'][0]}") + self._logger.info(f" * IR WFS mode: {cinfo['AOS IRWFS MODE'][0]}") + self._logger.info(f" * Coronagraph: {cinfo['INS COMB ICOR'][0]}") + self._logger.info(f" * Mode: {cinfo['INS1 MODE'][0]}") + self._logger.info(f" * Filter: {cinfo['INS2 COMB IFS'][0]}") + self._logger.info(f" * DIT: {cinfo['DET SEQ1 DIT'][0]:.2f} sec") + self._logger.info(f" * NDIT: {cinfo['DET NDIT'][0]:.0f}") + self._logger.info(f" * Texp: {cinfo['DET SEQ1 DIT'].sum() / 60:.2f} min") + self._logger.info(f' * PA: {pa_start:.2f}° ==> {pa_end:.2f}° = {np.abs(pa_end - pa_start):.2f}°') + self._logger.info(f" * POSANG: {', '.join(posangs)}") # update recipe execution self._update_recipe_status('sort_frames', sphere.SUCCESS) @@ -1175,7 +1156,7 @@ def check_files_association(self): # instrument arm arm = files_info['SEQ ARM'].unique() if len(arm) != 1: - self._logger.error('Sequence is mixing different instruments: {0}'.format(arm)) + self._logger.error(f'Sequence is mixing different instruments: {arm}') self._update_recipe_status('check_files_association', sphere.ERROR) return @@ -1192,7 +1173,7 @@ def check_files_association(self): elif mode == 'OBS_H': mode_short = 'YJH' else: - self._logger.error('Unknown IFS mode {0}'.format(mode)) + self._logger.error(f'Unknown IFS mode 
{mode}') self._update_recipe_status('check_files_association', sphere.ERROR) return @@ -1210,13 +1191,13 @@ def check_files_association(self): # white flat self._logger.debug('> check white flat requirements') - cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == 'CAL_BB_2_{0}'.format(mode_short))] + cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == f'CAL_BB_2_{mode_short}')] if len(cfiles) < 2: error_flag += 1 - self._logger.error(' * there should be 2 flat files for white lamp, found {0}'.format(len(cfiles))) + self._logger.error(f' * there should be 2 flat files for white lamp, found {len(cfiles)}') elif len(cfiles) > 2: warning_flag += 1 - self._logger.warning(' * there should be 2 flat files for white lamp, found {0}. Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 2 flat files for white lamp, found {len(cfiles)}. Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -1229,13 +1210,13 @@ def check_files_association(self): # 1020 nm flat self._logger.debug('> check 1020 nm flat requirements') - cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == 'CAL_NB1_1_{0}'.format(mode_short))] + cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == f'CAL_NB1_1_{mode_short}')] if len(cfiles) < 2: error_flag += 1 - self._logger.error(' * there should be 2 flat files for 1020 nm filter, found {0}'.format(len(cfiles))) + self._logger.error(f' * there should be 2 flat files for 1020 nm filter, found {len(cfiles)}') elif len(cfiles) > 2: warning_flag += 1 - self._logger.warning(' * there should be 2 flat files for 1020 nm filter, found {0}. Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 2 flat files for 1020 nm filter, found {len(cfiles)}. 
Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -1248,13 +1229,13 @@ def check_files_association(self): # 1230 nm flat self._logger.debug('> check 1230 nm flat requirements') - cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == 'CAL_NB2_1_{0}'.format(mode_short))] + cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == f'CAL_NB2_1_{mode_short}')] if len(cfiles) < 2: error_flag += 1 - self._logger.error(' * there should be 2 flat files for 1230 nm filter, found {0}'.format(len(cfiles))) + self._logger.error(f' * there should be 2 flat files for 1230 nm filter, found {len(cfiles)}') elif len(cfiles) > 2: warning_flag += 1 - self._logger.warning(' * there should be 2 flat files for 1230 nm filter, found {0}. Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 2 flat files for 1230 nm filter, found {len(cfiles)}. Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -1267,13 +1248,13 @@ def check_files_association(self): # 1300 nm flat self._logger.debug('> check 1300 nm flat requirements') - cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == 'CAL_NB3_1_{0}'.format(mode_short))] + cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == f'CAL_NB3_1_{mode_short}')] if len(cfiles) < 2: error_flag += 1 - self._logger.error(' * there should be 2 flat files for 1300 nm filter, found {0}'.format(len(cfiles))) + self._logger.error(f' * there should be 2 flat files for 1300 nm filter, found {len(cfiles)}') elif len(cfiles) > 2: warning_flag += 1 - self._logger.warning(' * there should be 2 flat files for 1300 nm filter, found {0}. 
Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 2 flat files for 1300 nm filter, found {len(cfiles)}. Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -1287,13 +1268,13 @@ def check_files_association(self): # 1550 nm flat (YJH mode only) if mode_short == 'YJH': self._logger.debug('> check 1550 nm flat requirements') - cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == 'CAL_NB4_2_{0}'.format(mode_short))] + cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS2 COMB IFS'] == f'CAL_NB4_2_{mode_short}')] if len(cfiles) < 2: error_flag += 1 - self._logger.error(' * there should be 2 flat files for 1550 nm filter, found {0}'.format(len(cfiles))) + self._logger.error(f' * there should be 2 flat files for 1550 nm filter, found {len(cfiles)}') elif len(cfiles) > 2: warning_flag += 1 - self._logger.warning(' * there should be 2 flat files for 1550 nm filter, found {0}. Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 2 flat files for 1550 nm filter, found {len(cfiles)}. Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -1312,7 +1293,7 @@ def check_files_association(self): self._logger.error(' * there should be 1 spectra position file, found none.') elif len(cfiles) > 1: warning_flag += 1 - self._logger.warning(' * there should be 1 spectra position file, found {0}. Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 1 spectra position file, found {len(cfiles)}. 
Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -1331,7 +1312,7 @@ def check_files_association(self): self._logger.error(' * there should be 1 wavelength calibration file, found none.') elif len(cfiles) > 1: warning_flag += 1 - self._logger.warning(' * there should be 1 wavelength calibration file, found {0}. Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 1 wavelength calibration file, found {len(cfiles)}. Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -1350,7 +1331,7 @@ def check_files_association(self): self._logger.error(' * there should be 1 IFU flat file, found none') elif len(cfiles) > 1: warning_flag += 1 - self._logger.warning(' * there should be 1 IFU flat file, found {0}. Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 1 IFU flat file, found {len(cfiles)}. Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -1385,22 +1366,22 @@ def check_files_association(self): (calibs['DET SEQ1 DIT'].round(2) == DIT)] if len(cfiles) == 0: warning_flag += 1 - self._logger.warning(' * there is no dark/background for science files with DIT={0} sec. It is *highly recommended* to include one to obtain the best data reduction. A single dark/background file is sufficient, and it can easily be downloaded from the ESO archive'.format(DIT)) + self._logger.warning(f' * there is no dark/background for science files with DIT={DIT} sec. It is *highly recommended* to include one to obtain the best data reduction. 
A single dark/background file is sufficient, and it can easily be downloaded from the ESO archive') # sky backgrounds cfiles = files_info[(files_info['DPR TYPE'] == 'SKY') & (files_info['DET SEQ1 DIT'].round(2) == DIT)] if len(cfiles) == 0: warning_flag += 1 - self._logger.warning(' * there is no sky background for science files with DIT={0} sec. Using a sky background instead of an internal instrumental background can usually provide a cleaner data reduction'.format(DIT)) + self._logger.warning(f' * there is no sky background for science files with DIT={DIT} sec. Using a sky background instead of an internal instrumental background can usually provide a cleaner data reduction') # error reporting self._logger.debug('> report status') if error_flag: - self._logger.error('There are {0} warning(s) and {1} error(s) in the classification of files'.format(warning_flag, error_flag)) + self._logger.error(f'There are {warning_flag} warning(s) and {error_flag} error(s) in the classification of files') self._update_recipe_status('check_files_association', sphere.ERROR) return else: - self._logger.warning('There are {0} warning(s) and {1} error(s) in the classification of files'.format(warning_flag, error_flag)) + self._logger.warning(f'There are {warning_flag} warning(s) and {error_flag} error(s) in the classification of files') # save self._logger.debug('> save files.csv') @@ -1454,14 +1435,14 @@ def sph_ifs_cal_dark(self, silent=True): if len(cfiles) == 0: continue - self._logger.info(' * {0} with DIT={1:.2f} sec ({2} files)'.format(ctype, DIT, len(cfiles))) + self._logger.info(f' * {ctype} with DIT={DIT:.2f} sec ({len(cfiles)} files)') # create sof self._logger.debug('> create sof file') - sof = path.sof / 'dark_DIT={0:.2f}.sof'.format(DIT) + sof = path.sof / f'dark_DIT={DIT:.2f}.sof' file = open(sof, 'w') for f in files: - file.write('{0}/{1}.fits {2}\n'.format(path.raw, f, 'IFS_DARK_RAW')) + file.write(f"{path.raw}/{f}.fits IFS_DARK_RAW\n") file.close() # products @@ 
-1469,8 +1450,8 @@ def sph_ifs_cal_dark(self, silent=True): loc = 'sky' else: loc = 'internal' - dark_file = 'dark_{0}_DIT={1:.2f}'.format(loc, DIT) - bpm_file = 'dark_{0}_bpm_DIT={1:.2f}'.format(loc, DIT) + dark_file = f'dark_{loc}_DIT={DIT:.2f}' + bpm_file = f'dark_{loc}_bpm_DIT={DIT:.2f}' # esorex parameters args = ['esorex', @@ -1482,8 +1463,8 @@ def sph_ifs_cal_dark(self, silent=True): '--ifs.master_dark.smoothing=5', '--ifs.master_dark.min_acceptable=0.0', '--ifs.master_dark.max_acceptable=2000.0', - '--ifs.master_dark.outfilename={0}/{1}.fits'.format(path.calib, dark_file), - '--ifs.master_dark.badpixfilename={0}/{1}.fits'.format(path.calib, bpm_file), + f'--ifs.master_dark.outfilename={path.calib}/{dark_file}.fits', + f'--ifs.master_dark.badpixfilename={path.calib}/{bpm_file}.fits', str(sof)] # check esorex @@ -1493,7 +1474,7 @@ def sph_ifs_cal_dark(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -1565,13 +1546,13 @@ def sph_ifs_cal_detector_flat(self, silent=True): elif mode == 'OBS_H': mode_short = 'YJH' else: - self._logger.error('Unknown IFS mode {0}'.format(mode)) + self._logger.error(f'Unknown IFS mode {mode}') self._update_recipe_status('sph_ifs_cal_detector_flat', sphere.ERROR) return # bpm files cfiles = files_info[files_info['PRO CATG'] == 'IFS_STATIC_BADPIXELMAP'].index - bpm_files = [path.calib / '{}.fits'.format(f) for f in cfiles] + bpm_files = [path.calib / f'{f}.fits' for f in cfiles] if len(bpm_files) == 0: self._logger.error('Could not fin any bad pixel maps') self._update_recipe_status('sph_ifs_cal_detector_flat', sphere.ERROR) @@ -1583,15 +1564,15 @@ def sph_ifs_cal_detector_flat(self, silent=True): lamps = [ 5, 1, 2, 3, 4] for wave, comb, lamp in zip(waves, combs, lamps): - self._logger.info(' * flat for wavelength {0} nm (filter {1}, lamp 
{2})'.format(wave, comb, lamp)) + self._logger.info(f' * flat for wavelength {wave} nm (filter {comb}, lamp {lamp})') - cfiles = calibs[calibs['INS2 COMB IFS'] == '{0}_{1}'.format(comb, mode_short)] - files = [path.raw / '{}.fits'.format(f) for f in cfiles.index] + cfiles = calibs[calibs['INS2 COMB IFS'] == f'{comb}_{mode_short}'] + files = [path.raw / f'{f}.fits' for f in cfiles.index] if len(files) == 0: continue elif len(files) != 2: - self._logger.error('There should be exactly 2 raw flat files. Found {0}.'.format(len(files))) + self._logger.error(f'There should be exactly 2 raw flat files. Found {len(files)}.') self._update_recipe_status('sph_ifs_cal_detector_flat', sphere.ERROR) return @@ -1603,12 +1584,12 @@ def sph_ifs_cal_detector_flat(self, silent=True): wav = 'white' else: wav = str(int(wave)) - flat_file = 'master_detector_flat_{0}_l{1}'.format(wav, lamp) - bpm_file = 'dff_badpixelname_{0}_l{1}'.format(wav, lamp) + flat_file = f'master_detector_flat_{wav}_l{lamp}' + bpm_file = f'dff_badpixelname_{wav}_l{lamp}' hdu = fits.open(path.raw / files[0]) - fits.writeto(path.calib / '{}.fits'.format(flat_file), flat, header=hdu[0].header, output_verify='silentfix', overwrite=True) - fits.writeto(path.calib / '{}.fits'.format(bpm_file), bpm, header=hdu[0].header, output_verify='silentfix', overwrite=True) + fits.writeto(path.calib / f'{flat_file}.fits', flat, header=hdu[0].header, output_verify='silentfix', overwrite=True) + fits.writeto(path.calib / f'{bpm_file}.fits', bpm, header=hdu[0].header, output_verify='silentfix', overwrite=True) hdu.close() # store products @@ -1663,7 +1644,7 @@ def sph_ifs_cal_specpos(self, silent=True): # get list of files specpos_file = files_info[np.logical_not(files_info['PROCESSED']) & (files_info['DPR TYPE'] == 'SPECPOS,LAMP')] if len(specpos_file) != 1: - self._logger.error('There should be exactly 1 raw specpos files. Found {0}.'.format(len(specpos_file))) + self._logger.error(f'There should be exactly 1 raw specpos files. 
Found {len(specpos_file)}.') self._update_recipe_status('sph_ifs_cal_specpos', sphere.ERROR) return @@ -1681,7 +1662,7 @@ def sph_ifs_cal_specpos(self, silent=True): elif mode == 'OBS_H': Hmode = 'TRUE' else: - self._logger.error('Unknown IFS mode {0}'.format(mode)) + self._logger.error(f'Unknown IFS mode {mode}') self._update_recipe_status('sph_ifs_cal_specpos', sphere.ERROR) return @@ -1689,8 +1670,8 @@ def sph_ifs_cal_specpos(self, silent=True): self._logger.debug('> create sof file') sof = path.sof / 'specpos.sof' file = open(sof, 'w') - file.write('{0}/{1}.fits {2}\n'.format(path.raw, specpos_file.index[0], 'IFS_SPECPOS_RAW')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, dark_file.index[0], 'IFS_MASTER_DARK')) + file.write(f"{path.raw}/{specpos_file.index[0]}.fits IFS_SPECPOS_RAW\n") + file.write(f"{path.calib}/{dark_file.index[0]}.fits IFS_MASTER_DARK\n") file.close() # products @@ -1701,8 +1682,8 @@ def sph_ifs_cal_specpos(self, silent=True): '--no-checksum=TRUE', '--no-datamd5=TRUE', 'sph_ifs_spectra_positions', - '--ifs.spectra_positions.hmode={0}'.format(Hmode), - '--ifs.spectra_positions.outfilename={0}/{1}.fits'.format(path.calib, specp_file), + f'--ifs.spectra_positions.hmode={Hmode}', + f'--ifs.spectra_positions.outfilename={path.calib}/{specp_file}.fits', str(sof)] # check esorex @@ -1712,7 +1693,7 @@ def sph_ifs_cal_specpos(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -1768,13 +1749,13 @@ def sph_ifs_cal_wave(self, silent=True): # get list of files wave_file = files_info[np.logical_not(files_info['PROCESSED']) & (files_info['DPR TYPE'] == 'WAVE,LAMP')] if len(wave_file) != 1: - self._logger.error('There should be exactly 1 raw wavelength calibration file. 
Found {0}.'.format(len(wave_file))) + self._logger.error(f'There should be exactly 1 raw wavelength calibration file. Found {len(wave_file)}.') self._update_recipe_status('sph_ifs_cal_wave', sphere.ERROR) return specpos_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_SPECPOS')] if len(specpos_file) != 1: - self._logger.error('There should be exactly 1 specpos file. Found {0}.'.format(len(specpos_file))) + self._logger.error(f'There should be exactly 1 specpos file. Found {len(specpos_file)}.') self._update_recipe_status('sph_ifs_cal_wave', sphere.ERROR) return @@ -1792,16 +1773,16 @@ def sph_ifs_cal_wave(self, silent=True): self._logger.debug('> create sof file') sof = path.sof / 'wave.sof' file = open(sof, 'w') - file.write('{0}/{1}.fits {2}\n'.format(path.raw, wave_file.index[0], 'IFS_WAVECALIB_RAW')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, specpos_file.index[0], 'IFS_SPECPOS')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, dark_file.index[0], 'IFS_MASTER_DARK')) + file.write(f"{path.raw}/{wave_file.index[0]}.fits IFS_WAVECALIB_RAW\n") + file.write(f"{path.calib}/{specpos_file.index[0]}.fits IFS_SPECPOS\n") + file.write(f"{path.calib}/{dark_file.index[0]}.fits IFS_MASTER_DARK\n") file.close() # products wav_file = 'wave_calib' # esorex parameters - self._logger.debug('> IFS mode is {}'.format(mode)) + self._logger.debug(f'> IFS mode is {mode}') if mode == 'OBS_YJ': args = ['esorex', '--no-checksum=TRUE', @@ -1811,7 +1792,7 @@ def sph_ifs_cal_wave(self, silent=True): '--ifs.wave_calib.wavelength_line1=0.9877', '--ifs.wave_calib.wavelength_line2=1.1237', '--ifs.wave_calib.wavelength_line3=1.3094', - '--ifs.wave_calib.outfilename={0}/{1}.fits'.format(path.calib, wav_file), + f'--ifs.wave_calib.outfilename={path.calib}/{wav_file}.fits', str(sof)] elif mode == 'OBS_H': args = ['esorex', @@ -1823,7 +1804,7 @@ def sph_ifs_cal_wave(self, silent=True): '--ifs.wave_calib.wavelength_line2=1.1237', 
'--ifs.wave_calib.wavelength_line3=1.3094', '--ifs.wave_calib.wavelength_line4=1.5451', - '--ifs.wave_calib.outfilename={0}/{1}.fits'.format(path.calib, wav_file), + f'--ifs.wave_calib.outfilename={path.calib}/{wav_file}.fits', str(sof)] # check esorex @@ -1833,7 +1814,7 @@ def sph_ifs_cal_wave(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -1860,7 +1841,7 @@ def sph_ifs_cal_wave(self, silent=True): # store default wavelength calibration in preproc self._logger.debug('> compute default wavelength calibration') - hdr = fits.getheader(path.calib / '{}.fits'.format(wav_file)) + hdr = fits.getheader(path.calib / f'{wav_file}.fits') wave_min = hdr['HIERARCH ESO DRS IFS MIN LAMBDA']*1000 wave_max = hdr['HIERARCH ESO DRS IFS MAX LAMBDA']*1000 @@ -1904,7 +1885,7 @@ def sph_ifs_cal_ifu_flat(self, silent=True): elif mode == 'OBS_H': mode_short = 'YJH' else: - self._logger.error('Unknown IFS mode {0}'.format(mode)) + self._logger.error(f'Unknown IFS mode {mode}') self._update_recipe_status('sph_ifs_cal_ifu_flat', sphere.ERROR) return @@ -1912,13 +1893,13 @@ def sph_ifs_cal_ifu_flat(self, silent=True): ifu_flat_file = files_info[np.logical_not(files_info['PROCESSED']) & (files_info['DPR TYPE'] == 'FLAT,LAMP') & (files_info['DPR TECH'] == 'IFU')] if len(ifu_flat_file) != 1: - self._logger.error('There should be exactly 1 raw IFU flat file. Found {0}.'.format(len(ifu_flat_file))) + self._logger.error(f'There should be exactly 1 raw IFU flat file. Found {len(ifu_flat_file)}.') self._update_recipe_status('sph_ifs_cal_ifu_flat', sphere.ERROR) return wave_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_WAVECALIB')] if len(wave_file) != 1: - self._logger.error('There should be exactly 1 wavelength calibration file. 
Found {0}.'.format(len(wave_file))) + self._logger.error(f'There should be exactly 1 wavelength calibration file. Found {len(wave_file)}.') self._update_recipe_status('sph_ifs_cal_ifu_flat', sphere.ERROR) return @@ -1930,38 +1911,38 @@ def sph_ifs_cal_ifu_flat(self, silent=True): return flat_white_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_BB_2_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_BB_2_{mode_short}')] if len(flat_white_file) != 1: - self._logger.error('There should be exactly 1 white flat file. Found {0}.'.format(len(flat_white_file))) + self._logger.error(f'There should be exactly 1 white flat file. Found {len(flat_white_file)}.') self._update_recipe_status('sph_ifs_cal_ifu_flat', sphere.ERROR) return flat_1020_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_NB1_1_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_NB1_1_{mode_short}')] if len(flat_1020_file) != 1: - self._logger.error('There should be exactly 1 1020 nm flat file. Found {0}.'.format(len(flat_1020_file))) + self._logger.error(f'There should be exactly 1 1020 nm flat file. Found {len(flat_1020_file)}.') self._update_recipe_status('sph_ifs_cal_ifu_flat', sphere.ERROR) return flat_1230_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_NB2_1_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_NB2_1_{mode_short}')] if len(flat_1230_file) != 1: - self._logger.error('There should be exactly 1 1230 nm flat file. Found {0}.'.format(len(flat_1230_file))) + self._logger.error(f'There should be exactly 1 1230 nm flat file. 
Found {len(flat_1230_file)}.') self._update_recipe_status('sph_ifs_cal_ifu_flat', sphere.ERROR) return flat_1300_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_NB3_1_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_NB3_1_{mode_short}')] if len(flat_1300_file) != 1: - self._logger.error('There should be exactly 1 1300 nm flat file. Found {0}.'.format(len(flat_1300_file))) + self._logger.error(f'There should be exactly 1 1300 nm flat file. Found {len(flat_1300_file)}.') self._update_recipe_status('sph_ifs_cal_ifu_flat', sphere.ERROR) return if mode == 'OBS_H': flat_1550_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_NB4_2_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_NB4_2_{mode_short}')] if len(flat_1550_file) != 1: - self._logger.error('There should be exactly 1 1550 nm flat file. Found {0}.'.format(len(flat_1550_file))) + self._logger.error(f'There should be exactly 1 1550 nm flat file. 
Found {len(flat_1550_file)}.') self._update_recipe_status('sph_ifs_cal_ifu_flat', sphere.ERROR) return @@ -1969,16 +1950,16 @@ def sph_ifs_cal_ifu_flat(self, silent=True): self._logger.debug('> create sof file') sof = path.sof / 'ifu_flat.sof' file = open(sof, 'w') - file.write('{0}/{1}.fits {2}\n'.format(path.raw, ifu_flat_file.index[0], 'IFS_FLAT_FIELD_RAW')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, wave_file.index[0], 'IFS_WAVECALIB')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, dark_file.index[0], 'IFS_MASTER_DARK')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_white_file.index[0], 'IFS_MASTER_DFF_SHORT')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_white_file.index[0], 'IFS_MASTER_DFF_LONGBB')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_1020_file.index[0], 'IFS_MASTER_DFF_LONG1')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_1230_file.index[0], 'IFS_MASTER_DFF_LONG2')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_1300_file.index[0], 'IFS_MASTER_DFF_LONG3')) + file.write(f"{path.raw}/{ifu_flat_file.index[0]}.fits IFS_FLAT_FIELD_RAW\n") + file.write(f"{path.calib}/{wave_file.index[0]}.fits IFS_WAVECALIB\n") + file.write(f"{path.calib}/{dark_file.index[0]}.fits IFS_MASTER_DARK\n") + file.write(f"{path.calib}/{flat_white_file.index[0]}.fits IFS_MASTER_DFF_SHORT\n") + file.write(f"{path.calib}/{flat_white_file.index[0]}.fits IFS_MASTER_DFF_LONGBB\n") + file.write(f"{path.calib}/{flat_1020_file.index[0]}.fits IFS_MASTER_DFF_LONG1\n") + file.write(f"{path.calib}/{flat_1230_file.index[0]}.fits IFS_MASTER_DFF_LONG2\n") + file.write(f"{path.calib}/{flat_1300_file.index[0]}.fits IFS_MASTER_DFF_LONG3\n") if mode == 'OBS_H': - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_1550_file.index[0], 'IFS_MASTER_DFF_LONG4')) + file.write(f"{path.calib}/{flat_1550_file.index[0]}.fits IFS_MASTER_DFF_LONG4\n") file.close() # products @@ -1990,7 +1971,7 @@ def sph_ifs_cal_ifu_flat(self, 
silent=True): '--no-datamd5=TRUE', 'sph_ifs_instrument_flat', '--ifs.instrument_flat.nofit=TRUE', - '--ifs.instrument_flat.ifu_filename={0}/{1}.fits'.format(path.calib, ifu_file), + f'--ifs.instrument_flat.ifu_filename={path.calib}/{ifu_file}.fits', str(sof)] # check esorex @@ -2000,7 +1981,7 @@ def sph_ifs_cal_ifu_flat(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -2110,7 +2091,7 @@ def sph_ifs_preprocess_science(self, # bpm if fix_badpix: bpm_files = files_info[files_info['PRO CATG'] == 'IFS_STATIC_BADPIXELMAP'].index - bpm_files = [path.calib / '{}.fits'.format(f) for f in bpm_files] + bpm_files = [path.calib / f'{f}.fits' for f in bpm_files] if len(bpm_files) == 0: self._logger.error('Could not fin any bad pixel maps') @@ -2138,7 +2119,7 @@ def sph_ifs_preprocess_science(self, for DIT in sci_DITs: sfiles = sci_files[sci_files['DET SEQ1 DIT'].round(2) == DIT] - self._logger.info('{0} files of type {1} with DIT={2} sec'.format(len(sfiles), typ, DIT)) + self._logger.info(f'{len(sfiles)} files of type {typ} with DIT={DIT} sec') if subtract_background: # look for sky, then background, then darks @@ -2150,17 +2131,17 @@ def sph_ifs_preprocess_science(self, (files_info['DET SEQ1 DIT'].round(2) == DIT)] if len(dfiles) != 0: break - self._logger.info(' ==> found {0} corresponding {1} file'.format(len(dfiles), d)) + self._logger.info(f' ==> found {len(dfiles)} corresponding {d} file') if len(dfiles) == 0: # issue a warning if absolutely no background is found self._logger.warning('No background has been found. 
Pre-processing will continue but data quality will likely be affected') bkg = np.zeros((2048, 2048)) elif len(dfiles) == 1: - bkg = fits.getdata(path.calib / '{}.fits'.format(dfiles.index[0])) + bkg = fits.getdata(path.calib / f'{dfiles.index[0]}.fits') elif len(dfiles) > 1: # FIXME: handle cases when multiple backgrounds are found? - self._logger.error('Unexpected number of background files ({0})'.format(len(dfiles))) + self._logger.error(f'Unexpected number of background files ({len(dfiles)})') self._update_recipe_status('sph_ifs_preprocess_science', sphere.ERROR) return @@ -2169,11 +2150,11 @@ def sph_ifs_preprocess_science(self, # frames_info extract finfo = frames_info.loc[(fname, slice(None)), :] - self._logger.info(' * file {0}/{1}: {2}, NDIT={3}'.format(idx+1, len(sfiles), fname, len(finfo))) + self._logger.info(f' * file {idx + 1}/{len(sfiles)}: {fname}, NDIT={len(finfo)}') # read data self._logger.info(' ==> read data') - img, hdr = fits.getdata(path.raw / '{}.fits'.format(fname), header=True) + img, hdr = fits.getdata(path.raw / f'{fname}.fits', header=True) # add extra dimension to single images to make cubes if img.ndim == 2: @@ -2183,14 +2164,14 @@ def sph_ifs_preprocess_science(self, true_north = self.config['cal_true_north'] if (typ == 'OBJECT,CENTER'): if collapse_center: - self._logger.info(' ==> collapse: mean ({0} -> 1 frame, 0 dropped)'.format(len(img))) + self._logger.info(f' ==> collapse: mean ({len(img)} -> 1 frame, 0 dropped)') img = np.mean(img, axis=0, keepdims=True) frames_info_new = toolbox.collapse_frames_info(finfo, fname, true_north, 'mean', logger=self._logger) else: frames_info_new = toolbox.collapse_frames_info(finfo, fname, true_north, 'none', logger=self._logger) elif (typ == 'OBJECT,FLUX'): if collapse_psf: - self._logger.info(' ==> collapse: mean ({0} -> 1 frame, 0 dropped)'.format(len(img))) + self._logger.info(f' ==> collapse: mean ({len(img)} -> 1 frame, 0 dropped)') img = np.mean(img, axis=0, keepdims=True) frames_info_new 
= toolbox.collapse_frames_info(finfo, fname, true_north, 'mean', logger=self._logger) else: @@ -2198,7 +2179,7 @@ def sph_ifs_preprocess_science(self, elif (typ == 'OBJECT'): if collapse_science: if collapse_type == 'mean': - self._logger.info(' ==> collapse: mean ({0} -> 1 frame, 0 dropped)'.format(len(img))) + self._logger.info(f' ==> collapse: mean ({len(img)} -> 1 frame, 0 dropped)') img = np.mean(img, axis=0, keepdims=True) frames_info_new = toolbox.collapse_frames_info(finfo, fname, true_north, 'mean', logger=self._logger) @@ -2214,11 +2195,11 @@ def sph_ifs_preprocess_science(self, dropped = NDIT % coadd_value if coadd_value > NDIT: - self._logger.error('coadd_value ({0}) must be < NDIT ({1})'.format(coadd_value, NDIT)) + self._logger.error(f'coadd_value ({coadd_value}) must be < NDIT ({NDIT})') self._update_recipe_status('sph_ifs_preprocess_science', sphere.ERROR) return - self._logger.info(' ==> collapse: coadd by {0} ({1} -> {2} frames, {3} dropped)'.format(coadd_value, NDIT, NDIT_new, dropped)) + self._logger.info(f' ==> collapse: coadd by {coadd_value} ({NDIT} -> {NDIT_new} frames, {dropped} dropped)') # coadd frames nimg = np.empty((NDIT_new, 2048, 2048), dtype=img.dtype) @@ -2228,7 +2209,7 @@ def sph_ifs_preprocess_science(self, frames_info_new = toolbox.collapse_frames_info(finfo, fname, true_north, 'coadd', coadd_value=coadd_value, logger=self._logger) else: - self._logger.error('Unknown collapse type {0}'.format(collapse_type)) + self._logger.error(f'Unknown collapse type {collapse_type}') self._update_recipe_status('sph_ifs_preprocess_science', sphere.ERROR) return else: @@ -2283,7 +2264,7 @@ def sph_ifs_preprocess_science(self, for f in range(len(img)): frame = img[f].squeeze() hdr['HIERARCH ESO DET NDIT'] = 1 - fits.writeto(path.preproc / '{}_DIT{:03d}_preproc.fits'.format(fname, f), frame, hdr, + fits.writeto(path.preproc / f'{fname}_DIT{f:03d}_preproc.fits', frame, hdr, overwrite=True, output_verify='silentfix') # sort and save final dataframe 
@@ -2319,7 +2300,7 @@ def sph_ifs_preprocess_wave(self): # bpm bpm_files = files_info[files_info['PRO CATG'] == 'IFS_STATIC_BADPIXELMAP'].index - bpm_files = [path.calib / '{}.fits'.format(f) for f in bpm_files] + bpm_files = [path.calib / f'{f}.fits' for f in bpm_files] if len(bpm_files) == 0: self._logger.error('Could not fin any bad pixel maps') self._update_recipe_status('sph_ifs_preprocess_wave', sphere.ERROR) @@ -2334,20 +2315,20 @@ def sph_ifs_preprocess_wave(self): self._logger.error('There should at least 1 dark file for calibrations. Found none.') self._update_recipe_status('sph_ifs_preprocess_wave', sphere.ERROR) return - bkg = fits.getdata(path.calib / '{}.fits'.format(dark_file.index[0])) + bkg = fits.getdata(path.calib / f'{dark_file.index[0]}.fits') # wavelength calibration wave_file = files_info[np.logical_not(files_info['PROCESSED']) & (files_info['DPR TYPE'] == 'WAVE,LAMP')] if len(wave_file) != 1: - self._logger.error('There should be exactly 1 raw wavelength calibration file. Found {0}.'.format(len(wave_file))) + self._logger.error(f'There should be exactly 1 raw wavelength calibration file. 
Found {len(wave_file)}.') self._update_recipe_status('sph_ifs_preprocess_wave', sphere.ERROR) return fname = wave_file.index[0] # read data - self._logger.info(' * {0}'.format(fname)) + self._logger.info(f' * {fname}') self._logger.info(' ==> read data') - img, hdr = fits.getdata(path.raw / '{}.fits'.format(fname), header=True) + img, hdr = fits.getdata(path.raw / f'{fname}.fits', header=True) # collapse self._logger.info(' ==> collapse: mean') @@ -2371,7 +2352,7 @@ def sph_ifs_preprocess_wave(self): hdr['HIERARCH ESO TEL TARG DELTA'] = -900000.0 # save - fits.writeto(path.preproc / '{}_preproc.fits'.format(fname), img, hdr, + fits.writeto(path.preproc / f'{fname}_preproc.fits', img, hdr, overwrite=True, output_verify='silentfix') # update recipe execution @@ -2415,63 +2396,63 @@ def sph_ifs_science_cubes(self, silent=True): elif mode == 'OBS_H': mode_short = 'YJH' else: - self._logger.error('Unknown IFS mode {0}'.format(mode)) + self._logger.error(f'Unknown IFS mode {mode}') self._update_recipe_status('sph_ifs_science_cubes', sphere.ERROR) return # get list of science files sci_files = sorted(list(path.preproc.glob('*_preproc.fits'))) - self._logger.info(' * found {0} pre-processed files'.format(len(sci_files))) + self._logger.info(f' * found {len(sci_files)} pre-processed files') # get list of calibration files bpm_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_STATIC_BADPIXELMAP') & - (files_info['INS2 COMB IFS'] == 'CAL_BB_2_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_BB_2_{mode_short}')] ifu_flat_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_IFU_FLAT_FIELD')] if len(ifu_flat_file) != 1: - self._logger.error('There should be exactly 1 IFU flat file. Found {0}.'.format(len(ifu_flat_file))) + self._logger.error(f'There should be exactly 1 IFU flat file. 
Found {len(ifu_flat_file)}.') self._update_recipe_status('sph_ifs_science_cubes', sphere.ERROR) return wave_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_WAVECALIB')] if len(wave_file) != 1: - self._logger.error('There should be exactly 1 wavelength calibration file. Found {0}.'.format(len(wave_file))) + self._logger.error(f'There should be exactly 1 wavelength calibration file. Found {len(wave_file)}.') self._update_recipe_status('sph_ifs_science_cubes', sphere.ERROR) return flat_white_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_BB_2_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_BB_2_{mode_short}')] if len(flat_white_file) != 1: - self._logger.error('There should be exactly 1 white flat file. Found {0}.'.format(len(flat_white_file))) + self._logger.error(f'There should be exactly 1 white flat file. Found {len(flat_white_file)}.') self._update_recipe_status('sph_ifs_science_cubes', sphere.ERROR) return flat_1020_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_NB1_1_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_NB1_1_{mode_short}')] if len(flat_1020_file) != 1: - self._logger.error('There should be exactly 1 1020 nm flat file. Found {0}.'.format(len(flat_1020_file))) + self._logger.error(f'There should be exactly 1 1020 nm flat file. Found {len(flat_1020_file)}.') self._update_recipe_status('sph_ifs_science_cubes', sphere.ERROR) return flat_1230_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_NB2_1_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_NB2_1_{mode_short}')] if len(flat_1230_file) != 1: - self._logger.error('There should be exactly 1 1230 nm flat file. 
Found {0}.'.format(len(flat_1230_file))) + self._logger.error(f'There should be exactly 1 1230 nm flat file. Found {len(flat_1230_file)}.') self._update_recipe_status('sph_ifs_science_cubes', sphere.ERROR) return flat_1300_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_NB3_1_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_NB3_1_{mode_short}')] if len(flat_1300_file) != 1: - self._logger.error('There should be exactly 1 1300 nm flat file. Found {0}.'.format(len(flat_1300_file))) + self._logger.error(f'There should be exactly 1 1300 nm flat file. Found {len(flat_1300_file)}.') self._update_recipe_status('sph_ifs_science_cubes', sphere.ERROR) return if mode == 'OBS_H': flat_1550_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IFS_MASTER_DFF') & - (files_info['INS2 COMB IFS'] == 'CAL_NB4_2_{0}'.format(mode_short))] + (files_info['INS2 COMB IFS'] == f'CAL_NB4_2_{mode_short}')] if len(flat_1550_file) != 1: - self._logger.error('There should be exactly 1 1550 nm flat file. Found {0}.'.format(len(flat_1550_file))) + self._logger.error(f'There should be exactly 1 1550 nm flat file. 
Found {len(flat_1550_file)}.') self._update_recipe_status('sph_ifs_science_cubes', sphere.ERROR) return @@ -2480,17 +2461,17 @@ def sph_ifs_science_cubes(self, silent=True): sof = path.sof / 'science.sof' file = open(sof, 'w') for f in sci_files: - file.write('{0} {1}\n'.format(f, 'IFS_SCIENCE_DR_RAW')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, ifu_flat_file.index[0], 'IFS_IFU_FLAT_FIELD')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, wave_file.index[0], 'IFS_WAVECALIB')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_white_file.index[0], 'IFS_MASTER_DFF_SHORT')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_white_file.index[0], 'IFS_MASTER_DFF_LONGBB')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, bpm_file.index[0], 'IFS_STATIC_BADPIXELMAP')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_1020_file.index[0], 'IFS_MASTER_DFF_LONG1')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_1230_file.index[0], 'IFS_MASTER_DFF_LONG2')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_1300_file.index[0], 'IFS_MASTER_DFF_LONG3')) + file.write(f"{f} IFS_SCIENCE_DR_RAW\n") + file.write(f"{path.calib}/{ifu_flat_file.index[0]}.fits IFS_IFU_FLAT_FIELD\n") + file.write(f"{path.calib}/{wave_file.index[0]}.fits IFS_WAVECALIB\n") + file.write(f"{path.calib}/{flat_white_file.index[0]}.fits IFS_MASTER_DFF_SHORT\n") + file.write(f"{path.calib}/{flat_white_file.index[0]}.fits IFS_MASTER_DFF_LONGBB\n") + file.write(f"{path.calib}/{bpm_file.index[0]}.fits IFS_STATIC_BADPIXELMAP\n") + file.write(f"{path.calib}/{flat_1020_file.index[0]}.fits IFS_MASTER_DFF_LONG1\n") + file.write(f"{path.calib}/{flat_1230_file.index[0]}.fits IFS_MASTER_DFF_LONG2\n") + file.write(f"{path.calib}/{flat_1300_file.index[0]}.fits IFS_MASTER_DFF_LONG3\n") if mode == 'OBS_H': - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_1550_file.index[0], 'IFS_MASTER_DFF_LONG4')) + file.write(f"{path.calib}/{flat_1550_file.index[0]}.fits 
IFS_MASTER_DFF_LONG4\n") file.close() # esorex parameters @@ -2510,7 +2491,7 @@ def sph_ifs_science_cubes(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -2598,7 +2579,7 @@ def sph_ifs_wavelength_recalibration(self, high_pass=False, offset=(0, 0), box_w # get header of any science file science_files = frames_info[frames_info['DPR CATG'] == 'SCIENCE'].index[0] - fname = '{0}_DIT{1:03d}_preproc_'.format(science_files[0], science_files[1]) + fname = f'{science_files[0]}_DIT{science_files[1]:03d}_preproc_' files = list(path.preproc.glob(fname+'*[0-9].fits')) hdr = fits.getheader(files[0]) @@ -2619,7 +2600,7 @@ def sph_ifs_wavelength_recalibration(self, high_pass=False, offset=(0, 0), box_w return ifs_mode = starcen_files['INS2 COMB IFS'].values[0] - fname = '{0}_DIT{1:03d}_preproc_'.format(starcen_files.index.values[0][0], starcen_files.index.values[0][1]) + fname = f'{starcen_files.index.values[0][0]}_DIT{starcen_files.index.values[0][1]:03d}_preproc_' files = list(path.preproc.glob(fname+'*[0-9].fits')) cube, hdr = fits.getdata(files[0], header=True) @@ -2633,9 +2614,9 @@ def sph_ifs_wavelength_recalibration(self, high_pass=False, offset=(0, 0), box_w # compute centers from waffle spots waffle_orientation = hdr['HIERARCH ESO OCS WAFFLE ORIENT'] - self._logger.debug('> waffle orientation: {}'.format(waffle_orientation)) + self._logger.debug(f'> waffle orientation: {waffle_orientation}') if plot: - save_path = path.products / '{}waffle_fitting.pdf'.format(fname) + save_path = path.products / f'{fname}waffle_fitting.pdf' else: save_path = None spot_center, spot_dist, img_center \ @@ -2655,7 +2636,7 @@ def sph_ifs_wavelength_recalibration(self, high_pass=False, offset=(0, 0), box_w # find wavelength calibration file name wave_file = 
files_info[np.logical_not(files_info['PROCESSED']) & (files_info['DPR TYPE'] == 'WAVE,LAMP')].index[0] - fname = '{0}_preproc_'.format(wave_file) + fname = f'{wave_file}_preproc_' files = list(path.preproc.glob(fname+'*.fits')) # read cube and measure mean flux in all channels @@ -2727,7 +2708,7 @@ def sph_ifs_wavelength_recalibration(self, high_pass=False, offset=(0, 0), box_w wave_final = np.full(nwave, res.x) * wave_scale wave_diff = np.abs(wave_final - wave_drh) - self._logger.info(' ==> difference with calibrated wavelength: min={0:.1f} nm, max={1:.1f} nm'.format(wave_diff.min(), wave_diff.max())) + self._logger.info(f' ==> difference with calibrated wavelength: min={wave_diff.min():.1f} nm, max={wave_diff.max():.1f} nm') # save self._logger.info(' * saving') @@ -2831,11 +2812,11 @@ def sph_ifs_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse flux_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,FLUX'] if len(flux_files) != 0: for file, idx in flux_files.index: - self._logger.info(' * OBJECT,FLUX: {0}'.format(file)) + self._logger.info(f' * OBJECT,FLUX: {file}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc_'.format(file, idx) + fname = f'{file}_DIT{idx:03d}_preproc_' files = list(path.preproc.glob(fname+'*[0-9].fits')) cube, hdr = fits.getdata(files[0], header=True) @@ -2852,7 +2833,7 @@ def sph_ifs_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse # centers if plot: - save_path = path.products / '{}psf_fitting.pdf'.format(fname) + save_path = path.products / f'{fname}psf_fitting.pdf' else: save_path = None img_center = toolbox.star_centers_from_PSF_img_cube(cube, wave_drh, pixel, exclude_fraction=0.15, @@ -2861,17 +2842,17 @@ def sph_ifs_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse # save self._logger.debug('> save centers') - fits.writeto(path.preproc / '{}centers.fits'.format(fname), img_center, overwrite=True) + fits.writeto(path.preproc / 
f'{fname}centers.fits', img_center, overwrite=True) # then OBJECT,CENTER starcen_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] if len(starcen_files) != 0: for file, idx in starcen_files.index: - self._logger.info(' * OBJECT,CENTER: {0}'.format(file)) + self._logger.info(f' * OBJECT,CENTER: {file}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc_'.format(file, idx) + fname = f'{file}_DIT{idx:03d}_preproc_' files = list(path.preproc.glob(fname+'*[0-9].fits')) cube, hdr = fits.getdata(files[0], header=True) @@ -2883,9 +2864,9 @@ def sph_ifs_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse # centers waffle_orientation = hdr['HIERARCH ESO OCS WAFFLE ORIENT'] - self._logger.debug('> waffle orientation: {}'.format(waffle_orientation)) + self._logger.debug(f'> waffle orientation: {waffle_orientation}') if plot: - save_path = path.products / '{}waffle_fitting.pdf'.format(fname) + save_path = path.products / f'{fname}waffle_fitting.pdf' else: save_path = None spot_center, spot_dist, img_center \ @@ -2896,7 +2877,7 @@ def sph_ifs_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse # save self._logger.debug('> save centers') - fits.writeto(path.preproc / '{}centers.fits'.format(fname), img_center, overwrite=True) + fits.writeto(path.preproc / f'{fname}centers.fits', img_center, overwrite=True) # update recipe execution self._update_recipe_status('sph_ifs_star_center', sphere.SUCCESS) @@ -2906,7 +2887,8 @@ def sph_ifs_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_anamorphism=True, - shift_method='fft', manual_center=None, coarse_centering=False, save_scaled=False): + shift_method='fft', manual_center=None, center_selection='first', + coarse_centering=False, save_scaled=False): '''Combine and save the science data into final cubes All types of data are combined independently: PSFs @@ -2982,6 
+2964,13 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a frames. This should be an array of either 2 or nwave*2 values. Default is None + center_selection : str + Specify which star center to use when multiple are + available. Possible values are first, last, and time. The + time option indicates to use the star center file that is + closest in time with respect to each science file. Default + is first + coarse_centering : bool Control if images are finely centered or not before being combined. However the images are still roughly centered by @@ -2997,7 +2986,7 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a save_scaled : bool Also save the wavelength-rescaled cubes. Makes the process much longer. The value of save_scaled is automatically set - to True when coarse_centering is set to True. The default + to False when coarse_centering is set to True. The default is False ''' @@ -3083,17 +3072,17 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # read and combine files for file_idx, (file, idx) in enumerate(flux_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(flux_files), file, idx)) + self._logger.info(f' ==> file {file_idx + 1}/{len(flux_files)}: {file}, DIT #{idx}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc_'.format(file, idx) + fname = f'{file}_DIT{idx:03d}_preproc_' files = list(path.preproc.glob(fname+'?????.fits')) cube = fits.getdata(files[0]) # centers self._logger.debug('> read centers') - cfile = path.preproc / '{}centers.fits'.format(fname) + cfile = path.preproc / f'{fname}centers.fits' if cfile.exists(): centers = fits.getdata(cfile) else: @@ -3120,7 +3109,7 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # center frames for wave_idx, img in enumerate(cube): - self._logger.debug('> wave {}'.format(wave_idx)) + self._logger.debug(f'> 
wave {wave_idx}') cx, cy = centers[wave_idx, :] self._logger.debug('> shift and normalize') @@ -3181,11 +3170,11 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # read and combine files for file_idx, (file, idx) in enumerate(starcen_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(starcen_files), file, idx)) + self._logger.info(f' ==> file {file_idx + 1}/{len(starcen_files)}: {file}, DIT #{idx}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc_'.format(file, idx) + fname = f'{file}_DIT{idx:03d}_preproc_' files = list(path.preproc.glob(fname+'?????.fits')) cube = fits.getdata(files[0]) @@ -3195,7 +3184,7 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a centers = manual_center else: # otherwise read center data - centers = fits.getdata(path.preproc / '{}centers.fits'.format(fname)) + centers = fits.getdata(path.preproc / f'{fname}centers.fits') # make sure we have only integers if user wants coarse centering if coarse_centering: @@ -3217,7 +3206,7 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # center frames for wave_idx, img in enumerate(cube): - self._logger.debug('> wave {}'.format(wave_idx)) + self._logger.debug(f'> wave {wave_idx}') cx, cy = centers[wave_idx, :] self._logger.debug('> shift and normalize') @@ -3263,34 +3252,6 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a if nfiles != 0: self._logger.info(' * OBJECT data') - # use manual center if explicitely requested - self._logger.debug('> read centers') - if manual_center is not None: - centers = manual_center - else: - # otherwise, look whether we have an OBJECT,CENTER frame - - # FIXME: ticket #12. 
Use first DIT of first OBJECT,CENTER - # in the sequence, but it would be better to be able to - # select which CENTER to use - starcen_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] - if len(starcen_files) == 0: - self._logger.warning('No OBJECT,CENTER file in the dataset. Images will be centered using default center ({},{})'.format(*self._default_center)) - centers = np.full((nwave, 2), self._default_center, dtype=np.float) - else: - fname = '{0}_DIT{1:03d}_preproc_centers.fits'.format(starcen_files.index.values[0][0], starcen_files.index.values[0][1]) - fpath = path.preproc / fname - - if fpath.exists(): - centers = fits.getdata(fpath) - else: - self._logger.warning('sph_ifs_star_center() has not been executed. Images will be centered using default center ({},{})'.format(*self._default_center)) - centers = np.full((nwave, 2), self._default_center, dtype=np.float) - - # make sure we have only integers if user wants coarse centering - if coarse_centering: - centers = centers.astype(np.int) - # final center if cpix: cc = science_dim // 2 @@ -3306,11 +3267,49 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # read and combine files for file_idx, (file, idx) in enumerate(object_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(object_files), file, idx)) + self._logger.info(f' ==> file {file_idx + 1}/{len(object_files)}: {file}, DIT #{idx}') + # use manual center if explicitely requested + self._logger.debug('> read centers') + if manual_center is not None: + centers = manual_center + else: + # otherwise, look whether we have an OBJECT,CENTER frame and select the one requested by user + starcen_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] + if len(starcen_files) == 0: + self._logger.warning('No OBJECT,CENTER file in the dataset. 
Images will be centered using default center ({},{})'.format(*self._default_center)) + centers = self._default_center + else: + # selection of the proper OBJECT,CENTER + center_selection = center_selection.lower() + if center_selection == 'first': + center_index = 0 + elif center_selection == 'last': + center_index = len(starcen_files.index.values)-1 + elif center_selection == 'time': + time_cen = starcen_files['DATE-OBS'] + time_sci = frames_info.loc[(file, idx), 'DATE-OBS'] + center_index = np.abs(time_sci - time_cen).argmin() + else: + self._logger.error(f'Unknown OBJECT,CENTER selection {center_selection}. Possible values are first, last, and time.') + self._update_recipe_status('sph_ifs_combine_data', sphere.ERROR) + return + + fname = f'{starcen_files.index.values[center_index][0]}_DIT{starcen_files.index.values[center_index][1]:03d}_preproc_centers.fits' + fpath = path.preproc / fname + if fpath.exists(): + centers = fits.getdata(fpath) + else: + self._logger.warning('sph_ifs_star_center() has not been executed. 
Images will be centered using default center ({},{})'.format(*self._default_center)) + centers = np.full((nwave, 2), self._default_center, dtype=np.float) + + # make sure we have only integers if user wants coarse centering + if coarse_centering: + centers = centers.astype(np.int) + # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc_'.format(file, idx) + fname = f'{file}_DIT{idx:03d}_preproc_' files = list(path.preproc.glob(fname+'*.fits')) cube = fits.getdata(files[0]) @@ -3330,7 +3329,7 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # center frames for wave_idx, img in enumerate(cube): - self._logger.debug('> wave {}'.format(wave_idx)) + self._logger.debug(f'> wave {wave_idx}') cx, cy = centers[wave_idx, :] self._logger.debug('> shift and normalize') @@ -3375,7 +3374,7 @@ def sph_ifs_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a self._status = sphere.COMPLETE - def sph_ifs_clean(self, delete_raw=False, delete_products=False): + def sph_ifs_clean(self, delete_raw=False, delete_products=False, delete_config=False): ''' Clean everything except for raw data and science products (by default) @@ -3386,6 +3385,9 @@ def sph_ifs_clean(self, delete_raw=False, delete_products=False): delete_products : bool Delete science products. Default is False + + delete_config : bool + Delete configuration file. 
Default is False ''' self._logger.info('Clean reduction data') @@ -3398,6 +3400,10 @@ def sph_ifs_clean(self, delete_raw=False, delete_products=False): # remove sub-directories self.path.remove(delete_raw=delete_raw, delete_products=delete_products, logger=self._logger) + # remove config + if delete_config: + self.config._file.unlink() + # update recipe execution self._update_recipe_status('sph_ifs_clean', sphere.SUCCESS) diff --git a/sphere/IRDIS/ImagingReduction.py b/sphere/IRDIS/ImagingReduction.py index d40177a..a02367e 100644 --- a/sphere/IRDIS/ImagingReduction.py +++ b/sphere/IRDIS/ImagingReduction.py @@ -17,8 +17,8 @@ import sphere.utils as utils import sphere.utils.imutils as imutils import sphere.utils.aperture as aperture -import sphere.transmission as transmission -import sphere.toolbox as toolbox +import sphere.utils.toolbox as toolbox +import sphere.utils.transmission as transmission _log = logging.getLogger(__name__) @@ -52,7 +52,7 @@ class ImagingReduction(object): # Constructor ################################################## - def __new__(cls, path, log_level='info', sphere_handler=None): + def __new__(cls, path, clean_start=True, log_level='info', user_config=None, sphere_handler=None): '''Custom instantiation for the class and initialization for the instances @@ -66,9 +66,17 @@ def __new__(cls, path, log_level='info', sphere_handler=None): path : str Path to the directory containing the dataset - level : {'debug', 'info', 'warning', 'error', 'critical'} + clean_start : bool + Remove all results from previous reductions for a clean start. + Default is True + + log_level : {'debug', 'info', 'warning', 'error', 'critical'} The log level of the handler + user_config : str + Path to a user-provided configuration. Default is None, i.e. 
the + reduction will use the package default configuration parameters + sphere_handler : log handler Higher-level SPHERE.Dataset log handler @@ -84,7 +92,7 @@ def __new__(cls, path, log_level='info', sphere_handler=None): # zeroth-order reduction validation raw = path / 'raw' if not raw.exists(): - _log.error('No raw/ subdirectory. {0} is not a valid reduction path'.format(path)) + _log.error(f'No raw/ subdirectory. {path} is not a valid reduction path') return None else: # it's all good: create instance! @@ -121,56 +129,71 @@ def __new__(cls, path, log_level='info', sphere_handler=None): reduction._logger = logger - reduction._logger.info('Creating IRDIS imaging reduction at path {}'.format(path)) + reduction._logger.info(f'Creating IRDIS imaging reduction at path {path}') # # v1.4 - True North correction change # - reduction._logger.warning('#################################################################') - reduction._logger.warning('Starting in the present version of the pipeline, the default ') - reduction._logger.warning('-1.75° true North offset is automatically added to the derotation') - reduction._logger.warning('angles. 
The offset value can be modified in the configuration of ') - reduction._logger.warning('the reduction: ') - reduction._logger.warning(' ') - reduction._logger.warning(' >>> reduction.config[\'cal_true_north\'] = xxx ') - reduction._logger.warning(' ') - reduction._logger.warning('To avoid any issues, make sure to: ') - reduction._logger.warning(' * either reprocess data previously processed with version <1.4 ') - reduction._logger.warning(' * or take into account the offset in your astrometric analysis ') - reduction._logger.warning('#################################################################') + reduction._logger.warning('##################################################################') + reduction._logger.warning('Since version 1.4 of the pipeline, the default -1.75° true North ') + reduction._logger.warning('offset is automatically added to the derotation angles. The offset') + reduction._logger.warning('value can be modified in the configuration of the reduction: ') + reduction._logger.warning(' ') + reduction._logger.warning(' >>> reduction.config[\'cal_true_north\'] = xxx ') + reduction._logger.warning(' ') + reduction._logger.warning('To avoid any issues, make sure to: ') + reduction._logger.warning(' * either reprocess data previously processed with version <1.4 ') + reduction._logger.warning(' * or take into account the offset in your astrometric analysis ') + reduction._logger.warning('##################################################################') + + # + # clean start + # + if clean_start: + reduction._logger.info('Erase outputs of previous reduction for a clean start') + reduction._path.remove(delete_raw=False, delete_products=True, logger=reduction._logger) + config_file = reduction._path.root / 'reduction_config.ini' + if config_file.exists(): + config_file.unlink() # # configuration # - configfile = f'{Path(sphere.__file__).parent}/instruments/{reduction._instrument}.ini' - config = configparser.ConfigParser() + cfgfile = 
f'{Path(sphere.__file__).parent}/instruments/{reduction._instrument}.ini' + cfgparser = configparser.ConfigParser() reduction._logger.debug('> read default configuration') - config.read(configfile) + cfgparser.read(cfgfile) # instrument - reduction._pixel = float(config.get('instrument', 'pixel')) + reduction._pixel = float(cfgparser.get('instrument', 'pixel')) reduction._nwave = 2 # calibration - reduction._wave_cal_lasers = np.array(eval(config.get('calibration', 'wave_cal_lasers'))) + reduction._wave_cal_lasers = np.array(eval(cfgparser.get('calibration', 'wave_cal_lasers'))) # imaging calibration - reduction._default_center = np.array(eval(config.get('calibration-imaging', 'default_center'))) - reduction._orientation_offset = eval(config.get('calibration-imaging', 'orientation_offset')) + reduction._default_center = np.array(eval(cfgparser.get('calibration-imaging', 'default_center'))) + reduction._orientation_offset = eval(cfgparser.get('calibration-imaging', 'orientation_offset')) # reduction parameters - reduction._config = {} + cfg = {} for group in ['reduction', 'reduction-imaging']: - items = dict(config.items(group)) - reduction._config.update(items) + items = dict(cfgparser.items(group)) for key, value in items.items(): try: val = eval(value) except NameError: val = value - reduction._config[key] = val + cfg[key] = val + reduction._config = utils.Configuration(reduction._path, reduction._logger, cfg) + + # load user-provided default configuration parameters + if user_config: + user_config = Path(user_config).expanduser() + reduction._config.load_from_file(user_config) + # # reduction and recipes status # @@ -193,7 +216,7 @@ def __new__(cls, path, log_level='info', sphere_handler=None): ################################################## def __repr__(self): - return ''.format(self._instrument, self._mode, self._path, self.loglevel) + return f'' def __format__(self): return self.__repr__() @@ -258,56 +281,6 @@ def mode(self): # Generic class methods 
################################################## - def show_config(self): - ''' - Shows the reduction configuration - ''' - - # dictionary - dico = self.config - - # misc parameters - print() - print('{0:<30s}{1}'.format('Parameter', 'Value')) - print('-'*35) - keys = [key for key in dico if key.startswith('misc')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # calibrations - print('-'*35) - keys = [key for key in dico if key.startswith('cal')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # pre-processing - print('-'*35) - keys = [key for key in dico if key.startswith('preproc')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # centring - print('-'*35) - keys = [key for key in dico if key.startswith('center')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # combining - print('-'*35) - keys = [key for key in dico if key.startswith('combine')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # clean - print('-'*35) - keys = [key for key in dico if key.startswith('clean')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - print('-'*35) - - print() - - def init_reduction(self): ''' Sort files and frames, perform sanity check @@ -372,6 +345,7 @@ def process_science(self): science_dim=config['combine_science_dim'], correct_anamorphism=config['combine_correct_anamorphism'], manual_center=config['combine_manual_center'], + center_selection=config['combine_center_selection'], coarse_centering=config['combine_coarse_centering'], shift_method=config['combine_shift_method'], save_scaled=config['combine_save_scaled']) @@ -389,7 +363,8 @@ def clean(self): if config['clean']: self.sph_ird_clean(delete_raw=config['clean_delete_raw'], - delete_products=config['clean_delete_products']) + delete_products=config['clean_delete_products'], + delete_config=config['clean_delete_config']) def full_reduction(self): @@ -432,6 +407,9 @@ def _read_info(self): # 
path path = self.path + # load existing configuration + self.config.load() + # files info fname = path.preproc / 'files.csv' if fname.exists(): @@ -501,23 +479,23 @@ def _read_info(self): done = True files = frames_info_preproc.index for file, idx in files: - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - file = list(path.preproc.glob('{}.fits'.format(fname))) + fname = f'{file}_DIT{idx:03d}_preproc' + file = list(path.preproc.glob(f'{fname}.fits')) done = done and (len(file) == 1) if done: self._update_recipe_status('sph_ird_preprocess_science', sphere.SUCCESS) - self._logger.debug('> sph_ird_preprocess_science status = {}'.format(done)) + self._logger.debug(f'> sph_ird_preprocess_science status = {done}') done = True files = frames_info_preproc[(frames_info_preproc['DPR TYPE'] == 'OBJECT,FLUX') | (frames_info_preproc['DPR TYPE'] == 'OBJECT,CENTER')].index for file, idx in files: - fname = '{0}_DIT{1:03d}_preproc_centers'.format(file, idx) - file = list(path.preproc.glob('{}.fits'.format(fname))) + fname = f'{file}_DIT{idx:03d}_preproc_centers' + file = list(path.preproc.glob(f'{fname}.fits')) done = done and (len(file) == 1) if done: self._update_recipe_status('sph_ird_star_center', sphere.SUCCESS) - self._logger.debug('> sph_ird_star_center status = {}'.format(done)) + self._logger.debug(f'> sph_ird_star_center status = {done}') # reduction status self._status = sphere.INCOMPLETE @@ -570,7 +548,7 @@ def sort_files(self): self._status = sphere.FATAL return - self._logger.info(' * found {0} raw FITS files'.format(len(files))) + self._logger.info(f' * found {len(files)} raw FITS files') # read list of keywords self._logger.debug('> read keyword list') @@ -597,7 +575,7 @@ def sort_files(self): self._logger.debug('> read FITS keywords') for f in files: - hdu = fits.open(path.raw / '{}.fits'.format(f)) + hdu = fits.open(path.raw / f'{f}.fits') hdr = hdu[0].header for k, sk in zip(keywords, keywords_short): @@ -624,7 +602,7 @@ def sort_files(self): # check 
instruments instru = files_info['SEQ ARM'].unique() if len(instru) != 1: - self._logger.critical('Sequence is mixing different instruments: {0}'.format(instru)) + self._logger.critical(f'Sequence is mixing different instruments: {instru}') self._update_recipe_status('sort_files', sphere.ERROR) self._status = sphere.FATAL return @@ -732,7 +710,7 @@ def sort_frames(self): ra_drot_h = np.floor(ra_drot/1e4) ra_drot_m = np.floor((ra_drot - ra_drot_h*1e4)/1e2) ra_drot_s = ra_drot - ra_drot_h*1e4 - ra_drot_m*1e2 - RA = '{:02.0f}:{:02.0f}:{:02.3f}'.format(ra_drot_h, ra_drot_m, ra_drot_s) + RA = f'{ra_drot_h:02.0f}:{ra_drot_m:02.0f}:{ra_drot_s:02.3f}' dec_drot = cinfo['INS4 DROT2 DEC'][0] sign = np.sign(dec_drot) @@ -741,33 +719,34 @@ def sort_frames(self): dec_drot_m = np.floor((udec_drot - dec_drot_d*1e4)/1e2) dec_drot_s = udec_drot - dec_drot_d*1e4 - dec_drot_m*1e2 dec_drot_d *= sign - DEC = '{:02.0f}:{:02.0f}:{:02.2f}'.format(dec_drot_d, dec_drot_m, dec_drot_s) + DEC = f'{dec_drot_d:02.0f}:{dec_drot_m:02.0f}:{dec_drot_s:02.2f}' pa_start = cinfo['PARANG'][0] pa_end = cinfo['PARANG'][-1] - posang = cinfo['INS4 DROT2 POSANG'].unique() - + posang = cinfo['INS4 DROT2 POSANG'].unique() + posangs = [f'{p:.2f}°' for p in posang] + date = str(cinfo['DATE'][0])[0:10] - - self._logger.info(' * Programme ID: {0}'.format(cinfo['OBS PROG ID'][0])) - self._logger.info(' * OB name: {0}'.format(cinfo['OBS NAME'][0])) - self._logger.info(' * OB ID: {0}'.format(cinfo['OBS ID'][0])) - self._logger.info(' * Object: {0}'.format(cinfo['OBJECT'][0])) - self._logger.info(' * RA / DEC: {0} / {1}'.format(RA, DEC)) - self._logger.info(' * Date: {0}'.format(date)) - self._logger.info(' * Instrument: {0}'.format(cinfo['SEQ ARM'][0])) - self._logger.info(' * Derotator: {0}'.format(cinfo['INS4 DROT2 MODE'][0])) - self._logger.info(' * VIS WFS mode: {0}'.format(cinfo['AOS VISWFS MODE'][0])) - self._logger.info(' * IR WFS mode: {0}'.format(cinfo['AOS IRWFS MODE'][0])) - self._logger.info(' * 
Coronagraph: {0}'.format(cinfo['INS COMB ICOR'][0])) - self._logger.info(' * Mode: {0}'.format(cinfo['INS1 MODE'][0])) - self._logger.info(' * Filter: {0}'.format(cinfo['INS COMB IFLT'][0])) - self._logger.info(' * DIT: {0:.2f} sec'.format(cinfo['DET SEQ1 DIT'][0])) - self._logger.info(' * NDIT: {0:.0f}'.format(cinfo['DET NDIT'][0])) - self._logger.info(' * Texp: {0:.2f} min'.format(cinfo['DET SEQ1 DIT'].sum()/60)) - self._logger.info(' * PA: {0:.2f}° ==> {1:.2f}° = {2:.2f}°'.format(pa_start, pa_end, np.abs(pa_end-pa_start))) - self._logger.info(' * POSANG: {0}'.format(', '.join(['{:.2f}°'.format(p) for p in posang]))) + + self._logger.info(f" * Programme ID: {cinfo['OBS PROG ID'][0]}") + self._logger.info(f" * OB name: {cinfo['OBS NAME'][0]}") + self._logger.info(f" * OB ID: {cinfo['OBS ID'][0]}") + self._logger.info(f" * Object: {cinfo['OBJECT'][0]}") + self._logger.info(f' * RA / DEC: {RA} / {DEC}') + self._logger.info(f' * Date: {date}') + self._logger.info(f" * Instrument: {cinfo['SEQ ARM'][0]}") + self._logger.info(f" * Derotator: {cinfo['INS4 DROT2 MODE'][0]}") + self._logger.info(f" * VIS WFS mode: {cinfo['AOS VISWFS MODE'][0]}") + self._logger.info(f" * IR WFS mode: {cinfo['AOS IRWFS MODE'][0]}") + self._logger.info(f" * Coronagraph: {cinfo['INS COMB ICOR'][0]}") + self._logger.info(f" * Mode: {cinfo['INS1 MODE'][0]}") + self._logger.info(f" * Filter: {cinfo['INS COMB IFLT'][0]}") + self._logger.info(f" * DIT: {cinfo['DET SEQ1 DIT'][0]:.2f} sec") + self._logger.info(f" * NDIT: {cinfo['DET NDIT'][0]:.0f}") + self._logger.info(f" * Texp: {cinfo['DET SEQ1 DIT'].sum() / 60:.2f} min") + self._logger.info(f' * PA: {pa_start:.2f}° ==> {pa_end:.2f}° = {np.abs(pa_end - pa_start):.2f}°') + self._logger.info(f" * POSANG: {', '.join(posangs)}") # recipe execution status self._update_recipe_status('sort_frames', sphere.SUCCESS) @@ -797,20 +776,20 @@ def check_files_association(self): # instrument arm arm = files_info['SEQ ARM'].unique() if len(arm) != 1: - 
self._logger.error('Sequence is mixing different instruments: {0}'.format(arm)) + self._logger.error(f'Sequence is mixing different instruments: {arm}') self._update_recipe_status('check_files_association', sphere.ERROR) return # IRDIS obs mode and filter combination modes = files_info.loc[files_info['DPR CATG'] == 'SCIENCE', 'INS1 MODE'].unique() if len(modes) != 1: - self._logger.error('Sequence is mixing different types of observations: {0}'.format(modes)) + self._logger.error(f'Sequence is mixing different types of observations: {modes}') self._update_recipe_status('check_files_association', sphere.ERROR) return filter_combs = files_info.loc[files_info['DPR CATG'] == 'SCIENCE', 'INS COMB IFLT'].unique() if len(filter_combs) != 1: - self._logger.error('Sequence is mixing different types of filters combinations: {0}'.format(filter_combs)) + self._logger.error(f'Sequence is mixing different types of filters combinations: {filter_combs}') self._update_recipe_status('check_files_association', sphere.ERROR) return filter_comb = filter_combs[0] @@ -832,7 +811,7 @@ def check_files_association(self): cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') & (calibs['INS COMB IFLT'] == filter_comb)] if len(cfiles) <= 1: error_flag += 1 - self._logger.error(' * there should be more than 1 flat in filter combination {0}'.format(filter_comb)) + self._logger.error(f' * there should be more than 1 flat in filter combination {filter_comb}') ################################################## # static calibrations that depend on science DIT @@ -850,22 +829,22 @@ def check_files_association(self): (calibs['DET SEQ1 DIT'].round(2) == DIT)] if len(cfiles) == 0: warning_flag += 1 - self._logger.warning(' * there is no dark/background for science files with DIT={0} sec. It is *highly recommended* to include one to obtain the best data reduction. 
A single dark/background file is sufficient, and it can easily be downloaded from the ESO archive'.format(DIT)) + self._logger.warning(f' * there is no dark/background for science files with DIT={DIT} sec. It is *highly recommended* to include one to obtain the best data reduction. A single dark/background file is sufficient, and it can easily be downloaded from the ESO archive') # sky backgrounds cfiles = files_info[(files_info['DPR TYPE'] == 'SKY') & (files_info['DET SEQ1 DIT'].round(2) == DIT)] if len(cfiles) == 0: warning_flag += 1 - self._logger.warning(' * there is no sky background for science files with DIT={0} sec. Using a sky background instead of an internal instrumental background can usually provide a cleaner data reduction, especially in K-band'.format(DIT)) + self._logger.warning(f' * there is no sky background for science files with DIT={DIT} sec. Using a sky background instead of an internal instrumental background can usually provide a cleaner data reduction, especially in K-band') # error reporting self._logger.debug('> report status') if error_flag: - self._logger.error('There are {0} warning(s) and {1} error(s) in the classification of files'.format(warning_flag, error_flag)) + self._logger.error(f'There are {warning_flag} warning(s) and {error_flag} error(s) in the classification of files') self._update_recipe_status('check_files_association', sphere.ERROR) return else: - self._logger.warning('There are {0} warning(s) and {1} error(s) in the classification of files'.format(warning_flag, error_flag)) + self._logger.warning(f'There are {warning_flag} warning(s) and {error_flag} error(s) in the classification of files') # recipe execution status self._update_recipe_status('check_files_association', sphere.SUCCESS) @@ -917,14 +896,14 @@ def sph_ird_cal_dark(self, silent=True): if len(cfiles) == 0: continue - self._logger.info(' * {0} in filter {1} with DIT={2:.2f} sec ({3} files)'.format(ctype, cfilt, DIT, len(cfiles))) + self._logger.info(f' * 
{ctype} in filter {cfilt} with DIT={DIT:.2f} sec ({len(cfiles)} files)') # create sof self._logger.debug('> create sof file') - sof = path.sof / 'dark_filt={0}_DIT={1:.2f}.sof'.format(cfilt, DIT) + sof = path.sof / f'dark_filt={cfilt}_DIT={DIT:.2f}.sof' file = open(sof, 'w') for f in files: - file.write('{0}/{1}.fits {2}\n'.format(path.raw, f, 'IRD_DARK_RAW')) + file.write(f"{path.raw}/{f}.fits IRD_DARK_RAW\n") file.close() # products @@ -932,8 +911,8 @@ def sph_ird_cal_dark(self, silent=True): loc = 'sky' else: loc = 'internal' - dark_file = 'dark_{0}_filt={1}_DIT={2:.2f}'.format(loc, cfilt, DIT) - bpm_file = 'dark_{0}_bpm_filt={1}_DIT={2:.2f}'.format(loc, cfilt, DIT) + dark_file = f'dark_{loc}_filt={cfilt}_DIT={DIT:.2f}' + bpm_file = f'dark_{loc}_bpm_filt={cfilt}_DIT={DIT:.2f}' # different max level in K-band max_level = 1000 @@ -947,9 +926,9 @@ def sph_ird_cal_dark(self, silent=True): 'sph_ird_master_dark', '--ird.master_dark.sigma_clip=5.0', '--ird.master_dark.save_addprod=TRUE', - '--ird.master_dark.max_acceptable={0}'.format(max_level), - '--ird.master_dark.outfilename={0}/{1}.fits'.format(path.calib, dark_file), - '--ird.master_dark.badpixfilename={0}/{1}.fits'.format(path.calib, bpm_file), + f'--ird.master_dark.max_acceptable={max_level}', + f'--ird.master_dark.outfilename={path.calib}/{dark_file}.fits', + f'--ird.master_dark.badpixfilename={path.calib}/{bpm_file}.fits', str(sof)] # check esorex @@ -959,7 +938,7 @@ def sph_ird_cal_dark(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -1035,19 +1014,19 @@ def sph_ird_cal_detector_flat(self, silent=True): cfiles = calibs[calibs['INS COMB IFLT'] == cfilt] files = cfiles.index - self._logger.info(' * filter {0} ({1} files)'.format(cfilt, len(cfiles))) + self._logger.info(f' * filter {cfilt} ({len(cfiles)} files)') # 
create sof self._logger.debug('> create sof file') - sof = path.sof / 'flat_filt={0}.sof'.format(cfilt) + sof = path.sof / f'flat_filt={cfilt}.sof' file = open(sof, 'w') for f in files: - file.write('{0}/{1}.fits {2}\n'.format(path.raw, f, 'IRD_FLAT_FIELD_RAW')) + file.write(f"{path.raw}/{f}.fits IRD_FLAT_FIELD_RAW\n") file.close() # products - flat_file = 'flat_filt={0}'.format(cfilt) - bpm_file = 'flat_bpm_filt={0}'.format(cfilt) + flat_file = f'flat_filt={cfilt}' + bpm_file = f'flat_bpm_filt={cfilt}' # esorex parameters args = ['esorex', @@ -1055,8 +1034,8 @@ def sph_ird_cal_detector_flat(self, silent=True): '--no-datamd5=TRUE', 'sph_ird_instrument_flat', '--ird.instrument_flat.save_addprod=TRUE', - '--ird.instrument_flat.outfilename={0}/{1}.fits'.format(path.calib, flat_file), - '--ird.instrument_flat.badpixfilename={0}/{1}.fits'.format(path.calib, bpm_file), + f'--ird.instrument_flat.outfilename={path.calib}/{flat_file}.fits', + f'--ird.instrument_flat.badpixfilename={path.calib}/{bpm_file}.fits', str(sof)] # check esorex @@ -1066,7 +1045,7 @@ def sph_ird_cal_detector_flat(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -1189,7 +1168,7 @@ def sph_ird_preprocess_science(self, if fix_badpix: bpm_files = files_info[(files_info['PRO CATG'] == 'IRD_STATIC_BADPIXELMAP') | (files_info['PRO CATG'] == 'IRD_NON_LINEAR_BADPIXELMAP')].index - bpm_files = [path.calib / '{}.fits'.format(f) for f in bpm_files] + bpm_files = [path.calib / f'{f}.fits' for f in bpm_files] if len(bpm_files) == 0: self._logger.error('Could not fin any bad pixel maps') self._update_recipe_status('sph_ird_preprocess_science', sphere.ERROR) @@ -1208,10 +1187,10 @@ def sph_ird_preprocess_science(self, flat_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IRD_FLAT_FIELD') & 
(files_info['INS COMB IFLT'] == filter_comb)] if len(flat_file) != 1: - self._logger.error('There should be exactly 1 flat file. Found {0}.'.format(len(flat_file))) + self._logger.error(f'There should be exactly 1 flat file. Found {len(flat_file)}.') self._update_recipe_status('sph_ird_preprocess_science', sphere.ERROR) return - flat = fits.getdata(path.calib / '{}.fits'.format(flat_file.index[0])) + flat = fits.getdata(path.calib / f'{flat_file.index[0]}.fits') # final dataframe self._logger.debug('> create frames_info_preproc data frame') @@ -1232,7 +1211,7 @@ def sph_ird_preprocess_science(self, for DIT in sci_DITs: sfiles = sci_files[sci_files['DET SEQ1 DIT'].round(2) == DIT] - self._logger.info('{0} files of type {1} with DIT={2} sec'.format(len(sfiles), typ, DIT)) + self._logger.info(f'{len(sfiles)} files of type {typ} with DIT={DIT} sec') if subtract_background: # look for sky, then background, then darks @@ -1243,17 +1222,17 @@ def sph_ird_preprocess_science(self, (files_info['DPR TYPE'] == d) & (files_info['DET SEQ1 DIT'].round(2) == DIT)] if len(dfiles) != 0: break - self._logger.info(' ==> found {0} corresponding {1} file'.format(len(dfiles), d)) + self._logger.info(f' ==> found {len(dfiles)} corresponding {d} file') if len(dfiles) == 0: # issue a warning if absolutely no background is found self._logger.warning('No background has been found. Pre-processing will continue but data quality will likely be affected') bkg = np.zeros((1024, 2048)) elif len(dfiles) == 1: - bkg = fits.getdata(path.calib / '{}.fits'.format(dfiles.index[0])) + bkg = fits.getdata(path.calib / f'{dfiles.index[0]}.fits') elif len(dfiles) > 1: # FIXME: handle cases when multiple backgrounds are found? 
- self._logger.error('Unexpected number of background files ({0})'.format(len(dfiles))) + self._logger.error(f'Unexpected number of background files ({len(dfiles)})') self._update_recipe_status('sph_ird_preprocess_science', sphere.ERROR) return @@ -1262,11 +1241,11 @@ def sph_ird_preprocess_science(self, # frames_info extract finfo = frames_info.loc[(fname, slice(None)), :] - self._logger.info(' * file {0}/{1}: {2}, NDIT={3}'.format(idx+1, len(sfiles), fname, len(finfo))) + self._logger.info(f' * file {idx + 1}/{len(sfiles)}: {fname}, NDIT={len(finfo)}') # read data self._logger.info(' ==> read data') - img, hdr = fits.getdata(path.raw / '{}.fits'.format(fname), header=True) + img, hdr = fits.getdata(path.raw / f'{fname}.fits', header=True) # add extra dimension to single images to make cubes if img.ndim == 2: @@ -1298,7 +1277,7 @@ def sph_ird_preprocess_science(self, elif (typ == 'OBJECT'): if collapse_science: if collapse_type == 'mean': - self._logger.info(' ==> collapse: mean ({0} -> 1 frame, 0 dropped)'.format(len(img))) + self._logger.info(f' ==> collapse: mean ({len(img)} -> 1 frame, 0 dropped)') img = np.mean(img, axis=0, keepdims=True) frames_info_new = toolbox.collapse_frames_info(finfo, fname, true_north, 'mean', logger=self._logger) @@ -1314,11 +1293,11 @@ def sph_ird_preprocess_science(self, dropped = NDIT % coadd_value if coadd_value > NDIT: - self._logger.error('coadd_value ({0}) must be < NDIT ({1})'.format(coadd_value, NDIT)) + self._logger.error(f'coadd_value ({coadd_value}) must be < NDIT ({NDIT})') self._update_recipe_status('sph_ird_preprocess_science', sphere.ERROR) return - self._logger.info(' ==> collapse: coadd by {0} ({1} -> {2} frames, {3} dropped)'.format(coadd_value, NDIT, NDIT_new, dropped)) + self._logger.info(f' ==> collapse: coadd by {coadd_value} ({NDIT} -> {NDIT_new} frames, {dropped} dropped)') # coadd frames nimg = np.empty((NDIT_new, 1024, 2048), dtype=img.dtype) @@ -1328,7 +1307,7 @@ def sph_ird_preprocess_science(self, 
frames_info_new = toolbox.collapse_frames_info(finfo, fname, true_north, 'coadd', coadd_value=coadd_value, logger=self._logger) else: - self._logger.error('Unknown collapse type {0}'.format(collapse_type)) + self._logger.error(f'Unknown collapse type {collapse_type}') self._update_recipe_status('sph_ird_preprocess_science', sphere.ERROR) return else: @@ -1383,7 +1362,7 @@ def sph_ird_preprocess_science(self, for f in range(len(img)): frame = nimg[f, ...].squeeze() hdr['HIERARCH ESO DET NDIT'] = 1 - fits.writeto(path.preproc / '{}_DIT{:03d}_preproc.fits'.format(fname, f), frame, hdr, + fits.writeto(path.preproc / f'{fname}_DIT{f:03d}_preproc.fits', frame, hdr, overwrite=True, output_verify='silentfix') # sort and save final dataframe @@ -1453,16 +1432,16 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse flux_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,FLUX'] if len(flux_files) != 0: for file, idx in flux_files.index: - self._logger.info(' * OBJECT,FLUX: {0}'.format(file)) + self._logger.info(f' * OBJECT,FLUX: {file}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube, hdr = fits.getdata(path.preproc / '{}.fits'.format(fname), header=True) + fname = f'{file}_DIT{idx:03d}_preproc' + cube, hdr = fits.getdata(path.preproc / f'{fname}.fits', header=True) # centers if plot: - save_path = path.products / '{}_psf_fitting.pdf'.format(fname) + save_path = path.products / f'{fname}_psf_fitting.pdf' else: save_path = None img_center = toolbox.star_centers_from_PSF_img_cube(cube, wave, pixel, exclude_fraction=0.3, @@ -1471,18 +1450,18 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse # save self._logger.debug('> save centers') - fits.writeto(path.preproc / '{}_centers.fits'.format(fname), img_center, overwrite=True) + fits.writeto(path.preproc / f'{fname}_centers.fits', img_center, overwrite=True) # then OBJECT,CENTER starcen_files = 
frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] if len(starcen_files) != 0: for file, idx in starcen_files.index: - self._logger.info(' * OBJECT,CENTER: {0}'.format(file)) + self._logger.info(f' * OBJECT,CENTER: {file}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube, hdr = fits.getdata(path.preproc / '{}.fits'.format(fname), header=True) + fname = f'{file}_DIT{idx:03d}_preproc' + cube, hdr = fits.getdata(path.preproc / f'{fname}.fits', header=True) # coronagraph coro_name = starcen_files.loc[(file, idx), 'INS COMB ICOR'] @@ -1493,9 +1472,9 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse # centers waffle_orientation = hdr['HIERARCH ESO OCS WAFFLE ORIENT'] - self._logger.debug('> waffle orientation: {}'.format(waffle_orientation)) + self._logger.debug(f'> waffle orientation: {waffle_orientation}') if plot: - save_path = path.products / '{}_waffle_fitting.pdf'.format(fname) + save_path = path.products / f'{fname}_waffle_fitting.pdf' else: save_path = None spot_center, spot_dist, img_center \ @@ -1506,7 +1485,7 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse # save self._logger.debug('> save centers') - fits.writeto(path.preproc / '{}_centers.fits'.format(fname), img_center, overwrite=True) + fits.writeto(path.preproc / f'{fname}_centers.fits', img_center, overwrite=True) # recipe execution status self._update_recipe_status('sph_ird_star_center', sphere.SUCCESS) @@ -1516,7 +1495,8 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, offse def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_anamorphism=True, - shift_method='fft', manual_center=None, coarse_centering=False, save_scaled=False): + shift_method='fft', manual_center=None, center_selection='first', + coarse_centering=False, save_scaled=False): '''Combine and save the science data into final cubes All types of data are 
combined independently: PSFs @@ -1592,6 +1572,13 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a frames. This should be an array of either 2 or nwave*2 values. Default is None + center_selection : str + Specify which star center to use when multiple are + available. Possible values are first, last, and time. The + time option indicates to use the star center file that is + closest in time with respect to each science file. Default + is first + coarse_centering : bool Control if images are finely centered or not before being combined. However the images are still roughly centered by @@ -1607,9 +1594,8 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a save_scaled : bool Also save the wavelength-rescaled cubes. Makes the process much longer. The value of save_scaled is automatically set - to True when coarse_centering is set to True. The default + to False when coarse_centering is set to True. The default is False - ''' self._logger.info('Combine science data') @@ -1685,15 +1671,15 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # read and combine files for file_idx, (file, idx) in enumerate(flux_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(flux_files), file, idx)) + self._logger.info(f' ==> file {file_idx + 1}/{len(flux_files)}: {file}, DIT #{idx}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube = fits.getdata(path.preproc / '{}.fits'.format(fname)) + fname = f'{file}_DIT{idx:03d}_preproc' + cube = fits.getdata(path.preproc / f'{fname}.fits') self._logger.debug('> read centers') - cfile = path.preproc / '{}_centers.fits'.format(fname) + cfile = path.preproc / f'{fname}_centers.fits' if cfile.exists(): centers = fits.getdata(cfile) else: @@ -1717,7 +1703,7 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # center frames for 
wave_idx, img in enumerate(cube): - self._logger.debug('> wave {}'.format(wave_idx)) + self._logger.debug(f'> wave {wave_idx}') cx, cy = centers[wave_idx, :] self._logger.debug('> shift and normalize') @@ -1778,12 +1764,12 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # read and combine files for file_idx, (file, idx) in enumerate(starcen_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(starcen_files), file, idx)) + self._logger.info(f' ==> file {file_idx + 1}/{len(starcen_files)}: {file}, DIT #{idx}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube = fits.getdata(path.preproc / '{}.fits'.format(fname)) + fname = f'{file}_DIT{idx:03d}_preproc' + cube = fits.getdata(path.preproc / f'{fname}.fits') # use manual center if explicitely requested self._logger.debug('> read centers') @@ -1791,7 +1777,7 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a centers = manual_center else: # otherwise read center data - centers = fits.getdata(path.preproc / '{}_centers.fits'.format(fname)) + centers = fits.getdata(path.preproc / f'{fname}_centers.fits') # make sure we have only integers if user wants coarse centering if coarse_centering: @@ -1810,7 +1796,7 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # center frames for wave_idx, img in enumerate(cube): - self._logger.debug('> wave {}'.format(wave_idx)) + self._logger.debug(f'> wave {wave_idx}') cx, cy = centers[wave_idx, :] self._logger.debug('> shift and normalize') @@ -1860,40 +1846,6 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a dms_dx_ref = 0 dms_dy_ref = 0 - # use manual center if explicitely requested - self._logger.debug('> read centers') - if manual_center is not None: - centers = manual_center - else: - # otherwise, look whether we have an OBJECT,CENTER frame - - # FIXME: 
ticket #12. Use first DIT of first OBJECT,CENTER - # in the sequence, but it would be better to be able to - # select which CENTER to use - starcen_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] - if len(starcen_files) == 0: - self._logger.warning('No OBJECT,CENTER file in the dataset. Images will be centered using default center ({},{})'.format(*self._default_center)) - centers = self._default_center - else: - fname = '{0}_DIT{1:03d}_preproc_centers.fits'.format(starcen_files.index.values[0][0], starcen_files.index.values[0][1]) - fpath = path.preproc / fname - if fpath.exists(): - centers = fits.getdata(fpath) - - # Dithering Motion Stage for star center: value is in micron, - # and the pixel size is 18 micron - dms_dx_ref = starcen_files['INS1 PAC X'][0] / 18 - dms_dy_ref = starcen_files['INS1 PAC Y'][0] / 18 - else: - self._logger.warning('sph_ird_star_center() has not been executed. Images will be centered using default center ({},{})'.format(*self._default_center)) - centers = self._default_center - - # make sure we have only integers if user wants coarse centering - if coarse_centering: - centers = centers.astype(np.int) - dms_dx_ref = np.int(dms_dx_ref) - dms_dy_ref = np.int(dms_dy_ref) - # final center if cpix: cc = science_dim // 2 @@ -1909,12 +1861,57 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # read and combine files for file_idx, (file, idx) in enumerate(object_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(object_files), file, idx)) + self._logger.info(f' ==> file {file_idx + 1}/{len(object_files)}: {file}, DIT #{idx}') + + # use manual center if explicitely requested + self._logger.debug('> read centers') + if manual_center is not None: + centers = manual_center + else: + # otherwise, look whether we have an OBJECT,CENTER frame and select the one requested by user + starcen_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] + if 
len(starcen_files) == 0: + self._logger.warning('No OBJECT,CENTER file in the dataset. Images will be centered using default center ({},{})'.format(*self._default_center)) + centers = self._default_center + else: + # selection of the proper OBJECT,CENTER + center_selection = center_selection.lower() + if center_selection == 'first': + center_index = 0 + elif center_selection == 'last': + center_index = len(starcen_files.index.values)-1 + elif center_selection == 'time': + time_cen = starcen_files['DATE-OBS'] + time_sci = frames_info.loc[(file, idx), 'DATE-OBS'] + center_index = np.abs(time_sci - time_cen).argmin() + else: + self._logger.error(f'Unknown OBJECT,CENTER selection {center_selection}. Possible values are first, last, and time.') + self._update_recipe_status('sph_ird_combine_data', sphere.ERROR) + return + + fname = f'{starcen_files.index.values[center_index][0]}_DIT{starcen_files.index.values[center_index][1]:03d}_preproc_centers.fits' + fpath = path.preproc / fname + if fpath.exists(): + centers = fits.getdata(fpath) + + # Dithering Motion Stage for star center: value is in micron, + # and the pixel size is 18 micron + dms_dx_ref = starcen_files['INS1 PAC X'][0] / 18 + dms_dy_ref = starcen_files['INS1 PAC Y'][0] / 18 + else: + self._logger.warning('sph_ird_star_center() has not been executed. 
Images will be centered using default center ({},{})'.format(*self._default_center)) + centers = self._default_center + + # make sure we have only integers if user wants coarse centering + if coarse_centering: + centers = centers.astype(np.int) + dms_dx_ref = np.int(dms_dx_ref) + dms_dy_ref = np.int(dms_dy_ref) # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - files = list(path.preproc.glob('{}*.fits'.format(fname))) + fname = f'{file}_DIT{idx:03d}_preproc' + files = list(path.preproc.glob(f'{fname}*.fits')) cube = fits.getdata(files[0]) # neutral density @@ -1941,7 +1938,7 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a # center frames for wave_idx, img in enumerate(cube): - self._logger.debug('> wave {}'.format(wave_idx)) + self._logger.debug(f'> wave {wave_idx}') cx, cy = centers[wave_idx, :] # DMS contribution @@ -1990,7 +1987,7 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=290, correct_a self._status = sphere.COMPLETE - def sph_ird_clean(self, delete_raw=False, delete_products=False): + def sph_ird_clean(self, delete_raw=False, delete_products=False, delete_config=False): ''' Clean everything except for raw data and science products (by default) @@ -2001,6 +1998,9 @@ def sph_ird_clean(self, delete_raw=False, delete_products=False): delete_products : bool Delete science products. Default is False + + delete_config : bool + Delete configuration file. 
Default is False ''' self._logger.info('Clean reduction data') @@ -2013,6 +2013,10 @@ def sph_ird_clean(self, delete_raw=False, delete_products=False): # remove sub-directories self.path.remove(delete_raw=delete_raw, delete_products=delete_products, logger=self._logger) + # remove config + if delete_config: + self.config._file.unlink() + # recipe execution status self._update_recipe_status('sph_ird_clean', sphere.SUCCESS) diff --git a/sphere/IRDIS/SpectroReduction.py b/sphere/IRDIS/SpectroReduction.py index e7cdac0..fa9d8c6 100644 --- a/sphere/IRDIS/SpectroReduction.py +++ b/sphere/IRDIS/SpectroReduction.py @@ -22,8 +22,8 @@ import sphere.utils as utils import sphere.utils.imutils as imutils import sphere.utils.aperture as aperture -import sphere.transmission as transmission -import sphere.toolbox as toolbox +import sphere.utils.toolbox as toolbox +import sphere.utils.transmission as transmission _log = logging.getLogger(__name__) @@ -103,7 +103,7 @@ class SpectroReduction(object): # Constructor ################################################## - def __new__(cls, path, log_level='info', sphere_handler=None): + def __new__(cls, path, clean_start=True, log_level='info', user_config=None, sphere_handler=None): '''Custom instantiation for the class and initialization for the instances @@ -117,9 +117,17 @@ def __new__(cls, path, log_level='info', sphere_handler=None): path : str Path to the directory containing the dataset - level : {'debug', 'info', 'warning', 'error', 'critical'} + clean_start : bool + Remove all results from previous reductions for a clean start. + Default is True + + log_level : {'debug', 'info', 'warning', 'error', 'critical'} The log level of the handler + user_config : str + Path to a user-provided configuration. Default is None, i.e. 
the + reduction will use the package default configuration parameters + sphere_handler : log handler Higher-level SPHERE.Dataset log handler @@ -135,7 +143,7 @@ def __new__(cls, path, log_level='info', sphere_handler=None): # zeroth-order reduction validation raw = path / 'raw' if not raw.exists(): - _log.error('No raw/ subdirectory. {0} is not a valid reduction path'.format(path)) + _log.error(f'No raw/ subdirectory. {path} is not a valid reduction path') return None else: # it's all good: create instance! @@ -172,44 +180,60 @@ def __new__(cls, path, log_level='info', sphere_handler=None): reduction._logger = logger - reduction._logger.info('Creating IRDIS spectroscopy reduction at path {}'.format(path)) + reduction._logger.info(f'Creating IRDIS spectroscopy reduction at path {path}') + # + # clean start + # + if clean_start: + reduction._logger.info('Erase outputs of previous reduction for a clean start') + reduction._path.remove(delete_raw=False, delete_products=True, logger=reduction._logger) + config_file = reduction._path.root / 'reduction_config.ini' + if config_file.exists(): + config_file.unlink() + # # configuration # configfile = f'{Path(sphere.__file__).parent}/instruments/{reduction._instrument}.ini' - config = configparser.ConfigParser() + cfgparser = configparser.ConfigParser() reduction._logger.debug('> read configuration') - config.read(configfile) + cfgparser.read(configfile) # instrument - reduction._pixel = float(config.get('instrument', 'pixel')) + reduction._pixel = float(cfgparser.get('instrument', 'pixel')) reduction._nwave = -1 # calibration - reduction._wave_cal_lasers = np.array(eval(config.get('calibration', 'wave_cal_lasers'))) + reduction._wave_cal_lasers = np.array(eval(cfgparser.get('calibration', 'wave_cal_lasers'))) # spectro calibration - reduction._default_center_lrs = np.array(eval(config.get('calibration-spectro', 'default_center_lrs'))) - reduction._wave_min_lrs = eval(config.get('calibration-spectro', 'wave_min_lrs')) - 
reduction._wave_max_lrs = eval(config.get('calibration-spectro', 'wave_max_lrs')) + reduction._default_center_lrs = np.array(eval(cfgparser.get('calibration-spectro', 'default_center_lrs'))) + reduction._wave_min_lrs = eval(cfgparser.get('calibration-spectro', 'wave_min_lrs')) + reduction._wave_max_lrs = eval(cfgparser.get('calibration-spectro', 'wave_max_lrs')) - reduction._default_center_mrs = np.array(eval(config.get('calibration-spectro', 'default_center_mrs'))) - reduction._wave_min_mrs = eval(config.get('calibration-spectro', 'wave_min_mrs')) - reduction._wave_max_mrs = eval(config.get('calibration-spectro', 'wave_max_mrs')) + reduction._default_center_mrs = np.array(eval(cfgparser.get('calibration-spectro', 'default_center_mrs'))) + reduction._wave_min_mrs = eval(cfgparser.get('calibration-spectro', 'wave_min_mrs')) + reduction._wave_max_mrs = eval(cfgparser.get('calibration-spectro', 'wave_max_mrs')) # reduction parameters - reduction._config = {} + cfg = {} for group in ['reduction', 'reduction-spectro']: - items = dict(config.items(group)) - reduction._config.update(items) + items = dict(cfgparser.items(group)) for key, value in items.items(): try: val = eval(value) except NameError: val = value - reduction._config[key] = val + cfg[key] = val + reduction._config = utils.Configuration(reduction._path, reduction._logger, cfg) + + # load user-provided default configuration parameters + if user_config: + user_config = Path(user_config).expanduser() + + reduction._config.load_from_file(user_config) # # reduction and recipes status @@ -233,7 +257,7 @@ def __new__(cls, path, log_level='info', sphere_handler=None): ################################################## def __repr__(self): - return ''.format(self._instrument, self._mode, self._path, self.loglevel) + return f'' def __format__(self): return self.__repr__() @@ -298,62 +322,6 @@ def mode(self): # Generic class methods ################################################## - def show_config(self): - ''' - 
Shows the reduction configuration - ''' - - # dictionary - dico = self.config - - # misc parameters - print() - print('{0:<30s}{1}'.format('Parameter', 'Value')) - print('-'*35) - keys = [key for key in dico if key.startswith('misc')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # calibrations - print('-'*35) - keys = [key for key in dico if key.startswith('cal')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # pre-processing - print('-'*35) - keys = [key for key in dico if key.startswith('preproc')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # centring - print('-'*35) - keys = [key for key in dico if key.startswith('center')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # wave - print('-'*35) - keys = [key for key in dico if key.startswith('wave')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # combining - print('-'*35) - keys = [key for key in dico if key.startswith('combine')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # clean - print('-'*35) - keys = [key for key in dico if key.startswith('clean')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - print('-'*35) - - print() - - def init_reduction(self): ''' Sort files and frames, perform sanity check @@ -420,6 +388,7 @@ def process_science(self): split_posang=config['combine_split_posang'], shift_method=config['combine_shift_method'], manual_center=config['combine_manual_center'], + center_selection=config['combine_center_selection'], coarse_centering=config['combine_coarse_centering']) def clean(self): @@ -434,7 +403,8 @@ def clean(self): if config['clean']: self.sph_ird_clean(delete_raw=config['clean_delete_raw'], - delete_products=config['clean_delete_products']) + delete_products=config['clean_delete_products'], + delete_config=config['clean_delete_config']) def full_reduction(self): @@ -477,6 +447,9 @@ def _read_info(self): # path path = 
self.path + # load existing configuration + self.config.load() + # files info fname = path.preproc / 'files.csv' if fname.exists(): @@ -548,33 +521,33 @@ def _read_info(self): done = (path.preproc / 'wavelength_default.fits').exists() if done: self._update_recipe_status('sph_ird_cal_wave', sphere.SUCCESS) - self._logger.debug('> sph_ird_cal_wave status = {}'.format(done)) + self._logger.debug(f'> sph_ird_cal_wave status = {done}') done = (path.preproc / 'wavelength_recalibrated.fits').exists() if done: self._update_recipe_status('sph_ird_wavelength_recalibration', sphere.SUCCESS) - self._logger.debug('> sph_ird_wavelength_recalibration status = {}'.format(done)) + self._logger.debug(f'> sph_ird_wavelength_recalibration status = {done}') done = True files = frames_info_preproc.index for file, idx in files: - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - file = list(path.preproc.glob('{}.fits'.format(fname))) + fname = f'{file}_DIT{idx:03d}_preproc' + file = list(path.preproc.glob(f'{fname}.fits')) done = done and (len(file) == 1) if done: self._update_recipe_status('sph_ird_preprocess_science', sphere.SUCCESS) - self._logger.debug('> sph_ird_preprocess_science status = {}'.format(done)) + self._logger.debug(f'> sph_ird_preprocess_science status = {done}') done = True files = frames_info_preproc[(frames_info_preproc['DPR TYPE'] == 'OBJECT,FLUX') | (frames_info_preproc['DPR TYPE'] == 'OBJECT,CENTER')].index for file, idx in files: - fname = '{0}_DIT{1:03d}_preproc_centers'.format(file, idx) - file = list(path.preproc.glob('{}.fits'.format(fname))) + fname = f'{file}_DIT{idx:03d}_preproc_centers' + file = list(path.preproc.glob(f'{fname}.fits')) done = done and (len(file) == 1) if done: self._update_recipe_status('sph_ird_star_center', sphere.SUCCESS) - self._logger.debug('> sph_ird_star_center status = {}'.format(done)) + self._logger.debug(f'> sph_ird_star_center status = {done}') # reduction status self._status = sphere.INCOMPLETE @@ -627,7 +600,7 @@ def 
sort_files(self): self._status = sphere.FATAL return - self._logger.info(' * found {0} raw FITS files'.format(len(files))) + self._logger.info(f' * found {len(files)} raw FITS files') # read list of keywords self._logger.debug('> read keyword list') @@ -654,7 +627,7 @@ def sort_files(self): self._logger.debug('> read FITS keywords') for f in files: - hdu = fits.open(path.raw / '{}.fits'.format(f)) + hdu = fits.open(path.raw / f'{f}.fits') hdr = hdu[0].header for k, sk in zip(keywords, keywords_short): @@ -681,7 +654,7 @@ def sort_files(self): # check instruments instru = files_info['SEQ ARM'].unique() if len(instru) != 1: - self._logger.critical('Sequence is mixing different instruments: {0}'.format(instru)) + self._logger.critical(f'Sequence is mixing different instruments: {instru}') self._update_recipe_status('sort_files', sphere.ERROR) self._status = sphere.FATAL return @@ -789,7 +762,7 @@ def sort_frames(self): ra_drot_h = np.floor(ra_drot/1e4) ra_drot_m = np.floor((ra_drot - ra_drot_h*1e4)/1e2) ra_drot_s = ra_drot - ra_drot_h*1e4 - ra_drot_m*1e2 - RA = '{:02.0f}:{:02.0f}:{:02.3f}'.format(ra_drot_h, ra_drot_m, ra_drot_s) + RA = f'{ra_drot_h:02.0f}:{ra_drot_m:02.0f}:{ra_drot_s:02.3f}' dec_drot = cinfo['INS4 DROT2 DEC'][0] sign = np.sign(dec_drot) @@ -798,33 +771,34 @@ def sort_frames(self): dec_drot_m = np.floor((udec_drot - dec_drot_d*1e4)/1e2) dec_drot_s = udec_drot - dec_drot_d*1e4 - dec_drot_m*1e2 dec_drot_d *= sign - DEC = '{:02.0f}:{:02.0f}:{:02.2f}'.format(dec_drot_d, dec_drot_m, dec_drot_s) + DEC = f'{dec_drot_d:02.0f}:{dec_drot_m:02.0f}:{dec_drot_s:02.2f}' pa_start = cinfo['PARANG'][0] pa_end = cinfo['PARANG'][-1] - posang = cinfo['INS4 DROT2 POSANG'].unique() - + posang = cinfo['INS4 DROT2 POSANG'].unique() + posangs = [f'{p:.2f}°' for p in posang] + date = str(cinfo['DATE'][0])[0:10] - self._logger.info(' * Programme ID: {0}'.format(cinfo['OBS PROG ID'][0])) - self._logger.info(' * OB name: {0}'.format(cinfo['OBS NAME'][0])) - self._logger.info(' * 
OB ID: {0}'.format(cinfo['OBS ID'][0])) - self._logger.info(' * Object: {0}'.format(cinfo['OBJECT'][0])) - self._logger.info(' * RA / DEC: {0} / {1}'.format(RA, DEC)) - self._logger.info(' * Date: {0}'.format(date)) - self._logger.info(' * Instrument: {0}'.format(cinfo['SEQ ARM'][0])) - self._logger.info(' * Derotator: {0}'.format(cinfo['INS4 DROT2 MODE'][0])) - self._logger.info(' * VIS WFS mode: {0}'.format(cinfo['AOS VISWFS MODE'][0])) - self._logger.info(' * IR WFS mode: {0}'.format(cinfo['AOS IRWFS MODE'][0])) - self._logger.info(' * Coronagraph: {0}'.format(cinfo['INS COMB ICOR'][0])) - self._logger.info(' * Mode: {0}'.format(cinfo['INS1 MODE'][0])) - self._logger.info(' * Filter: {0}'.format(cinfo['INS COMB IFLT'][0])) - self._logger.info(' * DIT: {0:.2f} sec'.format(cinfo['DET SEQ1 DIT'][0])) - self._logger.info(' * NDIT: {0:.0f}'.format(cinfo['DET NDIT'][0])) - self._logger.info(' * Texp: {0:.2f} min'.format(cinfo['DET SEQ1 DIT'].sum()/60)) - self._logger.info(' * PA: {0:.2f}° ==> {1:.2f}° = {2:.2f}°'.format(pa_start, pa_end, np.abs(pa_end-pa_start))) - self._logger.info(' * POSANG: {0}'.format(', '.join(['{:.2f}°'.format(p) for p in posang]))) + self._logger.info(f" * Programme ID: {cinfo['OBS PROG ID'][0]}") + self._logger.info(f" * OB name: {cinfo['OBS NAME'][0]}") + self._logger.info(f" * OB ID: {cinfo['OBS ID'][0]}") + self._logger.info(f" * Object: {cinfo['OBJECT'][0]}") + self._logger.info(f' * RA / DEC: {RA} / {DEC}') + self._logger.info(f' * Date: {date}') + self._logger.info(f" * Instrument: {cinfo['SEQ ARM'][0]}") + self._logger.info(f" * Derotator: {cinfo['INS4 DROT2 MODE'][0]}") + self._logger.info(f" * VIS WFS mode: {cinfo['AOS VISWFS MODE'][0]}") + self._logger.info(f" * IR WFS mode: {cinfo['AOS IRWFS MODE'][0]}") + self._logger.info(f" * Coronagraph: {cinfo['INS COMB ICOR'][0]}") + self._logger.info(f" * Mode: {cinfo['INS1 MODE'][0]}") + self._logger.info(f" * Filter: {cinfo['INS COMB IFLT'][0]}") + self._logger.info(f" * DIT: {cinfo['DET 
SEQ1 DIT'][0]:.2f} sec") + self._logger.info(f" * NDIT: {cinfo['DET NDIT'][0]:.0f}") + self._logger.info(f" * Texp: {cinfo['DET SEQ1 DIT'].sum() / 60:.2f} min") + self._logger.info(f' * PA: {pa_start:.2f}° ==> {pa_end:.2f}° = {np.abs(pa_end - pa_start):.2f}°') + self._logger.info(f" * POSANG: {', '.join(posangs)}") # update recipe execution self._update_recipe_status('sort_frames', sphere.SUCCESS) @@ -855,26 +829,26 @@ def check_files_association(self): # instrument arm arm = files_info['SEQ ARM'].unique() if len(arm) != 1: - self._logger.error('Sequence is mixing different instruments: {0}'.format(arm)) + self._logger.error(f'Sequence is mixing different instruments: {arm}') self._update_recipe_status('check_files_association', sphere.ERROR) return # IRDIS obs mode and filter combination modes = files_info.loc[files_info['DPR CATG'] == 'SCIENCE', 'INS1 MODE'].unique() if len(modes) != 1: - self._logger.eror('Sequence is mixing different types of observations: {0}'.format(modes)) + self._logger.error(f'Sequence is mixing different types of observations: {modes}') self._update_recipe_status('check_files_association', sphere.ERROR) return filter_combs = files_info.loc[files_info['DPR CATG'] == 'SCIENCE', 'INS COMB IFLT'].unique() if len(filter_combs) != 1: - self._logger.error('Sequence is mixing different types of filters combinations: {0}'.format(filter_combs)) + self._logger.error(f'Sequence is mixing different types of filters combinations: {filter_combs}') self._update_recipe_status('check_files_association', sphere.ERROR) return filter_comb = filter_combs[0] if (filter_comb != 'S_LR') and (filter_comb != 'S_MR'): - self._logger.error('Unknown IRDIS-LSS filter combination/mode {0}'.format(filter_comb)) + self._logger.error(f'Unknown IRDIS-LSS filter combination/mode {filter_comb}') self._update_recipe_status('check_files_association', sphere.ERROR) return @@ -895,7 +869,7 @@ def check_files_association(self): cfiles = calibs[(calibs['DPR TYPE'] == 'FLAT,LAMP') &
(calibs['INS COMB IFLT'] == filter_comb)] if len(cfiles) <= 1: error_flag += 1 - self._logger.error(' * there should be more than 1 flat in filter combination {0}'.format(filter_comb)) + self._logger.error(f' * there should be more than 1 flat in filter combination {filter_comb}') # wave self._logger.debug('> check wavelength calibration requirements') @@ -905,7 +879,7 @@ def check_files_association(self): self._logger.error(' * there should be 1 wavelength calibration file, found none.') elif len(cfiles) > 1: warning_flag += 1 - self._logger.warning(' * there should be 1 wavelength calibration file, found {0}. Using the closest from science.'.format(len(cfiles))) + self._logger.warning(f' * there should be 1 wavelength calibration file, found {len(cfiles)}. Using the closest from science.') # find the two closest to science files sci_files = files_info[(files_info['DPR CATG'] == 'SCIENCE')] @@ -924,7 +898,7 @@ def check_files_association(self): (calibs['DET SEQ1 DIT'].round(2) == wavecal_DIT)] if len(cfiles) == 0: error_flag += 1 - self._logger.warning(' * there is no dark/background for the wavelength calibration (DIT={0:.1f} sec). It is *highly recommended* to include one to obtain the best data reduction. A single dark/background file is sufficient, and it can easily be downloaded from the ESO archive'.format(wavecal_DIT)) + self._logger.error(f' * there is no dark/background for the wavelength calibration (DIT={wavecal_DIT:.1f} sec). It is mandatory to include one to obtain the best data reduction. A single dark/background file is sufficient, and it can easily be downloaded from the ESO archive') ################################################## # static calibrations that depend on science DIT @@ -942,22 +916,22 @@ def check_files_association(self): (calibs['DET SEQ1 DIT'].round(2) == DIT)] if len(cfiles) == 0: warning_flag += 1 - self._logger.warning(' * there is no dark/background for science files with DIT={0} sec. 
It is *highly recommended* to include one to obtain the best data reduction. A single dark/background file is sufficient, and it can easily be downloaded from the ESO archive'.format(DIT)) + self._logger.warning(f' * there is no dark/background for science files with DIT={DIT} sec. It is *highly recommended* to include one to obtain the best data reduction. A single dark/background file is sufficient, and it can easily be downloaded from the ESO archive') # sky backgrounds cfiles = files_info[(files_info['DPR TYPE'] == 'SKY') & (files_info['DET SEQ1 DIT'].round(2) == DIT)] if len(cfiles) == 0: warning_flag += 1 - self._logger.warning(' * there is no sky background for science files with DIT={0} sec. Using a sky background instead of an internal instrumental background can usually provide a cleaner data reduction'.format(DIT)) + self._logger.warning(f' * there is no sky background for science files with DIT={DIT} sec. Using a sky background instead of an internal instrumental background can usually provide a cleaner data reduction') # error reporting self._logger.debug('> report status') if error_flag: - self._logger.error('There are {0} warning(s) and {1} error(s) in the classification of files'.format(warning_flag, error_flag)) + self._logger.error(f'There are {warning_flag} warning(s) and {error_flag} error(s) in the classification of files') self._update_recipe_status('check_files_association', sphere.ERROR) return else: - self._logger.warning('There are {0} warning(s) and {1} error(s) in the classification of files'.format(warning_flag, error_flag)) + self._logger.warning(f'There are {warning_flag} warning(s) and {error_flag} error(s) in the classification of files') # save files_info.to_csv(path.preproc / 'files.csv') @@ -1013,14 +987,14 @@ def sph_ird_cal_dark(self, silent=True): if len(cfiles) == 0: continue - self._logger.info(' * {0} in filter {1} with DIT={2:.2f} sec ({3} files)'.format(ctype, cfilt, DIT, len(cfiles))) + self._logger.info(f' * {ctype} in 
filter {cfilt} with DIT={DIT:.2f} sec ({len(cfiles)} files)') # create sof self._logger.debug('> create sof file') - sof = path.sof / 'dark_filt={0}_DIT={1:.2f}.sof'.format(cfilt, DIT) + sof = path.sof / f'dark_filt={cfilt}_DIT={DIT:.2f}.sof' file = open(sof, 'w') for f in files: - file.write('{0}/{1}.fits {2}\n'.format(path.raw, f, 'IRD_DARK_RAW')) + file.write(f"{path.raw}/{f}.fits IRD_DARK_RAW\n") file.close() # products @@ -1028,8 +1002,8 @@ def sph_ird_cal_dark(self, silent=True): loc = 'sky' else: loc = 'internal' - dark_file = 'dark_{0}_filt={1}_DIT={2:.2f}'.format(loc, cfilt, DIT) - bpm_file = 'dark_{0}_bpm_filt={1}_DIT={2:.2f}'.format(loc, cfilt, DIT) + dark_file = f'dark_{loc}_filt={cfilt}_DIT={DIT:.2f}' + bpm_file = f'dark_{loc}_bpm_filt={cfilt}_DIT={DIT:.2f}' # different max level in LRS max_level = 1000 @@ -1043,9 +1017,9 @@ def sph_ird_cal_dark(self, silent=True): 'sph_ird_master_dark', '--ird.master_dark.sigma_clip=5.0', '--ird.master_dark.save_addprod=TRUE', - '--ird.master_dark.max_acceptable={0}'.format(max_level), - '--ird.master_dark.outfilename={0}/{1}.fits'.format(path.calib, dark_file), - '--ird.master_dark.badpixfilename={0}/{1}.fits'.format(path.calib, bpm_file), + f'--ird.master_dark.max_acceptable={max_level}', + f'--ird.master_dark.outfilename={path.calib}/{dark_file}.fits', + f'--ird.master_dark.badpixfilename={path.calib}/{bpm_file}.fits', str(sof)] # check esorex @@ -1055,7 +1029,7 @@ def sph_ird_cal_dark(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -1139,19 +1113,19 @@ def sph_ird_cal_detector_flat(self, silent=True): cfiles = calibs[calibs['INS COMB IFLT'] == cfilt] files = cfiles.index - self._logger.info(' * filter {0} ({1} files)'.format(cfilt, len(cfiles))) + self._logger.info(f' * filter {cfilt} ({len(cfiles)} files)') # create sof 
self._logger.debug('> create sof file') - sof = path.sof / 'flat_filt={0}.sof'.format(cfilt) + sof = path.sof / f'flat_filt={cfilt}.sof' file = open(sof, 'w') for f in files: - file.write('{0}/{1}.fits {2}\n'.format(path.raw, f, 'IRD_FLAT_FIELD_RAW')) + file.write(f"{path.raw}/{f}.fits IRD_FLAT_FIELD_RAW\n") file.close() # products - flat_file = 'flat_filt={0}'.format(cfilt) - bpm_file = 'flat_bpm_filt={0}'.format(cfilt) + flat_file = f'flat_filt={cfilt}' + bpm_file = f'flat_bpm_filt={cfilt}' # esorex parameters args = ['esorex', @@ -1159,8 +1133,8 @@ def sph_ird_cal_detector_flat(self, silent=True): '--no-datamd5=TRUE', 'sph_ird_instrument_flat', '--ird.instrument_flat.save_addprod=TRUE', - '--ird.instrument_flat.outfilename={0}/{1}.fits'.format(path.calib, flat_file), - '--ird.instrument_flat.badpixfilename={0}/{1}.fits'.format(path.calib, bpm_file), + f'--ird.instrument_flat.outfilename={path.calib}/{flat_file}.fits', + f'--ird.instrument_flat.badpixfilename={path.calib}/{bpm_file}.fits', str(sof)] # check esorex @@ -1170,7 +1144,7 @@ def sph_ird_cal_detector_flat(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -1239,7 +1213,7 @@ def sph_ird_cal_wave(self, silent=True): # get list of files wave_file = files_info[np.logical_not(files_info['PROCESSED']) & (files_info['DPR TYPE'] == 'LAMP,WAVE')] if len(wave_file) != 1: - self._logger.error('There should be exactly 1 raw wavelength calibration file. Found {0}.'.format(len(wave_file))) + self._logger.error(f'There should be exactly 1 raw wavelength calibration file. 
Found {len(wave_file)}.') self._update_recipe_status('sph_ird_cal_wave', sphere.ERROR) return @@ -1271,16 +1245,16 @@ def sph_ird_cal_wave(self, silent=True): wave_lasers = self._wave_cal_lasers # esorex parameters - self._logger.debug('> filter combination is {}'.format(filter_comb)) + self._logger.debug(f'> filter combination is {filter_comb}') if filter_comb == 'S_LR': # create standard sof in LRS self._logger.debug('> create sof file') sof = path.sof / 'wave.sof' file = open(sof, 'w') - file.write('{0}/{1}.fits {2}\n'.format(path.raw, wave_file.index[0], 'IRD_WAVECALIB_RAW')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, dark_file.index[0], 'IRD_MASTER_DARK')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_file.index[0], 'IRD_FLAT_FIELD')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, bpm_file.index[0], 'IRD_STATIC_BADPIXELMAP')) + file.write(f"{path.raw}/{wave_file.index[0]}.fits IRD_WAVECALIB_RAW\n") + file.write(f"{path.calib}/{dark_file.index[0]}.fits IRD_MASTER_DARK\n") + file.write(f"{path.calib}/{flat_file.index[0]}.fits IRD_FLAT_FIELD\n") + file.write(f"{path.calib}/{bpm_file.index[0]}.fits IRD_STATIC_BADPIXELMAP\n") file.close() args = ['esorex', @@ -1291,32 +1265,32 @@ def sph_ird_cal_wave(self, silent=True): '--ird.wave_calib.grism_mode=FALSE', '--ird.wave_calib.threshold=1000', '--ird.wave_calib.number_lines=6', - '--ird.wave_calib.wavelength_line1={:.2f}'.format(wave_lasers[0]), - '--ird.wave_calib.wavelength_line2={:.2f}'.format(wave_lasers[1]), - '--ird.wave_calib.wavelength_line3={:.2f}'.format(wave_lasers[2]), - '--ird.wave_calib.wavelength_line4={:.2f}'.format(wave_lasers[3]), - '--ird.wave_calib.wavelength_line5={:.2f}'.format(wave_lasers[4]), - '--ird.wave_calib.wavelength_line6={:.2f}'.format(wave_lasers[5]), - '--ird.wave_calib.outfilename={0}/{1}.fits'.format(path.calib, wav_file), + f'--ird.wave_calib.wavelength_line1={wave_lasers[0]:.2f}', + f'--ird.wave_calib.wavelength_line2={wave_lasers[1]:.2f}', + 
f'--ird.wave_calib.wavelength_line3={wave_lasers[2]:.2f}', + f'--ird.wave_calib.wavelength_line4={wave_lasers[3]:.2f}', + f'--ird.wave_calib.wavelength_line5={wave_lasers[4]:.2f}', + f'--ird.wave_calib.wavelength_line6={wave_lasers[5]:.2f}', + f'--ird.wave_calib.outfilename={path.calib}/{wav_file}.fits', str(sof)] elif filter_comb == 'S_MR': # masking of second order spectrum in MRS self._logger.debug('> masking second order') wave_fname = wave_file.index[0] - wave_data, hdr = fits.getdata(path.raw / '{}.fits'.format(wave_fname), header=True) + wave_data, hdr = fits.getdata(path.raw / f'{wave_fname}.fits', header=True) wave_data = wave_data.squeeze() wave_data[:60, :] = 0 - fits.writeto(path.preproc / '{}_masked.fits'.format(wave_fname), wave_data, hdr, overwrite=True, + fits.writeto(path.preproc / f'{wave_fname}_masked.fits', wave_data, hdr, overwrite=True, output_verify='silentfix') # create sof using the masked file self._logger.debug('> create sof file') sof = path.sof / 'wave.sof' file = open(sof, 'w') - file.write('{0}/{1}_masked.fits {2}\n'.format(path.preproc, wave_fname, 'IRD_WAVECALIB_RAW')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, dark_file.index[0], 'IRD_MASTER_DARK')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, flat_file.index[0], 'IRD_FLAT_FIELD')) - file.write('{0}/{1}.fits {2}\n'.format(path.calib, bpm_file.index[0], 'IRD_STATIC_BADPIXELMAP')) + file.write(f"{path.preproc}/{wave_fname}_masked.fits IRD_WAVECALIB_RAW\n") + file.write(f"{path.calib}/{dark_file.index[0]}.fits IRD_MASTER_DARK\n") + file.write(f"{path.calib}/{flat_file.index[0]}.fits IRD_FLAT_FIELD\n") + file.write(f"{path.calib}/{bpm_file.index[0]}.fits IRD_STATIC_BADPIXELMAP\n") file.close() args = ['esorex', @@ -1327,12 +1301,12 @@ def sph_ird_cal_wave(self, silent=True): '--ird.wave_calib.grism_mode=TRUE', '--ird.wave_calib.threshold=1000', '--ird.wave_calib.number_lines=5', - '--ird.wave_calib.wavelength_line1={:.2f}'.format(wave_lasers[0]), - 
'--ird.wave_calib.wavelength_line2={:.2f}'.format(wave_lasers[1]), - '--ird.wave_calib.wavelength_line3={:.2f}'.format(wave_lasers[2]), - '--ird.wave_calib.wavelength_line4={:.2f}'.format(wave_lasers[3]), - '--ird.wave_calib.wavelength_line5={:.2f}'.format(wave_lasers[4]), - '--ird.wave_calib.outfilename={0}/{1}.fits'.format(path.calib, wav_file), + f'--ird.wave_calib.wavelength_line1={wave_lasers[0]:.2f}', + f'--ird.wave_calib.wavelength_line2={wave_lasers[1]:.2f}', + f'--ird.wave_calib.wavelength_line3={wave_lasers[2]:.2f}', + f'--ird.wave_calib.wavelength_line4={wave_lasers[3]:.2f}', + f'--ird.wave_calib.wavelength_line5={wave_lasers[4]:.2f}', + f'--ird.wave_calib.outfilename={path.calib}/{wav_file}.fits', str(sof)] # check esorex @@ -1342,7 +1316,7 @@ def sph_ird_cal_wave(self, silent=True): return # execute esorex - self._logger.debug('> execute {}'.format(' '.join(args))) + self._logger.debug(f"> execute {' '.join(args)}") if silent: proc = subprocess.run(args, cwd=path.tmp, stdout=subprocess.DEVNULL) else: @@ -1381,7 +1355,7 @@ def sph_ird_cal_wave(self, silent=True): wave_min = self._wave_min_mrs wave_max = self._wave_max_mrs - wave_calib = fits.getdata(path.calib / '{}.fits'.format(wav_file)) + wave_calib = fits.getdata(path.calib / f'{wav_file}.fits') wave_lin = get_wavelength_calibration(filter_comb, wave_calib, centers, wave_min, wave_max) self._logger.debug('> save default wavelength calibration') @@ -1457,7 +1431,7 @@ def sph_ird_preprocess_science(self, if fix_badpix: bpm_files = files_info[(files_info['PRO CATG'] == 'IRD_STATIC_BADPIXELMAP') | (files_info['PRO CATG'] == 'IRD_NON_LINEAR_BADPIXELMAP')].index - bpm_files = [path.calib / '{}.fits'.format(f) for f in bpm_files] + bpm_files = [path.calib / f'{f}.fits' for f in bpm_files] if len(bpm_files) == 0: self._logger.error('Could not fin any bad pixel maps') self._update_recipe_status('sph_ird_preprocess_science', sphere.ERROR) @@ -1476,10 +1450,10 @@ def sph_ird_preprocess_science(self, flat_file 
= files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IRD_FLAT_FIELD') & (files_info['INS COMB IFLT'] == filter_comb)] if len(flat_file) != 1: - self._logger.error('There should be exactly 1 flat file. Found {0}.'.format(len(flat_file))) + self._logger.error(f'There should be exactly 1 flat file. Found {len(flat_file)}.') self._update_recipe_status('sph_ird_preprocess_science', sphere.ERROR) return - flat = fits.getdata(path.calib / '{}.fits'.format(flat_file.index[0])) + flat = fits.getdata(path.calib / f'{flat_file.index[0]}.fits') # final dataframe self._logger.debug('> create frames_info_preproc data frame') @@ -1500,7 +1474,7 @@ def sph_ird_preprocess_science(self, for DIT in sci_DITs: sfiles = sci_files[sci_files['DET SEQ1 DIT'].round(2) == DIT] - self._logger.info('{0} files of type {1} with DIT={2} sec'.format(len(sfiles), typ, DIT)) + self._logger.info(f'{len(sfiles)} files of type {typ} with DIT={DIT} sec') if subtract_background: # look for sky, then background, then darks @@ -1511,17 +1485,17 @@ def sph_ird_preprocess_science(self, (files_info['DPR TYPE'] == d) & (files_info['DET SEQ1 DIT'].round(2) == DIT)] if len(dfiles) != 0: break - self._logger.info(' ==> found {0} corresponding {1} file'.format(len(dfiles), d)) + self._logger.info(f' ==> found {len(dfiles)} corresponding {d} file') if len(dfiles) == 0: # issue a warning if absolutely no background is found self._logger.warning('No background has been found. Pre-processing will continue but data quality will likely be affected') bkg = np.zeros((1024, 2048)) elif len(dfiles) == 1: - bkg = fits.getdata(path.calib / '{}.fits'.format(dfiles.index[0])) + bkg = fits.getdata(path.calib / f'{dfiles.index[0]}.fits') elif len(dfiles) > 1: # FIXME: handle cases when multiple backgrounds are found? 
- self._logger.error('Unexpected number of background files ({0})'.format(len(dfiles))) + self._logger.error(f'Unexpected number of background files ({len(dfiles)})') self._update_recipe_status('sph_ird_preprocess_science', sphere.ERROR) return @@ -1530,11 +1504,11 @@ def sph_ird_preprocess_science(self, # frames_info extract finfo = frames_info.loc[(fname, slice(None)), :] - self._logger.info(' * file {0}/{1}: {2}, NDIT={3}'.format(idx+1, len(sfiles), fname, len(finfo))) + self._logger.info(f' * file {idx + 1}/{len(sfiles)}: {fname}, NDIT={len(finfo)}') # read data self._logger.info(' ==> read data') - img, hdr = fits.getdata(path.raw / '{}.fits'.format(fname), header=True) + img, hdr = fits.getdata(path.raw / f'{fname}.fits', header=True) # add extra dimension to single images to make cubes if img.ndim == 2: @@ -1565,7 +1539,7 @@ def sph_ird_preprocess_science(self, frames_info_new = toolbox.collapse_frames_info(finfo, fname, true_north, 'none', logger=self._logger) elif (typ == 'OBJECT'): if collapse_science: - self._logger.info(' ==> collapse: mean ({0} -> 1 frame, 0 dropped)'.format(len(img))) + self._logger.info(f' ==> collapse: mean ({len(img)} -> 1 frame, 0 dropped)') img = np.mean(img, axis=0, keepdims=True) frames_info_new = toolbox.collapse_frames_info(finfo, fname, true_north, 'mean', logger=self._logger) @@ -1621,7 +1595,7 @@ def sph_ird_preprocess_science(self, for f in range(len(img)): frame = nimg[f, ...].squeeze() hdr['HIERARCH ESO DET NDIT'] = 1 - fits.writeto(path.preproc / '{}_DIT{:03d}_preproc.fits'.format(fname, f), frame, hdr, + fits.writeto(path.preproc / f'{fname}_DIT{f:03d}_preproc.fits', frame, hdr, overwrite=True, output_verify='silentfix') # sort and save final dataframe @@ -1690,23 +1664,23 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, box_p # wavelength map self._logger.debug('> compute default wavelength calibration') wave_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 
'IRD_WAVECALIB')] - wave_calib = fits.getdata(path.calib / '{}.fits'.format(wave_file.index[0])) + wave_calib = fits.getdata(path.calib / f'{wave_file.index[0]}.fits') wave_lin = get_wavelength_calibration(filter_comb, wave_calib, centers, wave_min, wave_max) # start with OBJECT,FLUX flux_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,FLUX'] if len(flux_files) != 0: for file, idx in flux_files.index: - self._logger.info(' * OBJECT,FLUX: {0}'.format(file)) + self._logger.info(f' * OBJECT,FLUX: {file}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube, hdr = fits.getdata(path.preproc / '{}.fits'.format(fname), header=True) + fname = f'{file}_DIT{idx:03d}_preproc' + cube, hdr = fits.getdata(path.preproc / f'{fname}.fits', header=True) # centers if plot: - save_path = path.products / '{}_psf_fitting.pdf'.format(fname) + save_path = path.products / f'{fname}_psf_fitting.pdf' else: save_path = None psf_center = toolbox.star_centers_from_PSF_lss_cube(cube, wave_lin, pixel, high_pass=high_pass_psf, @@ -1714,7 +1688,7 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, box_p # save self._logger.debug('> save centers') - fits.writeto(path.preproc / '{}_centers.fits'.format(fname), psf_center, overwrite=True) + fits.writeto(path.preproc / f'{fname}_centers.fits', psf_center, overwrite=True) # then OBJECT,CENTER (if any) starcen_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] @@ -1723,25 +1697,25 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, box_p starsci_files = frames_info[(frames_info['DPR TYPE'] == 'OBJECT') & (frames_info['DET SEQ1 DIT'].round(2) == DIT)] for file, idx in starcen_files.index: - self._logger.info(' * OBJECT,CENTER: {0}'.format(file)) + self._logger.info(f' * OBJECT,CENTER: {file}') # read center data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube_cen, hdr = 
fits.getdata(path.preproc / '{}.fits'.format(fname), header=True) + fname = f'{file}_DIT{idx:03d}_preproc' + cube_cen, hdr = fits.getdata(path.preproc / f'{fname}.fits', header=True) # read science data self._logger.debug('> read matching science data') if len(starsci_files) != 0: self._logger.debug('> read matiching science data') - fname2 = '{0}_DIT{1:03d}_preproc'.format(starsci_files.index[0][0], idx) - cube_sci, hdr = fits.getdata(path.preproc / '{}.fits'.format(fname2), header=True) + fname2 = f'{starsci_files.index[0][0]}_DIT{idx:03d}_preproc' + cube_sci, hdr = fits.getdata(path.preproc / f'{fname2}.fits', header=True) else: cube_sci = None # centers if plot: - save_path = path.products / '{}_waffle_fitting.pdf'.format(fname) + save_path = path.products / f'{fname}_waffle_fitting.pdf' else: save_path = None spot_centers, spot_dist, img_centers \ @@ -1751,8 +1725,8 @@ def sph_ird_star_center(self, high_pass_psf=False, high_pass_waffle=False, box_p # save self._logger.debug('> save centers') - fits.writeto(path.preproc / '{}_centers.fits'.format(fname), img_centers, overwrite=True) - fits.writeto(path.preproc / '{}_spot_distance.fits'.format(fname), spot_dist, overwrite=True) + fits.writeto(path.preproc / f'{fname}_centers.fits', img_centers, overwrite=True) + fits.writeto(path.preproc / f'{fname}_spot_distance.fits', spot_dist, overwrite=True) # update recipe execution self._update_recipe_status('sph_ird_star_center', sphere.SUCCESS) @@ -1816,7 +1790,7 @@ def sph_ird_wavelength_recalibration(self, fit_scaling=True, plot=True): # wavelength map self._logger.debug('> compute default wavelength calibration') wave_file = files_info[files_info['PROCESSED'] & (files_info['PRO CATG'] == 'IRD_WAVECALIB')] - wave_calib = fits.getdata(path.calib / '{}.fits'.format(wave_file.index[0])) + wave_calib = fits.getdata(path.calib / f'{wave_file.index[0]}.fits') wave_lin = get_wavelength_calibration(filter_comb, wave_calib, centers, wave_min, wave_max) # reference wavelength 
@@ -1830,8 +1804,8 @@ def sph_ird_wavelength_recalibration(self, fit_scaling=True, plot=True): self._logger.info(' ==> no OBJECT,CENTER file in the data set. Wavelength cannot be recalibrated. The standard wavelength calibrated by the ESO pripeline will be used.') return - fname = '{0}_DIT{1:03d}_preproc_spot_distance'.format(starcen_files.index.values[0][0], starcen_files.index.values[0][1]) - spot_dist = fits.getdata(path.preproc / '{}.fits'.format(fname)) + fname = f'{starcen_files.index.values[0][0]}_DIT{starcen_files.index.values[0][1]:03d}_preproc_spot_distance' + spot_dist = fits.getdata(path.preproc / f'{fname}.fits') if plot: pdf = PdfPages(path.products / 'wavelength_recalibration.pdf') @@ -1839,7 +1813,7 @@ def sph_ird_wavelength_recalibration(self, fit_scaling=True, plot=True): pix = np.arange(1024) wave_final = np.zeros((1024, 2)) for fidx in range(2): - self._logger.info(' ==> field {0:2d}/{1:2d}'.format(fidx+1, 2)) + self._logger.info(f' ==> field {fidx + 1:2d}/{2:2d}') wave = wave_lin[fidx] dist = spot_dist[:, fidx] @@ -1872,7 +1846,7 @@ def sph_ird_wavelength_recalibration(self, fit_scaling=True, plot=True): wave_final_fit[bad] = np.nan wave_diff = np.abs(wave_final_fit - wave) - self._logger.info(' ==> difference with calibrated wavelength: min={0:.1f} nm, max={1:.1f} nm'.format(np.nanmin(wave_diff), np.nanmax(wave_diff))) + self._logger.info(f' ==> difference with calibrated wavelength: min={np.nanmin(wave_diff):.1f} nm, max={np.nanmax(wave_diff):.1f} nm') if filter_comb == 'S_LR': # use DRH @@ -1913,7 +1887,7 @@ def sph_ird_wavelength_recalibration(self, fit_scaling=True, plot=True): plt.plot(pix, wave_final_fit, label='Recalibrated [fit]'+use_f) plt.legend(loc='upper left') plt.ylabel('Wavelength r[nm]') - plt.title('Field #{}'.format(fidx)) + plt.title(f'Field #{fidx}') plt.xlim(xmin, xmax) plt.ylim(900, 2400) plt.gca().xaxis.set_ticklabels([]) @@ -1946,7 +1920,8 @@ def sph_ird_wavelength_recalibration(self, fit_scaling=True, plot=True): def 
sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_mrs_chromatism=True, - split_posang=True, shift_method='fft', manual_center=None, coarse_centering=False): + split_posang=True, shift_method='fft', manual_center=None, + center_selection='first', coarse_centering=False): '''Combine and save the science data into final cubes All types of data are combined independently: PSFs @@ -2025,6 +2000,13 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m OBJECT frames. This should be an array of 2 values (cx for the 2 IRDIS fields). Default is None + center_selection : str + Specify which star center to use when multiple are + available. Possible values are first, last, and time. The + time option indicates to use the star center file that is + closest in time with respect to each science file. Default + is first + coarse_centering : bool Control if images are finely centered or not before being combined. However the images are still roughly centered by @@ -2142,15 +2124,15 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m # read and combine files for file_idx, (file, idx) in enumerate(flux_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(flux_files), file, idx)) + self._logger.info(f' ==> file {file_idx + 1}/{len(flux_files)}: {file}, DIT #{idx}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube = fits.getdata(path.preproc / '{}.fits'.format(fname)) + fname = f'{file}_DIT{idx:03d}_preproc' + cube = fits.getdata(path.preproc / f'{fname}.fits') self._logger.debug('> read centers') - cfile = path.preproc / '{}_centers.fits'.format(fname) + cfile = path.preproc / f'{fname}_centers.fits' if cfile.exists(): centers = fits.getdata(cfile) else: @@ -2168,7 +2150,7 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m # center for field_idx, img in enumerate(cube): - 
self._logger.debug('> field {}'.format(field_idx)) + self._logger.debug(f'> field {field_idx}') # wavelength solution for this field ciwave = iwave[:, field_idx] @@ -2210,11 +2192,11 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m ii = np.where(psf_posang == pa)[0] # save metadata - flux_files[(flux_files['INS4 DROT2 POSANG'] + 90) == pa].to_csv(path.products / 'psf_posang={:06.2f}_frames.csv'.format(pa)) - fits.writeto(path.products / 'psf_posang={:06.2f}_posang.fits'.format(pa), psf_posang[ii], overwrite=True) + flux_files[(flux_files['INS4 DROT2 POSANG'] + 90) == pa].to_csv(path.products / f'psf_posang={pa:06.2f}_frames.csv') + fits.writeto(path.products / f'psf_posang={pa:06.2f}_posang.fits', psf_posang[ii], overwrite=True) # save final cubes - fits.writeto(path.products / 'psf_posang={:06.2f}_cube.fits'.format(pa), psf_cube[:, ii], overwrite=True) + fits.writeto(path.products / f'psf_posang={pa:06.2f}_cube.fits', psf_cube[:, ii], overwrite=True) else: # save metadata flux_files.to_csv(path.products / 'psf_posang=all_frames.csv') @@ -2247,19 +2229,19 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m # read and combine files for file_idx, (file, idx) in enumerate(starcen_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(starcen_files), file, idx)) + self._logger.info(f' ==> file {file_idx + 1}/{len(starcen_files)}: {file}, DIT #{idx}') # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube = fits.getdata(path.preproc / '{}.fits'.format(fname)) + fname = f'{file}_DIT{idx:03d}_preproc' + cube = fits.getdata(path.preproc / f'{fname}.fits') # use manual center if explicitely requested self._logger.debug('> read centers') if manual_center is not None: centers = manual_center else: - centers = fits.getdata(path.preproc / '{}_centers.fits'.format(fname)) + centers = fits.getdata(path.preproc / 
f'{fname}_centers.fits') # make sure we have only integers if user wants coarse centering if coarse_centering: @@ -2272,7 +2254,7 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m # center for field_idx, img in enumerate(cube): - self._logger.debug('> field {}'.format(field_idx)) + self._logger.debug(f'> field {field_idx}') # wavelength solution for this field ciwave = iwave[:, field_idx] @@ -2313,11 +2295,11 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m ii = np.where(cen_posang == pa)[0] # save metadata - starcen_files[(starcen_files['INS4 DROT2 POSANG'] + 90) == pa].to_csv(path.products / 'starcenter_posang={:06.2f}_frames.csv'.format(pa)) - fits.writeto(path.products / 'starcenter_posang={:06.2f}_posang.fits'.format(pa), cen_posang[ii], overwrite=True) + starcen_files[(starcen_files['INS4 DROT2 POSANG'] + 90) == pa].to_csv(path.products / f'starcenter_posang={pa:06.2f}_frames.csv') + fits.writeto(path.products / f'starcenter_posang={pa:06.2f}_posang.fits', cen_posang[ii], overwrite=True) # save final cubes - fits.writeto(path.products / 'starcenter_posang={:06.2f}_cube.fits'.format(pa), cen_cube[:, ii], overwrite=True) + fits.writeto(path.products / f'starcenter_posang={pa:06.2f}_cube.fits', cen_cube[:, ii], overwrite=True) else: # save metadata starcen_files.to_csv(path.products / 'starcenter_posang=all_frames.csv') @@ -2341,26 +2323,6 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m sci_cube = np.zeros((2, nfiles, nwave, science_dim)) sci_posang = np.zeros(nfiles) - # use manual center if explicitely requested - self._logger.debug('> read centers') - if manual_center is not None: - centers = np.full((1024, 2), manual_center, dtype=np.float) - else: - # FIXME: ticket #12. 
Use first DIT of first OBJECT,CENTER - # in the sequence, but it would be better to be able to - # select which CENTER to use - starcen_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] - if len(starcen_files) == 0: - self._logger.warning('No OBJECT,CENTER file in the data set. Images will be centered using default center ({},{})'.format(*default_center[:, 0])) - centers = np.full((1024, 2), default_center[:, 0], dtype=np.float) - else: - fname = '{0}_DIT{1:03d}_preproc_centers.fits'.format(starcen_files.index.values[0][0], starcen_files.index.values[0][1]) - centers = fits.getdata(path.preproc / fname) - - # make sure we have only integers if user wants coarse centering - if coarse_centering: - centers = centers.astype(np.int) - # final center if cpix: cc = science_dim // 2 @@ -2369,21 +2331,60 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m # read and combine files for file_idx, (file, idx) in enumerate(object_files.index): - self._logger.info(' ==> file {0}/{1}: {2}, DIT #{3}'.format(file_idx+1, len(object_files), file, idx)) + posang = frames_info.loc[(file, idx), 'INS4 DROT2 POSANG'] + 90 + self._logger.info(f' ==> file {file_idx + 1}/{len(object_files)}: {file}, DIT #{idx}, posang={posang:5.1f}°') + + # use manual center if explicitely requested + self._logger.debug('> read centers') + if manual_center is not None: + centers = np.full((1024, 2), manual_center, dtype=float) + else: + # otherwise, look whether we have an OBJECT,CENTER frame and select the one requested by user + starcen_files = frames_info[frames_info['DPR TYPE'] == 'OBJECT,CENTER'] + if len(starcen_files) == 0: + self._logger.warning('No OBJECT,CENTER file in the dataset. 
Images will be centered using default center ({},{})'.format(*self._default_center)) + centers = self._default_center + else: + # selection of the proper OBJECT,CENTER + center_selection = center_selection.lower() + if center_selection == 'first': + center_index = 0 + elif center_selection == 'last': + center_index = len(starcen_files.index.values)-1 + elif center_selection == 'time': + time_cen = starcen_files['DATE-OBS'] + time_sci = frames_info.loc[(file, idx), 'DATE-OBS'] + center_index = np.abs(time_sci - time_cen).argmin() + else: + self._logger.error(f'Unknown OBJECT,CENTER selection {center_selection}. Possible values are first, last, and time.') + self._update_recipe_status('sph_ird_combine_data', sphere.ERROR) + return + + fname = f'{starcen_files.index.values[center_index][0]}_DIT{starcen_files.index.values[center_index][1]:03d}_preproc_centers.fits' + fpath = path.preproc / fname + if fpath.exists(): + centers = fits.getdata(fpath) + else: + self._logger.warning('sph_ird_star_center() has not been executed. 
Images will be centered using default center ({},{})'.format(*self._default_center)) + centers = np.full((1024, 2), default_center[:, 0], dtype=float) + # make sure we have only integers if user wants coarse centering + if coarse_centering: + centers = centers.astype(int) + # read data self._logger.debug('> read data') - fname = '{0}_DIT{1:03d}_preproc'.format(file, idx) - cube = fits.getdata(path.preproc / '{}.fits'.format(fname)) + fname = f'{file}_DIT{idx:03d}_preproc' + cube = fits.getdata(path.preproc / f'{fname}.fits') # DIT, angles, etc self._logger.debug('> read angles') DIT = frames_info.loc[(file, idx), 'DET SEQ1 DIT'] - sci_posang[file_idx] = frames_info.loc[(file, idx), 'INS4 DROT2 POSANG'] + 90 + sci_posang[file_idx] = posang # center for field_idx, img in enumerate(cube): - self._logger.debug('> field {}'.format(field_idx)) + self._logger.debug(f'> field {field_idx}') # wavelength solution for this field ciwave = iwave[:, field_idx] @@ -2424,11 +2425,11 @@ def sph_ird_combine_data(self, cpix=True, psf_dim=80, science_dim=800, correct_m ii = np.where(sci_posang == pa)[0] # save metadata - object_files[(object_files['INS4 DROT2 POSANG'] + 90) == pa].to_csv(path.products / 'science_posang={:06.2f}_frames.csv'.format(pa)) - fits.writeto(path.products / 'science_posang={:06.2f}_posang.fits'.format(pa), sci_posang[ii], overwrite=True) + object_files[(object_files['INS4 DROT2 POSANG'] + 90) == pa].to_csv(path.products / f'science_posang={pa:06.2f}_frames.csv') + fits.writeto(path.products / f'science_posang={pa:06.2f}_posang.fits', sci_posang[ii], overwrite=True) # save final cubes - fits.writeto(path.products / 'science_posang={:06.2f}_cube.fits'.format(pa), sci_cube[:, ii], overwrite=True) + fits.writeto(path.products / f'science_posang={pa:06.2f}_cube.fits', sci_cube[:, ii], overwrite=True) else: # save metadata object_files.to_csv(path.products / 'science_posang=all_frames.csv') @@ -2447,7 +2448,7 @@ def sph_ird_combine_data(self, cpix=True, 
psf_dim=80, science_dim=800, correct_m self._status = sphere.COMPLETE - def sph_ird_clean(self, delete_raw=False, delete_products=False): + def sph_ird_clean(self, delete_raw=False, delete_products=False, delete_config=False): ''' Clean everything except for raw data and science products (by default) @@ -2458,6 +2459,9 @@ def sph_ird_clean(self, delete_raw=False, delete_products=False): delete_products : bool Delete science products. Default is False + + delete_config : bool + Delete configuration file. Default is False ''' self._logger.info('Clean reduction data') @@ -2474,6 +2478,10 @@ def sph_ird_clean(self, delete_raw=False, delete_products=False): self._logger.debug('> update recipe execution') self._recipes_status['sph_ird_clean'] = True + # remove config + if delete_config: + self.config._file.unlink() + # update recipe execution self._update_recipe_status('sph_ird_clean', sphere.SUCCESS) diff --git a/sphere/SPARTA.py b/sphere/SPARTA.py index 31d2f08..9b48d7f 100644 --- a/sphere/SPARTA.py +++ b/sphere/SPARTA.py @@ -17,7 +17,7 @@ import sphere import sphere.utils as utils -import sphere.toolbox as toolbox +import sphere.utils.toolbox as toolbox _log = logging.getLogger(__name__) @@ -57,7 +57,7 @@ class Reduction(object): # Constructor ################################################## - def __new__(cls, path, log_level='info', sphere_handler=None): + def __new__(cls, path, clean_start=True, log_level='info', user_config=None, sphere_handler=None): ''' Custom instantiation for the class @@ -71,9 +71,17 @@ def __new__(cls, path, log_level='info', sphere_handler=None): path : str Path to the directory containing the dataset - level : {'debug', 'info', 'warning', 'error', 'critical'} + clean_start : bool + Remove all results from previous reductions for a clean start. + Default is True + + log_level : {'debug', 'info', 'warning', 'error', 'critical'} The log level of the handler + user_config : str + Path to a user-provided configuration. Default is None, i.e. 
the + reduction will use the package default configuration parameters + sphere_handler : log handler Higher-level SPHERE.Dataset log handler ''' @@ -88,7 +96,7 @@ def __new__(cls, path, log_level='info', sphere_handler=None): # zeroth-order reduction validation raw = path / 'raw' if not raw.exists(): - _log.error('No raw/ subdirectory. {0} is not a valid reduction path'.format(path)) + _log.error(f'No raw/ subdirectory. {path} is not a valid reduction path') return None else: reduction = super(Reduction, cls).__new__(cls) @@ -123,26 +131,44 @@ def __new__(cls, path, log_level='info', sphere_handler=None): reduction._logger = logger - reduction._logger.info('Creating SPARTA reduction at path {}'.format(path)) + reduction._logger.info(f'Creating SPARTA reduction at path {path}') + + # + # clean start + # + if clean_start: + reduction._logger.info('Erase outputs of previous reduction for a clean start') + reduction._path.remove(delete_raw=False, delete_products=True, logger=reduction._logger) + config_file = reduction._path.root / 'reduction_config.ini' + if config_file.exists(): + config_file.unlink() # # configuration # reduction._logger.debug('> read default configuration') configfile = f'{Path(sphere.__file__).parent}/instruments/{reduction._instrument}.ini' - config = configparser.ConfigParser() + cfgparser = configparser.ConfigParser() reduction._logger.debug('Read configuration') - config.read(configfile) + cfgparser.read(configfile) # reduction parameters - reduction._config = dict(config.items('reduction')) - for key, value in reduction._config.items(): + cfg = {} + items = dict(cfgparser.items('reduction')) + for key, value in items.items(): try: val = eval(value) except NameError: val = value - reduction._config[key] = val + cfg[key] = val + reduction._config = utils.Configuration(reduction._path, reduction._logger, cfg) + + # load user-provided default configuration parameters + if user_config: + user_config = Path(user_config).expanduser() + + 
reduction._config.load_from_file(user_config) # # reduction and recipe status @@ -174,7 +200,7 @@ def __new__(cls, path, log_level='info', sphere_handler=None): ################################################## def __repr__(self): - return ''.format(self._instrument, self._path, self.loglevel) + return f'' def __format__(self): return self.__repr__() @@ -250,6 +276,9 @@ def _read_info(self): # path path = self.path + # load existing configuration + self.config.load() + # files info fname = path.preproc / 'files.csv' if fname.exists(): @@ -372,32 +401,6 @@ def _update_recipe_status(self, recipe, status): # Generic class methods ################################################## - def show_config(self): - ''' - Shows the reduction configuration - ''' - - # dictionary - dico = self._config - - # misc parameters - print() - print('{0:<30s}{1}'.format('Parameter', 'Value')) - print('-'*35) - keys = [key for key in dico if key.startswith('misc')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - - # clean - print('-'*35) - keys = [key for key in dico if key.startswith('clean')] - for key in keys: - print('{0:<30s}{1}'.format(key, dico[key])) - print('-'*35) - - print() - - def init_reduction(self): ''' Sort files and frames, perform sanity check @@ -454,7 +457,8 @@ def clean(self): if config['clean']: self.sph_sparta_clean(delete_raw=config['clean_delete_raw'], - delete_products=config['clean_delete_products']) + delete_products=config['clean_delete_products'], + delete_config=config['clean_delete_config']) def full_reduction(self): ''' @@ -497,7 +501,7 @@ def sort_files(self): self._status = sphere.FATAL return - self._logger.info(' * found {0} raw FITS files'.format(len(files))) + self._logger.info(f' * found {len(files)} raw FITS files') # read list of keywords self._logger.debug('> read keyword list') @@ -524,7 +528,7 @@ def sort_files(self): self._logger.debug('> read FITS keywords') for f in files: - hdu = fits.open(path.raw / '{}.fits'.format(f)) 
+ hdu = fits.open(path.raw / f'{f}.fits') hdr = hdu[0].header for k, sk in zip(keywords, keywords_short): @@ -566,7 +570,7 @@ def sort_files(self): ra_drot_h = np.floor(ra_drot/1e4) ra_drot_m = np.floor((ra_drot - ra_drot_h*1e4)/1e2) ra_drot_s = ra_drot - ra_drot_h*1e4 - ra_drot_m*1e2 - RA = '{:02.0f}:{:02.0f}:{:02.3f}'.format(ra_drot_h, ra_drot_m, ra_drot_s) + RA = f'{ra_drot_h:02.0f}:{ra_drot_m:02.0f}:{ra_drot_s:02.3f}' dec_drot = cinfo['INS4 DROT2 DEC'][0] sign = np.sign(dec_drot) @@ -575,20 +579,20 @@ def sort_files(self): dec_drot_m = np.floor((udec_drot - dec_drot_d*1e4)/1e2) dec_drot_s = udec_drot - dec_drot_d*1e4 - dec_drot_m*1e2 dec_drot_d *= sign - DEC = '{:02.0f}:{:02.0f}:{:02.2f}'.format(dec_drot_d, dec_drot_m, dec_drot_s) + DEC = f'{dec_drot_d:02.0f}:{dec_drot_m:02.0f}:{dec_drot_s:02.2f}' date = str(cinfo['DATE'][0])[0:10] self._logger.info('Extract frames information') - self._logger.info(' * Programme ID: {0}'.format(cinfo['OBS PROG ID'][0])) - self._logger.info(' * OB name: {0}'.format(cinfo['OBS NAME'][0])) - self._logger.info(' * OB ID: {0}'.format(cinfo['OBS ID'][0])) - self._logger.info(' * RA / DEC: {0} / {1}'.format(RA, DEC)) - self._logger.info(' * Date: {0}'.format(date)) - self._logger.info(' * Instrument: {0}'.format(cinfo['SEQ ARM'][0])) - self._logger.info(' * Derotator: {0}'.format(cinfo['INS4 DROT2 MODE'][0])) - self._logger.info(' * VIS WFS mode: {0}'.format(cinfo['AOS VISWFS MODE'][0])) - self._logger.info(' * IR WFS mode: {0}'.format(cinfo['AOS IRWFS MODE'][0])) + self._logger.info(f" * Programme ID: {cinfo['OBS PROG ID'][0]}") + self._logger.info(f" * OB name: {cinfo['OBS NAME'][0]}") + self._logger.info(f" * OB ID: {cinfo['OBS ID'][0]}") + self._logger.info(f' * RA / DEC: {RA} / {DEC}') + self._logger.info(f' * Date: {date}') + self._logger.info(f" * Instrument: {cinfo['SEQ ARM'][0]}") + self._logger.info(f" * Derotator: {cinfo['INS4 DROT2 MODE'][0]}") + self._logger.info(f" * VIS WFS mode: {cinfo['AOS VISWFS MODE'][0]}") + 
self._logger.info(f" * IR WFS mode: {cinfo['AOS IRWFS MODE'][0]}") # update recipe execution self._update_recipe_status('sort_files', sphere.SUCCESS) @@ -1410,7 +1414,7 @@ def sph_sparta_plot(self): self._status = sphere.COMPLETE - def sph_sparta_clean(self, delete_raw=False, delete_products=False): + def sph_sparta_clean(self, delete_raw=False, delete_products=False, delete_config=False): ''' Clean everything except for raw data and science products (by default) @@ -1421,6 +1425,9 @@ def sph_sparta_clean(self, delete_raw=False, delete_products=False): delete_products : bool Delete science products. Default is False + + delete_config : bool + Delete configuration file. Default is False ''' self._logger.info('Clean reduction data') @@ -1430,42 +1437,12 @@ def sph_sparta_clean(self, delete_raw=False, delete_products=False): self.recipe_requirements, logger=self._logger): return - # parameters - path = self.path + # remove sub-directories + self.path.remove(delete_raw=delete_raw, delete_products=delete_products, logger=self._logger) - # tmp - if path.tmp.exists(): - self._logger.debug('> remove {}'.format(path.tmp)) - shutil.rmtree(path.tmp, ignore_errors=True) - - # sof - if path.sof.exists(): - self._logger.debug('> remove {}'.format(path.sof)) - shutil.rmtree(path.sof, ignore_errors=True) - - # calib - if path.calib.exists(): - self._logger.debug('> remove {}'.format(path.calib)) - shutil.rmtree(path.calib, ignore_errors=True) - - # preproc - if path.preproc.exists(): - self._logger.debug('> remove {}'.format(path.preproc)) - shutil.rmtree(path.preproc, ignore_errors=True) - - # raw - if delete_raw: - if path.raw.exists(): - self._logger.debug('> remove {}'.format(path.raw)) - self._logger.warning(' ==> delete raw files') - shutil.rmtree(path.raw, ignore_errors=True) - - # products - if delete_products: - if path.products.exists(): - self._logger.debug('> remove {}'.format(path.products)) - self._logger.warning(' ==> delete products') - shutil.rmtree(path.products, 
ignore_errors=True) + # remove config + if delete_config: + self.config._file.unlink() # update recipe execution self._update_recipe_status('sph_sparta_clean', sphere.SUCCESS) diff --git a/sphere/SPHERE.py b/sphere/SPHERE.py index e7a945a..c204817 100644 --- a/sphere/SPHERE.py +++ b/sphere/SPHERE.py @@ -39,7 +39,7 @@ def process_mainFiles(mainFiles, files, logger=_log): fname = file.attrib['name'] files.append(fname) - logger.debug(' ==> {0}'.format(fname)) + logger.debug(f' ==> {fname}') def process_association(tree, files, logger=_log): @@ -116,14 +116,14 @@ def sort_files_from_xml(path, logger=_log): xml_files = list(path.glob('*.xml')) logger.info('Sort data based on XML files (ESO automated calibration selection)') - logger.info(' * {0} XML files\n'.format(len(xml_files))) + logger.info(f' * {len(xml_files)} XML files\n') # sort files for file in xml_files: tree = etree.parse(file) root = tree.getroot() - logger.info(' * {}'.format(file.name)) + logger.info(f' * {file.name}') # process only IFS and IRDIS science data catg = root.attrib['category'] @@ -135,14 +135,14 @@ def sort_files_from_xml(path, logger=_log): filename = scifiles[0].attrib['name'] # Mac OS X replaces : by _ in file names... - if not (path / '{}.fits'.format(filename)).exists(): + if not (path / f'{filename}.fits').exists(): filename = filename.replace(':', '_') - if not (path / '{}.fits'.format(filename)).exists(): - logger.info(' ==> file {} does not exist. Skipping'.format(filename)) + if not (path / f'{filename}.fits').exists(): + logger.info(f' ==> file {filename} does not exist. 
Skipping') continue - fpath = path / '{}.fits'.format(filename) + fpath = path / f'{filename}.fits' hdr = fits.getheader(fpath) # target and arm @@ -156,7 +156,7 @@ def sort_files_from_xml(path, logger=_log): try: arm = hdr['HIERARCH ESO SEQ ARM'] except KeyError: - logger.error('No \'HIERARCH ESO SEQ ARM\' keyword in {}'.format(fpath)) + logger.error(f'No \'HIERARCH ESO SEQ ARM\' keyword in {fpath}') continue if arm == 'IRDIS': @@ -164,7 +164,7 @@ def sort_files_from_xml(path, logger=_log): elif arm == 'IFS': instrument = 'IFS' else: - logger.error('Unknown arm {0}'.format(arm)) + logger.error(f'Unknown arm {arm}') continue # get files @@ -172,34 +172,34 @@ def sort_files_from_xml(path, logger=_log): process_association(root, files, logger=logger) # target path - directory = '{0}_id={1}'.format(target, obs_id) + directory = f'{target}_id={obs_id}' directory = '_'.join(directory.split()) target_path = path / directory / night / instrument / 'raw' target_path.mkdir(parents=True, exist_ok=True) # copy files for filename in files: - fpath = path / '{}.fits'.format(filename) + fpath = path / f'{filename}.fits' # Mac OS X replaces : by _ in file names... if not fpath.exists(): filename = filename.replace(':', '_') - fpath = path / '{}.fits'.format(filename) + fpath = path / f'{filename}.fits' # check if file actually exists if not fpath.exists(): - logger.info(' ==> file {} does not exist. Skipping.'.format(fpath)) + logger.info(f' ==> file {fpath} does not exist. 
Skipping.') continue # copy if needed - nfpath = target_path / '{}.fits'.format(filename) + nfpath = target_path / f'{filename}.fits' if not nfpath.exists(): shutil.copy(fpath, nfpath) # print status - logger.debug('{0} - id={1}'.format(target, obs_id)) - logger.debug(' ==> found {0} files'.format(len(files))) - logger.debug(' ==> copied to {0}'.format(target_path)) + logger.debug(f'{target} - id={obs_id}') + logger.debug(f' ==> found {len(files)} files') + logger.debug(f' ==> copied to {target_path}') # move all files path_new = path / 'all_files' @@ -242,11 +242,11 @@ def sort_files_from_fits(path, logger=_log): fits_files = list(path.glob('*.fits')) logger.info('Sort data based on FITS files') - logger.info(' * {0} FITS files\n'.format(len(fits_files))) + logger.info(f' * {len(fits_files)} FITS files\n') # sort files for file in fits_files: - logger.info(' * {}'.format(file.name)) + logger.info(f' * {file.name}') # target and arm hdr = fits.getheader(file) @@ -256,7 +256,7 @@ def sort_files_from_fits(path, logger=_log): obs_id = hdr['HIERARCH ESO OBS ID'] dpr_type = hdr['HIERARCH ESO DPR TYPE'] except KeyError: - logger.error('Missing ESO HIERARCH keywords in {}'.format(file)) + logger.error(f'Missing ESO HIERARCH keywords in {file}') continue if dpr_type == 'OBJECT,AO': @@ -265,7 +265,7 @@ def sort_files_from_fits(path, logger=_log): try: arm = hdr['HIERARCH ESO SEQ ARM'] except KeyError: - logger.error('No \'HIERARCH ESO SEQ ARM\' keyword in {}'.format(file)) + logger.error(f'No \'HIERARCH ESO SEQ ARM\' keyword in {file}') continue if arm == 'IRDIS': @@ -273,7 +273,7 @@ def sort_files_from_fits(path, logger=_log): elif arm == 'IFS': instrument = 'IFS' else: - logger.error('Unknown arm {0}'.format(arm)) + logger.error(f'Unknown arm {arm}') continue # target path @@ -284,8 +284,8 @@ def sort_files_from_fits(path, logger=_log): file.rename(target_path / file.name) # print status - logger.debug('{0} - id={1}'.format(target, obs_id)) - logger.debug(' ==> copied to 
{0}'.format(target_path)) + logger.debug(f'{target} - id={obs_id}') + logger.debug(f' ==> copied to {target_path}') # move all files path_new = path / 'unsorted_files' @@ -322,7 +322,7 @@ def classify_irdis_dataset(path, logger=_log): # zeroth-order reduction validation raw = path / 'raw' if not raw.exists(): - logger.error('No raw/ subdirectory. {0} is not a valid reduction path!'.format(path)) + logger.error(f'No raw/ subdirectory. {path} is not a valid reduction path!') return None # list all fits files @@ -398,7 +398,7 @@ def __init__(self, path, log_level='info'): self._handler = handler self._logger = logger - self._logger.info('Looking for SPHERE data sets at path {}'.format(path)) + self._logger.info(f'Looking for SPHERE data sets at path {path}') # list of reductions self._IFS_reductions = [] @@ -423,7 +423,7 @@ def __init__(self, path, log_level='info'): ################################################## def __repr__(self): - return ''.format(len(self.IFS_reductions), len(self.IRDIS_reductions), len(self.SPARTA_reductions)) + return f'' ################################################## # Properties @@ -459,7 +459,7 @@ def init_reduction(self): ''' for r in self.reductions: - self._logger.info('Init: {}'.format(str(r))) + self._logger.info(f'Init: {str(r)}') r.init_reduction() @@ -470,7 +470,7 @@ def create_static_calibrations(self): ''' for r in self.reductions: - self._logger.info('Static calibrations: {}'.format(str(r))) + self._logger.info(f'Static calibrations: {str(r)}') r.create_static_calibrations() @@ -481,7 +481,7 @@ def preprocess_science(self): ''' for r in self.reductions: - self._logger.info('Science pre-processing: {}'.format(str(r))) + self._logger.info(f'Science pre-processing: {str(r)}') r.preprocess_science() @@ -493,7 +493,7 @@ def process_science(self): ''' for r in self.reductions: - self._logger.info('Science processing: {}'.format(str(r))) + self._logger.info(f'Science processing: {str(r)}') r.process_science() @@ -506,7 +506,7 @@ 
def clean(self): for r in self.reductions: print(r) - self._logger.info('Clean-up: {}'.format(str(r))) + self._logger.info(f'Clean-up: {str(r)}') r.clean() @@ -519,7 +519,7 @@ def full_reduction(self): for r in self.reductions: self._logger.info('###########################################################################') - self._logger.info('# Full reduction: {}'.format(str(r))) + self._logger.info(f'# Full reduction: {str(r)}') self._logger.info('###########################################################################') r.full_reduction() @@ -553,7 +553,7 @@ def _create_reductions(self): try: arm = hdr['HIERARCH ESO SEQ ARM'] except KeyError: - self._logger.error('No \'HIERARCH ESO SEQ ARM\' keyword in {}'.format(fits_files[0])) + self._logger.error(f'No \'HIERARCH ESO SEQ ARM\' keyword in {fits_files[0]}') if arm == 'IRDIS': mode = classify_irdis_dataset(reduction_path, logger=self._logger) @@ -563,13 +563,13 @@ def _create_reductions(self): continue if mode == 'imaging': - self._logger.info(' * IRDIS imaging reduction at path {}'.format(reduction_path)) + self._logger.info(f' * IRDIS imaging reduction at path {reduction_path}') reduction = IRDIS.ImagingReduction(reduction_path, log_level=self._log_level, sphere_handler=self._handler) elif mode == 'polar': self._logger.warning('IRDIS DPI not supported yet') elif mode == 'spectro': - self._logger.info(' * IRDIS spectro {} files'.format(len(fits_files))) + self._logger.info(f' * IRDIS spectro {len(fits_files)} files') # merge all reductions into a single list self._reductions = self.IFS_reductions + self.IRDIS_reductions diff --git a/sphere/__init__.py b/sphere/__init__.py index 2874613..d9bae7c 100644 --- a/sphere/__init__.py +++ b/sphere/__init__.py @@ -1,7 +1,7 @@ __author__ = 'Arthur Vigan' __copyright__ = 'Copyright (C) 2017-2021 Arthur Vigan' __license__ = 'MIT' -__version__ = '1.5.1' +__version__ = '1.6' import logging import enum diff --git a/sphere/instruments/IFS.ini b/sphere/instruments/IFS.ini index
a0dea17..cf1e984 100644 --- a/sphere/instruments/IFS.ini +++ b/sphere/instruments/IFS.ini @@ -54,6 +54,7 @@ combine_cpix = True combine_psf_dim = 80 combine_science_dim = 290 combine_correct_anamorphism = True +combine_center_selection = first combine_manual_center = None combine_coarse_centering = False combine_shift_method = fft @@ -63,3 +64,4 @@ combine_save_scaled = False clean = True clean_delete_raw = False clean_delete_products = False +clean_delete_config = False diff --git a/sphere/instruments/IRDIS.ini b/sphere/instruments/IRDIS.ini index 3f47e12..364f6db 100644 --- a/sphere/instruments/IRDIS.ini +++ b/sphere/instruments/IRDIS.ini @@ -71,6 +71,7 @@ combine_cpix = True combine_psf_dim = 100 combine_science_dim = 800 combine_correct_anamorphism = True +combine_center_selection = first combine_manual_center = None combine_coarse_centering = False combine_shift_method = fft @@ -80,6 +81,7 @@ combine_save_scaled = False clean = True clean_delete_raw = False clean_delete_products = False +clean_delete_config = False # # default reduction parameters for long-slit spectroscopy @@ -108,6 +110,7 @@ combine_psf_dim = 100 combine_science_dim = 800 combine_correct_mrs_chromatism = True combine_split_posang = True +combine_center_selection = first combine_manual_center = None combine_coarse_centering = False combine_shift_method = fft @@ -116,3 +119,4 @@ combine_shift_method = fft clean = True clean_delete_raw = False clean_delete_products = False +clean_delete_config = False diff --git a/sphere/instruments/SPARTA.ini b/sphere/instruments/SPARTA.ini index b2e9f74..820470e 100644 --- a/sphere/instruments/SPARTA.ini +++ b/sphere/instruments/SPARTA.ini @@ -21,4 +21,4 @@ misc_query_timeout = 5 clean = True clean_delete_raw = False clean_delete_products = False - +clean_delete_config = False diff --git a/sphere/utils/__init__.py b/sphere/utils/__init__.py index ba55058..ef94cf2 100644 --- a/sphere/utils/__init__.py +++ b/sphere/utils/__init__.py @@ -1 +1,2 @@ from 
.reduction_path import ReductionPath +from .config import Configuration diff --git a/sphere/utils/config.py b/sphere/utils/config.py new file mode 100644 index 0000000..2766f6f --- /dev/null +++ b/sphere/utils/config.py @@ -0,0 +1,155 @@ +import logging +import configparser + +from collections import UserDict + +_log = logging.getLogger(__name__) + + +class Configuration(UserDict): + + ################################################## + # Constructor + ################################################## + + def __init__(self, path, logger, config): + self._file = path.root / 'reduction_config.ini' + self._logger = logger + + # initialize internal dict with user-provided configuration + self.data = config + + ################################################## + # dictionary-related functions + ################################################## + + def __setitem__(self, key, item): + super().__setitem__(key, item) + + self._logger.debug(f'Saving value {item} for key {key}') + + self.save() + + def __delitem__(self, key): + self._logger.error('Configuration keys cannot be modified') + + ################################################## + # Representation + ################################################## + + def full_description(self, pad=0): + repr = '' + padding = pad*' ' + + # parameters + repr += f'\n{padding}{"Parameter":<30s}Value\n' + repr += padding + '-'*35 + '\n' + catgs = ['misc', 'cal', 'preproc', 'center', 'combine', 'clean'] + for catg in catgs: + keys = [key for key in self if key.startswith(catg)] + for key in keys: + repr += f'{padding}{key:<30s}{self[key]}\n' + repr += padding + '-'*35 + '\n' + + return repr + + def __repr__(self): + return f'{type(self).__name__}({self.full_description(pad=4)})' + + def __str__(self): + return self.full_description() + + ################################################## + # Other methods + ################################################## + + def save(self): + ''' + Save configuration to reduction directory + 
''' + + self._logger.debug('Saving full config to disk') + + with open(self._file, 'w') as file: + file.write('[default]\n\n') + + catgs = ['misc', 'cal', 'preproc', 'center', 'combine', 'clean'] + for catg in catgs: + keys = [key for key in self if key.startswith(catg)] + for key in keys: + file.write(f'{key:<30s} = {self[key]}\n') + file.write('\n') + + def load(self): + ''' + Load configuration from reduction directory + ''' + + if self._file.exists(): + self._logger.info('Load existing configuration file') + + try: + cfgparser = configparser.ConfigParser() + cfgparser.read(self._file) + except configparser.MissingSectionHeaderError: + # add section if it was missing + with open(self._file, 'r') as file: + lines = file.readlines() + + with open(self._file, 'w') as file: + file.write('[default]\n\n') + file.writelines(lines) + finally: + cfgparser = configparser.ConfigParser() + cfgparser.read(self._file) + + for section in cfgparser.sections(): + items = dict(cfgparser.items(section)) + for key, value in items.items(): + try: + val = eval(value) + except (NameError, SyntaxError): + val = value + + self.data[key] = val + else: + self.save() + + def load_from_file(self, filepath): + ''' + Load configuration from provided file + + Parameters + ---------- + filepath : str + Path of the configuration file + ''' + + if filepath.exists(): + self._logger.info(f'Load configuration file at path {filepath}') + + try: + cfgparser = configparser.ConfigParser() + cfgparser.read(filepath) + except configparser.MissingSectionHeaderError: + # add section if it was missing + with open(filepath, 'r') as file: + lines = file.readlines() + + with open(filepath, 'w') as file: + file.write('[default]\n\n') + file.writelines(lines) + finally: + cfgparser = configparser.ConfigParser() + cfgparser.read(filepath) + + for section in cfgparser.sections(): + items = dict(cfgparser.items(section)) + for key, value in items.items(): + try: + val = eval(value) + except (NameError, SyntaxError): + val = value + + self.data[key] = val + 
diff --git a/sphere/utils/imutils.py b/sphere/utils/imutils.py index bcb10ad..44462ba 100644 --- a/sphere/utils/imutils.py +++ b/sphere/utils/imutils.py @@ -161,7 +161,7 @@ def shift(array, shift_value, method='fft', mode='constant', cval=0): method = 'roll' else: # force integer values - if method is 'roll': + if method == 'roll': shift_value = np.round(shift_value) # FFT limitations @@ -838,11 +838,11 @@ def sigma_filter(img, box=5, nsigma=3, iterate=False, return_mask=False, max_ite box2 = box**2 kernel = Box2DKernel(box) - img_clip = (convolve(img, kernel)*box2 - img) / (box2-1) + img_clip = (convolve(img, kernel, fill_value=0, nan_treatment='fill', preserve_nan=True)*box2 - img) / (box2-1) imdev = (img - img_clip)**2 fact = nsigma**2 / (box2-2) - imvar = fact*(convolve(imdev, kernel)*box2 - imdev) + imvar = fact*(convolve(imdev, kernel, fill_value=0, nan_treatment='fill', preserve_nan=True)*box2 - imdev) # following solution is faster but does not support bad pixels # see avigan/SPHERE#49 diff --git a/sphere/utils/reduction_path.py b/sphere/utils/reduction_path.py index 00fe2e4..fc4c56e 100644 --- a/sphere/utils/reduction_path.py +++ b/sphere/utils/reduction_path.py @@ -125,34 +125,34 @@ def remove(self, delete_raw=False, delete_products=False, logger=_log): # tmp if self._tmp.exists(): - logger.debug('> remove {}'.format(self._tmp)) + logger.debug(f'> remove {self._tmp}') shutil.rmtree(self._tmp, ignore_errors=True) # sof if self._sof.exists(): - logger.debug('> remove {}'.format(self._sof)) + logger.debug(f'> remove {self._sof}') shutil.rmtree(self._sof, ignore_errors=True) # calib if self._calib.exists(): - logger.debug('> remove {}'.format(self._calib)) + logger.debug(f'> remove {self._calib}') shutil.rmtree(self._calib, ignore_errors=True) # preproc if self._preproc.exists(): - logger.debug('> remove {}'.format(self._preproc)) + logger.debug(f'> remove {self._preproc}') shutil.rmtree(self._preproc, ignore_errors=True) # raw if delete_raw: if 
self._raw.exists(): - logger.debug('> remove {}'.format(self._raw)) + logger.debug(f'> remove {self._raw}') logger.warning(' ==> delete raw files') shutil.rmtree(self._raw, ignore_errors=True) # products if delete_products: if self._products.exists(): - logger.debug('> remove {}'.format(self._products)) + logger.debug(f'> remove {self._products}') logger.warning(' ==> delete products') shutil.rmtree(self._products, ignore_errors=True) diff --git a/sphere/toolbox.py b/sphere/utils/toolbox.py similarity index 97% rename from sphere/toolbox.py rename to sphere/utils/toolbox.py index 4a49f0b..c0f1419 100644 --- a/sphere/toolbox.py +++ b/sphere/utils/toolbox.py @@ -67,10 +67,10 @@ def recipe_executable(recipes_status, reduction_status, recipe, requirements, lo missing.append(r) if not execute_recipe: - logger.error('{} cannot be executed because the following recipes have not been executed or have result in unrecoverable errors: {}. '.format(recipe, missing)) + logger.error(f'{recipe} cannot be executed because the following recipes have not been executed or have resulted in unrecoverable errors: {missing}. 
') recipes_status[recipe] = sphere.ERROR - logger.debug('> execution requirements check for {}: {}'.format(recipe, execute_recipe)) + logger.debug(f'> execution requirements check for {recipe}: {execute_recipe}') return execute_recipe @@ -274,7 +274,7 @@ def compute_angles(frames_info, true_north, logger=_log): # TRUE_NORTH = -1.75 ± 0.08 # if len(instrument) != 1: - logger.error('Sequence is mixing different instruments: {0}'.format(instrument)) + logger.error(f'Sequence is mixing different instruments: {instrument}') return sphere.ERROR if instrument == 'IFS': instru_offset = -100.48 @@ -283,7 +283,7 @@ def compute_angles(frames_info, true_north, logger=_log): elif instrument == 'SPARTA': instru_offset = 0.0 else: - logger.error('Unkown instrument {0}'.format(instrument)) + logger.error(f'Unknown instrument {instrument}') return sphere.ERROR drot_mode = frames_info['INS4 DROT2 MODE'].unique() @@ -297,7 +297,7 @@ def compute_angles(frames_info, true_north, logger=_log): elif drot_mode == 'STAT': pupoff = -100.48 else: - logger.error('Unknown derotator mode {0}'.format(drot_mode)) + logger.error(f'Unknown derotator mode {drot_mode}') return sphere.ERROR frames_info['PUPIL OFFSET'] = pupoff + instru_offset @@ -329,7 +329,7 @@ def compute_bad_pixel_map(bpm_files, dtype=np.uint8, logger=_log): Combined bad pixel map ''' - logger.debug('> compute master bad pixel map from {} files'.format(len(bpm_files))) + logger.debug(f'> compute master bad pixel map from {len(bpm_files)} files') # get shape shape = fits.getdata(bpm_files[0]).shape @@ -414,7 +414,7 @@ def collapse_frames_info(finfo, fname, true_north, collapse_type, coadd_value=2, NDIT = len(finfo) NDIT_new = NDIT // coadd_value - logger.debug('> type=coadd: extract sub-groups of {} frames'.format(coadd_value)) + logger.debug(f'> type=coadd: extract sub-groups of {coadd_value} frames') index = pd.MultiIndex.from_arrays([np.full(NDIT_new, fname), np.arange(NDIT_new)], names=['FILE', 'IMG']) nfinfo = 
pd.DataFrame(columns=finfo.columns, index=index, dtype=np.float) @@ -439,7 +439,7 @@ def collapse_frames_info(finfo, fname, true_north, collapse_type, coadd_value=2, if ret == sphere.ERROR: return None else: - logger.error('Unknown collapse type {0}'.format(collapse_type)) + logger.error(f'Unknown collapse type {collapse_type}') return None return nfinfo @@ -543,7 +543,7 @@ def star_centers_from_PSF_img_cube(cube, wave, pixel, exclude_fraction=0.1, high img_centers = np.zeros((nwave, 2)) failed_centers = np.zeros(nwave, dtype=np.bool) for idx, (cwave, img) in enumerate(zip(wave, cube)): - logger.info(' ==> wave {0:2d}/{1:2d} ({2:4.0f} nm)'.format(idx+1, nwave, cwave)) + logger.info(f' ==> wave {idx + 1:2d}/{nwave:2d} ({cwave:4.0f} nm)') # remove any NaN img = np.nan_to_num(cube[idx]) @@ -707,7 +707,7 @@ def star_centers_from_PSF_lss_cube(cube, wave_cube, pixel, high_pass=False, box_ nimg = len(cube) psf_centers = np.full((1024, nimg), np.nan) for fidx, img in enumerate(cube): - logger.info(' ==> field {0:2d}/{1:2d}'.format(fidx+1, nimg)) + logger.info(f' ==> field {fidx + 1:2d}/{nimg:2d}') # remove any NaN img = np.nan_to_num(cube[fidx]) @@ -866,7 +866,7 @@ def star_centers_from_waffle_img_cube(cube_cen, wave, waffle_orientation, center spot_dist = np.zeros((nwave, 6)) img_centers = np.zeros((nwave, 2)) for idx, (wave, img) in enumerate(zip(wave, cube_cen)): - logger.info(' ==> wave {0:2d}/{1:2d} ({2:4.0f} nm)'.format(idx+1, nwave, wave)) + logger.info(f' ==> wave {idx + 1:2d}/{nwave:2d} ({wave:4.0f} nm)') # remove any NaN img = np.nan_to_num(img) @@ -1063,7 +1063,7 @@ def star_centers_from_waffle_lss_cube(cube_cen, cube_sci, wave_cube, center_gues spot_dist = np.full((1024, nimg), np.nan) img_centers = np.full((1024, nimg), np.nan) for fidx, img in enumerate(cube_cen): - logger.info(' ==> field {0:2d}/{1:2d}'.format(fidx+1, nimg)) + logger.info(f' ==> field {fidx + 1:2d}/{nimg:2d}') # remove any NaN img = np.nan_to_num(cube_cen[fidx]) diff --git 
a/sphere/transmission.py b/sphere/utils/transmission.py similarity index 95% rename from sphere/transmission.py rename to sphere/utils/transmission.py index a941206..de464e1 100644 --- a/sphere/transmission.py +++ b/sphere/utils/transmission.py @@ -125,7 +125,7 @@ def _load(type, name): # find file package_directory = os.path.dirname(os.path.abspath(__file__)) - filter_file = os.path.join(package_directory, 'data', 'SPHERE_CPI_ND.txt') + filter_file = os.path.join(package_directory, '../data/SPHERE_CPI_ND.txt') # load data ndf_all_tr = np.loadtxt(filter_file, unpack=False).T @@ -142,7 +142,7 @@ def _load(type, name): # find file package_directory = os.path.dirname(os.path.abspath(__file__)) - filter_file = os.path.join(package_directory, 'data', 'SPHERE_IRDIS_{0}.txt'.format(name)) + filter_file = os.path.join(package_directory, f'../data/SPHERE_IRDIS_{name}.txt') # load data cfw_tr = np.loadtxt(filter_file, unpack=False).T @@ -156,7 +156,7 @@ def _load(type, name): # find file package_directory = os.path.dirname(os.path.abspath(__file__)) - filter_file = os.path.join(package_directory, 'data', 'SPHERE_IRDIS_ND.txt') + filter_file = os.path.join(package_directory, '../data/SPHERE_IRDIS_ND.txt') # load data ird_ndf_tr = np.loadtxt(filter_file, unpack=False).T @@ -170,7 +170,7 @@ def _load(type, name): # find file package_directory = os.path.dirname(os.path.abspath(__file__)) - filter_file = os.path.join(package_directory, 'data', 'SPHERE_IRDIS_{0}.txt'.format(name)) + filter_file = os.path.join(package_directory, f'../data/SPHERE_IRDIS_{name}.txt') # load data dfw_tr_tmp = np.loadtxt(filter_file, unpack=False).T @@ -184,7 +184,7 @@ def _load(type, name): return transmissions[name] else: - raise ValueError('Unknown type {0}'.format(type)) + raise ValueError(f'Unknown type {type}') def irdis_nd(combination, nd_filter): @@ -255,11 +255,11 @@ def irdis_nd(combination, nd_filter): # check if combination exists setup = combinations.get(combination) if setup is None: -
raise ValueError('Unknown filter combination {0}'.format(combination)) + raise ValueError(f'Unknown filter combination {combination}') # check if ND filter exists if nd_filter not in ['OPEN', 'ND_1.0', 'ND_2.0', 'ND_3.5']: - raise ValueError('Unknown neutral density filter {0}'.format(nd_filter)) + raise ValueError(f'Unknown neutral density filter {nd_filter}') # setup ndf = nd_filter @@ -340,7 +340,7 @@ def transmission_nd(nd_filter, wave=None): # check if ND filter exists if nd_filter not in ['OPEN', 'ND_1.0', 'ND_2.0', 'ND_3.5']: - raise ValueError('Unknown neutral density filter {0}'.format(nd_filter)) + raise ValueError(f'Unknown neutral density filter {nd_filter}') ndf = nd_filter @@ -417,7 +417,7 @@ def transmission_filter(combination): # check if combination exists setup = combinations.get(combination) if setup is None: - raise ValueError('Unknown filter combination {0}'.format(combination)) + raise ValueError(f'Unknown filter combination {combination}') # setup cfw = setup['CFW'] @@ -510,7 +510,7 @@ def wavelength_bandwidth_filter(combination): setup = combinations.get(combination) if setup is None: - raise ValueError('Unknown filter combination {0}'.format(combination)) + raise ValueError(f'Unknown filter combination {combination}') wave = setup['Wavelength'] bandwidth = setup['Bandwidth']