From 777e9ebb68dfa18eed6cc5de8ecc6bff5e233f81 Mon Sep 17 00:00:00 2001
From: hposborn
Date: Tue, 9 Jul 2024 10:09:53 +0200
Subject: [PATCH] Making all functions have consistent names with
 lowercase/underscores

---
 MonoTools/fit.py        | 120 ++++++++++++++++-----------------
 MonoTools/lightcurve.py |  16 ++---
 MonoTools/search.py     |  12 ++--
 MonoTools/tools.py      | 142 ++++++++++++++++++++--------------
 4 files changed, 145 insertions(+), 145 deletions(-)

diff --git a/MonoTools/fit.py b/MonoTools/fit.py
index d713763..62cb6ef 100755
--- a/MonoTools/fit.py
+++ b/MonoTools/fit.py
@@ -94,9 +94,9 @@ def __init__(self, ID, mission, lc=None, rvs=None, planets=None, overwrite=False
         'fit_no_flatten':False,   # fit_no_flatten - bool - If no GP, by default we spline-flatten the lightcurve. Use fit_no_flatten to turn this off
         'constrain_LD':True,      # constrain_LD - bool - Use constrained LDs from model or unconstrained?
         'ld_mult':3.,             # ld_mult - float - How much to multiply theoretical LD param uncertainties
-        'useL2':False,            # useL2 - bool - Fit for "second light" (i.e. a binary or planet+blend)
+        'use_L2':False,           # use_L2 - bool - Fit for "second light" (i.e. a binary or planet+blend)
         'FeH':0.0,                # FeH - float - Stellar FeH
-        'LoadFromFile':False,     # LoadFromFile - bool - Load previous model?
+        'load_from_file':False,  # load_from_file - bool - Load previous model?
         'cut_distance':3.75,      # cut_distance - float - cut out points further than cut_distance*Tdur. 0.0 means no cutting
         'mask_distance': 0.666,   # Distance, in transit durations, from set transits, to "mask" as in-transit data when e.g. flattening.
         'force_match_input':None, # force_match_input - Float/None add potential with this the sigma between the input and the output logror and logdur to force MCMC to match the input duration & maximise logror [e.g. 0.1 = match to 1-sigma=10%]
@@ -134,10 +134,10 @@ def __init__(self, ID, mission, lc=None, rvs=None, planets=None, overwrite=False
         self.mission=mission
 
         #Initialising save locations
-        if self.LoadFromFile and not self.overwrite:
-            #Catching the case where the file doesn't exist:
-            success = self.LoadModelFromFile(loadfile=savefileloc)
-            self.LoadFromFile = success
+        if self.load_from_file and not self.overwrite:
+            #Catching the case where the file doesn't exist:
+            success = self.load_model_from_file(loadfile=savefileloc)
+            self.load_from_file = success
 
         if lc is None or type(lc) is not lightcurve.multilc:
             lc = lightcurve.multilc(ID,mission)
@@ -150,7 +150,7 @@ def __init__(self, ID, mission, lc=None, rvs=None, planets=None, overwrite=False
 
         assert ID is not None and mission is not None and lc is not None
 
-        if not self.LoadFromFile:
+        if not self.load_from_file:
             if rvs is not None:
                 self.add_rvs(rvs)
             #If we don't have a past model to load, we load the lightcurve and, if a "planets" dict was passed, initialise those:
@@ -162,17 +162,17 @@ def __init__(self, ID, mission, lc=None, rvs=None, planets=None, overwrite=False
                     self.add_planet(self, planets[pl]['orbit_flag'], planets[pl], pl)
         self.savefileloc=savefileloc
 
-    def LoadModelFromFile(self, loadfile=None):
+    def load_model_from_file(self, loadfile=None):
         """Load a monoModel object direct from file.
 
         Args:
-            loadfile (str, optional): File to load from, otherwise it takes the default location using `GetSavename`. Defaults to None.
+            loadfile (str, optional): File to load from, otherwise it takes the default location using `get_savename`. Defaults to None.
 
         Returns:
             bool: Whether the load is successful
         """
         if loadfile is None:
-            self.GetSavename(how='load')
+            self.get_savename(how='load')
             loadfile=self.savenames[0]+'_model.pickle'
             if self.debug: print(self.savenames)
@@ -195,16 +195,16 @@ def LoadModelFromFile(self, loadfile=None):
         else:
             return False
 
-    def SaveModelToFile(self, savefile=None, limit_size=False):
+    def save_model_to_file(self, savefile=None, limit_size=False):
         """Save a monoModel object direct to file.
 
         Args:
-            savefile (str, optional): File location to save to, otherwise it takes the default location using `GetSavename`. Defaults to None.
+            savefile (str, optional): File location to save to, otherwise it takes the default location using `get_savename`. Defaults to None.
             limit_size (bool, optional): If we want to limit size this function can delete unuseful hyperparameters before saving. Defaults to False.
         """
         if savefile is None:
             if not hasattr(self,'savenames'):
-                self.GetSavename(how='save')
+                self.get_savename(how='save')
             savefile=self.savenames[0]+'_model.pickle'
         if hasattr(self,'trace'):
             self.trace.to_netcdf(self.savenames[0]+'_trace.nc')
@@ -558,7 +558,7 @@ def compute_period_gaps(self,tcen,tdur,depth,max_per=1250,SNR_thresh=4, gap_widt
         if len(gaps)>0:
             #Looping from minimum distance from transit to gap, to maximum distance from transit to end-of-lc
             checkpers=np.arange(dist_from_t0[gaps[0]]-tdur,np.max(dist_from_t0)+tdur,tdur*0.166)
-            checkpers_ix=self.CheckPeriodsHaveGaps(checkpers,tdur,tcen,**kwargs).astype(int) #Seeing if each period has data coverage
+            checkpers_ix=self.check_periods_have_gaps(checkpers,tdur,tcen,**kwargs).astype(int) #Seeing if each period has data coverage
 
             #Creating an array of tuples which form start->end of specific gaps:
             starts=checkpers[:-1][np.diff(checkpers_ix)==1.0]
@@ -581,7 +581,7 @@ def make_phase(self,time,tcens,per):
         #print(ix,np.column_stack([(time-tc-per*0.5)%per-per*0.5 for tc in tcens])[ix,:])
         return np.hstack([(time-tcens-per*0.5)%per-per*0.5])
 
-    def CheckPeriodsHaveGaps(self,pers,tdur,tcen,tcen_2=None,tcen_3=None,match_trans_thresh=2.5,coverage_thresh=0.15,**kwargs):
+    def check_periods_have_gaps(self,pers,tdur,tcen,tcen_2=None,tcen_3=None,match_trans_thresh=2.5,coverage_thresh=0.15,**kwargs):
         """Checking a list of potential periods and seeing if periods are observed by counting the number of points in-transit
 
         Args:
@@ -660,9 +660,9 @@ def compute_period_aliases(self,pl_dic,dur=0.5,**kwargs):
         check_pers_ints = np.arange(1,np.ceil(pl_dic['period']/10),1.0)
         if 'tcen_3' in pl_dic:
             #Also need to check that the implied periods match the third period
-            check_pers_ix = self.CheckPeriodsHaveGaps(pl_dic['period']/check_pers_ints,pl_dic['tdur'],pl_dic['tcen'],tcen_2=pl_dic['tcen_2'],tcen_3=pl_dic['tcen_3'],**kwargs)
+            check_pers_ix = self.check_periods_have_gaps(pl_dic['period']/check_pers_ints,pl_dic['tdur'],pl_dic['tcen'],tcen_2=pl_dic['tcen_2'],tcen_3=pl_dic['tcen_3'],**kwargs)
         else:
-            check_pers_ix = self.CheckPeriodsHaveGaps(pl_dic['period']/check_pers_ints,pl_dic['tdur'],pl_dic['tcen'],tcen_2=pl_dic['tcen_2'],**kwargs)
+            check_pers_ix = self.check_periods_have_gaps(pl_dic['period']/check_pers_ints,pl_dic['tdur'],pl_dic['tcen'],tcen_2=pl_dic['tcen_2'],**kwargs)
 
         pl_dic['period_int_aliases']=check_pers_ints[check_pers_ix]
         if len(pl_dic['period_int_aliases'])==0:
@@ -916,7 +916,7 @@ def init_starpars(self,Rstar=None,Teff=None,logg=None,FeH=0.0,rhostar=None,Mstar
 
             self.Mstar=rhostar[0]*self.Rstar[0]**3
 
-    def GetSavename(self, how='load',overwrite=None):
+    def get_savename(self, how='load',overwrite=None):
         """Adds unique savename prefixes to class (self.savenames) with two formats:
         '[savefileloc]/[T/K]IC[11-number ID]_[20YY-MM-DD]_[n]...'
         '[savefileloc]/[T/K]IC[11-number ID]_[n]...'
@@ -1157,9 +1157,9 @@ def init_model(self, overwrite=False, **kwargs):
             train_GP (bool, optional): Train the lightcurve GP on out-of-transit data before sampling? Defaults to True
             constrain_LD (bool, optional): Use constrained LDs from model or unconstrained? Defaults to True
             ld_mult (float, optional): How much to multiply theoretical LD param uncertainties. Defaults to 3.
-            useL2 (bool, optional): Fit for "second light" (i.e. a binary or planet+blend). Defaults to False
+            use_L2 (bool, optional): Fit for "second light" (i.e. a binary or planet+blend). Defaults to False
             FeH (float, optional): Stellar FeH. Defaults to 0.0
-            LoadFromFile (bool, optional): Load previous model? Defaults to False
+            load_from_file (bool, optional): Load previous model? Defaults to False
             cut_distance (float, optional): cut out points further than cut_distance*Tdur. 0.0 means no cutting. Defaults to 3.75
             mask_distance (float, optional): Distance, in transit durations, from set transits, to "mask" as in-transit data when e.g. flattening. Defaults to 0.666
@@ -1415,7 +1415,7 @@ def init_pymc(self,ld_mult=1.5):
         # The 2nd light (not third light as companion light is not modelled)
         # This quantity is in delta-mag
         unq_missions = np.unique([cad.split('_')[0] for cad in self.cads_short])
-        if self.useL2:
+        if self.use_L2:
             deltamag_contam = {mis:pm.Uniform("deltamag_contam_"+mis, lower=-10.0, upper=10.0) for mis in unq_missions}
             mult = {mis:pm.Deterministic("mult_"+mis,(1+pm.math.power(2.511,-1*deltamag_contam[mis]))) for mis in unq_missions} #Factor to multiply normalised lightcurve by
         else:
@@ -1551,12 +1551,12 @@ def init_pymc(self,ld_mult=1.5):
                     logrors[pl]=pm.TruncatedNormal("logror_"+pl, mu=np.tile(np.log(self.planets[pl]['ror']),self.n_margs[pl]),
                                                    sigma=np.tile(1.0,self.n_margs[pl]),
-                                                   lower=np.log(0.001), upper=np.log(0.25+int(self.useL2)),
+                                                   lower=np.log(0.001), upper=np.log(0.25+int(self.use_L2)),
                                                    initval=np.tile(np.log(self.planets[pl]['ror']),self.n_margs[pl]),
                                                    shape=self.n_margs[pl])
                 else:
                     logrors[pl]=pm.TruncatedNormal("logror_"+pl,mu=np.log(self.planets[pl]['ror']), sigma=0.75,
-                                                   lower=np.log(0.001), upper=np.log(0.25+int(self.useL2)),
+                                                   lower=np.log(0.001), upper=np.log(0.25+int(self.use_L2)),
                                                    initval=np.log(self.planets[pl]['ror']))
                 rors[pl]=pm.Deterministic("ror_"+pl,pm.math.exp(logrors[pl]))
                 rpls[pl]=pm.Deterministic("rpl_"+pl,109.2*rors[pl]*Rs)
@@ -1737,7 +1737,7 @@ def init_pymc(self,ld_mult=1.5):
 
         #Single mission
         if np.any([c[:2]=='ts' for c in self.cads_short]) and self.constrain_LD:
-            ld_dists=self.getLDs(n_samples=1200,mission='tess')
+            ld_dists=self.get_lds(n_samples=1200,mission='tess')
             u_star_tess = pm.TruncatedNormal("u_star_tess", mu=np.nanmedian(ld_dists,axis=0),
                                              sigma=np.clip(ld_mult*np.nanstd(ld_dists,axis=0),0.1,1.0), shape=2,
         elif np.any([c[:2]=='ts' for c in self.cads_short]) and not self.constrain_LD:
             u_star_tess = xo.distributions.QuadLimbDark("u_star_tess", initval=np.array([0.3, 0.2]))
         if np.any([(c[:2]=='k1')|(c[:2]=='k2') for c in self.cads_short]) and self.constrain_LD:
-            ld_dists=self.getLDs(n_samples=3000,mission='kepler')
+            ld_dists=self.get_lds(n_samples=3000,mission='kepler')
             if self.debug: print("LDs",ld_dists)
             u_star_kep = pm.TruncatedNormal("u_star_kep", mu=np.nanmedian(ld_dists,axis=0),
                                             sigma=np.clip(ld_mult*np.nanstd(ld_dists,axis=0),0.1,1.0),
@@ -1753,14 +1753,14 @@ def init_pymc(self,ld_mult=1.5):
         elif np.any([(c[:2]=='k1')|(c[:2]=='k2') for c in self.cads_short]) and not self.constrain_LD:
             u_star_kep = xo.distributions.QuadLimbDark("u_star_kep", initval=np.array([0.3, 0.2]))
         if np.any([c[:2]=='co' for c in self.cads_short]) and self.constrain_LD:
-            ld_dists=self.getLDs(n_samples=1200,mission='corot')
+            ld_dists=self.get_lds(n_samples=1200,mission='corot')
             u_star_corot = pm.TruncatedNormal("u_star_corot", mu=np.nanmedian(ld_dists,axis=0),
                                               sigma=np.clip(ld_mult*np.nanstd(ld_dists,axis=0),0.1,1.0), shape=2,
                                               lower=0.0, upper=1.0, initval=np.clip(np.nanmedian(ld_dists,axis=0),0,1))
         elif np.any([c[:2]=='co' for c in self.cads_short]) and not self.constrain_LD:
             u_star_corot = xo.distributions.QuadLimbDark("u_star_corot", initval=np.array([0.3, 0.2]))
         if np.any([c[:2]=='ch' for c in self.cads_short]) and self.constrain_LD:
-            ld_dists=self.getLDs(n_samples=1200,mission='cheops')
+            ld_dists=self.get_lds(n_samples=1200,mission='cheops')
             u_star_cheops = pm.TruncatedNormal("u_star_cheops", mu=np.nanmedian(ld_dists,axis=0),
                                                sigma=np.clip(ld_mult*np.nanstd(ld_dists,axis=0),0.1,1.0), shape=2,
@@ -2633,7 +2633,7 @@ def create_orbit(pl, Rs, rho_S, pers, t0s, bs, n_marg=1, eccs=None, omegas=None)
         self.model = model
         self.init_soln = map_soln
 
-    def SampleModel(self, n_draws=500, n_burn_in=None, overwrite=False, continue_sampling=False, n_chains=4, **kwargs):
+    def sample_model(self, n_draws=500, n_burn_in=None, overwrite=False, continue_sampling=False, n_chains=4, **kwargs):
         """Run PyMC sampler
 
         Args:
@@ -2648,7 +2648,7 @@ def SampleModel(self, n_draws=500, n_burn_in=None, overwrite=False, continue_sam
         # self.init_lc()
 
         if not overwrite:
-            self.LoadPickle()
+            self.load_pickle()
             if hasattr(self,'trace') and self.debug:
                 print("LOADED MCMC")
@@ -2672,8 +2672,8 @@ def SampleModel(self, n_draws=500, n_burn_in=None, overwrite=False, continue_sam
                 self.trace = pm.sample(tune=n_burn_in, draws=n_draws, start=self.init_soln, chains=n_chains, compute_convergence_checks=False)#, **kwargs)
                 self.trace=az.extract(self.trace)
             #Saving both the class and a pandas dataframe of output data.
-            self.SaveModelToFile()
-            _=self.MakeTable(save=True)
+            self.save_model_to_file()
+            _=self.make_table(save=True)
         elif not (hasattr(self,'trace') or hasattr(self,'trace_df')):
             print("Trace or trace df exists...")
@@ -2681,11 +2681,11 @@ def Table(self):
         """AI is creating summary for Table
         """
-        if LoadFromFile and not self.overwrite and os.path.exists(self.savenames[0]+'_results.txt'):
+        if self.load_from_file and not self.overwrite and os.path.exists(self.savenames[0]+'_results.txt'):
             with open(self.savenames[0]+'_results.txt', 'r', encoding='UTF-8') as file:
                 restable = file.read()
         else:
-            restable=self.ToLatexTable(trace, ID, mission=mission, varnames=None,order='columns',
+            restable=self.to_latex_table(trace, ID, mission=mission, varnames=None,order='columns',
                                        savename=self.savenames[0]+'_results.txt', overwrite=False,
                                        savefileloc=None, tracemask=tracemask)
         '''
@@ -2758,7 +2758,7 @@ def init_gp_to_plot(self, n_samp=7, max_gp_len=12000, interp=True, newgp=False,
         elif newgp:
             for key in self.lc_regions['limits']:
                 #Only creating out-of-transit GP for the binned (e.g. 30min) data
-                cutBools = tools.cutLc(self.lc.time[self.lc_regions[key]['ix']],max_gp_len,
+                cutBools = tools.cut_lc(self.lc.time[self.lc_regions[key]['ix']],max_gp_len,
                                        transit_mask=~self.lc.in_trans['all'][self.lc_regions[key]['ix']])
 
                 limit_mask_bool[n]={}
@@ -2816,7 +2816,7 @@ def init_gp_to_plot(self, n_samp=7, max_gp_len=12000, interp=True, newgp=False,
             #Doing multiple samples and making percentiles:
             for key in self.lc_regions:
                 #Need to break up the lightcurve even further to avoid GP burning memory:
-                cutBools = tools.cutLc(self.lc.time[self.lc_regions[key]['ix']],max_gp_len,
+                cutBools = tools.cut_lc(self.lc.time[self.lc_regions[key]['ix']],max_gp_len,
                                        transit_mask=~self.lc.in_trans['all'][self.lc_regions[key]['ix']])
                 i_kernel = pymc_terms.SHOTerm(S0=self.meds['phot_S0'], w0=self.meds['phot_w0'], Q=1/np.sqrt(2))
                 i_gp = celerite2.pymc.GaussianProcess(i_kernel, mean=self.meds['phot_mean'])
@@ -3157,7 +3157,7 @@ def init_plot(self, interactive=False, gap_thresh=10, plottype='lc',pointcol='k'
         plt.rcParams["axes.linewidth"] = 0.75
 
         if not hasattr(self,'savenames'):
-            self.GetSavename(how='save')
+            self.get_savename(how='save')
         #Making sure lc is binned to 30mins
         if plottype=='lc':
             if plot_flat:
@@ -3216,7 +3216,7 @@ def init_plot(self, interactive=False, gap_thresh=10, plottype='lc',pointcol='k'
         #
         #elif plottype=='rv':
 
-    def PlotRVs(self, interactive=False, plot_alias='best', nbest=4, n_samp=300, overwrite=False, return_fig=False, plot_resids=False,
+    def plot_RVs(self, interactive=False, plot_alias='best', nbest=4, n_samp=300, overwrite=False, return_fig=False, plot_resids=False,
                 plot_loc=None, palette=None, pointcol='k', plottype='png',raster=False, nmargtoplot=0, save=True,**kwargs):
         """Varied plotting function for RVs of MonoTransit model
@@ -3678,7 +3678,7 @@ def PlotRVs(self, interactive=False, plot_alias='best', nbest=4, n_samp=300, ove
         '''
 
-    def Plot(self, interactive=False, n_samp=None, overwrite=False, interp=True, newgp=False, return_fig=False, max_gp_len=20000, n_intrans_bins=15,
+    def plot(self, interactive=False, n_samp=None, overwrite=False, interp=True, newgp=False, return_fig=False, max_gp_len=20000, n_intrans_bins=15,
              save=True, plot_loc=None, palette=None, plot_flat=False, pointcol="k", plottype='png',plot_rows=None, ylim=None, xlim=None, **kwargs):
         """Varied photometric plotting function for MonoTransit model
@@ -4442,7 +4442,7 @@ def Plot(self, interactive=False, n_samp=None, overwrite=False, interp=True, new
         if return_fig:
             return fig
 
-    def PlotPeriods(self, plot_loc=None, ylog=True, xlog=True, nbins=25,
+    def plot_periods(self, plot_loc=None, ylog=True, xlog=True, nbins=25,
                     pmax=None, pmin=None, ymin=None,ymax=None,extra_factor=1):
         """Plot Marginalised probabilities of the possible periods
@@ -4587,7 +4587,7 @@ def PlotPeriods(self, plot_loc=None, ylog=True, xlog=True, nbins=25,
         else:
             plt.savefig(plot_loc)
 
-    def PlotCorner(self,corner_vars=None,use_marg=True,truths=None):
+    def plot_corner(self,corner_vars=None,use_marg=True,truths=None):
         """Create Corner plot for MCMC samples
 
         Args:
@@ -4692,7 +4692,7 @@ def PlotCorner(self,corner_vars=None,use_marg=True,truths=None):
 
             fig.savefig(self.savenames[0]+'_corner.pdf')#,dpi=400,rasterized=True)
 
-    def MakeTable(self,short=True,save=True,cols=['all']):
+    def make_table(self,short=True,save=True,cols=['all']):
         """Make table from MCMC Samples
 
         Args:
@@ -4735,7 +4735,7 @@ def MakeTable(self,short=True,save=True,cols=['all']):
             df.to_csv(self.savenames[0]+'_mcmc_output.csv')
         return df
 
-    def CheopsPlanetPropertiesTable(self,planet=None):
+    def cheops_planet_properties_table(self,planet=None):
         """Create output compatible with the Cheops "PlanetPropertiesTable". Not yet implemented
 
         Args:
@@ -4743,7 +4743,7 @@ def CheopsPlanetPropertiesTable(self,planet=None):
         """
         "target,gaia_id,planet_id,T0,e_T0,P,e_P,ecosw,e_ecosw,esinw,e_esinw,D,e_D,W,e_W,K,e_K"
 
-    def PlotTable(self,plot_loc=None,return_table=False):
+    def plot_table(self,plot_loc=None,return_table=False):
         """Plot table as PDF (i.e. to assemble PDF report)
 
         Args:
             return_table (bool, optional): Return DF figure? Defaults to False.
 
         Returns:
-            pandas DataFrame: Dataframe of parameters and specific parameters (output from `MakeTable`)
+            pandas DataFrame: Dataframe of parameters and specific parameters (output from `make_table`)
         """
-        df = self.MakeTable(short=True)
+        df = self.make_table(short=True)
 
         # Making table a plot for PDF:
         fig=plt.figure(figsize=(11.69,8.27))
@@ -4783,7 +4783,7 @@
 
 
-    def LoadPickle(self, loadname=None):
+    def load_pickle(self, loadname=None):
         """Load data from saved pickle
 
         Args:
@@ -4808,7 +4808,7 @@ def LoadPickle(self, loadname=None):
             else:
                 self.trace=loaded
         if not hasattr(self, 'savenames') or self.savenames is None:
-            self.GetSavename(how='load')
+            self.get_savename(how='load')
         #print(self.savenames, self.savenames is None)
         #[0]+'_mcmc.pickle',os.path.exists(self.savenames[0]+'_mcmc.pickle'))
         if os.path.exists(self.savenames[0]+'_mcmc.pickle'):
@@ -4829,7 +4829,7 @@ def LoadPickle(self, loadname=None):
             else:
                 self.trace=loaded
 
-    def PredictFutureTransits(self, time_start=None, time_end=None, time_dur=180, include_multis=True,
+    def predict_future_transits(self, time_start=None, time_end=None, time_dur=180, include_multis=True,
                               save=True, compute_solsys_dist=True, check_TESS=True):
         """Return a dataframe of potential transits of all Duo candidates between time_start & time_end dates.
@@ -4846,7 +4846,7 @@ def PredictFutureTransits(self, time_start=None, time_end=None, time_dur=180, in
 
         Example:
            # e.g. after running model.sample_model():
-            df = model.PredictFutureTransits(Time('2021-06-01T00:00:00.000',format='isot'),Time('2021-10-01T00:00:00.000',format='isot'))
+            df = model.predict_future_transits(Time('2021-06-01T00:00:00.000',format='isot'),Time('2021-10-01T00:00:00.000',format='isot'))
         """
         from astropy.time import Time
@@ -4882,7 +4882,7 @@ def PredictFutureTransits(self, time_start=None, time_end=None, time_dur=180, in
 
         if check_TESS:
-            sect_start_ends=self.CheckTESS()
+            sect_start_ends=self.check_TESS()
         all_trans_fin=pd.DataFrame()
         loopplanets = self.duos+self.trios+self.multis if include_multis else self.duos+self.trios
@@ -4982,7 +4982,7 @@ def PredictFutureTransits(self, time_start=None, time_end=None, time_dur=180, in
             all_trans_fin.to_csv(self.savenames[0]+"_list_all_trans.csv")
         return all_trans_fin
 
-    def CheckTESS(self,**kwargs):
+    def check_TESS(self,**kwargs):
         """Returns time frames in the future when TESS is observing
         """
         import importlib
@@ -4996,14 +4996,14 @@
         return np.column_stack((midtimes-0.5*sectdiffs[future_sect_ix]+0.2,midtimes+0.5*sectdiffs[future_sect_ix]-0.2))
         #Now we have sector start & end times, let's check which future transit will be TESS observed:
 
-    def CheopsRMS(self, Gmag, tdur):
+    def cheops_RMS(self, Gmag, tdur):
         #RMS polynomial fits for 3 hour durations:
         rms_brightfit = np.array([ 2.49847572, -6.41232409])
         rms_faintfit = np.array([ 30.2599025 , -256.41381477])
         rms = np.max([np.polyval(rms_faintfit,Gmag),np.polyval(rms_brightfit,Gmag)])
         return rms/np.sqrt(tdur/0.125)
 
-    def MakeCheopsOR(self, DR2ID=None, pl=None, min_eff=45, oot_min_orbits=1.0, timing_sigma=3, t_start=None, t_end=None, Texp=None,
+    def make_cheops_OR(self, DR2ID=None, pl=None, min_eff=45, oot_min_orbits=1.0, timing_sigma=3, t_start=None, t_end=None, Texp=None,
                        max_orbits=14, min_pretrans_orbits=0.5, min_intrans_orbits=None, orbits_flex=1.4,
                        observe_sigma=2, observe_threshold=None, max_ORs=None,prio_1_threshold=0.25, prio_3_threshold=0.0,
                        targetnamestring=None, min_orbits=4.0, outfilesuffix='_output_ORs.csv',avoid_TESS=True,pre_post_TESS="pre"):
             pre_post_TESS (str, optional): If we are avoiding TESS, should we create the pre-TESS ORs, or the post-TESS ORs? Defaults to "pre"
 
         Returns:
            df = model.predict_future_transits: pandas DF to save as csv in location where one can run make_xml_files. e.g. `make_xml_files output.csv --auto-expose -f`
        """
 
        #radec, SpTy, Vmag, e_Vmag,
@@ -5096,11 +5096,11 @@ def MakeCheopsOR(self, DR2ID=None, pl=None, min_eff=45, oot_min_orbits=1.0, timi
             t_end = next_vernal-(old_radec.ra.deg/360)*365.25+60
 
         #We always need an array of all possible transits/aliases also saved to file to check:
-        all_trans = self.PredictFutureTransits(t_start-self.lc.jd_base,t_end-self.lc.jd_base, check_TESS=avoid_TESS)
+        all_trans = self.predict_future_transits(t_start-self.lc.jd_base,t_end-self.lc.jd_base, check_TESS=avoid_TESS)
         all_trans.to_csv(self.savenames[0]+outfilesuffix.replace("_ORs","").replace(".csv","_list_all_trans.csv"))
 
         if not hasattr(self,'savenames'):
-            self.GetSavename(how='save')
+            self.get_savename(how='save')
 
         if avoid_TESS and np.any(all_trans['in_TESS']) and pre_post_TESS=="pre":
             t_end=2457000+np.min(all_trans.loc[all_trans['in_TESS'],"transit_mid_med"].values)-0.5
@@ -5140,8 +5140,8 @@ def MakeCheopsOR(self, DR2ID=None, pl=None, min_eff=45, oot_min_orbits=1.0, timi
                 observe_threshold=np.sort(allprobs)[::-1][max_ORs]
 
         depth=1e6*np.nanmedian(self.trace['ror_'+ipl])**2
-        print("SNR for whole transit is: ",depth/self.CheopsRMS(gaiainfo['phot_g_mean_mag'], np.nanmedian(self.trace['tdur_'+ipl])))
-        print("SNR for single orbit in/egress is: ",depth/self.CheopsRMS(gaiainfo['phot_g_mean_mag'], 0.5*98/1440))
+        print("SNR for whole transit is: ",depth/self.cheops_RMS(gaiainfo['phot_g_mean_mag'], np.nanmedian(self.trace['tdur_'+ipl])))
+        print("SNR for single orbit in/egress is: ",depth/self.cheops_RMS(gaiainfo['phot_g_mean_mag'], 0.5*98/1440))
 
         prio_1_prob_threshold = np.ceil(np.sum(allprobs>observe_threshold)*prio_1_threshold)
         prio_3_prob_threshold = np.ceil(np.sum(allprobs>observe_threshold)*(1-prio_3_threshold))
@@ -5255,7 +5255,7 @@ def MakeCheopsOR(self, DR2ID=None, pl=None, min_eff=45, oot_min_orbits=1.0, timi
         print("Run the following command in a terminal to generate ORs:\n\""+command+"\"")
         return out_tab
 
-    def getLDs(self,n_samples,mission='tess',how='2'):
+    def get_lds(self,n_samples,mission='tess',how='2'):
         """Gets theoretical quadratic Limb Darkening parameters for any specified mission. This is done by first interpolating the theoretical samples (e.g. Claret) onto Teff and logg axes. FeH is typically fixed to the closest value. Then, using stellar samples from normally-distributed Teff and logg, a distribution of values for each LD parameter is retrieved.
@@ -5370,7 +5370,7 @@ def vals_to_latex(self, vals):
         except:
             return " - "
 
-    def ToLatexTable(self,varnames='all',order='columns'):
+    def to_latex_table(self,varnames='all',order='columns'):
         """Creating a Latex table for specific parameters
 
         Args:
@@ -5383,7 +5383,7 @@ def ToLatexTable(self,varnames='all',order='columns'):
         #Plotting corner of the parameters to see correlations
         print("Making Latex Table")
         if not hasattr(self,'savenames'):
-            self.GetSavename(how='save')
+            self.get_savename(how='save')
         if self.tracemask is None:
             self.tracemask=np.tile(True,len(self.trace['Rs']))
         if varnames is None or varnames == 'all':
diff --git a/MonoTools/lightcurve.py b/MonoTools/lightcurve.py
index 65b5763..1fcc13a 100755
--- a/MonoTools/lightcurve.py
+++ b/MonoTools/lightcurve.py
@@ -259,7 +259,7 @@ def make_fluxmask(self,flux_arr_name='flux',cut_all_anom_lim=5.0,end_of_orbit=Tr
         #For corot cadences, we'll cut regions from the SAA
         for corotcad in [cad for cad in self.cadence_list if 'co_' in cad]:
             ix=np.in1d(self.cadence,corotcad)
-            self.flux_mask[ix] = tools.CutHighRegions(self.flux[ix],self.flux_mask[ix],std_thresh=4.5,n_pts=25,n_loops=2)
+            self.flux_mask[ix] = tools.cut_high_regions(self.flux[ix],self.flux_mask[ix],std_thresh=4.5,n_pts=25,n_loops=2)
 
         if np.sum(self.flux_mask)>0:
             # & (lc[prefix+'flux'+suffix]>0.0)
@@ -294,12 +294,12 @@ def make_fluxmask(self,flux_arr_name='flux',cut_all_anom_lim=5.0,end_of_orbit=Tr
             stack_shitfed_flux=np.column_stack([getattr(self,flux_arr_name)[self.flux_mask][n:(-20+n)] for n in range(20)])
             self.flux_mask[self.flux_mask][10:-10]=abs(getattr(self,flux_arr_name)[self.flux_mask][10:-10] - np.nanmedian(stack_shitfed_flux,axis=1))<cut_all_anom_lim*np.nanstd(stack_shitfed_flux,axis=1)
@@ -2108,15 +2108,15 @@ def interactive_plot(self, plot_rows=None, timeseries=['flux'], ylim=None, xlim=
             rowlens+=[len(rows[i])]
             if len(rowlens)>2:
                 if type(include_table)==str and include_table=='tic':
-                    tab = tools.MakeBokehTable(self.all_ids['tess']['data'],dftype='tic',width=160,height=int(plot_height/self.init_plot_info['plot_rows']),**kwargs)
+                    tab = tools.make_bokeh_table(self.all_ids['tess']['data'],dftype='tic',width=160,height=int(plot_height/self.init_plot_info['plot_rows']),**kwargs)
                 elif type(include_table) in [pd.Series,pd.DataFrame]:
-                    tab = tools.MakeBokehTable(include_table,dftype=None,width=160,height=int(plot_height/self.init_plot_info['plot_rows']),**kwargs)
+                    tab = tools.make_bokeh_table(include_table,dftype=None,width=160,height=int(plot_height/self.init_plot_info['plot_rows']),**kwargs)
                 rows[np.argmin(rowlens)]=[tab]+rows[np.argmin(rowlens)]
             else:
                 if type(include_table)==str and include_table=='tic':
-                    tab = tools.MakeBokehTable(self.all_ids['tess']['data'],dftype='tic',width=plot_width, height=int(plot_height/self.init_plot_info['plot_rows']), **kwargs)
+                    tab = tools.make_bokeh_table(self.all_ids['tess']['data'],dftype='tic',width=plot_width, height=int(plot_height/self.init_plot_info['plot_rows']), **kwargs)
                 elif type(include_table) in [pd.Series,pd.DataFrame]:
-                    tab = tools.MakeBokehTable(include_table, width=plot_width, dftype=None, height=int(plot_height/self.init_plot_info['plot_rows']), **kwargs)
+                    tab = tools.make_bokeh_table(include_table, width=plot_width, dftype=None, height=int(plot_height/self.init_plot_info['plot_rows']), **kwargs)
                 rows=[[tab]]+rows
         p = layout(rows, sizing_mode='stretch_both')
         save(p)
diff --git a/MonoTools/search.py b/MonoTools/search.py
index 4c10fd6..f98140a 100755
--- a/MonoTools/search.py
+++ b/MonoTools/search.py
@@ -494,7 +494,7 @@ def init_mono(self,tcen,tdur,depth,name=None,otherinfo=None, **kwargs):
             self.detns[name].update({col:otherinfo[col] for col in otherinfo.index if col not in self.detns[name]})
 
         self.monos+=[name]
-        self.QuickMonoFit(name, ndurs=4.5, **kwargs)
+        self.quick_mono_fit(name, ndurs=4.5, **kwargs)
 
     def init_multi(self,tcen,tdur,depth,period,name=None,otherinfo=None,**kwargs):
         """Initialise multi-transiting candidate
@@ -515,7 +515,7 @@ def init_multi(self,tcen,tdur,depth,period,name=None,otherinfo=None,**kwargs):
         self.multis+=[name]
 
         #Fitting:
-        self.QuickMonoFit(name,ndurs=4.5,fluxindex='flux_flat',fit_poly=False,**kwargs)
+        self.quick_mono_fit(name,ndurs=4.5,fluxindex='flux_flat',fit_poly=False,**kwargs)
 
     def init_duo(self,tcen,tcen2,tdur,depth,period,name=None,otherinfo=None,**kwargs):
         """Initialise duo-transit candidate
@@ -537,7 +537,7 @@ def init_duo(self,tcen,tcen2,tdur,depth,period,name=None,otherinfo=None,**kwargs
         self.duos+=[name]
 
         #Fitting:
-        self.QuickMonoFit(name,ndurs=4.5,fluxindex='flux_flat',fit_poly=False,**kwargs)
+        self.quick_mono_fit(name,ndurs=4.5,fluxindex='flux_flat',fit_poly=False,**kwargs)
 
     def remove_detn(self,name):
         """Remove candidate detection given the name.
@@ -978,7 +978,7 @@ def search_multi_planets(self, fluxname='bin_flux_flat', binsize=15/1440.0, n_se
             ires=self.run_BLS(modx,mody,modyerr,max_period=p_max,min_period=1.1)
             self.multi_results+=[ires[0]]
             self.multi_power_spectra[planet_name]=ires[1]
-            #anommask *= tools.CutAnomDiff(mody)
+            #anommask *= tools.cut_anom_diff(mody)
             #print(n_pl,"norm_mask:",np.sum(self.lc.mask),"anoms:",np.sum(anommask),"pl mask",np.sum(plmask),"total len",len(anommask))
             #print(results[-1])
             print(self.multi_results[-1])
@@ -1169,7 +1169,7 @@ def plot_multi_search(self,plot_loc=None,plot_extent=0.8):
 
             fig.savefig(plot_loc, dpi=400)
 
-    def QuickMonoFit(self, planet, useL2=False, fit_poly=True, tdur_prior='loguniform', sample_model=True,
+    def quick_mono_fit(self, planet, useL2=False, fit_poly=True, tdur_prior='loguniform', sample_model=True,
                      polyorder=3, ndurs=3.3, fluxindex='flux', mask=None, **kwargs):
         """Performs simple planet fit to monotransit dip given the detection data.
@@ -1249,7 +1249,7 @@ def QuickMonoFit(self, planet, useL2=False, fit_poly=True, tdur_prior='logunifor
             mean = pm.Normal("mean", mu=0.0, sd=3*np.nanstd(y))
             flux_trend = mean
 
-            u_star = tools.getLDs(self.Teff['val'])[0]
+            u_star = tools.get_lds(self.Teff['val'])[0]
             #xo.distributions.QuadLimbDark("u_star")
 
             rhostar=self.Mstar['val']/self.Rstar['val']**3
diff --git a/MonoTools/tools.py b/MonoTools/tools.py
index 2debf7e..aad5e8e 100755
--- a/MonoTools/tools.py
+++ b/MonoTools/tools.py
@@ -52,7 +52,7 @@
 
 #goto='/Users/hosborn' if 'Users' in os.path.dirname(os.path.realpath(__file__)).split('/') else '/home/hosborn'
 
-def openFits(f,fname,mission,cut_all_anom_lim=4.0,use_ppt=True,force_raw_flux=False,end_of_orbit=False,mask=None,**kwargs):
+def open_fits(f,fname,mission,cut_all_anom_lim=4.0,use_ppt=True,force_raw_flux=False,end_of_orbit=False,mask=None,**kwargs):
     """opens and processes all lightcurve files (especially, but not only, fits files).
     Args:
@@ -152,7 +152,7 @@ def openFits(f,fname,mission,cut_all_anom_lim=4.0,use_ppt=True,force_raw_flux=Fa
             lc['flux_err'][(lc['flux_err']>0.0)*(~np.isnan(lc['flux_err']))]=mederr
             lc['flux_err']/=np.nanmedian(lc['flux'])
             lc['flux']/=np.nanmedian(lc['flux'])
-            lc['mask']=CutHighRegions(lc['flux'],np.isfinite(lc['flux']),std_thresh=4.5,n_pts=25,n_loops=2)
+            lc['mask']=cut_high_regions(lc['flux'],np.isfinite(lc['flux']),std_thresh=4.5,n_pts=25,n_loops=2)
     elif type(f).__name__=='TessLightCurve':
         import lightkurve
         lc={'time':f.time,'flux':f.flux,'flux_err':f.flux_err,'quality':f.quality,
@@ -225,7 +225,7 @@ def magerr2flux(magerrs,mags):
         lc['detrended_flux']=lc.pop('flux')
         lc['flux']=lc['raw_flux'][:]
 
-    lc['mask']=maskLc(lc,fname,cut_all_anom_lim=cut_all_anom_lim,use_ppt=use_ppt,end_of_orbit=end_of_orbit,input_mask=mask)
+    lc['mask']=mask_lc(lc,fname,cut_all_anom_lim=cut_all_anom_lim,use_ppt=use_ppt,end_of_orbit=end_of_orbit,input_mask=mask)
 
     #Including the cadence in the lightcurve as ["t2","t30","k1","k30"] mission letter + cadence
     lc['cadence']=np.tile(mission[0]+str(np.round(np.nanmedian(np.diff(lc['time']))*1440).astype(int)),len(lc['time']))
@@ -263,7 +263,7 @@ def find_time_regions(time,split_gap_size=1.5,min_region_dur=0.25,**kwargs):
     else:
         return [(np.nanmin(time),np.nanmax(time))]
 
-def maskLc(lc,fhead,cut_all_anom_lim=5.0,use_ppt=False,end_of_orbit=True,mask=None,
+def mask_lc(lc,fhead,cut_all_anom_lim=5.0,use_ppt=False,end_of_orbit=True,mask=None,
            use_binned=False,use_flat=False,mask_islands=True,input_mask=None,**kwargs):
 
     # Mask bad data (nans, infs and negatives)
@@ -295,12 +295,12 @@ def maskLc(lc,fhead,cut_all_anom_lim=5.0,use_ppt=False,end_of_orbit=True,mask=No
         stack_shitfed_flux=np.column_stack([lc[prefix+'flux'+suffix][mask][n:(-20+n)] for n in range(20)])
         mask[mask][10:-10]=abs(lc[prefix+'flux'+suffix][mask][10:-10] - np.nanmedian(stack_shitfed_flux,axis=1))<cut_all_anom_lim*np.nanstd(stack_shitfed_flux,axis=1)
     if camp=='et' or camp=='E' or camp=='e':
@@ -436,11 +436,11 @@ def openVand(epic,camp,v=1,use_ppt=True,**kwargs):
             print("Vanderburg LC at ",url1)
             if requests.get(url1, timeout=600).status_code==200:
                 with fits.open(url1,show_progress=False) as hdus:
-                    lcvand+=[openFits(hdus,url1,mission='k2',use_ppt=use_ppt,**kwargs)]
+                    lcvand+=[open_fits(hdus,url1,mission='k2',use_ppt=use_ppt,**kwargs)]
             url2='http://archive.stsci.edu/missions/hlsp/k2sff/c'+str(int(camp))+'2/'+str(epic)[:4]+'00000/'+str(epic)[4:]+'/hlsp_k2sff_k2_lightcurve_'+str(epic)+'-c'+str(int(camp))+'2_kepler_v1_llc.fits'
             if requests.get(url2, timeout=600).status_code==200:
                 with fits.open(url2,show_progress=False) as hdus:
-                    lcvand+=[openFits(hdus,url2,mission='k2',use_ppt=use_ppt)]
+                    lcvand+=[open_fits(hdus,url2,mission='k2',use_ppt=use_ppt)]
         elif camp=='e':
             print("Engineering data")
             #https://www.cfa.harvard.edu/~avanderb/k2/ep60023342alldiagnostics.csv
             lc={'time':df['BJD - 2454833'].values,
                 'flux':df[' Corrected Flux'].values,
                 'flux_err':np.tile(np.median(abs(np.diff(df[' Corrected Flux'].values))),df.shape[0])}
-            lcvand+=[openFits(lc,url,mission='k2',use_ppt=use_ppt,**kwargs)]
+            lcvand+=[open_fits(lc,url,mission='k2',use_ppt=use_ppt,**kwargs)]
         else:
             urlfitsname='http://archive.stsci.edu/missions/hlsp/k2sff/c'+str(camp)+'/'+str(epic)[:4]+'00000/'+str(epic)[4:]+'/hlsp_k2sff_k2_lightcurve_'+str(epic)+'-c'+str(camp)+'_kepler_v'+str(int(v))+'_llc.fits'.replace(' ','')
             if requests.get(urlfitsname, timeout=600).status_code==200:
                 with fits.open(urlfitsname,show_progress=False) as hdus:
-                    lcvand+=[openFits(hdus,urlfitsname,mission='k2',use_ppt=use_ppt,**kwargs)]
+                    lcvand+=[open_fits(hdus,urlfitsname,mission='k2',use_ppt=use_ppt,**kwargs)]
                 print("Extracted vanderburg LC from ",urlfitsname)
             else:
                 print("Cannot find vanderburg LC at ",urlfitsname)
     #Cutting Nones:
     lcvand=[lc for lc in lcvand if lc is not None]
     if lcvand is not None and len(lcvand)>0:
-        lc=lcStack(lcvand)
+        lc=lc_stack(lcvand)
         lc['src']='K2_vand'
         return lc
     else:
         return None
 
-def openEverest(epic,camp,pers=None,durs=None,t0s=None,use_ppt=True,**kwargs):
+def open_Everest(epic,camp,pers=None,durs=None,t0s=None,use_ppt=True,**kwargs):
     import everest
     if camp in [10,11,10.0,11.0,'10','11','10.0','11.0']:
         camp=[int(str(int(float(camp)))+'1'),int(str(int(float(camp)))+'2')]
@@ -508,28 +508,28 @@ def openEverest(epic,camp,pers=None,durs=None,t0s=None,use_ppt=True,**kwargs):
                 print(c,"not possible to load")
                 continue
         if hdr is not None:
-            lc=openFits(lcev,hdr,mission='k2',use_ppt=use_ppt)
+            lc=open_fits(lcev,hdr,mission='k2',use_ppt=use_ppt)
     #elif int(camp)>=14:
     #    lcloc='https://archive.stsci.edu/hlsps/everest/v2/c'+str(int(camp))+'/'+str(epic)[:4]+'00000/'+str(epic)[4:]+'/hlsp_everest_k2_llc_'+str(epic)+'-c'+str(int(camp))+'_kepler_v2.0_lc.fits'
-    #    lcev=openFits(fits.open(lcloc),lcloc)
-    #lc=lcStack(lcs)
+    #    lcev=open_fits(fits.open(lcloc),lcloc)
+    #lc=lc_stack(lcs)
         lc['src']='K2_ev'
         return lc
     else:
         return None
 
-def getK2lc(epic,camp,saveloc=None,pers=None,durs=None,t0s=None,use_ppt=True):
+def get_k2_lc(epic,camp,saveloc=None,pers=None,durs=None,t0s=None,use_ppt=True):
     '''
    Gets (or tries to get) all LCs from K2 sources. Order is Everest > Vanderburg > PDC.
    '''
     from urllib.request import urlopen
     import everest
 
     lcs={}
-    lcs['vand']={camp:openVand(int(epic), camp, use_ppt=use_ppt)}
+    lcs['vand']={camp:open_Vand(int(epic), camp, use_ppt=use_ppt)}
     if camp!='E':
-        lcs['ev']={camp:openEverest(int(epic), int(float(camp)), pers=pers, durs=durs, t0s=t0s, use_ppt=use_ppt)}
-        lcs['pdc']={camp:openPDC(int(epic),int(float(camp)),use_ppt=use_ppt)}
+        lcs['ev']={camp:open_Everest(int(epic), int(float(camp)), pers=pers, durs=durs, t0s=t0s, use_ppt=use_ppt)}
+        lcs['pdc']={camp:open_PDC(int(epic),int(float(camp)),use_ppt=use_ppt)}
     lcs={ilc:lcs[ilc] for ilc in lcs if lcs[ilc][camp] is not None}
     if len(lcs.keys())>1:
         lens = {l:len(lcs[l][camp]['flux'][lcs[l][camp]['mask']]) for l in lcs}
 
         #Making a metric from std and length - std/len_norm**3. i.e. a lc 75% as long as the longest is downweighted by 0.42 (e.g. std increased by 2.4
         ordered_keys = [k for k, v in sorted(stds.items(), key=lambda item: item[1])]
         list(np.array(list(lcs.keys()))[np.argsort(stds)])
-        lc=lcStackDicts(lcs,ordered=ordered_keys)
+        lc=lc_stackDicts(lcs,ordered=ordered_keys)
         return lc
     elif len(lcs.keys())==1:
         return lcs[list(lcs.keys())[0]][camp]
@@ -579,10 +579,10 @@ def K2_lc(epic,coor=None,pers=None,durs=None,t0s=None, use_ppt=True):
     print("K2 campaigns to search:",np.unique(np.array(str(df['campaign']).split(','))))
     for camp in np.unique(np.array(str(df['campaign']).split(','))):
         if camp!='':
-            lcs+=[getK2lc(epic,camp,pers=pers,durs=durs,t0s=t0s, use_ppt=use_ppt)]
+            lcs+=[get_k2_lc(epic,camp,pers=pers,durs=durs,t0s=t0s, use_ppt=use_ppt)]
     lcs=[lc for lc in lcs if lc is not None]
     if len(lcs)>1:
-        lcs=lcStack(lcs)
+        lcs=lc_stack(lcs)
         return lcs,df
     elif len(lcs)==1:
         return lcs[0],df
@@ -590,7 +590,7 @@ def K2_lc(epic,coor=None,pers=None,durs=None,t0s=None, use_ppt=True):
         return None,df
 
 
-def getKeplerLC(kic,cadence='long',use_ppt=True,**kwargs):
+def get_kepler_lc(kic,cadence='long',use_ppt=True,**kwargs):
     '''
     This module uses the KIC of a planet candidate to download lightcurves
@@ -625,7 +625,7 @@ def getKeplerLC(kic,cadence='long',use_ppt=True,**kwargs):
             resp = h.request(lcloc, 'HEAD')
             if int(resp[0]['status']) < 400:
                 with fits.open(lcloc,show_progress=False) as hdu:
-                    ilc=openFits(hdu,lcloc,mission='kepler',use_ppt=use_ppt,**kwargs)
+                    ilc=open_fits(hdu,lcloc,mission='kepler',use_ppt=use_ppt,**kwargs)
                     if ilc is not None:
                         lcs+=[ilc]
                     hdr=hdu[1].header
@@ -636,17 +636,17 @@ def getKeplerLC(kic,cadence='long',use_ppt=True,**kwargs):
             resp = h.request(lcloc, 'HEAD')
             if int(resp[0]['status']) < 400:
                 with fits.open(lcloc,show_progress=False) as hdu:
-                    ilc=openFits(hdu,lcloc,mission='kepler',use_ppt=use_ppt,**kwargs)
+                    ilc=open_fits(hdu,lcloc,mission='kepler',use_ppt=use_ppt,**kwargs)
                     if ilc is not None:
                         lcs+=[ilc]
                     hdr=hdu[1].header
     if len(lcs)>0:
-        lc=lcStack(lcs)
+        lc=lc_stack(lcs)
         return lc,hdr
     else:
         return None,None
 
-def lcStackDicts(lcdicts, ordered=None):
+def lc_stackDicts(lcdicts, ordered=None):
     #Stacks multiple lcs together while keeping info from secondary data sources.
     #lcdicts must be in form {'src1':{'camp1':{'time':[],'flux:[], ...},'sect2':{'time':...}},'src2':{'camp1':...}}}
@@ -702,10 +702,10 @@ def lcStackDicts(lcdicts, ordered=None):
         outlc_by_sect+=[sec_lc]
     '''
-    lc=lcStack(outlc_by_sect)
+    lc=lc_stack(outlc_by_sect)
     return lc
 
-def lcStack(lcs):
+def lc_stack(lcs):
     if len(lcs)==1:
         return lcs[0]
     else:
@@ -744,7 +744,7 @@ def lcStack(lcs):
             outlc['flux_unit']=lcs[nlc]['flux_unit']
     return outlc
 
-def CutAnomDiff(flux,thresh=4.2):
+def cut_anom_diff(flux,thresh=4.2):
     #Uses differences between points to establish anomalies.
     #Only removes single points with differences to both neighbouring points greater than threshold above median difference (ie ~rms)
     #Fast: 0.05s for 1 million-point array.
@@ -769,7 +769,7 @@ def observed(tic,radec=None,maxsect=84):
     #print(out_dic)
     return out_dic
 
-def getCorotLC(corid,use_ppt=True,**kwargs):
+def get_corot_lc(corid,use_ppt=True,**kwargs):
     #These are pre-computed CoRoT LCs I have lying around. There is no easy API as far as I can tell.
     initstring="https://exoplanetarchive.ipac.caltech.edu/data/ETSS/corot_exo/FITSfiles/"
     corotlclocs={102356770:["LRa03/EN2_STAR_MON_0102356770_20091003T223149_20100301T055642.fits"],
@@ -812,10 +812,10 @@ def getCorotLC(corid,use_ppt=True,**kwargs):
         lcs=[]
         for loc in corotlclocs[int(corid)]:
             with fits.open(initstring+loc,show_progress=False,timeout=120) as hdus:
-                lci=openFits(hdus,initstring+loc,mission='corot',use_ppt=use_ppt,**kwargs)
+                lci=open_fits(hdus,initstring+loc,mission='corot',use_ppt=use_ppt,**kwargs)
                 lci['src']='corot'
             lcs+=[lci]
-        lc=lcStack(lcs)
+        lc=lc_stack(lcs)
         lc['jd_base']=2451545
         return lc
     else:
@@ -1003,7 +1003,7 @@ def TESS_lc(tic, sectors='all',use_ppt=True, coords=None, use_qlp=None, use_elea
                 resp = h.request(fitsloc, 'HEAD')
                 if int(resp[0]['status']) < 400:
                     with fits.open(fitsloc,show_progress=False) as hdus:
-                        spoclcs[key]=openFits(hdus,fitsloc,mission='tess',use_ppt=use_ppt,**kwargs)
+                        spoclcs[key]=open_fits(hdus,fitsloc,mission='tess',use_ppt=use_ppt,**kwargs)
                         lchdrs+=[hdus[0].header]
                 else:
                     print("LC type",types[ntype],"is not accessible")
@@ -1017,7 +1017,7 @@ def TESS_lc(tic, sectors='all',use_ppt=True, coords=None, use_qlp=None, use_elea
                 resp = h.request(fitsloc, 'HEAD')
                 if int(resp[0]['status']) < 400:
                     with fits.open(fitsloc,show_progress=False) as hdus:
-                        spoclcs[key]=openFits(hdus,fitsloc,mission='tess')
+                        spoclcs[key]=open_fits(hdus,fitsloc,mission='tess')
                         lchdrs+=[hdus[0].header]
 
         if use_qlp is None or use_qlp is True:
@@ -1029,8 +1029,8 @@ def TESS_lc(tic, sectors='all',use_ppt=True, coords=None, use_qlp=None, use_elea
                     f1=h5py.File(qlpfiles[0])
                     f2=h5py.File(qlpfiles[1])
-                    qlplcs[key]=lcStack([openFits(f1,sect_to_orbit[key][0],mission='tess',use_ppt=use_ppt,**kwargs),
-                                         openFits(f2,sect_to_orbit[key][1],mission='tess',use_ppt=use_ppt,**kwargs)])
+                    qlplcs[key]=lc_stack([open_fits(f1,sect_to_orbit[key][0],mission='tess',use_ppt=use_ppt,**kwargs),
+                                          open_fits(f2,sect_to_orbit[key][1],mission='tess',use_ppt=use_ppt,**kwargs)])
                     lchdrs+=[{'source':'qlp'}]
                 else:
                     fitsloc='https://mast.stsci.edu/api/v0.1/Download/file?uri=mast:HLSP/qlp/s'+str(int(key)).zfill(4) + \
@@ -1040,7 +1040,7 @@ def TESS_lc(tic, sectors='all',use_ppt=True, coords=None, use_qlp=None, use_elea
                     resp = h.request(fitsloc, 'HEAD')
                     if int(resp[0]['status']) < 400:
                         with fits.open(fitsloc,show_progress=False) as hdus:
-                            qlplcs[key]=openFits(hdus,fitsloc,mission='tess',use_ppt=use_ppt,**kwargs)
+                            qlplcs[key]=open_fits(hdus,fitsloc,mission='tess',use_ppt=use_ppt,**kwargs)
                             lchdrs+=[hdus[0].header]
         elif use_eleanor is None or use_eleanor is True:
             from eleanor import eleanor
@@ -1061,13 +1061,13 @@ def TESS_lc(tic, sectors='all',use_ppt=True, coords=None, use_qlp=None, use_elea
                     elen_hdr={'ID':star.tic,'GaiaID':star.gaia,'Tmag':star.tess_mag,
                               'RA':star.coords[0],'dec':star.coords[1],'mission':'TESS','campaign':key,'source':'eleanor',
                               'ap_masks':elen_obj.all_apertures,'ap_image':np.nanmedian(elen_obj.tpf[50:80],axis=0)}
-                    elenorlcs[key]=openFits(elen_obj,elen_hdr,mission='tess',use_ppt=use_ppt,**kwargs)
+                    elenorlcs[key]=open_fits(elen_obj,elen_hdr,mission='tess',use_ppt=use_ppt,**kwargs)
                     lchdrs+=[elen_hdr]
                 except Exception as e:
                     print(e, tic,"not observed by TESS in sector",key)
 
     if len(spoclcs)+len(qlplcs)+len(elenorlcs)>0:
-        lc=lcStackDicts({'spoc':spoclcs,'qlp':qlplcs,'elen':elenorlcs},['spoc','qlp','elen'])
+        lc=lc_stackDicts({'spoc':spoclcs,'qlp':qlplcs,'elen':elenorlcs},['spoc','qlp','elen'])
         return lc,lchdrs[0]
     #elif len(lcs)==1:
     #    #print(lcs,lchdrs)
@@ -1075,7 +1075,7 @@ def TESS_lc(tic, sectors='all',use_ppt=True, coords=None, use_qlp=None, use_elea
     else:
         return None,None
 
-def openLightCurve(ID,mission,coor=None,use_ppt=True,other_data=True,
+def open_light_curve(ID,mission,coor=None,use_ppt=True,other_data=True,
                    jd_base=2457000,save=True,**kwargs):
     #from ..stellar import tess_stars2px_mod
     if coor is None:
@@ -1153,7 +1153,7 @@ def openLightCurve(ID,mission,coor=None,use_ppt=True,other_data=True,
                 if 'time' in key:
                     lcs['k2'][key]-=(jd_base-2454833)
     if IDs['kepler'] is not None:
-        lcs['kepler'],hdrs['kepler'] = getKeplerLC(IDs['kepler'],use_ppt=use_ppt)
+        lcs['kepler'],hdrs['kepler'] = get_kepler_lc(IDs['kepler'],use_ppt=use_ppt)
         if lcs['kepler'] is not None:
             lcs['kepler']['time']-=(jd_base-2454833)
     if mission.lower() == 'corot':
@@ -1163,7 +1163,7 @@ def openLightCurve(ID,mission,coor=None,use_ppt=True,other_data=True,
         hdrs['corot'] = None
     #print(IDs,lcs)
     if len(lcs.keys())>=1:
-        lc=lcStack([lcs[lc] for lc in lcs if lcs[lc] is not None])
+        lc=lc_stack([lcs[lc] for lc in lcs if lcs[lc] is not None])
     elif not other_data:
         lc=lcs[mission.lower()]
     else:
@@ -1187,7 +1187,7 @@ def openLightCurve(ID,mission,coor=None,use_ppt=True,other_data=True,
     return lc,hdrs[mission.lower()]
 
 
-def LoadLc(lcid,mission='tess',file_loc=None):
+def load_lc(lcid,mission='tess',file_loc=None):
     # Quick tool to load pickled lightcurve dict.
     # lcid = ID
     # mission = 'tess'. mission string (TESS, K2, Kepler, etc)
@@ -1196,7 +1196,7 @@ def LoadLc(lcid,mission='tess',file_loc=None):
     file_loc=MonoData_savepath+'/'+ID_string if file_loc is not None else file_loc
     return pickle.load(open(MonoData_savepath+'/'+ID_string+'/'+ID_string+'_lc.pickle','rb'))
 
-def cutLc(lctimes,max_len=10000,return_bool=True,transit_mask=None):
+def cut_lc(lctimes,max_len=10000,return_bool=True,transit_mask=None):
     # Naturally cut the lightcurve time into chunks smaller than max_len (e.g. for GP computations)
     assert(np.isnan(lctimes).sum()==0)
     if return_bool:
@@ -1265,14 +1265,14 @@ def weighted_avg_and_std(values, errs, masknans=True, axis=None):
     else:
         return [np.nan, np.nan]
 
-def lcBin(lc,binsize=1/48,split_gap_size=0.8,use_flat=True,use_masked=True, use_raw=False,extramask=None,modify_lc=True):
+def lc_bin(lc,binsize=1/48,split_gap_size=0.8,use_flat=True,use_masked=True, use_raw=False,extramask=None,modify_lc=True):
     #Binning lightcurve to e.g. 30-min cadence for planet search
     # Can optionally use the flattened lightcurve
     binlc={}
 
     #Using flattened lightcurve as well as normal one:
     if use_flat and 'flux_flat' not in lc:
-        lc=lcFlatten(lc)
+        lc=lc_flatten(lc)
     if use_flat:
         flux_dic=['flux_flat','flux']
         binlc['flux_flat']=[]
@@ -1466,7 +1466,7 @@ def create_transit_mask(t,tcens,tdurs,maskdist=1.1):
         in_trans+=abs(t-tcens[n])<0.5*maskdist*tdurs[n]
     return ~in_trans
 
-def dopolyfit(win,mask=None,stepcent=0.0,d=3,ni=10,sigclip=3):
+def do_polyfit(win,mask=None,stepcent=0.0,d=3,ni=10,sigclip=3):
     mask=np.tile(True,len(win)) if mask is None else mask
     maskedwin=win[mask]
@@ -1497,7 +1497,7 @@ def dopolyfit(win,mask=None,stepcent=0.0,d=3,ni=10,sigclip=3):
         best_base=new_base[:]
     return best_base
 
-def formwindow(dat,cent,size,boxsize,gapthresh=1.0):
+def form_window(dat,cent,size,boxsize,gapthresh=1.0):
     win = (dat[:,0]>cent-size/2.)&(dat[:,0]<cent+size/2.)
     box = (dat[:,0]>cent-boxsize/2.)&(dat[:,0]<cent+boxsize/2.)
         if np.sum(newbox)>0 and np.sum(win&uselc[:,3].astype(bool))>0:
             #Forming the polynomial fit from the window around the box:
             if debug: print("window size:",np.sum(win),"masked points:",np.sum(uselc[win,3]))
             if debug: print("window lc:",uselc[win,:3])
-            baseline = dopolyfit(uselc[win,:3],mask=uselc[win,3].astype(bool),
+            baseline = do_polyfit(uselc[win,:3],mask=uselc[win,3].astype(bool),
                                  stepcent=stepcent,d=polydegree,ni=niter,sigclip=sigmaclip)
             lc[prefix+'flux_flat'][newbox] = lc[prefix+'flux'][newbox] - np.polyval(baseline,lc[prefix+'time'][newbox]-stepcent)*lc['flux_unit'] #Here we have
 
     return lc
 
-def RunFromScratch(ID, mission, tcen, tdur, ra=None, dec=None,
+def run_from_scratch(ID, mission, tcen, tdur, ra=None, dec=None,
                    mono_SNRthresh=6.0, other_planet_SNRthresh=6.0, PL_ror_thresh=0.2):
     '''
@@ -1619,8 +1619,8 @@ def RunFromScratch(ID, mission, tcen, tdur, ra=None, dec=None,
                          savedf=True)
 
     #Gets Lightcurve
-    lc,hdr=openLightCurve(ID,mission,use_ppt=False)
-    lc=lcFlatten(lc,winsize=9*tdur,stepsize=0.1*tdur)
+    lc,hdr=open_light_curve(ID,mission,use_ppt=False)
+    lc=lc_flatten(lc,winsize=9*tdur,stepsize=0.1*tdur)
 
     #Runs Quick Model fit
     monoparams, interpmodel = search.QuickMonoFit(lc,tc,dur,Rs=Rstar[0],Ms=rhostar[0]*Rstar[0]**3)
@@ -1672,7 +1672,7 @@ def RunFromScratch(ID, mission, tcen, tdur, ra=None, dec=None,
     elif planet_dic_1['01']['flag']=='periodic':
         print(" ")
 
-def GetSavename(ID, mission, how='load', suffix='mcmc.pickle', overwrite=False, savefileloc=None):
+def get_savename(ID, mission, how='load', suffix='mcmc.pickle', overwrite=False, savefileloc=None):
     '''
     # Get unique savename (defaults to MCMC suffix) with format:
    # [savefileloc]/[T/K]IC[11-number ID]_[20YY-MM-DD]_[n]_mcmc.pickle
@@ -1722,10 +1722,10 @@ def GetSavename(ID, mission, how='load', suffix='mcmc.pickle', overwrite=False,
     return [os.path.join(savefileloc,id_dic[mission]+str(ID).zfill(11)+"_"+date+"_"+str(int(nsim))+"_"+suffix),
             os.path.join(savefileloc,id_dic[mission]+str(ID).zfill(11)+'_'+suffix)]
 
-def LoadPickle(ID, mission,loadname=None,savefileloc=None):
+def load_pickle(ID, mission,loadname=None,savefileloc=None):
     #Pickle file style: folder/TIC[11-number ID]_[20YY-MM-DD]_[n]_mcmc.pickle
     if loadname is None:
-        loadname=GetSavename(ID, mission, how='load', suffix='mcmc.pickle', savefileloc=savefileloc)[0]
+        loadname=get_savename(ID, mission, how='load', suffix='mcmc.pickle', savefileloc=savefileloc)[0]
     if os.path.exists(loadname):
         n_bytes = 2**31
         max_bytes = 2**31 - 1
@@ -1741,9 +1741,9 @@ def LoadPickle(ID, mission,loadname=None,savefileloc=None):
     else:
         return None
 
-def SavePickle(trace,ID,mission,savename=None,overwrite=False,savefileloc=None):
+def save_pickle(trace,ID,mission,savename=None,overwrite=False,savefileloc=None):
     if savename is None:
-        savename=GetSavename(ID, mission, how='save', suffix='mcmc.pickle', overwrite=overwrite, savefileloc=savefileloc)[0]
+        savename=get_savename(ID, mission, how='save', suffix='mcmc.pickle', overwrite=overwrite, savefileloc=savefileloc)[0]
 
     n_bytes = 2**31
     max_bytes = 2**31 - 1
@@ -1754,7 +1754,7 @@ def SavePickle(trace,ID,mission,savename=None,overwrite=False,savefileloc=None):
     for idx in range(0, len(bytes_out), max_bytes):
         f_out.write(bytes_out[idx:idx+max_bytes])
 
-def getLDs(Ts,logg=4.43812,FeH=0.0,mission="TESS"):
+def get_lds(Ts,logg=4.43812,FeH=0.0,mission="TESS"):
     from scipy.interpolate import CloughTocher2DInterpolator as ct2d
 
     if mission[0]=="T" or mission[0]=="t":
@@ -1812,7 +1812,7 @@ def PlotCorner(trace, ID, mission='TESS', varnames=["b", "ecc", "period", "r_pl"
     print("varnames = ",varnames)
 
     if savename is None:
-        savename=GetSavename(ID, mission, how='save', suffix='_corner.png',
+        savename=get_savename(ID, mission, how='save', suffix='_corner.png',
                              overwrite=overwrite, savefileloc=savefileloc)[0]
 
     if tracemask is None:
@@ -1855,12 +1855,12 @@ def vals_to_latex(vals):
     except:
         return " - "
 
-def ToLatexTable(trace, ID, mission='TESS', varnames='all',order='columns',
+def to_latex_table(trace, ID, mission='TESS', varnames='all',order='columns',
                  savename=None, overwrite=False, savefileloc=None, tracemask=None):
     #Plotting corner of the parameters to see correlations
     print("MakingLatexTable")
     if savename is None:
-        savename=GetSavename(ID, mission, how='save', suffix='_table.txt',overwrite=False, savefileloc=savefileloc)[0]
+        savename=get_savename(ID, mission, how='save', suffix='_table.txt',overwrite=False, savefileloc=savefileloc)[0]
     if tracemask is None:
         tracemask=np.tile(True,len(trace['Rs']))
     if varnames is None or varnames == 'all':
@@ -2136,7 +2136,7 @@ def err_string_parse(s):
             return True, s[:-1*len(estr)]
     return False, None
 
-def MakeBokehTable(df, dftype='toi', cols2use=None, cols2avoid=None, errtype=' err', width=300, height=350):
+def make_bokeh_table(df, dftype='toi', cols2use=None, cols2avoid=None, errtype=' err', width=300, height=350):
     """Form Bokeh table from an input pandas dataframe
 
     Args:
@@ -2221,7 +2221,7 @@ def MakeBokehTable(df, dftype='toi', cols2use=None, cols2avoid=None, errtype=' e
     data_table = DataTable(source=ColumnDataSource(newdf), columns=columns, width=width, height=height)
     return data_table
 
-def GapCull(t0,t,dat,std_thresh=10,boolean=None,time_jump_thresh=0.4):
+def gap_cull(t0,t,dat,std_thresh=10,boolean=None,time_jump_thresh=0.4):
     #Removes data before/after gaps and jumps in t & y
    #If there's a big gap or a big jump, we'll remove the far side of that
    if boolean is None:
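---

Usage note: this commit renames public methods without keeping backwards-compatible aliases, so downstream scripts must be updated in lockstep. Below is a minimal sketch of the new call pattern, assuming the `monoModel` class described in the fit.py docstrings; the TIC number and the `Time` bounds are placeholders for illustration only, not values taken from this patch (the `Time(...)` call mirrors the example in the `predict_future_transits` docstring):

    from MonoTools import fit
    from astropy.time import Time

    # Hypothetical TESS target, purely for illustration
    model = fit.monoModel(123456789, 'tess')
    model.init_model()                     # unchanged name
    model.sample_model(n_draws=500)        # was model.SampleModel(...)
    model.plot()                           # was model.Plot()
    model.plot_periods()                   # was model.PlotPeriods()
    df = model.predict_future_transits(    # was model.PredictFutureTransits(...)
        Time('2021-06-01T00:00:00.000', format='isot'),
        Time('2021-10-01T00:00:00.000', format='isot'))

Since the renames are purely mechanical (CamelCase to lowercase/underscores, with acronyms like L2, RVs, TESS, OR and RMS kept capitalised), a project-wide search-and-replace over the old names listed in this diff is sufficient to migrate.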