# package to perform the false-positive rate estimation
import os

import numpy as np
from astropy.io import fits, ascii
import celerite
from celerite import terms

import runpypetal as rp


def prep_fpr_test(rmid=[17], nsample=100):
    topdir = '/data2/yshen/sdssrm/final_result/'
    output_dir = './pypetal/'
    nobj = len(rmid)
    for i in range(nobj):
        rmtag = "{:03d}".format(rmid[i])
        objdir = topdir + 'rm' + rmtag + '/'
        mockdir = objdir + 'fpr/'
        if not os.path.exists(mockdir):
            os.makedirs(mockdir)
        lcdir = objdir + output_dir + 'processed_lcs/'
        contfile = lcdir + 'cont_data.dat'
        if os.path.exists(contfile):
            # get the original continuum light curve
            xi, yi, erri = np.loadtxt(contfile, delimiter=',', unpack=True, usecols=[0, 1, 2])
            # get the DRW parameters (sigma, tau) as medians of the posterior chain
            drw_file = objdir + output_dir + 'cont/drw_rej/cont_chain.dat'
            sigma_all, tau_all = np.loadtxt(drw_file, delimiter=',', unpack=True,
                                            usecols=[0, 1], skiprows=1)
            sigma, tau = np.median(sigma_all), np.median(tau_all)
            ymean = np.mean(yi)
            # generate mock light curves: map the DRW parameters onto
            # celerite's RealTerm kernel, a*exp(-c*|dt|)
            log_a = np.log(2 * sigma**2)
            log_c = np.log(1 / tau)
            kernel = terms.RealTerm(log_a=log_a, log_c=log_c)
            # very rarely, there are duplicate MJDs in the light curve (valid data points),
            # but celerite.GP does not like that
            xi_uniq, ind_uniq = np.unique(xi, return_index=True)
            ndata = len(xi_uniq)
            try:
                gp = celerite.GP(kernel, mean=ymean)
                gp.compute(xi_uniq)
            except Exception as exc:
                print(f"failed to generate mock LCs: {exc}")
                with open(topdir + "gen_mock_lc_failed.txt", "a") as f:
                    f.write(str(rmtag) + "\n")
                continue
            # draw nsample realizations from the GP prior and add Gaussian noise
            # scaled by the measurement errors
            yy = gp.sample(size=nsample)
            for j in range(nsample):
                yout = yy[j, :] + np.random.normal(size=ndata) * erri[ind_uniq]
                outdir = mockdir + 'cont' + "{:03d}".format(j) + '/'
                if not os.path.exists(outdir):
                    os.makedirs(outdir)
                outfile = outdir + 'cont_lc' + "{:03d}".format(j) + '.txt'
                ascii.write([xi_uniq, yout, erri[ind_uniq]], outfile,
                            format='no_header', overwrite=True)
        print('finished RMID:', rmtag)


def run_fpr_test(rmid=np.arange(849), line=None, nsample=100):
    topdir = '/data2/yshen/sdssrm/final_result/'
    nobj = len(rmid)
    infile = '/data2/yshen/sdssrm/final_result/summary.fits'
    hdu = fits.open(infile)
    sum0 = hdu[1].data
    lagexp = sum0['LAG_OBS_EXP']
    hdu.close()
    lagexp = lagexp[rmid]
    sum0 = sum0[rmid]
    if line is None:
        alllines = ['ha', 'hb', 'mg2', 'c4']
    elif isinstance(line, list):
        alllines = line
    else:
        alllines = [line]
    for i in range(nobj):
        rmtag = "{:03d}".format(rmid[i])
        objdir = topdir + 'rm' + rmtag + '/'
        mockdir = objdir + 'fpr/'
        # only run lines with a detected lag (LINE_LAG_DET > 0)
        for iline in alllines:
            if sum0[iline.upper() + '_LAG_DET'][i] > 0:
                for j in range(nsample):
                    trial_dir = mockdir + 'cont' + "{:03d}".format(j) + '/'
                    contfile = trial_dir + 'cont_lc' + "{:03d}".format(j) + '.txt'
                    cmd = 'cp ' + contfile + ' ' + objdir + 'cont_lc_tmp.txt'
                    os.system(cmd)
                    rp.runpypetal(rmid=[rmid[i]], input_lag=[lagexp[i]],
                                  fold=5.0, alllines=[iline],
                                  run_drw_rej=False, run_pyzdcf=False,
                                  contfile='cont_lc_tmp.txt', output_dir=trial_dir)
                    cmd = 'rm ' + objdir + 'cont_lc_tmp.txt'
                    os.system(cmd)
        print('finished RMID:', rmtag)
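

# Example usage, as a minimal sketch: the RMID value and sample count below are
# illustrative only, and this assumes the /data2/yshen/sdssrm/final_result/
# directory tree referenced above already contains the processed light curves
# and DRW chains. Step 1 builds the mock continuum light curves; step 2 reruns
# the pypetal lag analysis on each mock trial.
if __name__ == '__main__':
    prep_fpr_test(rmid=[17], nsample=100)
    run_fpr_test(rmid=[17], nsample=100)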