From 047d505e523e10ad9fd3631616173c16b160406c Mon Sep 17 00:00:00 2001
From: wbalmer
Date: Fri, 17 Mar 2023 15:54:15 -0400
Subject: [PATCH] add back planetkiller

---
 spaceKLIP/companion.py    | 262 ++++++++++++++++++++++++++++++++++++++
 tests/run_planetkiller.py |   4 +-
 2 files changed, 264 insertions(+), 2 deletions(-)
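
Notes:
    planet_killer() reads three keys from the small_planet dict that
    run_planetkiller.py passes in. A minimal sketch of that input, with
    illustrative values (units inferred from how the keys are used inside
    the function: 'sep' is later multiplied by the pixel scale, 'pa' goes
    through np.deg2rad, 'flux' scales the model PSF):

        small_planet = {'sep': 3.0,     # separation from the star [pix]
                        'pa': 120.0,    # position angle [deg]
                        'flux': 1e-4}   # companion-to-star flux ratio

    'sep' and 'pa' here mirror the fallbacks hard-coded in planet_killer
    for the small_planet=None case; the flux ratio is a placeholder.
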
diff --git a/spaceKLIP/companion.py b/spaceKLIP/companion.py
index c2141e44..0e91c73c 100644
--- a/spaceKLIP/companion.py
+++ b/spaceKLIP/companion.py
@@ -1757,3 +1757,265 @@ def planet_killer(meta, small_planet=None, recenter_offsetpsf=False,
             print(' lnZ/Z0 = %.2f' % (res[key][temp]['evidence_ratio']))
 
     return
+
+
+def planet_killer(meta, small_planet=None, recenter_offsetpsf=False,
+                  use_fm_psf=True, fourier=True):
+    '''
+    Is your planet buried in the PSF of another planet or a coincident
+    source? Have you already fit a PSF to the brighter object using
+    extract_companions()? Use this function!
+
+    Injects negative copies of the companions fit by extract_companions()
+    and performs the PSF fitting on the residual images.
+
+    The initial steps are very similar to extract_companions() and
+    inject_fit().
+    '''
+
+    if hasattr(meta, 'ref_obs') and (meta.ref_obs is not None) and isinstance(meta.ref_obs, (list, np.ndarray)):
+        sci_ref_dir = 'SCI+REF'
+    elif hasattr(meta, 'ref_obs_override') and meta.ref_obs_override == True:
+        sci_ref_dir = 'SCI+REF'
+    else:
+        sci_ref_dir = 'SCI'
+
+    # If necessary, extract the metadata of the observations we're injecting into.
+    if (not meta.done_subtraction):
+        if meta.comp_usefile == 'bgsub':
+            subdir = 'IMGPROCESS/BGSUB'
+        elif meta.use_cleaned:
+            subdir = f'IMGPROCESS/{sci_ref_dir}_CLEAN'
+        else:
+            subdir = f'IMGPROCESS/{sci_ref_dir}'
+        basefiles = io.get_working_files(meta, meta.done_imgprocess, subdir=subdir, search=meta.sub_ext)
+
+        meta = utils.prepare_meta(meta, basefiles)
+        meta.done_subtraction = True  # set the subtraction flag for the subsequent pipeline stages
+
+    # Loop through all directories of subtracted images.
+    meta.truenumbasis = {}
+    for counter, rdir in enumerate(meta.rundirs):
+        # Check that the provided run directory actually exists.
+        if not os.path.exists(rdir):
+            raise ValueError('Could not find provided run directory "{}"'.format(rdir))
+
+        # Report the mode, annuli, and subsections parsed from the run
+        # directory name, which follows the pipeline naming convention.
+        if (meta.verbose == True):
+            dirparts = rdir.split('/')[-2].split('_')  # -2 because of trailing '/'
+            print('--> Mode = {}, annuli = {}, subsections = {}, scenario {} of {}'.format(dirparts[3], dirparts[4], dirparts[5], counter+1, len(meta.rundirs)))
+
+        # Get the mode from the saved meta file.
+        metasave = io.read_metajson(rdir+'SUBTRACTED/MetaSave.json')
+        mode = metasave['used_mode']
+
+        # Define the input and output directories for each set of pyKLIP
+        # parameters.
+        idir = rdir+'SUBTRACTED/'
+        odir = rdir+'COMPANION_KL{}/'.format(meta.KL)
+        if (not os.path.exists(odir)):
+            os.makedirs(odir)
+
+        # Create an output directory for the forward-modeled datasets.
+        odir_temp = odir+'FITS/'
+        if (not os.path.exists(odir_temp)):
+            os.makedirs(odir_temp)
+
+        # Loop through all concatenations.
+        res = {}
+        for i, key in enumerate(meta.obs.keys()):
+
+            ww_sci = np.where(meta.obs[key]['TYP'] == 'SCI')[0]
+            filepaths = np.array(meta.obs[key]['FITSFILE'][ww_sci], dtype=str).tolist()
+            ww_cal = np.where(meta.obs[key]['TYP'] == 'CAL')[0]
+            psflib_filepaths = np.array(meta.obs[key]['FITSFILE'][ww_cal], dtype=str).tolist()
+            hdul = pyfits.open(idir+key+'-KLmodes-all.fits')
+            filt = meta.filter[key]
+            pxsc = meta.pixscale[key]  # mas
+            pxar = meta.pixar_sr[key]  # sr
+            wave = meta.wave[filt]  # m
+            weff = meta.weff[filt]  # m
+            fwhm = wave/meta.diam*utils.rad2mas/pxsc  # pix
+
+            if filt in ['F250M']:
+                # Don't use a Fourier shift for this filter.
+                fourier = False
+
+            # Recover the computed KL modes from the header keywords
+            # (KLMODE0, KLMODE1, ...), checking up to 100 entries.
+            all_numbasis = []
+            for j in range(100):
+                try:
+                    # Get the value from the header.
+                    all_numbasis.append(hdul[0].header['KLMODE{}'.format(j)])
+                except KeyError:
+                    # No more KL modes.
+                    continue
+
+            meta.truenumbasis[key] = [num for num in all_numbasis if (num <= meta.maxnumbasis[key])]
+
+            if meta.KL == 'max':
+                if '_ADI_' in rdir:
+                    KL = meta.adimaxnumbasis[key]
+                elif '_RDI_' in rdir:
+                    KL = meta.rdimaxnumbasis[key]
+                elif '_ADI+RDI_' in rdir:
+                    KL = meta.maxnumbasis[key]
+            else:
+                KL = meta.KL
+
+            # Get the index of the KL component we are interested in.
+            try:
+                KLindex = all_numbasis.index(KL)
+            except ValueError:
+                raise ValueError('KL={} not found. Calculated options are: {}, and the maximum possible for this data is {}'.format(KL, all_numbasis, meta.maxnumbasis[key]))
+
+            hdul.close()
+
+            # Create a new pyKLIP dataset for forward modeling the companion
+            # PSFs.
+            if meta.repeatcentering_companion == False:
+                centering_alg = 'savefile'
+            else:
+                centering_alg = meta.repeatcentering_companion
+
+            if not hasattr(meta, 'blur_images'):
+                meta.blur_images = False
+
+            load_file0_center = meta.load_file0_center if hasattr(meta, 'load_file0_center') else False
+
+            if (meta.use_psfmask == True):
+                try:
+                    mask = pyfits.getdata(meta.psfmask[key], 'SCI')  # NIRCam
+                except Exception:
+                    try:
+                        mask = pyfits.getdata(meta.psfmask[key], 0)  # MIRI
+                    except Exception:
+                        raise FileNotFoundError('Unable to read psfmask file {}'.format(meta.psfmask[key]))
+            else:
+                mask = None
+
+            raw_dataset = JWST.JWSTData(filepaths=filepaths, psflib_filepaths=psflib_filepaths, centering=centering_alg,
+                                        scishiftfile=meta.ancildir+'shifts/scishifts', refshiftfile=meta.ancildir+'shifts/refshifts',
+                                        fiducial_point_override=meta.fiducial_point_override, blur=meta.blur_images,
+                                        load_file0_center=load_file0_center, save_center_file=meta.ancildir+'shifts/file0_centers',
+                                        spectral_type=meta.spt, mask=mask)
+
+            # Make the offset PSF.
+            if pxsc > 100:
+                inst = 'MIRI'
+                immask = 'FQPM{}'.format(filt[1:5])
+            else:
+                inst = 'NIRCAM'
+                immask = key.split('_')[-1]
+            if hasattr(meta, 'psf_spec_file'):
+                if meta.psf_spec_file != False:
+                    SED = io.read_spec_file(meta.psf_spec_file)
+                else:
+                    SED = None
+            else:
+                SED = None
+            offsetpsf_func = psf.JWST_PSF(inst, filt, immask, fov_pix=65,
+                                          sp=SED, use_coeff=True,
+                                          date=meta.psfdate)
+
+            field_dep_corr = None
+            psf_nocoromask = offsetpsf_func.psf_off
+
+            # Subtract the known companions using the fit from extract_companions().
+            all_seps, all_flux = [], []
+            # Find the JSON file with the companion fits.
+            import glob
+            try:
+                compjson = glob.glob(odir+key+'-comp_save_fakes.json')[0]
+            except IndexError:
+                compjson = glob.glob(odir+key+'*.json')[0]
+            with open(compjson, 'r') as f:
+                compdata = json.load(f)[key]
+            for comp in list(compdata.keys()):
+                # Get the companion information.
+                ra = compdata[comp]['ra']  # mas
+                de = compdata[comp]['de']  # mas
+                try:
+                    guess_flux = meta.contrast_guess
+                except AttributeError:
+                    guess_flux = 2.452e-4
+                flux = compdata[comp]['f']  # /guess_flux  # flux relative to the star
+                print('flux is', flux)
+                sep = np.sqrt(ra**2+de**2) / pxsc  # pix
+                pa = np.rad2deg(np.arctan2(ra, de))  # deg
+
+                offsetpsf = offsetpsf_func.gen_psf([-ra/1e3, de/1e3], do_shift=False, quick=False)
+                if meta.blur_images != False:
+                    offsetpsf = gaussian_filter(offsetpsf, meta.blur_images)
+                # As the throughput of the coronagraphic mask is not incorporated into the flux
+                # calibration of the pipeline, the integrated flux from the actual detector pixels
+                # will be underestimated. Therefore, we scale the generated PSF so that it is
+                # still affected by the coronagraphic throughput (i.e., fainter). Compute the
+                # scale factor by comparing the PSF simulation with and without the
+                # coronagraphic mask.
+                scale_factor = np.sum(offsetpsf) / np.sum(psf_nocoromask)
+
+                # Normalise the PSF to a total integrated flux of 1.
+                offsetpsf /= np.sum(offsetpsf)
+
+                # Normalise to the flux of the star in this bandpass.
+                offsetpsf *= meta.F0[filt]/10.**(meta.mstar[filt]/2.5)/1e6/pxar  # MJy/sr
+
+                # Apply the scaling for the throughput of the coronagraphic mask.
+                offsetpsf *= scale_factor
+                # Scale to the fitted companion flux; the stamps are negated below,
+                # since we are subtracting.
+                offsetpsf *= flux
+
+                # Save some things.
+                all_seps.append(sep)
+                all_flux.append(flux)
+
+                # Inject the companion as a negative source!
+                stamps = np.array([-offsetpsf for k in range(raw_dataset.input.shape[0])])
+                fakes.inject_planet(frames=raw_dataset.input, centers=raw_dataset.centers, inputflux=stamps, astr_hdrs=raw_dataset.wcs, radius=sep, pa=pa, field_dependent_correction=None)
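+                # A worked example of the normalisation chain above, assuming
+                # illustrative values (F0 = 3631 Jy, mstar = 5 mag, and a
+                # NIRCam SW pixel area of pxar = 2.26e-14 sr; none of these
+                # come from a real dataset): the stellar flux density is
+                #     3631 Jy * 10**(-5/2.5) = 36.31 Jy = 3.631e-5 MJy,
+                # so the unit-sum PSF is scaled by 3.631e-5/2.26e-14 ~ 1.6e9
+                # to MJy/sr, then dimmed by scale_factor for the coronagraph
+                # throughput, and finally multiplied by the fitted contrast
+                # to match the companion's brightness in the data units.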
+
+            # Get some information from the original meta file in the run directory.
+            metasave = io.read_metajson(rdir+'SUBTRACTED/MetaSave.json')
+            mode = metasave['used_mode']
+            annuli = metasave['used_annuli']
+            subsections = metasave['used_subsections']
+
+            # Reduce the companion-subtracted images.
+            if small_planet is not None:
+                sep = small_planet['sep']
+                pa = small_planet['pa']
+                flux = small_planet['flux']
+                print('using input source position')
+            else:
+                sep = 3
+                pa = 120
+                flux = 0
+            ctr = 0
+            # Make a copy of the dataset above.
+            dataset = deepcopy(raw_dataset)
+
+            # Now run KLIP to get the subtracted image.
+            # Sometimes it is faster to just use 1 thread.
+            odir_temp = odir+'FMMF/'
+            if (not os.path.exists(odir_temp)):
+                os.makedirs(odir_temp)
+
+            ra = sep*pxsc*np.sin(np.deg2rad(pa))  # mas
+            de = sep*pxsc*np.cos(np.deg2rad(pa))  # mas
+
+            # Need to make a new offset PSF, but we can reuse the old function.
+            offsetpsf = offsetpsf_func.gen_psf([-ra/1e3, de/1e3], do_shift=False, quick=False)
+            if meta.blur_images != False:
+                offsetpsf = gaussian_filter(offsetpsf, meta.blur_images)
+            scale_factor = np.sum(offsetpsf) / np.sum(psf_nocoromask)
+
+            # Normalise the PSF to a total integrated flux of 1.
+            offsetpsf /= np.sum(offsetpsf)
+
+            # Normalise to the flux of the star in this bandpass.
+            offsetpsf *= meta.F0[filt]/10.**(meta.mstar[filt]/2.5)/1e6/pxar  # MJy/sr
+
+            # Apply the scaling for the throughput of the coronagraphic mask.
+            offsetpsf *= scale_factor
+
+    return
diff --git a/tests/run_planetkiller.py b/tests/run_planetkiller.py
index de00d77d..43f25dc8 100644
--- a/tests/run_planetkiller.py
+++ b/tests/run_planetkiller.py
@@ -23,8 +23,8 @@ pipe = JWST(config_file)
 pipe.run_all(skip_ramp=True,
              skip_imgproc=True,
-             skip_sub=True,
+             skip_sub=False,
              skip_rawcon=True,
              skip_calcon=True,
-             skip_comps=True)
+             skip_comps=False)
 
 sklip.companion.planet_killer(pipe.meta, small_planet=small_planet)