def fits_table_all(input_path: str, output_path: str = "", science_only: bool = True):
    """
    Produces and writes to disk a table of .fits files in the given path, with the vital statistics of each.
    Intended only for use with raw ESO data.
    :param input_path:
    :param output_path:
    :param science_only: If True, we are writing a list for a folder that also contains calibration files, which we
        want to ignore.
    :return:
    """
    # If there's no trailing slash in the input path, add one.
    input_path = u.check_trailing_slash(input_path)

    if output_path == "":
        output_path = input_path + "fits_table.csv"
    elif output_path[-4:] != ".csv":
        if output_path[-1] == "/":
            output_path = output_path + "fits_table.csv"
        else:
            output_path = output_path + ".csv"

    print('Writing table of fits files to: \n', output_path)

    files = os.listdir(input_path)
    files.sort()
    files_fits = list(filter(lambda x: x[-5:] == '.fits', files))

    # Create list of dictionaries to be used as the output data
    output = []

    # Build a pool of single-character ids; unused here, but kept for parity with fits_table().
    ids = string.ascii_lowercase
    if len(ids) < len(files_fits):
        ids = ids + string.ascii_uppercase
    if len(ids) < len(files_fits):
        ids = ids + string.digits

    for i, f in enumerate(files_fits):
        data = {}
        file = fits.open(input_path + f)
        header = file[0].header
        for key in header:
            data[key] = header[key]
        if 'ESO TEL AIRM END' in data and 'ESO TEL AIRM START' in data:
            data['AIRMASS'] = (float(data['ESO TEL AIRM END']) + float(data['ESO TEL AIRM START'])) / 2
        if science_only and 'ESO DPR CATG' in data and 'SCIENCE' in data['ESO DPR CATG']:
            output.append(data)
        elif not science_only:
            output.append(data)
        file.close()

    output.sort(key=lambda a: a['ARCFILE'])

    out_file = pd.DataFrame(output)
    out_file.to_csv(output_path)

    return out_file
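
# Hedged usage sketch (not part of the original module); the directory below is a placeholder,
# and the column shown comes straight from the raw ESO headers:
#
#     table = fits_table_all(input_path="/data/epoch_1/raw/", science_only=False)
#     print(table["ARCFILE"])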
def check_for_config():
    u.mkdir_check(config_dir)
    p = load_params(config_file)
    if p is None:
        config_text = pkg_resources.resource_string(
            __name__,
            os.path.join("..", "param", "config_template.yaml")).decode()
        config_text = config_text.replace(
            "proj_dir: <some_directory>/craft-optical-followup/",
            f"proj_dir: {os.getcwd()}/")
        with open(config_file, "w") as cfg:
            cfg.write(config_text)
        print(f"No config file was detected at {config_file}.")
        print(f"A fresh config file has been created at '{config_file}'")
        print(
            "In this file, please set 'top_data_dir' to a valid path in which to store all "
            "data products of this package (this may require a large amount of space).")
        print("You may also like to specify an alternate param_dir.")
        input("\nOnce you have edited this file, press Enter to proceed.")
        p = load_params(config_file)
    else:
        for param in p:
            p[param] = u.check_trailing_slash(p[param])
        save_params(config_file, p)
        yaml_to_json(config_file)
    return p
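
# For reference, the generated config file contains directory entries along these lines; the
# paths here are placeholders, and the authoritative list lives in param/config_template.yaml:
#
#     proj_dir: /path/to/craft-optical-followup/
#     param_dir: /path/to/param/
#     top_data_dir: /path/to/data/
#     furby_dir: /path/to/furby/   # optional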
def main(path):
    print(f"\nExecuting Python script plot_fwhms.py, with path {path}\n")

    path = check_trailing_slash(path)
    outputs = p.tabulate_output_values(path=path, output=path + "output_values.csv")
    plt.plot(range(len(outputs)), outputs["_fwhm_arcsec"])
    plt.ylabel("FWHM (arcsec)")
    plt.xlabel("Frame")
    plt.title(f"FWHM in {path}")
    plt.savefig(path + "fwhm.png")
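
# Hedged usage note (illustrative, not from the original script): main() is pointed at an epoch
# directory containing the *output_values.yaml files written by earlier pipeline stages, e.g.
#
#     main(path="/data/epoch_1/")
#
# which saves fwhm.png and output_values.csv back into that directory.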
def check_for_config():
    p = load_params('param/config.yaml')
    if p is None:
        print("No config.yaml file found.")
    else:
        for param in p:
            p[param] = u.check_trailing_slash(p[param])
        save_params('param/config.yaml', p)
        yaml_to_json('param/config.yaml')
    return p
def tabulate_output_values(path: str, output: str = None):
    path = u.check_trailing_slash(path=path)
    outputs = []
    for file in filter(lambda filename: 'output_values.yaml' in filename, os.listdir(path)):
        output_values = load_params(file=path + file)
        output_values["filename"] = file
        outputs.append(output_values)

    outputs = tbl.Table(outputs)
    # Sort before writing, so that the CSV on disk matches the returned table.
    outputs.sort(keys="filename")

    if output is not None:
        output = u.sanitise_file_ext(filename=output, ext='.csv')
        outputs.write(output)

    return outputs
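
# Hedged usage sketch (not part of the original module); the paths are placeholders:
#
#     outputs = tabulate_output_values(path="/data/epoch_1/", output="/data/epoch_1/output_values.csv")
#     print(outputs["filename"])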
def main(data_dir, data_title, origin, destination, all_synths):
    print("\nExecuting Python script pipeline_fors2/5-background_subtract.py, with:")
    print(f"\tepoch {data_title}")
    print(f"\torigin directory {origin}")
    print(f"\tdestination directory {destination}")
    print()

    methods = ["ESO backgrounds only", "SExtractor backgrounds only", "polynomial fit", "Gaussian fit",
               "median value"]

    if all_synths:
        frame = 56
        method = "polynomial fit"
        degree = 5
        do_mask = True
        local = True
        global_sub = False
        trim_image = False
        recorrect_subbed = True
        eso_back = False
    else:
        frame = 200
        # frame_arcsec = 30 * units.arcsec
        # frame_deg = frame_arcsec.to(units.deg)

        eso_back = False

        _, method = u.select_option(message="Please select the background subtraction method.", options=methods,
                                    default="polynomial fit")
        degree = None
        if method == "polynomial fit":
            degree = u.user_input(message=f"Please enter the degree of {method} to use:", typ=int, default=3)
        elif method == "ESO backgrounds only":
            eso_back = True

        do_mask = False
        if method not in ["ESO backgrounds only", "SExtractor backgrounds only", "median value"]:
            do_mask = u.select_yn(message="Mask sources using SExtractor catalogue?", default=True)

        if method in ["polynomial fit", "Gaussian fit"]:
            local = u.select_yn(message="Use a local fit?", default=True)
        else:
            local = False

        global_sub = False
        trim_image = False
        recorrect_subbed = False
        if local:
            global_sub = u.select_yn(message="Subtract local fit from entire image?", default="n")
            if not global_sub:
                trim_image = u.select_yn(message="Trim images to subtracted region?", default="y")
                recorrect_subbed = u.select_yn(message="Re-normalise background of subtracted region?", default="y")

        # if not eso_back and method != "SExtractor backgrounds only":
        #     eso_back = u.select_yn(message="Subtract ESO Reflex fitted backgrounds first?", default=False)

    outputs = p.object_output_params(data_title, instrument='FORS2')

    data_dir = u.check_trailing_slash(data_dir)

    destination = u.check_trailing_slash(destination)
    destination = data_dir + destination
    u.mkdir_check_nested(destination)

    origin = u.check_trailing_slash(origin)
    science_origin = data_dir + origin + "science/"
    print(science_origin)

    filters = outputs['filters']
    frb_params = p.object_params_frb(obj=data_title[:-2])
    epoch_params = p.object_params_fors2(obj=data_title)

    background_origin_eso = ""
    if eso_back:
        background_origin_eso = data_dir + "/" + origin + "/backgrounds/"

    if method == "SExtractor backgrounds only":
        background_origin = f"{data_dir}{origin}backgrounds_sextractor/"
    elif method == "polynomial fit":
        background_origin = f"{destination}backgrounds/"
        # f"{destination}backgrounds_{method.replace(' ', '')}_degree_{degree}_local_{local}_globalsub_{global_sub}/"
    else:
        background_origin = f"{destination}backgrounds/"
        # f"{destination}backgrounds_{method.replace(' ', '')}_local_{local}_globalsub_{global_sub}/"

    trimmed_path = ""
    if trim_image:
        trimmed_path = f"{data_dir}{origin}trimmed_to_background/"
        u.mkdir_check_nested(trimmed_path)

    ra = frb_params["burst_ra"]
    dec = frb_params["burst_dec"]

    if all_synths:
        ras = epoch_params["test_synths"]["ra"]
        decs = epoch_params["test_synths"]["dec"]
    else:
        ras = [ra]
        decs = [dec]

    for fil in filters:
        trimmed_path_fil = ""
        if trim_image:
            trimmed_path_fil = f"{trimmed_path}{fil}/"
            u.mkdir_check(trimmed_path_fil)

        background_fil_dir = f"{background_origin}{fil}/"
        u.mkdir_check_nested(background_fil_dir)
        science_destination_fil = f"{destination}science/{fil}/"
        u.mkdir_check_nested(science_destination_fil)
        files = os.listdir(science_origin + fil + "/")
fil + "/") for file_name in files: if file_name.endswith('.fits'): new_file = file_name.replace("norm", "bg_sub") new_path = f"{science_destination_fil}/{new_file}" print("NEW_PATH:", new_path) science = science_origin + fil + "/" + file_name # First subtract ESO Reflex background images # frame = (frame_deg / f.get_pixel_scale(file=science, astropy_units=True)[1]).to(f.pix).value if eso_back: background_eso = background_origin_eso + fil + "/" + file_name.replace("SCIENCE_REDUCED", "PHOT_BACKGROUND_SCI") ff.subtract_file(file=science, sub_file=background_eso, output=new_path) science_image = new_path if method != "ESO backgrounds only": print(ra, dec) print("Science image:", science) science_image = fits.open(science) print("Science file:", science_image) wcs_this = WCS(header=science_image[0].header) if method == "SExtractor backgrounds only": background = background_origin + fil + "/" + file_name + "_back.fits" print("Background image:", background) else: if method == "median value": print(science_image[0].data.shape) _, background_value, _ = sigma_clipped_stats(science_image[0].data) background = deepcopy(science_image) background[0].data = np.full(shape=science_image[0].data.shape, fill_value=background_value) background_path = background_origin + fil + "/" + file_name.replace("SCIENCE_REDUCED", "PHOT_BACKGROUND_MEDIAN") # Next do background fitting. else: background = deepcopy(science_image) background[0].data = np.zeros(background[0].data.shape) background_path = background_origin + fil + "/" + file_name.replace("SCIENCE_REDUCED", "PHOT_BACKGROUND_FITTED") for i, ra in enumerate(ras): dec = decs[i] x, y = wcs_this.all_world2pix(ra, dec, 0) print(x, y) bottom, top, left, right = ff.subimage_edges(data=science_image[0].data, x=x, y=y, frame=frame) if do_mask: # Produce a pixel mask that roughly masks out the true sources in the image so that # they don't get fitted. 
                                    mask_max = 10
                                    _, pixel_scale = ff.get_pixel_scale(science_image)
                                    sextractor = Table.read(
                                        f"{data_dir}analysis/sextractor/4-divided_by_exp_time/{fil}/"
                                        f"{file_name.replace('.fits', '_psf-fit.cat')}",
                                        format='ascii.sextractor')
                                    weights = np.ones(shape=science_image[0].data.shape)
                                    for obj in filter(
                                            lambda o: left < o["X_IMAGE"] < right and bottom < o["Y_IMAGE"] < top,
                                            sextractor):
                                        mask_rad = min(int(obj["A_WORLD"] * obj["KRON_RADIUS"] / pixel_scale),
                                                       mask_max)
                                        x_prime = int(np.round(obj["X_IMAGE"]))
                                        y_prime = int(np.round(obj["Y_IMAGE"]))
                                        weights[y_prime - mask_rad:y_prime + mask_rad,
                                                x_prime - mask_rad:x_prime + mask_rad] = 0.0
                                    plt.imshow(weights, origin="lower")
                                    plt.savefig(
                                        background_origin + fil + "/" + file_name.replace("norm.fits", "mask.png"))
                                else:
                                    weights = None

                                background_this = fit_background_fits(image=science_image,
                                                                      model_type=method[:method.find(" ")],
                                                                      deg=degree, local=local,
                                                                      global_sub=global_sub,
                                                                      centre_x=x, centre_y=y,
                                                                      frame=frame, weights=weights)
                                background[0].data += background_this[0].data

                                if recorrect_subbed:
                                    offset = get_median_background(
                                        image=science,
                                        ra=epoch_params["renormalise_centre_ra"],
                                        dec=epoch_params["renormalise_centre_dec"],
                                        frame=50, show=False,
                                        output=new_path[:new_path.find("bg_sub")] + "renorm_patch_")
                                    print("RECORRECT_SUBBED:", recorrect_subbed)
                                    print("SUBTRACTING FROM BACKGROUND:", offset)
                                    print(bottom, top, left, right)
                                    print(background[0].data[bottom:top, left:right].shape)
                                    print(np.median(background[0].data[bottom:top, left:right]))
                                    background[0].data[bottom:top, left:right] -= offset
                                    print(np.median(background[0].data[bottom:top, left:right]))

                                if trim_image:
                                    print("TRIMMED_PATH_FIL:", trimmed_path_fil)
                                    science_image = ff.trim_file(
                                        path=science_image, left=left, right=right, top=top, bottom=bottom,
                                        new_path=trimmed_path_fil + file_name.replace("norm.fits",
                                                                                      "trimmed_to_back.fits"))
                                    print("Science after trim:", science_image)
                                    background = ff.trim_file(
                                        path=background, left=left, right=right, top=top, bottom=bottom,
                                        new_path=background_path)

                        print("Writing background to:")
                        print(background_path)
                        background.writeto(background_path, overwrite=True)

                    print("SCIENCE:", science_image)
                    print("BACKGROUND:", background)
                    subbed = ff.subtract_file(file=science_image, sub_file=background, output=new_path)

                    # # TODO: check if regions overlap
                    #
                    # plt.hist(subbed[0].data[int(y - frame + 1):int(y + frame - 1),
                    #                         int(x - frame + 1):int(x + frame - 1)].flatten(),
                    #          bins=10)
                    # plt.savefig(new_path[:new_path.find("bg_sub")] + "histplot.png")
                    # plt.close()

    copyfile(data_dir + "/" + origin + "/" + data_title + ".log", destination + data_title + ".log")
    u.write_log(path=destination + data_title + ".log",
                action=f'Backgrounds subtracted using 4-background_subtract.py with method {method}\n')
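
# Illustrative sketch (not part of the original pipeline): a minimal, standalone version of the
# source-masking step above. Given catalogue positions, it zeroes square patches in a weight map
# so that a background fit ignores real sources. The array size, positions and radii below are
# invented purely for demonstration; numpy is already imported as np in this module.
def _demo_source_mask_weights(shape=(100, 100), sources=((20, 30, 3), (60, 75, 6), (90, 10, 20)), mask_max=10):
    # Start with every pixel weighted equally.
    weights = np.ones(shape=shape)
    for x_obj, y_obj, radius in sources:
        # Cap the mask radius, as the pipeline does with mask_max.
        rad = min(int(radius), mask_max)
        # Zero a square patch centred on the (x, y) position; note the (row, column) = (y, x) order.
        weights[y_obj - rad:y_obj + rad, x_obj - rad:x_obj + rad] = 0.0
    return weights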
    if not quiet:
        print('Saving parameter file to ' + output)
    for param in p:
        if type(p[param]) is date:
            p[param] = str(p[param])
    with open(output, 'w') as fj:
        json.dump(p, fj)
    return p


config = check_for_config()
param_path = u.check_trailing_slash(config['param_dir'])


def path_or_params_obj(obj: Union[dict, str], instrument: str = 'FORS2', quiet: bool = False):
    if type(obj) is str:
        return obj, object_params_instrument(obj, instrument=instrument, quiet=quiet)
    elif type(obj) is dict:
        params = obj
        obj = params['data_title']  # TODO: This is broken since you removed data_title from epoch params.
        return obj, params
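
# Hedged usage sketch (illustrative only; "FRB181112_1" is a made-up epoch name):
#
#     obj_name, params = path_or_params_obj("FRB181112_1", instrument='FORS2')
#     obj_name, params = path_or_params_obj(params)  # also accepts an already-loaded params dict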
def fits_table(input_path: str, output_path: str = "", science_only: bool = True):
    """
    Produces and writes to disk a table of .fits files in the given path, with the vital statistics of each.
    Intended only for use with raw ESO data.
    :param input_path:
    :param output_path:
    :param science_only: If True, we are writing a list for a folder that also contains calibration files, which we
        want to ignore.
    :return:
    """
    # If there's no trailing slash in the input path, add one.
    input_path = u.check_trailing_slash(input_path)

    if output_path == "":
        output_path = input_path + "fits_table.csv"
    elif output_path[-4:] != ".csv":
        if output_path[-1] == "/":
            output_path = output_path + "fits_table.csv"
        else:
            output_path = output_path + ".csv"

    print('Writing table of fits files to: \n', output_path)

    files = os.listdir(input_path)
    files.sort()
    files_fits = []

    # Keep only the relevant fits files
    for f in files:
        if f[-5:] == ".fits":
            files_fits.append(f)

    # Create list of dictionaries to be used as the output data
    output = []

    ids = string.ascii_lowercase
    if len(ids) < len(files_fits):
        ids = ids + string.ascii_uppercase
    if len(ids) < len(files_fits):
        ids = ids + string.digits

    for i, f in enumerate(files_fits):
        data = {}
        file = fits.open(input_path + f)
        header = file[0].header
        data['identifier'] = f
        if science_only and ('ESO DPR CATG' not in header or 'SCIENCE' not in header['ESO DPR CATG']):
            file.close()
            continue
        if len(ids) >= len(files_fits):
            data['id'] = ids[i]
        if "OBJECT" in header:
            data['object'] = header["OBJECT"]
        if "ESO OBS NAME" in header:
            data['obs_name'] = header["ESO OBS NAME"]
        if "EXPTIME" in header:
            data['exp_time'] = header["EXPTIME"]
        if "AIRMASS" in header:
            data['airmass'] = header["AIRMASS"]
        elif "ESO TEL AIRM START" in header and "ESO TEL AIRM END" in header:
            data['airmass'] = (header["ESO TEL AIRM START"] + header["ESO TEL AIRM END"]) / 2
        if "CRVAL1" in header:
            data['ref_ra'] = header["CRVAL1"]
        if "CRVAL2" in header:
            data['ref_dec'] = header["CRVAL2"]
        if "CRPIX1" in header:
            data['ref_pix_x'] = header["CRPIX1"]
        if "CRPIX2" in header:
            data['ref_pix_y'] = header["CRPIX2"]
        if "EXTNAME" in header:
            data['chip'] = header["EXTNAME"]
        elif "ESO DET CHIP1 ID" in header:
            if header["ESO DET CHIP1 ID"] == 'CCID20-14-5-3':
                data['chip'] = 'CHIP1'
            if header["ESO DET CHIP1 ID"] == 'CCID20-14-5-6':
                data['chip'] = 'CHIP2'
        if "GAIN" in header:
            data['gain'] = header["GAIN"]
        if "INSTRUME" in header:
            data['instrument'] = header["INSTRUME"]
        if "ESO TEL AIRM START" in header:
            data['airmass_start'] = header["ESO TEL AIRM START"]
        if "ESO TEL AIRM END" in header:
            data['airmass_end'] = header["ESO TEL AIRM END"]
        if "ESO INS OPTI3 NAME" in header:
            data['collimater'] = header["ESO INS OPTI3 NAME"]
        if "ESO INS OPTI5 NAME" in header:
            data['filter1'] = header["ESO INS OPTI5 NAME"]
        if "ESO INS OPTI6 NAME" in header:
            data['filter2'] = header["ESO INS OPTI6 NAME"]
        if "ESO INS OPTI7 NAME" in header:
            data['filter3'] = header["ESO INS OPTI7 NAME"]
        if "ESO INS OPTI9 NAME" in header:
            data['filter4'] = header["ESO INS OPTI9 NAME"]
        if "ESO INS OPTI10 NAME" in header:
            data['filter5'] = header["ESO INS OPTI10 NAME"]
        if "ESO INS OPTI8 NAME" in header:
            data['camera'] = header["ESO INS OPTI8 NAME"]
        if "NAXIS1" in header:
            data['pixels_x'] = header["NAXIS1"]
        if "NAXIS2" in header:
            data['pixels_y'] = header["NAXIS2"]
        if "SATURATE" in header:
            data['saturate'] = header["SATURATE"]
        if "MJD-OBS" in header:
            data['mjd_obs'] = header["MJD-OBS"]
        output.append(data)
        file.close()

    output.sort(key=lambda a: a['identifier'])

    out_file = pd.DataFrame(output)
    out_file.to_csv(output_path)

    return out_file
    p = load_params(file=yaml_file)
    u.debug_print(1, 'Saving parameter file to ' + output)
    for param in p:
        if type(p[param]) is date:
            p[param] = str(p[param])
    with open(output, 'w') as fj:
        json.dump(p, fj)
    return p


config = check_for_config()
param_dir = u.check_trailing_slash(config['param_dir'])
project_path = u.check_trailing_slash(config['proj_dir'])
data_path = u.check_trailing_slash(config["top_data_dir"])
furby_path = None
if "furby_dir" in config and config["furby_dir"] is not None:
    furby_path = u.check_trailing_slash(config["furby_dir"])


def get_project_git_hash(short: bool = False):
    return u.get_git_hash(directory=project_path, short=short)


def path_or_params_obj(obj: Union[dict, str], instrument: str = 'FORS2', quiet: bool = False):
    if type(obj) is str: