def create_mbias(obs, plp, b_cluster, index):
    """ Create and save the master bias for a specified b_cluster.

    Wraps the Observation object's create_master_bias method to construct
    a master bias corresponding to the passed cluster. The newly generated
    master bias is saved directly within this function, under plp.cor_dir.
    The saved header is a copy of the header of the final frame of this
    bias cluster, extended with the list of source files.

    Parameters
    ----------
    obs : Observation
        The observation this cluster belongs to (kept for a uniform
        create_* signature; not read here).
    plp : PipelineProduct
        Provides file info, the master-bias builder and the output dir.
    b_cluster : sequence of str
        Paths of the bias frames that form this cluster.
    index : int
        Zero-based cluster index, used in messages and the output name.
    """
    # Only the binning matters here; FILTER is irrelevant for bias frames,
    # so the second value is deliberately discarded
    binning, _ = plp.get_file_info(b_cluster[-1], ["BINNING", "FILTER"])
    ps.running(f"Creating {binning} Master bias for cluster {index+1}")

    # Generate the master bias
    mbias = plp.create_master_bias(b_cluster)

    # Base the header on the final frame and record the source files in it
    header = fits.getheader(b_cluster[-1])
    header = BlaauwPipe.header_add_source(header, b_cluster)

    # Save the master bias and the updated header
    filename = "master_bias" + binning + "C" + str(index + 1) + ".fits"
    savepath = os.path.join(plp.cor_dir, filename)
    BlaauwPipe.save_fits(savepath, data=mbias, header=header)
    ps.updateDone(
        f"{binning} Master bias saved at {BlaauwPipe.strip_filepath(savepath)}"
    )
def reduce_imgs(self, obs, plp, max_days_off=365):
    """ Loop over every light file and reduce it.

    Wrapper that sorts the observation's light files chronologically and
    calls reduce_img() on each of them, reporting progress and a final
    summary of successes and failures.

    Parameters
    ----------
    obs : Observation
        The observation whose light files are being reduced.
    plp : PipelineProduct
        Provides the light-file list and the reduced-output directory.
    max_days_off : int, optional
        Maximum age (in days) of the master correction frames that
        reduce_img() may use. Defaults to 365 (the previously
        hard-coded value, so existing callers are unaffected).
    """
    # Initialise or create the saving dir for reduced content
    if not os.path.isdir(plp.red_dir):
        os.mkdir(plp.red_dir)
        ps.done(
            f"Savepath created at {BlaauwPipe.strip_filepath(plp.red_dir)}")

    # Sort the light files on their observation timestamp
    lightFiles = sorted(
        plp.lightFiles,
        key=lambda file: datetime.strptime(
            fits.getval(file, 'DATE-OBS'), "%Y-%m-%dT%H:%M:%S.%f"))

    # NOTE: a future improvement could compute time-based weights
    # (creation time scaled 0..1 over the night) to interpolate between
    # multiple master frames instead of picking the closest one.

    self.failed_reds = 0

    # Loop over every light file in the target; enumerate avoids the
    # O(n) list.index lookup the original performed per iteration
    for file_idx, light_file in enumerate(lightFiles):
        filename = os.path.basename(light_file)
        ps.progressBar(file_idx, len(lightFiles),
                       f"Reducing light file: {filename}", log=False)
        self.reduce_img(obs, plp, light_file, max_days_off)

    ps.updateDone(f"Reduction process finished", progressbar=True)

    # Summarise how many files were (not) reduced successfully
    if len(lightFiles) - self.failed_reds > 0:
        ps.done(
            f"Succesfully reduced {len(lightFiles) - self.failed_reds} light files!"
        )
    if self.failed_reds > 0:
        ps.warning(
            f"{self.failed_reds} light files were not optimally reduced!")
def check_obs(self):
    """ Sanity-check the generated Observation object.

    Reports the number of fits files, light frames and each type of
    correction frame (bias, dark, flat) that was found, warning about
    any missing type.

    Raises
    ------
    ers.MissingFramesError
        When no correction frames of any type are present.
    """
    obs = self.obs

    # Update the user on the found content
    ps.updateDone(f"Found {len(obs.files)} fits files")
    ps.done(f" {len(obs.lightFiles)} light frames")

    # Check each correction frame type in turn; the tuples carry the
    # file list, the warning for a missing type, and the noun used in
    # the count message (note: flats are "fields", not "frames")
    checks = (
        (obs.biasFiles, "No bias files were found", "bias frames"),
        (obs.darkFiles, "No dark files were found", "dark frames"),
        (obs.flatFiles, "No flat fields were found", "flat fields"),
    )
    frame_check = 0
    for files, missing_msg, noun in checks:
        if files:
            frame_check += 1
            ps.done(f" {len(files)} {noun}")
        else:
            ps.warning(missing_msg)

    # Not likely to happen, but raise an error if
    # literally no correction frames were found
    if frame_check == 0:
        raise ers.MissingFramesError("No suitable correction frames found")
    return
def on_run(self, obs, plp):
    """ Back up all files of the passed observation into plp.raw_dir.

    Each file is copied, given rw-r--r-- permissions, and gets a TRAW
    header keyword recording the path of the raw original.

    Parameters
    ----------
    obs : Observation
        The observation whose files are copied.
    plp : PipelineProduct
        Provides the backup directory (raw_dir).
    """
    # Construct the save path
    if not os.path.isdir(plp.raw_dir):
        os.mkdir(plp.raw_dir)
        ps.done(
            f"Savepath created at {BlaauwPipe.strip_filepath(plp.raw_dir)}")

    # Make a copy of each file; enumerate avoids the O(n) list.index
    # lookup per iteration
    for file_idx, ori_file in enumerate(obs.files):
        filename = os.path.basename(ori_file)
        filepath = os.path.join(plp.raw_dir, filename)

        # Update user
        ps.progressBar(file_idx, len(obs.files),
                       f"Copying file to backup: {filename}")

        # Copy files
        copy2(ori_file, filepath)

        # Use chmod to ensure write permission (- rw- r-- r--)
        os.chmod(filepath,
                 stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR)

        # Add header keyword TRAW pointing back at the raw original
        header = fits.getheader(filepath)
        header = BlaauwPipe.header_add_traw(header, ori_file)
        BlaauwPipe.save_fits(filepath, header=header)

    ps.updateDone(f"Copying files done", progressbar=True)
    ps.done(f"Changed file permissions to rw-r--r--")
    ps.done(f"Added TRAW keyword to headers")
    ps.done(f"Successfully copied and prepared {len(obs.files)} files!")
def reduce_img(self, obs, plp, light_file, max_days_off):
    """ Reduce a single light file using the closest master frames.

    Looks up master bias/dark/flat frames matching the light file's
    binning (and filter, for the flat) at most max_days_off days from
    its observation date, applies the standard reduction
    (data - bias - dark*exptime) / flat, and saves the result in
    plp.red_dir. The raw file's header gets a PRED reference back to
    the reduced product. When no suitable masters exist, or when the
    ones used are not from the same night, the file is appended to the
    pending log so a later run can redo it, and self.failed_reds is
    incremented.

    Parameters
    ----------
    obs : Observation
        The observation the light file belongs to (not read here).
    plp : PipelineProduct
        Provides file info, directories and the master-frame lookup.
    light_file : str
        Path of the light frame to reduce.
    max_days_off : int
        Maximum allowed age (days) of the master correction frames.
    """
    # Retrieve basic information
    binning, fltr, exptime, crtn = plp.get_file_info(
        light_file, ["BINNING", "FILTER", "EXPTIME", "DATE-OBS"])
    creation_datetime = datetime.strptime(crtn, "%Y-%m-%dT%H:%M:%S.%f")

    # Retrieve the closest master correction frames
    try:
        master_bias, mbias_path, bias_off = BlaauwPipe.get_closest_master(
            creation_datetime, plp, max_days_off, binning, "master_bias")
        master_dark, mdark_path, dark_off = BlaauwPipe.get_closest_master(
            creation_datetime, plp, max_days_off, binning, "master_dark")
        master_flat, mflat_path, flat_off = BlaauwPipe.get_closest_master(
            creation_datetime, plp, max_days_off, binning, "master_flat",
            fltr=fltr)
    except ers.SuitableMasterMissingError as err:
        # No usable masters found; record the file in the pending log so
        # a future run (with more correction frames) can fix this folder
        folder_datetime = datetime.strptime(
            plp.working_dir.replace(cst.base_path, '').split(os.sep)[1],
            '%y%m%d')
        new_line = np.array([
            folder_datetime.date(), "Light file", binning, fltr, "?", "?",
            "?", "-", light_file
        ])
        pd.append_pending_log(new_line)
        ps.updateFailed(
            f"Failed to reduce light file ({err}): {light_file}",
            progressbar=True)
        self.failed_reds += 1
        return

    # Open the content of the current light fits; the context manager
    # closes the handle again (the original leaked it)
    with fits.open(light_file) as hduList:
        hdu_data = hduList[0].data

    # Add source files to the header
    header = fits.getheader(light_file)
    # Add the raw version to this pipeline reduced file
    header = BlaauwPipe.header_add_praw(header, light_file)
    header = BlaauwPipe.header_add_mbias(header, mbias_path,
                                         days_off=bias_off)
    header = BlaauwPipe.header_add_mdark(header, mdark_path,
                                         days_off=dark_off)
    header = BlaauwPipe.header_add_mflat(header, mflat_path,
                                         days_off=flat_off)

    # Reduce the content and save it
    hdu_data_red = (hdu_data - master_bias -
                    master_dark * exptime) / master_flat
    filename_ori = os.path.basename(light_file)
    savepath = os.path.join(plp.red_dir, filename_ori)
    BlaauwPipe.save_fits(savepath, data=hdu_data_red, header=header)

    # Add the current file to the raw version as PRED
    header = fits.getheader(light_file)
    header = BlaauwPipe.header_add_pred(header, savepath)
    BlaauwPipe.save_fits(light_file, data=hdu_data, header=header)

    # Add to the pending log if need be
    max_off = abs(max(bias_off, dark_off, flat_off))
    if max_off > 0:
        folder_datetime = datetime.strptime(
            plp.working_dir.replace(cst.base_path, '').split(os.sep)[1],
            '%y%m%d')
        new_line = np.array([
            folder_datetime.date(), "Light file", binning, fltr, bias_off,
            dark_off, flat_off,
            (folder_datetime + timedelta(days=max_off)).date(), light_file
        ])
        pd.append_pending_log(new_line)
        self.failed_reds += 1
        # BUG FIX: the original interpolated an undefined name `days_off`
        # here, raising a NameError whenever a master frame was days off
        ps.updateWarning(
            f"Reduced light file with non-zero days-off ({max_off}) saved at {BlaauwPipe.strip_filepath(savepath)}",
            progressbar=True)
    else:
        ps.updateDone(
            f"Reduced light file saved at {BlaauwPipe.strip_filepath(savepath)}",
            progressbar=True)

    ps.newline()
    return
def main(self, obs, plp):
    """ Submit reduced light files to Astrometry and store the results.

    Uploads (at most three) reduced files, waits for each job to
    finish, and for every solved field appends the returned WCS header
    to a copy of the file, saved under a new name via change_filename().
    Progress and a final summary are reported to the user.
    """
    ps.running("Initializing client...")
    client = Client(api_key=cst.api_key)
    ps.updateDone("Logged into Astrometry")

    # set view field width in this range (15-30 arcmin)
    # WARNING: this can be very different for your application.
    client.settings.set_scale_range(15, 30)
    #client.settings.use_sextractor = True

    # Only the first three reduced files are submitted
    fits_files = plp.red_files[:3]

    # upload_files_gen returns a generator yielding (job, filename)
    # pairs as the jobs finish
    ps.running("Preparing files for uploading...")
    job_iter = client.upload_files_gen(
        fits_files)  #, filter_func=self.enough_sources)
    ps.updateDone("Light files are ready for Astrometry")

    n_done = 0
    n_solved = 0
    for job, filename in job_iter:
        n_done += 1

        # Guard clause: unsolved fields just advance the progress bar
        if not job.success():
            ps.progressBar(
                n_done, len(fits_files),
                f"Astrometry failed for {BlaauwPipe.strip_filepath(filename)}"
            )
            ps.newline()
            continue

        # Fetch the WCS solution of this successful job
        wcs = job.wcs_file()
        n_solved += 1
        ps.progressBar(
            n_done, len(fits_files),
            f"Received WCS for {BlaauwPipe.strip_filepath(filename)}")
        ps.newline()

        with fits.open(filename) as hdul:
            # append resulting header (with astrometry) to existing header
            hdul[0].header.extend(wcs)
            astrom_file = self.change_filename(filename, plp)
            ps.running(
                f"Writing to {BlaauwPipe.strip_filepath(astrom_file)}...")
            hdul.writeto(astrom_file)
            ps.updateDone(
                f"Astrometry results are saved to {BlaauwPipe.strip_filepath(astrom_file)}"
            )

    # Final summary for the user
    if n_solved > 0:
        ps.done(
            f"{n_solved} files were successfully run through Astrometry!")
    if n_done - n_solved > 0:
        ps.warning(f"{n_done-n_solved} files could not be resolved!")
def create_mflat(obs, plp, f_cluster, fltr, index, max_days_off=365):
    """ Create and save the master flat for a specified f_cluster.

    Wraps the Observation object's create_master_flats method to
    construct a master flat corresponding to the passed cluster. The
    newly generated master flat is saved directly within this function,
    under plp.cor_dir. The saved header is a copy of the header of the
    final frame of this flat cluster, extended with the source files and
    the master bias/dark used. Those master bias and dark frames are the
    closest ones that can be found, up to max_days_off days from the
    current plp.working_dir; non-zero offsets are appended to the
    pending log so a later run can redo the flat.

    Parameters
    ----------
    obs : Observation
        The observation this cluster belongs to (kept for a uniform
        create_* signature; not read here).
    plp : PipelineProduct
        Provides file info, the flat builder and the output dir.
    f_cluster : sequence of str
        Paths of the flat frames that form this cluster.
    fltr : str
        Filter name; overwritten by the FILTER of the last frame.
    index : int
        Zero-based cluster index, used in messages and the output name.
    max_days_off : int, optional
        Maximum allowed age (days) of the master bias/dark. Default 365.
    """
    # Get additional info (the last frame's FILTER deliberately
    # overrides the passed-in fltr)
    binning, fltr = plp.get_file_info(f_cluster[-1], ["BINNING", "FILTER"])
    ps.running(
        f"Creating {binning} Master flat of filter {fltr} for cluster {index+1}"
    )

    # Find the closest master bias and master dark to the flat creation time
    flat_creation = datetime.strptime(
        fits.getval(f_cluster[-1], 'DATE-OBS'), "%Y-%m-%dT%H:%M:%S.%f")
    try:
        closest_mbias, mbias_path, bias_off = BlaauwPipe.get_closest_master(
            flat_creation, plp, max_days_off, binning, "master_bias")
        # BUG FIX: the original logged the literal text
        # "closest_mbias.shape" instead of interpolating the shape
        logging.info(
            f"Closest master bias (days_off={bias_off}) of size "
            f"{closest_mbias.shape}: {mbias_path}")
        closest_mdark, mdark_path, dark_off = BlaauwPipe.get_closest_master(
            flat_creation, plp, max_days_off, binning, "master_dark")
        logging.info(
            f"Closest master dark (days_off={dark_off}) of size "
            f"{closest_mdark.shape}: {mdark_path}")
    except ers.SuitableMasterMissingError as err:
        warnings.warn(
            f"Master flat creation failed: {err} for {plp.working_dir}")
        # logging.warn is a deprecated alias; logging.warning is correct
        logging.warning(
            f"SuitableMasterMissingError: could not find frames for flat "
            f"cluster {index+1} of filter {fltr}")
        # Let's hope the pending log can someday fix this
        new_line = np.array([
            flat_creation.date(), "Flat file", binning, fltr, "?", "?",
            "-", "-", f_cluster[0]
        ])
        pd.append_pending_log(new_line)
        return

    # Generate the master flat using the found master bias and master dark
    mflat = plp.create_master_flats(f_cluster, [fltr], closest_mbias,
                                    closest_mdark)

    # Add source files to the header
    header = fits.getheader(f_cluster[-1])
    header = BlaauwPipe.header_add_source(header, f_cluster)
    header = BlaauwPipe.header_add_mbias(header, mbias_path,
                                         days_off=bias_off)
    header = BlaauwPipe.header_add_mdark(header, mdark_path,
                                         days_off=dark_off)

    # Save the master flat and the updated header
    filename = "master_flat" + binning + fltr + "C" + str(index + 1) + ".fits"
    savepath = os.path.join(plp.cor_dir, filename)
    BlaauwPipe.save_fits(savepath, data=mflat, header=header)
    ps.updateDone(
        f"{binning} Master flat of filter {fltr} saved at {BlaauwPipe.strip_filepath(savepath)}"
    )

    # Add to the pending log if need be
    max_off = abs(max(bias_off, dark_off))
    if max_off > 0:
        folder_datetime = datetime.strptime(
            plp.working_dir.replace(cst.base_path, '').split(os.sep)[1],
            '%y%m%d')
        new_line = np.array([
            folder_datetime.date(), "Flat file", binning, fltr, bias_off,
            dark_off, "-",
            (folder_datetime + timedelta(days=max_off)).date(), savepath
        ])
        pd.append_pending_log(new_line)
def create_pipelineproduct(self):
    """ Build the PipelineProduct for the current working directory.

    Stores the result on self.plp and reports progress to the user.
    """
    ps.running("Initializing Pipeline Product...")
    self.plp = PipelineProduct(self.working_dir)
    ps.updateDone("Pipeline Product initialized")