def create_mbias(obs, plp, b_cluster, index):
    """Create and save the master bias for one bias cluster.

    Wraps the pipeline product's ``create_master_bias`` method to build a
    master bias from the frames in ``b_cluster``. The result is written to
    the pipeline's correction directory with a header copied from the last
    frame of the cluster, extended with the list of source files.

    Parameters
    ----------
    obs : Observation
        Observation object (unused here; kept for a uniform creator
        signature alongside the other ``create_*`` functions).
    plp : PipelineProduct
        Pipeline product providing file info, creation routines and paths.
    b_cluster : sequence of str
        Paths of the raw bias frames forming this cluster.
    index : int
        Zero-based cluster index, used for naming and progress messages.
    """
    # Only the binning matters for a bias; the filter value returned by
    # get_file_info is irrelevant here, so it is deliberately discarded
    # (it was previously bound to an unused local).
    binning, _ = plp.get_file_info(b_cluster[-1], ["BINNING", "FILTER"])
    ps.running(f"Creating {binning} Master bias for cluster {index+1}")

    # Generate the master bias
    mbias = plp.create_master_bias(b_cluster)

    # Copy the header of the final frame and record the source files in it
    header = fits.getheader(b_cluster[-1])
    header = BlaauwPipe.header_add_source(header, b_cluster)

    # Save the master bias and the updated header
    filename = "master_bias" + binning + "C" + str(index + 1) + ".fits"
    savepath = os.path.join(plp.cor_dir, filename)
    BlaauwPipe.save_fits(savepath, data=mbias, header=header)
    ps.updateDone(
        f"{binning} Master bias saved at {BlaauwPipe.strip_filepath(savepath)}"
    )
def create_observation(self):
    """Create the Observation object for the current target.

    Initializes ``self.obs`` from ``self.target`` and runs ``check_obs``
    as a small sanity check that usable data was actually found.

    Note: the original version bound ``self.target`` and ``self.args`` to
    locals; ``args`` was never used, so the dead binding was removed.
    """
    ps.running(f"Looking for files in {self.target}")
    self.obs = Observation(self.target)
    # Verify the freshly created observation contains proper data
    self.check_obs()
    ps.done("Observation Object initialized")
def main(self, obs, plp):
    """Upload reduced light frames to Astrometry.net and save WCS solutions.

    Logs into the Astrometry service, uploads the reduced files, and for
    every successfully solved job appends the returned WCS header to the
    file's primary header, writing the result to a new astrometry file.
    Progress and a final success/failure summary are reported via ``ps``.
    """
    ps.running("Initializing client...")
    c = Client(api_key=cst.api_key)
    ps.updateDone("Logged into Astrometry")

    # set view field width in this range (15-30 arcmin)
    # WARNING: this can be very different for your application.
    c.settings.set_scale_range(15, 30)
    #c.settings.use_sextractor = True

    # Get the raw & corresponding reduced files
    # NOTE(review): the [:3] slice limits processing to the first three
    # reduced files — looks like leftover debug truncation; confirm intent.
    fits_files = plp.red_files[:3]

    # give the iterable of filenames to the function, which returns a
    # generator, generating pairs containing the finished job and filename.
    ps.running("Preparing files for uploading...")
    result_iter = c.upload_files_gen(
        fits_files)  #, filter_func=self.enough_sources)
    ps.updateDone("Light files are ready for Astrometry")

    # file_counter tracks every finished job; success_counter only the
    # ones that produced a WCS solution.
    file_counter = 0
    success_counter = 0

    for job, filename in result_iter:
        if not job.success():
            # Failed job: count it, report, and move on to the next file
            file_counter += 1
            ps.progressBar(
                file_counter, len(fits_files),
                f"Astrometry failed for {BlaauwPipe.strip_filepath(filename)}"
            )
            ps.newline()
            continue

        # retrieve the wcs file from the successful job
        wcs = job.wcs_file()
        file_counter += 1
        success_counter += 1
        ps.progressBar(
            file_counter, len(fits_files),
            f"Received WCS for {BlaauwPipe.strip_filepath(filename)}")
        ps.newline()

        with fits.open(filename) as hdul:
            # append resulting header (with astrometry) to existing header
            hdul[0].header.extend(wcs)
            astrom_file = self.change_filename(filename, plp)
            ps.running(
                f"Writing to {BlaauwPipe.strip_filepath(astrom_file)}...")
            # NOTE(review): writeto raises if astrom_file already exists
            # (no overwrite flag) — presumably change_filename yields a
            # fresh path; verify for re-runs.
            hdul.writeto(astrom_file)
            ps.updateDone(
                f"Astrometry results are saved to {BlaauwPipe.strip_filepath(astrom_file)}"
            )

    # Final summary: successes first, then any unresolved files
    if success_counter > 0:
        ps.done(
            f"{success_counter} files were successfully run through Astrometry!"
        )
    if file_counter - success_counter > 0:
        ps.warning(
            f"{file_counter-success_counter} files could not be resolved!")
def create_mflat(obs, plp, f_cluster, fltr, index, max_days_off=365):
    """Create and save the master flat for one flat cluster.

    Wraps the pipeline product's ``create_master_flats`` method to build a
    master flat from ``f_cluster``, reduced with the closest-in-time master
    bias and master dark (at most ``max_days_off`` days away from the flat
    creation time). The result is saved to the correction directory with a
    header copied from the last frame of the cluster, extended with the
    source files and the master bias/dark that were used. When no suitable
    master bias/dark exists, or when the used masters come from a different
    day, an entry is appended to the pending log so the flat can be redone
    once better calibration frames become available.

    (The previous docstring was a copy-paste from the master-dark creator
    and described the wrong function.)

    Parameters
    ----------
    obs : Observation
        Observation object (unused here; kept for a uniform signature).
    plp : PipelineProduct
        Pipeline product providing file info, creation routines and paths.
    f_cluster : sequence of str
        Paths of the raw flat frames forming this cluster.
    fltr : str
        Filter name; note it is overwritten below with the value read from
        the last frame's header.
    index : int
        Zero-based cluster index, used for naming and progress messages.
    max_days_off : int, optional
        Maximum allowed distance in days for the master bias/dark search.
    """
    # Re-read binning and filter from the last frame's header (this
    # overwrites the fltr argument with the header value)
    binning, fltr = plp.get_file_info(f_cluster[-1], ["BINNING", "FILTER"])
    ps.running(
        f"Creating {binning} Master flat of filter {fltr} for cluster {index+1}"
    )

    # Find the closest master bias and master dark to the flat creation time
    flat_creation = datetime.strptime(
        fits.getval(f_cluster[-1], 'DATE-OBS'), "%Y-%m-%dT%H:%M:%S.%f")
    try:
        closest_mbias, mbias_path, bias_off = BlaauwPipe.get_closest_master(
            flat_creation, plp, max_days_off, binning, "master_bias")
        # BUGFIX: the shapes below were literal text ("closest_mbias.shape")
        # in the log messages; they are now properly interpolated.
        logging.info(
            f"Closest master bias (days_off={bias_off}) of size "
            f"{closest_mbias.shape}: {mbias_path}"
        )
        closest_mdark, mdark_path, dark_off = BlaauwPipe.get_closest_master(
            flat_creation, plp, max_days_off, binning, "master_dark")
        logging.info(
            f"Closest master dark (days_off={dark_off}) of size "
            f"{closest_mdark.shape}: {mdark_path}"
        )
    except ers.SuitableMasterMissingError as err:
        warnings.warn(
            f"Master flat creation failed: {err} for {plp.working_dir}")
        # logging.warn is deprecated in favor of logging.warning
        logging.warning(
            f"SuitableMasterMissingError: could not find frames for flat cluster {index+1} of filter {fltr}"
        )
        # Let's hope the pending log can someday fix this
        new_line = np.array([
            flat_creation.date(), "Flat file", binning, fltr, "?", "?", "-",
            "-", f_cluster[0]
        ])
        pd.append_pending_log(new_line)
        return

    # Generate the master flat using the found master bias and master dark
    mflat = plp.create_master_flats(f_cluster, [fltr], closest_mbias,
                                    closest_mdark)

    # Add source files and the used master bias/dark to the header
    header = fits.getheader(f_cluster[-1])
    header = BlaauwPipe.header_add_source(header, f_cluster)
    header = BlaauwPipe.header_add_mbias(header, mbias_path, days_off=bias_off)
    header = BlaauwPipe.header_add_mdark(header, mdark_path, days_off=dark_off)

    # Save the master flat and the updated header
    filename = "master_flat" + binning + fltr + "C" + str(index + 1) + ".fits"
    savepath = os.path.join(plp.cor_dir, filename)
    BlaauwPipe.save_fits(savepath, data=mflat, header=header)
    ps.updateDone(
        f"{binning} Master flat of filter {fltr} saved at {BlaauwPipe.strip_filepath(savepath)}"
    )

    # Add to the pending log if the used masters were taken on another day
    max_off = abs(max(bias_off, dark_off))
    if max_off > 0:
        folder_datetime = datetime.strptime(
            plp.working_dir.replace(cst.base_path, '').split(os.sep)[1],
            '%y%m%d')
        new_line = np.array([
            folder_datetime.date(), "Flat file", binning, fltr, bias_off,
            dark_off, "-",
            (folder_datetime + timedelta(days=max_off)).date(), savepath
        ])
        pd.append_pending_log(new_line)
def create_pipelineproduct(self):
    """Construct the PipelineProduct for the current working directory.

    Stores the freshly built product on ``self.plp`` and reports progress
    via ``ps``.
    """
    ps.running("Initializing Pipeline Product...")
    product = PipelineProduct(self.working_dir)
    self.plp = product
    ps.updateDone("Pipeline Product initialized")