def enable_debug(self, **kwargs):
    """
    Extracts the default data from the FITS files.

    Parameters
    ----------
    base_folder:
        Path to the folder in which the FITS files are located

    Returns
    -------

    """
    try:
        logger.info("Searching for the file")
        default_path = path_finder(mode="default", **kwargs)
    except Exception:
        logger.fatal("Could not find the path to the file", exc_info=True)
        return -1

    with fits.open(default_path) as hdulist:
        self.default_lightcurve = hdulist[1].data["flux"]
        self.DRP_uncert = hdulist[1].data["FLUXERR"]

    self.debug = 1
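# Hedged usage sketch for enable_debug: it assumes the method is called on an
# already configured :class:`~pyarchi.data_objects.Data.Data` instance and that
# ``base_folder`` points to the folder holding the DRP FITS products. The object
# name and path below are illustrative only, not part of this module:
#
#     data = Data(**config)
#     if data.enable_debug(base_folder="/path/to/DRP/products") == -1:
#         logger.error("Could not load the DRP light curve for debugging")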
def export_txt(Data_fits, path, **kwargs):
    """
    Stores the light curves in a text file with the following format:

        MJD_TIME; ROLL_ANGLE; FLUX <Star i>; FLUX_ERR <Star i>

    Parameters
    ----------
    Data_fits:
        :class:`~pyarchi.data_objects.Data.Data` object.
    path:
        Path in which the file shall be stored
    kwargs:
        Configuration values

    Returns
    -------

    """
    default_path = path_finder(mode="default", **kwargs)
    file_name = "/" + kwargs["base_folder"].split("/")[-2].split("/")[-1]

    try:
        hdulist = fits.open(default_path)
    except IOError:
        logger.error("Lightcurve-Default file not found", exc_info=True)
        return -1
    except Exception:
        logger.fatal("Unspecified error. Refer to previous log messages", exc_info=True)
        return -1
    else:
        with hdulist:
            roll_ang = hdulist[1].data["ROLL_ANGLE"]
            mjd_time = hdulist[1].data["MJD_TIME"]

    off_curve = default_path.split("-")[-1].split("_")[0]

    with fits.open(default_path) as hdulist:
        default_lightcurve = hdulist[1].data["flux"]
        default_err = hdulist[1].data["FLUXERR"]

    header = "mjd_time; roll_ang; {} lightcurve".format(off_curve)
    np.savetxt(
        path + file_name + ".txt",
        np.c_[mjd_time, roll_ang, default_lightcurve, default_err],
        delimiter=" ",
        header=header,
    )

    results_file = os.path.join(path, "pyarchi_output.txt")

    header = "Method - {}\n".format(kwargs["method"])
    header += "Detect mode - {}\n".format(kwargs["detect_mode"])
    header += "Initial load - {}\n".format(kwargs["initial_detect"])
    header += "Background grid - {}\n".format(kwargs["grid_bg"])

    optim_mask = get_optimized_mask(**kwargs)

    header += "Stars information:\n"
    for key, val in optim_mask.items():
        header += "\t Star: {}; Factor: {}; Out of bounds: {}\n".format(
            key, val, Data_fits.stars[int(key)].out_bound
        )
    header += "mjd_time; roll_ang; -> Stars"

    # Interleave each star's flux curve with its uncertainty curve
    a = Data_fits.all_curves
    b = Data_fits.all_uncertainties
    c = np.empty((a.shape[0] * 2, a.shape[1]), dtype=a.dtype)
    c[0::2] = a
    c[1::2] = b

    np.savetxt(
        results_file,
        np.vstack((mjd_time, roll_ang, c)).T,
        delimiter=" ",
        header=header,
    )
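# Minimal sketch of the row-interleaving step used in export_txt above: flux rows
# and uncertainty rows are woven together so each star's light curve is followed
# immediately by its errors. The helper name is illustrative, not part of pyarchi.
def _interleave_rows(fluxes, uncertainties):
    """Interleave two equally shaped 2-D arrays row by row (f0, u0, f1, u1, ...)."""
    out = np.empty((fluxes.shape[0] * 2, fluxes.shape[1]), dtype=fluxes.dtype)
    out[0::2] = fluxes
    out[1::2] = uncertainties
    return out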
def _init_load(self, **kwargs):
    """
    Opens the FITS files and extracts the roll angle and the images.

    Parameters
    ----------
    kwargs

    Returns
    -------

    """
    self._image_dict = {}

    logger.info("Extracting official pipeline Lightcurve information")
    default_path = path_finder(mode="default", **kwargs)
    subarray_path = path_finder(mode="subarray", **kwargs)

    try:
        hdulist = fits.open(default_path)
    except IOError:
        logger.error("Official Lightcurve file not found")
        self._error_flag = 1
        return -1
    else:
        with hdulist:
            self.roll_ang = hdulist[1].data["ROLL_ANGLE"]
            self.mjd_time = hdulist[1].data["MJD_TIME"]
            self.offsets = list(
                zip(hdulist[1].data["CENTROID_X"], hdulist[1].data["CENTROID_Y"])
            )

            if self.calc_uncert:
                possible_curves = ["DEFAULT", "OPTIMAL", "RSUP", "RINF"]
                possible_curves.remove(kwargs["official_curve"])
                darks = []

                # Aperture area (in pixels) of the official light curve
                def_points = np.pi * (hdulist[1].header["AP_RADI"]) ** 2
                self.uncertainties_params["bg"] = (
                    hdulist[1].data["BACKGROUND"] / def_points
                )
                curr_dark = hdulist[1].data["DARK"] / def_points
                darks.append(curr_dark)

                for curve in possible_curves:
                    default_path = path_finder(mode="default", off_curve=curve, **kwargs)
                    with fits.open(default_path) as file:
                        def_points = np.pi * (file[1].header["AP_RADI"]) ** 2  # pylint: disable=no-member
                        curr_dark = file[1].data["DARK"] / def_points  # pylint: disable=no-member
                        darks.append(curr_dark)

                self.uncertainties_params["dark"] = np.median(darks, axis=0)
                self.uncertainties_params["t_exp"] = hdulist[1].header["EXPTIME"]
                self.uncertainties_params["nstack"] = hdulist[1].header["NEXP"]

    try:
        hdulist = fits.open(subarray_path)
    except IOError:
        logger.error("Subarray file not found")
        self._error_flag = 1
        return -1
    else:
        with hdulist:
            imgs = hdulist[1].data
            self._imgs = imgs.astype(float)
            self.image_number = self._imgs.shape[0]

            if self.calc_uncert:
                self.uncertainties_params["cron"] = 1.96 * hdulist[2].data["RON"]
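# Sketch of the per-pixel normalisation used in _init_load above: the BACKGROUND
# and DARK columns appear to be totals over the photometric aperture, so they are
# divided by the aperture area (pi * AP_RADI**2 pixels). The helper name is
# illustrative only and relies on the module's existing numpy import.
def _per_pixel(aperture_sum, aperture_radius):
    """Convert an aperture-summed quantity into a per-pixel value."""
    return aperture_sum / (np.pi * aperture_radius ** 2)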
def centers_from_fits(primary, secondary, stars, initial_angle, initial_offset, **kwargs):
    """
    Using information stored in the FITS files, we determine the positions of the
    centers. The centers are determined using relations between the differences in
    RA and DEC of all stars in relation to the known point: the central star.
    After determining the center, we use the primary and secondary arguments to see
    if this function should change the initial position of the star.

    Parameters
    ----------
    primary: str
        Methodology to apply to the central star. If it's fits then the initial
        position of that star is changed to be the one determined here.
    secondary: str
        Methodology to apply to the outer stars. If it's fits then the initial
        positions of those stars are changed to be the ones determined here.
    stars: list
        List with all the stars found with the dynam method
    initial_angle: float
        Rotation angle of the satellite for the first image
    initial_offset: list
        DRP's estimation of the central star location
    kwargs
        kwargs

    Returns
    -------
    list
        Updated list of stars, with the positions determined by the fits method
    """
    if primary != "fits" and secondary != "fits":
        return stars

    to_calculate = []
    if primary == "fits":
        to_calculate = to_calculate + [0]
    if secondary == "fits":
        to_calculate = to_calculate + [1]

    arcsec = 1 / 3600
    arcsec_per_pix = 1
    scaling_factor = kwargs["grid_bg"] / 200 if kwargs["grid_bg"] != 0 else 1
    r_ang = initial_angle

    stars_path = path_finder(mode="stars", **kwargs)
    try:
        hdulist = fits.open(stars_path)
    except IOError:
        logger.error("Star Catalogue file not found")
        return -1
    else:
        with hdulist:
            cent_ra = hdulist[1].header["CENT_RA"]
            cent_dec = hdulist[1].header["CENT_DEC"]
            first_RA = hdulist[1].data["RA"]
            first_DEC = hdulist[1].data["DEC"]
            mags = hdulist[1].data["MAG_CHEOPS"]

    logger.info(
        "Extracted StarCatalogue's information; Starting the position analysis process"
    )

    central_star = initial_offset
    valid_stars = 0  # number of valid stars found so far. There are more than the usable ones
    distances = []
    positions = []

    if 0 in to_calculate:  # for the central star
        pos = [central_star[1] - 412, -412 + central_star[0]]
        pos = np.multiply(pos, scaling_factor) + np.floor(scaling_factor / 2)
        stars[valid_stars].change_init_pos(pos)

    for index in range(len(first_DEC)):
        star_ra, star_dec = first_RA[index], first_DEC[index]
        r_mat = matrix_cnter_clock((360 - r_ang) * np.pi / 180)

        # http://spiff.rit.edu/classes/phys373/lectures/astrom/astrom.html
        delta_RA = (cent_ra - star_ra) * np.cos((cent_dec * np.pi / 180)) / arcsec
        delta_DEC = (cent_dec - star_dec) / arcsec

        initial_coords = [[delta_RA], [delta_DEC]]
        new_coords = np.dot(r_mat, initial_coords)

        star_ra = new_coords[0][0]
        star_dec = new_coords[1][0]

        x_pos = 100 - (central_star[1] - 512 + arcsec_per_pix * star_dec)
        y_pos = 100 + central_star[0] - 512 + arcsec_per_pix * star_ra

        if 0 < x_pos < 200 and 0 < y_pos < 200:
            if mags[index] <= 13:
                distances.append(
                    np.sqrt(
                        (x_pos - (central_star[0] - 412)) ** 2
                        + (y_pos - (central_star[1] - 412)) ** 2
                    )
                )
                positions.append([x_pos, y_pos])

    valid_stars = 1
    warning_mismatch = 0

    if len(distances) != len(stars):
        logger.warning(
            "Mismatch between fits and dynam initial detection methods. "
            "Possible non-visible stars inside the image!!"
        )
        logger.warning(
            "Creating all of the background stars from fits file; "
            "Disregarding data from 'dynam' method"
        )
        warning_mismatch = 1
        stars = stars[:1]
        Star.reset_number(1)

    for dist in sorted(distances[1:]):
        pos = positions[distances.index(dist)]
        if 1 in to_calculate:  # for all other stars
            pos = np.multiply(pos, scaling_factor) + np.floor(scaling_factor / 2)
            if warning_mismatch:  # if the fits and dynam do not match, create from scratch
                stars.append(Star(kwargs["CDPP_type"], pos, dist))
            else:
                stars[valid_stars].change_init_pos(pos)
        valid_stars += 1

    return stars
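# Hedged sketch of the rotation used in centers_from_fits: `matrix_cnter_clock`
# is defined elsewhere in pyarchi and is assumed here to build the standard 2-D
# counter-clockwise rotation matrix for an angle given in radians. Stand-in only.
def _counter_clockwise_rotation(theta_rad):
    """Standard 2-D counter-clockwise rotation matrix (illustrative stand-in)."""
    return np.array(
        [
            [np.cos(theta_rad), -np.sin(theta_rad)],
            [np.sin(theta_rad), np.cos(theta_rad)],
        ]
    )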
def create_fits(master_folder, data_fits, **kwargs):
    """
    Export the results to a fits file, containing the light curves and a
    correspondence of star to Cv and respective radius factor.

    Parameters
    ----------
    master_folder:
        Path in which the data shall be stored
    data_fits:
        :class:`~pyarchi.data_objects.Data.Data` object.
    kwargs

    Notes
    -----
    Data stored in the header unit of the file:

        =========  ============================
        Keyword    data
        =========  ============================
        method     type of mask used
        detect     tracking method
        initial    initial detection method
        grid       size of the background grid
        CDPP_TYPE  CDPP algorithm in use
        =========  ============================

    In the data unit of the file, we have each star, with the corresponding time,
    rotation angle, flux values and uncertainties.
    """
    # TODO: store more info -> improve organization
    logger.info("Extracting data to FITS file")

    hdus = []
    default_path = path_finder(mode="default", **kwargs)

    try:
        hdulist = fits.open(default_path)
    except IOError:
        logger.fatal("File does not exist")
        return -1
    else:
        with hdulist:
            roll_ang = hdulist[1].data["ROLL_ANGLE"]
            mjd_time = hdulist[1].data["MJD_TIME"]

    col1 = fits.Column(name="MJD_TIME", format="E", array=mjd_time)
    col2 = fits.Column(name="Rotation", unit="deg", format="E", array=roll_ang)

    send_cols = [col1, col2]
    for star in data_fits.stars:
        send_cols.append(fits.Column(name=star.name, format="E", array=star.photom))
        send_cols.append(
            fits.Column(
                name="FLUX_ERR_{}".format(star.number),
                format="E",
                array=star.uncertainties,
            )
        )

    cols = fits.ColDefs(send_cols)

    hdr = fits.Header()
    hdr["method"] = kwargs["method"]
    hdr["detect"] = kwargs["detect_mode"]
    hdr["initial"] = kwargs["initial_detect"]
    hdr["grid"] = kwargs["grid_bg"]
    hdr["CDPPTYPE"] = kwargs["CDPP_type"]

    primary_hdu = fits.PrimaryHDU(header=hdr)
    hdus.append(primary_hdu)

    hdu = fits.BinTableHDU.from_columns(cols, name="Photometry")
    hdus.append(hdu)

    col1 = fits.Column(
        name="Star", format="B", array=[star.number for star in data_fits.stars]
    )
    col2 = fits.Column(
        name="Cv",
        format="E",
        array=[star.calculate_cdpp(data_fits.mjd_time)[0] for star in data_fits.stars],
    )
    col3 = fits.Column(
        name="Factors", format="E", array=[star.mask_factor for star in data_fits.stars]
    )
    col4 = fits.Column(
        name="Out bounds", format="E", array=[star.out_bound for star in data_fits.stars]
    )
    cols = fits.ColDefs([col1, col2, col3, col4])
    hdu = fits.BinTableHDU.from_columns(cols, name="General")
    hdus.append(hdu)

    hdul = fits.HDUList(hdus)
    hdul.writeto(os.path.join(master_folder, "pyarchi_output.fits"), overwrite=True)

    logger.info("Fits file was created")
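# Illustrative sketch of reading back the file written by create_fits. Only
# standard astropy.io.fits calls are used; the helper name is hypothetical and
# `master_folder` must be the same path passed to create_fits.
def _read_pyarchi_output(master_folder):
    """Return the Photometry and General tables from pyarchi_output.fits."""
    with fits.open(os.path.join(master_folder, "pyarchi_output.fits")) as hdul:
        photometry = hdul["Photometry"].data.copy()  # time, rotation, per-star fluxes and errors
        general = hdul["General"].data.copy()        # star number, Cv, factor, out-of-bounds count
    return photometry, general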