Example #1
def _solLon2jd(solFunc, year, month, L):
    """ Internal function. Numerically calculates the Julian date from the given solar longitude with the
        given method. The inverse precision is around 0.5 milliseconds.

        Because the solar longitudes around Dec 31 and Jan 1 can be ambigous, the month also has to be given.

    Arguments:
        solFunc: [function] Function which calculates solar longitudes from Julian dates.
        year: [int] Year of the event.
        month: [int] Month of the event.
        L: [float] Solar longitude (radians), J2000 epoch.

    Return:
        JD: [float] Julian date.

    """

    def _previousMonth(year, month):
        """ Internal function. Calculates the previous month. """

        dt = datetime.datetime(year, month, 1, 0, 0, 0)

        # Get some day in the previous month
        prev_month = dt.replace(day=1) - datetime.timedelta(days=4)

        return prev_month.year, prev_month.month


    def _nextMonth(year, month):
        """ Internal function. Calculates the next month. """

        dt = datetime.datetime(year, month, 1, 0, 0, 0)

        # Get some day in the next month
        next_month = dt.replace(day=28) + datetime.timedelta(days=4)

        return next_month.year, next_month.month


    # Calculate the lower and upper bounds for the Julian date using the given year and month
    prev_year, prev_month = _previousMonth(year, month)
    jd_min = date2JD(prev_year, prev_month, 1, 0, 0, 0)

    next_year, next_month = _nextMonth(year, month)
    jd_max = date2JD(next_year, next_month, 28, 23, 59, 59)

    # Residual function: squared chord distance between the target solar longitude and the solar
    #   longitude at the given JD (handles the wrap-around at 360 deg)
    sol_res_func = lambda jd, sol_lon: (np.sin(sol_lon) - np.sin(solFunc(jd)))**2 + (np.cos(sol_lon) \
        - np.cos(solFunc(jd)))**2

    # Find the Julian date corresponding to the given solar longitude
    res = scipy.optimize.minimize(sol_res_func, x0=[(jd_min + jd_max)/2], args=(L,), \
        bounds=[(jd_min, jd_max)], tol=1e-13)

    return res.x[0]
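A minimal call sketch follows; the solar-longitude routine and its import path are assumptions here, not confirmed by this listing (any function mapping Julian date to J2000 solar longitude in radians can be passed as solFunc):

import numpy as np

# Assumed import paths; adjust to the actual module layout of the repository
from RMS.Astrometry.Conversions import jd2Date
from wmpl.Utils.TrajConversions import jd2SolLonSteyaert

# Julian date at which the solar longitude reaches 262.0 deg (near the Geminid peak) in December 2020
jd_peak = _solLon2jd(jd2SolLonSteyaert, 2020, 12, np.radians(262.0))
print(jd2Date(jd_peak))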
Example #2
def xyToRaDec(time_data, X_data, Y_data, level_data, lat, lon, Ho, X_res, Y_res, RA_d, dec_d, \
    pos_angle_ref, F_scale, mag_lev, vignetting_coeff, x_poly_fwd, y_poly_fwd, station_ht):
    """ A function that does the complete calibration and coordinate transformations of a meteor detection.

    First, it applies field distortion on the data, then converts the XY coordinates
    to altitude and azimuth. Then it converts the altitude and azimuth data to right ascension and 
    declination. The resulting coordinates are in J2000.0 epoch.
    
    Arguments:
        time_data: [2D ndarray] Numpy array containing time tuples of each data point (year, month, day, 
            hour, minute, second, millisecond).
        X_data: [ndarray] 1D numpy array containing the image X component.
        Y_data: [ndarray] 1D numpy array containing the image Y component.
        level_data: [ndarray] Levels of the meteor centroid.
        lat: [float] Latitude of the observer in degrees.
        lon: [float] Longitude of the observer in degrees.
        Ho: [float] Reference hour angle (deg).
        X_res: [int] Image size, X dimension (px).
        Y_res: [int] Image size, Y dimension (px).
        RA_d: [float] Reference right ascension of the image centre (degrees).
        dec_d: [float] Reference declination of the image centre (degrees).
        pos_angle_ref: [float] Field rotation parameter (degrees).
        F_scale: [float] Image scale (px/deg).
        mag_lev: [float] Magnitude calibration equation parameter (intercept).
        vignetting_coeff: [float] Vignetting coefficient (deg/px).
        x_poly_fwd: [ndarray] 1D numpy array of 12 elements containing forward X axis polynomial parameters.
        y_poly_fwd: [ndarray] 1D numpy array of 12 elements containing forward Y axis polynomial parameters.
        station_ht: [float] Height above sea level of the station (m).
    
    Return:
        (JD_data, RA_data, dec_data, magnitude_data): [tuple of ndarrays]
            JD_data: [ndarray] Julian date of each data point.
            RA_data: [ndarray] Right ascension of each point (deg).
            dec_data: [ndarray] Declination of each point (deg).
            magnitude_data: [ndarray] Array of meteor's lightcurve apparent magnitudes.

    """


    # Convert time to Julian date
    JD_data = np.array([date2JD(*time_data_entry) for time_data_entry in time_data], dtype=np.float64)

    # Convert x,y to RA/Dec using a fast cython function
    RA_data, dec_data = cyXYToRADec(JD_data, np.array(X_data, dtype=np.float64), np.array(Y_data, \
        dtype=np.float64), float(lat), float(lon), float(Ho), float(X_res), float(Y_res), float(RA_d), \
        float(dec_d), float(pos_angle_ref), float(F_scale), x_poly_fwd, y_poly_fwd)

    # Compute radii from the image centre
    radius_arr = np.hypot(np.array(X_data) - X_res/2, np.array(Y_data) - Y_res/2)

    # Calculate magnitudes
    magnitude_data = calculateMagnitudes(level_data, radius_arr, mag_lev, vignetting_coeff)

    # CURRENTLY DISABLED!
    # Compute the apparent magnitudes corrected to relative atmospheric extinction
    # magnitude_data -= atmosphericExtinctionCorrection(alt_data, station_ht) \
    #   - atmosphericExtinctionCorrection(90, station_ht)

    
    return JD_data, RA_data, dec_data, magnitude_data
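For reference, time_data holds one (year, month, day, hour, minute, second, millisecond) tuple per measured point; a purely illustrative sketch of building it:

import datetime
import numpy as np

# One time tuple per centroid; note that the last field is milliseconds, not microseconds
dt = datetime.datetime(2020, 12, 13, 22, 41, 5, 320000)
time_row = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.microsecond/1000.0)

# e.g. five centroids measured at (nearly) the same time give five such rows
time_data = np.array([time_row]*5)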
Example #3
def xyToRaDecPP(time_data,
                X_data,
                Y_data,
                level_data,
                platepar,
                extinction_correction=True):
    """ Converts image XY to RA,Dec, but it takes a platepar instead of individual parameters. 
    
    Arguments:
        time_data: [2D ndarray] Numpy array containing time tuples of each data point (year, month, day, 
            hour, minute, second, millisecond).
        X_data: [ndarray] 1D numpy array containing the image X component.
        Y_data: [ndarray] 1D numpy array containing the image Y component.
        level_data: [ndarray] Levels of the meteor centroid.
        platepar: [Platepar structure] Astrometry parameters.

    Keyword arguments:
        extinction_correction: [bool] Apply extinction correction. True by default. Set to False when this
            function is called from extinctionCorrectionApparentToTrue, to prevent infinite recursion.


    Return:
        (JD_data, RA_data, dec_data, magnitude_data): [tuple of ndarrays]
            JD_data: [ndarray] Julian date of each data point.
            RA_data: [ndarray] Right ascension of each point (deg).
            dec_data: [ndarray] Declination of each point (deg).
            magnitude_data: [ndarray] Array of meteor's lightcurve apparent magnitudes.
    """

    # Convert time to Julian date
    JD_data = np.array(
        [date2JD(*time_data_entry) for time_data_entry in time_data],
        dtype=np.float64)

    # Convert x,y to RA/Dec using a fast cython function
    RA_data, dec_data = cyXYToRADec(JD_data, np.array(X_data, dtype=np.float64), \
        np.array(Y_data, dtype=np.float64), float(platepar.lat), float(platepar.lon), float(platepar.X_res), \
        float(platepar.Y_res), float(platepar.Ho), float(platepar.RA_d), float(platepar.dec_d), \
        float(platepar.pos_angle_ref), float(platepar.F_scale), platepar.x_poly_fwd, platepar.y_poly_fwd, \
        unicode(platepar.distortion_type), refraction=platepar.refraction, \
        equal_aspect=platepar.equal_aspect, force_distortion_centre=platepar.force_distortion_centre)

    # Compute radii from the image centre
    radius_arr = np.hypot(
        np.array(X_data) - platepar.X_res / 2,
        np.array(Y_data) - platepar.Y_res / 2)

    # Calculate magnitudes
    magnitude_data = calculateMagnitudes(level_data, radius_arr,
                                         platepar.mag_lev,
                                         platepar.vignetting_coeff)

    # Extinction correction
    if extinction_correction:
        magnitude_data = extinctionCorrectionApparentToTrue(magnitude_data, X_data, Y_data, JD_data[0], \
            platepar)

    return JD_data, RA_data, dec_data, magnitude_data
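A hedged end-to-end sketch of the platepar-based call; the module paths and the platepar file name follow common RMS conventions but are assumptions here:

# Assumed import paths
from RMS.Formats.Platepar import Platepar
from RMS.Astrometry.ApplyAstrometry import xyToRaDecPP

platepar = Platepar()
platepar.read('platepar_cmn2010.cal')   # illustrative file name

time_data = [(2020, 12, 13, 22, 41, 5, 320)]            # one (Y, M, D, h, m, s, ms) tuple per point
x_data, y_data, level_data = [640.0], [360.0], [2500]   # illustrative centroid position and level

jd, ra, dec, mag = xyToRaDecPP(time_data, x_data, y_data, level_data, platepar)
print(jd[0], ra[0], dec[0], mag[0])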
Example #4
File: AST.py  Project: tammojan/RMS
def xyToRaDecAST(time_data, X_data, Y_data, level_data, ast, photom_offset):
    """ Converts image XY to RA,Dec, but it takes a platepar instead of individual parameters. 
    
    Arguments:
        time_data: [2D ndarray] Numpy array containing time tuples of each data point (year, month, day, 
            hour, minute, second, millisecond).
        X_data: [ndarray] 1D numpy array containing the image X component.
        Y_data: [ndarray] 1D numpy array containing the image Y component.
        level_data: [ndarray] Levels of the meteor centroid.
        ast: [AstPlate object] AST plate structure.
        photom_offset: [float] Photometric offset used to compute the magnitude.


    Return:
        (JD_data, RA_data, dec_data, magnitude_data): [tuple of ndarrays]
            JD_data: [ndarray] Julian date of each data point.
            RA_data: [ndarray] Right ascension of each point (deg).
            dec_data: [ndarray] Declination of each point (deg).
            magnitude_data: [ndarray] Array of meteor's lightcurve apparent magnitudes.
    """

    X_data = np.array(X_data)
    Y_data = np.array(Y_data)

    # Compute theta and phi from X, Y
    theta_data, phi_data = plateASTMap(ast, X_data, Y_data)

    # Compute altitude and azimuth in degrees
    azimuth_data = np.degrees(np.pi / 2 - phi_data)
    altitude_data = np.degrees(np.pi / 2 - theta_data)

    # Convert azimuth (+E of due N) and altitude to RA and Dec
    JD_data = np.array([date2JD(*t) for t in time_data])
    RA_data, dec_data = altAz2RADec(azimuth_data, altitude_data, JD_data, np.degrees(ast.lat), \
        np.degrees(ast.lon))

    # Calculate magnitudes (ignore vignetting)
    magnitude_data = calculateMagnitudes(level_data, np.zeros_like(level_data),
                                         photom_offset, 0.0)

    return JD_data, RA_data, dec_data, magnitude_data
Example #5
def starListToDict(config, calstars_list, max_ffs=None):
    """ Converts the list of calstars into dictionary where the keys are FF file JD and the values is
        a list of (X, Y, bg_intens, intens) of stars. 

    """

    # Convert the list to a dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

    # Dictionary which will contain the JD, and a list of (X, Y, bg_intens, intens) of the stars
    star_dict = {}

    # Take only those files with enough stars on them
    for ff_name in calstars:

        stars_list = calstars[ff_name]

        # Check if there are enough stars on the image
        if len(stars_list) >= config.ff_min_stars:

            # Calculate the JD time of the FF file
            dt = FFfile.getMiddleTimeFF(ff_name,
                                        config.fps,
                                        ret_milliseconds=True)
            jd = date2JD(*dt)

            # Add the time and the stars to the dict
            star_dict[jd] = stars_list

    if max_ffs is not None:

        # Limit the number of FF files used
        if len(star_dict) > max_ffs:

            # Randomly choose calstars_files_N image files from the whole list
            rand_keys = random.sample(list(star_dict), max_ffs)
            star_dict = {key: star_dict[key] for key in rand_keys}

    return star_dict
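A hedged sketch of feeding starListToDict from a CALSTARS file, following the reader used elsewhere in this listing (the import path and file names are illustrative assumptions):

# Assumed import path for the CALSTARS reader
from RMS.Formats import CALSTARS

# config is an RMS Config instance (loading it via RMS.ConfigReader is omitted here)
calstars_list = CALSTARS.readCALSTARS('/path/to/night_dir', 'CALSTARS_file.txt')

# Keep at most 20 randomly chosen FF files, keyed by the Julian date of the FF middle time
star_dict = starListToDict(config, calstars_list, max_ffs=20)
print('Using {:d} FF files'.format(len(star_dict)))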
Example #6
    def read(self, file_name, fmt=None):
        """ Read the platepar. 
            
        Arguments:
            file_name: [str] Path and the name of the platepar to read.

        Keyword arguments:
            fmt: [str] Format of the platepar file. 'json' for JSON format and 'txt' for the usual CMN textual
                format.

        Return:
            fmt: [str] Format of the file that was read ('json' or 'txt'), or False if the file does
                not exist.
        """


        # Check if platepar exists
        if not os.path.isfile(file_name):
            return False


        # Determine the type of the platepar if it is not given
        if fmt is None:

            with open(file_name) as f:
                data = " ".join(f.readlines())

                # Try parsing the file as JSON
                try:
                    json.loads(data)
                    fmt = 'json'

                except:
                    fmt = 'txt'


        # Load the file as JSON
        if fmt == 'json':

            # Load the JSON file
            with open(file_name) as f:
                data = " ".join(f.readlines())

            # Load the platepar from the JSON dictionary
            self.loadFromDict(json.loads(data))




        # Load the file as TXT
        else:

            with open(file_name) as f:

                self.UT_corr = 0
                self.gamma = 1.0
                self.star_list = []

                # Parse latitude, longitude, elevation
                self.lon, self.lat, self.elev = self.parseLine(f)

                # Parse date and time as int
                D, M, Y, h, m, s = map(int, f.readline().split())

                # Calculate the datetime of the platepar time
                self.time = datetime.datetime(Y, M, D, h, m, s)

                # Convert time to JD
                self.JD = date2JD(Y, M, D, h, m, s)

                # Calculate the reference hour angle
                T = (self.JD - 2451545.0)/36525.0
                self.Ho = (280.46061837 + 360.98564736629*(self.JD - 2451545.0) + 0.000387933*T**2 - 
                    T**3/38710000.0)%360

                # Parse camera parameters
                self.X_res, self.Y_res, self.focal_length = self.parseLine(f)

                # Parse the right ascension of the image centre
                self.RA_d, self.RA_H, self.RA_M, self.RA_S = self.parseLine(f)

                # Parse the declination of the image centre
                self.dec_d, self.dec_D, self.dec_M, self.dec_S = self.parseLine(f)

                # Parse the rotation parameter
                self.pos_angle_ref = self.parseLine(f)[0]

                # Parse the image scale (convert from arcsec/px to px/deg)
                self.F_scale = self.parseLine(f)[0]
                self.F_scale = 3600/self.F_scale

                # Load magnitude slope parameters
                self.mag_0, self.mag_lev = self.parseLine(f)

                # Load X axis polynomial parameters
                self.x_poly_fwd = self.x_poly_rev = np.zeros(shape=(12,), dtype=np.float64)
                for i in range(12):
                    self.x_poly_fwd[i] = self.x_poly_rev[i] = self.parseLine(f)[0]

                # Load Y axis polynomial parameters
                self.y_poly_fwd = self.y_poly_rev = np.zeros(shape=(12,), dtype=np.float64)
                for i in range(12):
                    self.y_poly_fwd[i] = self.y_poly_rev[i] = self.parseLine(f)[0]

                # Read station code
                self.station_code = f.readline().replace('\r', '').replace('\n', '')


        return fmt
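A short usage sketch for read(); the file name is illustrative and the format is auto-detected when fmt is not given:

platepar = Platepar()

# Returns False if the file does not exist, otherwise the detected format ('json' or 'txt')
fmt = platepar.read('platepar_cmn2010.cal')

if fmt is False:
    print('Platepar file not found!')
else:
    print('Loaded a {:s} platepar with reference RA = {:.2f} deg'.format(fmt, platepar.RA_d))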
Example #7
    def _handleFailure(config, platepar, calstars_list, catalog_stars,
                       _fft_refinement):
        """ Run FFT alignment before giving up on ACF. """

        if not _fft_refinement:

            print()
            print(
                "-------------------------------------------------------------------------------"
            )
            print(
                'The initial platepar is bad, trying to refine it using FFT phase correlation...'
            )
            print()

            # Prepare data for FFT image registration

            calstars_dict = {
                ff_file: star_data
                for ff_file, star_data in calstars_list
            }

            # Extract star list from CALSTARS file from FF file with most stars
            max_len_ff = max(calstars_dict,
                             key=lambda k: len(calstars_dict[k]))

            # Take only X, Y (change order so X is first)
            calstars_coords = np.array(calstars_dict[max_len_ff])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]

            # Get the time of the FF file
            calstars_time = FFfile.getMiddleTimeFF(max_len_ff,
                                                   config.fps,
                                                   ret_milliseconds=True)

            # Try aligning the platepar using FFT image registration
            platepar_refined = alignPlatepar(config, platepar, calstars_time,
                                             calstars_coords)

            print()

            ### If there are still not enough stars matched, try FFT again ###
            min_radius = 10

            # Prepare star dictionary to check the match
            dt = FFfile.getMiddleTimeFF(max_len_ff,
                                        config.fps,
                                        ret_milliseconds=True)
            jd = date2JD(*dt)
            star_dict_temp = {}
            star_dict_temp[jd] = calstars_dict[max_len_ff]

            # Check the number of matched stars
            n_matched, _, _, _ = matchStarsResiduals(config, platepar_refined, catalog_stars, \
                star_dict_temp, min_radius, ret_nmatch=True, verbose=True)

            # Realign again if necessary
            if n_matched < config.min_matched_stars:
                print()
                print(
                    "-------------------------------------------------------------------------------"
                )
                print(
                    'Doing a second FFT pass as the number of matched stars was too small...'
                )
                print()
                platepar_refined = alignPlatepar(config, platepar_refined,
                                                 calstars_time,
                                                 calstars_coords)
                print()

            ### ###

            # Redo autoCF
            return autoCheckFit(config,
                                platepar_refined,
                                calstars_list,
                                _fft_refinement=True)

        else:
            print(
                'Auto Check Fit failed completely, please redo the plate manually!'
            )
            return platepar, False
Example #8
def autoCheckFit(config, platepar, calstars_list, distorsion_refinement=False, _fft_refinement=False):
    """ Attempts to refine the astrometry fit with the given stars and and initial astrometry parameters.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Initial astrometry parameters.
        calstars_list: [list] A list containing stars extracted from FF files. See RMS.Formats.CALSTARS for
            more details.

    Keyword arguments:
        distorsion_refinement: [bool] Whether the distortion should be fitted as well. False by default.
        _fft_refinement: [bool] Internal flag indicating that autoCF is running the second time recursively
            after FFT platepar adjustment.
    
    Return:
        (platepar, fit_status):
            platepar: [Platepar structure] Estimated/refined platepar.
            fit_status: [bool] True if the fit was successful, False if not.
    """


    def _handleFailure(config, platepar, calstars_list, distorsion_refinement, _fft_refinement):
        """ Run FFT alignment before giving up on ACF. """

        if not _fft_refinement:

            print('The initial platepar is bad, trying to refine it using FFT phase correlation...')

            # Prepare data for FFT image registration

            calstars_dict = {ff_file: star_data for ff_file, star_data in calstars_list}

            # Extract star list from CALSTARS file from FF file with most stars
            max_len_ff = max(calstars_dict, key=lambda k: len(calstars_dict[k]))
            
            # Take only X, Y (change order so X is first)
            calstars_coords = np.array(calstars_dict[max_len_ff])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
                
            # Get the time of the FF file
            calstars_time = FFfile.getMiddleTimeFF(max_len_ff, config.fps, ret_milliseconds=True)

            # Try aligning the platepar using FFT image registration
            platepar_refined = alignPlatepar(config, platepar, calstars_time, calstars_coords)

            # Redo autoCF
            return autoCheckFit(config, platepar_refined, calstars_list, \
                distorsion_refinement=distorsion_refinement, _fft_refinement=True)

        else:
            print('Auto Check Fit failed completely, please redo the plate manually!')
            return platepar, False


    if _fft_refinement:
        print('Second ACF run with an updated platepar via FFT phase correlation...')


    # Convert the list to a dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

    # Load catalog stars (overwrite the mag band ratios if specific catalog is used)
    catalog_stars, _, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(config.star_catalog_path, \
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)


    # Dictionary which will contain the JD, and a list of (X, Y, bg_intens, intens) of the stars
    star_dict = {}

    # Take only those files with enough stars on them
    for ff_name in calstars:

        stars_list = calstars[ff_name]

        # Check if there are enough stars on the image
        if len(stars_list) >= config.ff_min_stars:
            
            # Calculate the JD time of the FF file
            dt = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
            jd = date2JD(*dt)

            # Add the time and the stars to the dict
            star_dict[jd] = stars_list


    # At least calstars_files_N FF files are needed for star fitting; if there are more, a random subset of that size is used
    if len(star_dict) < config.calstars_files_N:
        print('Not enough FF files in CALSTARS for ACF!')
        return platepar, False

    else:

        # Randomly choose calstars_files_N image files from the whole list
        rand_keys = random.sample(list(star_dict), config.calstars_files_N)
        star_dict = {key: star_dict[key] for key in rand_keys}


    # Calculate the total number of calibration stars used
    total_calstars = sum([len(star_dict[key]) for key in star_dict])
    print('Total calstars:', total_calstars)

    if total_calstars < config.calstars_min_stars:
        print('Not enough calibration stars, need at least', config.calstars_min_stars)
        return platepar, False


    # A list of matching radii to try, pairs of [radius, fit_distorsion_flag]
    #   The distortion will be fitted only if explicitly requested
    min_radius = 0.5
    radius_list = [[10, False], 
                    [5, False], 
                    [3, False],
                    [1.5, distorsion_refinement], 
                    [min_radius, distorsion_refinement]]


    # Calculate the function tolerance so the desired precision can be reached (the number is expressed
    #   in the same units as the cost function)
    fatol, xatol_ang = computeMinimizationTolerances(config, platepar, len(star_dict))


    ### If the initial match is good enough, do only a quick recalibration ###
     
    # Match the stars and calculate the residuals
    n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
        min_radius, ret_nmatch=True)

    if n_matched >= config.calstars_files_N:

        # Check if the average distance with the tightest radius is close
        if avg_dist < config.dist_check_quick_threshold:

            # Use a reduced set of initial radius values
            radius_list = [[1.5, distorsion_refinement], 
                           [min_radius, distorsion_refinement]]

    ##########


    # Match increasingly smaller search radii around image stars
    for i, (match_radius, fit_distorsion) in enumerate(radius_list):

        # Match the stars and calculate the residuals
        n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
            match_radius, ret_nmatch=True)

        print('Max radius:', match_radius)
        print('Initial values:')
        print(' Matched stars:', n_matched)
        print(' Average deviation:', avg_dist)


        # The initial number of matched stars has to be at least the number of FF images, otherwise it means
        #   that the initial platepar is not good
        if n_matched < config.calstars_files_N:
            print('The total number of initially matched stars is too small! Please manually redo the plate or make sure there are enough calibration stars.')
            
            # Try to refine the platepar with FFT phase correlation and redo the ACF
            return _handleFailure(config, platepar, calstars_list, distorsion_refinement, _fft_refinement)


        # Check if the platepar is good enough and do not estimate further parameters
        if checkFitGoodness(config, platepar, catalog_stars, star_dict, min_radius, verbose=True):

            # Print out notice only if the platepar is good right away
            if i == 0:
                print("Initial platepar is good enough!")

            return platepar, True


        # Initial parameters for the astrometric fit (don't fit the scale if the distortion is not being fit)
        if fit_distorsion:
            p0 = [platepar.RA_d, platepar.dec_d, platepar.pos_angle_ref, platepar.F_scale]
        else:
            p0 = [platepar.RA_d, platepar.dec_d, platepar.pos_angle_ref]

        # Fit the astrometric parameters
        res = scipy.optimize.minimize(_calcImageResidualsAstro, p0, args=(config, platepar, catalog_stars, \
            star_dict, match_radius, fit_distorsion), method='Nelder-Mead', \
            options={'fatol': fatol, 'xatol': xatol_ang})

        print(res)

        # If the fit was not successful, stop further fitting
        if not res.success:

            # Try to refine the platepar with FFT phase correlation and redo the ACF
            return _handleFailure(config, platepar, calstars_list, distorsion_refinement, _fft_refinement)


        else:
            # If the fit was successful, use the new parameters from now on
            if fit_distorsion:
                ra_ref, dec_ref, pos_angle_ref, F_scale = res.x

            else:
                ra_ref, dec_ref, pos_angle_ref = res.x
                F_scale = platepar.F_scale

            platepar.RA_d = ra_ref
            platepar.dec_d = dec_ref
            platepar.pos_angle_ref = pos_angle_ref
            platepar.F_scale = F_scale


        
        # Check if the platepar is good enough and do not estimate further parameters
        if checkFitGoodness(config, platepar, catalog_stars, star_dict, min_radius, verbose=True):
            return platepar, True


        # Fit the lens distortion parameters
        if fit_distorsion:


            ### REVERSE DISTORTION POLYNOMIALS FIT ###

            # Fit the distortion parameters (X axis)
            res = scipy.optimize.minimize(_calcImageResidualsDistorsion, platepar.x_poly_rev, args=(config, \
                platepar, catalog_stars, star_dict, match_radius, 'x'), method='Nelder-Mead', \
                options={'fatol': fatol, 'xatol': 0.1})

            print(res)

            # If the fit was not successful, stop further fitting
            if not res.success:
                # Try to refine the platepar with FFT phase correlation and redo the ACF
                return _handleFailure(config, platepar, calstars_list, distorsion_refinement, _fft_refinement)

            else:
                platepar.x_poly_rev = res.x


            # Fit the distortion parameters (Y axis)
            res = scipy.optimize.minimize(_calcImageResidualsDistorsion, platepar.y_poly_rev, args=(config, \
                platepar,catalog_stars, star_dict, match_radius, 'y'), method='Nelder-Mead', \
                options={'fatol': fatol, 'xatol': 0.1})

            print(res)

            # If the fit was not successful, stop further fitting
            if not res.success:
                
                # Try to refine the platepar with FFT phase correlation and redo the ACF
                return _handleFailure(config, platepar, calstars_list, distorsion_refinement, _fft_refinement)

            else:
                platepar.y_poly_rev = res.x


            ### ###


            ### FORWARD DISTORTION POLYNOMIALS FIT ###

            # Fit the distortion parameters (X axis)
            res = scipy.optimize.minimize(_calcSkyResidualsDistorsion, platepar.x_poly_fwd, args=(config, \
                platepar, catalog_stars, star_dict, match_radius, 'x'), method='Nelder-Mead', \
                options={'fatol': fatol, 'xatol': 0.1})

            print(res)

            # If the fit was not successful, stop further fitting
            if not res.success:
                
                # Try to refine the platepar with FFT phase correlation and redo the ACF
                return _handleFailure(config, platepar, calstars_list, distorsion_refinement, _fft_refinement)

            else:
                platepar.x_poly_fwd = res.x


            # Fit the distortion parameters (Y axis)
            res = scipy.optimize.minimize(_calcSkyResidualsDistorsion, platepar.y_poly_fwd, args=(config, \
                platepar,catalog_stars, star_dict, match_radius, 'y'), method='Nelder-Mead', \
                options={'fatol': fatol, 'xatol': 0.1})

            print(res)

            # If the fit was not successful, stop further fitting
            if not res.success:
                return platepar, False

            else:
                platepar.y_poly_fwd = res.x

            ### ###



    # Match the stars and calculate the residuals
    n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
        star_dict, min_radius, ret_nmatch=True)

    print('FINAL SOLUTION with {:f} px:'.format(min_radius))
    print('Matched stars:', n_matched)
    print('Average deviation:', avg_dist)


    # Mark the platepar to indicate that it was automatically refined with CheckFit
    platepar.auto_check_fit_refined = True

    return platepar, True
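A hedged sketch of running the automated check fit on a night folder; the import paths are assumptions, and config is an RMS Config instance whose loading is not shown:

# Assumed import paths
from RMS.Formats import CALSTARS
from RMS.Formats.Platepar import Platepar

platepar = Platepar()
platepar.read('platepar_cmn2010.cal')   # illustrative file name

calstars_list = CALSTARS.readCALSTARS('/path/to/night_dir', 'CALSTARS_file.txt')

# Refine the pointing (and optionally the distortion) using the detected stars
platepar, fit_ok = autoCheckFit(config, platepar, calstars_list, distorsion_refinement=False)
print('ACF successful:', fit_ok)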
Example #9
def makeFlat(dir_path, config):
    """ Makes a flat field from the files in the given folder. CALSTARS file is needed to estimate the
        quality of every image by counting the number of detected stars.

    Arguments:
        dir_path: [str] Path to the directory which contains the FF files and a CALSTARS file.
        config: [config object]

    Return:
        [2d ndarray] Flat field image as a numpy array. If the flat generation failed, None will be returned.
        
    """

    # Find the CALSTARS file in the given folder
    calstars_file = None
    for file_name in os.listdir(dir_path):
        if ('CALSTARS' in file_name) and ('.txt' in file_name):
            calstars_file = file_name
            break

    if calstars_file is None:
        print('CALSTARS file could not be found in the given directory!')
        return None

    # Load the calstars file
    calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_file)

    # Convert the list to a dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

    print('CALSTARS file: ' + calstars_file + ' loaded!')

    # A list of FF files which have any stars on them
    calstars_ff_files = [line[0] for line in calstars_list]

    ff_list = []

    # Get a list of FF files in the folder
    for file_name in os.listdir(dir_path):
        if validFFName(file_name) and (file_name in calstars_ff_files):
            ff_list.append(file_name)

    # Check that there are any FF files in the folder
    if not ff_list:
        print('No FF files in the selected folder!')
        return None

    ff_list_good = []
    ff_times = []

    # Take only those FF files with enough stars on them
    for ff_name in ff_list:

        if not validFFName(ff_name):
            continue

        if ff_name in calstars:

            # Get the number of stars detected on the FF image
            ff_nstars = len(calstars[ff_name])

            # Check if the number of stars on the image is over the detection threshold
            if ff_nstars > config.ff_min_stars:

                # Add the FF file to the list of FF files to be used to make a flat
                ff_list_good.append(ff_name)

                # Calculate the time of the FF files
                ff_time = date2JD(*getMiddleTimeFF(
                    ff_name, config.fps, ret_milliseconds=True))
                ff_times.append(ff_time)

    # Check that there are enough good FF files in the folder
    if len(ff_times) < config.flat_min_imgs:
        print('Not enough FF files have enough stars on them!')
        return None

    # Make sure the files cover at least 2 hours
    if not (max(ff_times) - min(ff_times)) * 24 > 2:
        print('Good FF files cover less than 2 hours!')
        return None

    # Sample FF files if there are more than 200
    max_ff_flat = 200
    if len(ff_list_good) > max_ff_flat:
        ff_list_good = sorted(random.sample(ff_list_good, max_ff_flat))

    print('Using {:d} files for flat...'.format(len(ff_list_good)))

    c = 0
    ff_avg_list = []
    median_list = []

    # Median combine all good FF files
    for i in range(len(ff_list_good)):

        # Load 10 files at a time and median combine them, which conserves memory
        if c < 10:

            ff = readFF(dir_path, ff_list_good[i])
            ff_avg_list.append(ff.avepixel)

            c += 1

        else:

            ff_avg_list = np.array(ff_avg_list)

            # Median combine the loaded 10 (or less) images
            ff_median = np.median(ff_avg_list, axis=0)
            median_list.append(ff_median)

            ff_avg_list = []
            c = 0

    # Median combine any images left over from the last, incomplete batch of 10
    if len(ff_avg_list) > 0:
        median_list.append(np.median(np.array(ff_avg_list), axis=0))

    # If more than one median image was calculated, combine them
    if len(median_list) > 1:

        # Median combine all median images
        median_list = np.array(median_list)
        ff_median = np.median(median_list, axis=0)

    else:
        ff_median = median_list[0]

    # Stretch flat to 0-255
    ff_median = ff_median / np.max(ff_median) * 255

    # Convert the flat to 8 bits
    ff_median = ff_median.astype(np.uint8)

    return ff_median
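A hedged usage sketch for makeFlat (config is an RMS Config instance, not constructed here; saving the flat is left to any 8-bit image writer):

# dir_path points at a night folder containing FF files and a CALSTARS file
dir_path = '/path/to/night_dir'

flat = makeFlat(dir_path, config)

if flat is None:
    print('Flat field generation failed!')
else:
    print('Flat generated, shape:', flat.shape, 'dtype:', flat.dtype)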
Example #10
def alignPlatepar(config, platepar, calstars_time, calstars_coords, scale_update=False, show_plot=False):
    """ Align the platepar using FFT registration between catalog stars and the given list of image stars.

    Arguments:
        config: [Config instance]
        platepar: [Platepar instance] Initial platepar.
        calstars_time: [list] A list of (year, month, day, hour, minute, second, millisecond) of the middle of
            the FF file used for alignment.
        calstars_coords: [ndarray] A 2D numpy array of (x, y) coordinates of image stars.

    Keyword arguments:
        scale_update: [bool] Update the platepar scale. False by default.
        show_plot: [bool] Show the comparison between the reference and image synthetic images.

    Return:
        platepar_aligned: [Platepar instance] The aligned platepar.
    """


    # Optimize the catalog limiting magnitude until the numbers of image and catalog stars roughly match
    maxiter = 10
    search_fainter = True
    mag_step = 0.2
    for inum in range(maxiter):

        # Load the catalog stars
        catalog_stars, _, _ = StarCatalog.readStarCatalog(config.star_catalog_path, config.star_catalog_file, \
            lim_mag=config.catalog_mag_limit, mag_band_ratios=config.star_catalog_band_ratios)

        # Get the RA/Dec of the image centre
        _, ra_centre, dec_centre, _ = ApplyAstrometry.xyToRaDecPP([calstars_time], [platepar.X_res/2], \
                [platepar.Y_res/2], [1], platepar)

        ra_centre = ra_centre[0]
        dec_centre = dec_centre[0]

        # Calculate the FOV radius in degrees
        fov_y, fov_x = ApplyAstrometry.computeFOVSize(platepar)
        fov_radius = np.sqrt(fov_x**2 + fov_y**2)

        # Take only those stars which are inside the FOV
        filtered_indices, _ = subsetCatalog(catalog_stars, ra_centre, dec_centre, \
            fov_radius, config.catalog_mag_limit)

        # Take those catalog stars which should be inside the FOV
        ra_catalog, dec_catalog, _ = catalog_stars[filtered_indices].T
        jd = date2JD(*calstars_time)
        catalog_xy = ApplyAstrometry.raDecToXYPP(ra_catalog, dec_catalog, jd, platepar)

        catalog_x, catalog_y = catalog_xy
        catalog_xy = np.c_[catalog_x, catalog_y]

        # Cut all stars that are outside image coordinates
        catalog_xy = catalog_xy[catalog_xy[:, 0] > 0]
        catalog_xy = catalog_xy[catalog_xy[:, 0] < config.width]
        catalog_xy = catalog_xy[catalog_xy[:, 1] > 0]
        catalog_xy = catalog_xy[catalog_xy[:, 1] < config.height]


        # If there are more catalog than image stars, this means that the limiting magnitude is too faint
        #   and that the search should go in the brighter direction
        if len(catalog_xy) > len(calstars_coords):
            search_fainter = False
        else:
            search_fainter = True

        # print('Catalog stars:', len(catalog_xy), 'Image stars:', len(calstars_coords), \
        #     'Limiting magnitude:', config.catalog_mag_limit)

        # Search in mag_step magnitude steps
        if search_fainter:
            config.catalog_mag_limit += mag_step
        else:
            config.catalog_mag_limit -= mag_step

    print('Final catalog limiting magnitude:', config.catalog_mag_limit)


    # Find the transform between the image coordinates and predicted platepar coordinates
    res = findStarsTransform(config, calstars_coords, catalog_xy, show_plot=show_plot)
    angle, scale, translation_x, translation_y = res


    ### Update the platepar ###

    platepar_aligned = copy.deepcopy(platepar)

    # Correct the rotation
    platepar_aligned.pos_angle_ref = (platepar_aligned.pos_angle_ref - angle)%360

    # Update the scale if needed
    if scale_update:
        platepar_aligned.F_scale *= scale

    # Compute the new reference RA and Dec
    # _, ra_centre_new, dec_centre_new, _ = ApplyAstrometry.xyToRaDecPP([jd2Date(platepar.JD)], \
    #     [platepar.X_res/2 - translation_x], [platepar.Y_res/2 - translation_y], [1], platepar)
    _, ra_centre_new, dec_centre_new, _ = ApplyAstrometry.xyToRaDecPP([jd2Date(platepar.JD)], \
        [platepar.X_res/2 - platepar.x_poly_fwd[0] - translation_x], \
        [platepar.Y_res/2 - platepar.y_poly_fwd[0] - translation_y], [1], platepar)

    # Correct RA/Dec
    platepar_aligned.RA_d = ra_centre_new[0]
    platepar_aligned.dec_d = dec_centre_new[0]

    # # Update the reference time and hour angle
    # platepar_aligned.JD = jd
    # platepar_aligned.Ho = JD2HourAngle(jd)

    # Recompute the FOV centre in Alt/Az and update the rotation
    platepar_aligned.az_centre, platepar_aligned.alt_centre = ApplyAstrometry.raDec2AltAz(platepar.JD, \
                platepar.lon, platepar.lat, platepar.RA_d, platepar.dec_d)
    platepar_aligned.rotation_from_horiz = ApplyAstrometry.rotationWrtHorizon(platepar_aligned)

    # Indicate that the platepar has been automatically updated
    platepar_aligned.auto_check_fit_refined = True

    ###

    return platepar_aligned
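A hedged minimal call sketch; calstars_time and calstars_coords are built exactly as in the _handleFailure helper shown earlier (FF middle time and an (x, y) array of image stars):

# Align the pointing (and optionally the scale) of an initial platepar against the image stars
platepar_aligned = alignPlatepar(config, platepar, calstars_time, calstars_coords, \
    scale_update=True, show_plot=False)

print('New position angle: {:.3f} deg'.format(platepar_aligned.pos_angle_ref))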
Example #11
def makeFlat(dir_path, config, nostars=False, use_images=False):
    """ Makes a flat field from the files in the given folder. CALSTARS file is needed to estimate the
        quality of every image by counting the number of detected stars.

    Arguments:
        dir_path: [str] Path to the directory which contains the FF files and a CALSTARS file.
        config: [config object]

    Keyword arguments:
        nostars: [bool] If True, all files will be taken regardless of whether they have stars on them or not.
        use_images: [bool] Use image files instead of FF files. False by default.

    Return:
        [2d ndarray] Flat field image as a numpy array. If the flat generation failed, None will be returned.
        
    """

    # If only images are used, then don't look for a CALSTARS file
    if use_images:
        nostars = True

    # Load the calstars file if it should be used
    if not nostars:

        # Find the CALSTARS file in the given folder
        calstars_file = None
        for file_name in os.listdir(dir_path):
            if ('CALSTARS' in file_name) and ('.txt' in file_name):
                calstars_file = file_name
                break

        if calstars_file is None:
            print('CALSTARS file could not be found in the given directory!')
            return None

        # Load the calstars file
        calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_file)

        # Convert the list to a dictionary
        calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

        print('CALSTARS file: ' + calstars_file + ' loaded!')

        # A list of FF files which have any stars on them
        calstars_ff_files = [line[0] for line in calstars_list]

    else:
        calstars = {}



    # Use image files
    if use_images:

        # Find the file type with the highest file frequency in the given folder
        file_extensions = []
        for file_name in os.listdir(dir_path):
            file_ext = file_name.split('.')[-1]
            if file_ext.lower() in ['jpg', 'png', 'bmp']:
                file_extensions.append(file_ext)
            
        # Get only the most frequent file type
        unique_exts, ext_counts = np.unique(file_extensions, return_counts=True)
        most_freq_type = unique_exts[np.argmax(ext_counts)]

        print('Using image type:', most_freq_type)

        # Take only files of that file type
        ff_list = [file_name for file_name in sorted(os.listdir(dir_path)) \
            if file_name.lower().endswith(most_freq_type)]


    # Use FF files
    else:
        ff_list = []

        # Get a list of FF files in the folder
        for file_name in os.listdir(dir_path):
            if validFFName(file_name) and ((file_name in calstars_ff_files) or nostars):
                ff_list.append(file_name)
                

        # Check that there are any FF files in the folder
        if not ff_list:
            print('No valid FF files in the selected folder!')
            return None



    ff_list_good = []
    ff_times = []

    # Take only those FF files with enough stars on them
    for ff_name in ff_list:

        if (ff_name in calstars) or nostars:

            # Disable requiring minimum number of stars if specified
            if not nostars:
                
                # Get the number of stars detected on the FF image
                ff_nstars = len(calstars[ff_name])

            else:
                ff_nstars = 0

            
            # Check if the number of stars on the image is over the detection threshold
            if (ff_nstars > config.ff_min_stars) or nostars:

                # Add the FF file to the list of FF files to be used to make a flat
                ff_list_good.append(ff_name)


                # If images are used, don't compute the time
                if use_images:
                    ff_time = 0

                else:
                    # Calculate the time of the FF files
                    ff_time = date2JD(*getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True))


                ff_times.append(ff_time)


    # Check that there are enough good FF files in the folder
    if (len(ff_times) < config.flat_min_imgs) and (not nostars):
        print('Not enough FF files have enough stars on them!')
        return None
        
    
    # Make sure the files cover at least 2 hours
    if (not (max(ff_times) - min(ff_times))*24 > 2) and (not nostars):
        print('Good FF files cover less than 2 hours!')
        return None


    # Sample FF files if there are more than 200
    max_ff_flat = 200
    if len(ff_list_good) > max_ff_flat:
        ff_list_good = sorted(random.sample(ff_list_good, max_ff_flat))


    print('Using {:d} files for flat...'.format(len(ff_list_good)))


    c = 0
    img_list = []
    median_list = []

    # Median combine all good FF files
    for i in range(len(ff_list_good)):

        # Load 10 files at a time and median combine them, which conserves memory
        if c < 10:

            # Use images
            if use_images:
                img = scipy.ndimage.imread(os.path.join(dir_path, ff_list_good[i]), -1)


            # Use FF files
            else:
                ff = readFF(dir_path, ff_list_good[i])

                # Skip the file if it is corrupted
                if ff is None:
                    continue

                img = ff.avepixel

            
            img_list.append(img)

            c += 1


        else:

            img_list = np.array(img_list)

            # Median combine the loaded 10 (or less) images
            ff_median = np.median(img_list, axis=0)
            median_list.append(ff_median)

            img_list = []
            c = 0


    # If there are more than 1 calculated median image, combine them
    if len(median_list) > 1:

        # Median combine all median images
        median_list = np.array(median_list)
        ff_median = np.median(median_list, axis=0)

    else:
        if len(median_list) > 0:
            ff_median = median_list[0]
        else:
            ff_median = np.median(np.array(img_list), axis=0)


    # Stretch flat to 0-255
    ff_median = ff_median/np.max(ff_median)*255

    # Convert the flat to 8 bits
    ff_median = ff_median.astype(np.uint8)

    return ff_median
Example #12
def xyToRaDecPP(time_data, X_data, Y_data, level_data, platepar, extinction_correction=True, \
    measurement=False):
    """ Converts image XY to RA,Dec, but it takes a platepar instead of individual parameters. 

    Arguments:
        time_data: [2D ndarray] Numpy array containing time tuples of each data point (year, month, day,
            hour, minute, second, millisecond).
        X_data: [ndarray] 1D numpy array containing the image X component.
        Y_data: [ndarray] 1D numpy array containing the image Y component.
        level_data: [ndarray] Levels of the meteor centroid.
        platepar: [Platepar structure] Astrometry parameters.

    Keyword arguments:
        extinction_correction: [bool] Apply extinction correction. True by default. Set to False when this
            function is called from extinctionCorrectionApparentToTrue, to prevent infinite recursion.
        measurement: [bool] Indicates whether the given values are image measurements. Used for correcting
            celestial coordinates for refraction if the refraction was not taken into account during
            plate fitting.

    Return:
        (JD_data, RA_data, dec_data, magnitude_data): [tuple of ndarrays]
            JD_data: [ndarray] Julian date of each data point.
            RA_data: [ndarray] Right ascension of each point (deg).
            dec_data: [ndarray] Declination of each point (deg).
            magnitude_data: [ndarray] Array of meteor's lightcurve apparent magnitudes.
    """

    # Convert time to Julian date
    JD_data = np.array(
        [date2JD(*time_data_entry) for time_data_entry in time_data],
        dtype=np.float64)

    # Convert x,y to RA/Dec using a fast cython function
    RA_data, dec_data = cyXYToRADec(JD_data, np.array(X_data, dtype=np.float64), \
        np.array(Y_data, dtype=np.float64), float(platepar.lat), float(platepar.lon), float(platepar.X_res), \
        float(platepar.Y_res), float(platepar.Ho), float(platepar.RA_d), float(platepar.dec_d), \
        float(platepar.pos_angle_ref), float(platepar.F_scale), platepar.x_poly_fwd, platepar.y_poly_fwd, \
        unicode(platepar.distortion_type), refraction=platepar.refraction, \
        equal_aspect=platepar.equal_aspect, force_distortion_centre=platepar.force_distortion_centre, \
        asymmetry_corr=platepar.asymmetry_corr)

    # Correct the coordinates for refraction if it wasn't taken into account during the astrometry calibration
    #   procedure
    if (not platepar.refraction
        ) and measurement and platepar.measurement_apparent_to_true_refraction:
        for i, entry in enumerate(zip(JD_data, RA_data, dec_data)):
            jd, ra, dec = entry
            ra, dec = eqRefractionApparentToTrue(np.radians(ra), np.radians(dec), jd, \
                np.radians(platepar.lat), np.radians(platepar.lon))

            RA_data[i] = np.degrees(ra)
            dec_data[i] = np.degrees(dec)

    # Compute radii from the image centre
    radius_arr = np.hypot(
        np.array(X_data) - platepar.X_res / 2,
        np.array(Y_data) - platepar.Y_res / 2)

    # Calculate magnitudes
    magnitude_data = calculateMagnitudes(level_data, radius_arr,
                                         platepar.mag_lev,
                                         platepar.vignetting_coeff)

    # Extinction correction
    if extinction_correction:
        magnitude_data = extinctionCorrectionApparentToTrue(magnitude_data, X_data, Y_data, JD_data[0], \
            platepar)

    return JD_data, RA_data, dec_data, magnitude_data
Example #13
def recalibratePlateparsForFF(
    prev_platepar,
    ff_file_names,
    calstars,
    catalog_stars,
    config,
    lim_mag=None,
    ignore_distance_threshold=False,
):
    """
    Recalibrate platepars corresponding to ff files based on the stars.

    Arguments:
        prev_platepar: [platepar]
        ff_file_names: [list] list of ff file names
        calstars: [dict] A dictionary where the keys are FF file names and the values are lists of star
            coordinates detected on that FF file.
        catalog_stars: [ndarray] An array of catalog stars (RA, Dec, magnitude) to match against.
        config: [config]

    Keyword arguments:
        lim_mag: [float]
        ignore_distance_threshold: [bool] Don't consider the recalib as failed if the median distance
            is larger than the threshold.

    Returns:
        recalibrated_platepars: [dict] A dictionary where one key is ff file name and the value is
            a calibrated corresponding platepar.
    """
    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for ff_name in ff_file_names:

        working_platepar = copy.deepcopy(prev_platepar)

        # Skip this meteor if its FF file was already recalibrated
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print(
            '------------------------------------------------------------------------------'
        )

        # Find extracted stars on this image
        if ff_name not in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry, the residuals function
        #   needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name,
                                               config.fps,
                                               ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        result = None

        # Skip recalibration if less than a minimum number of stars were detected
        if (len(calstars[ff_name]) >= config.ff_min_stars) and (len(
                calstars[ff_name]) >= config.min_matched_stars):

            # Recalibrate the platepar using star matching
            result, min_match_radius = recalibrateFF(
                config,
                working_platepar,
                jd,
                star_dict_ff,
                catalog_stars,
                lim_mag=lim_mag,
                ignore_distance_threshold=ignore_distance_threshold,
            )

            # If the recalibration failed, try using FFT alignment
            if result is None:

                print()
                print('Running FFT alignment...')

                # Run FFT alignment
                calstars_coords = np.array(star_dict_ff[jd])[:, :2]
                calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
                print(calstars_time)
                test_platepar = alignPlatepar(config,
                                              prev_platepar,
                                              calstars_time,
                                              calstars_coords,
                                              show_plot=False)

                # Try to recalibrate after FFT alignment
                result, _ = recalibrateFF(config,
                                          test_platepar,
                                          jd,
                                          star_dict_ff,
                                          catalog_stars,
                                          lim_mag=lim_mag)

                # If the FFT alignment failed, fall back to the original platepar using the smallest radius
                #   that matched, and force save the platepar
                if (result is None) and (min_match_radius is not None):
                    print()
                    print(
                        "Using the old platepar with the minimum match radius of: {:.2f}"
                        .format(min_match_radius))
                    result, _ = recalibrateFF(
                        config,
                        working_platepar,
                        jd,
                        star_dict_ff,
                        catalog_stars,
                        max_match_radius=min_match_radius,
                        force_platepar_save=True,
                        lim_mag=lim_mag,
                    )

                    if result is not None:
                        working_platepar = result

                # If the alignment succeeded, save the result
                else:
                    working_platepar = result

            else:
                working_platepar = result

        # Store the platepar if the fit succeeded
        if result is not None:

            # Recompute alt/az of the FOV centre
            working_platepar.az_centre, working_platepar.alt_centre = raDec2AltAz(
                working_platepar.RA_d,
                working_platepar.dec_d,
                working_platepar.JD,
                working_platepar.lat,
                working_platepar.lon,
            )

            # Recompute the rotation wrt horizon
            working_platepar.rotation_from_horiz = rotationWrtHorizon(
                working_platepar)

            # Mark the platepar to indicate that it was automatically recalibrated on an individual FF file
            working_platepar.auto_recalibrated = True

            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:

            print(
                'Recalibration of {:s} failed, using the previous platepar...'.
                format(ff_name))

            # Mark the platepar to indicate that autorecalib failed
            prev_platepar_tmp = copy.deepcopy(prev_platepar)
            prev_platepar_tmp.auto_recalibrated = False

            # If the aligning failed, set the previous platepar as the one that should be used for this FF file
            recalibrated_platepars[ff_name] = prev_platepar_tmp

    return recalibrated_platepars
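A hedged sketch of recalibrating per-FF platepars for every FF file in CALSTARS; the catalog is loaded with the same call pattern as in autoCheckFit above (the StarCatalog import path is an assumption):

# Assumed import path for the star catalog reader
from RMS.Formats import StarCatalog

# calstars_list, config and prev_platepar as in the earlier snippets
calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

catalog_stars, _, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\
    config.star_catalog_path, config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
    mag_band_ratios=config.star_catalog_band_ratios)

recalibrated_platepars = recalibratePlateparsForFF(prev_platepar, list(calstars.keys()), calstars, \
    catalog_stars, config)

print('Recalibrated {:d} FF files'.format(len(recalibrated_platepars)))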
Example #14
def generateCalibrationReport(config, night_dir_path, match_radius=2.0, platepar=None, show_graphs=False):
    """ Given the folder of the night, find the Calstars file, check the star fit and generate a report
        with the quality of the calibration. The report contains information about both the astrometry and
        the photometry calibration. Graphs will be saved in the given directory of the night.
    
    Arguments:
        config: [Config instance]
        night_dir_path: [str] Full path to the directory of the night.

    Keyword arguments:
        match_radius: [float] Match radius for star matching between image and catalog stars (px).
        platepar: [Platepar instance] Use this platepar instead of finding one in the folder.
        show_graphs: [bool] Show the graphs on the screen. False by default.

    Return:
        None
    """

    # Find the CALSTARS file in the given folder
    calstars_file = None
    for file_name in os.listdir(night_dir_path):
        if ('CALSTARS' in file_name) and ('.txt' in file_name):
            calstars_file = file_name
            break

    if calstars_file is None:
        print('CALSTARS file could not be found in the given directory!')
        return None


    # Load the calstars file
    star_list = readCALSTARS(night_dir_path, calstars_file)



    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(night_dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break


    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file:
        with open(os.path.join(night_dir_path, platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print('Loaded recalibrated platepars JSON file for the calibration report...')

    ### ###


    ### Load the platepar file ###

    # Find the platepar file in the given directory if it was not given
    if platepar is None:

        # Find the platepar file
        platepar_file = None
        for file_name in os.listdir(night_dir_path):
            if file_name == config.platepar_name:
                platepar_file = file_name
                break

        if platepar_file is None:
            print('The platepar cannot be found in the night directory!')
            return None

        # Load the platepar file
        platepar = Platepar()
        platepar.read(os.path.join(night_dir_path, platepar_file))


    ### ###


    night_name = os.path.split(night_dir_path.strip(os.sep))[1]


    # Go one mag deeper than in the config
    lim_mag = config.catalog_mag_limit + 1

    # Load catalog stars (load one magnitude deeper)
    catalog_stars, mag_band_str, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\
        config.star_catalog_path, config.star_catalog_file, lim_mag=lim_mag, \
        mag_band_ratios=config.star_catalog_band_ratios)

    
    ### Take only those CALSTARS entries for which FF files exist in the folder ###

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(night_dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)


    # Filter out calstars entries, generate a star dictionary where the keys are JDs of FFs
    star_dict = {}
    ff_dict = {}
    for entry in star_list:

        ff_name, star_data = entry

        # Check if the FF from CALSTARS exists in the folder
        if ff_name not in ff_list:
            continue


        dt = getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*dt)

        # Add the time and the stars to the dict
        star_dict[jd] = star_data
        ff_dict[jd] = ff_name

    ### ###

    # If there are no FF files in the directory, don't generate a report
    if len(star_dict) == 0:
        print('No FF files from the CALSTARS file in the directory!')
        return None


    # If the recalibrated platepars file exists, take the one with the most stars
    max_jd = 0
    using_recalib_platepars = False
    if recalibrated_platepars is not None:
        max_stars = 0
        for ff_name_temp in recalibrated_platepars:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp, config.fps, ret_milliseconds=True)
            jd = date2JD(*dt)

            # Check that this file exists in CALSTARS and the list of FF files
            if (jd not in star_dict) or (jd not in ff_dict):
                continue

            # Check if the number of stars on this FF file is larger than the previous maximum
            if len(star_dict[jd]) > max_stars:
                max_jd = jd
                max_stars = len(star_dict[jd])


        # Set a flag to indicate if using recalibrated platepars has failed
        if max_jd == 0:
            using_recalib_platepars = False
        else:

            print('Using recalibrated platepars, file:', ff_dict[max_jd])
            using_recalib_platepars = True

            # Select the platepar where the FF file has the most stars
            platepar_dict = recalibrated_platepars[ff_dict[max_jd]]
            platepar = Platepar()
            platepar.loadFromDict(platepar_dict)

            filtered_star_dict = {max_jd: star_dict[max_jd]}

            # Match stars on the image with the stars in the catalog
            n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
                filtered_star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

            max_matched_stars = n_matched


    # Otherwise take the optimal FF file for evaluation
    if (recalibrated_platepars is None) or (not using_recalib_platepars):

        # If there are more than a set number of FF files to evaluate, choose only the ones with most stars on
        #   the image
        if len(star_dict) > config.calstars_files_N:

            # Find JDs of FF files with most stars on them
            top_nstars_indices = np.argsort([len(x) for x in star_dict.values()])[::-1][:config.calstars_files_N \
                - 1]

            filtered_star_dict = {}
            for i in top_nstars_indices:
                filtered_star_dict[list(star_dict.keys())[i]] = list(star_dict.values())[i]

            star_dict = filtered_star_dict


        # Match stars on the image with the stars in the catalog
        n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
            star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)



    # If no recalibrated platepars were found, find the image with the largest number of matched stars
    if (not using_recalib_platepars) or (max_jd == 0):

        max_jd = 0
        max_matched_stars = 0
        for jd in matched_stars:
            _, _, distances = matched_stars[jd]
            if len(distances) > max_matched_stars:
                max_jd = jd
                max_matched_stars = len(distances)

        
        # If there are no matched stars, use the image with the largest number of detected stars
        if max_matched_stars <= 2:
            max_jd = max(star_dict, key=lambda x: len(star_dict[x]))
            distances = [np.inf]



    # Take the FF file with the largest number of matched stars
    ff_name = ff_dict[max_jd]

    # Load the FF file
    ff = readFF(night_dir_path, ff_name)
    img_h, img_w = ff.avepixel.shape

    dpi = 200
    plt.figure(figsize=(ff.avepixel.shape[1]/dpi, ff.avepixel.shape[0]/dpi), dpi=dpi)

    # Take the average pixel
    img = ff.avepixel

    # Slightly adjust the levels
    img = Image.adjustLevels(img, np.percentile(img, 1.0), 1.2, np.percentile(img, 99.99))

    plt.imshow(img, cmap='gray', interpolation='nearest')

    legend_handles = []


    # Plot detected stars
    for img_star in star_dict[max_jd]:

        y, x, _, _ = img_star

        rect_side = 5*match_radius
        square_patch = plt.Rectangle((x - rect_side/2, y - rect_side/2), rect_side, rect_side, color='g', \
            fill=False, label='Image stars')

        plt.gca().add_artist(square_patch)

    legend_handles.append(square_patch)



    # If there are matched stars, plot them
    if max_matched_stars > 2:

        # Take the solution with the largest number of matched stars
        image_stars, matched_catalog_stars, distances = matched_stars[max_jd]

        # Plot matched stars (image star entries are stored as (y, x, ...))
        for img_star in image_stars:
            star_y, star_x, _, _ = img_star

            circle_patch = plt.Circle((star_x, star_y), radius=3*match_radius, color='y', fill=False, \
                label='Matched stars')

            plt.gca().add_artist(circle_patch)

        legend_handles.append(circle_patch)


        ### Plot match residuals ###

        # Compute predicted positions of matched image stars from the catalog
        x_predicted, y_predicted = raDecToXYPP(matched_catalog_stars[:, 0], \
            matched_catalog_stars[:, 1], max_jd, platepar)

        img_y, img_x, _, _ = image_stars.T

        delta_x = x_predicted - img_x
        delta_y = y_predicted - img_y

        # Compute image residual and angle of the error
        res_angle = np.arctan2(delta_y, delta_x)
        res_distance = np.sqrt(delta_x**2 + delta_y**2)


        # Calculate coordinates of the beginning of the residual line
        res_x_beg = img_x + 3*match_radius*np.cos(res_angle)
        res_y_beg = img_y + 3*match_radius*np.sin(res_angle)

        # Calculate coordinates of the end of the residual line
        res_x_end = img_x + 100*np.cos(res_angle)*res_distance
        res_y_end = img_y + 100*np.sin(res_angle)*res_distance

        # Plot the 100x residuals
        for i in range(len(x_predicted)):
            res_plot = plt.plot([res_x_beg[i], res_x_end[i]], [res_y_beg[i], res_y_end[i]], color='orange', \
                lw=0.5, label='100x residuals')

        legend_handles.append(res_plot[0])

        ### ###

    else:

        distances = [np.inf]
        
        # If there are no matched stars, plot large text in the middle of the screen
        plt.text(img_w/2, img_h/2, "NO MATCHED STARS!", color='r', alpha=0.5, fontsize=20, ha='center',
            va='center')


    ### Plot positions of catalog stars to the limiting magnitude of the faintest matched star + 1 mag ###

    # Find the faintest magnitude among matched stars
    if max_matched_stars > 2:
        faintest_mag = np.max(matched_catalog_stars[:, 2]) + 1

    else:
        # If there are no matched stars, use the limiting magnitude from config
        faintest_mag = config.catalog_mag_limit + 1


    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(max_jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], 
        platepar)

    RA_c = RA_c[0]
    dec_c = dec_c[0]

    fov_radius = np.hypot(*computeFOVSize(platepar))

    # Get stars from the catalog around the defined center in a given radius
    _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, fov_radius, faintest_mag)
    ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

    # Compute image positions of all catalog stars that should be on the image
    x_catalog, y_catalog = raDecToXYPP(ra_catalog, dec_catalog, max_jd, platepar)

    # Filter all catalog stars outside the image
    temp_arr = np.c_[x_catalog, y_catalog, mag_catalog]
    temp_arr = temp_arr[temp_arr[:, 0] >= 0]
    temp_arr = temp_arr[temp_arr[:, 0] <= ff.avepixel.shape[1]]
    temp_arr = temp_arr[temp_arr[:, 1] >= 0]
    temp_arr = temp_arr[temp_arr[:, 1] <= ff.avepixel.shape[0]]
    x_catalog, y_catalog, mag_catalog = temp_arr.T

    # Plot catalog stars on the image
    cat_stars_handle = plt.scatter(x_catalog, y_catalog, c='none', marker='D', lw=1.0, alpha=0.4, \
        s=((4.0 + (faintest_mag - mag_catalog))/3.0)**(2*2.512), edgecolor='r', label='Catalog stars')

    legend_handles.append(cat_stars_handle)

    ### ###


    # Add info text
    info_text = ff_dict[max_jd] + '\n' \
        + "Matched stars: {:d}/{:d}\n".format(max_matched_stars, len(star_dict[max_jd])) \
        + "Median distance: {:.2f} px\n".format(np.median(distances)) \
        + "Catalog limiting magnitude: {:.1f}".format(lim_mag)

    plt.text(10, 10, info_text, bbox=dict(facecolor='black', alpha=0.5), va='top', ha='left', fontsize=4, \
        color='w')

    legend = plt.legend(handles=legend_handles, prop={'size': 4}, loc='upper right')
    legend.get_frame().set_facecolor('k')
    legend.get_frame().set_edgecolor('k')
    for txt in legend.get_texts():
        txt.set_color('w')


    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, ff.avepixel.shape[1]])
    plt.ylim([ff.avepixel.shape[0], 0])

    # Remove the margins
    plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)

    plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_astrometry.jpg'), \
        bbox_inches='tight', pad_inches=0, dpi=dpi)


    if show_graphs:
        plt.show()

    else:
        plt.clf()
        plt.close()



    if max_matched_stars > 2:

        ### Plot the photometry ###

        plt.figure(dpi=dpi)

        # Take only those stars which are within one third of the shorter image axis from the centre
        photom_selection_radius = np.min([img_h, img_w])/3
        filter_indices = ((image_stars[:, 0] - img_h/2)**2 + (image_stars[:, 1] \
            - img_w/2)**2) <= photom_selection_radius**2
        star_intensities = image_stars[filter_indices, 2]
        catalog_mags = matched_catalog_stars[filter_indices, 2]

        # Plot intensities of image stars
        #star_intensities = image_stars[:, 2]
        plt.scatter(-2.5*np.log10(star_intensities), catalog_mags, s=5, c='r')

        # Fit the photometry on automated star intensities
        photom_offset, fit_stddev, _ = photometryFit(np.log10(star_intensities), catalog_mags)


        # Plot photometric offset from the platepar
        x_min, x_max = plt.gca().get_xlim()
        y_min, y_max = plt.gca().get_ylim()

        x_min_w = x_min - 3
        x_max_w = x_max + 3
        y_min_w = y_min - 3
        y_max_w = y_max + 3

        photometry_info = 'Platepar: {:+.2f}LSP {:+.2f} +/- {:.2f} \nGamma = {:.2f}'.format(platepar.mag_0, \
            platepar.mag_lev, platepar.mag_lev_stddev, platepar.gamma)

        # Plot the photometry calibration from the platepar
        logsum_arr = np.linspace(x_min_w, x_max_w, 10)
        plt.plot(logsum_arr, logsum_arr + platepar.mag_lev, label=photometry_info, linestyle='--', \
            color='k', alpha=0.5)

        # Plot the fitted photometry calibration
        fit_info = "Fit: {:+.2f}LSP {:+.2f} +/- {:.2f}".format(-2.5, photom_offset, fit_stddev)
        plt.plot(logsum_arr, logsum_arr + photom_offset, label=fit_info, linestyle='--', color='red', 
            alpha=0.5)

        plt.legend()

        plt.ylabel("Catalog magnitude ({:s})".format(mag_band_str))
        plt.xlabel("Uncalibrated magnitude")

        # Set wider axis limits
        plt.xlim(x_min_w, x_max_w)
        plt.ylim(y_min_w, y_max_w)

        plt.gca().invert_yaxis()
        plt.gca().invert_xaxis()

        plt.grid()

        plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_photometry.png'), dpi=150)


        if show_graphs:
            plt.show()

        else:
            plt.clf()
            plt.close()
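A hedged usage sketch; the night directory path below is a placeholder and config is assumed to be an already-loaded Config instance for the station:

# Usage sketch (placeholder path, pre-loaded config assumed)
night_dir_path = "/path/to/CA0001_20200801_203000_000000"
generateCalibrationReport(config, night_dir_path, match_radius=2.0, show_graphs=False)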
Exemplo n.º 15
0
def recalibrateIndividualFFsAndApplyAstrometry(dir_path, ftpdetectinfo_path, calstars_list, config, platepar):
    """ Recalibrate FF files with detections and apply the recalibrated platepar to those detections. 

    Arguments:
        dir_path: [str] Path where the FTPdetectinfo file is.
        ftpdetectinfo_path: [str] Name of the FTPdetectinfo file.
        calstars_list: [list] A list of entries [[ff_name, star_coordinates], ...].
        config: [Config instance]
        platepar: [Platepar instance] Initial platepar.

    Return:
        recalibrated_platepars: [dict] A dictionary where the keys are FF file names and values are 
            recalibrated platepar instances for every FF file.
    """


    # Read the FTPdetectinfo data
    cam_code, fps, meteor_list = FTPdetectinfo.readFTPdetectinfo(*os.path.split(ftpdetectinfo_path), \
        ret_input_format=True)

    # Convert the list of stars to a per FF name dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}


    # Load catalog stars (overwrite the mag band ratios if specific catalog is used)
    catalog_stars, _, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(config.star_catalog_path,\
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)



    prev_platepar = copy.deepcopy(platepar)

    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for meteor_entry in meteor_list:

        working_platepar = copy.deepcopy(prev_platepar)

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Skip this meteor if its FF file was already recalibrated
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print('------------------------------------------------------------------------------')

        # Find extracted stars on this image
        if ff_name not in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry, the residuals function
        #   needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        # Recalibrate the platepar using star matching
        result = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)

        
        # If the recalibration failed, try using FFT alignment
        if result is None:

            print()
            print('Running FFT alignment...')

            # Run FFT alignment
            calstars_coords = np.array(star_dict_ff[jd])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
            print(calstars_time)
            working_platepar = alignPlatepar(config, prev_platepar, calstars_time, calstars_coords, \
                show_plot=False)

            # Try to recalibrate after FFT alignment
            result = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)

            if result is not None:
                working_platepar = result


        else:
            working_platepar = result


        # Store the platepar if the fit succeeded
        if result is not None:
            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:

            print('Recalibration of {:s} failed, using the previous platepar...'.format(ff_name))

            # If the aligning failed, set the previous platepar as the one that should be used for this FF file
            recalibrated_platepars[ff_name] = prev_platepar


    ### Store all recalibrated platepars as a JSON file ###

    all_pps = {}
    for ff_name in recalibrated_platepars:

        json_str = recalibrated_platepars[ff_name].jsonStr()
        
        all_pps[ff_name] = json.loads(json_str)

    with open(os.path.join(dir_path, config.platepars_recalibrated_name), 'w') as f:
        
        # Convert all platepars to a JSON file
        out_str = json.dumps(all_pps, default=lambda o: o.__dict__, indent=4, sort_keys=True)

        f.write(out_str)

    ### ###



    # If no platepars were recalibrated, use the single platepar recalibration procedure
    if len(recalibrated_platepars) == 0:

        print('No FF images were used for recalibration, using the single platepar calibration function...')

        # Use the initial platepar for calibration
        applyAstrometryFTPdetectinfo(dir_path, os.path.basename(ftpdetectinfo_path), None, platepar=platepar)

        return recalibrated_platepars



    ### Plot difference from reference platepar in angular distance from (0, 0) vs rotation ###

    ang_dists = []
    rot_angles = []
    hour_list = []

    first_jd = np.min([FFfile.filenameToDatetime(ff_name) for ff_name in recalibrated_platepars])

    for ff_name in recalibrated_platepars:
        
        pp_temp = recalibrated_platepars[ff_name]

        # If the fitting failed, skip the platepar
        if pp_temp is None:
            continue

        # Compute the angular separation from the reference platepar
        ang_dist = np.degrees(angularSeparation(np.radians(platepar.RA_d), np.radians(platepar.dec_d), \
            np.radians(pp_temp.RA_d), np.radians(pp_temp.dec_d)))
        ang_dists.append(ang_dist*60)

        rot_angles.append((platepar.pos_angle_ref - pp_temp.pos_angle_ref)*60)

        # Compute the hour of the FF used for recalibration
        hour_list.append((FFfile.filenameToDatetime(ff_name) - first_jd).total_seconds()/3600)


    plt.figure()

    plt.scatter(0, 0, marker='o', edgecolor='k', label='Reference platepar', s=100, c='none', zorder=3)

    plt.scatter(ang_dists, rot_angles, c=hour_list, zorder=3)
    plt.colorbar(label='Hours from first FF file')
    
    plt.xlabel("Angular distance from reference (arcmin)")
    plt.ylabel('Rotation from reference (arcmin)')

    plt.grid()
    plt.legend()

    plt.tight_layout()

    # Generate the name for the plot
    calib_plot_name = os.path.basename(ftpdetectinfo_path).replace('FTPdetectinfo_', '').replace('.txt', '') \
        + '_calibration_variation.png'

    plt.savefig(os.path.join(dir_path, calib_plot_name), dpi=150)

    # plt.show()

    plt.clf()
    plt.close()

    ### ###



    ### Apply platepars to FTPdetectinfo ###

    meteor_output_list = []
    for meteor_entry in meteor_list:

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Get the platepar that will be applied to this FF file
        if ff_name in recalibrated_platepars:
            working_platepar = recalibrated_platepars[ff_name]

        else:
            print('Using default platepar for:', ff_name)
            working_platepar = platepar

        # Apply the recalibrated platepar to meteor centroids
        meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, working_platepar, \
            add_calstatus=True)

        meteor_output_list.append([ff_name, meteor_No, rho, phi, meteor_picks])


    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Recalibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0


    # Back up the old FTPdetectinfo file
    shutil.copy(ftpdetectinfo_path, os.path.splitext(ftpdetectinfo_path)[0] \
        + '_backup_{:s}.txt'.format(datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S.%f')))

    # Save the updated FTPdetectinfo
    FTPdetectinfo.writeFTPdetectinfo(meteor_output_list, dir_path, os.path.basename(ftpdetectinfo_path), \
        dir_path, cam_code, fps, calibration=calib_str, celestial_coords_given=True)


    ### ###

    return recalibrated_platepars
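A sketch of how this function might be driven; dir_path, ftpdetectinfo_path, calstars_list, config and platepar are assumed to have been prepared by the caller (e.g. CALSTARS and platepar files read beforehand):

recalibrated = recalibrateIndividualFFsAndApplyAstrometry(dir_path, ftpdetectinfo_path, \
    calstars_list, config, platepar)
print('Recalibrated platepars computed for {:d} FF files'.format(len(recalibrated)))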
Exemplo n.º 16
0
def alignPlatepar(config,
                  platepar,
                  calstars_time,
                  calstars_coords,
                  scale_update=False,
                  show_plot=False):
    """ Align the platepar using FFT registration between catalog stars and the given list of image stars.
    Arguments:
        config:
        platepar: [Platepar instance] Initial platepar.
        calstars_time: [list] A list of (year, month, day, hour, minute, second, millisecond) of the middle of
            the FF file used for alignment.
        calstars_coords: [ndarray] A 2D numpy array of (x, y) coordinates of image stars.
    Keyword arguments:
        scale_update: [bool] Update the platepar scale. False by default.
        show_plot: [bool] Show the comparison between the reference and image synthetic images.
    Return:
        platepar_aligned: [Platepar instance] The aligned platepar.
    """

    # Create a copy of the config so the original config parameters are not modified
    config = copy.deepcopy(config)

    # Try to optimize the catalog limiting magnitude until the numbers of image and catalog stars match
    maxiter = 10
    search_fainter = True
    mag_step = 0.2
    for inum in range(maxiter):

        # Load the catalog stars
        catalog_stars, _, _ = StarCatalog.readStarCatalog(config.star_catalog_path, config.star_catalog_file, \
            lim_mag=config.catalog_mag_limit, mag_band_ratios=config.star_catalog_band_ratios)

        # Get the RA/Dec of the image centre
        _, ra_centre, dec_centre, _ = ApplyAstrometry.xyToRaDecPP([calstars_time], [platepar.X_res/2], \
                [platepar.Y_res/2], [1], platepar, extinction_correction=False)

        ra_centre = ra_centre[0]
        dec_centre = dec_centre[0]

        # Compute Julian date
        jd = date2JD(*calstars_time)

        # Calculate the FOV radius in degrees
        fov_y, fov_x = ApplyAstrometry.computeFOVSize(platepar)
        fov_radius = np.sqrt(fov_x**2 + fov_y**2)

        # Take only those stars which are inside the FOV
        filtered_indices, _ = subsetCatalog(catalog_stars, ra_centre, dec_centre, jd, platepar.lat, \
            platepar.lon, fov_radius, config.catalog_mag_limit)

        # Take those catalog stars which should be inside the FOV
        ra_catalog, dec_catalog, _ = catalog_stars[filtered_indices].T
        catalog_xy = ApplyAstrometry.raDecToXYPP(ra_catalog, dec_catalog, jd,
                                                 platepar)

        catalog_x, catalog_y = catalog_xy
        catalog_xy = np.c_[catalog_x, catalog_y]

        # Cut all stars that are outside image coordinates
        catalog_xy = catalog_xy[catalog_xy[:, 0] > 0]
        catalog_xy = catalog_xy[catalog_xy[:, 0] < config.width]
        catalog_xy = catalog_xy[catalog_xy[:, 1] > 0]
        catalog_xy = catalog_xy[catalog_xy[:, 1] < config.height]

        # If there are more catalog than image stars, this means that the limiting magnitude is too faint
        #   and that the search should go in the brighter direction
        if len(catalog_xy) > len(calstars_coords):
            search_fainter = False
        else:
            search_fainter = True

        # print('Catalog stars:', len(catalog_xy), 'Image stars:', len(calstars_coords), \
        #     'Limiting magnitude:', config.catalog_mag_limit)

        # Search in mag_step magnitude steps
        if search_fainter:
            config.catalog_mag_limit += mag_step
        else:
            config.catalog_mag_limit -= mag_step

    print('Final catalog limiting magnitude:', config.catalog_mag_limit)

    # Find the transform between the image coordinates and predicted platepar coordinates
    res = findStarsTransform(config,
                             calstars_coords,
                             catalog_xy,
                             show_plot=show_plot)
    angle, scale, translation_x, translation_y = res

    ### Update the platepar ###

    platepar_aligned = copy.deepcopy(platepar)

    # Correct the rotation
    platepar_aligned.pos_angle_ref = (platepar_aligned.pos_angle_ref -
                                      angle) % 360

    # Update the scale if needed
    if scale_update:
        platepar_aligned.F_scale *= scale

    # Compute the new reference RA and Dec
    _, ra_centre_new, dec_centre_new, _ = ApplyAstrometry.xyToRaDecPP([jd2Date(platepar.JD)], \
        [platepar.X_res/2 - platepar.x_poly_fwd[0] - translation_x], \
        [platepar.Y_res/2 - platepar.y_poly_fwd[0] - translation_y], [1], platepar, \
        extinction_correction=False)

    # Correct RA/Dec
    platepar_aligned.RA_d = ra_centre_new[0]
    platepar_aligned.dec_d = dec_centre_new[0]

    # # Update the reference time and hour angle
    # platepar_aligned.JD = jd
    # platepar_aligned.Ho = JD2HourAngle(jd)

    # Recompute the FOV centre in Alt/Az and update the rotation
    platepar_aligned.az_centre, platepar_aligned.alt_centre = raDec2AltAz(platepar.RA_d, \
        platepar.dec_d, platepar.JD, platepar.lat, platepar.lon)
    platepar_aligned.rotation_from_horiz = ApplyAstrometry.rotationWrtHorizon(
        platepar_aligned)

    ###

    return platepar_aligned
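A sketch of a typical call, assuming calstars_coords is an (N, 2) array of image star (x, y) positions and calstars_time the (year, month, day, hour, minute, second, millisecond) tuple of the FF middle:

platepar_aligned = alignPlatepar(config, platepar, calstars_time, calstars_coords, \
    scale_update=False, show_plot=False)
print('Aligned position angle:', platepar_aligned.pos_angle_ref)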
Exemplo n.º 17
0
    def read(self, file_name, fmt=None):
        """ Read the platepar. 
            
        Arguments:
            file_name: [str] Path and the name of the platepar to read.

        Keyword arguments:
            fmt: [str] Format of the platepar file. 'json' for JSON format and 'txt' for the usual CMN textual
                format.

        Return:
            fmt: [str]
        """

        # Check if platepar exists
        if not os.path.isfile(file_name):
            return False

        # Determine the type of the platepar if it is not given
        if fmt is None:

            with open(file_name) as f:
                data = " ".join(f.readlines())

                # Try parsing the file as JSON
                try:
                    json.loads(data)
                    fmt = 'json'

                except ValueError:
                    fmt = 'txt'

        # Load the file as JSON
        if fmt == 'json':

            # Load the JSON file
            with open(file_name) as f:
                data = " ".join(f.readlines())

            # Parse JSON into an object with attributes corresponding to dict keys
            self.__dict__ = json.loads(data)

            # Add UT correction if it was not in the platepar
            if not 'UT_corr' in self.__dict__:
                self.UT_corr = 0

            # Convert lists to numpy arrays
            self.x_poly = np.array(self.x_poly)
            self.y_poly = np.array(self.y_poly)

            # Calculate the datetime
            self.time = jd2Date(self.JD, dt_obj=True)

        # Load the file as TXT
        else:

            with open(file_name) as f:

                # Parse latitude, longitude, elevation
                self.lon, self.lat, self.elev = self.parseLine(f)

                # Parse date and time as int
                D, M, Y, h, m, s = map(int, f.readline().split())

                # Calculate the datetime of the platepar time
                self.time = datetime.datetime(Y, M, D, h, m, s)

                # Convert time to JD
                self.JD = date2JD(Y, M, D, h, m, s)

                # Calculate the reference hour angle
                T = (self.JD - 2451545.0) / 36525.0
                self.Ho = (280.46061837 + 360.98564736629 *
                           (self.JD - 2451545.0) + 0.000387933 * T**2 -
                           T**3 / 38710000.0) % 360

                # Parse camera parameters
                self.X_res, self.Y_res, self.focal_length = self.parseLine(f)

                # Parse the right ascension of the image centre
                self.RA_d, self.RA_H, self.RA_M, self.RA_S = self.parseLine(f)

                # Parse the declination of the image centre
                self.dec_d, self.dec_D, self.dec_M, self.dec_S = self.parseLine(
                    f)

                # Parse the rotation parameter
                self.pos_angle_ref = self.parseLine(f)[0]

                # Parse the sum of image scales for each image axis (arcsec per px)
                self.F_scale = self.parseLine(f)[0]
                self.w_pix = 50 * self.F_scale / 3600
                self.F_scale = 3600 / self.F_scale

                # Load magnitude slope parameters
                self.mag_0, self.mag_lev = self.parseLine(f)

                # Load X axis polynomial parameters
                self.x_poly = np.zeros(shape=(12, ), dtype=np.float64)
                for i in range(12):
                    self.x_poly[i] = self.parseLine(f)[0]

                # Load Y axis polynomial parameters
                self.y_poly = np.zeros(shape=(12, ), dtype=np.float64)
                for i in range(12):
                    self.y_poly[i] = self.parseLine(f)[0]

                # Read station code
                self.station_code = f.readline().replace('\r',
                                                         '').replace('\n', '')

        return fmt
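A sketch of reading a platepar with automatic format detection; the file name below is only a placeholder:

pp = Platepar()
fmt = pp.read('platepar_cmn2010.cal')  # fmt=None triggers JSON/TXT auto-detection
if fmt is False:
    print('Platepar file not found!')
else:
    print('Platepar read as', fmt)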
Exemplo n.º 18
0
def makeFlat(dir_path, config, nostars=False, use_images=False):
    """ Makes a flat field from the files in the given folder. CALSTARS file is needed to estimate the
        quality of every image by counting the number of detected stars.

    Arguments:
        dir_path: [str] Path to the directory which contains the FF files and a CALSTARS file.
        config: [config object]

    Keyword arguments:
        nostars: [bool] If True, all files will be taken regardless of whether they have stars on them or not.
        use_images: [bool] Use image files instead of FF files. False by default.

    Return:
        [2d ndarray] Flat field image as a numpy array. If the flat generation failed, None will be returned.
        
    """

    # If only images are used, then don't look for a CALSTARS file
    if use_images:
        nostars = True

    # Load the calstars file if it should be used
    if not nostars:

        # Find the CALSTARS file in the given folder
        calstars_file = None
        for file_name in os.listdir(dir_path):
            if ('CALSTARS' in file_name) and ('.txt' in file_name):
                calstars_file = file_name
                break

        if calstars_file is None:
            print('CALSTARS file could not be found in the given directory!')
            return None

        # Load the calstars file
        calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_file)

        # Convert the list to a dictionary
        calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

        print('CALSTARS file: ' + calstars_file + ' loaded!')

        # A list of FF files which have any stars on them
        calstars_ff_files = [line[0] for line in calstars_list]

    else:
        calstars = {}
        calstars_ff_files = []

    # Use image files
    if use_images:

        # Find the file type with the highest file frequency in the given folder
        file_extensions = []
        for file_name in os.listdir(dir_path):
            file_ext = file_name.split('.')[-1]
            if file_ext.lower() in ['jpg', 'png', 'bmp']:
                file_extensions.append(file_ext)

        # Get the most frequent file type among the candidate image files
        unique_types, type_counts = np.unique(file_extensions, return_counts=True)
        most_freq_type = unique_types[np.argmax(type_counts)]

        print('Using image type:', most_freq_type)

        # Take only files of that file type
        ff_list = [file_name for file_name in sorted(os.listdir(dir_path)) \
            if file_name.lower().endswith(most_freq_type)]

    # Use FF files
    else:
        ff_list = []

        # Get a list of FF files in the folder
        for file_name in os.listdir(dir_path):
            if validFFName(file_name) and ((file_name in calstars_ff_files)
                                           or nostars):
                ff_list.append(file_name)

        # Check that there are any FF files in the folder
        if not ff_list:
            print('No valid FF files in the selected folder!')
            return None

    ff_list_good = []
    ff_times = []

    # Take only those FF files with enough stars on them
    for ff_name in ff_list:

        if (ff_name in calstars) or nostars:

            # Only apply the minimum star requirement if CALSTARS data is used
            if not nostars:

                # Get the number of stars detected on the FF image
                ff_nstars = len(calstars[ff_name])

            else:
                ff_nstars = 0

            # Check if the number of stars on the image is over the detection threshold
            if (ff_nstars > config.ff_min_stars) or nostars:

                # Add the FF file to the list of FF files to be used to make a flat
                ff_list_good.append(ff_name)

                # If images are used, don't compute the time
                if use_images:
                    ff_time = 0

                else:
                    # Calculate the time of the FF files
                    ff_time = date2JD(*getMiddleTimeFF(
                        ff_name, config.fps, ret_milliseconds=True))

                ff_times.append(ff_time)

    # Check that there are enough good FF files in the folder
    if (len(ff_times) < config.flat_min_imgs) and (not nostars):
        print('Not enough FF files have enough stars on them!')
        return None

    # Make sure the files cover at least 2 hours
    if ((max(ff_times) - min(ff_times)) * 24 <= 2) and (not nostars):
        print('Good FF files cover less than 2 hours!')
        return None

    # Sample FF files if there are more than 200
    max_ff_flat = 200
    if len(ff_list_good) > max_ff_flat:
        ff_list_good = sorted(random.sample(ff_list_good, max_ff_flat))

    print('Using {:d} files for flat...'.format(len(ff_list_good)))

    c = 0
    img_list = []
    median_list = []

    # Median combine all good FF files
    for i in range(len(ff_list_good)):

        # Load 10 files at a time and median combine them, which conserves memory
        if c < 10:

            # Use images
            if use_images:
                img = loadImage(os.path.join(dir_path, ff_list_good[i]), -1)

            # Use FF files
            else:
                ff = readFF(dir_path, ff_list_good[i])

                # Skip the file if it is corrupted
                if ff is None:
                    continue

                img = ff.avepixel

            img_list.append(img)

            c += 1

        else:

            img_list = np.array(img_list)

            # Median combine the loaded 10 (or less) images
            ff_median = np.median(img_list, axis=0)
            median_list.append(ff_median)

            img_list = []
            c = 0

    # If there is more than one calculated median image, combine them
    if len(median_list) > 1:

        # Median combine all median images
        median_list = np.array(median_list)
        ff_median = np.median(median_list, axis=0)

    else:
        if len(median_list) > 0:
            ff_median = median_list[0]
        else:
            ff_median = np.median(np.array(img_list), axis=0)

    # Stretch flat to 0-255
    ff_median = ff_median / np.max(ff_median) * 255

    # Convert the flat to 8 bits
    ff_median = ff_median.astype(np.uint8)

    return ff_median
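A sketch of generating and saving a flat; writing through Pillow is an assumption, any image writer that accepts an 8-bit numpy array would do:

import os
from PIL import Image as PILImage

flat = makeFlat(dir_path, config)
if flat is not None:
    # The returned flat is an 8-bit 2D array, save it as a grayscale PNG
    PILImage.fromarray(flat).save(os.path.join(dir_path, 'flat.png'))
else:
    print('Flat generation failed!')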
Exemplo n.º 19
0
def recalibrateIndividualFFsAndApplyAstrometry(dir_path, ftpdetectinfo_path, calstars_list, config, platepar,
    generate_plot=True):
    """ Recalibrate FF files with detections and apply the recalibrated platepar to those detections. 

    Arguments:
        dir_path: [str] Path where the FTPdetectinfo file is.
        ftpdetectinfo_path: [str] Name of the FTPdetectinfo file.
        calstars_list: [list] A list of entries [[ff_name, star_coordinates], ...].
        config: [Config instance]
        platepar: [Platepar instance] Initial platepar.

    Keyword arguments:
        generate_plot: [bool] Generate the calibration variation plot. True by default.

    Return:
        recalibrated_platepars: [dict] A dictionary where the keys are FF file names and values are 
            recalibrated platepar instances for every FF file.
    """

    # Use a copy of the config file
    config = copy.deepcopy(config)

    # If the given file does not exist, return nothing
    if not os.path.isfile(ftpdetectinfo_path):
        print('ERROR! The FTPdetectinfo file does not exist: {:s}'.format(ftpdetectinfo_path))
        print('    The recalibration on every file was not done!')

        return {}


    # Read the FTPdetectinfo data
    cam_code, fps, meteor_list = FTPdetectinfo.readFTPdetectinfo(*os.path.split(ftpdetectinfo_path), \
        ret_input_format=True)

    # Convert the list of stars to a per FF name dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}


    ### Add neighboring FF files for more robust photometry estimation ###

    ff_processing_list = []

    # Make a list of sorted FF files in CALSTARS
    calstars_ffs = sorted([ff_file for ff_file in calstars])

    # Go through the list of FF files with detections and add neighboring FFs
    for meteor_entry in meteor_list:

        ff_name = meteor_entry[0]

        if ff_name in calstars_ffs:

            # Find the index of the given FF file in the list of calstars
            ff_indx = calstars_ffs.index(ff_name)

            # Add neighbours to the processing list
            for k in range(-(RECALIBRATE_NEIGHBOURHOOD_SIZE//2), RECALIBRATE_NEIGHBOURHOOD_SIZE//2 + 1):

                k_indx = ff_indx + k

                if (k_indx >= 0) and (k_indx < len(calstars_ffs)):

                    ff_name_tmp = calstars_ffs[k_indx]
                    if ff_name_tmp not in ff_processing_list:
                        ff_processing_list.append(ff_name_tmp)


    # Sort the processing list of FF files
    ff_processing_list = sorted(ff_processing_list)


    ### ###


    # Globally increase catalog limiting magnitude
    config.catalog_mag_limit += 1

    # Load catalog stars (overwrite the mag band ratios if specific catalog is used)
    star_catalog_status = StarCatalog.readStarCatalog(config.star_catalog_path,\
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)

    if not star_catalog_status:
        print("Could not load the star catalog!")
        print(os.path.join(config.star_catalog_path, config.star_catalog_file))
        return {}

    catalog_stars, _, config.star_catalog_band_ratios = star_catalog_status


    # Update the platepar coordinates from the config file
    platepar.lat = config.latitude
    platepar.lon = config.longitude
    platepar.elev = config.elevation


    prev_platepar = copy.deepcopy(platepar)

    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for ff_name in ff_processing_list:

        working_platepar = copy.deepcopy(prev_platepar)

        # Skip this meteor if its FF file was already recalibrated
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print('------------------------------------------------------------------------------')

        # Find extracted stars on this image
        if ff_name not in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry, the residuals function
        #   needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        # Recalibrate the platepar using star matching
        result, min_match_radius = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)

        
        # If the recalibration failed, try using FFT alignment
        if result is None:

            print()
            print('Running FFT alignment...')

            # Run FFT alignment
            calstars_coords = np.array(star_dict_ff[jd])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
            print(calstars_time)
            test_platepar = alignPlatepar(config, prev_platepar, calstars_time, calstars_coords, \
                show_plot=False)

            # Try to recalibrate after FFT alignment
            result, _ = recalibrateFF(config, test_platepar, jd, star_dict_ff, catalog_stars)


            # If the FFT alignment failed, align the original platepar using the smallest radius that matched
            #   and force-save the platepar
            if (result is None) and (min_match_radius is not None):
                print()
                print("Using the old platepar with the minimum match radius of: {:.2f}".format(min_match_radius))
                result, _ = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars, 
                    max_match_radius=min_match_radius, force_platepar_save=True)

                if result is not None:
                    working_platepar = result


            # If the alignment succeeded, save the result
            else:
                working_platepar = result


        else:
            working_platepar = result


        # Store the platepar if the fit succeeded
        if result is not None:

            # Recompute alt/az of the FOV centre
            working_platepar.az_centre, working_platepar.alt_centre = raDec2AltAz(working_platepar.RA_d, \
                working_platepar.dec_d, working_platepar.JD, working_platepar.lat, working_platepar.lon)

            # Recompute the rotation wrt horizon
            working_platepar.rotation_from_horiz = rotationWrtHorizon(working_platepar)

            # Mark the platepar to indicate that it was automatically recalibrated on an individual FF file
            working_platepar.auto_recalibrated = True

            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:

            print('Recalibration of {:s} failed, using the previous platepar...'.format(ff_name))

            # Mark the platepar to indicate that autorecalib failed
            prev_platepar_tmp = copy.deepcopy(prev_platepar)
            prev_platepar_tmp.auto_recalibrated = False

            # If the aligning failed, set the previous platepar as the one that should be used for this FF file
            recalibrated_platepars[ff_name] = prev_platepar_tmp



    ### Average out photometric offsets within the given neighbourhood size ###

    # Go through the list of FF files with detections
    for meteor_entry in meteor_list:

        ff_name = meteor_entry[0]

        # Make sure the FF was successfully recalibrated
        if ff_name in recalibrated_platepars:

            # Find the index of the given FF file in the list of calstars
            ff_indx = calstars_ffs.index(ff_name)

            # Compute the average photometric offset and the improved standard deviation using all
            #   neighbors
            photom_offset_tmp_list = []
            photom_offset_std_tmp_list = []
            neighboring_ffs = []
            for k in range(-(RECALIBRATE_NEIGHBOURHOOD_SIZE//2), RECALIBRATE_NEIGHBOURHOOD_SIZE//2 + 1):

                k_indx = ff_indx + k

                if (k_indx >= 0) and (k_indx < len(calstars_ffs)):

                    # Get the name of the FF file
                    ff_name_tmp = calstars_ffs[k_indx]

                    # Check that the neighboring FF was successfully recalibrated
                    if ff_name_tmp in recalibrated_platepars:
                        
                        # Get the computed photometric offset and stddev
                        photom_offset_tmp_list.append(recalibrated_platepars[ff_name_tmp].mag_lev)
                        photom_offset_std_tmp_list.append(recalibrated_platepars[ff_name_tmp].mag_lev_stddev)
                        neighboring_ffs.append(ff_name_tmp)


            # Compute the new photometric offset and improved standard deviation (assume equal sample size)
            #   Source: https://stats.stackexchange.com/questions/55999/is-it-possible-to-find-the-combined-standard-deviation
            photom_offset_new = np.mean(photom_offset_tmp_list)
            photom_offset_std_new = np.sqrt(\
                np.sum([st**2 + (mt - photom_offset_new)**2 \
                for mt, st in zip(photom_offset_tmp_list, photom_offset_std_tmp_list)]) \
                / len(photom_offset_tmp_list)
                )

            # Assign the new photometric offset and standard deviation to all FFs used for computation
            for ff_name_tmp in neighboring_ffs:
                recalibrated_platepars[ff_name_tmp].mag_lev = photom_offset_new
                recalibrated_platepars[ff_name_tmp].mag_lev_stddev = photom_offset_std_new

    ### ###


    ### Store all recalibrated platepars as a JSON file ###

    all_pps = {}
    for ff_name in recalibrated_platepars:

        json_str = recalibrated_platepars[ff_name].jsonStr()
        
        all_pps[ff_name] = json.loads(json_str)

    with open(os.path.join(dir_path, config.platepars_recalibrated_name), 'w') as f:
        
        # Convert all platepars to a JSON file
        out_str = json.dumps(all_pps, default=lambda o: o.__dict__, indent=4, sort_keys=True)

        f.write(out_str)

    ### ###



    # If no platepars were recalibrated, use the single platepar recalibration procedure
    if len(recalibrated_platepars) == 0:

        print('No FF images were used for recalibration, using the single platepar calibration function...')

        # Use the initial platepar for calibration
        applyAstrometryFTPdetectinfo(dir_path, os.path.basename(ftpdetectinfo_path), None, platepar=platepar)

        return recalibrated_platepars



    ### GENERATE PLOTS ###

    dt_list = []
    ang_dists = []
    rot_angles = []
    hour_list = []
    photom_offset_list = []
    photom_offset_std_list = []

    first_dt = np.min([FFfile.filenameToDatetime(ff_name) for ff_name in recalibrated_platepars])

    for ff_name in recalibrated_platepars:
        
        pp_temp = recalibrated_platepars[ff_name]

        # If the fitting failed, skip the platepar
        if pp_temp is None:
            continue

        # Add the datetime of the FF file to the list
        ff_dt = FFfile.filenameToDatetime(ff_name)
        dt_list.append(ff_dt)


        # Compute the angular separation from the reference platepar
        ang_dist = np.degrees(angularSeparation(np.radians(platepar.RA_d), np.radians(platepar.dec_d), \
            np.radians(pp_temp.RA_d), np.radians(pp_temp.dec_d)))
        ang_dists.append(ang_dist*60)

        # Compute rotation difference
        rot_diff = (platepar.pos_angle_ref - pp_temp.pos_angle_ref + 180)%360 - 180
        rot_angles.append(rot_diff*60)

        # Compute the hour of the FF used for recalibration
        hour_list.append((ff_dt - first_dt).total_seconds()/3600)

        # Add the photometric offset to the list
        photom_offset_list.append(pp_temp.mag_lev)
        photom_offset_std_list.append(pp_temp.mag_lev_stddev)



    if generate_plot:

        # Generate the name for the plots
        plot_name = os.path.basename(ftpdetectinfo_path).replace('FTPdetectinfo_', '').replace('.txt', '')

        
        ### Plot difference from reference platepar in angular distance from (0, 0) vs rotation ###    

        plt.figure()

        plt.scatter(0, 0, marker='o', edgecolor='k', label='Reference platepar', s=100, c='none', zorder=3)

        plt.scatter(ang_dists, rot_angles, c=hour_list, zorder=3)
        plt.colorbar(label="Hours from first FF file")
        
        plt.xlabel("Angular distance from reference (arcmin)")
        plt.ylabel("Rotation from reference (arcmin)")

        plt.title("FOV centre drift starting at {:s}".format(first_dt.strftime("%Y/%m/%d %H:%M:%S")))

        plt.grid()
        plt.legend()

        plt.tight_layout()            

        plt.savefig(os.path.join(dir_path, plot_name + '_calibration_variation.png'), dpi=150)

        # plt.show()

        plt.clf()
        plt.close()

        ### ###


        ### Plot the photometric offset variation ###

        plt.figure()

        plt.errorbar(dt_list, photom_offset_list, yerr=photom_offset_std_list, fmt="o", \
            ecolor='lightgray', elinewidth=2, capsize=0, ms=2)

        # Format datetimes
        plt.gca().xaxis.set_major_formatter(mdates.DateFormatter("%H:%M"))

        # rotate and align the tick labels so they look better
        plt.gcf().autofmt_xdate()

        plt.xlabel("UTC time")
        plt.ylabel("Photometric offset")

        plt.title("Photometric offset variation")

        plt.grid()

        plt.tight_layout()

        plt.savefig(os.path.join(dir_path, plot_name + '_photometry_variation.png'), dpi=150)

        plt.clf()
        plt.close()

    ### ###



    ### Apply platepars to FTPdetectinfo ###

    meteor_output_list = []
    for meteor_entry in meteor_list:

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Get the platepar that will be applied to this FF file
        if ff_name in recalibrated_platepars:
            working_platepar = recalibrated_platepars[ff_name]

        else:
            print('Using default platepar for:', ff_name)
            working_platepar = platepar

        # Apply the recalibrated platepar to meteor centroids
        meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, working_platepar, \
            add_calstatus=True)

        meteor_output_list.append([ff_name, meteor_No, rho, phi, meteor_picks])


    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Recalibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0


    # Back up the old FTPdetectinfo file
    try:
        shutil.copy(ftpdetectinfo_path, os.path.splitext(ftpdetectinfo_path)[0] \
            + '_backup_{:s}.txt'.format(datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S.%f')))
    except (IOError, OSError):
        print('ERROR! The FTPdetectinfo file could not be backed up: {:s}'.format(ftpdetectinfo_path))

    # Save the updated FTPdetectinfo
    FTPdetectinfo.writeFTPdetectinfo(meteor_output_list, dir_path, os.path.basename(ftpdetectinfo_path), \
        dir_path, cam_code, fps, calibration=calib_str, celestial_coords_given=True)


    ### ###

    return recalibrated_platepars
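A small numeric sketch of the photometric-offset averaging done above: with equal sample sizes per FF file, the combined standard deviation folds in both the per-FF scatter and the spread of the per-FF mean offsets (the numbers below are made up):

import numpy as np

offsets = np.array([10.42, 10.55, 10.48])   # hypothetical mag_lev values of neighbouring FFs
stddevs = np.array([0.15, 0.12, 0.18])      # hypothetical mag_lev_stddev values

offset_new = np.mean(offsets)
stddev_new = np.sqrt(np.sum(stddevs**2 + (offsets - offset_new)**2)/len(offsets))
print(offset_new, stddev_new)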
Exemplo n.º 20
0
def altAz2RADec(lat,
                lon,
                UT_corr,
                time_data,
                azimuth_data,
                altitude_data,
                dt_time=False):
    """ Convert the azimuth and altitude in a given time and position on Earth to right ascension and 
        declination. 
    
    Arguments:
        lat: [float] latitude of the observer in degrees
        lon: [float] longitude of the observer in degrees
        UT_corr: [float] UT correction in hours (difference from local time to UT)
        time_data: [2D ndarray] numpy array containing time tuples of each data point (year, month, day, 
            hour, minute, second, millisecond)
        azimuth_data: [ndarray] 1D numpy array containing the azimuth of each data point (degrees)
        altitude_data: [ndarray] 1D numpy array containing the altitude of each data point (degrees)

    Keyword arguments:
        dt_time: [bool] If True, datetime objects can be passed for time_data.

    Return: 
        (JD_data, RA_data, dec_data): [tuple of ndarrays]
            JD_data: [ndarray] julian date of each data point
            RA_data: [ndarray] right ascension of each point
            dec_data: [ndarray] declination of each point
    """

    # Initialize final values containers
    JD_data = np.zeros_like(azimuth_data, dtype=np.float64)
    RA_data = np.zeros_like(azimuth_data, dtype=np.float64)
    dec_data = np.zeros_like(azimuth_data, dtype=np.float64)

    # Precalculate some parameters
    sl = math.sin(math.radians(lat))
    cl = math.cos(math.radians(lat))

    i = 0
    data_matrix = np.vstack((azimuth_data, altitude_data)).T

    # Go through all given data points
    for azimuth, altitude in data_matrix:

        if dt_time:
            JD = datetime2JD(time_data[i], UT_corr=-UT_corr)

        else:
            # Extract time
            Y, M, D, h, m, s, ms = time_data[i]
            JD = date2JD(Y, M, D, h, m, s, ms, UT_corr=-UT_corr)

        # Convert altitude and azimuth to radians
        az_rad = math.radians(azimuth)
        alt_rad = math.radians(altitude)

        saz = math.sin(az_rad)
        salt = math.sin(alt_rad)
        caz = math.cos(az_rad)
        calt = math.cos(alt_rad)

        x = -saz * calt
        y = -caz * sl * calt + salt * cl
        HA = math.degrees(math.atan2(x, y))

        # Calculate the reference hour angle (Greenwich Mean Sidereal Time in degrees)

        T = (JD - 2451545.0) / 36525.0
        Ho = (280.46061837 + 360.98564736629 *
              (JD - 2451545.0) + 0.000387933 * T**2 - T**3 / 38710000.0) % 360

        RA = (Ho + lon - HA) % 360
        dec = math.degrees(math.asin(sl * salt + cl * calt * caz))

        # Save calculated values to an output array
        JD_data[i] = JD
        RA_data[i] = RA
        dec_data[i] = dec

        i += 1

    return JD_data, RA_data, dec_data
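A sketch of a single-point conversion; the site coordinates and the time below are arbitrary example values:

import numpy as np

time_data = np.array([[2020, 8, 12, 2, 30, 0, 0]])  # one (Y, M, D, h, m, s, ms) entry
azimuth_data = np.array([120.0])                    # degrees
altitude_data = np.array([45.0])                    # degrees

jd, ra, dec = altAz2RADec(45.0, 15.0, 0, time_data, azimuth_data, altitude_data)
print(jd[0], ra[0], dec[0])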
Exemplo n.º 21
0
    def read(self, file_name, fmt=None, use_flat=None):
        """ Read the platepar.

        Arguments:
            file_name: [str] Path and the name of the platepar to read.
        Keyword arguments:
            fmt: [str] Format of the platepar file. 'json' for JSON format and 'txt' for the usual CMN textual
                format.
            use_flat: [bool] Indicates whether a flat is used or not. None by default.
        Return:
            fmt: [str]
        """


        # Check if platepar exists
        if not os.path.isfile(file_name):
            return False


        # Determine the type of the platepar if it is not given
        if fmt is None:

            with open(file_name) as f:
                data = " ".join(f.readlines())

                # Try parsing the file as JSON
                try:
                    json.loads(data)
                    fmt = 'json'

                except:
                    fmt = 'txt'


        # Load the file as JSON
        if fmt == 'json':

            # Load the JSON file
            with open(file_name) as f:
                data = " ".join(f.readlines())

            # Load the platepar from the JSON dictionary
            self.loadFromDict(json.loads(data), use_flat=use_flat)


        # Load the file as TXT (old CMN format)
        else:

            with open(file_name) as f:

                self.UT_corr = 0
                self.gamma = 1.0
                self.star_list = []

                # Parse latitude, longitude, elevation
                self.lon, self.lat, self.elev = self.parseLine(f)

                # Parse date and time as int
                D, M, Y, h, m, s = map(int, f.readline().split())

                # Calculate the datetime of the platepar time
                self.time = datetime.datetime(Y, M, D, h, m, s)

                # Convert time to JD
                self.JD = date2JD(Y, M, D, h, m, s)

                # Calculate the reference hour angle
                T = (self.JD - 2451545.0)/36525.0
                self.Ho = (280.46061837 + 360.98564736629*(self.JD - 2451545.0) + 0.000387933*T**2 -
                    T**3/38710000.0)%360

                # Parse camera parameters
                self.X_res, self.Y_res, self.focal_length = self.parseLine(f)

                # Parse the right ascension of the image centre
                self.RA_d, self.RA_H, self.RA_M, self.RA_S = self.parseLine(f)

                # Parse the declination of the image centre
                self.dec_d, self.dec_D, self.dec_M, self.dec_S = self.parseLine(f)

                # Parse the rotation parameter
                self.pos_angle_ref = self.parseLine(f)[0]

                # Parse the image scale (convert from arcsec/px to px/deg)
                self.F_scale = self.parseLine(f)[0]
                self.F_scale = 3600/self.F_scale

                # Load magnitude slope parameters
                self.mag_0, self.mag_lev = self.parseLine(f)

                # Load X axis polynomial parameters
                self.x_poly_fwd = self.x_poly_rev = np.zeros(shape=(12,), dtype=np.float64)
                for i in range(12):
                    self.x_poly_fwd[i] = self.x_poly_rev[i] = self.parseLine(f)[0]

                # Load Y axis polynomial parameters
                self.y_poly_fwd = self.y_poly_rev = np.zeros(shape=(12,), dtype=np.float64)
                for i in range(12):
                    self.y_poly_fwd[i] = self.y_poly_rev[i] = self.parseLine(f)[0]

                # Read station code
                self.station_code = f.readline().replace('\r', '').replace('\n', '')


        # Add a default vignetting coefficient if it doesn't already exist
        self.addVignettingCoeff(use_flat)

        return fmt
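
A hedged usage sketch for the read() method above (the import path and the platepar file name are
assumptions made for illustration):

from RMS.Formats.Platepar import Platepar

# Read a platepar, letting read() auto-detect JSON vs. the old CMN text format
pp = Platepar()
fmt = pp.read("platepar_cmn2010.cal", fmt=None, use_flat=None)

if fmt is False:
    print("Platepar file not found!")
else:
    print("Loaded platepar, format:", fmt)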
Exemplo n.º 22
0
def autoCheckFit(config, platepar, calstars_list):
    """ Attempts to refine the astrometry fit with the given stars and and initial astrometry parameters.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Initial astrometry parameters.
        calstars_list: [list] A list containing stars extracted from FF files. See RMS.Formats.CALSTARS for
            more details.
    
    Return:
        (platepar, fit_status):
            platepar: [Platepar structure] Estimated/refined platepar.
            fit_status: [bool] True if the fit was successful, False if not.
    """

    # Convert the list to a dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

    # Load catalog stars
    catalog_stars = StarCatalog.readStarCatalog(config.star_catalog_path, config.star_catalog_file, \
        lim_mag=config.catalog_mag_limit, mag_band_ratios=config.star_catalog_band_ratios)

    # Dictionary which will contain the JD, and a list of (X, Y, bg_intens, intens) of the stars
    star_dict = {}

    # Take only those files with enough stars on them
    for ff_name in calstars:

        stars_list = calstars[ff_name]

        # Check if there are enough stars on the image
        if len(stars_list) >= config.ff_min_stars:

            # Calculate the JD time of the FF file
            dt = FFfile.getMiddleTimeFF(ff_name,
                                        config.fps,
                                        ret_milliseconds=True)
            jd = date2JD(*dt)

            # Add the time and the stars to the dict
            star_dict[jd] = stars_list

    # There have to be at least calstars_files_N FF files for star fitting; if there are more, a random
    #   subset of calstars_files_N files will be used
    if len(star_dict) < config.calstars_files_N:
        print('Not enough FF files in CALSTARS for ACF!')
        return platepar, False

    else:

        # Randomly choose calstars_files_N image files from the whole list
        rand_keys = random.sample(list(star_dict), config.calstars_files_N)
        star_dict = {key: star_dict[key] for key in rand_keys}

    # Calculate the total number of calibration stars used
    total_calstars = sum([len(star_dict[key]) for key in star_dict])
    print('Total calstars:', total_calstars)

    if total_calstars < config.calstars_min_stars:
        print('Not enough calibration stars, need at least',
              config.calstars_min_stars)
        return platepar, False

    # A list of matching radii to try, given as pairs of [radius, fit_distortion_flag]
    min_radius = 0.5
    radius_list = [[10, False], [5, False], [3, False], [1.5, True],
                   [min_radius, True]]

    # Calculate the function tolerance so the desired precision can be reached (the value is computed
    #   in the same way as the cost function)
    fatol = (config.dist_check_threshold**
             2) / np.sqrt(len(star_dict) * config.min_matched_stars + 1)

    # Parameter estimation tolerance for angular values
    fov_w = platepar.X_res / platepar.F_scale
    xatol_ang = config.dist_check_threshold * fov_w / platepar.X_res

    ### If the initial match is good enough, do only a quick recalibration ###

    # Match the stars and calculate the residuals
    n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
        min_radius, ret_nmatch=True)

    if n_matched >= config.calstars_files_N:

        # Check if the average distance with the tightest radius is close
        if avg_dist < config.dist_check_quick_threshold:

            # Use a reduced set of initial radius values
            radius_list = [[1.5, True], [min_radius, True]]

    ##########

    # Match with increasingly smaller search radii around image stars
    for i, (match_radius, fit_distorsion) in enumerate(radius_list):

        # Match the stars and calculate the residuals
        n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
            match_radius, ret_nmatch=True)

        print('Max radius:', match_radius)
        print('Initial values:')
        print(' Matched stars:', n_matched)
        print(' Average deviation:', avg_dist)

        # The initial number of matched stars has to be at least the number of FF images, otherwise it means
        #   that the initial platepar is no good
        if n_matched < config.calstars_files_N:
            print(
                'The total number of initially matched stars is too small! Please manually redo the plate or make sure there are enough calibration stars.'
            )
            return platepar, False

        # Check if the platepar is good enough and do not estimate further parameters
        if checkFitGoodness(config, platepar, catalog_stars, star_dict,
                            min_radius):

            # Print out notice only if the platepar is good right away
            if i == 0:
                print("Initial platepar is good enough!")

            return platepar, True

        # Initial parameters for the astrometric fit
        p0 = [
            platepar.RA_d, platepar.dec_d, platepar.pos_angle_ref,
            platepar.F_scale
        ]

        # Fit the astrometric parameters
        res = scipy.optimize.minimize(_calcImageResidualsAstro, p0, args=(config, platepar, catalog_stars, \
            star_dict, match_radius), method='Nelder-Mead', \
            options={'fatol': fatol, 'xatol': xatol_ang})

        print(res)

        # If the fit was not successful, stop further fitting
        if not res.success:
            return platepar, False

        else:
            # If the fit was successful, use the new parameters from now on
            ra_ref, dec_ref, pos_angle_ref, F_scale = res.x

            platepar.RA_d = ra_ref
            platepar.dec_d = dec_ref
            platepar.pos_angle_ref = pos_angle_ref
            platepar.F_scale = F_scale

        # Check if the platepar is good enough and do not estimate further parameters
        if checkFitGoodness(config, platepar, catalog_stars, star_dict,
                            min_radius):
            return platepar, True

        # Fit the lens distortion parameters
        if fit_distorsion:

            # Fit the distortion parameters (X axis)
            res = scipy.optimize.minimize(_calcImageResidualsDistorsion, platepar.x_poly, args=(config, platepar,\
                catalog_stars, star_dict, match_radius, 'x'), method='Nelder-Mead', \
                options={'fatol': fatol, 'xatol': 0.1})

            print(res)

            # If the fit was not successful, stop further fitting
            if not res.success:
                return platepar, False

            else:
                platepar.x_poly = res.x

            # Check if the platepar is good enough and do not estimate further parameters
            if checkFitGoodness(config, platepar, catalog_stars, star_dict,
                                min_radius):
                return platepar, True

            # Fit the distortion parameters (Y axis)
            res = scipy.optimize.minimize(_calcImageResidualsDistorsion, platepar.y_poly, args=(config, platepar,\
                catalog_stars, star_dict, match_radius, 'y'), method='Nelder-Mead', \
                options={'fatol': fatol, 'xatol': 0.1})

            print(res)

            # If the fit was not successful, stop further fitting
            if not res.success:
                return platepar, False

            else:
                platepar.y_poly = res.x

    # Match the stars and calculate the residuals
    n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
        star_dict, min_radius, ret_nmatch=True)

    print('FINAL SOLUTION with {:f} px:'.format(min_radius))
    print('Matched stars:', n_matched)
    print('Average deviation:', avg_dist)

    return platepar, True
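
A hedged sketch of how autoCheckFit() might be driven from a CALSTARS file. The config loader import is an
assumption, readCALSTARS and the Platepar class are used the same way as in the surrounding examples, and
the paths are placeholders.

import os

from RMS.ConfigReader import loadConfigFromDirectory
from RMS.Formats.CALSTARS import readCALSTARS
from RMS.Formats.Platepar import Platepar

night_dir = "/path/to/night_directory"

# Load the station config and the extracted star list
config = loadConfigFromDirectory('.config', night_dir)
calstars_list = readCALSTARS(night_dir, "CALSTARS_file.txt")

# Load the initial platepar and try to refine it
platepar = Platepar()
platepar.read(os.path.join(night_dir, config.platepar_name), use_flat=config.use_flat)

platepar, fit_ok = autoCheckFit(config, platepar, calstars_list)
print("Fit successful:", fit_ok)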
Exemplo n.º 23
0
def generateCalibrationReport(config,
                              night_dir_path,
                              match_radius=2.0,
                              platepar=None,
                              show_graphs=False):
    """ Given the folder of the night, find the Calstars file, check the star fit and generate a report
        with the quality of the calibration. The report contains information about both the astrometry and
        the photometry calibration. Graphs will be saved in the given directory of the night.
    
    Arguments:
        config: [Config instance]
        night_dir_path: [str] Full path to the directory of the night.

    Keyword arguments:
        match_radius: [float] Match radius for star matching between image and catalog stars (px).
        platepar: [Platepar instance] Use this platepar instead of finding one in the folder.
        show_graphs: [bool] Show the graphs on the screen. False by default.

    Return:
        None
    """

    # Find the CALSTARS file in the given folder
    calstars_file = None
    for file_name in os.listdir(night_dir_path):
        if ('CALSTARS' in file_name) and ('.txt' in file_name):
            calstars_file = file_name
            break

    if calstars_file is None:
        print('CALSTARS file could not be found in the given directory!')
        return None

    # Load the calstars file
    star_list = readCALSTARS(night_dir_path, calstars_file)

    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(night_dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break

    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file:
        with open(os.path.join(night_dir_path,
                               platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print(
                'Loaded recalibrated platepars JSON file for the calibration report...'
            )

    ### ###

    ### Load the platepar file ###

    # Find the platepar file in the given directory if it was not given
    if platepar is None:

        # Find the platepar file
        platepar_file = None
        for file_name in os.listdir(night_dir_path):
            if file_name == config.platepar_name:
                platepar_file = file_name
                break

        if platepar_file is None:
            print('The platepar cannot be found in the night directory!')
            return None

        # Load the platepar file
        platepar = Platepar()
        platepar.read(os.path.join(night_dir_path, platepar_file),
                      use_flat=config.use_flat)

    ### ###

    night_name = os.path.split(night_dir_path.strip(os.sep))[1]

    # Go one mag deeper than in the config
    lim_mag = config.catalog_mag_limit + 1

    # Load catalog stars (load one magnitude deeper)
    catalog_stars, mag_band_str, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\
        config.star_catalog_path, config.star_catalog_file, lim_mag=lim_mag, \
        mag_band_ratios=config.star_catalog_band_ratios)

    ### Take only those CALSTARS entries for which FF files exist in the folder ###

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(night_dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)

    # Filter out calstars entries, generate a star dictionary where the keys are JDs of FFs
    star_dict = {}
    ff_dict = {}
    for entry in star_list:

        ff_name, star_data = entry

        # Check if the FF from CALSTARS exists in the folder
        if ff_name not in ff_list:
            continue

        dt = getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*dt)

        # Add the time and the stars to the dict
        star_dict[jd] = star_data
        ff_dict[jd] = ff_name

    ### ###

    # If there are no FF files in the directory, don't generate a report
    if len(star_dict) == 0:
        print('No FF files from the CALSTARS file in the directory!')
        return None

    # If the recalibrated platepars file exists, take the one with the most stars
    max_jd = 0
    using_recalib_platepars = False
    if recalibrated_platepars is not None:
        max_stars = 0
        for ff_name_temp in recalibrated_platepars:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp,
                                 config.fps,
                                 ret_milliseconds=True)
            jd = date2JD(*dt)

            # Check that this file exists in CALSTARS and the list of FF files
            if (jd not in star_dict) or (jd not in ff_dict):
                continue

            # Check if the number of stars on this FF file is larger than the current maximum
            if len(star_dict[jd]) > max_stars:
                max_jd = jd
                max_stars = len(star_dict[jd])

        # Set a flag to indicate if using recalibrated platepars has failed
        if max_jd == 0:
            using_recalib_platepars = False
        else:

            print('Using recalibrated platepars, file:', ff_dict[max_jd])
            using_recalib_platepars = True

            # Select the platepar where the FF file has the most stars
            platepar_dict = recalibrated_platepars[ff_dict[max_jd]]
            platepar = Platepar()
            platepar.loadFromDict(platepar_dict, use_flat=config.use_flat)

            filtered_star_dict = {max_jd: star_dict[max_jd]}

            # Match stars on the image with the stars in the catalog
            n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
                filtered_star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

            max_matched_stars = n_matched

    # Otherwise take the optimal FF file for evaluation
    if (recalibrated_platepars is None) or (not using_recalib_platepars):

        # If there are more than a set number of FF files to evaluate, choose only the ones with the most
        #   stars on the image
        if len(star_dict) > config.calstars_files_N:

            # Find JDs of FF files with most stars on them
            top_nstars_indices = np.argsort([len(x) for x in star_dict.values()])[::-1][:config.calstars_files_N \
                - 1]

            filtered_star_dict = {}
            for i in top_nstars_indices:
                filtered_star_dict[list(star_dict.keys())[i]] = list(
                    star_dict.values())[i]

            star_dict = filtered_star_dict

        # Match stars on the image with the stars in the catalog
        n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
            star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag)

    # If no recalibrated platepars were found, find the image with the largest number of matched stars
    if (not using_recalib_platepars) or (max_jd == 0):

        max_jd = 0
        max_matched_stars = 0
        for jd in matched_stars:
            _, _, distances = matched_stars[jd]
            if len(distances) > max_matched_stars:
                max_jd = jd
                max_matched_stars = len(distances)

        # If there are no matched stars, use the image with the largest number of detected stars
        if max_matched_stars <= 2:
            max_jd = max(star_dict, key=lambda x: len(star_dict[x]))
            distances = [np.inf]

    # Take the FF file with the largest number of matched stars
    ff_name = ff_dict[max_jd]

    # Load the FF file
    ff = readFF(night_dir_path, ff_name)
    img_h, img_w = ff.avepixel.shape

    dpi = 200
    plt.figure(figsize=(ff.avepixel.shape[1] / dpi,
                        ff.avepixel.shape[0] / dpi),
               dpi=dpi)

    # Take the average pixel
    img = ff.avepixel

    # Slightly adjust the levels
    img = Image.adjustLevels(img, np.percentile(img, 1.0), 1.3,
                             np.percentile(img, 99.99))

    plt.imshow(img, cmap='gray', interpolation='nearest')

    legend_handles = []

    # Plot detected stars
    for img_star in star_dict[max_jd]:

        y, x, _, _ = img_star

        rect_side = 5 * match_radius
        square_patch = plt.Rectangle((x - rect_side/2, y - rect_side/2), rect_side, rect_side, color='g', \
            fill=False, label='Image stars')

        plt.gca().add_artist(square_patch)

    legend_handles.append(square_patch)

    # If there are matched stars, plot them
    if max_matched_stars > 2:

        # Take the solution with the largest number of matched stars
        image_stars, matched_catalog_stars, distances = matched_stars[max_jd]

        # Plot matched stars
        for img_star in image_stars:
            x, y, _, _ = img_star

            circle_patch = plt.Circle((y, x), radius=3*match_radius, color='y', fill=False, \
                label='Matched stars')

            plt.gca().add_artist(circle_patch)

        legend_handles.append(circle_patch)

        ### Plot match residuals ###

        # Compute predicted positions of matched image stars from the catalog
        x_predicted, y_predicted = raDecToXYPP(matched_catalog_stars[:, 0], \
            matched_catalog_stars[:, 1], max_jd, platepar)

        img_y, img_x, _, _ = image_stars.T

        delta_x = x_predicted - img_x
        delta_y = y_predicted - img_y

        # Compute image residual and angle of the error
        res_angle = np.arctan2(delta_y, delta_x)
        res_distance = np.sqrt(delta_x**2 + delta_y**2)

        # Calculate coordinates of the beginning of the residual line
        res_x_beg = img_x + 3 * match_radius * np.cos(res_angle)
        res_y_beg = img_y + 3 * match_radius * np.sin(res_angle)

        # Calculate coordinates of the end of the residual line
        res_x_end = img_x + 100 * np.cos(res_angle) * res_distance
        res_y_end = img_y + 100 * np.sin(res_angle) * res_distance

        # Plot the 100x residuals
        for i in range(len(x_predicted)):
            res_plot = plt.plot([res_x_beg[i], res_x_end[i]], [res_y_beg[i], res_y_end[i]], color='orange', \
                lw=0.5, label='100x residuals')

        legend_handles.append(res_plot[0])

        ### ###

    else:

        distances = [np.inf]

        # If there are no matched stars, plot large text in the middle of the screen
        plt.text(img_w / 2,
                 img_h / 2,
                 "NO MATCHED STARS!",
                 color='r',
                 alpha=0.5,
                 fontsize=20,
                 ha='center',
                 va='center')

    ### Plot positions of catalog stars to the limiting magnitude of the faintest matched star + 1 mag ###

    # Find the faintest magnitude among matched stars
    if max_matched_stars > 2:
        faintest_mag = np.max(matched_catalog_stars[:, 2]) + 1

    else:
        # If there are no matched stars, use the limiting magnitude from config
        faintest_mag = config.catalog_mag_limit + 1

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(max_jd)], [platepar.X_res / 2],
                                    [platepar.Y_res / 2], [1], platepar)

    RA_c = RA_c[0]
    dec_c = dec_c[0]

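    # Use the FOV diagonal as a generous search radius for catalog stars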
    fov_radius = np.hypot(*computeFOVSize(platepar))

    # Get stars from the catalog around the defined center in a given radius
    _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c,
                                         fov_radius, faintest_mag)
    ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

    # Compute image positions of all catalog stars that should be on the image
    x_catalog, y_catalog = raDecToXYPP(ra_catalog, dec_catalog, max_jd,
                                       platepar)

    # Filter all catalog stars outside the image
    temp_arr = np.c_[x_catalog, y_catalog, mag_catalog]
    temp_arr = temp_arr[temp_arr[:, 0] >= 0]
    temp_arr = temp_arr[temp_arr[:, 0] <= ff.avepixel.shape[1]]
    temp_arr = temp_arr[temp_arr[:, 1] >= 0]
    temp_arr = temp_arr[temp_arr[:, 1] <= ff.avepixel.shape[0]]
    x_catalog, y_catalog, mag_catalog = temp_arr.T

    # Plot catalog stars on the image
    cat_stars_handle = plt.scatter(x_catalog, y_catalog, c='none', marker='D', lw=1.0, alpha=0.4, \
        s=((4.0 + (faintest_mag - mag_catalog))/3.0)**(2*2.512), edgecolor='r', label='Catalog stars')

    legend_handles.append(cat_stars_handle)

    ### ###

    # Add info text in the corner
    info_text = ff_dict[max_jd] + '\n' \
        + "Matched stars within {:.1f} px radius: {:d}/{:d} \n".format(match_radius, max_matched_stars, \
            len(star_dict[max_jd])) \
        + "Median distance = {:.2f} px\n".format(np.median(distances)) \
        + "Catalog lim mag = {:.1f}".format(lim_mag)

    plt.text(10, 10, info_text, bbox=dict(facecolor='black', alpha=0.5), va='top', ha='left', fontsize=4, \
        color='w', family='monospace')

    legend = plt.legend(handles=legend_handles,
                        prop={'size': 4},
                        loc='upper right')
    legend.get_frame().set_facecolor('k')
    legend.get_frame().set_edgecolor('k')
    for txt in legend.get_texts():
        txt.set_color('w')

    ### Add FOV info (centre, size) ###

    # Mark FOV centre
    plt.scatter(platepar.X_res / 2,
                platepar.Y_res / 2,
                marker='+',
                s=20,
                c='r',
                zorder=4)

    # Compute FOV centre alt/az
    azim_centre, alt_centre = raDec2AltAz(max_jd, platepar.lon, platepar.lat,
                                          RA_c, dec_c)

    # Compute FOV size
    fov_h, fov_v = computeFOVSize(platepar)

    # Compute the rotation wrt. horizon
    rot_horizon = rotationWrtHorizon(platepar)

    fov_centre_text = "Azim  = {:6.2f}$\\degree$\n".format(azim_centre) \
                    + "Alt   = {:6.2f}$\\degree$\n".format(alt_centre) \
                    + "Rot h = {:6.2f}$\\degree$\n".format(rot_horizon) \
                    + "FOV h = {:6.2f}$\\degree$\n".format(fov_h) \
                    + "FOV v = {:6.2f}$\\degree$".format(fov_v) \

    plt.text(10, platepar.Y_res - 10, fov_centre_text, bbox=dict(facecolor='black', alpha=0.5), \
        va='bottom', ha='left', fontsize=4, color='w', family='monospace')

    ### ###

    # Plot RA/Dec gridlines #
    addEquatorialGrid(plt, platepar, max_jd)

    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, ff.avepixel.shape[1]])
    plt.ylim([ff.avepixel.shape[0], 0])

    # Remove the margins
    plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)

    plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_astrometry.jpg'), \
        bbox_inches='tight', pad_inches=0, dpi=dpi)

    if show_graphs:
        plt.show()

    else:
        plt.clf()
        plt.close()

    if max_matched_stars > 2:

        ### PHOTOMETRY FIT ###

        # If a flat is used, set the vignetting coeff to 0
        if config.use_flat:
            platepar.vignetting_coeff = 0.0

        # Extract intensities and magnitudes
        star_intensities = image_stars[:, 2]
        catalog_mags = matched_catalog_stars[:, 2]

        # Compute radius of every star from image centre
        radius_arr = np.hypot(image_stars[:, 0] - img_h / 2,
                              image_stars[:, 1] - img_w / 2)

        # Fit the photometry on automated star intensities (use the fixed vignetting coeff, use robust fit)
        photom_params, fit_stddev, fit_resid, star_intensities, radius_arr, catalog_mags = \
            photometryFitRobust(star_intensities, radius_arr, catalog_mags, \
            fixed_vignetting=platepar.vignetting_coeff)

        photom_offset, _ = photom_params

        ### ###

        ### PLOT PHOTOMETRY ###
        # Note: An almost identical code exists in RMS.Astrometry.SkyFit in the PlateTool.photometry function

        dpi = 130
        fig_p, (ax_p, ax_r) = plt.subplots(nrows=2, facecolor=None, figsize=(6.0, 7.0), dpi=dpi, \
            gridspec_kw={'height_ratios':[2, 1]})

        # Plot raw star intensities
        ax_p.scatter(-2.5 * np.log10(star_intensities),
                     catalog_mags,
                     s=5,
                     c='r',
                     alpha=0.5,
                     label="Raw")

        # If a flat is used, disregard the vignetting
        if not config.use_flat:

            # Plot intensities of image stars corrected for vignetting
            lsp_corr_arr = np.log10(correctVignetting(star_intensities, radius_arr, \
                platepar.vignetting_coeff))
            ax_p.scatter(-2.5*lsp_corr_arr, catalog_mags, s=5, c='b', alpha=0.5, \
                label="Corrected for vignetting")

        # Plot photometric offset from the platepar
        x_min, x_max = ax_p.get_xlim()
        y_min, y_max = ax_p.get_ylim()

        x_min_w = x_min - 3
        x_max_w = x_max + 3
        y_min_w = y_min - 3
        y_max_w = y_max + 3

        photometry_info = "Platepar: {:+.1f}*LSP + {:.2f} +/- {:.2f}".format(platepar.mag_0, \
            platepar.mag_lev, platepar.mag_lev_stddev) \
            + "\nVignetting coeff = {:.5f}".format(platepar.vignetting_coeff) \
            + "\nGamma = {:.2f}".format(platepar.gamma)

        # Plot the photometry calibration from the platepar
        logsum_arr = np.linspace(x_min_w, x_max_w, 10)
        ax_p.plot(logsum_arr, logsum_arr + platepar.mag_lev, label=photometry_info, linestyle='--', \
            color='k', alpha=0.5)

        # Plot the fitted photometry calibration
        fit_info = "Fit: {:+.1f}*LSP + {:.2f} +/- {:.2f}".format(
            -2.5, photom_offset, fit_stddev)
        ax_p.plot(logsum_arr,
                  logsum_arr + photom_offset,
                  label=fit_info,
                  linestyle='--',
                  color='b',
                  alpha=0.75)

        ax_p.legend()

        ax_p.set_ylabel("Catalog magnitude ({:s})".format(mag_band_str))
        ax_p.set_xlabel("Uncalibrated magnitude")

        # Set wider axis limits
        ax_p.set_xlim(x_min_w, x_max_w)
        ax_p.set_ylim(y_min_w, y_max_w)

        ax_p.invert_yaxis()
        ax_p.invert_xaxis()

        ax_p.grid()

        ### Plot photometry vs radius ###

        img_diagonal = np.hypot(img_h / 2, img_w / 2)

        # Plot photometry residuals (including vignetting)
        ax_r.scatter(radius_arr, fit_resid, c='b', alpha=0.75, s=5, zorder=3)

        # Plot a zero line
        ax_r.plot(np.linspace(0, img_diagonal, 10), np.zeros(10), linestyle='dashed', alpha=0.5, \
            color='k')

        # Plot only when no flat is used
        if not config.use_flat:

            #  Plot radius from centre vs. fit residual
            fit_resids_novignetting = catalog_mags - photomLine((np.array(star_intensities), \
                np.array(radius_arr)), photom_offset, 0.0)
            ax_r.scatter(radius_arr,
                         fit_resids_novignetting,
                         s=5,
                         c='r',
                         alpha=0.5,
                         zorder=3)

            px_sum_tmp = 1000
            radius_arr_tmp = np.linspace(0, img_diagonal, 50)

            # Plot vignetting loss curve
            vignetting_loss = 2.5*np.log10(px_sum_tmp) \
                - 2.5*np.log10(correctVignetting(px_sum_tmp, radius_arr_tmp, \
                    platepar.vignetting_coeff))

            ax_r.plot(radius_arr_tmp,
                      vignetting_loss,
                      linestyle='dotted',
                      alpha=0.5,
                      color='k')

        ax_r.grid()

        ax_r.set_ylabel("Fit residuals (mag)")
        ax_r.set_xlabel("Radius from centre (px)")

        ax_r.set_xlim(0, img_diagonal)

        ### ###

        plt.tight_layout()

        plt.savefig(os.path.join(night_dir_path,
                                 night_name + '_calib_report_photometry.png'),
                    dpi=150)

        if show_graphs:
            plt.show()

        else:
            plt.clf()
            plt.close()
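
A hedged usage sketch for the report generator above, assuming the same config loader as in the previous
example and a placeholder night directory:

from RMS.ConfigReader import loadConfigFromDirectory

night_dir = "/path/to/ArchivedFiles/XX0001_20240101_180000_000000"

config = loadConfigFromDirectory('.config', night_dir)

# Write the astrometry and photometry report images into the night directory
generateCalibrationReport(config, night_dir, match_radius=2.0, show_graphs=False)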
Exemplo n.º 24
0
def trackStack(dir_path,
               config,
               border=5,
               background_compensation=True,
               hide_plot=False):
    """ Generate a stack with aligned stars, so the sky appears static. The folder should have a
        platepars_all_recalibrated.json file.

    Arguments:
        dir_path: [str] Path to the directory with image files.
        config: [Config instance]

    Keyword arguments:
        border: [int] Border around the image to exclude (px).
        background_compensation: [bool] Normalize the background by applying a median filter to avepixel and
            use it as a flat field. Slows down the procedure and may sometimes introduce artifacts. True
            by default.
        hide_plot: [bool] Don't show the final stack on the screen. False by default.
    """

    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break

    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file is not None:
        with open(os.path.join(dir_path, platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print(
                'Loaded recalibrated platepars JSON file for the calibration report...'
            )

    ### ###

    # If the recalib platepars is not found, stop
    if recalibrated_platepars is None:
        print("The {:s} file was not found!".format(
            config.platepars_recalibrated_name))
        return False

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)

    # Take the platepar with the middle time as the reference one
    ff_found_list = []
    jd_list = []
    for ff_name_temp in recalibrated_platepars:

        if ff_name_temp in ff_list:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp,
                                 config.fps,
                                 ret_milliseconds=True)
            jd = date2JD(*dt)

            jd_list.append(jd)
            ff_found_list.append(ff_name_temp)

    if len(jd_list) < 2:
        print("Not more than 1 FF image!")
        return False

    # Take the FF file with the middle JD
    jd_list = np.array(jd_list)
    jd_middle = np.mean(jd_list)
    jd_mean_index = np.argmin(np.abs(jd_list - jd_middle))
    ff_mid = ff_found_list[jd_mean_index]

    # Load the middle platepar as the reference one
    pp_ref = Platepar()
    pp_ref.loadFromDict(recalibrated_platepars[ff_mid],
                        use_flat=config.use_flat)

    # Try loading the mask
    mask_path = None
    if os.path.exists(os.path.join(dir_path, config.mask_file)):
        mask_path = os.path.join(dir_path, config.mask_file)

    # Try loading the default mask
    elif os.path.exists(config.mask_file):
        mask_path = os.path.abspath(config.mask_file)

    # Load the mask if given
    mask = None
    if mask_path is not None:
        mask = loadMask(mask_path)
        print("Loaded mask:", mask_path)

    # If the shape of the mask doesn't fit, init an empty mask
    if mask is not None:
        if (mask.img.shape[0] != pp_ref.Y_res) or (mask.img.shape[1] !=
                                                   pp_ref.X_res):
            print("Mask is of wrong shape!")
            mask = None

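    # If no usable mask is available, fall back to an all-white (pass-through) mask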
    if mask is None:
        mask = MaskStructure(255 + np.zeros(
            (pp_ref.Y_res, pp_ref.X_res), dtype=np.uint8))

    # Compute the middle RA/Dec of the reference platepar
    _, ra_temp, dec_temp, _ = xyToRaDecPP([jd2Date(jd_middle)],
                                          [pp_ref.X_res / 2],
                                          [pp_ref.Y_res / 2], [1],
                                          pp_ref,
                                          extinction_correction=False)

    ra_mid, dec_mid = ra_temp[0], dec_temp[0]

    ### Go through all FF files and find the RA/Dec of the image corners to determine the stack image size ###

    # List of corners
    x_corns = [0, pp_ref.X_res, 0, pp_ref.X_res]
    y_corns = [0, 0, pp_ref.Y_res, pp_ref.Y_res]

    ra_list = []
    dec_list = []

    for ff_temp in ff_found_list:

        # Load the recalibrated platepar
        pp_temp = Platepar()
        pp_temp.loadFromDict(recalibrated_platepars[ff_temp],
                             use_flat=config.use_flat)

        for x_c, y_c in zip(x_corns, y_corns):
            _, ra_temp, dec_temp, _ = xyToRaDecPP(
                [getMiddleTimeFF(ff_temp, config.fps, ret_milliseconds=True)],
                [x_c], [y_c], [1],
                pp_ref,
                extinction_correction=False)
            ra_c, dec_c = ra_temp[0], dec_temp[0]

            ra_list.append(ra_c)
            dec_list.append(dec_c)

    # Compute the angular separation from the middle equatorial coordinates of the reference image to all
    #   RA/Dec corner coordinates
    ang_sep_list = []
    for ra_c, dec_c in zip(ra_list, dec_list):
        ang_sep = np.degrees(
            angularSeparation(np.radians(ra_mid), np.radians(dec_mid),
                              np.radians(ra_c), np.radians(dec_c)))

        ang_sep_list.append(ang_sep)

    # Find the maximum angular separation and compute the image size using the plate scale
    #   The image size will be resampled to 1/2 of the original size to avoid interpolation
    scale = 0.5
    ang_sep_max = np.max(ang_sep_list)
    img_size = int(scale * 2 * ang_sep_max * pp_ref.F_scale)

    ### ###

    # Create the stack platepar with no distortion and a large image size
    pp_stack = copy.deepcopy(pp_ref)
    pp_stack.resetDistortionParameters()
    pp_stack.X_res = img_size
    pp_stack.Y_res = img_size
    pp_stack.F_scale *= scale
    pp_stack.refraction = False

    # Init the image
    avg_stack_sum = np.zeros((img_size, img_size), dtype=float)
    avg_stack_count = np.zeros((img_size, img_size), dtype=int)
    max_deaveraged = np.zeros((img_size, img_size), dtype=np.uint8)

    # Load individual FFs and map them to the stack
    for i, ff_name in enumerate(ff_found_list):

        print("Stacking {:s}, {:.1f}% done".format(
            ff_name, 100 * i / len(ff_found_list)))

        # Read the FF file
        ff = readFF(dir_path, ff_name)

        # Load the recalibrated platepar
        pp_temp = Platepar()
        pp_temp.loadFromDict(recalibrated_platepars[ff_name],
                             use_flat=config.use_flat)

        # Make a list of X and Y image coordinates
        x_coords, y_coords = np.meshgrid(
            np.arange(border, pp_ref.X_res - border),
            np.arange(border, pp_ref.Y_res - border))
        x_coords = x_coords.ravel()
        y_coords = y_coords.ravel()

        # Map image pixels to sky
        jd_arr, ra_coords, dec_coords, _ = xyToRaDecPP(
            len(x_coords) *
            [getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)],
            x_coords,
            y_coords,
            len(x_coords) * [1],
            pp_temp,
            extinction_correction=False)

        # Map sky coordinates to stack image coordinates
        stack_x, stack_y = raDecToXYPP(ra_coords, dec_coords, jd_middle,
                                       pp_stack)

        # Round pixel coordinates
        stack_x = np.round(stack_x, decimals=0).astype(int)
        stack_y = np.round(stack_y, decimals=0).astype(int)

        # Cut the image to limits
        filter_arr = (stack_x > 0) & (stack_x < img_size) & (stack_y > 0) & (
            stack_y < img_size)
        x_coords = x_coords[filter_arr].astype(int)
        y_coords = y_coords[filter_arr].astype(int)
        stack_x = stack_x[filter_arr]
        stack_y = stack_y[filter_arr]

        # Apply the mask to maxpixel and avepixel
        maxpixel = copy.deepcopy(ff.maxpixel)
        maxpixel[mask.img == 0] = 0
        avepixel = copy.deepcopy(ff.avepixel)
        avepixel[mask.img == 0] = 0

        # Compute deaveraged maxpixel
        max_deavg = maxpixel - avepixel

        # Normalize the background brightness by applying a large-kernel median filter to avepixel
        if background_compensation:

            # # Apply a median filter to the avepixel to get an estimate of the background brightness
            # avepixel_median = scipy.ndimage.median_filter(ff.avepixel, size=101)
            avepixel_median = cv2.medianBlur(ff.avepixel, 301)

            # Make sure to avoid zero division
            avepixel_median[avepixel_median < 1] = 1

            # Normalize the avepixel by dividing out the background brightness
            avepixel = avepixel.astype(float)
            avepixel /= avepixel_median
            avepixel *= 50  # Normalize to a good background value, which is usually 50
            avepixel = np.clip(avepixel, 0, 255)
            avepixel = avepixel.astype(np.uint8)

            # plt.imshow(avepixel, cmap='gray', vmin=0, vmax=255)
            # plt.show()

        # Add the average pixel to the sum
        avg_stack_sum[stack_y, stack_x] += avepixel[y_coords, x_coords]

        # Increment the counter image where the avepixel is not zero
        ones_img = np.ones_like(avepixel)
        ones_img[avepixel == 0] = 0
        avg_stack_count[stack_y, stack_x] += ones_img[y_coords, x_coords]

        # Set pixel values to the stack, only take the max values
        max_deaveraged[stack_y, stack_x] = np.max(np.dstack(
            [max_deaveraged[stack_y, stack_x], max_deavg[y_coords, x_coords]]),
                                                  axis=2)

    # Compute the blended avepixel background
    stack_img = avg_stack_sum
    stack_img[avg_stack_count > 0] /= avg_stack_count[avg_stack_count > 0]
    stack_img += max_deaveraged
    stack_img = np.clip(stack_img, 0, 255)
    stack_img = stack_img.astype(np.uint8)

    # Crop image
    non_empty_columns = np.where(stack_img.max(axis=0) > 0)[0]
    non_empty_rows = np.where(stack_img.max(axis=1) > 0)[0]
    crop_box = (np.min(non_empty_rows), np.max(non_empty_rows),
                np.min(non_empty_columns), np.max(non_empty_columns))
    stack_img = stack_img[crop_box[0]:crop_box[1] + 1,
                          crop_box[2]:crop_box[3] + 1]

    ### Plot and save the stack ###

    dpi = 200
    plt.figure(figsize=(stack_img.shape[1] / dpi, stack_img.shape[0] / dpi),
               dpi=dpi)

    plt.imshow(stack_img,
               cmap='gray',
               vmin=0,
               vmax=256,
               interpolation='nearest')

    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, stack_img.shape[1]])
    plt.ylim([stack_img.shape[0], 0])

    # Remove the margins (top and right are set to 0.9999, as setting them to 1.0 makes the image blank in
    #   some matplotlib versions)
    plt.subplots_adjust(left=0,
                        bottom=0,
                        right=0.9999,
                        top=0.9999,
                        wspace=0,
                        hspace=0)

    file_name = os.path.join(dir_path,
                             os.path.basename(dir_path) + "_track_stack.jpg")
    plt.savefig(file_name, bbox_inches='tight', pad_inches=0, dpi=dpi)

    ### ###

    if hide_plot is False:
        plt.show()
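
Finally, a hedged sketch of invoking trackStack() on a processed night folder, under the same assumptions
about the config loader as in the examples above and with a placeholder directory path:

from RMS.ConfigReader import loadConfigFromDirectory

night_dir = "/path/to/CapturedFiles/XX0001_20240101_180000_000000"

config = loadConfigFromDirectory('.config', night_dir)

# Produce a star-aligned stack image (<night_dir>_track_stack.jpg) in the night folder
trackStack(night_dir, config, border=5, background_compensation=True, hide_plot=True)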