Example #1
def computeFOVSize(platepar):
    """ Computes the size of the FOV in deg from the given platepar.

    Arguments:
        platepar: [Platepar instance]
    Return:
        fov_h: [float] Horizontal FOV in degrees.
        fov_v: [float] Vertical FOV in degrees.
    """

    # Construct points on the middle of every side of the image
    time_data = np.array(4 * [jd2Date(platepar.JD)])
    x_data = np.array(
        [0, platepar.X_res, platepar.X_res / 2, platepar.X_res / 2])
    y_data = np.array(
        [platepar.Y_res / 2, platepar.Y_res / 2, 0, platepar.Y_res])
    level_data = np.ones(4)

    # Compute RA/Dec of the points
    _, ra_data, dec_data, _ = xyToRaDecPP(time_data, x_data, y_data,
                                          level_data, platepar)

    ra1, ra2, ra3, ra4 = ra_data
    dec1, dec2, dec3, dec4 = dec_data

    # Compute horizontal FOV
    fov_h = np.degrees(angularSeparation(np.radians(ra1), np.radians(dec1), np.radians(ra2), \
        np.radians(dec2)))

    # Compute vertical FOV
    fov_v = np.degrees(angularSeparation(np.radians(ra3), np.radians(dec3), np.radians(ra4), \
        np.radians(dec4)))

    return fov_h, fov_v
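
angularSeparation itself is not shown in these examples. As a reference for how the calls above behave, here is a minimal sketch assuming the standard spherical law of cosines (the real implementation may use a haversine form for better numerical stability at small angles); all arguments are in radians:

import numpy as np

def angularSeparation(ra1, dec1, ra2, dec2):
    # Great-circle separation via the spherical law of cosines (radians in, radians out)
    return np.arccos(np.sin(dec1)*np.sin(dec2) \
        + np.cos(dec1)*np.cos(dec2)*np.cos(ra2 - ra1))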
Example #2
        def _calcSkyResidualsDistortion(params, platepar, jd, catalog_stars, img_stars, dimension):
            """ Calculates the differences between the stars on the image and catalog stars in sky
                coordinates with the given astrometrical solution.
            Arguments:
                ...
                dimension: [str] 'x' for the X polynomial fit, 'y' for the Y polynomial fit, or 'radial'
                    for the radial distortion fit (the radial parameters are stored in x_poly_fwd)
            """

            pp_copy = copy.deepcopy(platepar)

            if (dimension == 'x') or (dimension == 'radial'):
                pp_copy.x_poly_fwd = params

            else:
                pp_copy.y_poly_fwd = params


            img_x, img_y, _ = img_stars.T

            # Compute the sky coordinates of the image stars
            ra_array, dec_array = getPairedStarsSkyPositions(img_x, img_y, jd, pp_copy)

            ra_catalog, dec_catalog, _ = catalog_stars.T

            # Compute the sum of squared angular separations
            separation_sum = np.sum(angularSeparation(np.radians(ra_array), np.radians(dec_array), \
                np.radians(ra_catalog), np.radians(dec_catalog))**2)

            return separation_sum
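
Since _calcSkyResidualsDistortion returns a single scalar (the sum of squared separations), it plugs straight into a general-purpose minimizer. A hypothetical sketch of such a fitting call; the actual driver code is not shown in this example, and the use of scipy.optimize.minimize with Nelder-Mead here is an assumption:

import scipy.optimize

# Hypothetical fit of the forward X polynomial; platepar, jd, catalog_stars and
# img_stars are assumed to be prepared by the surrounding (not shown) code
res = scipy.optimize.minimize(_calcSkyResidualsDistortion, platepar.x_poly_fwd, \
    args=(platepar, jd, catalog_stars, img_stars, 'x'), method='Nelder-Mead')
platepar.x_poly_fwd = res.x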
Example #3
        def _calcSkyResidualsAstro(params, platepar, jd, catalog_stars, img_stars):
            """ Calculates the differences between the stars on the image and catalog stars in sky
                coordinates with the given astrometrical solution.
            """

            # Extract fitting parameters
            ra_ref, dec_ref, pos_angle_ref, F_scale = params

            pp_copy = copy.deepcopy(platepar)

            pp_copy.RA_d = ra_ref
            pp_copy.dec_d = dec_ref
            pp_copy.pos_angle_ref = pos_angle_ref
            pp_copy.F_scale = F_scale

            img_x, img_y, _ = img_stars.T

            # Compute the sky coordinates of the image stars
            ra_array, dec_array = getPairedStarsSkyPositions(img_x, img_y, jd, pp_copy)

            ra_catalog, dec_catalog, _ = catalog_stars.T

            # Compute the sum of squared angular separations
            separation_sum = np.sum(angularSeparation(np.radians(ra_array), np.radians(dec_array), \
                np.radians(ra_catalog), np.radians(dec_catalog))**2)


            return separation_sum
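
The astrometric variant fits just four scalars, so the initial guess can be read straight off the current platepar. A hypothetical sketch, again assuming scipy.optimize.minimize as the driver (the real fitting code is not shown here):

import scipy.optimize

# Initial guess from the current astrometric solution
p0 = [platepar.RA_d, platepar.dec_d, platepar.pos_angle_ref, platepar.F_scale]

res = scipy.optimize.minimize(_calcSkyResidualsAstro, p0, \
    args=(platepar, jd, catalog_stars, img_stars), method='Nelder-Mead')
platepar.RA_d, platepar.dec_d, platepar.pos_angle_ref, platepar.F_scale = res.x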
Example #4
def computeFOVSize(platepar):
    """ Computes the size of the FOV in deg from the given platepar.

    Arguments:
        platepar: [Platepar instance]
    Return:
        fov_h: [float] Horizontal FOV in degrees.
        fov_v: [float] Vertical FOV in degrees.
    """

    # Construct points on the middle of every side of the image, plus the centre
    x_data = np.array([
        0, platepar.X_res, platepar.X_res / 2, platepar.X_res / 2,
        platepar.X_res / 2.0
    ])
    y_data = np.array([
        platepar.Y_res / 2, platepar.Y_res / 2, 0, platepar.Y_res,
        platepar.Y_res / 2.0
    ])
    time_data = np.array(len(x_data) * [jd2Date(platepar.JD)])
    level_data = np.ones(len(x_data))

    # Compute RA/Dec of the points
    _, ra_data, dec_data, _ = xyToRaDecPP(time_data, x_data, y_data, level_data, platepar, \
        extinction_correction=False)

    ra1, ra2, ra3, ra4, ra_mid = ra_data
    dec1, dec2, dec3, dec4, dec_mid = dec_data

    # Compute horizontal FOV
    fov_hl = np.degrees(angularSeparation(np.radians(ra1), np.radians(dec1), np.radians(ra_mid), \
        np.radians(dec_mid)))
    fov_hr = np.degrees(angularSeparation(np.radians(ra2), np.radians(dec2), np.radians(ra_mid), \
        np.radians(dec_mid)))
    fov_h = fov_hl + fov_hr

    # Compute vertical FOV
    fov_vu = np.degrees(angularSeparation(np.radians(ra3), np.radians(dec3), np.radians(ra_mid), \
        np.radians(dec_mid)))
    fov_vd = np.degrees(angularSeparation(np.radians(ra4), np.radians(dec4), np.radians(ra_mid), \
        np.radians(dec_mid)))
    fov_v = fov_vu + fov_vd

    return fov_h, fov_v
Example #5
def getFOVSelectionRadius(platepar):
    """ Get a radius around the centre of the FOV which includes the FOV, but excludes stars outside the FOV.
    Arguments:
        platepar: [Platepar instance]

    Return:
        fov_radius: [float] Radius in degrees.
    """

    # Construct points on the corners of the image, plus the centre
    x_data = np.array(
        [0, platepar.X_res, platepar.X_res, 0, platepar.X_res / 2.0])
    y_data = np.array(
        [0, platepar.Y_res, 0, platepar.Y_res, platepar.Y_res / 2.0])
    time_data = np.array(len(x_data) * [jd2Date(platepar.JD)])
    level_data = np.ones(len(x_data))

    # Compute RA/Dec of the points
    _, ra_data, dec_data, _ = xyToRaDecPP(time_data, x_data, y_data, level_data, platepar, \
        extinction_correction=False)

    ra1, ra2, ra3, ra4, ra_mid = ra_data
    dec1, dec2, dec3, dec4, dec_mid = dec_data

    # Angular separation between the centre of the FOV and corners
    ul_sep = np.degrees(
        angularSeparation(np.radians(ra1), np.radians(dec1),
                          np.radians(ra_mid), np.radians(dec_mid)))
    lr_sep = np.degrees(
        angularSeparation(np.radians(ra2), np.radians(dec2),
                          np.radians(ra_mid), np.radians(dec_mid)))
    ur_sep = np.degrees(
        angularSeparation(np.radians(ra3), np.radians(dec3),
                          np.radians(ra_mid), np.radians(dec_mid)))
    ll_sep = np.degrees(
        angularSeparation(np.radians(ra4), np.radians(dec4),
                          np.radians(ra_mid), np.radians(dec_mid)))

    # Take the average radius
    fov_radius = np.mean([ul_sep, lr_sep, ur_sep, ll_sep])

    return fov_radius
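
The returned radius is meant for pre-filtering a star catalog around the FOV centre. A minimal sketch of that use; treating catalog_stars as an (N, 3) array of RA, Dec and magnitude, and ra_mid/dec_mid as the FOV centre computed as in the function body, are assumptions:

# Hypothetical catalog pre-filter using the FOV selection radius
fov_radius = getFOVSelectionRadius(platepar)

sep = np.degrees(angularSeparation(np.radians(catalog_stars[:, 0]), \
    np.radians(catalog_stars[:, 1]), np.radians(ra_mid), np.radians(dec_mid)))

catalog_stars_fov = catalog_stars[sep < fov_radius]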
Example #6
    def angularSeparationFromGC(self, ra, dec):
        """ Compute the angular separation from the given coordinaes to the great circle. 
    
        Arguments;
            ra: [float] RA (deg).
            dec: [float] Declination (deg).

        Return:
            ang_separation: [float] Radiant dsitance (deg).
        """

        ang_separation = np.degrees(abs(np.pi/2 - angularSeparation(np.radians(ra), \
                np.radians(dec), np.radians(self.normal_ra), np.radians(self.normal_dec))))

        return ang_separation
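
The pi/2 offset works because every point of a great circle lies exactly 90 deg from the circle's pole, so a point's distance to the circle is |90 deg - (distance to the pole)|. A quick standalone check of that identity, using the law-of-cosines angularSeparation sketched earlier:

# Pole at the north celestial pole, so its great circle is the celestial equator;
# a point at Dec = +30 deg is 60 deg from the pole, i.e. 30 deg from the equator
sep_from_pole = angularSeparation(0.0, np.radians(30), 0.0, np.radians(90))
print(np.degrees(abs(np.pi/2 - sep_from_pole)))  # ~30.0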
Example #7
def computeFlux(config, dir_path, ftpdetectinfo_path, shower_code, dt_beg, dt_end, timebin, mass_index, \
    timebin_intdt=0.25, ht_std_percent=5.0, mask=None):
    """ Compute flux using measurements in the given FTPdetectinfo file. 
    
    Arguments:
        config: [Config instance]
        dir_path: [str] Path to the working directory.
        ftpdetectinfo_path: [str] Path to a FTPdetectinfo file.
        shower_code: [str] IAU shower code (e.g. ETA, PER, SDA).
        dt_beg: [Datetime] Datetime object of the observation beginning.
        dt_end: [Datetime] Datetime object of the observation end.
        timebin: [float] Time bin in hours.
        mass_index: [float] Cumulative mass index of the shower.

    Keyword arguments:
        timebin_intdt: [float] Time step for computing the integrated collection area in hours. 15 minutes by
            default. If smaller than that, only one collection area will be computed.
        ht_std_percent: [float] Meteor height standard deviation in percent.
        mask: [Mask object] Mask object, None by default.

    """


    # Get a list of files in the night folder
    file_list = sorted(os.listdir(dir_path))



    # Find and load the platepar file
    if config.platepar_name in file_list:

        # Load the platepar
        platepar = Platepar.Platepar()
        platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat)

    else:
        print("Cannot find the platepar file in the night directory: ", config.platepar_name)
        return None




    # # Load FTPdetectinfos
    # meteor_data = []
    # for ftpdetectinfo_path in ftpdetectinfo_list:

    #     if not os.path.isfile(ftpdetectinfo_path):
    #         print('No such file:', ftpdetectinfo_path)
    #         continue

    #     meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))


    # Load meteor data from the FTPdetectinfo file
    meteor_data = readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    if not len(meteor_data):
        print("No meteors in the FTPdetectinfo file!")
        return None




    # Find and load recalibrated platepars
    if config.platepars_recalibrated_name in file_list:
        with open(os.path.join(dir_path, config.platepars_recalibrated_name)) as f:
            recalibrated_platepars_dict = json.load(f)

            print("Recalibrated platepars loaded!")

    # If the file is not available, apply the recalibration procedure
    else:

        print("Recalibrated platepar file not available!")
        print("Recalibrating...")

        recalibrated_platepars_dict = applyRecalibrate(ftpdetectinfo_path, config)


    # Convert the dictionary of recalibrated platepars to a dictionary of Platepar objects
    recalibrated_platepars = {}
    for ff_name in recalibrated_platepars_dict:
        pp = Platepar.Platepar()
        pp.loadFromDict(recalibrated_platepars_dict[ff_name], use_flat=config.use_flat)

        recalibrated_platepars[ff_name] = pp


    # Compute the nightly mean of the photometric zero point
    mag_lev_nightly_mean = np.mean([recalibrated_platepars[ff_name].mag_lev \
                                        for ff_name in recalibrated_platepars])




    # Locate and load the mask file
    if config.mask_file in file_list:
        mask_path = os.path.join(dir_path, config.mask_file)
        mask = loadMask(mask_path)
        print("Using mask:", mask_path)

    else:
        print("No mask used!")
        mask = None



    # Compute the population index using the classical equation
    population_index = 10**((mass_index - 1)/2.5)
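    # e.g. a mass index of 2.0 gives a population index of 10**((2.0 - 1)/2.5) = 10**0.4 ~ 2.512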


    ### SENSOR CHARACTERIZATION ###
    # Computes FWHM of stars and noise profile of the sensor
    
    # File which stores the sensor characterization profile
    sensor_characterization_file = "flux_sensor_characterization.json"
    sensor_characterization_path = os.path.join(dir_path, sensor_characterization_file)

    # Load sensor characterization file if present, so the procedure can be skipped
    if os.path.isfile(sensor_characterization_path):

        # Load the JSON file
        with open(sensor_characterization_path) as f:
            
            data = " ".join(f.readlines())
            sensor_data = json.loads(data)

            # Remove the info entry
            if '-1' in sensor_data:
                del sensor_data['-1']

    else:

        # Run sensor characterization
        sensor_data = sensorCharacterization(config, dir_path)

        # Save to file for later use
        with open(sensor_characterization_path, 'w') as f:

            # Add an explanation of what each entry means
            sensor_data_save = dict(sensor_data)
            sensor_data_save['-1'] = {"FF file name": ['median star FWHM', 'median background noise stddev']}

            # Convert the sensor characterization data to JSON
            out_str = json.dumps(sensor_data_save, indent=4, sort_keys=True)

            # Save to disk
            f.write(out_str)



    # Compute the nightly mean FWHM and noise stddev
    fwhm_nightly_mean = np.mean([sensor_data[key][0] for key in sensor_data])
    stddev_nightly_mean = np.mean([sensor_data[key][1] for key in sensor_data])

    ### ###



    # Perform shower association
    associations, shower_counts = showerAssociation(config, [ftpdetectinfo_path], shower_code=shower_code, \
        show_plot=False, save_plot=False, plot_activity=False)

    # If there are no shower associations, return nothing
    if not associations:
        print("No meteors associated with the shower!")
        return None


    # Print the list of used meteors
    peak_mags = []
    for key in associations:
        meteor, shower = associations[key]

        if shower is not None:

            # Compute peak magnitude
            peak_mag = np.min(meteor.mag_array)

            peak_mags.append(peak_mag)

            print("{:.6f}, {:3s}, {:+.2f}".format(meteor.jdt_ref, shower.name, peak_mag))

    print()


    # Init the flux configuration
    flux_config = FluxConfig()



    ### COMPUTE COLLECTION AREAS ###

    # Make a file name to save the raw collection areas
    col_areas_file_name = generateColAreaJSONFileName(platepar.station_code, flux_config.side_points, \
        flux_config.ht_min, flux_config.ht_max, flux_config.dht, flux_config.elev_limit)

    # Check if the collection area file exists. If yes, load the data. If not, generate collection areas
    if col_areas_file_name in os.listdir(dir_path):
        col_areas_ht = loadRawCollectionAreas(dir_path, col_areas_file_name)

        print("Loaded collection areas from:", col_areas_file_name)

    else:

        # Compute the collecting area segments per height
        col_areas_ht = collectingArea(platepar, mask=mask, side_points=flux_config.side_points, \
            ht_min=flux_config.ht_min, ht_max=flux_config.ht_max, dht=flux_config.dht, \
            elev_limit=flux_config.elev_limit)

        # Save the collection areas to file
        saveRawCollectionAreas(dir_path, col_areas_file_name, col_areas_ht)

        print("Saved raw collection areas to:", col_areas_file_name)


    ### ###



    # Compute the pointing of the middle of the FOV
    _, ra_mid, dec_mid, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], [platepar.X_res/2], [platepar.Y_res/2], \
        [1], platepar, extinction_correction=False)
    azim_mid, elev_mid = raDec2AltAz(ra_mid[0], dec_mid[0], J2000_JD.days, platepar.lat, platepar.lon)

    # Compute the range to the middle point
    ref_ht = 100000
    r_mid, _, _, _ = xyHt2Geo(platepar, platepar.X_res/2, platepar.Y_res/2, ref_ht, indicate_limit=True, \
        elev_limit=flux_config.elev_limit)


    ### Compute the average angular velocity to which the flux variation throughout the night will be normalized 
    #   The ang vel is of the middle of the FOV in the middle of observations

    # Middle Julian date of the night
    jd_night_mid = (datetime2JD(dt_beg) + datetime2JD(dt_end))/2

    # Compute the apparent radiant
    ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_night_mid)

    # Compute the radiant elevation
    radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_night_mid, platepar.lat, platepar.lon)

    # Compute the angular velocity in the middle of the FOV
    rad_dist_night_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), 
                np.radians(azim_mid), np.radians(elev_mid))
    ang_vel_night_mid = v_init*np.sin(rad_dist_night_mid)/r_mid

    ###




    # Compute the average limiting magnitude to which all flux will be normalized

    # Standard deviation of star PSF, nightly mean (px)
    star_stddev = fwhm_nightly_mean/2.355
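    # (2.355 = 2*sqrt(2*ln(2)), the standard Gaussian FWHM-to-sigma conversion)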

    # Compute the theoretical stellar limiting magnitude (nightly average)
    star_sum = 2*np.pi*(config.k1_det*stddev_nightly_mean + config.j1_det)*star_stddev**2
    lm_s_nightly_mean = -2.5*np.log10(star_sum) + mag_lev_nightly_mean

    # A meteor needs to be visible on at least 4 frames, thus it needs to have at least 4x the mass to produce
    #   that amount of light. 1 magnitude difference scales as -0.4 of log of mass, thus:
    frame_min_loss = np.log10(config.line_minimum_frame_range_det)/(-0.4)
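    # (e.g. for a 4-frame minimum: log10(4)/(-0.4) ~ -1.5, which brightens the limiting
    #   magnitude by about 1.5 mag)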

    lm_s_nightly_mean += frame_min_loss

    # Compute the apparent meteor limiting magnitude
    lm_m_nightly_mean = lm_s_nightly_mean - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
        np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_night_mid)/(config.fps*r_mid*fwhm_nightly_mean)) \
        )

    #
    print("Stellar lim mag using detection thresholds:", lm_s_nightly_mean)
    print("Apparent meteor limiting magnitude:", lm_m_nightly_mean)


    ### Apply time-dependent corrections ###

    sol_data = []
    flux_lm_6_5_data = []

    # Go through all time bins within the observation period
    total_time_hrs = (dt_end - dt_beg).total_seconds()/3600
    nbins = int(np.ceil(total_time_hrs/timebin))
    for t_bin in range(nbins):

        # Compute bin start and end time
        bin_dt_beg = dt_beg + datetime.timedelta(hours=timebin*t_bin)
        bin_dt_end = bin_dt_beg + datetime.timedelta(hours=timebin)

        if bin_dt_end > dt_end:
            bin_dt_end = dt_end


        # Compute bin duration in hours
        bin_hours = (bin_dt_end - bin_dt_beg).total_seconds()/3600

        # Convert to Julian date
        bin_jd_beg = datetime2JD(bin_dt_beg)
        bin_jd_end = datetime2JD(bin_dt_end)

        # Only select meteors in this bin
        bin_meteors = []
        bin_ffs = []
        for key in associations:
            meteor, shower = associations[key]

            if shower is not None:
                if (shower.name == shower_code) and (meteor.jdt_ref > bin_jd_beg) \
                    and (meteor.jdt_ref <= bin_jd_end):
                    
                    bin_meteors.append([meteor, shower])
                    bin_ffs.append(meteor.ff_name)



        if len(bin_meteors) > 0:


            ### Compute the radiant elevation at the middle of the time bin ###

            jd_mean = (bin_jd_beg + bin_jd_end)/2

            # Compute the mean solar longitude
            sol_mean = np.degrees(jd2SolLonSteyaert(jd_mean))

            print()
            print()
            print("-- Bin information ---")
            print("Bin beg:", bin_dt_beg)
            print("Bin end:", bin_dt_end)
            print("Sol mid: {:.5f}".format(sol_mean))
            print("Meteors:", len(bin_meteors))

            # Compute the apparent radiant
            ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_mean)

            # Compute the mean meteor height
            meteor_ht_beg = heightModel(v_init, ht_type='beg')
            meteor_ht_end = heightModel(v_init, ht_type='end')
            meteor_ht = (meteor_ht_beg + meteor_ht_end)/2

            # Compute the standard deviation of the height
            meteor_ht_std = meteor_ht*ht_std_percent/100.0

            # Init the Gaussian height distribution
            meteor_ht_gauss = scipy.stats.norm(meteor_ht, meteor_ht_std)


            # Compute the radiant elevation
            radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_mean, platepar.lat, platepar.lon)

            ### ###


            ### Weight collection area by meteor height distribution ###

            # Determine weights for each height
            weight_sum = 0
            weights = {}
            for ht in col_areas_ht:
                wt = meteor_ht_gauss.pdf(float(ht))
                weight_sum += wt
                weights[ht] = wt

            # Normalize the weights so that the sum is 1
            for ht in weights:
                weights[ht] /= weight_sum

            ### ###


            # Compute the angular velocity in the middle of the FOV
            rad_dist_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), 
                        np.radians(azim_mid), np.radians(elev_mid))
            ang_vel_mid = v_init*np.sin(rad_dist_mid)/r_mid



            ### Compute the limiting magnitude ###

            # Compute the mean star FWHM in the given bin
            fwhm_bin_mean = np.mean([sensor_data[ff_name][0] for ff_name in bin_ffs])

            # Compute the mean background stddev in the given bin
            stddev_bin_mean = np.mean([sensor_data[ff_name][1] for ff_name in bin_ffs])

            # Compute the mean photometric zero point in the given bin
            mag_lev_bin_mean = np.mean([recalibrated_platepars[ff_name].mag_lev for ff_name in bin_ffs if ff_name in recalibrated_platepars])



            # Standard deviation of star PSF, bin mean (px)
            star_stddev = fwhm_bin_mean/2.355

            # Compute the theoretical stellar limiting magnitude (bin average)
            star_sum = 2*np.pi*(config.k1_det*stddev_bin_mean + config.j1_det)*star_stddev**2
            lm_s = -2.5*np.log10(star_sum) + mag_lev_bin_mean
            lm_s += frame_min_loss

            # Compute the apparent meteor limiting magnitude
            lm_m = lm_s - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
                    np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_mid)/(config.fps*r_mid*fwhm_bin_mean))\
                    )

            ### ###


            # Final collection area value (height-weighted)
            collection_area = 0

            # Go through all heights and segment blocks
            for ht in col_areas_ht:
                for img_coords in col_areas_ht[ht]:

                    x_mean, y_mean = img_coords

                    # Unpack precomputed values
                    area, azim, elev, sensitivity_ratio, r = col_areas_ht[ht][img_coords]


                    # Compute the angular velocity (rad/s) in the middle of this block
                    rad_dist = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), 
                        np.radians(azim), np.radians(elev))
                    ang_vel = v_init*np.sin(rad_dist)/r


                    # Compute the range correction
                    range_correction = (1e5/r)**2

                    #ang_vel_correction = ang_vel/ang_vel_mid
                    # Compute angular velocity correction relative to the nightly mean
                    ang_vel_correction = ang_vel/ang_vel_night_mid


                    ### Apply corrections

                    correction_ratio = 1.0
                    
                    # Correct the area for vignetting and extinction
                    correction_ratio *= sensitivity_ratio

                    # Correct for the range
                    correction_ratio *= range_correction

                    # Correct for the radiant elevation
                    correction_ratio *= np.sin(np.radians(radiant_elev))

                    # Correct for angular velocity
                    correction_ratio *= ang_vel_correction


                    # Add the collection area to the final estimate with the height weight
                    #   Raise the correction to the mass index power
                    collection_area += weights[ht]*area*correction_ratio**(mass_index - 1)



            # Compute the flux at the bin LM (meteors/1000km^2/h)
            flux = 1e9*len(bin_meteors)/collection_area/bin_hours
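            # (collection_area is in m^2, as the km^2 print below divides it by 1e6;
            #   1000 km^2 = 1e9 m^2, hence the 1e9 factor)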

            # Compute the flux scaled to the nightly mean LM
            flux_lm_nightly_mean = flux*population_index**(lm_m_nightly_mean - lm_m)

            # Compute the flux scaled to +6.5M
            flux_lm_6_5 = flux*population_index**(6.5 - lm_m)



            print("-- Sensor information ---")
            print("Star FWHM:  {:5.2f} px".format(fwhm_bin_mean))
            print("Bkg stddev: {:4.1f} ADU".format(stddev_bin_mean))
            print("Photom ZP:  {:+6.2f} mag".format(mag_lev_bin_mean))
            print("Stellar LM: {:+.2f} mag".format(lm_s))
            print("-- Flux ---")
            print("Col area: {:d} km^2".format(int(collection_area/1e6)))
            print("Ang vel:  {:.2f} deg/s".format(np.degrees(ang_vel_mid)))
            print("LM app:   {:+.2f} mag".format(lm_m))
            print("Flux:     {:.2f} meteors/1000km^2/h".format(flux))
            print("to {:+.2f}: {:.2f} meteors/1000km^2/h".format(lm_m_nightly_mean, flux_lm_nightly_mean))
            print("to +6.50: {:.2f} meteors/1000km^2/h".format(flux_lm_6_5))


            sol_data.append(sol_mean)
            flux_lm_6_5_data.append(flux_lm_6_5)


    # Print the results
    print("Solar longitude, Flux at LM +6.5:")
    for sol, flux_lm_6_5 in zip(sol_data, flux_lm_6_5_data):
        print("{:9.5f}, {:8.4f}".format(sol, flux_lm_6_5))

    # Plot a histogram of peak magnitudes
    plt.hist(peak_mags, cumulative=True)
    plt.show()
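
A hypothetical invocation of computeFlux; all values below are illustrative placeholders (config, dir_path and ftpdetectinfo_path must point at real data):

import datetime

dt_beg = datetime.datetime(2020, 8, 12, 20, 0, 0)
dt_end = datetime.datetime(2020, 8, 13, 4, 0, 0)

# Perseids, 1-hour bins, an assumed cumulative mass index of 2.0
computeFlux(config, dir_path, ftpdetectinfo_path, "PER", dt_beg, dt_end, \
    timebin=1.0, mass_index=2.0)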
Example #8
def recalibrateIndividualFFsAndApplyAstrometry(dir_path, ftpdetectinfo_path, calstars_list, config, platepar,
    generate_plot=True):
    """ Recalibrate FF files with detections and apply the recalibrated platepar to those detections. 

    Arguments:
        dir_path: [str] Path where the FTPdetectinfo file is.
        ftpdetectinfo_path: [str] Path to the FTPdetectinfo file.
        calstars_list: [list] A list of entries [[ff_name, star_coordinates], ...].
        config: [Config instance]
        platepar: [Platepar instance] Initial platepar.

    Keyword arguments:
        generate_plot: [bool] Generate the calibration variation plot. True by default.

    Return:
        recalibrated_platepars: [dict] A dictionary where the keys are FF file names and values are 
            recalibrated platepar instances for every FF file.
    """

    # Use a copy of the config file
    config = copy.deepcopy(config)

    # If the given file does not exist, return nothing
    if not os.path.isfile(ftpdetectinfo_path):
        print('ERROR! The FTPdetectinfo file does not exist: {:s}'.format(ftpdetectinfo_path))
        print('    The recalibration on every file was not done!')

        return {}


    # Read the FTPdetectinfo data
    cam_code, fps, meteor_list = FTPdetectinfo.readFTPdetectinfo(*os.path.split(ftpdetectinfo_path), \
        ret_input_format=True)

    # Convert the list of stars to a per FF name dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}


    ### Add neighboring FF files for more robust photometry estimation ###

    ff_processing_list = []

    # Make a list of sorted FF files in CALSTARS
    calstars_ffs = sorted([ff_file for ff_file in calstars])

    # Go through the list of FF files with detections and add neighboring FFs
    for meteor_entry in meteor_list:

        ff_name = meteor_entry[0]

        if ff_name in calstars_ffs:

            # Find the index of the given FF file in the list of calstars
            ff_indx = calstars_ffs.index(ff_name)

            # Add neighbours to the processing list
            for k in range(-(RECALIBRATE_NEIGHBOURHOOD_SIZE//2), RECALIBRATE_NEIGHBOURHOOD_SIZE//2 + 1):

                k_indx = ff_indx + k

                if (k_indx >= 0) and (k_indx < len(calstars_ffs)):

                    ff_name_tmp = calstars_ffs[k_indx]
                    if ff_name_tmp not in ff_processing_list:
                        ff_processing_list.append(ff_name_tmp)


    # Sort the processing list of FF files
    ff_processing_list = sorted(ff_processing_list)


    ### ###


    # Globally increase catalog limiting magnitude
    config.catalog_mag_limit += 1

    # Load catalog stars (overwrite the mag band ratios if specific catalog is used)
    star_catalog_status = StarCatalog.readStarCatalog(config.star_catalog_path,\
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)

    if not star_catalog_status:
        print("Could not load the star catalog!")
        print(os.path.join(config.star_catalog_path, config.star_catalog_file))
        return {}

    catalog_stars, _, config.star_catalog_band_ratios = star_catalog_status


    # Update the platepar coordinates from the config file
    platepar.lat = config.latitude
    platepar.lon = config.longitude
    platepar.elev = config.elevation


    prev_platepar = copy.deepcopy(platepar)

    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for ff_name in ff_processing_list:

        working_platepar = copy.deepcopy(prev_platepar)

        # Skip this FF file if it was already recalibrated
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print('------------------------------------------------------------------------------')

        # Find extracted stars on this image
        if not ff_name in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry, the residuals function
        #   needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        # Recalibrate the platepar using star matching
        result, min_match_radius = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)

        
        # If the recalibration failed, try using FFT alignment
        if result is None:

            print()
            print('Running FFT alignment...')

            # Run FFT alignment
            calstars_coords = np.array(star_dict_ff[jd])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
            print(calstars_time)
            test_platepar = alignPlatepar(config, prev_platepar, calstars_time, calstars_coords, \
                show_plot=False)

            # Try to recalibrate after FFT alignment
            result, _ = recalibrateFF(config, test_platepar, jd, star_dict_ff, catalog_stars)


            # If the FFT alignment failed, align the original platepar using the smallest radius that matched
            #   and force save the platepar
            if (result is None) and (min_match_radius is not None):
                print()
                print("Using the old platepar with the minimum match radius of: {:.2f}".format(min_match_radius))
                result, _ = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars, 
                    max_match_radius=min_match_radius, force_platepar_save=True)

                if result is not None:
                    working_platepar = result


            # If the alignment succeeded, save the result
            else:
                working_platepar = result


        else:
            working_platepar = result


        # Store the platepar if the fit succeeded
        if result is not None:

            # Recompute alt/az of the FOV centre
            working_platepar.az_centre, working_platepar.alt_centre = raDec2AltAz(working_platepar.RA_d, \
                working_platepar.dec_d, working_platepar.JD, working_platepar.lat, working_platepar.lon)

            # Recompute the rotation wrt horizon
            working_platepar.rotation_from_horiz = rotationWrtHorizon(working_platepar)

            # Mark the platepar to indicate that it was automatically recalibrated on an individual FF file
            working_platepar.auto_recalibrated = True

            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:

            print('Recalibration of {:s} failed, using the previous platepar...'.format(ff_name))

            # Mark the platepar to indicate that autorecalib failed
            prev_platepar_tmp = copy.deepcopy(prev_platepar)
            prev_platepar_tmp.auto_recalibrated = False

            # If the aligning failed, set the previous platepar as the one that should be used for this FF file
            recalibrated_platepars[ff_name] = prev_platepar_tmp



    ### Average out photometric offsets within the given neighbourhood size ###

    # Go through the list of FF files with detections
    for meteor_entry in meteor_list:

        ff_name = meteor_entry[0]

        # Make sure the FF was successfully recalibrated
        if ff_name in recalibrated_platepars:

            # Find the index of the given FF file in the list of calstars
            ff_indx = calstars_ffs.index(ff_name)

            # Compute the average photometric offset and the improved standard deviation using all
            #   neighbors
            photom_offset_tmp_list = []
            photom_offset_std_tmp_list = []
            neighboring_ffs = []
            for k in range(-(RECALIBRATE_NEIGHBOURHOOD_SIZE//2), RECALIBRATE_NEIGHBOURHOOD_SIZE//2 + 1):

                k_indx = ff_indx + k

                if (k_indx >= 0) and (k_indx < len(calstars_ffs)):

                    # Get the name of the FF file
                    ff_name_tmp = calstars_ffs[k_indx]

                    # Check that the neighboring FF was successfully recalibrated
                    if ff_name_tmp in recalibrated_platepars:
                        
                        # Get the computed photometric offset and stddev
                        photom_offset_tmp_list.append(recalibrated_platepars[ff_name_tmp].mag_lev)
                        photom_offset_std_tmp_list.append(recalibrated_platepars[ff_name_tmp].mag_lev_stddev)
                        neighboring_ffs.append(ff_name_tmp)


            # Compute the new photometric offset and improved standard deviation (assume equal sample size)
            #   Source: https://stats.stackexchange.com/questions/55999/is-it-possible-to-find-the-combined-standard-deviation
            photom_offset_new = np.mean(photom_offset_tmp_list)
            photom_offset_std_new = np.sqrt(\
                np.sum([st**2 + (mt - photom_offset_new)**2 \
                for mt, st in zip(photom_offset_tmp_list, photom_offset_std_tmp_list)]) \
                / len(photom_offset_tmp_list)
                )
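            # i.e. sigma_new = sqrt(mean(sigma_i**2 + (mu_i - mu_new)**2)), valid for equal sample sizes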

            # Assign the new photometric offset and standard deviation to all FFs used for computation
            for ff_name_tmp in neighboring_ffs:
                recalibrated_platepars[ff_name_tmp].mag_lev = photom_offset_new
                recalibrated_platepars[ff_name_tmp].mag_lev_stddev = photom_offset_std_new

    ### ###


    ### Store all recalibrated platepars as a JSON file ###

    all_pps = {}
    for ff_name in recalibrated_platepars:

        json_str = recalibrated_platepars[ff_name].jsonStr()
        
        all_pps[ff_name] = json.loads(json_str)

    with open(os.path.join(dir_path, config.platepars_recalibrated_name), 'w') as f:
        
        # Convert all platepars to a JSON file
        out_str = json.dumps(all_pps, default=lambda o: o.__dict__, indent=4, sort_keys=True)

        f.write(out_str)

    ### ###



    # If no platepars were recalibrated, use the single platepar recalibration procedure
    if len(recalibrated_platepars) == 0:

        print('No FF images were used for recalibration, using the single platepar calibration function...')

        # Use the initial platepar for calibration
        applyAstrometryFTPdetectinfo(dir_path, os.path.basename(ftpdetectinfo_path), None, platepar=platepar)

        return recalibrated_platepars



    ### GENERATE PLOTS ###

    dt_list = []
    ang_dists = []
    rot_angles = []
    hour_list = []
    photom_offset_list = []
    photom_offset_std_list = []

    first_dt = np.min([FFfile.filenameToDatetime(ff_name) for ff_name in recalibrated_platepars])

    for ff_name in recalibrated_platepars:
        
        pp_temp = recalibrated_platepars[ff_name]

        # If the fitting failed, skip the platepar
        if pp_temp is None:
            continue

        # Add the datetime of the FF file to the list
        ff_dt = FFfile.filenameToDatetime(ff_name)
        dt_list.append(ff_dt)


        # Compute the angular separation from the reference platepar
        ang_dist = np.degrees(angularSeparation(np.radians(platepar.RA_d), np.radians(platepar.dec_d), \
            np.radians(pp_temp.RA_d), np.radians(pp_temp.dec_d)))
        ang_dists.append(ang_dist*60)

        # Compute rotation difference
        rot_diff = (platepar.pos_angle_ref - pp_temp.pos_angle_ref + 180)%360 - 180
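        # ((x + 180) % 360 - 180 wraps the difference into the [-180, 180) deg range)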
        rot_angles.append(rot_diff*60)

        # Compute the hour of the FF used for recalibration
        hour_list.append((ff_dt - first_dt).total_seconds()/3600)

        # Add the photometric offset to the list
        photom_offset_list.append(pp_temp.mag_lev)
        photom_offset_std_list.append(pp_temp.mag_lev_stddev)



    if generate_plot:

        # Generate the name for the plots
        plot_name = os.path.basename(ftpdetectinfo_path).replace('FTPdetectinfo_', '').replace('.txt', '')

        
        ### Plot difference from reference platepar in angular distance from (0, 0) vs rotation ###    

        plt.figure()

        plt.scatter(0, 0, marker='o', edgecolor='k', label='Reference platepar', s=100, c='none', zorder=3)

        plt.scatter(ang_dists, rot_angles, c=hour_list, zorder=3)
        plt.colorbar(label="Hours from first FF file")
        
        plt.xlabel("Angular distance from reference (arcmin)")
        plt.ylabel("Rotation from reference (arcmin)")

        plt.title("FOV centre drift starting at {:s}".format(first_dt.strftime("%Y/%m/%d %H:%M:%S")))

        plt.grid()
        plt.legend()

        plt.tight_layout()            

        plt.savefig(os.path.join(dir_path, plot_name + '_calibration_variation.png'), dpi=150)

        # plt.show()

        plt.clf()
        plt.close()

        ### ###


        ### Plot the photometric offset variation ###

        plt.figure()

        plt.errorbar(dt_list, photom_offset_list, yerr=photom_offset_std_list, fmt="o", \
            ecolor='lightgray', elinewidth=2, capsize=0, ms=2)

        # Format datetimes
        plt.gca().xaxis.set_major_formatter(mdates.DateFormatter("%H:%M"))

        # rotate and align the tick labels so they look better
        plt.gcf().autofmt_xdate()

        plt.xlabel("UTC time")
        plt.ylabel("Photometric offset")

        plt.title("Photometric offset variation")

        plt.grid()

        plt.tight_layout()

        plt.savefig(os.path.join(dir_path, plot_name + '_photometry_variation.png'), dpi=150)

        plt.clf()
        plt.close()

    ### ###



    ### Apply platepars to FTPdetectinfo ###

    meteor_output_list = []
    for meteor_entry in meteor_list:

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Get the platepar that will be applied to this FF file
        if ff_name in recalibrated_platepars:
            working_platepar = recalibrated_platepars[ff_name]

        else:
            print('Using default platepar for:', ff_name)
            working_platepar = platepar

        # Apply the recalibrated platepar to meteor centroids
        meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, working_platepar, \
            add_calstatus=True)

        meteor_output_list.append([ff_name, meteor_No, rho, phi, meteor_picks])


    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Recalibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0


    # Back up the old FTPdetectinfo file
    try:
        shutil.copy(ftpdetectinfo_path, ftpdetectinfo_path.replace('.txt', '') \
            + '_backup_{:s}.txt'.format(datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S.%f')))
    except:
        print('ERROR! The FTPdetectinfo file could not be backed up: {:s}'.format(ftpdetectinfo_path))

    # Save the updated FTPdetectinfo
    FTPdetectinfo.writeFTPdetectinfo(meteor_output_list, dir_path, os.path.basename(ftpdetectinfo_path), \
        dir_path, cam_code, fps, calibration=calib_str, celestial_coords_given=True)


    ### ###

    return recalibrated_platepars
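
A sketch of how this entry point might be driven; reading the CALSTARS file and the initial platepar is assumed to happen upstream, so calstars_list, config and platepar here are placeholders:

# Hypothetical driver call
recalibrated_platepars = recalibrateIndividualFFsAndApplyAstrometry(dir_path, \
    ftpdetectinfo_path, calstars_list, config, platepar)

for ff_name, pp in recalibrated_platepars.items():
    print(ff_name, "auto recalibrated:", pp.auto_recalibrated)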
Example #9
def addEquatorialGrid(plt_handle, platepar, jd):
    """ Given the plot handle containing the image, the function plots an equatorial grid.

        Arguments:
            plt_handle: [pyplot instance]
            platepar: [Platepar object]
            jd: [float] Julian date of the image. 


        Return:
            plt_handle: [pyplot instance] Pyplot instance with the added grid.

    """

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res / 2],
                                    [platepar.Y_res / 2], [1],
                                    platepar,
                                    extinction_correction=False)

    RA_c = RA_c[0]
    dec_c = dec_c[0]

    # Compute FOV centre alt/az
    azim_centre, alt_centre = raDec2AltAz(RA_c, dec_c, jd, platepar.lat,
                                          platepar.lon)

    # Compute FOV size
    fov_h, fov_v = computeFOVSize(platepar)
    fov_radius = np.hypot(fov_h, fov_v)

    # Determine the gridline frequency; halve the spacing (i.e. double the gridlines) if the mantissa is < 4
    grid_freq = 10**np.floor(np.log10(fov_radius))
    if 10**(np.log10(fov_radius) - np.floor(np.log10(fov_radius))) < 4:
        grid_freq /= 2
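    # (e.g. fov_radius = 25 deg: grid_freq = 10 deg, mantissa 2.5 < 4, so grid_freq becomes 5 deg)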

    # Set a maximum grid frequency of 15 deg
    if grid_freq > 15:
        grid_freq = 15

    # Grid plot density
    plot_dens = grid_freq / 100

    # Compute the range of declinations to consider
    dec_min = platepar.dec_d - fov_radius / 2
    if dec_min < -90:
        dec_min = -90

    dec_max = platepar.dec_d + fov_radius / 2
    if dec_max > 90:
        dec_max = 90

    ra_grid_arr = np.arange(0, 360, grid_freq)
    dec_grid_arr = np.arange(-90, 90, grid_freq)

    # Filter out the dec grid for min/max declination
    dec_grid_arr = dec_grid_arr[(dec_grid_arr >= dec_min)
                                & (dec_grid_arr <= dec_max)]

    # Plot the celestial parallel grid
    for dec_grid in dec_grid_arr:

        ra_grid_plot = np.arange(0, 360, plot_dens)
        dec_grid_plot = np.zeros_like(ra_grid_plot) + dec_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = raDec2AltAz_vect(ra_grid_plot, dec_grid_plot, jd, platepar.lat, \
            platepar.lon)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot > 0) & (np.degrees(angularSeparation(np.radians(azim_centre), \
            np.radians(alt_centre), np.radians(az_grid_plot), np.radians(alt_grid_plot))) < fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Find gaps in continuity and break the plotting up into individual line segments
        gap_indices = np.argwhere(
            np.abs(ra_grid_plot[1:] - ra_grid_plot[:-1]) > fov_radius)
        if len(gap_indices):

            ra_grid_plot_list = []
            dec_grid_plot_list = []

            # Separate gridlines with large gaps
            prev_gap_indx = 0
            for entry in gap_indices:

                gap_indx = entry[0]

                ra_grid_plot_list.append(ra_grid_plot[prev_gap_indx:gap_indx +
                                                      1])
                dec_grid_plot_list.append(
                    dec_grid_plot[prev_gap_indx:gap_indx + 1])

                prev_gap_indx = gap_indx

            # Add the last segment
            ra_grid_plot_list.append(ra_grid_plot[prev_gap_indx + 1:-1])
            dec_grid_plot_list.append(dec_grid_plot[prev_gap_indx + 1:-1])

        else:
            ra_grid_plot_list = [ra_grid_plot]
            dec_grid_plot_list = [dec_grid_plot]

        # Plot all grid segments
        for ra_grid_plot, dec_grid_plot in zip(ra_grid_plot_list,
                                               dec_grid_plot_list):

            # Compute image coordinates for every grid celestial parallel
            x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, jd,
                                         platepar)

            # Plot the grid
            plt_handle.plot(x_grid,
                            y_grid,
                            color='w',
                            alpha=0.2,
                            zorder=2,
                            linewidth=0.5,
                            linestyle='dotted')

    # Plot the celestial meridian grid
    for ra_grid in ra_grid_arr:

        dec_grid_plot = np.arange(-90, 90, plot_dens)
        ra_grid_plot = np.zeros_like(dec_grid_plot) + ra_grid

        # Filter out the dec grid
        filter_arr = (dec_grid_plot >= dec_min) & (dec_grid_plot <= dec_max)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute alt/az
        az_grid_plot, alt_grid_plot = raDec2AltAz_vect(ra_grid_plot, dec_grid_plot, jd, platepar.lat, \
            platepar.lon)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot > 0) & (np.degrees(angularSeparation(np.radians(azim_centre), \
            np.radians(alt_centre), np.radians(az_grid_plot), np.radians(alt_grid_plot))) < fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial meridian
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, jd, platepar)

        # # Filter out everything outside the FOV
        # filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (y_grid >= 0) & (y_grid <= platepar.Y_res)
        # x_grid = x_grid[filter_arr]
        # y_grid = y_grid[filter_arr]

        # Plot the grid
        plt_handle.plot(x_grid,
                        y_grid,
                        color='w',
                        alpha=0.2,
                        zorder=2,
                        linewidth=0.5,
                        linestyle='dotted')

    return plt_handle
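
A minimal sketch of using addEquatorialGrid on an image plot; loading a real FF frame is not shown, so a blank image of the platepar resolution stands in for it:

import matplotlib.pyplot as plt
import numpy as np

# Placeholder image; in practice this would be e.g. an FF maxpixel frame
img = np.zeros((platepar.Y_res, platepar.X_res))

plt.imshow(img, cmap='gray')
plt_handle = addEquatorialGrid(plt, platepar, platepar.JD)
plt_handle.show()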
Example #10
def computeFlux(config, dir_path, ftpdetectinfo_path, shower_code, dt_beg, dt_end, timebin, mass_index, \
    timebin_intdt=0.25, ht_std_percent=5.0, mask=None, show_plots=True):
    """ Compute flux using measurements in the given FTPdetectinfo file. 
    
    Arguments:
        config: [Config instance]
        dir_path: [str] Path to the working directory.
        ftpdetectinfo_path: [str] Path to a FTPdetectinfo file.
        shower_code: [str] IAU shower code (e.g. ETA, PER, SDA).
        dt_beg: [Datetime] Datetime object of the observation beginning.
        dt_end: [Datetime] Datetime object of the observation end.
        timebin: [float] Time bin in hours.
        mass_index: [float] Cumulative mass index of the shower.

    Keyword arguments:
        timebin_intdt: [float] Time step for computing the integrated collection area in hours. 15 minutes by
            default. If smaller than that, only one collection area will be computed.
        ht_std_percent: [float] Meteor height standard deviation in percent.
        mask: [Mask object] Mask object, None by default.
        show_plots: [bool] Show flux plots. True by default.

    Return:
        [tuple] sol_data, flux_lm_6_5_data
            - sol_data: [list] Array of solar longitudes (in degrees) of time bins.
            - flux_lm_6_5_data: [list] Array of meteoroid flux at the limiting magnitude of +6.5 in 
                meteors/1000km^2/h.
    """


    # Get a list of files in the night folder
    file_list = sorted(os.listdir(dir_path))



    # Find and load the platepar file
    if config.platepar_name in file_list:

        # Load the platepar
        platepar = Platepar.Platepar()
        platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat)

    else:
        print("Cannot find the platepar file in the night directory: ", config.platepar_name)
        return None




    # # Load FTPdetectinfos
    # meteor_data = []
    # for ftpdetectinfo_path in ftpdetectinfo_list:

    #     if not os.path.isfile(ftpdetectinfo_path):
    #         print('No such file:', ftpdetectinfo_path)
    #         continue

    #     meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))


    # Load meteor data from the FTPdetectinfo file
    meteor_data = readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    if not len(meteor_data):
        print("No meteors in the FTPdetectinfo file!")
        return None




    # Find and load recalibrated platepars
    if config.platepars_recalibrated_name in file_list:
        with open(os.path.join(dir_path, config.platepars_recalibrated_name)) as f:
            recalibrated_platepars_dict = json.load(f)

            print("Recalibrated platepars loaded!")

    # If the file is not available, apply the recalibration procedure
    else:

        print("Recalibrated platepar file not available!")
        print("Recalibrating...")

        recalibrated_platepars_dict = applyRecalibrate(ftpdetectinfo_path, config)


    # Convert the dictionary of recalibrated platepars to a dictionary of Platepar objects
    recalibrated_platepars = {}
    for ff_name in recalibrated_platepars_dict:
        pp = Platepar.Platepar()
        pp.loadFromDict(recalibrated_platepars_dict[ff_name], use_flat=config.use_flat)

        recalibrated_platepars[ff_name] = pp


    # Compute the nightly mean of the photometric zero point
    mag_lev_nightly_mean = np.mean([recalibrated_platepars[ff_name].mag_lev \
                                        for ff_name in recalibrated_platepars])




    # Locate and load the mask file
    if config.mask_file in file_list:
        mask_path = os.path.join(dir_path, config.mask_file)
        mask = loadMask(mask_path)
        print("Using mask:", mask_path)

    else:
        print("No mask used!")
        mask = None



    # Compute the population index using the classical equation
    population_index = 10**((mass_index - 1)/2.5) # Found to be more consistent when comparing fluxes
    #population_index = 10**((mass_index - 1)/2.3) # TEST


    ### SENSOR CHARACTERIZATION ###
    # Computes FWHM of stars and noise profile of the sensor
    
    # File which stores the sensor characterization profile
    sensor_characterization_file = "flux_sensor_characterization.json"
    sensor_characterization_path = os.path.join(dir_path, sensor_characterization_file)

    # Load sensor characterization file if present, so the procedure can be skipped
    if os.path.isfile(sensor_characterization_path):

        # Load the JSON file
        with open(sensor_characterization_path) as f:
            
            data = " ".join(f.readlines())
            sensor_data = json.loads(data)

            # Remove the info entry
            if '-1' in sensor_data:
                del sensor_data['-1']

    else:

        # Run sensor characterization
        sensor_data = sensorCharacterization(config, dir_path)

        # Save to file for later use
        with open(sensor_characterization_path, 'w') as f:

            # Add an explanation of what each entry means
            sensor_data_save = dict(sensor_data)
            sensor_data_save['-1'] = {"FF file name": ['median star FWHM', 'median background noise stddev']}

            # Convert the sensor characterization data to JSON
            out_str = json.dumps(sensor_data_save, indent=4, sort_keys=True)

            # Save to disk
            f.write(out_str)



    # Compute the nightly mean FWHM and noise stddev
    fwhm_nightly_mean = np.mean([sensor_data[key][0] for key in sensor_data])
    stddev_nightly_mean = np.mean([sensor_data[key][1] for key in sensor_data])

    ### ###



    # Perform shower association
    associations, _ = showerAssociation(config, [ftpdetectinfo_path], shower_code=shower_code, \
        show_plot=False, save_plot=False, plot_activity=False)

    # Init the flux configuration
    flux_config = FluxConfig()


    # Remove all meteors which begin below the elevation limit
    filtered_associations = {}
    for key in associations:
        meteor, shower = associations[key]

        # Keep meteors that begin above the elevation limit
        if meteor.beg_alt > flux_config.elev_limit:
            filtered_associations[key] = [meteor, shower]

        else:
            print("Rejecting (below elevation limit):", meteor.jdt_ref)

    associations = filtered_associations



    # If there are no shower associations, return nothing
    if not associations:
        print("No meteors associated with the shower!")
        return None


    # Print the list of used meteors
    peak_mags = []
    for key in associations:
        meteor, shower = associations[key]

        if shower is not None:

            # Compute peak magnitude
            peak_mag = np.min(meteor.mag_array)

            peak_mags.append(peak_mag)

            print("{:.6f}, {:3s}, {:+.2f}".format(meteor.jdt_ref, shower.name, peak_mag))

    print()



    ### COMPUTE COLLECTION AREAS ###

    # Make a file name to save the raw collection areas
    col_areas_file_name = generateColAreaJSONFileName(platepar.station_code, flux_config.side_points, \
        flux_config.ht_min, flux_config.ht_max, flux_config.dht, flux_config.elev_limit)

    # Check if the collection area file exists. If yes, load the data. If not, generate collection areas
    if col_areas_file_name in os.listdir(dir_path):
        col_areas_ht = loadRawCollectionAreas(dir_path, col_areas_file_name)

        print("Loaded collection areas from:", col_areas_file_name)

    else:

        # Compute the collecting area segments per height
        col_areas_ht = collectingArea(platepar, mask=mask, side_points=flux_config.side_points, \
            ht_min=flux_config.ht_min, ht_max=flux_config.ht_max, dht=flux_config.dht, \
            elev_limit=flux_config.elev_limit)

        # Save the collection areas to file
        saveRawCollectionAreas(dir_path, col_areas_file_name, col_areas_ht)

        print("Saved raw collection areas to:", col_areas_file_name)


    ### ###


    # Compute the raw collection area at the height of 100 km
    col_area_100km_raw = 0
    col_areas_100km_blocks = col_areas_ht[100000.0]
    for block in col_areas_100km_blocks:
        col_area_100km_raw += col_areas_100km_blocks[block][0]

    print("Raw collection area at height of 100 km: {:.2f} km^2".format(col_area_100km_raw/1e6))


    # Compute the pointing of the middle of the FOV
    _, ra_mid, dec_mid, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], [platepar.X_res/2], [platepar.Y_res/2], \
        [1], platepar, extinction_correction=False)
    azim_mid, elev_mid = raDec2AltAz(ra_mid[0], dec_mid[0], J2000_JD.days, platepar.lat, platepar.lon)

    # Compute the range to the middle point
    ref_ht = 100000
    r_mid, _, _, _ = xyHt2Geo(platepar, platepar.X_res/2, platepar.Y_res/2, ref_ht, indicate_limit=True, \
        elev_limit=flux_config.elev_limit)

    print("Range at 100 km in the middle of the image: {:.2f} km".format(r_mid/1000))


    ### Compute the average angular velocity to which the flux variation throughout the night will be normalized 
    #   The ang vel is of the middle of the FOV in the middle of observations

    # Middle Julian date of the night
    jd_night_mid = (datetime2JD(dt_beg) + datetime2JD(dt_end))/2

    # Compute the apparent radiant
    ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_night_mid)

    # Compute the radiant elevation
    radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_night_mid, platepar.lat, platepar.lon)

    # Compute the angular velocity in the middle of the FOV
    rad_dist_night_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), 
                np.radians(azim_mid), np.radians(elev_mid))
    ang_vel_night_mid = v_init*np.sin(rad_dist_night_mid)/r_mid

    ###




    # Compute the average limiting magnitude to which all flux will be normalized

    # Standard deviation of star PSF, nightly mean (px)
    star_stddev = fwhm_nightly_mean/2.355

    # # Compute the theoretical stellar limiting magnitude (nightly average)
    # star_sum = 2*np.pi*(config.k1_det*stddev_nightly_mean + config.j1_det)*star_stddev**2
    # lm_s_nightly_mean = -2.5*np.log10(star_sum) + mag_lev_nightly_mean

    # Compute the theoretical stellar limiting magnitude using an empirical model (nightly average)
    lm_s_nightly_mean = stellarLMModel(mag_lev_nightly_mean)


    # A meteor needs to be visible on at least 4 frames, thus it needs at least 4x the mass to produce
    #   that amount of light. A 1 magnitude difference scales as -0.4 of the log of the mass, thus:
    # frame_min_loss = np.log10(config.line_minimum_frame_range_det)/(-0.4)
    frame_min_loss = 0.0 # TEST: frame-loss correction disabled

    print("Frame min loss: {:.2} mag".format(frame_min_loss))

    lm_s_nightly_mean += frame_min_loss

    # Compute apparent meteor magnitude
    lm_m_nightly_mean = lm_s_nightly_mean - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
        np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_night_mid)/(config.fps*r_mid*fwhm_nightly_mean)) \
        )
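
    # The meteor limiting magnitude adjusts the stellar LM for two effects: the 5*log10(r/1e5) term
    #   is the distance modulus relative to the 100 km reference range, and the 2.5*log10(...) term
    #   is the trailing loss from the meteor's light being spread along its per-frame angular motion
    #   instead of concentrated in a star-like PSF of the measured FWHM.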

    # Print the nightly limiting magnitudes
    print("Stellar lim mag using detection thresholds:", lm_s_nightly_mean)
    print("Apparent meteor limiting magnitude:", lm_m_nightly_mean)


    ### Apply time-dependent corrections ###

    # Track values used for flux
    sol_data = []
    flux_lm_6_5_data = []
    meteor_num_data = []
    effective_collection_area_data = []
    radiant_elev_data = []
    radiant_dist_mid_data = []
    ang_vel_mid_data = []
    lm_s_data = []
    lm_m_data = []
    sensitivity_corr_data = []
    range_corr_data = []
    radiant_elev_corr_data = []
    ang_vel_corr_data = []
    total_corr_data = []


    # Go through all time bins within the observation period
    total_time_hrs = (dt_end - dt_beg).total_seconds()/3600
    nbins = int(np.ceil(total_time_hrs/timebin))
    for t_bin in range(nbins):
        for subbin in range(flux_config.sub_time_bins):

            # Compute bin start and end time
            bin_dt_beg = dt_beg + datetime.timedelta(hours=(timebin*t_bin + timebin*subbin/flux_config.sub_time_bins))
            bin_dt_end = bin_dt_beg + datetime.timedelta(hours=timebin)

            if bin_dt_end > dt_end:
                bin_dt_end = dt_end


            # Compute bin duration in hours
            bin_hours = (bin_dt_end - bin_dt_beg).total_seconds()/3600

            # Convert to Julian date
            bin_jd_beg = datetime2JD(bin_dt_beg)
            bin_jd_end = datetime2JD(bin_dt_end)


            jd_mean = (bin_jd_beg + bin_jd_end)/2

            # Compute the mean solar longitude
            sol_mean = np.degrees(jd2SolLonSteyaert(jd_mean))

            ### Compute the radiant elevation at the middle of the time bin ###

            # Compute the apparent radiant
            ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_mean)

            # Compute the mean meteor height
            meteor_ht_beg = heightModel(v_init, ht_type='beg')
            meteor_ht_end = heightModel(v_init, ht_type='end')
            meteor_ht = (meteor_ht_beg + meteor_ht_end)/2

            # Compute the standard deviation of the height
            meteor_ht_std = meteor_ht*ht_std_percent/100.0

            # Init the Gaussian height distribution
            meteor_ht_gauss = scipy.stats.norm(meteor_ht, meteor_ht_std)


            # Compute the radiant elevation
            radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_mean, platepar.lat, platepar.lon)




            # Only select meteors in this bin and not too close to the radiant
            bin_meteors = []
            bin_ffs = []
            for key in associations:
                meteor, shower = associations[key]

                if shower is not None:
                    if (shower.name == shower_code) and (meteor.jdt_ref > bin_jd_beg) \
                        and (meteor.jdt_ref <= bin_jd_end):

                        # Filter out meteors ending too close to the radiant
                        if np.degrees(angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), \
                            np.radians(meteor.end_azim), np.radians(meteor.end_alt))) >= flux_config.rad_dist_min:
                        
                            bin_meteors.append([meteor, shower])
                            bin_ffs.append(meteor.ff_name)

            ### ###

            print()
            print()
            print("--- Bin information ---")
            print("Bin beg:", bin_dt_beg)
            print("Bin end:", bin_dt_end)
            print("Sol mid: {:.5f}".format(sol_mean))
            print("Radiant elevation: {:.2f} deg".format(radiant_elev))
            print("Apparent speed: {:.2f} km/s".format(v_init/1000))

            # If the elevation of the radiant is below the limit, skip this bin
            if radiant_elev < flux_config.rad_elev_limit:
                print("!!! Mean radiant elevation below {:.2f} deg threshold, skipping time bin!".format(flux_config.rad_elev_limit))
                continue

            # The minimum duration of the time bin should be larger than 50% of the given dt
            if bin_hours < 0.5*timebin:
                print("!!! Time bin duration of {:.2f} h is shorter than 0.5x of the time bin!".format(bin_hours))
                continue


            if len(bin_meteors) >= flux_config.meteros_min:
                print("Meteors:", len(bin_meteors))


                ### Weight collection area by meteor height distribution ###

                # Determine weights for each height
                weight_sum = 0
                weights = {}
                for ht in col_areas_ht:
                    wt = meteor_ht_gauss.pdf(float(ht))
                    weight_sum += wt
                    weights[ht] = wt

                # Normalize the weights so that the sum is 1
                for ht in weights:
                    weights[ht] /= weight_sum
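
                # The Gaussian height distribution thus weights each height slice by the probability
                #   that shower meteors appear there; e.g. with meteor_ht = 100 km and a 5% standard
                #   deviation (illustrative numbers), the 100 km slice dominates and slices more than
                #   ~3 sigma away contribute almost nothing.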

                ### ###


                col_area_meteor_ht_raw = 0
                for ht in col_areas_ht:
                    for block in col_areas_ht[ht]:
                        col_area_meteor_ht_raw += weights[ht]*col_areas_ht[ht][block][0]

                print("Raw collection area at meteor heights: {:.2f} km^2".format(col_area_meteor_ht_raw/1e6))

                # Compute the angular velocity in the middle of the FOV
                rad_dist_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), 
                            np.radians(azim_mid), np.radians(elev_mid))
                ang_vel_mid = v_init*np.sin(rad_dist_mid)/r_mid



                ### Compute the limiting magnitude ###

                # Compute the mean star FWHM in the given bin
                fwhm_bin_mean = np.mean([sensor_data[ff_name][0] for ff_name in bin_ffs])

                # Compute the mean background stddev in the given bin
                stddev_bin_mean = np.mean([sensor_data[ff_name][1] for ff_name in bin_ffs])

                # Compute the mean photometric zero point in the given bin
                mag_lev_bin_mean = np.mean([recalibrated_platepars[ff_name].mag_lev for ff_name in bin_ffs if ff_name in recalibrated_platepars])



                # # Standard deviation of star PSF, nightly mean (px)
                # star_stddev = fwhm_bin_mean/2.355

                # Compute the theoretical stellar limiting magnitude (bin average)
                # star_sum = 2*np.pi*(config.k1_det*stddev_bin_mean + config.j1_det)*star_stddev**2
                # lm_s = -2.5*np.log10(star_sum) + mag_lev_bin_mean
                
                # Use empirical LM calculation
                lm_s = stellarLMModel(mag_lev_bin_mean)

                lm_s += frame_min_loss


                # ### TEST: artificially increase the limiting magnitude

                # lm_s += 1.2

                # #####

                # Compute apparent meteor magnitude
                lm_m = lm_s - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
                    np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_mid)/(config.fps*r_mid*fwhm_bin_mean)))

                ### ###


                # Final collection area value (height-weighted)
                collection_area = 0

                # Keep track of the corrections
                sensitivity_corr_arr = []
                range_corr_arr = []
                radiant_elev_corr_arr = []
                ang_vel_corr_arr = []
                total_corr_arr = []
                col_area_raw_arr = []
                col_area_eff_arr = []
                col_area_eff_block_dict = {}

                # Go through all heights and segment blocks
                for ht in col_areas_ht:
                    for img_coords in col_areas_ht[ht]:

                        x_mean, y_mean = img_coords

                        # Unpack precomputed values
                        area, azim, elev, sensitivity_ratio, r = col_areas_ht[ht][img_coords]


                        # Compute the angular velocity (rad/s) in the middle of this block
                        rad_dist = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), 
                            np.radians(azim), np.radians(elev))
                        ang_vel = v_init*np.sin(rad_dist)/r


                        # If the angular distance from the radiant is below the rad_dist_min limit
                        #   (e.g. 15 deg), don't use the block in the effective collection area
                        if np.degrees(rad_dist) < flux_config.rad_dist_min:
                            area = 0.0


                        # Compute the range correction
                        range_correction = (1e5/r)**2

                        #ang_vel_correction = ang_vel/ang_vel_mid
                        # Compute angular velocity correction relative to the nightly mean
                        ang_vel_correction = ang_vel/ang_vel_night_mid



                        ### Apply corrections

                        correction_ratio = 1.0
                        
                        # Correct the area for vignetting and extinction
                        sensitivity_corr_arr.append(sensitivity_ratio)
                        correction_ratio *= sensitivity_ratio


                        # Correct for the range (cap to an order of magnitude correction)
                        range_correction = max(range_correction, 0.1)
                        range_corr_arr.append(range_correction)
                        correction_ratio *= range_correction

                        # Correct for the radiant elevation (cap to an order of magnitude correction)
                        radiant_elev_correction = np.sin(np.radians(radiant_elev))
                        radiant_elev_correction = max(radiant_elev_correction, 0.1)
                        radiant_elev_corr_arr.append(radiant_elev_correction)
                        correction_ratio *= radiant_elev_correction


                        # Correct for angular velocity (cap to an order of magnitude correction)
                        ang_vel_correction = min(max(ang_vel_correction, 0.1), 10)
                        correction_ratio *= ang_vel_correction
                        ang_vel_corr_arr.append(ang_vel_correction)


                        # Add the collection area to the final estimate with the height weight
                        #   Raise the correction to the mass index power
                        total_correction = correction_ratio**(mass_index - 1)
                        total_correction = min(max(total_correction, 0.1), 10)
                        collection_area += weights[ht]*area*total_correction
                        total_corr_arr.append(total_correction)
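
                        # Raising the combined correction to (mass_index - 1) converts per-block
                        #   sensitivity into an effective collection area: for a cumulative mass
                        #   distribution with index s, the number of observable meteors scales as
                        #   the sensitivity ratio to the power s - 1.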

                        col_area_raw_arr.append(weights[ht]*area)
                        col_area_eff_arr.append(weights[ht]*area*total_correction)

                        if img_coords not in col_area_eff_block_dict:
                            col_area_eff_block_dict[img_coords] = []

                        col_area_eff_block_dict[img_coords].append(weights[ht]*area*total_correction)




                # Compute average corrections (median for the total)
                sensitivity_corr_avg = np.mean(sensitivity_corr_arr)
                range_corr_avg = np.mean(range_corr_arr)
                radiant_elev_corr_avg = np.mean(radiant_elev_corr_arr)
                ang_vel_corr_avg = np.mean(ang_vel_corr_arr)
                total_corr_avg = np.median(total_corr_arr)
                col_area_raw_sum = np.sum(col_area_raw_arr)
                col_area_eff_sum = np.sum(col_area_eff_arr)

                print("Raw collection area at meteor heights (CHECK): {:.2f} km^2".format(col_area_raw_sum/1e6))
                print("Eff collection area at meteor heights (CHECK): {:.2f} km^2".format(col_area_eff_sum/1e6))



                # ### PLOT HOW THE CORRECTION VARIES ACROSS THE FOV
                # x_arr = []
                # y_arr = []
                # col_area_eff_block_arr = []

                # for img_coords in col_area_eff_block_dict:
                    
                #     x_mean, y_mean = img_coords

                #     #if x_mean not in x_arr:
                #     x_arr.append(x_mean)
                #     #if y_mean not in y_arr:
                #     y_arr.append(y_mean)

                #     col_area_eff_block_arr.append(np.sum(col_area_eff_block_dict[img_coords]))

                # x_unique = np.unique(x_arr)
                # y_unique = np.unique(y_arr)
                # # plt.pcolormesh(x_arr, y_arr, np.array(col_area_eff_block_arr).reshape(len(x_unique), len(y_unique)).T, shading='auto')
                # plt.title("TOTAL = " + str(np.sum(col_area_eff_block_arr)/1e6))
                # plt.scatter(x_arr, y_arr, c=np.array(col_area_eff_block_arr)/1e6)
                # #plt.pcolor(np.array(x_arr).reshape(len(x_unique), len(y_unique)), np.array(y_arr).reshape(len(x_unique), len(y_unique)), np.array(col_area_eff_block_arr).reshape(len(x_unique), len(y_unique))/1e6)
                # plt.colorbar(label="km^2")
                # plt.gca().invert_yaxis()
                # plt.show()

                # ###


                # Compute the flux at the bin LM (meteors/1000km^2/h)
                flux = 1e9*len(bin_meteors)/collection_area/bin_hours

                # Compute the flux scaled to the nightly mean LM
                flux_lm_nightly_mean = flux*population_index**(lm_m_nightly_mean - lm_m)

                # Compute the flux scaled to +6.5M
                flux_lm_6_5 = flux*population_index**(6.5 - lm_m)
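
                # The population index r scales meteor counts per magnitude of sensitivity, so
                #   flux*r**(m2 - m1) converts a flux measured at LM m1 to LM m2. E.g. with r = 2.5
                #   and lm_m = +4.5, scaling to +6.5 multiplies the flux by 2.5**2 = 6.25.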



                print("--- Sensor information ---")
                print("Star FWHM:  {:5.2f} px".format(fwhm_bin_mean))
                print("Bkg stddev: {:4.1f} ADU".format(stddev_bin_mean))
                print("Photom ZP:  {:+6.2f} mag".format(mag_lev_bin_mean))
                print("Stellar LM: {:+.2f} mag".format(lm_s))
                print("--- Flux ---")
                print("Meteors:  {:d}".format(len(bin_meteors)))
                print("Col area: {:d} km^2".format(int(collection_area/1e6)))
                print("Ang vel:  {:.2f} deg/s".format(np.degrees(ang_vel_mid)))
                print("LM app:   {:+.2f} mag".format(lm_m))
                print("Flux:     {:.2f} meteors/1000km^2/h".format(flux))
                print("to {:+.2f}: {:.2f} meteors/1000km^2/h".format(lm_m_nightly_mean, flux_lm_nightly_mean))
                print("to +6.50: {:.2f} meteors/1000km^2/h".format(flux_lm_6_5))


                sol_data.append(sol_mean)
                flux_lm_6_5_data.append(flux_lm_6_5)
                meteor_num_data.append(len(bin_meteors))
                effective_collection_area_data.append(collection_area)
                radiant_elev_data.append(radiant_elev)
                radiant_dist_mid_data.append(np.degrees(rad_dist_mid))
                ang_vel_mid_data.append(np.degrees(ang_vel_mid))
                lm_s_data.append(lm_s)
                lm_m_data.append(lm_m)

                sensitivity_corr_data.append(sensitivity_corr_avg)
                range_corr_data.append(range_corr_avg)
                radiant_elev_corr_data.append(radiant_elev_corr_avg)
                ang_vel_corr_data.append(ang_vel_corr_avg)
                total_corr_data.append(total_corr_avg)


    # Print the results
    print("Solar longitude, Flux at LM +6.5:")
    for sol, flux_lm_6_5 in zip(sol_data, flux_lm_6_5_data):
        print("{:9.5f}, {:8.4f}".format(sol, flux_lm_6_5))


    if show_plots and len(sol_data):

        # Plot a histogram of peak magnitudes
        plt.hist(peak_mags, cumulative=True, log=True, bins=len(peak_mags), density=True)

        # Plot population index
        r_intercept = -0.7
        x_arr = np.linspace(np.min(peak_mags), np.percentile(peak_mags, 60))
        plt.plot(x_arr, 10**(np.log10(population_index)*x_arr + r_intercept))

        plt.title("r = {:.2f}".format(population_index))

        plt.show()


        # Plot how the derived values change throughout the night
        fig, axes \
            = plt.subplots(nrows=4, ncols=2, sharex=True, figsize=(10, 8))

        ((ax_met,      ax_lm),
         (ax_rad_elev, ax_corrs),
         (ax_rad_dist, ax_col_area),
         (ax_ang_vel,  ax_flux)) = axes


        fig.suptitle("{:s}, s = {:.2f}, r = {:.2f}".format(shower_code, mass_index, population_index))


        ax_met.scatter(sol_data, meteor_num_data)
        ax_met.set_ylabel("Meteors")

        ax_rad_elev.plot(sol_data, radiant_elev_data)
        ax_rad_elev.set_ylabel("Radiant elev (deg)")

        ax_rad_dist.plot(sol_data, radiant_dist_mid_data)
        ax_rad_dist.set_ylabel("Radiant dist (deg)")

        ax_ang_vel.plot(sol_data, ang_vel_mid_data)
        ax_ang_vel.set_ylabel("Ang vel (deg/s)")
        ax_ang_vel.set_xlabel("La Sun (deg)")


        ax_lm.plot(sol_data, lm_s_data, label="Stellar")
        ax_lm.plot(sol_data, lm_m_data, label="Meteor")
        ax_lm.set_ylabel("LM")
        ax_lm.legend()

        ax_corrs.plot(sol_data, sensitivity_corr_data, label="Sensitivity")
        ax_corrs.plot(sol_data, range_corr_data, label="Range")
        ax_corrs.plot(sol_data, radiant_elev_corr_data, label="Rad elev")
        ax_corrs.plot(sol_data, ang_vel_corr_data, label="Ang vel")
        ax_corrs.plot(sol_data, total_corr_data, label="Total (median)")
        ax_corrs.set_ylabel("Corrections")
        ax_corrs.legend()

        

        ax_col_area.plot(sol_data, np.array(effective_collection_area_data)/1e6)
        ax_col_area.plot(sol_data, len(sol_data)*[col_area_100km_raw/1e6], color='k', \
            label="Raw col area at 100 km")
        ax_col_area.plot(sol_data, len(sol_data)*[col_area_meteor_ht_raw/1e6], color='k', linestyle='dashed', \
            label="Raw col area at met ht")
        ax_col_area.set_ylabel("Eff. col. area (km^2)")
        ax_col_area.legend()

        ax_flux.scatter(sol_data, flux_lm_6_5_data)
        ax_flux.set_ylabel("Flux@+6.5M (met/1000km^2/h)")
        ax_flux.set_xlabel("La Sun (deg)")

        plt.tight_layout()

        plt.show()


    return sol_data, flux_lm_6_5_data
Example #11
def matchStarsResiduals(config, platepar, catalog_stars, star_dict, match_radius, ret_nmatch=False, \
    sky_coords=False, lim_mag=None, verbose=False):
    """ Match the image and catalog stars with the given astrometry solution and estimate the residuals
        between them.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Astrometry parameters.
        catalog_stars: [ndarray] An array of catalog stars (ra, dec, mag).
        star_dict: [ndarray] A dictionary where the keys are JDs when the stars were recorded and values are
            2D list of stars, each entry is (X, Y, bg_level, level, fwhm).
        match_radius: [float] Maximum radius for star matching (pixels).
    Keyword arguments:
        ret_nmatch: [bool] If True, the function returns the number of matched stars and the average
            deviation. False by default.
        sky_coords: [bool] If True, sky coordinate residuals in RA, dec will be used to compute the cost
            function, instead of image coordinates.
        lim_mag: [float] Override the limiting magnitude from config. None by default.
        verbose: [bool] Print results. False by default.
    Return:
        cost: [float] The cost function which weights the number of matched stars and the average deviation.
    """

    if lim_mag is None:
        lim_mag = config.catalog_mag_limit

    # Estimate the FOV radius
    fov_radius = getFOVSelectionRadius(platepar)

    # Dictionary containing the matched stars, the keys are JDs of every image
    matched_stars = {}

    # Go through every FF image and its stars
    for jd in star_dict:

        # Estimate RA,dec of the centre of the FOV
        _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
            platepar, extinction_correction=False)

        RA_c = RA_c[0]
        dec_c = dec_c[0]

        # Get stars from the catalog around the defined center in a given radius
        _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, jd, platepar.lat, platepar.lon, \
            fov_radius, lim_mag)
        ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

        # Extract stars for the given Julian date
        stars_list = star_dict[jd]
        stars_list = np.array(stars_list)

        # Convert all catalog stars to image coordinates
        cat_x_array, cat_y_array = raDecToXYPP(ra_catalog, dec_catalog, jd,
                                               platepar)

        # Take only those stars which are within the FOV
        x_indices = np.argwhere((cat_x_array >= 0)
                                & (cat_x_array < platepar.X_res))
        y_indices = np.argwhere((cat_y_array >= 0)
                                & (cat_y_array < platepar.Y_res))
        cat_good_indices = np.intersect1d(x_indices,
                                          y_indices).astype(np.uint32)

        # cat_x_array = cat_x_array[good_indices]
        # cat_y_array = cat_y_array[good_indices]

        # # Plot image stars
        # im_y, im_x, _, _ = stars_list.T
        # plt.scatter(im_y, im_x, facecolors='none', edgecolor='g')

        # # Plot catalog stars
        # plt.scatter(cat_y_array[cat_good_indices], cat_x_array[cat_good_indices], c='r', s=20, marker='+')

        # plt.show()

        # Match image and catalog stars
        matched_indices = matchStars(stars_list, cat_x_array, cat_y_array,
                                     cat_good_indices, match_radius)

        # Skip this image if no stars were matched
        if len(matched_indices) < config.min_matched_stars:
            continue

        matched_indices = np.array(matched_indices)
        matched_img_inds, matched_cat_inds, dist_list = matched_indices.T

        # Extract data from matched stars
        matched_img_stars = stars_list[matched_img_inds.astype(int)]
        matched_cat_stars = extracted_catalog[matched_cat_inds.astype(int)]

        # Put the matched stars to a dictionary
        matched_stars[jd] = [matched_img_stars, matched_cat_stars, dist_list]

        # # Plot matched stars
        # im_y, im_x, _, _ = matched_img_stars.T
        # cat_y = cat_y_array[matched_cat_inds.astype(np.int)]
        # cat_x = cat_x_array[matched_cat_inds.astype(np.int)]

        # plt.scatter(im_x, im_y, c='r', s=5)
        # plt.scatter(cat_x, cat_y, facecolors='none', edgecolor='g')

        # plt.xlim([0, platepar.X_res])
        # plt.ylim([platepar.Y_res, 0])

        # plt.show()

    # If residuals on the image should be computed
    if not sky_coords:

        unit_label = 'px'

        # Extract all distances
        global_dist_list = []
        # level_list = []
        # mag_list = []
        for jd in matched_stars:
            # matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]

            _, _, dist_list = matched_stars[jd]

            global_dist_list += dist_list.tolist()

            # # TEST
            # level_list += matched_img_stars[:, 3].tolist()
            # mag_list += matched_cat_stars[:, 2].tolist()

        # # Plot levels vs. magnitudes
        # plt.scatter(mag_list, np.log10(level_list))
        # plt.xlabel('Magnitude')
        # plt.ylabel('Log10 level')
        # plt.show()

    # Compute the residuals on the sky
    else:

        unit_label = 'arcmin'

        global_dist_list = []

        # Go through all matched stars
        for jd in matched_stars:

            matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]

            # Go through all stars on the image
            for img_star_entry, cat_star_entry in zip(matched_img_stars,
                                                      matched_cat_stars):

                # Extract star coords
                star_y = img_star_entry[0]
                star_x = img_star_entry[1]
                cat_ra = cat_star_entry[0]
                cat_dec = cat_star_entry[1]

                # Convert image coordinates to RA/Dec
                _, star_ra, star_dec, _ = xyToRaDecPP([jd2Date(jd)], [star_x], [star_y], [1], \
                    platepar, extinction_correction=False)

                # Compute angular distance between the predicted and the catalog position
                ang_dist = np.degrees(angularSeparation(np.radians(cat_ra), np.radians(cat_dec), \
                    np.radians(star_ra[0]), np.radians(star_dec[0])))

                # Store the angular separation in arc minutes
                global_dist_list.append(ang_dist * 60)

    # Number of matched stars
    n_matched = len(global_dist_list)

    if n_matched == 0:

        if verbose:
            print(
                'No matched stars with radius {:.1f} px!'.format(match_radius))

        if ret_nmatch:
            return 0, 9999.0, 9999.0, {}

        else:
            return 9999.0

    # Calculate the average (median) distance
    avg_dist = np.median(global_dist_list)

    cost = (avg_dist**2) * (1.0 / np.sqrt(n_matched + 1))
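
    # The cost grows quadratically with the typical residual and shrinks with the number of matched
    #   stars (the 1/sqrt(n_matched + 1) term), so minimizing it favours solutions that match many
    #   stars tightly over ones that match a few stars perfectly.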

    if verbose:

        print()
        print("Matched {:d} stars with radius of {:.1f} px".format(
            n_matched, match_radius))
        print("    Average distance = {:.3f} {:s}".format(
            avg_dist, unit_label))
        print("    Cost function    = {:.5f}".format(cost))

    if ret_nmatch:
        return n_matched, avg_dist, cost, matched_stars

    else:
        return cost
Example #12
def matchStarsResiduals(config, platepar, catalog_stars, star_dict, match_radius, ret_nmatch=False, \
    sky_coords=False, lim_mag=None, verbose=False):
    """ Match the image and catalog stars with the given astrometry solution and estimate the residuals 
        between them.
    
    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Astrometry parameters.
        catalog_stars: [ndarray] An array of catalog stars (ra, dec, mag).
        star_dict: [ndarray] A dictionary where the keys are JDs when the stars were recorded and values are
            2D list of stars, each entry is (X, Y, bg_level, level).
        match_radius: [float] Maximum radius for star matching (pixels).

    Keyword arguments:
        ret_nmatch: [bool] If True, the function returns the number of matched stars and the average
            deviation. False by default.
        sky_coords: [bool] If True, sky coordinate residuals in RA, dec will be used to compute the cost
            function, instead of image coordinates.
        lim_mag: [float] Override the limiting magnitude from config. None by default.
        verbose: [bool] Print results. False by default.

    Return:
        cost: [float] The cost function which weights the number of matched stars and the average deviation.

    """


    if lim_mag is None:
        lim_mag = config.catalog_mag_limit


    # Estimate the FOV radius
    fov_w = platepar.X_res/platepar.F_scale
    fov_h = platepar.Y_res/platepar.F_scale

    fov_radius = np.sqrt((fov_w/2)**2 + (fov_h/2)**2)

    # print('fscale', platepar.F_scale)
    # print('FOV w:', fov_w)
    # print('FOV h:', fov_h)
    # print('FOV radius:', fov_radius)


    # Dictionary containing the matched stars, the keys are JDs of every image
    matched_stars = {}


    # Go through every FF image and its stars
    for jd in star_dict:

        # Estimate RA,dec of the centre of the FOV
        _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], 
            platepar)

        RA_c = RA_c[0]
        dec_c = dec_c[0]

        # Get stars from the catalog around the defined center in a given radius
        _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, fov_radius, lim_mag)
        ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T


        # Extract stars for the given Julian date
        stars_list = star_dict[jd]
        stars_list = np.array(stars_list)

        # Convert all catalog stars to image coordinates
        cat_x_array, cat_y_array = raDecToXYPP(ra_catalog, dec_catalog, jd, platepar)

        # Take only those stars which are within the FOV
        x_indices = np.argwhere((cat_x_array >= 0) & (cat_x_array < platepar.X_res))
        y_indices = np.argwhere((cat_y_array >= 0) & (cat_y_array < platepar.Y_res))
        cat_good_indices = np.intersect1d(x_indices, y_indices).astype(np.uint32)

        # cat_x_array = cat_x_array[good_indices]
        # cat_y_array = cat_y_array[good_indices]


        # # Plot image stars
        # im_y, im_x, _, _ = stars_list.T
        # plt.scatter(im_y, im_x, facecolors='none', edgecolor='g')

        # # Plot catalog stars
        # plt.scatter(cat_y_array[cat_good_indices], cat_x_array[cat_good_indices], c='r', s=20, marker='+')

        # plt.show()


        # Match image and catalog stars
        matched_indices = matchStars(stars_list, cat_x_array, cat_y_array, cat_good_indices, match_radius)

        # Skip this image if no stars were matched
        if len(matched_indices) < config.min_matched_stars:
            continue

        matched_indices = np.array(matched_indices)
        matched_img_inds, matched_cat_inds, dist_list = matched_indices.T

        # Extract data from matched stars
        matched_img_stars = stars_list[matched_img_inds.astype(int)]
        matched_cat_stars = extracted_catalog[matched_cat_inds.astype(int)]

        # Put the matched stars to a dictionary
        matched_stars[jd] = [matched_img_stars, matched_cat_stars, dist_list]


        # # Plot matched stars
        # im_y, im_x, _, _ = matched_img_stars.T
        # cat_y = cat_y_array[matched_cat_inds.astype(np.int)]
        # cat_x = cat_x_array[matched_cat_inds.astype(np.int)]

        # plt.scatter(im_x, im_y, c='r', s=5)
        # plt.scatter(cat_x, cat_y, facecolors='none', edgecolor='g')

        # plt.xlim([0, platepar.X_res])
        # plt.ylim([platepar.Y_res, 0])

        # plt.show()



    # If residuals on the image should be computed
    if not sky_coords:

        unit_label = 'px'

        # Extract all distances
        global_dist_list = []
        # level_list = []
        # mag_list = []
        for jd in matched_stars:
            # matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]

            _, _, dist_list = matched_stars[jd]
            
            global_dist_list += dist_list.tolist()

            # # TEST
            # level_list += matched_img_stars[:, 3].tolist()
            # mag_list += matched_cat_stars[:, 2].tolist()



        # # Plot levels vs. magnitudes
        # plt.scatter(mag_list, np.log10(level_list))
        # plt.xlabel('Magnitude')
        # plt.ylabel('Log10 level')
        # plt.show()

    # Compute the residuals on the sky
    else:

        unit_label = 'arcmin'

        global_dist_list = []

        # Go through all matched stars
        for jd in matched_stars:

            matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]

            # Go through all stars on the image
            for img_star_entry, cat_star_entry in zip(matched_img_stars, matched_cat_stars):

                # Extract star coords
                star_y = img_star_entry[0]
                star_x = img_star_entry[1]
                cat_ra = cat_star_entry[0]
                cat_dec = cat_star_entry[1]

                # Convert image coordinates to RA/Dec
                _, star_ra, star_dec, _ = xyToRaDecPP([jd2Date(jd)], [star_x], [star_y], [1], \
                    platepar)

                # Compute angular distance between the predicted and the catalog position
                ang_dist = np.degrees(angularSeparation(np.radians(cat_ra), np.radians(cat_dec), \
                    np.radians(star_ra[0]), np.radians(star_dec[0])))

                # Store the angular separation in arc minutes
                global_dist_list.append(ang_dist*60)



    # Number of matched stars
    n_matched = len(global_dist_list)

    if n_matched == 0:

        if verbose:
            print('No matched stars with radius {:.2f} px!'.format(match_radius))
        
        if ret_nmatch:
            return 0, 9999.0, 9999.0, {}

        else:
            return 9999.0

    # Calculate the average distance
    avg_dist = np.mean(global_dist_list)

    cost = (avg_dist**2)*(1.0/np.sqrt(n_matched + 1))

    if verbose:

        print('Matched {:d} stars with radius of {:.2f} px'.format(n_matched, match_radius))
        print('Avg dist', avg_dist, unit_label)
        print('Cost:', cost)
        print('-----')


    if ret_nmatch:
        return n_matched, avg_dist, cost, matched_stars

    else:
        return cost
Example #13
def recalibrateIndividualFFsAndApplyAstrometry(dir_path, ftpdetectinfo_path, calstars_list, config, platepar):
    """ Recalibrate FF files with detections and apply the recalibrated platepar to those detections. 

    Arguments:
        dir_path: [str] Path where the FTPdetectinfo file is.
        ftpdetectinfo_path: [str] Name of the FTPdetectinfo file.
        calstars_list: [list] A list of entries [[ff_name, star_coordinates], ...].
        config: [Config instance]
        platepar: [Platepar instance] Initial platepar.

    Return:
        recalibrated_platepars: [dict] A dictionary where the keys are FF file names and values are 
            recalibrated platepar instances for every FF file.
    """


    # Read the FTPdetectinfo data
    cam_code, fps, meteor_list = FTPdetectinfo.readFTPdetectinfo(*os.path.split(ftpdetectinfo_path), \
        ret_input_format=True)

    # Convert the list of stars to a per FF name dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}


    # Load catalog stars (overwrite the mag band ratios if specific catalog is used)
    catalog_stars, _, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(config.star_catalog_path,\
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)



    prev_platepar = copy.deepcopy(platepar)

    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for meteor_entry in meteor_list:

        working_platepar = copy.deepcopy(prev_platepar)

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Skip this meteor if its FF file was already recalibrated
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print('------------------------------------------------------------------------------')

        # Find extracted stars on this image
        if ff_name not in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry; the residuals
        #   function needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        # Recalibrate the platepar using star matching
        result = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)

        
        # If the recalibration failed, try using FFT alignment
        if result is None:

            print()
            print('Running FFT alignment...')

            # Run FFT alignment
            calstars_coords = np.array(star_dict_ff[jd])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
            print(calstars_time)
            working_platepar = alignPlatepar(config, prev_platepar, calstars_time, calstars_coords, \
                show_plot=False)

            # Try to recalibrate after FFT alignment
            result = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)

            if result is not None:
                working_platepar = result


        else:
            working_platepar = result


        # Store the platepar if the fit succeeded
        if result is not None:
            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:

            print('Recalibration of {:s} failed, using the previous platepar...'.format(ff_name))

            # If the aligning failed, set the previous platepar as the one that should be used for this FF file
            recalibrated_platepars[ff_name] = prev_platepar


    ### Store all recalibrated platepars as a JSON file ###

    all_pps = {}
    for ff_name in recalibrated_platepars:

        json_str = recalibrated_platepars[ff_name].jsonStr()
        
        all_pps[ff_name] = json.loads(json_str)

    with open(os.path.join(dir_path, config.platepars_recalibrated_name), 'w') as f:
        
        # Convert all platepars to a JSON file
        out_str = json.dumps(all_pps, default=lambda o: o.__dict__, indent=4, sort_keys=True)

        f.write(out_str)

    ### ###



    # If no platepars were recalibrated, use the single platepar recalibration procedure
    if len(recalibrated_platepars) == 0:

        print('No FF images were used for recalibration, using the single platepar calibration function...')

        # Use the initial platepar for calibration
        applyAstrometryFTPdetectinfo(dir_path, os.path.basename(ftpdetectinfo_path), None, platepar=platepar)

        return recalibrated_platepars



    ### Plot difference from reference platepar in angular distance from (0, 0) vs rotation ###

    ang_dists = []
    rot_angles = []
    hour_list = []

    first_jd = np.min([FFfile.filenameToDatetime(ff_name) for ff_name in recalibrated_platepars])

    for ff_name in recalibrated_platepars:
        
        pp_temp = recalibrated_platepars[ff_name]

        # If the fitting failed, skip the platepar
        if pp_temp is None:
            continue

        # Compute the angular separation from the reference platepar
        ang_dist = np.degrees(angularSeparation(np.radians(platepar.RA_d), np.radians(platepar.dec_d), \
            np.radians(pp_temp.RA_d), np.radians(pp_temp.dec_d)))
        ang_dists.append(ang_dist*60)

        rot_angles.append((platepar.pos_angle_ref - pp_temp.pos_angle_ref)*60)

        # Compute the hour of the FF used for recalibration
        hour_list.append((FFfile.filenameToDatetime(ff_name) - first_jd).total_seconds()/3600)


    plt.figure()

    plt.scatter(0, 0, marker='o', edgecolor='k', label='Reference platepar', s=100, c='none', zorder=3)

    plt.scatter(ang_dists, rot_angles, c=hour_list, zorder=3)
    plt.colorbar(label='Hours from first FF file')
    
    plt.xlabel("Angular distance from reference (arcmin)")
    plt.ylabel('Rotation from reference (arcmin)')

    plt.grid()
    plt.legend()

    plt.tight_layout()

    # Generate the name for the plot
    calib_plot_name = os.path.basename(ftpdetectinfo_path).replace('FTPdetectinfo_', '').replace('.txt', '') \
        + '_calibration_variation.png'

    plt.savefig(os.path.join(dir_path, calib_plot_name), dpi=150)

    # plt.show()

    plt.clf()
    plt.close()

    ### ###



    ### Apply platepars to FTPdetectinfo ###

    meteor_output_list = []
    for meteor_entry in meteor_list:

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Get the platepar that will be applied to this FF file
        if ff_name in recalibrated_platepars:
            working_platepar = recalibrated_platepars[ff_name]

        else:
            print('Using default platepar for:', ff_name)
            working_platepar = platepar

        # Apply the recalibrated platepar to meteor centroids
        meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, working_platepar, \
            add_calstatus=True)

        meteor_output_list.append([ff_name, meteor_No, rho, phi, meteor_picks])


    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Recalibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0


    # Back up the old FTPdetectinfo file
    shutil.copy(ftpdetectinfo_path, ftpdetectinfo_path.replace('.txt', '') \
        + '_backup_{:s}.txt'.format(datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S.%f')))

    # Save the updated FTPdetectinfo
    FTPdetectinfo.writeFTPdetectinfo(meteor_output_list, dir_path, os.path.basename(ftpdetectinfo_path), \
        dir_path, cam_code, fps, calibration=calib_str, celestial_coords_given=True)


    ### ###

    return recalibrated_platepars
Example #14
def updateAzAltGrid(grid, platepar):
    """
    Updates the values of grid to form an azimuth and altitude grid on a pyqtgraph plot.

    Arguments:
        grid: [pg.PlotCurveItem]
        platepar: [Platepar object]

    """

    ### COMPUTE FOV CENTRE ###

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(platepar.JD)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
                                    platepar, extinction_correction=False)

    # Compute alt/az of FOV centre
    azim_centre, alt_centre = trueRaDec2ApparentAltAz(RA_c[0], dec_c[0], platepar.JD, platepar.lat, \
        platepar.lon)

    ### ###

    # Compute FOV size
    fov_radius = getFOVSelectionRadius(platepar)

    # Determine the gridline frequency: the nearest lower power of 10 of twice the FOV radius, halved
    #   if its leading digit is below 4 (which doubles the number of gridlines)
    grid_freq = 10**np.floor(np.log10(2 * fov_radius))
    if 10**(np.log10(2 * fov_radius) - np.floor(np.log10(2 * fov_radius))) < 4:
        grid_freq /= 2

    # Set a maximum grid frequency of 15 deg
    if grid_freq > 15:
        grid_freq = 15
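
    # Worked example (illustrative numbers): for fov_radius = 15 deg, 2*15 = 30 gives
    #   grid_freq = 10**1 = 10 and a leading digit of 3 (< 4), so the spacing is halved to 5 deg;
    #   for fov_radius = 25, 2*25 = 50 keeps the full 10 deg spacing.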

    # Grid plot density
    plot_dens = grid_freq / 100

    # Generate a grid of all azimuths and altitudes
    alt_grid_arr = np.arange(0, 90, grid_freq)
    az_grid_arr = np.arange(0, 360, grid_freq)

    x = []
    y = []
    cuts = []

    # Altitude lines
    for alt_grid in alt_grid_arr:

        # Keep the altitude fixed and plot all azimuth lines
        az_grid_plot = np.arange(0, 360, plot_dens)
        alt_grid_plot = np.zeros_like(az_grid_plot) + alt_grid

        # Filter out all lines outside the FOV
        filter_arr = np.degrees(angularSeparation(np.radians(azim_centre), np.radians(alt_centre), \
            np.radians(az_grid_plot), np.radians(alt_grid_plot))) <= fov_radius

        az_grid_plot = az_grid_plot[filter_arr]
        alt_grid_plot = alt_grid_plot[filter_arr]

        # Compute image coordinates
        ra_grid_plot, dec_grid_plot = apparentAltAz2TrueRADec(az_grid_plot, alt_grid_plot, platepar.JD, \
            platepar.lat, platepar.lon, platepar.refraction)
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD,
                                     platepar)

        # Filter out all points outside the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (
            y_grid >= 0) & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        x.extend(x_grid)
        y.extend(y_grid)
        cuts.append(len(x) - 1)

    # Azimuth lines
    for az_grid in az_grid_arr:

        # Keep the azimuth fixed and plot all altitude lines
        alt_grid_plot = np.arange(0, 90 + plot_dens, plot_dens)
        az_grid_plot = np.zeros_like(alt_grid_plot) + az_grid

        # Filter out all lines outside the FOV
        filter_arr = np.degrees(angularSeparation(np.radians(azim_centre), np.radians(alt_centre), \
            np.radians(az_grid_plot), np.radians(alt_grid_plot))) <= fov_radius

        az_grid_plot = az_grid_plot[filter_arr]
        alt_grid_plot = alt_grid_plot[filter_arr]

        # Compute image coordinates
        ra_grid_plot, dec_grid_plot = apparentAltAz2TrueRADec(az_grid_plot, alt_grid_plot, platepar.JD, \
            platepar.lat, platepar.lon, platepar.refraction)
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD,
                                     platepar)

        # Filter out all points outside the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (
            y_grid >= 0) & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        x.extend(x_grid)
        y.extend(y_grid)
        cuts.append(len(x) - 1)

    # Disconnect points that are distant, so unfinished circles are not closed with straight lines
    #   (adjust r if extraneous lines appear)
    r = 15
    for i in range(len(x) - 1):
        if (x[i] - x[i + 1])**2 + (y[i] - y[i + 1])**2 > r**2:
            cuts.append(i)

    connect = np.full(len(x), 1)
    for i in cuts[:-1]:
        connect[i] = 0
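
    # pyqtgraph treats connect as a per-point flag: connect[i] = 0 suppresses the segment between
    #   point i and point i + 1, so each gridline (and each distant jump) ends cleanly.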

    grid.setData(x=x, y=y, connect=connect)
Example #15
def updateRaDecGrid(grid, platepar):
    """
    Updates the values of grid to form a right ascension and declination grid on a pyqtgraph plot.

    Arguments:
        grid: [pg.PlotCurveItem]
        platepar: [Platepar object]

    """

    ### COMPUTE FOV CENTRE ###

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(platepar.JD)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
                                    platepar, extinction_correction=False)

    # Compute alt/az of FOV centre
    azim_centre, alt_centre = trueRaDec2ApparentAltAz(RA_c[0], dec_c[0], platepar.JD, platepar.lat, \
        platepar.lon)

    ### ###

    # Compute FOV size
    fov_radius = getFOVSelectionRadius(platepar)

    # Determine the gridline frequency: the nearest lower power of 10 of twice the FOV radius, halved
    #   if its leading digit is below 4 (which doubles the number of gridlines)
    grid_freq = 10**np.floor(np.log10(2 * fov_radius))
    if 10**(np.log10(2 * fov_radius) - np.floor(np.log10(2 * fov_radius))) < 4:
        grid_freq /= 2

    # Set a maximum grid frequency of 15 deg
    if grid_freq > 15:
        grid_freq = 15

    # Grid plot density
    plot_dens = grid_freq / 100

    # Make an array of RA and Dec
    ra_grid_arr = np.arange(0, 360, grid_freq)
    dec_grid_arr = np.arange(-90, 90, grid_freq)

    x = []
    y = []
    cuts = []

    # Generate points for the celestial parallels grid
    for dec_grid in dec_grid_arr:

        # Keep the declination fixed and evaluate all right ascensions
        ra_grid_plot = np.arange(0, 360, plot_dens)
        dec_grid_plot = np.zeros_like(ra_grid_plot) + dec_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = trueRaDec2ApparentAltAz(ra_grid_plot, dec_grid_plot, platepar.JD, \
            platepar.lat, platepar.lon, platepar.refraction)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot >= 0) & (np.degrees(angularSeparation(np.radians(azim_centre), \
            np.radians(alt_centre), np.radians(az_grid_plot), np.radians(alt_grid_plot))) <= fov_radius)

        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial parallel
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD,
                                     platepar)

        # Filter out all points outside the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (
            y_grid >= 0) & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        # Add points to the list
        x.extend(x_grid)
        y.extend(y_grid)
        cuts.append(len(x) - 1)

    # Generate points for the celestial meridian grid
    for ra_grid in ra_grid_arr:

        # Keep the RA fixed and evaluate all declinations
        dec_grid_plot = np.arange(-90, 90 + plot_dens, plot_dens)
        ra_grid_plot = np.zeros_like(dec_grid_plot) + ra_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = trueRaDec2ApparentAltAz(ra_grid_plot, dec_grid_plot, platepar.JD, \
            platepar.lat, platepar.lon, platepar.refraction)

        # Filter out points below the horizon and outside the FOV
        filter_arr = (alt_grid_plot >= 0) & (np.degrees(angularSeparation(np.radians(azim_centre), \
            np.radians(alt_centre), np.radians(az_grid_plot), np.radians(alt_grid_plot))) <= fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial meridian
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, platepar.JD,
                                     platepar)

        # Filter out points outside the image
        filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (
            y_grid >= 0) & (y_grid <= platepar.Y_res)
        x_grid = x_grid[filter_arr]
        y_grid = y_grid[filter_arr]

        x.extend(x_grid)
        y.extend(y_grid)
        cuts.append(len(x) - 1)

    # Generate points for the horizon
    az_horiz_arr = np.arange(0, 360, plot_dens)
    alt_horiz_arr = np.zeros_like(az_horiz_arr)
    ra_horiz_plot, dec_horiz_plot = apparentAltAz2TrueRADec(az_horiz_arr, alt_horiz_arr, platepar.JD, \
        platepar.lat, platepar.lon, platepar.refraction)

    # Filter out all horizon points outside the FOV
    filter_arr = np.degrees(angularSeparation(np.radians(azim_centre), np.radians(alt_centre), \
        np.radians(az_horiz_arr), np.radians(alt_horiz_arr))) <= fov_radius

    ra_horiz_plot = ra_horiz_plot[filter_arr]
    dec_horiz_plot = dec_horiz_plot[filter_arr]

    # Compute image coordinates of the horizon
    x_horiz, y_horiz = raDecToXYPP(ra_horiz_plot, dec_horiz_plot, platepar.JD,
                                   platepar)

    # Filter out all horizon points outside the image
    filter_arr = (x_horiz >= 0) & (x_horiz <= platepar.X_res) & (
        y_horiz >= 0) & (y_horiz <= platepar.Y_res)
    x_horiz = x_horiz[filter_arr]
    y_horiz = y_horiz[filter_arr]

    x.extend(x_horiz)
    y.extend(y_horiz)
    cuts.append(len(x) - 1)

    # Disconnect points that are distant, so unfinished circles are not closed with straight lines
    #   (adjust r if extraneous lines appear)
    r = 15
    for i in range(len(x) - 1):
        if (x[i] - x[i + 1])**2 + (y[i] - y[i + 1])**2 > r**2:
            cuts.append(i)

    # Convert cuts into the connect array (connect[i] = 0 breaks the line after point i)
    connect = np.full(len(x), 1)
    if len(connect) > 0:
        for i in cuts:
            connect[i] = 0

    grid.setData(x=x, y=y, connect=connect)
Example #16
File: TrackStack.py Project: tammojan/RMS
def trackStack(dir_path,
               config,
               border=5,
               background_compensation=True,
               hide_plot=False):
    """ Generate a stack with aligned stars, so the sky appears static. The folder should have a
        platepars_all_recalibrated.json file.

    Arguments:
        dir_path: [str] Path to the directory with image files.
        config: [Config instance]

    Keyword arguments:
        border: [int] Border around the image to exclude (px).
        background_compensation: [bool] Normalize the background by applying a median filter to avepixel and
            use it as a flat field. Slows down the procedure and may sometimes introduce artifacts. True
            by default.
        hide_plot: [bool] If True, don't show the stack plot window. False by default.
    """

    ### Load recalibrated platepars, if they exist ###

    # Find recalibrated platepars file per FF file
    platepars_recalibrated_file = None
    for file_name in os.listdir(dir_path):
        if file_name == config.platepars_recalibrated_name:
            platepars_recalibrated_file = file_name
            break

    # Load all recalibrated platepars if the file is available
    recalibrated_platepars = None
    if platepars_recalibrated_file is not None:
        with open(os.path.join(dir_path, platepars_recalibrated_file)) as f:
            recalibrated_platepars = json.load(f)
            print(
                'Loaded recalibrated platepars JSON file for the calibration report...'
            )

    ### ###

    # If the recalibrated platepars file is not found, stop
    if recalibrated_platepars is None:
        print("The {:s} file was not found!".format(
            config.platepars_recalibrated_name))
        return False

    # Get a list of FF files in the folder
    ff_list = []
    for file_name in os.listdir(dir_path):
        if validFFName(file_name):
            ff_list.append(file_name)

    # Take the platepar with the middle time as the reference one
    ff_found_list = []
    jd_list = []
    for ff_name_temp in recalibrated_platepars:

        if ff_name_temp in ff_list:

            # Compute the Julian date of the FF middle
            dt = getMiddleTimeFF(ff_name_temp,
                                 config.fps,
                                 ret_milliseconds=True)
            jd = date2JD(*dt)

            jd_list.append(jd)
            ff_found_list.append(ff_name_temp)

    if len(jd_list) < 2:
        print("Fewer than 2 FF images found, cannot stack!")
        return False

    # Take the FF file with the middle JD
    jd_list = np.array(jd_list)
    jd_middle = np.mean(jd_list)
    jd_mean_index = np.argmin(np.abs(jd_list - jd_middle))
    ff_mid = ff_found_list[jd_mean_index]

    # Load the middle platepar as the reference one
    pp_ref = Platepar()
    pp_ref.loadFromDict(recalibrated_platepars[ff_mid],
                        use_flat=config.use_flat)

    # Try loading the mask
    mask_path = None
    if os.path.exists(os.path.join(dir_path, config.mask_file)):
        mask_path = os.path.join(dir_path, config.mask_file)

    # Try loading the default mask
    elif os.path.exists(config.mask_file):
        mask_path = os.path.abspath(config.mask_file)

    # Load the mask if given
    mask = None
    if mask_path is not None:
        mask = loadMask(mask_path)
        print("Loaded mask:", mask_path)

    # If the shape of the mask doesn't match the platepar resolution, discard it and use a blank mask
    if mask is not None:
        if (mask.img.shape[0] != pp_ref.Y_res) or (mask.img.shape[1] !=
                                                   pp_ref.X_res):
            print("Mask is of wrong shape!")
            mask = None

    if mask is None:
        mask = MaskStructure(255 + np.zeros(
            (pp_ref.Y_res, pp_ref.X_res), dtype=np.uint8))

    # Compute the middle RA/Dec of the reference platepar
    _, ra_temp, dec_temp, _ = xyToRaDecPP([jd2Date(jd_middle)],
                                          [pp_ref.X_res / 2],
                                          [pp_ref.Y_res / 2], [1],
                                          pp_ref,
                                          extinction_correction=False)

    ra_mid, dec_mid = ra_temp[0], dec_temp[0]

    ### Go through all FF files and find RA/Dec of image corners to find the size of the stack image ###

    # List of corners
    x_corns = [0, pp_ref.X_res, 0, pp_ref.X_res]
    y_corns = [0, 0, pp_ref.Y_res, pp_ref.Y_res]

    ra_list = []
    dec_list = []

    for ff_temp in ff_found_list:

        # Load the recalibrated platepar
        pp_temp = Platepar()
        pp_temp.loadFromDict(recalibrated_platepars[ff_temp],
                             use_flat=config.use_flat)

        for x_c, y_c in zip(x_corns, y_corns):

            # Use the recalibrated platepar (pp_temp) loaded above for this FF file
            _, ra_temp, dec_temp, _ = xyToRaDecPP(
                [getMiddleTimeFF(ff_temp, config.fps, ret_milliseconds=True)],
                [x_c], [y_c], [1],
                pp_temp,
                extinction_correction=False)
            ra_c, dec_c = ra_temp[0], dec_temp[0]

            ra_list.append(ra_c)
            dec_list.append(dec_c)

    # Compute the angular separation from the middle equatorial coordinates of the reference image to all
    #   RA/Dec corner coordinates
    ang_sep_list = []
    for ra_c, dec_c in zip(ra_list, dec_list):
        ang_sep = np.degrees(
            angularSeparation(np.radians(ra_mid), np.radians(dec_mid),
                              np.radians(ra_c), np.radians(dec_c)))

        ang_sep_list.append(ang_sep)

    # Find the maximum angular separation and compute the image size using the plate scale
    #   The stack is rendered at 1/2 the original resolution, so the pixel-to-pixel
    #   mapping leaves no gaps without needing interpolation
    scale = 0.5
    ang_sep_max = np.max(ang_sep_list)
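    # F_scale is the plate scale in px/deg, so twice the maximum angular
    #   separation (the full extent across the stack) maps to
    #   2*ang_sep_max*F_scale px, scaled down by the resampling factor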
    img_size = int(scale * 2 * ang_sep_max * pp_ref.F_scale)


    # Create the stack platepar with no distortion and a large image size
    pp_stack = copy.deepcopy(pp_ref)
    pp_stack.resetDistortionParameters()
    pp_stack.X_res = img_size
    pp_stack.Y_res = img_size
    pp_stack.F_scale *= scale
    pp_stack.refraction = False
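    # Refraction is left disabled on the synthetic stack platepar, whose sky
    #   coordinates only serve as an intermediate frame between the images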

    # Init the stack buffers: the avepixel running sum and count, and the
    #   running max of the deaveraged maxpixels
    avg_stack_sum = np.zeros((img_size, img_size), dtype=float)
    avg_stack_count = np.zeros((img_size, img_size), dtype=int)
    max_deaveraged = np.zeros((img_size, img_size), dtype=np.uint8)

    # Load individual FFs and map them to the stack
    for i, ff_name in enumerate(ff_found_list):

        print("Stacking {:s}, {:.1f}% done".format(
            ff_name, 100 * i / len(ff_found_list)))

        # Read the FF file
        ff = readFF(dir_path, ff_name)

        # Load the recalibrated platepar
        pp_temp = Platepar()
        pp_temp.loadFromDict(recalibrated_platepars[ff_name],
                             use_flat=config.use_flat)

        # Make a list of X and Y image coordinates
        x_coords, y_coords = np.meshgrid(
            np.arange(border, pp_ref.X_res - border),
            np.arange(border, pp_ref.Y_res - border))
        x_coords = x_coords.ravel()
        y_coords = y_coords.ravel()
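        # (the border parameter excludes pixels near the image edges from the
        #   mapping)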

        # Map image pixels to sky
        jd_arr, ra_coords, dec_coords, _ = xyToRaDecPP(
            len(x_coords) * [getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)],
            x_coords,
            y_coords,
            len(x_coords) * [1],
            pp_temp,
            extinction_correction=False)

        # Map sky coordinates to stack image coordinates
        stack_x, stack_y = raDecToXYPP(ra_coords, dec_coords, jd_middle,
                                       pp_stack)

        # Round pixel coordinates
        stack_x = np.round(stack_x, decimals=0).astype(int)
        stack_y = np.round(stack_y, decimals=0).astype(int)

        # Keep only the pixels that fall inside the stack image
        filter_arr = (stack_x > 0) & (stack_x < img_size) & (stack_y > 0) & (
            stack_y < img_size)
        x_coords = x_coords[filter_arr].astype(int)
        y_coords = y_coords[filter_arr].astype(int)
        stack_x = stack_x[filter_arr]
        stack_y = stack_y[filter_arr]

        # Apply the mask to maxpixel and avepixel
        maxpixel = copy.deepcopy(ff.maxpixel)
        maxpixel[mask.img == 0] = 0
        avepixel = copy.deepcopy(ff.avepixel)
        avepixel[mask.img == 0] = 0

        # Compute deaveraged maxpixel
        max_deavg = maxpixel - avepixel
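        # (maxpixel >= avepixel by construction of FF files, so the uint8
        #   subtraction cannot underflow)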

        # Normalize the background brightness by applying a large-kernel median filter to avepixel
        if background_compensation:

            # Apply a large-kernel median filter to the avepixel to estimate
            #   the background brightness (cv2.medianBlur is used instead of
            #   scipy.ndimage.median_filter for speed)
            avepixel_median = cv2.medianBlur(ff.avepixel, 301)

            # Make sure to avoid zero division
            avepixel_median[avepixel_median < 1] = 1

            # Normalize the avepixel by subtracting out the background brightness
            avepixel = avepixel.astype(float)
            avepixel /= avepixel_median
            avepixel *= 50  # Normalize to a good background value, which is usually 50
            avepixel = np.clip(avepixel, 0, 255)
            avepixel = avepixel.astype(np.uint8)


        # Add the average pixel to the sum
        avg_stack_sum[stack_y, stack_x] += avepixel[y_coords, x_coords]
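        # Note: fancy-indexed += keeps only one contribution when several
        #   source pixels map to the same stack pixel; np.add.at would
        #   accumulate all of them, at a speed cost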

        # Increment the counter image where the avepixel is not zero
        ones_img = np.ones_like(avepixel)
        ones_img[avepixel == 0] = 0
        avg_stack_count[stack_y, stack_x] += ones_img[y_coords, x_coords]

        # Set pixel values to the stack, only take the max values
        max_deaveraged[stack_y, stack_x] = np.max(np.dstack(
            [max_deaveraged[stack_y, stack_x], max_deavg[y_coords, x_coords]]),
                                                  axis=2)
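        # (this is an element-wise maximum of the current stack values and the
        #   new deaveraged values, i.e. np.maximum of the two arrays)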

    # Average the summed avepixels and add the deaveraged maxpixel on top
    stack_img = avg_stack_sum
    stack_img[avg_stack_count > 0] /= avg_stack_count[avg_stack_count > 0]
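    # (stack pixels where no FF contributed keep their initial value of 0)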
    stack_img += max_deaveraged
    stack_img = np.clip(stack_img, 0, 255)
    stack_img = stack_img.astype(np.uint8)

    # Crop image
    non_empty_columns = np.where(stack_img.max(axis=0) > 0)[0]
    non_empty_rows = np.where(stack_img.max(axis=1) > 0)[0]
    crop_box = (np.min(non_empty_rows), np.max(non_empty_rows),
                np.min(non_empty_columns), np.max(non_empty_columns))
    stack_img = stack_img[crop_box[0]:crop_box[1] + 1,
                          crop_box[2]:crop_box[3] + 1]

    # Plot and save the stack

    dpi = 200
    plt.figure(figsize=(stack_img.shape[1] / dpi, stack_img.shape[0] / dpi),
               dpi=dpi)

    plt.imshow(stack_img,
               cmap='gray',
               vmin=0,
               vmax=255,
               interpolation='nearest')

    plt.axis('off')
    plt.gca().get_xaxis().set_visible(False)
    plt.gca().get_yaxis().set_visible(False)

    plt.xlim([0, stack_img.shape[1]])
    plt.ylim([stack_img.shape[0], 0])

    # Remove the margins (top and right are set to 0.9999, as setting them to 1.0 makes the image blank in
    #   some matplotlib versions)
    plt.subplots_adjust(left=0,
                        bottom=0,
                        right=0.9999,
                        top=0.9999,
                        wspace=0,
                        hspace=0)

    filename = os.path.join(dir_path,
                            os.path.basename(dir_path) + "_track_stack.jpg")
    plt.savefig(filename, bbox_inches='tight', pad_inches=0, dpi=dpi)


    if not hide_plot:
        plt.show()