def fitGC(self):
    """ Fit a great circle through the observed (RA, Dec) points.

    Populates attributes on the object: the Cartesian unit vectors of all observations,
    the begin/end pointing vectors and their horizontal coordinates, the fitted great
    circle normal (and its RA/Dec), the begin-end angular separation, the meteor
    duration, the reference Julian date and the solar longitude at the beginning.
    """

    # Make sure the equatorial coordinates are numpy arrays
    self.ra_array = np.array(self.ra_array)
    self.dec_array = np.array(self.dec_array)

    # Convert every (RA, Dec) observation to a unit vector on the celestial sphere
    self.cartesian_points = np.array([vectNorm(raDec2Vector(ra, dec)) \
        for ra, dec in zip(self.ra_array, self.dec_array)])

    # Set begin and end pointing vectors
    self.beg_vect = self.cartesian_points[0]
    self.end_vect = self.cartesian_points[-1]

    # Compute the horizontal coordinates of the first and the last observed point
    self.beg_azim, self.beg_alt = raDec2AltAz(self.ra_array[0], self.dec_array[0], self.jd_array[0], \
        self.lat, self.lon)
    self.end_azim, self.end_alt = raDec2AltAz(self.ra_array[-1], self.dec_array[-1], self.jd_array[-1], \
        self.lat, self.lon)

    # Fit a great circle through the observed unit vectors
    x_arr, y_arr, z_arr = self.cartesian_points.T
    coeffs, self.theta0, self.phi0 = fitGreatCircle(x_arr, y_arr, z_arr)

    # The normal of the fitted plane, normed to unit length
    self.normal = vectNorm(np.array([coeffs[0], coeffs[1], -1.0]))

    # Equatorial coordinates of the normal direction
    self.normal_ra, self.normal_dec = vector2RaDec(self.normal)

    # Pointing directions of the beginning and the end of the meteor
    self.meteor_begin_cartesian = vectNorm(self.cartesian_points[0])
    self.meteor_end_cartesian = vectNorm(self.cartesian_points[-1])

    # Angular separation between the begin and the end point (radians)
    self.ang_be = angularSeparationVect(self.beg_vect, self.end_vect)

    # Meteor duration (seconds)
    self.duration = (self.jd_array[-1] - self.jd_array[0])*86400.0

    # Use the Julian date of the first point as the reference time
    self.jdt_ref = self.jd_array[0]

    # Solar longitude at the reference time (degrees)
    self.lasun = np.degrees(jd2SolLonSteyaert(self.jdt_ref))
def extinctionCorrectionApparentToTrue(mags, x_data, y_data, jd, platepar):
    """ Compute true magnitudes by applying extinction correction to apparent magnitudes.

    Arguments:
        mags: [list] A list of apparent magnitudes.
        x_data: [list] A list of pixel columns.
        y_data: [list] A list of pixel rows.
        jd: [float] Julian date.
        platepar: [Platepar object]

    Return:
        corrected_mags: [ndarray] A list of extinction corrected magnitudes.
    """

    ### Compute star elevations above the horizon (epoch of date, true) ###

    # Compute RA/Dec in J2000 for every given image position
    _, ra_data, dec_data, _ = xyToRaDecPP([jd2Date(jd)]*len(x_data), x_data, y_data, [1]*len(x_data), \
        platepar, extinction_correction=False)

    elevation_data = []
    for ra_j2000, dec_j2000 in zip(ra_data, dec_data):

        # Precess the coordinates from J2000 to the epoch of date
        ra_date, dec_date = equatorialCoordPrecession(J2000_JD.days, jd, np.radians(ra_j2000), \
            np.radians(dec_j2000))

        # Compute the elevation above the horizon, clipping negative elevations to the horizon
        _, elev = raDec2AltAz(np.degrees(ra_date), np.degrees(dec_date), jd, platepar.lat, platepar.lon)
        elevation_data.append(max(elev, 0))

    ### ###

    # Compute the extinction relative to the zenith
    extinction_correction = atmosphericExtinctionCorrection(np.array(elevation_data), platepar.elev) \
        - atmosphericExtinctionCorrection(90, platepar.elev)

    # Apparent magnitudes become brighter (smaller) once the extinction is removed
    return np.array(mags) - platepar.extinction_scale*extinction_correction
def extinctionCorrectionTrueToApparent(catalog_mags, ra_data, dec_data, jd, platepar):
    """ Compute apparent magnitudes by applying extinction correction to catalog magnitudes.

    Arguments:
        catalog_mags: [list] A list of catalog magnitudes.
        ra_data: [list] A list of catalog right ascensions (J2000) in degrees.
        dec_data: [list] A list of catalog declinations (J2000) in degrees.
        jd: [float] Julian date.
        platepar: [Platepar object]

    Return:
        corrected_catalog_mags: [ndarray] Extinction corrected catalog magnitudes.
    """

    ### Compute star elevations above the horizon (epoch of date, true) ###

    elevation_data = []
    for ra_j2000, dec_j2000 in zip(ra_data, dec_data):

        # Precess the coordinates from J2000 to the epoch of date
        ra_date, dec_date = equatorialCoordPrecession(J2000_JD.days, jd, np.radians(ra_j2000), \
            np.radians(dec_j2000))

        # Compute the elevation above the horizon, clipping negative elevations to the horizon
        _, elev = raDec2AltAz(np.degrees(ra_date), np.degrees(dec_date), jd, platepar.lat, platepar.lon)
        elevation_data.append(max(elev, 0))

    ### ###

    # Compute the extinction relative to the zenith
    extinction_correction = atmosphericExtinctionCorrection(np.array(elevation_data), platepar.elev) \
        - atmosphericExtinctionCorrection(90, platepar.elev)

    # Catalog magnitudes become fainter (larger) once the extinction is applied
    return np.array(catalog_mags) + platepar.extinction_scale*extinction_correction
def computeFlux(config, dir_path, ftpdetectinfo_path, shower_code, dt_beg, dt_end, timebin, mass_index, \
    timebin_intdt=0.25, ht_std_percent=5.0, mask=None):
    """ Compute flux using measurements in the given FTPdetectinfo file.

    Arguments:
        config: [Config instance]
        dir_path: [str] Path to the working directory.
        ftpdetectinfo_path: [str] Path to a FTPdetectinfo file.
        shower_code: [str] IAU shower code (e.g. ETA, PER, SDA).
        dt_beg: [Datetime] Datetime object of the observation beginning.
        dt_end: [Datetime] Datetime object of the observation end.
        timebin: [float] Time bin in hours.
        mass_index: [float] Cumulative mass index of the shower.

    Keyword arguments:
        timebin_intdt: [float] Time step for computing the integrated collection area in hours. 15 minutes
            by default. If smaller than that, only one collection are will be computed.
            NOTE(review): currently unused in this implementation.
        ht_std_percent: [float] Meteor height standard deviation in percent.
        mask: [Mask object] Mask object, None by default. Overwritten by the mask file from the night
            directory if one is present.

    Return:
        None. Results are printed to stdout and a cumulative histogram of peak magnitudes is shown.
    """

    # Get a list of files in the night folder
    file_list = sorted(os.listdir(dir_path))

    # Find and load the platepar file
    if config.platepar_name in file_list:

        # Load the platepar
        platepar = Platepar.Platepar()
        platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat)

    else:
        print("Cannot find the platepar file in the night directory: ", config.platepar_name)
        return None

    # Load meteor data from the FTPdetectinfo file
    meteor_data = readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    if not len(meteor_data):
        print("No meteors in the FTPdetectinfo file!")
        return None

    # Find and load recalibrated platepars
    if config.platepars_recalibrated_name in file_list:
        with open(os.path.join(dir_path, config.platepars_recalibrated_name)) as f:
            recalibrated_platepars_dict = json.load(f)
            print("Recalibrated platepars loaded!")

    # If the file is not available, apply the recalibration procedure
    else:
        recalibrated_platepars_dict = applyRecalibrate(ftpdetectinfo_path, config)
        print("Recalibrated platepar file not available!")
        print("Recalibrating...")

    # Convert the dictionary of recalibrated platepars to a dictionary of Platepar objects
    recalibrated_platepars = {}
    for ff_name in recalibrated_platepars_dict:
        pp = Platepar.Platepar()
        pp.loadFromDict(recalibrated_platepars_dict[ff_name], use_flat=config.use_flat)
        recalibrated_platepars[ff_name] = pp

    # Compute nightly mean of the photometric zero point
    mag_lev_nightly_mean = np.mean([recalibrated_platepars[ff_name].mag_lev \
        for ff_name in recalibrated_platepars])

    # Locate and load the mask file (overrides the mask given as an argument)
    if config.mask_file in file_list:
        mask_path = os.path.join(dir_path, config.mask_file)
        mask = loadMask(mask_path)
        print("Using mask:", mask_path)
    else:
        print("No mask used!")
        mask = None

    # Compute the population index using the classical equation
    population_index = 10**((mass_index - 1)/2.5)

    ### SENSOR CHARACTERIZATION ###
    # Computes FWHM of stars and noise profile of the sensor

    # File which stores the sensor characterization profile
    sensor_characterization_file = "flux_sensor_characterization.json"
    sensor_characterization_path = os.path.join(dir_path, sensor_characterization_file)

    # Load sensor characterization file if present, so the procedure can be skipped
    if os.path.isfile(sensor_characterization_path):

        # Load the JSON file
        with open(sensor_characterization_path) as f:
            data = " ".join(f.readlines())
            sensor_data = json.loads(data)

            # Remove the info entry
            if '-1' in sensor_data:
                del sensor_data['-1']

    else:
        # Run sensor characterization
        sensor_data = sensorCharacterization(config, dir_path)

        # Save to file for posterior use
        with open(sensor_characterization_path, 'w') as f:

            # Add an explanation what each entry means
            sensor_data_save = dict(sensor_data)
            sensor_data_save['-1'] = {"FF file name": ['median star FWHM', 'median background noise stddev']}

            # Convert collection areas to JSON
            out_str = json.dumps(sensor_data_save, indent=4, sort_keys=True)

            # Save to disk
            f.write(out_str)

    # Compute the nightly mean FWHM and noise stddev
    fwhm_nightly_mean = np.mean([sensor_data[key][0] for key in sensor_data])
    stddev_nightly_mean = np.mean([sensor_data[key][1] for key in sensor_data])

    ### ###

    # Perform shower association
    associations, shower_counts = showerAssociation(config, [ftpdetectinfo_path], shower_code=shower_code, \
        show_plot=False, save_plot=False, plot_activity=False)

    # If there are no shower association, return nothing
    if not associations:
        print("No meteors associated with the shower!")
        return None

    # Print the list of used meteors and keep a reference to the associated shower object.
    # BUGFIX: the original code relied on the leftover loop variable "shower" after this loop, which
    #   could be None if the last association in the dictionary had no shower match
    peak_mags = []
    associated_shower = None
    for key in associations:
        meteor, shower = associations[key]

        if shower is not None:

            # Keep a non-None shower object for the radiant computation below
            associated_shower = shower

            # Compute peak magnitude
            peak_mag = np.min(meteor.mag_array)
            peak_mags.append(peak_mag)

            print("{:.6f}, {:3s}, {:+.2f}".format(meteor.jdt_ref, shower.name, peak_mag))

    print()

    # If no association had a shower match, there is nothing to compute
    if associated_shower is None:
        print("No meteors associated with the shower!")
        return None

    # Init the flux configuration
    flux_config = FluxConfig()

    ### COMPUTE COLLECTION AREAS ###

    # Make a file name to save the raw collection areas
    col_areas_file_name = generateColAreaJSONFileName(platepar.station_code, flux_config.side_points, \
        flux_config.ht_min, flux_config.ht_max, flux_config.dht, flux_config.elev_limit)

    # Check if the collection area file exists. If yes, load the data. If not, generate collection areas
    if col_areas_file_name in os.listdir(dir_path):
        col_areas_ht = loadRawCollectionAreas(dir_path, col_areas_file_name)
        print("Loaded collection areas from:", col_areas_file_name)

    else:

        # Compute the collecting areas segments per height
        col_areas_ht = collectingArea(platepar, mask=mask, side_points=flux_config.side_points, \
            ht_min=flux_config.ht_min, ht_max=flux_config.ht_max, dht=flux_config.dht, \
            elev_limit=flux_config.elev_limit)

        # Save the collection areas to file
        saveRawCollectionAreas(dir_path, col_areas_file_name, col_areas_ht)

        print("Saved raw collection areas to:", col_areas_file_name)

    ### ###

    # Compute the pointing of the middle of the FOV
    _, ra_mid, dec_mid, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], [platepar.X_res/2], [platepar.Y_res/2], \
        [1], platepar, extinction_correction=False)
    azim_mid, elev_mid = raDec2AltAz(ra_mid[0], dec_mid[0], J2000_JD.days, platepar.lat, platepar.lon)

    # Compute the range to the middle point (at the 100 km reference height)
    ref_ht = 100000
    r_mid, _, _, _ = xyHt2Geo(platepar, platepar.X_res/2, platepar.Y_res/2, ref_ht, indicate_limit=True, \
        elev_limit=flux_config.elev_limit)

    ### Compute the average angular velocity to which the flux variation throught the night will be
    #   normalized. The ang vel is of the middle of the FOV in the middle of observations

    # Middle Julian date of the night
    jd_night_mid = (datetime2JD(dt_beg) + datetime2JD(dt_end))/2

    # Compute the apparent radiant
    ra, dec, v_init = associated_shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_night_mid)

    # Compute the radiant elevation
    radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_night_mid, platepar.lat, platepar.lon)

    # Compute the angular velocity in the middle of the FOV
    rad_dist_night_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
        np.radians(azim_mid), np.radians(elev_mid))
    ang_vel_night_mid = v_init*np.sin(rad_dist_night_mid)/r_mid

    ###

    # Compute the average limiting magnitude to which all flux will be normalized

    # Standard deviation of star PSF, nightly mean (px)
    star_stddev = fwhm_nightly_mean/2.355

    # Compute the theoretical stellar limiting magnitude (nightly average)
    star_sum = 2*np.pi*(config.k1_det*stddev_nightly_mean + config.j1_det)*star_stddev**2
    lm_s_nightly_mean = -2.5*np.log10(star_sum) + mag_lev_nightly_mean

    # A meteor needs to be visible on at least 4 frames, thus it needs to have at least 4x the mass to produce
    # that amount of light. 1 magnitude difference scales as -0.4 of log of mass, thus:
    frame_min_loss = np.log10(config.line_minimum_frame_range_det)/(-0.4)
    lm_s_nightly_mean += frame_min_loss

    # Compute apparent meteor magnitude
    lm_m_nightly_mean = lm_s_nightly_mean - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
        np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_night_mid)/(config.fps*r_mid*fwhm_nightly_mean)) \
        )

    # print("Stellar lim mag using detection thresholds:", lm_s_nightly_mean)
    print("Apparent meteor limiting magnitude:", lm_m_nightly_mean)

    ### Apply time-dependent corrections ###

    sol_data = []
    flux_lm_6_5_data = []

    # Go through all time bins within the observation period
    total_time_hrs = (dt_end - dt_beg).total_seconds()/3600
    nbins = int(np.ceil(total_time_hrs/timebin))
    for t_bin in range(nbins):

        # Compute bin start and end time
        bin_dt_beg = dt_beg + datetime.timedelta(hours=timebin*t_bin)
        bin_dt_end = bin_dt_beg + datetime.timedelta(hours=timebin)

        if bin_dt_end > dt_end:
            bin_dt_end = dt_end

        # Compute bin duration in hours
        bin_hours = (bin_dt_end - bin_dt_beg).total_seconds()/3600

        # Convert to Julian date
        bin_jd_beg = datetime2JD(bin_dt_beg)
        bin_jd_end = datetime2JD(bin_dt_end)

        # Only select meteors in this bin
        bin_meteors = []
        bin_ffs = []
        for key in associations:
            meteor, shower = associations[key]

            if shower is not None:
                if (shower.name == shower_code) and (meteor.jdt_ref > bin_jd_beg) \
                    and (meteor.jdt_ref <= bin_jd_end):

                    bin_meteors.append([meteor, shower])
                    bin_ffs.append(meteor.ff_name)

        if len(bin_meteors) > 0:

            ### Compute the radiant elevation at the middle of the time bin ###

            jd_mean = (bin_jd_beg + bin_jd_end)/2

            # Compute the mean solar longitude
            sol_mean = np.degrees(jd2SolLonSteyaert(jd_mean))

            print()
            print()
            print("-- Bin information ---")
            print("Bin beg:", bin_dt_beg)
            print("Bin end:", bin_dt_end)
            print("Sol mid: {:.5f}".format(sol_mean))
            print("Meteors:", len(bin_meteors))

            # Take the shower object from the first meteor in the bin (all meteors in the bin belong to
            #   the same shower by the name check above).
            # BUGFIX: the original code used the leftover loop variable "shower", which could be None
            bin_shower = bin_meteors[0][1]

            # Compute the apparent radiant
            ra, dec, v_init = bin_shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_mean)

            # Compute the mean meteor height
            meteor_ht_beg = heightModel(v_init, ht_type='beg')
            meteor_ht_end = heightModel(v_init, ht_type='end')
            meteor_ht = (meteor_ht_beg + meteor_ht_end)/2

            # Compute the standard deviation of the height
            meteor_ht_std = meteor_ht*ht_std_percent/100.0

            # Init the Gaussian height distribution
            meteor_ht_gauss = scipy.stats.norm(meteor_ht, meteor_ht_std)

            # Compute the radiant elevation
            radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_mean, platepar.lat, platepar.lon)

            ### ###

            ### Weight collection area by meteor height distribution ###

            # Determine weights for each height
            weight_sum = 0
            weights = {}
            for ht in col_areas_ht:
                wt = meteor_ht_gauss.pdf(float(ht))
                weight_sum += wt
                weights[ht] = wt

            # Normalize the weights so that the sum is 1
            for ht in weights:
                weights[ht] /= weight_sum

            ### ###

            # Compute the angular velocity in the middle of the FOV
            rad_dist_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
                np.radians(azim_mid), np.radians(elev_mid))
            ang_vel_mid = v_init*np.sin(rad_dist_mid)/r_mid

            ### Compute the limiting magnitude ###

            # Compute the mean star FWHM in the given bin
            fwhm_bin_mean = np.mean([sensor_data[ff_name][0] for ff_name in bin_ffs])

            # Compute the mean background stddev in the given bin
            stddev_bin_mean = np.mean([sensor_data[ff_name][1] for ff_name in bin_ffs])

            # Compute the mean photometric zero point in the given bin
            mag_lev_bin_mean = np.mean([recalibrated_platepars[ff_name].mag_lev for ff_name in bin_ffs \
                if ff_name in recalibrated_platepars])

            # Standard deviation of star PSF, bin mean (px)
            star_stddev = fwhm_bin_mean/2.355

            # Compute the theoretical stellar limiting magnitude (bin average)
            star_sum = 2*np.pi*(config.k1_det*stddev_bin_mean + config.j1_det)*star_stddev**2
            lm_s = -2.5*np.log10(star_sum) + mag_lev_bin_mean
            lm_s += frame_min_loss

            # Compute apparent meteor magnitude
            lm_m = lm_s - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
                np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_mid)/(config.fps*r_mid*fwhm_bin_mean))\
                )

            ### ###

            # Final correction area value (height-weightned)
            collection_area = 0

            # Go through all heights and segment blocks
            for ht in col_areas_ht:
                for img_coords in col_areas_ht[ht]:

                    x_mean, y_mean = img_coords

                    # Unpack precomputed values
                    area, azim, elev, sensitivity_ratio, r = col_areas_ht[ht][img_coords]

                    # Compute the angular velocity (rad/s) in the middle of this block
                    rad_dist = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
                        np.radians(azim), np.radians(elev))
                    ang_vel = v_init*np.sin(rad_dist)/r

                    # Compute the range correction
                    range_correction = (1e5/r)**2

                    # Compute angular velocity correction relative to the nightly mean
                    ang_vel_correction = ang_vel/ang_vel_night_mid

                    ### Apply corrections

                    correction_ratio = 1.0

                    # Correct the area for vignetting and extinction
                    correction_ratio *= sensitivity_ratio

                    # Correct for the range
                    correction_ratio *= range_correction

                    # Correct for the radiant elevation
                    correction_ratio *= np.sin(np.radians(radiant_elev))

                    # Correct for angular velocity
                    correction_ratio *= ang_vel_correction

                    # Add the collection area to the final estimate with the height weight
                    # Raise the correction to the mass index power
                    collection_area += weights[ht]*area*correction_ratio**(mass_index - 1)

            # Compute the flux at the bin LM (meteors/1000km^2/h)
            flux = 1e9*len(bin_meteors)/collection_area/bin_hours

            # Compute the flux scaled to the nightly mean LM
            flux_lm_nightly_mean = flux*population_index**(lm_m_nightly_mean - lm_m)

            # Compute the flux scaled to +6.5M
            flux_lm_6_5 = flux*population_index**(6.5 - lm_m)

            print("-- Sensor information ---")
            print("Star FWHM: {:5.2f} px".format(fwhm_bin_mean))
            print("Bkg stddev: {:4.1f} ADU".format(stddev_bin_mean))
            print("Photom ZP: {:+6.2f} mag".format(mag_lev_bin_mean))
            print("Stellar LM: {:+.2f} mag".format(lm_s))
            print("-- Flux ---")
            print("Col area: {:d} km^2".format(int(collection_area/1e6)))
            print("Ang vel: {:.2f} deg/s".format(np.degrees(ang_vel_mid)))
            print("LM app: {:+.2f} mag".format(lm_m))
            print("Flux: {:.2f} meteors/1000km^2/h".format(flux))
            print("to {:+.2f}: {:.2f} meteors/1000km^2/h".format(lm_m_nightly_mean, flux_lm_nightly_mean))
            print("to +6.50: {:.2f} meteors/1000km^2/h".format(flux_lm_6_5))

            sol_data.append(sol_mean)
            flux_lm_6_5_data.append(flux_lm_6_5)

    # Print the results
    print("Solar longitude, Flux at LM +6.5:")
    for sol, flux_lm_6_5 in zip(sol_data, flux_lm_6_5_data):
        print("{:9.5f}, {:8.4f}".format(sol, flux_lm_6_5))

    # Plot a histogram of peak magnitudes
    plt.hist(peak_mags, cumulative=True)
    plt.show()
def collectingArea(platepar, mask=None, side_points=20, ht_min=60, ht_max=130, dht=2, elev_limit=10):
    """ Compute the collecting area for the range of given heights.

    Arguments:
        platepar: [Platepar object]

    Keyword arguments:
        mask: [Mask object] Mask object, None by default.
        side_points: [int] How many points to use to evaluate the FOV on each side of the image.
            Normalized to the longest side.
        ht_min: [float] Minimum height (km).
        ht_max: [float] Maximum height (km).
        dht: [float] Height delta (km).
        elev_limit: [float] Limit of elevation above horizon (deg). 10 degrees by default.

    Return:
        col_areas_ht: [dict] A dictionary where the keys are heights of area evaluation, and values
            are segment dictionaries. Segment dictionaries have keys which are tuples of (x, y)
            coordinates of segment midpoints, and values are segment collection areas corrected for
            sensor effects.
    """

    # If the mask is not given, make a dummy mask with all white pixels
    if mask is None:
        mask = MaskStructure(255 + np.zeros((platepar.Y_res, platepar.X_res), dtype=np.uint8))

    # Compute the number of samples for every image axis
    longer_side_points = side_points
    shorter_side_points = int(np.ceil(side_points*platepar.Y_res/platepar.X_res))

    # Compute pixel delta for every side
    longer_dpx = int(platepar.X_res//longer_side_points)
    shorter_dpx = int(platepar.Y_res//shorter_side_points)

    # Dictionary of collection areas per height
    col_areas_ht = collections.OrderedDict()

    # Estimate the collection area for a given range of heights
    for ht in np.arange(ht_min, ht_max + dht, dht):

        # Convert the height to meters
        ht = 1000*ht

        print(ht/1000, "km")

        total_area = 0

        # Dictionary of computed sensor-corrected collection areas where X and Y are keys
        col_areas_xy = collections.OrderedDict()

        # Sample the image
        # BUGFIX: the deprecated np.int alias (removed in NumPy 1.24) was replaced by the builtin int
        for x0 in np.linspace(0, platepar.X_res, longer_side_points, dtype=int, endpoint=False):
            for y0 in np.linspace(0, platepar.Y_res, shorter_side_points, dtype=int, endpoint=False):

                # Compute lower right corners of the segment
                xe = x0 + longer_dpx
                ye = y0 + shorter_dpx

                # Compute geo coordinates of the image corners (if the corner is below the elevation
                # limit, the *_elev value will be -1)
                _, ul_lat, ul_lon, ul_ht = xyHt2Geo(platepar, x0, y0, ht, indicate_limit=True, \
                    elev_limit=elev_limit)
                _, ll_lat, ll_lon, ll_ht = xyHt2Geo(platepar, x0, ye, ht, indicate_limit=True, \
                    elev_limit=elev_limit)
                _, lr_lat, lr_lon, lr_ht = xyHt2Geo(platepar, xe, ye, ht, indicate_limit=True, \
                    elev_limit=elev_limit)
                _, ur_lat, ur_lon, ur_ht = xyHt2Geo(platepar, xe, y0, ht, indicate_limit=True, \
                    elev_limit=elev_limit)

                # Skip the block if all corners are hitting the lower apparent elevation limit
                if np.all([ul_ht < 0, ll_ht < 0, lr_ht < 0, ur_ht < 0]):
                    continue

                # Make a polygon (clockwise direction)
                lats = [ul_lat, ll_lat, lr_lat, ur_lat]
                lons = [ul_lon, ll_lon, lr_lon, ur_lon]

                # Compute the area of the polygon
                area = areaGeoPolygon(lats, lons, ht)

                ### Apply sensitivity corrections to the area ###

                # Compute ratio of masked portion of the segment (masked pixels are zero in the mask
                # image, so ~mask_segment is non-zero exactly on masked pixels)
                mask_segment = mask.img[y0:ye, x0:xe]
                unmasked_ratio = 1 - np.count_nonzero(~mask_segment)/mask_segment.size

                ## Compute the pointing direction and the vignetting and extinction loss for the mean
                ##   location

                x_mean = (x0 + xe)/2
                y_mean = (y0 + ye)/2

                # Use a test pixel sum
                test_px_sum = 400

                # Compute the pointing direction and magnitude corrected for vignetting and extinction
                _, ra, dec, mag = xyToRaDecPP([jd2Date(J2000_JD.days)], [x_mean], [y_mean], \
                    [test_px_sum], platepar)
                azim, elev = raDec2AltAz(ra[0], dec[0], J2000_JD.days, platepar.lat, platepar.lon)

                # Compute the pixel sum back assuming no corrections
                rev_level = 10**((mag[0] - platepar.mag_lev)/(-2.5))

                # Compute the sensitivity loss due to vignetting and extinction
                sensitivity_ratio = test_px_sum/rev_level

                ##

                # Compute the range correction (w.r.t 100 km) to the mean point
                r, _, _, _ = xyHt2Geo(platepar, x_mean, y_mean, ht, indicate_limit=True, \
                    elev_limit=elev_limit)

                # Correct the area for the masked portion
                area *= unmasked_ratio

                ### ###

                # Store the raw masked segment collection area, sensitivity, and the range
                col_areas_xy[(x_mean, y_mean)] = [area, azim, elev, sensitivity_ratio, r]

                total_area += area

        # Store segments to the height dictionary (save a copy so it doesn't get overwritten)
        col_areas_ht[float(ht)] = dict(col_areas_xy)

        print("SUM:", total_area/1e6, "km^2")

        # Compare to the total area computed from the whole FOV polygon
        side_points_list = fovArea(platepar, mask=mask, area_ht=ht, side_points=side_points, \
            elev_limit=elev_limit)
        lats = []
        lons = []
        for side in side_points_list:
            for entry in side:
                lats.append(entry[0])
                lons.append(entry[1])

        print("DIR:", areaGeoPolygon(lats, lons, ht)/1e6)

    return col_areas_ht
def generateCalibrationReport(config, night_dir_path, match_radius=2.0, platepar=None, show_graphs=False): """ Given the folder of the night, find the Calstars file, check the star fit and generate a report with the quality of the calibration. The report contains information about both the astrometry and the photometry calibration. Graphs will be saved in the given directory of the night. Arguments: config: [Config instance] night_dir_path: [str] Full path to the directory of the night. Keyword arguments: match_radius: [float] Match radius for star matching between image and catalog stars (px). platepar: [Platepar instance] Use this platepar instead of finding one in the folder. show_graphs: [bool] Show the graphs on the screen. False by default. Return: None """ # Find the CALSTARS file in the given folder calstars_file = None for calstars_file in os.listdir(night_dir_path): if ('CALSTARS' in calstars_file) and ('.txt' in calstars_file): break if calstars_file is None: print('CALSTARS file could not be found in the given directory!') return None # Load the calstars file star_list = readCALSTARS(night_dir_path, calstars_file) ### Load recalibrated platepars, if they exist ### # Find recalibrated platepars file per FF file platepars_recalibrated_file = None for file_name in os.listdir(night_dir_path): if file_name == config.platepars_recalibrated_name: platepars_recalibrated_file = file_name break # Load all recalibrated platepars if the file is available recalibrated_platepars = None if platepars_recalibrated_file: with open(os.path.join(night_dir_path, platepars_recalibrated_file)) as f: recalibrated_platepars = json.load(f) print('Loaded recalibrated platepars JSON file for the calibration report...') ### ### ### Load the platepar file ### # Find the platepar file in the given directory if it was not given if platepar is None: # Find the platepar file platepar_file = None for file_name in os.listdir(night_dir_path): if file_name == config.platepar_name: 
platepar_file = file_name break if platepar_file is None: print('The platepar cannot be found in the night directory!') return None # Load the platepar file platepar = Platepar() platepar.read(os.path.join(night_dir_path, platepar_file), use_flat=config.use_flat) ### ### night_name = os.path.split(night_dir_path.strip(os.sep))[1] # Go one mag deeper than in the config lim_mag = config.catalog_mag_limit + 1 # Load catalog stars (load one magnitude deeper) catalog_stars, mag_band_str, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\ config.star_catalog_path, config.star_catalog_file, lim_mag=lim_mag, \ mag_band_ratios=config.star_catalog_band_ratios) ### Take only those CALSTARS entires for which FF files exist in the folder ### # Get a list of FF files in the folder ff_list = [] for file_name in os.listdir(night_dir_path): if validFFName(file_name): ff_list.append(file_name) # Filter out calstars entries, generate a star dictionary where the keys are JDs of FFs star_dict = {} ff_dict = {} for entry in star_list: ff_name, star_data = entry # Check if the FF from CALSTARS exists in the folder if ff_name not in ff_list: continue dt = getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True) jd = date2JD(*dt) # Add the time and the stars to the dict star_dict[jd] = star_data ff_dict[jd] = ff_name ### ### # If there are no FF files in the directory, don't generate a report if len(star_dict) == 0: print('No FF files from the CALSTARS file in the directory!') return None # If the recalibrated platepars file exists, take the one with the most stars max_jd = 0 using_recalib_platepars = False if recalibrated_platepars is not None: max_stars = 0 for ff_name_temp in recalibrated_platepars: # Compute the Julian date of the FF middle dt = getMiddleTimeFF(ff_name_temp, config.fps, ret_milliseconds=True) jd = date2JD(*dt) # Check that this file exists in CALSTARS and the list of FF files if (jd not in star_dict) or (jd not in ff_dict): continue # Make sure that the 
chosen file has been successfuly recalibrated if "auto_recalibrated" in recalibrated_platepars[ff_name_temp]: if not recalibrated_platepars[ff_name_temp]["auto_recalibrated"]: continue # Check if the number of stars on this FF file is larger than the before if len(star_dict[jd]) > max_stars: max_jd = jd max_stars = len(star_dict[jd]) # Set a flag to indicate if using recalibrated platepars has failed if max_jd == 0: using_recalib_platepars = False else: print('Using recalibrated platepars, file:', ff_dict[max_jd]) using_recalib_platepars = True # Select the platepar where the FF file has the most stars platepar_dict = recalibrated_platepars[ff_dict[max_jd]] platepar = Platepar() platepar.loadFromDict(platepar_dict, use_flat=config.use_flat) filtered_star_dict = {max_jd: star_dict[max_jd]} # Match stars on the image with the stars in the catalog n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \ filtered_star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag) max_matched_stars = n_matched # Otherwise take the optimal FF file for evaluation if (recalibrated_platepars is None) or (not using_recalib_platepars): # If there are more than a set number of FF files to evaluate, choose only the ones with most stars on # the image if len(star_dict) > config.calstars_files_N: # Find JDs of FF files with most stars on them top_nstars_indices = np.argsort([len(x) for x in star_dict.values()])[::-1][:config.calstars_files_N \ - 1] filtered_star_dict = {} for i in top_nstars_indices: filtered_star_dict[list(star_dict.keys())[i]] = list(star_dict.values())[i] star_dict = filtered_star_dict # Match stars on the image with the stars in the catalog n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \ star_dict, match_radius, ret_nmatch=True, lim_mag=lim_mag) # If no recalibrated platepars where found, find the image with the largest number of matched stars if (not using_recalib_platepars) or 
(max_jd == 0): max_jd = 0 max_matched_stars = 0 for jd in matched_stars: _, _, distances = matched_stars[jd] if len(distances) > max_matched_stars: max_jd = jd max_matched_stars = len(distances) # If there are no matched stars, use the image with the largest number of detected stars if max_matched_stars <= 2: max_jd = max(star_dict, key=lambda x: len(star_dict[x])) distances = [np.inf] # Take the FF file with the largest number of matched stars ff_name = ff_dict[max_jd] # Load the FF file ff = readFF(night_dir_path, ff_name) img_h, img_w = ff.avepixel.shape dpi = 200 plt.figure(figsize=(ff.avepixel.shape[1]/dpi, ff.avepixel.shape[0]/dpi), dpi=dpi) # Take the average pixel img = ff.avepixel # Slightly adjust the levels img = Image.adjustLevels(img, np.percentile(img, 1.0), 1.3, np.percentile(img, 99.99)) plt.imshow(img, cmap='gray', interpolation='nearest') legend_handles = [] # Plot detected stars for img_star in star_dict[max_jd]: y, x = img_star[:2] rect_side = 5*match_radius square_patch = plt.Rectangle((x - rect_side/2, y - rect_side/2), rect_side, rect_side, color='g', \ fill=False, label='Image stars') plt.gca().add_artist(square_patch) legend_handles.append(square_patch) # If there are matched stars, plot them if max_matched_stars > 2: # Take the solution with the largest number of matched stars image_stars, matched_catalog_stars, distances = matched_stars[max_jd] # Plot matched stars for img_star in image_stars: x, y = img_star[:2] circle_patch = plt.Circle((y, x), radius=3*match_radius, color='y', fill=False, \ label='Matched stars') plt.gca().add_artist(circle_patch) legend_handles.append(circle_patch) ### Plot match residuals ### # Compute preducted positions of matched image stars from the catalog x_predicted, y_predicted = raDecToXYPP(matched_catalog_stars[:, 0], \ matched_catalog_stars[:, 1], max_jd, platepar) img_y = image_stars[:, 0] img_x = image_stars[:, 1] delta_x = x_predicted - img_x delta_y = y_predicted - img_y # Compute image residual and 
angle of the error res_angle = np.arctan2(delta_y, delta_x) res_distance = np.sqrt(delta_x**2 + delta_y**2) # Calculate coordinates of the beginning of the residual line res_x_beg = img_x + 3*match_radius*np.cos(res_angle) res_y_beg = img_y + 3*match_radius*np.sin(res_angle) # Calculate coordinates of the end of the residual line res_x_end = img_x + 100*np.cos(res_angle)*res_distance res_y_end = img_y + 100*np.sin(res_angle)*res_distance # Plot the 100x residuals for i in range(len(x_predicted)): res_plot = plt.plot([res_x_beg[i], res_x_end[i]], [res_y_beg[i], res_y_end[i]], color='orange', \ lw=0.5, label='100x residuals') legend_handles.append(res_plot[0]) ### ### else: distances = [np.inf] # If there are no matched stars, plot large text in the middle of the screen plt.text(img_w/2, img_h/2, "NO MATCHED STARS!", color='r', alpha=0.5, fontsize=20, ha='center', va='center') ### Plot positions of catalog stars to the limiting magnitude of the faintest matched star + 1 mag ### # Find the faintest magnitude among matched stars if max_matched_stars > 2: faintest_mag = np.max(matched_catalog_stars[:, 2]) + 1 else: # If there are no matched stars, use the limiting magnitude from config faintest_mag = config.catalog_mag_limit + 1 # Estimate RA,dec of the centre of the FOV _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(max_jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], platepar, extinction_correction=False) RA_c = RA_c[0] dec_c = dec_c[0] fov_radius = getFOVSelectionRadius(platepar) # Get stars from the catalog around the defined center in a given radius _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, max_jd, platepar.lat, platepar.lon, \ fov_radius, faintest_mag) ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T # Compute image positions of all catalog stars that should be on the image x_catalog, y_catalog = raDecToXYPP(ra_catalog, dec_catalog, max_jd, platepar) # Filter all catalog stars outside the image temp_arr = np.c_[x_catalog, y_catalog, 
mag_catalog] temp_arr = temp_arr[temp_arr[:, 0] >= 0] temp_arr = temp_arr[temp_arr[:, 0] <= ff.avepixel.shape[1]] temp_arr = temp_arr[temp_arr[:, 1] >= 0] temp_arr = temp_arr[temp_arr[:, 1] <= ff.avepixel.shape[0]] x_catalog, y_catalog, mag_catalog = temp_arr.T # Plot catalog stars on the image cat_stars_handle = plt.scatter(x_catalog, y_catalog, c='none', marker='D', lw=1.0, alpha=0.4, \ s=((4.0 + (faintest_mag - mag_catalog))/3.0)**(2*2.512), edgecolor='r', label='Catalog stars') legend_handles.append(cat_stars_handle) ### ### # Add info text in the corner info_text = ff_dict[max_jd] + '\n' \ + "Matched stars within {:.1f} px radius: {:d}/{:d} \n".format(match_radius, max_matched_stars, \ len(star_dict[max_jd])) \ + "Median distance = {:.2f} px\n".format(np.median(distances)) \ + "Catalog lim mag = {:.1f}".format(lim_mag) plt.text(10, 10, info_text, bbox=dict(facecolor='black', alpha=0.5), va='top', ha='left', fontsize=4, \ color='w', family='monospace') legend = plt.legend(handles=legend_handles, prop={'size': 4}, loc='upper right') legend.get_frame().set_facecolor('k') legend.get_frame().set_edgecolor('k') for txt in legend.get_texts(): txt.set_color('w') ### Add FOV info (centre, size) ### # Mark FOV centre plt.scatter(platepar.X_res/2, platepar.Y_res/2, marker='+', s=20, c='r', zorder=4) # Compute FOV centre alt/az azim_centre, alt_centre = raDec2AltAz(RA_c, dec_c, max_jd, platepar.lat, platepar.lon) # Compute FOV size fov_h, fov_v = computeFOVSize(platepar) # Compute the rotation wrt. 
horizon rot_horizon = rotationWrtHorizon(platepar) fov_centre_text = "Azim = {:6.2f}$\\degree$\n".format(azim_centre) \ + "Alt = {:6.2f}$\\degree$\n".format(alt_centre) \ + "Rot h = {:6.2f}$\\degree$\n".format(rot_horizon) \ + "FOV h = {:6.2f}$\\degree$\n".format(fov_h) \ + "FOV v = {:6.2f}$\\degree$".format(fov_v) \ plt.text(10, platepar.Y_res - 10, fov_centre_text, bbox=dict(facecolor='black', alpha=0.5), \ va='bottom', ha='left', fontsize=4, color='w', family='monospace') ### ### # Plot RA/Dec gridlines # addEquatorialGrid(plt, platepar, max_jd) plt.axis('off') plt.gca().get_xaxis().set_visible(False) plt.gca().get_yaxis().set_visible(False) plt.xlim([0, ff.avepixel.shape[1]]) plt.ylim([ff.avepixel.shape[0], 0]) # Remove the margins (top and right are set to 0.9999, as setting them to 1.0 makes the image blank in # some matplotlib versions) plt.subplots_adjust(left=0, bottom=0, right=0.9999, top=0.9999, wspace=0, hspace=0) plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_astrometry.jpg'), \ bbox_inches='tight', pad_inches=0, dpi=dpi) if show_graphs: plt.show() else: plt.clf() plt.close() if max_matched_stars > 2: ### PHOTOMETRY FIT ### # If a flat is used, set the vignetting coeff to 0 if config.use_flat: platepar.vignetting_coeff = 0.0 # Extact intensities and mangitudes star_intensities = image_stars[:, 2] catalog_ra, catalog_dec, catalog_mags = matched_catalog_stars.T # Compute radius of every star from image centre radius_arr = np.hypot(image_stars[:, 0] - img_h/2, image_stars[:, 1] - img_w/2) # Compute apparent extinction corrected magnitudes catalog_mags = extinctionCorrectionTrueToApparent(catalog_mags, catalog_ra, catalog_dec, max_jd, \ platepar) # Fit the photometry on automated star intensities (use the fixed vignetting coeff, use robust fit) photom_params, fit_stddev, fit_resid, star_intensities, radius_arr, catalog_mags = \ photometryFitRobust(star_intensities, radius_arr, catalog_mags, \ 
fixed_vignetting=platepar.vignetting_coeff) photom_offset, _ = photom_params ### ### ### PLOT PHOTOMETRY ### # Note: An almost identical code exists in RMS.Astrometry.SkyFit in the PlateTool.photometry function dpi = 130 fig_p, (ax_p, ax_r) = plt.subplots(nrows=2, facecolor=None, figsize=(6.0, 7.0), dpi=dpi, \ gridspec_kw={'height_ratios':[2, 1]}) # Plot raw star intensities ax_p.scatter(-2.5*np.log10(star_intensities), catalog_mags, s=5, c='r', alpha=0.5, \ label="Raw (extinction corrected)") # If a flat is used, disregard the vignetting if not config.use_flat: # Plot intensities of image stars corrected for vignetting lsp_corr_arr = np.log10(correctVignetting(star_intensities, radius_arr, \ platepar.vignetting_coeff)) ax_p.scatter(-2.5*lsp_corr_arr, catalog_mags, s=5, c='b', alpha=0.5, \ label="Corrected for vignetting") # Plot photometric offset from the platepar x_min, x_max = ax_p.get_xlim() y_min, y_max = ax_p.get_ylim() x_min_w = x_min - 3 x_max_w = x_max + 3 y_min_w = y_min - 3 y_max_w = y_max + 3 photometry_info = "Platepar: {:+.1f}*LSP + {:.2f} +/- {:.2f}".format(platepar.mag_0, \ platepar.mag_lev, platepar.mag_lev_stddev) \ + "\nVignetting coeff = {:.5f}".format(platepar.vignetting_coeff) \ + "\nGamma = {:.2f}".format(platepar.gamma) # Plot the photometry calibration from the platepar logsum_arr = np.linspace(x_min_w, x_max_w, 10) ax_p.plot(logsum_arr, logsum_arr + platepar.mag_lev, label=photometry_info, linestyle='--', \ color='k', alpha=0.5) # Plot the fitted photometry calibration fit_info = "Fit: {:+.1f}*LSP + {:.2f} +/- {:.2f}".format(-2.5, photom_offset, fit_stddev) ax_p.plot(logsum_arr, logsum_arr + photom_offset, label=fit_info, linestyle='--', color='b', alpha=0.75) ax_p.legend() ax_p.set_ylabel("Catalog magnitude ({:s})".format(mag_band_str)) ax_p.set_xlabel("Uncalibrated magnitude") # Set wider axis limits ax_p.set_xlim(x_min_w, x_max_w) ax_p.set_ylim(y_min_w, y_max_w) ax_p.invert_yaxis() ax_p.invert_xaxis() ax_p.grid() ### Plot photometry vs 
radius ### img_diagonal = np.hypot(img_h/2, img_w/2) # Plot photometry residuals (including vignetting) ax_r.scatter(radius_arr, fit_resid, c='b', alpha=0.75, s=5, zorder=3) # Plot a zero line ax_r.plot(np.linspace(0, img_diagonal, 10), np.zeros(10), linestyle='dashed', alpha=0.5, \ color='k') # Plot only when no flat is used if not config.use_flat: # Plot radius from centre vs. fit residual fit_resids_novignetting = catalog_mags - photomLine((np.array(star_intensities), \ np.array(radius_arr)), photom_offset, 0.0) ax_r.scatter(radius_arr, fit_resids_novignetting, s=5, c='r', alpha=0.5, zorder=3) px_sum_tmp = 1000 radius_arr_tmp = np.linspace(0, img_diagonal, 50) # Plot vignetting loss curve vignetting_loss = 2.5*np.log10(px_sum_tmp) \ - 2.5*np.log10(correctVignetting(px_sum_tmp, radius_arr_tmp, \ platepar.vignetting_coeff)) ax_r.plot(radius_arr_tmp, vignetting_loss, linestyle='dotted', alpha=0.5, color='k') ax_r.grid() ax_r.set_ylabel("Fit residuals (mag)") ax_r.set_xlabel("Radius from centre (px)") ax_r.set_xlim(0, img_diagonal) ### ### plt.tight_layout() plt.savefig(os.path.join(night_dir_path, night_name + '_calib_report_photometry.png'), dpi=150) if show_graphs: plt.show() else: plt.clf() plt.close()
def recalibrateIndividualFFsAndApplyAstrometry(dir_path, ftpdetectinfo_path, calstars_list, config, \
    platepar, generate_plot=True):
    """ Recalibrate FF files with detections and apply the recalibrated platepar to those detections.

    Arguments:
        dir_path: [str] Path where the FTPdetectinfo file is.
        ftpdetectinfo_path: [str] Name of the FTPdetectinfo file.
        calstars_list: [list] A list of entries [[ff_name, star_coordinates], ...].
        config: [Config instance]
        platepar: [Platepar instance] Initial platepar.

    Keyword arguments:
        generate_plot: [bool] Generate the calibration variation plot. True by default.

    Return:
        recalibrated_platepars: [dict] A dictionary where the keys are FF file names and values are
            recalibrated platepar instances for every FF file.
    """

    # Use a copy of the config file so the global limiting magnitude bump below doesn't leak out
    config = copy.deepcopy(config)

    # If the given file does not exist, return nothing
    if not os.path.isfile(ftpdetectinfo_path):
        print('ERROR! The FTPdetectinfo file does not exist: {:s}'.format(ftpdetectinfo_path))
        print('    The recalibration on every file was not done!')

        return {}

    # Read the FTPdetectinfo data
    cam_code, fps, meteor_list = FTPdetectinfo.readFTPdetectinfo(*os.path.split(ftpdetectinfo_path), \
        ret_input_format=True)

    # Convert the list of stars to a per FF name dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}


    ### Add neighboring FF files for more robust photometry estimation ###

    ff_processing_list = []

    # Make a list of sorted FF files in CALSTARS
    calstars_ffs = sorted([ff_file for ff_file in calstars])

    # Go through the list of FF files with detections and add neighboring FFs
    for meteor_entry in meteor_list:

        ff_name = meteor_entry[0]

        if ff_name in calstars_ffs:

            # Find the index of the given FF file in the list of calstars
            ff_indx = calstars_ffs.index(ff_name)

            # Add neighbours to the processing list
            for k in range(-(RECALIBRATE_NEIGHBOURHOOD_SIZE//2), RECALIBRATE_NEIGHBOURHOOD_SIZE//2 + 1):

                k_indx = ff_indx + k

                # NOTE: >= 0 so the first FF file in the night can also be a neighbour
                #   (the previous "> 0" check excluded index 0, an off-by-one)
                if (k_indx >= 0) and (k_indx < len(calstars_ffs)):

                    ff_name_tmp = calstars_ffs[k_indx]
                    if ff_name_tmp not in ff_processing_list:
                        ff_processing_list.append(ff_name_tmp)


    # Sort the processing list of FF files
    ff_processing_list = sorted(ff_processing_list)

    ### ###


    # Globally increase catalog limiting magnitude
    config.catalog_mag_limit += 1

    # Load catalog stars (overwrite the mag band ratios if specific catalog is used)
    star_catalog_status = StarCatalog.readStarCatalog(config.star_catalog_path,\
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)

    if not star_catalog_status:
        print("Could not load the star catalog!")
        print(os.path.join(config.star_catalog_path, config.star_catalog_file))
        return {}

    catalog_stars, _, config.star_catalog_band_ratios = star_catalog_status


    # Update the platepar coordinates from the config file
    platepar.lat = config.latitude
    platepar.lon = config.longitude
    platepar.elev = config.elevation


    prev_platepar = copy.deepcopy(platepar)

    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for ff_name in ff_processing_list:

        working_platepar = copy.deepcopy(prev_platepar)

        # Skip this meteor if its FF file was already recalibrated
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print('------------------------------------------------------------------------------')

        # Find extracted stars on this image
        if not ff_name in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry, the residuals
        #   function needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        # Recalibrate the platepar using star matching
        result, min_match_radius = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)


        # If the recalibration failed, try using FFT alignment
        if result is None:

            print()
            print('Running FFT alignment...')

            # Run FFT alignment (CALSTARS stores (row, col), the aligner expects (x, y), so swap columns)
            calstars_coords = np.array(star_dict_ff[jd])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
            print(calstars_time)
            test_platepar = alignPlatepar(config, prev_platepar, calstars_time, calstars_coords, \
                show_plot=False)

            # Try to recalibrate after FFT alignment
            result, _ = recalibrateFF(config, test_platepar, jd, star_dict_ff, catalog_stars)


            # If the FFT alignment failed, align the original platepar using the smallest radius that
            #   matched and force save the platepar
            if (result is None) and (min_match_radius is not None):
                print()
                print("Using the old platepar with the minimum match radius of: {:.2f}".format(min_match_radius))
                result, _ = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars,
                    max_match_radius=min_match_radius, force_platepar_save=True)

                if result is not None:
                    working_platepar = result


            # If the alignment succeeded, save the result
            else:
                working_platepar = result


        else:
            working_platepar = result


        # Store the platepar if the fit succeeded
        if result is not None:

            # Recompute alt/az of the FOV centre
            working_platepar.az_centre, working_platepar.alt_centre = raDec2AltAz(working_platepar.RA_d, \
                working_platepar.dec_d, working_platepar.JD, working_platepar.lat, working_platepar.lon)

            # Recompute the rotation wrt horizon
            working_platepar.rotation_from_horiz = rotationWrtHorizon(working_platepar)

            # Mark the platepar to indicate that it was automatically recalibrated on an individual FF file
            working_platepar.auto_recalibrated = True

            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:

            print('Recalibration of {:s} failed, using the previous platepar...'.format(ff_name))

            # Mark the platepar to indicate that autorecalib failed
            prev_platepar_tmp = copy.deepcopy(prev_platepar)
            prev_platepar_tmp.auto_recalibrated = False

            # If the aligning failed, set the previous platepar as the one that should be used for this
            #   FF file
            recalibrated_platepars[ff_name] = prev_platepar_tmp


    ### Average out photometric offsets within the given neighbourhood size ###

    # Go through the list of FF files with detections
    for meteor_entry in meteor_list:

        ff_name = meteor_entry[0]

        # Make sure the FF was successfully recalibrated
        if ff_name in recalibrated_platepars:

            # Find the index of the given FF file in the list of calstars
            ff_indx = calstars_ffs.index(ff_name)

            # Compute the average photometric offset and the improved standard deviation using all
            #   neighbors
            photom_offset_tmp_list = []
            photom_offset_std_tmp_list = []
            neighboring_ffs = []
            for k in range(-(RECALIBRATE_NEIGHBOURHOOD_SIZE//2), RECALIBRATE_NEIGHBOURHOOD_SIZE//2 + 1):

                k_indx = ff_indx + k

                # NOTE: >= 0 for the same off-by-one reason as in the neighbourhood loop above
                if (k_indx >= 0) and (k_indx < len(calstars_ffs)):

                    # Get the name of the FF file
                    ff_name_tmp = calstars_ffs[k_indx]

                    # Check that the neighboring FF was successfully recalibrated
                    if ff_name_tmp in recalibrated_platepars:

                        # Get the computed photometric offset and stddev
                        photom_offset_tmp_list.append(recalibrated_platepars[ff_name_tmp].mag_lev)
                        photom_offset_std_tmp_list.append(recalibrated_platepars[ff_name_tmp].mag_lev_stddev)
                        neighboring_ffs.append(ff_name_tmp)


            # Compute the new photometric offset and improved standard deviation (assume equal sample
            #   size)
            # Source: https://stats.stackexchange.com/questions/55999/is-it-possible-to-find-the-combined-standard-deviation
            photom_offset_new = np.mean(photom_offset_tmp_list)
            photom_offset_std_new = np.sqrt(\
                np.sum([st**2 + (mt - photom_offset_new)**2 \
                for mt, st in zip(photom_offset_tmp_list, photom_offset_std_tmp_list)]) \
                / len(photom_offset_tmp_list)
                )

            # Assign the new photometric offset and standard deviation to all FFs used for computation
            for ff_name_tmp in neighboring_ffs:
                recalibrated_platepars[ff_name_tmp].mag_lev = photom_offset_new
                recalibrated_platepars[ff_name_tmp].mag_lev_stddev = photom_offset_std_new

    ### ###


    ### Store all recalibrated platepars as a JSON file ###

    all_pps = {}
    for ff_name in recalibrated_platepars:

        json_str = recalibrated_platepars[ff_name].jsonStr()

        all_pps[ff_name] = json.loads(json_str)

    with open(os.path.join(dir_path, config.platepars_recalibrated_name), 'w') as f:

        # Convert all platepars to a JSON file
        out_str = json.dumps(all_pps, default=lambda o: o.__dict__, indent=4, sort_keys=True)

        f.write(out_str)

    ### ###


    # If no platepars were recalibrated, use the single platepar recalibration procedure
    if len(recalibrated_platepars) == 0:

        print('No FF images were used for recalibration, using the single platepar calibration function...')

        # Use the initial platepar for calibration
        applyAstrometryFTPdetectinfo(dir_path, os.path.basename(ftpdetectinfo_path), None, platepar=platepar)

        return recalibrated_platepars


    ### GENERATE PLOTS ###

    dt_list = []
    ang_dists = []
    rot_angles = []
    hour_list = []
    photom_offset_list = []
    photom_offset_std_list = []

    first_dt = np.min([FFfile.filenameToDatetime(ff_name) for ff_name in recalibrated_platepars])

    for ff_name in recalibrated_platepars:

        pp_temp = recalibrated_platepars[ff_name]

        # If the fitting failed, skip the platepar
        if pp_temp is None:
            continue

        # Add the datetime of the FF file to the list
        ff_dt = FFfile.filenameToDatetime(ff_name)
        dt_list.append(ff_dt)

        # Compute the angular separation from the reference platepar (arcmin)
        ang_dist = np.degrees(angularSeparation(np.radians(platepar.RA_d), np.radians(platepar.dec_d), \
            np.radians(pp_temp.RA_d), np.radians(pp_temp.dec_d)))
        ang_dists.append(ang_dist*60)

        # Compute rotation difference, wrapped to (-180, 180] deg, in arcmin
        rot_diff = (platepar.pos_angle_ref - pp_temp.pos_angle_ref + 180)%360 - 180
        rot_angles.append(rot_diff*60)

        # Compute the hour of the FF used for recalibration
        hour_list.append((ff_dt - first_dt).total_seconds()/3600)

        # Add the photometric offset to the list
        photom_offset_list.append(pp_temp.mag_lev)
        photom_offset_std_list.append(pp_temp.mag_lev_stddev)


    if generate_plot:

        # Generate the name for the plots
        plot_name = os.path.basename(ftpdetectinfo_path).replace('FTPdetectinfo_', '').replace('.txt', '')


        ### Plot difference from reference platepar in angular distance from (0, 0) vs rotation ###

        plt.figure()

        plt.scatter(0, 0, marker='o', edgecolor='k', label='Reference platepar', s=100, c='none', zorder=3)

        plt.scatter(ang_dists, rot_angles, c=hour_list, zorder=3)
        plt.colorbar(label="Hours from first FF file")

        plt.xlabel("Angular distance from reference (arcmin)")
        plt.ylabel("Rotation from reference (arcmin)")

        plt.title("FOV centre drift starting at {:s}".format(first_dt.strftime("%Y/%m/%d %H:%M:%S")))

        plt.grid()
        plt.legend()

        plt.tight_layout()

        plt.savefig(os.path.join(dir_path, plot_name + '_calibration_variation.png'), dpi=150)

        # plt.show()

        plt.clf()
        plt.close()

        ### ###


        ### Plot the photometric offset variation ###

        plt.figure()

        plt.errorbar(dt_list, photom_offset_list, yerr=photom_offset_std_list, fmt="o", \
            ecolor='lightgray', elinewidth=2, capsize=0, ms=2)

        # Format datetimes
        plt.gca().xaxis.set_major_formatter(mdates.DateFormatter("%H:%M"))

        # Rotate and align the tick labels so they look better
        plt.gcf().autofmt_xdate()

        plt.xlabel("UTC time")
        plt.ylabel("Photometric offset")

        plt.title("Photometric offset variation")

        plt.grid()
        plt.tight_layout()

        plt.savefig(os.path.join(dir_path, plot_name + '_photometry_variation.png'), dpi=150)

        plt.clf()
        plt.close()

    ### ###


    ### Apply platepars to FTPdetectinfo ###

    meteor_output_list = []
    for meteor_entry in meteor_list:

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Get the platepar that will be applied to this FF file
        if ff_name in recalibrated_platepars:
            working_platepar = recalibrated_platepars[ff_name]

        else:
            print('Using default platepar for:', ff_name)
            working_platepar = platepar

        # Apply the recalibrated platepar to meteor centroids
        meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, working_platepar, \
            add_calstatus=True)

        meteor_output_list.append([ff_name, meteor_No, rho, phi, meteor_picks])

    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Recalibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0

    # Back up the old FTPdetectinfo file
    # NOTE: suffix removal done by slicing; the previous .strip('.txt') stripped any leading/trailing
    #   characters from the set {'.', 't', 'x'} and could mangle the backup file name
    ftpdetectinfo_base = ftpdetectinfo_path
    if ftpdetectinfo_base.endswith('.txt'):
        ftpdetectinfo_base = ftpdetectinfo_base[:-len('.txt')]

    try:
        shutil.copy(ftpdetectinfo_path, ftpdetectinfo_base \
            + '_backup_{:s}.txt'.format(datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S.%f')))
    except Exception:
        # Best-effort backup - a failure should not prevent writing the updated FTPdetectinfo
        print('ERROR! The FTPdetectinfo file could not be backed up: {:s}'.format(ftpdetectinfo_path))

    # Save the updated FTPdetectinfo
    FTPdetectinfo.writeFTPdetectinfo(meteor_output_list, dir_path, os.path.basename(ftpdetectinfo_path), \
        dir_path, cam_code, fps, calibration=calib_str, celestial_coords_given=True)

    ### ###

    return recalibrated_platepars
def addEquatorialGrid(plt_handle, platepar, jd):
    """ Given the plot handle containing the image, the function plots an equatorial grid.

    Arguments:
        plt_handle: [pyplot instance]
        platepar: [Platepar object]
        jd: [float] Julian date of the image.

    Return:
        plt_handle: [pyplot instance] Pyplot instance with the added grid.

    """

    # Estimate RA,dec of the centre of the FOV
    _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res / 2], [platepar.Y_res / 2], [1],
                                    platepar, extinction_correction=False)

    RA_c = RA_c[0]
    dec_c = dec_c[0]

    # Compute FOV centre alt/az
    azim_centre, alt_centre = raDec2AltAz(RA_c, dec_c, jd, platepar.lat, platepar.lon)

    # Compute FOV size; the radius is the half-diagonal-like hypot of the horizontal/vertical extent
    fov_h, fov_v = computeFOVSize(platepar)
    fov_radius = np.hypot(*computeFOVSize(platepar))

    # Determine gridline frequency (double the gridlines if the number is < 4eN)
    # grid_freq is the nearest power of 10 below the FOV radius, halved if the leading digit is < 4
    grid_freq = 10**np.floor(np.log10(fov_radius))
    if 10**(np.log10(fov_radius) - np.floor(np.log10(fov_radius))) < 4:
        grid_freq /= 2

    # Set a maximum grid frequency of 15 deg
    if grid_freq > 15:
        grid_freq = 15

    # Grid plot density (step in degrees between sampled points along one gridline)
    plot_dens = grid_freq / 100

    # Compute the range of declinations to consider, clamped to the valid [-90, 90] range
    dec_min = platepar.dec_d - fov_radius / 2
    if dec_min < -90:
        dec_min = -90

    dec_max = platepar.dec_d + fov_radius / 2
    if dec_max > 90:
        dec_max = 90

    ra_grid_arr = np.arange(0, 360, grid_freq)
    dec_grid_arr = np.arange(-90, 90, grid_freq)

    # Filter out the dec grid for min/max declination
    dec_grid_arr = dec_grid_arr[(dec_grid_arr >= dec_min) & (dec_grid_arr <= dec_max)]

    # Plot the celestial parallel grid (lines of constant declination)
    for dec_grid in dec_grid_arr:
        ra_grid_plot = np.arange(0, 360, plot_dens)
        dec_grid_plot = np.zeros_like(ra_grid_plot) + dec_grid

        # Compute alt/az
        az_grid_plot, alt_grid_plot = raDec2AltAz_vect(ra_grid_plot, dec_grid_plot, jd, platepar.lat, \
            platepar.lon)

        # Filter out points below the horizon and outside the FOV
        # NOTE(review): angularSeparation is called here as (alt, az, alt, az), while other call sites
        #   in this file pass (ra/az-like, dec/alt-like) first - confirm the intended argument order
        filter_arr = (alt_grid_plot > 0) & (np.degrees(angularSeparation(np.radians(alt_centre), \
            np.radians(azim_centre), np.radians(alt_grid_plot), np.radians(az_grid_plot))) < fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Find gaps in continuity and break up plotting individual lines, so segments that wrap out of
        #   the FOV are not connected by long straight lines across the image
        gap_indices = np.argwhere(np.abs(ra_grid_plot[1:] - ra_grid_plot[:-1]) > fov_radius)
        if len(gap_indices):
            ra_grid_plot_list = []
            dec_grid_plot_list = []

            # Separate gridlines with large gaps
            prev_gap_indx = 0
            for entry in gap_indices:
                gap_indx = entry[0]

                ra_grid_plot_list.append(ra_grid_plot[prev_gap_indx:gap_indx + 1])
                dec_grid_plot_list.append(dec_grid_plot[prev_gap_indx:gap_indx + 1])

                prev_gap_indx = gap_indx

            # Add the last segment
            # NOTE(review): the [prev_gap_indx + 1:-1] slice drops the final sampled point of the line
            #   (and the first segment above starts at prev_gap_indx, not prev_gap_indx + 1) - looks
            #   like an off-by-one in the segment bounds; confirm against intended plotting behavior
            ra_grid_plot_list.append(ra_grid_plot[prev_gap_indx + 1:-1])
            dec_grid_plot_list.append(dec_grid_plot[prev_gap_indx + 1:-1])

        else:
            ra_grid_plot_list = [ra_grid_plot]
            dec_grid_plot_list = [dec_grid_plot]

        # Plot all grid segments
        for ra_grid_plot, dec_grid_plot in zip(ra_grid_plot_list, dec_grid_plot_list):

            # Compute image coordinates for every grid celestial parallel
            x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, jd, platepar)

            # Plot the grid
            plt_handle.plot(x_grid, y_grid, color='w', alpha=0.2, zorder=2, linewidth=0.5, linestyle='dotted')

    # Plot the celestial meridian grid (lines of constant right ascension)
    for ra_grid in ra_grid_arr:

        dec_grid_plot = np.arange(-90, 90, plot_dens)
        ra_grid_plot = np.zeros_like(dec_grid_plot) + ra_grid

        # Filter out the dec grid
        filter_arr = (dec_grid_plot >= dec_min) & (dec_grid_plot <= dec_max)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute alt/az
        az_grid_plot, alt_grid_plot = raDec2AltAz_vect(ra_grid_plot, dec_grid_plot, jd, platepar.lat, \
            platepar.lon)

        # Filter out points below the horizon and outside the FOV radius
        # NOTE(review): same (alt, az) argument order to angularSeparation as above - confirm
        filter_arr = (alt_grid_plot > 0) & (np.degrees(angularSeparation(np.radians(alt_centre), \
            np.radians(azim_centre), np.radians(alt_grid_plot), np.radians(az_grid_plot))) < fov_radius)
        ra_grid_plot = ra_grid_plot[filter_arr]
        dec_grid_plot = dec_grid_plot[filter_arr]

        # Compute image coordinates for every grid celestial parallel
        x_grid, y_grid = raDecToXYPP(ra_grid_plot, dec_grid_plot, jd, platepar)

        # # Filter out everything outside the FOV
        # filter_arr = (x_grid >= 0) & (x_grid <= platepar.X_res) & (y_grid >= 0) & (y_grid <= platepar.Y_res)
        # x_grid = x_grid[filter_arr]
        # y_grid = y_grid[filter_arr]

        # Plot the grid
        plt_handle.plot(x_grid, y_grid, color='w', alpha=0.2, zorder=2, linewidth=0.5, linestyle='dotted')

    return plt_handle
def applyPlateparToCentroids(ff_name, fps, meteor_meas, platepar, add_calstatus=False):
    """ Compute meteor astrometry and photometry (RA/Dec, alt/az, mag) for the given centroids using
        a platepar.

    Arguments:
        ff_name: [str] Name of the FF file with the meteor.
        fps: [float] Frames per second of the video.
        meteor_meas: [list] A list of [calib_status, frame_n, x, y, ra, dec, azim, elev, inten, mag].
        platepar: [Platepar instance] Platepar which will be used for astrometry and photometry.

    Keyword arguments:
        add_calstatus: [bool] Add a column with calibration status at the beginning. False by default.

    Return:
        meteor_picks: [ndarray] A numpy 2D array of: [frames, X_data, Y_data, RA_data, dec_data,
            az_data, alt_data, level_data, magnitudes]
    """

    meas_arr = np.array(meteor_meas)

    # Prepend a column of ones indicating the calibration status, if requested
    if add_calstatus:
        meas_arr = np.c_[np.ones((meas_arr.shape[0], 1)), meas_arr]

    # Drop rows whose intensity level is <= 0, unless every level is zero
    levels = meas_arr[:, 8]
    if np.any(levels):
        meas_arr = meas_arr[levels > 0, :]

    # Pull out the per-centroid columns: frame number, image position, intensity
    frames = meas_arr[:, 1]
    x_px = meas_arr[:, 2]
    y_px = meas_arr[:, 3]
    levels = meas_arr[:, 8]

    # Beginning time of the FF file
    ff_start_time = filenameToDatetime(ff_name)

    # Compute the calendar timestamp of every centroid from its frame number
    time_data = []
    for frame_n in frames:
        t = ff_start_time + datetime.timedelta(seconds=frame_n/fps)
        time_data.append([t.year, t.month, t.day, t.hour, t.minute, t.second, int(t.microsecond/1000)])

    # Map image coordinates to RA/Dec and do the photometry
    jd_arr, ra_arr, dec_arr, magnitudes = xyToRaDecPP(np.array(time_data), x_px, y_px, levels, platepar)

    # Compute azimuth and altitude of every centroid
    az_arr = np.zeros_like(ra_arr)
    alt_arr = np.zeros_like(ra_arr)

    for i, (jd, ra_j2000, dec_j2000) in enumerate(zip(jd_arr, ra_arr, dec_arr)):

        # Precess RA/Dec to the epoch of date
        ra_date, dec_date = equatorialCoordPrecession(J2000_JD.days, jd, np.radians(ra_j2000), \
            np.radians(dec_j2000))

        # Alt/Az are apparent (in the epoch of date, corresponding to geographical azimuths)
        az_arr[i], alt_arr[i] = raDec2AltAz(np.degrees(ra_date), np.degrees(dec_date), jd, \
            platepar.lat, platepar.lon)

    # Assemble the meteor measurements array
    return np.c_[frames, x_px, y_px, ra_arr, dec_arr, az_arr, alt_arr, levels, magnitudes]
def computeFlux(config, dir_path, ftpdetectinfo_path, shower_code, dt_beg, dt_end, timebin, mass_index, \ timebin_intdt=0.25, ht_std_percent=5.0, mask=None, show_plots=True): """ Compute flux using measurements in the given FTPdetectinfo file. Arguments: config: [Config instance] dir_path: [str] Path to the working directory. ftpdetectinfo_path: [str] Path to a FTPdetectinfo file. shower_code: [str] IAU shower code (e.g. ETA, PER, SDA). dt_beg: [Datetime] Datetime object of the observation beginning. dt_end: [Datetime] Datetime object of the observation end. timebin: [float] Time bin in hours. mass_index: [float] Cumulative mass index of the shower. Keyword arguments: timebin_intdt: [float] Time step for computing the integrated collection area in hours. 15 minutes by default. If smaller than that, only one collection are will be computed. ht_std_percent: [float] Meteor height standard deviation in percent. mask: [Mask object] Mask object, None by default. show_plots: [bool] Show flux plots. True by default. Return: [tuple] sol_data, flux_lm_6_5_data - sol_data: [list] Array of solar longitudes (in degrees) of time bins. - flux_lm6_5_data: [list] Array of meteoroid flux at the limiting magnitude of +6.5 in meteors/1000km^2/h. 
""" # Get a list of files in the night folder file_list = sorted(os.listdir(dir_path)) # Find and load the platepar file if config.platepar_name in file_list: # Load the platepar platepar = Platepar.Platepar() platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat) else: print("Cannot find the platepar file in the night directory: ", config.platepar_name) return None # # Load FTPdetectinfos # meteor_data = [] # for ftpdetectinfo_path in ftpdetectinfo_list: # if not os.path.isfile(ftpdetectinfo_path): # print('No such file:', ftpdetectinfo_path) # continue # meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path)) # Load meteor data from the FTPdetectinfo file meteor_data = readFTPdetectinfo(*os.path.split(ftpdetectinfo_path)) if not len(meteor_data): print("No meteors in the FTPdetectinfo file!") return None # Find and load recalibrated platepars if config.platepars_recalibrated_name in file_list: with open(os.path.join(dir_path, config.platepars_recalibrated_name)) as f: recalibrated_platepars_dict = json.load(f) print("Recalibrated platepars loaded!") # If the file is not available, apply the recalibration procedure else: recalibrated_platepars_dict = applyRecalibrate(ftpdetectinfo_path, config) print("Recalibrated platepar file not available!") print("Recalibrating...") # Convert the dictionary of recalibrated platepars to a dictionary of Platepar objects recalibrated_platepars = {} for ff_name in recalibrated_platepars_dict: pp = Platepar.Platepar() pp.loadFromDict(recalibrated_platepars_dict[ff_name], use_flat=config.use_flat) recalibrated_platepars[ff_name] = pp # Compute nighly mean of the photometric zero point mag_lev_nightly_mean = np.mean([recalibrated_platepars[ff_name].mag_lev \ for ff_name in recalibrated_platepars]) # Locate and load the mask file if config.mask_file in file_list: mask_path = os.path.join(dir_path, config.mask_file) mask = loadMask(mask_path) print("Using mask:", mask_path) else: print("No 
mask used!") mask = None # Compute the population index using the classical equation population_index = 10**((mass_index - 1)/2.5) # Found to be more consistent when comparing fluxes #population_index = 10**((mass_index - 1)/2.3) # TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1 ### SENSOR CHARACTERIZATION ### # Computes FWHM of stars and noise profile of the sensor # File which stores the sensor characterization profile sensor_characterization_file = "flux_sensor_characterization.json" sensor_characterization_path = os.path.join(dir_path, sensor_characterization_file) # Load sensor characterization file if present, so the procedure can be skipped if os.path.isfile(sensor_characterization_path): # Load the JSON file with open(sensor_characterization_path) as f: data = " ".join(f.readlines()) sensor_data = json.loads(data) # Remove the info entry if '-1' in sensor_data: del sensor_data['-1'] else: # Run sensor characterization sensor_data = sensorCharacterization(config, dir_path) # Save to file for posterior use with open(sensor_characterization_path, 'w') as f: # Add an explanation what each entry means sensor_data_save = dict(sensor_data) sensor_data_save['-1'] = {"FF file name": ['median star FWHM', 'median background noise stddev']} # Convert collection areas to JSON out_str = json.dumps(sensor_data_save, indent=4, sort_keys=True) # Save to disk f.write(out_str) # Compute the nighly mean FWHM and noise stddev fwhm_nightly_mean = np.mean([sensor_data[key][0] for key in sensor_data]) stddev_nightly_mean = np.mean([sensor_data[key][1] for key in sensor_data]) ### ### # Perform shower association associations, _ = showerAssociation(config, [ftpdetectinfo_path], shower_code=shower_code, \ show_plot=False, save_plot=False, plot_activity=False) # Init the flux configuration flux_config = FluxConfig() # Remove all meteors which begin below the limit height filtered_associations = {} for key in associations: meteor, shower = associations[key] if 
meteor.beg_alt > flux_config.elev_limit: print("Rejecting:", meteor.jdt_ref) filtered_associations[key] = [meteor, shower] associations = filtered_associations # If there are no shower association, return nothing if not associations: print("No meteors associated with the shower!") return None # Print the list of used meteors peak_mags = [] for key in associations: meteor, shower = associations[key] if shower is not None: # Compute peak magnitude peak_mag = np.min(meteor.mag_array) peak_mags.append(peak_mag) print("{:.6f}, {:3s}, {:+.2f}".format(meteor.jdt_ref, shower.name, peak_mag)) print() ### COMPUTE COLLECTION AREAS ### # Make a file name to save the raw collection areas col_areas_file_name = generateColAreaJSONFileName(platepar.station_code, flux_config.side_points, \ flux_config.ht_min, flux_config.ht_max, flux_config.dht, flux_config.elev_limit) # Check if the collection area file exists. If yes, load the data. If not, generate collection areas if col_areas_file_name in os.listdir(dir_path): col_areas_ht = loadRawCollectionAreas(dir_path, col_areas_file_name) print("Loaded collection areas from:", col_areas_file_name) else: # Compute the collecting areas segments per height col_areas_ht = collectingArea(platepar, mask=mask, side_points=flux_config.side_points, \ ht_min=flux_config.ht_min, ht_max=flux_config.ht_max, dht=flux_config.dht, \ elev_limit=flux_config.elev_limit) # Save the collection areas to file saveRawCollectionAreas(dir_path, col_areas_file_name, col_areas_ht) print("Saved raw collection areas to:", col_areas_file_name) ### ### # Compute the raw collection area at the height of 100 km col_area_100km_raw = 0 col_areas_100km_blocks = col_areas_ht[100000.0] for block in col_areas_100km_blocks: col_area_100km_raw += col_areas_100km_blocks[block][0] print("Raw collection area at height of 100 km: {:.2f} km^2".format(col_area_100km_raw/1e6)) # Compute the pointing of the middle of the FOV _, ra_mid, dec_mid, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], 
[platepar.X_res/2], [platepar.Y_res/2], \ [1], platepar, extinction_correction=False) azim_mid, elev_mid = raDec2AltAz(ra_mid[0], dec_mid[0], J2000_JD.days, platepar.lat, platepar.lon) # Compute the range to the middle point ref_ht = 100000 r_mid, _, _, _ = xyHt2Geo(platepar, platepar.X_res/2, platepar.Y_res/2, ref_ht, indicate_limit=True, \ elev_limit=flux_config.elev_limit) print("Range at 100 km in the middle of the image: {:.2f} km".format(r_mid/1000)) ### Compute the average angular velocity to which the flux variation throught the night will be normalized # The ang vel is of the middle of the FOV in the middle of observations # Middle Julian date of the night jd_night_mid = (datetime2JD(dt_beg) + datetime2JD(dt_end))/2 # Compute the apparent radiant ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_night_mid) # Compute the radiant elevation radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_night_mid, platepar.lat, platepar.lon) # Compute the angular velocity in the middle of the FOV rad_dist_night_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim_mid), np.radians(elev_mid)) ang_vel_night_mid = v_init*np.sin(rad_dist_night_mid)/r_mid ### # Compute the average limiting magnitude to which all flux will be normalized # Standard deviation of star PSF, nightly mean (px) star_stddev = fwhm_nightly_mean/2.355 # # Compute the theoretical stellar limiting magnitude (nightly average) # star_sum = 2*np.pi*(config.k1_det*stddev_nightly_mean + config.j1_det)*star_stddev**2 # lm_s_nightly_mean = -2.5*np.log10(star_sum) + mag_lev_nightly_mean # Compute the theoretical stellar limiting magnitude using an empirical model (nightly average) lm_s_nightly_mean = stellarLMModel(mag_lev_nightly_mean) # A meteor needs to be visible on at least 4 frames, thus it needs to have at least 4x the mass to produce # that amount of light. 
1 magnitude difference scales as -0.4 of log of mass, thus: # frame_min_loss = np.log10(config.line_minimum_frame_range_det)/(-0.4) frame_min_loss = 0.0 # TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!11 print("Frame min loss: {:.2} mag".format(frame_min_loss)) lm_s_nightly_mean += frame_min_loss # Compute apparent meteor magnitude lm_m_nightly_mean = lm_s_nightly_mean - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \ np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_night_mid)/(config.fps*r_mid*fwhm_nightly_mean)) \ ) # print("Stellar lim mag using detection thresholds:", lm_s_nightly_mean) print("Apparent meteor limiting magnitude:", lm_m_nightly_mean) ### Apply time-dependent corrections ### # Track values used for flux sol_data = [] flux_lm_6_5_data = [] meteor_num_data = [] effective_collection_area_data = [] radiant_elev_data = [] radiant_dist_mid_data = [] ang_vel_mid_data = [] lm_s_data = [] lm_m_data = [] sensitivity_corr_data = [] range_corr_data = [] radiant_elev_corr_data = [] ang_vel_corr_data = [] total_corr_data = [] # Go through all time bins within the observation period total_time_hrs = (dt_end - dt_beg).total_seconds()/3600 nbins = int(np.ceil(total_time_hrs/timebin)) for t_bin in range(nbins): for subbin in range(flux_config.sub_time_bins): # Compute bin start and end time bin_dt_beg = dt_beg + datetime.timedelta(hours=(timebin*t_bin + timebin*subbin/flux_config.sub_time_bins)) bin_dt_end = bin_dt_beg + datetime.timedelta(hours=timebin) if bin_dt_end > dt_end: bin_dt_end = dt_end # Compute bin duration in hours bin_hours = (bin_dt_end - bin_dt_beg).total_seconds()/3600 # Convert to Julian date bin_jd_beg = datetime2JD(bin_dt_beg) bin_jd_end = datetime2JD(bin_dt_end) jd_mean = (bin_jd_beg + bin_jd_end)/2 # Compute the mean solar longitude sol_mean = np.degrees(jd2SolLonSteyaert(jd_mean)) ### Compute the radiant elevation at the middle of the time bin ### # Compute the apparent radiant 
ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_mean) # Compute the mean meteor height meteor_ht_beg = heightModel(v_init, ht_type='beg') meteor_ht_end = heightModel(v_init, ht_type='end') meteor_ht = (meteor_ht_beg + meteor_ht_end)/2 # Compute the standard deviation of the height meteor_ht_std = meteor_ht*ht_std_percent/100.0 # Init the Gaussian height distribution meteor_ht_gauss = scipy.stats.norm(meteor_ht, meteor_ht_std) # Compute the radiant elevation radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_mean, platepar.lat, platepar.lon) # Only select meteors in this bin and not too close to the radiant bin_meteors = [] bin_ffs = [] for key in associations: meteor, shower = associations[key] if shower is not None: if (shower.name == shower_code) and (meteor.jdt_ref > bin_jd_beg) \ and (meteor.jdt_ref <= bin_jd_end): # Filter out meteors ending too close to the radiant if np.degrees(angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), \ np.radians(meteor.end_azim), np.radians(meteor.end_alt))) >= flux_config.rad_dist_min: bin_meteors.append([meteor, shower]) bin_ffs.append(meteor.ff_name) ### ### print() print() print("-- Bin information ---") print("Bin beg:", bin_dt_beg) print("Bin end:", bin_dt_end) print("Sol mid: {:.5f}".format(sol_mean)) print("Radiant elevation: {:.2f} deg".format(radiant_elev)) print("Apparent speed: {:.2f} km/s".format(v_init/1000)) # If the elevation of the radiant is below the limit, skip this bin if radiant_elev < flux_config.rad_elev_limit: print("!!! Mean radiant elevation below {:.2f} deg threshold, skipping time bin!".format(flux_config.rad_elev_limit)) continue # The minimum duration of the time bin should be larger than 50% of the given dt if bin_hours < 0.5*timebin: print("!!! 
Time bin duration of {:.2f} h is shorter than 0.5x of the time bin!".format(bin_hours)) continue if len(bin_meteors) >= flux_config.meteros_min: print("Meteors:", len(bin_meteors)) ### Weight collection area by meteor height distribution ### # Determine weights for each height weight_sum = 0 weights = {} for ht in col_areas_ht: wt = meteor_ht_gauss.pdf(float(ht)) weight_sum += wt weights[ht] = wt # Normalize the weights so that the sum is 1 for ht in weights: weights[ht] /= weight_sum ### ### col_area_meteor_ht_raw = 0 for ht in col_areas_ht: for block in col_areas_ht[ht]: col_area_meteor_ht_raw += weights[ht]*col_areas_ht[ht][block][0] print("Raw collection area at meteor heights: {:.2f} km^2".format(col_area_meteor_ht_raw/1e6)) # Compute the angular velocity in the middle of the FOV rad_dist_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim_mid), np.radians(elev_mid)) ang_vel_mid = v_init*np.sin(rad_dist_mid)/r_mid ### Compute the limiting magnitude ### # Compute the mean star FWHM in the given bin fwhm_bin_mean = np.mean([sensor_data[ff_name][0] for ff_name in bin_ffs]) # Compute the mean background stddev in the given bin stddev_bin_mean = np.mean([sensor_data[ff_name][1] for ff_name in bin_ffs]) # Compute the mean photometric zero point in the given bin mag_lev_bin_mean = np.mean([recalibrated_platepars[ff_name].mag_lev for ff_name in bin_ffs if ff_name in recalibrated_platepars]) # # Standard deviation of star PSF, nightly mean (px) # star_stddev = fwhm_bin_mean/2.355 # Compute the theoretical stellar limiting magnitude (bin average) # star_sum = 2*np.pi*(config.k1_det*stddev_bin_mean + config.j1_det)*star_stddev**2 # lm_s = -2.5*np.log10(star_sum) + mag_lev_bin_mean # Use empirical LM calculation lm_s = stellarLMModel(mag_lev_bin_mean) lm_s += frame_min_loss # ### TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!11 # # Artificialy increase limiting magnitude # lm_s 
+= 1.2 # ##### # Compute apparent meteor magnitude lm_m = lm_s - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \ np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_mid)/(config.fps*r_mid*fwhm_bin_mean))) ### ### # Final correction area value (height-weightned) collection_area = 0 # Keep track of the corrections sensitivity_corr_arr = [] range_corr_arr = [] radiant_elev_corr_arr = [] ang_vel_corr_arr = [] total_corr_arr = [] col_area_raw_arr = [] col_area_eff_arr = [] col_area_eff_block_dict = {} # Go through all heights and segment blocks for ht in col_areas_ht: for img_coords in col_areas_ht[ht]: x_mean, y_mean = img_coords # Unpack precomputed values area, azim, elev, sensitivity_ratio, r = col_areas_ht[ht][img_coords] # Compute the angular velocity (rad/s) in the middle of this block rad_dist = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim), np.radians(elev)) ang_vel = v_init*np.sin(rad_dist)/r # If the angular distance from the radiant is less than 15 deg, don't use the block # in the effective collection area if np.degrees(rad_dist) < flux_config.rad_dist_min: area = 0.0 # Compute the range correction range_correction = (1e5/r)**2 #ang_vel_correction = ang_vel/ang_vel_mid # Compute angular velocity correction relative to the nightly mean ang_vel_correction = ang_vel/ang_vel_night_mid ### Apply corrections correction_ratio = 1.0 # Correct the area for vignetting and extinction sensitivity_corr_arr.append(sensitivity_ratio) correction_ratio *= sensitivity_ratio # Correct for the range (cap to an order of magnitude correction) range_correction = max(range_correction, 0.1) range_corr_arr.append(range_correction) correction_ratio *= range_correction # Correct for the radiant elevation (cap to an order of magnitude correction) radiant_elev_correction = np.sin(np.radians(radiant_elev)) radiant_elev_correction = max(radiant_elev_correction, 0.1) radiant_elev_corr_arr.append(radiant_elev_correction) correction_ratio *= 
radiant_elev_correction # Correct for angular velocity (cap to an order of magnitude correction) ang_vel_correction = min(max(ang_vel_correction, 0.1), 10) correction_ratio *= ang_vel_correction ang_vel_corr_arr.append(ang_vel_correction) # Add the collection area to the final estimate with the height weight # Raise the correction to the mass index power total_correction = correction_ratio**(mass_index - 1) total_correction = min(max(total_correction, 0.1), 10) collection_area += weights[ht]*area*total_correction total_corr_arr.append(total_correction) col_area_raw_arr.append(weights[ht]*area) col_area_eff_arr.append(weights[ht]*area*total_correction) if img_coords not in col_area_eff_block_dict: col_area_eff_block_dict[img_coords] = [] col_area_eff_block_dict[img_coords].append(weights[ht]*area*total_correction) # Compute mean corrections sensitivity_corr_avg = np.mean(sensitivity_corr_arr) range_corr_avg = np.mean(range_corr_arr) radiant_elev_corr_avg = np.mean(radiant_elev_corr_arr) ang_vel_corr_avg = np.mean(ang_vel_corr_arr) total_corr_avg = np.median(total_corr_arr) col_area_raw_sum = np.sum(col_area_raw_arr) col_area_eff_sum = np.sum(col_area_eff_arr) print("Raw collection area at meteor heights (CHECK): {:.2f} km^2".format(col_area_raw_sum/1e6)) print("Eff collection area at meteor heights (CHECK): {:.2f} km^2".format(col_area_eff_sum/1e6)) # ### PLOT HOW THE CORRECTION VARIES ACROSS THE FOV # x_arr = [] # y_arr = [] # col_area_eff_block_arr = [] # for img_coords in col_area_eff_block_dict: # x_mean, y_mean = img_coords # #if x_mean not in x_arr: # x_arr.append(x_mean) # #if y_mean not in y_arr: # y_arr.append(y_mean) # col_area_eff_block_arr.append(np.sum(col_area_eff_block_dict[img_coords])) # x_unique = np.unique(x_arr) # y_unique = np.unique(y_arr) # # plt.pcolormesh(x_arr, y_arr, np.array(col_area_eff_block_arr).reshape(len(x_unique), len(y_unique)).T, shading='auto') # plt.title("TOTAL = " + str(np.sum(col_area_eff_block_arr)/1e6)) # 
plt.scatter(x_arr, y_arr, c=np.array(col_area_eff_block_arr)/1e6) # #plt.pcolor(np.array(x_arr).reshape(len(x_unique), len(y_unique)), np.array(y_arr).reshape(len(x_unique), len(y_unique)), np.array(col_area_eff_block_arr).reshape(len(x_unique), len(y_unique))/1e6) # plt.colorbar(label="km^2") # plt.gca().invert_yaxis() # plt.show() # ### # Compute the flux at the bin LM (meteors/1000km^2/h) flux = 1e9*len(bin_meteors)/collection_area/bin_hours # Compute the flux scaled to the nightly mean LM flux_lm_nightly_mean = flux*population_index**(lm_m_nightly_mean - lm_m) # Compute the flux scaled to +6.5M flux_lm_6_5 = flux*population_index**(6.5 - lm_m) print("-- Sensor information ---") print("Star FWHM: {:5.2f} px".format(fwhm_bin_mean)) print("Bkg stddev: {:4.1f} ADU".format(stddev_bin_mean)) print("Photom ZP: {:+6.2f} mag".format(mag_lev_bin_mean)) print("Stellar LM: {:+.2f} mag".format(lm_s)) print("-- Flux ---") print("Meteors: {:d}".format(len(bin_meteors))) print("Col area: {:d} km^2".format(int(collection_area/1e6))) print("Ang vel: {:.2f} deg/s".format(np.degrees(ang_vel_mid))) print("LM app: {:+.2f} mag".format(lm_m)) print("Flux: {:.2f} meteors/1000km^2/h".format(flux)) print("to {:+.2f}: {:.2f} meteors/1000km^2/h".format(lm_m_nightly_mean, flux_lm_nightly_mean)) print("to +6.50: {:.2f} meteors/1000km^2/h".format(flux_lm_6_5)) sol_data.append(sol_mean) flux_lm_6_5_data.append(flux_lm_6_5) meteor_num_data.append(len(bin_meteors)) effective_collection_area_data.append(collection_area) radiant_elev_data.append(radiant_elev) radiant_dist_mid_data.append(np.degrees(rad_dist_mid)) ang_vel_mid_data.append(np.degrees(ang_vel_mid)) lm_s_data.append(lm_s) lm_m_data.append(lm_m) sensitivity_corr_data.append(sensitivity_corr_avg) range_corr_data.append(range_corr_avg) radiant_elev_corr_data.append(radiant_elev_corr_avg) ang_vel_corr_data.append(ang_vel_corr_avg) total_corr_data.append(total_corr_avg) # Print the results print("Solar longitude, Flux at LM +6.5:") for sol, 
flux_lm_6_5 in zip(sol_data, flux_lm_6_5_data): print("{:9.5f}, {:8.4f}".format(sol, flux_lm_6_5)) if show_plots and len(sol_data): # Plot a histogram of peak magnitudes plt.hist(peak_mags, cumulative=True, log=True, bins=len(peak_mags), density=True) # Plot population index r_intercept = -0.7 x_arr = np.linspace(np.min(peak_mags), np.percentile(peak_mags, 60)) plt.plot(x_arr, 10**(np.log10(population_index)*x_arr + r_intercept)) plt.title("r = {:.2f}".format(population_index)) plt.show() # Plot how the derived values change throughout the night fig, axes \ = plt.subplots(nrows=4, ncols=2, sharex=True, figsize=(10, 8)) ((ax_met, ax_lm), (ax_rad_elev, ax_corrs), (ax_rad_dist, ax_col_area), (ax_ang_vel, ax_flux)) = axes fig.suptitle("{:s}, s = {:.2f}, r = {:.2f}".format(shower_code, mass_index, population_index)) ax_met.scatter(sol_data, meteor_num_data) ax_met.set_ylabel("Meteors") ax_rad_elev.plot(sol_data, radiant_elev_data) ax_rad_elev.set_ylabel("Radiant elev (deg)") ax_rad_dist.plot(sol_data, radiant_dist_mid_data) ax_rad_dist.set_ylabel("Radiant dist (deg)") ax_ang_vel.plot(sol_data, ang_vel_mid_data) ax_ang_vel.set_ylabel("Ang vel (deg/s)") ax_ang_vel.set_xlabel("La Sun (deg)") ax_lm.plot(sol_data, lm_s_data, label="Stellar") ax_lm.plot(sol_data, lm_m_data, label="Meteor") ax_lm.set_ylabel("LM") ax_lm.legend() ax_corrs.plot(sol_data, sensitivity_corr_data, label="Sensitivity") ax_corrs.plot(sol_data, range_corr_data, label="Range") ax_corrs.plot(sol_data, radiant_elev_corr_data, label="Rad elev") ax_corrs.plot(sol_data, ang_vel_corr_data, label="Ang vel") ax_corrs.plot(sol_data, total_corr_data, label="Total (median)") ax_corrs.set_ylabel("Corrections") ax_corrs.legend() ax_col_area.plot(sol_data, np.array(effective_collection_area_data)/1e6) ax_col_area.plot(sol_data, len(sol_data)*[col_area_100km_raw/1e6], color='k', \ label="Raw col area at 100 km") ax_col_area.plot(sol_data, len(sol_data)*[col_area_meteor_ht_raw/1e6], color='k', linestyle='dashed', \ 
label="Raw col area at met ht") ax_col_area.set_ylabel("Eff. col. area (km^2)") ax_col_area.legend() ax_flux.scatter(sol_data, flux_lm_6_5_data) ax_flux.set_ylabel("Flux@+6.5M (met/1000km^2/h)") ax_flux.set_xlabel("La Sun (deg)") plt.tight_layout() plt.show() return sol_data, flux_lm_6_5_data
def alignPlatepar(config, platepar, calstars_time, calstars_coords, scale_update=False, show_plot=False):
    """ Align the platepar using FFT registration between catalog stars and the given list of image stars.

    Arguments:
        config:
        platepar: [Platepar instance] Initial platepar.
        calstars_time: [list] (year, month, day, hour, minute, second, millisecond) of the middle of
            the FF file used for alignment.
        calstars_coords: [ndarray] A 2D numpy array of (x, y) coordinates of image stars.

    Keyword arguments:
        scale_update: [bool] Update the platepar scale. False by default.
        show_plot: [bool] Show the comparison between the reference and image synthetic images.

    Return:
        platepar_aligned: [Platepar instance] The aligned platepar.
    """

    # Work on a copy so the caller's config parameters are left untouched
    config = copy.deepcopy(config)

    # Iteratively tune the catalog limiting magnitude so the catalog and image star counts roughly
    #   match (at most max_iters adjustments of mag_step each)
    max_iters = 10
    mag_step = 0.2
    search_fainter = True

    for _ in range(max_iters):

        # Read the star catalog with the current limiting magnitude
        catalog_stars, _, _ = StarCatalog.readStarCatalog(config.star_catalog_path, \
            config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
            mag_band_ratios=config.star_catalog_band_ratios)

        # Compute the RA/Dec of the image centre
        _, ra_centre, dec_centre, _ = ApplyAstrometry.xyToRaDecPP([calstars_time], \
            [platepar.X_res/2], [platepar.Y_res/2], [1], platepar, extinction_correction=False)
        ra_centre, dec_centre = ra_centre[0], dec_centre[0]

        # Julian date of the observation
        jd = date2JD(*calstars_time)

        # FOV radius in degrees (diagonal of the FOV)
        fov_y, fov_x = ApplyAstrometry.computeFOVSize(platepar)
        fov_radius = np.sqrt(fov_x**2 + fov_y**2)

        # Keep only catalog stars which should fall inside the FOV
        filtered_indices, _ = subsetCatalog(catalog_stars, ra_centre, dec_centre, jd, \
            platepar.lat, platepar.lon, fov_radius, config.catalog_mag_limit)
        ra_catalog, dec_catalog, _ = catalog_stars[filtered_indices].T

        # Project the selected catalog stars to image coordinates
        catalog_x, catalog_y = ApplyAstrometry.raDecToXYPP(ra_catalog, dec_catalog, jd, platepar)
        catalog_xy = np.c_[catalog_x, catalog_y]

        # Drop projected stars which land outside the image bounds
        inside = (catalog_xy[:, 0] > 0) & (catalog_xy[:, 0] < config.width) \
            & (catalog_xy[:, 1] > 0) & (catalog_xy[:, 1] < config.height)
        catalog_xy = catalog_xy[inside]

        # More catalog than image stars means the limiting magnitude is too faint, so search in the
        #   brighter direction; otherwise keep searching fainter
        search_fainter = len(catalog_xy) <= len(calstars_coords)

        # Step the limiting magnitude in the chosen direction
        config.catalog_mag_limit += mag_step if search_fainter else -mag_step

    print('Final catalog limiting magnitude:', config.catalog_mag_limit)

    # Estimate the transform (rotation, scale, shift) between image and predicted catalog positions
    angle, scale, translation_x, translation_y = findStarsTransform(config, calstars_coords, \
        catalog_xy, show_plot=show_plot)

    ### Update the platepar ###

    platepar_aligned = copy.deepcopy(platepar)

    # Apply the rotation correction
    platepar_aligned.pos_angle_ref = (platepar_aligned.pos_angle_ref - angle)%360

    # Apply the scale correction if requested
    if scale_update:
        platepar_aligned.F_scale *= scale

    # Compute the new reference RA/Dec from the translated image centre
    _, ra_centre_new, dec_centre_new, _ = ApplyAstrometry.xyToRaDecPP([jd2Date(platepar.JD)], \
        [platepar.X_res/2 - platepar.x_poly_fwd[0] - translation_x], \
        [platepar.Y_res/2 - platepar.y_poly_fwd[0] - translation_y], [1], platepar, \
        extinction_correction=False)

    platepar_aligned.RA_d = ra_centre_new[0]
    platepar_aligned.dec_d = dec_centre_new[0]

    # Recompute the FOV centre in Alt/Az and the rotation wrt horizon
    platepar_aligned.az_centre, platepar_aligned.alt_centre = raDec2AltAz(platepar.RA_d, \
        platepar.dec_d, platepar.JD, platepar.lat, platepar.lon)
    platepar_aligned.rotation_from_horiz = ApplyAstrometry.rotationWrtHorizon(platepar_aligned)

    ###

    return platepar_aligned
def autoCheckFit(config, platepar, calstars_list, _fft_refinement=False):
    """ Attempts to refine the astrometry fit with the given stars and initial astrometry parameters.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Initial astrometry parameters.
        calstars_list: [list] A list containing stars extracted from FF files. See RMS.Formats.CALSTARS
            for more details.

    Keyword arguments:
        _fft_refinement: [bool] Internal flag indicating that autoCF is running the second time
            recursively after FFT platepar adjustment. Limits the FFT fallback to a single retry.

    Return:
        (platepar, fit_status):
            platepar: [Platepar structure] Estimated/refined platepar.
            fit_status: [bool] True if the fit was successful, False if not.
    """

    def _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement):
        """ Run FFT alignment before giving up on ACF.

        On the first failure (_fft_refinement=False) the platepar is realigned via FFT phase
        correlation and autoCheckFit is rerun once recursively with _fft_refinement=True. On the
        second failure the function gives up and returns (platepar, False).
        """

        if not _fft_refinement:

            print()
            print("-------------------------------------------------------------------------------")
            print('The initial platepar is bad, trying to refine it using FFT phase correlation...')
            print()

            # Prepare data for FFT image registration
            calstars_dict = {ff_file: star_data for ff_file, star_data in calstars_list}

            # Extract star list from CALSTARS file from FF file with most stars
            max_len_ff = max(calstars_dict, key=lambda k: len(calstars_dict[k]))

            # Take only X, Y (change order so X is first)
            calstars_coords = np.array(calstars_dict[max_len_ff])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]

            # Get the time of the FF file
            calstars_time = FFfile.getMiddleTimeFF(max_len_ff, config.fps, ret_milliseconds=True)

            # Try aligning the platepar using FFT image registration
            platepar_refined = alignPlatepar(config, platepar, calstars_time, calstars_coords)

            print()

            ### If there are still not enough stars matched, try FFT again ###

            min_radius = 10

            # Prepare star dictionary to check the match
            dt = FFfile.getMiddleTimeFF(max_len_ff, config.fps, ret_milliseconds=True)
            jd = date2JD(*dt)
            star_dict_temp = {}
            star_dict_temp[jd] = calstars_dict[max_len_ff]

            # Check the number of matched stars
            n_matched, _, _, _ = matchStarsResiduals(config, platepar_refined, catalog_stars, \
                star_dict_temp, min_radius, ret_nmatch=True, verbose=True)

            # Realign again if necessary
            if n_matched < config.min_matched_stars:

                print()
                print("-------------------------------------------------------------------------------")
                print('Doing a second FFT pass as the number of matched stars was too small...')
                print()

                platepar_refined = alignPlatepar(config, platepar_refined, calstars_time, \
                    calstars_coords)

                print()

            ### ###

            # Redo autoCF; _fft_refinement=True guarantees this recursion happens at most once
            return autoCheckFit(config, platepar_refined, calstars_list, _fft_refinement=True)

        else:
            print('Auto Check Fit failed completely, please redo the plate manually!')
            return platepar, False

    if _fft_refinement:
        print('Second ACF run with an updated platepar via FFT phase correlation...')

    # Load catalog stars (overwrite the mag band ratios if specific catalog is used)
    catalog_stars, _, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(\
        config.star_catalog_path, config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)

    # Dictionary which will contain the JD, and a list of (X, Y, bg_intens, intens) of the stars
    star_dict = starListToDict(config, calstars_list, max_ffs=config.calstars_files_N)

    # There has to be a minimum number of FF files (config.calstars_files_N) for star fitting
    if len(star_dict) < config.calstars_files_N:
        print('Not enough FF files in CALSTARS for ACF!')
        return platepar, False

    # Calculate the total number of calibration stars used
    total_calstars = sum([len(star_dict[key]) for key in star_dict])
    print('Total calstars:', total_calstars)

    if total_calstars < config.calstars_min_stars:
        print('Not enough calibration stars, need at least', config.calstars_min_stars)
        return platepar, False

    print()

    # A list of matching radiuses to try, from coarse to fine
    min_radius = 0.5
    radius_list = [10, 5, 3, 1.5, min_radius]

    # Calculate the function tolerance, so the desired precision can be reached (the number is
    #   calculated in the same regard as the cost function)
    fatol, xatol_ang = computeMinimizationTolerances(config, platepar, len(star_dict))

    ### If the initial match is good enough, do only a quick recalibration ###

    # Match the stars and calculate the residuals
    n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
        min_radius, ret_nmatch=True)

    if n_matched >= config.calstars_files_N:

        # Check if the average distance with the tightest radius is close
        if avg_dist < config.dist_check_quick_threshold:
            print("Using quick fit with smaller radiia...")

            # Use a reduced set of initial radius values
            radius_list = [1.5, min_radius]

    ##########

    # Match increasingly smaller search radii around image stars
    for i, match_radius in enumerate(radius_list):

        # Match the stars and calculate the residuals
        n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, \
            star_dict, match_radius, ret_nmatch=True)

        print()
        print("-------------------------------------------------------------")
        print("Refining camera pointing with max pixel deviation = {:.1f} px".format(match_radius))
        print("Initial values:")
        print(" Matched stars = {:>6d}".format(n_matched))
        print(" Average deviation = {:>6.2f} px".format(avg_dist))

        # The initial number of matched stars has to be at least the number of FF images, otherwise
        #   it means that the initial platepar is no good
        if n_matched < config.calstars_files_N:
            print("The total number of initially matched stars is too small! Please manually redo the plate or make sure there are enough calibration stars.")

            # Try to refine the platepar with FFT phase correlation and redo the ACF
            return _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement)

        # Check if the platepar is good enough and do not estimate further parameters
        if checkFitGoodness(config, platepar, catalog_stars, star_dict, min_radius, verbose=True):

            # Print out notice only if the platepar is good right away
            if i == 0:
                print("Initial platepar is good enough!")

            return platepar, True

        # Initial parameters for the astrometric fit (pointing, rotation, scale)
        p0 = [platepar.RA_d, platepar.dec_d, platepar.pos_angle_ref, platepar.F_scale]

        # Fit the astrometric parameters
        res = scipy.optimize.minimize(_calcImageResidualsAstro, p0, args=(config, platepar, \
            catalog_stars, star_dict, match_radius), method='Nelder-Mead', \
            options={'fatol': fatol, 'xatol': xatol_ang})

        print(res)

        # If the fit was not successful, stop further fitting
        if not res.success:

            # Try to refine the platepar with FFT phase correlation and redo the ACF
            return _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement)

        else:
            # If the fit was successful, use the new parameters from now on
            ra_ref, dec_ref, pos_angle_ref, F_scale = res.x

            platepar.RA_d = ra_ref
            platepar.dec_d = dec_ref
            platepar.pos_angle_ref = pos_angle_ref
            platepar.F_scale = F_scale

        # Check if the platepar is good enough and do not estimate further parameters
        if checkFitGoodness(config, platepar, catalog_stars, star_dict, min_radius, verbose=True):
            return platepar, True

    # Match the stars with the tightest radius and calculate the final residuals
    n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
        star_dict, min_radius, ret_nmatch=True)

    print("FINAL SOLUTION with radius {:.1} px:".format(min_radius))
    print(" Matched stars = {:>6d}".format(n_matched))
    print(" Average deviation = {:>6.2f} px".format(avg_dist))

    # Mark the platepar to indicate that it was automatically refined with CheckFit
    platepar.auto_check_fit_refined = True

    # Recompute alt/az of the FOV centre
    platepar.az_centre, platepar.alt_centre = raDec2AltAz(platepar.RA_d, platepar.dec_d, \
        platepar.JD, platepar.lat, platepar.lon)

    # Recompute the rotation wrt horizon
    platepar.rotation_from_horiz = rotationWrtHorizon(platepar)

    return platepar, True
def recalibratePlateparsForFF(
    prev_platepar,
    ff_file_names,
    calstars,
    catalog_stars,
    config,
    lim_mag=None,
    ignore_distance_threshold=False,
):
    """ Recalibrate platepars corresponding to ff files based on the stars.

    Arguments:
        prev_platepar: [Platepar] Platepar used as the starting point for the first FF file; on
            success it is updated to the most recently recalibrated platepar.
        ff_file_names: [list] List of FF file names to recalibrate.
        calstars: [dict] Dictionary mapping an FF file name to the list of stars detected on that
            image (the code indexes it as calstars[ff_name]).
        catalog_stars: [ndarray] Catalog stars used for star matching.
        config: [config]

    Keyword arguments:
        lim_mag: [float] Catalog limiting magnitude to use for recalibration. None by default.
        ignore_distance_threshold: [bool] Don't consider the recalib as failed if the median distance
            is larger than the threshold. False by default.

    Returns:
        recalibrated_platepars: [dict] A dictionary where one key is ff file name and the value is a
            calibrated corresponding platepar.
    """

    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for ff_name in ff_file_names:

        # Start every FF file from the most recent successful platepar
        working_platepar = copy.deepcopy(prev_platepar)

        # Skip this FF file if it was already recalibrated (ff_file_names may contain duplicates)
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print('------------------------------------------------------------------------------')

        # Find extracted stars on this image
        if not ff_name in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry, the residuals
        #   function needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        result = None

        # Skip recalibration if less than a minimum number of stars were detected
        if (len(calstars[ff_name]) >= config.ff_min_stars) and (len(
            calstars[ff_name]) >= config.min_matched_stars):

            # Recalibrate the platepar using star matching
            result, min_match_radius = recalibrateFF(
                config,
                working_platepar,
                jd,
                star_dict_ff,
                catalog_stars,
                lim_mag=lim_mag,
                ignore_distance_threshold=ignore_distance_threshold,
            )

            # If the recalibration failed, try using FFT alignment
            if result is None:

                print()
                print('Running FFT alignment...')

                # Run FFT alignment (swap columns so X is first)
                calstars_coords = np.array(star_dict_ff[jd])[:, :2]
                calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
                print(calstars_time)

                test_platepar = alignPlatepar(config, prev_platepar, calstars_time, calstars_coords,
                    show_plot=False)

                # Try to recalibrate after FFT alignment
                result, _ = recalibrateFF(config, test_platepar, jd, star_dict_ff, catalog_stars,
                    lim_mag=lim_mag)

                # If the FFT alignment failed, align the original platepar using the smallest radius
                #   that matched and force save the platepar
                if (result is None) and (min_match_radius is not None):
                    print()
                    print("Using the old platepar with the minimum match radius of: {:.2f}"
                        .format(min_match_radius))

                    result, _ = recalibrateFF(
                        config,
                        working_platepar,
                        jd,
                        star_dict_ff,
                        catalog_stars,
                        max_match_radius=min_match_radius,
                        force_platepar_save=True,
                        lim_mag=lim_mag,
                    )

                    if result is not None:
                        working_platepar = result

                # If the FFT-aligned recalibration succeeded, save the result
                else:
                    working_platepar = result

            # If the first recalibration succeeded, save the result
            else:
                working_platepar = result

        # Store the platepar if the fit succeeded
        if result is not None:

            # Recompute alt/az of the FOV centre
            working_platepar.az_centre, working_platepar.alt_centre = raDec2AltAz(
                working_platepar.RA_d,
                working_platepar.dec_d,
                working_platepar.JD,
                working_platepar.lat,
                working_platepar.lon,
            )

            # Recompute the rotation wrt horizon
            working_platepar.rotation_from_horiz = rotationWrtHorizon(working_platepar)

            # Mark the platepar to indicate that it was automatically recalibrated on an individual
            #   FF file
            working_platepar.auto_recalibrated = True

            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:
            print('Recalibration of {:s} failed, using the previous platepar...'.format(ff_name))

            # Mark the platepar to indicate that autorecalib failed
            prev_platepar_tmp = copy.deepcopy(prev_platepar)
            prev_platepar_tmp.auto_recalibrated = False

            # If the aligning failed, set the previous platepar as the one that should be used for
            #   this FF file
            recalibrated_platepars[ff_name] = prev_platepar_tmp

    return recalibrated_platepars
def showerAssociation(config, ftpdetectinfo_list, shower_code=None, show_plot=False, save_plot=False, \
    plot_activity=False):
    """ Do single station shower association based on radiant direction and height.

    Arguments:
        config: [Config instance]
        ftpdetectinfo_list: [list] A list of paths to FTPdetectinfo files.

    Keyword arguments:
        shower_code: [str] Only use this one shower for association (e.g. ETA, PER, SDA). None by default,
            in which case all active showers will be associated.
        show_plot: [bool] Show the plot on the screen. False by default.
        save_plot: [bool] Save the plot in the folder with FTPdetectinfos. False by default.
        plot_activity: [bool] Whether to plot the shower activity plot of not. False by default.

    Return:
        associations, shower_counts: [tuple]
            - associations: [dict] A dictionary where the FF name and the meteor ordinal number on the FF
                file are keys, and the associated Shower object are values.
            - shower_counts: [list] A list of shower code and shower count pairs.
    """

    # Load the list of meteor showers
    shower_list = loadShowers(config.shower_path, config.shower_file_name)

    # Load FTPdetectinfos
    meteor_data = []
    for ftpdetectinfo_path in ftpdetectinfo_list:

        if not os.path.isfile(ftpdetectinfo_path):
            print('No such file:', ftpdetectinfo_path)
            continue

        meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    # Nothing to associate
    if not len(meteor_data):
        return {}, []

    # Dictionary which holds FF names as keys and meteor measurements + associated showers as values
    associations = {}

    for meteor in meteor_data:

        ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, meteor_meas = meteor

        # Skip very short meteors
        if len(meteor_meas) < 4:
            continue

        # Check if the data is calibrated (first column of every measurement row is the calib flag)
        if not meteor_meas[0][0]:
            print('Data is not calibrated! Meteors cannot be associated to showers!')
            break

        # Init container for meteor observation
        meteor_obj = MeteorSingleStation(cam_code, config.latitude, config.longitude, ff_name)

        # Infill the meteor structure
        for entry in meteor_meas:

            calib_status, frame_n, x, y, ra, dec, azim, elev, inten, mag = entry

            # Compute the Julian data of every point (FF timestamp + frame offset)
            jd = datetime2JD(filenameToDatetime(ff_name) \
                + datetime.timedelta(seconds=float(frame_n)/fps))

            meteor_obj.addPoint(jd, ra, dec, mag)

        # Fit the great circle and compute the geometrical parameters
        meteor_obj.fitGC()

        # Skip all meteors with beginning heights below 15 deg
        if meteor_obj.beg_alt < 15:
            continue

        # Go through all showers in the list and find the best match
        best_match_shower = None
        best_match_dist = np.inf
        for shower_entry in shower_list:

            # Extract shower parameters
            shower = Shower(shower_entry)

            # If the shower code was given, only check this one shower
            if shower_code is not None:
                if shower.name.lower() != shower_code.lower():
                    continue

            ### Solar longitude filter

            # If the shower doesn't have a stated beginning or end, check if the meteor is within a preset
            #   threshold solar longitude difference
            if np.any(np.isnan([shower.lasun_beg, shower.lasun_end])):

                shower.lasun_beg = (shower.lasun_max - config.shower_lasun_threshold)%360
                shower.lasun_end = (shower.lasun_max + config.shower_lasun_threshold)%360

            # Filter out all showers which are not active
            if not isAngleBetween(np.radians(shower.lasun_beg), np.radians(meteor_obj.lasun),
                np.radians(shower.lasun_end)):

                continue

            ### ###

            ### Radiant filter ###

            # Assume a fixed meteor height for an approximate apparent radiant
            meteor_fixed_ht = 100000 # 100 km
            shower.computeApparentRadiant(config.latitude, config.longitude, meteor_obj.jdt_ref, \
                meteor_fixed_ht=meteor_fixed_ht)

            # Compute the angle between the meteor radiant and the great circle normal
            radiant_separation = meteor_obj.angularSeparationFromGC(shower.ra, shower.dec)

            # Make sure the meteor is within the radiant distance threshold
            if radiant_separation > config.shower_max_radiant_separation:
                continue

            # Compute angle between the meteor's beginning and end, and the shower radiant
            shower.radiant_vector = vectNorm(raDec2Vector(shower.ra, shower.dec))
            begin_separation = np.degrees(angularSeparationVect(shower.radiant_vector, \
                meteor_obj.meteor_begin_cartesian))
            end_separation = np.degrees(angularSeparationVect(shower.radiant_vector, \
                meteor_obj.meteor_end_cartesian))

            # Make sure the beginning of the meteor is closer to the radiant than it's end
            if begin_separation > end_separation:
                continue

            ### ###

            ### Height filter ###

            # Estimate the limiting meteor height from the velocity (meters)
            filter_beg_ht = heightModel(shower.v_init, ht_type='beg')
            filter_end_ht = heightModel(shower.v_init, ht_type='end')

            ### Estimate the meteor beginning height with +/- 1 frame, otherwise some short meteor may get
            ###   rejected

            # NOTE(review): these calls pass 3 arguments, matching estimateMeteorHeight(config, meteor_obj,
            #   shower); a later 2-argument redefinition of estimateMeteorHeight in this file would shadow
            #   it and make these calls raise TypeError — confirm only one definition is kept.
            meteor_obj_orig = copy.deepcopy(meteor_obj)

            # Shorter
            meteor_obj_m1 = copy.deepcopy(meteor_obj_orig)
            meteor_obj_m1.duration -= 1.0/config.fps
            meteor_beg_ht_m1 = estimateMeteorHeight(config, meteor_obj_m1, shower)

            # Nominal
            meteor_beg_ht = estimateMeteorHeight(config, meteor_obj_orig, shower)

            # Longer
            meteor_obj_p1 = copy.deepcopy(meteor_obj_orig)
            meteor_obj_p1.duration += 1.0/config.fps
            meteor_beg_ht_p1 = estimateMeteorHeight(config, meteor_obj_p1, shower)

            meteor_obj = meteor_obj_orig

            ### ###

            # If all heights (even those with +/- 1 frame) are outside the height range, reject the meteor
            if ((meteor_beg_ht_p1 < filter_end_ht) or (meteor_beg_ht_p1 > filter_beg_ht)) and \
                ((meteor_beg_ht < filter_end_ht) or (meteor_beg_ht > filter_beg_ht)) and \
                ((meteor_beg_ht_m1 < filter_end_ht) or (meteor_beg_ht_m1 > filter_beg_ht)):

                continue

            ### ###

            # Compute the radiant elevation above the horizon
            shower.azim, shower.elev = raDec2AltAz(shower.ra, shower.dec, meteor_obj.jdt_ref, \
                config.latitude, config.longitude)

            # Take the shower that's closest to the great circle if there are multiple candidates
            if radiant_separation < best_match_dist:
                best_match_dist = radiant_separation
                best_match_shower = copy.deepcopy(shower)

        # If a shower is given and the match is not this shower, skip adding the meteor to the list
        # If no specific shower is give for association, add all meteors
        if ((shower_code is not None) and (best_match_shower is not None)) or (shower_code is None):

            # Store the associated shower
            associations[(ff_name, meteor_No)] = [meteor_obj, best_match_shower]

    # Find shower frequency and sort by count
    shower_name_list_temp = []
    shower_list_temp = []
    for key in associations:
        _, shower = associations[key]

        if shower is None:
            shower_name = '...'
        else:
            shower_name = shower.name

        shower_name_list_temp.append(shower_name)
        shower_list_temp.append(shower)

    _, unique_showers_indices = np.unique(shower_name_list_temp, return_index=True)
    unique_shower_names = np.array(shower_name_list_temp)[unique_showers_indices]
    unique_showers = np.array(shower_list_temp)[unique_showers_indices]
    shower_counts = [[shower_obj, shower_name_list_temp.count(shower_name)] for shower_obj, \
        shower_name in zip(unique_showers, unique_shower_names)]
    shower_counts = sorted(shower_counts, key=lambda x: x[1], reverse=True)

    # Create a plot of showers
    if show_plot or save_plot:

        # Generate consistent colours
        colors_by_name = makeShowerColors(shower_list)

        # Map a shower (or None for sporadics) to its plot colour
        def get_shower_color(shower):
            try:
                return colors_by_name[shower.name] if shower else "0.4"
            except KeyError:
                return 'gray'

        # Init the figure
        plt.figure()

        # Init subplots depending on if the activity plot is done as well
        if plot_activity:
            gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
            ax_allsky = plt.subplot(gs[0], facecolor='black')
            ax_activity = plt.subplot(gs[1], facecolor='black')
        else:
            ax_allsky = plt.subplot(111, facecolor='black')

        # Init the all-sky plot
        allsky_plot = AllSkyPlot(ax_handle=ax_allsky)

        # Plot all meteors
        for key in associations:

            meteor_obj, shower = associations[key]

            ### Plot the observed meteor points ###
            color = get_shower_color(shower)
            allsky_plot.plot(meteor_obj.ra_array, meteor_obj.dec_array, color=color, linewidth=1, zorder=4)

            # Plot the peak of shower meteors a different color
            peak_color = 'blue'
            if shower is not None:
                peak_color = 'tomato'

            allsky_plot.scatter(meteor_obj.ra_array[-1], meteor_obj.dec_array[-1], c=peak_color, marker='+', \
                s=5, zorder=5)

            ### ###

            ### Plot fitted great circle points ###

            # Find the GC phase angle of the beginning of the meteor
            gc_beg_phase = meteor_obj.findGCPhase(meteor_obj.ra_array[0], meteor_obj.dec_array[0])[0]%360

            # If the meteor belongs to a shower, find the GC phase which ends at the shower
            if shower is not None:
                gc_end_phase = meteor_obj.findGCPhase(shower.ra, shower.dec)[0]%360

                # Fix 0/360 wrap
                if abs(gc_end_phase - gc_beg_phase) > 180:
                    if gc_end_phase > gc_beg_phase:
                        gc_end_phase -= 360
                    else:
                        gc_beg_phase -= 360

                gc_alpha = 1.0

            else:

                # If it's a sporadic, find the direction to which the meteor should extend
                gc_end_phase = meteor_obj.findGCPhase(meteor_obj.ra_array[-1], \
                    meteor_obj.dec_array[-1])[0]%360

                # Find the correct direction
                if (gc_beg_phase - gc_end_phase)%360 > (gc_end_phase - gc_beg_phase)%360:
                    gc_end_phase = gc_beg_phase - 170
                else:
                    gc_end_phase = gc_beg_phase + 170

                gc_alpha = 0.7

            # Store great circle beginning and end phase
            meteor_obj.gc_beg_phase = gc_beg_phase
            meteor_obj.gc_end_phase = gc_end_phase

            # Get phases 180 deg before the meteor
            phase_angles = np.linspace(gc_end_phase, gc_beg_phase, 100)%360

            # Compute RA/Dec of points on the great circle
            ra_gc, dec_gc = meteor_obj.sampleGC(phase_angles)

            # Cull all points below the horizon
            azim_gc, elev_gc = raDec2AltAz(ra_gc, dec_gc, meteor_obj.jdt_ref, config.latitude, \
                config.longitude)
            temp_arr = np.c_[ra_gc, dec_gc]
            temp_arr = temp_arr[elev_gc > 0]
            ra_gc, dec_gc = temp_arr.T

            # Plot the great circle fitted on the radiant
            gc_color = get_shower_color(shower)
            allsky_plot.plot(ra_gc, dec_gc, linestyle='dotted', color=gc_color, alpha=gc_alpha, linewidth=1)

            # Plot the point closest to the shower radiant
            if shower is not None:
                allsky_plot.plot(ra_gc[0], dec_gc[0], color='r', marker='+', ms=5, mew=1)

                # Store shower radiant point
                meteor_obj.radiant_ra = ra_gc[0]
                meteor_obj.radiant_dec = dec_gc[0]

            ### ###

        ### Plot all showers ###

        # Find unique showers and their apparent radiants computed at highest radiant elevation
        #   (otherwise the apparent radiants can be quite off)
        shower_dict = {}
        for key in associations:
            meteor_obj, shower = associations[key]

            if shower is None:
                continue

            # If the shower name is in dict, find the shower with the highest radiant elevation
            if shower.name in shower_dict:
                if shower.elev > shower_dict[shower.name].elev:
                    shower_dict[shower.name] = shower

            else:
                shower_dict[shower.name] = shower

        # Plot the location of shower radiants
        for shower_name in shower_dict:

            shower = shower_dict[shower_name]

            heading_arr = np.linspace(0, 360, 50)

            # Compute coordinates on a circle around the given RA, Dec
            ra_circle, dec_circle = sphericalPointFromHeadingAndDistance(shower.ra, shower.dec, \
                heading_arr, config.shower_max_radiant_separation)

            # Plot the shower circle
            allsky_plot.plot(ra_circle, dec_circle, color=colors_by_name[shower_name])

            # Plot the shower name
            x_text, y_text = allsky_plot.raDec2XY(shower.ra, shower.dec)
            allsky_plot.ax.text(x_text, y_text, shower.name, color='w', size=8, va='center', \
                ha='center', zorder=6)

        # Plot station name and solar longiutde range
        # NOTE(review): cam_code here is whatever was left from the last iteration of the meteor loop
        #   above — confirm all meteors in one run share the same station code.
        allsky_plot.ax.text(-180, 89, "{:s}".format(cam_code), color='w', family='monospace')

        # Get a list of JDs of meteors
        jd_list = [associations[key][0].jdt_ref for key in associations]

        if len(jd_list):

            # Get the range of solar longitudes
            jd_min = min(jd_list)
            sol_min = np.degrees(jd2SolLonSteyaert(jd_min))
            jd_max = max(jd_list)
            sol_max = np.degrees(jd2SolLonSteyaert(jd_max))

            # Plot the date and solar longitude range
            date_sol_beg = u"Beg: {:s} (sol = {:.2f}\u00b0)".format(
                jd2Date(jd_min, dt_obj=True).strftime("%Y%m%d %H:%M:%S"), sol_min)
            date_sol_end = u"End: {:s} (sol = {:.2f}\u00b0)".format(
                jd2Date(jd_max, dt_obj=True).strftime("%Y%m%d %H:%M:%S"), sol_max)

            allsky_plot.ax.text(-180, 85, date_sol_beg, color='w', family='monospace')
            allsky_plot.ax.text(-180, 81, date_sol_end, color='w', family='monospace')
            allsky_plot.ax.text(-180, 77, "-"*len(date_sol_end), color='w', family='monospace')

            # Plot shower counts
            for i, (shower, count) in enumerate(shower_counts):

                if shower is not None:
                    shower_name = shower.name
                else:
                    shower_name = "..."

                allsky_plot.ax.text(-180, 73 - i*4, "{:s}: {:d}".format(shower_name, count), color='w', \
                    family='monospace')

        ### ###

        # Plot yearly meteor shower activity
        if plot_activity:

            # Plot the activity diagram
            # NOTE(review): sol_min/sol_max are only bound inside the `if len(jd_list)` block above; with
            #   no associations this raises NameError — confirm callers never reach here with an empty set.
            generateActivityDiagram(config, shower_list, ax_handle=ax_activity, \
                sol_marker=[sol_min, sol_max], colors=colors_by_name)

        # Save plot and text file
        if save_plot:

            # NOTE(review): ftpdetectinfo_path is the last entry of the input list (loop variable reuse);
            #   the plot and text file are written next to that file.
            dir_path, ftpdetectinfo_name = os.path.split(ftpdetectinfo_path)
            ftpdetectinfo_base_name = ftpdetectinfo_name.replace('FTPdetectinfo_', '').replace('.txt', '')
            plot_name = ftpdetectinfo_base_name + '_radiants.png'

            # Increase figure size
            allsky_plot.fig.set_size_inches(18, 9, forward=True)

            allsky_plot.beautify()

            plt.savefig(os.path.join(dir_path, plot_name), dpi=100, facecolor='k')

            # Save the text file with shower info
            if len(jd_list):

                with open(os.path.join(dir_path, ftpdetectinfo_base_name + "_radiants.txt"), 'w') as f:

                    # Print station code
                    f.write("# RMS single station association\n")
                    f.write("# \n")
                    f.write("# Station: {:s}\n".format(cam_code))

                    # Print date range
                    f.write("# Beg | End \n")
                    f.write("# -----------------------------------------------------\n")
                    f.write("# Date | {:24s} | {:24s} \n".format(jd2Date(jd_min, \
                        dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), jd2Date(jd_max, \
                        dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f")))
                    f.write("# Sol | {:>24.2f} | {:>24.2f} \n".format(sol_min, sol_max))

                    # Write shower counts
                    f.write("# \n")
                    f.write("# Shower counts:\n")
                    f.write("# --------------\n")
                    f.write("# Code, Count, IAU link\n")

                    for i, (shower, count) in enumerate(shower_counts):

                        if shower is not None:
                            shower_name = shower.name

                            # Create link to the IAU database of showers
                            iau_link = "https://www.ta3.sk/IAUC22DB/MDC2007/Roje/pojedynczy_obiekt.php?kodstrumienia={:05d}".format(shower.iau_code)

                        else:
                            shower_name = "..."
                            iau_link = "None"

                        f.write("# {:>4s}, {:>5d}, {:s}\n".format(shower_name, count, iau_link))

                    f.write("# \n")
                    f.write("# Meteor parameters:\n")
                    f.write("# ------------------\n")
                    f.write("# Date And Time, Beg Julian date, La Sun, Shower, RA beg, Dec beg, RA end, Dec end, RA rad, Dec rad, GC theta0, GC phi0, GC beg phase, GC end phase, Mag\n")

                    # Create a sorted list of meteor associations by time
                    associations_list = [associations[key] for key in associations]
                    associations_list = sorted(associations_list, key=lambda x: x[0].jdt_ref)

                    # Write out meteor parameters
                    for meteor_obj, shower in associations_list:

                        # Find peak magnitude
                        # NOTE(review): np.any over the magnitude array is used as an "any data" check —
                        #   an all-zero magnitude array would be treated as "no magnitudes"; verify intent.
                        if np.any(meteor_obj.mag_array):
                            peak_mag = "{:+.1f}".format(np.min(meteor_obj.mag_array))
                        else:
                            peak_mag = "None"

                        if shower is not None:

                            f.write("{:24s}, {:20.12f}, {:>10.6f}, {:>6s}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:9.3f}, {:8.3f}, {:12.3f}, {:12.3f}, {:4s}\n".format(jd2Date(meteor_obj.jdt_ref, dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), \
                                meteor_obj.jdt_ref, meteor_obj.lasun, shower.name, \
                                meteor_obj.ra_array[0]%360, meteor_obj.dec_array[0], \
                                meteor_obj.ra_array[-1]%360, meteor_obj.dec_array[-1], \
                                meteor_obj.radiant_ra%360, meteor_obj.radiant_dec, \
                                np.degrees(meteor_obj.theta0), np.degrees(meteor_obj.phi0), \
                                meteor_obj.gc_beg_phase, meteor_obj.gc_end_phase, peak_mag))

                        else:
                            f.write("{:24s}, {:20.12f}, {:>10.6f}, {:>6s}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:>6s}, {:>7s}, {:9.3f}, {:8.3f}, {:12.3f}, {:12.3f}, {:4s}\n".format(jd2Date(meteor_obj.jdt_ref, dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), \
                                meteor_obj.jdt_ref, meteor_obj.lasun, '...', meteor_obj.ra_array[0]%360, \
                                meteor_obj.dec_array[0], meteor_obj.ra_array[-1]%360, \
                                meteor_obj.dec_array[-1], "None", "None", np.degrees(meteor_obj.theta0), \
                                np.degrees(meteor_obj.phi0), meteor_obj.gc_beg_phase, \
                                meteor_obj.gc_end_phase, peak_mag))

        if show_plot:
            allsky_plot.show()
        else:
            plt.clf()
            plt.close()

    return associations, shower_counts
def estimateMeteorHeight(config, meteor_obj, shower):
    """ Estimate the height of a meteor from single station give a candidate shower.

    Arguments:
        config: [Config instance]
        meteor_obj: [MeteorSingleStation instance]
        shower: [Shower instance]

    Return:
        ht: [float] Estimated height in meters. -1 is returned if the shower radiant is below
            the horizon at the meteor's reference time.
    """

    jdt_ref = meteor_obj.jdt_ref
    lat, lon = meteor_obj.lat, meteor_obj.lon

    ### Compute all needed values in alt/az coordinates ###

    # Express the meteor's begin pointing as a unit vector in the horizontal system
    ra_b, dec_b = vector2RaDec(meteor_obj.beg_vect)
    azim_b, alt_b = raDec2AltAz(ra_b, dec_b, jdt_ref, lat, lon)
    p_beg = raDec2Vector(azim_b, alt_b)

    # Express the meteor's end pointing in the horizontal system
    ra_e, dec_e = vector2RaDec(meteor_obj.end_vect)
    azim_e, alt_e = raDec2AltAz(ra_e, dec_e, jdt_ref, lat, lon)
    p_end = raDec2Vector(azim_e, alt_e)

    # Express the shower radiant in the horizontal system
    azim_r, alt_r = raDec2AltAz(shower.ra, shower.dec, jdt_ref, lat, lon)
    p_rad = raDec2Vector(azim_r, alt_r)

    # A radiant below the horizon cannot have produced this meteor - reject the pairing
    if alt_r < 0:
        return -1

    # Distance from the Earth's centre to sea level at the observer's latitude
    #   (radius of the reference ellipsoid at that latitude)
    sin_lat = np.sin(np.radians(config.latitude))
    earth_radius = EARTH.EQUATORIAL_RADIUS/np.sqrt(1.0 - (EARTH.E**2)*sin_lat**2)

    # Distance from the Earth's centre to the station (add the station's elevation)
    re_dist = earth_radius + config.elevation

    ### ###

    # Trail length implied by the shower's nominal velocity and the observed duration (meters)
    traversed = shower.v_init*meteor_obj.duration

    # Angle subtended by the begin and end points as seen from the station (rad)
    theta_be = np.arccos(np.dot(vectNorm(p_beg), vectNorm(p_end)))

    # Angle between the radiant direction and the reversed begin pointing (rad)
    theta_br = np.arccos(np.dot(vectNorm(p_rad), -vectNorm(p_beg)))

    # Law of sines: range from the station to the begin point (meters)
    range_beg = traversed*np.sin(theta_br)/np.sin(theta_be)

    # Law of cosines in the (Earth centre, station, meteor) triangle; the angle at the station
    #   between the nadir and the begin pointing is (90 deg + begin altitude)
    ht = np.sqrt(range_beg**2 + re_dist**2 \
        - 2*range_beg*re_dist*np.cos(np.radians(90 + meteor_obj.beg_alt)))

    # Convert the geocentric distance to a height above sea level
    ht -= earth_radius
    ht = abs(ht)

    return ht
def estimateMeteorHeight(meteor_obj, shower): """ Estimate the height of a meteor from single station give a candidate shower. Arguments: meteor_obj: [MeteorSingleStation instance] shower: [Shower instance] Return: ht: [float] Estimated height in meters. """ ### Compute all needed values in alt/az coordinates ### # Compute beginning point vector in alt/az beg_ra, beg_dec = vector2RaDec(meteor_obj.beg_vect) beg_azim, beg_alt = raDec2AltAz(beg_ra, beg_dec, meteor_obj.jdt_ref, meteor_obj.lat, meteor_obj.lon) beg_vect_horiz = raDec2Vector(beg_azim, beg_alt) # Compute end point vector in alt/az end_ra, end_dec = vector2RaDec(meteor_obj.end_vect) end_azim, end_alt = raDec2AltAz(end_ra, end_dec, meteor_obj.jdt_ref, meteor_obj.lat, meteor_obj.lon) end_vect_horiz = raDec2Vector(end_azim, end_alt) # Compute normal vector in alt/az normal_azim, normal_alt = raDec2AltAz(meteor_obj.normal_ra, meteor_obj.normal_dec, meteor_obj.jdt_ref, \ meteor_obj.lat, meteor_obj.lon) normal_horiz = raDec2Vector(normal_azim, normal_alt) # Compute radiant vector in alt/az radiant_azim, radiant_alt = raDec2AltAz(shower.ra, shower.dec, meteor_obj.jdt_ref, meteor_obj.lat, \ meteor_obj.lon) radiant_vector_horiz = raDec2Vector(radiant_azim, radiant_alt) # Reject the pairing if the radiant is below the horizon if radiant_alt < 0: return -1 ### ### # Compute cartesian coordinates of the pointing at the beginning of the meteor pt = vectNorm(beg_vect_horiz) # Compute reference vector perpendicular to the plane normal and the radiant vec = vectNorm(np.cross(normal_horiz, radiant_vector_horiz)) # Compute angles between the reference vector and the pointing dot_vb = np.dot(vec, beg_vect_horiz) dot_ve = np.dot(vec, end_vect_horiz) dot_vp = np.dot(vec, pt) # Compute distance to the radiant intersection line r_mag = 1.0 / (dot_vb**2) r_mag += 1.0 / (dot_ve**2) r_mag += -2 * np.cos(meteor_obj.ang_be) / (dot_vb * dot_ve) r_mag = np.sqrt(r_mag) r_mag = shower.v_init * meteor_obj.duration / r_mag pt_mag = r_mag / 
dot_vp # Compute the height ht = pt_mag**2 + EARTH.EQUATORIAL_RADIUS**2 \ - 2*pt_mag*EARTH.EQUATORIAL_RADIUS*np.cos(np.radians(90 - meteor_obj.beg_alt)) ht = np.sqrt(ht) ht -= EARTH.EQUATORIAL_RADIUS ht = abs(ht) return ht