def applyAstrometryFTPdetectinfo(dir_path, ftp_detectinfo_file, platepar_file, UT_corr=0, platepar=None):
    """ Use the given platepar to calculate the celestial coordinates of detected meteors from a
        FTPdetectinfo file and save the updated values.

    Arguments:
        dir_path: [str] Path to the night.
        ftp_detectinfo_file: [str] Name of the FTPdetectinfo file.
        platepar_file: [str] Name of the platepar file.

    Keyword arguments:
        UT_corr: [float] Difference of time from UTC in hours.
        platepar: [Platepar obj] Loaded platepar. None by default. If given, the platepar file won't be
            read, but this platepar structure will be used instead.

    Return:
        None
    """

    # If the FTPdetectinfo file does not exist, skip everything
    if not os.path.isfile(os.path.join(dir_path, ftp_detectinfo_file)):
        print('The given FTPdetectinfo file does not exist:', os.path.join(dir_path, ftp_detectinfo_file))
        print('The astrometry was not computed!')
        return None

    # Name of the uncalibrated backup copy. Use os.path.splitext instead of splitting on '.' and
    # joining, so that file names containing more than one dot keep their full base name
    # (the old "".join(name.split('.')[:-1]) silently dropped interior dots).
    ftp_detectinfo_copy = os.path.splitext(ftp_detectinfo_file)[0] + "_uncalibrated.txt"

    # Back up the original FTPdetectinfo, only if a backup does not exist already
    if not os.path.isfile(os.path.join(dir_path, ftp_detectinfo_copy)):
        shutil.copy2(os.path.join(dir_path, ftp_detectinfo_file),
            os.path.join(dir_path, ftp_detectinfo_copy))

    # Load platepar from file if not given
    if platepar is None:

        # Load the platepar
        platepar = Platepar()
        platepar.read(os.path.join(dir_path, platepar_file), use_flat=None)

    # Load the FTPdetectinfo file
    meteor_data = readFTPdetectinfo(dir_path, ftp_detectinfo_file)

    # List for final meteor data
    meteor_list = []

    # Go through every meteor
    for meteor in meteor_data:

        ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, \
            meteor_meas = meteor

        # Apply the platepar to the given centroids
        meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, platepar)

        # Add the calculated values to the final list
        meteor_list.append([ff_name, meteor_No, rho, phi, meteor_picks])

    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Calibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters (cam_code/fps are otherwise left over from
    # the last loop iteration above)
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0

    # Save the updated FTPdetectinfo
    writeFTPdetectinfo(meteor_list, dir_path, ftp_detectinfo_file, dir_path, cam_code, fps,
        calibration=calib_str, celestial_coords_given=True)
def computeFlux(config, dir_path, ftpdetectinfo_path, shower_code, dt_beg, dt_end, timebin, mass_index, \
    timebin_intdt=0.25, ht_std_percent=5.0, mask=None):
    """ Compute flux using measurements in the given FTPdetectinfo file.

    Arguments:
        config: [Config instance]
        dir_path: [str] Path to the working directory.
        ftpdetectinfo_path: [str] Path to a FTPdetectinfo file.
        shower_code: [str] IAU shower code (e.g. ETA, PER, SDA).
        dt_beg: [Datetime] Datetime object of the observation beginning.
        dt_end: [Datetime] Datetime object of the observation end.
        timebin: [float] Time bin in hours.
        mass_index: [float] Cumulative mass index of the shower.

    Keyword arguments:
        timebin_intdt: [float] Time step for computing the integrated collection area in hours. 15 minutes
            by default. If smaller than that, only one collection area will be computed.
        ht_std_percent: [float] Meteor height standard deviation in percent.
        mask: [Mask object] Mask object, None by default. NOTE: overwritten below by the mask file found
            in the night directory (or None), so the passed-in value is never used as given.

    Return:
        None. Results are printed to stdout and a cumulative histogram of peak magnitudes is shown.
        Returns None early if the platepar, meteors, or shower associations are missing.
    """

    # Get a list of files in the night folder
    file_list = sorted(os.listdir(dir_path))

    # Find and load the platepar file
    if config.platepar_name in file_list:

        # Load the platepar
        platepar = Platepar.Platepar()
        platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat)

    else:
        print("Cannot find the platepar file in the night directory: ", config.platepar_name)
        return None

    # # Load FTPdetectinfos
    # meteor_data = []
    # for ftpdetectinfo_path in ftpdetectinfo_list:
    #     if not os.path.isfile(ftpdetectinfo_path):
    #         print('No such file:', ftpdetectinfo_path)
    #         continue
    #     meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    # Load meteor data from the FTPdetectinfo file
    meteor_data = readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    if not len(meteor_data):
        print("No meteors in the FTPdetectinfo file!")
        return None

    # Find and load recalibrated platepars (one platepar per FF file, stored as a JSON dict)
    if config.platepars_recalibrated_name in file_list:
        with open(os.path.join(dir_path, config.platepars_recalibrated_name)) as f:
            recalibrated_platepars_dict = json.load(f)

            print("Recalibrated platepars loaded!")

    # If the file is not available, apply the recalibration procedure
    else:

        recalibrated_platepars_dict = applyRecalibrate(ftpdetectinfo_path, config)

        print("Recalibrated platepar file not available!")
        print("Recalibrating...")

    # Convert the dictionary of recalibrated platepars to a dictionary of Platepar objects
    recalibrated_platepars = {}
    for ff_name in recalibrated_platepars_dict:

        pp = Platepar.Platepar()
        pp.loadFromDict(recalibrated_platepars_dict[ff_name], use_flat=config.use_flat)

        recalibrated_platepars[ff_name] = pp

    # Compute nightly mean of the photometric zero point
    mag_lev_nightly_mean = np.mean([recalibrated_platepars[ff_name].mag_lev \
        for ff_name in recalibrated_platepars])

    # Locate and load the mask file (this overrides the 'mask' keyword argument)
    if config.mask_file in file_list:
        mask_path = os.path.join(dir_path, config.mask_file)
        mask = loadMask(mask_path)
        print("Using mask:", mask_path)

    else:
        print("No mask used!")
        mask = None

    # Compute the population index using the classical equation
    population_index = 10**((mass_index - 1)/2.5)

    ### SENSOR CHARACTERIZATION ###
    # Computes FWHM of stars and noise profile of the sensor

    # File which stores the sensor characterization profile
    sensor_characterization_file = "flux_sensor_characterization.json"
    sensor_characterization_path = os.path.join(dir_path, sensor_characterization_file)

    # Load sensor characterization file if present, so the procedure can be skipped
    if os.path.isfile(sensor_characterization_path):

        # Load the JSON file
        with open(sensor_characterization_path) as f:
            data = " ".join(f.readlines())

        sensor_data = json.loads(data)

        # Remove the info entry (the '-1' key only documents the dict layout, see below)
        if '-1' in sensor_data:
            del sensor_data['-1']

    else:

        # Run sensor characterization
        sensor_data = sensorCharacterization(config, dir_path)

        # Save to file for posterior use
        with open(sensor_characterization_path, 'w') as f:

            # Add an explanation what each entry means
            sensor_data_save = dict(sensor_data)
            sensor_data_save['-1'] = {"FF file name": ['median star FWHM', 'median background noise stddev']}

            # Convert to JSON
            out_str = json.dumps(sensor_data_save, indent=4, sort_keys=True)

            # Save to disk
            f.write(out_str)

    # Compute the nightly mean FWHM and noise stddev
    fwhm_nightly_mean = np.mean([sensor_data[key][0] for key in sensor_data])
    stddev_nightly_mean = np.mean([sensor_data[key][1] for key in sensor_data])

    ### ###

    # Perform shower association (shower_counts is unused here)
    associations, shower_counts = showerAssociation(config, [ftpdetectinfo_path], shower_code=shower_code, \
        show_plot=False, save_plot=False, plot_activity=False)

    # If there are no shower association, return nothing
    if not associations:
        print("No meteors associated with the shower!")
        return None

    # Print the list of used meteors
    peak_mags = []
    for key in associations:
        meteor, shower = associations[key]

        if shower is not None:

            # Compute peak magnitude
            peak_mag = np.min(meteor.mag_array)

            peak_mags.append(peak_mag)

            print("{:.6f}, {:3s}, {:+.2f}".format(meteor.jdt_ref, shower.name, peak_mag))

    print()

    # Init the flux configuration
    flux_config = FluxConfig()

    ### COMPUTE COLLECTION AREAS ###

    # Make a file name to save the raw collection areas
    col_areas_file_name = generateColAreaJSONFileName(platepar.station_code, flux_config.side_points, \
        flux_config.ht_min, flux_config.ht_max, flux_config.dht, flux_config.elev_limit)

    # Check if the collection area file exists. If yes, load the data. If not, generate collection areas
    if col_areas_file_name in os.listdir(dir_path):
        col_areas_ht = loadRawCollectionAreas(dir_path, col_areas_file_name)
        print("Loaded collection areas from:", col_areas_file_name)

    else:

        # Compute the collecting areas segments per height
        col_areas_ht = collectingArea(platepar, mask=mask, side_points=flux_config.side_points, \
            ht_min=flux_config.ht_min, ht_max=flux_config.ht_max, dht=flux_config.dht, \
            elev_limit=flux_config.elev_limit)

        # Save the collection areas to file
        saveRawCollectionAreas(dir_path, col_areas_file_name, col_areas_ht)

        print("Saved raw collection areas to:", col_areas_file_name)

    ### ###

    # Compute the pointing of the middle of the FOV
    _, ra_mid, dec_mid, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], [platepar.X_res/2], [platepar.Y_res/2], \
        [1], platepar, extinction_correction=False)

    azim_mid, elev_mid = raDec2AltAz(ra_mid[0], dec_mid[0], J2000_JD.days, platepar.lat, platepar.lon)

    # Compute the range to the middle point (at the 100 km reference height, in meters)
    ref_ht = 100000
    r_mid, _, _, _ = xyHt2Geo(platepar, platepar.X_res/2, platepar.Y_res/2, ref_ht, indicate_limit=True, \
        elev_limit=flux_config.elev_limit)

    ### Compute the average angular velocity to which the flux variation throughout the night will be
    #   normalized. The ang vel is of the middle of the FOV in the middle of observations

    # Middle Julian date of the night
    jd_night_mid = (datetime2JD(dt_beg) + datetime2JD(dt_end))/2

    # Compute the apparent radiant
    ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_night_mid)

    # Compute the radiant elevation
    radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_night_mid, platepar.lat, platepar.lon)

    # Compute the angular velocity in the middle of the FOV (rad/s)
    rad_dist_night_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
        np.radians(azim_mid), np.radians(elev_mid))
    ang_vel_night_mid = v_init*np.sin(rad_dist_night_mid)/r_mid

    ###

    # Compute the average limiting magnitude to which all flux will be normalized

    # Standard deviation of star PSF, nightly mean (px); 2.355 converts FWHM to a Gaussian sigma
    star_stddev = fwhm_nightly_mean/2.355

    # Compute the theoretical stellar limiting magnitude (nightly average)
    star_sum = 2*np.pi*(config.k1_det*stddev_nightly_mean + config.j1_det)*star_stddev**2
    lm_s_nightly_mean = -2.5*np.log10(star_sum) + mag_lev_nightly_mean

    # A meteor needs to be visible on at least 4 frames, thus it needs to have at least 4x the mass to produce
    # that amount of light. 1 magnitude difference scales as -0.4 of log of mass, thus:
    frame_min_loss = np.log10(config.line_minimum_frame_range_det)/(-0.4)
    lm_s_nightly_mean += frame_min_loss

    # Compute apparent meteor magnitude
    lm_m_nightly_mean = lm_s_nightly_mean - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
        np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_night_mid)/(config.fps*r_mid*fwhm_nightly_mean)) \
        )

    # print("Stellar lim mag using detection thresholds:", lm_s_nightly_mean)
    print("Apparent meteor limiting magnitude:", lm_m_nightly_mean)

    ### Apply time-dependent corrections ###

    sol_data = []
    flux_lm_6_5_data = []

    # Go through all time bins within the observation period
    total_time_hrs = (dt_end - dt_beg).total_seconds()/3600
    nbins = int(np.ceil(total_time_hrs/timebin))
    for t_bin in range(nbins):

        # Compute bin start and end time
        bin_dt_beg = dt_beg + datetime.timedelta(hours=timebin*t_bin)
        bin_dt_end = bin_dt_beg + datetime.timedelta(hours=timebin)

        if bin_dt_end > dt_end:
            bin_dt_end = dt_end

        # Compute bin duration in hours
        bin_hours = (bin_dt_end - bin_dt_beg).total_seconds()/3600

        # Convert to Julian date
        bin_jd_beg = datetime2JD(bin_dt_beg)
        bin_jd_end = datetime2JD(bin_dt_end)

        # Only select meteors in this bin
        bin_meteors = []
        bin_ffs = []
        for key in associations:
            meteor, shower = associations[key]

            if shower is not None:
                if (shower.name == shower_code) and (meteor.jdt_ref > bin_jd_beg) \
                    and (meteor.jdt_ref <= bin_jd_end):

                    bin_meteors.append([meteor, shower])
                    bin_ffs.append(meteor.ff_name)

        if len(bin_meteors) > 0:

            ### Compute the radiant elevation at the middle of the time bin ###

            jd_mean = (bin_jd_beg + bin_jd_end)/2

            # Compute the mean solar longitude
            sol_mean = np.degrees(jd2SolLonSteyaert(jd_mean))

            print()
            print()
            print("-- Bin information ---")
            print("Bin beg:", bin_dt_beg)
            print("Bin end:", bin_dt_end)
            print("Sol mid: {:.5f}".format(sol_mean))
            print("Meteors:", len(bin_meteors))

            # Compute the apparent radiant
            ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_mean)

            # Compute the mean meteor height
            meteor_ht_beg = heightModel(v_init, ht_type='beg')
            meteor_ht_end = heightModel(v_init, ht_type='end')
            meteor_ht = (meteor_ht_beg + meteor_ht_end)/2

            # Compute the standard deviation of the height
            meteor_ht_std = meteor_ht*ht_std_percent/100.0

            # Init the Gaussian height distribution
            meteor_ht_gauss = scipy.stats.norm(meteor_ht, meteor_ht_std)

            # Compute the radiant elevation
            radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_mean, platepar.lat, platepar.lon)

            ### ###

            ### Weight collection area by meteor height distribution ###

            # Determine weights for each height
            weight_sum = 0
            weights = {}
            for ht in col_areas_ht:
                wt = meteor_ht_gauss.pdf(float(ht))
                weight_sum += wt
                weights[ht] = wt

            # Normalize the weights so that the sum is 1
            for ht in weights:
                weights[ht] /= weight_sum

            ### ###

            # Compute the angular velocity in the middle of the FOV
            rad_dist_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
                np.radians(azim_mid), np.radians(elev_mid))
            ang_vel_mid = v_init*np.sin(rad_dist_mid)/r_mid

            ### Compute the limiting magnitude ###

            # Compute the mean star FWHM in the given bin
            fwhm_bin_mean = np.mean([sensor_data[ff_name][0] for ff_name in bin_ffs])

            # Compute the mean background stddev in the given bin
            stddev_bin_mean = np.mean([sensor_data[ff_name][1] for ff_name in bin_ffs])

            # Compute the mean photometric zero point in the given bin
            mag_lev_bin_mean = np.mean([recalibrated_platepars[ff_name].mag_lev for ff_name in bin_ffs if ff_name in recalibrated_platepars])

            # Standard deviation of star PSF, bin mean (px)
            star_stddev = fwhm_bin_mean/2.355

            # Compute the theoretical stellar limiting magnitude (bin average)
            star_sum = 2*np.pi*(config.k1_det*stddev_bin_mean + config.j1_det)*star_stddev**2
            lm_s = -2.5*np.log10(star_sum) + mag_lev_bin_mean
            lm_s += frame_min_loss

            # Compute apparent meteor magnitude
            lm_m = lm_s - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \
                np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_mid)/(config.fps*r_mid*fwhm_bin_mean))\
                )

            ### ###

            # Final collection area value (height-weighted)
            collection_area = 0

            # Go through all heights and segment blocks
            for ht in col_areas_ht:
                for img_coords in col_areas_ht[ht]:

                    x_mean, y_mean = img_coords

                    # Unpack precomputed values
                    area, azim, elev, sensitivity_ratio, r = col_areas_ht[ht][img_coords]

                    # Compute the angular velocity (rad/s) in the middle of this block
                    rad_dist = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev),
                        np.radians(azim), np.radians(elev))
                    ang_vel = v_init*np.sin(rad_dist)/r

                    # Compute the range correction
                    range_correction = (1e5/r)**2

                    #ang_vel_correction = ang_vel/ang_vel_mid
                    # Compute angular velocity correction relative to the nightly mean
                    ang_vel_correction = ang_vel/ang_vel_night_mid

                    ### Apply corrections

                    correction_ratio = 1.0

                    # Correct the area for vignetting and extinction
                    correction_ratio *= sensitivity_ratio

                    # Correct for the range
                    correction_ratio *= range_correction

                    # Correct for the radiant elevation
                    correction_ratio *= np.sin(np.radians(radiant_elev))

                    # Correct for angular velocity
                    correction_ratio *= ang_vel_correction

                    # Add the collection area to the final estimate with the height weight
                    # Raise the correction to the mass index power
                    collection_area += weights[ht]*area*correction_ratio**(mass_index - 1)

            # Compute the flux at the bin LM (meteors/1000km^2/h)
            flux = 1e9*len(bin_meteors)/collection_area/bin_hours

            # Compute the flux scaled to the nightly mean LM
            flux_lm_nightly_mean = flux*population_index**(lm_m_nightly_mean - lm_m)

            # Compute the flux scaled to +6.5M
            flux_lm_6_5 = flux*population_index**(6.5 - lm_m)

            print("-- Sensor information ---")
            print("Star FWHM: {:5.2f} px".format(fwhm_bin_mean))
            print("Bkg stddev: {:4.1f} ADU".format(stddev_bin_mean))
            print("Photom ZP: {:+6.2f} mag".format(mag_lev_bin_mean))
            print("Stellar LM: {:+.2f} mag".format(lm_s))
            print("-- Flux ---")
            print("Col area: {:d} km^2".format(int(collection_area/1e6)))
            print("Ang vel: {:.2f} deg/s".format(np.degrees(ang_vel_mid)))
            print("LM app: {:+.2f} mag".format(lm_m))
            print("Flux: {:.2f} meteors/1000km^2/h".format(flux))
            print("to {:+.2f}: {:.2f} meteors/1000km^2/h".format(lm_m_nightly_mean, flux_lm_nightly_mean))
            print("to +6.50: {:.2f} meteors/1000km^2/h".format(flux_lm_6_5))

            sol_data.append(sol_mean)
            flux_lm_6_5_data.append(flux_lm_6_5)

    # Print the results
    print("Solar longitude, Flux at LM +6.5:")
    for sol, flux_lm_6_5 in zip(sol_data, flux_lm_6_5_data):
        print("{:9.5f}, {:8.4f}".format(sol, flux_lm_6_5))

    # Plot a histogram of peak magnitudes
    plt.hist(peak_mags, cumulative=True)
    plt.show()
cml_args = arg_parser.parse_args() ######################### # Read command line arguments ftpdetectinfo_path = cml_args.ftpdetectinfo_path[0] ftpdetectinfo_path = findFTPdetectinfoFile(ftpdetectinfo_path) dir_path, ftpdetectinfo_name = os.path.split(ftpdetectinfo_path) gauss_sigma = cml_args.psf_sigma[0] saturation_lvl = cml_args.satlvl # Load meteor data from FTPdetecinfo cam_code, fps, meteor_list = readFTPdetectinfo(dir_path, ftpdetectinfo_name, ret_input_format=True) # Load the flat, if given flat = None if cml_args.flat: flat = loadFlat(*os.path.split(cml_args.flat)) corrected_meteor_list = [] # Find matching FF files in the directory for entry in meteor_list: ftp_ff_name, meteor_No, rho, phi, meteor_meas = entry # Find the matching FTPdetectinfo file in the directory
def processNight(night_data_dir, config, detection_results=None, nodetect=False):
    """ Given the directory with FF files, run detection and archiving.

    Arguments:
        night_data_dir: [str] Path to the directory with FF files.
        config: [Config obj]

    Keyword arguments:
        detection_results: [list] An optional list of detection. If None (default), detection will be done
            on the files in the folder.
        nodetect: [bool] True if detection should be skipped. False by default.

    Return:
        night_archive_dir: [str] Path to the night directory in ArchivedFiles.
        archive_name: [str] Path to the archive.
        detector: [QueuedPool instance] Handle to the detector. None if detection was skipped or
            precomputed results were saved.
    """

    # Remove final slash in the night dir
    if night_data_dir.endswith(os.sep):
        night_data_dir = night_data_dir[:-1]

    # Extract the name of the night
    night_data_dir_name = os.path.basename(os.path.abspath(night_data_dir))

    platepar = None

    # If the detection should be run
    if (not nodetect):

        # If no detection was performed, run it
        if detection_results is None:

            # Run detection on the given directory
            calstars_name, ftpdetectinfo_name, ff_detected, \
                detector = detectStarsAndMeteorsDirectory(night_data_dir, config)

        # Otherwise, save detection results
        else:

            # Save CALSTARS and FTPdetectinfo to disk
            calstars_name, ftpdetectinfo_name, ff_detected = saveDetections(detection_results, \
                night_data_dir, config)

            # If the files were previously detected, there is no detector
            detector = None

        # Get the platepar file
        platepar, platepar_path, platepar_fmt = getPlatepar(config, night_data_dir)

        # Run calibration check and auto astrometry refinement
        if (platepar is not None) and (calstars_name is not None):

            # Read in the CALSTARS file
            calstars_list = CALSTARS.readCALSTARS(night_data_dir, calstars_name)

            # Run astrometry check and refinement
            platepar, fit_status = autoCheckFit(config, platepar, calstars_list)

            # If the fit was successful, apply the astrometry to detected meteors
            if fit_status:

                log.info('Astrometric calibration SUCCESSFUL!')

                # Save the refined platepar to the night directory and as default
                platepar.write(os.path.join(night_data_dir, config.platepar_name), fmt=platepar_fmt)
                platepar.write(platepar_path, fmt=platepar_fmt)

            else:
                log.info('Astrometric calibration FAILED!, Using old platepar for calibration...')

            # # Calculate astrometry for meteor detections
            # applyAstrometryFTPdetectinfo(night_data_dir, ftpdetectinfo_name, platepar_path)

            # If a flat is used, disable vignetting correction
            if config.use_flat:
                platepar.vignetting_coeff = 0.0

            log.info("Recalibrating astrometry on FF files with detections...")

            # Recalibrate astrometry on every FF file and apply the calibration to detections
            recalibrateIndividualFFsAndApplyAstrometry(night_data_dir, os.path.join(night_data_dir, \
                ftpdetectinfo_name), calstars_list, config, platepar)

            log.info("Converting RMS format to UFOOrbit format...")

            # Convert the FTPdetectinfo into UFOOrbit input file
            FTPdetectinfo2UFOOrbitInput(night_data_dir, ftpdetectinfo_name, platepar_path)

            # Generate a calibration report; failures are logged but do not abort processing
            log.info("Generating a calibration report...")
            try:
                generateCalibrationReport(config, night_data_dir, platepar=platepar)

            except Exception as e:
                log.debug('Generating calibration report failed with the message:\n' + repr(e))
                log.debug(repr(traceback.format_exception(*sys.exc_info())))

            # Perform single station shower association; failures are logged but do not abort
            log.info("Performing single station shower association...")
            try:
                showerAssociation(config, [os.path.join(night_data_dir, ftpdetectinfo_name)], \
                    save_plot=True, plot_activity=True)

            except Exception as e:
                log.debug('Shower association failed with the message:\n' + repr(e))
                log.debug(repr(traceback.format_exception(*sys.exc_info())))

    else:
        ff_detected = []
        detector = None

    log.info('Plotting field sums...')

    # Plot field sums; failures are logged but do not abort processing
    try:
        plotFieldsums(night_data_dir, config)

    except Exception as e:
        log.debug('Plotting field sums failed with message:\n' + repr(e))
        log.debug(repr(traceback.format_exception(*sys.exc_info())))

    # Archive all fieldsums to one archive
    archiveFieldsums(night_data_dir)

    # List for any extra files which will be copied to the night archive directory. Full paths have to be
    # given
    extra_files = []

    log.info('Making a flat...')

    # Make a new flat field image
    try:
        flat_img = makeFlat(night_data_dir, config)

    except Exception as e:
        log.debug('Making a flat failed with message:\n' + repr(e))
        log.debug(repr(traceback.format_exception(*sys.exc_info())))
        flat_img = None

    # If making flat was successful, save it
    if flat_img is not None:

        # Save the flat in the night directory, to keep the operational flat updated
        flat_path = os.path.join(night_data_dir, os.path.basename(config.flat_file))
        saveImage(flat_path, flat_img)
        log.info('Flat saved to: ' + flat_path)

        # Copy the flat to the night's directory as well
        extra_files.append(flat_path)

    else:
        log.info('Making flat image FAILED!')

    ### Add extra files to archive

    # Add the config file to the archive too
    extra_files.append(os.path.join(os.getcwd(), '.config'))

    # Add the mask
    if (not nodetect):
        if os.path.exists(config.mask_file):
            mask_path = os.path.abspath(config.mask_file)
            extra_files.append(mask_path)

    # Add the platepar to the archive if it exists
    # (platepar_path is only defined when detection ran, hence the nodetect guard)
    if (not nodetect):
        if os.path.exists(platepar_path):
            extra_files.append(platepar_path)

    # Add the json file with recalibrated platepars to the archive
    if (not nodetect):
        recalibrated_platepars_path = os.path.join(night_data_dir, config.platepars_recalibrated_name)
        if os.path.exists(recalibrated_platepars_path):
            extra_files.append(recalibrated_platepars_path)

    ### ###

    # If the detection should be run
    if (not nodetect):

        # Make a CAL file and a special CAMS FTPdetectinfo if full CAMS compatibility is desired
        if (config.cams_code > 0) and (platepar is not None):

            log.info('Generating a CAMS FTPdetectinfo file...')

            # Write the CAL file to disk
            cal_file_name = writeCAL(night_data_dir, config, platepar)

            # Check if the CAL file was successfully generated
            if cal_file_name is not None:

                # CAMS station codes are zero-padded 6-digit numbers
                cams_code_formatted = "{:06d}".format(int(config.cams_code))

                # Load the FTPdetectinfo
                _, fps, meteor_list = readFTPdetectinfo(night_data_dir, ftpdetectinfo_name, \
                    ret_input_format=True)

                # Replace the camera code with the CAMS code
                for met in meteor_list:

                    # Replace the station name and the FF file format
                    ff_name = met[0]
                    ff_name = ff_name.replace('.fits', '.bin')
                    ff_name = ff_name.replace(config.stationID, cams_code_formatted)
                    met[0] = ff_name

                # Write the CAMS compatible FTPdetectinfo file
                writeFTPdetectinfo(meteor_list, night_data_dir, \
                    ftpdetectinfo_name.replace(config.stationID, cams_code_formatted),\
                    night_data_dir, cams_code_formatted, fps, calibration=cal_file_name, \
                    celestial_coords_given=(platepar is not None))

    night_archive_dir = os.path.join(os.path.abspath(config.data_dir), config.archived_dir, \
        night_data_dir_name)

    log.info('Archiving detections to ' + night_archive_dir)

    # Archive the detections
    archive_name = archiveDetections(night_data_dir, night_archive_dir, ff_detected, config, \
        extra_files=extra_files)

    return night_archive_dir, archive_name, detector
# Number of meteor profiles to plot n_profiles = 20 # Difference in Y coordinates between every profile vertical_step_offset = 70 # Force sigma for fitting the Gaussian PSF (disable with -1) force_sigma = 1.3 dir_path, ff_name = os.path.split(cml_args.ff_file[0]) # Load the FF file ff = readFF(dir_path, ff_name) # Load the FTPdetectinfo file meteor_list = readFTPdetectinfo( *os.path.split(findFTPdetectinfoFile(cml_args.ftpdetectinfo[0]))) # Find the FF file among the detections for entry in meteor_list: ftp_ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, \ meteor_meas = entry # Take only the FF file with the detection if ff_name == ftp_ff_name: img = ff.maxpixel x_beg = meteor_meas[0][2] y_beg = meteor_meas[0][3] x_end = meteor_meas[-1][2]
# Difference in Y coordinates between every profile vertical_step_offset = 70 # Force sigma for fitting the Gaussian PSF (disable with -1) force_sigma = 1.3 dir_path, ff_name = os.path.split(cml_args.ff_file[0]) # Load the FF file ff = readFF(dir_path, ff_name) # Load the FTPdetectinfo file meteor_list = readFTPdetectinfo(*os.path.split(cml_args.ftpdetectinfo[0])) # Find the FF file among the detections for entry in meteor_list: ftp_ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, \ meteor_meas = entry # Take only the FF file with the detection if ff_name == ftp_ff_name: img = ff.maxpixel x_beg = meteor_meas[0][2]
def computeFlux(config, dir_path, ftpdetectinfo_path, shower_code, dt_beg, dt_end, timebin, mass_index, \ timebin_intdt=0.25, ht_std_percent=5.0, mask=None, show_plots=True): """ Compute flux using measurements in the given FTPdetectinfo file. Arguments: config: [Config instance] dir_path: [str] Path to the working directory. ftpdetectinfo_path: [str] Path to a FTPdetectinfo file. shower_code: [str] IAU shower code (e.g. ETA, PER, SDA). dt_beg: [Datetime] Datetime object of the observation beginning. dt_end: [Datetime] Datetime object of the observation end. timebin: [float] Time bin in hours. mass_index: [float] Cumulative mass index of the shower. Keyword arguments: timebin_intdt: [float] Time step for computing the integrated collection area in hours. 15 minutes by default. If smaller than that, only one collection are will be computed. ht_std_percent: [float] Meteor height standard deviation in percent. mask: [Mask object] Mask object, None by default. show_plots: [bool] Show flux plots. True by default. Return: [tuple] sol_data, flux_lm_6_5_data - sol_data: [list] Array of solar longitudes (in degrees) of time bins. - flux_lm6_5_data: [list] Array of meteoroid flux at the limiting magnitude of +6.5 in meteors/1000km^2/h. 
""" # Get a list of files in the night folder file_list = sorted(os.listdir(dir_path)) # Find and load the platepar file if config.platepar_name in file_list: # Load the platepar platepar = Platepar.Platepar() platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat) else: print("Cannot find the platepar file in the night directory: ", config.platepar_name) return None # # Load FTPdetectinfos # meteor_data = [] # for ftpdetectinfo_path in ftpdetectinfo_list: # if not os.path.isfile(ftpdetectinfo_path): # print('No such file:', ftpdetectinfo_path) # continue # meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path)) # Load meteor data from the FTPdetectinfo file meteor_data = readFTPdetectinfo(*os.path.split(ftpdetectinfo_path)) if not len(meteor_data): print("No meteors in the FTPdetectinfo file!") return None # Find and load recalibrated platepars if config.platepars_recalibrated_name in file_list: with open(os.path.join(dir_path, config.platepars_recalibrated_name)) as f: recalibrated_platepars_dict = json.load(f) print("Recalibrated platepars loaded!") # If the file is not available, apply the recalibration procedure else: recalibrated_platepars_dict = applyRecalibrate(ftpdetectinfo_path, config) print("Recalibrated platepar file not available!") print("Recalibrating...") # Convert the dictionary of recalibrated platepars to a dictionary of Platepar objects recalibrated_platepars = {} for ff_name in recalibrated_platepars_dict: pp = Platepar.Platepar() pp.loadFromDict(recalibrated_platepars_dict[ff_name], use_flat=config.use_flat) recalibrated_platepars[ff_name] = pp # Compute nighly mean of the photometric zero point mag_lev_nightly_mean = np.mean([recalibrated_platepars[ff_name].mag_lev \ for ff_name in recalibrated_platepars]) # Locate and load the mask file if config.mask_file in file_list: mask_path = os.path.join(dir_path, config.mask_file) mask = loadMask(mask_path) print("Using mask:", mask_path) else: print("No 
mask used!") mask = None # Compute the population index using the classical equation population_index = 10**((mass_index - 1)/2.5) # Found to be more consistent when comparing fluxes #population_index = 10**((mass_index - 1)/2.3) # TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1 ### SENSOR CHARACTERIZATION ### # Computes FWHM of stars and noise profile of the sensor # File which stores the sensor characterization profile sensor_characterization_file = "flux_sensor_characterization.json" sensor_characterization_path = os.path.join(dir_path, sensor_characterization_file) # Load sensor characterization file if present, so the procedure can be skipped if os.path.isfile(sensor_characterization_path): # Load the JSON file with open(sensor_characterization_path) as f: data = " ".join(f.readlines()) sensor_data = json.loads(data) # Remove the info entry if '-1' in sensor_data: del sensor_data['-1'] else: # Run sensor characterization sensor_data = sensorCharacterization(config, dir_path) # Save to file for posterior use with open(sensor_characterization_path, 'w') as f: # Add an explanation what each entry means sensor_data_save = dict(sensor_data) sensor_data_save['-1'] = {"FF file name": ['median star FWHM', 'median background noise stddev']} # Convert collection areas to JSON out_str = json.dumps(sensor_data_save, indent=4, sort_keys=True) # Save to disk f.write(out_str) # Compute the nighly mean FWHM and noise stddev fwhm_nightly_mean = np.mean([sensor_data[key][0] for key in sensor_data]) stddev_nightly_mean = np.mean([sensor_data[key][1] for key in sensor_data]) ### ### # Perform shower association associations, _ = showerAssociation(config, [ftpdetectinfo_path], shower_code=shower_code, \ show_plot=False, save_plot=False, plot_activity=False) # Init the flux configuration flux_config = FluxConfig() # Remove all meteors which begin below the limit height filtered_associations = {} for key in associations: meteor, shower = associations[key] if 
meteor.beg_alt > flux_config.elev_limit: print("Rejecting:", meteor.jdt_ref) filtered_associations[key] = [meteor, shower] associations = filtered_associations # If there are no shower association, return nothing if not associations: print("No meteors associated with the shower!") return None # Print the list of used meteors peak_mags = [] for key in associations: meteor, shower = associations[key] if shower is not None: # Compute peak magnitude peak_mag = np.min(meteor.mag_array) peak_mags.append(peak_mag) print("{:.6f}, {:3s}, {:+.2f}".format(meteor.jdt_ref, shower.name, peak_mag)) print() ### COMPUTE COLLECTION AREAS ### # Make a file name to save the raw collection areas col_areas_file_name = generateColAreaJSONFileName(platepar.station_code, flux_config.side_points, \ flux_config.ht_min, flux_config.ht_max, flux_config.dht, flux_config.elev_limit) # Check if the collection area file exists. If yes, load the data. If not, generate collection areas if col_areas_file_name in os.listdir(dir_path): col_areas_ht = loadRawCollectionAreas(dir_path, col_areas_file_name) print("Loaded collection areas from:", col_areas_file_name) else: # Compute the collecting areas segments per height col_areas_ht = collectingArea(platepar, mask=mask, side_points=flux_config.side_points, \ ht_min=flux_config.ht_min, ht_max=flux_config.ht_max, dht=flux_config.dht, \ elev_limit=flux_config.elev_limit) # Save the collection areas to file saveRawCollectionAreas(dir_path, col_areas_file_name, col_areas_ht) print("Saved raw collection areas to:", col_areas_file_name) ### ### # Compute the raw collection area at the height of 100 km col_area_100km_raw = 0 col_areas_100km_blocks = col_areas_ht[100000.0] for block in col_areas_100km_blocks: col_area_100km_raw += col_areas_100km_blocks[block][0] print("Raw collection area at height of 100 km: {:.2f} km^2".format(col_area_100km_raw/1e6)) # Compute the pointing of the middle of the FOV _, ra_mid, dec_mid, _ = xyToRaDecPP([jd2Date(J2000_JD.days)], 
[platepar.X_res/2], [platepar.Y_res/2], \ [1], platepar, extinction_correction=False) azim_mid, elev_mid = raDec2AltAz(ra_mid[0], dec_mid[0], J2000_JD.days, platepar.lat, platepar.lon) # Compute the range to the middle point ref_ht = 100000 r_mid, _, _, _ = xyHt2Geo(platepar, platepar.X_res/2, platepar.Y_res/2, ref_ht, indicate_limit=True, \ elev_limit=flux_config.elev_limit) print("Range at 100 km in the middle of the image: {:.2f} km".format(r_mid/1000)) ### Compute the average angular velocity to which the flux variation throught the night will be normalized # The ang vel is of the middle of the FOV in the middle of observations # Middle Julian date of the night jd_night_mid = (datetime2JD(dt_beg) + datetime2JD(dt_end))/2 # Compute the apparent radiant ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_night_mid) # Compute the radiant elevation radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_night_mid, platepar.lat, platepar.lon) # Compute the angular velocity in the middle of the FOV rad_dist_night_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim_mid), np.radians(elev_mid)) ang_vel_night_mid = v_init*np.sin(rad_dist_night_mid)/r_mid ### # Compute the average limiting magnitude to which all flux will be normalized # Standard deviation of star PSF, nightly mean (px) star_stddev = fwhm_nightly_mean/2.355 # # Compute the theoretical stellar limiting magnitude (nightly average) # star_sum = 2*np.pi*(config.k1_det*stddev_nightly_mean + config.j1_det)*star_stddev**2 # lm_s_nightly_mean = -2.5*np.log10(star_sum) + mag_lev_nightly_mean # Compute the theoretical stellar limiting magnitude using an empirical model (nightly average) lm_s_nightly_mean = stellarLMModel(mag_lev_nightly_mean) # A meteor needs to be visible on at least 4 frames, thus it needs to have at least 4x the mass to produce # that amount of light. 
1 magnitude difference scales as -0.4 of log of mass, thus: # frame_min_loss = np.log10(config.line_minimum_frame_range_det)/(-0.4) frame_min_loss = 0.0 # TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!11 print("Frame min loss: {:.2} mag".format(frame_min_loss)) lm_s_nightly_mean += frame_min_loss # Compute apparent meteor magnitude lm_m_nightly_mean = lm_s_nightly_mean - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \ np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_night_mid)/(config.fps*r_mid*fwhm_nightly_mean)) \ ) # print("Stellar lim mag using detection thresholds:", lm_s_nightly_mean) print("Apparent meteor limiting magnitude:", lm_m_nightly_mean) ### Apply time-dependent corrections ### # Track values used for flux sol_data = [] flux_lm_6_5_data = [] meteor_num_data = [] effective_collection_area_data = [] radiant_elev_data = [] radiant_dist_mid_data = [] ang_vel_mid_data = [] lm_s_data = [] lm_m_data = [] sensitivity_corr_data = [] range_corr_data = [] radiant_elev_corr_data = [] ang_vel_corr_data = [] total_corr_data = [] # Go through all time bins within the observation period total_time_hrs = (dt_end - dt_beg).total_seconds()/3600 nbins = int(np.ceil(total_time_hrs/timebin)) for t_bin in range(nbins): for subbin in range(flux_config.sub_time_bins): # Compute bin start and end time bin_dt_beg = dt_beg + datetime.timedelta(hours=(timebin*t_bin + timebin*subbin/flux_config.sub_time_bins)) bin_dt_end = bin_dt_beg + datetime.timedelta(hours=timebin) if bin_dt_end > dt_end: bin_dt_end = dt_end # Compute bin duration in hours bin_hours = (bin_dt_end - bin_dt_beg).total_seconds()/3600 # Convert to Julian date bin_jd_beg = datetime2JD(bin_dt_beg) bin_jd_end = datetime2JD(bin_dt_end) jd_mean = (bin_jd_beg + bin_jd_end)/2 # Compute the mean solar longitude sol_mean = np.degrees(jd2SolLonSteyaert(jd_mean)) ### Compute the radiant elevation at the middle of the time bin ### # Compute the apparent radiant 
ra, dec, v_init = shower.computeApparentRadiant(platepar.lat, platepar.lon, jd_mean) # Compute the mean meteor height meteor_ht_beg = heightModel(v_init, ht_type='beg') meteor_ht_end = heightModel(v_init, ht_type='end') meteor_ht = (meteor_ht_beg + meteor_ht_end)/2 # Compute the standard deviation of the height meteor_ht_std = meteor_ht*ht_std_percent/100.0 # Init the Gaussian height distribution meteor_ht_gauss = scipy.stats.norm(meteor_ht, meteor_ht_std) # Compute the radiant elevation radiant_azim, radiant_elev = raDec2AltAz(ra, dec, jd_mean, platepar.lat, platepar.lon) # Only select meteors in this bin and not too close to the radiant bin_meteors = [] bin_ffs = [] for key in associations: meteor, shower = associations[key] if shower is not None: if (shower.name == shower_code) and (meteor.jdt_ref > bin_jd_beg) \ and (meteor.jdt_ref <= bin_jd_end): # Filter out meteors ending too close to the radiant if np.degrees(angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), \ np.radians(meteor.end_azim), np.radians(meteor.end_alt))) >= flux_config.rad_dist_min: bin_meteors.append([meteor, shower]) bin_ffs.append(meteor.ff_name) ### ### print() print() print("-- Bin information ---") print("Bin beg:", bin_dt_beg) print("Bin end:", bin_dt_end) print("Sol mid: {:.5f}".format(sol_mean)) print("Radiant elevation: {:.2f} deg".format(radiant_elev)) print("Apparent speed: {:.2f} km/s".format(v_init/1000)) # If the elevation of the radiant is below the limit, skip this bin if radiant_elev < flux_config.rad_elev_limit: print("!!! Mean radiant elevation below {:.2f} deg threshold, skipping time bin!".format(flux_config.rad_elev_limit)) continue # The minimum duration of the time bin should be larger than 50% of the given dt if bin_hours < 0.5*timebin: print("!!! 
Time bin duration of {:.2f} h is shorter than 0.5x of the time bin!".format(bin_hours)) continue if len(bin_meteors) >= flux_config.meteros_min: print("Meteors:", len(bin_meteors)) ### Weight collection area by meteor height distribution ### # Determine weights for each height weight_sum = 0 weights = {} for ht in col_areas_ht: wt = meteor_ht_gauss.pdf(float(ht)) weight_sum += wt weights[ht] = wt # Normalize the weights so that the sum is 1 for ht in weights: weights[ht] /= weight_sum ### ### col_area_meteor_ht_raw = 0 for ht in col_areas_ht: for block in col_areas_ht[ht]: col_area_meteor_ht_raw += weights[ht]*col_areas_ht[ht][block][0] print("Raw collection area at meteor heights: {:.2f} km^2".format(col_area_meteor_ht_raw/1e6)) # Compute the angular velocity in the middle of the FOV rad_dist_mid = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim_mid), np.radians(elev_mid)) ang_vel_mid = v_init*np.sin(rad_dist_mid)/r_mid ### Compute the limiting magnitude ### # Compute the mean star FWHM in the given bin fwhm_bin_mean = np.mean([sensor_data[ff_name][0] for ff_name in bin_ffs]) # Compute the mean background stddev in the given bin stddev_bin_mean = np.mean([sensor_data[ff_name][1] for ff_name in bin_ffs]) # Compute the mean photometric zero point in the given bin mag_lev_bin_mean = np.mean([recalibrated_platepars[ff_name].mag_lev for ff_name in bin_ffs if ff_name in recalibrated_platepars]) # # Standard deviation of star PSF, nightly mean (px) # star_stddev = fwhm_bin_mean/2.355 # Compute the theoretical stellar limiting magnitude (bin average) # star_sum = 2*np.pi*(config.k1_det*stddev_bin_mean + config.j1_det)*star_stddev**2 # lm_s = -2.5*np.log10(star_sum) + mag_lev_bin_mean # Use empirical LM calculation lm_s = stellarLMModel(mag_lev_bin_mean) lm_s += frame_min_loss # ### TEST !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!11 # # Artificialy increase limiting magnitude # lm_s 
+= 1.2 # ##### # Compute apparent meteor magnitude lm_m = lm_s - 5*np.log10(r_mid/1e5) - 2.5*np.log10( \ np.degrees(platepar.F_scale*v_init*np.sin(rad_dist_mid)/(config.fps*r_mid*fwhm_bin_mean))) ### ### # Final correction area value (height-weightned) collection_area = 0 # Keep track of the corrections sensitivity_corr_arr = [] range_corr_arr = [] radiant_elev_corr_arr = [] ang_vel_corr_arr = [] total_corr_arr = [] col_area_raw_arr = [] col_area_eff_arr = [] col_area_eff_block_dict = {} # Go through all heights and segment blocks for ht in col_areas_ht: for img_coords in col_areas_ht[ht]: x_mean, y_mean = img_coords # Unpack precomputed values area, azim, elev, sensitivity_ratio, r = col_areas_ht[ht][img_coords] # Compute the angular velocity (rad/s) in the middle of this block rad_dist = angularSeparation(np.radians(radiant_azim), np.radians(radiant_elev), np.radians(azim), np.radians(elev)) ang_vel = v_init*np.sin(rad_dist)/r # If the angular distance from the radiant is less than 15 deg, don't use the block # in the effective collection area if np.degrees(rad_dist) < flux_config.rad_dist_min: area = 0.0 # Compute the range correction range_correction = (1e5/r)**2 #ang_vel_correction = ang_vel/ang_vel_mid # Compute angular velocity correction relative to the nightly mean ang_vel_correction = ang_vel/ang_vel_night_mid ### Apply corrections correction_ratio = 1.0 # Correct the area for vignetting and extinction sensitivity_corr_arr.append(sensitivity_ratio) correction_ratio *= sensitivity_ratio # Correct for the range (cap to an order of magnitude correction) range_correction = max(range_correction, 0.1) range_corr_arr.append(range_correction) correction_ratio *= range_correction # Correct for the radiant elevation (cap to an order of magnitude correction) radiant_elev_correction = np.sin(np.radians(radiant_elev)) radiant_elev_correction = max(radiant_elev_correction, 0.1) radiant_elev_corr_arr.append(radiant_elev_correction) correction_ratio *= 
radiant_elev_correction # Correct for angular velocity (cap to an order of magnitude correction) ang_vel_correction = min(max(ang_vel_correction, 0.1), 10) correction_ratio *= ang_vel_correction ang_vel_corr_arr.append(ang_vel_correction) # Add the collection area to the final estimate with the height weight # Raise the correction to the mass index power total_correction = correction_ratio**(mass_index - 1) total_correction = min(max(total_correction, 0.1), 10) collection_area += weights[ht]*area*total_correction total_corr_arr.append(total_correction) col_area_raw_arr.append(weights[ht]*area) col_area_eff_arr.append(weights[ht]*area*total_correction) if img_coords not in col_area_eff_block_dict: col_area_eff_block_dict[img_coords] = [] col_area_eff_block_dict[img_coords].append(weights[ht]*area*total_correction) # Compute mean corrections sensitivity_corr_avg = np.mean(sensitivity_corr_arr) range_corr_avg = np.mean(range_corr_arr) radiant_elev_corr_avg = np.mean(radiant_elev_corr_arr) ang_vel_corr_avg = np.mean(ang_vel_corr_arr) total_corr_avg = np.median(total_corr_arr) col_area_raw_sum = np.sum(col_area_raw_arr) col_area_eff_sum = np.sum(col_area_eff_arr) print("Raw collection area at meteor heights (CHECK): {:.2f} km^2".format(col_area_raw_sum/1e6)) print("Eff collection area at meteor heights (CHECK): {:.2f} km^2".format(col_area_eff_sum/1e6)) # ### PLOT HOW THE CORRECTION VARIES ACROSS THE FOV # x_arr = [] # y_arr = [] # col_area_eff_block_arr = [] # for img_coords in col_area_eff_block_dict: # x_mean, y_mean = img_coords # #if x_mean not in x_arr: # x_arr.append(x_mean) # #if y_mean not in y_arr: # y_arr.append(y_mean) # col_area_eff_block_arr.append(np.sum(col_area_eff_block_dict[img_coords])) # x_unique = np.unique(x_arr) # y_unique = np.unique(y_arr) # # plt.pcolormesh(x_arr, y_arr, np.array(col_area_eff_block_arr).reshape(len(x_unique), len(y_unique)).T, shading='auto') # plt.title("TOTAL = " + str(np.sum(col_area_eff_block_arr)/1e6)) # 
plt.scatter(x_arr, y_arr, c=np.array(col_area_eff_block_arr)/1e6) # #plt.pcolor(np.array(x_arr).reshape(len(x_unique), len(y_unique)), np.array(y_arr).reshape(len(x_unique), len(y_unique)), np.array(col_area_eff_block_arr).reshape(len(x_unique), len(y_unique))/1e6) # plt.colorbar(label="km^2") # plt.gca().invert_yaxis() # plt.show() # ### # Compute the flux at the bin LM (meteors/1000km^2/h) flux = 1e9*len(bin_meteors)/collection_area/bin_hours # Compute the flux scaled to the nightly mean LM flux_lm_nightly_mean = flux*population_index**(lm_m_nightly_mean - lm_m) # Compute the flux scaled to +6.5M flux_lm_6_5 = flux*population_index**(6.5 - lm_m) print("-- Sensor information ---") print("Star FWHM: {:5.2f} px".format(fwhm_bin_mean)) print("Bkg stddev: {:4.1f} ADU".format(stddev_bin_mean)) print("Photom ZP: {:+6.2f} mag".format(mag_lev_bin_mean)) print("Stellar LM: {:+.2f} mag".format(lm_s)) print("-- Flux ---") print("Meteors: {:d}".format(len(bin_meteors))) print("Col area: {:d} km^2".format(int(collection_area/1e6))) print("Ang vel: {:.2f} deg/s".format(np.degrees(ang_vel_mid))) print("LM app: {:+.2f} mag".format(lm_m)) print("Flux: {:.2f} meteors/1000km^2/h".format(flux)) print("to {:+.2f}: {:.2f} meteors/1000km^2/h".format(lm_m_nightly_mean, flux_lm_nightly_mean)) print("to +6.50: {:.2f} meteors/1000km^2/h".format(flux_lm_6_5)) sol_data.append(sol_mean) flux_lm_6_5_data.append(flux_lm_6_5) meteor_num_data.append(len(bin_meteors)) effective_collection_area_data.append(collection_area) radiant_elev_data.append(radiant_elev) radiant_dist_mid_data.append(np.degrees(rad_dist_mid)) ang_vel_mid_data.append(np.degrees(ang_vel_mid)) lm_s_data.append(lm_s) lm_m_data.append(lm_m) sensitivity_corr_data.append(sensitivity_corr_avg) range_corr_data.append(range_corr_avg) radiant_elev_corr_data.append(radiant_elev_corr_avg) ang_vel_corr_data.append(ang_vel_corr_avg) total_corr_data.append(total_corr_avg) # Print the results print("Solar longitude, Flux at LM +6.5:") for sol, 
flux_lm_6_5 in zip(sol_data, flux_lm_6_5_data): print("{:9.5f}, {:8.4f}".format(sol, flux_lm_6_5)) if show_plots and len(sol_data): # Plot a histogram of peak magnitudes plt.hist(peak_mags, cumulative=True, log=True, bins=len(peak_mags), density=True) # Plot population index r_intercept = -0.7 x_arr = np.linspace(np.min(peak_mags), np.percentile(peak_mags, 60)) plt.plot(x_arr, 10**(np.log10(population_index)*x_arr + r_intercept)) plt.title("r = {:.2f}".format(population_index)) plt.show() # Plot how the derived values change throughout the night fig, axes \ = plt.subplots(nrows=4, ncols=2, sharex=True, figsize=(10, 8)) ((ax_met, ax_lm), (ax_rad_elev, ax_corrs), (ax_rad_dist, ax_col_area), (ax_ang_vel, ax_flux)) = axes fig.suptitle("{:s}, s = {:.2f}, r = {:.2f}".format(shower_code, mass_index, population_index)) ax_met.scatter(sol_data, meteor_num_data) ax_met.set_ylabel("Meteors") ax_rad_elev.plot(sol_data, radiant_elev_data) ax_rad_elev.set_ylabel("Radiant elev (deg)") ax_rad_dist.plot(sol_data, radiant_dist_mid_data) ax_rad_dist.set_ylabel("Radiant dist (deg)") ax_ang_vel.plot(sol_data, ang_vel_mid_data) ax_ang_vel.set_ylabel("Ang vel (deg/s)") ax_ang_vel.set_xlabel("La Sun (deg)") ax_lm.plot(sol_data, lm_s_data, label="Stellar") ax_lm.plot(sol_data, lm_m_data, label="Meteor") ax_lm.set_ylabel("LM") ax_lm.legend() ax_corrs.plot(sol_data, sensitivity_corr_data, label="Sensitivity") ax_corrs.plot(sol_data, range_corr_data, label="Range") ax_corrs.plot(sol_data, radiant_elev_corr_data, label="Rad elev") ax_corrs.plot(sol_data, ang_vel_corr_data, label="Ang vel") ax_corrs.plot(sol_data, total_corr_data, label="Total (median)") ax_corrs.set_ylabel("Corrections") ax_corrs.legend() ax_col_area.plot(sol_data, np.array(effective_collection_area_data)/1e6) ax_col_area.plot(sol_data, len(sol_data)*[col_area_100km_raw/1e6], color='k', \ label="Raw col area at 100 km") ax_col_area.plot(sol_data, len(sol_data)*[col_area_meteor_ht_raw/1e6], color='k', linestyle='dashed', \ 
label="Raw col area at met ht") ax_col_area.set_ylabel("Eff. col. area (km^2)") ax_col_area.legend() ax_flux.scatter(sol_data, flux_lm_6_5_data) ax_flux.set_ylabel("Flux@+6.5M (met/1000km^2/h)") ax_flux.set_xlabel("La Sun (deg)") plt.tight_layout() plt.show() return sol_data, flux_lm_6_5_data
def applyAstrometryFTPdetectinfo(dir_path, ftp_detectinfo_file, platepar_file, UT_corr=0):
    """ Use the given platepar to calculate the celestial coordinates of detected meteors from a
        FTPdetectinfo file and save the updates values.

    A backup copy of the uncalibrated FTPdetectinfo is written first (only once), then the original
    file is overwritten in place with celestial coordinates and photometry added.

    Arguments:
        dir_path: [str] Path to the night.
        ftp_detectinfo_file: [str] Name of the FTPdetectinfo file.
        platepar_file: [str] Name of the platepar file.

    Keyword arguments:
        UT_corr: [float] Difference of time from UTC in hours.
            NOTE(review): this argument is never read inside the function — presumably handled
            downstream or vestigial; confirm before relying on it.

    Return:
        None
    """

    # Save a copy of the uncalibrated FTPdetectinfo
    ftp_detectinfo_copy = "".join(ftp_detectinfo_file.split('.')[:-1]) + "_uncalibrated.txt"

    # Back up the original FTPdetectinfo, only if a backup does not exist already
    if not os.path.isfile(os.path.join(dir_path, ftp_detectinfo_copy)):
        shutil.copy2(os.path.join(dir_path, ftp_detectinfo_file),
            os.path.join(dir_path, ftp_detectinfo_copy))

    # Load the platepar
    platepar = Platepar()
    platepar.read(os.path.join(dir_path, platepar_file))

    # Load the FTPdetectinfo file
    meteor_data = readFTPdetectinfo(dir_path, ftp_detectinfo_file)

    # List for final meteor data
    meteor_list = []

    # Go through every meteor
    for meteor in meteor_data:

        ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, \
            meteor_meas = meteor

        meteor_meas = np.array(meteor_meas)

        # Extract frame number, x, y, intensity (columns of the per-centroid measurement rows)
        frames = meteor_meas[:, 1]
        X_data = meteor_meas[:, 2]
        Y_data = meteor_meas[:, 3]
        level_data = meteor_meas[:, 8]

        # Get the beginning time of the FF file (decoded from the FF file name)
        time_beg = filenameToDatetime(ff_name)

        # Calculate time data of every point by offsetting the FF start time by frame_n/fps seconds
        time_data = []
        for frame_n in frames:
            t = time_beg + datetime.timedelta(seconds=frame_n / fps)
            time_data.append([
                t.year, t.month, t.day, t.hour, t.minute, t.second,
                int(t.microsecond / 1000)
            ])

        # Convert image cooredinates to RA and Dec, and do the photometry
        JD_data, RA_data, dec_data, magnitudes = XY2CorrectedRADecPP(np.array(time_data), X_data, Y_data, \
            level_data, platepar)

        # Compute azimuth and altitude of centroids, one point at a time
        az_data = np.zeros_like(RA_data)
        alt_data = np.zeros_like(RA_data)

        for i in range(len(az_data)):

            jd = JD_data[i]
            ra_tmp = RA_data[i]
            dec_tmp = dec_data[i]

            # NOTE(review): argument order here is (jd, lon, lat, ra, dec), unlike the
            # (ra, dec, jd, lat, lon) order used at other raDec2AltAz call sites in this file.
            # Presumably this is the older API variant paired with XY2CorrectedRADecPP — confirm
            # against the imported raDec2AltAz signature before modifying.
            az_tmp, alt_tmp = raDec2AltAz(jd, platepar.lon, platepar.lat, ra_tmp, dec_tmp)

            az_data[i] = az_tmp
            alt_data[i] = alt_tmp

        # Construct the meteor measurements array (one column per quantity, one row per centroid)
        meteor_picks = np.c_[frames, X_data, Y_data, RA_data, dec_data, az_data, alt_data, level_data, \
            magnitudes]

        # Add the calculated values to the final list
        meteor_list.append([ff_name, meteor_No, rho, phi, meteor_picks])

    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Calibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters so the header can still be written
    # (cam_code/fps otherwise hold the values leaked from the last loop iteration)
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0

    # Save the updated FTPdetectinfo (overwrites the original; backup was made above)
    writeFTPdetectinfo(meteor_list, dir_path, ftp_detectinfo_file, dir_path, cam_code, fps,
        calibration=calib_str, celestial_coords_given=True)
cml_args = arg_parser.parse_args() ######################### # Read command line arguments ftpdetectinfo_path = cml_args.ftpdetectinfo_path[0] dir_path, ftpdetectinfo_name = os.path.split(ftpdetectinfo_path) gauss_sigma = cml_args.psf_sigma[0] saturation_lvl = cml_args.satlvl # Load meteor data from FTPdetecinfo cam_code, fps, meteor_list = readFTPdetectinfo(dir_path, ftpdetectinfo_name, ret_input_format=True) # Load the flat, if given flat = None if cml_args.flat: flat = loadFlat(*os.path.split(cml_args.flat)) corrected_meteor_list = [] # Find matching FF files in the directory for entry in meteor_list: ftp_ff_name, meteor_No, rho, phi, meteor_meas = entry # Find the matching FTPdetectinfo file in the directory
def showerAssociation(config, ftpdetectinfo_list, shower_code=None, show_plot=False, save_plot=False, \
        plot_activity=False):
    """ Do single station shower association based on radiant direction and height.

    Arguments:
        config: [Config instance]
        ftpdetectinfo_list: [list] A list of paths to FTPdetectinfo files.

    Keyword arguments:
        shower_code: [str] Only use this one shower for association (e.g. ETA, PER, SDA). None by default,
            in which case all active showers will be associated.
        show_plot: [bool] Show the plot on the screen. False by default.
        save_plot: [bool] Save the plot in the folder with FTPdetectinfos. False by default.
        plot_activity: [bool] Whether to plot the shower activity plot of not. False by default.

    Return:
        associations, shower_counts: [tuple]
            - associations: [dict] A dictionary where the FF name and the meteor ordinal number on the
                FF file are keys, and the associated Shower object are values.
            - shower_counts: [list] A list of shower code and shower count pairs.
    """

    # Load the list of meteor showers
    shower_list = loadShowers(config.shower_path, config.shower_file_name)

    # Load FTPdetectinfos
    meteor_data = []
    for ftpdetectinfo_path in ftpdetectinfo_list:

        if not os.path.isfile(ftpdetectinfo_path):
            print('No such file:', ftpdetectinfo_path)
            continue

        meteor_data += readFTPdetectinfo(*os.path.split(ftpdetectinfo_path))

    # Nothing to associate if no meteors were loaded
    if not len(meteor_data):
        return {}, []

    # Dictionary which holds FF names as keys and meteor measurements + associated showers as values
    associations = {}

    for meteor in meteor_data:

        ff_name, cam_code, meteor_No, n_segments, fps, hnr, mle, binn, px_fm, rho, phi, \
            meteor_meas = meteor

        # Skip very short meteors
        if len(meteor_meas) < 4:
            continue

        # Check if the data is calibrated
        # NOTE(review): this breaks out of the whole meteor loop on the first uncalibrated entry,
        # it does not just skip that one meteor — presumably intentional (whole file uncalibrated).
        if not meteor_meas[0][0]:
            print('Data is not calibrated! Meteors cannot be associated to showers!')
            break

        # Init container for meteor observation
        meteor_obj = MeteorSingleStation(cam_code, config.latitude, config.longitude, ff_name)

        # Infill the meteor structure
        for entry in meteor_meas:

            calib_status, frame_n, x, y, ra, dec, azim, elev, inten, mag = entry

            # Compute the Julian data of every point
            jd = datetime2JD(filenameToDatetime(ff_name) \
                + datetime.timedelta(seconds=float(frame_n) / fps))

            meteor_obj.addPoint(jd, ra, dec, mag)

        # Fit the great circle and compute the geometrical parameters
        meteor_obj.fitGC()

        # Skip all meteors with beginning heights below 15 deg
        if meteor_obj.beg_alt < 15:
            continue

        # Go through all showers in the list and find the best match
        best_match_shower = None
        best_match_dist = np.inf
        for shower_entry in shower_list:

            # Extract shower parameters
            shower = Shower(shower_entry)

            # If the shower code was given, only check this one shower
            if shower_code is not None:
                if shower.name.lower() != shower_code.lower():
                    continue

            ### Solar longitude filter

            # If the shower doesn't have a stated beginning or end, check if the meteor is within a preset
            # threshold solar longitude difference
            if np.any(np.isnan([shower.lasun_beg, shower.lasun_end])):

                shower.lasun_beg = (shower.lasun_max - config.shower_lasun_threshold) % 360
                shower.lasun_end = (shower.lasun_max + config.shower_lasun_threshold) % 360

            # Filter out all showers which are not active
            if not isAngleBetween(np.radians(shower.lasun_beg), np.radians(meteor_obj.lasun),
                np.radians(shower.lasun_end)):

                continue

            ### ###

            ### Radiant filter ###

            # Assume a fixed meteor height for an approximate apparent radiant
            meteor_fixed_ht = 100000 # 100 km
            shower.computeApparentRadiant(config.latitude, config.longitude, meteor_obj.jdt_ref, \
                meteor_fixed_ht=meteor_fixed_ht)

            # Compute the angle between the meteor radiant and the great circle normal
            radiant_separation = meteor_obj.angularSeparationFromGC(shower.ra, shower.dec)

            # Make sure the meteor is within the radiant distance threshold
            if radiant_separation > config.shower_max_radiant_separation:
                continue

            # Compute angle between the meteor's beginning and end, and the shower radiant
            shower.radiant_vector = vectNorm(raDec2Vector(shower.ra, shower.dec))
            begin_separation = np.degrees(angularSeparationVect(shower.radiant_vector, \
                meteor_obj.meteor_begin_cartesian))
            end_separation = np.degrees(angularSeparationVect(shower.radiant_vector, \
                meteor_obj.meteor_end_cartesian))

            # Make sure the beginning of the meteor is closer to the radiant than it's end
            if begin_separation > end_separation:
                continue

            ### ###

            ### Height filter ###

            # Estimate the limiting meteor height from the velocity (meters)
            filter_beg_ht = heightModel(shower.v_init, ht_type='beg')
            filter_end_ht = heightModel(shower.v_init, ht_type='end')

            ### Estimate the meteor beginning height with +/- 1 frame, otherwise some short meteor may get
            ### rejected

            # Work on deep copies so the duration tweaks don't contaminate the original observation
            meteor_obj_orig = copy.deepcopy(meteor_obj)

            # Shorter
            meteor_obj_m1 = copy.deepcopy(meteor_obj_orig)
            meteor_obj_m1.duration -= 1.0 / config.fps
            meteor_beg_ht_m1 = estimateMeteorHeight(config, meteor_obj_m1, shower)

            # Nominal
            meteor_beg_ht = estimateMeteorHeight(config, meteor_obj_orig, shower)

            # Longer
            meteor_obj_p1 = copy.deepcopy(meteor_obj_orig)
            meteor_obj_p1.duration += 1.0 / config.fps
            meteor_beg_ht_p1 = estimateMeteorHeight(config, meteor_obj_p1, shower)

            # Restore the untouched observation
            meteor_obj = meteor_obj_orig

            ### ###

            # If all heights (even those with +/- 1 frame) are outside the height range, reject the meteor
            if ((meteor_beg_ht_p1 < filter_end_ht) or (meteor_beg_ht_p1 > filter_beg_ht)) and \
                ((meteor_beg_ht < filter_end_ht) or (meteor_beg_ht > filter_beg_ht)) and \
                ((meteor_beg_ht_m1 < filter_end_ht) or (meteor_beg_ht_m1 > filter_beg_ht)):

                continue

            ### ###

            # Compute the radiant elevation above the horizon
            shower.azim, shower.elev = raDec2AltAz(shower.ra, shower.dec, meteor_obj.jdt_ref, \
                config.latitude, config.longitude)

            # Take the shower that's closest to the great circle if there are multiple candidates
            if radiant_separation < best_match_dist:
                best_match_dist = radiant_separation
                best_match_shower = copy.deepcopy(shower)

        # If a shower is given and the match is not this shower, skip adding the meteor to the list
        # If no specific shower is give for association, add all meteors
        if ((shower_code is not None) and (best_match_shower is not None)) or (shower_code is None):

            # Store the associated shower
            associations[(ff_name, meteor_No)] = [meteor_obj, best_match_shower]

    # Find shower frequency and sort by count
    shower_name_list_temp = []
    shower_list_temp = []
    for key in associations:
        _, shower = associations[key]

        # Sporadics (no shower match) are labelled '...'
        if shower is None:
            shower_name = '...'
        else:
            shower_name = shower.name

        shower_name_list_temp.append(shower_name)
        shower_list_temp.append(shower)

    _, unique_showers_indices = np.unique(shower_name_list_temp, return_index=True)
    unique_shower_names = np.array(shower_name_list_temp)[unique_showers_indices]
    unique_showers = np.array(shower_list_temp)[unique_showers_indices]
    shower_counts = [[shower_obj, shower_name_list_temp.count(shower_name)] for shower_obj, \
        shower_name in zip(unique_showers, unique_shower_names)]
    shower_counts = sorted(shower_counts, key=lambda x: x[1], reverse=True)

    # Create a plot of showers
    if show_plot or save_plot:

        # Generate consistent colours
        colors_by_name = makeShowerColors(shower_list)

        # Sporadics get a neutral grey; unknown shower names fall back to 'gray'
        def get_shower_color(shower):
            try:
                return colors_by_name[shower.name] if shower else "0.4"
            except KeyError:
                return 'gray'

        # Init the figure
        plt.figure()

        # Init subplots depending on if the activity plot is done as well
        if plot_activity:
            gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
            ax_allsky = plt.subplot(gs[0], facecolor='black')
            ax_activity = plt.subplot(gs[1], facecolor='black')
        else:
            ax_allsky = plt.subplot(111, facecolor='black')

        # Init the all-sky plot
        allsky_plot = AllSkyPlot(ax_handle=ax_allsky)

        # Plot all meteors
        for key in associations:

            meteor_obj, shower = associations[key]

            ### Plot the observed meteor points ###

            color = get_shower_color(shower)
            allsky_plot.plot(meteor_obj.ra_array, meteor_obj.dec_array, color=color, linewidth=1, zorder=4)

            # Plot the peak of shower meteors a different color
            peak_color = 'blue'
            if shower is not None:
                peak_color = 'tomato'

            allsky_plot.scatter(meteor_obj.ra_array[-1], meteor_obj.dec_array[-1], c=peak_color, marker='+', \
                s=5, zorder=5)

            ### ###

            ### Plot fitted great circle points ###

            # Find the GC phase angle of the beginning of the meteor
            gc_beg_phase = meteor_obj.findGCPhase(meteor_obj.ra_array[0], \
                meteor_obj.dec_array[0])[0] % 360

            # If the meteor belongs to a shower, find the GC phase which ends at the shower
            if shower is not None:
                gc_end_phase = meteor_obj.findGCPhase(shower.ra, shower.dec)[0] % 360

                # Fix 0/360 wrap
                if abs(gc_end_phase - gc_beg_phase) > 180:
                    if gc_end_phase > gc_beg_phase:
                        gc_end_phase -= 360
                    else:
                        gc_beg_phase -= 360

                gc_alpha = 1.0

            else:

                # If it's a sporadic, find the direction to which the meteor should extend
                gc_end_phase = meteor_obj.findGCPhase(meteor_obj.ra_array[-1], \
                    meteor_obj.dec_array[-1])[0]%360

                # Find the correct direction
                if (gc_beg_phase - gc_end_phase) % 360 > (gc_end_phase - gc_beg_phase) % 360:
                    gc_end_phase = gc_beg_phase - 170
                else:
                    gc_end_phase = gc_beg_phase + 170

                gc_alpha = 0.7

            # Store great circle beginning and end phase
            meteor_obj.gc_beg_phase = gc_beg_phase
            meteor_obj.gc_end_phase = gc_end_phase

            # Get phases 180 deg before the meteor
            phase_angles = np.linspace(gc_end_phase, gc_beg_phase, 100) % 360

            # Compute RA/Dec of points on the great circle
            ra_gc, dec_gc = meteor_obj.sampleGC(phase_angles)

            # Cull all points below the horizon
            azim_gc, elev_gc = raDec2AltAz(ra_gc, dec_gc, meteor_obj.jdt_ref, config.latitude, \
                config.longitude)
            temp_arr = np.c_[ra_gc, dec_gc]
            temp_arr = temp_arr[elev_gc > 0]
            ra_gc, dec_gc = temp_arr.T

            # Plot the great circle fitted on the radiant
            gc_color = get_shower_color(shower)
            allsky_plot.plot(ra_gc, dec_gc, linestyle='dotted', color=gc_color, alpha=gc_alpha, linewidth=1)

            # Plot the point closest to the shower radiant
            if shower is not None:
                allsky_plot.plot(ra_gc[0], dec_gc[0], color='r', marker='+', ms=5, mew=1)

                # Store shower radiant point
                meteor_obj.radiant_ra = ra_gc[0]
                meteor_obj.radiant_dec = dec_gc[0]

            ### ###

        ### Plot all showers ###

        # Find unique showers and their apparent radiants computed at highest radiant elevation
        # (otherwise the apparent radiants can be quite off)
        shower_dict = {}
        for key in associations:
            meteor_obj, shower = associations[key]

            if shower is None:
                continue

            # If the shower name is in dict, find the shower with the highest radiant elevation
            if shower.name in shower_dict:
                if shower.elev > shower_dict[shower.name].elev:
                    shower_dict[shower.name] = shower

            else:
                shower_dict[shower.name] = shower

        # Plot the location of shower radiants
        for shower_name in shower_dict:

            shower = shower_dict[shower_name]

            heading_arr = np.linspace(0, 360, 50)

            # Compute coordinates on a circle around the given RA, Dec
            ra_circle, dec_circle = sphericalPointFromHeadingAndDistance(shower.ra, shower.dec, \
                heading_arr, config.shower_max_radiant_separation)

            # Plot the shower circle
            allsky_plot.plot(ra_circle, dec_circle, color=colors_by_name[shower_name])

            # Plot the shower name
            x_text, y_text = allsky_plot.raDec2XY(shower.ra, shower.dec)
            allsky_plot.ax.text(x_text, y_text, shower.name, color='w', size=8, va='center', \
                ha='center', zorder=6)

        # Plot station name and solar longiutde range
        # NOTE(review): cam_code here is the value leaked from the last iteration of the meteor
        # loop above — relies on Python's loop-variable scoping.
        allsky_plot.ax.text(-180, 89, "{:s}".format(cam_code), color='w', family='monospace')

        # Get a list of JDs of meteors
        jd_list = [associations[key][0].jdt_ref for key in associations]

        if len(jd_list):

            # Get the range of solar longitudes
            jd_min = min(jd_list)
            sol_min = np.degrees(jd2SolLonSteyaert(jd_min))
            jd_max = max(jd_list)
            sol_max = np.degrees(jd2SolLonSteyaert(jd_max))

            # Plot the date and solar longitude range
            date_sol_beg = u"Beg: {:s} (sol = {:.2f}\u00b0)".format( \
                jd2Date(jd_min, dt_obj=True).strftime("%Y%m%d %H:%M:%S"), sol_min)
            date_sol_end = u"End: {:s} (sol = {:.2f}\u00b0)".format( \
                jd2Date(jd_max, dt_obj=True).strftime("%Y%m%d %H:%M:%S"), sol_max)

            allsky_plot.ax.text(-180, 85, date_sol_beg, color='w', family='monospace')
            allsky_plot.ax.text(-180, 81, date_sol_end, color='w', family='monospace')
            allsky_plot.ax.text(-180, 77, "-"*len(date_sol_end), color='w', family='monospace')

            # Plot shower counts
            for i, (shower, count) in enumerate(shower_counts):

                if shower is not None:
                    shower_name = shower.name
                else:
                    shower_name = "..."

                allsky_plot.ax.text(-180, 73 - i*4, "{:s}: {:d}".format(shower_name, count), color='w', \
                    family='monospace')

            ### ###

        # Plot yearly meteor shower activity
        # NOTE(review): sol_min/sol_max are only defined inside the len(jd_list) branch above —
        # this presumably assumes at least one association exists when plot_activity is set; confirm.
        if plot_activity:

            # Plot the activity diagram
            generateActivityDiagram(config, shower_list, ax_handle=ax_activity, \
                sol_marker=[sol_min, sol_max], colors=colors_by_name)

        # Save plot and text file
        if save_plot:

            # NOTE(review): ftpdetectinfo_path is the loop variable from the file-loading loop at
            # the top — the plot/text file is named after the LAST FTPdetectinfo in the input list.
            dir_path, ftpdetectinfo_name = os.path.split(ftpdetectinfo_path)
            ftpdetectinfo_base_name = ftpdetectinfo_name.replace('FTPdetectinfo_', '').replace('.txt', '')
            plot_name = ftpdetectinfo_base_name + '_radiants.png'

            # Increase figure size
            allsky_plot.fig.set_size_inches(18, 9, forward=True)

            allsky_plot.beautify()

            plt.savefig(os.path.join(dir_path, plot_name), dpi=100, facecolor='k')

            # Save the text file with shower info
            if len(jd_list):

                with open(os.path.join(dir_path, ftpdetectinfo_base_name + "_radiants.txt"), 'w') as f:

                    # Print station code
                    f.write("# RMS single station association\n")
                    f.write("# \n")
                    f.write("# Station: {:s}\n".format(cam_code))

                    # Print date range
                    f.write("# Beg | End \n")
                    f.write("# -----------------------------------------------------\n")
                    f.write("# Date | {:24s} | {:24s} \n".format(jd2Date(jd_min, \
                        dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), jd2Date(jd_max, \
                        dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f")))
                    f.write("# Sol | {:>24.2f} | {:>24.2f} \n".format(sol_min, sol_max))

                    # Write shower counts
                    f.write("# \n")
                    f.write("# Shower counts:\n")
                    f.write("# --------------\n")
                    f.write("# Code, Count, IAU link\n")

                    for i, (shower, count) in enumerate(shower_counts):

                        if shower is not None:
                            shower_name = shower.name

                            # Create link to the IAU database of showers
                            iau_link = "https://www.ta3.sk/IAUC22DB/MDC2007/Roje/pojedynczy_obiekt.php?kodstrumienia={:05d}".format( \
                                shower.iau_code)

                        else:
                            shower_name = "..."
                            iau_link = "None"

                        f.write("# {:>4s}, {:>5d}, {:s}\n".format(shower_name, count, iau_link))

                    f.write("# \n")
                    f.write("# Meteor parameters:\n")
                    f.write("# ------------------\n")
                    f.write("# Date And Time, Beg Julian date, La Sun, Shower, RA beg, Dec beg, RA end, Dec end, RA rad, Dec rad, GC theta0, GC phi0, GC beg phase, GC end phase, Mag\n")

                    # Create a sorted list of meteor associations by time
                    associations_list = [associations[key] for key in associations]
                    associations_list = sorted(associations_list, key=lambda x: x[0].jdt_ref)

                    # Write out meteor parameters
                    for meteor_obj, shower in associations_list:

                        # Find peak magnitude
                        if np.any(meteor_obj.mag_array):
                            peak_mag = "{:+.1f}".format(np.min(meteor_obj.mag_array))

                        else:
                            peak_mag = "None"

                        if shower is not None:

                            f.write("{:24s}, {:20.12f}, {:>10.6f}, {:>6s}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:9.3f}, {:8.3f}, {:12.3f}, {:12.3f}, {:4s}\n".format(jd2Date(meteor_obj.jdt_ref, dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), \
                                meteor_obj.jdt_ref, meteor_obj.lasun, shower.name, \
                                meteor_obj.ra_array[0]%360, meteor_obj.dec_array[0], \
                                meteor_obj.ra_array[-1]%360, meteor_obj.dec_array[-1], \
                                meteor_obj.radiant_ra%360, meteor_obj.radiant_dec, \
                                np.degrees(meteor_obj.theta0), np.degrees(meteor_obj.phi0), \
                                meteor_obj.gc_beg_phase, meteor_obj.gc_end_phase, peak_mag))

                        else:
                            f.write("{:24s}, {:20.12f}, {:>10.6f}, {:>6s}, {:6.2f}, {:+7.2f}, {:6.2f}, {:+7.2f}, {:>6s}, {:>7s}, {:9.3f}, {:8.3f}, {:12.3f}, {:12.3f}, {:4s}\n".format(jd2Date(meteor_obj.jdt_ref, dt_obj=True).strftime("%Y%m%d %H:%M:%S.%f"), \
                                meteor_obj.jdt_ref, meteor_obj.lasun, '...', meteor_obj.ra_array[0]%360, \
                                meteor_obj.dec_array[0], meteor_obj.ra_array[-1]%360, \
                                meteor_obj.dec_array[-1], "None", "None", np.degrees(meteor_obj.theta0), \
                                np.degrees(meteor_obj.phi0), meteor_obj.gc_beg_phase, \
                                meteor_obj.gc_end_phase, peak_mag))

        # Show the plot interactively, or clean up the figure if it was only saved
        if show_plot:
            allsky_plot.show()

        else:
            plt.clf()
            plt.close()

    return associations, shower_counts
def processNight(night_data_dir, config, detection_results=None, nodetect=False):
    """ Given the directory with FF files, run detection and archiving.

    The full night pipeline: star/meteor detection (or reuse of given results), astrometric
    calibration check and refinement, per-FF recalibration, UFOOrbit export, calibration report,
    field sum plots, flat creation, optional CAMS-compatible output, and final archiving.

    Arguments:
        night_data_dir: [str] Path to the directory with FF files.
        config: [Config obj]

    Keyword arguments:
        detection_results: [list] An optional list of detection. If None (default), detection will
            be done on the files in the folder.
        nodetect: [bool] True if detection should be skipped. False by default.

    Return:
        night_archive_dir: [str] Path to the night directory in ArchivedFiles.
        archive_name: [str] Path to the archive.
        detector: [QueuedPool instance] Handle to the detector. None if detection was skipped or
            precomputed results were saved instead of running the detector.
    """

    # Remove final slash in the night dir so basename() below returns the directory name
    if night_data_dir.endswith(os.sep):
        night_data_dir = night_data_dir[:-1]

    # Extract the name of the night
    night_data_dir_name = os.path.basename(night_data_dir)

    # If the detection should be run
    if (not nodetect):

        # If no detection was performed, run it
        if detection_results is None:

            # Run detection on the given directory
            calstars_name, ftpdetectinfo_name, ff_detected, \
                detector = detectStarsAndMeteorsDirectory(night_data_dir, config)

        # Otherwise, save detection results
        else:

            # Save CALSTARS and FTPdetectinfo to disk
            calstars_name, ftpdetectinfo_name, ff_detected = saveDetections(detection_results, \
                night_data_dir, config)

            # If the files were previously detected, there is no detector
            detector = None

        # Get the platepar file (may return platepar=None if no platepar is available)
        platepar, platepar_path, platepar_fmt = getPlatepar(config, night_data_dir)

        # Run calibration check and auto astrometry refinement
        if platepar is not None:

            # Read in the CALSTARS file
            calstars_list = CALSTARS.readCALSTARS(night_data_dir, calstars_name)

            # Run astrometry check and refinement
            platepar, fit_status = autoCheckFit(config, platepar, calstars_list)

            # If the fit was successful, apply the astrometry to detected meteors
            if fit_status:

                log.info('Astrometric calibration SUCCESSFUL!')

                # Save the refined platepar to the night directory and as default
                platepar.write(os.path.join(night_data_dir, config.platepar_name), fmt=platepar_fmt)
                platepar.write(platepar_path, fmt=platepar_fmt)

            else:
                # On a failed fit the (unrefined) platepar is still used below
                log.info('Astrometric calibration FAILED!, Using old platepar for calibration...')

            # # Calculate astrometry for meteor detections
            # applyAstrometryFTPdetectinfo(night_data_dir, ftpdetectinfo_name, platepar_path)

            log.info("Recalibrating astrometry on FF files with detections...")

            # Recalibrate astrometry on every FF file and apply the calibration to detections
            recalibrateIndividualFFsAndApplyAstrometry(night_data_dir, os.path.join(night_data_dir, \
                ftpdetectinfo_name), calstars_list, config, platepar)

            log.info("Converting RMS format to UFOOrbit format...")

            # Convert the FTPdetectinfo into UFOOrbit input file
            FTPdetectinfo2UFOOrbitInput(night_data_dir, ftpdetectinfo_name, platepar_path)

            # Generate a calibration report (best-effort: failures are logged, not raised)
            log.info("Generating a calibration report...")
            try:
                generateCalibrationReport(config, night_data_dir, platepar=platepar)

            except Exception as e:
                log.debug('Generating calibration report failed with message:\n' + repr(e))
                log.debug(repr(traceback.format_exception(*sys.exc_info())))

    else:
        # Detection skipped: nothing detected, no detector handle
        ff_detected = []
        detector = None


    log.info('Plotting field sums...')

    # Plot field sums (best-effort: failures are logged, not raised)
    try:
        plotFieldsums(night_data_dir, config)
    except Exception as e:
        log.debug('Plotting field sums failed with message:\n' + repr(e))
        log.debug(repr(traceback.format_exception(*sys.exc_info())))

    # Archive all fieldsums to one archive
    archiveFieldsums(night_data_dir)

    # List for any extra files which will be copied to the night archive directory. Full paths
    # have to be given
    extra_files = []

    log.info('Making a flat...')

    # Make a new flat field image (best-effort: on failure, fall through with flat_img = None)
    try:
        flat_img = makeFlat(night_data_dir, config)
    except Exception as e:
        log.debug('Making a flat failed with message:\n' + repr(e))
        log.debug(repr(traceback.format_exception(*sys.exc_info())))
        flat_img = None

    # If making flat was successful, save it
    if flat_img is not None:

        # Save the flat in the night directory, to keep the operational flat updated
        flat_path = os.path.join(night_data_dir, os.path.basename(config.flat_file))
        # NOTE(review): scipy.misc.imsave was deprecated in SciPy 1.0 and removed in SciPy >= 1.2;
        # this call will raise AttributeError on modern SciPy — confirm the pinned SciPy version
        # or migrate to another image writer.
        scipy.misc.imsave(flat_path, flat_img)
        log.info('Flat saved to: ' + flat_path)

        # Copy the flat to the night's directory as well
        extra_files.append(flat_path)

    else:
        log.info('Making flat image FAILED!')

    ### Add extra files to archive

    # Add the config file to the archive too
    # NOTE(review): assumes the process CWD is the RMS root containing '.config' — verify caller.
    extra_files.append(os.path.join(os.getcwd(), '.config'))

    # Add the platepar to the archive if it exists
    if (not nodetect):
        if os.path.exists(platepar_path):
            extra_files.append(platepar_path)

    # Add the json file with recalibrated platepars to the archive
    if (not nodetect):
        recalibrated_platepars_path = os.path.join(night_data_dir, config.platepars_recalibrated_name)
        if os.path.exists(recalibrated_platepars_path):
            extra_files.append(recalibrated_platepars_path)

    ### ###

    # If the detection should be run
    if (not nodetect):

        # Make a CAL file and a special CAMS FTPdetectinfo if full CAMS compatibility is desired
        if config.cams_code > 0:

            log.info('Generating a CAMS FTPdetectinfo file...')

            # Write the CAL file to disk
            # NOTE(review): platepar may be None here (getPlatepar can return None) — confirm
            # writeCAL tolerates a None platepar.
            cal_file_name = writeCAL(night_data_dir, config, platepar)

            # Zero-padded 6-digit CAMS station code
            cams_code_formatted = "{:06d}".format(int(config.cams_code))

            # Load the FTPdetectinfo
            _, fps, meteor_list = readFTPdetectinfo(night_data_dir, ftpdetectinfo_name, \
                ret_input_format=True)

            # Replace the camera code with the CAMS code
            for met in meteor_list:

                # Replace the station name and the FF file format
                ff_name = met[0]
                ff_name = ff_name.replace('.fits', '.bin')
                ff_name = ff_name.replace(config.stationID, cams_code_formatted)
                met[0] = ff_name

                # Replace the station name
                met[1] = cams_code_formatted

            # Write the CAMS compatible FTPdetectinfo file
            writeFTPdetectinfo(meteor_list, night_data_dir, \
                ftpdetectinfo_name.replace(config.stationID, cams_code_formatted),\
                night_data_dir, cams_code_formatted, fps, calibration=cal_file_name, \
                celestial_coords_given=(platepar is not None))

    # Destination directory in ArchivedFiles for this night
    night_archive_dir = os.path.join(os.path.abspath(config.data_dir), config.archived_dir, \
        night_data_dir_name)

    log.info('Archiving detections to ' + night_archive_dir)

    # Archive the detections
    archive_name = archiveDetections(night_data_dir, night_archive_dir, ff_detected, config, \
        extra_files=extra_files)

    return night_archive_dir, archive_name, detector