def saveDetections(detection_results, ff_dir, config):
    """ Save detections to CALSTARS and FTPdetectinfo files.

    Arguments:
        detection_results: [list] A list of outputs from the detectStarsAndMeteors function.
        ff_dir: [str] Path to the night directory.
        config: [Config obj]

    Return:
        calstars_name: [str] Name of the CALSTARS file.
        ftpdetectinfo_name: [str] Name of the FTPdetectinfo file.
        ff_detected: [list] A list of FF files with detections.
    """

    ### SAVE DETECTIONS TO FILE

    # Init data lists
    star_list = []
    meteor_list = []
    ff_detected = []

    # Remove all 'None' results, which were errors
    detection_results = [res for res in detection_results if res is not None]

    # Sort by FF name
    detection_results = sorted(detection_results, key=lambda x: x[0])

    # Count the number of detected meteors
    meteors_num = 0
    for _, _, meteor_data in detection_results:
        for meteor in meteor_data:
            meteors_num += 1

    log.info('TOTAL: ' + str(meteors_num) + ' detected meteors.')

    # Save the detections to a file
    for ff_name, star_data, meteor_data in detection_results:

        if len(star_data) == 4:
            x2, y2, background, intensity = star_data
        else:
            _, x2, y2, background, intensity, _ = star_data

        # Skip if no stars were found
        if not x2:
            continue

        # Construct the table of the star parameters
        star_data = zip(x2, y2, background, intensity)

        # Add star info to the star list
        star_list.append([ff_name, star_data])

        # Handle the detected meteors
        meteor_No = 1
        for meteor in meteor_data:

            rho, theta, centroids = meteor

            # Append to the results list
            meteor_list.append([ff_name, meteor_No, rho, theta, centroids])
            meteor_No += 1

        # Add the FF file to the archive list if a meteor was detected on it
        if meteor_data:
            ff_detected.append(ff_name)

    # Generate the name for the CALSTARS file
    calstars_name = 'CALSTARS_' + "{:s}".format(str(config.stationID)) + '_' \
        + os.path.basename(ff_dir) + '.txt'

    # Write detected stars to the CALSTARS file
    CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, config.stationID, config.height, \
        config.width)

    # Generate FTPdetectinfo file name
    ftpdetectinfo_name = 'FTPdetectinfo_' + os.path.basename(ff_dir) + '.txt'

    # Write FTPdetectinfo file
    FTPdetectinfo.writeFTPdetectinfo(meteor_list, ff_dir, ftpdetectinfo_name, ff_dir, \
        config.stationID, config.fps)

    return calstars_name, ftpdetectinfo_name, ff_detected
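
# A minimal, illustrative usage sketch for saveDetections() above (not part of the module API).
# The tuple layout (ff_name, star_data, meteor_data) follows the unpacking in the function; the
# helper name _exampleSaveDetections, the FF file name, the directory path and all literal values
# are hypothetical placeholders, and detection_results would normally come from detectStarsAndMeteors.
def _exampleSaveDetections(config):

    detection_results = [
        ('FF_XX0001_20190101_221234_567_0000000.fits',
            # star_data: per-star lists of x, y, background level and summed intensity
            ([12.3, 45.6], [78.9, 10.1], [20, 22], [1500, 1800]),
            # meteor_data: one (rho, theta, centroids) entry per detected meteor
            # (centroid rows are placeholder values)
            [(120.0, 45.0, [[0.0, 12.3, 45.6, 1500.0]])]),
    ]

    return saveDetections(detection_results, '/path/to/night_dir', config)
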

def runCapture(config, duration=None, video_file=None, nodetect=False, detect_end=False, \
    upload_manager=None):
    """ Run capture and compression for the given time.

    Arguments:
        config: [config object] Configuration read from the .config file.

    Keyword arguments:
        duration: [float] Time in seconds to capture. None by default.
        video_file: [str] Path to the video file, if it was given as the video source. None by default.
        nodetect: [bool] If True, detection will not be performed. False by default.
        detect_end: [bool] If True, detection will be performed at the end of the night, when capture
            finishes. False by default.
        upload_manager: [UploadManager object] A handle to the UploadManager, which handles uploading
            files to the central server. None by default.
    """

    global STOP_CAPTURE

    # Create a directory for captured files
    night_data_dir_name = str(config.stationID) + '_' \
        + datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S_%f')

    # Full path to the data directory
    night_data_dir = os.path.join(os.path.abspath(config.data_dir), config.captured_dir, \
        night_data_dir_name)

    # Make a directory for the night
    mkdirP(night_data_dir)

    log.info('Data directory: ' + night_data_dir)

    # Load the default flat field image if it is available
    flat_struct = None

    if config.use_flat:

        # Check if the flat exists
        if os.path.exists(os.path.join(os.getcwd(), config.flat_file)):
            flat_struct = Image.loadFlat(os.getcwd(), config.flat_file)

            log.info('Loaded flat field image: ' + os.path.join(os.getcwd(), config.flat_file))

    # Get the platepar file
    platepar, platepar_path, platepar_fmt = getPlatepar(config)

    log.info('Initializing frame buffers...')

    ### For some reason, the RPi 3 does not like memory chunks whose size is a multiple of its L2
    ### cache size (512 kB). When such a memory chunk is provided, the compression becomes 10x slower
    ### than usual. We apply a dirty fix here: an extra image row and column are added if such a
    ### memory chunk would be created. The compression is performed, and the image is cropped back
    ### to its original dimensions.
    array_pad = 0

    # Check if the image dimensions are divisible by the RPi3 L2 cache size and add padding
    if (256*config.width*config.height) % (512*1024) == 0:
        array_pad = 1

    # Init arrays for parallel compression on 2 cores
    sharedArrayBase = multiprocessing.Array(ctypes.c_uint8, \
        256*(config.width + array_pad)*(config.height + array_pad))
    sharedArray = np.ctypeslib.as_array(sharedArrayBase.get_obj())
    sharedArray = sharedArray.reshape(256, (config.height + array_pad), (config.width + array_pad))
    startTime = multiprocessing.Value('d', 0.0)

    sharedArrayBase2 = multiprocessing.Array(ctypes.c_uint8, \
        256*(config.width + array_pad)*(config.height + array_pad))
    sharedArray2 = np.ctypeslib.as_array(sharedArrayBase2.get_obj())
    sharedArray2 = sharedArray2.reshape(256, (config.height + array_pad), (config.width + array_pad))
    startTime2 = multiprocessing.Value('d', 0.0)

    log.info('Initializing frame buffers done!')

    # Check if the detection should be performed or not
    if nodetect:
        detector = None

    else:

        if detect_end:
            # Delay detection until the end of the night
            delay_detection = duration

        else:
            # Delay the detection for 2 minutes after capture start
            delay_detection = 120

        # Initialize the detector
        detector = QueuedPool(detectStarsAndMeteors, cores=1, log=log, delay_start=delay_detection)
        detector.startPool()

    # Initialize buffered capture
    bc = BufferedCapture(sharedArray, startTime, sharedArray2, startTime2, config, \
        video_file=video_file)

    # Initialize the live image viewer
    live_view = LiveViewer(window_name='Maxpixel')

    # Initialize compression
    compressor = Compressor(night_data_dir, sharedArray, startTime, sharedArray2, startTime2, config, \
        detector=detector, live_view=live_view, flat_struct=flat_struct)

    # Start buffered capture
    bc.startCapture()

    # Start the compression
    compressor.start()

    # Capture until Ctrl+C is pressed
    wait(duration)

    # If capture was manually stopped, end capture
    if STOP_CAPTURE:
        log.info('Ending capture...')

    # Stop the capture
    log.debug('Stopping capture...')
    bc.stopCapture()
    log.debug('Capture stopped')

    dropped_frames = bc.dropped_frames
    log.info('Total number of dropped frames: ' + str(dropped_frames))

    # Stop the compressor
    log.debug('Stopping compression...')
    detector, live_view = compressor.stop()
    log.debug('Compression stopped')

    # Stop the live viewer
    log.debug('Stopping live viewer...')
    live_view.stop()
    del live_view
    log.debug('Live view stopped')

    # Init data lists
    star_list = []
    meteor_list = []
    ff_detected = []

    # If detection should be performed
    if not nodetect:

        log.info('Finishing up the detection, ' + str(detector.input_queue.qsize()) \
            + ' files to process...')

        # Reset the Ctrl+C to KeyboardInterrupt
        resetSIGINT()

        try:

            # If there are some more files to process, process them on more cores
            if detector.input_queue.qsize() > 0:

                # Let the detector use all cores, but leave 1 free
                available_cores = multiprocessing.cpu_count() - 1

                if available_cores > 1:

                    log.info('Running the detection on {:d} cores...'.format(available_cores))

                    # Start the detector
                    detector.updateCoreNumber(cores=available_cores)

            log.info('Waiting for the detection to finish...')

            # Wait for the detector to finish and close it
            detector.closePool()

            log.info('Detection finished!')

        except KeyboardInterrupt:

            log.info('Ctrl + C pressed, exiting...')

            if upload_manager is not None:

                # Stop the upload manager
                if upload_manager.is_alive():
                    log.debug('Closing upload manager...')
                    upload_manager.stop()
                    del upload_manager

            # Terminate the detector
            if detector is not None:
                del detector

            sys.exit()

        # Set the Ctrl+C back to the 'soft' program kill
        setSIGINT()

        ### SAVE DETECTIONS TO FILE

        log.info('Collecting results...')

        # Get the detection results from the queue
        detection_results = detector.getResults()

        # Remove all 'None' results, which were errors
        detection_results = [res for res in detection_results if res is not None]

        # Count the number of detected meteors
        meteors_num = 0
        for _, _, meteor_data in detection_results:
            for meteor in meteor_data:
                meteors_num += 1

        log.info('TOTAL: ' + str(meteors_num) + ' detected meteors.')

        # Save the detections to a file
        for ff_name, star_data, meteor_data in detection_results:

            x2, y2, background, intensity = star_data

            # Skip if no stars were found
            if not x2:
                continue

            # Construct the table of the star parameters
            star_data = zip(x2, y2, background, intensity)

            # Add star info to the star list
            star_list.append([ff_name, star_data])

            # Handle the detected meteors
            meteor_No = 1
            for meteor in meteor_data:

                rho, theta, centroids = meteor

                # Append to the results list
                meteor_list.append([ff_name, meteor_No, rho, theta, centroids])
                meteor_No += 1

            # Add the FF file to the archive list if a meteor was detected on it
            if meteor_data:
                ff_detected.append(ff_name)

        # Generate the name for the CALSTARS file
        calstars_name = 'CALSTARS_' + "{:s}".format(str(config.stationID)) + '_' \
            + os.path.basename(night_data_dir) + '.txt'

        # Write detected stars to the CALSTARS file
        CALSTARS.writeCALSTARS(star_list, night_data_dir, calstars_name, config.stationID, \
            config.height, config.width)

        # Generate FTPdetectinfo file name
        ftpdetectinfo_name = 'FTPdetectinfo_' + os.path.basename(night_data_dir) + '.txt'

        # Write FTPdetectinfo file
        FTPdetectinfo.writeFTPdetectinfo(meteor_list, night_data_dir, ftpdetectinfo_name, \
            night_data_dir, config.stationID, config.fps)

        # Get the platepar file
        platepar, platepar_path, platepar_fmt = getPlatepar(config)

        # Run calibration check and auto astrometry refinement
        if platepar is not None:

            # Read in the CALSTARS file
            calstars_list = CALSTARS.readCALSTARS(night_data_dir, calstars_name)

            # Run astrometry check and refinement
            platepar, fit_status = autoCheckFit(config, platepar, calstars_list)

            # If the fit was successful, apply the astrometry to detected meteors
            if fit_status:

                log.info('Astrometric calibration SUCCESSFUL!')

                # Save the refined platepar to the night directory and as default
                platepar.write(os.path.join(night_data_dir, config.platepar_name), fmt=platepar_fmt)
                platepar.write(platepar_path, fmt=platepar_fmt)

            else:
                log.info('Astrometric calibration FAILED! Using the old platepar for calibration...')

            # Calculate astrometry for meteor detections
            applyAstrometryFTPdetectinfo(night_data_dir, ftpdetectinfo_name, platepar_path)

    log.info('Plotting field sums...')

    # Plot field sums to a graph
    plotFieldsums(night_data_dir, config)

    # Archive all fieldsums to one archive
    archiveFieldsums(night_data_dir)

    # List of any extra files which will be copied to the night archive directory.
    # Full paths have to be given.
    extra_files = []

    log.info('Making a flat...')

    # Make a new flat field
    flat_img = makeFlat(night_data_dir, config)

    # If making the flat was successful, save it
    if flat_img is not None:

        # Save the flat in the root directory, to keep the operational flat updated
        scipy.misc.imsave(config.flat_file, flat_img)
        flat_path = os.path.join(os.getcwd(), config.flat_file)
        log.info('Flat saved to: ' + flat_path)

        # Copy the flat to the night's directory as well
        extra_files.append(flat_path)

    else:
        log.info('Making flat image FAILED!')

    ### Add extra files to archive

    # Add the platepar to the archive if it exists
    if os.path.exists(platepar_path):
        extra_files.append(platepar_path)

    # Add the config file to the archive too
    extra_files.append(os.path.join(os.getcwd(), '.config'))

    ### ###

    night_archive_dir = os.path.join(os.path.abspath(config.data_dir), config.archived_dir, \
        night_data_dir_name)

    log.info('Archiving detections to ' + night_archive_dir)

    # Archive the detections
    archive_name = archiveDetections(night_data_dir, night_archive_dir, ff_detected, config, \
        extra_files=extra_files)

    # Put the archive up for upload
    if upload_manager is not None:
        log.info('Adding file on upload list: ' + archive_name)
        upload_manager.addFiles([archive_name])

    # If capture was manually stopped, end program
    if STOP_CAPTURE:

        log.info('Ending program')

        # Stop the upload manager
        if upload_manager is not None:
            if upload_manager.is_alive():
                upload_manager.stop()
                log.info('Closing upload manager...')

        sys.exit()
        for meteor in meteor_data:

            rho, theta, centroids = meteor

            # Append to the results list
            meteor_list.append([ff_name, meteor_No, rho, theta, centroids])
            meteor_No += 1

        # Add the FF file to the archive list if a meteor was detected on it
        if meteor_data:
            ff_detected.append(ff_name)

    # Generate the name for the CALSTARS file
    calstars_name = 'CALSTARS_' + "{:s}".format(str(config.stationID)) + '_' \
        + os.path.basename(ff_dir) + '.txt'

    # Write detected stars to the CALSTARS file
    CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, config.stationID, config.height, \
        config.width)

    # Generate FTPdetectinfo file name
    ftpdetectinfo_name = 'FTPdetectinfo_' + os.path.basename(ff_dir) + '.txt'

    # Write FTPdetectinfo file
    FTPdetectinfo.writeFTPdetectinfo(meteor_list, ff_dir, ftpdetectinfo_name, ff_dir, \
        config.stationID, config.fps)

    print('Total time taken: ', datetime.datetime.utcnow() - time_start)
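
# An illustrative invocation sketch for runCapture() defined above (not part of the module API).
# It assumes a Config object has already been parsed from the .config file by the caller, and that
# logging and the signal handlers are set up elsewhere; the helper name _exampleRunCapture and the
# 2-hour duration are placeholders.
def _exampleRunCapture(config):

    # Capture and compress for two hours, run detection at the end of the night, no uploads
    runCapture(config, duration=2*3600, video_file=None, nodetect=False, detect_end=True, \
        upload_manager=None)
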

def saveDetections(detection_results, ff_dir, config):
    """ Save detections to CALSTARS and FTPdetectinfo files.

    Arguments:
        detection_results: [list] A list of outputs from the detectStarsAndMeteors function.
        ff_dir: [str] Path to the night directory.
        config: [Config obj]

    Return:
        calstars_name: [str] Name of the CALSTARS file.
        ftpdetectinfo_name: [str] Name of the FTPdetectinfo file.
        ff_detected: [list] A list of FF files with detections.
    """

    ### SAVE DETECTIONS TO FILE

    # Init data lists
    star_list = []
    meteor_list = []
    ff_detected = []

    # Remove all 'None' results, which were errors
    detection_results = [res for res in detection_results if res is not None]

    # Sort by FF name
    detection_results = sorted(detection_results, key=lambda x: x[0])

    # Count the number of detected meteors
    meteors_num = 0
    for _, _, meteor_data in detection_results:
        for meteor in meteor_data:
            meteors_num += 1

    log.info('TOTAL: ' + str(meteors_num) + ' detected meteors.')

    # Save the detections to a file
    for ff_name, star_data, meteor_data in detection_results:

        if len(star_data) == 4:
            x2, y2, background, intensity = star_data
            fwhm = (np.zeros_like(x2) - 1).tolist()

        else:
            _, x2, y2, background, intensity, fwhm = star_data

        # Skip if no stars were found
        if not x2:
            continue

        # Construct the table of the star parameters
        star_data = zip(x2, y2, background, intensity, fwhm)

        # Add star info to the star list
        star_list.append([ff_name, star_data])

        # Handle the detected meteors
        meteor_No = 1
        for meteor in meteor_data:

            rho, theta, centroids = meteor

            # Append to the results list
            meteor_list.append([ff_name, meteor_No, rho, theta, centroids])
            meteor_No += 1

        # Add the FF file to the archive list if a meteor was detected on it
        if meteor_data:
            ff_detected.append(ff_name)

    dir_name = os.path.basename(os.path.abspath(ff_dir))
    if dir_name.startswith(config.stationID):
        prefix = dir_name
    else:
        prefix = "{:s}_{:s}".format(config.stationID, dir_name)

    # Generate the name for the CALSTARS file
    calstars_name = 'CALSTARS_' + prefix + '.txt'

    # Write detected stars to the CALSTARS file
    CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, config.stationID, config.height, \
        config.width)

    # Generate FTPdetectinfo file name
    ftpdetectinfo_name = 'FTPdetectinfo_' + os.path.basename(ff_dir) + '.txt'

    # Write FTPdetectinfo file
    FTPdetectinfo.writeFTPdetectinfo(meteor_list, ff_dir, ftpdetectinfo_name, ff_dir, \
        config.stationID, config.fps)

    return calstars_name, ftpdetectinfo_name, ff_detected

def recalibrateIndividualFFsAndApplyAstrometry(dir_path, ftpdetectinfo_path, calstars_list, config, \
    platepar, generate_plot=True):
    """ Recalibrate FF files with detections and apply the recalibrated platepar to those detections.

    Arguments:
        dir_path: [str] Path where the FTPdetectinfo file is.
        ftpdetectinfo_path: [str] Path to the FTPdetectinfo file.
        calstars_list: [list] A list of entries [[ff_name, star_coordinates], ...].
        config: [Config instance]
        platepar: [Platepar instance] Initial platepar.

    Keyword arguments:
        generate_plot: [bool] Generate the calibration variation plot. True by default.

    Return:
        recalibrated_platepars: [dict] A dictionary where the keys are FF file names and values are
            recalibrated platepar instances for every FF file.
    """

    # Use a copy of the config file
    config = copy.deepcopy(config)

    # If the given file does not exist, return nothing
    if not os.path.isfile(ftpdetectinfo_path):
        print('ERROR! The FTPdetectinfo file does not exist: {:s}'.format(ftpdetectinfo_path))
        print('    The recalibration on every file was not done!')

        return {}

    # Read the FTPdetectinfo data
    cam_code, fps, meteor_list = FTPdetectinfo.readFTPdetectinfo(*os.path.split(ftpdetectinfo_path), \
        ret_input_format=True)

    # Convert the list of stars to a per FF name dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

    ### Add neighboring FF files for more robust photometry estimation ###

    ff_processing_list = []

    # Make a list of sorted FF files in CALSTARS
    calstars_ffs = sorted([ff_file for ff_file in calstars])

    # Go through the list of FF files with detections and add neighboring FFs
    for meteor_entry in meteor_list:

        ff_name = meteor_entry[0]

        if ff_name in calstars_ffs:

            # Find the index of the given FF file in the list of calstars
            ff_indx = calstars_ffs.index(ff_name)

            # Add neighbours to the processing list
            for k in range(-(RECALIBRATE_NEIGHBOURHOOD_SIZE//2), RECALIBRATE_NEIGHBOURHOOD_SIZE//2 + 1):

                k_indx = ff_indx + k

                if (k_indx > 0) and (k_indx < len(calstars_ffs)):

                    ff_name_tmp = calstars_ffs[k_indx]
                    if ff_name_tmp not in ff_processing_list:
                        ff_processing_list.append(ff_name_tmp)

    # Sort the processing list of FF files
    ff_processing_list = sorted(ff_processing_list)

    ### ###

    # Globally increase the catalog limiting magnitude
    config.catalog_mag_limit += 1

    # Load catalog stars (overwrite the mag band ratios if a specific catalog is used)
    star_catalog_status = StarCatalog.readStarCatalog(config.star_catalog_path, \
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)

    if not star_catalog_status:
        print("Could not load the star catalog!")
        print(os.path.join(config.star_catalog_path, config.star_catalog_file))
        return {}

    catalog_stars, _, config.star_catalog_band_ratios = star_catalog_status

    # Update the platepar coordinates from the config file
    platepar.lat = config.latitude
    platepar.lon = config.longitude
    platepar.elev = config.elevation

    prev_platepar = copy.deepcopy(platepar)

    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for ff_name in ff_processing_list:

        working_platepar = copy.deepcopy(prev_platepar)

        # Skip this FF file if it was already recalibrated
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print('------------------------------------------------------------------------------')

        # Find extracted stars on this image
        if not ff_name in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry, the residuals
        # function needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        # Recalibrate the platepar using star matching
        result, min_match_radius = recalibrateFF(config, working_platepar, jd, star_dict_ff, \
            catalog_stars)

        # If the recalibration failed, try using FFT alignment
        if result is None:

            print()
            print('Running FFT alignment...')

            # Run FFT alignment
            calstars_coords = np.array(star_dict_ff[jd])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
            print(calstars_time)
            test_platepar = alignPlatepar(config, prev_platepar, calstars_time, calstars_coords, \
                show_plot=False)

            # Try to recalibrate after FFT alignment
            result, _ = recalibrateFF(config, test_platepar, jd, star_dict_ff, catalog_stars)

            # If the FFT alignment failed, align the original platepar using the smallest radius that
            # matched, and force save the platepar
            if (result is None) and (min_match_radius is not None):

                print()
                print("Using the old platepar with the minimum match radius of: {:.2f}".format(min_match_radius))

                result, _ = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars, \
                    max_match_radius=min_match_radius, force_platepar_save=True)

                if result is not None:
                    working_platepar = result

            # If the alignment succeeded, save the result
            else:
                working_platepar = result

        else:
            working_platepar = result

        # Store the platepar if the fit succeeded
        if result is not None:

            # Recompute alt/az of the FOV centre
            working_platepar.az_centre, working_platepar.alt_centre = raDec2AltAz(working_platepar.RA_d, \
                working_platepar.dec_d, working_platepar.JD, working_platepar.lat, working_platepar.lon)

            # Recompute the rotation wrt horizon
            working_platepar.rotation_from_horiz = rotationWrtHorizon(working_platepar)

            # Mark the platepar to indicate that it was automatically recalibrated on an individual FF file
            working_platepar.auto_recalibrated = True

            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:

            print('Recalibration of {:s} failed, using the previous platepar...'.format(ff_name))

            # Mark the platepar to indicate that the automatic recalibration failed
            prev_platepar_tmp = copy.deepcopy(prev_platepar)
            prev_platepar_tmp.auto_recalibrated = False

            # If the aligning failed, set the previous platepar as the one that should be used for this
            # FF file
            recalibrated_platepars[ff_name] = prev_platepar_tmp

    ### Average out photometric offsets within the given neighbourhood size ###

    # Go through the list of FF files with detections
    for meteor_entry in meteor_list:

        ff_name = meteor_entry[0]

        # Make sure the FF was successfully recalibrated
        if ff_name in recalibrated_platepars:

            # Find the index of the given FF file in the list of calstars
            ff_indx = calstars_ffs.index(ff_name)

            # Compute the average photometric offset and the improved standard deviation using all
            # neighbors
            photom_offset_tmp_list = []
            photom_offset_std_tmp_list = []
            neighboring_ffs = []
            for k in range(-(RECALIBRATE_NEIGHBOURHOOD_SIZE//2), RECALIBRATE_NEIGHBOURHOOD_SIZE//2 + 1):

                k_indx = ff_indx + k

                if (k_indx > 0) and (k_indx < len(calstars_ffs)):

                    # Get the name of the FF file
                    ff_name_tmp = calstars_ffs[k_indx]

                    # Check that the neighboring FF was successfully recalibrated
                    if ff_name_tmp in recalibrated_platepars:

                        # Get the computed photometric offset and stddev
                        photom_offset_tmp_list.append(recalibrated_platepars[ff_name_tmp].mag_lev)
                        photom_offset_std_tmp_list.append(recalibrated_platepars[ff_name_tmp].mag_lev_stddev)
                        neighboring_ffs.append(ff_name_tmp)

            # Compute the new photometric offset and improved standard deviation (assume equal sample size)
            # Source: https://stats.stackexchange.com/questions/55999/is-it-possible-to-find-the-combined-standard-deviation
            photom_offset_new = np.mean(photom_offset_tmp_list)
            photom_offset_std_new = np.sqrt(\
                np.sum([st**2 + (mt - photom_offset_new)**2 \
                    for mt, st in zip(photom_offset_tmp_list, photom_offset_std_tmp_list)]) \
                /len(photom_offset_tmp_list)
                )

            # Assign the new photometric offset and standard deviation to all FFs used for computation
            for ff_name_tmp in neighboring_ffs:
                recalibrated_platepars[ff_name_tmp].mag_lev = photom_offset_new
                recalibrated_platepars[ff_name_tmp].mag_lev_stddev = photom_offset_std_new

    ### ###

    ### Store all recalibrated platepars as a JSON file ###

    all_pps = {}
    for ff_name in recalibrated_platepars:

        json_str = recalibrated_platepars[ff_name].jsonStr()

        all_pps[ff_name] = json.loads(json_str)

    with open(os.path.join(dir_path, config.platepars_recalibrated_name), 'w') as f:

        # Convert all platepars to a JSON file
        out_str = json.dumps(all_pps, default=lambda o: o.__dict__, indent=4, sort_keys=True)

        f.write(out_str)

    ### ###

    # If no platepars were recalibrated, use the single platepar recalibration procedure
    if len(recalibrated_platepars) == 0:

        print('No FF images were used for recalibration, using the single platepar calibration function...')

        # Use the initial platepar for calibration
        applyAstrometryFTPdetectinfo(dir_path, os.path.basename(ftpdetectinfo_path), None, \
            platepar=platepar)

        return recalibrated_platepars

    ### GENERATE PLOTS ###

    dt_list = []
    ang_dists = []
    rot_angles = []
    hour_list = []
    photom_offset_list = []
    photom_offset_std_list = []

    first_dt = np.min([FFfile.filenameToDatetime(ff_name) for ff_name in recalibrated_platepars])

    for ff_name in recalibrated_platepars:

        pp_temp = recalibrated_platepars[ff_name]

        # If the fitting failed, skip the platepar
        if pp_temp is None:
            continue

        # Add the datetime of the FF file to the list
        ff_dt = FFfile.filenameToDatetime(ff_name)
        dt_list.append(ff_dt)

        # Compute the angular separation from the reference platepar
        ang_dist = np.degrees(angularSeparation(np.radians(platepar.RA_d), np.radians(platepar.dec_d), \
            np.radians(pp_temp.RA_d), np.radians(pp_temp.dec_d)))
        ang_dists.append(ang_dist*60)

        # Compute the rotation difference
        rot_diff = (platepar.pos_angle_ref - pp_temp.pos_angle_ref + 180)%360 - 180
        rot_angles.append(rot_diff*60)

        # Compute the hour of the FF used for recalibration
        hour_list.append((ff_dt - first_dt).total_seconds()/3600)

        # Add the photometric offset to the list
        photom_offset_list.append(pp_temp.mag_lev)
        photom_offset_std_list.append(pp_temp.mag_lev_stddev)

    if generate_plot:

        # Generate the name for the plots
        plot_name = os.path.basename(ftpdetectinfo_path).replace('FTPdetectinfo_', '').replace('.txt', '')

        ### Plot difference from reference platepar in angular distance from (0, 0) vs rotation ###

        plt.figure()

        plt.scatter(0, 0, marker='o', edgecolor='k', label='Reference platepar', s=100, c='none', zorder=3)
        plt.scatter(ang_dists, rot_angles, c=hour_list, zorder=3)
        plt.colorbar(label="Hours from first FF file")

        plt.xlabel("Angular distance from reference (arcmin)")
        plt.ylabel("Rotation from reference (arcmin)")

        plt.title("FOV centre drift starting at {:s}".format(first_dt.strftime("%Y/%m/%d %H:%M:%S")))

        plt.grid()
        plt.legend()

        plt.tight_layout()

        plt.savefig(os.path.join(dir_path, plot_name + '_calibration_variation.png'), dpi=150)
        # plt.show()

        plt.clf()
        plt.close()

        ### ###

        ### Plot the photometric offset variation ###

        plt.figure()

        plt.errorbar(dt_list, photom_offset_list, yerr=photom_offset_std_list, fmt="o", \
            ecolor='lightgray', elinewidth=2, capsize=0, ms=2)

        # Format datetimes
        plt.gca().xaxis.set_major_formatter(mdates.DateFormatter("%H:%M"))

        # Rotate and align the tick labels so they look better
        plt.gcf().autofmt_xdate()

        plt.xlabel("UTC time")
        plt.ylabel("Photometric offset")

        plt.title("Photometric offset variation")

        plt.grid()

        plt.tight_layout()

        plt.savefig(os.path.join(dir_path, plot_name + '_photometry_variation.png'), dpi=150)

        plt.clf()
        plt.close()

    ### ###

    ### Apply platepars to FTPdetectinfo ###

    meteor_output_list = []
    for meteor_entry in meteor_list:

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Get the platepar that will be applied to this FF file
        if ff_name in recalibrated_platepars:
            working_platepar = recalibrated_platepars[ff_name]

        else:
            print('Using default platepar for:', ff_name)
            working_platepar = platepar

        # Apply the recalibrated platepar to meteor centroids
        meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, working_platepar, \
            add_calstatus=True)

        meteor_output_list.append([ff_name, meteor_No, rho, phi, meteor_picks])

    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Recalibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0

    # Back up the old FTPdetectinfo file
    try:
        shutil.copy(ftpdetectinfo_path, ftpdetectinfo_path.strip('.txt') \
            + '_backup_{:s}.txt'.format(datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S.%f')))
    except:
        print('ERROR! The FTPdetectinfo file could not be backed up: {:s}'.format(ftpdetectinfo_path))

    # Save the updated FTPdetectinfo
    FTPdetectinfo.writeFTPdetectinfo(meteor_output_list, dir_path, os.path.basename(ftpdetectinfo_path), \
        dir_path, cam_code, fps, calibration=calib_str, celestial_coords_given=True)

    ### ###

    return recalibrated_platepars
            flat_struct=flat_struct)

        meteor_No = 1
        for meteor in meteor_detections:

            rho, theta, centroids = meteor

            # Print detection to file
            results_file.write('-------------------------------------------------------\n')
            results_file.write(ff_name + '\n')
            results_file.write(str(rho) + ',' + str(theta) + '\n')
            results_file.write(str(centroids) + '\n')

            # Append to the results list
            results_list.append([ff_name, meteor_No, rho, theta, centroids])

            meteor_No += 1
            total_meteors += 1

    results_file.close()

    ftpdetectinfo_name = 'FTPdetectinfo_' + results_name + '.txt'

    # Write FTPdetectinfo file
    FTPdetectinfo.writeFTPdetectinfo(results_list, results_path, ftpdetectinfo_name, results_path, \
        config.stationID, config.fps)

    print('Time for the whole directory:', time() - time_whole)
    print('Detected meteors:', total_meteors)
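
# A small self-contained sketch of the combined standard deviation used above in
# recalibrateIndividualFFsAndApplyAstrometry() when averaging photometric offsets over neighbouring
# FF files (equal sample sizes assumed, as in the linked stats.stackexchange source). The helper
# name _examplePhotomOffsetCombination and the numeric values are placeholders.
def _examplePhotomOffsetCombination():

    # Per-FF photometric offsets and their standard deviations (made-up values)
    offsets = [10.2, 10.4, 10.1]
    stddevs = [0.15, 0.20, 0.10]

    # Combined mean offset
    offset_new = np.mean(offsets)

    # Combined standard deviation: mean of the individual variances plus the spread of the means
    offset_std_new = np.sqrt(np.sum([st**2 + (mt - offset_new)**2 \
        for mt, st in zip(offsets, stddevs)])/len(offsets))

    return offset_new, offset_std_new
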

def recalibrateIndividualFFsAndApplyAstrometry(dir_path, ftpdetectinfo_path, calstars_list, config, \
    platepar):
    """ Recalibrate FF files with detections and apply the recalibrated platepar to those detections.

    Arguments:
        dir_path: [str] Path where the FTPdetectinfo file is.
        ftpdetectinfo_path: [str] Path to the FTPdetectinfo file.
        calstars_list: [list] A list of entries [[ff_name, star_coordinates], ...].
        config: [Config instance]
        platepar: [Platepar instance] Initial platepar.

    Return:
        recalibrated_platepars: [dict] A dictionary where the keys are FF file names and values are
            recalibrated platepar instances for every FF file.
    """

    # Read the FTPdetectinfo data
    cam_code, fps, meteor_list = FTPdetectinfo.readFTPdetectinfo(*os.path.split(ftpdetectinfo_path), \
        ret_input_format=True)

    # Convert the list of stars to a per FF name dictionary
    calstars = {ff_file: star_data for ff_file, star_data in calstars_list}

    # Load catalog stars (overwrite the mag band ratios if a specific catalog is used)
    catalog_stars, _, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(config.star_catalog_path, \
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)

    prev_platepar = copy.deepcopy(platepar)

    # Go through all FF files with detections, recalibrate and apply astrometry
    recalibrated_platepars = {}
    for meteor_entry in meteor_list:

        working_platepar = copy.deepcopy(prev_platepar)

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Skip this meteor if its FF file was already recalibrated
        if ff_name in recalibrated_platepars:
            continue

        print()
        print('Processing: ', ff_name)
        print('------------------------------------------------------------------------------')

        # Find extracted stars on this image
        if not ff_name in calstars:
            print('Skipped because it was not in CALSTARS:', ff_name)
            continue

        # Get stars detected on this FF file (create a dictionary with only one entry, the residuals
        # function needs this format)
        calstars_time = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        jd = date2JD(*calstars_time)
        star_dict_ff = {jd: calstars[ff_name]}

        # Recalibrate the platepar using star matching
        result = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)

        # If the recalibration failed, try using FFT alignment
        if result is None:

            print()
            print('Running FFT alignment...')

            # Run FFT alignment
            calstars_coords = np.array(star_dict_ff[jd])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]
            print(calstars_time)
            working_platepar = alignPlatepar(config, prev_platepar, calstars_time, calstars_coords, \
                show_plot=False)

            # Try to recalibrate after FFT alignment
            result = recalibrateFF(config, working_platepar, jd, star_dict_ff, catalog_stars)

            if result is not None:
                working_platepar = result

        else:
            working_platepar = result

        # Store the platepar if the fit succeeded
        if result is not None:
            recalibrated_platepars[ff_name] = working_platepar
            prev_platepar = working_platepar

        else:
            print('Recalibration of {:s} failed, using the previous platepar...'.format(ff_name))

            # If the aligning failed, set the previous platepar as the one that should be used for this
            # FF file
            recalibrated_platepars[ff_name] = prev_platepar

    ### Store all recalibrated platepars as a JSON file ###

    all_pps = {}
    for ff_name in recalibrated_platepars:

        json_str = recalibrated_platepars[ff_name].jsonStr()

        all_pps[ff_name] = json.loads(json_str)

    with open(os.path.join(dir_path, config.platepars_recalibrated_name), 'w') as f:

        # Convert all platepars to a JSON file
        out_str = json.dumps(all_pps, default=lambda o: o.__dict__, indent=4, sort_keys=True)

        f.write(out_str)

    ### ###

    # If no platepars were recalibrated, use the single platepar recalibration procedure
    if len(recalibrated_platepars) == 0:

        print('No FF images were used for recalibration, using the single platepar calibration function...')

        # Use the initial platepar for calibration
        applyAstrometryFTPdetectinfo(dir_path, os.path.basename(ftpdetectinfo_path), None, \
            platepar=platepar)

        return recalibrated_platepars

    ### Plot difference from reference platepar in angular distance from (0, 0) vs rotation ###

    ang_dists = []
    rot_angles = []
    hour_list = []

    first_jd = np.min([FFfile.filenameToDatetime(ff_name) for ff_name in recalibrated_platepars])

    for ff_name in recalibrated_platepars:

        pp_temp = recalibrated_platepars[ff_name]

        # If the fitting failed, skip the platepar
        if pp_temp is None:
            continue

        # Compute the angular separation from the reference platepar
        ang_dist = np.degrees(angularSeparation(np.radians(platepar.RA_d), np.radians(platepar.dec_d), \
            np.radians(pp_temp.RA_d), np.radians(pp_temp.dec_d)))
        ang_dists.append(ang_dist*60)

        rot_angles.append((platepar.pos_angle_ref - pp_temp.pos_angle_ref)*60)

        # Compute the hour of the FF used for recalibration
        hour_list.append((FFfile.filenameToDatetime(ff_name) - first_jd).total_seconds()/3600)

    plt.figure()

    plt.scatter(0, 0, marker='o', edgecolor='k', label='Reference platepar', s=100, c='none', zorder=3)
    plt.scatter(ang_dists, rot_angles, c=hour_list, zorder=3)
    plt.colorbar(label='Hours from first FF file')

    plt.xlabel("Angular distance from reference (arcmin)")
    plt.ylabel("Rotation from reference (arcmin)")

    plt.grid()
    plt.legend()

    plt.tight_layout()

    # Generate the name for the plot
    calib_plot_name = os.path.basename(ftpdetectinfo_path).replace('FTPdetectinfo_', '').replace('.txt', '') \
        + '_calibration_variation.png'

    plt.savefig(os.path.join(dir_path, calib_plot_name), dpi=150)

    # plt.show()

    plt.clf()
    plt.close()

    ### ###

    ### Apply platepars to FTPdetectinfo ###

    meteor_output_list = []
    for meteor_entry in meteor_list:

        ff_name, meteor_No, rho, phi, meteor_meas = meteor_entry

        # Get the platepar that will be applied to this FF file
        if ff_name in recalibrated_platepars:
            working_platepar = recalibrated_platepars[ff_name]

        else:
            print('Using default platepar for:', ff_name)
            working_platepar = platepar

        # Apply the recalibrated platepar to meteor centroids
        meteor_picks = applyPlateparToCentroids(ff_name, fps, meteor_meas, working_platepar, \
            add_calstatus=True)

        meteor_output_list.append([ff_name, meteor_No, rho, phi, meteor_picks])

    # Calibration string to be written to the FTPdetectinfo file
    calib_str = 'Recalibrated with RMS on: ' + str(datetime.datetime.utcnow()) + ' UTC'

    # If no meteors were detected, set dummy parameters
    if len(meteor_list) == 0:
        cam_code = ''
        fps = 0

    # Back up the old FTPdetectinfo file
    shutil.copy(ftpdetectinfo_path, ftpdetectinfo_path.strip('.txt') \
        + '_backup_{:s}.txt'.format(datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S.%f')))

    # Save the updated FTPdetectinfo
    FTPdetectinfo.writeFTPdetectinfo(meteor_output_list, dir_path, os.path.basename(ftpdetectinfo_path), \
        dir_path, cam_code, fps, calibration=calib_str, celestial_coords_given=True)

    ### ###

    return recalibrated_platepars
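
# An illustrative call sketch for recalibrateIndividualFFsAndApplyAstrometry() above (not part of
# the module API). The directory and file names are placeholders, calstars_list is assumed to be
# read with CALSTARS.readCALSTARS (as done in runCapture), and 'config' and 'platepar' are assumed
# to be loaded by the caller.
def _exampleRecalibrate(config, platepar):

    dir_path = '/path/to/night_dir'
    ftpdetectinfo_name = 'FTPdetectinfo_XX0001_20190101_221234_567890.txt'
    calstars_name = 'CALSTARS_XX0001_20190101_221234_567890.txt'

    # Read the detected stars for every FF file
    calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_name)

    # Recalibrate every FF file with detections and rewrite the FTPdetectinfo with the results
    recalibrated_platepars = recalibrateIndividualFFsAndApplyAstrometry(dir_path, \
        os.path.join(dir_path, ftpdetectinfo_name), calstars_list, config, platepar)

    return recalibrated_platepars
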