def saveDetections(detection_results, ff_dir, config):
    """ Save detections to CALSTARS and FTPdetectinfo files.

    Arguments:
        detection_results: [list] A list of outputs from the detectStarsAndMeteors function.
        ff_dir: [str] Path to the night directory.
        config: [Config obj]

    Return:
        calstars_name: [str] Name of the CALSTARS file.
        ftpdetectinfo_name: [str] Name of the FTPdetectinfo file.
        ff_detected: [list] A list of FF files with detections.
    """

    ### SAVE DETECTIONS TO FILE

    # Init data lists
    star_list = []
    meteor_list = []
    ff_detected = []

    # Remove all 'None' results, which were errors
    detection_results = [res for res in detection_results if res is not None]

    # Sort by FF name
    detection_results = sorted(detection_results, key=lambda x: x[0])

    # Count the number of detected meteors
    meteors_num = 0
    for _, _, meteor_data in detection_results:
        for meteor in meteor_data:
            meteors_num += 1

    log.info('TOTAL: ' + str(meteors_num) + ' detected meteors.')


    # Save the detections to a file
    for ff_name, star_data, meteor_data in detection_results:

        if len(star_data) == 4:
            x2, y2, background, intensity = star_data
        else:
            _, x2, y2, background, intensity, _ = star_data

        # Skip if no stars were found
        if not x2:
            continue

        # Construct the table of the star parameters
        star_data = zip(x2, y2, background, intensity)

        # Add star info to the star list
        star_list.append([ff_name, star_data])

        # Handle the detected meteors
        meteor_No = 1
        for meteor in meteor_data:

            rho, theta, centroids = meteor

            # Append to the results list
            meteor_list.append([ff_name, meteor_No, rho, theta, centroids])
            meteor_No += 1

        # Add the FF file to the archive list if a meteor was detected on it
        if meteor_data:
            ff_detected.append(ff_name)


    # Generate the name for the CALSTARS file
    calstars_name = 'CALSTARS_' + "{:s}".format(str(config.stationID)) + '_' \
        + os.path.basename(ff_dir) + '.txt'

    # Write detected stars to the CALSTARS file
    CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, config.stationID, config.height,
        config.width)

    # Generate FTPdetectinfo file name
    ftpdetectinfo_name = 'FTPdetectinfo_' + os.path.basename(ff_dir) + '.txt'

    # Write FTPdetectinfo file
    FTPdetectinfo.writeFTPdetectinfo(meteor_list, ff_dir, ftpdetectinfo_name, ff_dir,
        config.stationID, config.fps)

    return calstars_name, ftpdetectinfo_name, ff_detected
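

# Illustrative usage sketch (not part of the original module): how saveDetections might be called on
# the raw output of a detection pool. The helper name and its log message are assumptions for
# illustration; only saveDetections, log and the Config object come from the code above.
def _exampleSaveDetectionsUsage(detector, ff_dir, config):
    """ A minimal sketch, assuming 'detector' is a finished QueuedPool running detectStarsAndMeteors. """

    # Raw results may contain None entries for failed FF files; saveDetections filters them out itself
    detection_results = detector.getResults()

    calstars_name, ftpdetectinfo_name, ff_detected = saveDetections(detection_results, ff_dir, config)

    log.info('Saved ' + calstars_name + ' and ' + ftpdetectinfo_name + ' ('
        + str(len(ff_detected)) + ' FF files with meteors)')

    return ff_detected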
def runCapture(config, duration=None, video_file=None, nodetect=False, detect_end=False,
    upload_manager=None):
    """ Run capture and compression for the given time.

    Arguments:
        config: [config object] Configuration read from the .config file.

    Keyword arguments:
        duration: [float] Time in seconds to capture. None by default.
        video_file: [str] Path to the video file, if it was given as the video source. None by default.
        nodetect: [bool] If True, detection will not be performed. False by default.
        detect_end: [bool] If True, detection will be performed at the end of the night, when capture
            finishes. False by default.
        upload_manager: [UploadManager object] A handle to the UploadManager, which handles uploading
            files to the central server. None by default.

    """

    global STOP_CAPTURE


    # Create a directory for captured files
    night_data_dir_name = str(config.stationID) + '_' \
        + datetime.datetime.utcnow().strftime('%Y%m%d_%H%M%S_%f')

    # Full path to the data directory
    night_data_dir = os.path.join(os.path.abspath(config.data_dir), config.captured_dir,
        night_data_dir_name)

    # Make a directory for the night
    mkdirP(night_data_dir)

    log.info('Data directory: ' + night_data_dir)


    # Load the default flat field image if it is available
    flat_struct = None

    if config.use_flat:

        # Check if the flat exists
        if os.path.exists(os.path.join(os.getcwd(), config.flat_file)):
            flat_struct = Image.loadFlat(os.getcwd(), config.flat_file)

            log.info('Loaded flat field image: ' + os.path.join(os.getcwd(), config.flat_file))


    # Get the platepar file
    platepar, platepar_path, platepar_fmt = getPlatepar(config)


    log.info('Initializing frame buffers...')

    ### For some reason, the RPi 3 does not like memory chunks whose size is a multiple of its L2
    ### cache size (512 kB). When such a memory chunk is provided, the compression becomes 10x slower
    ### than usual. We are applying a dirty fix here where we just add an extra image row and column
    ### if such a memory chunk would be created. The compression is performed, and the image is
    ### cropped back to its original dimensions.
    array_pad = 0

    # Check if the image dimensions are divisible by the RPi3 L2 cache size and add padding
    if (256*config.width*config.height)%(512*1024) == 0:
        array_pad = 1


    # Init arrays for parallel compression on 2 cores
    sharedArrayBase = multiprocessing.Array(ctypes.c_uint8,
        256*(config.width + array_pad)*(config.height + array_pad))
    sharedArray = np.ctypeslib.as_array(sharedArrayBase.get_obj())
    sharedArray = sharedArray.reshape(256, (config.height + array_pad), (config.width + array_pad))
    startTime = multiprocessing.Value('d', 0.0)

    sharedArrayBase2 = multiprocessing.Array(ctypes.c_uint8,
        256*(config.width + array_pad)*(config.height + array_pad))
    sharedArray2 = np.ctypeslib.as_array(sharedArrayBase2.get_obj())
    sharedArray2 = sharedArray2.reshape(256, (config.height + array_pad), (config.width + array_pad))
    startTime2 = multiprocessing.Value('d', 0.0)

    log.info('Initializing frame buffers done!')


    # Check if the detection should be performed or not
    if nodetect:
        detector = None

    else:

        if detect_end:
            # Delay detection until the end of the night
            delay_detection = duration

        else:
            # Delay the detection for 2 minutes after capture start
            delay_detection = 120

        # Initialize the detector
        detector = QueuedPool(detectStarsAndMeteors, cores=1, log=log, delay_start=delay_detection)
        detector.startPool()


    # Initialize buffered capture
    bc = BufferedCapture(sharedArray, startTime, sharedArray2, startTime2, config,
        video_file=video_file)

    # Initialize the live image viewer
    live_view = LiveViewer(window_name='Maxpixel')

    # Initialize compression
    compressor = Compressor(night_data_dir, sharedArray, startTime, sharedArray2, startTime2, config,
        detector=detector, live_view=live_view, flat_struct=flat_struct)


    # Start buffered capture
    bc.startCapture()

    # Start the compression
    compressor.start()


    # Capture until Ctrl+C is pressed
    wait(duration)

    # If capture was manually stopped, end capture
    if STOP_CAPTURE:
        log.info('Ending capture...')


    # Stop the capture
    log.debug('Stopping capture...')
    bc.stopCapture()
    log.debug('Capture stopped')

    dropped_frames = bc.dropped_frames
    log.info('Total number of dropped frames: ' + str(dropped_frames))


    # Stop the compressor
    log.debug('Stopping compression...')
    detector, live_view = compressor.stop()
    log.debug('Compression stopped')

    # Stop the live viewer
    log.debug('Stopping live viewer...')
    live_view.stop()
    del live_view
    log.debug('Live view stopped')


    # Init data lists
    star_list = []
    meteor_list = []
    ff_detected = []

    # If detection should be performed
    if not nodetect:

        log.info('Finishing up the detection, ' + str(detector.input_queue.qsize())
            + ' files to process...')


        # Reset the Ctrl+C to KeyboardInterrupt
        resetSIGINT()

        try:

            # If there are some more files to process, process them on more cores
            if detector.input_queue.qsize() > 0:

                # Let the detector use all cores, but leave 1 free
                available_cores = multiprocessing.cpu_count() - 1

                if available_cores > 1:

                    log.info('Running the detection on {:d} cores...'.format(available_cores))

                    # Start the detector
                    detector.updateCoreNumber(cores=available_cores)


            log.info('Waiting for the detection to finish...')

            # Wait for the detector to finish and close it
            detector.closePool()

            log.info('Detection finished!')


        except KeyboardInterrupt:

            log.info('Ctrl + C pressed, exiting...')

            if upload_manager is not None:

                # Stop the upload manager
                if upload_manager.is_alive():
                    log.debug('Closing upload manager...')
                    upload_manager.stop()
                    del upload_manager

            # Terminate the detector
            if detector is not None:
                del detector

            sys.exit()


        # Set the Ctrl+C back to 'soft' program kill
        setSIGINT()


        ### SAVE DETECTIONS TO FILE

        log.info('Collecting results...')

        # Get the detection results from the queue
        detection_results = detector.getResults()

        # Remove all 'None' results, which were errors
        detection_results = [res for res in detection_results if res is not None]


        # Count the number of detected meteors
        meteors_num = 0
        for _, _, meteor_data in detection_results:
            for meteor in meteor_data:
                meteors_num += 1

        log.info('TOTAL: ' + str(meteors_num) + ' detected meteors.')


        # Save the detections to a file
        for ff_name, star_data, meteor_data in detection_results:

            x2, y2, background, intensity = star_data

            # Skip if no stars were found
            if not x2:
                continue

            # Construct the table of the star parameters
            star_data = zip(x2, y2, background, intensity)

            # Add star info to the star list
            star_list.append([ff_name, star_data])

            # Handle the detected meteors
            meteor_No = 1
            for meteor in meteor_data:

                rho, theta, centroids = meteor

                # Append to the results list
                meteor_list.append([ff_name, meteor_No, rho, theta, centroids])
                meteor_No += 1


            # Add the FF file to the archive list if a meteor was detected on it
            if meteor_data:
                ff_detected.append(ff_name)


        # Generate the name for the CALSTARS file
        calstars_name = 'CALSTARS_' + "{:s}".format(str(config.stationID)) + '_' \
            + os.path.basename(night_data_dir) + '.txt'

        # Write detected stars to the CALSTARS file
        CALSTARS.writeCALSTARS(star_list, night_data_dir, calstars_name, config.stationID,
            config.height, config.width)

        # Generate FTPdetectinfo file name
        ftpdetectinfo_name = 'FTPdetectinfo_' + os.path.basename(night_data_dir) + '.txt'

        # Write FTPdetectinfo file
        FTPdetectinfo.writeFTPdetectinfo(meteor_list, night_data_dir, ftpdetectinfo_name,
            night_data_dir, config.stationID, config.fps)


        # Get the platepar file
        platepar, platepar_path, platepar_fmt = getPlatepar(config)


        # Run calibration check and auto astrometry refinement
        if platepar is not None:

            # Read in the CALSTARS file
            calstars_list = CALSTARS.readCALSTARS(night_data_dir, calstars_name)

            # Run astrometry check and refinement
            platepar, fit_status = autoCheckFit(config, platepar, calstars_list)

            # If the fit was successful, apply the astrometry to detected meteors
            if fit_status:

                log.info('Astrometric calibration SUCCESSFUL!')

                # Save the refined platepar to the night directory and as default
                platepar.write(os.path.join(night_data_dir, config.platepar_name), fmt=platepar_fmt)
                platepar.write(platepar_path, fmt=platepar_fmt)

            else:
                log.info('Astrometric calibration FAILED! Using the old platepar for calibration...')


            # Calculate astrometry for meteor detections
            applyAstrometryFTPdetectinfo(night_data_dir, ftpdetectinfo_name, platepar_path)


    log.info('Plotting field sums...')

    # Plot field sums to a graph
    plotFieldsums(night_data_dir, config)

    # Archive all fieldsums to one archive
    archiveFieldsums(night_data_dir)


    # List for any extra files which will be copied to the night archive directory. Full paths
    # have to be given.
    extra_files = []


    log.info('Making a flat...')

    # Make a new flat field image
    flat_img = makeFlat(night_data_dir, config)

    # If making the flat was successful, save it
    if flat_img is not None:

        # Save the flat in the root directory, to keep the operational flat updated
        scipy.misc.imsave(config.flat_file, flat_img)
        flat_path = os.path.join(os.getcwd(), config.flat_file)
        log.info('Flat saved to: ' + flat_path)

        # Copy the flat to the night's directory as well
        extra_files.append(flat_path)

    else:
        log.info('Making flat image FAILED!')


    ### Add extra files to archive

    # Add the platepar to the archive if it exists
    if os.path.exists(platepar_path):
        extra_files.append(platepar_path)

    # Add the config file to the archive too
    extra_files.append(os.path.join(os.getcwd(), '.config'))

    ### ###


    night_archive_dir = os.path.join(os.path.abspath(config.data_dir), config.archived_dir,
        night_data_dir_name)

    log.info('Archiving detections to ' + night_archive_dir)

    # Archive the detections
    archive_name = archiveDetections(night_data_dir, night_archive_dir, ff_detected, config,
        extra_files=extra_files)


    # Put the archive up for upload
    if upload_manager is not None:

        log.info('Adding file to upload list: ' + archive_name)
        upload_manager.addFiles([archive_name])


    # If capture was manually stopped, end the program
    if STOP_CAPTURE:

        log.info('Ending program')

        # Stop the upload manager
        if upload_manager is not None:
            if upload_manager.is_alive():
                upload_manager.stop()
                log.info('Closing upload manager...')

        sys.exit()
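

# Illustrative usage sketch (not part of the original module): one way runCapture could be invoked
# for a fixed-length session. The helper name below is an assumption for illustration; loading the
# config object and setting up logging and signal handlers are done elsewhere in the program.
def _exampleRunCaptureUsage(config, upload_manager=None):
    """ A minimal sketch: capture for one hour, with detection starting 2 minutes after capture. """

    # Capture for 3600 seconds; detection runs in parallel because nodetect and detect_end are False
    runCapture(config, duration=3600, nodetect=False, detect_end=False,
        upload_manager=upload_manager)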
        for meteor in meteor_data:

            rho, theta, centroids = meteor

            # Append to the results list
            meteor_list.append([ff_name, meteor_No, rho, theta, centroids])
            meteor_No += 1


        # Add the FF file to the archive list if a meteor was detected on it
        if meteor_data:
            ff_detected.append(ff_name)


    # Generate the name for the CALSTARS file
    calstars_name = 'CALSTARS_' + "{:s}".format(str(config.stationID)) + '_' \
        + os.path.basename(ff_dir) + '.txt'

    # Write detected stars to the CALSTARS file
    CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, config.stationID, config.height,
        config.width)

    # Generate FTPdetectinfo file name
    ftpdetectinfo_name = 'FTPdetectinfo_' + os.path.basename(ff_dir) + '.txt'

    # Write FTPdetectinfo file
    FTPdetectinfo.writeFTPdetectinfo(meteor_list, ff_dir, ftpdetectinfo_name, ff_dir,
        config.stationID, config.fps)

    print('Total time taken: ', datetime.datetime.utcnow() - time_start)
def saveDetections(detection_results, ff_dir, config):
    """ Save detections to CALSTARS and FTPdetectinfo files.

    Arguments:
        detection_results: [list] A list of outputs from the detectStarsAndMeteors function.
        ff_dir: [str] Path to the night directory.
        config: [Config obj]

    Return:
        calstars_name: [str] Name of the CALSTARS file.
        ftpdetectinfo_name: [str] Name of the FTPdetectinfo file.
        ff_detected: [list] A list of FF files with detections.
    """

    ### SAVE DETECTIONS TO FILE

    # Init data lists
    star_list = []
    meteor_list = []
    ff_detected = []

    # Remove all 'None' results, which were errors
    detection_results = [res for res in detection_results if res is not None]

    # Sort by FF name
    detection_results = sorted(detection_results, key=lambda x: x[0])

    # Count the number of detected meteors
    meteors_num = 0
    for _, _, meteor_data in detection_results:
        for meteor in meteor_data:
            meteors_num += 1

    log.info('TOTAL: ' + str(meteors_num) + ' detected meteors.')


    # Save the detections to a file
    for ff_name, star_data, meteor_data in detection_results:

        if len(star_data) == 4:
            x2, y2, background, intensity = star_data
            fwhm = (np.zeros_like(x2) - 1).tolist()
        else:
            _, x2, y2, background, intensity, fwhm = star_data

        # Skip if no stars were found
        if not x2:
            continue

        # Construct the table of the star parameters
        star_data = zip(x2, y2, background, intensity, fwhm)

        # Add star info to the star list
        star_list.append([ff_name, star_data])

        # Handle the detected meteors
        meteor_No = 1
        for meteor in meteor_data:

            rho, theta, centroids = meteor

            # Append to the results list
            meteor_list.append([ff_name, meteor_No, rho, theta, centroids])
            meteor_No += 1

        # Add the FF file to the archive list if a meteor was detected on it
        if meteor_data:
            ff_detected.append(ff_name)


    dir_name = os.path.basename(os.path.abspath(ff_dir))
    if dir_name.startswith(config.stationID):
        prefix = dir_name
    else:
        prefix = "{:s}_{:s}".format(config.stationID, dir_name)

    # Generate the name for the CALSTARS file
    calstars_name = 'CALSTARS_' + prefix + '.txt'

    # Write detected stars to the CALSTARS file
    CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, config.stationID, config.height,
        config.width)

    # Generate FTPdetectinfo file name
    ftpdetectinfo_name = 'FTPdetectinfo_' + os.path.basename(ff_dir) + '.txt'

    # Write FTPdetectinfo file
    FTPdetectinfo.writeFTPdetectinfo(meteor_list, ff_dir, ftpdetectinfo_name, ff_dir,
        config.stationID, config.fps)

    return calstars_name, ftpdetectinfo_name, ff_detected
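

# Illustrative helper (not part of the original module): the two star_data layouts that
# saveDetections accepts above, normalized into one tuple. A minimal sketch; the helper name is an
# assumption, but the unpacking mirrors the branch inside saveDetections, including the -1 FWHM
# placeholder used when the older 4-element format carries no FWHM data.
def _normalizeStarData(star_data):
    """ Return (x2, y2, background, intensity, fwhm) lists for either star_data format. """

    if len(star_data) == 4:
        x2, y2, background, intensity = star_data

        # Older format without FWHM: mark all FWHM values as unknown with -1
        fwhm = (np.zeros_like(x2) - 1).tolist()

    else:
        # Newer format: leading FF name, trailing FWHM list
        _, x2, y2, background, intensity, fwhm = star_data

    return x2, y2, background, intensity, fwhm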
            # Add star info to the star list
            star_list.append([ff_name, star_data])

            # Print found stars
            print('   ROW    COL  amplitude  intensity')
            for x, y, max_ampl, level in star_data:
                print(' {:06.2f} {:06.2f} {:6d} {:6d}'.format(round(y, 2), round(x, 2),
                    int(max_ampl), int(level)))


            # # Show stars only if there are more than 20 of them
            # if len(x2) < 20:
            #     continue

            # # Load the FF bin file
            # ff = FFfile.read(ff_dir, ff_name)

            # plotStars(ff, x2, y2)


        # Load data about the image
        ff = FFfile.read(ff_dir, ff_name)

        # Generate the name for the CALSTARS file
        calstars_name = 'CALSTARS_' + "{:s}".format(str(config.stationID)) + '_' \
            + os.path.basename(ff_dir) + '.txt'

        # Write detected stars to the CALSTARS file
        CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, ff.camno, ff.nrows, ff.ncols)

        print('Total time taken: ', time.clock() - time_start)
def extractStarsAndSave(config, ff_dir):
    """ Extract stars in the given folder and save the CALSTARS file.

    Arguments:
        config: [config object] Configuration object (loaded from the .config file).
        ff_dir: [str] Path to the directory where the FF files are.

    Return:
        star_list: [list] A list of [ff_name, star_data] entries, where star_data contains a list of
            (column, row, amplitude, intensity, fwhm) values for every star.
    """

    time_start = time.time()

    # Load mask, dark, flat
    mask, dark, flat_struct = loadImageCalibration(ff_dir, config)

    extraction_list = []

    # Go through all files in the directory and add them to the detection list
    for ff_name in sorted(os.listdir(ff_dir)):

        # Check if the given file is a valid FF file
        if not FFfile.validFFName(ff_name):
            continue

        extraction_list.append(ff_name)


    # Run the QueuedPool for detection
    workpool = QueuedPool(extractStars, cores=-1, backup_dir=ff_dir)

    # Add jobs for the pool
    for ff_name in extraction_list:
        print('Adding for extraction:', ff_name)
        workpool.addJob([ff_dir, ff_name, config, None, None, None, None, flat_struct, dark, mask])

    print('Starting pool...')

    # Start the detection
    workpool.startPool()

    print('Waiting for the detection to finish...')

    # Wait for the detector to finish and close it
    workpool.closePool()


    # Get extraction results
    star_list = []
    for result in workpool.getResults():

        ff_name, x2, y2, amplitude, intensity, fwhm_data = result

        # Skip if no stars were found
        if not x2:
            continue

        # Construct the table of the star parameters
        star_data = list(zip(x2, y2, amplitude, intensity, fwhm_data))

        # Add star info to the star list
        star_list.append([ff_name, star_data])


    dir_name = os.path.basename(os.path.abspath(ff_dir))
    if dir_name.startswith(config.stationID):
        prefix = dir_name
    else:
        prefix = "{:s}_{:s}".format(config.stationID, dir_name)

    # Generate the name for the CALSTARS file
    calstars_name = 'CALSTARS_' + prefix + '.txt'

    # Write detected stars to the CALSTARS file
    CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, config.stationID, config.height,
        config.width)

    # Delete the QueuedPool backup files
    workpool.deleteBackupFiles()

    print('Total time taken: {:.2f} s'.format(time.time() - time_start))

    return star_list
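

# Illustrative usage sketch (not part of the original module): running the star extraction on one
# night directory. The helper name below is an assumption for illustration; 'config' is assumed to
# be an already-loaded Config object and 'ff_dir' a directory containing FF files.
def _exampleExtractStarsAndSaveUsage(config, ff_dir):
    """ A minimal sketch: extract stars from all FF files in ff_dir and report the result count. """

    star_list = extractStarsAndSave(config, ff_dir)

    print('CALSTARS written for {:d} FF files with detected stars'.format(len(star_list)))

    return star_list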
        # Load data about the image
        ff = FFfile.read(ff_dir, ff_name)

        # Break when an FF file was successfully loaded
        if ff is not None:
            break


    # Generate the name for the CALSTARS file
    calstars_name = 'CALSTARS_' + "{:s}".format(str(config.stationID)) + '_' \
        + os.path.basename(ff_dir) + '.txt'

    # Write detected stars to the CALSTARS file
    CALSTARS.writeCALSTARS(star_list, ff_dir, calstars_name, ff.camno, ff.nrows, ff.ncols)

    # Delete the QueuedPool backup files
    workpool.deleteBackupFiles()

    print('Total time taken: ', time.clock() - time_start)


    # Show the histogram of PSF stddevs
    if cml_args.showstd:

        print('Median:', np.median(sigma_list))

        # Compute the bin number
        nbins = int(np.ceil(np.sqrt(len(sigma_list))))
        if nbins < 10: