# Fragment (original newlines lost in extraction): tail of a filesystem-monitoring
# routine followed by the script entry point.
#   1) Builds one record (date, instrument, filetype, count, size) from
#      instrument_results_dict and inserts it into the FilesystemInstrument
#      table via engine.execute(), committing the session.
#   2) For each storage area in `arealist`, builds a (date, area, size, used,
#      available) record from central_storage_dict and inserts it into the
#      CentralStore table, committing after each insert.
#   3) `__main__` guard: derives the module name from this file's name, sets up
#      monitor logging, runs monitor_filesystem(), and records the run via
#      update_monitor_table().
# NOTE(review): the leading statements use loop variables `instrument` and
# `filetype`, so they presumably live inside nested for-loops whose headers fall
# before this chunk — confirm against the full file before re-indenting.
# NOTE(review): os.path.basename(__file__).strip('.py') strips any of the
# characters '.', 'p', 'y' from BOTH ends of the name rather than removing the
# '.py' extension; it works here only by luck. Prefer
# os.path.splitext(os.path.basename(__file__))[0].
new_record = {} new_record['date'] = instrument_results_dict['date'] new_record['instrument'] = instrument new_record['filetype'] = filetype new_record['count'] = instrument_results_dict[instrument][filetype]['count'] new_record['size'] = instrument_results_dict[instrument][filetype]['size'] engine.execute(FilesystemInstrument.__table__.insert(), new_record) session.commit() # Add data to central_storage table arealist = ['logs', 'outputs', 'test', 'preview_images', 'thumbnails', 'all'] for area in arealist: new_record = {} new_record['date'] = central_storage_dict['date'] new_record['area'] = area new_record['size'] = central_storage_dict[area]['size'] new_record['used'] = central_storage_dict[area]['used'] new_record['available'] = central_storage_dict[area]['available'] engine.execute(CentralStore.__table__.insert(), new_record) session.commit() if __name__ == '__main__': # Configure logging module = os.path.basename(__file__).strip('.py') start_time, log_file = initialize_instrument_monitor(module) monitor_filesystem() update_monitor_table(module, start_time, log_file)
# Fragment (original newlines lost in extraction): tail of the Bias monitor's
# run logic followed by the script entry point.
#   1) Opens with the string argument (and stray closing paren) of a
#      logging.info() call whose opening falls before this chunk — it reports
#      that the monitor was skipped for this instrument/aperture — then sets
#      monitor_run = False.
#   2) Builds a query-history record (instrument, aperture, MJD query window
#      start/end, counts of entries and files found, whether the monitor ran,
#      and an entry_date timestamp) and inserts it into the monitor's
#      query-history table, logging the update and overall completion.
#   3) `__main__` guard: derives the module name from this file's name, sets up
#      monitor logging via monitor_utils, runs Bias().run(), and records the
#      run via monitor_utils.update_monitor_table().
# NOTE(review): os.path.basename(__file__).strip('.py') strips any of the
# characters '.', 'p', 'y' from BOTH ends instead of removing the '.py'
# extension; os.path.splitext(os.path.basename(__file__))[0] is the safe form.
# NOTE(review): datetime.datetime.now() records a naive local timestamp —
# confirm whether the query-history table expects UTC.
'\tBias monitor skipped. {} new dark files for {}, {}.' .format(len(new_files), instrument, aperture)) monitor_run = False # Update the query history new_entry = { 'instrument': instrument, 'aperture': aperture, 'start_time_mjd': self.query_start, 'end_time_mjd': self.query_end, 'entries_found': len(new_entries), 'files_found': len(new_files), 'run_monitor': monitor_run, 'entry_date': datetime.datetime.now() } self.query_table.__table__.insert().execute(new_entry) logging.info('\tUpdated the query history table') logging.info('Bias Monitor completed successfully.') if __name__ == '__main__': module = os.path.basename(__file__).strip('.py') start_time, log_file = monitor_utils.initialize_instrument_monitor(module) monitor = Bias() monitor.run() monitor_utils.update_monitor_table(module, start_time, log_file)