def __init__(self, *args, **kwargs):
    """
    """
    # init DARCBase without logger, as we need a non-default logger
    super(ProcessorMasterManager, self).__init__(*args, no_logger=True, **kwargs)
    # initialize queue logger listener
    self.log_queue = mp.Queue()
    self.log_listener = get_queue_logger_listener(self.log_queue, self.log_file)
    self.log_listener.start()
    # create queue logger
    self.logger = get_queue_logger(self.module_name, self.log_queue)

    self.observations = {}
    self.observation_end_times = {}
    self.observation_queues = {}

    self.scavenger = None
    self.status_generator = None

    # reduce logging from status check commands
    logging.getLogger('darc.control').setLevel(logging.ERROR)

    self.logger.info("{} initialized".format(self.log_name))
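# Illustrative sketch, not part of DARC: the queue-logger setup above is assumed to
# follow the standard library's QueueHandler/QueueListener pattern, where worker
# processes push log records onto a multiprocessing queue and a single listener in
# the parent writes them to the log file. The helper names below
# (make_queue_logger_listener, make_queue_logger) are hypothetical stand-ins for
# what get_queue_logger_listener / get_queue_logger are assumed to provide.
import logging
import logging.handlers
import multiprocessing as mp


def make_queue_logger_listener(queue, log_file):
    # single listener that drains the shared queue and writes to the log file;
    # the caller starts it with .start() and stops it with .stop()
    handler = logging.FileHandler(log_file)
    handler.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))
    return logging.handlers.QueueListener(queue, handler)


def make_queue_logger(name, queue):
    # per-process logger that only puts records on the shared queue
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.handlers.QueueHandler(queue))
    return logger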
def __init__(self, obs_config, output_dir, log_queue, input_queue, output_queue, ncluster,
             config_file=CONFIG_FILE, obs_name=''):
    """
    :param dict obs_config: Observation settings
    :param str output_dir: Output directory for data products
    :param Queue log_queue: Queue to use for logging
    :param Queue input_queue: Input queue for triggers
    :param Queue output_queue: Output queue for clusters
    :param mp.Value ncluster: Shared counter for the number of clusters
    :param str config_file: Path to config file
    :param str obs_name: Observation name to use in log messages
    """
    super(Clustering, self).__init__()
    module_name = type(self).__module__.split('.')[-1]
    self.logger = get_queue_logger(module_name, log_queue)
    self.output_dir = output_dir
    self.obs_config = obs_config
    self.input_queue = input_queue
    self.output_queue = output_queue
    self.obs_name = obs_name

    # set system parameters
    dt = TSAMP.to(u.second).value
    chan_width = (BANDWIDTH / float(NCHAN)).to(u.MHz).value
    cent_freq = (self.obs_config['min_freq'] * u.MHz + 0.5 * BANDWIDTH).to(u.GHz).value
    self.sys_params = {'dt': dt, 'delta_nu_MHz': chan_width, 'nu_GHz': cent_freq}

    # load config
    self.config_file = config_file
    self.config = self._load_config()

    # create stop event
    self.stop_event = mp.Event()

    self.input_empty = False
    self.output_file_handle = None

    self.ncluster = ncluster
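# Illustrative sketch, not part of DARC: how the sys_params block above turns
# astropy quantities into the plain floats expected downstream. TSAMP, BANDWIDTH,
# NCHAN and min_freq below are stand-in example values, not DARC's actual constants
# or observation settings.
import astropy.units as u

TSAMP = 81.92e-6 * u.second
BANDWIDTH = 300 * u.MHz
NCHAN = 1536
min_freq = 1220.0  # MHz, as would come from obs_config['min_freq']

dt = TSAMP.to(u.second).value                                      # sampling time in seconds
chan_width = (BANDWIDTH / float(NCHAN)).to(u.MHz).value            # channel width in MHz
cent_freq = (min_freq * u.MHz + 0.5 * BANDWIDTH).to(u.GHz).value   # centre frequency in GHz

sys_params = {'dt': dt, 'delta_nu_MHz': chan_width, 'nu_GHz': cent_freq}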
def __init__(self, log_queue, *args, **kwargs):
    """
    :param Queue log_queue: Queue to use for logging
    """
    # init DARCBase without logger, as we need a non-default logger
    super(Processor, self).__init__(*args, no_logger=True, **kwargs)
    # create queue logger
    self.logger = get_queue_logger(self.module_name, log_queue)
    self.log_queue = log_queue

    self.observation_running = False
    self.threads = {}
    self.amber_triggers = []
    self.hdr_mapping = {}
    self.obs_config = None
    self.output_dir = None
    self.reprocessing = False

    # create queues
    self.clustering_queue = mp.Queue()
    self.extractor_queue = mp.Queue()
    self.classifier_queue = mp.Queue()
    self.all_queues = (self.clustering_queue, self.extractor_queue, self.classifier_queue)

    # lock for accessing AMBER trigger list and obs stats
    self.amber_lock = threading.Lock()
    self.obs_stats_lock = threading.Lock()

    # initialize observation statistics
    self.obs_stats = {'ncand_raw': 0,
                      'ncand_post_clustering': 0,
                      'ncand_post_thresholds': 0,
                      'ncand_post_classifier': 0}

    self.ncluster = mp.Value('i', 0)
    self.ncand_above_threshold = mp.Value('i', 0)
    self.candidates_to_visualize = []
    self.classifier_parent_conn, self.classifier_child_conn = mp.Pipe()

    self.obs_name = ''

    self.logger.info("{} initialized".format(self.log_name))
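# Illustrative sketch, not part of DARC: mp.Value('i', 0) as used above creates a
# shared integer counter that child processes (e.g. Clustering, Extractor) can
# increment while the Processor reads it. The worker function below is hypothetical
# and only shows the standard increment-under-lock pattern.
import multiprocessing as mp


def count_items(counter, nitems):
    # hypothetical worker: bump the shared counter once per processed item
    for _ in range(nitems):
        with counter.get_lock():
            counter.value += 1


if __name__ == '__main__':
    ncluster = mp.Value('i', 0)
    workers = [mp.Process(target=count_items, args=(ncluster, 10)) for _ in range(4)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
    print(ncluster.value)  # 40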
def __init__(self, obs_config, output_dir, log_queue, input_queue, output_queue, ncand_above_threshold,
             config_file=CONFIG_FILE, obs_name=''):
    """
    :param dict obs_config: Observation settings
    :param str output_dir: Output directory for data products
    :param Queue log_queue: Queue to use for logging
    :param Queue input_queue: Input queue for clusters
    :param Queue output_queue: Output queue for classifier
    :param mp.Value ncand_above_threshold: Shared counter for the number of candidates above threshold
    :param str config_file: Path to config file
    :param str obs_name: Observation name to use in log messages
    """
    super(Extractor, self).__init__()
    module_name = type(self).__module__.split('.')[-1]
    self.logger = get_queue_logger(module_name, log_queue)
    self.output_dir = os.path.join(output_dir, 'data')
    self.obs_config = obs_config
    self.input_queue = input_queue
    self.output_queue = output_queue
    self.obs_name = obs_name

    # load config
    self.config_file = config_file
    self.config = self._load_config()

    # create directory for output data
    util.makedirs(self.output_dir)

    # create stop event
    self.stop_event = mp.Event()

    self.input_empty = False
    self.filterbank_reader = None
    self.rfi_mask = np.array([], dtype=int)
    self.data = None
    self.data_dm_time = None

    self.ncand_above_threshold = ncand_above_threshold
def __init__(self, log_queue, *args, **kwargs):
    """
    :param Queue log_queue: Queue to use for logging
    """
    # init DARCBase without logger, as we need a non-default logger
    super(ProcessorMaster, self).__init__(*args, no_logger=True, **kwargs)
    # create queue logger
    self.logger = get_queue_logger(self.module_name, log_queue)

    # read result dir from worker processor config
    self.result_dir = self._get_result_dir()

    self.obs_config = None
    self.warnings_sent = []
    self.status = None
    self.process = None
    self.central_result_dir = None

    self.logger.info("{} initialized".format(self.log_name))
def __init__(self, output_dir, result_dir, log_queue, obs_config, files,
             config_file=CONFIG_FILE, obs_name=''):
    """
    :param str output_dir: Output directory for data products
    :param str result_dir: Central directory to copy output PDF to
    :param Queue log_queue: Queue to use for logging
    :param dict obs_config: Observation settings
    :param list files: HDF5 files to visualize
    :param str config_file: Path to config file
    :param str obs_name: Observation name to use in log messages
    """
    module_name = type(self).__module__.split('.')[-1]
    self.output_dir = output_dir
    self.result_dir = result_dir
    self.logger = get_queue_logger(module_name, log_queue)
    self.obs_config = obs_config
    self.files = np.array(files)
    self.obs_name = obs_name

    # load config
    self.config_file = config_file
    self.config = self._load_config()

    self.logger.info(f"{self.obs_name}Starting visualization")

    # switch the plot backend to pdf
    old_backend = plt.get_backend()
    plt.switch_backend('PDF')
    try:
        self._visualize()
    except Exception as e:
        self.logger.error(f"{self.obs_name}Visualization failed: {type(e)}: {e}")
    # put back the old backend
    plt.switch_backend(old_backend)
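# Illustrative sketch, not part of DARC: the constructor above temporarily switches
# matplotlib to the non-interactive PDF backend while rendering, then restores the
# original backend. The try/finally variant below is an assumption-free way to show
# the same pattern generically; the figure and output path are purely hypothetical.
import matplotlib.pyplot as plt

old_backend = plt.get_backend()
plt.switch_backend('PDF')
try:
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1])
    fig.savefig('example.pdf')
finally:
    # guarantee the original backend comes back even if plotting raises
    plt.switch_backend(old_backend)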
def __init__(self, *args, **kwargs):
    """
    """
    # init DARCBase without logger, as we need a non-default logger
    super(ProcessorManager, self).__init__(*args, no_logger=True, **kwargs)
    # initialize queue logger listener
    self.log_queue = mp.Queue()
    self.log_listener = get_queue_logger_listener(self.log_queue, self.log_file)
    self.log_listener.start()
    # create queue logger
    self.logger = get_queue_logger(self.module_name, self.log_queue)

    self.observations = {}
    self.observation_end_times = {}
    self.observation_queues = {}
    self.current_observation_queue = None

    self.scavenger = None
    self.status_generator = None

    self.logger.info("{} initialized".format(self.log_name))
def __init__(self, log_queue, input_queue, conn, config_file=CONFIG_FILE, obs_name=''):
    """
    :param Queue log_queue: Queue to use for logging
    :param Queue input_queue: Input queue for triggers
    :param Connection conn: Pipe connection to send output to
    :param str config_file: Path to config file
    :param str obs_name: Observation name to use in log messages
    """
    super(Classifier, self).__init__()
    module_name = type(self).__module__.split('.')[-1]
    self.logger = get_queue_logger(module_name, log_queue)
    self.input_queue = input_queue
    self.conn = conn
    self.obs_name = obs_name

    # load config
    self.config_file = config_file
    self.config = self._load_config()

    # create stop event
    self.stop_event = mp.Event()

    self.input_empty = False
    self.model_freqtime = None
    self.model_dmtime = None
    self.data_freqtime = None
    self.data_dmtime = None
    self.nfreq_data = None
    self.ndm_data = None
    self.ntime_data = None
    self.candidates_to_visualize = []
    self.tf = None
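# Illustrative sketch, not part of DARC: the Classifier is handed one end of an
# mp.Pipe (see classifier_parent_conn / classifier_child_conn in the Processor
# constructor above) and sends its results back over it. The classify function and
# the result payload below are hypothetical; only the Pipe pattern is shown.
import multiprocessing as mp


def classify(conn):
    # hypothetical child process: send a result object back to the parent
    conn.send({'candidates': []})
    conn.close()


if __name__ == '__main__':
    parent_conn, child_conn = mp.Pipe()
    p = mp.Process(target=classify, args=(child_conn,))
    p.start()
    result = parent_conn.recv()  # blocks until the child sends
    p.join()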