Example 1
    def _load_parset(self, obs_config):
        """
        Load the observation parset

        :param dict obs_config: Observation config
        :return: parset as dict
        """
        if self.host_type == 'master':
            # on the master node the encoded parset is embedded in the config;
            # decode it and convert it to a dict
            return util.parse_parset(util.decode_parset(obs_config['parset']))

        # non-master node: the parset must be read from the master parset file
        master_config_file = os.path.join(
            obs_config['master_dir'], 'parset', 'darc_master.parset')
        try:
            # read the raw master config and convert it to a dict
            with open(master_config_file) as fh:
                master_config = util.parse_parset(fh.read().strip())
            # the observation parset is stored encoded inside the master config
            return util.parse_parset(
                util.decode_parset(master_config['parset']))
        except Exception as err:
            self.logger.warning(
                "Failed to load parset from master config file {}, setting parset to None: {}"
                .format(master_config_file, err))
            return None
Example 2
    def test_parse_parset_types(self):
        """
        Test that type conversion in the parser works

        Writes a parset with int, float, and bool values to a temporary
        file, reads it back, and checks that the parser restores the
        original Python types.
        """

        # generate a parset dict covering int, float, and bool values
        input_parset_dict = {'startpacket': 100000, 'beam': 0, 'ntabs': 12,
                             'nsynbeams': 71, 'duration': 300.05,
                             'history_i': 10.24, 'history_iquv': 15.36,
                             'snrmin': 10.1, 'min_freq': 1249.5,
                             'proctrigger': True, 'enable_iquv': False}

        # write to file in key=value format
        PARSET_FILE = 'test.parset'
        with open(PARSET_FILE, 'w') as f:
            for k, v in input_parset_dict.items():
                f.write("{}={}\n".format(k, v))

        try:
            # Read back as done in processing
            with open(PARSET_FILE) as f:
                parset_str = f.read()

            # Feed to the parser
            output_parset_dict = parse_parset(parset_str)

            self.assertDictEqual(input_parset_dict, output_parset_dict)
        finally:
            # remove the temp parset even if the test fails, so a leftover
            # file cannot affect other tests (original leaked it on failure)
            os.remove(PARSET_FILE)
Example 3
    def test_parse_parset(self):
        """
        Test that the parset parser works for strings

        Writes a string-valued parset to a temporary file, reads it back,
        and checks the parsed dict matches the input.
        """

        # generate a parset dict with string values
        input_parset_dict = {'amber_dir': '/data2/output/20190518/2019-05-18-23:51:13.B1933+16/amber',
                             'network_port_event_iquv': 30001, 'obs_mode': 'survey', 'source': 'B1933+16',
                             'output_dir': '/data2/output/20190518/2019-05-18-23:51:13.B1933+16',
                             'network_port_start_i': '5000'}

        # write to file in key=value format
        PARSET_FILE = 'test.parset'
        with open(PARSET_FILE, 'w') as f:
            for k, v in input_parset_dict.items():
                f.write("{}={}\n".format(k, v))

        try:
            # Read back as done in processing
            with open(PARSET_FILE) as f:
                parset_str = f.read()

            # Feed to the parser
            output_parset_dict = parse_parset(parset_str)

            self.assertDictEqual(input_parset_dict, output_parset_dict)
        finally:
            # remove the temp parset even if the test fails, so a leftover
            # file cannot affect other tests (original leaked it on failure)
            os.remove(PARSET_FILE)
Example 4
    def _load_parset(self, obs_config):
        """
        Load the observation parset

        :param dict obs_config: Observation config
        :return: parset as dict
        """
        try:
            # on the master node the encoded parset is embedded in the
            # input config: decode it and convert it to a dict
            return util.parse_parset(util.decode_parset(obs_config['parset']))
        except KeyError:
            # not in the config: fall back to the master parset file below
            self.logger.info(
                f"{obs_config['datetimesource']}: Observation parset not found in input config, "
                f"looking for master parset")

        master_config_file = os.path.join(obs_config['master_dir'],
                                          'parset', 'darc_master.parset')
        try:
            # read the raw master config and convert it to a dict
            with open(master_config_file) as fh:
                master_config = util.parse_parset(fh.read().strip())
            # the observation parset is stored encoded inside the master config
            return util.parse_parset(
                util.decode_parset(master_config['parset']))
        except Exception as err:
            self.logger.warning(
                "Failed to load parset from master config file {}, "
                "setting parset to None: {}".format(master_config_file, err))
            return None
Example 5
 def amber_command(self):
     """
     Build the AMBER step-1 command as a process ready to be started.

     Merges ``self.header`` with the settings from the AMBER config file
     (path taken from ``self.header['amber_config']``) and formats them
     into the full ``amber`` command line.

     :return: multiprocessing Process that runs the command through
              ``os.system`` when started (not yet started here)
     """
     # load amber config file
     with open(self.header['amber_config']) as f:
         amber_conf = util.parse_parset(f.read())
     # extract step1 settings and add to a full config dict
     # NOTE(review): header values take precedence is NOT the case here —
     # amber_conf entries overwrite header entries of the same name
     fullconfig = self.header.copy()
     for key, value in amber_conf.items():
         # some values are lists, interpret these
         # assumes parse_parset returned this value as a string here,
         # otherwise startswith would raise — TODO confirm
         if value.startswith('['):
             value = ast.literal_eval(value)
         if isinstance(value, list):
             # extract 1st item
             # presumably one entry per processing step; step 1 is used here
             fullconfig[key] = value[0]
         else:
             fullconfig[key] = value
     # add freq to device name
     fullconfig['device_name'] = fullconfig['device_name'].format(
         **self.header)
     # full step-1 command; every {placeholder} must exist in fullconfig,
     # a missing key raises KeyError at format time
     amber_step1 = "taskset -c 3 amber -sync -print -opencl_platform {opencl_platform} " \
                   "-opencl_device {opencl_device} " \
                   "-device_name {device_name} " \
                   "-padding_file {amber_conf_dir}/padding.conf " \
                   "-zapped_channels {amber_conf_dir}/zapped_channels_{freq}.conf " \
                   "-integration_steps {amber_conf_dir}/{integration_file} " \
                   "-subband_dedispersion " \
                   "-dedispersion_stepone_file {amber_conf_dir}/dedispersion_stepone.conf " \
                   "-dedispersion_steptwo_file {amber_conf_dir}/dedispersion_steptwo.conf " \
                   "-integration_file {amber_conf_dir}/integration.conf " \
                   "-snr_file {amber_conf_dir}/snr.conf " \
                   "-dms {num_dm} -dm_first {dm_first} -dm_step {dm_step} -subbands {subbands} " \
                   "-subbanding_dms {subbanding_dms} -subbanding_dm_first {subbanding_dm_first} " \
                   "-subbanding_dm_step {subbanding_dm_step} -snr_sc -nsigma {snr_nsigma} " \
                   "-downsampling_configuration {amber_conf_dir}/downsampling.conf " \
                   "-downsampling_factor {downsamp} -rfim -time_domain_sigma_cut -frequency_domain_sigma_cut " \
                   "-time_domain_sigma_cut_steps {amber_conf_dir}/tdsc_steps.conf" \
                   " -time_domain_sigma_cut_configuration {amber_conf_dir}/tdsc.conf " \
                   "-frequency_domain_sigma_cut_steps {amber_conf_dir}/fdsc_steps.conf " \
                   "-frequency_domain_sigma_cut_configuration {amber_conf_dir}/fdsc.conf " \
                   "-nr_bins {fdsc_nbins} -threshold {snrmin} " \
                   "-output {amber_dir}/CB{beam:02d}_step1 " \
                   "-beams {ntab} -synthesized_beams {nsb} -synthesized_beams_chunk {nsynbeams_chunk} " \
                   "-dada -dada_key {key_i} -batches {nbatch} {extra_flags} " \
                   "-synthesized_beams_file {sb_table}".format(**fullconfig)
     # run the shell command in a separate process; caller starts it
     proc = mp.Process(target=os.system, args=(amber_step1, ))
     return proc
Example 6
    def start_observation(self, obs_config, reload=True):
        """
        Start an observation

        :param dict obs_config: observation config dict
        :param bool reload: reload service settings (default: True)
        """
        # optionally refresh the service configuration first
        if reload:
            self.load_config()

        # make sure no previous observation is still running
        if self.observation_events or self.observation_threads:
            self.logger.info("Old observation found, stopping it first")
            self.stop_observation()

        self.logger.info("Reading AMBER settings")
        # Read number of amber threads from amber config file
        with open(obs_config['amber_config'], 'r') as fh:
            amber_conf = util.parse_parset(fh.read())

        # directory holding the AMBER trigger files
        amber_dir = obs_config['amber_dir']
        # CB number
        beam = obs_config['beam']

        # one AMBER thread per configured opencl device
        num_amber = len(ast.literal_eval(amber_conf['opencl_device']))
        self.logger.info("Expecting {} AMBER threads".format(num_amber))
        # start a trigger-file follower for each AMBER step
        for step in range(1, num_amber + 1):
            trigger_file = os.path.join(
                amber_dir, "CB{:02d}_step{}.trigger".format(beam, step))
            stop_flag = threading.Event()
            self.observation_events.append(stop_flag)
            follower = threading.Thread(target=self._follow_file,
                                        args=[trigger_file, stop_flag],
                                        name="step{}".format(step))
            follower.start()
            self.observation_threads.append(follower)

        self.logger.info("Observation started")
Example 7
    def _read_amber_triggers(self):
        """
        Read AMBER triggers for reprocessing of an observation.
        Based on AMBERListener
        """
        # read AMBER settings to determine how many trigger files to expect
        with open(self.obs_config['amber_config'], 'r') as fh:
            amber_conf = util.parse_parset(fh.read())
        # directory holding the AMBER trigger files
        amber_dir = self.obs_config['amber_dir']
        # CB index and number of AMBER processes (one per opencl device)
        beam = self.obs_config['beam']
        num_amber = len(ast.literal_eval(amber_conf['opencl_device']))

        self.logger.info(
            f"{self.obs_name}reprocessing reading {num_amber} AMBER files")
        for step in range(1, num_amber + 1):
            trigger_file = os.path.join(
                amber_dir, "CB{:02d}_step{}.trigger".format(beam, step))
            # skip (but report) missing trigger files
            if not os.path.isfile(trigger_file):
                self.logger.error(
                    f"{self.obs_name}reprocessing AMBER file does not exist: {trigger_file}"
                )
                continue
            # forward every trigger line to the processor input queue
            with open(trigger_file, 'r') as fh:
                for trigger_line in fh:
                    self.source_queue.put({
                        'command': 'trigger',
                        'trigger': trigger_line.strip()
                    })

        # sleep for twice the processing interval to ensure triggers were picked up
        self.stop_event.wait(2 * self.interval)
        # reprocessing means no stop observation will be sent, do this manually
        self.logger.info(
            f"{self.obs_name}sending manual stop_observation command for reprocessing"
        )
        self.source_queue.put({'command': 'stop_observation'})
Example 8
    def _load_parset(self, config_file):
        """
        Load parset file and convert to observation config

        :param str config_file: Path to parset file
        :return: observation configuration
        """
        self.logger.info("Loading parset {}".format(config_file))
        if not os.path.isfile(config_file):
            self.logger.error("Parset not found: {}".format(config_file))
            # without a parset this observation is not processed
            return {'proctrigger': False}

        # read the raw parset and convert it to a dict
        with open(config_file) as fh:
            return util.parse_parset(fh.read().strip())