Example 1
    def test_insertion_of_history(self):
        fn = '/tmp/{}'.format(uuid.uuid4())
        f = EDDHDFFileWriter(fn)
        self.addCleanup(os.remove, f.filename)
        self.assertTrue('history' in f._file.keys())
        self.assertEqual(f._file['history'][0][1], b'mpikat.HDF5Writer')
        f.close()
Example 2
    def test_auto_filename(self):
        f = EDDHDFFileWriter()
        self.addCleanup(os.remove, f.filename)

        self.assertTrue(f.getFileSize())
        f.close()
        self.assertTrue(os.path.exists(f.filename))
Example 3
    def test_gated_spectrometer_data_insert(self):
        f = EDDHDFFileWriter()
        self.addCleanup(os.remove, f.filename)

        nchannels = 64 * 1024

        data = {}
        for n, d in gated_spectrometer_format(nchannels).items():
            data[n] = np.empty(**d)

        f.newSubscan()
        attr = {'foo': 'bar', 'nu': 3}
        f.addData('mysection', data, attr)
        f.close()

        infile = h5py.File(f.filename, "r")

        self.assertTrue("scan" in infile)
        self.assertTrue("scan/000" in infile)

        dataset = infile["scan/000/mysection"]
        for k in data:
            # NaN != NaN, so this mask drops NaN entries from the comparison
            idx = data[k] == data[k]
            self.assertTrue((data[k] == dataset[k][0])[idx].all())

        self.assertEqual(dataset.attrs['foo'], 'bar')
        self.assertEqual(dataset.attrs['nu'], 3)
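
The `data[k] == data[k]` idiom above exploits the fact that NaN compares unequal to itself, so the comparison yields a mask that is False exactly at NaN positions. A minimal standalone sketch of the same idiom:

    import numpy as np

    a = np.array([1.0, np.nan, 3.0])
    mask = a == a      # NaN != NaN, so mask is [True, False, True]
    print(a[mask])     # -> [1. 3.]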
Example 4
    def measurement_prepare(self, config='{}'):
        try:
            config = json.loads(config)
        except ValueError:
            _log.error("Cannot parse json:\n{}".format(config))
            raise RuntimeError("Cannot parse json.")

        if ("new_file" in config
                and config["new_file"]) or (not self._output_file):
            _log.debug("Creating new file")
            if "file_id" in config:
                file_id = config["file_id"]
            else:
                file_id = None

            if self._config["use_date_based_subfolders"]:
                _log.debug("Using date based subfolders:")

                dirlist = list(os.path.split(self._config["output_directory"]))
                timestamp = datetime.utcnow()
                dirlist.append("{}".format(timestamp.year))
                dirlist.append("{:02}".format(timestamp.month))
                dirlist.append("{:02}".format(timestamp.day))
                path = ""
                for d in dirlist:
                    path = os.path.join(path, d)
                    if not os.path.isdir(path):
                        _log.debug("Creating directory: %s", path)
                        os.mkdir(path)
            else:
                _log.debug("Using flat file hirarchy:")
                path = self._config["output_directory"]

            self._output_file = EDDHDFFileWriter(path=path, file_id_no=file_id)
            self._current_file.set_value(self._output_file.filename)

        if ("override_newsubscan" in config and config["override_newsubscan"]):
            _log.debug("Overriding new subscan creation")
        else:
            _log.debug("Creating new subscan")
            self._output_file.newSubscan()
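
A minimal usage sketch for the method above; `pipeline` is a hypothetical, already-configured writer instance, and the JSON keys shown ("new_file", "file_id", "override_newsubscan") are exactly the ones the method inspects:

    import json

    # 'pipeline' is assumed to be a configured EDDHDF5WriterPipeline instance.
    pipeline.measurement_prepare(json.dumps({
        "new_file": True,             # force creation of a new output file
        "file_id": 42,                # optional explicit file id
        "override_newsubscan": False  # a new subscan is still created
    }))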
Example 5
    def test_creation_of_subscans(self):
        fn = '/tmp/{}'.format(uuid.uuid4())
        f = EDDHDFFileWriter(fn)
        self.addCleanup(os.remove, f.filename)

        f.newSubscan()
        f.newSubscan()
        f.close()
        infile = h5py.File(f.filename, "r")
        self.assertEqual(len(infile['scan'].keys()), 2)
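
As Example 3 shows with "scan/000", each newSubscan() call is expected to add a zero-padded numbered group under 'scan'. A small sketch for inspecting that layout, assuming fn points at the file written above:

    import h5py

    with h5py.File(fn, "r") as infile:
        # One group per newSubscan() call, e.g. ['000', '001']
        print(sorted(infile['scan'].keys()))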
Example 6
class EDDHDF5WriterPipeline(EDDPipeline):
    """
    Write EDD Data Streams to HDF5 Files.

    Configuration
    -------------
        default_hdf5_group_prefix

    Input Data Steams
    -----------------



    """
    def __init__(self, ip, port):
        """
        Args:
            ip:   IP address to accept katcp control connections on.
            port: Port number to serve on.
        """
        EDDPipeline.__init__(
            self,
            ip,
            port,
            dict(
                output_directory="/mnt",
                use_date_based_subfolders=True,
                input_data_streams=[{
                    "source": "gated_spectrometer_0:polarization_0_0",
                    "hdf5_group_prefix": "P",
                    "format": "GatedSpectrometer:1",
                    "ip": "225.0.1.183",
                    "port": "7152"
                }, {
                    "source": "gated_spectrometer_0:polarization_0_1",
                    "hdf5_group_prefix": "P",
                    "format": "GatedSpectrometer:1",
                    "ip": "225.0.1.182",
                    "port": "7152"
                }, {
                    "source": "gated_spectrometer_1:polarization_1_0",
                    "hdf5_group_prefix": "P",
                    "format": "GatedSpectrometer:1",
                    "ip": "225.0.1.185",
                    "port": "7152"
                }, {
                    "source": "gated_spectrometer_1:polarization_1_1",
                    "hdf5_group_prefix": "P",
                    "format": "GatedSpectrometer:1",
                    "ip": "225.0.1.184",
                    "port": "7152"
                }],
                plot={
                    "P0_ND_0": 121,
                    "P0_ND_1": 121,
                    "P1_ND_0": 122,
                    "P1_ND_1": 122
                },  # Prefixes to plot, mapped to the subplot to use, e.g.: "plot": {"P0_ND_0": 0, "P0_ND_1": 0, "P1_ND_0": 1, "P1_ND_1": 1}
                nplot=10.,  # plot update interval in seconds
                default_hdf5_group_prefix="S",
                id="hdf5_writer",
                type="hdf5_writer"))
        self.mc_interface = []

        self.__periodic_callback = PeriodicCallback(
            self.periodic_sensor_update, 1000)
        self.__periodic_callback.start()

        self._output_file = None
        self.__measuring = False
        self.__plotting = True

        self.__data_snapshot = {}

        self._capture_threads = []
        self.periodic_plot()

    def setup_sensors(self):
        """
        Setup monitoring sensors
        """
        EDDPipeline.setup_sensors(self)

        self._spectra_written = Sensor.integer(
            "written-packages",
            description="Number of spectra written to file.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._spectra_written)

        self._incomplete_heaps = Sensor.integer(
            "incomplete-heaps",
            description="Incomplete heaps received.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._incomplete_heaps)

        self._complete_heaps = Sensor.integer(
            "complete-heaps",
            description="Complete heaps received.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._complete_heaps)

        self._current_file = Sensor.string("current-file",
                                           description="Current filename.",
                                           initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._current_file)

        self._current_file_size = Sensor.float("current-file-size",
                                               description="Current filesize.",
                                               initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._current_file_size)

        self._bandpass = Sensor.string(
            "bandpass",
            description="band-pass data (base64 encoded)",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._bandpass)

    @state_change(target="idle",
                  allowed=["ready", "streaming", "deconfiguring"],
                  intermediate="capture_stopping")
    @coroutine
    def capture_stop(self):
        _log.debug("Cleaning up capture threads")
        for t in self._capture_threads:
            t.stop()
        for t in self._capture_threads:
            t.join(10.0)
        self._capture_threads = []
        _log.debug("Capture threads cleaned")
        _log.debug("Stopping subprocess pool")

    @state_change(target="idle", intermediate="deconfiguring", error='panic')
    @coroutine
    def deconfigure(self):
        pass

    @state_change(target="configured",
                  allowed=["idle"],
                  intermediate="configuring")
    @coroutine
    def configure(self, config_json='{}'):
        _log.info("Configuring HDF5 Writer")
        _log.debug("Configuration string: '{}'".format(config_json))

        yield self.set(config_json)

        cfs = json.dumps(self._config, indent=4)
        _log.info("Final configuration:\n" + cfs)

        #ToDo: allow streams with multiple multicast groups and multiple ports
        self.mc_subscriptions = {}

        for stream_description in value_list(
                self._config['input_data_streams']):
            hdf5_group = self._config["default_hdf5_group_prefix"]
            if "hdf5_group_prefix" in stream_description:
                hdf5_group = stream_description["hdf5_group_prefix"]
            if hdf5_group not in self.mc_subscriptions:
                self.mc_subscriptions[hdf5_group] = dict(
                    groups=[], port=stream_description['port'], attributes={})
            self.mc_subscriptions[hdf5_group]['groups'].append(
                stream_description['ip'])
            if self.mc_subscriptions[hdf5_group]['port'] != stream_description[
                    'port']:
                raise RuntimeError(
                    "All input streams of one group have to use the same port!")

            for key in stream_description:
                if key in ["ip", "port"]:
                    continue
                self.mc_subscriptions[hdf5_group]['attributes'][
                    key] = stream_description[key]
        _log.debug("Got {} subscription groups".format(
            len(self.mc_subscriptions)))

    def _package_writer(self, data):
        if self._state == "measuring":
            _log.debug('Writing data to section: {}'.format(data[0]))
            self._output_file.addData(
                data[0],
                data[1],
            )
        else:
            _log.debug("Not measuring, Dropping package")

        if self._config['plot']:
            if data[0] not in self.__data_snapshot:
                self.__data_snapshot[data[0]] = []

            if len(self.__data_snapshot[data[0]]) > 1023:
                _log.warning(
                    "More than 1023 snapshots kept for plotting! Not storing more."
                )
            else:
                _log.debug("Adding data to snapshot: {}".format(data[0]))
                self.__data_snapshot[data[0]].append(copy.deepcopy(data[1]))

    @coroutine
    def periodic_plot(self):
        with ProcessPoolExecutor(max_workers=1) as executor:
            try:
                while self.__plotting:
                    starttime = time.time()
                    p = executor.submit(plot_script, self.__data_snapshot,
                                        self._config['plot'])
                    self.__data_snapshot = {}
                    plt = yield p
                    _log.debug("Setting bandpass sensor with timestamp")
                    self._bandpass.set_value(plt)
                    _log.debug("Received {} bytes".format(len(plt)))
                    _log.debug("Ready for next plot")
                    duration = time.time() - starttime
                    _log.debug("Plot duration: {} s".format(duration))
                    if duration > self._config['nplot']:
                        _log.warning(
                            "Plot duration {} s larger than plot interval {} s!"
                            .format(duration, self._config["nplot"]))
                    else:
                        yield sleep(self._config['nplot'] - duration)
            except Exception as E:
                _log.error("Error in periodic plot. Abandon plotting.")
                _log.exception(E)

    @state_change(target="ready",
                  allowed=["configured"],
                  intermediate="capture_starting")
    @coroutine
    def capture_start(self):
        """
        """
        _log.info("Starting capture")
        nic_name, nic_description = numa.getFastestNic()
        self._capture_interface = nic_description['ip']
        _log.info("Capturing on interface {}, ip: {}, speed: {} Mbit/s".format(
            nic_name, nic_description['ip'], nic_description['speed']))
        affinity = numa.getInfo()[nic_description['node']]['cores']

        self._capture_threads = []
        for hdf5_group_prefix, mcg in self.mc_subscriptions.items():
            spead_handler = GatedSpectrometerSpeadHandler(
                hdf5_group_prefix, mcg['attributes'])
            ct = SpeadCapture(mcg["groups"], mcg["port"],
                              self._capture_interface, spead_handler,
                              self._package_writer, affinity)
            ct.start()
            self._capture_threads.append(ct)

        _log.debug("Done capture starting!")

    @coroutine
    def periodic_sensor_update(self):
        """
        Updates the katcp sensors.

        This is a peiodic update as the connection are managed using threads and not coroutines.
        """
        timestamp = time.time()
        incomplete_heaps = 0
        complete_heaps = 0
        for t in self._capture_threads:
            incomplete_heaps += t._incomplete_heaps
            complete_heaps += t._complete_heaps

        conditional_update(self._incomplete_heaps,
                           incomplete_heaps,
                           timestamp=timestamp)
        conditional_update(self._complete_heaps,
                           complete_heaps,
                           timestamp=timestamp)

        if self._output_file:
            conditional_update(self._current_file_size,
                               self._output_file.getFileSize(),
                               timestamp=timestamp)

    @state_change(
        target="set",
        allowed=["ready", "measurement_starting", "configured", "streaming"],
        intermediate="measurement_preparing")
    def measurement_prepare(self, config='{}'):
        try:
            config = json.loads(config)
        except ValueError:
            _log.error("Cannot parse json:\n{}".format(config))
            raise RuntimeError("Cannot parse json.")

        if ("new_file" in config
                and config["new_file"]) or (not self._output_file):
            _log.debug("Creating new file")
            if "file_id" in config:
                file_id = config["file_id"]
            else:
                file_id = None

            if self._config["use_date_based_subfolders"]:
                _log.debug("Using date based subfolders:")

                dirlist = list(os.path.split(self._config["output_directory"]))
                timestamp = datetime.utcnow()
                dirlist.append("{}".format(timestamp.year))
                dirlist.append("{:02}".format(timestamp.month))
                dirlist.append("{:02}".format(timestamp.day))
                path = ""
                for d in dirlist:
                    path = os.path.join(path, d)
                    if not os.path.isdir(path):
                        _log.debug("Creating directory: %s", path)
                        os.mkdir(path)
            else:
                _log.debug("Using flat file hirarchy:")
                path = self._config["output_directory"]

            self._output_file = EDDHDFFileWriter(path=path, file_id_no=file_id)
            self._current_file.set_value(self._output_file.filename)

        if ("override_newsubscan" in config and config["override_newsubscan"]):
            _log.debug("Overriding new subscan creation")
        else:
            _log.debug("Creating new subscan")
            self._output_file.newSubscan()

    @state_change(target="measuring",
                  allowed=["set", "ready", "measurement_preparing"],
                  waitfor="set",
                  intermediate="measurement_starting")
    @coroutine
    def measurement_start(self):
        _log.info("Starting file output")
        # The state 'measuring' is what is checked. This is enough, as error
        # handling is also done correctly.

    @state_change(target="ready",
                  allowed="measuring",
                  intermediate="measurement_stopping")
    @coroutine
    def measurement_stop(self):
        _log.info("Stopping FITS interface data transfer")
        self._output_file.flush()
        # ToDo: There probably should be an output queue so that time ordering
        # does not become an issue

    @coroutine
    def stop(self):
        """
        Handle server stop. Stop all threads.
        """
        try:
            self.__plotting = False
            for t in self._capture_threads:
                t.stop()
            for t in self._capture_threads:
                t.join(3.0)

        except Exception as E:
            _log.error("Exception during stop! {}".format(E))
        super(EDDHDF5WriterPipeline, self).stop()
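
For the default configuration above, all four input streams share the prefix "P" and port 7152, so configure() collapses them into a single subscription group. The structure below is an illustrative reconstruction of the resulting mc_subscriptions, not captured output; note that duplicate attribute keys are overwritten, so the last stream wins:

    mc_subscriptions = {
        "P": {
            "groups": ["225.0.1.183", "225.0.1.182", "225.0.1.185", "225.0.1.184"],
            "port": "7152",
            "attributes": {
                # duplicate keys are overwritten; the last stream wins
                "source": "gated_spectrometer_1:polarization_1_1",
                "hdf5_group_prefix": "P",
                "format": "GatedSpectrometer:1",
            },
        }
    }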
Example 7
    def test_insertion_of_format_version(self):
        fn = '/tmp/{}'.format(uuid.uuid4())
        f = EDDHDFFileWriter(fn)
        self.addCleanup(os.remove, f.filename)
        self.assertTrue('FORMAT_VERSION' in f._file.attrs.keys())
        f.close()
Example 8
    def test_manual_filename(self):
        fn = '/tmp/{}'.format(uuid.uuid4())
        f = EDDHDFFileWriter(fn)
        self.addCleanup(os.remove, f.filename)
        f.close()
        self.assertTrue(os.path.exists(f.filename))