Example #1
    def __init__(self, ip, port):
        """
        Args:
            ip:   IP address to accept katcp control connections on.
            port: Port number to serve on.
        """
        EDDPipeline.__init__(
            self, ip, port,
            dict(input_data_streams=[],
                 id="fits_interface",
                 type="fits_interface",
                 drop_nans=True,
                 fits_writer_ip="0.0.0.0",
                 fits_writer_port=5002))
        self._fw_connection_manager = None
        self._capture_thread = None
        self._shutdown = False
        self.mc_interface = []

        self.__periodic_callback = PeriodicCallback(
            self.periodic_sensor_update, 1000)
        self.__periodic_callback.start()

        self.__bandpass_callback = PeriodicCallback(self.bandpassplotter,
                                                    10000)
        self.__bandpass_callback.start()
        self.__plotting = False
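Both callbacks above use tornado's PeriodicCallback, which takes a callback and an interval in milliseconds and only fires while an IOLoop is running. A minimal standalone sketch of that mechanism (the callback body is illustrative, not taken from the pipeline):

from tornado.ioloop import IOLoop, PeriodicCallback

def periodic_sensor_update():
    # stand-in for the pipeline's sensor refresh
    print("updating sensors")

callback = PeriodicCallback(periodic_sensor_update, 1000)  # interval in ms
callback.start()
IOLoop.current().start()  # callbacks fire only while the IOLoop runs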
Example #2
    def test_set(self):
        pipeline = EDDPipeline("localhost", 1234, dict(foo=''))
        pipeline.set({"foo": "bar"})
        self.assertEqual(pipeline._config['foo'], 'bar')

        with self.assertRaises(FailReply) as cm:
            yield pipeline.set({"bar": "foo"})
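EDDPipeline.set is a tornado coroutine here, so a test like the one above is normally driven by tornado's test helpers. A hedged sketch of a complete version (the import path for EDDPipeline and FailReply is an assumption; adjust it to the actual package layout):

from tornado.testing import AsyncTestCase, gen_test
# from mpikat.effelsberg.edd.EDDPipeline import EDDPipeline, FailReply  # assumed import path

class TestEDDPipelineSet(AsyncTestCase):
    @gen_test
    def test_set(self):
        pipeline = EDDPipeline("localhost", 1234, dict(foo=''))
        yield pipeline.set({"foo": "bar"})        # known key is accepted
        self.assertEqual(pipeline._config['foo'], 'bar')
        with self.assertRaises(FailReply):
            yield pipeline.set({"bar": "foo"})    # unknown key is rejected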
Example #3
    def __init__(self, ip, port, redis_ip, redis_port,
                 edd_ansible_git_repository_folder, inventory):
        """
        Args:

          ip:           The IP address on which the server should listen
          port:         The port that the server should bind to
          redis_ip:     IP for the connection to the EDD data store
          redis_port:   Port for the connection to the EDD data store
          edd_ansible_git_repository_folder:
                        Directory of a (checked-out) edd_ansible git repository
                        to be used for provisioning
          inventory:    Inventory to use for ansible
        """
        EDDPipeline.__init__(
            self, ip, port, {
                "data_store": dict(ip=redis_ip, port=redis_port),
                "id": "There can be only one."
            })
        self.state = "unprovisioned"

        self.__controller = {}
        self.__eddDataStore = EDDDataStore.EDDDataStore(redis_ip, redis_port)
        self.__edd_ansible_git_repository_folder = edd_ansible_git_repository_folder
        self.__inventory = inventory
        if not os.path.isdir(self.__edd_ansible_git_repository_folder):
            log.warning("{} is not a readable directory".format(
                self.__edd_ansible_git_repository_folder))

        self.__provisioned = None
        self.__controller = {}

        self.__periodicPipelineSensorUpdate = tornado.ioloop.PeriodicCallback(
            self.updateProductStatusSummary, 2500)
        self.__periodicPipelineSensorUpdate.start()
Example #4
 def setup_sensors(self):
     """
     @brief Setup monitoring sensors
     """
     EDDPipeline.setup_sensors(self)
     self._fpga_clock = Sensor.float("fpga-clock",
                                     description="FPGA Clock estimate",
                                     initial_status=Sensor.UNKNOWN)
     self.add_sensor(self._fpga_clock)
Example #5
 def __init__(self, ip, port, device_ip, device_port=7147):
     """@brief initialize the pipeline.
         @param device_ip is the control IP of the board
     """
     EDDPipeline.__init__(self, ip, port, DEFAULT_CONFIG)
     log.info('Connecting to skarab @ {}:{}'.format(device_ip, device_port))
     self._client = SkarabChannelizerClient(device_ip, device_port)
     self.__periodic_callback = PeriodicCallback(self._check_fpga_sensors,
                                                 1000)
     self.__periodic_callback.start()
Example #6
    def __init__(self, ip, port, device_ip, device_port=7147):
        """@brief initialize the pipeline.
           @param device_ip is the control IP of the board
        """
        EDDPipeline.__init__(self, ip, port, _DEFAULT_CONFIG)
        log.info('Connecting to packetizer @ {}:{}'.format(device_ip, device_port))
        self._client = DigitiserPacketiserClient(device_ip, device_port)

        # We do not know the initial state of the packetizer before we take
        # control, thus we will configure on the first try
        self.__previous_config = None
        self.__plotting = False 
Example #7
    def __init__(self, ip, port):
        """@brief initialize the pipeline."""
        EDDPipeline.__init__(self, ip, port, DEFAULT_CONFIG)
        self.mkrec_cmd = []
        self._dada_buffers = ["dada", "dadc"]
        self._dada_buffers_monitor = []
        self._data_processing_proc = None
        self._mkrecv_ingest_proc = None
        self._archive_directory_monitor = None

        # Pick the first available NUMA node. Unavailable nodes can be disabled
        # via the EDD_ALLOWED_NUMA_NODES environment variable.
        self.numa_number = numa.getInfo().keys()[0]
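The filtering hinted at in the comment is not shown in the snippet. A hypothetical helper illustrating the idea (function name and exact semantics are assumptions; only numa.getInfo() is taken from the code above):

import os

def allowed_numa_nodes(numa_info):
    # Keep only nodes listed in EDD_ALLOWED_NUMA_NODES (comma separated);
    # if the variable is unset, all detected nodes are allowed.
    allowed = os.environ.get("EDD_ALLOWED_NUMA_NODES")
    nodes = list(numa_info.keys())
    if not allowed:
        return nodes
    wanted = set(allowed.split(","))
    return [node for node in nodes if str(node) in wanted]

# self.numa_number = allowed_numa_nodes(numa.getInfo())[0]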
Example #8
    def setup_sensors(self):
        """
        Setup monitoring sensors
        """
        EDDPipeline.setup_sensors(self)

        self._fw_connection_status = Sensor.discrete(
            "fits-writer-connection-status",
            description="Status of the fits writer conenction",
            params=["Unmanaged", "Connected", "Unconnected"],
            default="Unmanaged",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._fw_connection_status)

        self._fw_packages_sent = Sensor.integer(
            "fw-sent-packages",
            description=
            "Number of packages sent to fits writer in this measurement",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._fw_packages_sent)

        self._fw_packages_dropped = Sensor.integer(
            "fw-dropped-packages",
            description=
            "Number of packages dropped by fits writer in this measurement",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._fw_packages_dropped)

        self._incomplete_heaps = Sensor.integer(
            "incomplete-heaps",
            description="Incomplete heaps received.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._incomplete_heaps)

        self._complete_heaps = Sensor.integer(
            "complete-heaps",
            description="Complete heaps received.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._complete_heaps)

        self._invalid_packages = Sensor.integer(
            "invalid-packages",
            description="Number of invalid packages dropped.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._invalid_packages)

        self._bandpass = Sensor.string(
            "bandpass",
            description="band-pass data (base64 encoded)",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._bandpass)
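The sensors registered above are filled elsewhere, typically from the periodic sensor update, using katcp's Sensor.set_value. A minimal method sketch (the counter attribute names are assumptions made for illustration):

def periodic_sensor_update(self):
    # Push the current counters into the sensors created in setup_sensors().
    self._fw_connection_status.set_value("Connected")
    self._fw_packages_sent.set_value(self._n_sent)
    self._fw_packages_dropped.set_value(self._n_dropped)
    self._incomplete_heaps.set_value(self._n_incomplete_heaps)
    self._complete_heaps.set_value(self._n_complete_heaps)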
Example #9
 def set(self, config_json):
     cfg = yield self._cfgjson2dict(config_json)
     if 'output_data_streams' in cfg:
         log.debug("Stripping outputs from cfg before check")
         # Do not check output data streams, as the only relevant thing here
         # is that they are consecutive
         outputs = cfg.pop('output_data_streams')
         log.debug("Pipeline set")
         yield EDDPipeline.set(self, cfg)
         log.debug("Re-adding outputs")
         self._config['output_data_streams'] = outputs
         self._configUpdated()
     else:
         yield EDDPipeline.set(self, cfg)
Example #10
    def setup_sensors(self):
        """
        @brief Setup monitoring sensors
        """
        EDDPipeline.setup_sensors(self)

        self._bandpass = Sensor.string(
            "bandpass_PNG",
            description="band-pass data (base64 encoded)",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._bandpass)

        self._level = Sensor.string(
            "level_PNG",
            description="ADC Level (base64 encoded)",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._level)
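The *_PNG sensors hold base64-encoded PNG images as strings. A minimal sketch of producing such a value with matplotlib (illustrative only, not the pipeline's actual plotting code):

import base64
import io

import matplotlib
matplotlib.use("Agg")  # render without a display
import matplotlib.pyplot as plt

def figure_to_base64(values):
    fig, ax = plt.subplots()
    ax.plot(values)
    buf = io.BytesIO()
    fig.savefig(buf, format="png")
    plt.close(fig)
    return base64.b64encode(buf.getvalue())

# self._bandpass.set_value(figure_to_base64(spectrum))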
Example #11
    def setup_sensors(self):
        """
        Setup monitoring sensors
        """
        EDDPipeline.setup_sensors(self)

        self._integration_time_status = Sensor.float(
            "integration-time",
            description="Integration time [s]",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._integration_time_status)

        self._output_rate_status = Sensor.float(
            "output-rate",
            description="Output data rate [Gbyte/s]",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._output_rate_status)

        self._mkrecv_sensors = MkrecvSensors("")

        for s in self._mkrecv_sensors.sensors.values():
            self.add_sensor(s)

        self._input_buffer_fill_level = Sensor.float(
            "input-buffer-fill-level",
            description="Fill level of the input buffer",
            params=[0, 1])
        self.add_sensor(self._input_buffer_fill_level)

        self._input_buffer_total_write = Sensor.float(
            "input-buffer-total-write",
            description="Total write into input buffer ",
            params=[0, 1])
        self.add_sensor(self._input_buffer_total_write)

        self._output_buffer_fill_level = Sensor.float(
            "output-buffer-fill-level",
            description="Fill level of the output buffer")
        self.add_sensor(self._output_buffer_fill_level)
        self._output_buffer_total_read = Sensor.float(
            "output-buffer-total-read",
            description="Total read from output buffer")
        self.add_sensor(self._output_buffer_total_read)
Example #12
    def setup_sensors(self):
        """
        Setup the monitoring sensors
        """
        EDDPipeline.setup_sensors(self)

        self._integration_time_status = Sensor.float(
            "integration-time",
            description="Integration time [s]",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._integration_time_status)

        self._output_rate_status = Sensor.float(
            "output-rate",
            description="Output data rate [Gbyte/s]",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._output_rate_status)

        self._polarization_sensors = {}
Example #13
    def setup_sensors(self):
        """
        @brief Setup monitoring sensors
        """
        EDDPipeline.setup_sensors(self)

        self._integration_time_status = Sensor.float(
            "block-length-time",
            description="Length of a processing block [s]",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._integration_time_status)

        self._output_rate_status = Sensor.float(
            "output-rate",
            description="Output data rate [Gbyte/s]",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._output_rate_status)

        self._polarization_sensors = {}
Example #14
    def setup_sensors(self):
        """
        Setup monitoring sensors
        """
        EDDPipeline.setup_sensors(self)

        self._spectra_written = Sensor.integer(
            "written-packages",
            description="Number of spectra written to file.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._spectra_written)

        self._incomplete_heaps = Sensor.integer(
            "incomplete-heaps",
            description="Incomplete heaps received.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._incomplete_heaps)

        self._complete_heaps = Sensor.integer(
            "complete-heaps",
            description="Complete heaps received.",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._complete_heaps)

        self._current_file = Sensor.string("current-file",
                                           description="Current filename.",
                                           initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._current_file)

        self._current_file_size = Sensor.float("current-file-size",
                                               description="Current filesize.",
                                               initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._current_file_size)

        self._bandpass = Sensor.string(
            "bandpass",
            description="band-pass data (base64 encoded)",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._bandpass)
Example #15
    def setup_sensors(self):
        """
        Setup katcp monitoring sensors.
        """
        EDDPipeline.setup_sensors(self)

        self._configuration_graph = Sensor.string(
            "configuration_graph",
            description="Graph of configuration",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._configuration_graph)
        self._provision_sensor = Sensor.string(
            "provision",
            description="Current provision configuration",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._provision_sensor)

        self._productStatusSummarySensor = Sensor.string(
            "product-status-summary",
            description="Status of all controlled products",
            default="{}",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._productStatusSummarySensor)
Example #16
 def __init__(self, ip, port):
     EDDPipeline.__init__(self, ip, port, _DEFAULT_CONFIG)
     self.__numa_node_pool = []
     self.mkrec_cmd = []
     self._dada_buffers = []
Example #17
    def __init__(self, ip, port):
        """
        Args:
            ip:   IP address to accept katcp control connections on.
            port: Port number to serve on.
        """
        EDDPipeline.__init__(
            self,
            ip,
            port,
            dict(
                output_directory="/mnt",
                use_date_based_subfolders=True,
                input_data_streams=[{
                    "source": "gated_spectrometer_0:polarization_0_0",
                    "hdf5_group_prefix": "P",
                    "format": "GatedSpectrometer:1",
                    "ip": "225.0.1.183",
                    "port": "7152"
                }, {
                    "source": "gated_spectrometer_0:polarization_0_1",
                    "hdf5_group_prefix": "P",
                    "format": "GatedSpectrometer:1",
                    "ip": "225.0.1.182",
                    "port": "7152"
                }, {
                    "source": "gated_spectrometer_1:polarization_1_0",
                    "hdf5_group_prefix": "P",
                    "format": "GatedSpectrometer:1",
                    "ip": "225.0.1.185",
                    "port": "7152"
                }, {
                    "source": "gated_spectrometer_1:polarization_1_1",
                    "hdf5_group_prefix": "P",
                    "format": "GatedSpectrometer:1",
                    "ip": "225.0.1.184",
                    "port": "7152"
                }],
                plot={
                    "P0_ND_0": 121,
                    "P0_ND_1": 121,
                    "P1_ND_0": 122,
                    "P1_ND_1": 122
                },  # Dictionary of prefixes to plot, with values indicating the subplot to use, e.g. "plot": {"P0_ND_0": 0, "P0_ND_1": 0, "P1_ND_0": 1, "P1_ND_1": 1}
                nplot=10.,  # update plot every 10 s
                default_hdf5_group_prefix="S",
                id="hdf5_writer",
                type="hdf5_writer"))
        self.mc_interface = []

        self.__periodic_callback = PeriodicCallback(
            self.periodic_sensor_update, 1000)
        self.__periodic_callback.start()

        self._output_file = None
        self.__measuring = False
        self.__plotting = True

        self.__data_snapshot = {}

        self._capture_threads = []
        self.periodic_plot()
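The plot mapping in the default config pairs HDF5 group prefixes with matplotlib subplot codes (121/122), so both noise-diode states of one polarization share a panel. A small sketch of that interpretation (illustrative only, not the writer's plotting routine):

import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt

plot_map = {"P0_ND_0": 121, "P0_ND_1": 121, "P1_ND_0": 122, "P1_ND_1": 122}

fig = plt.figure()
axes = {}
for prefix, code in sorted(plot_map.items()):
    # create each subplot once; prefixes sharing a code share the panel
    if code not in axes:
        axes[code] = fig.add_subplot(code)
    axes[code].set_title("subplot {}".format(code))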
Example #18
    def setup_sensors(self):
        """
        @brief Setup monitoring sensors
        """
        EDDPipeline.setup_sensors(self)
        self._tscrunch = Sensor.string("tscrunch_PNG",
                                       description="tscrunch png",
                                       default=BLANK_IMAGE,
                                       initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._tscrunch)

        self._fscrunch = Sensor.string("fscrunch_PNG",
                                       description="fscrunch png",
                                       default=BLANK_IMAGE,
                                       initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._fscrunch)

        self._profile = Sensor.string("profile_PNG",
                                      description="pulse profile png",
                                      default=BLANK_IMAGE,
                                      initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._profile)

        self._central_freq = Sensor.string("_central_freq",
                                           description="_central_freq",
                                           default="N/A",
                                           initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._central_freq)

        self._source_name_sensor = Sensor.string("target_name",
                                                 description="target name",
                                                 default="N/A",
                                                 initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._source_name_sensor)

        self._nchannels = Sensor.string("_nchannels",
                                        description="_nchannels",
                                        default="N/A",
                                        initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._nchannels)

        self._nbins = Sensor.string("_nbins",
                                    description="_nbins",
                                    default="N/A",
                                    initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._nbins)

        self._time_processed = Sensor.string("_time_processed",
                                             description="_time_processed",
                                             default=0,
                                             initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._time_processed)

        self._dm_sensor = Sensor.string("_source_dm",
                                        description="_source_dm",
                                        default=0,
                                        initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._dm_sensor)

        self._par_dict_sensor = Sensor.string("_par_dict_sensor",
                                              description="_par_dict_sensor",
                                              default="N/A",
                                              initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._par_dict_sensor)
        self._directory_size_sensor = Sensor.string(
            "_directory_size_sensor",
            description="_directory_size_sensor",
            default=0,
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._directory_size_sensor)
        self._input_buffer_fill_level = Sensor.float(
            "input-buffer-fill-level",
            description="Fill level of the input buffer",
            params=[0, 1])
        self.add_sensor(self._input_buffer_fill_level)

        self._input_buffer_total_write = Sensor.float(
            "input-buffer-total-write",
            description="Total write into input buffer ",
            params=[0, 1])
        self.add_sensor(self._input_buffer_total_write)

        self._output_buffer_fill_level = Sensor.float(
            "output-buffer-fill-level",
            description="Fill level of the output buffer")
        self.add_sensor(self._output_buffer_fill_level)
        self._output_buffer_total_read = Sensor.float(
            "output-buffer-total-read",
            description="Total read from output buffer")
        self.add_sensor(self._output_buffer_total_read)

        self._polarization_sensors = {}
Example #19
 def __init__(self, ip, port, scpi_ip, scpi_port):
     """@brief initialize the pipeline."""
     self._dada_buffers = []
     EDDPipeline.__init__(self, ip, port, scpi_ip, scpi_port)
Example #20
 def __init__(self, ip, port):
     """initialize the pipeline."""
     EDDPipeline.__init__(self, ip, port, _DEFAULT_CONFIG)
     self.mkrec_cmd = []
     self._dada_buffers = []
     self.__dada_key = "dada"  # key of input buffer; output key is the inverse
Example #21
    def configure(self, config_json):
        """
        Configure the EDD backend

        Args:
            config_json:    A JSON dictionary object containing configuration information

        """
        log.info("Configuring EDD backend for processing")

        #log.info("Resetting data streams")
        # TODO: Interface? Decide if this is always done
        #self.__eddDataStore._dataStreams.flushdb()
        log.debug("Received configuration string: '{}'".format(config_json))

        try:
            cfg = json.loads(config_json)
        except ValueError:
            log.error("Error parsing json")
            raise FailReply(
                "Cannot handle config string {} - Not valid json!".format(
                    config_json))

        if not self.__provisioned:
            log.debug("Not provisioned. Using full config.")
            # Do not use set here, as there might not be a basic config from
            # provisioning
            cfg = self.__sanitizeConfig(cfg)
            self._config = cfg
        else:
            yield EDDPipeline.set(self, cfg)

        yield self._installController(self._config['products'])

        cfs = json.dumps(self._config, indent=4)
        log.debug("Starting configuration:\n" + cfs)

        # Data streams are only filled in on the final configure, as they may
        # require data from the configure command of previous products. For
        # example, the packetizer data stream has a sync time that is
        # propagated to other components. The components are thus configured
        # following the dependency tree, which is a directed acyclic graph (DAG).
        log.debug("Build DAG from config")
        dag = nx.DiGraph()
        for product, product_config in self._config['products'].items():
            log.debug("Adding node: {}".format(product))
            dag.add_node(product)
            if "input_data_streams" in product_config:
                for stream in value_list(product_config["input_data_streams"]):
                    if not stream["source"]:
                        log.warning(
                            "Ignoring stream without source for DAG from {}".
                            format(product))
                        continue
                    source_product = stream["source"].split(":")[0]
                    if source_product not in self._config['products']:
                        raise FailReply(
                            "{} requires data stream of unknown product {}".
                            format(product, stream["source"]))
                    log.debug("Connecting: {} -> {}".format(
                        source_product, product))
                    dag.add_edge(source_product, product)

        log.debug("Checking for loops in graph")
        try:
            cycle = nx.find_cycle(dag)
            raise FailReply("Cycle detected in dependency graph: {}".format(cycle))
        except nx.NetworkXNoCycle:
            log.debug("No loop on graph found")
            pass
        graph = "\n".join(
            ["  {} --> {}".format(k[0], k[1]) for k in dag.edges()])
        log.info("Dependency graph of products:\n{}".format(graph))
        self._configuration_graph.set_value(graph)

        configure_results = {}
        configure_futures = []

        @coroutine
        def __process_node(node):
            """
            Wrapper to parallelize configuration of nodes. Any Node will wait for its predecessors to be done.
            """
            #Wait for all predecessors to be finished
            log.debug("DAG Processing {}: Waiting for {} predecessors".format(
                node, len(list(dag.predecessors(node)))))
            for pre in dag.predecessors(node):
                log.debug('DAG Processing {}: waiting for {}'.format(
                    node, pre))
                while pre not in configure_results:
                    # python3 asyncio coroutines would not run until awaited,
                    # so we could build the graph up front and then execute it
                    # without waiting
                    yield tornado.gen.sleep(0.5)
                log.debug('DAG Processing {}: Predecessor {} done.'.format(
                    node, pre))
                if not configure_results[pre]:
                    log.error(
                        'DAG Processing {}: fails due to error in predecessor {}'
                        .format(node, pre))
                    configure_results[node] = False
                    raise Return
                log.debug('DAG Processing {}: Predecessor {} was successful.'.
                          format(node, pre))

            log.debug("DAG Processing {}: All predecessors done.".format(node))
            try:
                log.debug(
                    "DAG Processing {}: Checking input data streams for updates."
                    .format(node))
                if "input_data_streams" in self._config['products'][node]:
                    log.debug(
                        'DAG Processing {}: Update input streams'.format(node))
                    for stream in value_list(self._config['products'][node]
                                             ["input_data_streams"]):
                        product_name, stream_name = stream["source"].split(":")
                        stream.update(self._config['products'][product_name]
                                      ["output_data_streams"][stream_name])

                log.debug('DAG Processing {}: Set Final config'.format(node))
                yield self.__controller[node].set(
                    self._config['products'][node])
                log.debug(
                    'DAG Processing {}: Starting configuration'.format(node))
                yield self.__controller[node].configure()
                log.debug(
                    "DAG Processing {}: Getting updated config".format(node))
                cfg = yield self.__controller[node].getConfig()
                log.debug("Got: {}".format(json.dumps(cfg, indent=4)))
                self._config["products"][node] = cfg

            except Exception as E:
                log.error(
                    'DAG Processing: {} Exception caught during configuration:\n {}:{}'
                    .format(node,
                            type(E).__name__, E))
                configure_results[node] = False
            else:
                log.debug(
                    'DAG Processing: {} Successfully finished configuration'.
                    format(node))
                configure_results[node] = True

        log.debug("Creating processing futures")
        configure_futures = [__process_node(node) for node in dag.nodes()]
        yield configure_futures
        self._configUpdated()
        log.debug("Final configuration:\n '{}'".format(
            json.dumps(self._config, indent=2)))
        failed_prcts = [
            k for k in configure_results if not configure_results[k]
        ]
        if failed_prcts:
            raise FailReply("Failed products: {}".format(
                ",".join(failed_prcts)))
        log.info("Updating data streams in database")
        for productname, product in self._config["products"].items():
            log.debug(" - Checking {}".format(productname))
            if "output_data_streams" in product and isinstance(
                    product["output_data_streams"], dict):
                for stream, streamcfg in product["output_data_streams"].items(
                ):
                    key = "{}:{}".format(productname, stream)
                    self.__eddDataStore.addDataStream(key, streamcfg)

        log.info("Successfully configured EDD")
        raise Return("Successfully configured EDD")
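The configure coroutine derives the product dependency graph from the "source" field of each input data stream and rejects cyclic configurations. A toy illustration of that graph handling with networkx (product names here are made up; the real code parallelizes configuration per node instead of using a plain topological order):

import networkx as nx

products = {
    "packetizer": {},
    "gated_spectrometer": {"input_data_streams": [
        {"source": "packetizer:polarization_0"}]},
    "fits_interface": {"input_data_streams": [
        {"source": "gated_spectrometer:spectra"}]},
}

dag = nx.DiGraph()
for name, cfg in products.items():
    dag.add_node(name)
    for stream in cfg.get("input_data_streams", []):
        # "product:stream" references become edges product -> consumer
        dag.add_edge(stream["source"].split(":")[0], name)

try:
    cycle = nx.find_cycle(dag)
    print("Cycle detected in dependency graph: {}".format(cycle))
except nx.NetworkXNoCycle:
    # no cycle: products can be configured following their dependencies
    print(list(nx.topological_sort(dag)))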
Example #22
    def setup_sensors(self):
        """
        @brief Setup monitoring sensors
        """
        EDDPipeline.setup_sensors(self)

        self._edd_config_sensor = Sensor.string(
            "current-config",
            description="The current configuration for the EDD backend",
            default=json.dumps(DEFAULT_CONFIG, indent=4),
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._edd_config_sensor)

        self._output_rate_status = Sensor.float(
            "output-rate",
            description="Output data rate [Gbyte/s]",
            initial_status=Sensor.UNKNOWN)
        self.add_sensor(self._output_rate_status)

        self._polarization_sensors = {}
        for p in POLARIZATIONS:
            self._polarization_sensors[p] = {}
            self._polarization_sensors[p]["mkrecv_sensors"] = MkrecvSensors(p)
            for s in self._polarization_sensors[p][
                    "mkrecv_sensors"].sensors.itervalues():
                self.add_sensor(s)
            self._polarization_sensors[p][
                "input-buffer-fill-level"] = Sensor.float(
                    "input-buffer-fill-level-{}".format(p),
                    description=
                    "Fill level of the input buffer for polarization{}".format(
                        p),
                    params=[0, 1])
            self.add_sensor(
                self._polarization_sensors[p]["input-buffer-fill-level"])
            self._polarization_sensors[p][
                "input-buffer-total-write"] = Sensor.float(
                    "input-buffer-total-write-{}".format(p),
                    description=
                    "Total write into input buffer for polarization {}".format(
                        p),
                    params=[0, 1])

            self.add_sensor(
                self._polarization_sensors[p]["input-buffer-total-write"])
            self._polarization_sensors[p][
                "output-buffer-fill-level"] = Sensor.float(
                    "output-buffer-fill-level-{}".format(p),
                    description=
                    "Fill level of the output buffer for polarization {}".
                    format(p))
            self._polarization_sensors[p][
                "output-buffer-total-read"] = Sensor.float(
                    "output-buffer-total-read-{}".format(p),
                    description=
                    "Total read from output buffer for polarization {}".format(
                        p))
            self.add_sensor(
                self._polarization_sensors[p]["output-buffer-total-read"])
            self.add_sensor(
                self._polarization_sensors[p]["output-buffer-fill-level"])
Example #23
 def __init__(self, ip, port):
     """@brief initialize the pipeline."""
     EDDPipeline.__init__(self, ip, port, _DEFAULT_CONFIG)
     self._dada_buffers = []
     self.mkrec_cmd = []