Example #1
    def test_set(self):
        pipeline = EDDPipeline("localhost", 1234, dict(foo=''))
        pipeline.set({"foo": "bar"})
        self.assertEqual(pipeline._config['foo'], 'bar')

        with self.assertRaises(FailReply) as cm:
            yield pipeline.set({"bar": "foo"})
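
Note: test_set drives Tornado coroutines with yield, so the test method itself must run on an IOLoop, e.g. via tornado.testing. A minimal harness sketch follows; the import paths for EDDPipeline and FailReply are assumptions for illustration, not taken from the example above:

from tornado.testing import AsyncTestCase, gen_test

from katcp import FailReply                              # assumed import path
from mpikat.effelsberg.edd.pipeline import EDDPipeline   # hypothetical path


class TestEDDPipeline(AsyncTestCase):

    @gen_test
    def test_set(self):
        pipeline = EDDPipeline("localhost", 1234, dict(foo=''))
        yield pipeline.set({"foo": "bar"})
        self.assertEqual(pipeline._config['foo'], 'bar')

        # Setting a key absent from the default config must be rejected.
        with self.assertRaises(FailReply):
            yield pipeline.set({"bar": "foo"})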
Example #2
    @coroutine
    def set(self, config_json):
        cfg = yield self._cfgjson2dict(config_json)
        if 'output_data_streams' in cfg:
            log.debug("Stripping outputs from cfg before check")
            # Do not check output data streams here, as the only relevant
            # thing is that they are consecutive
            outputs = cfg.pop('output_data_streams')
            log.debug("Pipeline set")
            yield EDDPipeline.set(self, cfg)
            log.debug("Re-adding outputs")
            self._config['output_data_streams'] = outputs
            self._configUpdated()
        else:
            yield EDDPipeline.set(self, cfg)

    @coroutine
    def configure(self, config_json):
        """
        Configure the EDD backend

        Args:
            config_json:    A JSON dictionary object containing configuration information

        """
        log.info("Configuring EDD backend for processing")

        # log.info("Resetting data streams")
        # TODO: Interface? Decide if this is always done
        # self.__eddDataStore._dataStreams.flushdb()
        log.debug("Received configuration string: '{}'".format(config_json))

        try:
            cfg = json.loads(config_json)
        except ValueError:
            log.error("Error parsing json")
            raise FailReply(
                "Cannot handle config string {} - Not valid json!".format(
                    config_json))

        if not self.__provisioned:
            log.debug("Not provisioned. Using full config.")
            # Do not use set here, as there might not be a basic config from
            # provisioning
            cfg = self.__sanitizeConfig(cfg)
            self._config = cfg
        else:
            yield EDDPipeline.set(self, cfg)

        yield self._installController(self._config['products'])

        cfs = json.dumps(self._config, indent=4)
        log.debug("Starting configuration:\n" + cfs)

        # Data streams are only filled in on final configure, as they may
        # require data from the configure command of previous products. For
        # example, the packetizer data stream has a sync time that is
        # propagated to other components. The components are thus configured
        # following the dependency tree, which is a directed acyclic graph
        # (DAG).
        log.debug("Build DAG from config")
        dag = nx.DiGraph()
        for product, product_config in self._config['products'].items():
            log.debug("Adding node: {}".format(product))
            dag.add_node(product)
            if "input_data_streams" in product_config:
                for stream in value_list(product_config["input_data_streams"]):
                    if not stream["source"]:
                        log.warning(
                            "Ignoring stream without source for DAG from {}".
                            format(product))
                        continue
                    source_product = stream["source"].split(":")[0]
                    if source_product not in self._config['products']:
                        raise FailReply(
                            "{} requires data stream of unknown product {}".
                            format(product, stream["source"]))
                    log.debug("Connecting: {} -> {}".format(
                        source_product, product))
                    dag.add_edge(source_product, product)

        log.debug("Checking for loops in graph")
        try:
            cycle = nx.find_cycle(dag)
            FailReply("Cycle detected in dependency graph: {}".format(cycle))
        except nx.NetworkXNoCycle:
            log.debug("No loop on graph found")
            pass
        graph = "\n".join(
            ["  {} --> {}".format(k[0], k[1]) for k in dag.edges()])
        log.info("Dependency graph of products:\n{}".format(graph))
        self._configuration_graph.set_value(graph)

        configure_results = {}
        configure_futures = []

        @coroutine
        def __process_node(node):
            """
            Wrapper to parallelize configuration of nodes. Any Node will wait for its predecessors to be done.
            """
            # Wait for all predecessors to finish
            log.debug("DAG Processing {}: Waiting for {} predecessors".format(
                node, len(list(dag.predecessors(node)))))
            for pre in dag.predecessors(node):
                log.debug('DAG Processing {}: waiting for {}'.format(
                    node, pre))
                while pre not in configure_results:
                    # python3 asyncio coroutines do not run until awaited;
                    # with them the graph could be built up front and
                    # executed without polling
                    yield tornado.gen.sleep(0.5)
                log.debug('DAG Processing {}: Predecessor {} done.'.format(
                    node, pre))
                if not configure_results[pre]:
                    log.error(
                        'DAG Processing {}: fails due to error in predecessor {}'
                        .format(node, pre))
                    configure_results[node] = False
                    raise Return()
                log.debug('DAG Processing {}: Predecessor {} was successful.'.
                          format(node, pre))

            log.debug("DAG Processing {}: All predecessors done.".format(node))
            try:
                log.debug(
                    "DAG Processing {}: Checking input data streams for updates."
                    .format(node))
                if "input_data_streams" in self._config['products'][node]:
                    log.debug(
                        'DAG Processing {}: Update input streams'.format(node))
                    for stream in value_list(self._config['products'][node]
                                             ["input_data_streams"]):
                        product_name, stream_name = stream["source"].split(":")
                        stream.update(self._config['products'][product_name]
                                      ["output_data_streams"][stream_name])

                log.debug('DAG Processing {}: Set Final config'.format(node))
                yield self.__controller[node].set(
                    self._config['products'][node])
                log.debug(
                    'DAG Processing {}: Starting configuration'.format(node))
                yield self.__controller[node].configure()
                log.debug(
                    "DAG Processing {}: Getting updated config".format(node))
                cfg = yield self.__controller[node].getConfig()
                log.debug("Got: {}".format(json.dumps(cfg, indent=4)))
                self._config["products"][node] = cfg

            except Exception as E:
                log.error(
                    'DAG Processing: {} Exception caught during configuration:\n {}:{}'
                    .format(node,
                            type(E).__name__, E))
                configure_results[node] = False
            else:
                log.debug(
                    'DAG Processing: {} Successfully finished configuration'.
                    format(node))
                configure_results[node] = True

        log.debug("Creating processing futures")
        configure_futures = [__process_node(node) for node in dag.nodes()]
        yield configure_futures
        self._configUpdated()
        log.debug("Final configuration:\n '{}'".format(
            json.dumps(self._config, indent=2)))
        failed_products = [
            k for k in configure_results if not configure_results[k]
        ]
        if failed_products:
            raise FailReply("Failed products: {}".format(
                ",".join(failed_products)))
        log.info("Updating data streams in database")
        for productname, product in self._config["products"].items():
            log.debug(" - Checking {}".format(productname))
            if "output_data_streams" in product and isinstance(
                    product["output_data_streams"], dict):
                for stream, streamcfg in product["output_data_streams"].items(
                ):
                    key = "{}:{}".format(productname, stream)
                    self.__eddDataStore.addDataStream(key, streamcfg)

        log.info("Successfully configured EDD")
        raise Return("Successfully configured EDD")