def _run_search_node(self, config: dict, q: multiprocessing.Queue):
        """Run a SearchNode process.

        Initialises monitoring and logging from *config*, starts a
        SearchNode, then consumes peer tuples ``(host, port, name)`` from
        *q*, connecting to each peer until an invalid (non-3-element)
        tuple arrives, after which the node blocks until shutdown.

        :param config: node configuration; keys used here: search_config,
                       search_log, search_key, host, search_port.
        :param q: queue delivering peer connection tuples.
        """
        from utils.src.python.Monitoring import init
        init(config["search_config"].get("search_prometheus_log_file"))

        from network_oef.src.python.SearchNode import SearchNode
        from utils.src.python.Logging import configure as configure_logging, get_logger
        configure_logging(file=config["search_log"])
        logger = get_logger("NODE_RUN: " + config["search_key"])

        search_node = SearchNode(config)

        logger.info("**** Node started @ {}:{}".format(config["host"],
                                                       config["search_port"]))
        time.sleep(1)  # give the node a moment to come up before peering
        try:
            while True:
                con = q.get()
                # Validate BEFORE indexing: previously con[2] was logged
                # first, so a short sentinel tuple raised IndexError
                # instead of triggering the intended clean break.
                if len(con) != 3:
                    # %s placeholder added: the argument was previously
                    # dropped because the format string had none.
                    logger.error(
                        "**** Stopping connection queue listening, because invalid con: %s",
                        con)
                    break
                logger.info("**** SearchProcess got peer: %s @ %s ", con[2],
                            con[0] + ":" + str(con[1]))
                search_node.connect_to_search_node(*con)
            search_node.block()
        except Exception as e:
            # logger.exception attaches the traceback automatically.
            logger.exception("Exception in run_search_node: %s", e)
        except BaseException:
            # Explicit form of the former bare `except:` — catches e.g.
            # KeyboardInterrupt/SystemExit so the exit is still logged.
            logger.exception("Exception")
        logger.error("******* EXIT SEARCH NODE")
Beispiel #2
0
def main():
    """CLI entry point: configure logging, parse arguments, start the app.

    Blocks in ``app.run()`` until the application terminates.
    """
    configure_logging(level=logging.ERROR)
    app = App()

    arg_parser = argparse.ArgumentParser(
        description='Test application for PLUTO.')
    arg_parser.add_argument(
        "--http_port",
        required=True,
        type=int,
        help="which port to run the HTTP interface on.")

    cli_args = arg_parser.parse_args()
    app.start(cli_args)
    app.run()
Beispiel #3
0
def run_node(id: int, q: multiprocessing.Queue, ssl_certificate: str = None):
    """Start a local full search node and connect it to one remote peer.

    :param id: node index; offsets all port numbers (search 10000+id,
               dap 20000+id, http 7500+id).
    :param q: queue delivering a single peer tuple for add_remote_peer.
    :param ssl_certificate: path to the SSL certificate. Defaults to the
               module-level ``args.ssl_certificate`` for backward
               compatibility with the original CLI-driven usage.

    Blocks forever in ``node.block()`` once the peer is added.
    """
    from network_oef.src.python.FullLocalSearchNode import FullSearchNone
    from utils.src.python.Logging import configure as configure_logging
    configure_logging(id_flag=str(id))

    if ssl_certificate is None:
        # Fix: the original read the module-global `args` directly, which
        # raises NameError when this function is called outside the CLI
        # script. The global is now only touched as a legacy fallback.
        ssl_certificate = args.ssl_certificate

    node = FullSearchNone("search{}".format(id), "127.0.0.1", 10000 + id, [{
        "run_py_dap": True,
        "file": "ai_search_engine/src/resources/dap_config.json",
        "port": 20000 + id
    }], 7500 + id, ssl_certificate, "api/src/resources/website")
    print("Node started")
    time.sleep(1)  # let the node finish starting before peering
    con = q.get()
    print("SearchProcess {} got: ".format(id), con)
    node.add_remote_peer(*con)
    node.block()
def _run_search_node(name: str, node_ip: str, node_port: int,
                     dap_port_start: int, director_api_port: int,
                     http_port: int, ssl_certificate: str,
                     q: multiprocessing.Queue, log_file: str):
    """Run a full local search node process.

    Starts a FullSearchNone with one in-memory C++ DAP, then consumes
    peer tuples ``(host, port, name)`` from *q*, adding each as a remote
    peer until an invalid (non-3-element) tuple arrives, after which the
    node blocks until shutdown.

    :param name: node name, also used for the logger id.
    :param node_ip: address the search node binds to.
    :param node_port: search protocol port.
    :param dap_port_start: port for the in-memory DAP.
    :param director_api_port: port for the director API.
    :param http_port: port for the HTTP/website interface.
    :param ssl_certificate: path to the SSL certificate.
    :param q: queue delivering peer connection tuples.
    :param log_file: log file path; its directory is used as log_dir.
    """
    from network_oef.src.python.FullLocalSearchNode import FullSearchNone
    # get_logger added to the import for consistency with the sibling
    # runner: it was previously used without being imported here.
    from utils.src.python.Logging import configure as configure_logging, get_logger
    configure_logging(file=log_file)
    logger = get_logger("NODE_RUN: " + name)

    node = FullSearchNone(
        name,
        node_ip,
        node_port,
        [{
            #"run_py_dap": True,
            #"file": "ai_search_engine/src/resources/dap_config.json",
            #"port": dap_port_start,
            "run_mode": "CPP",  #PY/CPP
            "port": dap_port_start,
            "name": "in_memory_dap"
        }],
        http_port,
        ssl_certificate,
        "api/src/resources/website",
        director_api_port=director_api_port,
        log_dir=os.path.split(log_file)[0])
    logger.error("**** Node %s started", name)
    time.sleep(1)  # let the node finish starting before peering
    try:
        while True:
            con = q.get()
            # Validate BEFORE indexing: previously con[2] was logged
            # first, so a short sentinel tuple raised IndexError instead
            # of triggering the intended clean break.
            if len(con) != 3:
                # %s placeholder added: the argument was previously
                # dropped because the format string had none.
                logger.error(
                    "**** Stopping connection queue listening, because invalid con: %s",
                    con)
                break
            logger.info("**** SearchProcess got peer: %s @ %s ", con[2],
                        con[0] + ":" + str(con[1]))
            node.add_remote_peer(*con)
        node.block()
    except Exception as e:
        # logger.exception attaches the traceback automatically.
        logger.exception("Exception in run_search_node: %s", e)
    except BaseException:
        # Explicit form of the former bare `except:` — catches e.g.
        # KeyboardInterrupt/SystemExit so the exit is still logged.
        logger.exception("Exception")
    logger.error("******* EXIT SEARCH NODE")
Beispiel #5
0
    #create_kv_updates(update.update.add())
    upd1 = dap_update_pb2.DapUpdate.TableFieldValue()
    create_dm_updates(upd1)
    upd2 = dap_update_pb2.DapUpdate.TableFieldValue()
    create_kv_updates(upd2)
    return [upd1, upd2]


def lookup(clss, name):
    """Return the object paired with *name* in an iterable of
    ``(name, object)`` pairs, or ``None`` when no pair matches.

    Only the first match is returned.
    """
    return next((entry[1] for entry in clss if entry[0] == name), None)


configure_logging()
classes = inspect.getmembers(sys.modules[__name__])
daps = []
updates = create_dap_updates()
for name, conf in config_contract.items():
    cls = lookup(classes, conf["class"])
    if cls is not None and conf["config"]["port"] in selected_ports:
        daps.append(cls(name, conf["config"]))
        dap = daps[-1]
        print("------------------\nDESCRIBE")
        print(dap.describe())
        print("------------------\nUPDATE")
        for upd in updates:
            print(upd)
            ss = upd.SerializeToString()
            print(ss)
            config = json.load(f)
        config = update_config_from_state(config, log)
        b = Bootstrap(config["bootstrap"])
        config["bootstrap"] = b.update_config()
        update_state(config, log)
        await from_bootstrap(director, b, config)
    else:
        print("Command type {} not supported!".format(args.type))

    await director.close_all()

    await director.wait()


if __name__ == "__main__":
    configure_logging(level=logging.INFO)
    parser = argparse.ArgumentParser(description='DEMO Director')
    parser.add_argument("--targets",
                        nargs='+',
                        type=str,
                        help="Node addresses host:port ...")
    parser.add_argument(
        "--type",
        "-t",
        type=str,
        required=True,
        help=
        "weather_agent/location/location_and_connection/json_config/bootstrap")
    parser.add_argument("--config_file",
                        type=str,
                        required=False,