Example #1
0
    def _launch_analytical_engine_locally(self):
        """Start the analytical engine (GAE) rpc service across the engine pods.

        Writes a hosts file mapping each pod IP to its pod name, copies it into
        every engine container with ``kubectl cp``, then launches the engine
        binary through the resolved MPI command prefix as a local subprocess.
        The subprocess handle is stored on ``self._analytical_engine_process``
        with a ``PipeWatcher`` attached that forwards its combined
        stdout/stderr to this process's stdout.
        """
        logger.info("Starting GAE rpc service on {} ...".format(
            str(self._analytical_engine_endpoint)))

        # generate and distribute hostfile
        with open("/tmp/kube_hosts", "w") as f:
            # iterate the parallel ip/name lists together instead of indexing
            for pod_ip, pod_name in zip(self._pod_ip_list,
                                        self._pod_name_list):
                f.write("{} {}\n".format(pod_ip, pod_name))

        for pod in self._pod_name_list:
            subprocess.check_call([
                "kubectl",
                "-n",
                self._namespace,
                "cp",
                "/tmp/kube_hosts",
                "{}:/etc/hosts_of_nodes".format(pod),
                "-c",
                self._engine_container_name,
            ])

        # launch engine via the MPI prefix (rsh agent enabled for k8s pods)
        rmcp = ResolveMPICmdPrefix(rsh_agent=True)
        cmd, mpi_env = rmcp.resolve(self._num_workers,
                                    ",".join(self._pod_name_list))

        cmd.append(self._analytical_engine_exec)
        cmd.extend(["--host", "0.0.0.0"])
        cmd.extend(["--port", str(self._random_analytical_engine_rpc_port)])

        # glog verbosity: flag form under OpenMPI, env var otherwise
        if rmcp.openmpi():
            cmd.extend(["-v", str(self._glog_level)])
        else:
            mpi_env["GLOG_v"] = str(self._glog_level)

        cmd.extend(
            ["--vineyard_socket", "/tmp/vineyard_workspace/vineyard.sock"])
        logger.debug("Analytical engine launching command: {}".format(
            " ".join(cmd)))

        env = os.environ.copy()
        env.update(mpi_env)

        self._analytical_engine_process = subprocess.Popen(
            cmd,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            encoding="utf-8",
        )

        # attach the watcher to the process object so it stays reachable
        # for the lifetime of the process handle
        stdout_watcher = PipeWatcher(self._analytical_engine_process.stdout,
                                     sys.stdout,
                                     drop=True)
        setattr(self._analytical_engine_process, "stdout_watcher",
                stdout_watcher)
Example #2
0
    def create_learning_instance(self, object_id, handle, config):
        """Launch a learning-engine server on every engine pod.

        Allocates a service covering one port per pod, rewrites the
        base64-encoded JSON *handle* so its ``server`` field lists the
        ``pod:port`` endpoints, then starts ``gscoordinator.learning`` in each
        engine container via ``kubectl exec``. The spawned processes are
        recorded under ``self._learning_instance_processes[object_id]``.

        Args:
            object_id: Id of the graph object the servers operate on.
            handle: Base64-encoded JSON handle; returned re-encoded with the
                ``server`` field filled in (passed to each server process).
            config: Opaque configuration string forwarded to each server.

        Returns:
            The parsed service endpoints, as produced by
            ``self._parse_graphlearn_service_endpoint``.
        """
        # allocate service for ports
        self._create_graphlearn_service(object_id,
                                        self._learning_engine_ports_usage,
                                        len(self._pod_name_list))

        # prepare arguments: decode, inject server endpoints, re-encode
        handle = json.loads(
            base64.b64decode(handle.encode("utf-8")).decode("utf-8"))
        hosts = ",".join([
            "%s:%s" % (pod_name, port) for pod_name, port in zip(
                self._pod_name_list,
                range(
                    self._learning_engine_ports_usage,
                    self._learning_engine_ports_usage +
                    len(self._pod_name_list),
                ),
            )
        ])
        handle["server"] = hosts
        handle = base64.b64encode(
            json.dumps(handle).encode("utf-8")).decode("utf-8")

        # launch the server
        self._learning_instance_processes[object_id] = []
        for pod_index, pod in enumerate(self._pod_name_list):
            cmd = [
                "kubectl",
                "-n",
                self._namespace,
                "exec",
                # NOTE(review): "-it" requests a TTY although stdout is piped
                # below; kubectl may warn "Unable to use a TTY" — confirm the
                # flag is actually needed here.
                "-it",
                "-c",
                self._engine_container_name,
                pod,
                "--",
                "python3",
                # was a missing comma: "-m" "gscoordinator.learning" silently
                # concatenated to the single token "-mgscoordinator.learning"
                "-m",
                "gscoordinator.learning",
                handle,
                config,
                str(pod_index),
            ]
            # use the module-level logger for consistency with other methods
            logger.info("launching learning server: %s", " ".join(cmd))
            proc = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            # keep the watcher reachable alongside its process handle
            stdout_watcher = PipeWatcher(proc.stdout, sys.stdout, drop=True)
            setattr(proc, "stdout_watcher", stdout_watcher)
            self._learning_instance_processes[object_id].append(proc)

        # update the port usage record
        self._learning_engine_ports_usage += len(self._pod_name_list)

        # parse the service hosts and ports
        return self._parse_graphlearn_service_endpoint(object_id)
Example #3
0
    def _start_analytical_engine(self):
        """Spawn the analytical engine as a subprocess behind an MPI prefix.

        Picks the first host as the rpc master, grabs a free port on it,
        records the resulting endpoint on the instance, and wires the engine
        process's combined output into a ``PipeWatcher``.
        """
        rmcp = ResolveMPICmdPrefix()
        cmd, mpi_env = rmcp.resolve(self._num_workers, self._hosts)

        # the first host in the comma-separated list acts as the rpc master
        master = self._hosts.split(",")[0]
        rpc_port = self._get_free_port(master)
        self._analytical_engine_endpoint = "{}:{}".format(
            master, str(rpc_port))

        cmd += [ANALYTICAL_ENGINE_PATH, "--host", "0.0.0.0"]
        cmd += ["--port", str(rpc_port)]

        # glog verbosity: flag form under OpenMPI, env var otherwise
        if not rmcp.openmpi():
            mpi_env["GLOG_v"] = str(self._glog_level)
        else:
            cmd += ["-v", str(self._glog_level)]

        if self._vineyard_socket:
            cmd += ["--vineyard_socket", self._vineyard_socket]

        # merge the MPI-specific variables into a copy of our environment
        env = dict(os.environ, **mpi_env)

        process = subprocess.Popen(
            cmd,
            cwd=os.path.dirname(ANALYTICAL_ENGINE_PATH),
            env=env,
            universal_newlines=True,
            encoding="utf-8",
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            bufsize=1,
        )

        logger.info("Server is initializing analytical engine.")
        # keep the watcher reachable alongside its process handle
        setattr(process, "stdout_watcher", PipeWatcher(process.stdout,
                                                       sys.stdout))

        self._analytical_engine_process = process
Example #4
0
def compile_app(workspace: str, app_name: str, attr, engine_config: dict):
    """Compile an application.

    Args:
        workspace (str): working dir.
        app_name (str): target app_name.
        attr (`AttrValue`): All information needed to compile an app.
        engine_config (dict): Engine configuration; ``engine_config["experimental"]``
            is forwarded to cmake as ``-DEXPERIMENTAL_ON``.

    Raises:
        RuntimeError: If the cmake configure or the make step exits non-zero.

    Returns:
        str: Path of the built library.
    """

    app_dir = os.path.join(workspace, app_name)
    os.makedirs(app_dir, exist_ok=True)

    # extract gar content
    _extract_gar(app_dir, attr)

    # codegen app and graph info
    # vd_type and md_type is None in cpp_pie
    (
        app_type,
        app_header,
        app_class,
        vd_type,
        md_type,
        pregel_combine,
    ) = _codegen_app_info(app_dir, DEFAULT_GS_CONFIG_FILE, attr)
    logger.info(
        "Codegened application type: %s, app header: %s, app_class: %s, vd_type: %s, md_type: %s, pregel_combine: %s",
        app_type,
        app_header,
        app_class,
        str(vd_type),
        str(md_type),
        str(pregel_combine),
    )

    graph_header, graph_type = _codegen_graph_info(attr)
    logger.info("Codegened graph type: %s, Graph header: %s", graph_type, graph_header)

    # NOTE: chdir is a process-wide side effect; cmake/make below rely on it
    os.chdir(app_dir)

    module_name = ""
    cmake_commands = [
        "cmake",
        ".",
        "-DEXPERIMENTAL_ON=" + engine_config["experimental"],
    ]
    if app_type != "cpp_pie":
        if app_type == "cython_pregel":
            pxd_name = "pregel"
            cmake_commands += ["-DCYTHON_PREGEL_APP=True"]
            if pregel_combine:
                cmake_commands += ["-DENABLE_PREGEL_COMBINE=True"]
        else:
            pxd_name = "pie"
            cmake_commands += ["-DCYTHON_PIE_APP=True"]

        # Copy pxd file and generate cc file from pyx
        shutil.copyfile(
            os.path.join(TEMPLATE_DIR, "{}.pxd.template".format(pxd_name)),
            os.path.join(app_dir, "{}.pxd".format(pxd_name)),
        )
        # Assume the gar will have and only have one .pyx file
        for pyx_file in glob.glob(app_dir + "/*.pyx"):
            module_name = os.path.splitext(os.path.basename(pyx_file))[0]
            cc_file = os.path.join(app_dir, module_name + ".cc")
            subprocess.check_call(["cython", "-3", "--cplus", "-o", cc_file, pyx_file])
        app_header = "{}.h".format(module_name)

    # replace and generate cmakelist
    cmakelists_file_tmp = os.path.join(TEMPLATE_DIR, "CMakeLists.template")
    cmakelists_file = os.path.join(app_dir, "CMakeLists.txt")
    with open(cmakelists_file_tmp, mode="r") as template:
        content = template.read()
        content = Template(content).safe_substitute(
            _analytical_engine_home=ANALYTICAL_ENGINE_HOME,
            _frame_name=app_name,
            _vd_type=vd_type,
            _md_type=md_type,
            _graph_type=graph_type,
            _graph_header=graph_header,
            _module_name=module_name,
            _app_type=app_class,
            _app_header=app_header,
        )
        with open(cmakelists_file, mode="w") as f:
            f.write(content)

    # compile
    cmake_process = subprocess.Popen(
        cmake_commands,
        env=os.environ.copy(),
        universal_newlines=True,
        encoding="utf-8",
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    cmake_stdout_watcher = PipeWatcher(cmake_process.stdout, sys.stdout)
    setattr(cmake_process, "stdout_watcher", cmake_stdout_watcher)
    cmake_process.wait()
    # previously the exit code was silently ignored, letting a broken
    # configure fall through to get_lib_path
    if cmake_process.returncode != 0:
        raise RuntimeError(
            "cmake failed with exit code {} for app {}".format(
                cmake_process.returncode, app_name))

    make_process = subprocess.Popen(
        ["make", "-j4"],
        env=os.environ.copy(),
        universal_newlines=True,
        encoding="utf-8",
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    make_stdout_watcher = PipeWatcher(make_process.stdout, sys.stdout)
    setattr(make_process, "stdout_watcher", make_stdout_watcher)
    make_process.wait()
    if make_process.returncode != 0:
        raise RuntimeError(
            "make failed with exit code {} for app {}".format(
                make_process.returncode, app_name))

    return get_lib_path(app_dir, app_name)
Example #5
0
def compile_graph_frame(
    workspace: str, frame_name: str, attr: dict, engine_config: dict
):
    """Compile a graph frame library.

    Args:
        workspace (str): Working dir.
        frame_name (str): Target app_name.
        attr (`AttrValue`): All information needed to compile a graph library.
        engine_config (dict): Engine configuration; ``engine_config["experimental"]``
            is forwarded to cmake as ``-DEXPERIMENTAL_ON``.

    Raises:
        ValueError: When graph_type is not supported.
        RuntimeError: If the cmake configure or the make step exits non-zero.

    Returns:
        str: Path of the built graph library.
    """

    frame_dir = os.path.join(workspace, frame_name)
    os.makedirs(frame_dir, exist_ok=True)

    graph_signature = generate_graph_type_sig(attr)

    logger.info("Codegened graph frame type: %s", graph_signature)

    # NOTE: chdir is a process-wide side effect; cmake/make below rely on it
    os.chdir(frame_dir)

    graph_type = attr[types_pb2.GRAPH_TYPE].graph_type

    cmake_commands = [
        "cmake",
        ".",
        "-DEXPERIMENTAL_ON=" + engine_config["experimental"],
    ]
    if graph_type == types_pb2.ARROW_PROPERTY:
        cmake_commands += ["-DPROPERTY_GRAPH_FRAME=True"]
    elif (
        graph_type == types_pb2.ARROW_PROJECTED
        or graph_type == types_pb2.DYNAMIC_PROJECTED
    ):
        cmake_commands += ["-DPROJECT_FRAME=True"]
    else:
        raise ValueError("Illegal graph type: {}".format(graph_type))
    # replace and generate cmakelist
    cmakelists_file_tmp = os.path.join(TEMPLATE_DIR, "CMakeLists.template")
    cmakelists_file = os.path.join(frame_dir, "CMakeLists.txt")
    with open(cmakelists_file_tmp, mode="r") as template:
        content = template.read()
        content = Template(content).safe_substitute(
            _analytical_engine_home=ANALYTICAL_ENGINE_HOME,
            _frame_name=frame_name,
            _graph_type=graph_signature,
        )
        with open(cmakelists_file, mode="w") as f:
            f.write(content)

    # compile
    cmake_process = subprocess.Popen(
        cmake_commands,
        env=os.environ.copy(),
        universal_newlines=True,
        encoding="utf-8",
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    cmake_stdout_watcher = PipeWatcher(cmake_process.stdout, sys.stdout)
    setattr(cmake_process, "stdout_watcher", cmake_stdout_watcher)
    cmake_process.wait()
    # previously the exit code was silently ignored, letting a broken
    # configure fall through to get_lib_path
    if cmake_process.returncode != 0:
        raise RuntimeError(
            "cmake failed with exit code {} for frame {}".format(
                cmake_process.returncode, frame_name))

    make_process = subprocess.Popen(
        ["make", "-j4"],
        env=os.environ.copy(),
        universal_newlines=True,
        encoding="utf-8",
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    make_stdout_watcher = PipeWatcher(make_process.stdout, sys.stdout)
    setattr(make_process, "stdout_watcher", make_stdout_watcher)
    make_process.wait()
    if make_process.returncode != 0:
        raise RuntimeError(
            "make failed with exit code {} for frame {}".format(
                make_process.returncode, frame_name))

    return get_lib_path(frame_dir, frame_name)