Example #1
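Logs the publicly exposed plugin ports of a cluster: for every port flagged `expose_publicly`, an `open <plugin> [<port name>]` label is logged together with its local URL.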
def print_plugin_ports(cluster_config: ClusterConfiguration):
    if cluster_config and cluster_config.plugins:
        plugins = cluster_config.plugins
        has_ports = False
        # First pass: check whether any plugin exposes a port publicly.
        for plugin in plugins:
            for port in plugin.ports:
                if port.expose_publicly:
                    has_ports = True
                    break

        # Second pass: log an "open" label and local URL for each public port.
        if has_ports:
            log.info("plugins:")
            for plugin in plugins:
                for port in plugin.ports:
                    if port.expose_publicly:
                        label = "  - open {}".format(plugin.name)

                        if port.name:
                            label += " {}".format(port.name)

                        url = "{0}{1}".format(http_prefix, port.public_port)
                        utils.log_property(label, url)
Example #2
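A later revision of the same helper: it first groups the publicly exposed ports by plugin name, then logs each plugin as its own heading followed by one label/URL line per port.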
def print_plugin_ports(cluster_configuration: ClusterConfiguration):
    if cluster_configuration and cluster_configuration.plugins:
        plugins = cluster_configuration.plugins
        has_ports = False
        plugin_ports = {}
        for plugin in plugins:
            plugin_ports[plugin.name] = []
            for port in plugin.ports:
                if port.expose_publicly:
                    has_ports = True
                    plugin_ports[plugin.name].append(port)

        if has_ports:
            log.info("plugins:")

        for plugin_name, ports in plugin_ports.items():
            if ports:
                log.info(" %s ", plugin_name)
                for port in ports:
                    label = "    - open"
                    if port.name:
                        label += " {}".format(port.name)
                    url = "{0}{1}".format(http_prefix, port.public_port)
                    utils.log_property(label, url)
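A minimal, hypothetical sketch of how this helper could be exercised outside aztk, assuming Example #2's function (and the `ClusterConfiguration` name it references) is in scope. The `log`, `utils`, and `http_prefix` globals the helper reads are stubbed below, and the plugin/port objects are stand-ins for the real aztk models:

import logging
from types import SimpleNamespace

# Stubs for the module-level names the helper reads (assumption: the real
# module defines `log`, `utils.log_property`, and an `http_prefix` string).
logging.basicConfig(level=logging.INFO, format="%(message)s")
log = logging.getLogger("aztk")
http_prefix = "http://localhost:"

class utils:  # minimal stand-in for the aztk_cli utils module
    @staticmethod
    def log_property(label, value):
        log.info("%s %s", label, value)

# Stand-in objects exposing only the attributes the helper reads:
# .name, .ports, .expose_publicly, .public_port and .plugins.
port = SimpleNamespace(name="jupyter", expose_publicly=True, public_port=8888)
plugin = SimpleNamespace(name="jupyter", ports=[port])

print_plugin_ports(SimpleNamespace(plugins=[plugin]))
# plugins:
#  jupyter
#     - open jupyter http://localhost:8888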
Example #3
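Builds an `SshConfig` from the CLI arguments, logs the cluster's UI endpoints and SSH settings, then obtains the SSH command for the Spark head node, mapping a Batch `PoolNotFound` error to a friendlier `AztkError`.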
def execute(args: typing.NamedTuple):
    spark_client = aztk.spark.Client(config.load_aztk_secrets())
    cluster = spark_client.get_cluster(args.cluster_id)
    cluster_config = spark_client.get_cluster_config(args.cluster_id)
    ssh_conf = SshConfig()

    ssh_conf.merge(cluster_id=args.cluster_id,
                   username=args.username,
                   job_ui_port=args.jobui,
                   job_history_ui_port=args.jobhistoryui,
                   web_ui_port=args.webui,
                   jupyter_port=args.jupyter,
                   name_node_ui_port=args.namenodeui,
                   rstudio_server_port=args.rstudioserver,
                   host=args.host,
                   connect=args.connect)

    log.info("-------------------------------------------")
    utils.log_property("spark cluster id", ssh_conf.cluster_id)
    utils.log_property("open webui", "{0}{1}".format(http_prefix,
                                                     ssh_conf.web_ui_port))
    utils.log_property("open jobui", "{0}{1}".format(http_prefix,
                                                     ssh_conf.job_ui_port))
    utils.log_property(
        "open jobhistoryui", "{0}{1}".format(http_prefix,
                                             ssh_conf.job_history_ui_port))
    print_plugin_ports(cluster_config)
    utils.log_property("ssh username", ssh_conf.username)
    utils.log_property("connect", ssh_conf.connect)
    log.info("-------------------------------------------")

    # get ssh command
    try:
        ssh_cmd = utils.ssh_in_master(
            client=spark_client,
            cluster_id=ssh_conf.cluster_id,
            webui=ssh_conf.web_ui_port,
            jobui=ssh_conf.job_ui_port,
            jobhistoryui=ssh_conf.job_history_ui_port,
            username=ssh_conf.username,
            host=ssh_conf.host,
            connect=ssh_conf.connect)

        if not ssh_conf.connect:
            log.info("")
            log.info(
                "Use the following command to connect to your spark head node:"
            )
            log.info("\t%s", ssh_cmd)

    except batch_error.BatchErrorException as e:
        if e.error.code == "PoolNotFound":
            raise aztk.error.AztkError(
                "The cluster you are trying to connect to does not exist.")
        else:
            raise
Example #4
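A newer revision of `execute` that uses the `spark_client.cluster.*` API, adds an `internal` flag, and falls back to a pure-Python SSH implementation when no local ssh client is available.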
def execute(args: typing.NamedTuple):
    spark_client = aztk.spark.Client(config.load_aztk_secrets())
    cluster = spark_client.cluster.get(args.cluster_id)
    cluster_configuration = spark_client.cluster.get_configuration(
        args.cluster_id)
    ssh_conf = SshConfig()

    ssh_conf.merge(
        cluster_id=args.cluster_id,
        username=args.username,
        job_ui_port=args.jobui,
        job_history_ui_port=args.jobhistoryui,
        web_ui_port=args.webui,
        host=args.host,
        connect=args.connect,
        internal=args.internal,
    )

    log.info("-------------------------------------------")
    utils.log_property("spark cluster id", ssh_conf.cluster_id)
    utils.log_property("open webui", "{0}{1}".format(http_prefix,
                                                     ssh_conf.web_ui_port))
    utils.log_property("open jobui", "{0}{1}".format(http_prefix,
                                                     ssh_conf.job_ui_port))
    utils.log_property(
        "open jobhistoryui", "{0}{1}".format(http_prefix,
                                             ssh_conf.job_history_ui_port))
    print_plugin_ports(cluster_configuration)
    utils.log_property("ssh username", ssh_conf.username)
    utils.log_property("connect", ssh_conf.connect)
    log.info("-------------------------------------------")

    try:
        shell_out_ssh(spark_client, cluster_configuration, ssh_conf)
    except OSError:
        # No local ssh client was found; fall back to the pure-Python implementation.
        native_python_ssh_into_master(spark_client, cluster,
                                      cluster_configuration, ssh_conf,
                                      args.password)
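A hypothetical invocation sketch: the CLI normally builds `args` with argparse, so the namespace below sets only the attributes `execute` reads. All values are placeholders, and actually running this requires aztk secrets and a live cluster:

import argparse

# Hypothetical placeholder values; only the fields read by execute() are set.
args = argparse.Namespace(
    cluster_id="my-cluster",
    username="spark",
    jobui=None,
    jobhistoryui=None,
    webui=None,
    host=False,
    connect=True,
    internal=False,
    password=None,
)
execute(args)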