Example #1
File: client.py Project: gridl/aztk
def create_user(self, cluster_id: str, username: str, password: str = None, ssh_key: str = None) -> None:
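    """Create a user on the cluster's master node.

    Raises ClusterNotReadyError if no master node has been elected yet,
    and AztkError if the underlying Batch call fails.
    """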
    try:
        cluster = self.get_cluster(cluster_id)
        master_node_id = cluster.master_node_id
        if not master_node_id:
            raise error.ClusterNotReadyError("The master has not yet been picked, a user cannot be added.")
        self.__create_user(cluster.id, master_node_id, username, password, ssh_key)
    except batch_error.BatchErrorException as e:
        raise error.AztkError(helpers.format_batch_exception(e))
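For context, a minimal usage sketch of the method above (not part of the source project). The credentials and cluster id are placeholders, and the single-argument Client constructor is assumed from the older aztk API:

import aztk.spark

# Illustrative setup: fill SecretsConfiguration with real Batch/Storage credentials.
secrets = aztk.spark.models.SecretsConfiguration()
client = aztk.spark.Client(secrets)

# Creates the user on the cluster's master node; raises ClusterNotReadyError
# if the master has not been elected yet.
client.create_user(cluster_id="my-cluster", username="spark-user", ssh_key="ssh-rsa AAAA...")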
Example #2
def create_user(core_cluster_operations,
                spark_cluster_operations,
                cluster_id: str,
                username: str,
                password: str = None,
                ssh_key: str = None) -> None:
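    """Create a user on the cluster's nodes.

    Raises ClusterNotReadyError if no master node has been elected yet,
    and AztkError if the underlying Batch call fails.
    """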
    try:
        cluster = spark_cluster_operations.get(cluster_id)
        master_node_id = cluster.master_node_id
        if not master_node_id:
            raise error.ClusterNotReadyError(
                "The master has not yet been picked, a user cannot be added.")
        core_cluster_operations.create_user_on_cluster(cluster.id,
                                                       cluster.nodes, username,
                                                       ssh_key, password)
    except batch_error.BatchErrorException as e:
        raise error.AztkError(helpers.format_batch_exception(e))
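Example #2 is the same operation refactored to receive the operations objects explicitly. A minimal sketch (not from the project) that exercises its master-node guard using unittest.mock stand-ins:

from unittest.mock import MagicMock

from aztk import error

# Stand-ins for the two operations objects; the cluster has no elected master.
cluster = MagicMock()
cluster.master_node_id = None
spark_ops = MagicMock()
spark_ops.get.return_value = cluster
core_ops = MagicMock()

try:
    create_user(core_ops, spark_ops, "my-cluster", "spark-user", password="secret")
except error.ClusterNotReadyError as e:
    print(e)  # "The master has not yet been picked, a user cannot be added."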
Example #3
def ssh_in_master(client,
                  cluster_id: str,
                  cluster_configuration: models.ClusterConfiguration,
                  username: str = None,
                  webui: str = None,
                  jobui: str = None,
                  jobhistoryui: str = None,
                  ports=None,
                  host: bool = False,
                  connect: bool = True,
                  internal: bool = False):
    """
        SSH into head node of spark-app
        :param cluster_id: Id of the cluster to ssh in
        :param username: Username to use to ssh
        :param webui: Port for the spark master web ui (Local port)
        :param jobui: Port for the job web ui (Local port)
        :param ports: an list of local and remote ports
        :type ports: [[<local-port>, <remote-port>]]
    """
    # Check that ssh is available; this raises OSError if the ssh binary is missing
    subprocess.call(["ssh"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    # Get master node id from task (job and task are both named pool_id)
    cluster = client.cluster.get(cluster_id)

    master_node_id = cluster.master_node_id

    if master_node_id is None:
        raise error.ClusterNotReadyError(
            "Master node has not yet been picked!")

    # get remote login settings for the user
    remote_login_settings = client.cluster.get_remote_login_settings(
        cluster.id, master_node_id)
    master_internal_node_ip = [
        node.ip_address for node in cluster.nodes if node.id == master_node_id
    ][0]
    master_node_ip = remote_login_settings.ip_address
    master_node_port = remote_login_settings.port

    spark_web_ui_port = utils.constants.DOCKER_SPARK_WEB_UI_PORT
    spark_worker_ui_port = utils.constants.DOCKER_SPARK_WORKER_UI_PORT
    spark_job_ui_port = utils.constants.DOCKER_SPARK_JOB_UI_PORT
    spark_job_history_ui_port = utils.constants.DOCKER_SPARK_JOB_UI_HISTORY_PORT

    ssh_command = utils.command_builder.CommandBuilder('ssh')

    # get ssh private key path if specified
    ssh_priv_key = client.secrets_configuration.ssh_priv_key
    if ssh_priv_key is not None:
        ssh_command.add_option("-i", ssh_priv_key)

    ssh_command.add_argument("-t")
    ssh_command.add_option("-L",
                           "{0}:localhost:{1}".format(webui,
                                                      spark_web_ui_port),
                           enable=bool(webui))
    ssh_command.add_option("-L",
                           "{0}:localhost:{1}".format(jobui,
                                                      spark_job_ui_port),
                           enable=bool(jobui))
    ssh_command.add_option("-L",
                           "{0}:localhost:{1}".format(
                               jobhistoryui, spark_job_history_ui_port),
                           enable=bool(jobui))

    if ports is not None:
        for port in ports:
            ssh_command.add_option(
                "-L", "{0}:localhost:{1}".format(port[0], port[1]))
    if cluster_configuration and cluster_configuration.plugins:
        for plugin in cluster_configuration.plugins:
            for port in plugin.ports:
                if port.expose_publicly:
                    ssh_command.add_option(
                        "-L",
                        "{0}:localhost:{1}".format(port.public_port,
                                                   port.internal))

    user = username if username is not None else '<username>'
    if internal:
        ssh_command.add_argument("{0}@{1}".format(user,
                                                  master_internal_node_ip))
    else:
        ssh_command.add_argument("{0}@{1} -p {2}".format(
            user, master_node_ip, master_node_port))

    if host is False:
        ssh_command.add_argument("\'sudo docker exec -it spark /bin/bash\'")

    command = ssh_command.to_str()

    if connect:
        subprocess.call(command, shell=True)

    return '\n\t{}\n'.format(command)
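A hypothetical invocation of ssh_in_master (the client object is assumed to be an already-constructed aztk spark client). With connect=False the function only builds and returns the ssh command instead of executing it:

command = ssh_in_master(
    client,                      # an existing aztk spark client (assumed)
    cluster_id="my-cluster",
    cluster_configuration=None,  # skip plugin port forwarding in this sketch
    username="spark-user",
    webui="8080",                # forward localhost:8080 to the Spark master web ui
    ports=[[10007, 10007]],      # extra [local-port, remote-port] tunnels
    connect=False,               # build and return the command without running it
)
print(command)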