Example 1
def ensure_disk_exists(args, gcloud_compute, disk_name, report_errors=False):
    """Create the given persistent disk if it does not already exist.

    Args:
      args: The Namespace returned by argparse
      gcloud_compute: Function that can be used for invoking `gcloud compute`
      disk_name: The name of the persistent disk
      report_errors: Whether to report errors from the `gcloud`
        command to the user
    Raises:
      subprocess.CalledProcessError: If the `gcloud` command fails
    """
    get_cmd = [
        'disks', 'describe', '--quiet', disk_name, '--format', 'value(name)'
    ]
    if args.zone:
        get_cmd.extend(['--zone', args.zone])
    try:
        with tempfile.TemporaryFile() as tf:
            gcloud_compute(args, get_cmd, stdout=tf, stderr=tf)
            return
    except subprocess.CalledProcessError:
        try:
            create_disk(args, gcloud_compute, disk_name, report_errors)
        except Exception:
            if (not args.zone) and (not args.quiet):
                # We take this failure as a sign that gcloud might need
                # to prompt for a zone. As such, we do that prompting
                # for it, and then try again.
                args.zone = utils.prompt_for_zone(args, gcloud_compute)
                ensure_disk_exists(args,
                                   gcloud_compute,
                                   disk_name,
                                   report_errors=True)
            elif not report_errors:
                # We know the command failed (and will almost certainly
                # fail again), but we did not forward the errors that
                # it reported to the user. To work around this, we
                # re-run the command with 'report_errors' set to True
                create_disk(args, gcloud_compute, disk_name, True)
            else:
                raise
    return
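
All of these examples receive their `gcloud` invocations through injected callables (`gcloud_compute`, `gcloud_beta_compute`, `gcloud_repos`) rather than shelling out directly. As a minimal sketch, assuming the `gcloud` binary is on PATH, a wrapper compatible with the signature used above could look like the following; the real wrapper lives in the datalab CLI, and the `--project` handling here is illustrative, not the tool's actual implementation:

# Hedged sketch of a `gcloud compute` callable with the signature these
# examples expect: (args, cmd, **subprocess keyword args).
import argparse
import subprocess


def gcloud_compute(args, cmd, stdout=None, stderr=None):
    """Run `gcloud compute <cmd>`, forwarding stdout/stderr like the examples do."""
    base = ['gcloud', 'compute']
    if getattr(args, 'project', None):
        base.extend(['--project', args.project])
    return subprocess.check_call(base + cmd, stdout=stdout, stderr=stderr)


# Hypothetical invocation; the attribute names mirror what
# ensure_disk_exists reads from the argparse Namespace.
args = argparse.Namespace(project=None, zone='us-central1-a', quiet=True)
# ensure_disk_exists(args, gcloud_compute, 'my-datalab-pd')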
Example 2
def run(args,
        gcloud_compute,
        gcloud_repos,
        email='',
        in_cloud_shell=False,
        gcloud_zone=None,
        **kwargs):
    """Implementation of the `datalab create` subcommand.

    Args:
      args: The Namespace instance returned by argparse
      gcloud_compute: Function that can be used to invoke `gcloud compute`
      gcloud_repos: Function that can be used to invoke
        `gcloud source repos`
      email: The user's email address
      in_cloud_shell: Whether or not the command is being run in the
        Google Cloud Shell
      gcloud_zone: The zone that gcloud is configured to use
    Raises:
      subprocess.CalledProcessError: If a nested `gcloud` call fails
    """
    if (not args.zone) and (not args.disk_name):
        args.zone = gcloud_zone
    if (not args.zone) and (not args.quiet):
        args.zone = utils.prompt_for_zone(args, gcloud_compute)
    disk_cfg = prepare(args, gcloud_compute, gcloud_repos)

    print('Creating the instance {0}'.format(args.instance))
    cmd = ['instances', 'create']
    if args.zone:
        cmd.extend(['--zone', args.zone])

    enable_swap = "false" if args.no_swap else "true"
    enable_backups = "false" if args.no_backups else "true"
    idle_timeout = args.idle_timeout
    console_log_level = args.log_level or "warn"
    user_email = args.for_user or email
    service_account = args.service_account or "default"
    # We have to escape the user's email before using it in the YAML template.
    escaped_email = user_email.replace("'", "''")
    initial_user_settings = json.dumps({"idleTimeoutInterval": idle_timeout}) \
        if idle_timeout else ''
    with tempfile.NamedTemporaryFile(delete=False) as startup_script_file, \
            tempfile.NamedTemporaryFile(delete=False) as user_data_file, \
            tempfile.NamedTemporaryFile(delete=False) as for_user_file:
        try:
            startup_script_file.write(
                _DATALAB_STARTUP_SCRIPT.format(args.image_name,
                                               _DATALAB_NOTEBOOKS_REPOSITORY,
                                               enable_swap))
            startup_script_file.close()
            user_data_file.write(
                _DATALAB_CLOUD_CONFIG.format(args.image_name, enable_backups,
                                             console_log_level, escaped_email,
                                             initial_user_settings))
            user_data_file.close()
            for_user_file.write(user_email)
            for_user_file.close()
            metadata_template = ('startup-script={0},' + 'user-data={1},' +
                                 'for-user={2}')
            metadata_from_file = (metadata_template.format(
                startup_script_file.name, user_data_file.name,
                for_user_file.name))
            cmd.extend([
                '--format=none', '--boot-disk-size=20GB', '--network',
                args.network_name, '--image-family', 'cos-stable',
                '--image-project', 'cos-cloud', '--machine-type',
                args.machine_type, '--metadata-from-file', metadata_from_file,
                '--tags', 'datalab', '--disk', disk_cfg, '--service-account',
                service_account, '--scopes', 'cloud-platform', args.instance
            ])
            gcloud_compute(args, cmd)
        finally:
            os.remove(startup_script_file.name)
            os.remove(user_data_file.name)
            os.remove(for_user_file.name)

    if (not args.no_connect) and (not args.for_user):
        connect.connect(args, gcloud_compute, email, in_cloud_shell)
    return
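
For reference, here is a hedged sketch of the argparse Namespace attributes that this `run` implementation reads directly; the helpers `prepare` and `connect.connect` read additional attributes not listed, and the values shown are placeholders rather than the real CLI defaults:

# Placeholder Namespace covering the attributes dereferenced by this run().
import argparse

create_args = argparse.Namespace(
    zone=None,                 # resolved via gcloud config or a prompt
    disk_name=None,            # persistent disk name; None lets the tool pick
    quiet=False,               # suppress interactive prompts when True
    instance='my-datalab',     # name of the GCE instance to create
    no_swap=False,             # maps to enable_swap = "true"/"false"
    no_backups=False,          # maps to enable_backups = "true"/"false"
    idle_timeout=None,         # serialized into the user settings JSON if set
    log_level=None,            # console log level, defaults to "warn"
    for_user=None,             # create the instance on behalf of another user
    service_account=None,      # defaults to the "default" service account
    image_name='gcr.io/cloud-datalab/datalab:latest',
    network_name='datalab-network',
    machine_type='n1-standard-1',
    no_connect=False,          # skip the final connect.connect() call
)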
Example 3
def run(args, gcloud_beta_compute, gcloud_repos,
        email='', in_cloud_shell=False, gcloud_zone=None,
        sdk_version='UNKNOWN', datalab_version='UNKNOWN', **kwargs):
    """Implementation of the `datalab create` subcommand.

    Args:
      args: The Namespace instance returned by argparse
      gcloud_beta_compute: Function that can be used to invoke
        `gcloud beta compute`
      gcloud_repos: Function that can be used to invoke
        `gcloud source repos`
      email: The user's email address
      in_cloud_shell: Whether or not the command is being run in the
        Google Cloud Shell
      gcloud_zone: The zone that gcloud is configured to use
      sdk_version: The version of the Cloud SDK being used
      datalab_version: The version of the datalab CLI being used
    Raises:
      subprocess.CalledProcessError: If a nested `gcloud` call fails
    """
    if not utils.prompt_for_confirmation(
            args=args,
            message=_THIRD_PARTY_SOFTWARE_DIALOG,
            question='Do you accept',
            accept_by_default=False):
        print('Installation not accepted; Exiting.')
        return

    if (not args.zone) and (not args.disk_name):
        args.zone = gcloud_zone
    if (not args.zone) and (not args.quiet):
        args.zone = utils.prompt_for_zone(args, gcloud_beta_compute)
    disk_cfg = create.prepare(args, gcloud_beta_compute, gcloud_repos)

    print('Creating the instance {0}'.format(args.instance))
    print('\n\nDue to GPU Driver installation, please note that '
          'Datalab GPU instances take significantly longer to '
          'start up compared to non-GPU instances.')
    cmd = ['instances', 'create']
    if args.zone:
        cmd.extend(['--zone', args.zone])

    enable_swap = "false" if args.no_swap else "true"
    enable_backups = "false" if args.no_backups else "true"
    idle_timeout = args.idle_timeout
    console_log_level = args.log_level or "warn"
    user_email = args.for_user or email
    service_account = args.service_account or "default"
    # We need to map all of the GPUs.
    device_mapping = ""
    for i in range(min(args.accelerator_count, 32)):
        device_mapping += (" --device /dev/nvidia" + str(i) +
                           ":/dev/nvidia" + str(i) + " ")
    # We have to escape the user's email before using it in the YAML template.
    escaped_email = user_email.replace("'", "''")
    initial_user_settings = json.dumps({"idleTimeoutInterval": idle_timeout}) \
        if idle_timeout else ''
    with tempfile.NamedTemporaryFile(delete=False) as startup_script_file, \
            tempfile.NamedTemporaryFile(delete=False) as user_data_file, \
            tempfile.NamedTemporaryFile(delete=False) as for_user_file, \
            tempfile.NamedTemporaryFile(delete=False) as os_login_file, \
            tempfile.NamedTemporaryFile(delete=False) as sdk_version_file, \
            tempfile.NamedTemporaryFile(delete=False) as datalab_version_file:
        try:
            startup_script_file.write(create._DATALAB_STARTUP_SCRIPT.format(
                args.image_name, create._DATALAB_NOTEBOOKS_REPOSITORY,
                enable_swap))
            startup_script_file.close()
            user_data_file.write(_DATALAB_CLOUD_CONFIG.format(
                args.image_name, enable_backups,
                console_log_level, escaped_email, initial_user_settings,
                device_mapping))
            user_data_file.close()
            for_user_file.write(user_email)
            for_user_file.close()
            os_login_file.write("FALSE")
            os_login_file.close()
            sdk_version_file.write(sdk_version)
            sdk_version_file.close()
            datalab_version_file.write(datalab_version)
            datalab_version_file.close()
            metadata_template = (
                'startup-script={0},' +
                'user-data={1},' +
                'for-user={2},' +
                'enable-oslogin={3},' +
                'created-with-sdk-version={4},' +
                'created-with-datalab-version={5}')
            metadata_from_file = (
                metadata_template.format(
                    startup_script_file.name,
                    user_data_file.name,
                    for_user_file.name,
                    os_login_file.name,
                    sdk_version_file.name,
                    datalab_version_file.name))
            cmd.extend([
                '--format=none',
                '--boot-disk-size=20GB',
                '--network', args.network_name,
                '--image-family', 'cos-stable',
                '--image-project', 'cos-cloud',
                '--machine-type', args.machine_type,
                '--accelerator',
                'type=' + args.accelerator_type + ',count='
                + str(args.accelerator_count),
                '--maintenance-policy', 'TERMINATE', '--restart-on-failure',
                '--metadata-from-file', metadata_from_file,
                '--tags', 'datalab',
                '--disk', disk_cfg,
                '--service-account', service_account,
                '--scopes', 'cloud-platform',
                args.instance])
            gcloud_beta_compute(args, cmd)
        finally:
            os.remove(startup_script_file.name)
            os.remove(user_data_file.name)
            os.remove(for_user_file.name)
            os.remove(os_login_file.name)
            os.remove(sdk_version_file.name)
            os.remove(datalab_version_file.name)

    if (not args.no_connect) and (not args.for_user):
        connect.connect(args, gcloud_beta_compute, email, in_cloud_shell)
    return
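
The two GPU-specific additions in this variant are the `--accelerator` flag value and the Docker `--device` mappings spliced into the cloud-config. A small, self-contained sketch of how those strings are assembled (the function name is hypothetical; the 32-device cap mirrors the loop above):

# Mirror of the accelerator flag and device-mapping assembly in Example 3.
def gpu_flags(accelerator_type, accelerator_count):
    accelerator = 'type={0},count={1}'.format(accelerator_type,
                                              accelerator_count)
    device_mapping = ''
    for i in range(min(accelerator_count, 32)):
        device_mapping += ' --device /dev/nvidia{0}:/dev/nvidia{0} '.format(i)
    return accelerator, device_mapping


print(gpu_flags('nvidia-tesla-k80', 2))
# ('type=nvidia-tesla-k80,count=2',
#  ' --device /dev/nvidia0:/dev/nvidia0  --device /dev/nvidia1:/dev/nvidia1 ')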
Example 4
def run(args, gcloud_compute, gcloud_repos,
        email='', in_cloud_shell=False, gcloud_zone=None,
        sdk_version='UNKNOWN', datalab_version='UNKNOWN', **kwargs):
    """Implementation of the `datalab create` subcommand.

    Args:
      args: The Namespace instance returned by argparse
      gcloud_compute: Function that can be used to invoke `gcloud compute`
      gcloud_repos: Function that can be used to invoke
        `gcloud source repos`
      email: The user's email address
      in_cloud_shell: Whether or not the command is being run in the
        Google Cloud Shell
      gcloud_zone: The zone that gcloud is configured to use
      sdk_version: The version of the Cloud SDK being used
      datalab_version: The version of the datalab CLI being used
    Raises:
      subprocess.CalledProcessError: If a nested `gcloud` call fails
    """
    if (not args.zone) and (not args.disk_name):
        args.zone = gcloud_zone
    if (not args.zone) and (not args.quiet):
        args.zone = utils.prompt_for_zone(args, gcloud_compute)
    disk_cfg = prepare(args, gcloud_compute, gcloud_repos)

    print('Creating the instance {0}'.format(args.instance))
    cmd = ['instances', 'create']
    if args.zone:
        cmd.extend(['--zone', args.zone])

    enable_swap = "false" if args.no_swap else "true"
    enable_backups = "false" if args.no_backups else "true"
    idle_timeout = args.idle_timeout
    console_log_level = args.log_level or "warn"
    user_email = args.for_user or email
    service_account = args.service_account or "default"
    # We have to escape the user's email before using it in the YAML template.
    escaped_email = user_email.replace("'", "''")
    initial_user_settings = json.dumps({"idleTimeoutInterval": idle_timeout}) \
        if idle_timeout else ''
    with tempfile.NamedTemporaryFile(delete=False) as startup_script_file, \
            tempfile.NamedTemporaryFile(delete=False) as user_data_file, \
            tempfile.NamedTemporaryFile(delete=False) as for_user_file, \
            tempfile.NamedTemporaryFile(delete=False) as os_login_file, \
            tempfile.NamedTemporaryFile(delete=False) as sdk_version_file, \
            tempfile.NamedTemporaryFile(delete=False) as datalab_version_file:
        try:
            startup_script_file.write(_DATALAB_STARTUP_SCRIPT.format(
                args.image_name, _DATALAB_NOTEBOOKS_REPOSITORY, enable_swap))
            startup_script_file.close()
            user_data_file.write(_DATALAB_CLOUD_CONFIG.format(
                args.image_name, enable_backups,
                console_log_level, escaped_email, initial_user_settings))
            user_data_file.close()
            for_user_file.write(user_email)
            for_user_file.close()
            os_login_file.write("FALSE")
            os_login_file.close()
            sdk_version_file.write(sdk_version)
            sdk_version_file.close()
            datalab_version_file.write(datalab_version)
            datalab_version_file.close()
            metadata_template = (
                'startup-script={0},' +
                'user-data={1},' +
                'for-user={2},' +
                'enable-oslogin={3},' +
                'created-with-sdk-version={4},' +
                'created-with-datalab-version={5}')
            metadata_from_file = (
                metadata_template.format(
                    startup_script_file.name,
                    user_data_file.name,
                    for_user_file.name,
                    os_login_file.name,
                    sdk_version_file.name,
                    datalab_version_file.name))
            cmd.extend([
                '--format=none',
                '--boot-disk-size=20GB',
                '--network', args.network_name,
                '--image-family', 'cos-stable',
                '--image-project', 'cos-cloud',
                '--machine-type', args.machine_type,
                '--metadata-from-file', metadata_from_file,
                '--tags', 'datalab',
                '--disk', disk_cfg,
                '--service-account', service_account,
                '--scopes', 'cloud-platform',
                args.instance])
            gcloud_compute(args, cmd)
        finally:
            os.remove(startup_script_file.name)
            os.remove(user_data_file.name)
            os.remove(for_user_file.name)
            os.remove(os_login_file.name)
            os.remove(sdk_version_file.name)
            os.remove(datalab_version_file.name)

    if (not args.no_connect) and (not args.for_user):
        connect.connect(args, gcloud_compute, email, in_cloud_shell)
    return
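
The `--metadata-from-file` value assembled above is a comma-separated list of key=path pairs, where each path points at a temporary file whose contents become the metadata value. A hypothetical helper that mirrors that assembly and the cleanup done in the `finally` blocks:

# Sketch only: builds the --metadata-from-file flag value from in-memory
# strings by writing each one to a temp file, as the examples above do.
import os
import tempfile


def metadata_from_files(pairs):
    """pairs: list of (metadata_key, file_contents) tuples."""
    entries = []
    paths = []
    for key, contents in pairs:
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as f:
            f.write(contents)
            paths.append(f.name)
        entries.append('{0}={1}'.format(key, paths[-1]))
    return ','.join(entries), paths


flag_value, temp_paths = metadata_from_files([
    ('for-user', 'someone@example.com'),
    ('enable-oslogin', 'FALSE'),
])
print('--metadata-from-file ' + flag_value)
for p in temp_paths:  # the examples do the same cleanup in their finally blocks
    os.remove(p)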
Example 5
def run(args,
        gcloud_beta_compute,
        gcloud_repos,
        email='',
        in_cloud_shell=False,
        gcloud_zone=None,
        **kwargs):
    """Implementation of the `datalab create` subcommand.

    Args:
      args: The Namespace instance returned by argparse
      gcloud_beta_compute: Function that can be used to invoke
        `gcloud beta compute`
      gcloud_repos: Function that can be used to invoke
        `gcloud source repos`
      email: The user's email address
      in_cloud_shell: Whether or not the command is being run in the
        Google Cloud Shell
      gcloud_zone: The zone that gcloud is configured to use
    Raises:
      subprocess.CalledProcessError: If a nested `gcloud` call fails
    """
    if not utils.prompt_for_confirmation(args=args,
                                         message=_THIRD_PARTY_SOFTWARE_DIALOG,
                                         question='Do you accept',
                                         accept_by_default=False):
        print('Installation not accepted; Exiting.')
        return

    if (not args.zone) and (not args.disk_name):
        args.zone = gcloud_zone
    if (not args.zone) and (not args.quiet):
        args.zone = utils.prompt_for_zone(args, gcloud_beta_compute)
    disk_cfg = create.prepare(args, gcloud_beta_compute, gcloud_repos)

    print('Creating the instance {0}'.format(args.instance))
    print('\n\nDue to GPU Driver installation, please note that '
          'Datalab GPU instances take significantly longer to '
          'start up compared to non-GPU instances.')
    cmd = ['instances', 'create']
    if args.zone:
        cmd.extend(['--zone', args.zone])

    enable_swap = "false" if args.no_swap else "true"
    enable_backups = "false" if args.no_backups else "true"
    idle_timeout = args.idle_timeout
    console_log_level = args.log_level or "warn"
    user_email = args.for_user or email
    service_account = args.service_account or "default"
    # We need to map all of the GPUs.
    device_mapping = ""
    for i in range(min(args.accelerator_count, 32)):
        device_mapping += (" --device /dev/nvidia" + str(i) + ":/dev/nvidia" +
                           str(i) + " ")
    # We have to escape the user's email before using it in the YAML template.
    escaped_email = user_email.replace("'", "''")
    initial_user_settings = json.dumps({"idleTimeoutInterval": idle_timeout}) \
        if idle_timeout else ''
    with tempfile.NamedTemporaryFile(delete=False) as startup_script_file, \
            tempfile.NamedTemporaryFile(delete=False) as user_data_file, \
            tempfile.NamedTemporaryFile(delete=False) as for_user_file, \
            tempfile.NamedTemporaryFile(delete=False) as os_login_file:
        try:
            startup_script_file.write(
                create._DATALAB_STARTUP_SCRIPT.format(
                    args.image_name, create._DATALAB_NOTEBOOKS_REPOSITORY,
                    enable_swap))
            startup_script_file.close()
            user_data_file.write(
                _DATALAB_CLOUD_CONFIG.format(args.image_name, enable_backups,
                                             console_log_level, escaped_email,
                                             initial_user_settings,
                                             device_mapping))
            user_data_file.close()
            for_user_file.write(user_email)
            for_user_file.close()
            os_login_file.write("FALSE")
            os_login_file.close()
            metadata_template = ('startup-script={0},' + 'user-data={1},' +
                                 'for-user={2},' + 'enable-oslogin={3}')
            metadata_from_file = (metadata_template.format(
                startup_script_file.name, user_data_file.name,
                for_user_file.name, os_login_file.name))
            cmd.extend([
                '--format=none', '--boot-disk-size=20GB', '--network',
                args.network_name, '--image-family', 'cos-stable',
                '--image-project', 'cos-cloud', '--machine-type',
                args.machine_type, '--accelerator',
                'type=' + args.accelerator_type + ',count=' +
                str(args.accelerator_count), '--maintenance-policy',
                'TERMINATE', '--restart-on-failure', '--metadata-from-file',
                metadata_from_file, '--tags', 'datalab', '--disk', disk_cfg,
                '--service-account', service_account, '--scopes',
                'cloud-platform', args.instance
            ])
            gcloud_beta_compute(args, cmd)
        finally:
            os.remove(startup_script_file.name)
            os.remove(user_data_file.name)
            os.remove(for_user_file.name)
            os.remove(os_login_file.name)

    if (not args.no_connect) and (not args.for_user):
        connect.connect(args, gcloud_beta_compute, email, in_cloud_shell)
    return
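
Note that all five examples write plain `str` objects to `NamedTemporaryFile` handles, whose default mode is `'w+b'`; that works under Python 2, which these examples target, but raises `TypeError` under Python 3. A minimal sketch of the Python 3-friendly variant, assuming UTF-8 content:

# Open the temp file in text mode so str writes work under Python 3.
import os
import tempfile

with tempfile.NamedTemporaryFile(mode='w', encoding='utf-8',
                                 delete=False) as for_user_file:
    for_user_file.write('someone@example.com')
print(open(for_user_file.name).read())
os.remove(for_user_file.name)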