Example #1
0
def generate_docker_args(job_mode: conf.JobMode,
                         args: Dict[str, Any]) -> Dict[str, Any]:
    """Generate docker args from args and job mode.

    Args:
      job_mode: conf.JobMode the image will be built for.
      args: dict of parsed CLI arguments.

    Returns:
      A dict of keyword arguments accepted by
      caliban.docker.build.build_image.
    """

    # Extra dependency sets to install in case the user wants to install
    # their requirements via a setup.py file.
    setup_extras = b.base_extras(job_mode, "setup.py", args.get("extras"))

    # Google application credentials, from the CLI or from an env variable.
    creds_path = conf.extract_cloud_key(args)

    # Application default credentials location; only forwarded when the
    # file actually exists on disk.
    adc_loc = csdk.get_application_default_credentials_path()
    adc_path = adc_loc if os.path.isfile(adc_loc) else None

    # TODO we may want to take custom paths, here, in addition to detecting them.
    reqs = "requirements.txt"
    conda_env = "environment.yml"

    # Arguments that make their way down to caliban.docker.build.build_image.
    docker_args = {
        "extra_dirs": args.get("dir"),
        "requirements_path": reqs if os.path.exists(reqs) else None,
        "conda_env_path": conda_env if os.path.exists(conda_env) else None,
        "caliban_config": conf.caliban_config(),
        "credentials_path": creds_path,
        "adc_path": adc_path,
        "setup_extras": setup_extras,
        "no_cache": args.get("no_cache", False),
        "build_path": os.getcwd(),
    }

    return docker_args
Example #2
0
def test_extract_cloud_key(monkeypatch):
    """extract_cloud_key: explicit conf beats the env var, which beats nothing."""
    k = 'GOOGLE_APPLICATION_CREDENTIALS'
    # Clear any ambient credential so the test is hermetic. raising=False
    # makes this a no-op when the variable isn't set, and — unlike the old
    # `if os.environ.get(k)` truthiness guard — it also clears a variable
    # that is set to the empty string.
    monkeypatch.delenv(k, raising=False)

    # initial missing case.
    assert c.extract_cloud_key({}) is None

    monkeypatch.setenv(k, "key.json")

    # env override:
    assert c.extract_cloud_key({}) == "key.json"

    # conf takes precedence.
    assert c.extract_cloud_key({"cloud_key":
                                "mynewkey.json"}) == "mynewkey.json"
Example #3
0
def run_app(arg_input):
    """Main function to run the Caliban app. Accepts a Namespace-type output of an
  argparse argument parser, converts it to a dict, and dispatches on its
  "command" entry to the matching Caliban subcommand. Exits the process with
  status 1 on an unknown command.

  """
    args = vars(arg_input)
    # Positional arguments destined for the user's script, not for caliban.
    script_args = c.extract_script_args(args)

    command = args["command"]

    # GKE cluster commands have their own sub-CLI; hand off wholesale.
    if command == "cluster":
        return gke.cli.run_cli_command(args)

    job_mode = cli.resolve_job_mode(args)
    # Keyword args threaded down to caliban.docker.build.build_image.
    docker_args = cli.generate_docker_args(job_mode, args)
    # Extra flags forwarded to the underlying `docker run` invocation.
    docker_run_args = args.get("docker_run_args", [])

    if command == "shell":
        # 'bare' suppresses mounting the caller's home directory.
        mount_home = not args['bare']
        image_id = args.get("image_id")
        dlvm = args.get("dlvm")
        shell = args['shell']
        docker.run_interactive(job_mode,
                               dlvm=dlvm,
                               image_id=image_id,
                               run_args=docker_run_args,
                               mount_home=mount_home,
                               shell=shell,
                               **docker_args)

    elif command == "notebook":
        port = args.get("port")
        lab = args.get("lab")
        dlvm = args.get("dlvm")
        version = args.get("jupyter_version")
        # 'bare' suppresses mounting the caller's home directory.
        mount_home = not args['bare']
        docker.run_notebook(job_mode,
                            dlvm=dlvm,
                            port=port,
                            lab=lab,
                            version=version,
                            run_args=docker_run_args,
                            mount_home=mount_home,
                            **docker_args)

    elif command == "build":
        # Build the image only; nothing is executed.
        package = args["module"]
        docker.build_image(job_mode, package=package, **docker_args)

    # Job-history subcommands delegate to caliban.history.cli.
    elif command == 'status':
        caliban.history.cli.get_status(args)

    elif command == 'stop':
        caliban.history.cli.stop(args)

    elif command == 'resubmit':
        caliban.history.cli.resubmit(args)

    elif command == "run":
        # Run experiments locally inside the built container.
        dry_run = args["dry_run"]
        package = args["module"]
        image_id = args.get("image_id")
        dlvm = args.get("dlvm")
        exp_config = args.get("experiment_config")
        xgroup = args.get('xgroup')

        docker.run_experiments(job_mode,
                               run_args=docker_run_args,
                               script_args=script_args,
                               image_id=image_id,
                               dlvm=dlvm,
                               experiment_config=exp_config,
                               dry_run=dry_run,
                               package=package,
                               xgroup=xgroup,
                               **docker_args)

    elif command == "cloud":
        # Submit the job to Cloud; these helpers resolve values from the CLI
        # args and/or the environment.
        project_id = c.extract_project_id(args)
        region = c.extract_region(args)
        cloud_key = c.extract_cloud_key(args)

        dry_run = args["dry_run"]
        package = args["module"]
        job_name = args.get("name")
        gpu_spec = args.get("gpu_spec")
        tpu_spec = args.get("tpu_spec")
        image_tag = args.get("image_tag")
        machine_type = args.get("machine_type")
        dlvm = args.get("dlvm")
        exp_config = args.get("experiment_config")
        # Labels are sanitized before submission; default to no labels.
        labels = u.sanitize_labels(args.get("label") or [])
        xgroup = args.get('xgroup')

        # Arguments to internally build the image required to submit to Cloud.
        docker_m = {"job_mode": job_mode, "package": package, **docker_args}

        cloud.submit_ml_job(
            job_mode=job_mode,
            docker_args=docker_m,
            region=region,
            project_id=project_id,
            credentials_path=cloud_key,
            dry_run=dry_run,
            job_name=job_name,
            dlvm=dlvm,
            machine_type=machine_type,
            gpu_spec=gpu_spec,
            tpu_spec=tpu_spec,
            image_tag=image_tag,
            labels=labels,
            script_args=script_args,
            experiment_config=exp_config,
            xgroup=xgroup,
        )
    else:
        # Unknown command: report and exit non-zero.
        logging.info("Unknown command: {}".format(command))
        sys.exit(1)