Example #1
0
def download_func(args: Namespace):
    """Download the artifacts of a run's jobs to a local directory.

    Args:
        args: Parsed CLI arguments; uses ``run_name``, ``workflow_name``
            (optional job filter) and ``output`` (destination directory).

    Exits with an error message if the current directory is not a Git
    repo or if 'dstack config' has not been run yet.
    """
    try:
        dstack_config = get_config()
        # TODO: Support non-default profiles
        profile = dstack_config.get_profile("default")
        user_info = get_user_info(profile)

        jobs = get_jobs(args.run_name, profile)
        if args.workflow_name is not None:
            jobs = [j for j in jobs
                    if j["workflow_name"] == args.workflow_name]

        for job in jobs:
            # Use the job's own bucket only when the user has a custom
            # configuration AND the job carries a bucket; otherwise fall
            # back to the default configuration's bucket.
            if (user_info.get("user_configuration") is not None
                    and job.get("user_artifacts_s3_bucket") is not None):
                artifacts_s3_bucket = job["user_artifacts_s3_bucket"]
            else:
                artifacts_s3_bucket = \
                    user_info["default_configuration"]["artifacts_s3_bucket"]
            artifact_paths = job.get("artifact_paths")
            if not artifact_paths:
                print("No artifacts")
            else:
                # The client depends only on the job, not on the artifact
                # path — build it once per job, not once per artifact.
                s3_client = boto3_client(user_info, "s3", job)
                for artifact_path in artifact_paths:
                    download_artifact(s3_client, artifacts_s3_bucket,
                                      artifact_path, args.output)
    except InvalidGitRepositoryError:
        sys.exit(f"{os.getcwd()} is not a Git repo")
    except ConfigurationError:
        sys.exit("Call 'dstack config' first")
Example #2
0
def logs_func(args: Namespace):
    """Print the CloudWatch logs of a run, optionally following them.

    Args:
        args: Parsed CLI arguments; uses ``run_name``, ``since``,
            ``workflow_name`` (optional job filter) and ``follow``.

    Exits with an error message if the current directory is not a Git
    repo or if 'dstack config' has not been run yet.
    """
    try:
        dstack_config = get_config()
        # TODO: Support non-default profiles
        profile = dstack_config.get_profile("default")
        user_info = get_user_info(profile)

        filter_logs_events_kwargs = {
            "interleaved": True,
            "startTime": to_epoch_millis(args.since),
            "logGroupName": f"{user_info['user_name']}/{args.run_name}"
        }
        if args.workflow_name is not None:
            jobs = [j for j in get_jobs(args.run_name, profile)
                    if j["workflow_name"] == args.workflow_name]
            if len(jobs) == 0:
                # TODO: Handle not found error
                sys.exit(0)
            # Restrict the query to the selected workflow's log streams.
            filter_logs_events_kwargs["logStreamNames"] = [
                job['job_id'] for job in jobs
            ]

        client = boto3_client(user_info, "logs")

        if args.follow is True:
            try:
                for event in _do_filter_log_events(client,
                                                   filter_logs_events_kwargs):
                    try:
                        print(json.loads(event["message"].strip())["log"])
                    except JSONDecodeError:
                        # Skip malformed messages instead of crashing —
                        # consistent with the non-follow branch below.
                        pass
            except KeyboardInterrupt:
                # The only way to exit from the --follow is to Ctrl-C. So
                # we should exit the iterator rather than having the
                # KeyboardInterrupt propagate to the rest of the command.
                return

        else:
            paginator = client.get_paginator('filter_log_events')
            for page in paginator.paginate(**filter_logs_events_kwargs):
                for event in page['events']:
                    try:
                        print(json.loads(event["message"].strip())["log"])
                    except JSONDecodeError:
                        pass

    except InvalidGitRepositoryError:
        sys.exit(f"{os.getcwd()} is not a Git repo")
    except ConfigurationError:
        sys.exit("Call 'dstack config' first")
Example #3
0
def restart_func(args: Namespace):
    """Restart a run's jobs via the dstack server API.

    Args:
        args: Parsed CLI arguments; uses ``run_name``, ``workflow_name``
            (optional job filter) and ``clear``.

    Raises:
        requests.HTTPError: If the server responds with a non-200 status.

    Exits with an error message if 'dstack config' has not been run yet.
    """

    def _post(path: str, data: dict):
        # Single place for the POST-and-raise pattern shared by both
        # the per-job and whole-run branches.
        response = requests.request(
            method="POST",
            url=f"{profile.server}{path}",
            data=json.dumps(data).encode("utf-8"),
            headers=headers,
            verify=profile.verify)
        if response.status_code != 200:
            response.raise_for_status()

    try:
        dstack_config = get_config()
        # TODO: Support non-default profiles
        profile = dstack_config.get_profile("default")
        headers = {"Content-Type": "application/json; charset=utf-8"}
        if profile.token is not None:
            headers["Authorization"] = f"Bearer {profile.token}"

        # BUG FIX: the original nested everything below inside the
        # `profile.token is not None` check, so token-less profiles
        # silently did nothing. The restart logic now always runs and
        # only the Authorization header is conditional.
        if args.workflow_name is not None:
            jobs = [j for j in get_jobs(args.run_name, profile)
                    if j["workflow_name"] == args.workflow_name]
            # TODO: Handle not found error
            for job in jobs:
                # TODO: Do it in batch
                # TODO: Do it in the right order
                _post("/jobs/restart",
                      {"job_id": job["job_id"],
                       "clear": args.clear is True})
        else:
            _post("/runs/restart",
                  {"run_name": args.run_name,
                   "clear": args.clear is True})
        print(f"{colorama.Fore.LIGHTBLACK_EX}OK{colorama.Fore.RESET}")
    except ConfigurationError:
        sys.exit("Call 'dstack config' first")
Example #4
0
def list_func(args: Namespace):
    """List the artifacts of a run's jobs as a table.

    Args:
        args: Parsed CLI arguments; uses ``run_name``, ``workflow_name``
            (optional job filter) and ``total`` (show per-artifact totals
            instead of individual files).

    Exits with an error message if the current directory is not a Git
    repo or if 'dstack config' has not been run yet.
    """

    def _header(title: str) -> str:
        # Magenta column title, matching the coloring used elsewhere.
        return f"{colorama.Fore.LIGHTMAGENTA_EX}{title}{colorama.Fore.RESET}"

    def _print_totals(s3_client, bucket, artifact_paths):
        # One row per artifact: name, total size, number of keys.
        table_rows = []
        for artifact_path in artifact_paths:
            keys_total, total_size = list_artifact(s3_client, bucket,
                                                   artifact_path)
            table_rows.append([
                short_artifact_path(artifact_path),
                sizeof_fmt(total_size), keys_total
            ])
        print(tabulate(table_rows,
                       headers=[_header("ARTIFACT"), _header("SIZE"),
                                _header("FILES")],
                       tablefmt="plain"))

    def _print_files(s3_client, bucket, artifact_paths):
        # One row per file; the artifact name appears only on its first
        # row to visually group files by artifact.
        table_rows = []
        for artifact_path in artifact_paths:
            files = list_artifact_files(s3_client, bucket, artifact_path)
            artifact_name = short_artifact_path(artifact_path)
            header_added = False
            for file, size in files:
                # Skip empty directory-placeholder keys (trailing '/'
                # with zero size).
                if (len(file) > 0 and not file.endswith('/')) or size > 0:
                    table_rows.append([
                        artifact_name if not header_added else "",
                        file,
                        sizeof_fmt(size)
                    ])
                    header_added = True
        print(tabulate(table_rows,
                       headers=[_header("ARTIFACT"), _header("FILE"),
                                _header("SIZE")],
                       tablefmt="plain"))

    try:
        dstack_config = get_config()
        # TODO: Support non-default profiles
        profile = dstack_config.get_profile("default")
        user_info = get_user_info(profile)

        jobs = get_jobs(args.run_name, profile)
        if args.workflow_name is not None:
            jobs = [j for j in jobs
                    if j["workflow_name"] == args.workflow_name]

        for job in jobs:
            # Use the job's own bucket only when the user has a custom
            # configuration AND the job carries a bucket; otherwise fall
            # back to the default configuration's bucket.
            if (user_info.get("user_configuration") is not None
                    and job.get("user_artifacts_s3_bucket") is not None):
                artifacts_s3_bucket = job["user_artifacts_s3_bucket"]
            else:
                artifacts_s3_bucket = \
                    user_info["default_configuration"]["artifacts_s3_bucket"]
            artifact_paths = job.get("artifact_paths")
            if not artifact_paths:
                print("No artifacts")
            else:
                # The client depends only on the job — build it once per
                # job instead of once per artifact path.
                s3_client = boto3_client(user_info, "s3", job)
                if args.total is True:
                    _print_totals(s3_client, artifacts_s3_bucket,
                                  artifact_paths)
                else:
                    _print_files(s3_client, artifacts_s3_bucket,
                                 artifact_paths)

    except InvalidGitRepositoryError:
        sys.exit(f"{os.getcwd()} is not a Git repo")
    except ConfigurationError:
        sys.exit("Call 'dstack config' first")