def ls(
    ctx, project, io, to_csv, query, sort, limit, offset, columns, offline, offline_path
):
    """List runs for this project.

    Uses /docs/core/cli/#caching

    Examples:

    Get all runs:

    \b
    $ polyaxon ops ls

    Get all runs with status {created or running}, and creation date between
    2018-01-01 and 2018-01-02, and params activation equal to sigmoid,
    and metric loss less than or equal to 0.2:

    \b
    $ polyaxon ops ls \
      -q "status:created|running, started_at:2018-01-01..2018-01-02, \
      params.activation:sigmoid, metrics.loss:<=0.2"

    Get all runs sorted by update date:

    \b
    $ polyaxon ops ls -s "-updated_at"

    Get all runs of kind job:

    \b
    $ polyaxon ops ls -q "kind: job"

    Get all runs of kind service:

    \b
    $ polyaxon ops ls -q "kind: service"
    """
    if offline:
        # Offline mode: read run metadata from local `run_data.json` files
        # instead of querying the API.
        offline_path = offline_path or container_contexts.CONTEXT_OFFLINE_ROOT
        offline_path_format = "{}/{{}}/run_data.json".format(offline_path)
        if not os.path.exists(offline_path) or not os.path.isdir(offline_path):
            Printer.print_error(
                f"Could not list offline runs, the path `{offline_path}` "
                f"does not exist or is not a directory."
            )
            sys.exit(1)
        results = []
        for uid in os.listdir(offline_path):
            run_path = offline_path_format.format(uid)
            if os.path.exists(run_path):
                results.append(RunConfigManager.read_from_path(run_path))
            else:
                Printer.print_warning(f"Skipping run {uid}, offline data not found.")
    else:
        # Online mode: resolve the owner/project and query the API.
        owner, project_name = get_project_or_local(
            project or ctx.obj.get("project"), is_cli=True
        )
        try:
            polyaxon_client = RunClient(owner=owner, project=project_name)
            response = polyaxon_client.list(
                limit=limit, offset=offset, query=query, sort=sort
            )
        except (ApiException, HTTPError) as e:
            handle_cli_error(
                e,
                message="Could not get runs for project `{}`.".format(project_name),
            )
            sys.exit(1)
        meta = get_meta_response(response)
        if meta:
            Printer.print_header(
                "Runs for project `{}/{}`.".format(owner, project_name)
            )
            Printer.print_header("Navigation:")
            dict_tabulate(meta)
        else:
            Printer.print_header(
                "No runs found for project `{}/{}`.".format(owner, project_name)
            )
        results = response.results

    objects = [Printer.add_status_color(o.to_dict()) for o in results]
    columns = validate_tags(columns)
    if io:
        # Flatten inputs/outputs into prefixed columns (`in.*` / `out.*`).
        objects, prefixed_columns = flatten_keys(
            objects=objects,
            columns=["inputs", "outputs"],
            columns_prefix={"inputs": "in", "outputs": "out"},
        )
        if columns:
            columns = {prefixed_columns.get(col, col) for col in columns}
        if to_csv:
            objects = list_dicts_to_csv(
                objects,
                include_attrs=columns,
                exclude_attrs=DEFAULT_EXCLUDE,
            )
        else:
            objects = list_dicts_to_tabulate(
                objects,
                include_attrs=columns,
                exclude_attrs=DEFAULT_EXCLUDE,
                humanize_values=True,
                upper_keys=True,
            )
    else:
        if to_csv:
            objects = list_dicts_to_csv(
                objects,
                include_attrs=columns,
                exclude_attrs=DEFAULT_EXCLUDE + ["inputs", "outputs"],
            )
        else:
            objects = list_dicts_to_tabulate(
                objects,
                include_attrs=columns,
                exclude_attrs=DEFAULT_EXCLUDE + ["inputs", "outputs"],
                humanize_values=True,
                upper_keys=True,
            )
    if objects:
        if to_csv:
            filename = "./results.csv"
            write_csv(objects, filename=filename)
            Printer.print_success("CSV file generated: `{}`".format(filename))
        else:
            Printer.print_header("Runs:")
            objects.pop("project_name", None)
            dict_tabulate(objects, is_list_dict=True)
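# Hedged sketch (not part of the original module): the offline branch above expects a
# layout of `<offline_path>/<run_uuid>/run_data.json`, which follows directly from
# `offline_path_format`. The helper below writes a minimal file in that layout for
# local experimentation; the default root, uuid, and payload fields are illustrative
# assumptions, not a guaranteed schema.
def _example_offline_run_layout(offline_root="/tmp/plx/offline"):
    import json

    run_dir = os.path.join(offline_root, "run-uuid-1")
    os.makedirs(run_dir, exist_ok=True)
    with open(os.path.join(run_dir, "run_data.json"), "w") as f:
        # Minimal, illustrative payload; a real run_data.json is produced by
        # Polyaxon's offline tooling and carries the full run config.
        json.dump({"uuid": "run-uuid-1", "status": "succeeded"}, f)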
def ls(ctx, io, to_csv, query, sort, limit, offset, columns):
    """List runs for this project.

    Uses /docs/core/cli/#caching

    Examples:

    Get all runs:

    \b
    $ polyaxon ops ls

    Get all runs with status {created or running}, and creation date between
    2018-01-01 and 2018-01-02, and params activation equal to sigmoid,
    and metric loss less than or equal to 0.2:

    \b
    $ polyaxon ops ls \
      -q "status:created|running, started_at:2018-01-01..2018-01-02, \
      params.activation:sigmoid, metrics.loss:<=0.2"

    Get all runs sorted by update date:

    \b
    $ polyaxon ops ls -s "-updated_at"

    Get all runs of kind job:

    \b
    $ polyaxon ops ls -q "kind: job"

    Get all runs of kind service:

    \b
    $ polyaxon ops ls -q "kind: service"
    """
    owner, project_name = get_project_or_local(ctx.obj.get("project"), is_cli=True)
    try:
        polyaxon_client = RunClient(owner=owner, project=project_name)
        response = polyaxon_client.list(
            limit=limit, offset=offset, query=query, sort=sort
        )
    except (ApiException, HTTPError) as e:
        handle_cli_error(
            e, message="Could not get runs for project `{}`.".format(project_name)
        )
        sys.exit(1)
    meta = get_meta_response(response)
    if meta:
        Printer.print_header("Runs for project `{}/{}`.".format(owner, project_name))
        Printer.print_header("Navigation:")
        dict_tabulate(meta)
    else:
        Printer.print_header(
            "No runs found for project `{}/{}`.".format(owner, project_name)
        )
    objects = [Printer.add_status_color(o.to_dict()) for o in response.results]
    columns = validate_tags(columns)
    if io:
        # Flatten inputs/outputs into prefixed columns (`in.*` / `out.*`).
        objects, prefixed_columns = flatten_keys(
            objects=objects,
            columns=["inputs", "outputs"],
            columns_prefix={"inputs": "in", "outputs": "out"},
        )
        if columns:
            columns = {prefixed_columns.get(col, col) for col in columns}
        if to_csv:
            objects = list_dicts_to_csv(
                objects,
                include_attrs=columns,
                exclude_attrs=DEFAULT_EXCLUDE,
            )
        else:
            objects = list_dicts_to_tabulate(
                objects,
                include_attrs=columns,
                exclude_attrs=DEFAULT_EXCLUDE,
                humanize_values=True,
                upper_keys=True,
            )
    else:
        if to_csv:
            objects = list_dicts_to_csv(
                objects,
                include_attrs=columns,
                exclude_attrs=DEFAULT_EXCLUDE,
            )
        else:
            objects = list_dicts_to_tabulate(
                objects,
                include_attrs=columns,
                exclude_attrs=DEFAULT_EXCLUDE,
                humanize_values=True,
                upper_keys=True,
            )
    if objects:
        if to_csv:
            write_csv(objects)
        else:
            Printer.print_header("Runs:")
            objects.pop("project_name", None)
            dict_tabulate(objects, is_list_dict=True)
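# Hedged sketch (not part of the original module): the filters shown in the docstring
# examples can also be passed programmatically through `RunClient.list`, which is the
# same call the command issues above. The owner/project names and filter values below
# are placeholders.
def _example_programmatic_ls():
    client = RunClient(owner="acme", project="vision")
    return client.list(
        limit=20,
        offset=0,
        query="status:created|running, metrics.loss:<=0.2",
        sort="-updated_at",
    )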