def _ls_normal_file(
    gas: GAS,  # pylint: disable=unused-argument
    tbrn_info: TBRN,  # pylint: disable=unused-argument
    list_all_files: bool,  # pylint: disable=unused-argument
    show_total_num: bool,  # pylint: disable=unused-argument
) -> None:
    """Reject listing a single normal file.

    All parameters exist only to keep the signature uniform with the other
    "_ls_*" dispatch targets; none of them is used.

    NOTE(review): a second ``_ls_normal_file`` with a real implementation also
    appears later in this file -- in a single module the later definition wins;
    confirm which one is intended to remain.
    """
    error("List for specific file is not supported yet")
def _implement_log(  # pylint: disable=too-many-arguments
    obj: ContextInfo,
    tbrn: str,
    max_count: Optional[int],
    oneline: bool,
    is_all: bool,
    graph: bool,
    show_drafts: bool,
) -> None:
    """Print the commit (and optionally draft) history of a dataset.

    Arguments:
        obj: The CLI context carrying config and client access.
        tbrn: The TBRN of the dataset whose history is logged.
        max_count: Maximum number of log entries to print; ``None`` means all.
        oneline: Whether to print each entry in condensed one-line form.
        is_all: Whether to log every branch instead of a single revision.
        graph: Whether to render the history as an ASCII commit graph.
        show_drafts: Whether open drafts are printed alongside commits.

    """
    gas = obj.get_gas()
    tbrn_info = TBRN(tbrn=tbrn)
    if tbrn_info.type != TBRNType.DATASET:
        error(f'To log commits, "{tbrn}" must be a dataset')
    # The log command must work on both normal and fusion datasets, which is
    # only reachable through this protected helper.
    dataset_client = gas._get_dataset_with_any_type(  # pylint: disable=protected-access
        tbrn_info.dataset_name
    )
    # Map each commit id to the branch names pointing at it, so printers can
    # decorate log entries with branch labels.
    commit_id_to_branches: DefaultDict[str, List[str]] = defaultdict(list)
    for branch in dataset_client.list_branches():
        commit_id_to_branches[branch.commit_id].append(branch.name)
    if is_all:
        revisions: List[Optional[str]] = [branch.name for branch in dataset_client.list_branches()]
    else:
        # Fall back to the current branch when the TBRN names no revision.
        revisions = (
            [tbrn_info.revision] if tbrn_info.revision else [dataset_client.status.branch_name]
        )
    Printer: Union[Type[_GraphPrinter], Type[_Printer]] = _GraphPrinter if graph else _Printer
    # islice applies max_count lazily so unbounded histories are not realized.
    message_generator = islice(
        Printer(
            dataset_client, revisions, commit_id_to_branches, oneline, show_drafts=show_drafts
        ).generate_commits_and_drafts_messages(),
        max_count,
    )
    _echo_messages(message_generator)
def _delete_tag(dataset_client: DatasetClientType, tbrn_info: TBRN) -> None:
    """Delete the tag carried by the given TBRN from the dataset.

    Arguments:
        dataset_client: The client of the dataset to operate on.
        tbrn_info: The parsed TBRN whose revision part holds the tag name.

    """
    revision = tbrn_info.revision
    if not revision:
        error(f'To delete a tag, "{tbrn_info.get_tbrn()}" must have a tag name')
    dataset_client.delete_tag(revision)
    colored = TBRN(dataset_client.name, revision=revision).get_colored_tbrn()
    click.echo(f'Successfully deleted tag "{colored}"')
def _check_key_and_value(key: str, value: str) -> None:
    """Validate that *value* fits the expected type of config *key*.

    Arguments:
        key: The config option name being set.
        value: The raw string value supplied by the user.

    """
    # Numeric options must parse as a non-negative integer.
    if key in {"timeout", "max_retries"} and not value.isdigit():
        error(f'The option "{key}" need integer value.')
    elif key == "is_internal" and value.lower() not in {"true", "false", "0", "1"}:
        error('The option "is_internal" need True(1) or False(0) value.')
def _implement_config(obj: ContextInfo, key: str, value: str, unset: bool) -> None:
    """Show, set or unset values in the "config" section of the CLI config.

    Arguments:
        obj: The CLI context carrying the config parser.
        key: The option name; empty to dump every configured option.
        value: The value to store; empty to show or unset the option.
        unset: Whether to remove the option instead of showing it.

    """
    _check_args_and_options(key, value, unset)
    parser = obj.config_parser
    if not parser.has_section("config"):
        parser.add_section("config")
    section = parser["config"]

    # No key: dump every configured key-value pair.
    if not key:
        for name, content in section.items():
            click.echo(f"{name} = {content}\n")
        return

    # Key and value given: validate, then store the new setting.
    if value:
        _check_key_and_value(key, value)
        section[key] = value
        obj.write_config()
        return

    # Key only: show or unset the existing entry.
    if key not in section:
        error(f"{key} has not been configured yet")
    if unset:
        del section[key]
        obj.write_config(show_message=False)
        click.echo(f'Successfully unset "{key}"')
        return
    click.echo(f"{key} = {section[key]}\n")
def _echo_draft(
    dataset_client: DatasetClientType,
    title: str = "",
    description: str = "",
    branch_name: Optional[str] = None,
) -> None:
    """Echo one draft in the full multi-line format.

    Arguments:
        dataset_client: The client of the dataset owning the draft.
        title: The draft title; a placeholder is shown when empty.
        description: The draft description; omitted when empty.
        branch_name: The branch the draft is based on; required.

    """
    if not branch_name:
        error("Draft should be created based on a branch.")
    branch = dataset_client.get_branch(branch_name)
    # The root commit id means the branch has no commits yet, so no id is shown.
    commit_id = f"({branch.commit_id})" if branch.commit_id != ROOT_COMMIT_ID else ""
    shown_title = title if title else "<no title>"
    body = f"\n\n{indent(description, INDENT)}" if description else ""
    click.echo(_FULL_DRAFT_MESSAGE.format(branch_name, commit_id, f"{shown_title}{body}"))
def _create_branch(dataset_client: DatasetClientType, name: str) -> None:
    """Create a branch named *name* on the dataset's current commit.

    Arguments:
        dataset_client: The client of the dataset to operate on.
        name: The name of the branch to create.

    """
    # Branching only makes sense from a committed revision, never a draft.
    if dataset_client.status.is_draft:
        error("Branch cannot be created from a draft")
    dataset_client.create_branch(name)
    colored = TBRN(dataset_client.name, revision=name).get_colored_tbrn()
    click.echo(f'Successfully created branch "{colored}"')
def _implement_dataset(obj: ContextInfo, tbrn: str, is_delete: bool, yes: bool) -> None:
    """List, create or delete datasets.

    Arguments:
        obj: The CLI context carrying client access.
        tbrn: The dataset TBRN; empty to list all accessible datasets.
        is_delete: Whether to delete instead of create the dataset.
        yes: Whether to skip the interactive delete confirmation.

    """
    gas = obj.get_gas()

    # Without a TBRN the command lists every accessible dataset.
    if not tbrn:
        if is_delete:
            error("Missing argument TBRN")
        for dataset_name in gas.list_dataset_names():
            click.echo(TBRN(dataset_name).get_tbrn())
        return

    tbrn_info = TBRN(tbrn=tbrn)
    if tbrn_info.type != TBRNType.DATASET:
        error(f'"{tbrn}" is not a dataset')
    colored_tbrn = tbrn_info.get_colored_tbrn()

    if not is_delete:
        gas.create_dataset(tbrn_info.dataset_name)
        click.echo(f'Successfully created dataset "{colored_tbrn}"')
        return

    # Deletion is destructive: require explicit confirmation unless -y given.
    if not yes:
        click.confirm(
            f'Dataset "{colored_tbrn}" will be completely deleted.\nDo you want to continue?',
            abort=True,
        )
    gas.delete_dataset(tbrn_info.dataset_name)
    click.echo(f'Successfully deleted dataset "{colored_tbrn}"')
def _ls_dataset(gas: GAS, tbrn_info: TBRN, list_all_files: bool, show_total_num: bool) -> None:
    """List the segments of a dataset, or every file when -a is given.

    Arguments:
        gas: The client for the TensorBay service.
        tbrn_info: The parsed TBRN of the dataset.
        list_all_files: Whether to list every file instead of segment names.
        show_total_num: Whether to print a leading "total N" line.

    """
    dataset_client = get_dataset_client(gas, tbrn_info)
    segment_names = dataset_client.list_segment_names()

    if not list_all_files:
        if show_total_num:
            click.echo(f"total {len(segment_names)}")
        for name in segment_names:
            click.echo(TBRN(tbrn_info.dataset_name, name).get_tbrn())
        return

    if isinstance(dataset_client, FusionDatasetClient):
        error('"-a" flag is not supported for fusion dataset yet')

    all_paths = [dataset_client.get_segment(name).list_data_paths() for name in segment_names]
    if show_total_num:
        click.echo(f"total {sum(map(len, all_paths))}")
    for name, paths in zip(segment_names, all_paths):
        _echo_data(
            tbrn_info.dataset_name,
            tbrn_info.draft_number,
            tbrn_info.revision,
            name,
            paths,
        )
def __init__(
    self,
    dataset_client: DatasetClientType,
    revisions: List[Optional[str]],
    commit_id_to_branches: Dict[str, List[str]],
    oneline: bool,
    *,
    show_drafts: bool,
):
    """Collect and sort the commit logs (and optionally drafts) to print.

    Arguments:
        dataset_client: Client of the dataset whose history is printed.
        revisions: Revisions to log; a ``None`` entry means the client's
            default revision.
        commit_id_to_branches: Mapping from commit id to the branch names
            pointing at that commit, used to decorate the output.
        oneline: Whether to use the condensed one-line message format.
        show_drafts: Whether open drafts are printed alongside commits.

    """
    all_commit_logs = list(map(dataset_client.list_commits, revisions))
    all_drafts: List[Draft] = []
    error_message = f'Dataset "{dataset_client.name}" has no commit history'
    if show_drafts:
        error_message += " or open drafts"
        for revision in revisions:
            all_drafts.extend(dataset_client.list_drafts(branch_name=revision))
    if not all_commit_logs[0]:
        # No commits at all is only acceptable when there are drafts to show.
        if not all_drafts:
            error(error_message)
        self._sorted_commit_logs = []
    else:
        # Sort logs from different branches by the date of the latest commit of each branch.
        self._sorted_commit_logs = sorted(all_commit_logs, key=lambda x: x[0].committer.date)
    self._commit_id_to_branches = commit_id_to_branches
    # Pick both message renderers once, matching the requested verbosity.
    self._commit_printer, self._draft_printer = (
        (_get_oneline_commit_message, _get_oneline_draft_message)
        if oneline
        else (_get_full_commit_message, _get_full_draft_message)
    )
    self._sorted_drafts = sorted(all_drafts, key=lambda x: x.updated_at)
    # One sort key (latest-commit date) per branch log, parallel to
    # self._sorted_commit_logs, for merging commits with drafts later.
    self._keys = [log[0].committer.date for log in self._sorted_commit_logs]
def _implement_cp(  # pylint: disable=too-many-arguments
    obj: ContextInfo,
    local_paths: Iterable[str],
    tbrn: str,
    is_recursive: bool,
    jobs: int,
    skip_uploaded_files: bool,
) -> None:
    """Upload local files or directories to a dataset segment.

    Arguments:
        obj: The CLI context carrying client access.
        local_paths: Local files or directories to upload.
        tbrn: The target segment or file TBRN.
        is_recursive: Whether directories are uploaded recursively.
        jobs: The number of concurrent upload jobs.
        skip_uploaded_files: Whether already-uploaded files are skipped.

    """
    gas = obj.get_gas()
    tbrn_info = TBRN(tbrn=tbrn)
    dataset_client = get_dataset_client(gas, tbrn_info, is_fusion=False)
    if tbrn_info.type not in (TBRNType.SEGMENT, TBRNType.NORMAL_FILE):
        error(f'"{tbrn}" is not a segment or file type')

    target_remote_path = tbrn_info.remote_path if tbrn_info.type == TBRNType.NORMAL_FILE else ""
    local_abspaths = [os.path.abspath(local_path) for local_path in local_paths]

    # A single regular file copied onto an explicit non-directory remote path
    # is uploaded directly; everything else goes through a segment upload.
    is_single_file_copy = (
        len(local_abspaths) == 1
        and not os.path.isdir(local_abspaths[0])
        and bool(target_remote_path)
        and not target_remote_path.endswith("/")
    )
    if is_single_file_copy:
        segment_client = dataset_client.get_or_create_segment(tbrn_info.segment_name)
        segment_client.upload_file(local_abspaths[0], target_remote_path)
        return

    segment = _get_segment(tbrn_info.segment_name, local_abspaths, target_remote_path, is_recursive)
    dataset_client.upload_segment(
        segment, jobs=jobs, skip_uploaded_files=skip_uploaded_files, _is_cli=True
    )
def _delete_branch(dataset_client: DatasetClientType, tbrn_info: TBRN) -> None:
    """Delete the branch carried by the given TBRN from the dataset.

    Arguments:
        dataset_client: The client of the dataset to operate on.
        tbrn_info: The parsed TBRN whose revision part holds the branch name.

    """
    branch_name = tbrn_info.revision
    if not branch_name:
        error(f'To delete a branch, "{tbrn_info.get_tbrn()}" must have a branch name')
    # Branch deletion is only exposed through this protected client method.
    dataset_client._delete_branch(branch_name)  # pylint: disable=protected-access
    click.echo(f'Successfully deleted branch "{tbrn_info.get_colored_tbrn()}"')
def _create_tag(dataset_client: DatasetClientType, name: str) -> None:
    """Create a tag named *name* on the dataset's current commit.

    Arguments:
        dataset_client: The client of the dataset to operate on.
        name: The name of the tag to create.

    """
    status = dataset_client.status
    if status.is_draft:
        error(f'To create a tag, "{dataset_client.name}" cannot be in draft status')
    # A tag needs an existing commit to point at.
    if not status.commit_id:
        error(f'To create a tag, "{dataset_client.name}" should have commit history')
    dataset_client.create_tag(name=name)
    colored = TBRN(dataset_client.name, revision=name).get_colored_tbrn()
    click.echo(f'Successfully created tag "{colored}"')
def _unset_auth(obj: ContextInfo, is_all: bool) -> None:
    """Remove stored authentication info from the CLI config.

    Arguments:
        obj: The CLI context carrying the config parser and profile name.
        is_all: Whether to drop every profile instead of the current one.

    """
    config_parser = obj.config_parser
    if is_all:
        config_parser.remove_section("profiles")
    else:
        # remove_option raises when the section itself is missing; either way
        # a missing entry means the profile was never configured.
        try:
            was_removed = config_parser.remove_option("profiles", obj.profile_name)
        except NoSectionError:
            was_removed = False
        if not was_removed:
            error(f'Profile "{obj.profile_name}" does not exist.')
    obj.write_config(show_message=False)
    hint = "all" if is_all else f'"{obj.profile_name}"'
    click.echo(f"Successfully unset {hint} auth info")
def _implement_tag(obj: ContextInfo, tbrn: str, name: str, is_delete: bool) -> None:
    """Dispatch the tag command to list, create or delete a tag.

    Arguments:
        obj: The CLI context carrying client access.
        tbrn: The dataset TBRN to operate on.
        name: The tag name to create; empty to list tags.
        is_delete: Whether to delete the tag named in the TBRN.

    """
    tbrn_info = TBRN(tbrn=tbrn)
    if tbrn_info.type != TBRNType.DATASET:
        error(f'To operate a tag, "{tbrn}" must be a dataset')
    dataset_client = get_dataset_client(obj.get_gas(), tbrn_info)
    if is_delete:
        _delete_tag(dataset_client, tbrn_info)
        return
    if name:
        _create_tag(dataset_client, name)
        return
    _list_tags(dataset_client)
def _implement_branch(obj: ContextInfo, tbrn: str, name: str, verbose: bool, is_delete: bool) -> None:
    """Dispatch the branch command to list, create or delete a branch.

    Arguments:
        obj: The CLI context carrying client access.
        tbrn: The dataset TBRN to operate on.
        name: The branch name to create; empty to list branches.
        verbose: Whether the branch listing shows extra detail.
        is_delete: Whether to delete the branch named in the TBRN.

    """
    tbrn_info = TBRN(tbrn=tbrn)
    if tbrn_info.type != TBRNType.DATASET:
        # Interpolate the raw TBRN string, consistent with the other
        # "_implement_*" commands (previously the TBRN object itself was
        # formatted, which may not render the user's original input).
        error(f'To operate a branch, "{tbrn}" must be a dataset')
    gas = obj.get_gas()
    dataset_client = get_dataset_client(gas, tbrn_info)
    if is_delete:
        _delete_branch(dataset_client, tbrn_info)
        return
    if name:
        _create_branch(dataset_client, name)
    else:
        _list_branches(dataset_client, verbose)
def _get_auth(obj: ContextInfo, is_all: bool) -> None:
    """Echo stored authentication info from the CLI config.

    Arguments:
        obj: The CLI context carrying the config parser and profile name.
        is_all: Whether to show every profile instead of the current one.

    """
    config_parser = obj.config_parser

    if is_all:
        # Showing all profiles silently prints nothing when none exist.
        try:
            profiles = config_parser["profiles"]
        except KeyError:
            return
        for name, content in profiles.items():
            _echo_formatted_profile(name, content)
        return

    profile_name = obj.profile_name
    try:
        profile = config_parser["profiles"][profile_name]
    except KeyError:
        error(f'Profile "{profile_name}" does not exist.')
    _echo_formatted_profile(profile_name, profile)
def _list_drafts(dataset_client: DatasetClientType, tbrn_info: TBRN) -> None:
    """Echo the draft named in the TBRN, or every open draft of the dataset.

    Arguments:
        dataset_client: The client of the dataset to operate on.
        tbrn_info: The parsed TBRN, optionally carrying a draft number.

    """
    if tbrn_info.revision:
        error(f'list drafts based on given revision "{tbrn_info.get_tbrn()}" is not supported')

    if tbrn_info.is_draft:
        # A specific draft number was given: show exactly that draft.
        draft = dataset_client.get_draft(tbrn_info.draft_number)
        click.echo(f"Draft: {tbrn_info.get_tbrn()}")
        _echo_draft(dataset_client, draft.title, draft.description, draft.branch_name)
        return

    for draft in dataset_client.list_drafts():
        click.echo(f"Draft: {TBRN(tbrn_info.dataset_name, draft_number=draft.number).get_tbrn()}")
        _echo_draft(dataset_client, draft.title, draft.description, draft.branch_name)
def _echo_user_info(
    access_key: str, url: str, obj: ContextInfo, profile_name: Optional[str] = None
) -> None:
    """Echo the user and team info bound to an AccessKey.

    Arguments:
        access_key: The AccessKey to look up.
        url: The service URL; echoed when non-empty.
        obj: The CLI context used to build the client.
        profile_name: The profile label to print; defaults to the current one.

    """
    gas_client = obj.get_gas(access_key, url)
    profile_name = profile_name or obj.profile_name
    try:
        user_info = gas_client.get_user()
    except UnauthorizedError:
        error(f"{access_key} is not a valid AccessKey")
    click.echo(f"{profile_name}\n{INDENT}USER: {user_info.name}")
    team = user_info.team
    if team:
        click.echo(f"{INDENT}TEAM: {team.name}")
    click.echo(f"{INDENT}{access_key}")
    if url:
        click.echo(f"{INDENT}{url}\n")
def _get_segment(
    segment_name: str,
    local_abspaths: Iterable[str],
    remote_path: str,
    is_recursive: bool,
) -> Segment:
    """Get the pair of local_path and remote_path.

    Arguments:
        segment_name: The name of the segment these data belong to.
        local_abspaths: A list of local abstract paths, could be folder or file.
        remote_path: The remote object path, not necessarily end with '/'.
        is_recursive: Whether copy directories recursively.

    Returns:
        A segment contains mapping data.

    """
    segment = Segment(segment_name)
    for local_abspath in local_abspaths:
        # A plain file maps directly under remote_path, keeping its basename.
        if not os.path.isdir(local_abspath):
            data = Data(
                local_abspath,
                target_remote_path=str(PurePosixPath(remote_path, os.path.basename(local_abspath))),
            )
            segment.append(data)
            continue
        # Directories require the explicit -r flag, mirroring `cp -r`.
        if not is_recursive:
            error("Local paths include directories, please use -r option")
        # normpath strips any trailing separator so basename yields the
        # directory's own name rather than an empty string.
        local_abspath = os.path.normpath(local_abspath)
        folder_name = os.path.basename(local_abspath)
        for root, _, filenames in os.walk(local_abspath):
            # Relative directory under the walked root; empty for the root
            # itself so no stray "." component appears in the remote path.
            relpath = os.path.relpath(root, local_abspath) if root != local_abspath else ""
            for filename in filenames:
                data = Data(
                    os.path.join(root, filename),
                    # Path(...) joins with the native separator; wrapping the
                    # resulting path object in PurePosixPath reuses its parsed
                    # parts, producing a '/'-separated remote path on every
                    # platform (passing the raw string would not).
                    target_remote_path=str(
                        PurePosixPath(Path(remote_path, folder_name, relpath, filename))
                    ),
                )
                segment.append(data)
    return segment
def _edit_draft(
    dataset_client: DatasetClientType,
    tbrn_info: TBRN,
    message: Tuple[str, ...],
    config_parser: ConfigParser,
) -> None:
    """Edit the title and description of the current draft.

    Arguments:
        dataset_client: The client of the dataset whose draft is edited.
        tbrn_info: The parsed TBRN, which must carry a draft number.
        message: Message parts given on the command line, if any.
        config_parser: The CLI config, used to locate the editor.

    """
    if not tbrn_info.is_draft:
        error("Draft number is required when editing draft")

    # Pre-fill the editor with the draft's current title and description.
    current = dataset_client.get_draft()
    hint_message = format_hint(current.title, current.description, _DRAFT_HINT)
    title, description = edit_message(message, hint_message, config_parser)
    if not title:
        error("Aborting updating draft due to empty draft message")

    dataset_client.update_draft(title=title, description=description)
    click.echo(f'Successfully updated draft "{tbrn_info.get_colored_tbrn()}"')
    _echo_draft(dataset_client, title, description, dataset_client.status.branch_name)
def _ls_normal_file(
    gas: GAS,
    tbrn_info: TBRN,
    list_all_files: bool,  # pylint: disable=unused-argument
    show_total_num: bool,  # pylint: disable=unused-argument
) -> None:
    """Echo a single normal file after verifying it exists remotely.

    Arguments:
        gas: The client for the TensorBay service.
        tbrn_info: The parsed TBRN naming the file.
        list_all_files: Unused; kept for a uniform "_ls_*" signature.
        show_total_num: Unused; kept for a uniform "_ls_*" signature.

    """
    dataset_client = get_dataset_client(gas, tbrn_info)
    if isinstance(dataset_client, FusionDatasetClient):
        error("List data in fusion segment is not supported yet")
    remote_path = tbrn_info.remote_path
    # Fetch the data solely to confirm the remote file actually exists.
    dataset_client.get_segment(tbrn_info.segment_name).get_data(remote_path=remote_path)
    _echo_data(
        tbrn_info.dataset_name,
        tbrn_info.draft_number,
        tbrn_info.revision,
        tbrn_info.segment_name,
        (remote_path,),
    )
def _ls_segment(
    gas: GAS,
    tbrn_info: TBRN,
    list_all_files: bool,  # pylint: disable=unused-argument
    show_total_num: bool,
) -> None:
    """Echo every data path of one segment.

    Arguments:
        gas: The client for the TensorBay service.
        tbrn_info: The parsed TBRN naming the segment.
        list_all_files: Unused; kept for a uniform "_ls_*" signature.
        show_total_num: Whether to print a leading "total N" line.

    """
    dataset_client = get_dataset_client(gas, tbrn_info)
    if isinstance(dataset_client, FusionDatasetClient):
        error("List fusion segment is not supported yet")
    data_paths = dataset_client.get_segment(tbrn_info.segment_name).list_data_paths()
    if show_total_num:
        click.echo(f"total {len(data_paths)}")
    _echo_data(
        tbrn_info.dataset_name,
        tbrn_info.draft_number,
        tbrn_info.revision,
        tbrn_info.segment_name,
        data_paths,
    )
def __init__(
    self,
    dataset_client: DatasetClientType,
    revisions: List[Optional[str]],
    commit_id_to_branches: Dict[str, List[str]],
    oneline: bool,
):
    """Collect and sort the commit logs to print.

    Arguments:
        dataset_client: Client of the dataset whose history is printed.
        revisions: Revisions to log; a ``None`` entry means the client's
            default revision.
        commit_id_to_branches: Mapping from commit id to the branch names
            pointing at that commit, used to decorate the output.
        oneline: Whether to use the condensed one-line log format.

    """
    all_commits = list(map(dataset_client.list_commits, revisions))
    # Idiomatic truthiness check instead of "len(...) == 0".
    if not all_commits[0]:
        error(f'Dataset "{dataset_client.name}" has no commit history')
    self._commit_id_to_branches = commit_id_to_branches
    self._printer = _get_oneline_log if oneline else _get_full_log
    # Sort commits from different branches by the date of the latest commit of each branch.
    self._sorted_commits = sorted(all_commits, key=lambda x: x[0].committer.date)
    # One sort key (latest-commit date) per branch, parallel to _sorted_commits.
    self._keys = [commits[0].committer.date for commits in self._sorted_commits]
def _create_draft(
    dataset_client: DatasetClientType,
    tbrn_info: TBRN,
    message: Tuple[str, ...],
    config_parser: ConfigParser,
) -> None:
    """Create a new draft on the dataset's current branch.

    Arguments:
        dataset_client: The client of the dataset to operate on.
        tbrn_info: The parsed TBRN; must not already name a draft.
        message: Message parts given on the command line, if any.
        config_parser: The CLI config, used to locate the editor.

    """
    if tbrn_info.is_draft:
        error(f'Create a draft in draft status "{tbrn_info.get_tbrn()}" is not permitted')

    title, description = edit_message(message, _DRAFT_HINT, config_parser)
    if not title:
        error("Aborting creating draft due to empty draft message")

    dataset_client.create_draft(title=title, description=description)
    status = dataset_client.status
    colored = TBRN(tbrn_info.dataset_name, draft_number=status.draft_number).get_colored_tbrn()
    click.echo(f'Successfully created draft "{colored}"')
    _echo_draft(dataset_client, title, description, status.branch_name)
def _update_profile(obj: ContextInfo, arg1: str, arg2: str) -> None:
    """Validate an AccessKey and store it in the current profile.

    Arguments:
        obj: The CLI context carrying the config parser and profile name.
        arg1: The AccessKey, or the URL when two arguments were given.
        arg2: The AccessKey when two arguments were given, else empty.

    """
    # With two arguments the order is (url, access_key); with one it is
    # (access_key,).
    access_key, url = (arg2, arg1) if arg2 else (arg1, arg2)
    profile_name = obj.profile_name
    config_parser = obj.config_parser

    gas_client = obj.get_gas(access_key, url)
    try:
        user_info = gas_client.get_user()
    except UnauthorizedError:
        error(f"{access_key} is not a valid AccessKey")

    if not config_parser.has_section("profiles"):
        config_parser.add_section("profiles")
    config_parser["profiles"][profile_name] = form_profile_value(access_key, url)
    obj.write_config(show_message=False)

    parts = [
        f'Successfully set authentication info of "{click.style(user_info.name, bold=True)}"'
    ]
    if user_info.team:
        parts.append(f' in "{click.style(user_info.team.name, bold=True)}" team')
    if profile_name != "default":
        parts.append(f' into profile "{click.style(profile_name, bold=True)}"')
    click.echo("".join(parts))
def __init__(
    self,
    dataset_client: DatasetClientType,
    revisions: List[Optional[str]],
    commit_id_to_branches: Dict[str, List[str]],
    oneline: bool,
    *,
    show_drafts: bool,
):
    """Build the commit-graph printing state for the given revisions.

    Arguments:
        dataset_client: Client of the dataset whose history is drawn.
        revisions: Revisions whose histories form the graph.
        commit_id_to_branches: Mapping from commit id to the branch names
            pointing at that commit, used to decorate graph nodes.
        oneline: Whether nodes are rendered in one-line form.
        show_drafts: Whether open drafts are included in the graph.

    """
    self._key_to_branches = commit_id_to_branches
    # Choose the node renderer once, based on the requested verbosity.
    self._graph_printer = self._add_oneline_graph if oneline else self._add_full_graph
    self._sorted_leaves = self._build_tree(dataset_client, revisions, show_drafts)
    error_message = f'Dataset "{dataset_client.name}" has no commit history'
    if show_drafts:
        error_message += " or open drafts"
    if not self._sorted_leaves:
        error(error_message)
    # Drawing state: the current leaf pointer, the pending merge position,
    # and a cycling palette holding one color per currently drawn layer.
    self._pointer = 0
    self._merge_pointer: Optional[int] = None
    self._log_colors = cycle(_LOG_COLORS)
    self._layer_colors: List[str] = [next(self._log_colors)]
def _implement_draft(  # pylint: disable=too-many-arguments
    obj: ContextInfo,
    tbrn: str,
    is_list: bool,
    edit: bool,
    close: bool,
    message: Tuple[str, ...],
) -> None:
    """Dispatch the draft command to list, edit, close or create a draft.

    Arguments:
        obj: The CLI context carrying client access and config.
        tbrn: The dataset TBRN to operate on.
        is_list: Whether to list drafts.
        edit: Whether to edit the draft named in the TBRN.
        close: Whether to close the draft named in the TBRN.
        message: Message parts given on the command line, if any.

    """
    gas = obj.get_gas()
    tbrn_info = TBRN(tbrn=tbrn)
    # Validate the TBRN type before fetching the dataset client, matching the
    # other "_implement_*" commands and avoiding a needless service request
    # for an obviously invalid TBRN.
    if tbrn_info.type != TBRNType.DATASET:
        error(f'To operate a draft, "{tbrn}" must be a dataset')
    dataset_client = get_dataset_client(gas, tbrn_info)
    if is_list:
        _list_drafts(dataset_client, tbrn_info)
    elif edit:
        _edit_draft(dataset_client, tbrn_info, message, obj.config_parser)
    elif close:
        _close_draft(dataset_client, tbrn_info)
    else:
        _create_draft(dataset_client, tbrn_info, message, obj.config_parser)
def _interactive_auth(url: Optional[str] = None) -> str:
    """Interactively prompt the user for an AccessKey.

    Arguments:
        url: An explicit service URL; when given, only its developer page is
            shown instead of the regional defaults.

    Returns:
        The validated AccessKey entered by the user.

    """
    click.secho(
        "Please visit and login to the TensorBay website to generate your AccessKey", bold=True
    )
    click.secho(
        "Note: TensorBay has multi-regional websites, "
        "please visit the corresponding website based on your location for better experience\n",
        fg="bright_cyan",
    )
    if url:
        developer_page = click.style(urljoin(url, "/tensorbay/developer"), underline=True)
        click.echo(f" > {developer_page}\n")
    else:
        global_link = click.style("https://gas.graviti.com/tensorbay/developer", underline=True)
        cn_link = click.style("https://gas.graviti.cn/tensorbay/developer", underline=True)
        click.echo(f" > {global_link} (Global site)")
        click.echo(f" > {cn_link} (Chinese site)\n")
    access_key = click.prompt(click.style("Paste your AccessKey here", bold=True)).strip()
    if not is_accesskey(access_key):
        error("Wrong accesskey format")
    return access_key  # type: ignore[no-any-return]
def _check_args_and_options(key: str, value: str, unset: bool) -> None:
    """Validate the argument/option combination of the config command.

    Arguments:
        key: The config option name; may be empty.
        value: The value to set; may be empty.
        unset: Whether the --unset option was given.

    """
    # --unset takes a key only; passing a value means the user mixed modes.
    if unset and value:
        error('Use "--unset" option to unset config or use "key" and "value" to set config')
    if unset and not key:
        error('Use "--unset" option with "key"')
    if key not in {"editor", "timeout", "is_internal", "max_retries", ""}:
        error(f'The option "{key}" is not supported to configure currently.')