def _ls_dataset(gas: GAS, tbrn_info: TBRN, list_all_files: bool, show_total_num: bool) -> None:
    """Echo the segments of a dataset, optionally expanded to every data file.

    Arguments:
        gas: The GAS client.
        tbrn_info: The parsed TBRN pointing at a dataset.
        list_all_files: If True, list the data paths inside each segment
            instead of just the segment TBRNs (not supported for fusion datasets).
        show_total_num: If True, echo a "total N" line first.

    """
    client = get_dataset_client(gas, tbrn_info)
    names = client.list_segment_names()

    if not list_all_files:
        if show_total_num:
            click.echo(f"total {len(names)}")
        for name in names:
            click.echo(TBRN(tbrn_info.dataset_name, name).get_tbrn())
        return

    # The "-a" expansion needs per-segment data paths, which fusion
    # dataset clients do not expose the same way.
    if isinstance(client, FusionDatasetClient):
        error('"-a" flag is not supported for fusion dataset yet')

    paths_per_segment = [client.get_segment(name).list_data_paths() for name in names]
    if show_total_num:
        click.echo(f"total {sum(map(len, paths_per_segment))}")
    for name, paths in zip(names, paths_per_segment):
        _echo_data(
            tbrn_info.dataset_name,
            tbrn_info.draft_number,
            tbrn_info.revision,
            name,
            paths,
        )
def _implement_cp(  # pylint: disable=too-many-arguments
    obj: ContextInfo,
    local_paths: Iterable[str],
    tbrn: str,
    is_recursive: bool,
    jobs: int,
    skip_uploaded_files: bool,
) -> None:
    """Upload local files to a dataset segment identified by *tbrn*.

    A single local file copied onto an explicit remote file path is uploaded
    directly; anything else (multiple files, a directory, or a trailing "/"
    target) is uploaded as a whole segment.

    Arguments:
        obj: The CLI context holding the GAS client and config.
        local_paths: The local paths to upload.
        tbrn: The target TBRN (segment or normal-file type).
        is_recursive: Whether to walk local directories recursively.
        jobs: The number of concurrent upload jobs.
        skip_uploaded_files: Whether to skip files already uploaded.

    """
    gas = obj.get_gas()
    tbrn_info = TBRN(tbrn=tbrn)
    dataset_client = get_dataset_client(gas, tbrn_info, is_fusion=False)

    if tbrn_info.type not in (TBRNType.SEGMENT, TBRNType.NORMAL_FILE):
        error(f'"{tbrn}" is not a segment or file type')

    target_remote_path = tbrn_info.remote_path if tbrn_info.type == TBRNType.NORMAL_FILE else ""
    sources = [os.path.abspath(path) for path in local_paths]

    # Direct file-to-file copy: exactly one local regular file and a
    # concrete (non-directory-style) remote path.
    is_single_file_copy = (
        len(sources) == 1
        and not os.path.isdir(sources[0])
        and bool(target_remote_path)
        and not target_remote_path.endswith("/")
    )

    if is_single_file_copy:
        segment_client = dataset_client.get_or_create_segment(tbrn_info.segment_name)
        segment_client.upload_file(sources[0], target_remote_path)
    else:
        segment = _get_segment(tbrn_info.segment_name, sources, target_remote_path, is_recursive)
        dataset_client.upload_segment(
            segment, jobs=jobs, skip_uploaded_files=skip_uploaded_files, _is_cli=True
        )
def _implement_tag(obj: ContextInfo, tbrn: str, name: str, is_delete: bool) -> None:
    """Create, delete or list tags of a dataset.

    Arguments:
        obj: The CLI context holding the GAS client.
        tbrn: The TBRN of the target dataset.
        name: The tag name to create; empty means "list" unless deleting.
        is_delete: If True, delete the tag named inside *tbrn*.

    """
    info = TBRN(tbrn=tbrn)
    # Tag operations only make sense on a dataset-level TBRN.
    if info.type != TBRNType.DATASET:
        error(f'To operate a tag, "{tbrn}" must be a dataset')

    dataset_client = get_dataset_client(obj.get_gas(), info)

    if is_delete:
        _delete_tag(dataset_client, info)
    elif name:
        _create_tag(dataset_client, name)
    else:
        _list_tags(dataset_client)
def _implement_branch(obj: ContextInfo, tbrn: str, name: str, verbose: bool, is_delete: bool) -> None:
    """Create, delete or list branches of a dataset.

    Arguments:
        obj: The CLI context holding the GAS client.
        tbrn: The TBRN of the target dataset.
        name: The branch name to create; empty means "list" unless deleting.
        verbose: Whether to list branches verbosely.
        is_delete: If True, delete the branch named inside *tbrn*.

    """
    tbrn_info = TBRN(tbrn=tbrn)
    if tbrn_info.type != TBRNType.DATASET:
        # Interpolate the raw tbrn string, consistent with the other
        # _implement_* commands (previously the TBRN object was interpolated).
        error(f'To operate a branch, "{tbrn}" must be a dataset')
    gas = obj.get_gas()
    dataset_client = get_dataset_client(gas, tbrn_info)
    if is_delete:
        _delete_branch(dataset_client, tbrn_info)
    elif name:
        _create_branch(dataset_client, name)
    else:
        _list_branches(dataset_client, verbose)
def _ls_normal_file(
    gas: GAS,
    tbrn_info: TBRN,
    list_all_files: bool,  # pylint: disable=unused-argument
    show_total_num: bool,  # pylint: disable=unused-argument
) -> None:
    """Echo a single remote data file after verifying it exists.

    Arguments:
        gas: The GAS client.
        tbrn_info: The parsed TBRN pointing at a normal file.
        list_all_files: Unused; kept for a uniform _ls_* signature.
        show_total_num: Unused; kept for a uniform _ls_* signature.

    """
    client = get_dataset_client(gas, tbrn_info)
    if isinstance(client, FusionDatasetClient):
        error("List data in fusion segment is not supported yet")

    path = tbrn_info.remote_path
    # Fetch the data once so a missing file surfaces as an error
    # before anything is echoed.
    client.get_segment(tbrn_info.segment_name).get_data(remote_path=path)
    _echo_data(
        tbrn_info.dataset_name,
        tbrn_info.draft_number,
        tbrn_info.revision,
        tbrn_info.segment_name,
        (path,),
    )
def _ls_segment(
    gas: GAS,
    tbrn_info: TBRN,
    list_all_files: bool,  # pylint: disable=unused-argument
    show_total_num: bool,
) -> None:
    """Echo every data path inside one segment.

    Arguments:
        gas: The GAS client.
        tbrn_info: The parsed TBRN pointing at a segment.
        list_all_files: Unused; kept for a uniform _ls_* signature.
        show_total_num: If True, echo a "total N" line first.

    """
    client = get_dataset_client(gas, tbrn_info)
    if isinstance(client, FusionDatasetClient):
        error("List fusion segment is not supported yet")

    data_paths = client.get_segment(tbrn_info.segment_name).list_data_paths()
    if show_total_num:
        click.echo(f"total {len(data_paths)}")
    _echo_data(
        tbrn_info.dataset_name,
        tbrn_info.draft_number,
        tbrn_info.revision,
        tbrn_info.segment_name,
        data_paths,
    )
def _implement_commit(obj: ContextInfo, tbrn: str, message: Tuple[str, ...]) -> None:
    """Commit the draft identified by *tbrn* and echo the resulting revision.

    Arguments:
        obj: The CLI context holding the GAS client and config.
        tbrn: The TBRN of the dataset draft to commit (must include "#<draft>").
        message: The commit message parts supplied on the command line.

    """
    gas = obj.get_gas()
    tbrn_info = TBRN(tbrn=tbrn)
    dataset_client = get_dataset_client(gas, tbrn_info)

    # Guard clauses: committing requires a dataset-level TBRN in draft status.
    if tbrn_info.type != TBRNType.DATASET:
        error(f'To operate a commit, "{tbrn}" must be a dataset')
    if not tbrn_info.is_draft:
        error(f'To commit, "{tbrn}" must be in draft status, like "{tbrn}#1"')

    dataset_client.checkout(draft_number=tbrn_info.draft_number)
    draft = dataset_client.get_draft()

    # Let the user edit the message, pre-filled with the draft's own
    # title/description as a hint.
    hint = format_hint(draft.title, draft.description, _COMMIT_HINT)
    title, description = edit_message(message, hint, obj.config_parser)
    if not title:
        error("Aborting commit due to empty commit message")

    dataset_client.commit(title, description)
    committed = TBRN(tbrn_info.dataset_name, revision=dataset_client.status.commit_id)
    click.echo(
        "Committed successfully: "
        f"{tbrn_info.get_colored_tbrn()} -> {committed.get_colored_tbrn()}"
    )
def _implement_rm(obj: ContextInfo, tbrn: str, is_recursive: bool) -> None:
    """Remove a segment or a single data file from a dataset draft.

    Arguments:
        obj: The CLI context holding the GAS client.
        tbrn: The TBRN of the segment or file to remove.
        is_recursive: Required when removing an entire segment.

    """
    gas = obj.get_gas()
    tbrn_info = TBRN(tbrn=tbrn)
    dataset_client = get_dataset_client(gas, tbrn_info, is_fusion=False)

    if tbrn_info.type not in (TBRNType.SEGMENT, TBRNType.NORMAL_FILE):
        error(f'"{tbrn}" is an invalid path to remove')
    # Removal mutates the dataset, so it is only allowed on a draft.
    if not tbrn_info.is_draft:
        error(f'To remove the data, "{tbrn}" must be in draft status, like "{tbrn}#1"')

    if tbrn_info.type == TBRNType.SEGMENT:
        # Deleting a whole segment requires an explicit -r, mirroring `rm -r`.
        if not is_recursive:
            error("Please use -r option to remove the whole segment")
        dataset_client.delete_segment(tbrn_info.segment_name)
    else:
        segment = dataset_client.get_segment(tbrn_info.segment_name)
        segment.delete_data(tbrn_info.remote_path)

    click.echo(f'Successfully deleted "{tbrn_info.get_colored_tbrn()}"')
def _implement_draft(  # pylint: disable=too-many-arguments
    obj: ContextInfo,
    tbrn: str,
    is_list: bool,
    edit: bool,
    close: bool,
    message: Tuple[str, ...],
) -> None:
    """List, edit, close or create drafts of a dataset.

    Exactly one action runs, checked in the order: list, edit, close;
    with none of the flags set, a new draft is created.

    Arguments:
        obj: The CLI context holding the GAS client and config.
        tbrn: The TBRN of the target dataset.
        is_list: If True, list the dataset's drafts.
        edit: If True, edit the draft's message.
        close: If True, close the draft.
        message: The draft message parts supplied on the command line.

    """
    gas = obj.get_gas()
    tbrn_info = TBRN(tbrn=tbrn)
    dataset_client = get_dataset_client(gas, tbrn_info)

    if tbrn_info.type != TBRNType.DATASET:
        error(f'To operate a draft, "{tbrn}" must be a dataset')

    if is_list:
        _list_drafts(dataset_client, tbrn_info)
    elif edit:
        _edit_draft(dataset_client, tbrn_info, message, obj.config_parser)
    elif close:
        _close_draft(dataset_client, tbrn_info)
    else:
        _create_draft(dataset_client, tbrn_info, message, obj.config_parser)