def mutate_and_get_payload(cls, root, info, owner, labbook_name, confirm, client_mutation_id=None):
    """Delete a LabBook from the remote git server and clean up local remote refs.

    Args:
        root: GraphQL root object (unused).
        info: GraphQL resolve info; the request headers carry the Bearer token.
        owner: Namespace (username) that owns the LabBook.
        labbook_name: Name of the LabBook to delete remotely.
        confirm: Must be True to actually delete; anything else is a dry run.
        client_mutation_id: Relay client mutation id (unused).

    Returns:
        DeleteLabbook: success=True when both the remote repository and its
        cloud index entry were removed; success=False on a dry run or when
        the index removal failed (repository removal failure still raises).

    Raises:
        ValueError: If the Authorization header is missing, or the configured
            default remote has no matching admin_service entry.
    """
    if confirm is True:
        # Load config data
        configuration = Configuration().config

        # Extract valid Bearer token from the WSGI environ, if present
        token = None
        if hasattr(info.context.headers, 'environ'):
            if "HTTP_AUTHORIZATION" in info.context.headers.environ:
                token = parse_token(info.context.headers.environ["HTTP_AUTHORIZATION"])
        if not token:
            raise ValueError("Authorization header not provided. Cannot perform remote delete operation.")

        # Get remote server configuration: find the admin/index services for the default remote
        default_remote = configuration['git']['default_remote']
        admin_service = None
        for remote in configuration['git']['remotes']:
            if default_remote == remote:
                admin_service = configuration['git']['remotes'][remote]['admin_service']
                index_service = configuration['git']['remotes'][remote]['index_service']
                break

        if not admin_service:
            raise ValueError('admin_service could not be found')

        # Perform delete operation on the git server
        mgr = GitLabManager(default_remote, admin_service, access_token=token)
        mgr.remove_repository(owner, labbook_name)
        logger.info(f"Deleted {owner}/{labbook_name} from the remote repository {default_remote}")

        # Call Index service to remove project from cloud index and search.
        # Don't raise an exception if the index delete fails, since this can be
        # handled relatively gracefully for now, but do return success=False.
        success = True
        access_token = flask.g.get('access_token', None)
        id_token = flask.g.get('id_token', None)
        repo_id = mgr.get_repository_id(owner, labbook_name)
        response = requests.delete(f"https://{index_service}/index/{repo_id}",
                                   headers={"Authorization": f"Bearer {access_token}",
                                            "Identity": id_token},
                                   timeout=10)

        if response.status_code != 204:
            # Soft failure: report success=False rather than raising, per the note above.
            success = False
            logger.error(f"Failed to remove project from cloud index. "
                         f"Status Code: {response.status_code}")
            try:
                logger.error(response.json())
            except ValueError:
                # Error body was not JSON; fall back to the raw text so the
                # best-effort logging itself cannot raise.
                logger.error(response.text)
        else:
            logger.info(f"Deleted remote repository {owner}/{labbook_name} from cloud index")

        # Remove locally any references to that cloud repo that's just been deleted.
        try:
            username = get_logged_in_username()
            lb = InventoryManager().load_labbook(username, owner, labbook_name,
                                                 author=get_logged_in_author())
            lb.remove_remote()
            lb.remove_lfs_remotes()
        except GigantumException as e:
            logger.warning(e)

        return DeleteLabbook(success=success)
    else:
        logger.info(f"Dry run deleting {labbook_name} from remote repository -- not deleted.")
        return DeleteLabbook(success=False)
def mutate_and_get_payload(cls, root, info, owner, dataset_name, local=False, remote=False, client_mutation_id=None):
    """Delete a Dataset locally and/or from the remote git server.

    Remote deletion removes the dataset's managed file contents (when the
    backend supports it), the git repository, its cloud index entry, and the
    local remote refs. Local deletion removes the dataset from the local
    inventory and dispatches a background job to clean its file cache.

    Args:
        root: GraphQL root object (unused).
        info: GraphQL resolve info (unused directly; auth comes from flask.g).
        owner: Namespace (username) that owns the Dataset.
        dataset_name: Name of the Dataset to delete.
        local: If True, delete the local copy of the dataset.
        remote: If True, delete the dataset from the remote server.
        client_mutation_id: Relay client mutation id (unused).

    Returns:
        DeleteDataset: flags reporting which deletions were performed.

    Raises:
        ValueError: If remote deletion is requested without a valid session,
            or the dataset does not exist locally.
    """
    logged_in_user = get_logged_in_username()
    local_deleted = False
    remote_deleted = False

    if remote:
        logger.info(f"Deleting remote Dataset {owner}/{dataset_name}")

        # Extract valid Bearer token; both tokens are required to authenticate
        # against the backend and the index service.
        access_token = flask.g.get('access_token', None)
        id_token = flask.g.get('id_token', None)
        if not access_token or not id_token:
            raise ValueError("Deleting a remote Dataset requires a valid session.")

        try:
            ds = InventoryManager().load_dataset(logged_in_user, owner, dataset_name,
                                                 author=get_logged_in_author())
        except InventoryException:
            raise ValueError("A dataset must exist locally to delete it in the remote.")

        # Delete the dataset's files if supported by the storage backend
        if ds.is_managed():
            ds.backend.set_default_configuration(logged_in_user, access_token, id_token)
            ds.backend.delete_contents(ds)

        # Get remote server configuration
        config = Configuration()
        remote_config = config.get_remote_configuration()

        # Delete the repository on the git server
        mgr = GitLabManager(remote_config['git_remote'],
                            remote_config['admin_service'],
                            access_token=access_token)
        mgr.remove_repository(owner, dataset_name)
        logger.info(f"Deleted {owner}/{dataset_name} repository from the"
                    f" remote repository {remote_config['git_remote']}")

        # Call Index service to remove project from cloud index and search.
        # Don't raise an exception if the index delete fails, since this can
        # be handled relatively gracefully.
        repo_id = mgr.get_repository_id(owner, dataset_name)
        response = requests.delete(f"https://{remote_config['index_service']}/index/{repo_id}",
                                   headers={"Authorization": f"Bearer {access_token}",
                                            "Identity": id_token},
                                   timeout=30)

        if response.status_code != 204:
            # Soft failure, still continue
            logger.error(f"Failed to remove {owner}/{dataset_name} from cloud index. "
                         f"Status Code: {response.status_code}")
            try:
                logger.error(response.json())
            except ValueError:
                # Error body was not JSON; log the raw text so this
                # best-effort logging cannot itself raise.
                logger.error(response.text)
        else:
            logger.info(f"Deleted remote repository {owner}/{dataset_name} from cloud index")

        # Remove locally any references to that cloud repo that's just been deleted.
        try:
            ds.remove_remote()
        except GigantumException as e:
            logger.warning(e)

        remote_deleted = True

    if local:
        logger.info(f"Deleting local Dataset {owner}/{dataset_name}")

        # Delete the dataset from the local inventory
        dataset_delete_job = InventoryManager().delete_dataset(logged_in_user, owner, dataset_name)
        local_deleted = True

        # Schedule Job to clear file cache if dataset is no longer in use
        job_metadata = {'method': 'clean_dataset_file_cache'}
        job_kwargs = {
            'logged_in_username': logged_in_user,
            'dataset_owner': dataset_delete_job.namespace,
            'dataset_name': dataset_delete_job.name,
            'cache_location': dataset_delete_job.cache_root
        }

        dispatcher = Dispatcher()
        job_key = dispatcher.dispatch_task(jobs.clean_dataset_file_cache,
                                           metadata=job_metadata,
                                           kwargs=job_kwargs)

        logger.info(f"Dispatched clean_dataset_file_cache({owner}/{dataset_name}) to Job {job_key}")

    return DeleteDataset(local_deleted=local_deleted, remote_deleted=remote_deleted)