def test_get_remote_configuration(self):
    """Test fetching the default and an explicitly named remote configuration"""
    configuration = Configuration()

    # With no argument, the default remote's configuration is returned
    remote = configuration.get_remote_configuration()
    assert remote['git_remote'] == 'repo.gigantum.io'
    assert remote['remote_type'] == 'gitlab'
    assert remote['admin_service'] == 'usersrv.gigantum.io'
    assert remote['index_service'] == 'api.gigantum.com/read'
    assert remote['object_service'] == 'api.gigantum.com/object-v1'

    # Naming the git remote explicitly returns the same configuration
    remote = configuration.get_remote_configuration("repo.gigantum.io")
    assert remote['git_remote'] == 'repo.gigantum.io'
    assert remote['remote_type'] == 'gitlab'
    assert remote['admin_service'] == 'usersrv.gigantum.io'
    assert remote['index_service'] == 'api.gigantum.com/read'
    assert remote['object_service'] == 'api.gigantum.com/object-v1'
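# For context, a minimal sketch of what the method under test might look like.
# This is an assumption for illustration, not the actual Gigantum
# implementation: it presumes the parsed config stores remotes in a dict keyed
# by the git remote hostname, with a separate 'default_remote' entry.
from typing import Optional


class ConfigurationSketch:
    """Hypothetical stand-in for Configuration, shown only to clarify the tests above."""

    def __init__(self, config: dict) -> None:
        self.config = config

    def get_remote_configuration(self, remote_name: Optional[str] = None) -> dict:
        """Return the config dict for `remote_name`, or for the default remote if omitted."""
        git_config = self.config['git']
        name = remote_name or git_config['default_remote']
        try:
            remote = dict(git_config['remotes'][name])
        except KeyError:
            # Unknown remotes surface as ValueError, matching the not-found test below
            raise ValueError(f"Requested remote configuration '{name}' could not be found")
        # The tests expect the remote's own name to appear under 'git_remote'
        remote['git_remote'] = name
        return remote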
def mutate_and_get_payload(cls, root, info, owner, dataset_name, local=False, remote=False,
                           client_mutation_id=None):
    logged_in_user = get_logged_in_username()
    local_deleted = False
    remote_deleted = False

    if remote:
        logger.info(f"Deleting remote Dataset {owner}/{dataset_name}")

        # Extract valid Bearer token
        access_token = flask.g.get('access_token', None)
        id_token = flask.g.get('id_token', None)
        if not access_token or not id_token:
            raise ValueError("Deleting a remote Dataset requires a valid session.")

        try:
            ds = InventoryManager().load_dataset(logged_in_user, owner, dataset_name,
                                                 author=get_logged_in_author())
        except InventoryException:
            raise ValueError("A dataset must exist locally to delete it in the remote.")

        # Delete the dataset's files if supported
        if ds.is_managed():
            ds.backend.set_default_configuration(logged_in_user, access_token, id_token)
            ds.backend.delete_contents(ds)

        # Get remote server configuration
        config = Configuration()
        remote_config = config.get_remote_configuration()

        # Delete the repository
        mgr = GitLabManager(remote_config['git_remote'],
                            remote_config['admin_service'],
                            access_token=access_token)
        mgr.remove_repository(owner, dataset_name)
        logger.info(f"Deleted {owner}/{dataset_name} repository from the"
                    f" remote {remote_config['git_remote']}")

        # Call the index service to remove the dataset from the cloud index and search.
        # Don't raise an exception if the index delete fails, since this can be
        # handled relatively gracefully.
        repo_id = mgr.get_repository_id(owner, dataset_name)
        response = requests.delete(f"https://{remote_config['index_service']}/index/{repo_id}",
                                   headers={"Authorization": f"Bearer {access_token}",
                                            "Identity": id_token},
                                   timeout=30)

        if response.status_code != 204:
            # Soft failure, still continue
            logger.error(f"Failed to remove {owner}/{dataset_name} from cloud index. "
                         f"Status Code: {response.status_code}")
            logger.error(response.json())
        else:
            logger.info(f"Deleted remote repository {owner}/{dataset_name} from cloud index")

        # Remove any local references to the cloud repo that has just been deleted
        try:
            ds.remove_remote()
        except GigantumException as e:
            logger.warning(e)

        remote_deleted = True

    if local:
        logger.info(f"Deleting local Dataset {owner}/{dataset_name}")

        # Delete the dataset
        dataset_delete_job = InventoryManager().delete_dataset(logged_in_user, owner, dataset_name)
        local_deleted = True

        # Schedule a job to clear the file cache if the dataset is no longer in use
        job_metadata = {'method': 'clean_dataset_file_cache'}
        job_kwargs = {
            'logged_in_username': logged_in_user,
            'dataset_owner': dataset_delete_job.namespace,
            'dataset_name': dataset_delete_job.name,
            'cache_location': dataset_delete_job.cache_root
        }
        dispatcher = Dispatcher()
        job_key = dispatcher.dispatch_task(jobs.clean_dataset_file_cache,
                                           metadata=job_metadata,
                                           kwargs=job_kwargs)
        logger.info(f"Dispatched clean_dataset_file_cache({owner}/{dataset_name}) to Job {job_key}")

    return DeleteDataset(local_deleted=local_deleted,
                         remote_deleted=remote_deleted)
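# A hedged example of how a client might invoke this mutation over GraphQL.
# The field and argument names below (deleteDataset, datasetName, localDeleted,
# remoteDeleted) are assumptions based on graphene-relay's camelCase naming
# conventions and are not confirmed against the actual schema.
DELETE_DATASET_QUERY = """
mutation DeleteDatasetMutation($owner: String!, $datasetName: String!) {
  deleteDataset(input: {owner: $owner, datasetName: $datasetName, local: true, remote: true}) {
    localDeleted
    remoteDeleted
  }
}
"""

variables = {"owner": "testuser", "datasetName": "my-dataset"}
# e.g. client.execute(DELETE_DATASET_QUERY, variable_values=variables), where
# `client` is a hypothetical graphene test client bound to the schema.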
def test_get_remote_configuration_not_found(self):
    """Test get_remote_configuration with a remote that is not configured"""
    configuration = Configuration()

    # Requesting an unknown remote should raise a ValueError
    with pytest.raises(ValueError):
        configuration.get_remote_configuration("asdfasdf.asdfasdf.com")
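# For reference, a hypothetical YAML snippet that would satisfy both tests
# above, assuming Configuration reads its remotes from a config file shaped
# like this (the layout is an assumption for illustration, not the shipped
# config format; requires PyYAML).
import yaml

SAMPLE_CONFIG = yaml.safe_load("""
git:
  default_remote: repo.gigantum.io
  remotes:
    repo.gigantum.io:
      remote_type: gitlab
      admin_service: usersrv.gigantum.io
      index_service: api.gigantum.com/read
      object_service: api.gigantum.com/object-v1
""")

assert SAMPLE_CONFIG['git']['default_remote'] == 'repo.gigantum.io'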