def test_check_and_import_dataset(self, mock_config_file):
    """A dataset deleted locally but published to a remote should be
    re-importable via the check_and_import_dataset background job."""
    im = InventoryManager(mock_config_file[0])
    ds = im.create_dataset('default', 'default', "dataset100",
                           storage_type="gigantum_object_v1", description="100")

    # Fake publish to a local bare repo
    _MOCK_create_remote_repo2(ds, 'test', None, None)
    remote_url = ds.remote

    im.delete_dataset('default', 'default', "dataset100")

    # After deletion the dataset must no longer be loadable locally.
    with pytest.raises(InventoryException):
        im.load_dataset('default', 'default', "dataset100")

    # FIX: logged_in_username must be 'default' to match the user that
    # created (and later loads) the dataset; a placeholder value here would
    # import the dataset under the wrong user directory and the
    # load_dataset('default', ...) call below would fail.
    kwargs = {
        'logged_in_username': "default",
        'dataset_owner': "default",
        'dataset_name': "dataset100",
        'remote_url': remote_url,
        'config_file': mock_config_file[0]
    }

    gtmcore.dispatcher.dataset_jobs.check_and_import_dataset(**kwargs)

    # The job should have re-imported the dataset from the remote.
    ds = im.load_dataset('default', 'default', "dataset100")
    assert ds.name == 'dataset100'
    assert ds.namespace == 'default'
def test_delete_dataset_while_linked(self, mock_config_file):
    """Deleting a dataset that is linked into a labbook removes the dataset
    repo but leaves the labbook and the file cache on disk; cache cleanup is
    deferred to the clean_dataset_file_cache job."""
    im = InventoryManager(mock_config_file[0])
    author = GitAuthor(name="test", email="*****@*****.**")
    lb = im.create_labbook("test", "test", "labbook1",
                           description="my first labbook")
    ds = im.create_dataset("test", "test", "dataset1", "gigantum_object_v1",
                           description="my first dataset", author=author)

    ds_root_dir = ds.root_dir
    lb_root_dir = lb.root_dir
    assert os.path.exists(ds_root_dir) is True
    assert os.path.exists(lb_root_dir) is True

    # Link dataset
    im.link_dataset_to_labbook(f"{ds_root_dir}/.git", "test", "dataset1", lb)

    manifest = Manifest(ds, 'test')
    # Seed the file cache with a couple of files so there is something to clean.
    for fname, content in (("test1.txt", "asdfasdf"), ("test2.txt", "dfg")):
        helper_append_file(manifest.cache_mgr.cache_root,
                           manifest.dataset_revision, fname, content)
        assert os.path.exists(os.path.join(manifest.cache_mgr.cache_root,
                                           manifest.dataset_revision,
                                           fname)) is True

    delete_job = im.delete_dataset("test", "test", "dataset1")

    # Dataset repo gone; labbook and cache remain until the job runs.
    assert os.path.exists(ds_root_dir) is False
    assert os.path.exists(lb_root_dir) is True
    assert os.path.exists(manifest.cache_mgr.cache_root) is True

    assert delete_job.namespace == "test"
    assert delete_job.name == "dataset1"
    assert delete_job.cache_root == manifest.cache_mgr.cache_root

    jobs.clean_dataset_file_cache("test", delete_job.namespace,
                                  delete_job.name, delete_job.cache_root,
                                  config_file=mock_config_file[0])

    # Cache is still present because the dataset remains linked to a labbook.
    assert os.path.exists(manifest.cache_mgr.cache_root) is True

    cache_base, _ = manifest.cache_mgr.cache_root.rsplit(os.path.sep, 1)
    assert os.path.exists(cache_base) is True
def test_delete_dataset(self, mock_config_file):
    """Deleting an unlinked dataset removes both its repo directory and its
    file cache, leaving only the per-user cache base directory behind."""
    im = InventoryManager(mock_config_file[0])
    author = GitAuthor(name="test", email="*****@*****.**")
    ds = im.create_dataset("test", "test", "dataset1", "gigantum_object_v1",
                           description="my first dataset", author=author)

    root_dir = ds.root_dir
    assert os.path.exists(root_dir) is True

    manifest = Manifest(ds, 'test')
    # Populate the cache so deletion has real files to remove.
    for fname, content in (("test1.txt", "asdfasdf"), ("test2.txt", "dfg")):
        helper_append_file(manifest.cache_mgr.cache_root,
                           manifest.dataset_revision, fname, content)
        assert os.path.exists(os.path.join(manifest.cache_mgr.cache_root,
                                           manifest.dataset_revision,
                                           fname)) is True

    im.delete_dataset("test", "test", "dataset1")

    # Repo and cache are both removed; the shared cache base remains.
    assert os.path.exists(root_dir) is False
    assert os.path.exists(manifest.cache_mgr.cache_root) is False

    cache_base, _ = manifest.cache_mgr.cache_root.rsplit(os.path.sep, 1)
    assert os.path.exists(cache_base) is True
def test_delete_labbook_with_linked_dataset(
        self, fixture_working_dir_env_repo_scoped):
    """Test deleting a LabBook with a linked dataset that has been deleted
    as well, should clean up"""

    class FakeJob(object):
        # Minimal stand-in for the object a real dispatch_task returns.
        def __init__(self, key):
            self.key_str = key

    def fake_dispatch(self, function_ref, kwargs, metadata):
        # Verify the delete mutation schedules the cache-cleanup job with
        # the expected arguments, and drop a marker file as proof that this
        # patched dispatcher was actually invoked.
        assert kwargs['logged_in_username'] == 'default'
        assert kwargs['dataset_owner'] == 'default'
        assert kwargs['dataset_name'] == 'dataset22'
        assert ".labmanager/datasets/default/default/dataset22" in kwargs['cache_location']
        assert metadata['method'] == 'clean_dataset_file_cache'
        with open("/tmp/mock_reached", 'wt') as tf:
            tf.write("reached")
        return FakeJob("rq:job:00923477-d46b-479c-ad0c-2dffcfdfb6b10")

    inv = InventoryManager(fixture_working_dir_env_repo_scoped[0])
    lb = inv.create_labbook("default", "default", "labbook1",
                            description="Cats labbook 1")
    lb_root_dir = lb.root_dir
    assert os.path.exists(lb_root_dir)
    assert os.path.exists("/tmp/mock_reached") is False

    ds = inv.create_dataset('default', 'default', "dataset22",
                            storage_type="gigantum_object_v1",
                            description="test")
    ds_root_dir = ds.root_dir
    inv.link_dataset_to_labbook(f"{ds.root_dir}/.git", "default",
                                "dataset22", lb)
    inv.delete_dataset('default', 'default', "dataset22")

    delete_query = f"""
    mutation delete {{
      deleteLabbook(input: {{
        owner: "default",
        labbookName: "labbook1",
        confirm: true
      }}) {{
        success
      }}
    }}
    """

    try:
        with patch.object(Dispatcher, 'dispatch_task', fake_dispatch):
            r = fixture_working_dir_env_repo_scoped[2].execute(delete_query)
            assert 'errors' not in r
            assert r['data']['deleteLabbook']['success'] is True
            # Both directories are gone and the dispatcher mock fired.
            assert not os.path.exists(lb_root_dir)
            assert not os.path.exists(ds_root_dir)
            assert os.path.exists("/tmp/mock_reached") is True
    finally:
        # Always remove the marker so reruns start clean.
        if os.path.exists("/tmp/mock_reached"):
            os.remove("/tmp/mock_reached")