def test_aggregate_with_missing_or_duplicate_id(path):
    """Search must keep working after a subdataset is installed twice.

    Builds a three-level dataset hierarchy with native metadata, aggregates
    it, and verifies that search over the aggregated cache does not fail --
    even once the same subdataset exists twice (duplicate dataset id).
    """
    # a hierarchy of three (super/sub)datasets, each with some native metadata
    ds = Dataset(opj(path, 'origin')).create(force=True)
    subds = ds.create('sub', force=True)
    subds.create('subsub', force=True)
    # aggregate from bottom to top, guess native data, no compacting of graph
    # should yield 6 meta data sets, one implicit, and one native per dataset
    # and a second native set for the topmost dataset
    aggregate_metadata(ds, guess_native_type=True, recursive=True)
    # now only ask the top superdataset, no recursion, just reading from the
    # cache
    meta = get_metadata(
        ds, guess_type=False, ignore_subdatasets=False, ignore_cache=False)
    # the grandchild's metadata must have made it into the aggregated cache
    # of the topmost dataset (non-ASCII name exercises unicode handling)
    for name in ('grandchild_äöü東', ):
        assert_true(
            sum([s.get('name', '') == assure_unicode(name) for s in meta]))
    # but search should not fail
    with swallow_outputs():
        res1 = list(search('.', regex=True, dataset=ds))
    assert res1
    # and let's see now if we wouldn't fail if dataset is duplicate if we
    # install the same dataset twice
    subds_clone = ds.install(source=subds.path, path="subds2",
                             result_xfm='datasets',
                             return_type='item-or-list')
    # no assertion on the result -- merely verifying search does not blow up
    # in the presence of the duplicate id
    with swallow_outputs():
        res2 = list(search('.', regex=True, dataset=ds))
def test_search_outside1_install_default_ds(tdir, default_dspath):
    """Search outside any dataset offers to install the central default one.

    Install and search are mocked out; the test exercises the interactive
    yes/no flow around installing the default dataset at the configured
    central location.
    """
    with chpwd(tdir):
        # let's mock out even actual install/search calls
        with \
            patch_config({'datalad.locations.default-dataset': default_dspath}), \
            patch('datalad.api.install',
                  return_value=Dataset(default_dspath)) as mock_install, \
            patch('datalad.distribution.dataset.Dataset.search',
                  new_callable=_mock_search):
            _check_mocked_install(default_dspath, mock_install)

            # now on subsequent run, we want to mock as if dataset already
            # exists at central location and then do search again
            from datalad.ui import ui
            ui.add_responses('yes')
            mock_install.reset_mock()
            with patch(
                    'datalad.distribution.dataset.Dataset.is_installed',
                    True):
                _check_mocked_install(default_dspath, mock_install)

            # and what if we say "no" to install?
            ui.add_responses('no')
            mock_install.reset_mock()
            with assert_raises(NoDatasetArgumentFound):
                list(search("."))

            # and if path exists and is a valid dataset and we say "no"
            Dataset(default_dspath).create()
            ui.add_responses('no')
            mock_install.reset_mock()
            with assert_raises(NoDatasetArgumentFound):
                list(search("."))
def test_search_outside1_install_default_ds(tdir, default_dspath):
    """Search outside any dataset offers to install the central default one.

    Install and search are mocked out; the test exercises the interactive
    yes/no flow around installing the default dataset at the configured
    central location.
    """
    with chpwd(tdir):
        # let's mock out even actual install/search calls
        with \
            patch_config({'datalad.locations.default-dataset': default_dspath}), \
            patch('datalad.api.install',
                  return_value=Dataset(default_dspath)) as mock_install, \
            patch('datalad.distribution.dataset.Dataset.search',
                  new_callable=_mock_search):
            _check_mocked_install(default_dspath, mock_install)

            # now on subsequent run, we want to mock as if dataset already
            # exists at central location and then do search again
            from datalad.ui import ui
            ui.add_responses('yes')
            mock_install.reset_mock()
            with patch(
                    'datalad.distribution.dataset.Dataset.is_installed',
                    True):
                _check_mocked_install(default_dspath, mock_install)

            # and what if we say "no" to install?
            ui.add_responses('no')
            mock_install.reset_mock()
            with assert_raises(NoDatasetArgumentFound):
                list(search("."))

            # and if path exists and is a valid dataset and we say "no"
            Dataset(default_dspath).create()
            ui.add_responses('no')
            mock_install.reset_mock()
            with assert_raises(NoDatasetArgumentFound):
                list(search("."))
def test_search_outside1(tdir, newhome):
    """Searching outside of any dataset must fail in a controlled way."""
    with chpwd(tdir):
        # the cwd exists but is not a dataset, and the (patched) central
        # default-dataset location does not hold one either -- the lazy
        # generator must raise as soon as it is advanced, not before
        with patch_config({'datalad.locations.default-dataset': newhome}):
            results = search("bu", return_type='generator')
            assert_is_generator(results)
            with assert_raises(NoDatasetArgumentFound):
                next(results)
        # explicitly pointing at a non-existing dataset is a plain error
        with assert_raises(ValueError):
            next(search("bu", dataset=newhome))
def test_search_outside1(tdir, newhome):
    """Searching outside of any dataset must fail in a controlled way."""
    with chpwd(tdir):
        # the cwd exists but is not a dataset, and the (patched) central
        # location does not hold one either -- the lazy generator must
        # raise as soon as it is advanced, not before
        with patch.object(search_mod, 'LOCAL_CENTRAL_PATH', newhome):
            results = search("bu", return_type='generator')
            assert_is_generator(results)
            with assert_raises(NoDatasetArgumentFound):
                next(results)
        # explicitly pointing at a non-existing dataset is a plain error
        with assert_raises(ValueError):
            next(search("bu", dataset=newhome))
def test_search_non_dataset(tdir):
    """Searching a plain git repo must hint at 'datalad create --force'."""
    from datalad.support.gitrepo import GitRepo
    # a bare git repository, deliberately not a datalad dataset
    GitRepo(tdir, create=True)
    with assert_raises(NoDatasetArgumentFound) as exc_info:
        list(search('smth', dataset=tdir))
    # the message must instruct the user how to turn the repo into a
    # datalad dataset
    assert_in("datalad create --force", str(exc_info.exception))
def _check_mocked_install(default_dspath, mock_install):
    """Run a mocked search and verify its results and the install call.

    Parameters
    ----------
    default_dspath : str
        Expected first positional argument of the mocked install call.
    mock_install : Mock
        The patched ``datalad.api.install``.
    """
    gen = search(".", return_type='generator')
    assert_is_generator(gen)
    # we no longer do any custom path tune up from the one returned by search
    # so should match what search returns; a plain list() copy replaces the
    # pointless identity comprehension
    assert_equal(list(gen), list(_mocked_search_results))
    mock_install.assert_called_once_with(default_dspath, source='///')
def _check_mocked_install(central_dspath, mock_install):
    """Run a mocked search and verify its results and the install call.

    Parameters
    ----------
    central_dspath : str
        Expected first positional argument of the mocked install call.
    mock_install : Mock
        The patched install function.
    """
    gen = search(".", regex=True)
    assert_is_generator(gen)
    # we no longer do any custom path tune up from the one returned by search
    # so should match what search returns; a plain list() copy replaces the
    # unpack/repack comprehension over the (loc, report) pairs
    assert_equal(list(gen), list(_mocked_search_results))
    mock_install.assert_called_once_with(central_dspath, source='///')
def _check_mocked_install(default_dspath, mock_install):
    """Run a mocked search and verify its results and the install call.

    Parameters
    ----------
    default_dspath : str
        Expected first positional argument of the mocked install call.
    mock_install : Mock
        The patched ``datalad.api.install``.
    """
    gen = search(".", return_type='generator')
    assert_is_generator(gen)
    # we no longer do any custom path tune up from the one returned by search
    # so should match what search returns; a plain list() copy replaces the
    # pointless identity comprehension
    assert_equal(
        list(gen),
        list(_mocked_search_results))
    mock_install.assert_called_once_with(default_dspath, source='///')
def _check_mocked_install(central_dspath, mock_install):
    """Run a mocked search and verify its results and the install call.

    Parameters
    ----------
    central_dspath : str
        Expected first positional argument of the mocked install call.
    mock_install : Mock
        The patched install function.
    """
    gen = search(".", regex=True)
    assert_is_generator(gen)
    # we no longer do any custom path tune up from the one returned by search
    # so should match what search returns; a plain list() copy replaces the
    # unpack/repack comprehension over the (loc, report) pairs
    assert_equal(
        list(gen),
        list(_mocked_search_results))
    mock_install.assert_called_once_with(central_dspath, source='///')
def test_search_outside1(tdir, newhome):
    """Searching outside of any dataset must fail in a controlled way.

    The exact failure point depends on whether results are rendered
    eagerly (``datalad.api.alwaysrender``) or lazily via a generator.
    """
    with chpwd(tdir):
        # should fail since directory exists, but not a dataset
        # should not even waste our response ;)
        always_render = cfg.obtain('datalad.api.alwaysrender')
        with patch.object(search_mod, 'LOCAL_CENTRAL_PATH', newhome):
            if always_render:
                # we do try to render results which actually causes exception
                # to come right away
                assert_raises(NoDatasetArgumentFound, search, "bu")
            else:
                # lazy mode: the generator only raises once advanced
                gen = search("bu")
                assert_is_generator(gen)
                assert_raises(NoDatasetArgumentFound, next, gen)
        # and if we point to some non-existing dataset -- the same in both
        # cases but might come before even next if always_render
        with assert_raises(ValueError):
            next(search("bu", dataset=newhome))
def test_search_outside1_noninteractive_ui(tdir):
    """Non-interactive search outside a dataset raises an informative error."""
    with chpwd(tdir):
        with assert_raises(NoDatasetArgumentFound) as exc_info:
            list(search("bu"))
    # the message must point the user at running interactively instead
    assert_in('run interactively', str(exc_info.exception))