def _check_mocked_install(default_dspath, mock_install):
    """Verify search('.') passes mocked results through unchanged.

    Asserts that the generator returned by ``search`` yields exactly the
    entries of ``_mocked_search_results`` and that the (mocked) install
    was invoked once for the default dataset path with source '///'.
    """
    gen = search(".", return_type='generator')
    assert_is_generator(gen)
    # we no longer do any custom path tune up from the one returned by search
    # so should match what search returns
    # (list() instead of an identity comprehension -- same result, clearer)
    assert_equal(list(gen), list(_mocked_search_results))
    mock_install.assert_called_once_with(default_dspath, source='///')
def _check_mocked_install(central_dspath, mock_install):
    """Check that search('.') yields the mocked results verbatim.

    Also verifies that the (mocked) install was called exactly once for
    the central dataset path with source '///'.
    """
    results = search(".", regex=True)
    assert_is_generator(results)
    # we no longer do any custom path tune up from the one returned by search
    # so should match what search returns
    expected = [(location, record) for location, record in _mocked_search_results]
    assert_equal(list(results), expected)
    mock_install.assert_called_once_with(central_dspath, source='///')
def _check_mocked_install(default_dspath, mock_install):
    """Verify search('.') passes mocked results through unchanged.

    Asserts that the generator returned by ``search`` yields exactly the
    entries of ``_mocked_search_results`` and that the (mocked) install
    was invoked once for the default dataset path with source '///'.
    """
    gen = search(".", return_type='generator')
    assert_is_generator(gen)
    # we no longer do any custom path tune up from the one returned by search
    # so should match what search returns
    # (list() instead of an identity comprehension -- same result, clearer)
    assert_equal(list(gen), list(_mocked_search_results))
    mock_install.assert_called_once_with(default_dspath, source='///')
def _check_mocked_install(central_dspath, mock_install):
    """Check that search('.') yields the mocked results verbatim.

    Also verifies that the (mocked) install was called exactly once for
    the central dataset path with source '///'.
    """
    results = search(".", regex=True)
    assert_is_generator(results)
    # we no longer do any custom path tune up from the one returned by search
    # so should match what search returns
    expected = [(location, record) for location, record in _mocked_search_results]
    assert_equal(list(results), expected)
    mock_install.assert_called_once_with(central_dspath, source='///')
def test_search_outside1(tdir, newhome):
    """search() outside of any dataset must fail.

    ``tdir`` exists but is not a dataset; the configured default dataset
    (``newhome``) does not exist either, so no fallback is available.
    """
    with chpwd(tdir), \
            patch_config({'datalad.locations.default-dataset': newhome}):
        # should fail since directory exists, but not a dataset
        # should not even waste our response ;)
        results = search("bu", return_type='generator')
        assert_is_generator(results)
        assert_raises(NoDatasetArgumentFound, next, results)
        # pointing explicitly at a non-existing dataset must fail as well
        with assert_raises(ValueError):
            next(search("bu", dataset=newhome))
def test_search_outside1(tdir, newhome):
    """search() outside of any dataset must fail.

    ``tdir`` exists but is not a dataset; the patched central dataset
    location (``newhome``) does not exist either, so no fallback works.
    """
    with chpwd(tdir), \
            patch.object(search_mod, 'LOCAL_CENTRAL_PATH', newhome):
        # should fail since directory exists, but not a dataset
        # should not even waste our response ;)
        results = search("bu", return_type='generator')
        assert_is_generator(results)
        assert_raises(NoDatasetArgumentFound, next, results)
        # pointing explicitly at a non-existing dataset must fail as well
        with assert_raises(ValueError):
            next(search("bu", dataset=newhome))
def test_search_outside1(tdir, newhome):
    """search() outside of any dataset must fail.

    Depending on the 'datalad.api.alwaysrender' config the failure either
    surfaces directly from the search() call (eager rendering) or only
    when the returned generator is advanced.
    """
    with chpwd(tdir):
        # should fail since directory exists, but not a dataset
        # should not even waste our response ;)
        always_render = cfg.obtain('datalad.api.alwaysrender')
        with patch.object(search_mod, 'LOCAL_CENTRAL_PATH', newhome):
            if not always_render:
                results = search("bu")
                assert_is_generator(results)
                assert_raises(NoDatasetArgumentFound, next, results)
            else:
                # we do try to render results which actually causes exception
                # to come right away
                assert_raises(NoDatasetArgumentFound, search, "bu")
            # and if we point to some non-existing dataset -- the same in both
            # cases, but might come before even next if always_render
            with assert_raises(ValueError):
                next(search("bu", dataset=newhome))
def check_basic_xnat_interface(url, project, empty_project, subjects):
    """Exercise the basic XNATServer API against a live server at ``url``.

    Checks project listing (with/without empty projects, public-only
    limit), subject and experiment retrieval for ``project``, per-subject
    file listings for the first two ``subjects``, and the aggregated
    all-files generator.

    Parameters
    ----------
    url : str
      Base URL of the XNAT server.
    project : str
      Name of a project expected to exist and have data.
    empty_project : str or None
      Name of a project expected to be empty (skipped if falsy).
    subjects : sequence
      Subject identifiers; at least two are used.
    """
    nitrc = XNATServer(url)
    projects = nitrc.get_projects()
    # verify that we still have projects we want!
    assert_in(project, projects)
    if empty_project:
        all_projects = nitrc.get_projects(drop_empty=False)
        assert len(all_projects) > len(projects)
        assert empty_project in all_projects
        assert empty_project not in projects

    projects_public = nitrc.get_projects(limit='public')
    import json
    print(json.dumps(projects_public, indent=2))
    # public projects must be a subset of all (non-empty) projects
    assert len(projects_public) <= len(projects)
    assert not set(projects_public).difference(projects)
    eq_(set(projects),
        set(nitrc.get_projects(limit=PROJECT_ACCESS_TYPES)))

    subjects_ = nitrc.get_subjects(project)
    assert len(subjects_)
    experiments = nitrc.get_experiments(project, subjects[0])
    # NOTE: assumption that there is only one experiment
    # BUG FIX: dict.keys()[0] is Python 2 only -- on Python 3 keys() is a
    # non-subscriptable view; next(iter(...)) works on both
    files1 = nitrc.get_files(project, subjects[0], next(iter(experiments)))
    assert files1
    experiments = nitrc.get_experiments(project, subjects[1])
    files2 = nitrc.get_files(project, subjects[1], next(iter(experiments)))
    assert files2
    ok_startswith(files1[0]['uri'], '/data')
    gen = nitrc.get_all_files_for_project(project,
                                          subjects=subjects,
                                          experiments=[next(iter(experiments))])
    assert_is_generator(gen)
    all_files = list(gen)
    if len(experiments) == 1:
        eq_(len(all_files), len(files1) + len(files2))
    else:
        # there should be more files due to multiple experiments which we
        # didn't actually check
        assert len(all_files) > len(files1) + len(files2)