def test_inherit_src_candidates(lcl, storepath, url):
    """Subdatasets obtained from a RIA store inherit the source candidate config."""
    lcl = Path(lcl)
    storepath = Path(storepath)
    # build a dataset with one subdataset
    ds1 = Dataset(lcl / 'ds1').create()
    ds1sub = ds1.create('sub')
    # a second dataset into which ds1 is installed, without touching its subds
    ds2 = Dataset(lcl / 'ds2').create()
    ds2.clone(source=ds1.path, path='mysub')
    # deliberately: no dataset is given a source candidate config
    # relocate every dataset into the store
    for each in (ds1, ds1sub, ds2):
        _move2store(storepath, each)
    # all three datasets must now be obtainable from the store;
    # the URL combines the store location with the ID of the root dataset
    riaclone = clone(
        f'ria+{url}#{ds2.id}',
        lcl / 'clone',
    )
    # the initial clone call sets a source candidate config, because it
    # sees the dataset coming from a store; every obtained subdataset
    # inherits that config on-clone
    datasets = riaclone.get(
        '.', get_data=False, recursive=True, result_xfm='datasets')
    # two subdatasets are expected
    eq_(len(datasets), 2)
    for sub in datasets:
        eq_(ConfigManager(dataset=sub, source='dataset-local').get(
            'datalad.get.subdataset-source-candidate-200origin'),
            f'ria+{url}#{{id}}')
def test_nonuniform_adjusted_subdataset(path):
    """Cloning a subdataset into a dataset on an adjusted branch records its URL.

    Regression test for https://github.com/datalad/datalad/issues/5107
    """
    topds = Dataset(Path(path) / "top").create()
    subds_url = 'git://github.com/datalad/testrepo--basic--r1'
    if not topds.repo.is_managed_branch():
        raise SkipTest("Test logic assumes default dataset state is adjusted")
    # clone via the variable (the original repeated the literal URL here),
    # so the cloned source and the asserted value cannot drift apart
    topds.clone(source=subds_url, path='subds')
    eq_(
        topds.subdatasets(return_type='item-or-list')['gitmodule_url'],
        subds_url)
def test_copy_file_nourl(serv_path=None, orig_path=None, tst_path=None):
    """Tests availability transfer to normal git-annex remote"""
    # source dataset that will hold the file content
    srv_ds = Dataset(serv_path).create()
    (srv_ds.pathobj / 'myfile.dat').write_text('I am content')
    (srv_ds.pathobj / 'noavail.dat').write_text('null')
    srv_ds.save()
    # drop one file's content so it has no known location anywhere
    srv_ds.drop('noavail.dat', reckless='kill')
    # empty superdataset with the test dataset installed as a subdataset
    orig_ds = Dataset(orig_path).create()
    orig_ds.clone(source=serv_path, path='serv')
    assert_repo_status(orig_ds.path)
    # copying a file with no retrievable content must fail gracefully
    unavailable = orig_ds.pathobj / 'serv' / 'noavail.dat'
    res = orig_ds.copy_file(unavailable, on_failure='ignore')
    assert_in_results(
        res,
        status='impossible',
        message='no known location of file content',
        path=str(unavailable),
    )
def test_relative_submodule_url(path):
    """Cloning from a relative path must record a relative submodule URL."""
    Dataset(op.join(path, 'origin')).create()
    ds = Dataset(op.join(path, 'ds')).create()
    # CWD inside the superdataset, so the relative source path is
    # resolved against it
    with chpwd(ds.path):
        ds_cloned = ds.clone(
            source=op.join(op.pardir, 'origin'),
            path='sources')
        # Check that a simple fetch call does not fail.
        ds_cloned.repo.fetch()
    subinfo = ds.subdatasets(return_type='item-or-list')
    eq_(subinfo['gitmodule_url'],
        # must be a relative URL, not platform-specific relpath!
        '../../origin')
def test_get_local_file_url_compatibility(path):
    """Smoke test: file:// URLs interoperate with other datalad/git/annex pieces."""
    path = Path(path)
    ds1 = Dataset(path / 'ds1').create()
    ds2 = Dataset(path / 'ds2').create()
    testfile = path / 'testfile.txt'
    testfile.write_text('some')
    # compat with annex addurl
    annex_url = get_local_file_url(testfile, compatibility='git-annex')
    ds1.repo.add_url_to_file('test.txt', annex_url)
    # compat with git clone/submodule
    git_url = get_local_file_url(ds2.path, compatibility='git')
    res = ds1.clone(git_url, result_xfm=None, return_type='generator')
    assert_status('ok', res)