def test_merge_follow_parentds_subdataset_adjusted_warning(path):
    """update(follow="parentds") must refuse to act on an adjusted subdataset.

    A recursive update with parentds-following is expected to report an
    'impossible' result for the adjusted subdataset while still bringing the
    top-level clone up to the source's state.
    """
    path = Path(path)
    src = Dataset(path / "source").create()
    src_sub = src.create("subds")
    clone = install(source=src.path, path=path / "clone",
                    recursive=True, result_xfm="datasets")
    clone_sub = Dataset(clone.pathobj / "subds")
    maybe_adjust_repo(clone_sub.repo)
    # Note: Were we to save ds_clone here, we would get a merge conflict in the
    # top repo for the submodule (even if using 'git annex sync' rather than
    # 'git merge').

    # Detach the source subdataset and record a new commit there, so the
    # clone's parent has a registered revision to follow.
    src_sub.repo.call_git(["checkout", DEFAULT_BRANCH + "^0"])
    (src_sub.pathobj / "foo").write_text("foo content")
    src.save(recursive=True)
    assert_repo_status(src.path)

    result = clone.update(merge=True, recursive=True,
                          follow="parentds", on_failure="ignore")
    # The adjusted subdataset cannot be updated to the recorded revision ...
    assert_in_results(result,
                      status="impossible",
                      path=clone_sub.path,
                      action="update")
    # ... but the superdataset itself reaches the source's state.
    eq_(clone.repo.get_hexsha(), src.repo.get_hexsha())
def test_save_gitrepo_annex_subds_adjusted(path):
    """Saving a plain-git superdataset with an adjusted annex subdataset works."""
    super_ds = Dataset(path).create(annex=False)
    sub_ds = super_ds.create("sub")
    maybe_adjust_repo(sub_ds.repo)
    # Commit new content in the (possibly adjusted) subdataset, then record
    # the updated subdataset state in the non-annex parent.
    (sub_ds.pathobj / "foo").write_text("foo")
    sub_ds.save()
    super_ds.save()
    assert_repo_status(super_ds.path)
def test_save_adjusted_partial(path):
    """A path-limited save of an adjusted subdataset leaves other staged paths alone."""
    top = Dataset(path).create()
    sub = top.create("sub")
    maybe_adjust_repo(sub.repo)
    (sub.pathobj / "foo").write_text("foo")
    sub.save()
    # Stage an unrelated file in the parent, but do not ask save() to commit it.
    (top.pathobj / "other").write_text("staged, not for committing")
    top.repo.call_git(["add", "other"])
    top.save(path=["sub"])
    # Only the subdataset state was committed; "other" is still merely staged.
    assert_repo_status(top.path, added=["other"])
def test_update_adjusted_incompatible_with_ff_only(path):
    """update(merge="ff-only") is impossible on an adjusted branch, plain update is ok."""
    path = Path(path)
    src = Dataset(path / "source").create()
    clone = install(source=src.path, path=path / "clone",
                    recursive=True, result_xfm="datasets")
    maybe_adjust_repo(clone.repo)

    # Fast-forward-only merging cannot be honored on an adjusted branch.
    assert_in_results(clone.update(merge="ff-only", on_failure="ignore"),
                      action="update",
                      status="impossible")
    # A fetch-only update (no merge requested) still succeeds.
    assert_in_results(clone.update(on_failure="ignore"),
                      action="update",
                      status="ok")
def check_merge_follow_parentds_subdataset_detached(on_adjusted, path):
    """Exercise update(follow="parentds") when the source subdataset is detached.

    Parameters
    ----------
    on_adjusted : bool
        If true, put all source datasets on adjusted branches before cloning,
        checking only that the update fails sensibly in that mode.
    path : str or Path
        Work directory in which the source and clone hierarchies are created.
    """
    if on_adjusted and DEFAULT_REMOTE != "origin" and \
       external_versions['cmd:annex'] <= "8.20210330":
        raise SkipTest("'git annex init' with adjusted branch currently fails "
                       "due to hard-coded 'origin'")
    # Note: For the adjusted case, this is not much more than a smoke test that
    # on an adjusted branch we fail sensibly. The resulting state is not easy
    # to reason about nor desirable.
    path = Path(path)
    # $path/source/s0/s1
    # The additional dataset level is to gain some confidence that this works
    # for nested datasets.
    ds_src = Dataset(path / "source").create()
    ds_src_s0 = ds_src.create("s0")
    ds_src_s1 = ds_src_s0.create("s1")
    ds_src.save(recursive=True)
    if on_adjusted:
        # Note: We adjust after creating all the datasets above to avoid a bug
        # fixed in git-annex 7.20191024, specifically bbdeb1a1a (sync: Fix
        # crash when there are submodules and an adjusted branch is checked
        # out, 2019-10-23).
        for ds in [ds_src, ds_src_s0, ds_src_s1]:
            maybe_adjust_repo(ds.repo)
        ds_src.save(recursive=True)
    assert_repo_status(ds_src.path)
    ds_clone = install(source=ds_src.path, path=path / "clone",
                       recursive=True, result_xfm="datasets")
    ds_clone_s1 = Dataset(ds_clone.pathobj / "s0" / "s1")

    # Detach the source's innermost subdataset and commit new content there,
    # so the clone must fetch an unadvertised, detached revision.
    ds_src_s1.repo.checkout(DEFAULT_BRANCH + "^0")
    (ds_src_s1.pathobj / "foo").write_text("foo content")
    ds_src.save(recursive=True)
    assert_repo_status(ds_src.path)

    res = ds_clone.update(merge=True, recursive=True, follow="parentds",
                          on_failure="ignore")
    if on_adjusted:
        # The top-level update is okay because there is no parent revision to
        # update to.
        assert_in_results(
            res,
            status="ok",
            path=ds_clone.path,
            action="update")
        # The subdataset, on the other hand, is impossible.
        assert_in_results(
            res,
            status="impossible",
            path=ds_clone_s1.path,
            action="update")
        return
    assert_repo_status(ds_clone.path)
    # We brought in the revision and got to the same state of the remote.
    # Blind saving here without bringing in the current subdataset revision
    # would have resulted in a new commit in ds_clone that reverting the
    # last subdataset ID recorded in ds_src.
    eq_(ds_clone.repo.get_hexsha(),
        ds_src.repo.get_hexsha())

    # Record a revision in the parent and then move HEAD away from it so that
    # the explicit revision fetch fails.
    (ds_src_s1.pathobj / "bar").write_text("bar content")
    ds_src.save(recursive=True)
    ds_src_s1.repo.checkout(DEFAULT_BRANCH)
    # This is the default, but just in case:
    ds_src_s1.repo.config.set("uploadpack.allowAnySHA1InWant", "false",
                              where="local")
    # Configure the fetcher to use v0 because Git defaults to v2 as of
    # v2.26.0, which allows fetching unadvertised objects regardless
    # of the value of uploadpack.allowAnySHA1InWant.
    ds_clone_s1.repo.config.set("protocol.version", "0", where="local")
    res = ds_clone.update(merge=True, recursive=True, follow="parentds",
                          on_failure="ignore")
    # The fetch with the explicit ref fails because it isn't advertised.
    assert_in_results(
        res,
        status="impossible",
        path=ds_clone_s1.path,
        action="update")
    # Back to the detached head.
    ds_src_s1.repo.checkout("HEAD@{1}")

    # Set up a case where update() will not resolve the sibling.
    ds_clone_s1.repo.call_git(["branch", "--unset-upstream"])
    ds_clone_s1.config.reload(force=True)
    ds_clone_s1.repo.call_git(["remote", "add", "other", ds_src_s1.path])
    res = ds_clone.update(recursive=True, follow="parentds",
                          on_failure="ignore")
    # In this case, update() won't abort if we call with merge=False, but
    # it does if the revision wasn't brought down in the `fetch(all_=True)`
    # call.
    assert_in_results(
        res,
        status="impossible",
        path=ds_clone_s1.path,
        action="update")