def custom_commit_date(dt: Optional[datetime]) -> Iterator[None]:
    """
    Context-manager generator: while the managed block runs, present commits
    as authored by "DANDI User" at time *dt* by exporting the
    ``GIT_AUTHOR_*`` environment variables (via ``envset``, which restores
    the previous environment on exit).

    If *dt* is ``None``, the environment is left untouched.
    """
    if dt is None:
        yield
        return
    # envset() handles save/restore of each variable; combining the three
    # managers in one `with` is equivalent to nesting them.
    with envset("GIT_AUTHOR_NAME", "DANDI User"), envset(
        "GIT_AUTHOR_EMAIL", "*****@*****.**"
    ), envset("GIT_AUTHOR_DATE", str(dt)):
        yield
def test_envset_unset_error(monkeypatch: pytest.MonkeyPatch) -> None:
    """When the variable was unset beforehand and the managed block raises,
    envset() must still leave the variable unset afterwards."""
    monkeypatch.delenv(ENVVAR, raising=False)
    with pytest.raises(RuntimeError, match="Catch this!"), envset(ENVVAR, "bar"):
        assert os.environ[ENVVAR] == "bar"
        raise RuntimeError("Catch this!")
    assert ENVVAR not in os.environ
def test_envset_error(monkeypatch: pytest.MonkeyPatch) -> None:
    """When the managed block raises, envset() must still restore the
    variable's previous value."""
    monkeypatch.setenv(ENVVAR, "foo")
    with pytest.raises(RuntimeError, match="Catch this!"), envset(ENVVAR, "bar"):
        assert os.environ[ENVVAR] == "bar"
        raise RuntimeError("Catch this!")
    assert os.environ[ENVVAR] == "foo"
def init_dataset(self, dsdir: Path, create_time: datetime) -> Dataset:
    """
    Return the Datalad dataset at *dsdir*, creating and configuring it
    first if it is not already installed.

    A freshly created dataset is committed with author date *create_time*,
    uses ``DEFAULT_BRANCH`` as its initial branch, and — when
    ``self.config.backup_remote`` is set — gets an untrusted rclone
    special remote with a restricted preferred-content expression.
    """
    ds = Dataset(str(dsdir))
    if ds.is_installed():
        return ds
    log.info("Creating Datalad dataset")
    with custom_commit_date(create_time), envset(
        "GIT_CONFIG_PARAMETERS", f"'init.defaultBranch={DEFAULT_BRANCH}'"
    ):
        ds.create(cfg_proc="text2git")
    remote = self.config.backup_remote
    if remote is not None:
        ds.repo.init_remote(
            remote,
            [
                "type=external",
                "externaltype=rclone",
                "chunk=1GB",
                f"target={remote}",  # target name intentionally matches the remote name
                "prefix=dandi-dandisets/annexstore",
                "embedcreds=no",
                "uuid=727f466f-60c3-4778-90b2-b2332856c2f8",
                "encryption=none",
                # shared, initialized in 000003
            ],
        )
        # Never count the backup remote toward numcopies.
        ds.repo.call_annex(["untrust", remote])
        ds.repo.set_preferred_content(
            "wanted",
            "(not metadata=distribution-restrictions=*)",
            remote=remote,
        )
    return ds
def setup(self, n, control, tmpdir):
    """Benchmark fixture: build a uniquely named PersistentCache (with the
    FSCACHER_CACHE mode set to *control* during construction) and populate
    a matching directory tree from self.LAYOUT under *tmpdir*."""
    token = str(uuid4())
    with envset("FSCACHER_CACHE", control):
        self.cache = PersistentCache(name=token)
    self.dir = Path(tmpdir, token)
    self.dir.mkdir()
    create_tree(self.dir, self.LAYOUT)
def ensure_superdataset(self) -> Dataset:
    """Return the superdataset at ``self.target_path``, creating it (with
    ``DEFAULT_BRANCH`` as the initial branch) if it is not yet installed."""
    superds = Dataset(self.target_path)
    if superds.is_installed():
        return superds
    log.info("Creating Datalad superdataset")
    with envset("GIT_CONFIG_PARAMETERS", f"'init.defaultBranch={DEFAULT_BRANCH}'"):
        superds.create(cfg_proc="text2git")
    return superds
def test_envset_unset_delled(monkeypatch: pytest.MonkeyPatch) -> None:
    """If the variable was unset before entering envset() and is deleted
    inside the block, it must remain absent after exit."""
    monkeypatch.delenv(ENVVAR, raising=False)
    with envset(ENVVAR, "bar"):
        assert os.environ[ENVVAR] == "bar"
        os.environ.pop(ENVVAR)
    assert os.environ.get(ENVVAR) is None
def test_envset(monkeypatch: pytest.MonkeyPatch) -> None:
    """envset() overrides an existing variable inside the block and restores
    the prior value on exit."""
    monkeypatch.setenv(ENVVAR, "foo")
    with envset(ENVVAR, "bar"):
        assert os.environ.get(ENVVAR) == "bar"
    assert os.environ.get(ENVVAR) == "foo"
# NOTE(review): this is a fragment of a larger release-tagging routine whose
# `def` (and the `if` matching the `else:` below) lie outside this view, so it
# cannot be safely reformatted or restructured here. From what is visible it
# appears to: check out a release-<version> branch, either update dandiset
# metadata (committed with a custom author date) or re-sync the dataset, create
# an annotated tag with GIT_COMMITTER_* pinned to the version's creation time,
# then return to DEFAULT_BRANCH, delete the temporary branch, and optionally
# push the tag to the "github" remote — confirm against the full function.
git("checkout", "-b", f"release-{dandiset.version_id}", matching[0]) update_dandiset_metadata(dandiset, ds) with custom_commit_date(dandiset.version.created): ds.save(message= f"[backups2datalad] {dandiset_metadata_file} updated") else: log.info( "Assets in candidate commits do not match assets in version %s;" " syncing", dandiset.version_id, ) git("checkout", "-b", f"release-{dandiset.version_id}", candidates[0]) self.sync_dataset(dandiset, ds) with envset("GIT_COMMITTER_NAME", "DANDI User"): with envset("GIT_COMMITTER_EMAIL", "*****@*****.**"): with envset("GIT_COMMITTER_DATE", str(dandiset.version.created)): git( "tag", "-m", f"Version {dandiset.version_id} of Dandiset" f" {dandiset.identifier}", dandiset.version_id, ) git("checkout", DEFAULT_BRANCH) git("branch", "-D", f"release-{dandiset.version_id}") if push: git("push", "github", dandiset.version_id)
def setup(self, n, control):
    """Benchmark fixture: create a uniquely named PersistentCache with the
    FSCACHER_CACHE mode set to *control* during construction."""
    name = str(uuid4())
    with envset("FSCACHER_CACHE", control):
        self.cache = PersistentCache(name=name)