def test_for_duplicates_sources(
    doc_cache_path,
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
    mocker,
):
    """Ensure duplicate volume mounts are not passed to runner.

    :param doc_cache_path: The test data
    :param monkeypatch: The monkeypatch fixture
    :param tmp_path: The path to a test temporary directory
    :param mocker: The mocker fixture used to patch the runner command
    """
    working_dir = tmp_path / "working_dir"
    working_dir.mkdir()
    cdc_full_path = working_dir / doc_cache_path.path
    command = f"ansible-navigator collections '--cdcp={cdc_full_path!s}' --pp never"
    monkeypatch.setattr("sys.argv", shlex.split(command))
    # Abort the run as soon as the runner would be invoked; the call args are
    # all we need to inspect.
    run_cmd_mocked = mocker.patch(
        "ansible_navigator.runner.command.run_command",
        side_effect=DuplicateMountException,
    )
    monkeypatch.chdir(working_dir)
    monkeypatch.setenv("ANSIBLE_NAVIGATOR_ALLOW_UI_TRACEBACK", "true")
    with pytest.raises(DuplicateMountException):
        cli.main()
    _args, kwargs = run_cmd_mocked.call_args
    host_cwd = Path(kwargs["host_cwd"])
    mounts = kwargs["container_volume_mounts"]
    # The parent directory of every mount source must differ from the host
    # cwd, otherwise the cwd would end up mounted twice.
    sources = [Path(mount.split(":")[0]).parents[0] for mount in mounts]
    assert host_cwd not in sources
def test_delete_instance_mismatch(
    mocker: MockerFixture,
    monkeypatch: pytest.MonkeyPatch,
    text_dandiset: SampleDandiset,
) -> None:
    """Deleting assets addressed to different API instances must fail fast."""
    monkeypatch.chdir(text_dandiset.dspath)
    monkeypatch.setenv("DANDI_API_KEY", text_dandiset.api.api_key)
    instance = text_dandiset.api.instance_id
    dandiset_id = text_dandiset.dandiset_id
    delete_spy = mocker.spy(RESTFullAPIClient, "delete")
    path_groups = [
        [
            "subdir1/apple.txt",
            f"dandi://dandi/{dandiset_id}/subdir2/coconut.txt",
        ],
        [
            f"dandi://{instance}/{dandiset_id}/subdir2/coconut.txt",
            f"dandi://dandi/{dandiset_id}/subdir1/apple.txt",
        ],
    ]
    for group in path_groups:
        with pytest.raises(ValueError) as excinfo:
            delete(group, dandi_instance=instance, devel_debug=True, force=True)
        assert (
            str(excinfo.value)
            == "Cannot delete assets from multiple API instances at once"
        )
    # No DELETE request may reach any server when validation fails.
    delete_spy.assert_not_called()
def test_ignore_git_in_working_subdirectory(
    tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Gitignore rules apply relative to the repo root, not the working dir."""
    (tmp_path / ".git").mkdir()
    (tmp_path / ".gitignore").write_text("ignored")
    (tmp_path / "sub").mkdir()
    monkeypatch.chdir(tmp_path / "sub")
    for tracked in ("src", "src/main.py"):
        assert not is_ignored(tracked)
    for excluded in (
        "ignored",
        "ignored/main.py",
        "src/ignored",
        "src/ignored/main.py",
        "../ignored",
        "../ignored/main.py",
        "../sub/ignored",
        "../sub/ignored/main.py",
    ):
        assert is_ignored(excluded)
    # Paths above the repository root are outside the repo's ignore rules.
    for outside in ("../../ignored", "../../ignored/main.py"):
        assert not is_ignored(outside)
def test_move_both_dest_mismatch(
    monkeypatch: pytest.MonkeyPatch, moving_dandiset: SampleDandiset
) -> None:
    """A move whose local and remote destinations disagree must be rejected."""
    apple = moving_dandiset.dspath / "subdir1" / "apple.txt"
    # Replace the local file with a directory so that locally the source would
    # land *inside* it, while remotely it would replace the asset path.
    apple.unlink()
    apple.mkdir()
    (apple / "seeds").write_text("12345\n")
    starting_assets = list(moving_dandiset.dandiset.get_assets())
    monkeypatch.chdir(moving_dandiset.dspath)
    monkeypatch.setenv("DANDI_API_KEY", moving_dandiset.api.api_key)
    with pytest.raises(AssetMismatchError) as excinfo:
        move(
            "file.txt",
            dest="subdir1/apple.txt",
            work_on="both",
            existing="overwrite",
            dandi_instance=moving_dandiset.api.instance_id,
            devel_debug=True,
        )
    assert str(excinfo.value) == (
        "Mismatch between local and remote servers: asset"
        " 'file.txt' would be moved to 'subdir1/apple.txt/file.txt' locally"
        " but to 'subdir1/apple.txt' remotely"
    )
    check_assets(moving_dandiset, starting_assets, "both", {"subdir1/apple.txt": None})
def test_valid_configurator(ee_enabled, tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
    """Confirm a valid ansible.cfg is parsed using configurator.

    :param ee_enabled: Indicate if EE support is enabled
    :param tmp_path: The path to a test temporary directory
    :param monkeypatch: The monkeypatch fixture
    """
    cfg_path = tmp_path / "ansible.cfg"
    cfg_path.write_text(ANSIBLE_CFG_VALID)
    monkeypatch.chdir(tmp_path)
    app_config = deepcopy(NavigatorConfiguration)
    app_config.internals.initializing = True
    configurator = Configurator(
        params=["--ee", str(ee_enabled)],
        application_configuration=app_config,
    )
    configurator.configure()
    ansible_config = app_config.internals.ansible_configuration
    assert ansible_config.contents == {
        "defaults": {"cow_selection": "milk", "inventory": "inventory.yml"},
    }
    assert ansible_config.path == cfg_path
    assert ansible_config.text == ANSIBLE_CFG_VALID.splitlines()
def test_pass_config_dir_ClickPath(
    tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch
):
    """ConfigPath resolves bare names, filenames, and paths against config_dir."""
    configdir = tmp_path / "myconfigdir"
    configdir.mkdir()
    user_config_name = "myconfig"
    user_config = configdir / f"{user_config_name}.yaml"
    user_config.touch()
    expect = str(user_config)
    runner = CliRunner()

    @click.command()
    @click.argument(
        "config",
        type=ConfigPath(exists=True, config_dir=(str(configdir))),
        nargs=-1,
    )
    def config_cmd(config):
        click.echo(config)

    def invoke_with(config_arg):
        # Run the command and hand back its captured output for inspection.
        return runner.invoke(config_cmd, [config_arg]).output

    monkeypatch.chdir(configdir)
    assert expect in invoke_with("myconfig")
    assert expect in invoke_with("myconfig.yaml")
    assert expect in invoke_with("./myconfig.yaml")
    assert expect in invoke_with(str(configdir / "myconfig.yaml"))
    assert "file not found" in invoke_with(".tmuxp.json")
def test_cancel_zarr_upload(
    monkeypatch: pytest.MonkeyPatch, new_dandiset: SampleDandiset
) -> None:
    """The cancel-zarr-upload service script aborts an in-progress upload."""
    client = new_dandiset.client
    asset_path = "foo/bar/baz.zarr"
    zarr_id = client.post(
        "/zarr/", json={"name": asset_path, "dandiset": new_dandiset.dandiset_id}
    )["zarr_id"]
    client.post(
        f"{new_dandiset.dandiset.version_api_path}assets/",
        json={"metadata": {"path": asset_path}, "zarr_id": zarr_id},
    )
    # Start (but never finish) an upload so that it can be cancelled.
    client.post(
        f"/zarr/{zarr_id}/upload/",
        json=[
            {"path": "0.dat", "etag": "0" * 32},
            {"path": "1.dat", "etag": "1" * 32},
        ],
    )
    assert client.get(f"/zarr/{zarr_id}/")["upload_in_progress"] is True
    (new_dandiset.dspath / "foo").mkdir()
    # Run from a subdirectory to confirm the relative Zarr path is resolved.
    monkeypatch.chdir(new_dandiset.dspath / "foo")
    monkeypatch.setenv("DANDI_API_KEY", new_dandiset.api.api_key)
    result = CliRunner().invoke(
        service_scripts,
        ["cancel-zarr-upload", "-i", new_dandiset.api.instance_id, "bar/baz.zarr"],
    )
    assert result.exit_code == 0
    assert client.get(f"/zarr/{zarr_id}/")["upload_in_progress"] is False
def test_build_v2_module(
    tmpdir,
    modules_dir: str,
    modules_v2_dir: str,
    module_name: str,
    is_v2_module: bool,
    set_path_argument: bool,
    monkeypatch: MonkeyPatch,
) -> None:
    """
    Build a V2 package and verify that the required files are present in the resulting wheel.
    """
    module_dir: str
    if is_v2_module:
        module_dir = os.path.join(modules_v2_dir, module_name)
    else:
        module_dir = os.path.join(modules_dir, module_name)
    module_copy_dir = os.path.join(tmpdir, "module")
    shutil.copytree(module_dir, module_copy_dir)
    assert os.path.isdir(module_copy_dir)
    if not set_path_argument:
        # Without an explicit path the build must pick up the cwd.
        monkeypatch.chdir(module_copy_dir)
    run_module_build_soft(module_copy_dir, set_path_argument)
    dist_dir = os.path.join(module_copy_dir, "dist")
    dist_dir_content = os.listdir(dist_dir)
    assert len(dist_dir_content) == 1
    wheel_file = os.path.join(dist_dir, dist_dir_content[0])
    assert wheel_file.endswith(".whl")
    extract_dir = os.path.join(tmpdir, "extract")
    # Named `zip_file` rather than `zip` so the builtin is not shadowed.
    with zipfile.ZipFile(wheel_file) as zip_file:
        zip_file.extractall(extract_dir)

    def assert_in_wheel(*parts: str) -> None:
        """Assert that a path exists under the extracted inmanta_plugins package."""
        assert os.path.exists(
            os.path.join(extract_dir, "inmanta_plugins", module_name, *parts)
        )

    assert_in_wheel("setup.cfg")
    assert_in_wheel("__init__.py")
    assert_in_wheel("model", "_init.cf")
    if "elaborate" in module_name:
        # The elaborate test modules additionally ship data files, templates,
        # extra model files and a sub-package.
        assert_in_wheel("files", "test.txt")
        assert_in_wheel("templates", "template.txt.j2")
        assert_in_wheel("model", "other.cf")
        assert_in_wheel("other_module.py")
        assert_in_wheel("subpkg", "__init__.py")
def test_docfile_chdir(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
    """Parsed actions carry a cwd relative to the documented file's location.

    Also fixes the misspelled local ``excpected_action`` -> ``expected_action``.
    """
    path = tmp_path / "nested/file"
    path.parent.mkdir()
    path.write_text("some text\n")
    monkeypatch.chdir(path.parent)
    filename = tmp_path / "test.txt"
    content = [
        "some shell test\n",
        " nested $ cat file\n",
        " some other text\n",
    ]
    (action,) = parse_actions(content, file=filename)
    expected_action = Action(
        command=process,
        content="some other text\n",
        cwd=Path("nested"),
        file=filename,
        indent=2,
        line=1,
        target="cat file",
    )
    assert action == expected_action
    # NOTE(review): check_file targets "example.txt" while the parsed content
    # was attributed to "test.txt" — presumably intentional, but worth confirming.
    needed_updates = check_file(
        path=tmp_path / "example.txt",
        content=content,
        tmp_dir=tmp_path / "tmp",
        normalize=[],
    )
    assert needed_updates
    # is it copied?
    assert tmp_path.joinpath("tmp/nested/file").is_file()
def test_defaulting(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
    """With no state file on disk, defaults apply and only a write creates one."""
    monkeypatch.chdir(tmp_path)
    now = datetime.now(timezone.utc)
    statefile = StateFile.from_file(now, None)
    # Merely loading defaults must not create any file.
    assert os.listdir() == []
    assert statefile.state == State(github=None, travis=None, appveyor=None)
    assert statefile.path == tmp_path / STATE_FILE
    assert not statefile.migrating
    assert not statefile.modified
    assert statefile.get_since("github") == now
    assert os.listdir() == []
    newdt = datetime(2021, 6, 11, 14, 55, 1, tzinfo=timezone.utc)
    statefile.set_since("github", newdt)
    # The first real write materializes the state file on disk.
    assert os.listdir() == [STATE_FILE]
    assert statefile.state == State(github=newdt, travis=None, appveyor=None)
    assert statefile.modified
    with open(STATE_FILE) as fp:
        data = json.load(fp)
    assert data == {
        "github": "2021-06-11T14:55:01+00:00",
        "travis": None,
        "appveyor": None,
    }
def test_move_path_to_self(
    caplog: pytest.LogCaptureFixture,
    monkeypatch: pytest.MonkeyPatch,
    moving_dandiset: SampleDandiset,
    work_on: str,
) -> None:
    """Moving an asset onto its own location is a logged no-op."""
    (moving_dandiset.dspath / "newdir").mkdir()
    starting_assets = list(moving_dandiset.dandiset.get_assets())
    monkeypatch.chdir(moving_dandiset.dspath / "subdir1")
    monkeypatch.setenv("DANDI_API_KEY", moving_dandiset.api.api_key)
    move(
        "apple.txt",
        dest="../subdir1",
        work_on=work_on,
        devel_debug=True,
        dandi_instance=moving_dandiset.api.instance_id,
    )
    sides = ["local", "remote"] if work_on == "both" else [work_on]
    for where in sides:
        assert (
            "dandi",
            logging.DEBUG,
            f"Would move {where} asset 'subdir1/apple.txt' to itself; ignoring",
        ) in caplog.record_tuples
    assert ("dandi", logging.INFO, "Nothing to move") in caplog.record_tuples
    # Nothing may have changed on either side.
    check_assets(moving_dandiset, starting_assets, work_on, {})
def test_get_abs_path(tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch):
    """get_abs_path() resolves relative paths against the current directory.

    The original comparisons were bare expressions with no ``assert``, so the
    test could never fail; each comparison is now actually asserted.
    """
    expect = str(tmp_path)
    monkeypatch.chdir(tmp_path)
    assert get_abs_path("../") == os.path.dirname(expect)
    assert get_abs_path(".") == expect
    assert get_abs_path("./") == expect
    assert get_abs_path(expect) == expect
def test_move_local_delete_empty_dirs(
    monkeypatch: pytest.MonkeyPatch, moving_dandiset: SampleDandiset
) -> None:
    """A local move prunes directories it empties, but not the cwd or non-empty dirs."""
    starting_assets = list(moving_dandiset.dandiset.get_assets())
    monkeypatch.chdir(moving_dandiset.dspath / "subdir4")
    monkeypatch.setenv("DANDI_API_KEY", moving_dandiset.api.api_key)
    move(
        "../subdir1/apple.txt",
        "../subdir2/banana.txt",
        "foo.json",
        dest="../subdir3",
        work_on="local",
        devel_debug=True,
    )
    expected_moves = {
        "subdir1/apple.txt": "subdir3/apple.txt",
        "subdir2/banana.txt": "subdir3/banana.txt",
        "subdir4/foo.json": "subdir3/foo.json",
    }
    check_assets(moving_dandiset, starting_assets, "local", expected_moves)
    # subdir1 became empty and was removed; subdir2 still holds other assets,
    # and subdir4 is the current working directory.
    assert not (moving_dandiset.dspath / "subdir1").exists()
    assert (moving_dandiset.dspath / "subdir2").exists()
    assert (moving_dandiset.dspath / "subdir4").exists()
def test_move_dandiset_url(
    monkeypatch: pytest.MonkeyPatch,
    moving_dandiset: SampleDandiset,
    tmp_path: Path,
    work_on: str,
) -> None:
    """Assets can be moved via an explicit dandiset URL from outside the tree."""
    starting_assets = list(moving_dandiset.dandiset.get_assets())
    # Run from an unrelated directory: only the URL identifies the dandiset.
    monkeypatch.chdir(tmp_path)
    monkeypatch.setenv("DANDI_API_KEY", moving_dandiset.api.api_key)
    move(
        "file.txt",
        "subdir2/banana.txt",
        dest="subdir1",
        work_on=work_on,
        dandiset=moving_dandiset.dandiset.api_url,
        devel_debug=True,
    )
    check_assets(
        moving_dandiset,
        starting_assets,
        "remote",
        {
            "file.txt": "subdir1/file.txt",
            "subdir2/banana.txt": "subdir1/banana.txt",
        },
    )
def test_move_regex_some_to_self(
    caplog: pytest.LogCaptureFixture,
    monkeypatch: pytest.MonkeyPatch,
    moving_dandiset: SampleDandiset,
    work_on: str,
) -> None:
    """Regex moves skip (and log) matches that map a path onto itself."""
    starting_assets = list(moving_dandiset.dandiset.get_assets())
    monkeypatch.chdir(moving_dandiset.dspath)
    monkeypatch.setenv("DANDI_API_KEY", moving_dandiset.api.api_key)
    move(
        r"(.+[123])/([^.]+)\.(.+)",
        dest=r"\1/\2.dat",
        regex=True,
        work_on=work_on,
        dandi_instance=moving_dandiset.api.instance_id,
        devel_debug=True,
    )
    sides = ["local", "remote"] if work_on == "both" else [work_on]
    # The subdir3 files already end in .dat, so the regex maps them to themselves.
    for path in ["subdir3/red.dat", "subdir3/green.dat", "subdir3/blue.dat"]:
        for where in sides:
            assert (
                "dandi",
                logging.DEBUG,
                f"Would move {where} asset {path!r} to itself; ignoring",
            ) in caplog.record_tuples
    check_assets(
        moving_dandiset,
        starting_assets,
        work_on,
        {
            "subdir1/apple.txt": "subdir1/apple.dat",
            "subdir2/banana.txt": "subdir2/banana.dat",
            "subdir2/coconut.txt": "subdir2/coconut.dat",
        },
    )
def test_valid_home(ee_enabled, tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
    """Confirm a valid .ansible.cfg is parsed when in the home directory.

    When EE support is enabled, the .ansible.cfg file is not used
    When EE support is disabled the .ansible.cfg file is used

    :param ee_enabled: Indicate if EE support is enabled
    :param tmp_path: The path to a test temporary directory
    :param monkeypatch: The monkeypatch fixture
    """
    cfg_path = tmp_path / ".ansible.cfg"
    cfg_path.write_text(ANSIBLE_CFG_VALID)
    monkeypatch.chdir(tmp_path)
    monkeypatch.setenv("HOME", str(tmp_path))
    parsed_cfg = parse_ansible_cfg(ee_enabled=ee_enabled)
    config = parsed_cfg.config
    if ee_enabled:
        # Inside an EE the host's home-directory config is ignored entirely.
        assert config.contents is Constants.NONE
        assert config.path is Constants.NONE
        assert config.text is Constants.NONE
    else:
        assert config.contents == {
            "defaults": {"cow_selection": "milk", "inventory": "inventory.yml"},
        }
        assert config.path == cfg_path
        assert config.text == ANSIBLE_CFG_VALID.splitlines()
def test_migration(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
    """An old-style state file is migrated only when the state actually changes."""
    monkeypatch.chdir(tmp_path)
    with open(OLD_STATE_FILE, "w") as fp:
        json.dump(
            {
                "github": "2021-06-11T14:44:17+00:00",
                "travis": "2021-02-03T04:05:06+00:00",
            },
            fp,
        )
    statefile = StateFile.from_file(
        datetime(2020, 1, 2, 3, 4, 5, tzinfo=timezone.utc), None
    )
    original_github = datetime(2021, 6, 11, 14, 44, 17, tzinfo=timezone.utc)

    def assert_not_migrated() -> None:
        # Still the old file on disk, still pending migration, not modified.
        assert os.listdir() == [OLD_STATE_FILE]
        assert statefile.path == tmp_path / OLD_STATE_FILE
        assert statefile.migrating
        assert not statefile.modified

    assert os.listdir() == [OLD_STATE_FILE]
    assert statefile.state == State(
        github=original_github,
        travis=datetime(2021, 2, 3, 4, 5, 6, tzinfo=timezone.utc),
        appveyor=None,
    )
    assert statefile.path == tmp_path / OLD_STATE_FILE
    assert statefile.migrating
    assert not statefile.modified
    # Reading a value does not trigger migration.
    assert statefile.get_since("github") == original_github
    assert_not_migrated()
    # Re-writing the same value does not trigger migration either.
    statefile.set_since("github", original_github)
    assert_not_migrated()
    # A genuinely new value migrates the data to the new state file.
    newdt = datetime(2021, 6, 11, 14, 48, 39, tzinfo=timezone.utc)
    statefile.set_since("github", newdt)
    assert os.listdir() == [STATE_FILE]
    assert statefile.state == State(
        github=newdt,
        travis=datetime(2021, 2, 3, 4, 5, 6, tzinfo=timezone.utc),
        appveyor=None,
    )
    assert statefile.path == tmp_path / STATE_FILE
    assert not statefile.migrating
    assert statefile.modified
    with open(STATE_FILE) as fp:
        data = json.load(fp)
    assert data == {
        "github": "2021-06-11T14:48:39+00:00",
        "travis": "2021-02-03T04:05:06+00:00",
        "appveyor": None,
    }
def test_ignore_symlink_circular(
    tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Mutually-referencing symlinks must not be ignored (or cause a loop)."""
    monkeypatch.chdir(tmp_path)
    (tmp_path / "link1").symlink_to(tmp_path / "link2")
    (tmp_path / "link2").symlink_to(tmp_path / "link1")
    for link in ("link1", "link2"):
        assert not is_ignored(link)
def test_delete_path_pyout(
    mocker: MockerFixture,
    monkeypatch: pytest.MonkeyPatch,
    text_dandiset: SampleDandiset,
) -> None:
    """Deleting a single valid path actually issues a DELETE request."""
    monkeypatch.chdir(text_dandiset.dspath)
    monkeypatch.setenv("DANDI_API_KEY", text_dandiset.api.api_key)
    instance = text_dandiset.api.instance_id
    delete_spy = mocker.spy(RESTFullAPIClient, "delete")
    delete(["subdir2/coconut.txt"], dandi_instance=instance, force=True)
    delete_spy.assert_called()
def test_module_conversion_in_place_cli(tmpdir, monkeypatch: MonkeyPatch):
    """Converting a v1 module in place succeeds once and refuses a second run."""
    module_name = "elaboratev1module"
    # Work on a copy under tmpdir; don't reuse the fixture name for the copy.
    work_dir = os.path.join(tmpdir, module_name)
    source_dir = os.path.normpath(
        os.path.join(__file__, os.pardir, os.pardir, "data", "modules", module_name)
    )
    shutil.copytree(source_dir, work_dir)
    monkeypatch.chdir(work_dir)
    moduletool.ModuleTool().v1tov2(None)
    assert_v2_module(module_name, work_dir)
    # A module that is already v2 cannot be converted again.
    with pytest.raises(ModuleVersionException):
        moduletool.ModuleTool().v1tov2(None)
def test_move_no_srcs(
    monkeypatch: pytest.MonkeyPatch, moving_dandiset: SampleDandiset
) -> None:
    """Calling move() without any source paths raises a ValueError."""
    starting_assets = list(moving_dandiset.dandiset.get_assets())
    monkeypatch.chdir(moving_dandiset.dspath)
    monkeypatch.setenv("DANDI_API_KEY", moving_dandiset.api.api_key)
    with pytest.raises(ValueError) as excinfo:
        move(
            dest="nowhere",
            work_on="both",
            dandi_instance=moving_dandiset.api.instance_id,
        )
    assert str(excinfo.value) == "No source paths given"
    # Nothing may have changed on either side.
    check_assets(moving_dandiset, starting_assets, "both", {})
def test_ignore_git_symlink_outside_repo(
    tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Ignore rules apply to a symlink path inside the repo, not to its target."""
    monkeypatch.chdir(tmp_path)
    (tmp_path / "repo" / ".git").mkdir(parents=True)
    (tmp_path / "repo" / ".gitignore").write_text("link")
    (tmp_path / "link").mkdir()
    (tmp_path / "repo" / "link").symlink_to(tmp_path / "link")
    # The target directory outside the repo is not ignored...
    assert not is_ignored("link")
    assert not is_ignored("link/main.py")
    # ...but the symlink path inside the repo is.
    assert is_ignored("repo/link")
    assert is_ignored("repo/link/main.py")
def test_ignore_git_subdirectory_pattern(
    tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """A slash-qualified gitignore pattern matches only the named sub-path."""
    monkeypatch.chdir(tmp_path)
    (tmp_path / ".git").mkdir()
    (tmp_path / ".gitignore").write_text("sub/ignored")
    (tmp_path / "sub").mkdir()
    for kept in ("sub", "sub/main.py"):
        assert not is_ignored(kept)
    for dropped in ("sub/ignored", "sub/ignored/main.py"):
        assert is_ignored(dropped)
def test_get_configs_cwd(tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch):
    """config.in_cwd() find config in shell current working directory.

    :param tmp_path: The path to a test temporary directory
    :param monkeypatch: The monkeypatch fixture
    """
    confdir = tmp_path / "tmuxpconf2"
    confdir.mkdir()
    monkeypatch.chdir(confdir)
    # Create the empty config via pathlib instead of a bare open()/close()
    # pair, which leaked the handle if anything raised between the calls.
    (confdir / ".tmuxp.json").touch()
    configs_found = config.in_cwd()
    assert len(configs_found) == 1
    assert ".tmuxp.json" in configs_found
def test_ignore_git_in_subdirectory(
    tmp_path: pathlib.Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Nested .gitignore files each apply only below their own directory."""
    monkeypatch.chdir(tmp_path)
    (tmp_path / ".git").mkdir()
    (tmp_path / ".gitignore").write_text("parent")
    (tmp_path / "sub").mkdir()
    (tmp_path / "sub" / ".gitignore").write_text("child")
    root = tmp_path.name
    # The root pattern "parent" applies anywhere under the repo root.
    not_ignored = [
        "src",
        "src/main.py",
        "sub/src",
        "sub/src/main.py",
    ]
    ignored = [
        "parent",
        "parent/main.py",
        "sub/parent",
        "sub/parent/main.py",
        "src/parent",
        "src/parent/main.py",
        "sub/src/parent",
        "sub/src/parent/main.py",
    ]
    # Paths that escape the repository are not covered by its rules, but
    # re-entering the repo via "../<root>" is.
    not_ignored += [
        "../parent",
        "../parent/main.py",
        "../sub/parent",
        "../sub/parent/main.py",
    ]
    ignored += [
        f"../{root}/parent",
        f"../{root}/parent/main.py",
        f"../{root}/sub/parent",
        f"../{root}/sub/parent/main.py",
    ]
    # The nested .gitignore pattern "child" only affects paths under "sub".
    not_ignored += ["child", "child/main.py", "src/child", "src/child/main.py"]
    ignored += ["sub/child", "sub/child/main.py", "sub/src/child", "sub/src/child/main.py"]
    not_ignored += ["sub/../child", "sub/../child/main.py"]
    ignored += [f"../{root}/sub/child", f"../{root}/sub/child/main.py"]
    for path in not_ignored:
        assert not is_ignored(path)
    for path in ignored:
        assert is_ignored(path)
def test_move_folder_src_file_dest(
    monkeypatch: pytest.MonkeyPatch,
    moving_dandiset: SampleDandiset,
    work_on: str,
) -> None:
    """Moving a folder onto an existing file path raises a ValueError."""
    starting_assets = list(moving_dandiset.dandiset.get_assets())
    monkeypatch.chdir(moving_dandiset.dspath)
    monkeypatch.setenv("DANDI_API_KEY", moving_dandiset.api.api_key)
    with pytest.raises(ValueError) as excinfo:
        move(
            "subdir1",
            dest="subdir2/banana.txt",
            work_on=work_on,
            dandi_instance=moving_dandiset.api.instance_id,
        )
    assert str(excinfo.value) == "Cannot move folder 'subdir1' to a file path"
    # Nothing may have changed on either side.
    check_assets(moving_dandiset, starting_assets, work_on, {})
def test_nested_constraints_file(
    self, monkeypatch: pytest.MonkeyPatch, tmpdir: Path, session: PipSession
) -> None:
    """A requirements file may pull in a constraints file via a relative -c line."""
    expected_name = "hello"
    parent_dir = tmpdir / "parent"
    req_file = parent_dir / "req_file.txt"
    parent_dir.mkdir()
    req_file.write_text("-c reqs.txt")
    parent_dir.joinpath("reqs.txt").write_text(expected_name)
    monkeypatch.chdir(str(tmpdir))
    parsed = list(parse_reqfile("./parent/req_file.txt", session=session))
    assert len(parsed) == 1
    assert parsed[0].name == expected_name
    # The nested requirement came from a -c file, so it is a constraint.
    assert parsed[0].constraint
def test_simple_iterpath_sort_relpath_prefix(monkeypatch: pytest.MonkeyPatch) -> None:
    """Sorted iteration of a relative path keeps the prefix on every entry."""
    monkeypatch.chdir(DATA_DIR)
    expected_parts = [
        ("dir01", ".config"),
        ("dir01", ".config", "cfg.ini"),
        ("dir01", ".hidden"),
        ("dir01", "foo.txt"),
        ("dir01", "glarch"),
        ("dir01", "glarch", "bar.txt"),
        ("dir01", "gnusto"),
        ("dir01", "gnusto", "cleesh.txt"),
        ("dir01", "gnusto", "quux"),
        ("dir01", "gnusto", "quux", "quism.txt"),
        ("dir01", "xyzzy.txt"),
    ]
    assert list(iterpath("dir01", sort=True)) == [
        Path(*parts) for parts in expected_parts
    ]
def test_move_both_dest_is_local_dir_sans_slash(
    monkeypatch: pytest.MonkeyPatch, moving_dandiset: SampleDandiset
) -> None:
    """A dest naming an existing local directory is treated as a folder destination."""
    (moving_dandiset.dspath / "newdir").mkdir()
    starting_assets = list(moving_dandiset.dandiset.get_assets())
    monkeypatch.chdir(moving_dandiset.dspath)
    monkeypatch.setenv("DANDI_API_KEY", moving_dandiset.api.api_key)
    move(
        "file.txt",
        dest="newdir",
        work_on="both",
        devel_debug=True,
        dandi_instance=moving_dandiset.api.instance_id,
    )
    check_assets(
        moving_dandiset, starting_assets, "both", {"file.txt": "newdir/file.txt"}
    )
def tmp_doc_build_folder(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path:
    """Create source/build/_static folders, chdir into source, and return build."""
    source = tmp_path / "source"
    build = tmp_path / "build"
    static = source / "_static"
    source.mkdir()
    build.mkdir()
    static.mkdir()
    # Seed the source tree with the repository's conf.py and examples page.
    base_source = BASE_DIR / "source"
    source.joinpath("conf.py").write_bytes((base_source / "conf.py").read_bytes())
    source.joinpath("index.rst").write_bytes(
        (base_source / "examples.rst").read_bytes()
    )
    monkeypatch.chdir(source)
    return build