def virtualenv_template(
    request: pytest.FixtureRequest,
    tmpdir_factory: pytest.TempPathFactory,
    pip_src: Path,
    setuptools_install: Path,
    coverage_install: Path,
) -> Iterator[VirtualEnvironment]:
    """Build a template virtual environment with pip installed in develop mode.

    The finished environment is renamed to ``venv_template`` so that tests
    copy it instead of accidentally reusing (and dirtying) the original.

    Yields:
        The moved template ``VirtualEnvironment``.
    """
    # Backend is selected via the --use-venv command-line option.
    venv_type: VirtualEnvironmentType
    if request.config.getoption("--use-venv"):
        venv_type = "venv"
    else:
        venv_type = "virtualenv"

    # Create the virtual environment
    tmpdir = tmpdir_factory.mktemp("virtualenv")
    venv = VirtualEnvironment(tmpdir.joinpath("venv_orig"), venv_type=venv_type)

    # Install setuptools and pip.
    install_pth_link(venv, "setuptools", setuptools_install)
    # Work on a private copy of the pip source tree so the develop install
    # below cannot modify the shared checkout.
    pip_editable = tmpdir_factory.mktemp("pip") / "pip"
    shutil.copytree(pip_src, pip_editable, symlinks=True)
    # Pre-compile the copy; compile_dir returns falsy on any failure.
    # noxfile.py is Python 3 only
    assert compileall.compile_dir(
        str(pip_editable),
        quiet=1,
        rx=re.compile("noxfile.py$"),
    )
    # Editable ("develop") install of pip into the venv, using the venv's
    # own interpreter.
    subprocess.check_call(
        [os.fspath(venv.bin / "python"), "setup.py", "-q", "develop"],
        cwd=pip_editable,
    )

    # Install coverage and pth file for executing it in any spawned processes
    # in this virtual environment.
    install_pth_link(venv, "coverage", coverage_install)
    # zz prefix ensures the file is after easy-install.pth.
    with open(venv.site / "zz-coverage-helper.pth", "a") as f:
        f.write("import coverage; coverage.process_startup()")

    # Drop (non-relocatable) launchers.
    for exe in os.listdir(venv.bin):
        if not (
            exe.startswith("python")
            or exe.startswith("libpy")  # Don't remove libpypy-c.so...
        ):
            (venv.bin / exe).unlink()

    # Enable user site packages.
    venv.user_site_packages = True

    # Rename original virtualenv directory to make sure
    # it's not reused by mistake from one of the copies.
    venv_template = tmpdir / "venv_template"
    venv.move(venv_template)
    yield venv
def test_redis_exec_configuration(
    request: FixtureRequest,
    tmp_path_factory: TempPathFactory,
    parameter,
    config_option,
    value,
):
    """
    Check if RedisExecutor properly processes configuration options.

    Improperly set options won't be set in redis,
    and we won't be able to read it out of redis.
    """
    config = get_config(request)
    # Fix: this was an f-string with no placeholders (ruff F541); a plain
    # string literal is the intended, equivalent form.
    tmpdir = tmp_path_factory.mktemp("pytest-redis-test-test_redis_exec_configuration")
    redis_exec = RedisExecutor(
        executable=config["exec"],
        databases=4,
        redis_timeout=config["timeout"],
        loglevel=config["loglevel"],
        port=get_port(None),
        host=config["host"],
        timeout=30,
        datadir=tmpdir,
        **parameter,
    )
    # The executor is a context manager: the server is started on entry and
    # torn down on exit.
    with redis_exec:
        redis_client = redis.StrictRedis(redis_exec.host, redis_exec.port, 0)
        assert redis_client.config_get(config_option) == {config_option: value}
def _common_wheel_editable_install(
    tmpdir_factory: pytest.TempPathFactory, common_wheels: Path, package: str
) -> Path:
    """Install *package* from its wheel in ``common_wheels`` into a temp dir.

    Returns the ``lib`` directory of the installation; installed scripts are
    removed because they would reference the wrong interpreter.
    """
    matches = list(common_wheels.glob(f"{package}-*.whl"))
    assert len(matches) == 1, matches
    base_dir = tmpdir_factory.mktemp(package) / "install"
    lib_dir = base_dir / "lib"
    scripts_dir = base_dir / "bin"
    destination = SchemeDictionaryDestination(
        {
            "purelib": os.fspath(lib_dir),
            "platlib": os.fspath(lib_dir),
            "scripts": os.fspath(scripts_dir),
        },
        interpreter=sys.executable,
        script_kind="posix",
    )
    with WheelFile.open(matches[0]) as source:
        install(source, destination, additional_metadata={})
    # The scripts are not necessary for our use cases, and they would be installed with
    # the wrong interpreter, so remove them.
    # TODO consider a refactoring by adding a install_from_wheel(path) method
    # to the virtualenv fixture.
    if scripts_dir.exists():
        shutil.rmtree(scripts_dir)
    return lib_dir
def organized_nwb_dir2(
    simple1_nwb_metadata: Dict[str, Any],
    simple2_nwb: str,
    tmp_path_factory: pytest.TempPathFactory,
) -> Path:
    """Build a dandiset dir with two NWB files and organize it in 'move' mode."""
    tmp_path = tmp_path_factory.mktemp("organized_nwb_dir2")
    # need to copy first and then use -f move since we will create one more
    # file to be "organized"
    shutil.copy(simple2_nwb, tmp_path)
    subject = pynwb.file.Subject(
        subject_id="lizard001",
        date_of_birth=datetime(2016, 12, 1, tzinfo=tzutc()),
        sex="F",
        species="Gekko gecko",
    )
    make_nwb_file(str(tmp_path / "simple3.nwb"), subject=subject, **simple1_nwb_metadata)
    (tmp_path / dandiset_metadata_file).write_text("{}\n")
    result = CliRunner().invoke(organize, ["-f", "move", "--dandiset-path", str(tmp_path)])
    assert result.exit_code == 0, result.stdout
    # Organizing should have produced exactly two subject directories.
    assert sum(entry.is_dir() for entry in tmp_path.iterdir()) == 2
    return tmp_path
def pip_src(tmpdir_factory: pytest.TempPathFactory) -> Path:
    """Return a self-contained copy of pip's source tree in a temp dir."""

    def _ignore(path: str, names: List[str]) -> Iterable[str]:
        # At the repository root keep only the "src" folder, and drop ".git"
        # if present (which may be a file if in a linked worktree).
        if os.path.samefile(path, SRC_DIR):
            skip = {
                name
                for name in names
                if os.path.isdir(os.path.join(path, name)) and name != "src"
            }
            if ".git" in names:
                skip.add(".git")
            return skip
        # Everywhere else, ignore compiled artifacts and egg-info.
        skip = set()
        for pattern in ("__pycache__", "*.pyc", "pip.egg-info"):
            skip.update(fnmatch.filter(names, pattern))
        return skip

    destination = tmpdir_factory.mktemp("pip_src").joinpath("pip_src")
    # Copy over our source tree so that each use is self contained
    shutil.copytree(SRC_DIR, destination.resolve(), ignore=_ignore)
    return destination
def clean_project(tmpdir_factory: pytest.TempPathFactory, data: TestData) -> Path:
    """Return a fresh, private copy of the FSPkg sample project."""
    destination = tmpdir_factory.mktemp("clean_project").joinpath("FSPkg")
    source = data.packages.joinpath("FSPkg")
    shutil.copytree(source, destination)
    return destination
def test_client_cert(self, tmp_path_factory: pytest.TempPathFactory):
    """Factory passes a client certificate/key pair through to the TLS client."""
    certs_dir = tmp_path_factory.mktemp("certs")
    with _build_server(certs_dir, "localhost") as cfg:
        server, ca, ca_cert_path = cfg
        # Issue a client certificate from the same CA the server trusts.
        issued = ca.issue_cert("localhost")
        cert_path = str(certs_dir / "client.pem")
        key_path = str(certs_dir / "client.key")
        issued.cert_chain_pems[0].write_to_path(cert_path)
        issued.private_key_pem.write_to_path(key_path)
        factory = client.EsClientFactory(
            [{"host": "localhost", "port": server.port}],
            {
                "use_ssl": True,
                "verify_certs": True,
                "ca_certs": ca_cert_path,
                "client_cert": cert_path,
                "client_key": key_path,
            },
        )
        assert factory.create().info() == {"version": {"number": "8.0.0"}}
def logging_conf_tmp_path_no_dict(tmp_path_factory: "pytest.TempPathFactory") -> Path:
    """Create temporary logging config file without logging config dict.

    Returns:
        The temporary directory containing ``no_dict.py`` — the consumer
        locates the config module by directory, so the dir (not the file)
        is returned.
    """
    tmp_dir = tmp_path_factory.mktemp("tmp_log_no_dict")
    tmp_file = tmp_dir / "no_dict.py"
    # Fix: tmp_file is already a Path — the redundant Path(...) re-wrap is
    # dropped. "x" mode still fails loudly if the file somehow exists.
    with open(tmp_file, "x") as f:
        f.write("print('Hello, World!')\n")
    return tmp_dir
def new_dandiset(
    local_dandi_api: DandiAPI,
    request: FixtureRequest,
    tmp_path_factory: pytest.TempPathFactory,
) -> SampleDandiset:
    """Create a publishable test Dandiset and a local directory bound to it."""
    # Minimal metadata needed to create a publishable Dandiset.  The
    # contributor needs to be given explicitly here or else it'll be set
    # based on the user account.  For the Docker Compose setup, that would
    # mean basing it on the admin user, whose name doesn't validate under
    # dandischema.
    metadata = {
        "description": "A test Dandiset",
        "license": ["spdx:CC0-1.0"],
        "contributor": [
            {
                "schemaKey": "Person",
                "name": "Wodder, John",
                "roleName": ["dcite:Author", "dcite:ContactPerson"],
            }
        ],
    }
    dandiset = local_dandi_api.client.create_dandiset(
        f"Sample Dandiset for {request.node.name}", metadata
    )
    local_dir = tmp_path_factory.mktemp("dandiset")
    (local_dir / dandiset_metadata_file).write_text(
        f"identifier: '{dandiset.identifier}'\n"
    )
    return SampleDandiset(
        api=local_dandi_api,
        dspath=local_dir,
        dandiset=dandiset,
        dandiset_id=dandiset.identifier,
    )
def text_dandiset(
    dandi_client: DandiAPIClient, tmp_path_factory: pytest.TempPathFactory
) -> Iterator[Dict[str, Any]]:
    """Create and upload a small text-only Dandiset on the staging server.

    Yields a dict exposing the client, the local dandiset path, the Dandiset
    object, its identifier, and a ``reupload`` callable for re-uploading
    after local edits.  On teardown, all published versions and then the
    Dandiset itself are deleted.
    """
    d = dandi_client.create_dandiset(
        "Dandiset for testing backups2datalad",
        {
            "schemaKey": "Dandiset",
            "name": "Dandiset for testing backups2datalad",
            "description": "A test text Dandiset",
            "contributor": [
                {
                    "schemaKey": "Person",
                    "name": "Wodder, John",
                    "roleName": ["dcite:Author", "dcite:ContactPerson"],
                }
            ],
            "license": ["spdx:CC0-1.0"],
            "manifestLocation": ["https://github.com/dandi/dandi-cli"],
        },
    )
    dandiset_id = d.identifier
    dspath = tmp_path_factory.mktemp("text_dandiset")
    (dspath / dandiset_metadata_file).write_text(f"identifier: '{dandiset_id}'\n")
    # A handful of plain-text assets, including nested subdirectories.
    (dspath / "file.txt").write_text("This is test text.\n")
    (dspath / "v0.txt").write_text("Version 0\n")
    (dspath / "subdir1").mkdir()
    (dspath / "subdir1" / "apple.txt").write_text("Apple\n")
    (dspath / "subdir2").mkdir()
    (dspath / "subdir2" / "banana.txt").write_text("Banana\n")
    (dspath / "subdir2" / "coconut.txt").write_text("Coconut\n")

    def upload_dandiset(paths: Optional[List[str]] = None, **kwargs: Any) -> None:
        # Defaults to uploading the entire dandiset directory.
        upload(
            paths=paths or [dspath],
            dandi_instance="dandi-staging",
            devel_debug=True,
            allow_any_path=True,
            validation="skip",
            **kwargs,
        )

    try:
        upload_dandiset()
        yield {
            "client": dandi_client,
            "dspath": dspath,
            "dandiset": d,
            "dandiset_id": dandiset_id,
            "reupload": upload_dandiset,
        }
    finally:
        # Teardown: delete every published (non-draft) version first, then
        # the Dandiset itself.
        for v in d.get_versions():
            if v.identifier != "draft":
                dandi_client.delete(f"{d.api_path}versions/{v.identifier}/")
        d.delete()
def _make_dataset(
    tmp_path_factory: pytest.TempPathFactory,
    builder_cls: Type[dataset_builder.DatasetBuilder],
) -> dataset_builder.DatasetBuilder:
    """Instantiate *builder_cls* in its own temp dir and prepare its data."""
    data_dir = tmp_path_factory.mktemp(f'global_{builder_cls.__name__}')
    dataset = builder_cls(data_dir=data_dir)
    dataset.download_and_prepare()
    return dataset
def rst_stubs(tmp_path_factory: pytest.TempPathFactory, micropython_repo):
    """Generate stubs from RST files - once for this module."""
    version_tag = micropython_repo
    # Dedicated output folder for this test module.
    destination = tmp_path_factory.mktemp("stubs") / version_tag
    source = Path(MICROPYTHON_FOLDER) / "docs/library"
    generate_from_rst(source, destination, v_tag=version_tag, black=True)
    yield destination
def logging_conf_tmp_path_incorrect_type(
    tmp_path_factory: "pytest.TempPathFactory",
) -> Path:
    """Create temporary logging config file with incorrect LOGGING_CONFIG type.

    Returns:
        The temporary directory containing ``incorrect_type.py`` (the
        consumer locates the config module by directory).
    """
    tmp_dir = tmp_path_factory.mktemp("tmp_log_incorrect_type")
    tmp_file = tmp_dir / "incorrect_type.py"
    # Fix: tmp_file is already a Path — the redundant Path(...) re-wrap is
    # dropped. "x" mode still fails loudly if the file somehow exists.
    with open(tmp_file, "x") as f:
        f.write("LOGGING_CONFIG: list = ['Hello', 'World']\n")
    return tmp_dir
def _common_wheel_editable_install(
    tmpdir_factory: pytest.TempPathFactory, common_wheels: Path, package: str
) -> Path:
    """Install *package*'s wheel as an egg in a temp dir and byte-compile it.

    Returns the installation directory.
    """
    matches = list(common_wheels.glob(f"{package}-*.whl"))
    assert len(matches) == 1, matches
    target = tmpdir_factory.mktemp(package) / "install"
    Wheel(matches[0]).install_as_egg(target)
    # Rename the egg metadata folder so the result looks like an egg-info
    # style install of the package.
    (target / "EGG-INFO").rename(target / f"{package}.egg-info")
    assert compileall.compile_dir(str(target), quiet=1)
    return target
def test_parsing_model(tmp_path_factory: TempPathFactory, model: Model) -> None:
    """Round-trip check: written specification files parse to an equal model."""
    spec_dir = tmp_path_factory.mktemp(test_parsing_model.__name__)
    model.write_specification_files(spec_dir)
    parser = Parser()
    parser.parse(spec_dir / "test.rflx")
    roundtripped = parser.create_model()
    assert roundtripped.types == model.types
    assert roundtripped == model
def logging_conf_tmp_path_incorrect_extension(
    tmp_path_factory: "pytest.TempPathFactory",
) -> Path:
    """Create custom temporary logging config file with incorrect extension.

    Returns:
        The temporary directory containing ``tmp_logging_conf`` (the
        consumer locates the config file by directory).
    """
    tmp_dir = tmp_path_factory.mktemp("tmp_log_incorrect_extension")
    tmp_file = tmp_dir / "tmp_logging_conf"
    # Fix: tmp_file is already a Path — the redundant Path(...) re-wrap is
    # dropped. "x" mode still fails loudly if the file somehow exists.
    with open(tmp_file, "x") as f:
        f.write("This file doesn't have the correct extension.\n")
    return tmp_dir
def no_san_server(
    loopback_host: str, tmp_path_factory: pytest.TempPathFactory
) -> Generator[ServerConfig, None, None]:
    """HTTPS server whose certificate carries only a Common Name (no SAN)."""
    certs_dir = tmp_path_factory.mktemp("certs")
    authority = trustme.CA()
    cert = authority.issue_cert(common_name=loopback_host)
    with run_server_in_thread("https", loopback_host, certs_dir, authority, cert) as cfg:
        yield cfg
def env_file(tmp_path_factory: pytest.TempPathFactory):
    """Copy example.env to a temp dir, pin version vars, and yield its path."""
    env_path = tmp_path_factory.mktemp("frappe-docker") / ".env"
    shutil.copy("example.env", env_path)
    for name in ("FRAPPE_VERSION", "ERPNEXT_VERSION"):
        _add_version_var(name=name, env_path=env_path)
    yield str(env_path)
    # Teardown: remove the copied env file.
    os.remove(env_path)
def no_san_server_with_different_commmon_name(  # NOTE(review): "commmon" typo is in the public fixture name; renaming would break callers
    tmp_path_factory: pytest.TempPathFactory,
) -> Generator[ServerConfig, None, None]:
    """HTTPS server on localhost whose cert CN is example.com (mismatched)."""
    certs_dir = tmp_path_factory.mktemp("certs")
    authority = trustme.CA()
    cert = authority.issue_cert(common_name="example.com")
    with run_server_in_thread("https", "localhost", certs_dir, authority, cert) as cfg:
        yield cfg
def ip_san_server(
    tmp_path_factory: pytest.TempPathFactory,
) -> Generator[ServerConfig, None, None]:
    """HTTPS server whose cert lists 127.0.0.1 in the Subject Alternative Name."""
    certs_dir = tmp_path_factory.mktemp("certs")
    authority = trustme.CA()
    # IP address in Subject Alternative Name
    cert = authority.issue_cert("127.0.0.1")
    with run_server_in_thread("https", "127.0.0.1", certs_dir, authority, cert) as cfg:
        yield cfg
def script_with_launchers(
    tmpdir_factory: pytest.TempPathFactory,
    script_factory: ScriptFactory,
    common_wheels: Path,
    pip_src: Path,
) -> PipTestEnvironment:
    """Test environment with pip reinstalled so its launcher scripts exist."""
    workspace = tmpdir_factory.mktemp("script_with_launchers").joinpath("workspace")
    environment = script_factory(workspace)
    # Re-install pip so we get the launchers.
    environment.pip_install_local("-f", common_wheels, pip_src)
    return environment
def reload_directory_structure(tmp_path_factory: pytest.TempPathFactory):
    """
    This fixture creates a directory structure to enable reload parameter tests

    The fixture has the following structure:
    root
    ├── [app, app_first, app_second, app_third]
    │   ├── css
    │   │   └── main.css
    │   ├── js
    │   │   └── main.js
    │   ├── src
    │   │   └── main.py
    │   └── sub
    │       └── sub.py
    ├── ext
    │   └── ext.jpg
    └── main.py
    """
    root = tmp_path_factory.mktemp("reload_directory")
    # Top-level file plus hidden file/dir to exercise dotted-path filtering.
    (root / "main.py").touch()
    (root / ".dotted").touch()
    hidden_dir = root / ".dotted_dir"
    hidden_dir.mkdir()
    (hidden_dir / "file.txt").touch()
    # Identical sub-layout inside each app directory.
    layout = [
        ("src", ["main.py"]),
        ("js", ["main.js"]),
        ("css", ["main.css"]),
        ("sub", ["sub.py"]),
    ]
    for app in ("app", "app_first", "app_second", "app_third"):
        app_dir = root / app
        app_dir.mkdir()
        for subdir, filenames in layout:
            target = app_dir / subdir
            target.mkdir()
            for filename in filenames:
                (target / filename).touch()
    ext_dir = root / "ext"
    ext_dir.mkdir()
    (ext_dir / "ext.jpg").touch()
    yield root
def organized_nwb_dir(simple2_nwb: str, tmp_path_factory: pytest.TempPathFactory) -> Path:
    """Organize a single NWB file (copy mode) into a fresh dandiset dir."""
    dandiset_path = tmp_path_factory.mktemp("organized_nwb_dir")
    (dandiset_path / dandiset_metadata_file).write_text("{}\n")
    result = CliRunner().invoke(
        organize,
        ["-f", "copy", "--dandiset-path", str(dandiset_path), str(simple2_nwb)],
    )
    assert result.exit_code == 0, result.stdout
    return dandiset_path
def tdb_wrapper(
    request: pytest.FixtureRequest,
    tmp_path_factory: pytest.TempPathFactory,
    np_array: np.ndarray,
) -> tiledb.Array:
    """Yield a single- or multi-attribute wrapper around a dense TileDB array."""
    uri = str(tmp_path_factory.mktemp("array"))
    with tiledb.DenseArray.from_numpy(uri, np_array) as tdb:
        # request.param chooses which wrapper flavor the test is
        # parametrized with.
        wrapper = (
            SingleAttrArrayWrapper(tdb, "")
            if request.param
            else MultiAttrArrayWrapper(tdb)
        )
        yield wrapper
def no_san_proxy_with_server(
    tmp_path_factory: pytest.TempPathFactory,
) -> Generator[Tuple[ServerConfig, ServerConfig], None, None]:
    """Proxy + server pair where the proxy cert has only a CN (no SAN)."""
    certs_dir = tmp_path_factory.mktemp("certs")
    authority = trustme.CA()
    # only common name, no subject alternative names
    proxy_cert = authority.issue_cert(common_name="localhost")
    server_cert = authority.issue_cert("localhost")
    with run_server_and_proxy_in_thread(
        "https", "localhost", certs_dir, authority, proxy_cert, server_cert
    ) as cfg:
        yield cfg
def ipv6_no_san_server(
    tmp_path_factory: pytest.TempPathFactory,
) -> Generator[ServerConfig, None, None]:
    """HTTPS server on ::1 whose cert has the IP only in the Common Name."""
    if not HAS_IPV6:
        pytest.skip("Only runs on IPv6 systems")
    certs_dir = tmp_path_factory.mktemp("certs")
    authority = trustme.CA()
    # IP address in Common Name
    cert = authority.issue_cert(common_name="::1")
    with run_server_in_thread("https", "::1", certs_dir, authority, cert) as cfg:
        yield cfg
def ipv6_san_proxy_with_server(
    tmp_path_factory: pytest.TempPathFactory,
) -> Generator[Tuple[ServerConfig, ServerConfig], None, None]:
    """Proxy on ::1 (IP in SAN) fronting a localhost HTTPS server."""
    certs_dir = tmp_path_factory.mktemp("certs")
    authority = trustme.CA()
    # IP addresses in Subject Alternative Name
    proxy_cert = authority.issue_cert("::1")
    server_cert = authority.issue_cert("localhost")
    with run_server_and_proxy_in_thread(
        "https", "::1", certs_dir, authority, proxy_cert, server_cert
    ) as cfg:
        yield cfg
def test_ip_address(self, tmp_path_factory: pytest.TempPathFactory):
    """Factory verifies a server certificate issued for an IP address host."""
    certs_dir = tmp_path_factory.mktemp("certs")
    with _build_server(certs_dir, "127.0.0.1") as cfg:
        server, _ca, ca_cert_path = cfg
        factory = client.EsClientFactory(
            [{"host": "127.0.0.1", "port": server.port}],
            {
                "use_ssl": True,
                "verify_certs": True,
                "ca_certs": ca_cert_path,
            },
        )
        assert factory.create().info() == {"version": {"number": "8.0.0"}}
def pip_test_package_script(
    tmpdir_factory: pytest.TempPathFactory,
    script_factory: ScriptFactory,
    shared_data: TestData,
) -> PipTestEnvironment:
    """Environment with simple==1.0 plus an editable pip-test-package checkout."""
    workspace = tmpdir_factory.mktemp("pip_test_package").joinpath("workspace")
    environment = script_factory(workspace)
    environment.pip("install", "-f", shared_data.find_links, "--no-index", "simple==1.0")
    environment.pip(
        "install",
        "-e",
        "git+https://github.com/pypa/pip-test-package.git#egg=pip-test-package",
    )
    return environment
def tmp_home(
    monkeypatch: pytest.MonkeyPatch, tmp_path_factory: pytest.TempPathFactory
) -> Path:
    """Point HOME (and Windows equivalents) at a temp dir; clear XDG overrides."""
    home = tmp_path_factory.mktemp("tmp_home")
    # Unset every XDG variable so nothing escapes the sandboxed home.
    for xdg_var in (
        "XDG_CACHE_HOME",
        "XDG_CONFIG_DIRS",
        "XDG_CONFIG_HOME",
        "XDG_DATA_DIRS",
        "XDG_DATA_HOME",
        "XDG_RUNTIME_DIR",
        "XDG_STATE_HOME",
    ):
        monkeypatch.delenv(xdg_var, raising=False)
    for env_name in ("HOME", "USERPROFILE", "LOCALAPPDATA"):
        monkeypatch.setenv(env_name, str(home))
    return home