def new_monkeypatch():
    """Return a fresh ``MonkeyPatch`` instance.

    ``pytest.MonkeyPatch`` is used when the installed pytest exposes it;
    otherwise fall back to the private ``_pytest.monkeypatch`` location
    (the original noted this as a fallback for the Python 3.5 environment).
    """
    patch_cls = getattr(pytest, "MonkeyPatch", None)
    if patch_cls is None:
        from _pytest.monkeypatch import MonkeyPatch as patch_cls
    return patch_cls()
def hdfs_fs():
    """Fixture providing helpers for exercising an HDFS-backed filesystem.

    Yields ``(path_to, read, write, mkdirs, join, internal)`` — or a list of
    ``None`` placeholders when HDFS tests should be skipped.
    """
    if should_skip(HDFS_URI):
        yield [None] * NUM_ATR_FS
        return
    from pyarrow.fs import HadoopFileSystem

    monkeypatch = pytest.MonkeyPatch()
    hdfs_host = os.environ.get("HDFS_HOST")
    hdfs_port = int(os.environ.get("HDFS_PORT", 9000))
    if hdfs_host is None:
        # No host configured: assume a local/emulated HDFS on this machine.
        hdfs_host = socket.gethostbyname(socket.gethostname())
    hdfs = HadoopFileSystem(hdfs_host, hdfs_port)

    def path_to(*args):
        # Build a fully-qualified URI under the shared test prefix.
        return (
            f"{HDFS_URI}://{hdfs_host}:{hdfs_port}/{posixpath.join(ROOT_PREFIX, *args)}"
        )

    def read(path):
        # Use a context manager so the input stream is closed deterministically
        # (the original leaked the stream by never closing it).
        with hdfs.open_input_stream(path) as f:
            return f.readall()

    def write(path, body):
        with hdfs.open_output_stream(path) as f:
            f.write(body)

    def mkdirs(path):
        hdfs.create_dir(path, recursive=True)

    yield path_to, read, write, mkdirs, posixpath.join, None
    monkeypatch.undo()
def setUp(self):
    """Create a MonkeyPatch and a Slurm batch-system worker backed by fakes."""
    self.monkeypatch = pytest.MonkeyPatch()
    fake_boss = FakeBatchSystem()
    self.worker = toil.batchSystems.slurm.SlurmBatchSystem.Worker(
        newJobsQueue=Queue(),
        updatedJobsQueue=Queue(),
        killQueue=Queue(),
        killedJobsQueue=Queue(),
        boss=fake_boss,
    )
def disable_http():
    """Route HTTP(S) through an unroutable proxy when network tests are off.

    When ``DANDI_TESTS_NONETWORK`` is set, point both proxy variables at a
    dead local port so any accidental network access fails fast; otherwise
    yield with the environment untouched.
    """
    if not os.environ.get("DANDI_TESTS_NONETWORK"):
        yield
        return
    with pytest.MonkeyPatch().context() as m:
        for proxy_var in ("http_proxy", "https_proxy"):
            m.setenv(proxy_var, "http://127.0.0.1:9/")
        yield
def test__load_env_float(val):
    """Should load a float variable from the environment."""
    with pytest.MonkeyPatch().context() as patcher:
        patcher.setenv("ELECTIVE_TEST_NUM", str(val))
        conf = elective.load_env(prefix="ELECTIVE_TEST_")
        loaded = elective.process_float("num", conf)
        # NaN never compares equal to itself, so it needs a dedicated check.
        if math.isnan(val):
            assert math.isnan(loaded) is True
        else:
            assert loaded == val
def az_fs():
    """Fixture providing helpers for exercising an Azure Blob filesystem.

    Yields ``(path_to, read, write, mkdirs, join, internal)`` — or a list of
    ``None`` placeholders when Azure tests should be skipped.
    """
    if should_skip(AZ_URI):
        yield [None] * NUM_ATR_FS
        return
    from azure.storage.blob import ContainerClient

    monkeypatch = pytest.MonkeyPatch()
    container_name = os.environ.get("AZ_TEST_CONTAINER")
    account = None
    client = None
    # This means we are running against emulator.
    if container_name is None:
        monkeypatch.setenv("TF_AZURE_USE_DEV_STORAGE", "1")
        container_name = f"tf-io-bucket-az-{int(time.time())}"
        account = "devstoreaccount1"
        conn_str = (
            "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;"
            "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq"
            "/K1SZFPTOtr/KBHBeksoGMGw==;"
            "BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
        )
        client = ContainerClient.from_connection_string(conn_str, container_name)
        client.create_container()
    else:
        # TODO(vnvo2409): Implement for testing against production scenario
        # NOTE(review): in this branch ``client`` stays None and the
        # upload below will fail — confirm before enabling production runs.
        pass
    client.upload_blob(ROOT_PREFIX, b"")

    def parse(path):
        # Strip the scheme/account/container to get the blob key.
        res = urlparse(path, scheme=AZ_URI, allow_fragments=False)
        return res.path.split("/", 2)[2]

    def path_to(*args):
        return f"{AZ_URI}://{account}/{container_name}/{posixpath.join(ROOT_PREFIX, *args)}"

    def read(path):
        key_name = parse(path)
        return client.download_blob(key_name).content_as_bytes()

    def write(path, body):
        key_name = parse(path)
        client.upload_blob(key_name, body)

    def mkdirs(path):
        # Ensure a trailing slash and always create the marker blob, matching
        # the sibling s3_fs/gcs_fs fixtures (previously, paths without a
        # trailing slash were silently ignored).
        if path[-1] != "/":
            path += "/"
        write(path, b"")

    yield path_to, read, write, mkdirs, posixpath.join, (
        client,
        container_name,
        account,
    )
    monkeypatch.undo()
def test_guess_credentials():
    """guess_credentials() should pick up credentials from the environment."""
    expected = mock_read_from_environ()
    with pytest.MonkeyPatch().context() as mp:
        mp.setattr('las.credentials.read_from_environ', mock_read_from_environ)
        credentials = guess_credentials()
        actual = (
            credentials.client_id,
            credentials.client_secret,
            credentials.auth_endpoint,
            credentials.api_endpoint,
        )
        assert actual == expected
def s3_fs():
    """Fixture providing helpers for exercising an S3 filesystem.

    Yields ``(path_to, read, write, mkdirs, join, internal)`` — or a list of
    ``None`` placeholders when S3 tests should be skipped.
    """
    if should_skip(S3_URI):
        yield [None] * NUM_ATR_FS
        return
    import boto3

    monkeypatch = pytest.MonkeyPatch()
    bucket_name = os.environ.get("S3_TEST_BUCKET")
    client = None
    # This means we are running against emulator.
    if bucket_name is None:
        endpoint_url = "http://localhost:4566"
        for env_key, env_value in (
            ("AWS_REGION", "us-east-1"),
            ("AWS_ACCESS_KEY_ID", "TEST"),
            ("AWS_SECRET_ACCESS_KEY", "TEST"),
            ("S3_ENDPOINT", endpoint_url),
        ):
            monkeypatch.setenv(env_key, env_value)
        bucket_name = f"tf-io-bucket-s3-{int(time.time())}"
        client = boto3.client("s3", endpoint_url=endpoint_url)
        client.create_bucket(Bucket=bucket_name)
    else:
        # TODO(vnvo2409): Implement for testing against production scenario
        pass
    client.put_object(Bucket=bucket_name, Key=ROOT_PREFIX, Body="")

    def split(path):
        # Decompose an s3:// URI into (bucket, key).
        parsed = urlparse(path, scheme=S3_URI, allow_fragments=False)
        return parsed.netloc, parsed.path[1:]

    def path_to(*args):
        return f"{S3_URI}://{bucket_name}/{posixpath.join(ROOT_PREFIX, *args)}"

    def read(path):
        target_bucket, key_name = split(path)
        return client.get_object(Bucket=target_bucket, Key=key_name)["Body"].read()

    def write(path, body):
        target_bucket, key_name = split(path)
        client.put_object(Bucket=target_bucket, Key=key_name, Body=body)

    def mkdirs(path):
        # Directories are marker objects whose key ends with "/".
        if path[-1] != "/":
            path += "/"
        write(path, b"")

    yield path_to, read, write, mkdirs, posixpath.join, (client, bucket_name)
    monkeypatch.undo()
def gcs_fs():
    """Fixture providing helpers for exercising a GCS filesystem.

    Yields ``(path_to, read, write, mkdirs, join, internal)`` — or a list of
    ``None`` placeholders when GCS tests should be skipped.
    """
    if should_skip(GCS_URI):
        yield [None] * NUM_ATR_FS
        return
    import tensorflow_io_gcs_filesystem
    from google.cloud import storage

    monkeypatch = pytest.MonkeyPatch()
    bucket_name = os.environ.get("GCS_TEST_BUCKET")
    bucket = None
    # This means we are running against emulator.
    if bucket_name is None:
        for env_key in ("STORAGE_EMULATOR_HOST", "CLOUD_STORAGE_EMULATOR_ENDPOINT"):
            monkeypatch.setenv(env_key, "http://localhost:9099")
        bucket_name = f"tf-io-bucket-gs-{int(time.time())}"
        client = storage.Client.create_anonymous_client()
        client.project = "test_project"
        bucket = client.create_bucket(bucket_name)
    else:
        # TODO(vnvo2409): Implement for testing against production scenario
        pass

    def blob_key(path):
        # Strip the scheme and bucket to obtain the object key.
        parsed = urlparse(path, scheme=GCS_URI, allow_fragments=False)
        return parsed.path[1:]

    def path_to(*args):
        return f"{GCS_URI}://{bucket_name}/{posixpath.join(ROOT_PREFIX, *args)}"

    def read(path):
        return bucket.get_blob(blob_key(path)).download_as_bytes()

    def write(path, body):
        bucket.blob(blob_key(path)).upload_from_string(body)

    def mkdirs(path):
        # Directories are marker objects whose key ends with "/".
        if path[-1] != "/":
            path += "/"
        write(path, b"")

    yield path_to, read, write, mkdirs, posixpath.join, None
    monkeypatch.undo()
def upload(self, paths: Optional[List[Union[str, Path]]] = None, **kwargs: Any) -> None:
    """Upload the dataset (or the given paths) with the API key patched in.

    Extra keyword arguments override the instance's default upload kwargs.
    """
    with pytest.MonkeyPatch().context() as m:
        m.setenv("DANDI_API_KEY", self.api.api_key)
        merged_kwargs = {**self.upload_kwargs, **kwargs}
        upload(
            paths=paths or [self.dspath],
            dandi_instance=self.api.instance_id,
            devel_debug=True,
            **merged_kwargs,
        )
def setupEnv():
    """Example of overriding an environment variable with MonkeyPatch.

    Suppose the module under test reads an environment variable at global
    (module) scope::

        region = os.getenv('AWS_REGION')

    When the test imports that module, ``os.getenv`` has already run before
    any test function is called, so rewriting the environment variable in
    setup is too late.  In that case, instead of patching the environment
    variable itself, patch the variable the value was read into (``region``
    in the example) — this indirectly achieves the environment override.
    """
    # NOTE(review): this MonkeyPatch instance is never undone, so the patch
    # on ``target.region`` persists beyond setup — confirm teardown handles it.
    mp = pytest.MonkeyPatch()
    mp.setattr(target, 'region', 'ap-northeast-1')
import pytest

# Prepend the "mock" directory to sys.path for the whole session.  The
# MonkeyPatch instance is discarded without undo(), so the sys.path change
# is effectively permanent here — presumably intentional for test discovery.
pytest.MonkeyPatch().syspath_prepend("mock")
def new_monkeypatch():
    """Return a fresh ``pytest.MonkeyPatch`` instance."""
    patcher = pytest.MonkeyPatch()
    return patcher
def test__load_env_string(str):
    """Should load a string variable from the environment."""
    # NOTE(review): the parameter name shadows the builtin ``str``.  Renaming
    # it would require updating the matching parametrize argnames (defined
    # outside this view), so it is only flagged here.
    with pytest.MonkeyPatch().context() as mp:
        mp.setenv("ELECTIVE_TEST_FOO", str)
        conf = elective.load_env(prefix="ELECTIVE_TEST_")
        assert elective.process_string("foo", conf) == str
def test__load_env_integer(val):
    """Should load an integer variable from the environment."""
    with pytest.MonkeyPatch().context() as patcher:
        patcher.setenv("ELECTIVE_TEST_NUM", str(val))
        loaded_conf = elective.load_env(prefix="ELECTIVE_TEST_")
        assert elective.process_integer("num", loaded_conf) == val
def setup_package():
    """Session-wide setup/teardown for the datalad test suite.

    Patches dataset URLs and git configuration, optionally redirects $HOME,
    starts a shared local HTTP server, then yields; everything after the
    ``yield`` is teardown (version reporting, server shutdown, temp cleanup).
    Relies on module-level state: ``cfg``, ``ssh_manager``, ``lgr``,
    ``_test_states``, ``test_http_server``, ``get_home_envvars`` — assumed to
    be defined elsewhere in this file.
    """
    import tempfile
    from pathlib import Path

    from datalad import consts
    # NOTE(review): AnnexRepo is imported but not referenced below —
    # presumably imported for its side effects; confirm before removing.
    from datalad.support.annexrepo import AnnexRepo
    from datalad.support.cookies import cookies_db
    from datalad.support.external_versions import external_versions
    from datalad.tests import _TEMP_PATHS_GENERATED
    from datalad.tests.utils_pytest import (
        DEFAULT_BRANCH,
        DEFAULT_REMOTE,
        OBSCURE_FILENAME,
        HTTPPath,
        rmtemp,
    )
    from datalad.ui import ui
    from datalad.utils import (
        make_tempfile,
        on_osx,
    )

    if on_osx:
        # enforce honoring TMPDIR (see gh-5307)
        tempfile.tempdir = os.environ.get('TMPDIR', tempfile.gettempdir())

    # All environment/attribute patches live for the duration of the session
    # and are undone automatically when the context exits after teardown.
    with pytest.MonkeyPatch().context() as m:
        m.setattr(consts, "DATASETS_TOPURL", 'https://datasets-tests.datalad.org/')
        m.setenv('DATALAD_DATASETS_TOPURL', consts.DATASETS_TOPURL)

        m.setenv(
            "GIT_CONFIG_PARAMETERS",
            "'init.defaultBranch={}' 'clone.defaultRemoteName={}'".format(
                DEFAULT_BRANCH, DEFAULT_REMOTE))

        def prep_tmphome():
            # Prepare a fresh temporary $HOME with a pre-populated .gitconfig;
            # returns (home_path, gitconfig_path).
            # re core.askPass:
            # Don't let git ask for credentials in CI runs. Note, that this variable
            # technically is not a flag, but an executable (which is why name and value
            # are a bit confusing here - we just want a no-op basically). The environment
            # variable GIT_ASKPASS overwrites this, but neither env var nor this config
            # are supported by git-credential on all systems and git versions (most recent
            # ones should work either way, though). Hence use both across CI builds.
            gitconfig = """\
[user]
	name = DataLad Tester
	email = [email protected]
[core]
	askPass =
[datalad "log"]
	exc = 1
[annex "security"]
	# from annex 6.20180626 file:/// and http://localhost access isn't
	# allowed by default
	allowed-url-schemes = http https file
	allowed-http-addresses = all
"""
            # TODO: split into a function + context manager
            with make_tempfile(mkdir=True) as new_home:
                pass
            # register for clean-up on exit
            _TEMP_PATHS_GENERATED.append(new_home)
            # populate default config
            new_home = Path(new_home)
            new_home.mkdir(parents=True, exist_ok=True)
            cfg_file = new_home / '.gitconfig'
            cfg_file.write_text(gitconfig)
            return new_home, cfg_file

        if external_versions['cmd:git'] < "2.32":
            # To overcome pybuild overriding HOME but us possibly wanting our
            # own HOME where we pre-setup git for testing (name, email)
            if 'GIT_HOME' in os.environ:
                m.setenv('HOME', os.environ['GIT_HOME'])
            else:
                # we setup our own new HOME, the BEST and HUGE one
                new_home, _ = prep_tmphome()
                for v, val in get_home_envvars(new_home).items():
                    m.setenv(v, val)
        else:
            # git >= 2.32 can point at a global config file directly,
            # so $HOME does not need to be redirected.
            _, cfg_file = prep_tmphome()
            m.setenv('GIT_CONFIG_GLOBAL', str(cfg_file))

        # Re-load ConfigManager, since otherwise it won't consider global config
        # from new $HOME (see gh-4153
        cfg.reload(force=True)

        # datalad.locations.sockets has likely changed. Discard any cached values.
        ssh_manager._socket_dir = None

        # To overcome pybuild by default defining http{,s}_proxy we would need
        # to define them to e.g. empty value so it wouldn't bother touching them.
        # But then haskell libraries do not digest empty value nicely, so we just
        # pop them out from the environment
        for ev in ('http_proxy', 'https_proxy'):
            if ev in os.environ and not (os.environ[ev]):
                lgr.debug("Removing %s from the environment since it is empty", ev)
                os.environ.pop(ev)

        # Prevent interactive credential entry (note "true" is the command to run)
        # See also the core.askPass setting above
        m.setenv('GIT_ASKPASS', 'true')

        # Set to non-interactive UI
        _test_states['ui_backend'] = ui.backend
        # obtain() since that one consults for the default value
        ui.set_backend(cfg.obtain('datalad.tests.ui.backend'))

        # in order to avoid having to fiddle with rather uncommon
        # file:// URLs in the tests, have a standard HTTP server
        # that serves an 'httpserve' directory in the test HOME
        # the URL will be available from datalad.test_http_server.url
        global test_http_server
        # Start the server only if not running already
        # Relevant: we have test_misc.py:test_test which runs datalad.test but
        # not doing teardown, so the original server might never get stopped
        if test_http_server is None:
            serve_path = tempfile.mkdtemp(
                dir=cfg.get("datalad.tests.temp.dir"),
                prefix='httpserve',
            )
            test_http_server = HTTPPath(serve_path)
            test_http_server.start()
            _TEMP_PATHS_GENERATED.append(serve_path)

        if cfg.obtain('datalad.tests.setup.testrepos'):
            lgr.debug("Pre-populating testrepos")
            from datalad.tests.utils_pytest import with_testrepos
            with_testrepos()(lambda repo: 1)()

        # --- setup done; tests run while we are suspended here ---
        yield

        # --- teardown ---
        lgr.debug("Printing versioning information collected so far")
        # Query for version of datalad, so it is included in ev.dumps below - useful while
        # testing extensions where version of datalad might differ in the environment.
        external_versions['datalad']
        print(external_versions.dumps(query=True))
        try:
            print("Obscure filename: str=%s repr=%r"
                  % (OBSCURE_FILENAME.encode('utf-8'), OBSCURE_FILENAME))
        except UnicodeEncodeError as exc:
            ce = CapturedException(exc)
            print("Obscure filename failed to print: %s" % ce)

        def print_dict(d):
            # Render a dict as "key=value" pairs for the summary lines below.
            return " ".join("%s=%r" % v for v in d.items())

        print("Encodings: %s" % print_dict(get_encoding_info()))
        print("Environment: %s" % print_dict(get_envvars_info()))

        if os.environ.get('DATALAD_TESTS_NOTEARDOWN'):
            # Escape hatch: leave everything in place for post-mortem debugging.
            return
        ui.set_backend(_test_states['ui_backend'])

        if test_http_server:
            test_http_server.stop()
            test_http_server = None
        else:
            lgr.debug(
                "For some reason global http_server was not set/running, thus not stopping"
            )

        if len(_TEMP_PATHS_GENERATED):
            msg = "Removing %d dirs/files: %s" % (
                len(_TEMP_PATHS_GENERATED), ', '.join(_TEMP_PATHS_GENERATED))
        else:
            msg = "Nothing to remove"
        lgr.debug("Teardown tests. " + msg)
        for path in _TEMP_PATHS_GENERATED:
            rmtemp(str(path), ignore_errors=True)

        # Re-establish correct global config after changing $HOME.
        # Might be superfluous, since after teardown datalad.cfg shouldn't be
        # needed. However, maintaining a consistent state seems a good thing
        # either way.
        cfg.reload(force=True)

        ssh_manager._socket_dir = None

        cookies_db.close()
def monkeypatch(self):
    """Provide a fresh ``pytest.MonkeyPatch`` instance."""
    patcher = pytest.MonkeyPatch()
    return patcher
def test_get_language_directory():
    """_get_language_directory() should succeed when the directory exists.

    The original discarded the MonkeyPatch instance after ``setattr``, so the
    ``Path.is_dir`` patch was never undone and leaked into every subsequent
    test in the session.  Using ``.context()`` restores it automatically.
    """
    with pytest.MonkeyPatch().context() as mp:
        mp.setattr(Path, 'is_dir', lambda x: True)
        assert languages._get_language_directory()