def test_print(argv, capsys: CaptureFixture):
    """Running main with ``argv`` greets on stdout and writes nothing to stderr."""
    main(argv)
    captured = capsys.readouterr()
    assert 'Hello world!' in captured.out
    assert not captured.err
def test_train_finetune(run_in_simple_project: Callable[..., RunResult], capsys: CaptureFixture):
    """Fine-tuning in a fresh project reports that no base model exists."""
    run_in_simple_project("train", "--finetune")
    captured = capsys.readouterr()
    assert "No model for finetuning found" in captured.out
def test_builtin_settings(capsys: CaptureFixture):
    """The ``settings`` subcommand echoes the settings object followed by a newline."""
    main(['settings'])
    out, _ = capsys.readouterr()
    assert out == f'{scanpy.settings}\n'
def test_error_wrong_command(capsys: CaptureFixture):
    """An unknown subcommand exits with status 2 and names the bad command on stderr."""
    with pytest.raises(SystemExit, match='^2$'):
        main(['idonotexist--'])
    _, err = capsys.readouterr()
    assert 'No command “idonotexist--”. Choose from' in err
def test_incremental_maintains_data_fidelity_when_new_data_new_and_missing_columns(
    capsys: CaptureFixture,
    archive_dir: LocalPath,
    import_dir: LocalPath,  # NOTE(review): unused here; presumably the fixture has side effects — confirm
    cache_file: LocalPath,
    hash_file: Optional[LocalPath],
    verbose: bool,
):
    """Incremental build maintains data fidelity when new data

    * has columns not present in the existing data cache.
    * is missing columns found in the existing data cache.
    """
    # First dataset: missing the "species" column relative to the full schema.
    pre_datafiles: List[str] = [
        os.path.join(get_data_path(), "iris_plus_partial-1-of-2-no-species.csv")
    ]
    # Second dataset: missing the "petalcolor" column instead.
    datafiles: List[str] = [
        os.path.join(get_data_path(), "iris_plus_partial-2-of-2-no-petalcolor.csv")
    ]
    # When no explicit hash file is given, syphon defaults to one placed
    # next to the cache file.
    resolved_hashfile = (
        cache_file.dirpath(syphon.core.check.DEFAULT_FILE)
        if hash_file is None
        else hash_file
    )
    assert syphon.archive(archive_dir, pre_datafiles)
    # The archive lock file must be cleaned up after archiving completes.
    assert not os.path.exists(os.path.join(get_data_path(), "#lock"))
    # Pre-build
    assert syphon.build(
        cache_file,
        *get_data_files(archive_dir),
        hash_filepath=hash_file,
        incremental=False,
        overwrite=False,
        post_hash=True,
        verbose=False,
    )
    # Get the hash of the cache file before our main build.
    pre_cache_hash: str = syphon.hash.HashEntry(cache_file).hash
    # Get the hash of the hash file for easy file change checking.
    pre_hash_hash: str = syphon.hash.HashEntry(resolved_hashfile).hash
    # Main build
    assert syphon.build(
        cache_file,
        *datafiles,
        hash_filepath=hash_file,
        incremental=True,
        overwrite=True,
        post_hash=True,
        verbose=verbose,
    )
    # Output should appear only when verbose is on; never on stderr.
    assert_captured_outerr(capsys.readouterr(), verbose, False)
    post_cache_hash: str = syphon.hash.HashEntry(cache_file).hash
    post_hash_hash: str = syphon.hash.HashEntry(resolved_hashfile).hash
    # The expected merged result, sorted for order-independent comparison.
    expected_frame = DataFrame(
        read_csv(
            os.path.join(
                get_data_path(),
                "iris_plus_partial-new-data-new-and-missing-columns.csv",
            ),
            dtype=str,
            index_col="Index",
        ))
    expected_frame.sort_index(inplace=True)
    # Both the cache and its recorded hash must change after the incremental build.
    assert pre_cache_hash != post_cache_hash
    assert pre_hash_hash != post_hash_hash
    # The hash file's entry for the cache must match the freshly computed hash.
    with syphon.hash.HashFile(resolved_hashfile) as hashfile:
        for entry in hashfile:
            if os.path.samefile(entry.filepath, str(cache_file)):
                assert post_cache_hash == entry.hash
    actual_frame = DataFrame(
        read_csv(cache_file, dtype=str, index_col="Index"))
    actual_frame.sort_index(inplace=True)
    assert_frame_equal(expected_frame, actual_frame, check_exact=True)
def test_update_command_with_invalid_study_name(capsys: CaptureFixture):
    """``update`` with an unknown study exits 1 and reports it on stderr."""
    with TEST_CASE.assertRaises(SystemExit) as exc_info:
        call_command("update", "study-not-in-db")
    TEST_CASE.assertEqual(1, exc_info.exception.code)
    captured_err = capsys.readouterr().err
    TEST_CASE.assertIn("does not exist", captured_err)
def parse_logs_from_stdout(capsys: CaptureFixture) -> List[str]:
    """Return the captured stdout as a list of non-padded log lines."""
    stdout_text = capsys.readouterr().out
    return stdout_text.strip().split("\n")
def test_list_site_keys(capsys: CaptureFixture, recaptcha_site_key: str) -> None:
    """Listing site keys prints the known key's resource path."""
    list_site_keys(project_id=GOOGLE_CLOUD_PROJECT)
    captured = capsys.readouterr()
    assert re.search(f"keys/{recaptcha_site_key}", captured.out)
def test_file_not_exists(self, capsys: CaptureFixture) -> None:
    """A nonexistent file yields empty mismatch data and a stderr warning."""
    result = setup_expected_mismatches("a_file")
    assert result == ({}, set())
    captured = capsys.readouterr()
    assert captured.err.startswith("WARNING: Provided")
def test_workload_identity_federation_aws(capsys: CaptureFixture) -> None:
    """Creating an AWS federation token prints the URL-encoded token."""
    import google.auth

    _credentials, project_id = google.auth.default()
    workload_identity_federation.create_token_aws(project_id, "provider_id", "pool_id")
    captured = capsys.readouterr()
    assert re.search("URL encoded token:", captured.out)
def test_schema_error(self, setup_mock: Mock, capsys: CaptureFixture) -> None:
    """A schema failure makes analyze_stubs return falsy and report on stderr."""
    result = analyze_stubs("mypy_conf_path", "base_stubs_path")
    assert not result
    captured = capsys.readouterr()
    assert "Boom" in captured.err
def test_json_error(self, setup_mock: Mock, capsys: CaptureFixture) -> None:
    """A JSON decode failure makes analyze_stubs return falsy with position info on stderr."""
    result = analyze_stubs("mypy_conf_path", "base_stubs_path")
    assert not result
    captured = capsys.readouterr()
    assert "Boom: line 1 column 5 (char 4)" in captured.err
def test_write_data_to_json_stdout(capsys: CaptureFixture):
    """Writing to stdout produces 4-space-indented JSON plus a trailing newline."""
    write_data_to_json_file(EXAMPLE_JSON_DATA, sys.stdout)
    captured = capsys.readouterr()
    expected = json.dumps(EXAMPLE_JSON_DATA, indent=4) + "\n"
    assert captured.out == expected
def assert_stdout_stderr(capsys: CaptureFixture, stdout: str, stderr: str) -> None:
    """Assert that captured stdout and stderr exactly equal the expected strings."""
    captured = capsys.readouterr()
    assert captured.out == stdout
    assert captured.err == stderr
def test_increment_without_metadata_with_schema(
    self,
    capsys: CaptureFixture,
    archive_dir: LocalPath,
    archive_fixture: "TestArchive.ArchiveCacheAndHashPassthruChecker",
    schema_file: Optional[LocalPath],
    verbose: bool,
):
    """Archive six dataset increments against an explicit schema and verify
    the cache frame after each step, then check the final hash file.
    """
    # List of (expected frame filename, data filename) tuples
    targets: List[Tuple[str, str]] = [
        ("iris-part-1-of-6-combined.csv", "iris-part-1-of-6-combined.csv"),
        ("iris-part-1-2.csv", "iris-part-2-of-6-combined.csv"),
        ("iris-part-1-2-3.csv", "iris-part-3-of-6-combined.csv"),
        ("iris-part-1-2-3-4.csv", "iris-part-4-of-6-combined.csv"),
        ("iris-part-1-2-3-4-5.csv", "iris-part-5-of-6-combined.csv"),
        ("iris_plus.csv", "iris-part-6-of-6-combined.csv"),
    ]
    # Hash file defaults to a sibling of the cache file when not supplied.
    expected_hashfile = (
        LocalPath(archive_fixture.cache_file).dirpath(DEFAULT_HASH_FILE)
        if archive_fixture.hash_file is None
        else archive_fixture.hash_file)
    # Precondition: a clean slate — no hash file, no cache, empty archive dir.
    assert not os.path.exists(expected_hashfile)
    assert not os.path.exists(archive_fixture.cache_file)
    assert len(archive_dir.listdir()) == 0
    # Schema file defaults into the archive directory when not supplied.
    expected_schemafile = (
        archive_dir.join(syphon.schema.DEFAULT_FILE)
        if schema_file is None
        else schema_file)
    assert not os.path.exists(expected_schemafile)
    # Initialize the schema: ordered columns PetalColor then Species.
    syphon.init(SortedDict({
        "0": "PetalColor",
        "1": "Species"
    }), expected_schemafile)
    assert os.path.exists(expected_schemafile)
    # Archive each increment and compare the cache against the expected frame.
    for expected_frame_filename, data_filename in targets:
        assert archive_fixture(
            archive_dir,
            [os.path.join(get_data_path(), data_filename)],
            schema_filepath=schema_file,
            cache_filepath=archive_fixture.cache_file,
            hash_filepath=archive_fixture.hash_file,
            verbose=verbose,
        )
        # Output is expected only in verbose mode; never on stderr.
        assert_captured_outerr(capsys.readouterr(), verbose, False)
        expected_frame = DataFrame(
            read_csv(
                os.path.join(get_data_path(), expected_frame_filename),
                dtype=str,
                index_col="Index",
            ))
        expected_frame.sort_index(inplace=True)
        actual_frame = DataFrame(
            read_csv(str(archive_fixture.cache_file),
                     dtype=str,
                     index_col="Index"))
        actual_frame.sort_index(inplace=True)
        # Reading the CSVs must not itself produce any output.
        assert_captured_outerr(capsys.readouterr(), False, False)
        assert_frame_equal(expected_frame, actual_frame)
    # After all increments, the hash file exists and the cache verifies.
    assert os.path.exists(expected_hashfile)
    assert syphon.check(
        archive_fixture.cache_file,
        hash_filepath=expected_hashfile,
        verbose=verbose,
    )
def test_delete_deployment_in_namespace(mock_k8s_module: MagicMock, capsys: CaptureFixture):
    """Exercise delete_service_deployment_in_namespace across its branches:
    missing namespace, missing deployment, plain delete, exposed-service
    delete, and ingress delete.

    The original repeated several identical consecutive mock assignments;
    those redundant duplicates have been removed (the remaining setup per
    case is unchanged).
    """
    # Case 1: the target namespace does not exist.
    mock_k8s_module.namespace_exists.return_value = False
    mock_k8s_module.list_service_stage_deployments.return_value = {"foo": {}}
    delete_service_deployment_in_namespace("bodywork-dev", "bodywork-test-project--serve")
    captured_one = capsys.readouterr()
    assert "namespace=bodywork-dev could not be found" in captured_one.out

    # Case 2: namespace exists but the deployment is not listed in it.
    mock_k8s_module.namespace_exists.return_value = True
    mock_k8s_module.list_service_stage_deployments.return_value = {"foo": {}}
    delete_service_deployment_in_namespace("bodywork-dev", "bodywork-test-project--serve")
    captured_two = capsys.readouterr()
    assert "deployment=bodywork-test-project--serve not found" in captured_two.out

    # Case 3: deployment exists and is not exposed as a cluster service.
    mock_k8s_module.namespace_exists.return_value = True
    mock_k8s_module.list_service_stage_deployments.return_value = {
        "bodywork-test-project--serve": {}
    }
    mock_k8s_module.delete_deployment.side_effect = None
    mock_k8s_module.is_exposed_as_cluster_service.return_value = False
    delete_service_deployment_in_namespace("bodywork-dev", "bodywork-test-project--serve")
    captured_three = capsys.readouterr()
    # NOTE(review): '"" in s' is always True, so this assertion is vacuous;
    # kept for behavioral parity, but it should pin real expected output.
    assert "" in captured_three.out

    # Case 4: deployment is exposed as a cluster service; deletion reports
    # the service URL as deleted.
    mock_k8s_module.namespace_exists.return_value = True
    mock_k8s_module.list_service_stage_deployments.return_value = {
        "bodywork-test-project--serve": {}
    }
    mock_k8s_module.delete_deployment.side_effect = None
    mock_k8s_module.is_exposed_as_cluster_service.return_value = True
    mock_k8s_module.stop_exposing_cluster_service.side_effect = None
    mock_k8s_module.cluster_service_url.return_value = (
        "http://bodywork-test-project--serve.bodywork-dev.svc.cluster.local")
    delete_service_deployment_in_namespace("bodywork-dev", "bodywork-test-project--serve")
    captured_four = capsys.readouterr()
    assert (
        "http://bodywork-test-project--serve.bodywork-dev.svc.cluster.local deleted"
        in captured_four.out)

    # Case 5: no cluster service exposure and no ingress.
    mock_k8s_module.namespace_exists.return_value = True
    mock_k8s_module.list_service_stage_deployments.return_value = {
        "bodywork-test-project--serve": {}
    }
    mock_k8s_module.delete_deployment.side_effect = None
    mock_k8s_module.is_exposed_as_cluster_service.return_value = False
    mock_k8s_module.stop_exposing_cluster_service.side_effect = None
    mock_k8s_module.has_ingress.return_value = False
    delete_service_deployment_in_namespace("bodywork-dev", "bodywork-test-project--serve")
    captured_five = capsys.readouterr()
    # NOTE(review): vacuous assertion, as in case 3.
    assert "" in captured_five.out

    # Case 6: deployment has an ingress; deletion reports the ingress route.
    mock_k8s_module.namespace_exists.return_value = True
    mock_k8s_module.list_service_stage_deployments.return_value = {
        "bodywork-test-project--serve": {}
    }
    mock_k8s_module.delete_deployment.side_effect = None
    mock_k8s_module.is_exposed_as_cluster_service.return_value = False
    mock_k8s_module.stop_exposing_cluster_service.side_effect = None
    mock_k8s_module.has_ingress.return_value = True
    mock_k8s_module.delete_deployment_ingress.side_effect = None
    mock_k8s_module.ingress_route.return_value = (
        "/bodywork-dev/bodywork-test-project--serve")
    delete_service_deployment_in_namespace("bodywork-dev", "bodywork-test-project--serve")
    captured_six = capsys.readouterr()
    assert ("ingress route /bodywork-dev/bodywork-test-project--serve"
            in captured_six.out)
def test_no_datafiles(self, capsys: CaptureFixture, archive_dir: LocalPath, verbose: bool):
    """Archiving an empty file list fails; output appears only when verbose."""
    result = syphon.archive(archive_dir, [], verbose=verbose)
    assert not result
    captured = capsys.readouterr()
    assert_captured_outerr(captured, verbose, False)
def test_get_site_key(capsys: CaptureFixture, recaptcha_site_key: str) -> None:
    """Fetching a site key prints a success message containing the key."""
    get_site_key(project_id=GOOGLE_CLOUD_PROJECT, recaptcha_site_key=recaptcha_site_key)
    captured = capsys.readouterr()
    assert re.search(f"Successfully obtained the key !.+{recaptcha_site_key}", captured.out)
def test_recording_fails_without_middleware(capsys: CaptureFixture) -> None:
    """Without the timing middleware, a request prints a timing error to stdout."""
    client3.get("/")
    captured = capsys.readouterr()
    assert captured.err == ""
    assert captured.out.strip() == "TIMING ERROR: No timer present on request"
def test_handle_pairs_dca(self, capfd: CaptureFixture) -> None:
    """DCA handling prints a limit buy order for each configured pair."""
    self.kdca.handle_pairs_dca()
    stdout_text = capfd.readouterr().out
    assert "buy 0.00519042 ETHEUR @ limit 2882.44" in stdout_text
    assert "buy 0.00051336 XBTEUR @ limit 38857.2" in stdout_text
def test_silent_timing(capsys: CaptureFixture) -> None:
    """An untimed route produces no output on either stream."""
    client.get("/untimed")
    captured = capsys.readouterr()
    assert captured.err == ""
    assert captured.out == ""
async def test_record_messages(monkeypatch: MonkeyPatch, capsys: CaptureFixture):
    """Record a scripted conversation against a mocked REST webhook and
    verify button payloads are echoed to stdout.
    """
    # Scripted exchange: each entry pairs a user input ("in") with the
    # bot responses ("out") the mocked endpoint will return for it.
    input_output = [
        {
            "in": "Give me a question!",
            "out": [
                {
                    "buttons": [{
                        "title": "button 1 title",
                        "payload": "button 1 payload",
                        "details": "button 1 details",
                    }],
                    "text": "This is a button 1",
                },
                {
                    "buttons": [{
                        "title": "button 2 title",
                        "payload": "button 2 payload",
                        "details": "button 2 details",
                    }],
                    "text": "This is a button 2",
                },
                {
                    "buttons": [{
                        "title": "button 3 title",
                        "payload": "button 3 payload",
                        "details": "button 3 details",
                    }],
                    "text": "This is a button 3",
                },
            ],
        },
        {
            "in": ENTER,
            "out": [{
                "text": "You've pressed the button"
            }]
        },
        {
            "in": "Dummy message",
            "out": [{
                "text": "Dummy response"
            }]
        },
    ]
    # Feed the scripted user inputs through a faked stdin.
    inp = mock_stdin([m["in"] for m in input_output])
    server_url = "http://example.com"
    endpoint = f"{server_url}/webhooks/rest/webhook"
    with aioresponses() as mocked:
        # Queue one mocked POST response per non-empty scripted output.
        for output in [m["out"] for m in input_output]:
            if output:
                mocked.post(
                    url=endpoint,
                    payload=output,
                )
        num_of_messages = await record_messages(
            "123",
            server_url=server_url,
            max_message_limit=len(input_output),
            use_response_stream=False,
        )
        # One recorded message per scripted exchange.
        assert num_of_messages == len(input_output)
    captured = capsys.readouterr()
    # Button payloads from the first response set must be printed.
    assert "button 1 payload" in captured.out
    assert "button 2 payload" in captured.out
    inp.close()
def test_missing(capsys: CaptureFixture) -> None:
    """A 404 route is still timed: stdout carries a TIMING line ending with the path."""
    client.get("/will-404")
    captured = capsys.readouterr()
    assert captured.err == ""
    assert captured.out.startswith("TIMING:")
    assert captured.out.endswith("<Path: /will-404>\n")
def test_help_output(set_path: None, capsys: CaptureFixture):
    """``-h`` exits with status 0 and lists the available subcommands.

    Fix: the ``set_path`` fixture was annotated ``type(None)`` — a call
    expression evaluated at definition time; the idiomatic annotation for
    a value-less fixture is simply ``None``.
    """
    with pytest.raises(SystemExit, match='^0$'):
        main(['-h'])
    captured = capsys.readouterr()
    # The help text must list the subcommand choices on the
    # "positional arguments" line, starting with settings and testbin.
    assert re.search(
        r'^positional arguments:\n\s+\{settings,testbin[^}]*\}$',
        captured.out,
        re.MULTILINE,
    )
def before_each(self, capsys: CaptureFixture) -> None:
    """Stash the capture fixture on the instance and drain prior output."""
    # Keep a handle for assertions made later in the test class.
    self._capsys = capsys
    # Discard anything already captured so each test starts with empty buffers.
    capsys.readouterr()
def test_check_topic_permissions(topic_path: str, capsys: CaptureFixture) -> None:
    """Checking topic permissions prints the topic path and the publish permission."""
    iam.check_topic_permissions(PROJECT_ID, TOPIC_ID)
    captured = capsys.readouterr()
    assert topic_path in captured.out
    assert "pubsub.topics.publish" in captured.out
def test_default(capsys: CaptureFixture):
    """Invoking main with no arguments produces no output on either stream."""
    main([])
    captured = capsys.readouterr()
    assert not captured.out
    assert not captured.err