def test_down_redis(self, service_available, xmlrpc_cache):
    context = pretend.stub()
    service = pretend.stub(
        fetch=pretend.raiser(CacheError), purge=pretend.raiser(CacheError)
    )
    if service_available:
        _find_service = pretend.call_recorder(lambda *args, **kwargs: service)
    else:
        _find_service = pretend.raiser(LookupError)
    request = pretend.stub(
        find_service=_find_service, rpc_method="rpc_method", rpc_args=(0, 1)
    )
    response = pretend.stub()

    @pretend.call_recorder
    def view(context, request):
        return response

    info = pretend.stub(options={}, exception_only=False)
    info.options["xmlrpc_cache"] = xmlrpc_cache
    derived_view = cached_return_view(view, info)  # miss
    derived_view = cached_return_view(view, info)  # hit

    assert derived_view(context, request) is response
    assert view.calls == [pretend.call(context, request)]

def test_redis_down(self):
    metric_reporter = pretend.stub(
        increment=pretend.call_recorder(lambda *args: None)
    )
    down_redis = pretend.stub(
        hget=pretend.raiser(redis.exceptions.RedisError),
        pipeline=pretend.raiser(redis.exceptions.RedisError),
        scan_iter=pretend.raiser(redis.exceptions.RedisError),
    )
    redis_lru = RedisLru(down_redis, metric_reporter=metric_reporter)

    expected = func_test(0, 1, kwarg0=2, kwarg1=3)
    assert expected == redis_lru.fetch(
        func_test, [0, 1], {"kwarg0": 2, "kwarg1": 3}, None, "test", None
    )
    assert expected == redis_lru.fetch(
        func_test, [0, 1], {"kwarg0": 2, "kwarg1": 3}, None, "test", None
    )
    with pytest.raises(CacheError):
        redis_lru.purge("test")
    assert metric_reporter.increment.calls == [
        pretend.call("lru.cache.error"),  # Failed get
        pretend.call("lru.cache.miss"),
        pretend.call("lru.cache.error"),  # Failed add
        pretend.call("lru.cache.error"),  # Failed get
        pretend.call("lru.cache.miss"),
        pretend.call("lru.cache.error"),  # Failed add
        pretend.call("lru.cache.error"),  # Failed purge
    ]

def test_down_redis(self, service_available, xmlrpc_cache):
    context = pretend.stub()
    service = pretend.stub(
        fetch=pretend.raiser(CacheError), purge=pretend.raiser(CacheError)
    )
    if service_available:
        _find_service = pretend.call_recorder(lambda *args, **kwargs: service)
    else:
        _find_service = pretend.raiser(LookupError)
    request = pretend.stub(
        find_service=_find_service, rpc_method="rpc_method", rpc_args=(0, 1)
    )
    response = pretend.stub()

    @pretend.call_recorder
    def view(context, request):
        return response

    info = pretend.stub(options={}, exception_only=False)
    info.options["xmlrpc_cache"] = xmlrpc_cache
    derived_view = cached_return_view(view, info)  # miss
    derived_view = cached_return_view(view, info)  # hit

    assert derived_view(context, request) is response
    assert view.calls == [pretend.call(context, request)]

def test_redis_down(self):
    metric_reporter = pretend.stub(
        increment=pretend.call_recorder(lambda *args: None)
    )
    down_redis = pretend.stub(
        hget=pretend.raiser(redis.exceptions.RedisError),
        pipeline=pretend.raiser(redis.exceptions.RedisError),
        scan_iter=pretend.raiser(redis.exceptions.RedisError),
    )
    redis_lru = RedisLru(down_redis, metric_reporter=metric_reporter)

    expected = func_test(0, 1, kwarg0=2, kwarg1=3)
    assert expected == redis_lru.fetch(
        func_test, [0, 1], {"kwarg0": 2, "kwarg1": 3}, None, "test", None
    )
    assert expected == redis_lru.fetch(
        func_test, [0, 1], {"kwarg0": 2, "kwarg1": 3}, None, "test", None
    )
    with pytest.raises(CacheError):
        redis_lru.purge("test")
    assert metric_reporter.increment.calls == [
        pretend.call("lru.cache.error"),  # Failed get
        pretend.call("lru.cache.miss"),
        pretend.call("lru.cache.error"),  # Failed add
        pretend.call("lru.cache.error"),  # Failed get
        pretend.call("lru.cache.miss"),
        pretend.call("lru.cache.error"),  # Failed add
        pretend.call("lru.cache.error"),  # Failed purge
    ]

def test_processor_raising_DropEvent_silently_aborts_chain(self, capsys):
    """
    If a processor raises DropEvent, the chain is aborted and nothing is
    proxied to the logger.
    """
    b = build_bl(processors=[raiser(DropEvent), raiser(ValueError)])
    b._proxy_to_logger("", None, x=5)

    assert ("", "") == capsys.readouterr()

def test_processor_raising_DropEvent_silently_aborts_chain(self, capsys):
    """
    If a processor raises DropEvent, the chain is aborted and nothing is
    proxied to the logger.
    """
    b = build_bl(processors=[raiser(DropEvent), raiser(ValueError)])
    b._proxy_to_logger("", None, x=5)

    assert ("", "") == capsys.readouterr()

def test_retry(self, db_session, monkeypatch):
    monkeypatch.setattr(tasks, "checks", test_checks)
    exc = Exception("Scan failed")

    def scan(self, **kwargs):
        raise exc

    monkeypatch.setattr(tasks.checks.ExampleHookedCheck, "scan", scan)

    MalwareCheckFactory.create(
        name="ExampleHookedCheck", state=MalwareCheckState.Enabled
    )

    task = pretend.stub(
        retry=pretend.call_recorder(pretend.raiser(celery.exceptions.Retry))
    )
    request = pretend.stub(
        db=db_session,
        log=pretend.stub(error=pretend.call_recorder(lambda *args, **kwargs: None)),
        route_url=pretend.call_recorder(lambda *a, **kw: pretend.stub()),
    )

    file = FileFactory.create()

    with pytest.raises(celery.exceptions.Retry):
        tasks.run_check(task, request, "ExampleHookedCheck", obj_id=file.id)

    assert request.log.error.calls == [
        pretend.call("Error executing check ExampleHookedCheck: Scan failed")
    ]
    assert task.retry.calls == [pretend.call(exc=exc)]

def test_lookup_owner_other_http_error(self, monkeypatch):
    response = pretend.stub(
        # anything that isn't 404 or 403
        status_code=422,
        raise_for_status=pretend.raiser(HTTPError),
        content=b"fake-content",
    )
    requests = pretend.stub(
        get=pretend.call_recorder(lambda o, **kw: response), HTTPError=HTTPError
    )
    monkeypatch.setattr(forms, "requests", requests)

    sentry_sdk = pretend.stub(capture_message=pretend.call_recorder(lambda s: None))
    monkeypatch.setattr(forms, "sentry_sdk", sentry_sdk)

    form = forms.GitHubProviderForm(api_token="fake-token")

    with pytest.raises(wtforms.validators.ValidationError):
        form._lookup_owner("some-owner")

    assert requests.get.calls == [
        pretend.call(
            "https://api.github.com/users/some-owner",
            headers={
                "Accept": "application/vnd.github.v3+json",
                "Authorization": "token fake-token",
            },
            allow_redirects=True,
        )
    ]
    assert sentry_sdk.capture_message.calls == [
        pretend.call(
            "Unexpected error from GitHub user lookup: "
            "response.content=b'fake-content'"
        )
    ]

def test_lookup_owner_403(self, monkeypatch):
    response = pretend.stub(
        status_code=403,
        raise_for_status=pretend.raiser(HTTPError),
        json=lambda: {"message": "fake-message"},
    )
    requests = pretend.stub(
        get=pretend.call_recorder(lambda o, **kw: response), HTTPError=HTTPError
    )
    monkeypatch.setattr(forms, "requests", requests)

    sentry_sdk = pretend.stub(capture_message=pretend.call_recorder(lambda s: None))
    monkeypatch.setattr(forms, "sentry_sdk", sentry_sdk)

    form = forms.GitHubProviderForm(api_token="fake-token")

    with pytest.raises(wtforms.validators.ValidationError):
        form._lookup_owner("some-owner")

    assert requests.get.calls == [
        pretend.call(
            "https://api.github.com/users/some-owner",
            headers={
                "Accept": "application/vnd.github.v3+json",
                "Authorization": "token fake-token",
            },
            allow_redirects=True,
        )
    ]
    assert sentry_sdk.capture_message.calls == [
        pretend.call(
            "Exceeded GitHub rate limit for user lookups. "
            "Reason: {'message': 'fake-message'}"
        )
    ]

def test_fips_metadata_excludes_md5_and_blake2(monkeypatch):
    """Generate a valid metadata dictionary for Nexus when FIPS is enabled.

    See also: https://github.com/pypa/twine/issues/775
    """
    replaced_blake2b = pretend.raiser(ValueError("fipsmode"))
    replaced_md5 = pretend.raiser(ValueError("fipsmode"))
    monkeypatch.setattr(package_file.hashlib, "md5", replaced_md5)
    monkeypatch.setattr(package_file.hashlib, "blake2b", replaced_blake2b)

    filename = "tests/fixtures/twine-1.5.0-py2.py3-none-any.whl"
    pf = package_file.PackageFile.from_filename(filename, None)
    mddict = pf.metadata_dictionary()

    assert "md5_digest" not in mddict
    assert "blake2_256_digest" not in mddict

def test_analyze_disclosure_invalid_macaroon():
    metrics = collections.Counter()

    def metrics_increment(key):
        metrics.update([key])

    find = pretend.raiser(utils.InvalidMacaroonError("Bla", "bla"))

    svc = {
        utils.IMetricsService: pretend.stub(increment=metrics_increment),
        utils.IMacaroonService: pretend.stub(find_from_raw=find),
    }

    request = pretend.stub(find_service=lambda iface, context: svc[iface])

    utils.analyze_disclosure(
        request=request,
        disclosure_record={
            "type": "pypi_api_token",
            "token": "pypi-1234",
            "url": "http://example.com",
        },
        origin="github",
    )
    assert metrics == {
        "warehouse.token_leak.github.recieved": 1,
        "warehouse.token_leak.github.error.invalid": 1,
    }

def test_deriver(self, service_available, xmlrpc_cache, fakeredis):
    context = pretend.stub()
    purger = pretend.call_recorder(lambda tags: None)
    service = RedisXMLRPCCache("redis://127.0.0.2:6379/0", purger)
    service.redis_conn = fakeredis
    service.redis_lru.conn = fakeredis
    if service_available:
        _find_service = pretend.call_recorder(lambda *args, **kwargs: service)
    else:
        _find_service = pretend.raiser(LookupError)
    request = pretend.stub(
        find_service=_find_service, rpc_method="rpc_method", rpc_args=(0, 1)
    )
    response = {}

    @pretend.call_recorder
    def view(context, request):
        return response

    info = pretend.stub(options={}, exception_only=False)
    info.options["xmlrpc_cache"] = xmlrpc_cache
    derived_view = cached_return_view(view, info)

    assert derived_view(context, request) is response
    assert view.calls == [pretend.call(context, request)]

def test_custom_tag(self, service_available, xmlrpc_cache):
    context = pretend.stub()
    service = pretend.stub(
        fetch=pretend.call_recorder(
            lambda func, args, kwargs, key, tag, expires: func(*args, **kwargs)
        )
    )
    if service_available:
        _find_service = pretend.call_recorder(lambda *args, **kwargs: service)
    else:
        _find_service = pretend.raiser(LookupError)
    request = pretend.stub(
        find_service=_find_service,
        rpc_method="rpc_method",
        rpc_args=("warehouse", "1.0.0"),
    )
    response = {}

    @pretend.call_recorder
    def view(context, request):
        return response

    info = pretend.stub(options={}, exception_only=False)
    info.options["xmlrpc_cache"] = xmlrpc_cache
    info.options["xmlrpc_cache_tag"] = "arg1/%s"
    info.options["xmlrpc_cache_arg_index"] = 1
    derived_view = cached_return_view(view, info)

    assert derived_view(context, request) is response
    assert view.calls == [pretend.call(context, request)]

def test_deriver(self, service_available, xmlrpc_cache, fakeredis):
    context = pretend.stub()
    purger = pretend.call_recorder(lambda tags: None)
    service = RedisXMLRPCCache("redis://127.0.0.2:6379/0", purger)
    service.redis_conn = fakeredis
    service.redis_lru.conn = fakeredis
    if service_available:
        _find_service = pretend.call_recorder(lambda *args, **kwargs: service)
    else:
        _find_service = pretend.raiser(LookupError)
    request = pretend.stub(
        find_service=_find_service, rpc_method="rpc_method", rpc_args=(0, 1)
    )
    response = {}

    @pretend.call_recorder
    def view(context, request):
        return response

    info = pretend.stub(options={}, exception_only=False)
    info.options["xmlrpc_cache"] = xmlrpc_cache
    derived_view = cached_return_view(view, info)

    assert derived_view(context, request) is response
    assert view.calls == [pretend.call(context, request)]

def test_custom_tag(self, service_available, xmlrpc_cache):
    context = pretend.stub()
    service = pretend.stub(
        fetch=pretend.call_recorder(
            lambda func, args, kwargs, key, tag, expires: func(*args, **kwargs)
        )
    )
    if service_available:
        _find_service = pretend.call_recorder(lambda *args, **kwargs: service)
    else:
        _find_service = pretend.raiser(LookupError)
    request = pretend.stub(
        find_service=_find_service,
        rpc_method="rpc_method",
        rpc_args=("warehouse", "1.0.0"),
    )
    response = {}

    @pretend.call_recorder
    def view(context, request):
        return response

    info = pretend.stub(options={}, exception_only=False)
    info.options["xmlrpc_cache"] = xmlrpc_cache
    info.options["xmlrpc_cache_tag"] = "arg1/%s"
    info.options["xmlrpc_cache_arg_index"] = 1
    derived_view = cached_return_view(view, info)

    assert derived_view(context, request) is response
    assert view.calls == [pretend.call(context, request)]

def test_validate_password_too_many_failed(self):
    request = pretend.stub(remote_addr="1.2.3.4")
    user_service = pretend.stub(
        find_userid=pretend.call_recorder(lambda userid: 1),
        check_password=pretend.call_recorder(
            pretend.raiser(TooManyFailedLogins(resets_in=None))
        ),
        is_disabled=pretend.call_recorder(lambda userid: (False, None)),
    )
    breach_service = pretend.stub()
    form = forms.LoginForm(
        # username restored to match the find_userid assertions below
        data={"username": "my_username"},
        request=request,
        user_service=user_service,
        breach_service=breach_service,
    )
    field = pretend.stub(data="pw")

    with pytest.raises(wtforms.validators.ValidationError):
        form.validate_password(field)

    assert user_service.find_userid.calls == [
        pretend.call("my_username"),
        pretend.call("my_username"),
    ]
    assert user_service.is_disabled.calls == [pretend.call(1)]
    assert user_service.check_password.calls == [pretend.call(1, "pw", tags=None)]

def test_exception_for_http_status(verbose, make_settings, capsys):
    upload_settings = make_settings()
    upload_settings.verbose = verbose

    stub_response = pretend.stub(
        is_redirect=False,
        status_code=403,
        text="Invalid or non-existent authentication information",
        raise_for_status=pretend.raiser(HTTPError),
    )
    stub_repository = pretend.stub(
        upload=lambda package: stub_response,
        close=lambda: None,
    )
    upload_settings.create_repository = lambda: stub_repository

    with pytest.raises(HTTPError):
        upload.upload(upload_settings, [WHEEL_FIXTURE])

    captured = capsys.readouterr()
    assert RELEASE_URL not in captured.out

    if verbose:
        assert stub_response.text in captured.out
        assert '--verbose' not in captured.out
    else:
        assert stub_response.text not in captured.out
        assert '--verbose' in captured.out

def test_credential_invalid(self):
    request = pretend.stub(remote_addr="127.0.0.1")
    user_service = pretend.stub(
        record_event=pretend.call_recorder(lambda *a, **kw: None),
        verify_webauthn_assertion=pretend.raiser(
            AuthenticationRejectedError("foo")
        ),
    )
    form = forms.WebAuthnAuthenticationForm(
        request=request,
        credential=json.dumps({}),
        user_id=1,
        user_service=user_service,
        challenge=pretend.stub(),
        origin=pretend.stub(),
        rp_id=pretend.stub(),
    )

    assert not form.validate()
    assert form.credential.errors.pop() == "foo"
    assert user_service.record_event.calls == [
        pretend.call(
            1,
            tag="account:login:failure",
            ip_address="127.0.0.1",
            additional={"reason": "invalid_webauthn"},
        )
    ]

def test_new_container(self):
    exc = ContainerDoesNotExistError(1, 2, 3)
    driver = stub(get_container=raiser(exc), create_container=lambda arg: arg)
    expected = "my_container"
    actual = storage.get_container(driver, expected)
    self.assertEqual(actual, expected)

def test_reindex_retry_on_lock(self, db_request, monkeypatch):
    task = pretend.stub(
        retry=pretend.call_recorder(pretend.raiser(celery.exceptions.Retry))
    )

    db_request.registry.settings = {"celery.scheduler_url": "redis://redis:6379/0"}

    le = redis.exceptions.LockError()
    monkeypatch.setattr(
        redis.StrictRedis,
        "from_url",
        lambda *a, **kw: pretend.stub(lock=pretend.raiser(le)),
    )

    with pytest.raises(celery.exceptions.Retry):
        reindex_project(task, db_request, "foo")

    assert task.retry.calls == [pretend.call(countdown=60, exc=le)]

def test_run_gpg_raises_exception_if_no_gpgs(monkeypatch):
    replaced_check_call = pretend.raiser(FileNotFoundError('not found'))
    monkeypatch.setattr(package.subprocess, 'check_call', replaced_check_call)
    gpg_args = ('gpg', '--detach-sign', '-a', 'pypircfile')

    with pytest.raises(exceptions.InvalidSigningExecutable) as err:
        package.PackageFile.run_gpg(gpg_args)

    assert 'executables not available' in err.value.args[0]

def test_run_gpg_raises_exception_if_not_using_gpg(monkeypatch):
    replaced_check_call = pretend.raiser(FileNotFoundError("not found"))
    monkeypatch.setattr(package_file.subprocess, "check_call", replaced_check_call)
    gpg_args = ("not_gpg", "--detach-sign", "-a", "pypircfile")

    with pytest.raises(exceptions.InvalidSigningExecutable) as err:
        package_file.PackageFile.run_gpg(gpg_args)

    assert "not_gpg executable not available" in err.value.args[0]

def test_chain_does_not_swallow_all_exceptions(self):
    """
    If the chain raises anything other than DropEvent, the error is not
    swallowed.
    """
    b = build_bl(processors=[raiser(ValueError)])

    with pytest.raises(ValueError):
        b._process_event("", "boom", {})

def test_fips_hash_manager(monkeypatch):
    """Verify the behaviour if hashlib is using FIPS mode."""
    replaced_md5 = pretend.raiser(ValueError('fipsmode'))
    monkeypatch.setattr(package.hashlib, 'md5', replaced_md5)

    filename = 'tests/fixtures/twine-1.5.0-py2.py3-none-any.whl'
    hasher = package.HashManager(filename)
    hasher.hash()
    hashes = TWINE_1_5_0_WHEEL_HEXDIGEST._replace(md5=None)
    assert hasher.hexdigest() == hashes

def test_fips_hash_manager_blake2(monkeypatch):
    """Generate hexdigest without BLAKE2 when hashlib is using FIPS mode."""
    replaced_blake2b = pretend.raiser(ValueError("fipsmode"))
    monkeypatch.setattr(package_file.hashlib, "blake2b", replaced_blake2b)

    filename = "tests/fixtures/twine-1.5.0-py2.py3-none-any.whl"
    hasher = package_file.HashManager(filename)
    hasher.hash()
    hashes = TWINE_1_5_0_WHEEL_HEXDIGEST._replace(blake2=None)
    assert hasher.hexdigest() == hashes

def test_deserialize_raw_macaroon(self, monkeypatch, macaroon_service, exception):
    raw_macaroon = pretend.stub()
    macaroon_service._extract_raw_macaroon = pretend.call_recorder(
        lambda a: raw_macaroon
    )
    monkeypatch.setattr(
        pymacaroons.Macaroon, "deserialize", pretend.raiser(exception)
    )

    with pytest.raises(services.InvalidMacaroon):
        macaroon_service._deserialize_raw_macaroon(raw_macaroon)

def test_awsssm_error(monkeypatch):
    error = ClientError({'Error': {'Code': 'SomethingBad'}}, 'get_parameter')
    fake_client = pretend.stub(get_parameter=pretend.raiser(error))
    fake_boto3 = pretend.stub(client=lambda service: fake_client)
    monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
    b = AwsSsmBackend('/app1/')
    with pytest.raises(ClientError):
        b.get_setting('TEMPLATE_DEBUG')

def test_retrieve_public_key_payload_connection_error(self):
    session = pretend.stub(get=pretend.raiser(requests.ConnectionError))
    verifier = utils.GitHubTokenScanningPayloadVerifier(
        session=session, metrics=pretend.stub()
    )

    with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc:
        verifier._retrieve_public_key_payload()

    assert str(exc.value) == "Could not connect to GitHub"
    assert exc.value.reason == "public_key_api.network_error"

def test_verify_registration_response_failure(monkeypatch):
    response_obj = pretend.stub(
        verify=pretend.raiser(pywebauthn.webauthn.RegistrationRejectedException)
    )
    response_cls = pretend.call_recorder(lambda *a, **kw: response_obj)
    monkeypatch.setattr(pywebauthn, "WebAuthnRegistrationResponse", response_cls)

    with pytest.raises(webauthn.RegistrationRejectedException):
        webauthn.verify_registration_response(
            {}, "not_a_real_challenge", rp_id="fake_rp_id", origin="fake_origin"
        )

def test_recaptcha_error(self):
    form = forms.RegistrationForm(
        data={"g_recaptcha_response": "asd"},
        user_service=pretend.stub(),
        recaptcha_service=pretend.stub(
            verify_response=pretend.raiser(recaptcha.RecaptchaError),
            enabled=True,
        ),
    )
    assert not form.validate()
    assert form.g_recaptcha_response.errors.pop() == "Recaptcha error."

def test_lineReceived_error(self):
    line = (
        '2013-12-08T23:24:40Z cache-c31 pypi-cdn[18322]: 199.182.120.6 '
        '"Sun, 08 Dec 2013 23:24:40 GMT" "-" "GET '
        '/packages/source/I/INITools/INITools-0.2.tar.gz" HTTP/1.1 200 '
        '16930 156751 HIT 326 "(null)" "(null)" "pip/1.5rc1 PyPy/2.2.1 '
        'Linux/2.6.32-042stab061.2"\n'
    )
    models = pretend.stub(create_download=pretend.raiser(ValueError))
    protocol = FastlySyslogProtocol(models, None)
    protocol.lineReceived(line)

def test_csp_policy_default(self):
    response = pretend.stub(headers={})
    handler = pretend.call_recorder(lambda request: response)
    registry = pretend.stub(settings={})

    tween = csp.content_security_policy_tween_factory(handler, registry)

    request = pretend.stub(
        path="/path/to/nowhere/", find_service=pretend.raiser(ValueError)
    )

    assert tween(request) is response
    assert response.headers == {}

def test_credential_invalid(self):
    form = forms.WebAuthnAuthenticationForm(
        credential=json.dumps({}),
        user_id=pretend.stub(),
        user_service=pretend.stub(
            verify_webauthn_assertion=pretend.raiser(
                AuthenticationRejectedException("foo")
            )
        ),
        challenge=pretend.stub(),
        origin=pretend.stub(),
        rp_id=pretend.stub(),
    )

    assert not form.validate()
    assert form.credential.errors.pop() == "foo"

def test_csp_policy_default(self):
    response = pretend.stub(headers={})
    handler = pretend.call_recorder(lambda request: response)
    registry = pretend.stub(settings={})

    tween = csp.content_security_policy_tween_factory(handler, registry)

    request = pretend.stub(
        path="/path/to/nowhere/", find_service=pretend.raiser(LookupError)
    )

    assert tween(request) is response
    assert response.headers == {}

def test_awsssm_missing_with_prefix(monkeypatch):
    error = ClientError({'Error': {'Code': 'ParameterNotFound'}}, 'get_parameter')
    fake_client = pretend.stub(get_parameter=pretend.raiser(error))
    fake_boto3 = pretend.stub(client=lambda service: fake_client)
    monkeypatch.setattr('configstore.backends.awsssm.boto3', fake_boto3)
    b = AwsSsmBackend('/app1/')
    value = b.get_setting('TEMPLATE_DEBUG')
    assert value is None

def test_verify_invalid_signature(self, monkeypatch):
    verify = pretend.call_recorder(
        pretend.raiser(MacaroonInvalidSignatureException)
    )
    macaroon = pretend.stub()
    context = pretend.stub()
    principals = pretend.stub()
    permission = pretend.stub()
    key = pretend.stub()
    verifier = Verifier(macaroon, context, principals, permission)

    monkeypatch.setattr(verifier.verifier, "verify", verify)
    assert verifier.verify(key) is False
    assert verify.calls == [pretend.call(macaroon, key)]

def test_retrieve_public_key_payload_http_error(self):
    response = pretend.stub(
        status_code=418,
        text="I'm a teapot",
        raise_for_status=pretend.raiser(requests.HTTPError),
    )
    session = pretend.stub(get=lambda *a, **k: response)
    verifier = utils.GitHubTokenScanningPayloadVerifier(
        session=session, metrics=pretend.stub()
    )

    with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc:
        verifier._retrieve_public_key_payload()

    assert str(exc.value) == "Invalid response code 418: I'm a teapot"
    assert exc.value.reason == "public_key_api.status.418"

def test_remove_documentation_retry(db_request):
    project = ProjectFactory.create(name="foo", has_docs=True)
    task = stub(retry=call_recorder(lambda *a, **kw: None))
    service = stub(remove_by_prefix=raiser(Exception))

    db_request.find_service = call_recorder(lambda interface, name=None: service)
    db_request.log = stub(info=call_recorder(lambda *a, **kw: None))

    remove_documentation(task, db_request, project.name)

    assert len(task.retry.calls) == 1
    assert db_request.log.info.calls == [
        call("Removing documentation for %s", project.name)
    ]

def test_unindex_fails_when_raising(self, db_request, monkeypatch):
    task = pretend.stub()

    class TestException(Exception):
        pass

    es_client = FakeESClient()
    es_client.delete = pretend.raiser(TestException)
    monkeypatch.setattr(
        redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
    )

    db_request.registry.update(
        {"elasticsearch.client": es_client, "elasticsearch.index": "warehouse"}
    )

    with pytest.raises(TestException):
        unindex_project(task, db_request, "foo")

def test_unindex_accepts_defeat(self, db_request, monkeypatch):
    task = pretend.stub()

    es_client = FakeESClient()
    es_client.delete = pretend.call_recorder(
        pretend.raiser(elasticsearch.exceptions.NotFoundError)
    )
    monkeypatch.setattr(
        redis.StrictRedis, "from_url", lambda *a, **kw: pretend.stub(lock=NotLock)
    )

    db_request.registry.update(
        {"elasticsearch.client": es_client, "elasticsearch.index": "warehouse"}
    )

    unindex_project(task, db_request, "foo")

    assert es_client.delete.calls == [
        pretend.call(index="warehouse", doc_type="doc", id="foo")
    ]

def test_exception_handling(monkeypatch):
    replaced_dispatch = pretend.raiser(exceptions.InvalidConfiguration('foo'))
    monkeypatch.setattr(dunder_main, 'dispatch', replaced_dispatch)
    assert dunder_main.main() == 'InvalidConfiguration: foo'

def test_call_raiser(self):
    f = raiser(ValueError)
    with pytest.raises(ValueError):
        f()

def test_chain_does_not_swallow_all_exceptions(self):
    b = build_bl(processors=[raiser(ValueError)])
    with pytest.raises(ValueError):
        b._process_event('', 'boom', {})

def test_processor_raising_DropEvent_silently_aborts_chain(self, capsys):
    b = build_bl(processors=[raiser(DropEvent), raiser(ValueError)])
    b._proxy_to_logger('', None, x=5)
    assert ('', '') == capsys.readouterr()

def test_call_raiser_exc_value(self):
    exc = ValueError(14)
    f = raiser(exc)
    with pytest.raises(ValueError) as exc_info:
        f()
    assert exc_info.value is exc

def test_non_exc_raiser(self):
    with pytest.raises(TypeError):
        raiser("test")

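# For reference, a minimal sketch (an assumption, not pretend's actual
# implementation) of the raiser()-style helper the tests above exercise:
# it accepts an exception class or instance, rejects anything else with
# TypeError, and returns a callable that raises the exception when invoked
# with any arguments.
def _sketch_raiser(exc):
    is_exc_instance = isinstance(exc, BaseException)
    is_exc_class = isinstance(exc, type) and issubclass(exc, BaseException)
    if not (is_exc_instance or is_exc_class):
        raise TypeError("raiser() expects an exception class or instance")

    def _raise(*args, **kwargs):
        raise exc

    return _raise
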
def test_leaves_unrelated_exceptions_through(self):
    exc = IOError
    with pytest.raises(exc):
        until_not_interrupted(raiser(exc('not EINTR')))

def test_unexpected_error(self):
    serv = recaptcha.Service(_REQUEST)
    serv.request.http.post = pretend.raiser(socket.error)

    with pytest.raises(recaptcha.UnexpectedError):
        serv.verify_response("meaningless")