def test_project_detail_invalid_version():
    """project_detail raises NotFound when the requested version is not
    among the project's releases, after looking up the project and its
    release list."""
    app = pretend.stub(
        config=pretend.stub(
            cache=pretend.stub(
                browser=False,
                varnish=False,
            ),
        ),
        db=pretend.stub(
            packaging=pretend.stub(
                get_project=pretend.call_recorder(
                    lambda proj: "test-project",
                ),
                get_releases=pretend.call_recorder(
                    lambda proj: [{"version": "1.0"}],
                ),
            ),
        ),
    )
    request = pretend.stub()
    project_name = "test-project"

    # "2.0" is not in the stubbed releases (only "1.0"), so this must 404.
    with pytest.raises(NotFound):
        project_detail(app, request, project_name, "2.0")

    assert app.db.packaging.get_project.calls == [
        pretend.call("test-project"),
    ]
    assert app.db.packaging.get_releases.calls == [
        pretend.call("test-project"),
    ]
def test_without_a_query(self, monkeypatch, db_request, page):
    """search without a ?q= term returns an unfiltered Elasticsearch page,
    paginated from the ?page= parameter (defaulting to 1)."""
    params = MultiDict()
    if page is not None:
        params["page"] = page
    db_request.params = params

    es_query = pretend.stub()
    db_request.es = pretend.stub(query=lambda *a, **kw: es_query)

    page_obj = pretend.stub(page_count=(page or 1) + 10)
    page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
    monkeypatch.setattr(views, "ElasticsearchPage", page_cls)

    url_maker = pretend.stub()
    url_maker_factory = pretend.call_recorder(lambda request: url_maker)
    monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)

    assert search(db_request) == {
        "page": page_obj,
        "term": params.get("q", ''),
        "order": params.get("o", ''),
        "applied_filters": [],
        "available_filters": [],
    }
    assert page_cls.calls == [
        pretend.call(es_query, url_maker=url_maker, page=page or 1),
    ]
    assert url_maker_factory.calls == [pretend.call(db_request)]
def test_load_manifest(self, monkeypatch, cache):
    """_load_manifest resolves the manifest asset and parses its JSON; with
    cache=True the AssetResolver is constructed only once across two calls,
    otherwise once per call."""

    class FakeStream:
        def __enter__(self):
            return self

        def __exit__(self, *args, **kwargs):
            pass

        def read(self):
            return b'{"css/style.css": "css/style-foo.css"}'

    class FakeAsset:
        def stream(self):
            return FakeStream()

    assetresolver_obj = pretend.stub(resolve=lambda spec: FakeAsset())
    assetresolver_cls = pretend.call_recorder(lambda: assetresolver_obj)
    monkeypatch.setattr(static, "AssetResolver", assetresolver_cls)

    cachebuster = static.WarehouseCacheBuster("foo", cache=cache)

    # Load twice so we can observe whether the second call hits the cache.
    manifest = cachebuster._load_manifest()
    manifest = cachebuster._load_manifest()

    assert manifest == {"css/style.css": "css/style-foo.css"}
    if cache:
        assert assetresolver_cls.calls == [pretend.call()]
    else:
        assert assetresolver_cls.calls == [pretend.call(), pretend.call()]
def test_adds_blacklist_with_deletes(self, db_request):
    """add_blacklist records a BlacklistedProject and deletes the matching
    existing project (with its release, file, and role), flashing a message
    for both the deletion and the blacklisting."""
    db_request.user = UserFactory.create()
    db_request.POST["project"] = "foo"
    db_request.POST["confirm"] = "foo"
    db_request.POST["comment"] = "This is a comment"
    db_request.session = pretend.stub(
        flash=pretend.call_recorder(lambda *a, **kw: None)
    )
    db_request.route_path = lambda a: "/admin/blacklist/"
    db_request.remote_addr = "192.168.1.1"

    # An existing project whose related rows should all be removed.
    project = ProjectFactory.create(name="foo")
    release = ReleaseFactory.create(project=project)
    FileFactory.create(release=release, filename="who cares")
    RoleFactory.create(project=project, user=db_request.user)

    views.add_blacklist(db_request)

    assert db_request.session.flash.calls == [
        pretend.call("Deleted the project 'foo'", queue="success"),
        pretend.call("Blacklisted 'foo'", queue="success"),
    ]

    blacklist = (
        db_request.db.query(BlacklistedProject)
        .filter(BlacklistedProject.name == "foo")
        .one()
    )
    assert blacklist.name == "foo"
    assert blacklist.blacklisted_by == db_request.user
    assert blacklist.comment == "This is a comment"

    # The project itself must be gone.
    assert not (db_request.db.query(Project).filter(Project.name == "foo").count())
def test_with_valid_password(self, monkeypatch):
    """_login returns the principals from _authenticate when the user
    service's password check succeeds."""
    principals = pretend.stub()
    authenticate = pretend.call_recorder(
        lambda userid, request: principals
    )
    monkeypatch.setattr(accounts, "_authenticate", authenticate)

    userid = pretend.stub()
    service = pretend.stub(
        find_userid=pretend.call_recorder(lambda username: userid),
        check_password=pretend.call_recorder(
            lambda userid, password: True
        ),
    )
    request = pretend.stub(
        find_service=pretend.call_recorder(lambda iface, context: service),
    )

    assert accounts._login("myuser", "mypass", request) is principals
    assert request.find_service.calls == [
        pretend.call(IUserService, context=None),
    ]
    assert service.find_userid.calls == [pretend.call("myuser")]
    assert service.check_password.calls == [pretend.call(userid, "mypass")]
    assert authenticate.calls == [pretend.call(userid, request)]
def test_send_email_success(self, monkeypatch):
    """send_email constructs a Message, looks up the mailer utility from the
    registry, and sends the message immediately."""
    message_obj = Message()

    def mock_message(*args, **kwargs):
        return message_obj

    monkeypatch.setattr(email, "Message", mock_message)

    task = pretend.stub()
    mailer = pretend.stub(
        send_immediately=pretend.call_recorder(lambda i: None)
    )
    request = pretend.stub(
        registry=pretend.stub(
            settings=pretend.stub(
                get=pretend.call_recorder(lambda k: 'SENDER'),
            ),
            getUtility=pretend.call_recorder(lambda mailr: mailer)
        )
    )

    email.send_email(
        task, request, "body", "subject", recipients=["recipients"],
    )

    assert mailer.send_immediately.calls == [pretend.call(message_obj)]
    assert request.registry.getUtility.calls == [pretend.call(IMailer)]
    # The sender address is read from the "mail.sender" setting.
    assert request.registry.settings.get.calls == [
        pretend.call("mail.sender")]
def test_wsgi_app_exception(app, monkeypatch):
    """wsgi_app resolves the matched route to a view module, and when the
    view raises an HTTPException, that exception is used as the WSGI
    response callable."""
    match = pretend.stub(match=pretend.call_recorder(lambda: ("warehouse.fake.view", {})))
    urls = pretend.stub(bind_to_environ=pretend.call_recorder(lambda e: match))
    response = pretend.call_recorder(lambda e, s: None)

    class FakeException(HTTPException):
        # @pretend.call_recorder
        def __call__(self, *args, **kwargs):
            return response

    @pretend.call_recorder
    def fake_view(*args, **kwargs):
        raise FakeException("An error has occurred")

    fake_module = pretend.stub(view=fake_view)
    import_module = pretend.call_recorder(lambda mod: fake_module)
    monkeypatch.setattr(importlib, "import_module", import_module)

    environ = create_environ()
    start_response = pretend.stub()

    app.urls = urls
    app.wsgi_app(environ, start_response)

    assert match.match.calls == [pretend.call()]
    assert urls.bind_to_environ.calls == [pretend.call(environ)]
    # "warehouse.fake.view" is split into module ("warehouse.fake") and
    # attribute ("view").
    assert import_module.calls == [pretend.call("warehouse.fake")]
    assert fake_view.calls == [pretend.call(app, mock.ANY)]
def test_request_without_tm(self, monkeypatch):
    """apply_async delegates straight to the superclass implementation when
    the current request has no transaction manager attached."""
    async_result = pretend.stub()
    super_class = pretend.stub(
        apply_async=pretend.call_recorder(lambda *a, **kw: async_result),
    )
    # NOTE(review): indexing __builtins__ assumes it is a dict here; in an
    # imported module it can be the builtins module instead — verify.
    real_super = __builtins__["super"]
    inner_super = pretend.call_recorder(lambda *a, **kw: super_class)

    # Intercept only zero-argument super() calls (the form used inside
    # WarehouseTask); every other use falls through to the real builtin.
    def fake_super(*args, **kwargs):
        if not args and not kwargs:
            return inner_super(*args, **kwargs)
        else:
            return real_super(*args, **kwargs)

    monkeypatch.setitem(__builtins__, "super", fake_super)

    request = pretend.stub()
    get_current_request = pretend.call_recorder(lambda: request)
    monkeypatch.setattr(celery, "get_current_request", get_current_request)

    task = celery.WarehouseTask()
    task.app = Celery()

    assert task.apply_async() is async_result
    assert super_class.apply_async.calls == [pretend.call()]
    assert get_current_request.calls == [pretend.call()]
    assert inner_super.calls == [pretend.call()]
def test_purge_key_unsuccessful(self, monkeypatch, result):
    """purge_key raises UnsuccessfulPurge when Fastly's JSON response (the
    parametrized ``result``) does not indicate a successful purge, after
    POSTing the soft-purge request and checking the HTTP status."""
    cacher = fastly.FastlyCache(
        api_key="an api key", service_id="the-service-id", purger=None
    )

    response = pretend.stub(
        raise_for_status=pretend.call_recorder(lambda: None),
        json=lambda: result
    )
    requests_post = pretend.call_recorder(lambda *a, **kw: response)
    monkeypatch.setattr(requests, "post", requests_post)

    with pytest.raises(fastly.UnsuccessfulPurge):
        cacher.purge_key("one")

    assert requests_post.calls == [
        pretend.call(
            "https://api.fastly.com/service/the-service-id/purge/one",
            headers={
                "Accept": "application/json",
                "Fastly-Key": "an api key",
                "Fastly-Soft-Purge": "1",
            },
        )
    ]
    assert response.raise_for_status.calls == [pretend.call()]
def test_purges_fails(self, monkeypatch, exception_type):
    """purge_key retries the task when the origin cacher raises, logging
    both the purge attempt and the resulting error."""
    exc = exception_type()

    class Cacher:
        @staticmethod
        @pretend.call_recorder
        def purge_key(key):
            raise exc

    class Task:
        @staticmethod
        @pretend.call_recorder
        def retry(exc):
            raise celery.exceptions.Retry

    task = Task()
    cacher = Cacher()
    request = pretend.stub(
        find_service=pretend.call_recorder(lambda iface: cacher),
        log=pretend.stub(
            info=pretend.call_recorder(lambda *args, **kwargs: None),
            error=pretend.call_recorder(lambda *args, **kwargs: None),
        ),
    )

    with pytest.raises(celery.exceptions.Retry):
        fastly.purge_key(task, request, "foo")

    assert request.find_service.calls == [pretend.call(IOriginCache)]
    assert cacher.purge_key.calls == [pretend.call("foo")]
    assert task.retry.calls == [pretend.call(exc=exc)]
    assert request.log.info.calls == [pretend.call("Purging %s", "foo")]
    assert request.log.error.calls == [
        pretend.call("Error purging %s: %s", "foo", str(exception_type()))
    ]
def test_call(self, monkeypatch):
    """Invoking the task prepares a Pyramid scripting environment from the
    app's registry, runs the task body with the prepared request, and calls
    the environment's closer afterwards."""
    request = pretend.stub()
    registry = pretend.stub()
    result = pretend.stub()

    prepared = {
        "registry": registry,
        "request": request,
        "closer": pretend.call_recorder(lambda: None)
    }
    prepare = pretend.call_recorder(lambda *a, **kw: prepared)
    monkeypatch.setattr(scripting, "prepare", prepare)

    @pretend.call_recorder
    def runner(irequest):
        # The task body must receive exactly the prepared request.
        assert irequest is request
        return result

    task = celery.WarehouseTask()
    task.app = Celery()
    task.app.pyramid_config = pretend.stub(registry=registry)
    task.run = runner

    assert task() is result
    assert prepare.calls == [pretend.call(registry=registry)]
    assert prepared["closer"].calls == [pretend.call()]
    assert runner.calls == [pretend.call(request)]
def test_index(app):
    """The index view renders index.html with project count, download
    count, and recently-updated data pulled from the packaging models."""
    project_count = pretend.stub()
    download_count = pretend.stub()
    updated = pretend.stub()
    app.db = pretend.stub(
        packaging=pretend.stub(
            get_project_count=pretend.call_recorder(
                lambda: project_count,
            ),
            get_download_count=pretend.call_recorder(
                lambda: download_count,
            ),
            get_recently_updated=pretend.call_recorder(lambda: updated),
        ),
    )
    request = pretend.stub()

    resp = index(app, request)

    assert resp.response.template.name == "index.html"
    assert resp.response.context == {
        "project_count": project_count,
        "download_count": download_count,
        "recently_updated": updated,
    }
    assert app.db.packaging.get_project_count.calls == [pretend.call()]
    assert app.db.packaging.get_download_count.calls == [pretend.call()]
    assert app.db.packaging.get_recently_updated.calls == [pretend.call()]
def test_valid_session_id_invalid_data(self, pyramid_request):
    """A correctly signed session id whose Redis payload cannot be
    deserialized yields a brand-new empty Session (no sid), while still
    registering the response-processing callback."""
    pyramid_request.cookies["session_id"] = "123456"

    session_factory = SessionFactory(
        "mysecret", "redis://redis://localhost:6379/0",
    )
    session_factory.signer.unsign = pretend.call_recorder(
        lambda session_id, max_age: b"123456"
    )
    # Redis returns bytes that are not a valid serialized session.
    session_factory.redis = pretend.stub(
        get=pretend.call_recorder(lambda key: b"invalid data"),
    )
    session_factory._process_response = pretend.stub()

    session = session_factory(pyramid_request)

    assert len(pyramid_request.response_callbacks) == 1
    assert pyramid_request.response_callbacks[0] is \
        session_factory._process_response
    # Sessions are valid for 12 hours.
    assert session_factory.signer.unsign.calls == [
        pretend.call("123456", max_age=12 * 60 * 60),
    ]
    assert session_factory.redis.get.calls == [
        pretend.call("warehouse/session/data/123456"),
    ]
    assert isinstance(session, Session)
    assert session._sid is None
    assert session.new
def test_includeme(monkeypatch):
    """sessions includeme builds a SessionFactory from the sessions.secret
    and sessions.url settings, registers it, and adds the session view
    deriver between csrf_view and INGRESS."""
    session_factory_obj = pretend.stub()
    session_factory_cls = pretend.call_recorder(
        lambda secret, url: session_factory_obj
    )
    monkeypatch.setattr(
        warehouse.sessions, "SessionFactory", session_factory_cls,
    )

    config = pretend.stub(
        set_session_factory=pretend.call_recorder(lambda factory: None),
        registry=pretend.stub(
            settings={
                "sessions.secret": "my secret",
                "sessions.url": "my url",
            },
        ),
        add_view_deriver=pretend.call_recorder(lambda *a, **kw: None),
    )

    includeme(config)

    assert config.set_session_factory.calls == [
        pretend.call(session_factory_obj),
    ]
    assert session_factory_cls.calls == [pretend.call("my secret", "my url")]
    assert config.add_view_deriver.calls == [
        pretend.call(
            session_view, over="csrf_view", under=viewderivers.INGRESS,
        ),
    ]
def test_delete_by_prefix_with_storage_prefix(self):
    """remove_by_prefix joins the configured storage prefix onto the key
    prefix ("docs/foo") and deletes the 150 listed objects in two
    delete_objects batches of 100 and 50 keys."""
    files = {"Contents": [{"Key": f"docs/foo/{i}.html"} for i in range(150)]}
    s3_client = pretend.stub(
        list_objects_v2=pretend.call_recorder(
            lambda Bucket=None, Prefix=None: files
        ),
        delete_objects=pretend.call_recorder(lambda Bucket=None, Delete=None: None),
    )
    storage = S3DocsStorage(s3_client, "bucket-name", prefix="docs")

    storage.remove_by_prefix("foo")

    assert s3_client.list_objects_v2.calls == [
        pretend.call(Bucket="bucket-name", Prefix="docs/foo")
    ]
    assert s3_client.delete_objects.calls == [
        pretend.call(
            Bucket="bucket-name",
            Delete={"Objects": [{"Key": f"docs/foo/{i}.html"} for i in range(100)]},
        ),
        pretend.call(
            Bucket="bucket-name",
            Delete={
                "Objects": [{"Key": f"docs/foo/{i}.html"} for i in range(100, 150)]
            },
        ),
    ]
def test_includeme():
    """static.includeme registers the two whitenoise config directives and
    the whitenoise tween with its over/under ordering constraints."""
    config = pretend.stub(
        add_directive=pretend.call_recorder(lambda name, callable: None),
        add_tween=pretend.call_recorder(lambda tween, over, under: None),
    )

    static.includeme(config)

    assert config.add_directive.calls == [
        pretend.call(
            "whitenoise_serve_static",
            static.whitenoise_serve_static,
        ),
        pretend.call(
            "whitenoise_add_files",
            static.whitenoise_add_files,
        ),
    ]
    assert config.add_tween.calls == [
        pretend.call(
            "warehouse.static.whitenoise_tween_factory",
            over=[
                "warehouse.utils.compression.compression_tween_factory",
                EXCVIEW,
            ],
            under=[
                "warehouse.csp.content_security_policy_tween_factory",
                "warehouse.config.require_https_tween_factory",
                INGRESS,
            ],
        ),
    ]
def test_redis_purge(self, fakeredis):
    """purge("test") empties that cache namespace, so the next fetch is a
    miss again; the metric reporter sees miss/hit/purge/miss/hit."""
    metric_reporter = pretend.stub(
        increment=pretend.call_recorder(lambda *args: None)
    )
    redis_lru = RedisLru(fakeredis, metric_reporter=metric_reporter)
    expected = func_test(0, 1, kwarg0=2, kwarg1=3)

    # First call misses and populates the cache; second call hits.
    assert expected == redis_lru.fetch(
        func_test, [0, 1], {"kwarg0": 2, "kwarg1": 3}, None, "test", None
    )
    assert expected == redis_lru.fetch(
        func_test, [0, 1], {"kwarg0": 2, "kwarg1": 3}, None, "test", None
    )

    redis_lru.purge("test")

    # After purging, the same miss-then-hit pattern repeats.
    assert expected == redis_lru.fetch(
        func_test, [0, 1], {"kwarg0": 2, "kwarg1": 3}, None, "test", None
    )
    assert expected == redis_lru.fetch(
        func_test, [0, 1], {"kwarg0": 2, "kwarg1": 3}, None, "test", None
    )

    assert metric_reporter.increment.calls == [
        pretend.call("lru.cache.miss"),
        pretend.call("lru.cache.hit"),
        pretend.call("lru.cache.purge"),
        pretend.call("lru.cache.miss"),
        pretend.call("lru.cache.hit"),
    ]
def test_includeme(forklift):
    """forklift includeme pulls in action routing and registers each legacy
    upload route, scoped to the configured forklift domain (if any).

    Bug fix: the two ``.calls == [...]`` comparisons at the end were
    previously bare expressions with no ``assert``, so this test silently
    asserted nothing about the registered routes.
    """
    settings = {}
    if forklift:
        settings["forklift.domain"] = forklift

    config = pretend.stub(
        get_settings=lambda: settings,
        include=pretend.call_recorder(lambda n: None),
        add_legacy_action_route=pretend.call_recorder(lambda *a, **k: None),
    )

    includeme(config)

    assert config.include.calls == [pretend.call(".action_routing")]
    assert config.add_legacy_action_route.calls == [
        pretend.call(
            "forklift.legacy.file_upload",
            "file_upload",
            domain=forklift,
        ),
        pretend.call("forklift.legacy.submit", "submit", domain=forklift),
        pretend.call(
            "forklift.legacy.submit_pkg_info",
            "submit_pkg_info",
            domain=forklift,
        ),
        pretend.call(
            "forklift.legacy.doc_upload",
            "doc_upload",
            domain=forklift,
        ),
    ]
def test_resets_password(self, db_request, monkeypatch):
    """user_reset_password disables the user's password with the
    CompromisedPassword reason, sends the notification email, and redirects
    (303) back to the admin user-detail page."""
    user = UserFactory.create()
    db_request.matchdict["user_id"] = str(user.id)
    db_request.params = {"username": user.username}
    db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foobar")
    db_request.user = UserFactory.create()
    db_request.remote_addr = "10.10.10.10"
    service = pretend.stub(
        find_userid=pretend.call_recorder(lambda username: user.id),
        disable_password=pretend.call_recorder(lambda userid, reason: None),
    )
    db_request.find_service = pretend.call_recorder(lambda iface, context: service)
    send_email = pretend.call_recorder(lambda *a, **kw: None)
    monkeypatch.setattr(views, "send_password_compromised_email", send_email)

    result = views.user_reset_password(db_request)

    assert db_request.find_service.calls == [
        pretend.call(IUserService, context=None)
    ]
    assert send_email.calls == [pretend.call(db_request, user)]
    assert service.disable_password.calls == [
        pretend.call(user.id, reason=DisableReason.CompromisedPassword)
    ]
    assert db_request.route_path.calls == [
        pretend.call("admin.user.detail", user_id=user.id)
    ]
    assert result.status_code == 303
    assert result.location == "/foobar"
def test_includeme(monkeypatch):
    """db includeme creates a SERIALIZABLE engine from the database.url
    setting, stores it in the registry, adds the alembic_config directive,
    and exposes a reified request.db."""

    class FakeRegistry(dict):
        settings = {"database.url": pretend.stub()}

    engine = pretend.stub()
    create_engine = pretend.call_recorder(lambda url, isolation_level: engine)
    config = pretend.stub(
        add_directive=pretend.call_recorder(lambda *a: None),
        registry=FakeRegistry(),
        add_request_method=pretend.call_recorder(lambda f, name, reify: None),
    )

    monkeypatch.setattr(sqlalchemy, "create_engine", create_engine)

    includeme(config)

    assert config.add_directive.calls == [
        pretend.call("alembic_config", _configure_alembic),
    ]
    assert create_engine.calls == [
        pretend.call(
            config.registry.settings["database.url"],
            isolation_level="SERIALIZABLE",
        ),
    ]
    assert config.registry["sqlalchemy.engine"] is engine
    assert config.add_request_method.calls == [
        pretend.call(_create_session, name="db", reify=True),
    ]
def test_unauthenticated_userid_no_userid(self, monkeypatch):
    """unauthenticated_userid returns None when no HTTP Basic credentials
    are present, while still registering the Vary: Authorization response
    callback."""
    extract_http_basic_credentials = \
        pretend.call_recorder(lambda request: None)
    monkeypatch.setattr(
        authentication,
        "extract_http_basic_credentials",
        extract_http_basic_credentials,
    )

    policy = auth_policy.BasicAuthAuthenticationPolicy(
        check=pretend.stub(),
    )

    vary_cb = pretend.stub()
    add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
    monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb)

    request = pretend.stub(
        add_response_callback=pretend.call_recorder(lambda cb: None),
    )

    assert policy.unauthenticated_userid(request) is None
    assert extract_http_basic_credentials.calls == [pretend.call(request)]
    assert add_vary_cb.calls == [pretend.call("Authorization")]
    assert request.add_response_callback.calls == [pretend.call(vary_cb)]
def test_delete_by_prefix(self, file_count):
    """remove_by_prefix (no storage prefix) lists keys under the prefix and
    deletes them with a single delete_objects call.

    Presumably file_count fits within one delete batch here; the
    parametrization is not visible in this view — confirm against it.
    """
    files = {
        'Contents': [
            {'Key': f'foo/{i}.html'}
            for i in range(file_count)
        ],
    }
    s3_client = pretend.stub(
        list_objects_v2=pretend.call_recorder(
            lambda Bucket=None, Prefix=None: files),
        delete_objects=pretend.call_recorder(
            lambda Bucket=None, Delete=None: None),
    )
    storage = S3DocsStorage(s3_client, 'bucket-name')

    storage.remove_by_prefix('foo')

    assert s3_client.list_objects_v2.calls == [
        pretend.call(Bucket='bucket-name', Prefix='foo'),
    ]
    assert s3_client.delete_objects.calls == [
        pretend.call(
            Bucket='bucket-name',
            Delete={
                'Objects': [
                    {'Key': f'foo/{i}.html'}
                    for i in range(file_count)
                ]
            },
        ),
    ]
def test_register_origin_keys(monkeypatch): class Fake1: pass class Fake2: pass key_maker = pretend.stub() key_maker_factory = pretend.call_recorder(lambda **kw: key_maker) monkeypatch.setattr(origin, "key_maker_factory", key_maker_factory) config = pretend.stub(registry={}) origin.register_origin_cache_keys( config, Fake1, cache_keys=["one", "two/{obj.attr}"]) origin.register_origin_cache_keys( config, Fake2, cache_keys=["three"], purge_keys=["lol"], ) assert key_maker_factory.calls == [ pretend.call(cache_keys=["one", "two/{obj.attr}"], purge_keys=None), pretend.call(cache_keys=["three"], purge_keys=["lol"]), ] assert config.registry == { "cache_keys": { Fake1: key_maker, Fake2: key_maker, }, }
def test_render_response():
    """render_response looks up the named template, renders it with the
    standard context helpers plus the caller's kwargs, and wraps the result
    in a WSGI response."""
    template = pretend.stub(render=pretend.call_recorder(lambda **k: "test"))
    app = pretend.stub(
        config=pretend.stub(),
        templates=pretend.stub(
            get_template=pretend.call_recorder(lambda t: template),
        ),
    )
    request = pretend.stub()

    resp = render_response(app, request, "template.html", foo="bar")

    # "test" is 4 bytes, hence the Content-Length.
    assert resp.get_wsgi_headers(create_environ())["Content-Length"] == "4"
    assert resp.data == b"test"
    assert app.templates.get_template.calls == [pretend.call("template.html")]
    assert template.render.calls == [
        pretend.call(
            foo="bar",
            config=app.config,
            csrf_token=mock.ANY,
            gravatar_url=mock.ANY,
            url_for=mock.ANY,
            static_url=mock.ANY,
        ),
    ]
def test_includeme_with_origin_cache():
    """When origin_cache.backend is configured, origin.includeme resolves
    the dotted backend class and registers its create_service factory for
    IOriginCache, alongside the cache-key directive."""
    cache_class = pretend.stub(create_service=pretend.stub())
    config = pretend.stub(
        add_directive=pretend.call_recorder(lambda name, func: None),
        registry=pretend.stub(
            settings={
                "origin_cache.backend":
                    "warehouse.cache.origin.fastly.FastlyCache",
            },
        ),
        maybe_dotted=pretend.call_recorder(lambda n: cache_class),
        register_service_factory=pretend.call_recorder(lambda f, iface: None)
    )

    origin.includeme(config)

    assert config.add_directive.calls == [
        pretend.call(
            "register_origin_cache_keys",
            origin.register_origin_cache_keys,
        ),
    ]
    assert config.maybe_dotted.calls == [
        pretend.call("warehouse.cache.origin.fastly.FastlyCache"),
    ]
    assert config.register_service_factory.calls == [
        pretend.call(cache_class.create_service, IOriginCache),
    ]
def test_virtualenv_state(monkeypatch):
    """Inside a virtualenv, the selfcheck state file lives directly at
    <sys.prefix>/pip-selfcheck.json; save() opens it once to read and once
    to write."""
    CONTENT = '{"last_check": "1970-01-02T11:00:00Z", "pypi_version": "1.0"}'
    fake_file = pretend.stub(
        read=pretend.call_recorder(lambda: CONTENT),
        write=pretend.call_recorder(lambda s: None),
    )

    @pretend.call_recorder
    @contextmanager
    def fake_open(filename, mode='r'):
        yield fake_file

    monkeypatch.setattr(outdated, 'open', fake_open, raising=False)

    monkeypatch.setattr(outdated, 'running_under_virtualenv',
                        pretend.call_recorder(lambda: True))

    monkeypatch.setattr(sys, 'prefix', 'virtually_env')

    state = outdated.load_selfcheck_statefile()
    state.save('2.0', datetime.datetime.utcnow())

    assert len(outdated.running_under_virtualenv.calls) == 1

    expected_path = os.path.join('virtually_env', 'pip-selfcheck.json')
    assert fake_open.calls == [
        pretend.call(expected_path),
        pretend.call(expected_path, 'w'),
    ]

    # json.dumps will call this a number of times
    assert len(fake_file.write.calls)
def test_response_hook():
    """html_cache_deriver wraps the view so it registers a response callback
    that caches the response under "all-html" plus the renderer name."""

    class Cache:
        @staticmethod
        @pretend.call_recorder
        def cache(keys, request, response):
            pass

    response = pretend.stub()

    @pretend.call_recorder
    def view(context, request):
        return response

    context = pretend.stub()
    cacher = Cache()
    callbacks = []
    request = pretend.stub(
        find_service=lambda iface: cacher,
        add_response_callback=callbacks.append
    )
    info = pretend.stub(options={"renderer": pretend.stub(name="foo.html")})
    derived_view = html_cache_deriver(view, info)

    assert derived_view(context, request) is response
    assert view.calls == [pretend.call(context, request)]
    assert len(callbacks) == 1

    # Fire the registered callback and verify the cache keys it passes on.
    callbacks[0](request, response)
    assert cacher.cache.calls == [
        pretend.call(["all-html", "foo.html"], request, response)
    ]
def test_invalidated_deletes_no_save(self, pyramid_request):
    """_process_response deletes each invalidated session's Redis key and,
    because the session does not need saving, drops the session cookie."""
    session_factory = SessionFactory(
        "mysecret", "redis://redis://localhost:6379/0",
    )
    session_factory.redis = pretend.stub(
        delete=pretend.call_recorder(lambda key: None)
    )
    pyramid_request.session.invalidated = ["1", "2"]
    pyramid_request.session.should_save = pretend.call_recorder(
        lambda: False
    )
    response = pretend.stub(
        delete_cookie=pretend.call_recorder(lambda cookie: None),
    )

    session_factory._process_response(pyramid_request, response)

    assert session_factory.redis.delete.calls == [
        pretend.call("warehouse/session/data/1"),
        pretend.call("warehouse/session/data/2"),
    ]
    # should_save is consulted twice (per the recorded calls).
    assert pyramid_request.session.should_save.calls == [
        pretend.call(),
        pretend.call(),
    ]
    assert response.delete_cookie.calls == [pretend.call("session_id")]
def test_with_disabled_user_compromised_pw(self, pyramid_request, pyramid_services):
    """Basic-auth login for a user disabled due to a compromised password
    raises BasicAuthBreachedPassword (with the breach service's message as
    the status reason) without ever invoking the password check."""
    user = pretend.stub(id=1)
    service = pretend.stub(
        get_user=pretend.call_recorder(lambda user_id: user),
        find_userid=pretend.call_recorder(lambda username: 1),
        check_password=pretend.call_recorder(
            lambda userid, password, tags=None: False
        ),
        is_disabled=pretend.call_recorder(
            lambda user_id: (True, DisableReason.CompromisedPassword)
        ),
    )
    pyramid_services.register_service(IUserService, None, service)
    pyramid_services.register_service(
        IPasswordBreachedService,
        None,
        pretend.stub(failure_message_plain="Bad Password!"),
    )

    with pytest.raises(BasicAuthBreachedPassword) as excinfo:
        assert (
            accounts._basic_auth_login("myuser", "mypass", pyramid_request)
            is None
        )

    assert excinfo.value.status == "401 Bad Password!"
    assert service.find_userid.calls == [pretend.call("myuser")]
    assert service.get_user.calls == [pretend.call(1)]
    assert service.is_disabled.calls == [pretend.call(1)]
    # The disabled check short-circuits before any password verification.
    assert service.check_password.calls == []
def test_404_when_missing_file(self, db_request, pyramid_config):
    """The packages view returns HTTPNotFound and logs "missing file data"
    when the storage backend raises FileNotFoundError for a file that
    exists in the database."""

    @pretend.call_recorder
    def raiser(path):
        raise FileNotFoundError

    storage_service = pretend.stub(get=raiser)

    project = ProjectFactory.create()
    release = ReleaseFactory.create(project=project)
    file_ = FileFactory.create(
        release=release,
        filename="{}-{}.tar.gz".format(project.name, release.version),
        python_version="source",
    )

    path = "source/{}/{}/{}".format(
        project.name[0], project.name, file_.filename
    )

    db_request.matchdict["path"] = path
    db_request.log = pretend.stub(
        error=pretend.call_recorder(lambda event, **kw: None),
    )
    db_request.find_service = pretend.call_recorder(
        lambda iface: storage_service
    )

    resp = views.packages(db_request)

    assert isinstance(resp, HTTPNotFound)
    assert db_request.find_service.calls == [pretend.call(IFileStorage)]
    assert storage_service.get.calls == [pretend.call(path)]
    assert db_request.log.error.calls == [
        pretend.call("missing file data", path=path),
    ]
def test_without_user(self):
    """_authenticate yields None when the user service has no such user."""
    user_service = pretend.stub(
        get_user=pretend.call_recorder(lambda userid: None),
    )
    fake_request = pretend.stub(
        find_service=lambda iface, context: user_service,
    )

    assert accounts._authenticate(1, fake_request) is None
    assert user_service.get_user.calls == [pretend.call(1)]
def test_detail_renders(self, pyramid_config, db_request):
    """json_release returns the full JSON payload for release "2.0",
    including the per-release file lists, the stats-service download
    counts, CORS headers, and the last journal serial."""
    project = ProjectFactory.create(has_docs=True)
    releases = [
        ReleaseFactory.create(project=project, version=v)
        for v in ["1.0", "2.0", "3.0"]
    ]
    # Only the first two releases get a file; "3.0" stays empty.
    files = [
        FileFactory.create(
            release=r,
            filename="{}-{}.tar.gz".format(project.name, r.version),
            python_version="source",
            size=200,
            has_signature=True,
        )
        for r in releases[:-1]
    ]
    user = UserFactory.create()
    JournalEntryFactory.reset_sequence()
    je = JournalEntryFactory.create(
        name=project.name, submitted_by=user,
    )

    daily_stats = pretend.stub()
    weekly_stats = pretend.stub()
    monthly_stats = pretend.stub()

    db_request.find_service = lambda x: pretend.stub(
        get_daily_stats=lambda p: daily_stats,
        get_weekly_stats=lambda p: weekly_stats,
        get_monthly_stats=lambda p: monthly_stats,
    )

    url = "/the/fake/url/"
    db_request.route_url = pretend.call_recorder(lambda *args, **kw: url)

    result = json.json_release(releases[1], db_request)

    assert set(db_request.route_url.calls) == {
        pretend.call("packaging.file", path=files[0].path),
        pretend.call("packaging.file", path=files[1].path),
        pretend.call("packaging.project", name=project.name),
        pretend.call(
            "packaging.release",
            name=project.name,
            version=releases[1].version,
        ),
        pretend.call("legacy.docs", project=project.name),
    }

    headers = db_request.response.headers
    assert headers["Access-Control-Allow-Origin"] == "*"
    assert headers["Access-Control-Allow-Headers"] == (
        "Content-Type, If-Match, If-Modified-Since, If-None-Match, "
        "If-Unmodified-Since")
    assert headers["Access-Control-Allow-Methods"] == "GET"
    assert headers["Access-Control-Max-Age"] == "86400"
    assert headers["Access-Control-Expose-Headers"] == "X-PyPI-Last-Serial"
    assert headers["X-PyPI-Last-Serial"] == str(je.id)

    assert result == {
        "info": {
            "author": None,
            "author_email": None,
            "bugtrack_url": None,
            "classifiers": [],
            "description": None,
            "docs_url": "/the/fake/url/",
            "download_url": None,
            "downloads": {
                "last_day": daily_stats,
                "last_week": weekly_stats,
                "last_month": monthly_stats,
            },
            "home_page": None,
            "keywords": None,
            "license": None,
            "maintainer": None,
            "maintainer_email": None,
            "name": project.name,
            "platform": None,
            "project_url": "/the/fake/url/",
            "release_url": "/the/fake/url/",
            "requires_dist": [],
            "requires_python": None,
            "summary": None,
            "version": "2.0",
        },
        "releases": {
            "1.0": [
                {
                    "comment_text": None,
                    "downloads": 0,
                    "filename": files[0].filename,
                    "has_sig": True,
                    "md5_digest": files[0].md5_digest,
                    "digests": {
                        "md5": files[0].md5_digest,
                        "sha256": files[0].sha256_digest,
                    },
                    "packagetype": None,
                    "python_version": "source",
                    "size": 200,
                    "upload_time": files[0].upload_time.strftime(
                        "%Y-%m-%dT%H:%M:%S",
                    ),
                    "url": "/the/fake/url/",
                },
            ],
            "2.0": [
                {
                    "comment_text": None,
                    "downloads": 0,
                    "filename": files[1].filename,
                    "has_sig": True,
                    "md5_digest": files[1].md5_digest,
                    "digests": {
                        "md5": files[1].md5_digest,
                        "sha256": files[1].sha256_digest,
                    },
                    "packagetype": None,
                    "python_version": "source",
                    "size": 200,
                    "upload_time": files[1].upload_time.strftime(
                        "%Y-%m-%dT%H:%M:%S",
                    ),
                    "url": "/the/fake/url/",
                },
            ],
            "3.0": [],
        },
        "urls": [
            {
                "comment_text": None,
                "downloads": 0,
                "filename": files[1].filename,
                "has_sig": True,
                "md5_digest": files[1].md5_digest,
                "digests": {
                    "md5": files[1].md5_digest,
                    "sha256": files[1].sha256_digest,
                },
                "packagetype": None,
                "python_version": "source",
                "size": 200,
                "upload_time": files[1].upload_time.strftime(
                    "%Y-%m-%dT%H:%M:%S",
                ),
                "url": "/the/fake/url/",
            },
        ],
    }
def test_includeme(monkeypatch):
    """accounts.includeme registers the user/token/breach/rate-limit service
    factories, the reified request.user property, and a session+basic-auth
    MultiAuthenticationPolicy with ACL authorization."""
    basic_authn_obj = pretend.stub()
    basic_authn_cls = pretend.call_recorder(lambda check: basic_authn_obj)
    session_authn_obj = pretend.stub()
    session_authn_cls = pretend.call_recorder(lambda callback: session_authn_obj)
    authn_obj = pretend.stub()
    authn_cls = pretend.call_recorder(lambda *a: authn_obj)
    authz_obj = pretend.stub()
    authz_cls = pretend.call_recorder(lambda: authz_obj)
    monkeypatch.setattr(accounts, "BasicAuthAuthenticationPolicy", basic_authn_cls)
    monkeypatch.setattr(accounts, "SessionAuthenticationPolicy", session_authn_cls)
    monkeypatch.setattr(accounts, "MultiAuthenticationPolicy", authn_cls)
    monkeypatch.setattr(accounts, "ACLAuthorizationPolicy", authz_cls)

    config = pretend.stub(
        register_service_factory=pretend.call_recorder(
            lambda factory, iface, name=None: None
        ),
        add_request_method=pretend.call_recorder(lambda f, name, reify: None),
        set_authentication_policy=pretend.call_recorder(lambda p: None),
        set_authorization_policy=pretend.call_recorder(lambda p: None),
    )

    accounts.includeme(config)

    assert config.register_service_factory.calls == [
        pretend.call(database_login_factory, IUserService),
        pretend.call(
            TokenServiceFactory(name="password"), ITokenService, name="password"
        ),
        pretend.call(TokenServiceFactory(name="email"), ITokenService, name="email"),
        pretend.call(hibp_password_breach_factory, IPasswordBreachedService),
        pretend.call(RateLimit("10 per 5 minutes"), IRateLimiter, name="user.login"),
        pretend.call(
            RateLimit("1000 per 5 minutes"), IRateLimiter, name="global.login"
        ),
    ]
    assert config.add_request_method.calls == [
        pretend.call(accounts._user, name="user", reify=True)
    ]
    assert config.set_authentication_policy.calls == [pretend.call(authn_obj)]
    assert config.set_authorization_policy.calls == [pretend.call(authz_obj)]
    assert basic_authn_cls.calls == [pretend.call(check=accounts._login_via_basic_auth)]
    assert session_authn_cls.calls == [pretend.call(callback=accounts._authenticate)]
    # Session auth is consulted before basic auth in the multi policy.
    assert authn_cls.calls == [pretend.call([session_authn_obj, basic_authn_obj])]
    assert authz_cls.calls == [pretend.call()]
def test_detail_renders(self, pyramid_config, db_request, db_session):
    """End-to-end check of the JSON release view for a fully-populated
    project: docs link, project URLs of many schemes, several releases,
    and one file per release.

    Fix: the "unsafechars" entry previously used "\^", an invalid escape
    sequence in a plain string literal (warns today, a future error).
    It is now written "\\^" — the runtime value is unchanged.
    """
    project = ProjectFactory.create(has_docs=True)
    description_content_type = "text/x-rst"
    url = "/the/fake/url/"
    project_urls = [
        "url," + url,
        "Homepage,https://example.com/home2/",
        "Source Code,https://example.com/source-code/",
        "uri,http://[email protected]:123/forum/questions/?tag=networking&order=newest#top",  # noqa: E501
        "ldap,ldap://[2001:db8::7]/c=GB?objectClass?one",
        "tel,tel:+1-816-555-1212",
        "telnet,telnet://192.0.2.16:80/",
        "urn,urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
        "reservedchars,http://example.com?&$+/:;=@#",  # Commas don't work!
        "unsafechars,http://example.com <>[]{}|\\^%",
    ]
    # The view renders project URLs most-recently-added first, hence reversed.
    expected_urls = []
    for project_url in reversed(project_urls):
        expected_urls.append(tuple(project_url.split(",")))
    expected_urls = OrderedDict(tuple(expected_urls))

    releases = [
        ReleaseFactory.create(project=project, version=v)
        for v in ["0.1", "1.0", "2.0"]
    ]
    releases += [
        ReleaseFactory.create(
            project=project,
            version="3.0",
            description_content_type=description_content_type,
        )
    ]
    # Attach each project URL to the 3.0 release as a Dependency row.
    for urlspec in project_urls:
        db_session.add(
            Dependency(
                name=releases[3].project.name,
                version="3.0",
                kind=DependencyKind.project_url.value,
                specifier=urlspec,
            )
        )
    # One sdist per release except the very first (0.1 stays file-less).
    files = [
        FileFactory.create(
            release=r,
            filename="{}-{}.tar.gz".format(project.name, r.version),
            python_version="source",
            size=200,
            has_signature=True,
        )
        for r in releases[1:]
    ]
    user = UserFactory.create()
    JournalEntryFactory.reset_sequence()
    je = JournalEntryFactory.create(name=project.name, submitted_by=user)
    db_request.route_url = pretend.call_recorder(lambda *args, **kw: url)

    result = json.json_release(releases[3], db_request)

    # Compare as a set: the view may generate these URLs in any order.
    assert set(db_request.route_url.calls) == {
        pretend.call("packaging.file", path=files[0].path),
        pretend.call("packaging.file", path=files[1].path),
        pretend.call("packaging.file", path=files[2].path),
        pretend.call("packaging.project", name=project.name),
        pretend.call(
            "packaging.release", name=project.name, version=releases[3].version
        ),
        pretend.call("legacy.docs", project=project.name),
    }
    _assert_has_cors_headers(db_request.response.headers)
    assert db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id)
    assert result == {
        "info": {
            "author": None,
            "author_email": None,
            "bugtrack_url": None,
            "classifiers": [],
            "description_content_type": description_content_type,
            "description": None,
            "docs_url": "/the/fake/url/",
            "download_url": None,
            "downloads": {
                "last_day": -1,
                "last_week": -1,
                "last_month": -1,
            },
            "home_page": None,
            "keywords": None,
            "license": None,
            "maintainer": None,
            "maintainer_email": None,
            "name": project.name,
            "package_url": "/the/fake/url/",
            "platform": None,
            "project_url": "/the/fake/url/",
            "project_urls": expected_urls,
            "release_url": "/the/fake/url/",
            "requires_dist": None,
            "requires_python": None,
            "summary": None,
            "version": "3.0",
        },
        "releases": {
            "0.1": [],
            "1.0": [{
                "comment_text": None,
                "downloads": -1,
                "filename": files[0].filename,
                "has_sig": True,
                "md5_digest": files[0].md5_digest,
                "digests": {
                    "md5": files[0].md5_digest,
                    "sha256": files[0].sha256_digest,
                },
                "packagetype": None,
                "python_version": "source",
                "size": 200,
                "upload_time": files[0].upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
                "url": "/the/fake/url/",
                "requires_python": None,
            }],
            "2.0": [{
                "comment_text": None,
                "downloads": -1,
                "filename": files[1].filename,
                "has_sig": True,
                "md5_digest": files[1].md5_digest,
                "digests": {
                    "md5": files[1].md5_digest,
                    "sha256": files[1].sha256_digest,
                },
                "packagetype": None,
                "python_version": "source",
                "size": 200,
                "upload_time": files[1].upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
                "url": "/the/fake/url/",
                "requires_python": None,
            }],
            "3.0": [{
                "comment_text": None,
                "downloads": -1,
                "filename": files[2].filename,
                "has_sig": True,
                "md5_digest": files[2].md5_digest,
                "digests": {
                    "md5": files[2].md5_digest,
                    "sha256": files[2].sha256_digest,
                },
                "packagetype": None,
                "python_version": "source",
                "size": 200,
                "upload_time": files[2].upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
                "url": "/the/fake/url/",
                "requires_python": None,
            }],
        },
        "urls": [{
            "comment_text": None,
            "downloads": -1,
            "filename": files[2].filename,
            "has_sig": True,
            "md5_digest": files[2].md5_digest,
            "digests": {
                "md5": files[2].md5_digest,
                "sha256": files[2].sha256_digest,
            },
            "packagetype": None,
            "python_version": "source",
            "size": 200,
            "upload_time": files[2].upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
            "url": "/the/fake/url/",
            "requires_python": None,
        }],
        "last_serial": je.id,
    }
def test_routes(warehouse):
    """Exhaustively verify every route, template view, redirect, PyPI action
    route/redirect, XML-RPC endpoint, and policy that routes.includeme
    registers, with and without an explicit warehouse domain.

    Fixes: the force-status pattern previously used "\d" inside a plain
    string literal — an invalid escape sequence (warns today, a future
    error) — now a raw string with an identical value; quote style is
    normalized to double quotes throughout for consistency.
    """
    docs_route_url = pretend.stub()

    class FakeConfig:
        # Minimal Pyramid-Configurator stand-in that records every
        # registration call made by includeme().
        def __init__(self):
            self.registry = pretend.stub(settings={
                "docs.url": docs_route_url,
                "files.url": "https://files.example.com/packages/{path}",
            })
            if warehouse:
                self.registry.settings["warehouse.domain"] = warehouse

        def get_settings(self):
            return self.registry.settings

        @staticmethod
        @pretend.call_recorder
        def add_route(*args, **kwargs):
            pass

        @staticmethod
        @pretend.call_recorder
        def add_template_view(*args, **kwargs):
            pass

        @staticmethod
        @pretend.call_recorder
        def add_redirect(*args, **kwargs):
            pass

        @staticmethod
        @pretend.call_recorder
        def add_pypi_action_route(name, action, **kwargs):
            pass

        @staticmethod
        @pretend.call_recorder
        def add_pypi_action_redirect(action, target, **kwargs):
            pass

        @staticmethod
        @pretend.call_recorder
        def add_xmlrpc_endpoint(endpoint, pattern, header, domain=None):
            pass

        @staticmethod
        @pretend.call_recorder
        def add_policy(name, filename):
            pass

    config = FakeConfig()
    includeme(config)

    assert config.add_route.calls == [
        pretend.call("health", "/_health/"),
        # Raw string: "\d" is an invalid escape in a plain string literal.
        pretend.call("force-status", r"/_force-status/{status:[45]\d\d}/"),
        pretend.call("index", "/", domain=warehouse),
        pretend.call("robots.txt", "/robots.txt", domain=warehouse),
        pretend.call("opensearch.xml", "/opensearch.xml", domain=warehouse),
        pretend.call("index.sitemap.xml", "/sitemap.xml", domain=warehouse),
        pretend.call(
            "bucket.sitemap.xml",
            "/{bucket}.sitemap.xml",
            domain=warehouse,
        ),
        pretend.call(
            "includes.current-user-indicator",
            "/_includes/current-user-indicator/",
            domain=warehouse,
        ),
        pretend.call(
            "includes.flash-messages",
            "/_includes/flash-messages/",
            domain=warehouse,
        ),
        pretend.call(
            "includes.current-user-profile-callout",
            "/_includes/current-user-profile-callout/{username}",
            factory="warehouse.accounts.models:UserFactory",
            traverse="/{username}",
            domain=warehouse,
        ),
        pretend.call(
            "includes.edit-project-button",
            "/_includes/edit-project-button/{project_name}",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "includes.profile-actions",
            "/_includes/profile-actions/{username}",
            factory="warehouse.accounts.models:UserFactory",
            traverse="/{username}",
            domain=warehouse,
        ),
        pretend.call("classifiers", "/classifiers/", domain=warehouse),
        pretend.call("search", "/search/", domain=warehouse),
        pretend.call(
            "accounts.profile",
            "/user/{username}/",
            factory="warehouse.accounts.models:UserFactory",
            traverse="/{username}",
            domain=warehouse,
        ),
        pretend.call("accounts.login", "/account/login/", domain=warehouse),
        pretend.call("accounts.logout", "/account/logout/", domain=warehouse),
        pretend.call(
            "accounts.register",
            "/account/register/",
            domain=warehouse,
        ),
        pretend.call(
            "accounts.request-password-reset",
            "/account/request-password-reset/",
            domain=warehouse,
        ),
        pretend.call(
            "accounts.reset-password",
            "/account/reset-password/",
            domain=warehouse,
        ),
        pretend.call(
            "accounts.verify-email",
            "/account/verify-email/",
            domain=warehouse,
        ),
        pretend.call(
            "manage.account", "/manage/account/", domain=warehouse
        ),
        pretend.call(
            "manage.projects", "/manage/projects/", domain=warehouse
        ),
        pretend.call(
            "manage.project.settings",
            "/manage/project/{project_name}/settings/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.delete_project",
            "/manage/project/{project_name}/delete_project/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.destroy_docs",
            "/manage/project/{project_name}/delete_project_docs/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.releases",
            "/manage/project/{project_name}/releases/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.release",
            "/manage/project/{project_name}/release/{version}/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}/{version}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.roles",
            "/manage/project/{project_name}/collaboration/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.change_role",
            "/manage/project/{project_name}/collaboration/change/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.delete_role",
            "/manage/project/{project_name}/collaboration/delete/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.documentation",
            "/manage/project/{project_name}/documentation/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "manage.project.history",
            "/manage/project/{project_name}/history/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{project_name}",
            domain=warehouse,
        ),
        pretend.call(
            "packaging.project",
            "/project/{name}/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{name}",
            domain=warehouse,
        ),
        pretend.call(
            "packaging.release",
            "/project/{name}/{version}/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{name}/{version}",
            domain=warehouse,
        ),
        pretend.call(
            "packaging.file",
            "https://files.example.com/packages/{path}",
        ),
        pretend.call("ses.hook", "/_/ses-hook/", domain=warehouse),
        pretend.call("rss.updates", "/rss/updates.xml", domain=warehouse),
        pretend.call("rss.packages", "/rss/packages.xml", domain=warehouse),
        pretend.call("legacy.api.simple.index", "/simple/", domain=warehouse),
        pretend.call(
            "legacy.api.simple.detail",
            "/simple/{name}/",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{name}/",
            read_only=True,
            domain=warehouse,
        ),
        pretend.call(
            "legacy.api.json.project",
            "/pypi/{name}/json",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{name}",
            read_only=True,
            domain=warehouse,
        ),
        pretend.call(
            "legacy.api.json.release",
            "/pypi/{name}/{version}/json",
            factory="warehouse.packaging.models:ProjectFactory",
            traverse="/{name}/{version}",
            read_only=True,
            domain=warehouse,
        ),
        pretend.call("legacy.docs", docs_route_url),
    ]

    assert config.add_template_view.calls == [
        pretend.call("help", "/help/", "pages/help.html"),
        pretend.call("security", "/security/", "pages/security.html"),
        pretend.call(
            "sponsors",
            "/sponsors/",
            "warehouse:templates/pages/sponsors.html",
        ),
    ]

    assert config.add_redirect.calls == [
        pretend.call("/p/{name}/", "/project/{name}/", domain=warehouse),
        pretend.call("/pypi/{name}/", "/project/{name}/", domain=warehouse),
        pretend.call(
            "/pypi/{name}/{version}/",
            "/project/{name}/{version}/",
            domain=warehouse,
        ),
        pretend.call(
            "/packages/{path:.*}",
            "https://files.example.com/packages/{path}",
            domain=warehouse,
        ),
    ]

    assert config.add_pypi_action_route.calls == [
        pretend.call(
            "legacy.api.pypi.file_upload",
            "file_upload",
            domain=warehouse,
        ),
        pretend.call("legacy.api.pypi.submit", "submit", domain=warehouse),
        pretend.call(
            "legacy.api.pypi.submit_pkg_info",
            "submit_pkg_info",
            domain=warehouse,
        ),
        pretend.call(
            "legacy.api.pypi.doc_upload",
            "doc_upload",
            domain=warehouse,
        ),
        pretend.call("legacy.api.pypi.doap", "doap", domain=warehouse),
        pretend.call(
            "legacy.api.pypi.list_classifiers",
            "list_classifiers",
            domain=warehouse,
        ),
        pretend.call(
            "legacy.api.pypi.search",
            "search",
            domain=warehouse,
        ),
        pretend.call(
            "legacy.api.pypi.browse",
            "browse",
            domain=warehouse,
        ),
        pretend.call(
            "legacy.api.pypi.files",
            "files",
            domain=warehouse,
        ),
        pretend.call(
            "legacy.api.pypi.display",
            "display",
            domain=warehouse,
        ),
    ]

    assert config.add_pypi_action_redirect.calls == [
        pretend.call("rss", "/rss/updates.xml", domain=warehouse),
        pretend.call("packages_rss", "/rss/packages.xml", domain=warehouse),
    ]

    assert config.add_xmlrpc_endpoint.calls == [
        pretend.call(
            "pypi",
            pattern="/pypi",
            header="Content-Type:text/xml",
            domain=warehouse,
        ),
        pretend.call(
            "pypi_slash",
            pattern="/pypi/",
            header="Content-Type:text/xml",
            domain=warehouse,
        ),
        pretend.call(
            "RPC2",
            pattern="/RPC2",
            header="Content-Type:text/xml",
            domain=warehouse,
        ),
    ]

    assert config.add_policy.calls == [
        pretend.call("terms-of-use", "terms.md"),
    ]
def test_no_modification():
    # A path needing no rewriting must be delegated to the wrapped WSGI
    # application with its environ untouched.
    wrapped = pretend.call_recorder(lambda environ, start_response: None)
    middleware = LegacyRewriteMiddleware(wrapped)
    middleware({"PATH_INFO": "/foo/bar"}, None)
    assert wrapped.calls == [pretend.call({"PATH_INFO": "/foo/bar"}, None)]
def test_minimal_renders(self, pyramid_config, db_request):
    """JSON release view for a minimal project: no docs, a single release
    with one file — most "info" fields should come back as None."""
    project = ProjectFactory.create(has_docs=False)
    release = ReleaseFactory.create(project=project, version="0.1")
    file = FileFactory.create(
        release=release,
        filename="{}-{}.tar.gz".format(project.name, release.version),
        python_version="source",
        size=200,
        has_signature=True,
    )
    user = UserFactory.create()
    JournalEntryFactory.reset_sequence()
    je = JournalEntryFactory.create(name=project.name, submitted_by=user)

    # Every generated URL resolves to the same fake value for easy asserts.
    url = "/the/fake/url/"
    db_request.route_url = pretend.call_recorder(lambda *args, **kw: url)

    result = json.json_release(release, db_request)

    # Compare as a set: URL generation order is not part of the contract.
    assert set(db_request.route_url.calls) == {
        pretend.call("packaging.file", path=file.path),
        pretend.call("packaging.project", name=project.name),
        pretend.call(
            "packaging.release", name=project.name, version=release.version
        ),
    }
    _assert_has_cors_headers(db_request.response.headers)
    assert db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id)
    assert result == {
        "info": {
            "author": None,
            "author_email": None,
            "bugtrack_url": None,
            "classifiers": [],
            "description_content_type": None,
            "description": None,
            "docs_url": None,
            "download_url": None,
            "downloads": {
                "last_day": -1,
                "last_week": -1,
                "last_month": -1,
            },
            "home_page": None,
            "keywords": None,
            "license": None,
            "maintainer": None,
            "maintainer_email": None,
            "name": project.name,
            "package_url": "/the/fake/url/",
            "platform": None,
            "project_url": "/the/fake/url/",
            "project_urls": None,
            "release_url": "/the/fake/url/",
            "requires_dist": None,
            "requires_python": None,
            "summary": None,
            "version": "0.1",
        },
        "releases": {
            "0.1": [{
                "comment_text": None,
                "downloads": -1,
                "filename": file.filename,
                "has_sig": True,
                "md5_digest": file.md5_digest,
                "digests": {
                    "md5": file.md5_digest,
                    "sha256": file.sha256_digest,
                },
                "packagetype": None,
                "python_version": "source",
                "size": 200,
                "upload_time": file.upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
                "url": "/the/fake/url/",
                "requires_python": None,
            }]
        },
        "urls": [{
            "comment_text": None,
            "downloads": -1,
            "filename": file.filename,
            "has_sig": True,
            "md5_digest": file.md5_digest,
            "digests": {
                "md5": file.md5_digest,
                "sha256": file.sha256_digest
            },
            "packagetype": None,
            "python_version": "source",
            "size": 200,
            "upload_time": file.upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
            "url": "/the/fake/url/",
            "requires_python": None,
        }],
        "last_serial": je.id,
    }
def test_post_validate_redirects(self, monkeypatch, pyramid_request, with_user):
    """Successful POST login: user is remembered, the session is recycled,
    last_login is updated, and the response redirects to manage.projects.

    Parameterized on ``with_user``: whether a user was already
    authenticated before this login attempt.
    """
    remember = pretend.call_recorder(
        lambda request, user_id: [("foo", "bar")])
    monkeypatch.setattr(views, "remember", remember)

    # Collects whatever the view copies into the freshly-invalidated session.
    new_session = {}

    user_id = uuid.uuid4()
    user_service = pretend.stub(
        find_userid=pretend.call_recorder(lambda username: user_id),
        update_user=pretend.call_recorder(lambda *a, **kw: None),
    )
    pyramid_request.find_service = pretend.call_recorder(
        lambda iface, context: user_service)
    pyramid_request.method = "POST"
    pyramid_request.session = pretend.stub(
        items=lambda: [("a", "b"), ("foo", "bar")],
        update=new_session.update,
        invalidate=pretend.call_recorder(lambda: None),
        new_csrf_token=pretend.call_recorder(lambda: None),
    )
    # Simulate an existing authenticated user only when with_user is set.
    pyramid_request.set_property(
        lambda r: str(uuid.uuid4()) if with_user else None,
        name="unauthenticated_userid",
    )
    form_obj = pretend.stub(
        validate=pretend.call_recorder(lambda: True),
        username=pretend.stub(data="theuser"),
    )
    form_class = pretend.call_recorder(lambda d, user_service: form_obj)
    pyramid_request.route_path = pretend.call_recorder(
        lambda a: '/the-redirect')

    # Freeze time so the last_login timestamp can be asserted exactly.
    now = datetime.datetime.utcnow()
    with freezegun.freeze_time(now):
        result = views.login(pyramid_request, _form_class=form_class)

    assert pyramid_request.registry.datadog.increment.calls == [
        pretend.call('warehouse.authentication.start',
                     tags=['auth_method:login_form']),
        pretend.call('warehouse.authentication.complete',
                     tags=['auth_method:login_form']),
    ]
    assert isinstance(result, HTTPSeeOther)
    assert pyramid_request.route_path.calls == [
        pretend.call('manage.projects')
    ]
    assert result.headers["Location"] == "/the-redirect"
    assert result.headers["foo"] == "bar"
    assert form_class.calls == [
        pretend.call(pyramid_request.POST, user_service=user_service),
    ]
    assert form_obj.validate.calls == [pretend.call()]
    assert user_service.find_userid.calls == [pretend.call("theuser")]
    assert user_service.update_user.calls == [
        pretend.call(user_id, last_login=now),
    ]
    if with_user:
        # An already-authenticated user keeps nothing from the old session.
        assert new_session == {}
    else:
        # Anonymous sessions carry their data over into the new session.
        assert new_session == {"a": "b", "foo": "bar"}
    assert remember.calls == [pretend.call(pyramid_request, str(user_id))]
    assert pyramid_request.session.invalidate.calls == [pretend.call()]
    assert pyramid_request.find_service.calls == [
        pretend.call(IUserService, context=None),
        pretend.call(IUserService, context=None),
    ]
    assert pyramid_request.session.new_csrf_token.calls == [pretend.call()]
def test_pypi_passes_through():
    # "/pypi" itself is not rewritten; it must reach the wrapped WSGI
    # application unchanged.
    wrapped = pretend.call_recorder(lambda environ, start_response: None)
    middleware = LegacyRewriteMiddleware(wrapped)
    middleware({"PATH_INFO": "/pypi"}, None)
    assert wrapped.calls == [pretend.call({"PATH_INFO": "/pypi"}, None)]