def test_response_hook(self, seconds, keys):
    """origin_cache(seconds, keys=...) must register a response callback
    that forwards the computed cache keys plus any extra ``keys`` to the
    cacher service, along with the TTL settings."""

    class Context:
        pass

    class RecordingCacher:
        @staticmethod
        @pretend.call_recorder
        def cache(
            keys,
            request,
            response,
            seconds,
            stale_while_revalidate,
            stale_if_error,
        ):
            pass

    rendered = pretend.stub()

    decorator = origin.origin_cache(seconds, keys=keys)

    @decorator
    def view(context, request):
        return rendered

    key_maker = pretend.call_recorder(
        lambda obj: origin.CacheKeys(cache=["one", "two"], purge=[])
    )
    cacher = RecordingCacher()
    context = Context()
    registered_hooks = []
    request = pretend.stub(
        registry={"cache_keys": {Context: key_maker}},
        find_service=lambda iface: cacher,
        add_response_callback=registered_hooks.append,
    )

    # The view renders normally and computes keys from the context.
    assert view(context, request) is rendered
    assert key_maker.calls == [pretend.call(context)]
    assert len(registered_hooks) == 1

    # Firing the registered hook hands everything to the cacher.
    registered_hooks[0](request, rendered)

    expected_keys = ["one", "two"] + ([] if keys is None else keys)
    assert cacher.cache.calls == [
        pretend.call(
            expected_keys,
            request,
            rendered,
            seconds=seconds,
            stale_while_revalidate=None,
            stale_if_error=None,
        )
    ]
def test_response_hook(self, seconds, keys):
    """Verify the origin_cache decorator wires a response callback that
    passes the sorted union of computed and extra keys to the cacher."""

    class FakeContext:
        pass

    class FakeCacher:
        @staticmethod
        @pretend.call_recorder
        def cache(
            keys, request, response, seconds, stale_while_revalidate, stale_if_error
        ):
            pass

    view_result = pretend.stub()

    wrap = origin.origin_cache(seconds, keys=keys)

    @wrap
    def view(context, request):
        return view_result

    key_maker = pretend.call_recorder(
        lambda obj: origin.CacheKeys(cache=["one", "two"], purge=[])
    )
    cacher = FakeCacher()
    ctx = FakeContext()
    hooks = []
    request = pretend.stub(
        registry={"cache_keys": {FakeContext: key_maker}},
        find_service=lambda iface: cacher,
        add_response_callback=hooks.append,
    )

    assert view(ctx, request) is view_result
    assert key_maker.calls == [pretend.call(ctx)]
    assert len(hooks) == 1

    # Invoke the hook as Pyramid would at response time.
    hooks[0](request, view_result)

    extra = [] if keys is None else keys
    assert cacher.cache.calls == [
        pretend.call(
            sorted(["one", "two"] + extra),
            request,
            view_result,
            seconds=seconds,
            stale_while_revalidate=None,
            stale_if_error=None,
        ),
    ]
def test_response_hook(self, seconds):
    """origin_cache works both bare (seconds is None) and called with a
    TTL; either way it must register a hook that caches the response
    under the keys produced by the registered key maker."""

    class Marker:
        pass

    class RecordingCacher:
        @staticmethod
        @pretend.call_recorder
        def cache(keys, request, response, seconds):
            pass

    rendered = pretend.stub()

    # Bare decorator when no TTL was parametrized, otherwise configured.
    decorator = (
        origin.origin_cache if seconds is None else origin.origin_cache(seconds)
    )

    @decorator
    def view(context, request):
        return rendered

    key_maker = pretend.call_recorder(lambda obj: ["one", "two"])
    cacher = RecordingCacher()
    context = Marker()
    hooks = []
    request = pretend.stub(
        registry={"cache_keys": {Marker: key_maker}},
        find_service=lambda iface: cacher,
        add_response_callback=hooks.append,
    )

    assert view(context, request) is rendered
    assert key_maker.calls == [pretend.call(context)]
    assert len(hooks) == 1

    hooks[0](request, rendered)

    assert cacher.cache.calls == [
        pretend.call(["one", "two"], request, rendered, seconds=seconds),
    ]
def test_response_hook(self, seconds):
    """The response hook registered by origin_cache (used bare or with an
    explicit TTL) must call the cacher with the key maker's keys."""

    class FakeCtx:
        pass

    class FakeCacher:
        @staticmethod
        @pretend.call_recorder
        def cache(keys, request, response, seconds):
            pass

    view_result = pretend.stub()

    if seconds is None:
        wrap = origin.origin_cache
    else:
        wrap = origin.origin_cache(seconds)

    @wrap
    def view(context, request):
        return view_result

    key_maker = pretend.call_recorder(lambda obj: ["one", "two"])
    cacher = FakeCacher()
    ctx = FakeCtx()
    recorded_callbacks = []
    request = pretend.stub(
        registry={"cache_keys": {FakeCtx: key_maker}},
        find_service=lambda iface: cacher,
        add_response_callback=recorded_callbacks.append,
    )

    assert view(ctx, request) is view_result
    assert key_maker.calls == [pretend.call(ctx)]
    assert len(recorded_callbacks) == 1

    # Simulate Pyramid firing the response callback.
    recorded_callbacks[0](request, view_result)

    assert cacher.cache.calls == [
        pretend.call(["one", "two"], request, view_result, seconds=seconds),
    ]
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound from pyramid.view import view_config from sqlalchemy.orm.exc import NoResultFound from warehouse.accounts.models import User from warehouse.cache.origin import origin_cache from warehouse.packaging.models import Release, Role @view_config( route_name="packaging.project", renderer="packaging/detail.html", decorator=[ origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=1 * 24 * 60 * 60, # 1 day stale_if_error=5 * 24 * 60 * 60, # 5 days ), ], ) def project_detail(project, request): if project.name != request.matchdict.get("name", project.name): return HTTPMovedPermanently( request.current_route_path(name=project.name), ) try: release = ( request.db.query(Release) .filter(Release.project == project) .order_by(
from pyramid.view import view_config from sqlalchemy.orm import joinedload from warehouse.cache.origin import origin_cache from warehouse.packaging.models import Project, Release from warehouse.xml import XML_CSP @view_config( route_name="rss.updates", renderer="rss/updates.xml", decorator=[ origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=1 * 24 * 60 * 60, # 1 day stale_if_error=5 * 24 * 60 * 60, # 5 days keys=["all-projects"], ) ], ) def rss_updates(request): request.response.content_type = "text/xml" request.find_service(name="csp").merge(XML_CSP) latest_releases = ( request.db.query(Release) .options(joinedload(Release.project)) .order_by(Release.created.desc()) .limit(40) .all()
from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.models import JournalEntry, File, Project, Release


@view_config(
    route_name="legacy.api.simple.index",
    renderer="legacy/api/simple/index.html",
    decorator=[
        cache_control(
            10 * 60,  # 10 minutes
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(7 * 24 * 60 * 60),  # 7 days
    ],
)
def simple_index(request):
    """Render the PEP 503 "simple" index listing every project.

    Also exposes the newest journal serial via the ``X-PyPI-Last-Serial``
    response header so mirroring clients can detect changes.
    """
    # Get the latest serial number; an empty journal yields 0.
    serial = request.db.query(func.max(JournalEntry.id)).scalar() or 0
    # BUGFIX: header values must be native strings — assigning the raw int
    # raises a TypeError when WebOb serializes the response headers.
    request.response.headers["X-PyPI-Last-Serial"] = str(serial)

    # Fetch the name and normalized name for all of our projects
    projects = (
        request.db.query(Project.name, Project.normalized_name)
        .order_by(Project.normalized_name)
        .all()
    )

    return {"projects": projects}
@forbidden_view_config(path_info=r"^/_includes/")
@exception_view_config(PredicateMismatch, path_info=r"^/_includes/")
def forbidden_include(exc, request):
    """Return a bare 403 for forbidden client-side-include requests."""
    # If the forbidden error is for a client-side-include, just return an empty
    # response instead of redirecting
    return Response(status=403)


@view_config(
    route_name="robots.txt",
    renderer="robots.txt",
    decorator=[
        cache_control(1 * 24 * 60 * 60),  # 1 day
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=6 * 60 * 60,  # 6 hours
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
    ],
)
def robotstxt(request):
    """Serve robots.txt as plain text; the template supplies the body."""
    request.response.content_type = "text/plain"
    return {}


# NOTE(review): the opensearch.xml view below is truncated in this chunk —
# its definition continues beyond the visible source.
@view_config(
    route_name="opensearch.xml",
    renderer="opensearch.xml",
    decorator=[
        cache_control(1 * 24 * 60 * 60),  # 1 day
        origin_cache(
# NOTE(review): the lines down to ``return resp`` are the tail of a function
# whose definition starts in an earlier chunk of this file.
)
# TODO: This is kind of gross, but we need it for as long as the legacy
#       upload API exists and is supported. Once we get rid of that we can
#       get rid of this as well.
resp.status = "{} {}".format(resp.status_code, "Too Many Failed Login Attempts")
return resp


@view_config(
    route_name="accounts.profile",
    context=User,
    renderer="accounts/profile.html",
    decorator=[
        origin_cache(1 * 24 * 60 * 60, stale_if_error=1 * 24 * 60 * 60)  # 1 day each.
    ],
)
def profile(user, request):
    """Public user profile page.

    Redirects permanently to the canonical username casing when needed,
    then renders the user's projects ordered by most recent release.
    """
    if user.username != request.matchdict.get("username", user.username):
        return HTTPMovedPermanently(request.current_route_path(username=user.username))

    projects = (
        request.db.query(Project)
        .filter(Project.users.contains(user))
        .join(Project.releases)
        .order_by(Release.created.desc())
        .all()
    )

    return {"user": user, "projects": projects}
from pyramid.view import view_config from sqlalchemy.orm.exc import NoResultFound from warehouse.utils import readme from warehouse.accounts.models import User from warehouse.cache.origin import origin_cache from warehouse.packaging.models import Project, Release, Role @view_config( route_name="packaging.project", context=Project, renderer="packaging/detail.html", decorator=[ origin_cache( 1 * 24 * 60 * 60, stale_if_error=5 * 24 * 60 * 60 # 1 day, 5 days stale ) ], ) def project_detail(project, request): if project.name != request.matchdict.get("name", project.name): return HTTPMovedPermanently(request.current_route_path(name=project.name)) try: release = ( request.db.query(Release) .filter(Release.project == project) .order_by(Release.is_prerelease.nullslast(), Release._pypi_ordering.desc()) .limit(1) .one() )
from sqlalchemy.orm.exc import NoResultFound from warehouse.accounts.models import User from warehouse.cache.http import cache_control from warehouse.cache.origin import origin_cache from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage from warehouse.packaging.models import Release, File, Role @view_config( route_name="packaging.project", renderer="packaging/detail.html", decorator=[ origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=1 * 24 * 60 * 60, # 1 day stale_if_error=5 * 24 * 60 * 60, # 5 days ), ], ) def project_detail(project, request): if project.name != request.matchdict.get("name", project.name): return HTTPMovedPermanently( request.current_route_path(name=project.name), ) try: release = ( request.db.query(Release) .options(joinedload(Release.uploader)) .filter(Release.project == project)
from warehouse.cache.http import cache_control
from warehouse.csrf import csrf_protect
from warehouse.sessions import uses_session
from warehouse.utils.http import is_safe_url


@view_config(
    route_name="accounts.profile",
    renderer="accounts/profile.html",
    decorator=[
        cache_control(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(30 * 24 * 60 * 60),  # 30 days
    ],
)
def profile(user, request):
    """Render a user's public profile, redirecting permanently to the
    canonical username casing when the URL differs."""
    if user.username != request.matchdict.get("username", user.username):
        return HTTPMovedPermanently(
            request.current_route_path(username=user.username),
        )

    return {"user": user}


# NOTE(review): the login view below is truncated in this chunk — its
# definition continues beyond the visible source.
@forbidden_view_config()
@view_config(
    route_name="accounts.login",
    renderer="accounts/login.html",
from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.models import JournalEntry, File, Project, Release


@view_config(
    route_name="legacy.api.simple.index",
    renderer="legacy/api/simple/index.html",
    decorator=[
        cache_control(
            10 * 60,  # 10 minutes
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(7 * 24 * 60 * 60),  # 7 days
    ],
)
def simple_index(request):
    """Render the PEP 503 "simple" index listing every project.

    Exposes the newest journal serial via the ``X-PyPI-Last-Serial``
    response header so mirroring clients can detect changes.
    """
    # Get the latest serial number; an empty journal yields 0.
    serial = request.db.query(func.max(JournalEntry.id)).scalar() or 0
    # BUGFIX: header values must be native strings — assigning the raw int
    # raises a TypeError when WebOb serializes the response headers.
    request.response.headers["X-PyPI-Last-Serial"] = str(serial)

    # Fetch the name and normalized name for all of our projects
    projects = (
        request.db.query(Project.name, Project.normalized_name)
        .order_by(Project.normalized_name)
        .all()
    )

    return {"projects": projects}
SITEMAP_MAXSIZE = 50000 Bucket = collections.namedtuple("Bucket", ["name", "modified"]) @view_config( route_name="index.sitemap.xml", renderer="sitemap/index.xml", decorator=[ cache_control(1 * 60 * 60), # 1 hour origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=6 * 60 * 60, # 6 hours stale_if_error=1 * 24 * 60 * 60, # 1 day keys=["all-projects"], ), ], ) def sitemap_index(request): request.response.content_type = "text/xml" request.find_service(name="csp").merge(XML_CSP) # We have > 50,000 URLs on PyPI and a single sitemap file can only support # a maximum of 50,000 URLs. We need to split our URLs up into multiple # files so we need to stick all of our URLs into buckets, in addition we # want our buckets to remain stable so that an URL won't change what bucket # it is in just because another URL is added or removed. Finally we also # want to minimize the number of buckets we have to reduce the number of
from sqlalchemy.orm.exc import NoResultFound from warehouse.accounts.models import User from warehouse.cache.http import cache_control from warehouse.cache.origin import origin_cache from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage from warehouse.packaging.models import Release, File, Role @view_config( route_name="packaging.project", renderer="packaging/detail.html", decorator=[ origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=1 * 24 * 60 * 60, # 1 day stale_if_error=5 * 24 * 60 * 60, # 5 days ), ], ) def project_detail(project, request): if project.name != request.matchdict.get("name", project.name): return HTTPMovedPermanently( request.current_route_path(name=project.name), ) try: release = (request.db.query(Release).filter( Release.project == project).order_by( Release._pypi_ordering.desc()).limit(1).one()) except NoResultFound: return HTTPNotFound()
# NOTE(review): the line below is the tail of a function whose definition
# starts in an earlier chunk of this file.
return Response(status=403)


@view_config(context=DatabaseNotAvailable)
def service_unavailable(exc, request):
    """Render an HTTP 503 page when the database cannot be reached."""
    return httpexception_view(HTTPServiceUnavailable(), request)


@view_config(
    route_name="robots.txt",
    renderer="robots.txt",
    decorator=[
        cache_control(1 * 24 * 60 * 60),  # 1 day
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=6 * 60 * 60,  # 6 hours
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
    ],
)
def robotstxt(request):
    """Serve robots.txt as plain text; the template supplies the body."""
    request.response.content_type = "text/plain"
    return {}


# NOTE(review): the opensearch.xml view below is truncated in this chunk —
# its definition continues beyond the visible source.
@view_config(
    route_name="opensearch.xml",
    renderer="opensearch.xml",
    decorator=[
        cache_control(1 * 24 * 60 * 60),  # 1 day
        origin_cache(
from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService
from warehouse.packaging.models import Release, File, Role


@view_config(
    route_name="packaging.project",
    renderer="packaging/detail.html",
    decorator=[
        cache_control(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(7 * 24 * 60 * 60),  # 7 days
    ],
)
def project_detail(project, request):
    """Resolve /project/<name>/ to the latest release's detail page.

    Permanently redirects when the requested name differs from the
    project's canonical name; 404s when the project has no releases.
    """
    requested_name = request.matchdict.get("name", project.name)
    if requested_name != project.name:
        # Canonicalize the URL onto the project's real name.
        canonical = request.current_route_path(name=project.name)
        return HTTPMovedPermanently(canonical)

    try:
        latest = (
            project.releases.order_by(Release._pypi_ordering.desc()).limit(1).one()
        )
    except NoResultFound:
        return HTTPNotFound()

    return release_detail(latest, request)
from warehouse.cache.http import cache_control from warehouse.cache.origin import origin_cache from warehouse.packaging.interfaces import IDownloadStatService from warehouse.packaging.models import File, Release, JournalEntry @view_config( route_name="legacy.api.json.project", renderer="json", decorator=[ cache_control( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=1 * 24 * 60 * 60, # 1 day stale_if_error=1 * 24 * 60 * 60, # 1 day ), origin_cache(7 * 24 * 60 * 60), # 7 days ], ) def json_project(project, request): if project.name != request.matchdict.get("name", project.name): return HTTPMovedPermanently( request.current_route_path(name=project.name), ) try: release = project.releases.order_by( Release._pypi_ordering.desc() ).limit(1).one() except NoResultFound: return HTTPNotFound()
from sqlalchemy.orm.exc import NoResultFound from warehouse.cache.http import cache_control from warehouse.cache.origin import origin_cache from warehouse.packaging.interfaces import IDownloadStatService from warehouse.packaging.models import File, Release, JournalEntry @view_config( route_name="legacy.api.json.project", renderer="json", decorator=[ cache_control(15 * 60), # 15 minutes origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=5 * 60, # 5 minutes stale_if_error=1 * 24 * 60 * 60, # 1 day ), ], ) def json_project(project, request): if project.name != request.matchdict.get("name", project.name): return HTTPMovedPermanently( request.current_route_path(name=project.name), ) try: release = (request.db.query(Release).filter( Release.project == project).order_by( Release._pypi_ordering.desc()).limit(1).one()) except NoResultFound: return HTTPNotFound()
from warehouse.packaging.models import Project from warehouse.xml import XML_CSP SITEMAP_MAXSIZE = 50000 Bucket = collections.namedtuple("Bucket", ["name", "modified"]) @view_config( route_name="index.sitemap.xml", renderer="sitemap/index.xml", decorator=[ cache_control(1 * 60 * 60), # 1 hour origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=6 * 60 * 60, # 6 hours stale_if_error=1 * 24 * 60 * 60, # 1 day keys=["all-projects"], ), ], ) def sitemap_index(request): request.response.content_type = "text/xml" request.find_service(name="csp").merge(XML_CSP) # We have > 50,000 URLs on PyPI and a single sitemap file can only support # a maximum of 50,000 URLs. We need to split our URLs up into multiple # files so we need to stick all of our URLs into buckets, in addition we # want our buckets to remain stable so that an URL won't change what bucket # it is in just because another URL is added or removed. Finally we also # want to minimize the number of buckets we have to reduce the number of
from sqlalchemy.orm.exc import NoResultFound from warehouse.cache.http import cache_control from warehouse.cache.origin import origin_cache from warehouse.packaging.interfaces import IDownloadStatService from warehouse.packaging.models import File, Release @view_config( route_name="legacy.api.json.project", renderer="json", decorator=[ cache_control(15 * 60), # 15 minutes origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=5 * 60, # 5 minutes stale_if_error=1 * 24 * 60 * 60, # 1 day ), ], ) def json_project(project, request): if project.name != request.matchdict.get("name", project.name): return HTTPMovedPermanently( request.current_route_path(name=project.name), ) try: release = ( request.db.query(Release) .filter(Release.project == project) .order_by(Release._pypi_ordering.desc())
# Require all valid looking return None author_emails.append(author_email) if not author_emails: return None return ", ".join(author_emails) @view_config( route_name="rss.updates", renderer="rss/updates.xml", decorator=[ origin_cache( 1 * 24 * 60 * 60, # 1 day stale_while_revalidate=1 * 24 * 60 * 60, # 1 day stale_if_error=5 * 24 * 60 * 60, # 5 days keys=["all-projects"], ) ], ) def rss_updates(request): request.response.content_type = "text/xml" request.find_service(name="csp").merge(XML_CSP) latest_releases = ( request.db.query(Release) .options(joinedload(Release.project)) .order_by(Release.created.desc()) .limit(40) .all()
# TODO: This is kind of gross, but we need it for as long as the legacy # upload API exists and is supported. Once we get rid of that we can # get rid of this as well. resp.status = "{} {}".format(resp.status_code, "Too Many Failed Login Attempts") return resp @view_config( route_name="accounts.profile", context=User, renderer="accounts/profile.html", decorator=[ origin_cache(1 * 24 * 60 * 60, stale_if_error=1 * 24 * 60 * 60) # 1 day each. ], ) def profile(user, request): if user.username != request.matchdict.get("username", user.username): return HTTPMovedPermanently( request.current_route_path(username=user.username)) projects = (request.db.query(Project).filter( Project.users.contains(user)).join(Project.releases).order_by( Release.created.desc()).all()) return {"user": user, "projects": projects} @view_config(
from warehouse.cache.origin import origin_cache
from warehouse.cache.http import cache_control
from warehouse.csrf import csrf_protect
from warehouse.sessions import uses_session


@view_config(
    route_name="accounts.profile",
    renderer="accounts/profile.html",
    decorator=[
        cache_control(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(30 * 24 * 60 * 60),  # 30 days
    ],
)
def profile(user, request):
    """Render a user's public profile, redirecting permanently to the
    canonical username casing when the URL differs."""
    if user.username != request.matchdict.get("username", user.username):
        return HTTPMovedPermanently(
            request.current_route_path(username=user.username),
        )

    return {"user": user}


# NOTE(review): the decorated login view is truncated in this chunk — the
# ``def`` it applies to lies beyond the visible source.
@view_config(
    route_name="accounts.login",
    renderer="accounts/login.html",
    decorator=[csrf_protect("accounts.login"), uses_session],
)