    return httpexception_view(exc, request)


@forbidden_view_config(path_info=r"^/_includes/")
@exception_view_config(PredicateMismatch, path_info=r"^/_includes/")
def forbidden_include(exc, request):
    """Return a bare 403 for forbidden client-side-include requests.

    Registered for both Forbidden and PredicateMismatch raised under the
    ``/_includes/`` path prefix.
    """
    # If the forbidden error is for a client-side-include, just return an empty
    # response instead of redirecting
    return Response(status=403)


@view_config(
    route_name="robots.txt",
    renderer="robots.txt",
    decorator=[
        cache_control(1 * 24 * 60 * 60),  # 1 day
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=6 * 60 * 60,  # 6 hours
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
    ],
)
def robotstxt(request):
    """Serve robots.txt from the "robots.txt" template as plain text."""
    # The renderer produces the body; the view only fixes the content type.
    request.response.content_type = "text/plain"
    return {}


@view_config(
    route_name="opensearch.xml",
    renderer="opensearch.xml",
from sqlalchemy import func
from sqlalchemy.orm.exc import NoResultFound

from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService
from warehouse.packaging.models import File, Release, JournalEntry


@view_config(
    route_name="legacy.api.json.project",
    renderer="json",
    decorator=[
        cache_control(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(7 * 24 * 60 * 60),  # 7 days
    ],
)
def json_project(project, request):
    """Render the legacy JSON API document for *project*."""
    # Canonicalize the URL: if the requested name does not match the project's
    # stored name exactly, permanently redirect to the canonical spelling.
    if project.name != request.matchdict.get("name", project.name):
        return HTTPMovedPermanently(
            request.current_route_path(name=project.name),
        )

    try:
        # Latest release: order by the project's internal PyPI ordering,
        # descending, and take exactly one row.
        release = project.releases.order_by(
            Release._pypi_ordering.desc()
        ).limit(1).one()
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
from pyramid.view import view_config
from sqlalchemy.orm.exc import NoResultFound

from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService
from warehouse.packaging.models import File, Release


@view_config(
    route_name="legacy.api.json.project",
    renderer="json",
    decorator=[
        cache_control(15 * 60),  # 15 minutes
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=5 * 60,  # 5 minutes
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
    ],
)
def json_project(project, request):
    """Render the legacy JSON API document for *project*."""
    # Canonicalize the URL: if the requested name does not match the project's
    # stored name exactly, permanently redirect to the canonical spelling.
    if project.name != request.matchdict.get("name", project.name):
        return HTTPMovedPermanently(
            request.current_route_path(name=project.name),
        )

    try:
        release = (
from sqlalchemy.orm.exc import NoResultFound

from warehouse.accounts.models import User
from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService
from warehouse.packaging.models import Release, File, Role


@view_config(
    route_name="packaging.project",
    renderer="packaging/detail.html",
    decorator=[
        cache_control(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(7 * 24 * 60 * 60),  # 7 days
    ],
)
def project_detail(project, request):
    """Render the HTML detail page for *project*'s latest release."""
    # Canonicalize the URL: if the requested name does not match the project's
    # stored name exactly, permanently redirect to the canonical spelling.
    if project.name != request.matchdict.get("name", project.name):
        return HTTPMovedPermanently(
            request.current_route_path(name=project.name),
        )

    try:
        # Latest release: highest _pypi_ordering wins.
        release = project.releases.order_by(
            Release._pypi_ordering.desc()).limit(1).one()
    except NoResultFound:
        # The project exists but has no releases yet.
        return HTTPNotFound()
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
from pyramid.view import view_config
from sqlalchemy import func
from sqlalchemy.orm.exc import NoResultFound

from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService
from warehouse.packaging.models import File, Release, JournalEntry


@view_config(
    route_name="legacy.api.json.project",
    renderer="json",
    decorator=[
        cache_control(15 * 60),  # 15 minutes
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=5 * 60,  # 5 minutes
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
    ],
)
def json_project(project, request):
    """Render the legacy JSON API document for *project*."""
    # Canonicalize the URL: if the requested name does not match the project's
    # stored name exactly, permanently redirect to the canonical spelling.
    if project.name != request.matchdict.get("name", project.name):
        return HTTPMovedPermanently(
            request.current_route_path(name=project.name),
        )

    try:
        # Query Release directly (rather than project.releases) so ordering is
        # explicit on the query object.
        release = (request.db.query(Release).filter(
            Release.project == project).order_by(
from pyramid.httpexceptions import HTTPMovedPermanently
from pyramid.view import view_config
from sqlalchemy import func

from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.models import JournalEntry, File, Project, Release


@view_config(
    route_name="legacy.api.simple.index",
    renderer="legacy/api/simple/index.html",
    decorator=[
        cache_control(
            10 * 60,  # 10 minutes
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(7 * 24 * 60 * 60),  # 7 days
    ],
)
def simple_index(request):
    """Render the legacy "simple" API index listing all projects."""
    # Get the latest serial number
    serial = request.db.query(func.max(JournalEntry.id)).scalar() or 0
    # BUGFIX: ``serial`` is an int (MAX(id) or the 0 fallback), but WSGI/WebOb
    # header values must be strings — assigning an int raises at response
    # time.  Convert explicitly, matching the other simple_index variant in
    # this file which already does str(serial).
    request.response.headers["X-PyPI-Last-Serial"] = str(serial)

    # Fetch the name and normalized name for all of our projects
    projects = (request.db.query(Project.name, Project.normalized_name).order_by(
        Project.normalized_name).all())
def forbidden_include(exc, request):
    """Return a bare 403 for forbidden client-side-include requests."""
    # If the forbidden error is for a client-side-include, just return an empty
    # response instead of redirecting
    return Response(status=403)


@view_config(context=DatabaseNotAvailable)
def service_unavailable(exc, request):
    """Translate a DatabaseNotAvailable error into a rendered 503 response."""
    # Delegate to the shared HTTP-exception view so the 503 gets the same
    # rendering as any other HTTP error.
    return httpexception_view(HTTPServiceUnavailable(), request)


@view_config(
    route_name="robots.txt",
    renderer="robots.txt",
    decorator=[
        cache_control(1 * 24 * 60 * 60),  # 1 day
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=6 * 60 * 60,  # 6 hours
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
    ],
)
def robotstxt(request):
    """Serve robots.txt from the "robots.txt" template as plain text."""
    request.response.content_type = "text/plain"
    return {}


@view_config(
    route_name="opensearch.xml",
    renderer="opensearch.xml",
from warehouse.accounts.models import User
from warehouse.cache.origin import origin_cache
from warehouse.cache.http import cache_control
from warehouse.packaging.models import Project
from warehouse.xml import XML_CSP


# Hard limit on URLs per sitemap file, per the sitemaps.org protocol.
SITEMAP_MAXSIZE = 50000


# A single sitemap bucket: its name plus the last-modified timestamp.
Bucket = collections.namedtuple("Bucket", ["name", "modified"])


@view_config(
    route_name="index.sitemap.xml",
    renderer="sitemap/index.xml",
    decorator=[
        cache_control(1 * 60 * 60),  # 1 hour
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=6 * 60 * 60,  # 6 hours
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
            keys=["all-projects"],
        ),
    ],
)
def sitemap_index(request):
    """Render the sitemap index that points at the per-bucket sitemaps."""
    request.response.content_type = "text/xml"
    # Apply the XML-specific Content-Security-Policy to this response.
    request.find_service(name="csp").merge(XML_CSP)

    # We have > 50,000 URLs on PyPI and a single sitemap file can only support
    # a maximum of 50,000 URLs. We need to split our URLs up into multiple
from packaging.version import parse
from pyramid.httpexceptions import HTTPMovedPermanently
from pyramid.view import view_config
from sqlalchemy import func
from sqlalchemy.orm import joinedload

from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.models import JournalEntry, File, Project, Release


@view_config(
    route_name="legacy.api.simple.index",
    renderer="legacy/api/simple/index.html",
    decorator=[
        cache_control(10 * 60),  # 10 minutes
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=5 * 60,  # 5 minutes
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
    ],
)
def simple_index(request):
    """Render the legacy "simple" API index listing all projects."""
    # Get the latest serial number
    serial = request.db.query(func.max(JournalEntry.id)).scalar() or 0
    # str() is required: WSGI/WebOb header values must be strings, and the
    # serial comes back from the database as an integer.
    request.response.headers["X-PyPI-Last-Serial"] = str(serial)

    # Fetch the name and normalized name for all of our projects
    projects = (
        request.db.query(Project.name, Project.normalized_name)
            request.current_route_path(name=project.name),
        )

    # Per-project download counts come from the download-stat service.
    stats_svc = request.find_service(IDownloadStatService)

    return {
        "daily": stats_svc.get_daily_stats(project.name),
        "weekly": stats_svc.get_weekly_stats(project.name),
        "monthly": stats_svc.get_monthly_stats(project.name),
    }


@view_config(
    route_name="packaging.file",
    decorator=[
        cache_control(365 * 24 * 60 * 60),  # 1 year
        origin_cache(
            365 * 24 * 60 * 60,  # 1 year
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=5 * 24 * 60 * 60,  # 5 days
        ),
    ],
)
def packages(request):
    """Serve a package distribution file identified by the "path" match."""
    # The amount of logic that we can do in this view is very limited, this
    # view needs to be able to be handled by Fastly directly hitting S3 instead
    # of actually hitting this view. This more or less means that we're limited
    # to just serving the actual file.

    # Grab the path of the file that we're attempting to serve
    path = request.matchdict["path"]
from pyramid.httpexceptions import HTTPMovedPermanently from pyramid.view import view_config from sqlalchemy import func from warehouse.cache.http import cache_control from warehouse.cache.origin import origin_cache from warehouse.packaging.models import JournalEntry, File, Project, Release @view_config( route_name="legacy.api.simple.index", renderer="legacy/api/simple/index.html", decorator=[ cache_control( 10 * 60, # 10 minutes stale_while_revalidate=1 * 24 * 60 * 60, # 1 day stale_if_error=1 * 24 * 60 * 60, # 1 day ), origin_cache(7 * 24 * 60 * 60), # 7 days ], ) def simple_index(request): # Get the latest serial number serial = request.db.query(func.max(JournalEntry.id)).scalar() or 0 request.response.headers["X-PyPI-Last-Serial"] = serial # Fetch the name and normalized name for all of our projects projects = ( request.db.query(Project.name, Project.normalized_name) .order_by(Project.normalized_name) .all()
from warehouse.cache.http import cache_control
from warehouse.packaging.models import Project
from warehouse.xml import XML_CSP


# Hard limit on URLs per sitemap file, per the sitemaps.org protocol.
SITEMAP_MAXSIZE = 50000


# A single sitemap bucket: its name plus the last-modified timestamp.
Bucket = collections.namedtuple("Bucket", ["name", "modified"])


@view_config(
    route_name="index.sitemap.xml",
    renderer="sitemap/index.xml",
    decorator=[
        cache_control(1 * 60 * 60),  # 1 hour
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=6 * 60 * 60,  # 6 hours
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
            keys=["all-projects"],
        ),
    ],
)
def sitemap_index(request):
    """Render the sitemap index that points at the per-bucket sitemaps."""
    request.response.content_type = "text/xml"
    # Apply the XML-specific Content-Security-Policy to this response.
    request.find_service(name="csp").merge(XML_CSP)

    # We have > 50,000 URLs on PyPI and a single sitemap file can only support
    # a maximum of 50,000 URLs. We need to split our URLs up into multiple
        return HTTPMovedPermanently(
            request.current_route_path(name=project.name),
        )

    # Per-project download counts come from the download-stat service.
    stats_svc = request.find_service(IDownloadStatService)

    return {
        "daily": stats_svc.get_daily_stats(project.name),
        "weekly": stats_svc.get_weekly_stats(project.name),
        "monthly": stats_svc.get_monthly_stats(project.name),
    }


@view_config(
    route_name="packaging.file",
    decorator=[
        cache_control(365 * 24 * 60 * 60),  # 1 year
        origin_cache(
            365 * 24 * 60 * 60,  # 1 year
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=5 * 24 * 60 * 60,  # 5 days
        ),
    ],
)
def packages(request):
    """Serve a package distribution file identified by the "path" match."""
    # The amount of logic that we can do in this view is very limited, this
    # view needs to be able to be handled by Fastly directly hitting S3 instead
    # of actually hitting this view. This more or less means that we're limited
    # to just serving the actual file.

    # Grab the path of the file that we're attempting to serve
    path = request.matchdict["path"]
def forbidden_include(exc, request):
    """Return a bare 403 for forbidden client-side-include requests."""
    # If the forbidden error is for a client-side-include, just return an empty
    # response instead of redirecting
    return Response(status=403)


@view_config(context=DatabaseNotAvailableError)
def service_unavailable(exc, request):
    """Translate a DatabaseNotAvailableError into a rendered 503 response."""
    # Delegate to the shared HTTP-exception view so the 503 gets the same
    # rendering as any other HTTP error.
    return httpexception_view(HTTPServiceUnavailable(), request)


@view_config(
    route_name="robots.txt",
    renderer="robots.txt",
    decorator=[
        cache_control(1 * 24 * 60 * 60),  # 1 day
        origin_cache(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=6 * 60 * 60,  # 6 hours
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
    ],
)
def robotstxt(request):
    """Serve robots.txt from the "robots.txt" template as plain text."""
    request.response.content_type = "text/plain"
    return {}


@view_config(
    route_name="opensearch.xml",
    renderer="opensearch.xml",
from sqlalchemy.orm.exc import NoResultFound

from warehouse.accounts.models import User
from warehouse.cache.http import cache_control
from warehouse.cache.origin import origin_cache
from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage
from warehouse.packaging.models import Release, File, Role


@view_config(
    route_name="packaging.project",
    renderer="packaging/detail.html",
    decorator=[
        cache_control(
            1 * 24 * 60 * 60,  # 1 day
            stale_while_revalidate=1 * 24 * 60 * 60,  # 1 day
            stale_if_error=1 * 24 * 60 * 60,  # 1 day
        ),
        origin_cache(7 * 24 * 60 * 60),  # 7 days
    ],
)
def project_detail(project, request):
    """Render the HTML detail page for *project*'s latest release."""
    # Canonicalize the URL: if the requested name does not match the project's
    # stored name exactly, permanently redirect to the canonical spelling.
    if project.name != request.matchdict.get("name", project.name):
        return HTTPMovedPermanently(
            request.current_route_path(name=project.name),
        )

    try:
        # Latest release: highest _pypi_ordering wins.
        release = project.releases.order_by(
            Release._pypi_ordering.desc()
        ).limit(1).one()