def build_page_data(ctx):
    """ Build the template data mapping used to render the page in `ctx`.

        Combines PieCrust globals, per-page data, assets, pagination and
        family links with data providers and the site configuration.
    """
    app = ctx.app
    page = ctx.page
    base_uri, _ = split_sub_uri(app, ctx.uri)

    pagination_src = ctx.pagination_source or get_default_pagination_source(page)

    piecrust_data = PieCrustData()
    local_data = {
        'piecrust': piecrust_data,
        'page': PageData(page, ctx),
        'assets': Assetor(page, base_uri),
        'pagination': Paginator(page, pagination_src,
                                page_num=ctx.page_num,
                                pgn_filter=ctx.pagination_filter),
        'family': PageLinkerData(page.source, page.rel_path)}

    #TODO: handle slugified taxonomy terms.
    site_data = app.config.getAll()
    providers = DataProvidersData(page)
    data = MergedMapping([local_data, providers, site_data])

    # Do this at the end because we want all the data to be ready to be
    # displayed in the debugger window.
    debug_window = (app.config.get('site/show_debug_info') and
                    not app.config.get('baker/is_baking'))
    if debug_window:
        piecrust_data._enableDebugInfo(page, data)

    return data
def build_page_data(ctx):
    """ Assemble the full template data mapping for the page in `ctx`. """
    app = ctx.app
    page = ctx.page
    first_uri, _ = split_sub_uri(app, ctx.uri)

    pc_data = PieCrustData()
    pgn_source = ctx.pagination_source or get_default_pagination_source(page)

    # Per-page data: deep-copied source metadata and page config, plus a
    # few computed values (URL, timestamp, optional formatted date).
    page_data = {}
    page_data.update(copy.deepcopy(page.source_metadata))
    page_data.update(page.config.getDeepcopy(app.debug))
    page_data["url"] = ctx.uri
    page_data["timestamp"] = time.mktime(page.datetime.timetuple())
    date_format = app.config.get("site/date_format")
    if date_format:
        page_data["date"] = page.datetime.strftime(date_format)

    data = {
        "piecrust": pc_data,
        "page": page_data,
        "assets": Assetor(page, first_uri),
        "pagination": Paginator(page, pgn_source,
                                page_num=ctx.page_num,
                                pgn_filter=ctx.pagination_filter),
        "family": PageLinkerData(page.source, page.rel_path),
    }

    # TODO: handle slugified taxonomy terms.
    site_data = build_site_data(page)
    merge_dicts(data, site_data)

    # Do this at the end because we want all the data to be ready to be
    # displayed in the debugger window.
    if (app.config.get("site/show_debug_info") and
            not app.config.get("baker/is_baking")):
        pc_data._enableDebugInfo(page, data)
    return data
def build_page_data(ctx):
    """ Create the data mapping exposed to templates for the page in `ctx`. """
    app = ctx.app
    page = ctx.page
    first_uri, _ = split_sub_uri(app, ctx.uri)
    pgn_source = ctx.pagination_source or get_default_pagination_source(page)

    pc_data = PieCrustData()
    tmpl_data = {
        'piecrust': pc_data,
        'page': PageData(page, ctx),
        'assets': Assetor(page, first_uri),
        'pagination': Paginator(page, pgn_source,
                                page_num=ctx.page_num,
                                pgn_filter=ctx.pagination_filter),
        'family': PageLinkerData(page.source, page.rel_path)}

    #TODO: handle slugified taxonomy terms.
    site_data = app.config.getAll()
    providers_data = DataProvidersData(page)
    data = MergedMapping([tmpl_data, providers_data, site_data])

    # Do this at the end because we want all the data to be ready to be
    # displayed in the debugger window.
    wants_debug = (app.config.get('site/show_debug_info') and
                   not app.config.get('baker/is_baking'))
    if wants_debug:
        pc_data.enableDebugInfo(page)

    return data
def get_requested_pages(app, req_path):
    """ Return `(pages, not_founds)` for the requested URL: the pages that
        matched a route, and the `PageNotFoundError`s for routes that
        matched but had no page.

        Raises `RouteNotFoundError` when no route matches at all.
    """
    # Strip the trailing slash (unless this is the site root) to simplify
    # URL parsing.
    if req_path != app.config.get('site/root'):
        req_path = req_path.rstrip('/')

    # The URL may also designate a sub-page (i.e. it ends with a page
    # number), so matching considers both the full path and the base path.
    base_path, sub_num = split_sub_uri(app, req_path)
    routes = find_routes(app.routes, req_path, (base_path, sub_num))
    if not routes:
        raise RouteNotFoundError("Can't find route for: %s" % req_path)

    found = []
    errors = []
    for route, route_params, route_sub_num in routes:
        # Sub-page matches are reported against the numberless base path.
        cur_path = base_path if route_sub_num > 1 else req_path

        page = _get_requested_page_for_route(app, route, route_params)
        if page is None:
            errors.append(PageNotFoundError(
                "No path found for '%s' in source '%s'." %
                (cur_path, route.source_name)))
            continue

        rp = RequestedPage()
        rp.page = page
        rp.sub_num = route_sub_num
        rp.req_path = cur_path
        found.append(rp)
    return found, errors
def get_requested_pages(app, req_path):
    """ Find all pages (and not-found errors) for the requested URL. """
    # Trailing slashes are stripped so URL parsing stays simple; the site
    # root is left untouched.
    root_url = app.config.get('site/root')
    if req_path != root_url:
        req_path = req_path.rstrip('/')

    # A URL ending in a page number is a sub-page, so matching considers
    # both the requested path and its numberless base.
    path_no_sub, sub_num = split_sub_uri(app, req_path)
    matched = find_routes(app.routes, req_path, (path_no_sub, sub_num))
    if len(matched) == 0:
        raise RouteNotFoundError("Can't find route for: %s" % req_path)

    pages = []
    misses = []
    for route, route_params, route_sub_num in matched:
        effective_path = req_path
        if route_sub_num > 1:
            effective_path = path_no_sub

        page = _get_requested_page_for_route(app, route, route_params)
        if page is not None:
            entry = RequestedPage()
            entry.page = page
            entry.sub_num = route_sub_num
            entry.req_path = effective_path
            pages.append(entry)
        else:
            misses.append(
                PageNotFoundError("No path found for '%s' in source '%s'." %
                                  (effective_path, route.source_name)))
    return pages, misses
def test_split_sub_uri_with_root(uri, expected, pretty_urls):
    # Exercise URI splitting when the site is mounted under '/whatever/'.
    app = mock.MagicMock()
    config = {}
    config['site/root'] = '/whatever/'
    config['site/pretty_urls'] = pretty_urls
    config['__cache/pagination_suffix_re'] = '/(?P<num>\\d+)$'
    app.config = config

    base, num = split_sub_uri(app, '/whatever' + uri)
    assert (base, num) == ('/whatever' + expected[0], expected[1])
def _paginate(self, value, items_per_page=5):
    """ Template filter: paginate `value` using the currently-rendered
        page's context.

        Raises `Exception` if no page is currently being rendered, since
        pagination needs the page and page number from the execution stack.
    """
    cpi = self.app.env.exec_info_stack.current_page_info
    if cpi is None or cpi.page is None or cpi.render_ctx is None:
        raise Exception("Can't paginate when no page has been pushed "
                        "on the execution stack.")
    # NOTE(review): removed a `split_sub_uri(self.app, cpi.render_ctx.uri)`
    # call whose `first_uri` result was never used; the paginator only
    # needs the page number. Assumes `split_sub_uri` has no side effects —
    # confirm if it can raise on malformed URIs.
    return Paginator(cpi.page, value,
                     page_num=cpi.render_ctx.page_num,
                     items_per_page=items_per_page)
def test_split_sub_uri_with_root(uri, expected, pretty_urls):
    # Splitting a sub-URI must account for the '/whatever/' site root.
    app = mock.MagicMock()
    app.config = dict([
        ('site/root', '/whatever/'),
        ('site/pretty_urls', pretty_urls),
        ('__cache/pagination_suffix_re', '/(?P<num>\\d+)$')])

    result = split_sub_uri(app, '/whatever' + uri)
    expected_pair = ('/whatever' + expected[0], expected[1])
    assert result == expected_pair
def test_split_sub_uri_trailing_slash(uri, expected, pretty_urls):
    # With `site/trailing_slash` enabled, splitting should still yield the
    # expected base URI and page number.
    app = mock.MagicMock()
    app.config = {key: val for key, val in [
        ('site/root', '/'),
        ('site/pretty_urls', pretty_urls),
        ('site/trailing_slash', True),
        ('__cache/pagination_suffix_re', '/(?P<num>\\d+)$')]}

    base, num = split_sub_uri(app, uri)
    assert base == expected[0]
    assert num == expected[1]
def test_split_sub_uri_trailing_slash(uri, expected, pretty_urls):
    # Trailing-slash variant of the URI-splitting test.
    config = {
        'site/root': '/',
        'site/pretty_urls': pretty_urls,
        'site/trailing_slash': True,
        '__cache/pagination_suffix_re': '/(?P<num>\\d+)$',
    }
    app = mock.MagicMock()
    app.config = config

    assert split_sub_uri(app, uri) == (expected[0], expected[1])
def build_page_data(ctx):
    """ Build the data mapping passed to the template engine for `ctx.page`. """
    app = ctx.app
    page = ctx.page
    first_uri, _ = split_sub_uri(app, ctx.uri)

    pc_data = PieCrustData()
    pgn_source = ctx.pagination_source or get_default_pagination_source(page)
    paginator = Paginator(page, pgn_source,
                          page_num=ctx.page_num,
                          pgn_filter=ctx.pagination_filter)
    assetor = Assetor(page, first_uri)
    linker = PageLinkerData(page.source, page.rel_path)

    data = {
        'piecrust': pc_data,
        'page': {},
        'assets': assetor,
        'pagination': paginator,
        'family': linker}

    # Fill in the 'page' entry: deep-copied source metadata and page
    # config, plus a few computed values.
    pg = data['page']
    pg.update(copy.deepcopy(page.source_metadata))
    pg.update(page.config.getDeepcopy(app.debug))
    pg['url'] = ctx.uri
    pg['timestamp'] = time.mktime(page.datetime.timetuple())
    fmt = app.config.get('site/date_format')
    if fmt:
        pg['date'] = page.datetime.strftime(fmt)

    #TODO: handle slugified taxonomy terms.
    merge_dicts(data, build_site_data(page))

    # Do this at the end because we want all the data to be ready to be
    # displayed in the debugger window.
    show_debug = (app.config.get('site/show_debug_info') and
                  not app.config.get('baker/is_baking'))
    if show_debug:
        pc_data._enableDebugInfo(page, data)

    return data
def get_requested_page(app, req_path):
    """ Resolve the requested URL to a `RequestedPage`, recording the page
        number and any not-found errors encountered while matching routes.

        Raises `RouteNotFoundError` when no route matches.
    """
    # The URL may end with a page number; split it off before matching.
    req_path, page_num = split_sub_uri(app, req_path)
    routes = find_routes(app.routes, req_path)
    if not routes:
        raise RouteNotFoundError("Can't find route for: %s" % req_path)

    # Try each route in order; the first one that yields a page wins.
    qp = None
    nf_errors = []
    for route, route_metadata in routes:
        try:
            qp = _get_requested_page_for_route(
                app, route, route_metadata, req_path)
        except PageNotFoundError as nfe:
            nf_errors.append(nfe)
            continue
        if qp is not None:
            break

    req_page = RequestedPage(qp)
    req_page.req_path = req_path
    req_page.page_num = page_num
    req_page.not_found_errors = nf_errors
    return req_page
def get_requested_page(app, req_path):
    """ Resolve the requested URL to a qualified page, also trying
        sub-page (numbered) matches.
    """
    # Strip the trailing slash, except on the site root, to keep URL
    # parsing simple.
    root_url = app.config.get('site/root')
    if req_path != root_url:
        req_path = req_path.rstrip('/')

    # Match the URL as-is first.
    routes = find_routes(app.routes, req_path)

    # If the URL ends with a page number it may be a sub-page, so also
    # match the base URL without the number.
    base_path, page_num = split_sub_uri(app, req_path)
    if page_num > 1:
        routes += find_routes(app.routes, base_path, True)

    if len(routes) == 0:
        raise RouteNotFoundError("Can't find route for: %s" % req_path)

    req_page = RequestedPage()
    for route, route_metadata, is_sub_page in routes:
        cur_path = base_path if is_sub_page else req_path
        try:
            qp = _get_requested_page_for_route(
                app, route, route_metadata, cur_path)
        except PageNotFoundError as nfe:
            req_page.not_found_errors.append(nfe)
            continue
        if qp is not None:
            req_page.qualified_page = qp
            req_page.req_path = cur_path
            if is_sub_page:
                req_page.page_num = page_num
            break
    return req_page
def _try_serve_page(self, app, environ, request):
    """ Render and serve the page matching the requested URL.

        Returns a Werkzeug-style `Response`; raises `RouteNotFoundError`,
        `SourceNotFoundError` or `NotFound` when nothing matches.
    """
    # Try to find what matches the requested URL.
    req_path, page_num = split_sub_uri(app, request.path)
    routes = find_routes(app.routes, req_path)
    if len(routes) == 0:
        raise RouteNotFoundError("Can't find route for: %s" % req_path)

    # Try each matching route in order; the first one that renders a page
    # wins. The first `NotFound` raised is remembered for re-raising.
    rendered_page = None
    first_not_found = None
    for route, route_metadata in routes:
        try:
            logger.debug("Trying to render match from source '%s'." %
                         route.source_name)
            rendered_page = self._try_render_page(
                app, route, route_metadata, page_num, req_path)
            if rendered_page is not None:
                break
        except NotFound as nfe:
            if first_not_found is None:
                first_not_found = nfe
    else:
        # `for`/`else`: the loop exhausted every route without breaking.
        raise SourceNotFoundError(
            "Can't find path for: %s (looked in: %s)" %
            (req_path, [r.source_name for r, _ in routes]))

    # If we haven't found any good match, raise whatever exception we
    # first got. Otherwise, raise a generic exception.
    # NOTE(review): given the `for`/`else` above, this branch looks
    # unreachable (the only `break` requires a non-None page) — confirm
    # the intended control flow before relying on it.
    if rendered_page is None:
        first_not_found = first_not_found or NotFound(
            "This page couldn't be found.")
        raise first_not_found

    # Start doing stuff.
    page = rendered_page.page
    rp_content = rendered_page.content

    # Profiling.
    if app.config.get('site/show_debug_info'):
        # NOTE(review): `time.clock()` was deprecated in Python 3.3 and
        # removed in 3.8; `app.env.start_time` presumably comes from the
        # same clock — confirm before porting to a newer Python.
        now_time = time.clock()
        timing_info = (
            '%8.1f ms' % ((now_time - app.env.start_time) * 1000.0))
        rp_content = rp_content.replace(
            '__PIECRUST_TIMING_INFORMATION__', timing_info)

    # Build the response.
    response = Response()

    # Conditional requests: a matching ETag short-circuits to 304 (never
    # in debug mode, where caching is disabled).
    etag = hashlib.md5(rp_content.encode('utf8')).hexdigest()
    if not app.debug and etag in request.if_none_match:
        response.status_code = 304
        return response

    response.set_etag(etag)
    response.content_md5 = etag

    cache_control = response.cache_control
    if app.debug:
        cache_control.no_cache = True
        cache_control.must_revalidate = True
    else:
        # The page config's cache time overrides the site-wide setting.
        cache_time = (page.config.get('cache_time') or
                      app.config.get('site/cache_time'))
        if cache_time:
            cache_control.public = True
            cache_control.max_age = cache_time

    # A bare content type (no '/') is a shorthand looked up in
    # `content_type_map`; otherwise it's used as the mimetype directly.
    content_type = page.config.get('content_type')
    if content_type and '/' not in content_type:
        mimetype = content_type_map.get(content_type, content_type)
    else:
        mimetype = content_type
    if mimetype:
        response.mimetype = mimetype

    # Optionally gzip the body; on any compression failure, fall back to
    # serving the uncompressed content.
    if ('gzip' in request.accept_encodings and
            app.config.get('site/enable_gzip')):
        try:
            with io.BytesIO() as gzip_buffer:
                with gzip.open(gzip_buffer, mode='wt',
                               encoding='utf8') as gzip_file:
                    gzip_file.write(rp_content)
                rp_content = gzip_buffer.getvalue()
                response.content_encoding = 'gzip'
        except Exception:
            logger.exception("Error compressing response, "
                             "falling back to uncompressed.")

    response.set_data(rp_content)
    return response
def _try_serve_page(self, app, environ, request):
    """ Render and serve the page matching the requested URL.

        Returns a Werkzeug-style `Response`; raises `RouteNotFoundError`,
        `SourceNotFoundError` or `NotFound` when nothing matches.
    """
    # Try to find what matches the requested URL.
    req_path, page_num = split_sub_uri(app, request.path)
    routes = find_routes(app.routes, req_path)
    if len(routes) == 0:
        raise RouteNotFoundError("Can't find route for: %s" % req_path)

    # Try each matching route in order; the first one that renders a page
    # wins. The first `NotFound` raised is remembered for re-raising.
    rendered_page = None
    first_not_found = None
    for route, route_metadata in routes:
        try:
            logger.debug("Trying to render match from source '%s'." %
                         route.source_name)
            rendered_page = self._try_render_page(app, route, route_metadata,
                                                  page_num, req_path)
            if rendered_page is not None:
                break
        except NotFound as nfe:
            if first_not_found is None:
                first_not_found = nfe
    else:
        # `for`/`else`: the loop exhausted every route without breaking.
        raise SourceNotFoundError(
            "Can't find path for: %s (looked in: %s)" %
            (req_path, [r.source_name for r, _ in routes]))

    # If we haven't found any good match, raise whatever exception we
    # first got. Otherwise, raise a generic exception.
    # NOTE(review): given the `for`/`else` above, this branch looks
    # unreachable (the only `break` requires a non-None page) — confirm
    # the intended control flow before relying on it.
    if rendered_page is None:
        first_not_found = first_not_found or NotFound(
            "This page couldn't be found.")
        raise first_not_found

    # Start doing stuff.
    page = rendered_page.page
    rp_content = rendered_page.content

    # Profiling.
    if app.config.get('site/show_debug_info'):
        # NOTE(review): `time.clock()` was deprecated in Python 3.3 and
        # removed in 3.8; `app.env.start_time` presumably comes from the
        # same clock — confirm before porting to a newer Python.
        now_time = time.clock()
        timing_info = ('%8.1f ms' %
                       ((now_time - app.env.start_time) * 1000.0))
        rp_content = rp_content.replace('__PIECRUST_TIMING_INFORMATION__',
                                        timing_info)

    # Build the response.
    response = Response()

    # Conditional requests: a matching ETag short-circuits to 304 (never
    # in debug mode, where caching is disabled).
    etag = hashlib.md5(rp_content.encode('utf8')).hexdigest()
    if not app.debug and etag in request.if_none_match:
        response.status_code = 304
        return response

    response.set_etag(etag)
    response.content_md5 = etag

    cache_control = response.cache_control
    if app.debug:
        cache_control.no_cache = True
        cache_control.must_revalidate = True
    else:
        # The page config's cache time overrides the site-wide setting.
        cache_time = (page.config.get('cache_time') or
                      app.config.get('site/cache_time'))
        if cache_time:
            cache_control.public = True
            cache_control.max_age = cache_time

    # A bare content type (no '/') is a shorthand looked up in
    # `content_type_map`; otherwise it's used as the mimetype directly.
    content_type = page.config.get('content_type')
    if content_type and '/' not in content_type:
        mimetype = content_type_map.get(content_type, content_type)
    else:
        mimetype = content_type
    if mimetype:
        response.mimetype = mimetype

    # Optionally gzip the body; on any compression failure, fall back to
    # serving the uncompressed content.
    if ('gzip' in request.accept_encodings and
            app.config.get('site/enable_gzip')):
        try:
            with io.BytesIO() as gzip_buffer:
                with gzip.open(gzip_buffer, mode='wt',
                               encoding='utf8') as gzip_file:
                    gzip_file.write(rp_content)
                rp_content = gzip_buffer.getvalue()
                response.content_encoding = 'gzip'
        except Exception:
            logger.exception("Error compressing response, "
                             "falling back to uncompressed.")

    response.set_data(rp_content)
    return response