def cache_merged_console(self, localpath):
    """Merge and cache the console page for |localpath|; return its page data.

    Strips every query argument except those whitelisted in VARY_ARGS so the
    cache key only varies on parameters that actually affect the merged page.

    Returns the (possibly freshly merged) page data dict from the page cache.
    """
    # Remove any query args that we don't want to keep.
    VARY_ARGS = ['numrevs=']
    args = self.request.query_string.split('&')
    args = [arg for arg in args
            if any([arg.startswith(pre) for pre in VARY_ARGS])]
    if args:
        localpath += '?' + '&'.join(args)

    # See if we already have the appropriate page cached.
    unquoted_localpath = urllib.unquote(localpath)
    page_data = app.get_and_cache_pagedata(unquoted_localpath)
    # If we got the page and it was generated recently enough, just serve it.
    if page_data.get('content') and recent_page(page_data):
        return page_data

    # If they specified a number of revs, figure out how many they want.
    # Fix: normalize absent/invalid values to None — previously an absent
    # 'numrevs' passed the empty string through as num_rows_to_merge.
    num_revs = self.request.get('numrevs')
    if num_revs:
        num_revs = utils.clean_int(num_revs, -1)
        if num_revs <= 0:
            num_revs = None
    else:
        num_revs = None

    app.console_merger(unquoted_localpath, 'console/chromium', page_data,
                       num_rows_to_merge=num_revs)
    return app.get_and_cache_pagedata(unquoted_localpath)
def get(self):
    class TemplateData(object):
        """Snapshot of |rhs| trimmed to the newest |numrevs| rows, with
        unicode purged from each row so the template can render it."""

        def __init__(self, rhs, numrevs):
            self.ordered_rows = sorted(rhs.rows.keys(),
                                       reverse=True)[:numrevs]
            self.ordered_masters = rhs.ordered_masters
            self.ordered_categories = rhs.ordered_categories
            self.ordered_builders = rhs.ordered_builders
            self.status = rhs.status
            self.rows = dict((key, rhs.rows[key].purge_unicode())
                             for key in self.ordered_rows)
            self.category_count = sum(
                len(self.ordered_categories[master])
                for master in self.ordered_masters)

    # Honor a sane positive 'numrevs' query arg; otherwise show 25 rows.
    requested = self.request.get('numrevs')
    num_revs = utils.clean_int(requested, -1) if requested else None
    if not num_revs or num_revs <= 0:
        num_revs = 25

    # NOTE(review): 'data' is not defined in this method — presumably a
    # module-level global elsewhere in the file; confirm.
    out = TemplateData(data, num_revs)
    template = template_environment.get_template('merger_b.html')
    self.response.out.write(template.render(data=out))
def get_saisons(self):
    """Scrape the paginated seasons table and return a list of season dicts.

    Each dict has keys: 'name', 'id', 'current' (True when the row is
    flagged "en cours"), 'start' and 'end' (cleaned dates).
    """
    page = 1
    tree = self.__get_seasons(page)
    # Expected total number of seasons, parsed from the section header.
    snumber = utils.clean_float(
        tree.xpath(
            "string(//*[@id=\"content \"]/section/section/section/section/header/text())"
        ))
    seasons = []
    while len(seasons) < snumber:
        rows = tree.xpath(
            "//*[@id=\"content \"]/section/section/section/section/div/table/tbody/tr"
        )
        found_this_page = 0
        for row in rows:
            cells = row.xpath("td")
            # Only real data rows have exactly 4 cells.
            if len(cells) == 4:
                season = {}
                season["name"] = utils.clean_spaces(
                    cells[0].xpath("string(a/text())"))
                season["id"] = utils.clean_int(
                    cells[0].xpath("string(a/@href)"))
                season["current"] = utils.clean_spaces(
                    cells[0].xpath("string(a/span/text())")) == "en cours"
                season["start"] = utils.clean_date(cells[1].text)
                season["end"] = utils.clean_date(cells[2].text)
                seasons.append(season)
                found_this_page += 1
        # Fix: if a page yields no seasons (layout change, misparsed count),
        # len(seasons) can never reach snumber — break instead of looping
        # and fetching pages forever.
        if not found_this_page:
            break
        page += 1
        tree = self.__get_seasons(page)
    return seasons
def get(self):
    class TemplateData(object):
        """View-model copied from |rhs|, limited to the |numrevs| newest
        rows, with unicode purged so the template can render them."""

        def __init__(self, rhs, numrevs):
            all_rows = sorted(rhs.rows.keys(), reverse=True)
            self.ordered_rows = all_rows[:numrevs]
            self.ordered_masters = rhs.ordered_masters
            self.ordered_categories = rhs.ordered_categories
            self.ordered_builders = rhs.ordered_builders
            self.status = rhs.status
            self.rows = {}
            for key in self.ordered_rows:
                self.rows[key] = rhs.rows[key].purge_unicode()
            counts = [len(self.ordered_categories[m])
                      for m in self.ordered_masters]
            self.category_count = sum(counts)

    # Parse the optional 'numrevs' query arg; default to 25 rows when it
    # is absent or not a positive integer.
    num_revs = self.request.get('numrevs')
    if num_revs:
        num_revs = utils.clean_int(num_revs, -1)
    if not num_revs or num_revs <= 0:
        num_revs = 25

    # NOTE(review): 'data' is not a local here — presumably module-level;
    # confirm against the rest of the file.
    out = TemplateData(data, num_revs)
    template = template_environment.get_template('merger_b.html')
    self.response.out.write(template.render(data=out))
def _do_almost_everything(self, localpath):
    # Does almost all of the work except for writing the content to
    # the response. Returns the page_data, or None either if an error
    # occurred or if the processing of the request was fully handled
    # in this method (this is done for the console).
    unquoted_localpath = urllib.unquote(localpath)
    if self.request.path.endswith('/chromium/console'):
        # The merged console page is built (and cached) on demand.
        page_data = self.cache_merged_console(unquoted_localpath)
    else:
        page_data = app.get_and_cache_pagedata(unquoted_localpath)
    if page_data.get('content') is None:
        app.logging.error('Page %s not found.' % unquoted_localpath)
        self.error(404)  # file not found
        return None

    self.response.headers['Content-Type'] = app.path_to_mime_type(
        unquoted_localpath)
    if self.request.path.endswith('/console'):
        # Console pages are rendered here instead of by the caller.
        template_values = self.InitializeTemplate()
        template_values['body_class'] = page_data.get('body_class')
        template_values['content'] = page_data.get('content')
        template_values['offsite_base'] = page_data.get('offsite_base')
        template_values['title'] = page_data.get('title')
        if self.user:
            # Logged-in users may request auto-reload; -1 means "off",
            # any other value is clamped to at least 30 seconds.
            reloadarg = utils.clean_int(self.request.get('reload'), -1)
            if reloadarg != -1:
                reloadarg = max(reloadarg, 30)
            template_values['reloadarg'] = reloadarg
        else:
            # Make the Google Frontend capable of caching this request for 60
            # seconds.
            # TODO: Caching is not working yet.
            self.response.headers['Cache-Control'] = 'public, max-age=60'
            self.response.headers['Pragma'] = 'Public'
        self.DisplayTemplate('base.html', template_values)
        # Response fully written here; tell the caller there is nothing
        # left to emit.
        return None
    self.response.headers['Cache-Control'] = 'public, max-age=60'
    self.response.headers['Pragma'] = 'Public'
    return page_data
def cache_merged_console(self, localpath):
    """Merge and cache the console page for |localpath|; return its page data.

    Strips every query argument except those whitelisted in VARY_ARGS so the
    cache key only varies on parameters that actually affect the merged page.

    Returns the (possibly freshly merged) page data dict from the page cache.
    """
    # Remove any query args that we don't want to keep.
    VARY_ARGS = ['numrevs=']
    args = self.request.query_string.split('&')
    args = [arg for arg in args
            if any([arg.startswith(pre) for pre in VARY_ARGS])]
    if args:
        localpath += '?' + '&'.join(args)

    # See if we already have the appropriate page cached.
    unquoted_localpath = urllib.unquote(localpath)
    page_data = app.get_and_cache_pagedata(unquoted_localpath)
    # If we got the page and it was generated recently enough, just serve it.
    if page_data.get('content') and recent_page(page_data):
        return page_data

    # If they specified a number of revs, figure out how many they want.
    # Fix: normalize absent/invalid values to None — previously an absent
    # 'numrevs' passed the empty string through as num_rows_to_merge.
    num_revs = self.request.get('numrevs')
    if num_revs:
        num_revs = utils.clean_int(num_revs, -1)
        if num_revs <= 0:
            num_revs = None
    else:
        num_revs = None

    app.console_merger(unquoted_localpath, 'console/chromium', page_data,
                       num_rows_to_merge=num_revs)
    return app.get_and_cache_pagedata(unquoted_localpath)