def index_plot_data_cache(summary_form):
    """Render the summary plot-data template, caching the result.

    The rendered HTML is cached under the form's caching key for one hour;
    a cache hit returns the previously rendered output immediately.
    """
    key = summary_form.caching_key()
    cached = flask_cache.get(key)
    if cached is not None:
        return cached

    reports = []
    hist_table, hist_field = get_history_target(
        summary_form.resolution.data)
    component_ids = component_names_to_ids(summary_form.component_names.data)
    (since_date, to_date) = summary_form.daterange.data

    if summary_form.opsysreleases.data:
        opsysreleases = summary_form.opsysreleases.data
    else:
        # Default to all active (non-EOL) releases, oldest first.
        opsysreleases = (
            db.session.query(OpSysRelease)
            .filter(OpSysRelease.status != "EOL")
            .order_by(OpSysRelease.releasedate)
            .all())

    for osr in opsysreleases:
        counts = (
            db.session.query(hist_field.label("time"),
                             func.sum(hist_table.count).label("count"))
            .group_by(hist_field)
            .order_by(hist_field))
        counts = counts.filter(hist_table.opsysrelease_id == osr.id)

        if component_ids:
            counts = (counts.join(Report)
                      .filter(Report.component_id.in_(component_ids)))

        counts = (counts.filter(hist_field >= since_date)
                  .filter(hist_field <= to_date))
        counts = counts.all()

        # Pad the series with zero counts so every interval in the
        # requested range has a data point.
        dates = set(date_iterator(since_date,
                                  summary_form.resolution.data,
                                  to_date))
        for time, _ in counts:
            # discard() instead of remove(): a timestamp the DB returns
            # that date_iterator did not generate must not raise KeyError.
            dates.discard(time)
        for date in dates:
            counts.append((date, 0))
        counts = sorted(counts, key=itemgetter(0))
        reports.append((str(osr), counts))

    cached = render_template("summary/index_plot_data.html",
                             reports=reports,
                             resolution=summary_form.resolution.data[0])
    flask_cache.set(key, cached, timeout=60*60)
    return cached
def index():
    """Render the summary index page with per-release report-count series."""
    summary_form = SummaryForm(request.args)
    summary_form.components.choices = component_list()
    reports = []
    if summary_form.validate():
        hist_table, hist_field = get_history_target(
            summary_form.resolution.data)

        # Component choices arrive as comma-separated id strings.
        component_ids = []
        for comp in summary_form.components.data or []:
            component_ids += map(int, comp.split(','))

        (since_date, to_date) = summary_form.daterange.data

        if summary_form.opsysreleases.data:
            opsysreleases = summary_form.opsysreleases.data
        else:
            # Default to all active (non-EOL) releases, oldest first.
            opsysreleases = (
                db.session.query(OpSysRelease)
                .filter(OpSysRelease.status != "EOL")
                .order_by(OpSysRelease.releasedate)
                .all())

        for osr in opsysreleases:
            counts = (
                db.session.query(hist_field.label("time"),
                                 func.sum(hist_table.count).label("count"))
                .group_by(hist_field)
                .order_by(hist_field))
            counts = counts.filter(hist_table.opsysrelease_id == osr.id)

            if component_ids:
                counts = (counts.join(Report)
                          .filter(Report.component_id.in_(component_ids)))

            counts = (counts.filter(hist_field >= since_date)
                      .filter(hist_field <= to_date))
            counts = counts.all()

            # Pad the series with zero counts so every interval in the
            # requested range has a data point.
            dates = set(date_iterator(since_date,
                                      summary_form.resolution.data,
                                      to_date))
            for time, _ in counts:
                # discard() instead of remove(): a timestamp the DB returns
                # that date_iterator did not generate must not raise KeyError.
                dates.discard(time)
            for date in dates:
                counts.append((date, 0))
            counts = sorted(counts, key=itemgetter(0))
            reports.append((str(osr), counts))

    return render_template("summary/index.html",
                           summary_form=summary_form,
                           reports=reports,
                           resolution=summary_form.resolution.data[0])
def index_plot_data_cache(summary_form):
    """Render the summary plot-data template, caching the result.

    The rendered HTML is cached under the form's caching key for one hour;
    a cache hit returns the previously rendered output immediately.
    """
    key = summary_form.caching_key()
    cached = flask_cache.get(key)
    if cached is not None:
        return cached

    reports = []
    hist_table, hist_field = get_history_target(summary_form.resolution.data)
    component_ids = component_names_to_ids(summary_form.component_names.data)
    (since_date, to_date) = summary_form.daterange.data

    if summary_form.opsysreleases.data:
        opsysreleases = summary_form.opsysreleases.data
    else:
        # Default to all active (non-EOL) releases ordered by opsys/version.
        opsysreleases = (db.session.query(OpSysRelease).filter(
            OpSysRelease.status != "EOL").order_by(
                OpSysRelease.opsys_id).order_by(OpSysRelease.version).all())

    for osr in opsysreleases:
        counts = (db.session.query(
            hist_field.label("time"),
            func.sum(hist_table.count).label("count")).group_by(
                hist_field).order_by(hist_field))
        counts = counts.filter(hist_table.opsysrelease_id == osr.id)

        if component_ids:
            counts = (counts.join(Report).filter(
                Report.component_id.in_(component_ids)))

        counts = (counts.filter(hist_field >= since_date).filter(
            hist_field <= to_date))
        counts = counts.all()

        # Pad the series with zero counts so every interval in the
        # requested range has a data point.
        dates = set(
            date_iterator(since_date, summary_form.resolution.data, to_date))
        for time, _ in counts:
            # discard() instead of remove(): a timestamp the DB returns
            # that date_iterator did not generate must not raise KeyError.
            dates.discard(time)
        for date in dates:
            counts.append((date, 0))
        counts = sorted(counts, key=itemgetter(0))
        reports.append((str(osr), counts))

    cached = render_template("summary/index_plot_data.html",
                             reports=reports,
                             resolution=summary_form.resolution.data[0])
    flask_cache.set(key, cached, timeout=60 * 60)
    return cached
def compute_totals(summary_form):
    """Compute per-release report-count totals over the form's date range.

    Builds one SQL query that joins a generated date series against the
    summed history counts for each (non-EOL or explicitly selected) opsys
    release, so that dates with no reports appear with a count of 0.

    Returns a dict with keys:
        'by_opsys'  -- {"<opsys name> <version>": [(date, count), ...]}
        'from_date' -- normalized start of the range
        'to_date'   -- normalized end of the range
    """
    component_ids = component_names_to_ids(summary_form.component_names.data)
    from_date, to_date = summary_form.daterange.data
    resolution = summary_form.resolution.data
    table, date_column = get_history_target(summary_form.resolution.data)
    # Generate sequence of days/weeks/months in the specified range.
    from_date, to_date, delta = interval_delta(from_date, to_date, resolution)
    dates = (db.session.query(
        func.generate_series(from_date, to_date,
                             delta).label('date')).subquery())
    if summary_form.opsysreleases.data:
        # Query only requested opsys releases.
        releases = (db.session.query(OpSysRelease).filter(
            OpSysRelease.id.in_([
                osr.id for osr in summary_form.opsysreleases.data
            ])).subquery())
    else:
        # Query all active opsys releases.
        releases = (db.session.query(OpSysRelease).filter(
            OpSysRelease.status != 'EOL').subquery())
    # Sum daily counts for each date in the range and each opsys release.
    history = (db.session.query(
        date_column.label('date'),
        func.sum(table.count).label('count'),
        table.opsysrelease_id).filter(from_date <= date_column).filter(
            date_column <= to_date).group_by(table.opsysrelease_id,
                                             date_column))
    if component_ids:
        history = history.join(Report).filter(
            Report.component_id.in_(component_ids))
    history = history.subquery()
    # NOTE(review): the first outerjoin condition `dates.c.date ==
    # dates.c.date` is always true, i.e. it acts as a cross join of the
    # date series with the releases -- presumably intentional so every
    # (date, release) pair exists; confirm before changing.
    q = (db.session.query(
        dates.c.date,
        func.coalesce(history.c.count, 0).label('count'), OpSys.name,
        releases.c.version).outerjoin(
            releases, dates.c.date == dates.c.date).outerjoin(
                history, (history.c.date == dates.c.date) &
                (history.c.opsysrelease_id == releases.c.id)).join(
                    OpSys, OpSys.id == releases.c.opsys_id).order_by(
                        OpSys.id, releases.c.version, dates.c.date))
    by_opsys = dict()
    # groupby relies on the ORDER BY above keeping equal keys adjacent.
    groups = groupby(q.all(), lambda r: f'{r.name} {r.version}')
    for osr, rows in groups:
        counts = [(r.date, r.count) for r in rows]
        by_opsys[osr] = counts
    result = {'by_opsys': by_opsys, 'from_date': from_date,
              'to_date': to_date}
    return result
def get_problems(filter_form, pagination):
    """Query problems matching the filter form, limited by pagination."""
    osrs = filter_form.opsysreleases.data or []
    opsysrelease_ids = [osr.id for osr in osrs]
    component_ids = component_names_to_ids(filter_form.component_names.data)

    associate = filter_form.associate.data
    associate_id = associate.id if associate else None

    arch_ids = [arch.id for arch in (filter_form.arch.data or [])]
    types = filter_form.type.data or []
    exclude_taintflag_ids = [
        tf.id for tf in (filter_form.exclude_taintflags.data or [])
    ]

    since_date, to_date = filter_form.daterange.data

    # Pick a history resolution appropriate for the width of the range.
    span = to_date - since_date
    if span < datetime.timedelta(days=16):
        resolution = "daily"
    elif span < datetime.timedelta(weeks=10):
        resolution = "weekly"
    else:
        resolution = "monthly"
    hist_table, hist_field = get_history_target(resolution)

    probable_fix_osr_ids = [
        osr.id for osr in (filter_form.probable_fix_osrs.data or [])
    ]

    def rank_filter(query):
        # Restrict ranking to the selected date range.
        return (query.filter(hist_field >= since_date)
                .filter(hist_field <= to_date))

    return query_problems(
        db,
        hist_table,
        hist_field,
        opsysrelease_ids=opsysrelease_ids,
        component_ids=component_ids,
        associate_id=associate_id,
        arch_ids=arch_ids,
        exclude_taintflag_ids=exclude_taintflag_ids,
        types=types,
        rank_filter_fn=rank_filter,
        function_names=filter_form.function_names.data,
        binary_names=filter_form.binary_names.data,
        source_file_names=filter_form.source_file_names.data,
        since_version=filter_form.since_version.data,
        since_release=filter_form.since_release.data,
        to_version=filter_form.to_version.data,
        to_release=filter_form.to_release.data,
        probable_fix_osr_ids=probable_fix_osr_ids,
        bug_filter=filter_form.bug_filter.data,
        limit=pagination.limit,
        offset=pagination.offset,
        solution=filter_form.solution)
def get_problems(filter_form, pagination):
    """Query problems matching the filter form, limited by pagination.

    Extracts ids/values from the submitted filter form, picks a history
    resolution based on the width of the date range, and delegates the
    actual query to query_problems().
    """
    opsysrelease_ids = [
        osr.id for osr in (filter_form.opsysreleases.data or [])
    ]
    component_ids = component_names_to_ids(filter_form.component_names.data)
    if filter_form.associate.data:
        associate_id = filter_form.associate.data.id
    else:
        associate_id = None
    arch_ids = [arch.id for arch in (filter_form.arch.data or [])]
    types = filter_form.type.data or []
    exclude_taintflag_ids = [
        tf.id for tf in (filter_form.exclude_taintflags.data or [])
    ]
    (since_date, to_date) = filter_form.daterange.data
    # Choose a history granularity proportional to the range width.
    date_delta = to_date - since_date
    if date_delta < datetime.timedelta(days=16):
        resolution = "daily"
    elif date_delta < datetime.timedelta(weeks=10):
        resolution = "weekly"
    else:
        resolution = "monthly"
    hist_table, hist_field = get_history_target(resolution)
    probable_fix_osr_ids = [
        osr.id for osr in (filter_form.probable_fix_osrs.data or [])
    ]
    p = query_problems(
        db,
        hist_table,
        hist_field,
        opsysrelease_ids=opsysrelease_ids,
        component_ids=component_ids,
        associate_id=associate_id,
        arch_ids=arch_ids,
        exclude_taintflag_ids=exclude_taintflag_ids,
        types=types,
        # Ranking is restricted to the selected date range.
        rank_filter_fn=lambda query:
        (query.filter(hist_field >= since_date)
         .filter(hist_field <= to_date)),
        function_names=filter_form.function_names.data,
        binary_names=filter_form.binary_names.data,
        source_file_names=filter_form.source_file_names.data,
        since_version=filter_form.since_version.data,
        since_release=filter_form.since_release.data,
        to_version=filter_form.to_version.data,
        to_release=filter_form.to_release.data,
        probable_fix_osr_ids=probable_fix_osr_ids,
        bug_filter=filter_form.bug_filter.data,
        limit=pagination.limit,
        offset=pagination.offset)
    return p
def by_daterange(since, to):
    '''
    Render date-based report statistics including reports
    `since` date until `to` date.
    '''
    try:
        if isinstance(since, str) or isinstance(since, unicode):
            since = datetime.datetime.strptime(since, "%Y-%m-%d").date()
        if isinstance(to, str) or isinstance(to, unicode):
            to = datetime.datetime.strptime(to, "%Y-%m-%d").date()
    except ValueError:
        # Narrowed from a bare except: strptime raises ValueError on a
        # malformed date string; anything else should propagate.
        return abort(400)

    # Normalize so that since <= to.  Must be a simultaneous assignment:
    # the old sequential form (since = min(...); to = max(since, to))
    # overwrote `since` first and collapsed a reversed range to a single
    # day instead of swapping the bounds.
    since, to = min(since, to), max(since, to)

    history = 'daily'
    day_count = (to - since).days
    if day_count > 30:
        history = 'weekly'
    if day_count > 360:
        history = 'monthly'

    def date_filter(query):
        # Half-open interval [since, to).
        return query.filter(hist_field >= since).filter(hist_field < to)

    hist_table, hist_field = queries.get_history_target(history)
    total_query = queries.get_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []
    for release in queries.get_releases(db):
        release_sum = queries.get_history_sum(db,
                                              release.opsys.name,
                                              release.version,
                                              history=history)
        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue
        percentage = int(release_sum * 100.0 / total)
        comps = queries.get_report_count_by_component(db,
                                                      release.opsys.name,
                                                      release.version,
                                                      history=history)
        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))
        release_data.append({
            'release': release,
            'sum': release_sum,
            'comps': comp_data,
            'percentage': percentage,
        })

    data = {
        'since': since,
        'to': to,
        'total': total,
        'releases': sorted(release_data,
                           key=lambda x: x['sum'],
                           reverse=True),
    }
    if request_wants_json():
        return jsonify(data)
    return render_template("stats/by_date.html", **data)
def by_daterange(request, since, to,
                 template_name='stats/by_date.html',
                 extra_context=None):
    '''
    Render date-based report statistics including reports
    `since` date until `to` date.

    View accepts `template_name` to be used and
    `extra_context` to pass to it.
    '''
    db = pyfaf.storage.getDatabase()

    # Normalize so that since <= to.  Must be a simultaneous assignment:
    # the old sequential form (since = min(...); to = max(since, to))
    # overwrote `since` first and collapsed a reversed range to a single
    # day instead of swapping the bounds.
    since, to = min(since, to), max(since, to)

    history = 'daily'
    day_count = (to - since).days
    if day_count > 30:
        history = 'weekly'
    if day_count > 360:
        history = 'monthly'

    def date_filter(query):
        # Half-open interval [since, to).
        return query.filter(hist_field >= since).filter(hist_field < to)

    hist_table, hist_field = queries.get_history_target(history)
    total_query = queries.query_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []
    for release in queries.query_releases(db):
        release_sum = queries.query_history_sum(
            db, release.opsys.name, release.version, history=history)
        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue
        percentage = int(release_sum * 100.0 / total)
        comps = queries.query_report_count_per_component(
            db, release.opsys.name, release.version, history=history)
        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))
        release_data.append({
            'release': release,
            'sum': release_sum,
            'comps': comp_data,
            'percentage': percentage,
        })

    data = {
        'since': since,
        'to': to,
        'total': total,
        'releases': sorted(release_data,
                           key=lambda x: x['sum'],
                           reverse=True),
    }
    # Mutable default ({}) replaced with None to avoid a dict shared
    # across calls; behavior for callers passing a dict is unchanged.
    if extra_context:
        data.update(extra_context)
    return render_to_response(template_name,
                              data,
                              context_instance=RequestContext(request))
def components(self, cmdline, db, opsys, release):
    """ Get statistics for most crashing components """
    hist_table, hist_field = get_history_target(self.history_type)
    total = get_history_sum(db, opsys, release)
    comps = get_report_count_by_component(db, opsys, release)
    if cmdline.last:
        # Restrict both component counts and the total to the last N days.
        now = datetime.datetime.now()
        since = now - datetime.timedelta(days=int(cmdline.last))
        comps = comps.filter(hist_field >= since)
        total = total.filter(hist_field >= since)
    # NOTE(review): total_num may be None/0 if there is no history in the
    # range, which would break the percentage division below -- confirm.
    total_num = total.first()[0]
    limit = int(cmdline.count)
    limit_details = int(cmdline.detail_count)
    results = []
    for num, (comp, count) in enumerate(comps):
        if num >= limit:
            break
        # NOTE(review): compares `comp` itself against comps_filter here,
        # while trends() uses `comp.name` -- verify which is intended.
        if comp in self.comps_filter:
            continue
        reports = get_report_stats_by_component(db, comp, opsys, release,
                                                self.history_type)
        if cmdline.last:
            reports = reports.filter(hist_field >= since)
        # Collect up to limit_details distinct problems with their bug links.
        problem_ids = set()
        attached_reports = []
        for report, report_count in reports:
            if len(problem_ids) >= limit_details:
                break
            if not report.problem:
                continue
            if report.problem.id in problem_ids:
                continue
            if report.quality < 0 and not cmdline.include_low_quality:
                continue
            problem_ids.add(report.problem.id)
            problem_url = ""
            if webfaf_installed():
                problem_url = reverse("problems.item",
                                      problem_id=report.problem.id)
            attached_reports.append((problem_url, report.bugs))
        results.append((comp, count, attached_reports))
    if not results:
        return ""
    # Format the plain-text report.
    out = "Components:\n\n"
    for num, (comp, count, reports) in enumerate(results):
        if reports:
            out += ("{0}. {1} seen {2} times ({3:.0%} of all reports)\n".
                    format(num + 1, comp, count, count / float(total_num)))
            for problem_url, bugs in reports:
                if problem_url or bugs:
                    out += " {0} {1}\n".format(
                        problem_url, ", ".join(map(str, bugs)))
    return out
def trends(self, cmdline, db, opsys, release):
    """ Get trends for crashing components """
    hist_table, hist_field = get_history_target(self.history_type)
    num_days = 7
    if cmdline.last:
        num_days = int(cmdline.last)
    last_date = datetime.date.today() - datetime.timedelta(days=num_days)
    comp_detail = []

    comps = get_report_count_by_component(db, opsys, release)
    comps = comps.filter(hist_field >= last_date)

    for (comp, count) in comps:
        if comp.name in self.comps_filter:
            continue
        report_ids = (db.session.query(
            Report.id).join(OpSysComponent).filter(
                OpSysComponent.id == comp.id)).subquery()
        history = (db.session.query(
            hist_field,
            func.sum(hist_table.count).label("count")).filter(
                hist_table.report_id.in_(report_ids)).filter(
                    hist_field >= last_date).filter(
                        hist_field < datetime.date.today()).group_by(
                            hist_field).order_by(hist_field).all())
        if len(history) < 2:
            continue

        hist_dict = collections.defaultdict(int)
        for key, value in history:
            hist_dict[key] = value

        # Compute linear regression y = b*x + a over the day series.
        xsum, ysum, xysum, xxsum, yysum = 0., 0., 0., 0., 0.
        for x, day in enumerate(prev_days(num_days)):
            y = hist_dict[day]
            xsum += x
            ysum += y
            xysum += x * y
            xxsum += x * x
            yysum += y * y

        # Least-squares slope and intercept.  Use true division: the
        # previous floor division (// and //=) on these float accumulators
        # truncated the slope/intercept and produced wrong trend ranking.
        b = xysum - xsum * ysum / num_days
        b /= xxsum - xsum**2 / num_days
        a = ysum - b * xsum
        a /= num_days

        first_day = hist_dict[prev_days(num_days)[0]]
        last_day = hist_dict[prev_days(num_days)[-1]]

        Comp = collections.namedtuple("Component", "name jump a b history")
        comp_tuple = Comp(name=comp.name,
                          jump=last_day - first_day,
                          a=a,
                          b=b,
                          history=hist_dict)
        comp_detail.append(comp_tuple)

    # Sort components by regression slope: steepest growth first.
    trend_data = sorted(comp_detail, key=lambda x: x.b, reverse=True)
    if not trend_data:
        return ""

    # render trend data
    render_fn = self._trends_render
    if cmdline.graph:
        render_fn = self._trends_render_with_graph

    out = "Most destabilized components:\n\n"
    out += render_fn(trend_data, cmdline.count, num_days)
    out += "\n"
    out += "Most stabilized components:\n\n"
    trend_data.reverse()
    out += render_fn(trend_data, cmdline.count, num_days)
    out += "\n"
    return out
def by_daterange(request, since, to,
                 template_name='stats/by_date.html',
                 extra_context=None):
    '''
    Render date-based report statistics including reports
    `since` date until `to` date.

    View accepts `template_name` to be used and
    `extra_context` to pass to it.
    '''
    if isinstance(since, str) or isinstance(since, unicode):
        since = datetime.datetime.strptime(since, "%Y-%m-%d").date()
    if isinstance(to, str) or isinstance(to, unicode):
        to = datetime.datetime.strptime(to, "%Y-%m-%d").date()

    db = pyfaf.storage.getDatabase()

    # Normalize so that since <= to.  Must be a simultaneous assignment:
    # the old sequential form (since = min(...); to = max(since, to))
    # overwrote `since` first and collapsed a reversed range to a single
    # day instead of swapping the bounds.
    since, to = min(since, to), max(since, to)

    history = 'daily'
    day_count = (to - since).days
    if day_count > 30:
        history = 'weekly'
    if day_count > 360:
        history = 'monthly'

    def date_filter(query):
        # Half-open interval [since, to).
        return query.filter(hist_field >= since).filter(hist_field < to)

    hist_table, hist_field = queries.get_history_target(history)
    total_query = queries.get_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []
    for release in queries.get_releases(db):
        release_sum = queries.get_history_sum(db,
                                              release.opsys.name,
                                              release.version,
                                              history=history)
        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue
        percentage = int(release_sum * 100.0 / total)
        comps = queries.get_report_count_by_component(db,
                                                      release.opsys.name,
                                                      release.version,
                                                      history=history)
        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))
        release_data.append({
            'release': release,
            'sum': release_sum,
            'comps': comp_data,
            'percentage': percentage,
        })

    data = {
        'since': since,
        'to': to,
        'total': total,
        'releases': sorted(release_data,
                           key=lambda x: x['sum'],
                           reverse=True),
    }
    # Mutable default ({}) replaced with None to avoid a dict shared
    # across calls; behavior for callers passing a dict is unchanged.
    if extra_context:
        data.update(extra_context)

    # Default to "" -- META.get() may return None, and `in None` raises
    # TypeError when the client sends no Accept header.
    if "application/json" in request.META.get("HTTP_ACCEPT", ""):
        return HttpResponse(json.dumps(data, cls=WebfafJSONEncoder),
                            status=200,
                            mimetype="application/json")
    else:
        return render_to_response(template_name,
                                  data,
                                  context_instance=RequestContext(request))
def components(self, cmdline, db, opsys, release):
    """ Get statistics for most crashing components """
    hist_table, hist_field = get_history_target(self.history_type)
    total = get_history_sum(db, opsys, release)
    comps = get_report_count_by_component(db, opsys, release)
    if cmdline.last:
        # Restrict both component counts and the total to the last N days.
        now = datetime.datetime.now()
        since = now - datetime.timedelta(days=int(cmdline.last))
        comps = comps.filter(hist_field >= since)
        total = total.filter(hist_field >= since)
    # NOTE(review): total_num may be None/0 if there is no history in the
    # range, which would break the percentage division below -- confirm.
    total_num = total.first()[0]
    limit = int(cmdline.count)
    limit_details = int(cmdline.detail_count)
    results = []
    for num, (comp, count) in enumerate(comps):
        if num >= limit:
            break
        # NOTE(review): compares `comp` itself against comps_filter here,
        # while trends() uses `comp.name` -- verify which is intended.
        if comp in self.comps_filter:
            continue
        reports = get_report_stats_by_component(db, comp, opsys, release,
                                                self.history_type)
        if cmdline.last:
            reports = reports.filter(hist_field >= since)
        # Collect up to limit_details distinct problems with their bug links.
        problem_ids = set()
        attached_reports = []
        for report, report_count in reports:
            if len(problem_ids) >= limit_details:
                break
            if not report.problem:
                continue
            if report.problem.id in problem_ids:
                continue
            if report.quality < 0 and not cmdline.include_low_quality:
                continue
            problem_ids.add(report.problem.id)
            problem_url = ""
            if webfaf_installed():
                problem_url = reverse("problems.item",
                                      problem_id=report.problem.id)
            attached_reports.append((problem_url, report.bugs))
        results.append((comp, count, attached_reports))
    if not results:
        return ""
    # Format the plain-text report.
    out = "Components:\n\n"
    for num, (comp, count, reports) in enumerate(results):
        if reports:
            out += ("{0}. {1} seen {2} times ({3:.0%} of all reports)\n"
                    .format(num + 1, comp, count, count / float(total_num)))
            for problem_url, bugs in reports:
                if problem_url or bugs:
                    out += " {0} {1}\n".format(
                        problem_url, ", ".join(map(str, bugs)))
    return out
def trends(self, cmdline, db, opsys, release):
    """ Get trends for crashing components """
    hist_table, hist_field = get_history_target(self.history_type)
    num_days = 7
    if cmdline.last:
        num_days = int(cmdline.last)
    last_date = datetime.date.today() - datetime.timedelta(days=num_days)
    comp_detail = []

    comps = get_report_count_by_component(db, opsys, release)
    comps = comps.filter(hist_field >= last_date)

    for (comp, count) in comps:
        if comp.name in self.comps_filter:
            continue
        report_ids = (db.session.query(Report.id)
                      .join(OpSysComponent)
                      .filter(OpSysComponent.id == comp.id)).subquery()
        history = (db.session.query(hist_field,
                                    func.sum(hist_table.count)
                                    .label("count"))
                   .filter(hist_table.report_id.in_(report_ids))
                   .filter(hist_field >= last_date)
                   .filter(hist_field < datetime.date.today())
                   .group_by(hist_field)
                   .order_by(hist_field).all())
        if len(history) < 2:
            continue

        hist_dict = collections.defaultdict(int)
        for key, value in history:
            hist_dict[key] = value

        # Compute linear regression y = b*x + a over the day series.
        xsum, ysum, xysum, xxsum, yysum = 0., 0., 0., 0., 0.
        for x, day in enumerate(prev_days(num_days)):
            y = hist_dict[day]
            xsum += x
            ysum += y
            xysum += x * y
            xxsum += x * x
            yysum += y * y

        # Least-squares slope and intercept.  Use true division: the
        # previous floor division (// and //=) on these float accumulators
        # truncated the slope/intercept and produced wrong trend ranking.
        b = xysum - xsum * ysum / num_days
        b /= xxsum - xsum ** 2 / num_days
        a = ysum - b * xsum
        a /= num_days

        first_day = hist_dict[prev_days(num_days)[0]]
        last_day = hist_dict[prev_days(num_days)[-1]]

        Comp = collections.namedtuple("Component", "name jump a b history")
        comp_tuple = Comp(
            name=comp.name,
            jump=last_day - first_day,
            a=a,
            b=b,
            history=hist_dict)
        comp_detail.append(comp_tuple)

    # Sort components by regression slope: steepest growth first.
    trend_data = sorted(comp_detail, key=lambda x: x.b, reverse=True)
    if not trend_data:
        return ""

    # render trend data
    render_fn = self._trends_render
    if cmdline.graph:
        render_fn = self._trends_render_with_graph

    out = "Most destabilized components:\n\n"
    out += render_fn(trend_data, cmdline.count, num_days)
    out += "\n"
    out += "Most stabilized components:\n\n"
    trend_data.reverse()
    out += render_fn(trend_data, cmdline.count, num_days)
    out += "\n"
    return out
def by_daterange(since, to):
    """
    Render date-based report statistics including reports
    `since` date until `to` date.
    """
    try:
        if isinstance(since, str) or isinstance(since, unicode):
            since = datetime.datetime.strptime(since, "%Y-%m-%d").date()
        if isinstance(to, str) or isinstance(to, unicode):
            to = datetime.datetime.strptime(to, "%Y-%m-%d").date()
    except ValueError:
        # Narrowed from a bare except: strptime raises ValueError on a
        # malformed date string; anything else should propagate.
        return abort(400)

    # Normalize so that since <= to.  Must be a simultaneous assignment:
    # the old sequential form (since = min(...); to = max(since, to))
    # overwrote `since` first and collapsed a reversed range to a single
    # day instead of swapping the bounds.
    since, to = min(since, to), max(since, to)

    history = "daily"
    day_count = (to - since).days
    if day_count > 30:
        history = "weekly"
    if day_count > 360:
        history = "monthly"

    def date_filter(query):
        # Half-open interval [since, to).
        return query.filter(hist_field >= since).filter(hist_field < to)

    hist_table, hist_field = queries.get_history_target(history)
    total_query = queries.get_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []
    for release in queries.get_releases(db):
        release_sum = queries.get_history_sum(db,
                                              release.opsys.name,
                                              release.version,
                                              history=history)
        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue
        percentage = int(release_sum * 100.0 / total)
        comps = queries.get_report_count_by_component(db,
                                                      release.opsys.name,
                                                      release.version,
                                                      history=history)
        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))
        release_data.append({"release": release,
                             "sum": release_sum,
                             "comps": comp_data,
                             "percentage": percentage})

    data = {
        "since": since,
        "to": to,
        "total": total,
        "releases": sorted(release_data,
                           key=lambda x: x["sum"],
                           reverse=True),
    }
    if request_wants_json():
        return jsonify(data)
    return render_template("stats/by_date.html", **data)
def by_daterange(request, since, to,
                 template_name='stats/by_date.html',
                 extra_context=None):
    '''
    Render date-based report statistics including reports
    `since` date until `to` date.

    View accepts `template_name` to be used and
    `extra_context` to pass to it.
    '''
    if isinstance(since, str) or isinstance(since, unicode):
        since = datetime.datetime.strptime(since, "%Y-%m-%d").date()
    if isinstance(to, str) or isinstance(to, unicode):
        to = datetime.datetime.strptime(to, "%Y-%m-%d").date()

    db = pyfaf.storage.getDatabase()

    # Normalize so that since <= to.  Must be a simultaneous assignment:
    # the old sequential form (since = min(...); to = max(since, to))
    # overwrote `since` first and collapsed a reversed range to a single
    # day instead of swapping the bounds.
    since, to = min(since, to), max(since, to)

    history = 'daily'
    day_count = (to - since).days
    if day_count > 30:
        history = 'weekly'
    if day_count > 360:
        history = 'monthly'

    def date_filter(query):
        # Half-open interval [since, to).
        return query.filter(hist_field >= since).filter(hist_field < to)

    hist_table, hist_field = queries.get_history_target(history)
    total_query = queries.get_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []
    for release in queries.get_releases(db):
        release_sum = queries.get_history_sum(
            db, release.opsys.name, release.version, history=history)
        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue
        percentage = int(release_sum * 100.0 / total)
        comps = queries.get_report_count_by_component(
            db, release.opsys.name, release.version, history=history)
        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))
        release_data.append({
            'release': release,
            'sum': release_sum,
            'comps': comp_data,
            'percentage': percentage,
        })

    data = {
        'since': since,
        'to': to,
        'total': total,
        'releases': sorted(release_data,
                           key=lambda x: x['sum'],
                           reverse=True),
    }
    # Mutable default ({}) replaced with None to avoid a dict shared
    # across calls; behavior for callers passing a dict is unchanged.
    if extra_context:
        data.update(extra_context)

    # Default to "" -- META.get() may return None, and `in None` raises
    # TypeError when the client sends no Accept header.
    if "application/json" in request.META.get("HTTP_ACCEPT", ""):
        return HttpResponse(json.dumps(data, cls=WebfafJSONEncoder),
                            status=200,
                            mimetype="application/json")
    else:
        return render_to_response(template_name,
                                  data,
                                  context_instance=RequestContext(request))
def by_daterange(since, to):
    '''
    Render date-based report statistics including reports
    `since` date until `to` date.
    '''
    try:
        if isinstance(since, str):
            since = datetime.datetime.strptime(since, "%Y-%m-%d").date()
        if isinstance(to, str):
            to = datetime.datetime.strptime(to, "%Y-%m-%d").date()
    except ValueError:
        # Narrowed from a bare except: strptime raises ValueError on a
        # malformed date string; anything else should propagate.
        return abort(400)

    # Normalize so that since <= to.  Must be a simultaneous assignment:
    # the old sequential form (since = min(...); to = max(since, to))
    # overwrote `since` first and collapsed a reversed range to a single
    # day instead of swapping the bounds.
    since, to = min(since, to), max(since, to)

    history = 'daily'
    day_count = (to - since).days
    if day_count > 30:
        history = 'weekly'
    if day_count > 360:
        history = 'monthly'

    def date_filter(query):
        # Half-open interval [since, to).
        return query.filter(hist_field >= since).filter(hist_field < to)

    _, hist_field = queries.get_history_target(history)
    total_query = queries.get_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []
    for release in queries.get_releases(db):
        release_sum = queries.get_history_sum(
            db, release.opsys.name, release.version, history=history)
        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue
        percentage = int(release_sum * 100.0 / total)
        comps = queries.get_report_count_by_component(
            db, release.opsys.name, release.version, history=history)
        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))
        release_data.append({
            'release': release,
            'sum': release_sum,
            'comps': comp_data,
            'percentage': percentage,
        })

    data = {
        'since': since,
        'to': to,
        'total': total,
        'releases': sorted(release_data,
                           key=lambda x: x['sum'],
                           reverse=True),
    }
    if request_wants_json():
        return jsonify(data)
    return render_template("stats/by_date.html", **data)