def run(self, form):
    """Execute the selection query described by *form* and render TEMPLATE.

    Fetches the article count, mediums and a page of articles (and, when
    requested, date/medium aggregations), then renders TEMPLATE with a
    context built from ``locals()`` plus the project and user.

    WARNING: because the template context is ``locals()``, the local
    variable names in this function are part of the template contract —
    do not rename or remove them.
    """
    # NOTE(review): replaced private QueryDict._iterlists() with the public
    # equivalent lists() — confirm form.data is a Django QueryDict.
    form_data = json.dumps(dict(form.data.lists()))
    size = form.cleaned_data['size']
    offset = form.cleaned_data['offset']
    show_aggregation = form.cleaned_data['aggregations']

    with Timer() as timer:
        selection = SelectionSearch(form)

        self.monitor.update(1, "Executing query..")
        narticles = selection.get_count()

        # The progress messages below contain no {placeholders}; the former
        # .format(**locals()) calls were no-ops and have been dropped.
        self.monitor.update(39, "Fetching mediums..")
        mediums = selection.get_mediums()

        self.monitor.update(59, "Fetching articles..")
        articles = selection.get_articles(size=size, offset=offset)

        if show_aggregation:
            self.monitor.update(69, "Aggregating..")
            date_aggr = selection.get_aggregate(x_axis="date", y_axis="total", interval="day")
            medium_aggr = selection.get_aggregate(x_axis="medium", y_axis="date", interval="day")

        self.monitor.update(79, "Rendering results..")

        # Render while still inside the `with` so `timer` is a live local
        # captured by locals() for the template context.
        return TEMPLATE.render(
            Context(
                dict(locals(), **{
                    "project": self.project,
                    "user": self.user
                })))
def run(self, form):
    """Run the query in *form* and return the rendered TEMPLATE string.

    Collects count, mediums, a page of articles and (optionally) two
    aggregations, then renders TEMPLATE with ``locals()`` plus project/user.

    WARNING: the template context is ``locals()`` — local variable names
    here are part of the template contract; do not rename them.
    """
    # NOTE(review): QueryDict.lists() is the public API for the private
    # _iterlists() used previously — verify form.data is a Django QueryDict.
    form_data = json.dumps(dict(form.data.lists()))
    size = form.cleaned_data['size']
    offset = form.cleaned_data['offset']
    show_aggregation = form.cleaned_data['aggregations']

    with Timer() as timer:
        selection = SelectionSearch(form)

        self.monitor.update(1, "Executing query..")
        narticles = selection.get_count()

        # Placeholder-free messages: the redundant .format(**locals())
        # calls were removed (they returned the string unchanged).
        self.monitor.update(39, "Fetching mediums..")
        mediums = selection.get_mediums()

        self.monitor.update(59, "Fetching articles..")
        articles = selection.get_articles(size=size, offset=offset)

        if show_aggregation:
            self.monitor.update(69, "Aggregating..")
            date_aggr = selection.get_aggregate(x_axis="date", y_axis="total", interval="day")
            medium_aggr = selection.get_aggregate(x_axis="medium", y_axis="date", interval="day")

        self.monitor.update(79, "Rendering results..")

        # Return inside the `with` block so `timer` remains a captured local.
        return TEMPLATE.render(Context(dict(locals(), **{
            "project": self.project,
            "user": self.user
        })))
def run(self, form):
    """Compute (or fetch from cache) a sorted aggregation and serialise it.

    Returns CSV text when output_type is "text/csv"; otherwise a JSON dump
    of the aggregation, converted to a matrix first for the table output
    type. Results are cached via self.set_cache / self.get_cache.
    """
    selection = SelectionSearch(form)

    try:
        # Try to retrieve cache values
        primary, secondary, categories, aggregation = self.get_cache()
    except NotInCacheError:
        self.monitor.update(message="Executing query..")
        narticles = selection.get_count()
        # Explicit keyword instead of the fragile .format(**locals())
        self.monitor.update(
            message="Found {narticles} articles. Aggregating..".format(narticles=narticles))

        # Get aggregation
        order_by = form.cleaned_data["order_by"]
        primary = form.cleaned_data["primary"]
        secondary = form.cleaned_data["secondary"]
        categories = list(filter(None, [primary, secondary]))
        aggregation = list(selection.get_aggregate(categories, flat=False))
        aggregation = sorted_aggregation(*order_by, aggregation)
        self.set_cache([primary, secondary, categories, aggregation])
    else:
        self.monitor.update(2)

    # Matrices are very annoying to construct in javascript due to missing
    # hashtables. If the user requests a table, we thus first convert it to
    # a different format which should be easier to render.
    if form.cleaned_data["output_type"] == "text/json+aggregation+table":
        aggregation = aggregation_to_matrix(aggregation, categories)

    if form.cleaned_data["output_type"] == "text/csv":
        return aggregation_to_csv(aggregation, categories, [CountArticlesValue()])

    # "Serialising.." has no placeholders; the old .format(**locals())
    # call was a no-op and has been removed.
    self.monitor.update(message="Serialising..")
    return json.dumps(aggregation, cls=AggregationEncoder, check_circular=False)
def run(self, form):
    """Compute (or fetch from cache) an aggregation and serialise it.

    Returns CSV text when output_type is "text/csv"; otherwise a JSON dump
    of the aggregation, converted to a matrix first for the table output
    type. Results are cached via self.set_cache / self.get_cache.
    """
    selection = SelectionSearch(form)

    try:
        # Try to retrieve cache values
        primary, secondary, categories, aggregation = self.get_cache()
    except NotInCacheError:
        self.monitor.update(message="Executing query..")
        narticles = selection.get_count()
        # Explicit keyword instead of the fragile .format(**locals())
        self.monitor.update(
            message="Found {narticles} articles. Aggregating..".format(narticles=narticles))

        # Get aggregation
        primary = form.cleaned_data["primary"]
        secondary = form.cleaned_data["secondary"]
        categories = list(filter(None, [primary, secondary]))
        aggregation = list(selection.get_aggregate(categories, flat=False))
        self.set_cache([primary, secondary, categories, aggregation])
    else:
        self.monitor.update(2)

    # Matrices are very annoying to construct in javascript due to missing
    # hashtables. If the user requests a table, we thus first convert it to
    # a different format which should be easier to render.
    if form.cleaned_data["output_type"] == "text/json+aggregation+table":
        aggregation = aggregation_to_matrix(aggregation, categories)

    if form.cleaned_data["output_type"] == "text/csv":
        return aggregation_to_csv(aggregation, categories, [CountArticlesValue()])

    # "Serialising.." has no placeholders; the old .format(**locals())
    # call was a no-op and has been removed.
    self.monitor.update(message="Serialising..")
    return json.dumps(aggregation, cls=AggregationEncoder, check_circular=False)
def run(self, form):
    """Fetch article fragments (and optionally a date aggregation) and
    render TEMPLATE.

    Articles are fetched, replaced by highlighted fragments, and — when
    aggregations are requested — a per-interval date aggregation is
    computed with an interval chosen so that at most MAX_DATE_GROUPS
    groups result (falling back to "day" when the date range is unknown).

    WARNING: the template context is ``locals()`` plus project/user, so
    the local variable names in this function are part of the template
    contract — do not rename or remove them.
    """
    form_data = dict(form.data.lists())
    # Drop [None] singleton values so they serialise as empty lists.
    for value in form_data.values():
        if value == [None]:
            value.pop()
    form_data = json.dumps(form_data, indent=4)

    size = form.cleaned_data['size']
    offset = form.cleaned_data['offset']
    number_of_fragments = form.cleaned_data['number_of_fragments']
    fragment_size = form.cleaned_data['fragment_size']
    show_fields = sorted(form.cleaned_data['show_fields'])
    show_aggregation = form.cleaned_data['aggregations']

    with Timer() as timer:
        selection = SelectionSearch(form)

        self.monitor.update(message="Executing query..")
        narticles = selection.get_count()

        # Progress messages below have no {placeholders}; the redundant
        # .format(**locals()) calls were no-ops and have been removed.
        self.monitor.update(message="Fetching articles..")
        articles = selection.get_articles(size=size, offset=offset).as_dicts()
        articles = get_fragments(selection.get_query(),
                                 [a["id"] for a in articles],
                                 fragment_size, number_of_fragments)

        if show_aggregation:
            self.monitor.update(message="Aggregating..")

            statistics = selection.get_statistics()
            try:
                # Pick the coarsest interval that still yields fewer than
                # MAX_DATE_GROUPS groups over the selection's date range.
                delta_start_end = statistics.end_date - statistics.start_date
                interval = next(
                    interval for (interval, delta) in TIMEDELTAS
                    if MAX_DATE_GROUPS * delta > delta_start_end)
            except (StopIteration, TypeError):
                # No suitable interval, or start/end date missing (TypeError
                # from the subtraction): fall back to daily buckets.
                interval = "day"

            date_aggr = selection.get_aggregate(
                [IntervalCategory(interval)], objects=False)
        else:
            # Increase progress without doing anything (because we don't
            # have to aggregate)
            self.monitor.update()

        self.monitor.update(message="Rendering results..")

        # Render inside the `with` block so `timer` is a live local
        # captured by locals() for the template context.
        return TEMPLATE.render(
            Context(
                dict(locals(), **{
                    "project": self.project,
                    "user": self.user
                })))
def run(self, form):
    """Run an x/y aggregation over the selection and return it as JSON.

    Aggregates on the form's x_axis/y_axis/interval, optionally converts
    the result to values relative to the ``relative_to`` column, and
    serialises with AggregationEncoder.
    """
    self.monitor.update(1, "Executing query..")
    selection = SelectionSearch(form)
    narticles = selection.get_count()
    # Explicit keyword instead of the fragile .format(**locals())
    self.monitor.update(
        10, "Found {narticles} articles. Aggregating..".format(narticles=narticles))

    # Get aggregation
    aggregation = selection.get_aggregate(
        form.cleaned_data['x_axis'],
        form.cleaned_data['y_axis'],
        form.cleaned_data['interval']
    )

    # Convert to relative values when a reference column was chosen.
    column = form.cleaned_data['relative_to']
    if column is not None:
        aggregation = list(get_relative(aggregation, column))

    # "Serialising.." has no placeholders; the old .format(**locals())
    # call was a no-op and has been removed.
    self.monitor.update(60, "Serialising..")
    return json.dumps(list(aggregation), cls=AggregationEncoder, check_circular=False)
def run(self, form):
    """Run an x/y aggregation over the selection and return it as JSON.

    Aggregates on the form's x_axis/y_axis/interval, optionally rescales
    the result relative to the ``relative_to`` column, and serialises
    with AggregationEncoder.
    """
    self.monitor.update(1, "Executing query..")
    selection = SelectionSearch(form)
    narticles = selection.get_count()
    # Explicit keyword instead of the fragile .format(**locals())
    self.monitor.update(
        10, "Found {narticles} articles. Aggregating..".format(narticles=narticles))

    # Get aggregation
    aggregation = selection.get_aggregate(form.cleaned_data['x_axis'],
                                          form.cleaned_data['y_axis'],
                                          form.cleaned_data['interval'])

    # Convert to relative values when a reference column was chosen.
    column = form.cleaned_data['relative_to']
    if column is not None:
        aggregation = list(get_relative(aggregation, column))

    # "Serialising.." has no placeholders; the old .format(**locals())
    # call was a no-op and has been removed.
    self.monitor.update(60, "Serialising..")
    return json.dumps(list(aggregation), cls=AggregationEncoder, check_circular=False)