def data_last_updated(self, update=False, view=None, pval=None):
    """
    Return the datetime the data for this widget was last updated.

    update: if True, the cached value is dropped and recalculated.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval = resolve_pval(self.parametisation, view=view, pval=pval)
    if pval:
        # Parametised widget: cache is a dict keyed by parameter-value id.
        if self._lud_cache and self._lud_cache.get(pval.id) and not update:
            return self._lud_cache[pval.id]
        if not self._lud_cache:
            self._lud_cache = {}
        latest = None
        # Late model lookup via the app registry (avoids a circular import).
        Statistic = apps.get_app_config("widget_def").get_model("Statistic")
        # The widget's last-update time is the max over its statistics.
        for s in Statistic.objects.filter(tile__widget=self):
            slu = s.data_last_updated(update, view, pval)
            if latest is None:
                latest = slu
            elif slu and slu > latest:
                latest = slu
        self._lud_cache[pval.id] = latest
        return self._lud_cache[pval.id]
    else:
        # Unparametised widget: cache is a single value.
        if self._lud_cache and not update:
            return self._lud_cache
        # Take the max of last_updated over statistic data, list data,
        # graph data and any geo datasets attached to the widget's tiles.
        lud_statdata = StatisticData.objects.filter(statistic__tile__widget=self).aggregate(lud=models.Max('last_updated'))['lud']
        lud_listdata = StatisticListItem.objects.filter(statistic__tile__widget=self).aggregate(lud=models.Max('last_updated'))['lud']
        lud_graphdata = GraphData.objects.filter(graph__tile__widget=self).aggregate(lud=models.Max("last_updated"))["lud"]
        luds_mapdata = [None]
        for t in self.tiledefinition_set.all():
            for ds in t.geo_datasets.all():
                luds_mapdata.append(ds.data_last_updated(update))
        self._lud_cache = max_with_nulls(lud_statdata, lud_listdata, lud_graphdata, *luds_mapdata)
        return self._lud_cache
def data_last_updated(self, update=False, view=None, pval=None):
    """
    Return when the data for this statistic was last updated.

    update: if True, the cached value is dropped and recalculated.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval=resolve_pval(self.tile.widget.parametisation,view=view,pval=pval)
    if pval:
        # Parametised: cache is a dict keyed by parameter-value id.
        if self._lud_cache and self._lud_cache.get(pval.id) and not update:
            return self._lud_cache[pval.id]
        if not self._lud_cache:
            self._lud_cache = {}
        if self.is_data_list():
            # List statistics: max last_updated over the list items.
            self._lud_cache[pval.id] = StatisticListItem.objects.filter(statistic=self,param_value=pval).aggregate(lud=models.Max('last_updated'))['lud']
        else:
            # Scalar statistics: single StatisticData row (None if absent).
            try:
                self._lud_cache[pval.id] = StatisticData.objects.get(statistic=self,param_value=pval).last_updated
            except StatisticData.DoesNotExist:
                self._lud_cache[pval.id] = None
        return self._lud_cache[pval.id]
    else:
        # Unparametised: cache is a single value.
        if self._lud_cache and not update:
            return self._lud_cache
        if self.is_data_list():
            self._lud_cache = StatisticListItem.objects.filter(statistic=self).aggregate(lud=models.Max('last_updated'))['lud']
        else:
            try:
                self._lud_cache = StatisticData.objects.get(statistic=self).last_updated
            except StatisticData.DoesNotExist:
                self._lud_cache = None
        return self._lud_cache
def data_last_updated(self, update=False, view=None, pval=None):
    """
    Return the date the data for this widget was last updated.

    update: If true, the objects last-updated cache is flushed and recalculated.
    view, pval: One of these must be supplied for a parametised widget
                (unless a non-None wd has been passed in)
    """
    pval = resolve_pval(self.parametisation, view=view, pval=pval)
    if pval:
        # Parametised widget: cache is a dict keyed by parameter-value id.
        if self._lud_cache and self._lud_cache.get(pval.id) and not update:
            return self._lud_cache[pval.id]
        if not self._lud_cache:
            self._lud_cache = {}
        latest = None
        # Late model lookup via the app registry (avoids a circular import).
        Statistic = apps.get_app_config("widget_def").get_model("Statistic")
        # The widget's last-update time is the max over its statistics.
        for s in Statistic.objects.filter(tile__widget=self):
            slu = s.data_last_updated(update, view, pval)
            if latest is None:
                latest = slu
            elif slu and slu > latest:
                latest = slu
        self._lud_cache[pval.id] = latest
        return self._lud_cache[pval.id]
    else:
        # Unparametised widget: cache is a single value.
        if self._lud_cache and not update:
            return self._lud_cache
        # Take the max of last_updated over statistic data, list data,
        # graph data and any geo datasets attached to the widget's tiles.
        lud_statdata = StatisticData.objects.filter(statistic__tile__widget=self).aggregate(lud=models.Max('last_updated'))['lud']
        lud_listdata = StatisticListItem.objects.filter(statistic__tile__widget=self).aggregate(lud=models.Max('last_updated'))['lud']
        lud_graphdata = GraphData.objects.filter(graph__tile__widget=self).aggregate(lud=models.Max("last_updated"))["lud"]
        luds_mapdata = [None]
        for t in self.tiledefinition_set.all():
            for ds in t.geo_datasets.all():
                luds_mapdata.append(ds.data_last_updated(update))
        self._lud_cache = max_with_nulls(lud_statdata, lud_listdata, lud_graphdata, *luds_mapdata)
        return self._lud_cache
def data_last_updated(self, update=False, view=None, pval=None):
    """
    Return when the data for this statistic was last updated (with parametisation if necessary).

    update: If true and the last_updated value is cached, then the cached value is dropped and recalculated.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval=resolve_pval(self.tile.widget.parametisation,view=view,pval=pval)
    if pval:
        # Parametised: cache is a dict keyed by parameter-value id.
        if self._lud_cache and self._lud_cache.get(pval.id) and not update:
            return self._lud_cache[pval.id]
        if not self._lud_cache:
            self._lud_cache = {}
        if self.is_data_list():
            # List statistics: max last_updated over the list items.
            self._lud_cache[pval.id] = StatisticListItem.objects.filter(statistic=self,param_value=pval).aggregate(lud=models.Max('last_updated'))['lud']
        else:
            # Scalar statistics: single StatisticData row (None if absent).
            try:
                self._lud_cache[pval.id] = StatisticData.objects.get(statistic=self,param_value=pval).last_updated
            except StatisticData.DoesNotExist:
                self._lud_cache[pval.id] = None
        return self._lud_cache[pval.id]
    else:
        # Unparametised: cache is a single value.
        if self._lud_cache and not update:
            return self._lud_cache
        if self.is_data_list():
            self._lud_cache = StatisticListItem.objects.filter(statistic=self).aggregate(lud=models.Max('last_updated'))['lud']
        else:
            try:
                self._lud_cache = StatisticData.objects.get(statistic=self).last_updated
            except StatisticData.DoesNotExist:
                self._lud_cache = None
        return self._lud_cache
def api_get_graph_data(widget, view=None, pval=None, verbose=False):
    """Return a dict mapping each tile url of the widget to that graph's json data."""
    pval = resolve_pval(widget.parametisation, view=view, pval=pval)
    return {
        g.tile.url: api_get_single_graph_data(g, view, pval=pval, verbose=verbose)
        for g in GraphDefinition.objects.filter(tile__widget=widget)
    }
def csv(self, writer, view=None):
    """Write out a CSV for the dataset to the provided writer (e.g. an HttpResponse object)"""
    pval = resolve_pval(self.widget.parametisation, view=view)
    writer.write(self.csv_header(view))
    # Restrict to the resolved parameter value when parametised.
    if pval:
        records = self.rawdatarecord_set.filter(param_value=pval)
    else:
        records = self.rawdatarecord_set.all()
    for record in records:
        writer.write(record.csv)
def json(self, pval=None, view=None):
    """
    Yield a json-serialisable dump of each record in this dataset.

    view, pval: one of these should be supplied for a parametised widget.
    """
    pval = resolve_pval(self.widget.parametisation, view=view, pval=pval)
    if pval:
        # BUG FIX: QuerySet.all() accepts no keyword arguments, so
        # .all(param_value=pval) raised TypeError; use .filter() to
        # restrict records to the resolved parameter value.
        for rec in self.rawdatarecord_set.filter(param_value=pval):
            yield rec.json()
    else:
        for rec in self.rawdatarecord_set.all():
            yield rec.json()
    # (Also removed an unused "result = []" local: this is a generator
    # and the accumulator was never read.)
def csv(self, writer, view=None):
    """Write out a CSV for the dataset to the provided writer (e.g. an HttpResponse object)"""
    pval = resolve_pval(self.widget.parametisation, view=view)
    writer.write(self.csv_header(view))
    if pval:
        # BUG FIX: QuerySet.all() accepts no keyword arguments, so
        # .all(param_value=pval) raised TypeError; use .filter(), as the
        # sibling csv() implementation does.
        for rec in self.rawdatarecord_set.filter(param_value=pval):
            writer.write(rec.csv)
    else:
        for rec in self.rawdatarecord_set.all():
            writer.write(rec.csv)
def api_get_raw_data(widget, request, rds_url, view=None, pval=None):
    """Serve a widget's raw data set, as CSV by default or json on request."""
    try:
        dataset = RawDataSet.objects.get(widget=widget, url=rds_url)
    except RawDataSet.DoesNotExist:
        return HttpResponseNotFound("This Raw Data Set does not exist")
    pval = resolve_pval(dataset.widget.parametisation, view=view, pval=pval)
    requested_format = request.GET.get("format", "csv")
    if requested_format != "csv":
        return dataset.json(pval=pval)
    # CSV: stream the dataset into an attachment response.
    response = HttpResponse()
    response['content-type'] = 'application/csv'
    response['content-disposition'] = 'attachment; filename=%s' % dataset.filename
    dataset.csv(response, view=view)
    return response
def data_last_updated(self, update=False, view=None, pval=None):
    """
    Return when the data for this graph was last updated.

    update: if True, the cached value is dropped and recalculated.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval = resolve_pval(self.widget().parametisation, view=view, pval=pval)
    if pval:
        # Parametised: cache is a dict keyed by parameter-value id.
        if self._lud_cache and self._lud_cache.get(pval.id) and not update:
            return self._lud_cache[pval.id]
        if not self._lud_cache:
            self._lud_cache = {}
        self._lud_cache[pval.id] = GraphData.objects.filter(graph=self,param_value=pval).aggregate(lud=models.Max("last_updated"))["lud"]
        return self._lud_cache[pval.id]
    else:
        # Unparametised: cache is a single value; only consider rows with
        # no parameter value attached.
        if self._lud_cache and not update:
            return self._lud_cache
        self._lud_cache = GraphData.objects.filter(graph=self,param_value__isnull=True).aggregate(lud=models.Max("last_updated"))["lud"]
        return self._lud_cache
def widget_data(self, view=None, pval=None):
    """
    Return the WidgetData object for this widget, or None if there is none.

    view, pval: one of these must be supplied for a parametised widget.
    Falls back to the unparametised record (param_value is null) when no
    record exists for the resolved parameter value.
    """
    try:
        pval = resolve_pval(self.parametisation, view=view, pval=pval)
    except ParametisationException:
        pval = None
    if pval:
        try:
            return WidgetData.objects.get(widget=self, param_value=pval)
        except WidgetData.DoesNotExist:
            pass
    try:
        return WidgetData.objects.get(widget=self, param_value__isnull=True)
    except WidgetData.DoesNotExist:
        return None
def get_data(self, view=None, pval=None):
    """Return this statistic's data (parametised if necessary): a
    StatisticListItem queryset if is_data_list(), else a single
    StatisticData object or None."""
    pval = resolve_pval(self.tile.widget.parametisation, view=view, pval=pval)
    # One lookup dict serves both the list and scalar cases.
    if pval:
        lookup = {"param_value": pval}
    else:
        lookup = {"param_value__isnull": True}
    if self.is_data_list():
        return StatisticListItem.objects.filter(statistic=self).filter(**lookup)
    try:
        return StatisticData.objects.get(statistic=self, **lookup)
    except StatisticData.DoesNotExist:
        return None
def data_last_updated(self, update=False, view=None, pval=None):
    """
    Return when the data for this graph was last updated (with parametisation if necessary).

    update: If true and the last_updated value is cached, then the cached value is dropped and recalculated.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval = resolve_pval(self.widget().parametisation, view=view, pval=pval)
    if pval:
        # Parametised: cache is a dict keyed by parameter-value id.
        if self._lud_cache and self._lud_cache.get(pval.id) and not update:
            return self._lud_cache[pval.id]
        if not self._lud_cache:
            self._lud_cache = {}
        self._lud_cache[pval.id] = GraphData.objects.filter(graph=self,param_value=pval).aggregate(lud=models.Max("last_updated"))["lud"]
        return self._lud_cache[pval.id]
    else:
        # Unparametised: cache is a single value; only consider rows with
        # no parameter value attached.
        if self._lud_cache and not update:
            return self._lud_cache
        self._lud_cache = GraphData.objects.filter(graph=self,param_value__isnull=True).aggregate(lud=models.Max("last_updated"))["lud"]
        return self._lud_cache
def widget_data(self, view=None, pval=None):
    """
    Return the :model:`widget_data.WidgetData` object for this widget.

    view, pval: One of these must be supplied for a parametised widget.
    Falls back to the unparametised WidgetData record when no record
    exists for the resolved parameter value; returns None if neither
    exists.
    """
    try:
        pval = resolve_pval(self.parametisation, view=view, pval=pval)
    except ParametisationException:
        pval = None
    if pval:
        try:
            return WidgetData.objects.get(widget=self, param_value=pval)
        except WidgetData.DoesNotExist:
            pass
    try:
        # BUG FIX: the fallback previously repeated param_value=pval, so
        # it re-ran the exact lookup that had just failed and the
        # unparametised fallback could never be found. Query for the
        # null-parameter record instead (as the sibling implementation does).
        return WidgetData.objects.get(widget=self, param_value__isnull=True)
    except WidgetData.DoesNotExist:
        return None
def data_last_updated(self, update=False, view=None, pval=None):
    """
    Return the date the data for this widget was last updated.

    update: If true, the objects last-updated cache is flushed and recalculated.
    view, pval: One of these must be supplied for a parametised widget
                (unless a non-None wd has been passed in)
    """
    pval = resolve_pval(self.parametisation, view=view, pval=pval)
    if pval:
        # Parametised widget: cache is a dict keyed by parameter-value id.
        if self._lud_cache and self._lud_cache.get(pval.id) and not update:
            return self._lud_cache[pval.id]
        if not self._lud_cache:
            self._lud_cache = {}
        latest = None
        # Late model lookup via the app registry (avoids a circular import).
        Statistic = apps.get_app_config("widget_def").get_model(
            "Statistic")
        # The widget's last-update time is the max over its statistics.
        for s in Statistic.objects.filter(tile__widget=self):
            slu = s.data_last_updated(update, view, pval)
            if latest is None:
                latest = slu
            elif slu and slu > latest:
                latest = slu
        self._lud_cache[pval.id] = latest
        return self._lud_cache[pval.id]
    else:
        # Unparametised widget: cache is a single value.
        if self._lud_cache and not update:
            return self._lud_cache
        # Take the max of last_updated over statistic data, list data,
        # graph data and any geo datasets attached to the widget's tiles.
        lud_statdata = StatisticData.objects.filter(
            statistic__tile__widget=self).aggregate(
            lud=models.Max('last_updated'))['lud']
        lud_listdata = StatisticListItem.objects.filter(
            statistic__tile__widget=self).aggregate(
            lud=models.Max('last_updated'))['lud']
        lud_graphdata = GraphData.objects.filter(
            graph__tile__widget=self).aggregate(
            lud=models.Max("last_updated"))["lud"]
        luds_mapdata = [None]
        # NOTE(review): this variant iterates self.tiles rather than
        # self.tiledefinition_set as other copies do — presumably a
        # related_name; confirm against the model definition.
        for t in self.tiles.all():
            for ds in t.geo_datasets.all():
                luds_mapdata.append(ds.data_last_updated(update))
        self._lud_cache = max_with_nulls(lud_statdata, lud_listdata,
                                         lud_graphdata, *luds_mapdata)
        return self._lud_cache
def get_data(self, view=None, pval=None):
    """
    Return the data for this widget (with parametisation if necessary).

    Data returned as a single :model:`widget_data.StatisticData` object,
    or a :model:`widget_data.StatisticListItem` query result if
    is_data_list() returns True.
    """
    pval = resolve_pval(self.tile.widget.parametisation, view=view, pval=pval)
    if self.is_data_list():
        items = StatisticListItem.objects.filter(statistic=self)
        if pval:
            return items.filter(param_value=pval)
        return items.filter(param_value__isnull=True)
    try:
        if pval:
            return StatisticData.objects.get(statistic=self,
                            param_value=pval)
        return StatisticData.objects.get(statistic=self,
                            param_value__isnull=True)
    except StatisticData.DoesNotExist:
        return None
def data_last_updated(self, update=False, view=None, pval=None):
    """
    Return when the data for this statistic was last updated (with parametisation if necessary).

    update: If true and the last_updated value is cached, then the cached value is dropped and recalculated.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval = resolve_pval(self.widget().parametisation, view=view, pval=pval)
    if pval:
        # Parametised: cache is a dict keyed by parameter-value id.
        if self._lud_cache and self._lud_cache.get(pval.id) and not update:
            return self._lud_cache[pval.id]
        if not self._lud_cache:
            self._lud_cache = {}
        if self.is_data_list():
            # List statistics: max last_updated over the list items.
            self._lud_cache[pval.id] = StatisticListItem.objects.filter(
                statistic=self, param_value=pval).aggregate(
                lud=models.Max('last_updated'))['lud']
        else:
            # Scalar statistics: single StatisticData row (None if absent).
            try:
                self._lud_cache[pval.id] = StatisticData.objects.get(
                    statistic=self, param_value=pval).last_updated
            except StatisticData.DoesNotExist:
                self._lud_cache[pval.id] = None
        return self._lud_cache[pval.id]
    else:
        # Unparametised: cache is a single value.
        if self._lud_cache and not update:
            return self._lud_cache
        if self.is_data_list():
            self._lud_cache = StatisticListItem.objects.filter(
                statistic=self).aggregate(
                lud=models.Max('last_updated'))['lud']
        else:
            try:
                self._lud_cache = StatisticData.objects.get(
                    statistic=self).last_updated
            except StatisticData.DoesNotExist:
                self._lud_cache = None
        return self._lud_cache
        # BUG FIX: removed a trailing unreachable "return state" — both
        # branches above already return, and "state" is an undefined name.
def get_data(self, view=None, pval=None):
    """
    Return the data for this widget (with parametisation if necessary).

    Data returned as a single :model:`widget_data.StatisticData` object,
    or a :model:`widget_data.StatisticListItem` query result if
    is_data_list() returns True.
    """
    pval = resolve_pval(self.widget().parametisation, view=view, pval=pval)
    # Shared lookup for both the list and scalar cases.
    lookup = {"param_value": pval} if pval else {"param_value__isnull": True}
    if self.is_data_list():
        return StatisticListItem.objects.filter(statistic=self).filter(**lookup)
    try:
        return StatisticData.objects.get(statistic=self, **lookup)
    except StatisticData.DoesNotExist:
        return None
def api_get_single_graph_data(graph, view, pval=None, verbose=False):
    """
    Build the json-serialisable data payload for one graph.

    verbose=False: data is keyed by cluster/dataset url.
    verbose=True: data is a flat list of labelled dicts.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval = resolve_pval(graph.widget().parametisation, view=view, pval=pval)
    graph_json = {"data": {}}
    # Pre-seed the data container shape expected by each output mode.
    if verbose:
        graph_json["data"] = []
    else:
        if graph.use_clusters():
            for cluster in graph.clusters(pval):
                graph_json["data"][cluster.url] = {}
        else:
            for dataset in graph.datasets.all():
                graph_json["data"][dataset.url] = []
    # Running min/max for the numeric axes and the horizontal axis.
    numeric_min = None
    numeric_max = None
    numeric2_min = None
    numeric2_max = None
    horiz_min = None
    horiz_max = None
    for gd in graph.get_data(pval=pval):
        if graph.use_numeric_axes():
            # Secondary-axis datasets accumulate into the second scale.
            if graph.use_secondary_numeric_axis and gd.dataset.use_secondary_numeric_axis:
                (numeric2_min, numeric2_max) = update_graph_maxmin(gd, numeric2_min, numeric2_max)
            else:
                (numeric_min, numeric_max) = update_graph_maxmin(gd, numeric_min, numeric_max)
        if not graph.use_clusters():
            (horiz_min, horiz_max) = update_maxmin(gd.horiz_value(), horiz_min, horiz_max)
        if verbose:
            # Verbose mode: one labelled dict per data point.
            if graph.use_numeric_axes():
                if gd.dataset.use_secondary_numeric_axis:
                    data_label = graph.secondary_numeric_axis_label
                else:
                    data_label = graph.numeric_axis_label
                data_label = parametise_label(graph.widget(), view, data_label)
            else:
                data_label = "value"
            graph_datum = {
                parametise_label(graph.widget(), view, graph.dataset_label): parametise_label(
                    graph.widget(), view, get_graph_subset_displayname(gd.dataset, pval)),
                data_label: gd.value
            }
            if graph.use_clusters():
                graph_datum[parametise_label(
                    graph.widget(), view, graph.cluster_label)] = parametise_label(
                    graph.widget(), view, gd.get_cluster().label)
            else:
                graph_datum[parametise_label(
                    graph.widget(), view, graph.horiz_axis_label)] = gd.horiz_json_value()
            if gd.dataset.use_error_bars:
                graph_datum[data_label + "_min"] = gd.err_valmin
                graph_datum[data_label + "_max"] = gd.err_valmax
            graph_json["data"].append(graph_datum)
        else:
            # Compact mode: value (or value+error-bar dict), keyed by url.
            if gd.dataset.use_error_bars:
                json_val = {
                    "value": gd.value,
                    "min": gd.err_valmin,
                    "max": gd.err_valmax,
                }
            else:
                json_val = gd.value
            if graph.use_clusters():
                graph_json["data"][gd.get_cluster().url][
                    gd.dataset.url] = json_val
            else:
                if gd.dataset.use_error_bars:
                    json_val["horizontal_value"] = gd.horiz_json_value()
                else:
                    json_val = (gd.horiz_json_value(), json_val)
                graph_json["data"][gd.dataset.url].append(json_val)
    # Attach axis scales (numeric scales get a display buffer applied).
    if graph.use_numeric_axes():
        numeric_min, numeric_max = apply_vertical_axis_buffer(
            graph, numeric_min, numeric_max)
        graph_json["%s_scale" % graph.numeric_axis_name()] = {
            "min": numeric_min,
            "max": numeric_max
        }
        if graph.use_secondary_numeric_axis:
            numeric2_min, numeric2_max = apply_vertical_axis_buffer(
                graph, numeric2_min, numeric2_max)
            graph_json["%s_2_scale" % graph.numeric_axis_name()] = {
                "min": numeric2_min,
                "max": numeric2_max
            }
    if not graph.use_clusters():
        graph_json["horizontal_axis_scale"] = {
            "min": graph.jsonise_horiz_value(horiz_min),
            "max": graph.jsonise_horiz_value(horiz_max)
        }
    # Cluster metadata: histograms call them "clusters", pie charts "pies".
    if graph.use_clusters():
        if graph.is_histogram():
            clusters_attrib = "clusters"
        else:
            clusters_attrib = "pies"
        graph_json[clusters_attrib] = [
            c.__getstate__(view=view) for c in graph.clusters(pval)
        ]
    # Dataset metadata, with any parametised name overrides applied.
    overrides = get_graph_overrides(graph.datasets, GraphDatasetData, "dataset", pval)
    datasets = {}
    for ds in graph.datasets.all():
        datasets[ds.url] = ds.__getstate__(view=view)
        del datasets[ds.url]["dynamic_name_display"]
        if ds.url in overrides:
            datasets[ds.url]["name"] = overrides[ds.url]
    if graph.is_histogram():
        datasets_attrib = "datasets"
    else:
        datasets_attrib = "sectors"
    graph_json[datasets_attrib] = datasets
    if overrides:
        graph_json["dataset_name_overrides"] = overrides
    return graph_json
def api_get_single_graph_data(graph, view, pval=None, verbose=False):
    """
    Build the json-serialisable data payload for one graph.

    verbose=False: data is keyed by cluster/dataset url.
    verbose=True: data is a flat list of labelled dicts.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval = resolve_pval(graph.widget().parametisation, view=view, pval=pval)
    graph_json = { "data": {} }
    # Pre-seed the data container shape expected by each output mode.
    if verbose:
        graph_json["data"] = []
    else:
        if graph.use_clusters():
            for cluster in graph.clusters(pval):
                graph_json["data"][cluster.url] = {}
        else:
            for dataset in graph.graphdataset_set.all():
                graph_json["data"][dataset.url] = []
    # Running min/max for the numeric axes and the horizontal axis.
    numeric_min = None
    numeric_max = None
    numeric2_min = None
    numeric2_max = None
    horiz_min = None
    horiz_max = None
    for gd in graph.get_data(pval=pval):
        if graph.use_numeric_axes():
            # Secondary-axis datasets accumulate into the second scale.
            if graph.use_secondary_numeric_axis and gd.dataset.use_secondary_numeric_axis:
                (numeric2_min, numeric2_max)=update_maxmin(gd.value, numeric2_min, numeric2_max)
            else:
                (numeric_min, numeric_max)=update_maxmin(gd.value, numeric_min, numeric_max)
        if not graph.use_clusters():
            (horiz_min, horiz_max) = update_maxmin(gd.horiz_value(), horiz_min, horiz_max)
        if verbose:
            # Verbose mode: one labelled dict per data point.
            if graph.use_numeric_axes():
                if gd.dataset.use_secondary_numeric_axis:
                    data_label = graph.secondary_numeric_axis_label
                else:
                    data_label = graph.numeric_axis_label
                data_label = parametise_label(graph.widget(), view, data_label)
            else:
                data_label = "value"
            graph_datum = {
                parametise_label(graph.widget(), view, graph.dataset_label): parametise_label(graph.widget(), view, get_graph_subset_displayname(gd.dataset,pval)),
                data_label: gd.value
            }
            if graph.use_clusters():
                graph_datum[parametise_label(graph.widget(), view, graph.cluster_label)] = parametise_label(graph.widget(), view, gd.get_cluster().label)
            else:
                graph_datum[parametise_label(graph.widget(), view, graph.horiz_axis_label)] = gd.horiz_json_value()
            if gd.dataset.use_error_bars:
                graph_datum[data_label + "_min"] = gd.err_valmin
                graph_datum[data_label + "_max"] = gd.err_valmax
            graph_json["data"].append(graph_datum)
        else:
            # Compact mode: value (or value+error-bar dict), keyed by url.
            if gd.dataset.use_error_bars:
                json_val = {
                    "value": gd.value,
                    "min": gd.err_valmin,
                    "max": gd.err_valmax,
                }
            else:
                json_val = gd.value
            if graph.use_clusters():
                graph_json["data"][gd.get_cluster().url][gd.dataset.url] = json_val
            else:
                if gd.dataset.use_error_bars:
                    json_val["horizontal_value"] = gd.horiz_json_value()
                else:
                    json_val = ( gd.horiz_json_value(), json_val )
                graph_json["data"][gd.dataset.url].append(json_val)
    # Attach axis scales.
    if graph.use_numeric_axes():
        graph_json["%s_scale" % graph.numeric_axis_name()] = {
            "min": numeric_min,
            "max": numeric_max
        }
        if graph.use_secondary_numeric_axis:
            graph_json["%s_2_scale" % graph.numeric_axis_name()] = {
                "min": numeric2_min,
                "max": numeric2_max
            }
    if not graph.use_clusters():
        graph_json["horizontal_axis_scale"] = {
            "min": graph.jsonise_horiz_value(horiz_min),
            "max": graph.jsonise_horiz_value(horiz_max)
        }
    # Dynamic cluster metadata: histograms call them "clusters", pies "pies".
    if graph.use_clusters() and graph.dynamic_clusters:
        if graph.is_histogram():
            graph_json["clusters"] = [ c.__getstate__(view) for c in graph.clusters(pval) ]
        else:
            graph_json["pies"] = [ c.__getstate__(view) for c in graph.clusters(pval) ]
    # Parametised dataset name overrides, if any.
    overrides = get_graph_overrides(graph.graphdataset_set, GraphDatasetData, "dataset", pval)
    if overrides:
        graph_json["dataset_name_overrides"] = overrides
    return graph_json
def get_data(self, view=None, pval=None):
    """Get the data for this graph (parametised if necessary)"""
    resolved = resolve_pval(self.widget().parametisation, view=view, pval=pval)
    records = GraphData.objects.filter(graph=self, param_value=resolved)
    return records.natural_order(self)
def api_get_single_graph_data(graph, view, pval=None, verbose=False):
    """
    Build the json-serialisable data payload for one graph.

    verbose=False: data is keyed by cluster/dataset url.
    verbose=True: data is a flat list of labelled dicts.
    view, pval: one of these must be supplied for a parametised widget.
    """
    pval = resolve_pval(graph.widget().parametisation, view=view, pval=pval)
    graph_json = { "data": {} }
    # Pre-seed the data container shape expected by each output mode.
    if verbose:
        graph_json["data"] = []
    else:
        if graph.use_clusters():
            for cluster in graph.graphcluster_set.all():
                graph_json["data"][cluster.url] = {}
        else:
            for dataset in graph.graphdataset_set.all():
                graph_json["data"][dataset.url] = []
    # Running min/max for the numeric axes and the horizontal axis.
    numeric_min = None
    numeric_max = None
    numeric2_min = None
    numeric2_max = None
    horiz_min = None
    horiz_max = None
    for gd in graph.get_data(pval=pval):
        if graph.use_numeric_axes():
            if graph.use_secondary_numeric_axis and gd.dataset.use_secondary_numeric_axis:
                (numeric2_min, numeric2_max)=update_maxmin(gd.value, numeric2_min, numeric2_max)
            else:
                (numeric_min, numeric_max)=update_maxmin(gd.value, numeric_min, numeric_max)
        if not graph.use_clusters():
            (horiz_min, horiz_max) = update_maxmin(gd.horiz_value(), horiz_min, horiz_max)
        if verbose:
            # BUG FIX: data_label was only assigned inside the
            # use_clusters() branch, so the non-clustered verbose path
            # raised NameError. Compute it up front for both branches,
            # guarded by use_numeric_axes() as the sibling
            # implementations of this function do.
            if graph.use_numeric_axes():
                if gd.dataset.use_secondary_numeric_axis:
                    data_label = graph.secondary_numeric_axis_label
                else:
                    data_label = graph.numeric_axis_label
                data_label = parametise_label(graph.widget(), view, data_label)
            else:
                data_label = "value"
            if graph.use_clusters():
                graph_json["data"].append({
                    parametise_label(graph.widget(), view, graph.cluster_label): parametise_label(graph.widget(), view, get_graph_subset_displayname(gd.cluster,pval)),
                    parametise_label(graph.widget(), view, graph.dataset_label): parametise_label(graph.widget(), view, get_graph_subset_displayname(gd.dataset,pval)),
                    data_label: gd.value
                })
            else:
                graph_json["data"].append({
                    parametise_label(graph.widget(), view, graph.horiz_axis_label): gd.horiz_json_value(),
                    parametise_label(graph.widget(), view, graph.dataset_label): parametise_label(graph.widget(), view, get_graph_subset_displayname(gd.dataset,pval)),
                    data_label: gd.value
                })
        else:
            # Compact mode: plain values keyed by cluster/dataset url.
            if graph.use_clusters():
                graph_json["data"][gd.cluster.url][gd.dataset.url] = gd.value
            else:
                graph_json["data"][gd.dataset.url].append([ gd.horiz_json_value(), gd.value ])
    # Attach axis scales.
    if graph.use_numeric_axes():
        graph_json["%s_scale" % graph.numeric_axis_name()] = {
            "min": numeric_min,
            "max": numeric_max
        }
        if graph.use_secondary_numeric_axis:
            graph_json["%s_2_scale" % graph.numeric_axis_name()] = {
                "min": numeric2_min,
                "max": numeric2_max
            }
    if not graph.use_clusters():
        graph_json["horizontal_axis_scale"] = {
            "min": graph.jsonise_horiz_value(horiz_min),
            "max": graph.jsonise_horiz_value(horiz_max)
        }
    # Parametised cluster/dataset name overrides, if any.
    overrides = get_graph_overrides(graph.graphcluster_set, GraphClusterData, "cluster", pval)
    if overrides:
        graph_json["cluster_name_overrides"] = overrides
    overrides = get_graph_overrides(graph.graphdataset_set, GraphDatasetData, "dataset", pval)
    if overrides:
        graph_json["dataset_name_overrides"] = overrides
    return graph_json
def get_data(self, view=None, pval=None):
    """
    Get the data for this graph (parametised if necessary).

    view, pval: one of these must be supplied for a parametised widget.
    Returns a GraphData queryset in the graph's natural order.
    """
    pval = resolve_pval(self.widget().parametisation, view=view, pval=pval)
    return GraphData.objects.filter(graph=self,param_value=pval).natural_order(self)