def get_at_chart(request, harvester_id, screen_name):
    """Return per-day status counts for an "@screen_name" Twitter search as
    a gviz JSON chart feed.

    Args:
        request: Django request object (view contract; not otherwise used).
        harvester_id: harvester identifier from the URL (unused here).
        screen_name: screen name whose "@screen_name" search is charted.

    Returns:
        HttpResponse containing a google-visualization JSON DataTable; the
        table is empty when no matching TWSearch exists.
    """
    description = {
        "date_val": ("date", "Date"),
        "status_count": ("number", "Status count"),
    }
    data = []
    try:
        search = TWSearch.objects.get(term__exact="@%s" % screen_name)
    except ObjectDoesNotExist:
        # No search recorded: fall through with an empty data list.
        search = None
    if search is not None:
        count = search.status_list.all().count()
        fromto = search.status_list.all().order_by(u"created_at")
        # Chart spans from the earliest to the latest status; fall back to
        # "now" so an empty list still yields a valid single-day range.
        base = fromto[0].created_at if count != 0 else dt.datetime.now()
        to = fromto[count - 1].created_at if count != 0 else dt.datetime.now()
        days = (to - base).days + 1
        dateList = [base + dt.timedelta(days=x) for x in range(0, days)]
        for date in dateList:
            # NOTE: one COUNT query per day; fine for short date ranges.
            c = search.status_list.all().filter(
                created_at__year=date.year,
                created_at__month=date.month,
                created_at__day=date.day).count()
            data.append({"date_val": date, "status_count": c})
    # Single response path for both the empty and the populated table; the
    # original duplicated the table/response construction in the except arm.
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(data)
    return HttpResponse(data_table.ToJSon(),
                        mimetype='application/javascript')
def main(): scraperwiki.sqlite.attach("at_herold_branches") description_chart = {"city": ("string", "City"), "business": ("number", "Number")} script="" divs="" #Overview------------------------------------------------------ data_chart = scraperwiki.sqlite.select( """city, count(*) as business from at_herold_branches.swdata group by city""" ) data_table_chart = gviz_api.DataTable(description_chart) data_table_chart.LoadData(data_chart) jscode_chart = data_table_chart.ToJSCode("jscode_data_chart_overview", columns_order=("city", "business"), order_by="city") #charts[str(i)]=jscode_chart script_tmp = jscode_chart+""" var chart = new google.visualization.BarChart(document.getElementById('chart_div_overview')); chart.draw(jscode_data_chart_overview, {title: 'Entries per city', isStacked: true});\n""" script += script_tmp divs += '<div id="chart_div_overview" style="width: 900px; height: 500px;"></div>\n' branches = scraperwiki.sqlite.select( '''branch from at_herold_branches.swdata group by branch''' ) #Per branch---------------------------------------------------- i=0 for b in branches: branch=b["branch"] data_chart = scraperwiki.sqlite.select( """city, count(*) as business from at_herold_branches.swdata where branch='"""+branch+"""' group by city""" ) data_table_chart = gviz_api.DataTable(description_chart) data_table_chart.LoadData(data_chart) jscode_chart = data_table_chart.ToJSCode("jscode_data_chart_"+str(i), columns_order=("city", "business"), order_by="city") #charts[str(i)]=jscode_chart script_tmp = jscode_chart+""" var chart = new google.visualization.PieChart(document.getElementById('chart_div_"""+str(i)+"""')); chart.draw(jscode_data_chart_"""+str(i)+""", {title: '"""+branch+"""'});\n""" script += script_tmp divs += '<div id="chart_div_'+str(i)+'" style="width: 900px; height: 500px;"></div>\n' i+=1 page_template=page_template_1+script+page_template_2+divs+page_template_3 print page_template
def finances():
    """Fetch the municipal finances dataset from the Durban CKAN datastore
    and return it serialised as a google-visualization JSON DataTable string.
    """
    # NOTE(review): API key and resource id are hard-coded in source;
    # consider moving them to configuration.
    API = '989e1c94-6dfd-4bc1-af04-9c64e59d96b3'
    ID = '002a0d5d-d7c7-44e0-94b5-f9325477c3c6'
    url = 'http://data.opendata.durban/api/action/datastore_search?resource_id=%s&limit=100' % ID
    req = urllib2.Request(url)
    req.add_header('Authorization', API)
    resp = urllib2.urlopen(req)
    content = loads(resp.read())
    # Flatten the CKAN "records" list-of-dicts into a DataFrame.
    dataFrame = json_normalize(content['result']['records'])
    # Coerce unicode columns to str, then let pandas infer numeric columns.
    # (pd.lib.infer_dtype / convert_objects are legacy-pandas APIs.)
    types = dataFrame.apply(lambda x: pd.lib.infer_dtype(x.values))
    for col in types[types == 'unicode'].index:
        dataFrame[col] = dataFrame[col].astype(str)
    dataFrame = dataFrame.convert_objects(convert_numeric=True)
    # Schema is positional against dataFrame.values — presumably matching the
    # (alphabetical) column order json_normalize produces; TODO confirm.
    schema = [('City', 'string'), ('Color 1', 'number'), ('Color 2', 'number'),
              ('Expenditure', 'number'), ('Percentage', 'number'),
              ('Revenue', 'number'), ('Year', 'string'), ('ID', 'number')]
    data_table = gviz_api.DataTable(schema)
    data_table.LoadData(dataFrame.values)
    # json = data_table.ToJSon(columns_order=('City', 'Year', 'Expenditure',
    #                                         'Revenue', 'Percentage', 'Color 1',
    #                                         'Color 2', 'ID'))
    json = data_table.ToJSon(columns_order=('City', 'Percentage', 'Year'))
    return json
def test_recommendation_simple(self):
    """Checks the gviz table args built from a mock recommendation proto.

    Verifies row/column counts, raw row values, and the CSV rendering of the
    resulting DataTable against the mock tips fixture.
    """
    recommendation = self.create_mock_recommendation()
    (table_description, data, custom_properties) = \
        overview_page_proto_to_gviz.get_recommendation_table_args(
            recommendation)
    data_table = gviz_api.DataTable(table_description, data,
                                    custom_properties)
    # Data is a list of 12 rows: 3 rows for each tip type.
    self.assertLen(data, len(list(self.mock_tips)))
    self.assertLen(list(self.mock_tips), data_table.NumberOfRows(),
                   "Simple table has 12 rows.")
    # Check the number of columns in table descriptor and data table.
    self.assertLen(table_description, 2)
    self.assertLen(data_table.columns, 2)
    # Check data against mock values.
    for idx, row in enumerate(data):
        self.assertEqual(list(self.mock_tips[idx]), row)
    # Check DataTable against mock values.
    # Only way to access DataTable contents is by CSV.
    csv_file = io.StringIO(data_table.ToCsv())
    reader = csv.reader(csv_file)
    for (rr, row_values) in enumerate(reader):
        if rr == 0:
            # First CSV row is the header.
            self.check_header_row(data, table_description, row_values)
        else:
            # CSV rows are offset by one relative to data rows.
            self.check_row_types(data, table_description, row_values, rr)
            self.assertEqual(list(self.mock_tips[rr - 1]), row_values)
def CrossPerformanceGvizTable(datatable, metric, codecs, criterion): """Build a square table of codecs and relative performance.""" # pylint: disable=too-many-locals videofile_name_list = datatable[codecs[0]].keys() description = {} description['codec'] = ('string', 'Codec') data = [] for codec in codecs: description[codec] = ('string', codec) for codec1 in codecs: lineitem = {'codec': codec1} for codec2 in codecs: if codec1 != codec2: count = 0 overall = 0.0 for filename in videofile_name_list: if (codec1 in datatable and filename in datatable[codec1] and codec2 in datatable and filename in datatable[codec2]): overall += DataSetBetter( ExtractBitrateAndPsnr(datatable, codec2, filename), ExtractBitrateAndPsnr(datatable, codec1, filename), metric) count += 1 if count > 0: display = ('<a href=/results/show_result.html?' + 'codec1=%s&codec2=%s&criterion=%s>%5.2f</a>' ) % (codec2, codec1, criterion, overall / count) lineitem[codec2] = (overall / count, display) data.append(lineitem) gviz_data_table = gviz_api.DataTable(description) gviz_data_table.LoadData(data) return gviz_data_table
def to_tsv_excel(self, order=None, labels=None, formatting=None,
                 properties=None):
    """Render the QuerySet as a tab-separated file readable by MS Excel.

    Does _not_ return a new QuerySet: the result is a UTF-16 little endian
    encoded string with tabs separating the values.

    kwargs:
        order: iterable of field names giving the column order; any field
            not listed is discarded.
        labels: {'field': 'label'} mapping from model field name to the
            label desired on the chart.
        formatting: string.format() compatible expression.
        properties: dictionary with custom properties.
    """
    description = self.table_description(labels)
    fields = description.keys()
    rows = (self.values(*fields) if formatting is None
            else self.formatting(fields, formatting))
    table = gviz_api.DataTable(description, rows, properties)
    return table.ToTsvExcel(columns_order=order)
def to_html(self, order=None, labels=None, formatting=None, properties=None):
    """Render the QuerySet as an HTML table code string.

    Does _not_ return a new QuerySet.

    kwargs:
        order: iterable of field names giving the column order; any field
            not listed is discarded.
        labels: {'field': 'label'} mapping from model field name to the
            label desired on the chart.
        formatting: string.format() compatible expression.
        properties: dictionary with custom properties.
    """
    description = self.table_description(labels)
    fields = description.keys()
    rows = (self.values(*fields) if formatting is None
            else self.formatting(fields, formatting))
    table = gviz_api.DataTable(description, rows, properties)
    return table.ToHtml(columns_order=order)
def main():
    """Build output.html containing a chart of median DPI per country.

    Reads median-dpi-countries.csv (header row skipped), loads the rows into
    a gviz DataTable, and writes the resulting JSON into the page template.
    """
    # Load data from the CSV file.
    source_csv = "median-dpi-countries.csv"
    with open(source_csv, 'r') as handle:
        reader = csv.reader(handle)
        next(reader)  # skip header
        datarows = [row for row in reader]
    # Describe the data: country name plus DPI as a (value, formatted) pair.
    description = {
        "country": ("string", "Country"),
        "dpi": ("number", "EUR"),
    }
    data = [{"country": record[0], "dpi": (float(record[1]), record[1])}
            for record in datarows]
    # Instantiate the DataTable and load the rows into it.
    table = gviz_api.DataTable(description)
    table.LoadData(data)
    # Serialise to JSON, ordered by country.
    json = table.ToJSon(columns_order=("country", "dpi"),
                        order_by="country")
    # Put the JSON string into the template and save it as output.html.
    with open('output.html', 'w') as out:
        out.write(get_page_template() % (json,))
def test_graph_table(self):
    """Checks the gviz graph-table args built from combined tf.data stats.

    Expects one row per iterator; each row carries the host name, input
    pipeline name, the iterator and its parent id (empty string for the
    root iterator), plus the remaining per-iterator columns.
    """
    combined_tf_data_stats = self.create_mock_combined_tf_data_stats()
    (table_description, data,
     custom_properties) = tf_data_stats_proto_to_gviz.get_graph_table_args(
         combined_tf_data_stats)
    data_table = gviz_api.DataTable(table_description, data,
                                    custom_properties)
    expected = [
        [
            MockValues.HOST_NAME,
            MockValues.INPUT_PIPELINE_NAME,
            0,
            MockValues.FIRST_ITERATOR_ID,
            "",  # root iterator: no parent
            1,
        ],
        [
            MockValues.HOST_NAME,
            MockValues.INPUT_PIPELINE_NAME,
            0,
            MockValues.SECOND_ITERATOR_ID,
            MockValues.FIRST_ITERATOR_ID,
            2,
        ],
    ]
    self.check_data_table(table_description, data, data_table, expected)
def line_gviz():
    """Render the demo line-chart page for a hard-coded salary dataset.

    Returns the filled ``line_template`` string.

    WARNING: the template is rendered with ``line_template % vars()``, so
    the template looks up *local variable names* here (``jscode`` and
    ``json``); renaming locals in this function silently breaks the page.
    """
    # Creating the data: each salary is a (value, formatted-string) pair.
    description = {"name": ("string", "Name"), "salary": ("number", "Salary")}
    data = [{
        "name": "Mike",
        "salary": (10000, "$10,000")
    }, {
        "name": "Jim",
        "salary": (800, "$800")
    }, {
        "name": "Alice",
        "salary": (12500, "$12,500")
    }, {
        "name": "Bob",
        "salary": (7000, "$7,000")
    }]
    # Loading it into gviz_api.DataTable
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(data)
    # Creating a JavaScript code string (referenced by the template).
    jscode = data_table.ToJSCode("jscode_data",
                                 columns_order=("name", "salary"),
                                 order_by="salary")
    # Creating a JSon string (also referenced by the template).
    json = data_table.ToJSon(columns_order=("name", "salary"),
                             order_by="salary")
    # Putting the JS code and JSon string into the template
    #string = "Content-type: text/html\n"
    string = line_template % vars()
    return string
def get_otherwall_chart(request, harvester_id, userfid):
    """Chart feed: posts per day a Facebook user made on other users' walls.

    Returns gviz JSON with one row per calendar day between the user's first
    and last such post (a single "today" row when there are none).
    """
    user = get_list_or_404(FBUser, fid=userfid)[0]
    # Posts authored by the user, excluding posts on the user's own wall.
    posts = FBPost.objects.filter(ffrom=user).exclude(user=user)
    total = posts.count()
    ordered = posts.order_by(u"created_time")
    now = dt.datetime.now()
    first_day = ordered[0].created_time if total != 0 else now
    last_day = ordered[total - 1].created_time if total != 0 else now
    span = (last_day - first_day).days + 1
    description = {
        "date_val": ("date", "Date"),
        "post_count": ("number", "Post count"),
    }
    data = []
    for offset in range(0, span):
        day = first_day + dt.timedelta(days=offset)
        daily = posts.filter(created_time__year=day.year,
                             created_time__month=day.month,
                             created_time__day=day.day).count()
        data.append({"date_val": day, "post_count": daily})
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(data)
    logger.debug(data_table.ToJSon())
    return HttpResponse(data_table.ToJSon(),
                        mimetype='application/javascript')
def convert_to_gviz_json_for_display(description, data, columns_order,
                                     output_file=None):
    '''
    A utility method for writing a gviz format json file for data display
    using Google charts

    :param description: A dictionary for the data table description
    :param data: A dictionary containing the data table
    :param columns_order: A tuple of data table column order
    :param output_file: Output filename, default None
    :returns: None if output_file name is present, or else the json string
    '''
    # The original wrapped this body in ``try: ... except: raise`` — a bare
    # except followed by a bare re-raise is a no-op (and a bare except is bad
    # practice), so the wrapper has been removed; exceptions still propagate.
    data_table = gviz_api.DataTable(description)  # load description into gviz api
    data_table.LoadData(data)  # load data into gviz_api
    final_data = data_table.ToJSon(
        columns_order=columns_order)  # create final data structure
    if output_file is None:
        return final_data
    with open(output_file, 'w') as jf:
        jf.write(final_data)  # write final data to output file
    return None
def get(self):
    """Serve the (x, y) points of one Promising entity as a gviz response.

    The entity matching the 'str_value' request parameter stores its points
    as a comma-separated blob laid out x1,y1,x2,y2,... with a trailing comma.
    """
    str_values = self.request.get('str_value')
    description = {"x": ("number", "x"), "y": ("number", "y")}
    query = db.Query(Promising)
    filter_query = query.filter('str_values =', str_values)
    result = filter_query.get()
    point_list = result.point_blob.split(',')
    del point_list[-1]  # drop the empty piece left by the trailing comma
    data = []
    # Values alternate x, y — walk the list two entries at a time.
    for idx in range(0, len(point_list), 2):
        data.append({'x': float(point_list[idx]),
                     'y': float(point_list[idx + 1])})
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(data)
    logging.info(data_table.ToJSonResponse())
    self.response.out.write(data_table.ToJSonResponse())
def motion():
    """Fetch the "motion" socio-economic dataset from the Durban CKAN
    datastore and return it as a google-visualization JSON string."""
    # NOTE(review): API key and resource id are hard-coded in source;
    # consider moving them to configuration.
    API = '989e1c94-6dfd-4bc1-af04-9c64e59d96b3'
    ID = 'f47e6ba8-f65a-411d-806c-17acc26b51b0'
    url = 'http://data.opendata.durban/api/action/datastore_search?resource_id=%s&limit=500' % ID
    req = urllib2.Request(url)
    req.add_header('Authorization', API)
    resp = urllib2.urlopen(req)
    content = loads(resp.read())
    # Flatten the CKAN "records" list-of-dicts into a DataFrame.
    dataFrame = json_normalize(content['result']['records'])
    # Coerce unicode columns to str, then let pandas infer numeric columns.
    # (pd.lib.infer_dtype / convert_objects are legacy-pandas APIs.)
    types = dataFrame.apply(lambda x: pd.lib.infer_dtype(x.values))
    for col in types[types == 'unicode'].index:
        dataFrame[col] = dataFrame[col].astype(str)
    dataFrame = dataFrame.convert_objects(convert_numeric=True)
    # Schema is positional against dataFrame.values — presumably matching the
    # (alphabetical) column order json_normalize produces; TODO confirm.
    schema = [('City', 'string'), ('GVA', 'number'),
              ('Gini Coefficient', 'number'), ('HDI', 'number'),
              ('Household Numbers', 'number'),
              ('Population Growth', 'number'), ('Unemployment', 'string'),
              ('Year', 'number'), ('ID', 'number')]
    data_table = gviz_api.DataTable(schema)
    data_table.LoadData(dataFrame.values)
    json = data_table.ToJSon(columns_order=('City', 'Year',
                                            'Population Growth',
                                            'Household Numbers', 'HDI',
                                            'Gini Coefficient', 'GVA',
                                            'Unemployment'))
    return json
def ensembleDecay(self, **kwargs):
    """CherryPy JSON endpoint: solve the species system and return gviz JSON.

    Any query parameter whose name matches a species of ``self.system``
    overrides that species' initial condition. The gviz 'tqx' parameter
    (semicolon-separated "name:value" pairs, e.g. "reqId:0") must be
    present; its reqId is echoed back in the response.
    """
    cherrypy.response.headers['Content-Type'] = "application/json"
    import gviz_api
    # Time grid for the solver; values are rescaled by 1e-6 when loaded
    # below (presumably µs -> s — TODO confirm units).
    t = np.linspace(1, 1e5, 1000)
    for sp in self.system.species:
        if sp in kwargs.keys():
            self.system.initialConditions[sp] = float(kwargs[sp])
    res = self.system.solve(t)
    print(kwargs.keys())
    # One 't' column plus one column per solved species (structured array).
    table = gviz_api.DataTable([('t', 'number')] +
                               [(n, 'number') for n in res.dtype.names])
    table.LoadData([[
        ti * 1e-6,
    ] + list(r) for ti, r in zip(t, res)])
    # Parse the gviz request string "name:value;name:value" into a dict.
    tqx = {}
    for var in kwargs['tqx'].split(';'):
        n, v = var.split(':')
        tqx[n] = v
    return table.ToJSonResponse(req_id=int(tqx['reqId']))
def CrossPerformanceGvizTable(datatable, metric, codecs, criterion):
  """Build a square table of codecs and relative performance.

  Args:
    datatable: nested mapping, datatable[codec][videofile] -> data set.
    metric: metric name forwarded to DataSetBetter for the comparison.
    codecs: codec names; used for both the rows and the columns.
    criterion: criterion string embedded in the generated result links.

  Returns:
    A loaded gviz_api.DataTable with one row per codec; each off-diagonal
    cell holds the comparison score between the two codecs, averaged over
    every file present for both, rendered as an HTML link to the
    pre-generated comparison page.
  """
  videofile_name_list = datatable[codecs[0]].keys()
  description = {}
  description['codec'] = ('string', 'Codec')
  data = []
  for codec in codecs:
    description[codec] = ('string', codec)
  for codec1 in codecs:
    lineitem = {'codec': codec1}
    for codec2 in codecs:
      if codec1 != codec2:
        count = 0
        overall = 0.0
        # Average the pairwise score over files available for both codecs.
        for filename in videofile_name_list:
          if (codec1 in datatable and filename in datatable[codec1] and
              codec2 in datatable and filename in datatable[codec2]):
            overall += DataSetBetter(datatable[codec1][filename],
                                     datatable[codec2][filename],
                                     metric)
            count += 1
        if count > 0:
          # Cell is a (number, formatted) pair: gviz sorts on the number
          # but renders the HTML link.
          display = '<a href=/results/generated/%s-%s-%s.html>%5.2f</a>' % (
              codec1, codec2, criterion, overall / count)
          lineitem[codec2] = (overall / count, display)
    data.append(lineitem)
  gviz_data_table = gviz_api.DataTable(description)
  gviz_data_table.LoadData(data)
  return gviz_data_table
def __repr__(self):
    """Render the full chart page for this object.

    Rebuilds the cached gviz JS/JSON strings when the description/data have
    changed since the last render, then fills PAGE_TEMPLATE with them plus
    the chart constants.

    NOTE(review): __repr__ conventionally returns a short developer-oriented
    string; returning a whole HTML page (and printing a debug line) is
    surprising — consider renaming this to e.g. ``to_html``.
    """
    print "DEBUG: Entered __repr__"
    # Only rebuild the (relatively expensive) gviz artefacts when stale.
    if not self.description_and_data_up_to_date:
        # Load description and data into gviz_api.DataTable
        self.data_table = gviz_api.DataTable(self.description)
        self.data_table.LoadData(self.data)
        # Create JavaScript code as string.
        self.jscode = self.data_table.ToJSCode(
            "jscode_data",
            columns_order=self.column_order,
            order_by=self.ORDER_DATA_BY)
        # Create a JSON string to represent DataTable
        self.json = self.data_table.ToJSon(columns_order=self.column_order,
                                           order_by=self.ORDER_DATA_BY)
        self.description_and_data_up_to_date = True
    return self.PAGE_TEMPLATE.format(
        jscode=self.jscode,
        json=self.json,
        chart_title=self.chart_title,
        isStacked=self.CONSTANTS['isStacked'],
        width=self.CONSTANTS['width'],
        # NOTE(review): the 'length' constant feeds the template's height —
        # confirm the key name is intentional.
        height=self.CONSTANTS['length'],
        chartArea_width=self.CONSTANTS['chartArea_width'],
        chartArea_height=self.CONSTANTS['chartArea_height'],
        hAxis_title=self.CONSTANTS['hAxis_title'],
        vAxis_minValue=self.CONSTANTS['vAxis_minValue'],
        vAxis_maxValue=self.CONSTANTS['vAxis_maxValue'],
        quantities=self.quantities_str)
def create_map_data_source(data, vname):
    '''
    Creates a JSON datasource compatible with ``geomap`` from the Google
    visualization API.

    data: list of data tuples (lat, long, number, 'hoverstring')
    vname: label used for the VALUE column

    Returns a loaded gviz DataTable.
    '''
    description = {
        "LATITUDE": ('number', 'Latitude'),
        "LONGITUDE": ('number', 'Longitude'),
        "VALUE": ('number', vname),
        "HOVER": ('string', 'HoverText')
    }
    dados = []
    for d in data:
        # Best-effort: skip malformed entries. The original used a bare
        # ``except: pass`` which also swallowed KeyboardInterrupt/SystemExit
        # and real bugs; catch only what a short or non-indexable entry can
        # actually raise.
        try:
            dados.append({
                "LATITUDE": d[0],
                "LONGITUDE": d[1],
                "VALUE": d[2],
                "HOVER": d[3]
            })
        except (IndexError, TypeError, KeyError):
            pass
    data_table = GV.DataTable(description)
    data_table.LoadData(dados)
    return data_table
def to_javascript(self, name, order=None, labels=None, formatting=None,
                  properties=None):
    """Render the QuerySet as a JavaScript code string.

    Does _not_ return a new QuerySet. The returned JS code, when run,
    creates a DataTable holding the QuerySet data. Typically used for
    debugging only.

    kwargs:
        name: name of the variable to which the data table is assigned.
        order: iterable of field names giving the column order; any field
            not listed is discarded.
        labels: {'field': 'label'} mapping from model field name to the
            label desired on the chart.
        formatting: string.format() compatible expression.
        properties: dictionary with custom properties.
    """
    description = self.table_description(labels)
    fields = description.keys()
    rows = (self.values(*fields) if formatting is None
            else self.formatting(fields, formatting))
    table = gviz_api.DataTable(description, rows, properties)
    return table.ToJSCode(name=name, columns_order=order)
def get_status_chart(request, harvester_id, screen_name):
    """Chart feed: number of collected Twitter statuses per day for a user.

    Returns gviz JSON with one row per calendar day between the user's first
    and last status (a single "today" row when there are none).
    """
    user = get_list_or_404(TWUser, screen_name=screen_name)[0]
    statuses = TWStatus.objects.filter(user=user)
    total = statuses.count()
    ordered = statuses.order_by(u"created_at")
    now = dt.datetime.now()
    first_day = ordered[0].created_at if total != 0 else now
    last_day = ordered[total - 1].created_at if total != 0 else now
    span = (last_day - first_day).days + 1
    description = {
        "date_val": ("date", "Date"),
        "status_count": ("number", "Status count"),
    }
    data = []
    for offset in range(0, span):
        day = first_day + dt.timedelta(days=offset)
        daily = statuses.filter(created_at__year=day.year,
                                created_at__month=day.month,
                                created_at__day=day.day).count()
        data.append({"date_val": day, "status_count": daily})
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(data)
    logger.debug(data_table.ToJSon())
    return HttpResponse(data_table.ToJSon(),
                        mimetype='application/javascript')
def to_csv(self, order=None, labels=None, formatting=None, properties=None,
           separator=","):
    """Render the QuerySet as a CSV string.

    Does _not_ return a new QuerySet. Output is encoded in UTF-8 because
    the Python "csv" module can't handle Unicode properly according to its
    documentation.

    kwargs:
        separator: character used as separator; defaults to comma (,).
        order: iterable of field names giving the column order; any field
            not listed is discarded.
        labels: {'field': 'label'} mapping from model field name to the
            label desired on the chart.
        formatting: string.format() compatible expression.
        properties: dictionary with custom properties.
    """
    description = self.table_description(labels)
    fields = description.keys()
    rows = (self.values(*fields) if formatting is None
            else self.formatting(fields, formatting))
    table = gviz_api.DataTable(description, rows, properties)
    return table.ToCsv(columns_order=order, separator=separator)
def convert_uv_data(self, data):
    """Convert raw UV 'GraphData' into a gviz JSON DataTable string.

    Each row carries the forecast and measured UV series plus the banded
    threshold series (low..extreme) with tooltip/annotation role columns.
    The most recent non-None measurement gets an annotation of the form
    "<value> @ <HH:MM>".
    """
    # Column description; the dict-in-tuple third members assign gviz
    # column roles (domain/annotation/annotationText/tooltip).
    description = {
        "date": ("datetime", "Time", {
            "role": "domain"
        }),
        "forecast": ("number", "Forecast"),
        "forecast_annotation": ('string', '', {
            "role": "annotation"
        }),
        "forecast_annotation_text": ('string', '', {
            "role": "annotationText"
        }),
        "measured": ("number", "Measured"),
        "measured_annotation": ('string', '', {
            "role": "annotation"
        }),
        "measured_annotation_text": ('string', '', {
            "role": "annotationText"
        }),
        "low": ("number", "Low"),
        "low_tooltip": ("string", "Low", {
            "role": "tooltip"
        }),
        "medium": ("number", "Medium"),
        "medium_tooltip": ("string", "Medium", {
            "role": "tooltip"
        }),
        "high": ("number", "High"),
        "high_tooltip": ("string", "High", {
            "role": "tooltip"
        }),
        "very_high": ("number", "Very High"),
        "very_high_tooltip": ("string", "Very High", {
            "role": "tooltip"
        }),
        "extreme": ("number", "Extreme"),
        "extreme_tooltip": ("string", "Extreme", {
            "role": "tooltip"
        }),
    }
    data = list(map(DataConverter.convert_uv_value, data['GraphData']))
    # Annotate the latest (last non-None) measurement with value and time.
    for e in reversed(data):
        if e['measured'] is not None:
            e['measured_annotation'] = "%.2f @ %s" % (
                e['measured'], datetime.strftime(e['date'], '%H:%M'))
            break
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(data)
    return data_table.ToJSon(columns_order=[
        'date', 'low', 'low_tooltip', 'medium', 'medium_tooltip', 'high',
        'high_tooltip', 'very_high', 'very_high_tooltip', 'extreme',
        'extreme_tooltip', 'forecast', 'forecast_annotation',
        "forecast_annotation_text", 'measured', 'measured_annotation',
        'measured_annotation_text'
    ])
def get_context_data(self, **kwargs):
    """Extend the detail-view context with a gviz JSON time series.

    Refreshes the commodity's records first when they are stale, then
    serialises all (date, value) records as a date-ordered DataTable under
    the 'dataset' context key.
    """
    context = super(BaseCommodityDetailView, self).get_context_data(**kwargs)
    commodity = self.object
    # If required, update the dataset before charting it.
    if commodity.needs_update():
        status = commodity.update_data()
        # print ("Update status: " + status)
    # Fetch the records from the database.
    records = BaseCommodityRecords.objects.filter(base_commodity=commodity)
    # Records hold plain dates; gviz wants datetimes, so promote to midnight.
    rows = [{'date': datetime.combine(record.date, datetime.min.time()),
             'value': record.value}
            for record in records]
    # Load into gviz_api.DataTable and serialise to JSON.
    description = {'date': ('datetime', 'Date'), 'value': ('number', 'Value')}
    data_table = gviz_api.DataTable(description)
    data_table.LoadData(rows)
    context['dataset'] = data_table.ToJSon(columns_order=("date", "value"),
                                           order_by="date")
    return context
def history_datasource(request, track_id):
    """Gviz datasource view: ranking-position history for one track.

    Positions are negated before charting (as in the original — presumably
    so better ranks plot higher); unranked checks are substituted with the
    configured maximum so every check still yields a data point.
    """
    track = get_object_or_404(Track, user=request.user, local_id=track_id)
    description = {
        'created': ('datetime', 'Date Checked'),
        'position': ('number', 'Ranking Position'),
    }
    data_table = gviz_api.DataTable(description)
    # checks is a list of dictionaries.
    checks = track.check_set.filter(is_done=True).values('created',
                                                         'position')
    for check in checks:
        if check['position'] is None:
            check['position'] = settings.MAX_SEARCH_RESULTS
        check['position'] = -check['position']
    data_table.AppendData(checks)
    # tqx carries the client's datasource parameters; ToResponse honours the
    # output format requested in its 'out' field.
    tqx = request.GET.get('tqx', '')
    return HttpResponse(data_table.ToResponse(tqx=tqx))
def test_overview_page_analysis_simple(self):
    """Checks the gviz table args built from a mock overview-page analysis.

    Verifies row/column counts, raw row values, and the CSV rendering of
    the resulting DataTable against the mock tf-op fixture.
    """
    analysis = self.create_mock_overview_page_analysis()
    (table_description, data, custom_properties) = \
        overview_page_proto_to_gviz.get_overview_page_analysis_table_args(
            analysis)
    data_table = gviz_api.DataTable(table_description, data,
                                    custom_properties)
    # Data is a list of 3 rows.
    self.assertLen(data, 3)
    self.assertEqual(3, data_table.NumberOfRows(),
                     "Simple table has 3 rows.")
    # Check the number of columns in table descriptor and data table.
    self.assertLen(table_description, len(list(self.mock_tf_op)))
    self.assertLen(data_table.columns, len(list(self.mock_tf_op)))
    # Prepare expectation to check against: CSV yields string values.
    mock_csv_tf_op = [str(x) for x in list(self.mock_tf_op)]
    # Check data against mock values.
    for row in data:
        self.assertEqual(list(self.mock_tf_op), row)
    # Check DataTable against mock values.
    # Only way to access DataTable contents is by CSV.
    csv_file = io.StringIO(data_table.ToCsv())
    reader = csv.reader(csv_file)
    for (rr, row_values) in enumerate(reader):
        if rr == 0:
            # First CSV row is the header.
            self.check_header_row(data, table_description, row_values)
        else:
            self.check_row_types(data, table_description, row_values, rr)
            self.assertEqual(mock_csv_tf_op, row_values)
def to_json_response(self, order=None, labels=None, formatting=None,
                     properties=None, req_id=0,
                     handler="google.visualization.Query.setResponse"):
    """Render the QuerySet as a complete gviz JSON response string.

    Does _not_ return a new QuerySet. The returned string answers a Google
    Visualization API query and can be sent to the client as-is, delivering
    a data table to a visualization hosted on another page.

    kwargs:
        req_id: response id, as retrieved by the request.
        handler: the response handler, as retrieved by the request.
        order: iterable of field names giving the column order; any field
            not listed is discarded.
        labels: {'field': 'label'} mapping from model field name to the
            label desired on the chart.
        formatting: string.format() compatible expression.
        properties: dictionary with custom properties.
    """
    description = self.table_description(labels)
    fields = description.keys()
    rows = (self.values(*fields) if formatting is None
            else self.formatting(fields, formatting))
    table = gviz_api.DataTable(description, rows, properties)
    return table.ToJSonResponse(columns_order=order, req_id=req_id,
                                response_handler=handler)
def get_search_status_chart(request, harvester_id, search_term):
    """Chart feed: statuses collected per day for a Twitter search term.

    Returns gviz JSON with one row per calendar day between the first and
    last collected status. On failure the error is logged and the view
    returns None (keeping the original best-effort behaviour).
    """
    try:
        search = get_list_or_404(TWSearch, term=search_term)[0]
        count = search.status_list.count()
        fromto = search.status_list.order_by(u"created_at")
        base = fromto[0].created_at if count != 0 else dt.datetime.now()
        # Some statuses may lack created_at; advance to the first dated one.
        # FIX: the original loop re-tested fromto[0].created_at on every
        # iteration instead of the value just assigned to ``base``, so it
        # always walked the whole list and could finish with base = None.
        order = 1
        while base is None and order < count:
            base = fromto[order].created_at
            order += 1
        to = fromto[count - 1].created_at if count != 0 else dt.datetime.now()
        logger.debug("to: %s" % to)
        logger.debug("base: %s" % base)
        days = (to - base).days + 1
        dateList = [base + dt.timedelta(days=x) for x in range(0, days)]
        description = {
            "date_val": ("date", "Date"),
            "status_count": ("number", "Status count"),
        }
        data = []
        for date in dateList:
            c = search.status_list.filter(
                created_at__year=date.year,
                created_at__month=date.month,
                created_at__day=date.day).count()
            data.append({"date_val": date, "status_count": c})
        data_table = gviz_api.DataTable(description)
        data_table.LoadData(data)
        logger.debug(data_table.ToJSon())
        return HttpResponse(data_table.ToJSon(),
                            mimetype='application/javascript')
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; chart rendering itself stays best-effort.
        dLogger.exception(
            "AN ERROR HAS OCCURED WHILE RENDERING STATUS CHART: SEARCH_TERM: %s"
            % search_term)
def main(): form = cgi.FieldStorage() # CGI variables: # key = project, lab, data, freeze, status keyField = form.getvalue('key') if not keyField: keyField = 'project' switch = {'project':0, 'lab':1, 'data':2, 'freeze':5, 'status':8 } titleTag = {'project':"Project", 'lab':"Lab", 'data':"Data_Type", 'freeze':"Freeze", 'status':"Status" } if keyField not in switch: keyField = 'project' keyIndex = switch[keyField] reportFile, currentDate = encodeReportLib.getRecentReport() matrix, labels = processReportFile(reportFile, keyIndex) # Headers for the columns in the data matrix description = [("Time", "string")] for label in labels: tmpDesc = [(label, 'number')] description += tmpDesc # Create the data table data_table = gviz_api.DataTable(description) data_table.LoadData(matrix) # Convert to JavaScript code jscode = data_table.ToJSCode("jscode_data") # Set variables for HTML output template_vars = {} template_vars['jscode'] = jscode template_vars['dateStamp'] = encodeReportLib.dateIntToDateStr(currentDate) template_vars['title'] = "ENCODE Amount of Time Until Release by %s" % titleTag[keyField] template_vars['packageName'] = 'columnchart' template_vars['visClass'] = 'ColumnChart' template_vars['style'] = "" # Set the chart specific configuration options chart_config = {} chart_config['isStacked'] = 'true' chart_config['legendFontSize'] = 16 chart_config['width'] = 1200 chart_config['height'] = 480 chart_config['legend'] = 'bottom' chart_config['titleX'] = '# of Weeks' chart_config['titleY'] = '# of Submissions' chart_config['tooltipFontSize'] = 16 if (keyField == 'freeze'): chart_config['colors'] = encodeReportLib.getColorArray(len(labels)) template_vars['chart_config'] = json.dumps(chart_config) encodeReportLib.renderHtml(template_vars, 1, 0) return
def checkouts(request): print 'in get_stats' rides = Ride.objects.all() description = [ ('date', 'date','Checkout Date'), ('checkouts','number','Checkouts'), ('cum','number','Cumulative'), ] columns_order = ('date', 'checkouts', 'cum') order_by = columns_order[0] data = Counter([r.checkout_time.date() for r in rides]).items() data = sorted(data, key=operator.itemgetter(0), reverse=False) print data n=0 longdata = [] for d in data: n+=d[1] longdata.append(d+(n,)) print longdata # Loading it into gviz_api.DataTable data_table = gviz_api.DataTable(description) data_table.LoadData(longdata) json = data_table.ToJSon(columns_order=columns_order, order_by=order_by) return HttpResponse(json, content_type="application/json")
def sanitation():
    """Fetch the sanitation-access dataset from the Durban CKAN datastore
    and return it as a google-visualization JSON string."""
    # NOTE(review): API key and resource id are hard-coded in source;
    # consider moving them to configuration.
    API = '989e1c94-6dfd-4bc1-af04-9c64e59d96b3'
    ID = '92299119-da69-4ebd-a366-42006fe2b737'
    url = 'http://data.opendata.durban/api/action/datastore_search?resource_id=%s&limit=500' % ID
    req = urllib2.Request(url)
    req.add_header('Authorization', API)
    resp = urllib2.urlopen(req)
    content = loads(resp.read())
    # Flatten the CKAN "records" list-of-dicts into a DataFrame.
    dataFrame = json_normalize(content['result']['records'])
    # Coerce unicode columns to str, then let pandas infer numeric columns.
    # (pd.lib.infer_dtype / convert_objects are legacy-pandas APIs.)
    types = dataFrame.apply(lambda x: pd.lib.infer_dtype(x.values))
    for col in types[types == 'unicode'].index:
        dataFrame[col] = dataFrame[col].astype(str)
    dataFrame = dataFrame.convert_objects(convert_numeric=True)
    # Schema is positional against dataFrame.values — presumably matching the
    # (alphabetical) column order json_normalize produces; TODO confirm.
    schema = [('Access to sanitation', 'number'),
              ('Bucket Systems', 'number'), ('City', 'string'),
              ('Flush toilet', 'number'), ('No toilet', 'number'),
              ('Pit toilets', 'number'), ('Total households', 'number'),
              ('Ventilation Improved Pits', 'number'), ('Year', 'number'),
              ('ID', 'number')]
    data_table = gviz_api.DataTable(schema)
    data_table.LoadData(dataFrame.values)
    json = data_table.ToJSon(columns_order=('City', 'Year',
                                            'Access to sanitation',
                                            'Bucket Systems', 'Flush toilet',
                                            'No toilet', 'Pit toilets',
                                            'Ventilation Improved Pits',
                                            'Total households'))
    return json