def default(self, **kw):
    #pull data on this view from DB
    conn = MySQLdb.connect(configuration.DB_HOSTNAME, configuration.DB_USERNAME,
                           configuration.DB_PASSWORD, configuration.DB_NAME,
                           use_unicode=True)
    curs = conn.cursor()
    curs.execute("SELECT title,xdata,ydata,xlabel,ylabel,filters,y_xform,tbl FROM _wsviews WHERE url = %s",
                 (self.url,))
    view_data = curs.fetchall()[0]
    log.debug("pie chart request, here is SQL data for this view:")
    log.debug(view_data)
    tbl = view_data[7]
    #fetch the relevant filters for this template and their possible values
    available_filters = view_data[5].split(',')
    fdata = dict()
    for filter in available_filters:
        curs.execute("SELECT DISTINCT " + filter + " FROM " + tbl)
        #curs.fetchall() returns a list of lists, we convert this to a plain list for ease of handling
        raw_fdata = curs.fetchall()
        fdata[filter] = []
        for i in raw_fdata:
            fdata[filter].append(i[0])
    #print fdata
    filters = ""
    used_filters = helperlib.intersect(kw.keys(), available_filters)
    ufilters_vals = dict()
    startdate = ""
    enddate = ""
    for filter in used_filters:
        kw[filter] = helperlib.listfix(kw[filter])
        filter_vals = helperlib.intersect(kw[filter], fdata[filter])
        filters = helperlib.fconstruct(filters, filter, filter_vals)
        ufilters_vals[filter] = filter_vals
    if 'startdate' in kw and 'enddate' in kw and helperlib.is_valid_date(kw['startdate']) and helperlib.is_valid_date(kw['enddate']):
        filters = helperlib.rangeconstruct(filters, "timestamp", kw['startdate'], kw['enddate'])
        used_filters.append("dates")
        ufilters_vals["dates"] = [kw['startdate'] + " to " + kw['enddate']]
        startdate = kw['startdate']
        enddate = kw['enddate']
    #get columns and column transformations for this view
    y_xforms = view_data[6].split(',')
    y_data = view_data[2].split(',')
    y_data_str = ""
    #the number of y columns and y transformations must be equal
    assert len(y_data) == len(y_xforms)
    for i in range(len(y_data)):
        y_data_str += y_xforms[i] + "(" + y_data[i] + "),"
    y_data_str = y_data_str[0:len(y_data_str) - 1]
    query = "SELECT %s,%s FROM %s %s GROUP BY %s" % (view_data[1], y_data_str, tbl, filters, view_data[1])
    log.debug("SQL query:")
    log.debug(query)
    results = helperlib.scaled_query(curs, tbl, query, 100, evaluators.count_eval)
    log.debug("query result:")
    log.debug(results)
    #generate JS datafields here because genshi templating can't emit JS...
    data = ""
    for i in range(0, len(results)):
        data += "data.setValue(" + str(i) + ",0,'" + results[i][0] + "');\n"
        data += "data.setValue(" + str(i) + ",1," + str(results[i][1]) + ");\n"
    return dict(title=view_data[0], xlabel=view_data[3], ylabel=view_data[4],
                piedata=results, data=data, filters=available_filters,
                used_filters=used_filters, ufilters_vals=ufilters_vals,
                fdata=fdata, startdate=startdate, enddate=enddate)
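
# NOTE: the controllers in this module build their WHERE clauses through
# helperlib, which is not shown here.  The two sketches below only illustrate
# what fconstruct()/rangeconstruct() plausibly do, inferred from their call
# sites (the accumulated `filters` string starts empty and is later
# concatenated straight into "FROM <tbl> <filters> GROUP BY ...", so it must
# carry its own leading WHERE).  The real helpers may differ.
def _fconstruct_sketch(filters, column, values):
    """Append "column IN (...)" to an accumulating WHERE clause (sketch only)."""
    if not values:
        return filters
    quoted = ",".join("'%s'" % v for v in values)
    clause = "%s IN (%s)" % (column, quoted)
    return filters + " AND " + clause if filters else "WHERE " + clause


def _rangeconstruct_sketch(filters, column, low, high):
    """Append "column BETWEEN low AND high" to the WHERE clause (sketch only)."""
    clause = "%s BETWEEN '%s' AND '%s'" % (column, low, high)
    return filters + " AND " + clause if filters else "WHERE " + clause

# e.g. _fconstruct_sketch("", "game_type", ["sp"]) -> "WHERE game_type IN ('sp')"
#      _rangeconstruct_sketch("WHERE game_type IN ('sp')", "timestamp", "2009-01-01", "2009-02-01")
#      -> "WHERE game_type IN ('sp') AND timestamp BETWEEN '2009-01-01' AND '2009-02-01'"
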
def default(self, **kw):
    #pull data on this view from DB
    conn = MySQLdb.connect(configuration.DB_HOSTNAME, configuration.DB_USERNAME,
                           configuration.DB_PASSWORD, configuration.DB_NAME,
                           use_unicode=True)
    curs = conn.cursor()
    curs.execute("SELECT title,xdata,ydata,xlabel,ylabel,filters,y_xform FROM _wsviews WHERE url = %s",
                 (self.url,))
    view_data = curs.fetchall()[0]
    log.debug("pie chart request, here is SQL data for this view:")
    log.debug(view_data)
    #fetch the relevant filters for this template and their possible values
    available_filters = view_data[5].split(',')
    fdata = dict()
    for filter in available_filters:
        curs.execute("SELECT DISTINCT " + filter + " FROM GAMES_SP")
        #curs.fetchall() returns a list of lists, we convert this to a plain list for ease of handling
        raw_fdata = curs.fetchall()
        fdata[filter] = []
        for i in raw_fdata:
            fdata[filter].append(i[0])
    #print fdata
    filters = ""
    used_filters = helperlib.intersect(kw.keys(), available_filters)
    ufilters_vals = dict()
    for filter in used_filters:
        kw[filter] = helperlib.listfix(kw[filter])
        filter_vals = helperlib.intersect(kw[filter], fdata[filter])
        filters = helperlib.fconstruct(filters, filter, filter_vals)
        ufilters_vals[filter] = filter_vals
    if 'startdate' in kw and 'enddate' in kw and helperlib.isvalid(kw['startdate']) and helperlib.isvalid(kw['enddate']):
        filters = helperlib.dateconstruct(filters, kw['startdate'], kw['enddate'])
        used_filters.append("dates")
        ufilters_vals["dates"] = [kw['startdate'] + "-" + kw['enddate']]
    #get columns and column transformations for this view
    y_xforms = view_data[6].split(',')
    y_data = view_data[2].split(',')
    y_data_str = ""
    #the number of y columns and y transformations must be equal
    assert len(y_data) == len(y_xforms)
    for i in range(len(y_data)):
        y_data_str += y_xforms[i] + "(" + y_data[i] + "),"
    y_data_str = y_data_str[0:len(y_data_str) - 1]
    query = "SELECT " + view_data[1] + "," + y_data_str + " FROM GAMES_SP " + filters + " GROUP BY " + view_data[1]
    log.debug("SQL query:")
    log.debug(query)
    results = helperlib.scaled_query(curs, query, 100, evaluators.count_eval)
    log.debug("query result:")
    log.debug(results)
    #generate JS datafields here because genshi templating can't emit JS...
    data = ""
    for i in range(0, len(results)):
        data += "data.setValue(" + str(i) + ",0,'" + results[i][0] + "');\n"
        data += "data.setValue(" + str(i) + ",1," + str(results[i][1]) + ");\n"
    return dict(title=view_data[0], xlabel=view_data[3], ylabel=view_data[4],
                piedata=results, data=data, filters=available_filters,
                used_filters=used_filters, ufilters_vals=ufilters_vals, fdata=fdata)
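
# intersect() and listfix() from helperlib are likewise not defined in this
# file.  Sketches of the assumed behaviour, inferred from how they are called:
# kw values arrive from the request as either a single string or a list, and
# intersect() is used to whitelist request parameters against the known
# filters.  The real helpers may differ.
def _intersect_sketch(a, b):
    """Items of a that also appear in b, preserving a's order (sketch only)."""
    return [x for x in a if x in b]


def _listfix_sketch(value):
    """Wrap a lone value in a list so callers can always iterate (sketch only)."""
    if isinstance(value, list):
        return value
    return [value]

# e.g. _intersect_sketch(['faction', 'era', 'map'], ['era']) -> ['era']
#      _listfix_sketch('sp') -> ['sp'];  _listfix_sketch(['sp', 'mp']) -> ['sp', 'mp']
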
def default(self, **kw):
    conn = MySQLdb.connect(configuration.DB_HOSTNAME, configuration.DB_USERNAME,
                           configuration.DB_PASSWORD, configuration.DB_NAME,
                           use_unicode=True)
    curs = conn.cursor()
    curs.execute("SELECT title,xdata,ydata,xlabel,ylabel,filters,y_xform,tbl FROM _wsviews WHERE url = %s",
                 (self.url,))
    view_data = curs.fetchall()[0]
    log.debug("kill map request, here is SQL data for this view:")
    log.debug(view_data)
    games_tbl = view_data[7]
    #fetch the relevant filters for this template and their possible values
    available_filters = view_data[5].split(',')
    #@TODO: right now the filters for the KILLMAPS table are overloaded into y_xform; a better solution is needed in the future
    available_filters_map = view_data[6].split(',')
    fdata = dict()
    for filter in available_filters:
        curs.execute("SELECT DISTINCT " + filter + " FROM " + games_tbl)
        #curs.fetchall() returns a list of lists, we convert this to a plain list for ease of handling
        raw_fdata = curs.fetchall()
        fdata[filter] = []
        for i in raw_fdata:
            fdata[filter].append(i[0])
    for filter in available_filters_map:
        curs.execute("SELECT DISTINCT " + filter + " FROM KILLMAP WHERE game_type='" + view_data[2] + "'")
        #curs.fetchall() returns a list of lists, we convert this to a plain list for ease of handling
        raw_fdata = curs.fetchall()
        fdata[filter] = []
        for i in raw_fdata:
            fdata[filter].append(i[0])
    #print fdata
    filters = ""
    filters_map = ""
    used_filters = helperlib.intersect(kw.keys(), available_filters)
    used_filters_map = helperlib.intersect(kw.keys(), available_filters_map)
    ufilters_vals = dict()
    used_filters += used_filters_map
    for filter in used_filters:
        kw[filter] = helperlib.listfix(kw[filter])
        filter_vals = helperlib.intersect(kw[filter], fdata[filter])
        filters = helperlib.fconstruct(filters, filter, filter_vals)
        ufilters_vals[filter] = filter_vals
    startdate = ""
    enddate = ""
    if 'startdate' in kw and 'enddate' in kw and helperlib.is_valid_date(kw['startdate']) and helperlib.is_valid_date(kw['enddate']):
        filters = helperlib.rangeconstruct(filters, "timestamp", kw['startdate'], kw['enddate'])
        used_filters.append("dates")
        ufilters_vals["dates"] = [kw['startdate'] + " to " + kw['enddate']]
        startdate = kw['startdate']
        enddate = kw['enddate']
    minkillerlev = ""
    maxkillerlev = ""
    minkilledlev = ""
    maxkilledlev = ""
    if 'minkillerlev' in kw and 'maxkillerlev' in kw and helperlib.is_valid_level(kw['minkillerlev']) and helperlib.is_valid_level(kw['maxkillerlev']):
        filters = helperlib.rangeconstruct(filters, "killer_level", kw['minkillerlev'], kw['maxkillerlev'])
        minkillerlev = kw['minkillerlev']
        maxkillerlev = kw['maxkillerlev']
        used_filters.append("killer level range")
        ufilters_vals["killer level range"] = [kw['minkillerlev'] + " to " + kw['maxkillerlev']]
    if 'minkilledlev' in kw and 'maxkilledlev' in kw and helperlib.is_valid_level(kw['minkilledlev']) and helperlib.is_valid_level(kw['maxkilledlev']):
        filters = helperlib.rangeconstruct(filters, "killed_level", kw['minkilledlev'], kw['maxkilledlev'])
        minkilledlev = kw['minkilledlev']
        maxkilledlev = kw['maxkilledlev']
        used_filters.append("killed level range")
        ufilters_vals["killed level range"] = [kw['minkilledlev'] + " to " + kw['maxkilledlev']]
    curs.execute("SELECT DISTINCT scenario_name,map_id FROM `KILLMAP` WHERE game_type='" + view_data[2] + "' GROUP BY map_id")
    maps = curs.fetchall()
    cur_map = kw.setdefault("map", "mWF1l_gRtBBQvkNoqpYBPY3z2idTMM493ck_YjEk_WU=")
    #check for input sanity, we will be fetching map tiles based on this name
    if not (cur_map.count("/") == 0 and cur_map.count(".") == 0 and len(cur_map) == 44):
        cur_map = ""
    cur_map_name = "None"
    for map in maps:
        if map[1] == cur_map:
            cur_map_name = map[0]
            break
    m_dimensions = ()
    if cur_map != "":
        m_dimensions = helperlib.get_map_dimensions(configuration.MAP_DIR + cur_map)
    #compute kill frequency per hex
    filters = helperlib.fconstruct(filters, "map_id", [cur_map])
    query = "SELECT position,COUNT(position) FROM `KILLMAP` INNER JOIN `" + games_tbl + "` USING (game_id) " + filters + " GROUP BY position ORDER BY COUNT(position) DESC LIMIT 0,100"
    curs.execute(query)
    hexdata = curs.fetchall()
    conn.close()
    #first item is 'hottest' -> red (255,0,0), last item is 'coldest' -> blue (0,0,255)
    #linearly interpolate the hotness of all values in between
    grid_colors = ""
    if len(hexdata) != 0:
        max = hexdata[0][1]
        min = hexdata[len(hexdata) - 1][1]
        #if there are very few results, the minimum frequency == maximum frequency -> divide by 0
        if max == min:
            max += 1
        for hex in hexdata:
            v = hex[1]
            red_val = 255.0 * float(v - min) / float(max - min)
            blue_val = 255.0 * (1 - (float(v - min) / float(max - min)))
            hex_color = "%.2x00%.2x" % (int(red_val), int(blue_val))
            #construct the javascript dictionary that will store hex->color mappings
            grid_colors += '"%s":"%s",' % (hex[0], hex_color)
        grid_colors = grid_colors[:-1]
    return dict(maps=maps, cur_map=cur_map, dimensions=m_dimensions,
                grid_colors=grid_colors, startdate=startdate, enddate=enddate,
                minkillerlev=minkillerlev, maxkillerlev=maxkillerlev,
                minkilledlev=minkilledlev, maxkilledlev=maxkilledlev,
                used_filters=used_filters, ufilters_vals=ufilters_vals,
                filters=available_filters + available_filters_map,
                fdata=fdata, cur_map_name=cur_map_name, title=view_data[0])
def default(self, **kw):
    #pull data on this view from DB
    conn = MySQLdb.connect(configuration.DB_HOSTNAME, configuration.DB_USERNAME,
                           configuration.DB_PASSWORD, configuration.DB_NAME,
                           use_unicode=True)
    curs = conn.cursor()
    curs.execute("SELECT title,xdata,ydata,xlabel,ylabel,filters,y_xform,tbl FROM _wsviews WHERE url = %s",
                 (self.url,))
    view_data = curs.fetchall()[0]
    log.debug("line chart request, here is SQL data for this view:")
    log.debug(view_data)
    tbl = view_data[7]
    #fetch the relevant filters for this template and their possible values
    available_filters = view_data[5].split(',')
    fdata = dict()
    for filter in available_filters:
        curs.execute("SELECT DISTINCT " + filter + " FROM " + tbl)
        #curs.fetchall() returns a list of lists, we convert this to a plain list for ease of handling
        raw_fdata = curs.fetchall()
        fdata[filter] = []
        for i in raw_fdata:
            fdata[filter].append(i[0])
    #print fdata
    filters = ""
    used_filters = helperlib.intersect(kw.keys(), available_filters)
    ufilters_vals = dict()
    startdate = ""
    enddate = ""
    for filter in used_filters:
        kw[filter] = helperlib.listfix(kw[filter])
        filter_vals = helperlib.intersect(kw[filter], fdata[filter])
        filters = helperlib.fconstruct(filters, filter, filter_vals)
        ufilters_vals[filter] = filter_vals
    if 'startdate' in kw and 'enddate' in kw and helperlib.is_valid_date(kw['startdate']) and helperlib.is_valid_date(kw['enddate']):
        filters = helperlib.rangeconstruct(filters, "timestamp", kw['startdate'], kw['enddate'])
        used_filters.append("dates")
        ufilters_vals["dates"] = [kw['startdate'] + " to " + kw['enddate']]
        startdate = kw['startdate']
        enddate = kw['enddate']
    #calculate the number of days in the range
    daterange = TWOYEARS + 1
    date_sampling_filter = ""
    if "dates" in used_filters:
        daterange = helperlib.get_date_range(kw['startdate'], kw['enddate'])
    if daterange > TWOYEARS:
        date_sampling_filter = " DAYOFMONTH(timestamp) = '1' AND MONTH(timestamp) % 2 "
    elif daterange > SIXMONTHS:
        date_sampling_filter = " DAYOFMONTH(timestamp) = '1' "
    elif daterange > THREEMONTHS:
        date_sampling_filter = " DAYOFMONTH(timestamp) % '14' = 0 "
    elif daterange > ONEMONTH:
        date_sampling_filter = " DAYOFMONTH(timestamp) % '7' = 0 "
    elif daterange > TWOWEEKS:
        date_sampling_filter = " DAYOFMONTH(timestamp) % '3' = 0 "
    elif daterange > ONEWEEK:
        date_sampling_filter = " "
    #must do special routines for hourly sampling
    elif daterange > ONEDAY:
        pass
    else:
        pass
    filters = helperlib.fconstruct_helper(filters, date_sampling_filter)
    #get columns and column transformations for this view
    y_xforms = view_data[6].split(',')
    y_data = view_data[2].split(',')
    y_data_str = ""
    y_group_str = ""
    #number of y columns and y transformations must be equal
    assert len(y_data) == len(y_xforms)
    #we currently only support time as the x axis
    assert view_data[1] == "timestamp"
    for i in range(len(y_data)):
        y_data_str += y_xforms[i] + "(" + y_data[i] + ")," + y_data[i] + ","
        y_group_str += y_data[i] + ","
    y_data_str = y_data_str[0:len(y_data_str) - 1]
    y_group_str = y_group_str[0:len(y_group_str) - 1]
    query = "SELECT CAST(timestamp as DATE),%s FROM %s %s GROUP BY CAST(timestamp as DATE),%s" % (y_data_str, tbl, filters, y_group_str)
    log.debug("SQL query:")
    log.debug(query)
    results = helperlib.scaled_query(curs, tbl, query, 100, evaluators.simple_eval)
    #log.debug("query result:")
    #log.debug(results)
    data = LineGraphController.reformat_data(self, results)
    #generate JS datafields here because genshi templating can't emit JS...
    js_celldata = ""
    js_columnnames = ""
    dates = data[0].keys()
    dates.sort()
    for y in data[1]:
        js_columnnames += "data.addColumn('number', '" + y + "');\n"
    for i in range(len(dates)):
        js_celldata += "data.setCell(" + str(i) + ", 0, '" + str(dates[i]) + "');\n"
        for j in range(len(data[0][dates[i]])):
            js_celldata += "data.setCell(" + str(i) + ", " + str(j + 1) + ", " + str(data[0][dates[i]][j][1]) + ");\n"
    return dict(title=view_data[0], xlabel=view_data[3], js_celldata=js_celldata,
                filters=available_filters, used_filters=used_filters,
                js_columnnames=js_columnnames, ufilters_vals=ufilters_vals,
                fdata=fdata, numdates=len(dates), startdate=startdate,
                enddate=enddate)