def draw_graph(request):
    """Execute the stored report SQL and render an OpenFlashChart chart.

    The client first stores its query parameters as a ``Flashurl`` row; this
    view looks that row up by ``tid`` (GET), rebuilds the parameter dict,
    generates and runs the SQL, and returns the chart JSON produced by
    ``Chart.create()``.

    Raises Http404 when the referenced ``View`` cannot be resolved.
    """
    if request.method == 'GET':
        t = request.GET.copy()
        tid = int(t['tid'])
        u = Flashurl.objects.get(id=tid)
        # SECURITY: eval() of a database-stored string -- anyone able to
        # write Flashurl.url can execute arbitrary code.  The stored value
        # should be serialized with json / ast.literal_eval instead.
        url = eval(u.url)
        data = {}
        for key, value in url.items():
            if key:
                try:
                    data[key] = value.decode("utf-8")
                except Exception:
                    # non-string / already-decoded values are kept as-is
                    data[key] = value
        # the stored url row is single-use
        u.delete()
        try:
            user_id = request.user.id
            view_id = data.get('view_id')
            data.pop('view_id')
            v = View.objects.get(id=view_id)
        except Exception:
            raise Http404
        # default chart type is bar; renamed from `type` to avoid shadowing
        # the builtin
        chart_type = data.get('type') or "bar"
        indicator = data['indicator'].split(",")
        data.pop('indicator')
        x_axis = BAR_FORMAT_FIELD if chart_type == "bar" else DATE_FORMAT_FIELD
        data.pop('type')
        view_obj = ViewObj(v, request)
        u_d = get_user_dimension(user_id, view_id)
        u_dimension = u_d.split(",") + NON_NUMBER_FIELD
        sql = SQLGenerator(data, view_obj, u_d, request, x_axis).get_sql().encode('utf-8')
        res = execute_sql(sql)

        chart = Chart()
        chart.title.text = v.cname
        chart.title.style = "{font-size: 17px; font-family: Verdana; text-align: center;}"
        headers = view_obj.get_headers()
        header_name = [i['name']['value'] for i in headers]
        header_cname = [i['cname']['value'] for i in headers]

        # generate x labels: map each configured label column to its position
        # in the result rows (-1 when the column is absent from the view)
        indexes = []
        label_keys = X_LABELS.get(chart_type, {})
        for name in label_keys:
            try:
                index = header_name.index(name)
            except ValueError:
                index = -1
            indexes.append(index)
        is_day_report = view_obj.get_body()['time_type']['name'] == 'day'
        labels = []
        for line in res:
            label = []
            for i, line_index in enumerate(indexes):
                if line_index >= 0:
                    value = line[line_index]
                else:
                    value = ''
                label.append(value)
                # for time-series charts the trailing column(s) are dates and
                # get collapsed into one formatted label component
                if i == len(indexes) - 1 and chart_type != "bar":
                    if is_day_report:
                        label.pop()
                        label[-1] = format_date(label[-1])
                    else:
                        end_date = label.pop()
                        begin_date = label.pop()
                        date_list = []
                        if begin_date:
                            date_list.append(format_date(begin_date))
                        if end_date:
                            date_list.append(format_date(end_date))
                        label.append("~".join(date_list))
            labels.append("\n".join(label))
        if labels == ['\n']:
            labels = ['全国\n'.decode("utf-8")]
        chart.x_axis = {'labels': {"labels": labels, "size": 12}}

        # add chart elements one by one: every non-dimension column that is
        # not excluded via `indicator` becomes one chart series
        graph_els = [x for x in header_name if x not in u_dimension]
        els = []
        max_values = []
        if res:
            for i, el in enumerate(graph_els):
                if el not in indicator:
                    index = header_name.index(el)
                    try:
                        values = [int(line[index]) for line in res]
                        max_values.append(max(values))
                    except Exception:
                        # non-numeric column: chart the raw values
                        values = [line[index] for line in res]
                    graph = Chart()
                    graph.type = chart_type
                    graph.values = values
                    graph.text = header_cname[index]
                    graph.alpha = 0.5
                    graph.fontsize = 13
                    graph.tip = '#key#<br>[#x_label#]:#val#'
                    graph.colour = CHART_COLOR[i]
                    els.append(graph)
            chart.elements = els
        # BUGFIX: max() raises ValueError on an empty sequence; max_values is
        # empty when no series was numeric, so it must be guarded as well
        if res and max_values:
            max_value = max(max_values)
            step = max_value / 10
            chart.y_axis = {'max': max_value, 'min': 0, 'steps': step, 'labels': {"size": 11}}
        chart_c = chart.create()
        # best-effort access log; never let logging break the response
        try:
            log_id = "%s%05d" % (datetime.datetime.now().strftime("%Y%m%d%H%M%S"), random.randint(0, 10000))
            data_log = "%s-%s" % (v.cname, v.get_time_type_display())
            log = "%s|%s,%s" % ("draw_graph", chart_type.encode("utf-8"), data_log.encode("utf-8"))
            tolog(request, log, log_id)
        except Exception:
            pass
        return HttpResponse(chart_c)
def show_table(request):
    """Execute the report SQL and return the rendered result table as JSON.

    Runs the paged data query and the column-sum query concurrently on two
    worker threads, renders ``results.html``, and returns
    ``{"container": <div id>, "content": <html>}`` so the client can replace
    the container's contents.

    Raises Http404 when the referenced ``View`` cannot be resolved; any
    other failure is answered with the generic error-table fragment
    (``showtable_500``).
    """
    user_id = request.user.id
    data = request.POST.copy()
    data.pop('timestamp')
    # number of provinces the user filtered on (0 = no province filter);
    # `in` replaces the Python-2-only dict.has_key()
    provlist = len(data['provname'].split(",")) if 'provname' in data else 0
    try:
        view_id = data.get('view_id')
        data.pop('view_id')
        v = View.objects.get(id=view_id)
    except Exception:
        raise Http404
    # get container div, send it back to the client and put the table in it
    try:
        page = data['current_page']
        data.pop('current_page')
        page = int(page)
    except Exception:
        # missing/unparsable page number -> first page
        page = 1
    container_id = data.get('container')
    data.pop('container')
    view_obj = ViewObj(v, request)
    view_id = view_obj.obj['view_id']
    t = loader.get_template('results.html')
    try:
        v_query = view_obj.get_query()
        v_query = query_session(v_query)
        u_d = get_user_dimension(user_id, view_id)
        # build the SQL statements
        sql_sum = SQLGenerator(data, view_obj, u_d, request).get_sum().encode('utf-8')  # column sums
        object_sql = SQLGenerator(data, view_obj, u_d, request)
        sql = object_sql.get_sql().encode('utf-8')  # main result query
        sql_sum_column = object_sql.sum_column      # names of the summed columns
        # paging: page 1 -> limit 0,N ; page 2 -> limit N,N ; i.e.
        # offset = (page - 1) * MAX_DATA.  Fetch one extra row so the
        # paginator can tell whether a next page exists.
        sql_limit = "%s limit %s,%s" % (sql, (page - 1) * MAX_DATA, MAX_DATA + 1)
        # run the sum query and the limited data query on two threads
        t_sum = Mythread(args=(sql_sum,))
        t_sql = Mythread(args=(sql_limit,))
        t_sum.start()
        t_sql.start()
        t_sum.join()
        t_sql.join()
        try:
            sql_column_sum = t_sum.v
            sum_data = dict(zip(sql_sum_column, sql_column_sum[0]))
        except Exception:
            # dimension-only reports have no metrics, so the sum query fails
            sum_data = None
        res = t_sql.v
        # paging
        contacts = MyPaginator(len(res), page, MAX_DATA)
        res = res[:MAX_DATA]
        u_dimension = u_d.split(",") if len(u_d) > 0 else []
        res = format_table(res, view_obj, u_dimension, sum_data)
        head, res = (res[0], res[1:]) if res else ("", [])
        tips, u_session = "", True
        if country_session(u_d) and provlist < HIGHEST_AUTHORITY and v_query:
            tips = "如果要看分省/市数据,请在维度设置中勾选省/市<p>如果查看全国数据,请将省条件全选"
            u_session = False
        # best-effort query log; never let logging break the response
        try:
            log_id = "%s%05d" % (datetime.datetime.now().strftime("%Y%m%d%H%M%S"), random.randint(0, 10000))
            sql_limit_log = "%s|Query_time: %s, sql: %s" % ("show_table", t_sql.time, sql_limit.encode("utf-8"))
            tolog(request, sql_limit_log, log_id)
            sql_sum_log = "%s|Query_time: %s, sql: %s" % ("show_table", t_sum.time, sql_sum.encode("utf-8"))
            tolog(request, sql_sum_log, log_id)
        except Exception:
            pass
        html = t.render(Context({
            'view_id': view_id,
            'res': res,
            'contacts': contacts,
            'u_session': u_session,
            'tips': tips,
            'head': head,
            'ud': u_dimension,
            'container_id': container_id,
            'headers': view_obj.get_headers(),
            'table_name': view_obj.get_body()['dataset'].cname,
        }))
        json_text = simplejson.dumps({'container': container_id, 'content': html})
        return HttpResponse(json_text)
    except Exception:
        # any failure above degrades to the generic error table
        json_text = showtable_500(t, container_id, view_obj)
        return HttpResponse(json_text)