import json

from flask import request

# Project-level helpers (get_city_news, readNews, readAcumNews, info2map,
# partition_time, select_groups, map_line_data, work_in_out, statistics_data),
# the db session, and the CityRepostNews model are assumed to be imported
# from the project's own modules.


def ajax_spatial_news():
    """Regional evolution page: fetch data from the database and return it
    for topic_spatial.html."""
    # stylenum selects which count to return: origin=1, forward=2, sum=3
    stylenum = int(request.args.get('style', ''))
    topic = request.args.get('topic', '')
    # Default query granularity is 3600 seconds
    during = int(request.args.get('pointInterval', 60 * 60))
    end_ts = int(request.args.get('end_ts', ''))
    start_ts = int(request.args.get('start_ts', ''))
    incremental = int(request.args.get('incremental', 0))
    pointnum = (end_ts - start_ts) // during  # number of time points

    # top_city_news = {city: [weibo1, weibo2, ...], ...}
    top_city_news = get_city_news(topic, start_ts, end_ts)

    spatial_dict = {}
    global_max_count = 0
    if incremental == 0:  # per-interval growth
        global_first_timestamp = end_ts
        global_first_city = ""
        for i in range(pointnum + 1):
            cur_ts = start_ts + during * i
            # Query per-province/city counts of the topic's stylenum metric
            # within this time window
            max_count, topic_spatial_info, first_item = readNews(
                stylenum, topic, cur_ts, during)
            if global_max_count < max_count:
                global_max_count = max_count
            try:
                if first_item['timestamp'] <= global_first_timestamp:
                    global_first_timestamp = first_item['timestamp']
                    global_first_city = ''
            except KeyError:
                pass
            # spatial_dict = {end_ts: map_data}
            spatial_dict[str(cur_ts)] = topic_spatial_info
    elif incremental == 1:  # cumulative totals
        # Query per-province/city counts of the topic's stylenum metric
        # over the whole time range
        global_max_count, spatial_dict, global_first_city = readAcumNews(
            stylenum, topic, start_ts, end_ts, during)

    map_data = info2map(spatial_dict, incremental)
    map_data['max_count'] = global_max_count
    map_data['first_city'] = global_first_city
    map_data['top_city_news'] = top_city_news
    return json.dumps(map_data)
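# Illustrative only: how a client might exercise the handler above, assuming
# it is registered at a hypothetical /ajax_spatial_news route on a Flask
# `app`. The parameter names mirror the request.args reads in the handler;
# the topic value and timestamps are placeholders.
def _demo_ajax_spatial_news(app):
    with app.test_client() as client:
        resp = client.get('/ajax_spatial_news', query_string={
            'style': 3,             # 1=origin, 2=forward, 3=sum
            'topic': 'example',     # hypothetical topic name
            'pointInterval': 3600,  # bucket size in seconds
            'start_ts': 1380000000,
            'end_ts': 1380086400,
            'incremental': 0,       # 0 = per-interval growth, 1 = cumulative
        })
        # Response carries max_count, first_city, top_city_news plus the
        # per-timestamp map data produced by info2map.
        return json.loads(resp.data)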
def in_out_map_news():
    topic = request.args.get('topic', '')
    # Default query granularity is 3600 seconds
    during = int(request.args.get('pointInterval', 60 * 60))
    end_ts = int(request.args.get('end_ts', ''))
    start_ts = int(request.args.get('start_ts', ''))
    ts_arr = []
    results = []
    # top_city_news = {city: [weibo1, weibo2, ...], ...}
    top_city_news = get_city_news(topic, start_ts, end_ts)

    items = db.session.query(CityRepostNews).filter(
        CityRepostNews.topic == topic).all()
    if items:
        for item in items:
            r = {
                'original': item.original,
                'topic': item.topic,
                'mid': item.mid,
                'ts': item.ts,
                'origin_location': item.origin_location,
                'repost_location': item.repost_location,
            }
            # Skip records whose origin or repost location is unknown or overseas
            if r['origin_location'] in (u'未知', u'海外') or \
                    r['repost_location'] in (u'未知', u'海外'):
                continue
            ts_arr.append(r['ts'])
            results.append(r)
        ts_arr = sorted(set(ts_arr))
        raw_ts_series, raw_groups = partition_time(ts_arr, results, during)
        ts_series, groups = select_groups(raw_ts_series, raw_groups,
                                          start_ts, end_ts)
        # draw_circle_data = map_circle_data(groups, True)
        max_repost_num, draw_line_data = map_line_data(groups, True)
        in_out_results = work_in_out(draw_line_data)
        repost_series, origin_series, post_series, statistic_data = \
            statistics_data(groups, draw_line_data, True)
        return json.dumps({
            'draw_line_data': draw_line_data,
            'in_out_results': in_out_results,
            'statistics_data': statistic_data,
            'top_city_news': top_city_news,
        })
        '''
        return json.dumps({'ts_arr': ts_arr, 'results': results,
                           'ts_series': ts_series, 'groups': groups,
                           'draw_circle_data': draw_circle_data,
                           'draw_line_data': draw_line_data,
                           'max_repost_num': max_repost_num,
                           'repost_series': repost_series,
                           'origin_series': origin_series,
                           'post_series': post_series,
                           'statistics_data': statistic_data})
        '''
    else:
        return json.dumps({
            'draw_line_data': [],
            'in_out_results': [],
            'statistics_data': [],
            'top_city_news': top_city_news,
        })
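# partition_time and select_groups are project helpers not shown in this
# file. Below is a minimal sketch of the bucketing step, consistent with how
# partition_time is called above: group the repost records into
# `during`-second windows keyed by each window's end timestamp. The real
# helper may differ in return shape and naming; this is an assumption for
# illustration, not the project's implementation.
from collections import defaultdict


def _sketch_partition_time(ts_arr, results, during):
    if not ts_arr:
        return [], {}
    # Align bucket boundaries to a grid anchored at the earliest timestamp
    base = ts_arr[0] - ts_arr[0] % during
    groups = defaultdict(list)
    for r in results:
        # A record at ts falls in the window ending at the next grid point
        bucket_end = base + ((r['ts'] - base) // during + 1) * during
        groups[bucket_end].append(r)
    ts_series = sorted(groups)
    return ts_series, dict(groups)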