def topic_detail(topic_id):
    """Render the detail page for a topic.

    GET: gathers the topic, its first page (15 items) of question data,
    focus statistics and follower list, then renders either the logged-in
    or the anonymous template. POST: returns the literal string 'error'.
    Any other method falls through (implicit None), as before.
    """
    if request.method == 'POST':
        return 'error'
    if request.method == 'GET':
        topic = db_topics.Topics().get_topic_by_id(topic_id)
        # Context shared by both the logged-in and anonymous templates.
        shared = {
            'topic': topic,
            'datas': get_topic_detail_question_datas(
                page_num=1, page_size=15, topic_id=topic_id),
            'focus_count': db_topic_focus.Topic_focus().get_foucs_count(topic_id),
            'focus_users': db_topics.Topics().get_focus_users(topic_id),
            'question_count': db_topic_question.Topic_question().get_question_count(topic_id),
            'c_time': timestamp_datetime(time()),
        }
        if is_login():
            current_user = db_users.Users().get_user(session['username'])
            return render_template(
                'login/login-topic_detail.html',
                has_focus=db_topic_focus.Topic_focus().user_focus_topic(
                    current_user.uid, topic_id),
                user=current_user,
                **shared)
        return render_template('topic_detail.html', **shared)
def udf_entity_hash(id, id_value, apply_dt, dbg=False):
    """Build an entity-count fingerprint string for one id at apply_dt.

    Events in id_value are de-duplicated, then filtered to those that
    occurred strictly before apply_dt and within 720 days of it. For each
    of five identity fields the number of distinct non-empty values is
    counted (capped at 9) and the digits are concatenated into
    '<id>_<apply_dt>_<digits>'. Returns the string 'null' when id or
    id_value is None, or when no event survives the filter.
    """
    if id is None or id_value is None:
        return 'null'

    events = udf_distinct_v(id_value)
    # Date-only apply_dt gets normalized to a midnight datetime string.
    if len(apply_dt) == 10:
        apply_dt = '%s 00:00:00' % apply_dt

    recent = []
    for event in events:
        occurred = utils.timestamp_datetime(event.get('eventoccurtime'))
        day_gap = utils.get_day(apply_dt, occurred)
        if occurred < apply_dt and day_gap <= 720:
            recent.append(event)
    if not recent:
        return 'null'

    digits = []
    for field in ['accountmobile', 'idnumber', 'deviceid',
                  'accountemail', 'qqnumber']:
        distinct = {e[field] for e in recent if e.get(field, '') != ''}
        digits.append(str(min(len(distinct), 9)))
    return '%s_%s_%s' % (id, apply_dt, ''.join(digits))
def get_topic_detail_questions_by_page(topic_id, page_num):
    """Return a JSON array (string) of question summaries for one topic page.

    Args:
        topic_id: topic whose questions are listed.
        page_num: 1-based page index; page size is fixed at 15.

    Returns:
        JSON-encoded list of per-question metadata dicts, with non-ASCII
        characters preserved (ensure_ascii=False).
    """
    questions = db_questions.Questions().get_questions_by_topic_id_and_page(
        topic_id=topic_id, page_num=page_num, page_size=15)
    datas = []
    for question in questions:
        # Fetch the questioner once per question instead of twice (the
        # original issued two identical get_user_by_id lookups, one for
        # username and one for avatar_url).
        questioner = db_users.Users().get_user_by_id(question.questioner_uid)
        datas.append({
            'question_id': question.question_id,
            'title': question.title,
            'username': questioner.username,
            'is_anonymous': question.is_anonymous,
            'question_focus_count': db_question_focus.Question_focus().get_question_foucs_count(question.question_id),
            'question_answer_count': db_answers.Answers().get_answer_count(question.question_id),
            'question_view_count': db_questions.Questions().get_question_view_count(question.question_id),
            'publish_time': timestamp_datetime(question.publish_time),
            # NOTE(review): get_user_url receives question_id, not
            # questioner_uid — looks suspicious but is preserved as-is;
            # confirm against get_user_url's contract.
            'user_url': get_user_url(question.question_id),
            'dynamic_str': get_dynamic_str(question.question_id),
            'avatar_url': questioner.avatar_url,
        })
    return json.dumps(datas, ensure_ascii=False)
def update_vsim_estsucc_last_succ():
    """Sync the latest successful-establishment record per vSIM IMSI from
    the Mongo perf collections into MySQL `t_term_vsim_estsucc_last_succ`.

    Walks day by day from the table's current MAX(update_time) up to (but
    not including) today, keeps the newest success per IMSI, then upserts
    everything with one executemany.

    Fixes over the previous version:
      * `succ_list[1].keys()` was an off-by-one — it raised IndexError
        whenever exactly one IMSI was collected; now uses the first row.
      * empty result set no longer crashes with IndexError.
      * `effect_row` was referenced in the failure path before assignment
        (NameError masking the real exception); now pre-initialized.
      * the DB connection is now closed on success as well as on failure.
    """
    begin_datetime = generic_query(
        database('GSVC_SQL').get_db(),
        "SELECT MAX(update_time) FROM `t_term_vsim_estsucc_last_succ`")[0][0]
    begin_datetime = mkdatetime(str(begin_datetime))
    now = datetime.datetime.now()
    today = datetime.datetime(year=now.year, month=now.month, day=now.day)
    mgo = database('PERF_MGO').get_db()
    succ_info = {}
    # 1. Loop day by day to keep each Mongo query window small; accumulate
    #    the rows that will finally be upserted.
    while begin_datetime < today:
        begin_time = datetime_timestamp(begin_datetime)
        end_time = datetime_timestamp(begin_datetime + datetime.timedelta(days=1))
        # choose_perf_collection returns a list; a one-day window yields
        # exactly one collection in this scenario.
        col = choose_perf_collection(
            begin_datetime, begin_datetime + datetime.timedelta(days=1),
            prefix='t_term_vsim_estsucc')[0]
        match = {'createTime': {'$gte': begin_time, '$lt': end_time}}
        # Sort newest-first so $first inside $group picks the latest success
        # per IMSI; $project renames _id back to imsi.
        pipeline = [
            {'$match': match},
            {'$sort': {'createTime': -1}},
            {'$group': {
                '_id': '$vsimImsi',
                'succ_time': {'$first': '$succTime'},
                'succ_mcc': {'$first': '$mcc'},
                'succ_mnc': {'$first': '$mnc'},
                'succ_lac': {'$first': '$lac'},
            }},
            {'$project': {
                '_id': 0,
                'imsi': '$_id',
                'succ_time': 1,
                'succ_mcc': 1,
                'succ_mnc': 1,
                'succ_lac': 1,
            }},
        ]
        for info in mgo[col].aggregate(pipeline, allowDiskUse=True):
            if info['imsi'] == "":
                continue
            info['update_time'] = today  # records when this sync ran
            info['create_date'] = today  # takes effect on INSERT only, not on UPDATE
            # succTime is expected to be a 13-digit millisecond timestamp;
            # anything else is mapped to the 1900-01-01 sentinel.
            if len(str(info['succ_time'])) != 13:
                info['succ_time'] = mkdatetime('1900-01-01')
            else:
                info['succ_time'] = timestamp_datetime(info['succ_time'])
            info['succ_country'] = mcc_country(
                info['succ_mcc'], info['succ_mnc'], info['succ_lac'])
            # Later days simply overwrite earlier ones, keeping the most
            # recent record per IMSI (the original if/else branches here
            # were identical).
            succ_info[info['imsi']] = info
        begin_datetime += datetime.timedelta(days=1)

    # 2. Format the collected rows for insert/update.
    if not succ_info:
        # Nothing new to upsert; avoid indexing into an empty list.
        print('INSERT ROWS:0')
        return
    succ_list = list(succ_info.values())
    # Every row is built with the same keys in the same insertion order, so
    # the first row's keys are representative of all of them.
    succ_tuple_key = succ_list[0].keys()
    succ_tuple_val = [tuple(v.values()) for v in succ_list]
    update_stmt = (
        "INSERT INTO `{target_table}` (`{colname}`) VALUES ({placeholder}) ON DUPLICATE KEY "
        "UPDATE `succ_time` = values(`succ_time`), "
        "`update_time` = values(`update_time`),"
        "`succ_lac` = values(`succ_lac`),"
        "`succ_mcc` = values(`succ_mcc`),"
        "`succ_country` = values(`succ_country`),"
        "`succ_mnc` = values(`succ_mnc`)").format(
            target_table='t_term_vsim_estsucc_last_succ',
            colname="`,`".join(succ_tuple_key),
            placeholder=','.join(['%s'] * len(succ_tuple_key)))
    con = database('GSVC_SQL_ADMIN').get_db()
    effect_row = 0  # defined up front so the log line below can never NameError
    try:
        with con.cursor() as cur:
            effect_row = cur.executemany(update_stmt, succ_tuple_val)
        con.commit()
    finally:
        # Log and close on both the success and the failure path; an
        # exception from executemany/commit still propagates to the caller.
        print('INSERT ROWS:{}'.format(effect_row))
        con.close()