def get(self):
    token = request.args.get("utoken")
    if not psd_handler.user_is_admin(token):
        return jsonify("登录已过期或没有权限进行这个操作"), 400
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Query all user records
    select_statement = "SELECT id,name,fixed_code,join_time,phone,email,update_time,is_active,is_admin,org_id " \
                       "FROM user_info;"
    cursor.execute(select_statement)
    # Reorganize the user data for the response
    user_data = list()
    for user_item in cursor.fetchall():
        user_dict = dict()
        user_dict['id'] = user_item['id']
        user_dict['name'] = user_item['name']
        user_dict['fixed_code'] = user_item['fixed_code']
        user_dict['join_time'] = user_item['join_time'].strftime('%Y-%m-%d %H:%M:%S')
        user_dict['update_time'] = user_item['update_time'].strftime('%Y-%m-%d %H:%M:%S')
        user_dict['is_active'] = user_item['is_active']
        user_dict['is_admin'] = user_item['is_admin']
        user_dict['organization'] = ORGANIZATIONS.get(user_item['org_id'], '未知')
        user_dict['phone'] = user_item['phone']
        user_dict['email'] = user_item['email']
        user_data.append(user_dict)
    db_connection.close()
    return jsonify(user_data)
def get(self):
    params = request.args
    utoken = params.get('utoken')
    user_info = verify_json_web_token(utoken)
    if not user_info:
        return jsonify("登录已过期!刷新网页重新登录."), 400
    try:
        start_date = params.get('startDate')
        end_date = params.get('endDate')
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
        end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
    except Exception:
        return jsonify("参数错误:DATE FORMAT ERROR!")
    query_statement = "SELECT usertb.name,usertb.org_id,ondmsgtb.custom_time,ondmsgtb.content,ondmsgtb.note " \
                      "FROM `user_info` AS usertb INNER JOIN `onduty_message` AS ondmsgtb ON " \
                      "usertb.id=%s AND usertb.id=ondmsgtb.author_id AND (ondmsgtb.custom_time BETWEEN %s AND %s) " \
                      "ORDER BY ondmsgtb.custom_time ASC;"
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    cursor.execute(query_statement, (user_info['uid'], start_date, end_date))
    records_all = cursor.fetchall()
    db_connection.close()
    # Build the export file (name derived from the timestamp and user name)
    t = "%.4f" % time.time()
    md5_hash = hashlib.md5()
    md5_hash.update(t.encode('utf-8'))
    md5_hash.update(user_info['name'].encode('utf-8'))
    md5_str = md5_hash.hexdigest()
    file_folder = os.path.join(BASE_DIR, 'fileStore/exports/')
    if not os.path.exists(file_folder):
        os.makedirs(file_folder)
    file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
    file_records = list()
    for record_item in records_all:
        row_content = list()
        row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
        row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
        row_content.append(record_item['name'])
        row_content.append(record_item['content'])
        row_content.append(record_item['note'])
        file_records.append(row_content)
    export_df = pd.DataFrame(file_records)
    export_df.columns = ['日期', '部门小组', '姓名', '信息内容', '备注']
    export_df.to_excel(excel_writer=file_path, index=False, sheet_name='值班信息记录')
    return send_from_directory(
        directory=file_folder,
        filename='{}.xlsx'.format(md5_str),
        as_attachment=True,
        attachment_filename='{}.xlsx'.format(md5_str))
def get(self):
    organizations = []
    for key, value in ORGANIZATIONS.items():
        o_item = dict()
        o_item['id'] = key
        o_item['name'] = value
        organizations.append(o_item)
    return jsonify(organizations)
def get(self):
    params = request.args
    # Resolve the user from the token
    token = params.get('utoken')
    user_info = verify_json_web_token(token)
    if not user_info:
        return jsonify("您的登录已过期,请重新登录查看.")
    user_id = user_info['uid']
    try:
        start_date = params.get('startDate')
        end_date = params.get('endDate')
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
        end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
        current_page = int(params.get('page', 1)) - 1
        page_size = int(params.get('pagesize', 30))
    except Exception:
        return jsonify("参数错误:INT TYPE REQUIRED!")
    start_id = current_page * page_size
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Inner-join query for the current user's short messages within the date range
    inner_join_statement = "SELECT usertb.name,usertb.org_id,smsgtb.id,smsgtb.custom_time,smsgtb.content,smsgtb.msg_type,smsgtb.effect_variety,smsgtb.note " \
                           "FROM `user_info` AS usertb INNER JOIN `short_message` AS smsgtb " \
                           "ON usertb.id=%s AND usertb.id=smsgtb.author_id AND (smsgtb.custom_time BETWEEN %s AND %s) " \
                           "ORDER BY smsgtb.custom_time DESC " \
                           "LIMIT %s,%s;"
    cursor.execute(inner_join_statement, (user_id, start_date, end_date, start_id, page_size))
    result_records = cursor.fetchall()
    # Query the total number of records
    count_statement = "SELECT COUNT(smsgtb.id) AS total FROM `user_info` AS usertb INNER JOIN `short_message` AS smsgtb " \
                      "ON usertb.id=%s AND usertb.id=smsgtb.author_id AND (smsgtb.custom_time BETWEEN %s AND %s);"
    cursor.execute(count_statement, (user_id, start_date, end_date))
    # Note: calling cursor.fetchone() more than once here would consume the single count row
    total_count = cursor.fetchone()['total']
    db_connection.close()
    # Compute the total number of pages (ceiling division)
    total_page = int((total_count + page_size - 1) / page_size)
    # Organize the response data
    response_data = dict()
    response_data['records'] = list()
    for record_item in result_records:
        record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
        record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), "未知")
        response_data['records'].append(record_item)
    response_data['current_page'] = current_page + 1  # restore the 1-based page number
    response_data['total_page'] = total_page
    response_data['current_count'] = len(result_records)
    response_data['total_count'] = total_count
    return jsonify(response_data)
def export_investrategy(self, userid, start_date, end_date):
    table_headers = ['日期', '部门小组', '姓名', '策略内容', '品种', '合约', '方向', '手数', '策略开仓', '策略平仓', '结果']
    query_statement = "SELECT usertb.name,usertb.org_id,invsgytb.custom_time,invsgytb.content,invsgytb.variety_id,invsgytb.contract,invsgytb.direction,invsgytb.hands," \
                      "invsgytb.open_position,invsgytb.close_position,invsgytb.profit " \
                      "FROM `user_info` AS usertb INNER JOIN `investrategy` AS invsgytb " \
                      "ON (usertb.id=%s AND usertb.id=invsgytb.author_id) AND (invsgytb.custom_time BETWEEN %s AND %s) " \
                      "ORDER BY invsgytb.custom_time ASC;"
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Prepare the variety id -> name mapping
    query_variety = "SELECT `id`,`name` FROM `variety` WHERE `parent_id` IS NOT NULL;"
    cursor.execute(query_variety)
    variety_all = cursor.fetchall()
    variety_dict = {variety_item["id"]: variety_item['name'] for variety_item in variety_all}
    cursor.execute(query_statement, (userid, start_date, end_date))
    query_result = cursor.fetchall()
    db_connection.close()
    file_records = list()
    for record_item in query_result:
        row_content = list()
        row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
        row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
        row_content.append(record_item['name'])
        row_content.append(record_item['content'])
        row_content.append(variety_dict.get(record_item['variety_id'], '未知'))
        row_content.append(record_item['contract'])
        row_content.append(record_item['direction'])
        row_content.append(record_item['hands'])
        row_content.append(float(record_item['open_position']))
        row_content.append(float(record_item['close_position']))
        row_content.append(float(record_item['profit']))
        file_records.append(row_content)
    export_df = pd.DataFrame(file_records)
    export_df.columns = table_headers
    file_folder, md5_str = self.generate_file_path(userid)
    file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
    export_df.to_excel(excel_writer=file_path, index=False, sheet_name="投顾策略记录")
    return send_from_directory(
        directory=file_folder,
        filename='{}.xlsx'.format(md5_str),
        as_attachment=True,
        attachment_filename='{}.xlsx'.format(md5_str))
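# `generate_file_path` is referenced by the export helpers in this section but not
# shown here. A minimal sketch, assuming it mirrors the inline MD5 naming used by
# the view-level export methods above (hash of the current timestamp plus the user
# id, written under fileStore/exports/):
def generate_file_path(self, userid):
    # Hypothetical helper: create the export folder and return it with a unique file stem.
    t = "%.4f" % time.time()
    md5_hash = hashlib.md5()
    md5_hash.update(t.encode('utf-8'))
    md5_hash.update(str(userid).encode('utf-8'))
    md5_str = md5_hash.hexdigest()
    file_folder = os.path.join(BASE_DIR, 'fileStore/exports/')
    if not os.path.exists(file_folder):
        os.makedirs(file_folder)
    return file_folder, md5_str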
def get(self):
    token = request.args.get('utoken')
    user_info = psd_handler.verify_json_web_token(token)
    if not user_info:  # token invalid or expired
        return jsonify('登录已过期.'), 400
    org_id = user_info.get('org_id', None)
    if org_id:
        user_info['org_name'] = ORGANIZATIONS.get(int(org_id), '未知')
    else:
        user_info['org_name'] = '无'
    return jsonify(user_info)
def get(self, rid):
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Use a parameterized query instead of "%d" string formatting
    select_statement = "SELECT usertb.name,usertb.org_id,smsgtb.id,smsgtb.custom_time,smsgtb.content,smsgtb.msg_type,smsgtb.effect_variety,smsgtb.note " \
                       "FROM `user_info` AS usertb INNER JOIN `short_message` AS smsgtb ON " \
                       "smsgtb.id=%s AND usertb.id=smsgtb.author_id;"
    cursor.execute(select_statement, (rid,))
    record_item = cursor.fetchone()
    if record_item:
        record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
        record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), "未知")
    else:
        record_item = {}
    db_connection.close()
    return jsonify(record_item)
def export_abnormal_work(self, userid, start_date, end_date):
    table_headers = ['日期', '部门小组', '姓名', '标题', '类型', '主办方', '申请部门', '申请者', '联系电话', '瑞币', '补贴', '备注']
    query_statement = "SELECT usertb.name,usertb.org_id,abworktb.custom_time,abworktb.task_type,abworktb.title,abworktb.sponsor,abworktb.applied_org,abworktb.applicant,abworktb.tel_number,abworktb.swiss_coin,abworktb.allowance,abworktb.note " \
                      "FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb " \
                      "ON (abworktb.is_examined=1 AND usertb.id=%s AND usertb.id=abworktb.author_id) AND (abworktb.custom_time BETWEEN %s AND %s) " \
                      "ORDER BY abworktb.custom_time ASC;"
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    cursor.execute(query_statement, (userid, start_date, end_date))
    query_result = cursor.fetchall()
    db_connection.close()
    file_records = list()
    for record_item in query_result:
        row_content = list()
        row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
        row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
        row_content.append(record_item['name'])
        row_content.append(record_item['title'])
        row_content.append(ABNORMAL_WORK.get(record_item['task_type'], ''))
        row_content.append(record_item['sponsor'])
        row_content.append(record_item['applied_org'])
        row_content.append(record_item['applicant'])
        row_content.append(record_item['tel_number'])
        row_content.append(record_item['swiss_coin'] if record_item['swiss_coin'] else '')
        row_content.append(int(record_item['allowance']))
        row_content.append(record_item['note'])
        file_records.append(row_content)
    export_df = pd.DataFrame(file_records)
    export_df.columns = table_headers
    file_folder, md5_str = self.generate_file_path(userid)
    file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
    export_df.to_excel(excel_writer=file_path, index=False, sheet_name="非常态工作记录")
    return send_from_directory(
        directory=file_folder,
        filename='{}.xlsx'.format(md5_str),
        as_attachment=True,
        attachment_filename='{}.xlsx'.format(md5_str))
def get(self, work_id):
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    select_statement = "SELECT usertb.name,usertb.org_id,abworktb.custom_time,abworktb.task_type,abworktb.title," \
                       "abworktb.sponsor,abworktb.applied_org,abworktb.applicant,abworktb.tel_number,abworktb.swiss_coin," \
                       "abworktb.allowance,abworktb.note,abworktb.annex,abworktb.annex_url " \
                       "FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb ON " \
                       "abworktb.id=%s AND abworktb.author_id=usertb.id;"
    cursor.execute(select_statement, (work_id,))
    work_item = cursor.fetchone()
    work_item['custom_time'] = work_item['custom_time'].strftime('%Y-%m-%d')
    work_item['allowance'] = int(work_item['allowance'])
    work_item['org_name'] = ORGANIZATIONS.get(int(work_item['org_id']), '')
    work_item['task_type_name'] = ABNORMAL_WORK.get(int(work_item['task_type']), '')
    work_item['work_types'] = ABNORMAL_WORK
    db_connection.close()
    return jsonify(work_item)
def export_article_publish(self, userid, start_date, end_date):
    table_headers = ['日期', '部门小组', '姓名', '题目', '发表/采访媒体', '稿件形式', '字数', '审核人', '收入奖励', '合作人', '备注']
    query_statement = "SELECT usertb.name,usertb.org_id,atltb.custom_time,atltb.title,atltb.media_name,atltb.rough_type,atltb.words,atltb.checker," \
                      "atltb.allowance,atltb.partner,atltb.note " \
                      "FROM `user_info` AS usertb INNER JOIN `article_publish` AS atltb " \
                      "ON (usertb.id=%s AND usertb.id=atltb.author_id) AND (atltb.custom_time BETWEEN %s AND %s) " \
                      "ORDER BY atltb.custom_time ASC;"
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    cursor.execute(query_statement, (userid, start_date, end_date))
    query_result = cursor.fetchall()
    db_connection.close()
    file_records = list()
    for record_item in query_result:
        row_content = list()
        row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
        row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
        row_content.append(record_item['name'])
        row_content.append(record_item['title'])
        row_content.append(record_item['media_name'])
        row_content.append(record_item['rough_type'])
        row_content.append(record_item['words'])
        row_content.append(record_item['checker'])
        row_content.append(record_item['allowance'])
        row_content.append(record_item['partner'])
        row_content.append(record_item['note'])
        file_records.append(row_content)
    export_df = pd.DataFrame(file_records)
    export_df.columns = table_headers
    file_folder, md5_str = self.generate_file_path(userid)
    file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
    export_df.to_excel(excel_writer=file_path, index=False, sheet_name="文章发表记录")
    return send_from_directory(
        directory=file_folder,
        filename='{}.xlsx'.format(md5_str),
        as_attachment=True,
        attachment_filename='{}.xlsx'.format(md5_str))
def get(self, rid):
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    select_statement = "SELECT usertb.name,usertb.org_id,mgpctb.custom_time,mgpctb.title,mgpctb.words,mgpctb.is_publish," \
                       "mgpctb.level,mgpctb.score,mgpctb.partner,mgpctb.note,mgpctb.annex,mgpctb.annex_url " \
                       "FROM `user_info` AS usertb INNER JOIN `monographic` AS mgpctb ON " \
                       "mgpctb.id=%s AND mgpctb.author_id=usertb.id;"
    cursor.execute(select_statement, (rid,))
    record_item = cursor.fetchone()
    record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
    record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), '未知')
    record_item['is_publish'] = 1 if record_item['is_publish'] else 0
    record_item['level_types'] = ["A", "B", "C"]
    db_connection.close()
    return jsonify(record_item)
def get(self, rid):
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    select_statement = "SELECT usertb.name,usertb.org_id,invsgytb.id,invsgytb.custom_time,invsgytb.content,invsgytb.variety_id,invsgytb.contract,invsgytb.direction,invsgytb.hands,invsgytb.open_position," \
                       "invsgytb.close_position,invsgytb.profit " \
                       "FROM `user_info` AS usertb INNER JOIN `investrategy` AS invsgytb ON " \
                       "invsgytb.id=%s AND usertb.id=invsgytb.author_id;"
    cursor.execute(select_statement, (rid,))
    record_item = cursor.fetchone()
    record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
    record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), '未知')
    record_item['open_position'] = float(record_item['open_position'])
    record_item['close_position'] = float(record_item['close_position'])
    record_item['profit'] = int(record_item['profit'])
    db_connection.close()
    return jsonify(record_item)
def get(self, rid):
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Use a parameterized query instead of "%d" string formatting
    select_statement = "SELECT usertb.name,usertb.org_id,atltb.id,atltb.custom_time,atltb.title,atltb.media_name,atltb.rough_type,atltb.words,atltb.checker,atltb.allowance, " \
                       "atltb.partner,atltb.note,atltb.annex,atltb.annex_url " \
                       "FROM `user_info` AS usertb INNER JOIN `article_publish` AS atltb ON " \
                       "atltb.id=%s AND usertb.id=atltb.author_id;"
    cursor.execute(select_statement, (rid,))
    record_item = cursor.fetchone()
    if record_item:
        record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
        record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), "未知")
    else:
        record_item = {}
    db_connection.close()
    return jsonify(record_item)
def get(self):
    token = request.args.get('utoken')
    user_info = psd_handler.verify_json_web_token(token)
    if not user_info:  # token invalid or expired
        return jsonify('登录已过期.'), 400
    # Look up the user's record by id
    user_id = user_info['uid']
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    query_statement = "SELECT `id`,`name`,`fixed_code`,`join_time`,`phone`,`email`,`org_id` " \
                      "FROM `user_info` " \
                      "WHERE `id`=%s;"
    cursor.execute(query_statement, (user_id,))
    user_info = cursor.fetchone()
    db_connection.close()
    # Format as year-month-day (the original "%Y-%d-%m" swapped month and day)
    user_info['join_time'] = user_info['join_time'].strftime("%Y-%m-%d %H:%M:%S")
    user_info['org_name'] = ORGANIZATIONS.get(int(user_info['org_id']), '未知')
    return jsonify({'message': '查询成功!', 'userinfo': user_info})
def export_monographic(self, userid, start_date, end_date):
    table_headers = ['日期', '部门小组', '姓名', '题目', '字数', '外发情况', '等级', '得分', '备注']
    query_statement = "SELECT usertb.name,usertb.org_id,mgpctb.custom_time,mgpctb.title,mgpctb.words,mgpctb.is_publish,mgpctb.level,mgpctb.score,mgpctb.note " \
                      "FROM `user_info` AS usertb INNER JOIN `monographic` AS mgpctb " \
                      "ON (usertb.id=%s AND usertb.id=mgpctb.author_id) AND (mgpctb.custom_time BETWEEN %s AND %s) " \
                      "ORDER BY mgpctb.custom_time ASC;"
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    cursor.execute(query_statement, (userid, start_date, end_date))
    query_result = cursor.fetchall()
    db_connection.close()
    file_records = list()
    for record_item in query_result:
        row_content = list()
        row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
        row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
        row_content.append(record_item['name'])
        row_content.append(record_item['title'])
        row_content.append(record_item['words'])
        row_content.append("是" if record_item['is_publish'] else "否")
        row_content.append(record_item['level'])
        row_content.append(record_item['score'])
        row_content.append(record_item['note'])
        file_records.append(row_content)
    export_df = pd.DataFrame(file_records)
    export_df.columns = table_headers
    file_folder, md5_str = self.generate_file_path(userid)
    file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
    export_df.to_excel(excel_writer=file_path, index=False, sheet_name="专题研究记录")
    return send_from_directory(
        directory=file_folder,
        filename='{}.xlsx'.format(md5_str),
        as_attachment=True,
        attachment_filename='{}.xlsx'.format(md5_str))
def get(self, rid):
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    select_statement = "SELECT usertb.name,usertb.org_id,invstb.custom_time,invstb.variety_id,invstb.title,invstb.contract,invstb.direction,invstb.build_time,invstb.build_price, " \
                       "invstb.build_hands,invstb.out_price,invstb.cutloss_price,invstb.expire_time,invstb.is_publish,invstb.profit,invstb.annex,invstb.annex_url,invstb.note,invstb.level " \
                       "FROM `user_info` AS usertb INNER JOIN `investment` AS invstb ON " \
                       "invstb.id=%s AND invstb.author_id=usertb.id;"
    cursor.execute(select_statement, (rid,))
    record_item = cursor.fetchone()
    record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
    record_item['build_time'] = record_item['build_time'].strftime('%Y-%m-%dT%H:%M:%S')
    record_item['expire_time'] = record_item['expire_time'].strftime('%Y-%m-%dT%H:%M:%S')
    record_item['is_publish'] = "是" if record_item['is_publish'] else "否"
    record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), '未知')
    record_item['build_price'] = float(record_item['build_price'])
    record_item['out_price'] = float(record_item['out_price'])
    record_item['cutloss_price'] = float(record_item['cutloss_price'])
    record_item['profit'] = float(record_item['profit'])
    db_connection.close()
    return jsonify(record_item)
def export_short_message(self, userid, start_date, end_date):
    table_headers = ['日期', '部门小组', '姓名', '信息内容', '类别', '影响品种', '备注']
    query_statement = "SELECT usertb.name,usertb.org_id,smsgtb.custom_time,smsgtb.content,smsgtb.msg_type,smsgtb.effect_variety,smsgtb.note " \
                      "FROM `user_info` AS usertb INNER JOIN `short_message` AS smsgtb " \
                      "ON (usertb.id=%s AND usertb.id=smsgtb.author_id) AND (smsgtb.custom_time BETWEEN %s AND %s) " \
                      "ORDER BY smsgtb.custom_time ASC;"
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    cursor.execute(query_statement, (userid, start_date, end_date))
    query_result = cursor.fetchall()
    db_connection.close()
    file_records = list()
    for record_item in query_result:
        row_content = list()
        row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
        row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
        row_content.append(record_item['name'])
        row_content.append(record_item['content'])
        row_content.append(record_item['msg_type'])
        row_content.append(record_item['effect_variety'])
        row_content.append(record_item['note'])
        file_records.append(row_content)
    export_df = pd.DataFrame(file_records)
    export_df.columns = table_headers
    file_folder, md5_str = self.generate_file_path(userid)
    file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
    export_df.to_excel(excel_writer=file_path, index=False, sheet_name="短信通记录")
    return send_from_directory(
        directory=file_folder,
        filename='{}.xlsx'.format(md5_str),
        as_attachment=True,
        attachment_filename='{}.xlsx'.format(md5_str))
def get(self):
    params = request.args
    # Resolve the user from the token
    token = params.get('utoken')
    user_info = verify_json_web_token(token)
    if not user_info:
        return jsonify("您的登录已过期,请重新登录查看.")
    user_id = user_info['uid']
    try:
        start_date = params.get('startDate')
        end_date = params.get('endDate')
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
        end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
        current_page = int(params.get('page', 1)) - 1
        page_size = int(params.get('pagesize', 30))
    except Exception:
        return jsonify("参数错误:DATE FORMAT ERROR & INT TYPE REQUIRED!")
    start_id = current_page * page_size
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Inner-join query for the current user's monographic records within the date range
    inner_join_statement = "SELECT usertb.name,usertb.org_id,mgpctb.id,mgpctb.custom_time,mgpctb.title,mgpctb.words," \
                           "mgpctb.is_publish,mgpctb.level,mgpctb.score,mgpctb.note,mgpctb.annex,mgpctb.annex_url " \
                           "FROM `user_info` AS usertb INNER JOIN `monographic` AS mgpctb ON " \
                           "usertb.id=%s AND usertb.id=mgpctb.author_id AND (mgpctb.custom_time BETWEEN %s AND %s) " \
                           "ORDER BY mgpctb.custom_time DESC " \
                           "LIMIT %s,%s;"
    cursor.execute(inner_join_statement, (user_id, start_date, end_date, start_id, page_size))
    result_records = cursor.fetchall()
    # Query the total number of records
    count_statement = "SELECT COUNT(mgpctb.id) AS total FROM `user_info` AS usertb INNER JOIN `monographic` AS mgpctb ON " \
                      "usertb.id=%s AND usertb.id=mgpctb.author_id AND (mgpctb.custom_time BETWEEN %s AND %s);"
    cursor.execute(count_statement, (user_id, start_date, end_date))
    # Note: calling cursor.fetchone() more than once here would consume the single count row
    total_ = cursor.fetchone()
    db_connection.close()
    total_count = total_['total']
    # Compute the total number of pages (ceiling division)
    total_page = int((total_count + page_size - 1) / page_size)
    # Organize the response data
    response_data = dict()
    response_data['records'] = list()
    for record_item in result_records:
        record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
        record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), '未知')
        record_item['is_publish'] = "是" if record_item['is_publish'] else "否"
        response_data['records'].append(record_item)
    response_data['current_page'] = current_page + 1  # restore the 1-based page number
    response_data['total_page'] = total_page
    response_data['current_count'] = len(result_records)
    response_data['total_count'] = total_count
    return jsonify(response_data)
def get(self):
    query_params = request.args
    try:
        examine_status = int(query_params.get('status', 0))
        current_page = int(query_params.get('page', 1)) - 1
        page_size = int(query_params.get('page_size', 35))
    except Exception:
        return jsonify({"message": "参数错误", "records": []}), 400
    # Query the records according to the parameters
    start_id = current_page * page_size
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    if examine_status == 0:
        # status 0: return all records regardless of examination state
        inner_join_statement = "SELECT usertb.name,usertb.org_id,abworktb.id,abworktb.custom_time,abworktb.task_type," \
                               "abworktb.title,abworktb.sponsor,abworktb.applied_org,abworktb.applicant,abworktb.tel_number," \
                               "abworktb.swiss_coin,abworktb.allowance,abworktb.note,abworktb.annex,abworktb.annex_url,abworktb.is_examined " \
                               "FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb ON " \
                               "usertb.id=abworktb.author_id " \
                               "ORDER BY abworktb.custom_time DESC " \
                               "LIMIT %s,%s;"
        cursor.execute(inner_join_statement, (start_id, page_size))
        abworks = cursor.fetchall()
        # Query the total number of records
        total_count_statement = "SELECT COUNT(abworktb.id) AS total FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb " \
                                "ON usertb.id=abworktb.author_id;"
        cursor.execute(total_count_statement)
        total_ = cursor.fetchone()
    else:
        # Map the front-end status flag to the stored is_examined value
        examine_status = 0 if examine_status == 1 else 1
        # Raw SQL inner-join query filtered by examination state
        inner_join_statement = "SELECT usertb.name,usertb.org_id,abworktb.id,abworktb.custom_time,abworktb.task_type," \
                               "abworktb.title,abworktb.sponsor,abworktb.applied_org,abworktb.applicant,abworktb.tel_number," \
                               "abworktb.swiss_coin,abworktb.allowance,abworktb.note,abworktb.annex,abworktb.annex_url,abworktb.is_examined " \
                               "FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb ON " \
                               "usertb.id=abworktb.author_id AND abworktb.is_examined=%s " \
                               "ORDER BY abworktb.custom_time DESC " \
                               "LIMIT %s,%s;"
        cursor.execute(inner_join_statement, (examine_status, start_id, page_size))
        abworks = cursor.fetchall()
        # Query the total number of records
        total_count_statement = "SELECT COUNT(abworktb.id) AS total FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb " \
                                "ON usertb.id=abworktb.author_id AND abworktb.is_examined=%s;"
        cursor.execute(total_count_statement, (examine_status,))
        total_ = cursor.fetchone()
    total_count = total_['total']
    db_connection.close()
    # Compute the total number of pages (ceiling division)
    total_page = int((total_count + page_size - 1) / page_size)
    # Organize the response data
    response_data = dict()
    response_data['abworks'] = list()
    for work_item in abworks:
        work_item['custom_time'] = work_item['custom_time'].strftime('%Y-%m-%d')
        work_item['task_type'] = ABNORMAL_WORK.get(work_item['task_type'], '')
        work_item['org_name'] = ORGANIZATIONS.get(int(work_item['org_id']), '未知')
        work_item['swiss_coin'] = work_item['swiss_coin'] if work_item['swiss_coin'] else ''
        work_item['allowance'] = int(work_item['allowance'])
        response_data['abworks'].append(work_item)
    response_data['current_page'] = current_page + 1  # restore the 1-based page number
    response_data['total_page'] = total_page
    response_data['current_count'] = len(abworks)
    response_data['total_count'] = total_count
    response_data['message'] = "查询成功!"
    return jsonify(response_data)
def get(self):
    params = request.args
    utoken = params.get('utoken')
    user_info = verify_json_web_token(utoken)
    if not user_info:
        return jsonify("登录已过期!刷新网页重新登录."), 400
    try:
        start_date = params.get('startDate')
        end_date = params.get('endDate')
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
        end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
    except Exception:
        return jsonify("参数错误:DATE FORMAT ERROR!")
    query_statement = "SELECT usertb.name,usertb.org_id,invsgytb.custom_time,invsgytb.content,invsgytb.variety_id,invsgytb.contract,invsgytb.direction,invsgytb.hands," \
                      "invsgytb.open_position,invsgytb.close_position,invsgytb.profit,invsgytb.note " \
                      "FROM `user_info` AS usertb INNER JOIN `investrategy` AS invsgytb ON " \
                      "usertb.id=%s AND usertb.id=invsgytb.author_id AND (invsgytb.custom_time BETWEEN %s AND %s) " \
                      "ORDER BY invsgytb.custom_time ASC;"
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Prepare the variety id -> name mapping
    query_variety = "SELECT `id`,`name` FROM `variety` WHERE `parent_id` IS NOT NULL;"
    cursor.execute(query_variety)
    variety_all = cursor.fetchall()
    variety_dict = {variety_item["id"]: variety_item['name'] for variety_item in variety_all}
    cursor.execute(query_statement, (user_info['uid'], start_date, end_date))
    records_all = cursor.fetchall()
    db_connection.close()
    # Build the export file (name derived from the timestamp and user name)
    t = "%.4f" % time.time()
    md5_hash = hashlib.md5()
    md5_hash.update(t.encode('utf-8'))
    md5_hash.update(user_info['name'].encode('utf-8'))
    md5_str = md5_hash.hexdigest()
    file_folder = os.path.join(BASE_DIR, 'fileStore/exports/')
    if not os.path.exists(file_folder):
        os.makedirs(file_folder)
    file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
    file_records = list()
    for record_item in records_all:
        row_content = list()
        row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
        row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
        row_content.append(record_item['name'])
        row_content.append(record_item['content'])
        row_content.append(variety_dict.get(record_item['variety_id'], ''))
        row_content.append(record_item['contract'])
        row_content.append(record_item['direction'])
        row_content.append(record_item['hands'])
        row_content.append(float(record_item['open_position']))
        row_content.append(float(record_item['close_position']))
        row_content.append(float(record_item['profit']))
        row_content.append(record_item['note'])
        file_records.append(row_content)
    export_df = pd.DataFrame(file_records)
    export_df.columns = ['日期', '部门小组', '姓名', '策略内容', '品种', '合约', '方向', '手数', '策略开仓', '策略平仓', '策略结果', '备注']
    export_df.to_excel(excel_writer=file_path, index=False, sheet_name='投顾策略记录')
    return send_from_directory(
        directory=file_folder,
        filename='{}.xlsx'.format(md5_str),
        as_attachment=True,
        attachment_filename='{}.xlsx'.format(md5_str))
def get(self):
    params = request.args
    # Resolve the user from the token
    token = params.get('utoken')
    user_info = verify_json_web_token(token)
    if not user_info:
        return jsonify("您的登录已过期,请重新登录查看.")
    user_id = user_info['uid']
    try:
        start_date = params.get('startDate')
        end_date = params.get('endDate')
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
        end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
        current_page = int(params.get('page', 1)) - 1
        page_size = int(params.get('pagesize', 30))
    except Exception:
        return jsonify("参数错误:DATE FORMAT ERROR & INT TYPE REQUIRED!")
    start_id = current_page * page_size
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Inner-join query, joining the variety table for the variety name
    inner_join_statement = "SELECT usertb.name,usertb.org_id,invsgytb.id,invsgytb.custom_time,invsgytb.content,invsgytb.variety_id,varietytb.name AS variety,invsgytb.contract,invsgytb.direction,invsgytb.hands,invsgytb.open_position," \
                           "invsgytb.close_position,invsgytb.profit " \
                           "FROM `user_info` AS usertb INNER JOIN `investrategy` AS invsgytb INNER JOIN `variety` AS varietytb ON " \
                           "(usertb.id=%s AND usertb.id=invsgytb.author_id) AND invsgytb.variety_id=varietytb.id AND (invsgytb.custom_time BETWEEN %s AND %s) " \
                           "ORDER BY invsgytb.custom_time DESC " \
                           "LIMIT %s,%s;"
    cursor.execute(inner_join_statement, (user_id, start_date, end_date, start_id, page_size))
    result_records = cursor.fetchall()
    # Query the total count and the profit sum
    count_statement = "SELECT COUNT(invsgytb.id) AS total, SUM(invsgytb.profit) AS `sumprofit` " \
                      "FROM `user_info` AS usertb INNER JOIN `investrategy` AS invsgytb " \
                      "ON usertb.id=%s AND usertb.id=invsgytb.author_id AND (invsgytb.custom_time BETWEEN %s AND %s);"
    cursor.execute(count_statement, (user_id, start_date, end_date))
    fetch_one = cursor.fetchone()
    db_connection.close()
    if fetch_one:
        total_count = fetch_one['total']
        sum_profit = fetch_one['sumprofit']
    else:
        total_count = sum_profit = 0
    # Compute the total number of pages (ceiling division)
    total_page = int((total_count + page_size - 1) / page_size)
    # Organize the response data
    response_data = dict()
    response_data['records'] = list()
    for record_item in result_records:
        record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
        record_item['variety'] = (record_item['variety'] if record_item['variety'] else '') + str(record_item['contract'])
        record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), '未知')
        record_item['open_position'] = float(record_item['open_position'])
        record_item['close_position'] = float(record_item['close_position'])
        record_item['profit'] = float(record_item['profit'])
        response_data['records'].append(record_item)
    response_data['current_page'] = current_page + 1  # restore the 1-based page number
    response_data['total_page'] = total_page
    response_data['current_count'] = len(result_records)
    response_data['total_count'] = total_count
    response_data['sum_profit'] = float(sum_profit) if sum_profit else 0
    return jsonify(response_data)
def get(self):
    params = request.args
    utoken = params.get('utoken')
    user_info = verify_json_web_token(utoken)
    if not user_info:
        return jsonify("登录已过期!刷新网页重新登录."), 400
    try:
        start_date = params.get('startDate')
        end_date = params.get('endDate')
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
        end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
    except Exception:
        return jsonify("参数错误:DATE FORMAT ERROR!")
    # Query the current user's abnormal-work records
    query_statement = "SELECT usertb.name,usertb.org_id,abworktb.custom_time,abworktb.task_type,abworktb.title,abworktb.sponsor,abworktb.applied_org,abworktb.applicant,abworktb.tel_number,abworktb.swiss_coin,abworktb.allowance,abworktb.note " \
                      "FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb " \
                      "ON usertb.id=%s AND usertb.id=abworktb.author_id AND (abworktb.custom_time BETWEEN %s AND %s) " \
                      "ORDER BY abworktb.custom_time ASC;"
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    cursor.execute(query_statement, (user_info['uid'], start_date, end_date))
    records_all = cursor.fetchall()
    db_connection.close()
    # Build the export file (name derived from the timestamp and user name)
    t = "%.4f" % time.time()
    md5_hash = hashlib.md5()
    md5_hash.update(t.encode('utf-8'))
    md5_hash.update(user_info['name'].encode('utf-8'))
    md5_str = md5_hash.hexdigest()
    file_folder = os.path.join(BASE_DIR, 'fileStore/exports/')
    if not os.path.exists(file_folder):
        os.makedirs(file_folder)
    file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
    file_records = list()
    for record_item in records_all:
        row_content = list()
        row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
        row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
        row_content.append(record_item['name'])
        row_content.append(ABNORMAL_WORK.get(record_item['task_type'], ''))
        row_content.append(record_item['title'])
        row_content.append(record_item['sponsor'])
        row_content.append(record_item['applied_org'])
        row_content.append(record_item['applicant'])
        row_content.append(record_item['tel_number'])
        row_content.append(record_item['swiss_coin'] if record_item['swiss_coin'] else '')
        row_content.append(int(record_item['allowance']))
        row_content.append(record_item['note'])
        file_records.append(row_content)
    export_df = pd.DataFrame(file_records)
    export_df.columns = ['日期', '部门小组', '姓名', '任务类型', '主题/标题', '主办方', '申请部门/受用单位', '申请者', '联系电话', '瑞币情况', '收入补贴', '备注']
    export_df.to_excel(excel_writer=file_path, index=False, sheet_name='非常态工作记录')
    # Return the file as an attachment
    return send_from_directory(
        directory=file_folder,
        filename='{}.xlsx'.format(md5_str),
        as_attachment=True,
        attachment_filename='{}.xlsx'.format(md5_str))
def get(self):
    params = request.args
    # Resolve the user from the token
    token = params.get('utoken')
    user_info = verify_json_web_token(token)
    if not user_info:
        return jsonify("您的登录已过期,请重新登录查看.")
    user_id = user_info['uid']
    try:
        start_date = params.get('startDate')
        end_date = params.get('endDate')
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
        end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
        current_page = int(params.get('page', 1)) - 1
        page_size = int(params.get('pagesize', 30))
    except Exception:
        return jsonify("参数错误:DATE FORMAT ERROR & INT TYPE REQUIRED!")
    start_id = current_page * page_size
    db_connection = MySQLConnection()
    cursor = db_connection.get_cursor()
    # Raw SQL inner-join query.
    # An equivalent form uses a WHERE clause (replace INNER JOIN with ',' and ON with WHERE), e.g.
    # "SELECT usertb.name,abworktb.title FROM `user_info` AS usertb,`abnormal_work` AS abworktb WHERE usertb.id=%s AND usertb.id=abworktb.worker;"
    # The AS keyword is optional in either form.
    inner_join_statement = "SELECT usertb.name,usertb.org_id,abworktb.id,abworktb.custom_time,abworktb.task_type," \
                           "abworktb.title,abworktb.sponsor,abworktb.applied_org,abworktb.applicant,abworktb.tel_number," \
                           "abworktb.swiss_coin,abworktb.allowance,abworktb.note,abworktb.annex,abworktb.annex_url,abworktb.is_examined " \
                           "FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb ON " \
                           "usertb.id=%s AND usertb.id=abworktb.author_id AND (abworktb.custom_time BETWEEN %s AND %s) " \
                           "ORDER BY abworktb.custom_time DESC " \
                           "LIMIT %s,%s;"
    cursor.execute(inner_join_statement, (user_id, start_date, end_date, start_id, page_size))
    abworks = cursor.fetchall()
    # Query the total number of records
    count_statement = "SELECT COUNT(abworktb.id) AS total FROM `user_info` AS usertb INNER JOIN `abnormal_work` AS abworktb " \
                      "ON usertb.id=%s AND usertb.id=abworktb.author_id AND (abworktb.custom_time BETWEEN %s AND %s);"
    cursor.execute(count_statement, (user_id, start_date, end_date))
    total_ = cursor.fetchone()
    total_count = total_['total']
    db_connection.close()
    # Compute the total number of pages (ceiling division)
    total_page = int((total_count + page_size - 1) / page_size)
    # Organize the response data
    response_data = dict()
    response_data['abworks'] = list()
    for work_item in abworks:
        work_item['custom_time'] = work_item['custom_time'].strftime('%Y-%m-%d')
        work_item['task_type'] = ABNORMAL_WORK.get(work_item['task_type'], '')
        work_item['org_name'] = ORGANIZATIONS.get(int(work_item['org_id']), '未知')
        work_item['swiss_coin'] = work_item['swiss_coin'] if work_item['swiss_coin'] else ''
        work_item['allowance'] = int(work_item['allowance'])
        response_data['abworks'].append(work_item)
    response_data['current_page'] = current_page + 1  # restore the 1-based page number
    response_data['total_page'] = total_page
    response_data['current_count'] = len(abworks)
    response_data['total_count'] = total_count
    return jsonify(response_data)
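# The startDate/endDate/page/pagesize parsing in the list views above is repeated
# verbatim. A minimal sketch of a hypothetical shared helper (not part of the
# original module) that returns the same values and raises an exception on
# malformed input:
def parse_date_page_params(params, default_page_size=30):
    # Parse the date range and pagination query parameters used by the list views.
    start_date = params.get('startDate')
    end_date = datetime.datetime.strptime(params.get('endDate'), '%Y-%m-%d') + datetime.timedelta(days=1)
    end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
    current_page = int(params.get('page', 1)) - 1
    page_size = int(params.get('pagesize', default_page_size))
    return start_date, end_date, current_page, page_size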