def post(self):
    try:
        # method style 1
        # data = json.loads(request.get_data(as_text=True))
        # method style 2
        data = request.get_json()
        fcode = data.get('fcode')
        # opcode = data.get('opcode')
        opcode = 2
        opcount = data.get('opcount')
        opmsg = data.get('opmsg')
        # timestamp = data.get('timestamp')
        timestamp = get_datetime_now_obj()
        kwargs_oplog = {
            'userid': None,
            'fcode': fcode,
            'opcode': opcode,
            'opcount': opcount,
            'opmsg': opmsg,
            'timestamp': timestamp,
        }
        # print('==kwargs_oplog==', kwargs_oplog)
        insert_operation_log(**kwargs_oplog)
        resp_obj = {
            'errno': 0,
            'msg': 'rasp upgrade notice success',
        }
    except Exception as e:
        logger.error('rasp upgrade notice error')
        logger.error(e)
        resp_obj = {
            'errno': 1,
            'msg': 'rasp upgrade notice fail',
        }
    return resp_obj
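
# --- Illustrative only: a minimal client sketch for the POST handler above. ---
# The endpoint path is an assumption (the route registration is not shown here),
# and all field values are made up for the example.
def _sketch_post_upgrade_notice():
    import requests
    payload = {
        'fcode': 3,                    # factory code, read via data.get('fcode')
        'opcount': 120,                # illustrative operation count
        'opmsg': 'upgrade to v1.2.3',  # free-form operation message
        # 'opcode' and 'timestamp' are ignored: the handler hardcodes opcode=2
        # and stamps the server-side time itself.
    }
    resp = requests.post('http://localhost:5000/api/rasp/upgrade_notice', json=payload)
    print(resp.json())  # expect {'errno': 0, 'msg': 'rasp upgrade notice success'}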
def insert_operation_log_legacy(fcode, opcode, opcount, opmsg, timestamp):
    try:
        record = Oplog(fcode, opcode, opcount, opmsg, timestamp)
        db_mysql.session.add(record)
    except Exception as e:
        db_mysql.session.rollback()
        logger.error('insert_operation_log_legacy:')
        logger.error(str(e))
    else:
        db_mysql.session.commit()
def update_sqlite_lastuploadtime(fcode, p_datetime):
    try:
        stat = Stat.query.filter_by(fcode=fcode).first()
        stat.last_upload_time = p_datetime
    except Exception as e:
        db_sqlite.session.rollback()
        logger.error('update_sqlite_lastuploadtime:')
        logger.error(str(e))
    else:
        db_sqlite.session.commit()
def insert_operation_log(**kwargs):
    try:
        record = Oplog(**kwargs)
        db_mysql.session.add(record)
    except Exception as e:
        db_mysql.session.rollback()
        logger.error('insert_operation_log:')
        logger.error(str(e))
        # re-raise so callers can turn the failure into an error response
        raise
    else:
        db_mysql.session.commit()
def vf_oplog():
    # 1. fetch fcode
    # fcode cannot be None here; fetch_fcode raises on failure
    try:
        fcode_page = fetch_fcode()
    except KeyError as e:
        logger.error('KeyError session["fcode"]')
        return redirect(url_for('blue_rasp.vf_stat'))
    except FcodeNotSupportError as e:
        logger.error(e.err_msg)
        return redirect(url_for('blue_rasp.vf_stat'))

    # 2. get vendor list from fcode
    myquery_mysql_factories = forge_myquery_mysql_factories_by_fcode(
        g.myquery_mysql_factories, fcode_page)
    factories = myquery_mysql_factories.all()

    # 3. get operations list
    operations = filter(lambda x: x.type == 1, operations_opcode)

    # 4. get myquery_mysql_oplogs
    myquery_mysql_oplogs = forge_myquery_mysql_oplogs_by_fcode(
        g.myquery_mysql_oplogs, fcode_page)

    # 5. search handling code
    clearsearchsession = fetch_clearsearchsession()
    search_kwargs_page, search_kwargs_db = fetch_search_kwargs_oplogs_vendor(
        clearsearchsession)
    # print('==search_kwargs_page==', search_kwargs_page)
    # print('==search_kwargs_db==', search_kwargs_db)
    search_args_db = list(search_kwargs_db.values())
    if len(list(filter(lambda x: x is not None, search_args_db))) > 0:
        myquery_mysql_oplogs = forge_myquery_mysql_oplogs_vendor_by_search(
            myquery_mysql_oplogs, **search_kwargs_db)

    # 6. pagination code
    total_count = myquery_mysql_oplogs.count()
    PER_PAGE = 10
    page = request.args.get(get_page_parameter(), type=int, default=1)  # get page number from query string, default to page 1
    start = (page - 1) * PER_PAGE
    end = page * PER_PAGE if total_count > page * PER_PAGE else total_count
    pagination = Pagination(page=page, total=total_count, per_page=PER_PAGE, bs_version=3)
    # datas = myquery_mysql_oplogs.all()
    datas = myquery_mysql_oplogs.slice(start, end)

    # 7. collect params and return
    page_kwargs = {
        'fcode_page': fcode_page,
        'datas': datas,
        'pagination': pagination,
        'factories': factories,
        'operations': operations,
    }
    # return render_template('vendor_oplog.html', datas=datas, pagination=pagination)
    return render_template('vendor_oplog.html', **page_kwargs, **search_kwargs_page)
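
# --- Illustrative only: how the pagination slice above behaves. ---
# Values are hypothetical; the same PER_PAGE/start/end pattern is reused in
# vf_testdata below with PER_PAGE = 100.
def _sketch_pagination_slice():
    PER_PAGE = 10
    total_count = 25
    for page in (1, 2, 3):
        start = (page - 1) * PER_PAGE
        end = page * PER_PAGE if total_count > page * PER_PAGE else total_count
        print(page, start, end)  # -> (1, 0, 10), (2, 10, 20), (3, 20, 25)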
def my_check_retcode(cmd):
    try:
        cmd_list = cmd.split(' ')
        # todo: ret is -15 when success (killed by SIGTERM), not 0
        ret = s.call(cmd_list, shell=False)
        logger.info('==ret:{}=='.format(ret))
    # except s.CalledProcessError as e:
    except Exception as e:
        logger.error(str(e))
        return -100
    else:
        return ret
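
# --- Illustrative only: calling my_check_retcode. ---
# Assumes `s` is the standard subprocess module (import subprocess as s).
# subprocess.call returns the child's exit code; a negative value -N means the
# child was killed by signal N, so the -15 in the TODO above corresponds to SIGTERM.
def _sketch_check_retcode_caller():
    ret = my_check_retcode('systemctl restart rasp-agent')  # command is made up
    if ret in (0, -15):       # assumed success codes per the TODO above
        logger.info('command completed')
    elif ret == -100:
        logger.error('command could not be launched')
    else:
        logger.warning('command exited with code {}'.format(ret))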
def vf_index():
    try:
        fcode_page = fetch_fcode()
        # print('==fcode_page_index==', fcode_page)
    except KeyError as e:
        logger.error('KeyError session["fcode"]')
        return redirect(url_for('blue_rasp.vf_stat'))
    except FcodeNotSupportError as e:
        logger.error(e.err_msg)
        return redirect(url_for('blue_rasp.vf_stat'))
    # return render_template('vendor_index.html', fcode = fcode_page)
    return render_template('vendor_index.html')
def fix_testdatascloud_bool_qualified_overall():
    try:
        datas = TestdataCloud.query.filter(
            TestdataCloud.bool_qualified_overall == None).all()
        for data in datas:
            if (data.bool_qualified_signal and data.bool_qualified_check
                    and data.bool_qualified_scan and data.bool_qualified_deviceid
                    and data.reserve_bool_1):
                data.bool_qualified_overall = True
            else:
                data.bool_qualified_overall = False
    except Exception as e:
        db_mysql.session.rollback()
        logger.error('fix_testdatascloud_bool_qualified_overall:')
        logger.error(str(e))
    else:
        db_mysql.session.commit()
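
# --- Illustrative only: a compact equivalent of the qualification rule above. ---
# The overall flag is simply the conjunction of the five per-check flags
# (a NULL/None flag counts as not qualified). This is a sketch, not a change
# applied to fix_testdatascloud_bool_qualified_overall.
def _sketch_overall_flag(data):
    return bool(data.bool_qualified_signal and data.bool_qualified_check
                and data.bool_qualified_scan and data.bool_qualified_deviceid
                and data.reserve_bool_1)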
def put(self):
    # get current datetime
    cur_datetime = get_datetime_now_obj()
    try:
        # data = json.loads(request.get_data())
        # data = json.loads(request.get_data().decode('utf-8'))
        data = json.loads(request.get_data(as_text=True))
    except Exception as e:
        print(e)
        response_msg = {'errno': 5, 'msg': 'load request data error'}
        logger.error('response_msg: {}'.format(response_msg))
        # return resp
        return response_msg

    try:
        fcode = data.get('fcode')
    except Exception:
        response_msg = {'errno': 1, 'msg': 'get fcode error type1'}
        logger.error('response_msg: {}'.format(response_msg))
        # return resp
        return response_msg
    else:
        if fcode is None:
            try:
                fcode = data.get('testdatas')[0].get('factorycode')
            except IndexError as e:
                response_msg = {'errno': 2, 'msg': 'get fcode error type2'}
                logger.error('response_msg: {}'.format(response_msg))
                # return resp
                return response_msg

    pin = data.get('pin')
    testdatas = data.get('testdatas')
    num_recv_1 = data.get('count')
    num_recv_2 = len(testdatas)
    if num_recv_1 != num_recv_2:
        response_msg = {
            'errno': 3,
            'fcode': fcode,
            'msg': 'count number mismatch'
        }
        logger.error('response_msg: {}'.format(response_msg))
        # record oplog
        kwargs_oplog = {
            'userid': None,
            'fcode': fcode,
            'opcode': 1,
            'opcount': None,
            'opmsg': 'error: count number mismatch',
            'timestamp': cur_datetime,
        }
        insert_operation_log(**kwargs_oplog)
        # return resp
        return response_msg

    num_recv = num_recv_1
    try:
        save_to_database(testdatas, num_recv)
    # 1. exception
    except Exception as e:
        response_msg = {'errno': 4, 'fcode': fcode, 'msg': str(e)}
        logger.error('response_msg: {}'.format(response_msg))
        # record oplog
        kwargs_oplog = {
            'userid': None,
            'fcode': fcode,
            'opcode': 1,
            'opcount': num_recv,
            'opmsg': 'error: save to database failed',
            'timestamp': cur_datetime,
        }
        insert_operation_log(**kwargs_oplog)
        # return resp
        return response_msg
    # 2. success
    else:
        # 2.1 update upload time
        if fcode is not None and num_recv > 0:
            update_sqlite_lastuploadtime(fcode, cur_datetime)
        # 2.2 record oplog database
        # opcode = 1
        # opcount = num_recv
        # opmsg = 'upload success'
        # timestamp = cur_datetime
        # insert_operation_log(fcode, opcode, opcount, opmsg, timestamp)
        kwargs_oplog = {
            'userid': None,
            'fcode': fcode,
            'opcode': 1,
            'opcount': num_recv,
            'opmsg': 'upload success',
            'timestamp': cur_datetime,
        }
        insert_operation_log(**kwargs_oplog)
        # 2.3 record log file
        response_msg = {
            'errno': 0,
            'fcode': fcode,
            'msg': 'upload success',
            'pin': pin,
            'count': num_recv
        }
        logger.info('response_msg: {}'.format(response_msg))
        return response_msg
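
# --- Illustrative only: the request body shape the PUT handler above expects. ---
# Field values are made up; the per-record fields beyond 'factorycode' depend on
# save_to_database, which is defined elsewhere in the project.
def _sketch_put_upload_payload():
    return {
        'fcode': 3,        # may be omitted; falls back to testdatas[0]['factorycode']
        'pin': '123456',   # echoed back in the success response
        'count': 2,        # must equal len(testdatas), otherwise errno 3 is returned
        'testdatas': [
            {'factorycode': 3, 'deviceid': 'SN0001'},  # record fields are illustrative
            {'factorycode': 3, 'deviceid': 'SN0002'},
        ],
    }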
def get_name_by_fcode(fcode):
    try:
        return Factory.query.filter(Factory.code == fcode).first().name
    except Exception as e:
        logger.error(str(e))
        return None
def get_username_by_userid(id):
    try:
        return User.query.get(id).username
    except Exception as e:
        logger.error(str(e))
        return None
def update_sqlite_stat(fcode):
    set_update_running_state_done()

    # 1. get fcodes list, two cases
    # 1.1 fcode = 0, fcodes = [1, 2, 3, 4, 5]
    # 1.2 fcode = 1/2/3/4/5, fcodes = [1, ] or [2, ] or [3, ] or [4, ] or [5, ]
    fcodes = list()
    # fcodes_all is like [1, 2, 3, 4, 5, 6, 0]
    fcodes_all = list(
        map(lambda x: x[0], Stat.query.with_entities(Stat.fcode).all()))
    if fcode == 0:
        fcodes = fcodes_all
        fcodes.remove(0)
    elif fcode != 0 and fcode in fcodes_all:
        # fcodes = [fcode,]
        # fcodes = list()
        fcodes.append(fcode)
    else:
        pass

    # mimic time consuming
    # time.sleep(20)

    # 2. calculate data from mysql/testdatascloud
    # this section does not affect the all/fcode=0 row
    cur_datetime = get_datetime_now_obj()
    try:
        for fcode in fcodes:
            # num_total = len(TestdataCloud.query.filter_by(factorycode=fcode).yield_per(PER_QUERY_COUNT).all())
            # num_success = len(TestdataCloud.query.filter(TestdataCloud.factorycode==fcode, TestdataCloud.bool_qualified_overall==True).yield_per(PER_QUERY_COUNT).all())
            # num_failed = len(TestdataCloud.query.filter(TestdataCloud.factorycode==fcode, TestdataCloud.bool_qualified_overall==False).yield_per(PER_QUERY_COUNT).all())
            num_total = TestdataCloud.query.filter_by(
                factorycode=fcode).yield_per(PER_QUERY_COUNT).count()
            num_success = TestdataCloud.query.filter(
                TestdataCloud.factorycode == fcode,
                TestdataCloud.bool_qualified_overall == True).yield_per(
                    PER_QUERY_COUNT).count()
            num_failed = TestdataCloud.query.filter(
                TestdataCloud.factorycode == fcode,
                TestdataCloud.bool_qualified_overall == False).yield_per(
                    PER_QUERY_COUNT).count()
            num_srate = 0 if num_total == 0 else round(num_success / num_total, 4)

            stat = Stat.query.filter_by(fcode=fcode).first()
            stat.total = num_total
            stat.success = num_success
            stat.failed = num_failed
            stat.srate = num_srate
            stat.last_update_time = cur_datetime
            # stat.save()
            db_sqlite.session.commit()
    except Exception as e:
        db_sqlite.session.rollback()
        logger.error(str(e))
        reset_update_running_state_done()
        return -1
        # errno = -1
    # else:
    #     db_sqlite.session.commit()
    #     errno = 0
    # finally:
    #     reset_update_running_state_done()
    #     return errno

    # 3. calculate data from sqlite/stats itself
    # this section only affects the all/fcode=0 row
    data_pack = Stat.query.filter(Stat.fcode > 0).with_entities(
        Stat.total, Stat.success, Stat.failed, Stat.last_upload_time,
        Stat.last_update_time).all()
    stat_total_list = list(map(lambda x: x[0], data_pack))
    stat_success_list = list(map(lambda x: x[1], data_pack))
    stat_failed_list = list(map(lambda x: x[2], data_pack))
    # stat_upload_list = list(map(lambda x: x[3], data_pack))
    # stat_update_list = list(map(lambda x: x[4], data_pack))
    stat_total = reduce(lambda x, y: x + y, stat_total_list)
    stat_success = reduce(lambda x, y: x + y, stat_success_list)
    stat_failed = reduce(lambda x, y: x + y, stat_failed_list)
    stat_srate = 0 if stat_total == 0 else round(stat_success / stat_total, 4)
    # stat_upload = reduce(lambda x, y: x if x >= y else y, list(filter(lambda x: x is not None, stat_upload_list)))
    # stat_update = reduce(lambda x, y: x if x >= y else y, list(filter(lambda x: x is not None, stat_update_list)))
    stat_update = cur_datetime
    try:
        stat_all = Stat.query.filter(Stat.fcode == 0).first()
        stat_all.total = stat_total
        stat_all.success = stat_success
        stat_all.failed = stat_failed
        stat_all.srate = stat_srate
        # stat_all.last_upload_time = stat_upload
        stat_all.last_update_time = stat_update
        stat_all.save()
    except Exception as e:
        db_sqlite.session.rollback()
        logger.error(str(e))
        reset_update_running_state_done()
        return -2

    # reset running parameter
    reset_update_running_state_done()
    return 0
def vf_testdata():
    # 1. get device list from userid
    devices = g.myquery_mysql_devices.all()

    # 2. fetch fcode from request.form and session
    try:
        fcode = fetch_fcode()
    except KeyError as e:
        # logger.error(e)
        logger.error('KeyError session["fcode"]')
        return redirect(url_for('blue_rasp.vf_stat'))
    except FcodeNotSupportError as e:
        logger.error(str(e))
        return redirect(url_for('blue_rasp.vf_stat'))

    # 3. get factory list from userid
    myquery_mysql_factories = forge_myquery_mysql_factories_by_fcode(
        g.myquery_mysql_factories, fcode)
    factories = myquery_mysql_factories.all()

    # 4. get testdatascloud basic query
    myquery_mysql_testdatascloud = forge_myquery_mysql_testdatascloud_by_fcode(
        g.myquery_mysql_testdatascloud, fcode)

    # search handling code
    clearsearchsession = fetch_clearsearchsession()
    search_kwargs_page, search_kwargs_db = fetch_search_kwargs_testdatas(
        clearsearchsession)
    search_args_db = list(search_kwargs_db.values())
    # if any of the search params is not None, filter further
    if len(list(filter(lambda x: x is not None, search_args_db))) > 0:
        # myquery_mysql_testdatascloud = get_myquery_testdatas_by_search(myquery_mysql_testdatascloud, **search_kwargs_db)
        myquery_mysql_testdatascloud = forge_myquery_mysql_testdatascloud_by_search(
            myquery_mysql_testdatascloud, **search_kwargs_db)

    # 5. pagination code
    total_count = myquery_mysql_testdatascloud.count()
    # todo: enlarge PER_PAGE
    PER_PAGE = 100
    page = request.args.get(get_page_parameter(), type=int, default=1)  # get page number from query string, default to page 1
    # start/end is like 0/100, 100/200, etc.
    start = (page - 1) * PER_PAGE
    end = page * PER_PAGE if total_count > page * PER_PAGE else total_count
    pagination = Pagination(page=page,
                            total=total_count,
                            per_page=PER_PAGE,
                            bs_version=3)
    testdatascloud = myquery_mysql_testdatascloud.slice(start, end)

    page_kwargs = {
        'testdatascloud': testdatascloud,
        'factories': factories,
        'devices': devices,
        'fcode': fcode,
        'pagination': pagination,
    }
    # 6. return
    return render_template('rasp_testdata.html', **page_kwargs, **search_kwargs_page)
def cmd_download_testdata():
    # 1. determine csv or excel
    download_type = request.form.get('download_type', type=str)
    if download_type not in ['csv', 'xls']:
        abort(400)  # abort() needs a status code; reject unsupported download types

    # 2. fetch fcode from request.form and session
    try:
        fcode = fetch_fcode()
    except KeyError as e:
        # logger.error(e)
        logger.error('KeyError session["fcode"]')
        return redirect(url_for('blue_rasp.vf_stat'))
    except FcodeNotSupportError as e:
        logger.warning(str(e))
        return redirect(url_for('blue_rasp.vf_stat'))

    # 3. get testdatascloud basic query
    myquery_mysql_testdatascloud = forge_myquery_mysql_testdatascloud_by_fcode(
        g.myquery_mysql_testdatascloud, fcode)

    # 4. search handling code
    # for download, fetch_clearsearchsession will always return False
    # clearsearchsession = fetch_clearsearchsession()
    clearsearchsession = None
    search_kwargs_page, search_kwargs_db = fetch_search_kwargs_testdatas(
        clearsearchsession)
    search_args_db = list(search_kwargs_db.values())
    # if any of the search params is not None, filter further
    # if any(search_args):
    if len(list(filter(lambda x: x is not None, search_args_db))) > 0:
        myquery_mysql_testdatascloud = forge_myquery_mysql_testdatascloud_by_search(
            myquery_mysql_testdatascloud, **search_kwargs_db)

    total_count = myquery_mysql_testdatascloud.count()
    if total_count >= 65535:
        return redirect(url_for('blue_error.vf_downloadoverflow'))

    # 5. gen csv/excel file and return
    # timestamp = datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')
    datetime_obj = get_datetime_now_obj()
    timestamp = datetime_obj.strftime('%Y_%m_%d_%H_%M_%S')
    shortname = 'TestdataCloud-' + timestamp + '.' + download_type
    genfolder = os.path.join(topdir, 'pub', download_type)
    filename = os.path.join(genfolder, shortname)
    empty_folder_filesonly(genfolder)
    if download_type == 'csv':
        gen_csv_by_query(myquery_mysql_testdatascloud, filename)
    # todo
    elif download_type == 'xls':
        pass
    else:
        pass

    # plain download
    # return send_from_directory(genfolder, excelname, as_attachment=True)
    # streamed response
    response = Response(send_file(filename),
                        content_type='application/octet-stream')
    response.headers[
        "Content-disposition"] = 'attachment; filename=%s' % shortname

    # 6. record oplog
    # timestamp = get_datetime_now_obj()
    userid = current_user.id
    fcode = None
    opcode = 3
    opcount = total_count
    opmsg = 'download csv'
    kwargs_oplog = {
        'userid': userid,
        'fcode': fcode,
        'opcode': opcode,
        'opcount': opcount,
        'opmsg': opmsg,
        'timestamp': datetime_obj,
    }
    insert_operation_log(**kwargs_oplog)

    # 7. return
    return response
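
# --- Illustrative only: one possible shape of gen_csv_by_query. ---
# The real helper is defined elsewhere in the project; this sketch assumes the
# query yields SQLAlchemy model instances and writes columns in table order.
def _sketch_gen_csv_by_query(query, filename):
    import csv
    rows = query.all()
    if not rows:
        return
    columns = [c.name for c in rows[0].__table__.columns]  # header from the model
    with open(filename, 'w', newline='', encoding='utf-8') as f:
        writer = csv.writer(f)
        writer.writerow(columns)
        for row in rows:
            writer.writerow([getattr(row, col) for col in columns])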