def update_uncentain_data(meta_info, batch_id=0):
    """Insert or update the uncertain-category record for one data item.

    Args:
        meta_info (dict): must contain "data_oid", "category", "category_from" and "prob".
        batch_id (int, optional): batch number. Defaults to 0.
    """
    data_oid = meta_info["data_oid"]
    # TODO: 米桂田
    for db in database.get_db():
        query_result_list = db.query(database.XUncertainCategoryTable).filter(
            database.XUncertainCategoryTable.data_oid == data_oid).filter(
                database.XUncertainCategoryTable.batch_id == batch_id)
        if query_result_list.count() > 0:
            # Record already exists: refresh it and reset its judgment state.
            rule_predict_ret_obj = query_result_list.one()
            rule_predict_ret_obj.category = meta_info["category"]
            rule_predict_ret_obj.category_from = meta_info["category_from"]
            rule_predict_ret_obj.prob = meta_info["prob"]
            rule_predict_ret_obj.finished = 0
            rule_predict_ret_obj.judgment_type = 0
            rule_predict_ret_obj.judgment_category = "null"
        else:
            # No record yet for this (batch_id, data_oid): insert a new one.
            rule_predict_ret_obj = database.XUncertainCategoryTable(
                batch_id=batch_id,
                data_oid=data_oid,
                category=meta_info["category"],
                prob=meta_info["prob"],
                category_from=meta_info["category_from"])
            db.add(rule_predict_ret_obj)
        db.commit()
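# Example usage (a minimal sketch; assumes `database` is configured and the
# XUncertainCategoryTable schema matches the fields set above — the concrete
# values, including "category_from", are illustrative only):
#
#   update_uncentain_data(
#       {
#           "data_oid": 7,
#           "category": "预计的业绩",
#           "category_from": "rule",
#           "prob": 0.85,
#       },
#       batch_id=1)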
def unknown_faces():
    db = get_db()
    images = faces.find_unknown_in_db(cursor=db.cursor, api=api)
    existing_people = get_person().list()
    return render_template('unknown_faces.html.jinja',
                           images=images,
                           people=existing_people,
                           crop_size=100)
def get_ai_prediction_result(batch_id=0, ai_id=0):
    """Collect the AI prediction results of one batch, keyed by data_oid.

    Args:
        batch_id (int, optional): batch number. Defaults to 0.
        ai_id (int, optional): AI model ID. Defaults to 0.

    Returns:
        dict: {data_oid: {"data_oid": ..., "category": ..., "category_id": ..., "proba": ...}}
    """
    ans_data = {}
    for db in database.get_db():
        data_list = db.query(
            database.AiPredictonOfResultsTable.data_oid,
            database.AiPredictonOfResultsTable.meta_info,
            database.AiPredictonOfResultsTable.category).filter(
                database.AiPredictonOfResultsTable.batch_id == batch_id
            ).filter(database.AiPredictonOfResultsTable.ai_id == ai_id)
        metric_info = get_metric_info(ai_id, "ai_{}".format(ai_id))
        for data in data_list:
            proba = 0.1
            category_id = None
            try:
                data_info_dict = json.loads(data.meta_info)
                category_id = data_info_dict["category_id"]
                category = data_info_dict["category"]
                # Estimate the probability that the item really belongs to this
                # category: the predicted probability weighted by the model's
                # precision for that category.
                current_proba = data_info_dict["proba_list"][category_id - 1]
                precision = metric_info["precision"].get(category, 1)
                proba = current_proba * precision
            except Exception as identifier:
                print(identifier)
            ans_data[data.data_oid] = {
                "data_oid": data.data_oid,
                "category": data.category,
                "category_id": category_id,
                "proba": proba
            }
    return ans_data
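# Example usage (sketch; assumes a populated AiPredictonOfResultsTable and a
# working get_metric_info — the batch and model IDs below are illustrative):
#
#   predictions = get_ai_prediction_result(batch_id=1, ai_id=2)
#   for data_oid, info in predictions.items():
#       print(data_oid, info["category"], info["proba"])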
def start_scan():
    db = get_db()
    if threads['photos'] is None or not threads['photos'].is_alive():
        queue = PhotoQueue(db.cnx)
        queue.fill_queue()
        threads['photos'] = Thread(target=queue.consume,
                                   kwargs=dict(process=process_photos.process(
                                       api=api, cursor=db.cnx.cursor())))
        threads['photos'].start()
        return {'result': 'Scan has started'}
    return {'result': 'in_progress'}
def addUser():
    conn = get_db()
    c = conn.cursor()
    print("Opened database successfully")
    # Business logic: insert a demo user row.
    c.execute("INSERT INTO user (ID, USERNAME, password) \
        VALUES (1, 'Paul', '12321')")
    conn.commit()
    print("Records created successfully")
    conn.close()
    return 'add success'
def updateUser():
    conn = get_db()
    c = conn.cursor()
    print("Opened database successfully")
    # Business logic: update the demo user's name.
    c.execute("UPDATE user SET username = '******' WHERE ID = 1")
    conn.commit()
    print("Total number of rows updated :", conn.total_changes)
    print("Records updated successfully")
    conn.close()
    return 'update success'
def getUser():
    conn = get_db()
    c = conn.cursor()
    print("Opened database successfully")
    # Business logic: read back all users.
    cursor = c.execute("SELECT id, username, password FROM user")
    for row in cursor:
        print(row[1])  # username
        print(row[2])  # password (row["password"] would need sqlite3.Row as row_factory)
    print("Records fetched successfully")
    conn.close()
    return 'get success'
def possible_faces_matches():
    db = get_db()
    ids = [int(id) for id in request.args.getlist('id')]
    print('Looking for', ids)
    faces_encodings = faces.get_faces_encodings(faces_ids=ids, cursor=db.cursor)
    possible_faces = faces.find_closest_matches_in_db(
        faces_encodings=faces_encodings,
        ignore_known=True,
        exclude_list=ids,
        cursor=db.cnx.cursor(dictionary=True))
    result = list(map(format_faces_response, possible_faces))
    return {'result': {'faces': result}}
@app.route('/faces/<int:id>/matches')
def possible_face_matches(id):
    db = get_db()
    face_encodings = faces.get_face_encodings(face_id=id, cursor=db.cursor)
    possible_faces = faces.find_closest_match_in_db(
        face_encodings=face_encodings,
        ignore_known=True,
        exclude_list=(id, ),
        cursor=db.cnx.cursor(dictionary=True))
    return {
        'result': {
            'id': id,
            'faces': list(map(format_faces_response, possible_faces))
        }
    }
def get_rule_model_group(batch_id: str) -> dict:
    """Load the rule-model group of a batch, keyed by category, for later use.

    Args:
        batch_id (str): batch ID (an int is converted to str).

    Returns:
        dict: {category: {"origin_pos_patterns": [...], "origin_neg_patterns": [...]}}
    """
    if not isinstance(batch_id, str):
        batch_id = str(batch_id)
    rule_model_group = {}
    category_list = []
    for db in database.get_db():
        query_category_list = db.query(
            database.BatchRegularInfoTable.category_id).filter(
                database.BatchRegularInfoTable.batch_id == batch_id).group_by(
                    database.BatchRegularInfoTable.category_id)
        if query_category_list.count() == 0:
            return rule_model_group  # TODO: how to handle an empty result
        else:
            for query_category in query_category_list:
                category_list.append(query_category[0])
        for category_id in category_list:
            pos_patterns_list = []
            neg_patterns_list = []
            query_result_list = db.query(
                database.BatchRegularInfoTable).filter(
                    database.BatchRegularInfoTable.batch_id == batch_id,
                    database.BatchRegularInfoTable.category_id == category_id)
            for query_result in query_result_list:
                if query_result.regular_type == 'pos':
                    pos_patterns_list.append(query_result.content)
                elif query_result.regular_type == 'neg':
                    neg_patterns_list.append(query_result.content)
                else:
                    pass  # TODO: handle other rule types if they ever appear
            category = db.query(
                database.BatchCategoryInfoTable.category).filter(
                    database.BatchCategoryInfoTable.category_id ==
                    category_id).one()
            rule_model_group[category[0]] = {
                "origin_pos_patterns": pos_patterns_list,
                "origin_neg_patterns": neg_patterns_list
            }
    return rule_model_group
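# Example usage (sketch; assumes BatchRegularInfoTable / BatchCategoryInfoTable
# already contain rules for the batch — the batch ID is illustrative):
#
#   rule_model_group = get_rule_model_group("1")
#   for category, patterns in rule_model_group.items():
#       print(category,
#             len(patterns["origin_pos_patterns"]),
#             len(patterns["origin_neg_patterns"]))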
def update_predict_data_rule(meta_info, batch_id=0, model_id=0):
    """Insert or update a rule-model prediction result.

    Args:
        meta_info (dict): e.g. {
            "category_id": 2,
            "category": "预计的业绩",
            "data_oid": 7
        }
        batch_id (int, optional): batch number. Defaults to 0.
        model_id (int, optional): rule model ID. Defaults to 0.
    """
    data_oid = meta_info["data_oid"]
    for db in database.get_db():
        query_result_list = db.query(
            database.RulePredictionOfResultsTable).filter(
                database.RulePredictionOfResultsTable.batch_id ==
                batch_id).filter(
                    database.RulePredictionOfResultsTable.model_id == model_id
                ).filter(
                    database.RulePredictionOfResultsTable.data_oid == data_oid)
        str_meta_info = json.dumps(meta_info,
                                   ensure_ascii=False,
                                   indent=4,
                                   cls=NpEncoder)
        if query_result_list.count() > 0:
            rule_predict_ret_obj = query_result_list.one()
            db_meta_info = json.loads(rule_predict_ret_obj.meta_info)
            print(db_meta_info)
            if meta_info["category"] != rule_predict_ret_obj.category:
                # The category changed: update the existing record.
                rule_predict_ret_obj.category = meta_info["category"]
                rule_predict_ret_obj.meta_info = str_meta_info
                db.commit()
        else:
            # No record yet: insert a new prediction result.
            rule_predict_ret_obj = database.RulePredictionOfResultsTable(
                data_oid=data_oid,
                category=meta_info["category"],
                batch_id=batch_id,
                model_id=model_id,
                meta_info=str_meta_info)
            db.add(rule_predict_ret_obj)
            db.commit()
def update_predict_data(meta_info, batch_id=0, ai_id=0):
    """Insert or update an AI prediction result.

    Args:
        meta_info (dict): e.g. {
            "category_id": 2,
            "decision_list": [
                0.849419160098665, 3.2599307808093747,
                2.118750077455952, -0.25528040257805545
            ],
            "proba_list": [
                0.0013409690326336684, 0.9808453085781627,
                0.01730583467187295, 0.0005078877173305729
            ],
            "category": "预计的业绩",
            "data_oid": 7
        }
        batch_id (int, optional): batch number. Defaults to 0.
        ai_id (int, optional): AI model ID. Defaults to 0.

    Returns:
        int: 0 if a database session was available and the write was made,
            otherwise 1.
    """
    data_oid = meta_info["data_oid"]
    for db in database.get_db():
        query_result_list = db.query(
            database.AiPredictonOfResultsTable).filter(
                database.AiPredictonOfResultsTable.batch_id == batch_id
            ).filter(database.AiPredictonOfResultsTable.ai_id == ai_id).filter(
                database.AiPredictonOfResultsTable.data_oid == data_oid)
        str_meta_info = json.dumps(meta_info,
                                   ensure_ascii=False,
                                   indent=4,
                                   cls=NpEncoder)
        if query_result_list.count() > 0:
            ai_predict_ret_obj = query_result_list.one()
            db_meta_info = json.loads(ai_predict_ret_obj.meta_info)
            if meta_info["category"] != db_meta_info["category"]:
                # The category changed: update the existing record.
                ai_predict_ret_obj.category = meta_info["category"]
                ai_predict_ret_obj.meta_info = str_meta_info
                db.commit()
        else:
            # No record yet: insert a new prediction result.
            ai_ret_obj = database.AiPredictonOfResultsTable(
                data_oid=data_oid,
                category=meta_info["category"],
                batch_id=batch_id,
                ai_id=ai_id,
                meta_info=str_meta_info)
            db.add(ai_ret_obj)
            db.commit()
        return 0
    return 1
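# Example usage (sketch; the meta_info below mirrors the docstring example and
# is illustrative — in practice proba_list / decision_list come from the AI
# model's output):
#
#   update_predict_data(
#       {
#           "category_id": 2,
#           "decision_list": [0.85, 3.26, 2.12, -0.26],
#           "proba_list": [0.0013, 0.9808, 0.0173, 0.0005],
#           "category": "预计的业绩",
#           "data_oid": 7,
#       },
#       batch_id=1,
#       ai_id=2)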
def get_person():
    db = get_db()
    person = People(cnx=db.cnx)
    return person
def unasign_face(id):
    db = get_db()
    faces.unlink_from_person(id=id, cursor=db.cursor, cnx=db.cnx)
    return {'result': {'id': id, 'ok': True}}