def face_comp(str_relative_path, str_img_save):
    # Fetch the unprocessed capture rows recorded for this image path.
    # NOTE: column spelling "realtive_path" is kept as used by the schema; the
    # DELETE below originally used a different spelling ("reative_path"), which
    # looked like a typo, so both statements now use the same column name.
    str_query_top = "SELECT * FROM face_capture2 where realtive_path='" + str_relative_path + "' and " \
                    "is_proccessed=0 "
    result_fetch_last_rec = db_opr.fetch_result(str_query_top)
    for row_fetch_last_rec in result_fetch_last_rec:
        comp_result = recognize.compare_dat_in_out(row_fetch_last_rec["id"])
        print(comp_result)
        if comp_result is False:
            # No match: discard the saved frame and its capture record.
            os.remove(str_img_save)
            str_delete_query = "DELETE FROM face_capture2 where realtive_path='" + str_relative_path + "'"
            db_opr.execute_insert(str_delete_query)
            print("GARBAGE REMOVED")
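
# A minimal sketch, not part of the original module: the queries in face_comp()
# are built by string concatenation, which breaks if a path contains a quote
# and is open to SQL injection. Assuming db_opr wraps a DB-API 2.0 connection
# (an assumption; its real interface is not shown here), the same lookup could
# be expressed with a parameterized query. fetch_unprocessed and the conn
# argument are hypothetical names used only for illustration.
def fetch_unprocessed(conn, str_relative_path):
    # Column spellings ("realtive_path", "is_proccessed") follow the schema
    # used by face_comp() above. The placeholder style (%s vs ?) depends on
    # the actual driver behind db_opr.
    sql = ("SELECT * FROM face_capture2 "
           "WHERE realtive_path = %s AND is_proccessed = 0")
    cur = conn.cursor()
    cur.execute(sql, (str_relative_path,))
    rows = cur.fetchall()
    cur.close()
    return rows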
def train():
    # Reset the data_set flag, then re-encode every face marked for training.
    str_reinit = "UPDATE person_details2 set data_set=0"
    db_opr.execute_insert(str_reinit)
    global train_face_encode_list
    query_fetch_faces = "SELECT * from person_details2 where is_train=1"
    result_fetch_faces = db_opr.fetch_result(query_fetch_faces)
    # Use a single executor for the whole batch; the original loop created a
    # new ThreadPoolExecutor per row, so each submit completed before the next
    # row started and nothing actually ran in parallel.
    with ThreadPoolExecutor(max_workers=3) as executor:
        futures = []
        for row_fetch_faces in result_fetch_faces:
            print(row_fetch_faces)
            futures.append(executor.submit(fast_encode, row_fetch_faces))
        for future in futures:
            future.result()  # propagate any exception raised inside fast_encode
    print("ALL ENCODINGS DONE")
    with open('DATASET/dataset_faces_full.dat', 'wb') as f:
        pickle.dump(train_face_encode_list, f)
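
# A minimal sketch, not part of the original module: fast_encode() is called by
# train() but defined elsewhere in the project. Based on the per-row logic in
# train_stud() below, it presumably loads the person's image, computes a face
# encoding, and stores it in the shared train_face_encode_list keyed by id.
# The column names and upload directory used here mirror train_stud() and are
# assumptions about the real implementation.
def fast_encode_sketch(row_fetch_faces):
    person_face_name = str(row_fetch_faces["id"])
    img_path = os.path.join(str(config_obj.str_views_dir),
                            "assets", "uploads", "files",
                            str(row_fetch_faces["img_file_path"]))
    image = face_recognition.load_image_file(img_path)
    encodings = face_recognition.face_encodings(image)
    if encodings:
        train_face_encode_list[person_face_name] = encodings[0]
        return person_face_name, encodings[0]
    return person_face_name, None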
def train_stud():
    # Re-encode every student image flagged for training and pickle the result.
    str_reinit = "UPDATE student set data_set=0"
    db_opr.execute_insert(str_reinit)
    global train_face_encode_list
    query_fetch_faces = "SELECT * from student where is_train=1"
    result_fetch_faces = db_opr.fetch_result(query_fetch_faces)
    print("DB DONE")
    for row_fetch_faces in result_fetch_faces:
        print(row_fetch_faces)
        person_face_name = str(row_fetch_faces["id"])
        train_face_fetch = os.path.join(str(config_obj.str_views_dir),
                                        "assets", "uploads", "files",
                                        str(row_fetch_faces["img_file_path"]))
        train_image = face_recognition.load_image_file(train_face_fetch)
        face_encodings = face_recognition.face_encodings(train_image)
        if not face_encodings:
            # Skip images where no face was detected instead of raising IndexError.
            print("NO FACE FOUND IN " + train_face_fetch)
            continue
        train_face_encode_list[person_face_name] = face_encodings[0]
        # The original query was missing the closing quote around the id value.
        str_query_update = "UPDATE student set data_set=1 where id='" + person_face_name + "'"
        db_opr.execute_insert(str_query_update)
    with open('TRAINDATASET/dataset_faces_full_students.dat', 'wb') as f:
        pickle.dump(train_face_encode_list, f)
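
# A minimal sketch, not part of the original module, showing how the pickled
# dictionary written by train_stud() could be consumed at recognition time.
# match_student, probe_image_path and tolerance are hypothetical names used
# only for illustration; the dataset path and the {id: encoding} layout come
# from train_stud() above.
def match_student(probe_image_path, tolerance=0.6):
    with open('TRAINDATASET/dataset_faces_full_students.dat', 'rb') as f:
        known = pickle.load(f)  # {student_id: 128-d face encoding}
    probe_image = face_recognition.load_image_file(probe_image_path)
    probe_encodings = face_recognition.face_encodings(probe_image)
    if not probe_encodings:
        return None
    ids = list(known.keys())
    encodings = [known[k] for k in ids]
    # Compare against every stored encoding and return the id of the closest
    # face, provided it falls within the tolerance.
    distances = face_recognition.face_distance(encodings, probe_encodings[0])
    best = min(range(len(distances)), key=lambda i: distances[i])
    return ids[best] if distances[best] <= tolerance else None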
        # loop over the face detections
        for i, d in enumerate(face_rects):
            x1, y1, x2, y2, w, h = d.left(), d.top(), d.right() + 1, d.bottom() + 1, d.width(), d.height()
            # crop the detected face region and hand it off for recognition
            crop = overlay[d.top():d.bottom(), d.left():d.right()]
            face_found(crop, 0)
            draw_border(overlay, (x1, y1), (x2, y2), (162, 255, 0), 2, 10, 10)

        # make the bounding-box overlay semi-transparent
        cv2.addWeighted(overlay, alpha, output, 1 - alpha, 0, output)

        # show the frame
        cv2.imshow(frame_title, output)
        key = cv2.waitKey(1)

        # press q to break out of the loop
        if key == ord("q"):
            break

    # cleanup
    cv2.destroyAllWindows()
    stream.release()


if __name__ == '__main__':
    from multiprocessing import Process

    # Spawn one capture/recognition process per camera registered in camera_master.
    str_query_camera_master = "SELECT * FROM camera_master"
    result_camera_master = db_opr.fetch_result(str_query_camera_master)
    processes = []
    for row_camera_master in result_camera_master:
        if row_camera_master is not None:
            # Process.start() returns None, so the original code appended None
            # to the list; keep the Process object so the parent can join it.
            proc = Process(target=main,
                           args=(row_camera_master['id'],
                                 row_camera_master['source_URL'],
                                 row_camera_master['cam_nam']))
            proc.start()
            processes.append(proc)
    for proc in processes:
        proc.join()
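
# A minimal sketch, not part of the original module: draw_border() is called in
# the detection loop above but defined elsewhere in the project. Judging from
# its call signature (image, two corner points, color, thickness, and two extra
# ints), it presumably draws a rounded-corner bounding box; the version below is
# a guess at that behaviour using only cv2.line and cv2.ellipse, with r as the
# corner radius and d as the length of the straight segments.
def draw_border_sketch(img, pt1, pt2, color, thickness, r, d):
    x1, y1 = pt1
    x2, y2 = pt2
    # Top-left corner
    cv2.line(img, (x1 + r, y1), (x1 + r + d, y1), color, thickness)
    cv2.line(img, (x1, y1 + r), (x1, y1 + r + d), color, thickness)
    cv2.ellipse(img, (x1 + r, y1 + r), (r, r), 180, 0, 90, color, thickness)
    # Top-right corner
    cv2.line(img, (x2 - r, y1), (x2 - r - d, y1), color, thickness)
    cv2.line(img, (x2, y1 + r), (x2, y1 + r + d), color, thickness)
    cv2.ellipse(img, (x2 - r, y1 + r), (r, r), 270, 0, 90, color, thickness)
    # Bottom-left corner
    cv2.line(img, (x1 + r, y2), (x1 + r + d, y2), color, thickness)
    cv2.line(img, (x1, y2 - r), (x1, y2 - r - d), color, thickness)
    cv2.ellipse(img, (x1 + r, y2 - r), (r, r), 90, 0, 90, color, thickness)
    # Bottom-right corner
    cv2.line(img, (x2 - r, y2), (x2 - r - d, y2), color, thickness)
    cv2.line(img, (x2, y2 - r), (x2, y2 - r - d), color, thickness)
    cv2.ellipse(img, (x2 - r, y2 - r), (r, r), 0, 0, 90, color, thickness)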