def update_request(message):
    """Dispatch an update request to the matching update routine."""
    print('Receive', message)
    name = message['name']
    try:
        if name == 'landmarks':
            update_landmark()
        elif name == 'model':
            update_model()
        elif name == 'lfw':
            update_lfw()
        elif name == 'output':
            update_output()
        elif name == 'prediction':
            make_tests()
    except Exception as e:
        socketio_app.emit('finish-' + str(name),
                          {'error': "{}: {}".format(type(e).__name__, str(e))})
def check_request(message):
    """Report the current state of the requested resource back to the client."""
    name = message['name']
    try:
        info = None
        if name == 'landmarks':
            info = get_landmark_info()
        elif name == 'model':
            info = get_model_info()
        elif name == 'lfw':
            info = get_lfw_info()
        elif name == 'output':
            info = get_output_info()
        # if name == 'prediction': return
        if info:
            socketio_app.emit('finish-' + str(name), info)
    except Exception as e:
        socketio_app.emit('finish-' + str(name),
                          {'error': "{}: {}".format(type(e).__name__, str(e))})
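# Hedged sketch: one way these dispatchers could be exposed to the browser.
# socketio_app is the Flask-SocketIO server used throughout this module; the
# event names 'update' and 'check' below are assumptions for illustration,
# not names taken from this codebase.
@socketio_app.on('update')
def on_update_sketch(message):
    update_request(message)

@socketio_app.on('check')
def on_check_sketch(message):
    check_request(message)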
def download_and_extract_model(model_name, data_dir):
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
    file_id = model_dict[model_name]
    destination = os.path.join(data_dir, model_name + '.zip')
    if not os.path.exists(destination):
        print('Downloading model to %s' % destination)
        socketio_app.emit('log-landmark', {'message': 'Downloading model to %s' % destination})  # move emits to logging
        download_file_from_google_drive(file_id, destination)
    with zipfile.ZipFile(destination, 'r') as zip_ref:
        print('Extracting model to %s' % data_dir)
        socketio_app.emit('log-landmark', {'message': 'Extracting model to %s' % data_dir})  # move emits to logging
        zip_ref.extractall(data_dir)
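# download_file_from_google_drive() is referenced above but not defined in this
# section. A minimal sketch of the usual pattern is shown below, assuming the
# `requests` library; large public Drive files answer the first request with a
# "download_warning" cookie whose value must be echoed back as a confirm token.
import requests

def download_file_from_google_drive_sketch(file_id, destination):
    url = 'https://docs.google.com/uc?export=download'
    session = requests.Session()
    response = session.get(url, params={'id': file_id}, stream=True)
    # Re-issue the request with the confirm token if Drive asked for one.
    token = next((value for key, value in response.cookies.items()
                  if key.startswith('download_warning')), None)
    if token:
        response = session.get(url, params={'id': file_id, 'confirm': token}, stream=True)
    with open(destination, 'wb') as f:
        for chunk in response.iter_content(32 * 1024):
            if chunk:  # skip keep-alive chunks
                f.write(chunk)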
def update_lfw():
    socketio_app.emit('log-lfw', {'message': 'Start downloading...'})
    archive = absolute(app.config['FACE_NET_DATA_DIR']) + '.tar.gz'
    data = absolute(app.config['FACE_NET_DATA_DIR'])
    urllib.request.urlretrieve(app.config['FACE_NET_LWF_URL'], archive)
    socketio_app.emit('log-lfw', {'message': 'Extracting...'})
    os.makedirs(data, exist_ok=True)
    with tarfile.open(archive, "r:gz") as tar:
        tar.extractall(data)
    os.remove(archive)
    socketio_app.emit('log-lfw', {'message': 'Done'})
    socketio_app.emit('finish-lfw', get_lfw_info())
def preprocess(input_dir, output_dir, crop_dim):
    start_time = time.time()
    pool = mp.Pool(processes=mp.cpu_count())
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Mirror the per-person directory layout of the input dataset.
    for image_dir in os.listdir(input_dir):
        image_output_dir = os.path.join(output_dir, os.path.basename(image_dir))
        if not os.path.exists(image_output_dir):
            os.makedirs(image_output_dir)
    # Crop every image asynchronously on the worker pool.
    image_paths = glob.glob(os.path.join(input_dir, '**/*.jpg'))
    for index, image_path in enumerate(image_paths):
        image_output_dir = os.path.join(output_dir, os.path.basename(os.path.dirname(image_path)))
        output_path = os.path.join(image_output_dir, os.path.basename(image_path))
        pool.apply_async(preprocess_image, (image_path, output_path, crop_dim))
    pool.close()
    pool.join()
    logger.info('Completed in {} seconds'.format(time.time() - start_time))
    socketio_app.emit('log-lfw', {'message': 'Completed in {} seconds'.format(time.time() - start_time)})  # move emits to logging
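# preprocess_image() runs on the worker pool above but is not defined in this
# section. A minimal sketch under assumed dependencies (dlib + OpenCV) is shown
# below; the real helper likely aligns faces using the downloaded landmarks
# file rather than a plain bounding-box crop.
import cv2
import dlib

_face_detector = dlib.get_frontal_face_detector()

def preprocess_image_sketch(input_path, output_path, crop_dim):
    """Crop the largest detected face and resize it to crop_dim x crop_dim."""
    image = cv2.imread(input_path)
    if image is None:
        return
    detections = _face_detector(cv2.cvtColor(image, cv2.COLOR_BGR2GRAY), 1)
    if not detections:
        return
    rect = max(detections, key=lambda r: r.width() * r.height())
    top, bottom = max(rect.top(), 0), min(rect.bottom(), image.shape[0])
    left, right = max(rect.left(), 0), min(rect.right(), image.shape[1])
    face = cv2.resize(image[top:bottom, left:right], (crop_dim, crop_dim))
    cv2.imwrite(output_path, face)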
def update_landmark():
    socketio_app.emit('log-landmark', {'message': 'Start downloading...'})
    file = absolute(app.config['FACE_NET_LANDMARKS_FILE'])
    archive = file + '.bz2'
    urllib.request.urlretrieve(app.config['FACE_NET_LANDMARKS_URL'], archive)
    socketio_app.emit('log-landmark', {'message': 'Extracting...'})
    # Decompress the bz2 archive in 100 KB chunks to keep memory usage low.
    with open(file, 'wb') as new_file, bz2.BZ2File(archive, 'rb') as bz2_file:
        for chunk in iter(lambda: bz2_file.read(100 * 1024), b''):
            new_file.write(chunk)
    os.remove(archive)
    socketio_app.emit('log-landmark', {'message': 'Done'})
    socketio_app.emit('finish-landmark', get_landmark_info())
def update_model():
    socketio_app.emit('log-model', {'message': 'Start updating...'})
    download_and_extract_model('20170511-185253', absolute(app.config['FACE_NET_WEIGHTS_DIR']))
    socketio_app.emit('log-model', {'message': 'Done'})
    socketio_app.emit('finish-model', get_model_info())
def make_tests():
    crop_dim = 180
    print('make_tests')
    socketio_app.emit('log-prediction', {'message': 'Start making tests...'})
    # Hard-coded test images: two photos of the same person and one of another person.
    im1 = '/home/srivoknovski/Python/flask/acme/Networks/FaceNet/data/lfw/Aaron_Peirsol/Aaron_Peirsol_0002.jpg'
    im2 = '/home/srivoknovski/Python/flask/acme/Networks/FaceNet/data/lfw/Aaron_Peirsol/Aaron_Peirsol_0004.jpg'
    im3 = '/home/srivoknovski/Python/flask/acme/Networks/FaceNet/data/lfw/Aaron_Tippin/Aaron_Tippin_0001.jpg'
    # Send the raw test images to the client for display.
    for path in (im1, im2, im3):
        with open(path, "rb") as image_file:
            socketio_app.emit('log-prediction', {'image': image_file.read()})
    socketio_app.emit('log-prediction', {'message': 'preprocessing test images..., Crop dimension {}'.format(crop_dim)})
    images = []
    images.append(face_net_instance.process_image(im1, crop_dim))
    images.append(face_net_instance.process_image(im2, crop_dim))
    images.append(face_net_instance.process_image(im3, crop_dim))
    socketio_app.emit('log-prediction', {'message': 'loading model...'})
    model_path = absolute(app.config['FACE_NET_WEIGHTS_FILE'])
    embs = get_emmbedings(images=images, model_path=model_path)
    socketio_app.emit('log-prediction', {'message': 'Model path {} {}'.format(model_path, os.path.getsize(model_path))})
    # L2 distances between embeddings: small for the same person, larger for different people.
    diff1 = np.linalg.norm(embs[0] - embs[1])
    diff2 = np.linalg.norm(embs[0] - embs[2])
    print(im1, im2, diff1)
    print(im1, im3, diff2)
    socketio_app.emit('log-prediction', {'message': 'Done'})
    socketio_app.emit('finish-prediction', {
        'The same persons': str(diff1),
        'The different persons': str(diff2)
    })
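# Hedged follow-up to make_tests(): FaceNet-style verification typically turns
# the raw L2 distance into a yes/no decision with a fixed threshold. The value
# 1.1 below is a commonly cited default for 128-d FaceNet embeddings, not a
# number taken from this codebase.
def same_person_sketch(emb_a, emb_b, threshold=1.1):
    return np.linalg.norm(emb_a - emb_b) < threshold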
def update_output():
    socketio_app.emit('log-output', {'message': 'Start updating...'})
    output_dir = absolute(app.config['FACE_NET_OUTPUT_DIR'])
    preprocess(absolute(app.config['FACE_NET_DATA_DIR']), output_dir, 180)
    socketio_app.emit('log-output', {'message': 'Done'})
    socketio_app.emit('finish-output', get_output_info())