def get_cluster_faces():
    """Flask handler: return the faces of one cluster as a JSON response.

    Expects a JSON request body containing at least 'data_dir' and
    'dbfile'; the full params dict is forwarded to db.get_cluster_faces().
    """
    params = request.get_json()
    db.connect(os.path.join(params['data_dir'], params['dbfile']), args.debug)
    try:
        # Close the connection even if the query raises, so a failed
        # request does not leak the DB handle (original skipped close on error).
        res = db.get_cluster_faces(**params)
    finally:
        db.close()
    return jsonify(res)
def compute_similarities():
    """Flask handler: compute face similarities/clusters and report stats.

    Expects a JSON request body containing at least 'data_dir' and
    'dbfile'; the full params dict is forwarded to
    faceid.compute_similarities(). Returns counts plus elapsed seconds.
    """
    params = request.get_json()
    start_time = time.time()
    db.connect(os.path.join(params['data_dir'], params['dbfile']), args.debug)
    try:
        # Ensure the DB handle is released even when the computation
        # fails (original leaked the connection on error).
        num_faces, num_similarities, num_clusters = faceid.compute_similarities(
            **params)
    finally:
        db.close()
    elapsed = time.time() - start_time
    res = {
        'num_similarities': num_similarities,
        'num_clusters': num_clusters,
        'num_faces': num_faces,
        'elapsed': elapsed,
    }
    return jsonify(res)
def scan():
    """Flask handler: scan a batch of files for faces and return stats.

    Expects a JSON body with 'data_dir', 'dbfile', 'source' and 'files'
    (a list of dicts, each with a 'relpath' key; remaining keys are
    forwarded to faceid.process_file as keyword arguments).
    """
    params = request.get_json()
    data_dir = params['data_dir']
    dbfile = params['dbfile']
    source = params['source']
    files = params['files']
    if faceid.args.save_resized:
        faceid.makedirs(os.path.join(data_dir, faceid.args.save_resized))
    if faceid.args.save_faces:
        faceid.makedirs(os.path.join(data_dir, faceid.args.save_faces))
    start_time = time.time()
    db.connect(os.path.join(data_dir, dbfile), args.debug)
    num_files = 0
    num_images = 0
    num_faces = 0
    results = []
    try:
        for file in files:
            # Remove 'relpath' so the remaining keys can be passed as kwargs.
            relpath = file.pop('relpath')
            file_images, file_faces, res = faceid.process_file(
                data_dir, relpath, source, **file)
            num_files += 1
            num_images += file_images
            num_faces += file_faces
            results += res
    finally:
        # Close even when a file fails mid-batch, so the request does not
        # leak the DB connection (original only closed on full success).
        db.close()
    elapsed = time.time() - start_time
    res = {
        'num_files': num_files,
        'num_images': num_images,
        'num_faces': num_faces,
        'elapsed': elapsed,
        # Guard against division by zero when the batch is empty and the
        # clock resolution makes elapsed exactly 0.
        'images_per_s': num_images / elapsed if elapsed > 0 else 0.0,
        'files': results,
    }
    return jsonify(res)
# CLI for querying the face database: parse arguments, connect to the DB,
# dispatch the chosen sub-command, and print its JSON result to stdout.
parser.add_argument("db")
parser.add_argument("--debug", action='store_true')
parser.add_argument("--limit", type=int, default=5)
parser.add_argument("--similarity_threshold", type=float, default=0.35)

subparsers = parser.add_subparsers(dest='command')

clusters_parser = subparsers.add_parser('get_clusters')
clusters_parser.add_argument("--with_gps", action='store_true')

faces_parser = subparsers.add_parser('get_cluster_faces')
faces_parser.add_argument("cluster_num", type=int)
faces_parser.add_argument("--with_gps", action='store_true')

similar_parser = subparsers.add_parser('get_similar_faces')
similar_parser.add_argument("face_id", type=int)

selfies_parser = subparsers.add_parser('get_selfies')

criminals_parser = subparsers.add_parser('get_criminals')
criminals_parser.add_argument("face_id", type=int)

#contact_parser = subparsers.add_parser('get_contacts')

args = parser.parse_args()

# Dispatch table: sub-command name -> db query function. Replaces the
# repetitive if/elif chain; an unknown/missing command still does nothing,
# matching the original behavior.
_COMMANDS = {
    'get_clusters': db.get_clusters,
    'get_cluster_faces': db.get_cluster_faces,
    'get_similar_faces': db.get_similar_faces,
    'get_selfies': db.get_selfies,
    'get_criminals': db.get_criminals,
}

db.connect(args.db, args.debug)
try:
    handler = _COMMANDS.get(args.command)
    if handler is not None:
        # Forward every parsed argument as a keyword arg, exactly as the
        # original **vars(args) calls did.
        print(json.dumps(handler(**vars(args))))
finally:
    # Release the DB handle even on query failure (the original never
    # closed the connection at all).
    db.close()
# Reset the optional output directories (if configured), then walk the
# input tree and run face detection on every file found.
if args.save_resized:
    resized_dir = os.path.join(args.dir, args.save_resized)
    # ignore_errors=True: on a first run the directory does not exist yet
    # and a bare shutil.rmtree() would raise FileNotFoundError.
    shutil.rmtree(resized_dir, ignore_errors=True)
    makedirs(resized_dir)
if args.save_faces:
    faces_dir = os.path.join(args.dir, args.save_faces)
    shutil.rmtree(faces_dir, ignore_errors=True)
    makedirs(faces_dir)
if args.save_clusters:
    clusters_dir = os.path.join(args.dir, args.save_clusters)
    shutil.rmtree(clusters_dir, ignore_errors=True)
    makedirs(clusters_dir)

# NOTE(review): other call sites pass a debug flag to db.connect() as
# well — confirm whether it should be forwarded here too.
db.connect(args.db)

print("Processing files...")
start_time = time.time()
num_files = 0
num_images = 0
num_faces = 0
for dirpath, dirnames, filenames in os.walk(args.dir):
    for filename in filenames:
        filepath = os.path.join(dirpath, filename)
        # process_file expects a path relative to the scan root.
        relpath = os.path.relpath(filepath, args.dir)
        file_images, file_faces, results = process_file(
            args.dir, relpath, args.source)
        num_files += 1
        num_images += file_images
        num_faces += file_faces
elapsed = time.time() - start_time