def list_images() -> List[str]:
    """Return the ids of every image known to the Face API mapping table."""
    with get_db_session() as session:
        rows = session.query(FaceApiMapping).all()
        image_ids = [row.img_id for row in rows]
    logger.debug('Got %d images', len(image_ids))
    return image_ids
def upload_image(stream: IO[bytes], img_id: str, filename: str):
    """Persist the raw image bytes and record an 'uploaded' status row.

    The status row starts with no error message; later processing steps
    update it via the status table.
    """
    store_image(stream, filename)
    initial_status = ImageStatus(img_id=img_id,
                                 status=ImageStatusEnum.uploaded.name,
                                 error_msg=None)
    with get_db_session(commit=True) as session:
        session.add(initial_status)
    logger.debug('Image %s uploaded', img_id)
def get_processing_status(img_id: str) -> Tuple[str, str]:
    """Return the (status, error_msg) pair recorded for an image.

    Raises:
        ImageDoesNotExist: when no status row exists for *img_id*.
    """
    with get_db_session() as session:
        row = (session.query(ImageStatus)
               .filter(ImageStatus.img_id == img_id)
               .first())
        if row is None:
            raise ImageDoesNotExist()
        logger.debug('Image %s is in status %s', img_id, row.status)
        return row.status, row.error_msg
def _update_img_status(img_id: str,
                       status: Optional[ImageStatusEnum] = None,
                       error_msg: Optional[str] = None):
    """Update the status and/or error message stored for an image.

    Only truthy arguments are written: passing ``None`` (or an empty
    string) leaves the stored value untouched, so this helper cannot be
    used to *clear* a previously recorded error message.

    :param img_id: id of the image whose status row is updated
    :param status: new processing status, if any
    :param error_msg: new error message, if any
    """
    update_fields = {}
    if status:
        update_fields['status'] = status.name
    if error_msg:
        update_fields['error_msg'] = error_msg
    # Bail out when there is nothing to change: an UPDATE with an empty
    # values dict is invalid and SQLAlchemy raises on it.
    if not update_fields:
        return
    with get_db_session(commit=True) as session:
        session.query(ImageStatus)\
            .filter(ImageStatus.img_id == img_id)\
            .update(update_fields)
def process_image(img_id: str):
    """Queue an uploaded image for asynchronous face processing.

    Raises:
        ImageDoesNotExist: no status row exists for *img_id*.
        ImageAlreadyProcessed: the image is not in the 'uploaded' state.
    """
    with get_db_session() as session:
        current = (session.query(ImageStatus)
                   .filter(ImageStatus.img_id == img_id)
                   .first())
        if current is None:
            raise ImageDoesNotExist()
        if current.status != ImageStatusEnum.uploaded.name:
            raise ImageAlreadyProcessed()
        tasks.process_image.delay(img_id)
        logger.debug('Image %s queued for processing', img_id)
def _load_image_ids_and_face_vectors() -> Tuple[List[str], np.ndarray]:
    """Load every stored face vector together with its image id.

    Returns:
        A tuple ``(img_ids, features)`` where row ``i`` of the array
        ``features`` is the face vector belonging to ``img_ids[i]``.

    Note: the return annotation previously used ``np.array``, which is a
    factory function, not a type; ``np.ndarray`` is the actual type.
    """
    logger.debug('getting all img ids and respective features')
    with get_db_session() as session:
        rows = session.query(FeatureMapping).all()
        # Materialize inside the session so attribute access cannot hit a
        # detached instance.
        img_ids = [row.img_id for row in rows]
        known_features = [np.array(face_vector_from_text(row.features))
                          for row in rows]
    return img_ids, np.array(known_features)
def lookup_matching_images(img_id: str) -> Tuple[List[str], List[float]]:
    """Return the ids of images matching *img_id* and their distance scores.

    NOTE(review): results are ordered by distance score *descending*
    (largest distance first); confirm that consumers really want the
    weakest matches first rather than ``asc()``.
    """
    with get_db_session() as session:
        matches = (session.query(Match)
                   .filter(Match.this_img_id == img_id)
                   .order_by(Match.distance_score.desc())
                   .all())
        images = [match.that_img_id for match in matches]
        distances = [match.distance_score for match in matches]
        logger.debug('Image %s has %d matches', img_id, len(distances))
        return images, distances
def process_image(img_id: str):
    """Worker-side pipeline: vectorize the faces in an image and store matches.

    Steps: fetch the stored image, compute face vectors, compare each
    vector against every previously stored vector, persist matches below
    the distance threshold, then mark the image finished and delete the
    raw file.  Status-row updates are interleaved so callers can poll
    progress via the status table.
    """
    logger.info('Processing image %s', img_id)
    try:
        img_path = get_image_path(img_id)
    except StorageError:
        # The task ran before (or without) a successful upload; record the
        # failure on the status row and stop.
        logger.error("Can't process image %s since it doesn't exist", img_id)
        _update_img_status(img_id, error_msg='Image processed before uploaded')
        return
    start = datetime.utcnow()
    _update_img_status(img_id, status=ImageStatusEnum.processing)
    # Snapshot of everything processed so far, to compare the new faces against.
    prev_img_ids, prev_face_vectors = _load_image_ids_and_face_vectors()
    face_vectors = get_face_vectors(img_path, FACE_VECTORIZE_ALGORITHM)
    logger.info('Found %d faces in image %s', len(face_vectors), img_id)
    _update_img_status(img_id, status=ImageStatusEnum.face_vector_computed)
    with get_db_session(commit=True) as session:
        _add_entry_to_session(Image, session, img_id=img_id)
        matches = []  # type: List[dict]
        for face_vector in face_vectors:
            _store_face_vector(face_vector, img_id, session)
            distances = _compute_distances(prev_face_vectors, face_vector)
            for that_img_id, distance in zip(prev_img_ids, distances):
                if img_id == that_img_id:
                    # Never match an image against itself.
                    continue
                distance = float(distance)
                if distance >= DISTANCE_SCORE_THRESHOLD:
                    # Too far apart to count as the same face.
                    continue
                _prepare_matches(matches, that_img_id, distance)
        logger.info('Found %d face matches for image %s', len(matches), img_id)
        for match in matches:
            _store_matches(img_id, match["that_img_id"],
                           match["distance_score"], session)
    # If no faces were found, surface that via error_msg; note that
    # _update_img_status ignores a None error_msg, so an existing message
    # is not cleared here.
    _update_img_status(img_id,
                       status=ImageStatusEnum.finished_processing,
                       error_msg=('No faces found in image'
                                  if not face_vectors else None))
    # The raw file is no longer needed once its vectors are persisted.
    delete_image(img_id)
    processing_time = (datetime.utcnow() - start).total_seconds()
    logger.info('Processed image %s in %d seconds', img_id, processing_time)
def upload_image(stream: IO[bytes], filename: str) -> str:
    """Store an image, register its face with the Face API, and map the ids.

    The image id is everything before the first dot of *filename*.  When
    exactly one face is detected, it is added to the large face list and
    its persisted face id recorded; any other face count is stored with
    an empty face id.

    :returns: the derived image id
    """
    model_id, _ = _get_model_id()
    # str.partition keeps the whole name when there is no dot;
    # filename[:filename.find('.')] returned -1 for extension-less names
    # and silently chopped the last character.
    img_id = filename.partition('.')[0]
    storage.store_image(stream, filename)
    image_path = storage.get_image_path(img_id)
    faces = cognitive_face.face.detect(image_path)
    if len(faces) != 1:
        face_id = ''
    else:
        response = cognitive_face.large_face_list_face.add(
            image=image_path,
            large_face_list_id=model_id,
            user_data=img_id)
        face_id = response['persistedFaceId']
    with get_db_session(commit=True) as session:
        session.add(FaceApiMapping(face_id=face_id, img_id=img_id))
    return img_id
def _fetch_faces_for_person(img_id: str) -> Tuple[List[str], str]:
    """Detect the faces in an image and return their ids plus the stored id.

    Returns ``([], '')`` when the image has no Face API mapping, and
    ``([], mapping.face_id)`` when detection finds no faces.
    """
    with get_db_session() as session:
        mapping = (session.query(FaceApiMapping)
                   .filter(FaceApiMapping.img_id == img_id)
                   .first())
        if not mapping:
            logger.debug('No mapping found for image %s', img_id)
            return [], ''
        # FIXME: manage deletion of the files
        img_path = storage.get_image_path(img_id)
        detected = cognitive_face.face.detect(img_path)
        face_ids = [face['faceId'] for face in detected]
        if face_ids:
            return face_ids, mapping.face_id
        logger.debug('No faces found for image %s', img_id)
        return [], mapping.face_id
def list_images() -> List[str]:
    """Return the ids of all images recorded in the Image table."""
    with get_db_session() as session:
        rows = session.query(Image).all()
        image_ids = [row.img_id for row in rows]
    logger.debug('Got %d images overall', len(image_ids))
    return image_ids
def _fetch_mappings_for_faces(face_ids: Iterable[str]) -> List[FaceApiMapping]:
    """Fetch every FaceApiMapping whose face id appears in *face_ids*."""
    with get_db_session() as session:
        return (session.query(FaceApiMapping)
                .filter(FaceApiMapping.face_id.in_(face_ids))
                .all())