def create(self, validated_data):
    """Create a user album with the given title and photos.

    If an album with that title already exists for the requesting user,
    delegate to ``update`` so the photos are merged instead of creating
    a duplicate album.
    """
    title = validated_data['title']
    image_hashes = validated_data['photos']

    # Resolve the owner from the DRF request context when available.
    user = None
    request = self.context.get("request")
    if request and hasattr(request, "user"):
        user = request.user

    instance, created = AlbumUser.objects.get_or_create(title=title, owner=user)
    if not created:
        # Album already exists: merge the new photos into it.
        return self.update(instance, validated_data)

    photos = Photo.objects.in_bulk(image_hashes)
    for photo in photos.values():
        instance.photos.add(photo)
        # Keep at most four cover photos.
        if instance.cover_photos.count() < 4:
            instance.cover_photos.add(photo)
    instance.save()
    logger.info(u'Created user album {} with {} photos'.format(
        instance.id, len(photos)))
    return instance
def get_location_clusters(user):
    """Cluster *user*'s geolocated photos by place name.

    Walks every feature of every photo's reverse-geocoding result,
    skipping purely numeric names (house numbers, postcodes), and
    returns one ``[lat, lon, name]`` entry per distinct place name,
    using the first photo's coordinates for each name.
    """
    start = datetime.now()
    photos = Photo.objects.filter(owner=user).exclude(geolocation_json={})

    # (place name, [lon, lat]) pairs for every non-numeric feature.
    coord_names = []
    for photo in photos:
        for feature in photo.geolocation_json['features']:
            if not feature['text'].isdigit():
                coord_names.append([feature['text'], feature['center']])

    # groupby requires its input pre-sorted by the grouping key.
    coord_names.sort(key=lambda x: x[0])
    groups = [list(g) for _, g in groupby(coord_names, lambda x: x[0])]

    # Geocoder centers are [lon, lat]; emit [lat, lon, name].
    res = [[g[0][1][1], g[0][1][0], g[0][0]] for g in groups]

    elapsed = (datetime.now() - start).total_seconds()
    logger.info('location clustering took %.2f seconds' % elapsed)
    return res
def get_location_clusters():
    """Cluster all geolocated photos by place name at one feature level.

    Returns a list of ``[lat, lon]`` pairs, one per distinct place name,
    using the first photo's coordinates for each name.
    """
    start = datetime.now()
    photos = Photo.objects.exclude(geolocation_json={})

    # Index -3 picks a mid-level administrative feature (e.g. city)
    # from the geocoder's feature hierarchy.
    level = -3
    coord_names = []
    for photo in photos:
        try:
            feature = photo.geolocation_json['features'][level]
            coord_names.append([feature['text'], feature['center']])
        except (KeyError, IndexError, TypeError):
            # Photos with fewer features or malformed json are skipped
            # (narrowed from a bare except that hid real errors).
            pass

    # groupby requires its input pre-sorted by the grouping key.
    coord_names.sort(key=lambda x: x[0])
    groups = [list(g) for _, g in groupby(coord_names, lambda x: x[0])]

    # Geocoder centers are [lon, lat]; emit [lat, lon].
    res = [[g[0][1][1], g[0][1][0]] for g in groups]

    elapsed = (datetime.now() - start).total_seconds()
    logger.info('location clustering took %.2f seconds' % elapsed)
    return res
def create(self, validated_data):
    """Create a new user, ignoring any client-supplied scan directory.

    ``scan_directory`` is stripped before the remaining fields are
    passed to ``create_user`` (it is handled separately in ``update``).
    """
    # pop with a default replaces the membership-test-then-pop pattern.
    validated_data.pop('scan_directory', None)
    user = User.objects.create_user(**validated_data)
    logger.info("Created user {}".format(user.id))
    return user
def update(self, instance, validated_data):
    """Persist a new scan directory on *instance*.

    The directory is only accepted when the supplied path exists on
    disk; otherwise the instance is returned unchanged.
    """
    if 'scan_directory' in validated_data:
        candidate = validated_data.pop('scan_directory')
        if os.path.exists(candidate):
            instance.scan_directory = candidate
            instance.save()
            logger.info("Updated scan directory for user {}".format(
                instance.scan_directory))
    return instance
def generate_captions(overwrite=False):
    """Generate search captions for photos.

    With ``overwrite`` set every photo is re-captioned; otherwise only
    photos that have no captions yet are processed.
    """
    queryset = (Photo.objects.all() if overwrite
                else Photo.objects.filter(search_captions=None))
    logger.info('%d photos to be processed for caption generation' % queryset.count())
    for photo in queryset:
        logger.info('generating captions for %s' % photo.image_path)
        photo._generate_captions()
        photo.save()
def create(self, validated_data):
    """Create a new user and invalidate cached responses.

    Removed a block of commented-out dead code (scan-directory
    defaulting that was never enabled).
    """
    user = User.objects.create_user(**validated_data)
    logger.info("Created user {}".format(user.id))
    # A new user changes the results of cached list endpoints.
    cache.clear()
    return user
def create(self, validated_data):
    """Return the existing Person with the given name, or create one.

    Person names act as natural keys, so an existing match is returned
    rather than inserting a duplicate.
    """
    name = validated_data.pop('name')
    qs = Person.objects.filter(name=name)
    if qs.count() > 0:
        return qs[0]
    new_person = Person()
    new_person.name = name
    new_person.save()
    # BUG FIX: original applied '%' to a '{}' template
    # ('created person {}' % id), which raises TypeError at runtime.
    logger.info('created person {}'.format(new_person.id))
    return new_person
def geolocate(overwrite=False):
    """Reverse-geocode photos via the Mapbox helper.

    With ``overwrite`` set every photo is re-geolocated; otherwise only
    photos with an empty ``geolocation_json`` are processed.  Failures
    on individual photos are logged and skipped (best-effort batch).
    """
    if overwrite:
        photos = Photo.objects.all()
    else:
        photos = Photo.objects.filter(geolocation_json={})
    logger.info('%d photos to be geolocated' % photos.count())
    for photo in photos:
        try:
            logger.info('geolocating %s' % photo.image_path)
            photo._geolocate_mapbox()
        except Exception:
            # BUG FIX: narrowed bare except and routed the failure
            # through the logger (with traceback) instead of print().
            logger.exception('could not geolocate photo: %s', photo)
def get_photos(self, obj):
    """Serialize the photos of *obj* visible to the requesting user,
    logging how long serialization took."""
    start = datetime.now()
    request = self.context.get("request")
    user = request.user if request and hasattr(request, "user") else None
    serialized = PhotoSuperSimpleSerializer(obj.get_photos(user), many=True).data
    elapsed = (datetime.now() - start).total_seconds()
    logger.info('serializing photos of faces took %.2f seconds' % elapsed)
    return serialized
def regenerate_event_titles(user):
    """Recompute titles of all of *user*'s auto albums as an RQ job.

    Progress and outcome are tracked on a LongRunningJob row keyed by
    the current RQ job id.  Always returns 1; failures are recorded on
    the job entry.
    """
    job_id = rq.get_current_job().id
    if LongRunningJob.objects.filter(job_id=job_id).exists():
        lrj = LongRunningJob.objects.get(job_id=job_id)
        lrj.started_at = datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
    else:
        lrj = LongRunningJob.objects.create(
            started_by=user,
            job_id=job_id,
            queued_at=datetime.now().replace(tzinfo=pytz.utc),
            started_at=datetime.now().replace(tzinfo=pytz.utc),
            job_type=LongRunningJob.JOB_GENERATE_AUTO_ALBUM_TITLES)
        lrj.save()
    try:
        aus = AlbumAuto.objects.filter(owner=user).prefetch_related('photos')
        target_count = len(aus)
        for idx, au in enumerate(aus):
            logger.info('job {}: {}'.format(job_id, idx))
            au._autotitle()
            au.save()
            lrj.result = {
                'progress': {
                    "current": idx + 1,
                    "target": target_count
                }
            }
            lrj.save()
        lrj.finished = True
        lrj.finished_at = datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
        logger.info('job {}: updated lrj entry to db'.format(job_id))
    except Exception:
        # BUG FIX: the original bare except swallowed every error with
        # no logging; also removed unused status/message/res locals.
        logger.exception(
            'job {}: failed to regenerate event titles'.format(job_id))
        lrj.failed = True
        lrj.finished = True
        lrj.finished_at = datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
    return 1
def update(self, instance, validated_data):
    """Add the requested photos to the album, skipping any that are
    already in it, then clear cached responses."""
    image_hashes = validated_data['photos']
    candidates = Photo.objects.in_bulk(image_hashes)
    already_in_album = instance.photos.all()
    added = 0
    for photo in candidates.values():
        if photo not in already_in_album:
            added += 1
            instance.photos.add(photo)
    instance.save()
    cache.clear()
    logger.info(u'Added {} photos to user album {}'.format(
        added, instance.id))
    return instance
def get(self, request, format=None):
    """Queue a photo-scan job for the requesting user.

    Returns ``{'status': True, 'job_id': ...}`` on success, or
    ``{'status': False}`` when the job could not be enqueued.
    """
    try:
        res = scan_photos.delay(request.user)
        logger.info('queued job {}'.format(res.id))
        if not LongRunningJob.objects.filter(job_id=res.id).exists():
            lrj = LongRunningJob.objects.create(
                started_by=request.user,
                job_id=res.id,
                queued_at=datetime.datetime.now().replace(tzinfo=pytz.utc),
                job_type=LongRunningJob.JOB_SCAN_PHOTOS)
            lrj.save()
        return Response({'status': True, 'job_id': res.id})
    except Exception as e:
        # BUG FIX: was `except BaseException`, which also swallows
        # KeyboardInterrupt/SystemExit; Exception is the right boundary.
        logger.error(str(e))
        return Response({'status': False})
def group(photos_with_timestamp, dt=timedelta(hours=6)):
    """Partition (timestamp, photo) pairs into time-based clusters.

    Pairs are sorted chronologically; a new group starts whenever the
    gap to the previous photo is *dt* or more.  Returns a list of lists
    of photo objects (timestamps are dropped).  Photo objects must
    expose an ``exif_timestamp`` attribute matching their pair's
    timestamp.
    """
    photos_with_timestamp = sorted(photos_with_timestamp, key=lambda x: x[0])
    groups = []
    # BUG FIX: the original logged with an undefined ``job_id`` name on
    # every iteration, raising NameError for any non-empty input.
    for timestamp, photo in photos_with_timestamp:
        if groups and timestamp - groups[-1][-1].exif_timestamp < dt:
            groups[-1].append(photo)
        else:
            groups.append([photo])
    return groups
def _add_to_album_place(self):
    """Attach this photo to an AlbumPlace for every named feature of
    its reverse-geocoding result.

    Numeric feature names (house numbers, postcodes) are skipped.  The
    album's ``geolocation_level`` records how specific the feature is
    (higher = more specific, since features are ordered specific to
    general).
    """
    # Guard clauses replace the original three-deep nesting.
    if not self.geolocation_json or len(self.geolocation_json) == 0:
        return
    if 'features' not in self.geolocation_json.keys():
        return
    features = self.geolocation_json['features']
    for geolocation_level, feature in enumerate(features):
        if 'text' not in feature.keys():
            continue
        if feature['text'].isnumeric():
            continue
        album_place = get_album_place(feature['text'])[0]
        if album_place.photos.filter(image_hash=self.image_hash).count() == 0:
            album_place.geolocation_level = len(features) - geolocation_level
            album_place.photos.add(self)
            album_place.save()
            # Dropped the duplicate print() beside each logger call.
            logger.info('album place title: %s, level: %d, added photo: %s' % (
                feature['text'], album_place.geolocation_level,
                self.image_hash))
        else:
            # BUG FIX: log message typo ('addded') corrected.
            logger.warning('photo not added to album place')
def update(self, instance, validated_data):
    """Update a user's scan directory and/or face-matching confidence.

    The scan directory is only accepted when the path exists on disk.
    The cache is cleared afterwards so stale responses are dropped.
    """
    if 'scan_directory' in validated_data:
        candidate_dir = validated_data.pop('scan_directory')
        if os.path.exists(candidate_dir):
            instance.scan_directory = candidate_dir
            instance.save()
            logger.info("Updated scan directory for user {}".format(
                instance.scan_directory))
    if 'confidence' in validated_data:
        instance.confidence = validated_data.pop('confidence')
        instance.save()
        logger.info("Updated confidence for user {}".format(
            instance.confidence))
    cache.clear()
    return instance
def _extract_faces(self):
    """Detect faces in this photo's thumbnail and persist each one as a
    Face row with a cropped JPEG image.

    Every detected face is initially assigned to the shared 'unknown'
    Person; classification happens later in the training job.
    """
    # Fetch-or-create the shared 'unknown' person bucket.
    qs_unknown_person = Person.objects.filter(name='unknown')
    if qs_unknown_person.count() == 0:
        unknown_person = Person(name='unknown')
        unknown_person.save()
    else:
        unknown_person = qs_unknown_person[0]

    image = PIL.Image.open(self.thumbnail)
    image = np.array(image.convert('RGB'))

    # Encodings and locations are parallel lists, one entry per face.
    face_encodings = face_recognition.face_encodings(image)
    face_locations = face_recognition.face_locations(image)
    faces = []
    if len(face_locations) > 0:
        for idx_face, face in enumerate(zip(face_encodings, face_locations)):
            face_encoding = face[0]
            face_location = face[1]
            # face_recognition locations are (top, right, bottom, left)
            # pixel offsets into the image array.
            top, right, bottom, left = face_location
            face_image = image[top:bottom, left:right]
            face_image = PIL.Image.fromarray(face_image)
            face = Face()
            face.image_path = self.image_hash + "_" + str(
                idx_face) + '.jpg'
            face.person = unknown_person
            face.photo = self
            face.location_top = face_location[0]
            face.location_right = face_location[1]
            face.location_bottom = face_location[2]
            face.location_left = face_location[3]
            # The encoding is stored as a hex string for the database.
            face.encoding = face_encoding.tobytes().hex()
            # face.encoding = face_encoding.dumps()
            # Write the crop to an in-memory JPEG, then into the
            # Face.image file field.
            face_io = BytesIO()
            face_image.save(face_io, format="JPEG")
            face.image.save(face.image_path, ContentFile(face_io.getvalue()))
            face_io.close()
            face.save()
    logger.info('image {}: {} face(s) saved'.format(
        self.image_hash, len(face_locations)))
def update(self, instance, validated_data):
    """Assign a (possibly new) person label to a face.

    Labeling as 'unknown' resets the inference flags; any other name is
    recorded as a manual (non-inferred) label with full confidence.
    """
    name = validated_data.pop('person')['name']
    matches = Person.objects.filter(name=name)
    if matches.count() > 0:
        instance.person = matches[0]
    else:
        person = Person()
        person.name = name
        person.save()
        instance.person = person
        logger.info('created person with name %s' % name)
    if instance.person.name == 'unknown':
        instance.person_label_is_inferred = None
        instance.person_label_probability = 0.
    else:
        instance.person_label_is_inferred = False
        instance.person_label_probability = 1.
    logger.info('updated label for face %d to %s' % (
        instance.id, instance.person.name))
    instance.save()
    return instance
def build_image_similarity_index(user):
    """Push all of a user's photo embeddings to the similarity server.

    Collects the stored embedding of every non-hidden photo owned by
    *user* and POSTs them to the image-similarity service's /build/
    endpoint.  Returns the server's JSON response.
    """
    logger.info('builing similarity index for user {}'.format(user.username))
    photos = Photo.objects.filter(Q(hidden=False) & Q(owner=user)).exclude(
        encoding=None).only('encoding')

    image_hashes = []
    image_embeddings = []
    for photo in photos:
        image_hashes.append(photo.image_hash)
        # Encodings are stored as hex strings; decode to float32 lists.
        raw = np.frombuffer(bytes.fromhex(photo.encoding))
        image_embeddings.append(np.array(raw, dtype=np.float32).tolist())

    payload = {
        "user_id": user.id,
        "image_hashes": image_hashes,
        "image_embeddings": image_embeddings
    }
    response = requests.post(IMAGE_SIMILARITY_SERVER + '/build/', json=payload)
    return response.json()
def generate_event_albums(user, job_id):
    """Group a user's photos into auto albums by temporal proximity.

    Photos taken within 36 hours of each other form one event; events
    with at least two photos (and no existing album at that timestamp)
    become AlbumAuto rows with an auto-generated title and a mean GPS
    location.  Progress is tracked on a LongRunningJob row.  Always
    returns 1; failures are recorded on the job entry.
    """
    if LongRunningJob.objects.filter(job_id=job_id).exists():
        lrj = LongRunningJob.objects.get(job_id=job_id)
        lrj.started_at = datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
    else:
        lrj = LongRunningJob.objects.create(
            started_by=user,
            job_id=job_id,
            queued_at=datetime.now().replace(tzinfo=pytz.utc),
            started_at=datetime.now().replace(tzinfo=pytz.utc),
            job_type=LongRunningJob.JOB_GENERATE_AUTO_ALBUMS)
        lrj.save()
    try:
        photos = Photo.objects.filter(owner=user).only('exif_timestamp')
        photos_with_timestamp = [(photo.exif_timestamp, photo)
                                 for photo in photos if photo.exif_timestamp]

        def group(photos_with_timestamp, dt=timedelta(hours=6)):
            # Sort chronologically, then start a new group whenever the
            # gap to the previous photo is at least ``dt``.
            photos_with_timestamp = sorted(
                photos_with_timestamp, key=lambda x: x[0])
            groups = []
            for idx, photo in enumerate(photos_with_timestamp):
                if len(groups) == 0:
                    groups.append([])
                    groups[-1].append(photo[1])
                else:
                    if photo[0] - groups[-1][-1].exif_timestamp < dt:
                        groups[-1].append(photo[1])
                    else:
                        groups.append([])
                        groups[-1].append(photo[1])
                logger.info('job {}: {}'.format(job_id, idx))
            return groups

        groups = group(photos_with_timestamp, dt=timedelta(days=1, hours=12))
        logger.info('job {}: made groups'.format(job_id))

        album_locations = []
        target_count = len(groups)
        date_format = "%Y:%m:%d %H:%M:%S"
        for idx, group in enumerate(groups):
            key = group[0].exif_timestamp
            logger.info(
                'job {}: processing auto album with date: '.format(job_id) +
                key.strftime(date_format))
            items = group
            if len(group) >= 2:
                qs = AlbumAuto.objects.filter(timestamp=key).filter(owner=user)
                if qs.count() == 0:
                    album = AlbumAuto(
                        created_on=datetime.utcnow().replace(tzinfo=pytz.utc),
                        owner=user)
                    album.timestamp = key
                    album.save()
                    locs = []
                    for item in items:
                        album.photos.add(item)
                        item.save()
                        if item.exif_gps_lat and item.exif_gps_lon:
                            locs.append([item.exif_gps_lat, item.exif_gps_lon])
                    if len(locs) > 0:
                        # Album location is the mean of its photos' GPS fixes.
                        album_location = np.mean(np.array(locs), 0)
                        album_locations.append(album_location)
                        album.gps_lat = album_location[0]
                        album.gps_lon = album_location[1]
                    else:
                        album_locations.append([])
                    album._autotitle()
                    album.save()
                    logger.info('job {}: generated auto album {}'.format(
                        job_id, album.id))
            lrj.result = {
                'progress': {
                    "current": idx + 1,
                    "target": target_count
                }
            }
            lrj.save()
        lrj.finished = True
        lrj.finished_at = datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
    except Exception:
        # BUG FIX: narrowed the bare except, fixed the 'occured' typo,
        # and removed unused status/message/res locals.
        logger.exception("An error occurred")
        lrj.failed = True
        lrj.finished = True
        lrj.finished_at = datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
    return 1
def update(self, instance, validated_data):
    """No-op update hook kept to satisfy the serializer interface.

    BUG FIX: DRF expects ``update`` to return the (possibly modified)
    instance; the original implicitly returned None, which makes
    ``serializer.save()`` return None to callers.
    """
    logger.info("Removed pdb to avoid blocks")
    return instance
def train_faces(user, job_id):
    """Train a face classifier from labeled faces and relabel unknowns.

    Fits an MLP on all manually-labeled face encodings for *user*, then
    predicts a person (with probability) for every unlabeled or
    previously-inferred face.  Progress and outcome are tracked on a
    LongRunningJob row.  Returns True on success (including the
    no-labeled-faces case), False on failure.
    """
    if LongRunningJob.objects.filter(job_id=job_id).exists():
        lrj = LongRunningJob.objects.get(job_id=job_id)
        lrj.started_at = datetime.datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
    else:
        lrj = LongRunningJob.objects.create(
            started_by=user,
            job_id=job_id,
            queued_at=datetime.datetime.now().replace(tzinfo=pytz.utc),
            started_at=datetime.datetime.now().replace(tzinfo=pytz.utc),
            job_type=LongRunningJob.JOB_TRAIN_FACES)
        lrj.save()
    try:
        faces = Face.objects.filter(
            photo__owner=user).prefetch_related('person')

        # Split faces into training data (manually labeled) and
        # prediction targets (unlabeled or previously inferred).
        # Removed unused accumulators (face_encodings_all and the
        # per-loop known/unknown encoding lists that were rebuilt from
        # the dicts below anyway).
        id2face_known = {}
        id2face_unknown = {}
        for face in faces:
            face_encoding = np.frombuffer(bytes.fromhex(face.encoding))
            face_image = face.image.read()
            face.image.close()
            entry = {
                'encoding': face_encoding,
                'image': face_image,
                'image_path': face.image_path,
                'id': face.id,
            }
            if face.person_label_is_inferred is not False or \
                    face.person.name == 'unknown':
                id2face_unknown[face.id] = entry
            else:
                entry['person_name'] = face.person.name
                entry['person_id'] = face.person.id
                id2face_known[face.id] = entry

        if len(id2face_known) == 0:
            # Nothing to train on; finish cleanly.
            logger.warning("No labeled faces found")
            lrj.finished = True
            lrj.failed = False
            lrj.finished_at = datetime.datetime.now().replace(tzinfo=pytz.utc)
            lrj.save()
            return True

        face_encodings_known = np.array(
            [f['encoding'] for f in id2face_known.values()])
        person_names_known = np.array(
            [f['person_name'] for f in id2face_known.values()])

        logger.info("Before fitting")
        clf = MLPClassifier(
            solver='adam', alpha=1e-5, random_state=1,
            max_iter=1000).fit(face_encodings_known, person_names_known)
        logger.info("After fitting")

        # ROBUSTNESS: skip prediction when there are no unknown faces;
        # predicting on an empty array would raise and mark the job failed.
        if len(id2face_unknown) > 0:
            face_encodings_unknown = np.array(
                [f['encoding'] for f in id2face_unknown.values()])
            face_ids_unknown = [f['id'] for f in id2face_unknown.values()]
            pred = clf.predict(face_encodings_unknown)
            probs = np.max(clf.predict_proba(face_encodings_unknown), 1)

            target_count = len(face_ids_unknown)
            for idx, (face_id, person_name, probability) in enumerate(
                    zip(face_ids_unknown, pred, probs)):
                person = Person.objects.get(name=person_name)
                face = Face.objects.get(id=face_id)
                face.person = person
                face.person_label_is_inferred = True
                face.person_label_probability = probability
                face.save()
                lrj.result = {
                    'progress': {
                        "current": idx + 1,
                        "target": target_count
                    }
                }
                lrj.save()

        lrj.finished = True
        lrj.failed = False
        lrj.finished_at = datetime.datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
        cache.clear()
        return True
    except Exception:
        # BUG FIX: narrowed from BaseException, fixed the 'occured'
        # typo, and removed the unreachable ``return res`` / unused
        # ``res = []``.
        logger.exception("An error occurred")
        lrj.failed = True
        lrj.finished = True
        lrj.finished_at = datetime.datetime.now().replace(tzinfo=pytz.utc)
        lrj.save()
        return False