Example #1
def generate_event_albums():
    lrj = LongRunningJob(job_id=rq.get_current_job().id,
                         started_at=datetime.now(),
                         job_type=LongRunningJob.JOB_GENERATE_AUTO_ALBUMS)
    lrj.save()

    if is_auto_albums_being_processed()['status']:
        status = False
        message = "There are even albums being created at the moment. Please try again later."
        return {'status': status, 'message': message}

    set_auto_album_processing_flag_on()
    photo_count = Photo.objects.count()
    if photo_count == 0:
        status = False
        message = "Please add some more photos!"
        set_auto_album_processing_flag_off()
        return {'status': status, 'message': message}
    else:
        if is_photos_being_added()['status']:
            status = False
            message = "There are photos being added to the library. Please try again later."
            set_auto_album_processing_flag_off()
            return {'status': status, 'message': message}

    try:
        photos = Photo.objects.all()

        photos_with_timestamp = [(photo.exif_timestamp, photo)
                                 for photo in photos if photo.exif_timestamp]
        timestamps = [
            photo.exif_timestamp for photo in photos if photo.exif_timestamp
        ]

        def group(photos_with_timestamp, dt=timedelta(hours=6)):
            photos_with_timestamp = sorted(photos_with_timestamp,
                                           key=lambda x: x[0])
            groups = []
            for photo in photos_with_timestamp:
                if len(groups) == 0:
                    groups.append([])
                    groups[-1].append(photo[1])
                else:
                    if photo[0] - groups[-1][-1].exif_timestamp < dt:
                        groups[-1].append(photo[1])
                    else:
                        groups.append([])
                        groups[-1].append(photo[1])
            return groups

        groups = group(photos_with_timestamp, dt=timedelta(days=1, hours=12))

        album_locations = []

        for group in groups:
            key = group[0].exif_timestamp
            print(key)
            items = group
            if len(group) >= 2:
                qs = AlbumAuto.objects.filter(timestamp=key)
                if qs.count() == 0:
                    album = AlbumAuto(created_on=datetime.utcnow())
                    album.timestamp = key
                    album.save()

                    locs = []
                    for item in items:
                        album.photos.add(item)
                        item.save()
                        if item.exif_gps_lat and item.exif_gps_lon:
                            locs.append([item.exif_gps_lat, item.exif_gps_lon])
                        print('-', item.image_hash, item.exif_gps_lat,
                              item.exif_gps_lon)
                    if len(locs) > 0:
                        album_location = np.mean(np.array(locs), 0)
                        album_locations.append(album_location)
                        album.gps_lat = album_location[0]
                        album.gps_lon = album_location[1]
                    else:
                        album_locations.append([])
                    album._autotitle()
                    album.save()
        status = True
        message = 'success'
        res = {'status': status, 'message': message}

        lrj = LongRunningJob.objects.get(job_id=rq.get_current_job().id)
        lrj.finished = True
        lrj.finished_at = datetime.now()
        lrj.result = res
        lrj.save()

    except Exception:
        status = False
        res = {'status': status, 'message': 'failed'}

        lrj = LongRunningJob.objects.get(job_id=rq.get_current_job().id)
        lrj.failed = True
        lrj.finished = True
        lrj.finished_at = datetime.now()
        lrj.save()

    set_auto_album_processing_flag_off()
    return 1
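
The interesting part of this example is the gap-based grouping: photos are sorted by EXIF timestamp and a new "event" starts whenever two consecutive photos are further apart than dt (1 day and 12 hours in the call above). Below is a minimal, self-contained sketch of the same idea using bare datetime values and made-up sample data instead of Photo objects; the function name and the sample timestamps are illustrative assumptions.

from datetime import datetime, timedelta

def group_by_gap(items_with_timestamp, dt=timedelta(hours=6)):
    # items_with_timestamp: list of (timestamp, payload) tuples
    items_with_timestamp = sorted(items_with_timestamp, key=lambda x: x[0])
    groups = []
    last_timestamp = None
    for timestamp, payload in items_with_timestamp:
        # Start a new group whenever the gap to the previous item reaches dt
        if last_timestamp is None or timestamp - last_timestamp >= dt:
            groups.append([])
        groups[-1].append(payload)
        last_timestamp = timestamp
    return groups

shots = [
    (datetime(2018, 1, 1, 10, 0), 'a'),
    (datetime(2018, 1, 1, 12, 0), 'b'),  # 2 hours after 'a'  -> same event
    (datetime(2018, 1, 2, 9, 0), 'c'),   # 21 hours after 'b' -> new event
]
print(group_by_gap(shots))  # [['a', 'b'], ['c']]
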
Example #2
def get(self, requests, format=None):
    res = is_photos_being_added()
    return Response(res)
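
The three lines above are the body of a Django REST Framework view: they return the scan-status dictionary as a JSON response. A minimal sketch of how such a method is usually hosted in an APIView follows; the class name and the stubbed helper body are assumptions for illustration, not taken from the project.

from rest_framework.views import APIView
from rest_framework.response import Response

def is_photos_being_added():
    # Stand-in for the project's helper so the sketch is self-contained;
    # in these examples it reports whether a photo scan is currently running.
    return {'status': False}

class PhotoAddStatusView(APIView):  # hypothetical class name
    def get(self, request, format=None):
        return Response(is_photos_being_added())
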
Example #3
def scan_photos():
    lrj = LongRunningJob(
        job_id=rq.get_current_job().id,
        started_at=datetime.datetime.now(),
        job_type=LongRunningJob.JOB_SCAN_PHOTOS)
    lrj.save()

    for _ in tqdm(range(100000)):
        yy = np.random.randn(1000).dot(np.random.randn(1000))

    if is_photos_being_added()['status']:
        return {"new_photo_count": 0, "status": False, 'message':'photos are being added'}

    image_paths = []
    for image_dir in image_dirs:
        image_paths.extend([os.path.join(dp, f) for dp, dn, fn in os.walk(image_dir) for f in fn])

    image_paths = [p for p in image_paths if p.lower().endswith('.jpg') and 'thumb' not in p.lower()]
    image_paths.sort()

    set_photo_scan_flag_on(1)

    existing_hashes = [p.image_hash for p in Photo.objects.all()]

    image_paths_to_add = []
    for image_path in tqdm(image_paths):
        # hash_md5 = hashlib.md5()
        # with open(image_path, "rb") as f:
        #     for chunk in iter(lambda: f.read(4096), b""):
        #         hash_md5.update(chunk)
        # image_hash = hash_md5.hexdigest()
        # if image_hash not in existing_hashes:
        #     image_paths_to_add.append(image_path)


        if not Photo.objects.filter(image_path=image_path).exists():
            image_paths_to_add.append(image_path)

    set_photo_scan_flag_on(len(image_paths_to_add))


    added_photo_count = 0
    already_existing_photo = 0
    counter = 0
    for image_path in tqdm(image_paths_to_add):
        set_num_photos_added(counter)
        counter += 1
        if image_path.lower().endswith('.jpg'):
            try:
                img_abs_path = image_path

                start = datetime.datetime.now()
                hash_md5 = hashlib.md5()
                with open(img_abs_path, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        hash_md5.update(chunk)
                image_hash = hash_md5.hexdigest()
                elapsed = (datetime.datetime.now() - start).total_seconds()
                util.logger.info('generating md5 took %.2f'%elapsed)

                # qs = Photo.objects.filter(image_hash=image_hash)

                photo_exists = Photo.objects.filter(image_hash=image_hash).exists()

                if not photo_exists:
                    photo = Photo(image_path=img_abs_path)
                    photo.added_on = datetime.datetime.now().replace(tzinfo=pytz.utc)
                    photo.geolocation_json = {}
                    photo.save()
                    photo._generate_md5()
                    
                    start = datetime.datetime.now()
                    photo._generate_thumbnail()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('thumbnail get took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._generate_captions()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('caption generation took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._save_image_to_db()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('image save took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._extract_exif()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('exif extraction took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._geolocate_mapbox()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('geolocation took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_place()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('add to AlbumPlace took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._extract_faces()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('face extraction took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_date()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumDate took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_thing()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumThing took %.2f'%elapsed)

                    added_photo_count += 1
                    util.logger.info("Image processed: {}".format(img_abs_path))
                else:
                    already_existing_photo += 1
                    util.logger.info("photo already exists in db")
                    print("photo already exists in db %s"%img_abs_path)
            except Exception as e:
                try:
                    util.logger.error("Could not load image {}. reason: {}".format(image_path, repr(e)))
                except Exception:
                    util.logger.error("Could not load image {}".format(image_path))

    util.logger.info("Added {}/{} photos".format(added_photo_count, len(image_paths) - already_existing_photo))
    
    set_photo_scan_flag_off()

    lrj = LongRunningJob.objects.get(job_id=rq.get_current_job().id)
    lrj.finished = True
    lrj.finished_at = datetime.datetime.now()
    lrj.result = {"new_photo_count": added_photo_count}
    lrj.save()
    return {"new_photo_count": added_photo_count, "status": True}
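
Duplicate detection in scan_photos relies on hashing each file in fixed-size chunks, so large images never have to be read into memory at once. A standalone sketch of that hashing step is shown below; the path in the usage comment is a placeholder.

import hashlib

def md5_of_file(path, chunk_size=4096):
    hash_md5 = hashlib.md5()
    with open(path, "rb") as f:
        # iter() with a b"" sentinel keeps reading chunk_size bytes until EOF
        for chunk in iter(lambda: f.read(chunk_size), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()

# md5_of_file('/path/to/photo.jpg')  # placeholder path
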
Example #4
def get(self, requests, format=None):
    res = is_photos_being_added()
    return Response(res)
Example #5
def scan_photos():
    if is_photos_being_added()['status']:
        return {"new_photo_count": 0, "status": False, 'message':'photos are being added'}

    image_paths = []
    for image_dir in image_dirs:
        image_paths.extend([os.path.join(dp, f) for dp, dn, fn in os.walk(image_dir) for f in fn])

    image_paths = [p for p in image_paths if p.lower().endswith('.jpg')]
    image_paths.sort()

    set_photo_scan_flag_on(1)

    image_paths_to_add = []
    for image_path in tqdm(image_paths):
        if not Photo.objects.filter(image_path=image_path).exists():
            image_paths_to_add.append(image_path)

    set_photo_scan_flag_on(len(image_paths_to_add))


    added_photo_count = 0
    already_existing_photo = 0
    counter = 0
    for image_path in tqdm(image_paths_to_add):
        set_num_photos_added(counter)
        counter += 1
        if image_path.lower().endswith('.jpg'):
            try:
                img_abs_path = image_path

                # start = datetime.datetime.now()
                # hash_md5 = hashlib.md5()
                # with open(img_abs_path, "rb") as f:
                #     for chunk in iter(lambda: f.read(4096), b""):
                #         hash_md5.update(chunk)
                # image_hash = hash_md5.hexdigest()
                # elapsed = (datetime.datetime.now() - start).total_seconds()
                # util.logger.info('generating md5 took %.2f'%elapsed)

                # qs = Photo.objects.filter(image_hash=image_hash)

                photo_exists = Photo.objects.filter(image_path=img_abs_path).exists()

                if not photo_exists:
                    photo = Photo(image_path=img_abs_path)
                    photo.added_on = datetime.datetime.now().replace(tzinfo=pytz.utc)
                    photo.geolocation_json = {}
                    photo.save()
                    photo._generate_md5()
                    
                    start = datetime.datetime.now()
                    photo._generate_thumbnail()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('thumbnail get took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._generate_captions()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('caption generation took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._save_image_to_db()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('image save took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._extract_exif()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('exif extraction took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._geolocate_mapbox()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('geolocation took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_place()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('add to AlbumPlace took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._extract_faces()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('face extraction took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_date()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumDate took %.2f'%elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_thing()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumThing took %.2f'%elapsed)

                    added_photo_count += 1
                    util.logger.info("Image processed: {}".format(img_abs_path))
                else:
                    already_existing_photo += 1
                    util.logger.info("photo already exists in db")
            except Exception as e:
                util.logger.error("Could not load image {}".format(image_path))

    util.logger.info("Added {}/{} photos".format(added_photo_count, len(image_paths) - already_existing_photo))
    set_photo_scan_flag_off()
    return {"new_photo_count": added_photo_count, "status": True}
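
This variant discovers images with a plain os.walk over the configured image_dirs and keeps only .jpg paths (Example #3 additionally skips any path containing 'thumb'). A standalone sketch of that discovery step follows, with a placeholder directory list and a hypothetical function name.

import os

def find_jpg_paths(image_dirs, skip_thumbs=False):
    image_paths = []
    for image_dir in image_dirs:
        for dp, _dirs, fn in os.walk(image_dir):
            image_paths.extend(os.path.join(dp, f) for f in fn)
    image_paths = [p for p in image_paths if p.lower().endswith('.jpg')]
    if skip_thumbs:
        # Example #3 also filters out thumbnail files this way
        image_paths = [p for p in image_paths if 'thumb' not in p.lower()]
    return sorted(image_paths)

# find_jpg_paths(['/data/photos'])  # placeholder directory
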
Example #6
def generate_event_albums():
    lrj = LongRunningJob(
        job_id=rq.get_current_job().id,
        started_at=datetime.now(),
        job_type=LongRunningJob.JOB_GENERATE_AUTO_ALBUMS)
    lrj.save()

    if is_auto_albums_being_processed()['status']:
        status = False
        message = "There are even albums being created at the moment. Please try again later."
        return {'status':status, 'message':message}


    set_auto_album_processing_flag_on()
    photo_count = Photo.objects.count()
    if photo_count == 0:
        status = False
        message = "Please add some more photos!"
        set_auto_album_processing_flag_off()
        return {'status':status, 'message':message}
    else:
        if is_photos_being_added()['status']:
            status = False
            message = "There are photos being added to the library. Please try again later."
            set_auto_album_processing_flag_off()
            return {'status':status, 'message':message}

    try:
        photos = Photo.objects.all()

        photos_with_timestamp = [(photo.exif_timestamp,photo) for photo in photos if photo.exif_timestamp]
        timestamps = [photo.exif_timestamp for photo in photos if photo.exif_timestamp]

        def group(photos_with_timestamp,dt=timedelta(hours=6)):
            photos_with_timestamp = sorted(photos_with_timestamp, key=lambda x: x[0])
            groups = []
            for photo in photos_with_timestamp:
                if len(groups) == 0:
                    groups.append([])
                    groups[-1].append(photo[1])
                else:
                    if photo[0]-groups[-1][-1].exif_timestamp < dt:
                        groups[-1].append(photo[1])
                    else:
                        groups.append([])
                        groups[-1].append(photo[1])
            return groups


        groups = group(photos_with_timestamp,dt=timedelta(days=1,hours=12))

        album_locations = []

        for group in groups:
            key = group[0].exif_timestamp
            print(key)
            items = group
            if len(group) >= 2:
                qs = AlbumAuto.objects.filter(timestamp=key)
                if qs.count() == 0:
                    album = AlbumAuto(created_on=datetime.utcnow())
                    album.timestamp = key
                    album.save()

                    locs = []
                    for item in items:
                        album.photos.add(item)
                        item.save()
                        if item.exif_gps_lat and item.exif_gps_lon:
                            locs.append([item.exif_gps_lat,item.exif_gps_lon])
                        print('-', item.image_hash, item.exif_gps_lat, item.exif_gps_lon)
                    if len(locs) > 0:
                        album_location = np.mean(np.array(locs),0)
                        album_locations.append(album_location)
                        album.gps_lat = album_location[0]
                        album.gps_lon = album_location[1]
                    else:
                        album_locations.append([])
                    album._autotitle()
                    album.save()
        status = True
        message = 'success'
        res = {'status':status, 'message':message}

        lrj = LongRunningJob.objects.get(job_id=rq.get_current_job().id)
        lrj.finished = True
        lrj.finished_at = datetime.now()
        lrj.result = res
        lrj.save()


    except Exception:
        status = False
        res = {'status':status, 'message':'failed'}

        lrj = LongRunningJob.objects.get(job_id=rq.get_current_job().id)
        lrj.failed = True
        lrj.finished = True
        lrj.finished_at = datetime.now()
        lrj.save()    

    set_auto_album_processing_flag_off()
    return 1
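
When an auto album is created above, its GPS position is simply the column-wise mean of the member photos' (lat, lon) pairs. A small sketch of that step with made-up coordinates:

import numpy as np

# Made-up (lat, lon) pairs standing in for item.exif_gps_lat / item.exif_gps_lon
locs = [[37.7749, -122.4194],
        [37.7755, -122.4201],
        [37.7760, -122.4189]]

album_location = np.mean(np.array(locs), 0)  # column-wise mean -> [lat, lon]
gps_lat, gps_lon = float(album_location[0]), float(album_location[1])
print(gps_lat, gps_lon)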