def add_photo_by_upload(img, b, user, default, caption, title):
    """Persist an uploaded image as a Photo attached to business `b`.

    Returns the saved Photo instance, or None if saving raised.
    """
    try:
        print("trying to save!\n")
        new_photo = Photo(
            user=user,
            business=b,
            image=img,
            title=title,
            caption=caption,
            is_default=default,
        )
        new_photo.save(isUpload=True, isTextMod=False)
        return new_photo
    except Exception as e:
        # Best-effort: report the failure and signal it with None.
        print(str(e))
        return None
def setUp(self):
    """Seed the test database with two Photo fixtures."""
    fixtures = (
        {'place': 'spb', 'img': 'cube.png',
         'date': '2019-12-06T11:44:05.575178Z'},
        {'place': 'moscow', 'img': 'cup.jpg',
         'date': '2019-11-06T10:44:05.575178Z'},
    )
    for kwargs in fixtures:
        Photo(**kwargs).save()
def add_photo_by_url(phurl, business, user, default, caption, title):
    """Download the image at `phurl` and attach it to `business` as its
    profile photo.

    Returns the saved Photo, or None when the download fails.
    """
    destination = "{}/{}_{}_{}".format(
        settings.STATIC_ROOT, business.id, business.city, business.state)
    logger.debug('retrieve')
    try:
        urlretrieve(phurl, destination)
    except Exception as e:
        logger.debug('exception')
        logger.debug(e)
        return None
    logger.debug('done')
    saved = Photo(user=user, business=business, image=destination,
                  title=title, caption=caption, is_default=default)
    saved.save(isUpload=False, isTextMod=False)
    business.profile_photo = saved
    business.save()
    logger.debug(saved)
    return saved
def photo_service(id):
    """Serve the stored blob whose filename matches `id` as a JPEG
    attachment.

    Returns a response carrying the file contents, or the string
    "Blob not found" when no matching Photo exists or lookup fails.
    """
    try:
        # fetch()[0] raises IndexError when nothing matches; caught below.
        u = Photo.query().filter(Photo.filename == id).fetch()[0]
        if u:
            response = make_response(u.file)
            response.headers['Content-Disposition'] = (
                'attachment; filename=%s' % u.filename)
            response.headers['Content-Type'] = "image/jpg"
            return response
    except Exception as e:
        # BUG FIX: `print e` is Python 2 statement syntax — a SyntaxError
        # on Python 3 (the rest of this file uses the print() function).
        print(e)
    return "Blob not found"
def process_item(self, item, spider):
    """Persist a scraped offer unless it already exists in the database.

    Items whose URL is already stored are skipped; otherwise the Scrapy
    item is mapped onto Django Offer, Photo and OfferPhoto models and
    saved. Always returns the item for the next pipeline stage.
    """
    # Basic duplicate filter keyed on the offer URL.
    try:
        Offer.objects.get(url=item["url"])
        print("Offer already exists")
        return item
    except Offer.DoesNotExist:
        pass

    offer = Offer()
    offer.url = item["url"]
    offer.brand = item["brand"]
    offer.model = item["model"]
    offer.title = item["title"]
    offer.price = item["price"]
    offer.description = item["description"]
    offer.save()

    # One Photo row per URL, linked to the offer through OfferPhoto.
    for photo_url in item["photos"]:
        photo = Photo()
        photo.url = photo_url
        photo.save()
        OfferPhoto.objects.create(offer=offer, photo=photo)

    return item
def listPhotos():
    """Render the photo listing page with all stored Photo entities."""
    try:
        u = Photo.query().fetch()
        return render_template("listphotos.html", photos=u)
    except Exception as e:
        # BUG FIX: returning the exception object itself is not a valid
        # view response (Flask raises TypeError on it); return its text.
        return str(e)
def scan_photos():
    """Walk `image_dirs` for .jpg files and ingest any photo not already
    in the database (deduplicated by MD5 hash of the file contents).

    Each new photo runs through the full pipeline (thumbnail, captions,
    image save, EXIF, geolocation, albums, faces) with per-step timings
    logged. Returns {"new_photo_count": <int>, "status": True}.
    """
    image_paths = []
    for image_dir in image_dirs:
        image_paths.extend([
            os.path.join(dp, f)
            for dp, dn, fn in os.walk(image_dir) for f in fn
        ])

    added_photo_count = 0
    already_existing_photo = 0
    for image_path in tqdm(image_paths):
        if image_path.lower().endswith('.jpg'):
            try:
                img_abs_path = image_path
                # Hash the file in 4 KiB chunks to dedupe on content.
                hash_md5 = hashlib.md5()
                with open(img_abs_path, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        hash_md5.update(chunk)
                image_hash = hash_md5.hexdigest()

                qs = Photo.objects.filter(image_hash=image_hash)
                if qs.count() < 1:
                    photo = Photo(image_path=img_abs_path)
                    photo.added_on = datetime.datetime.now().replace(
                        tzinfo=pytz.utc)
                    photo.geolocation_json = {}
                    photo.save()
                    photo._generate_md5()

                    start = datetime.datetime.now()
                    photo._generate_thumbnail()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('thumbnail get took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._generate_captions()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('caption generation took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._save_image_to_db()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('image save took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._extract_exif()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('exif extraction took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._geolocate_mapbox()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('geolocation took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_place()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('add to AlbumPlace took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._extract_faces()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('face extraction took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_date()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumDate took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_thing()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumThing took %.2f' % elapsed)

                    added_photo_count += 1
                    util.logger.info(
                        "Image processed: {}".format(img_abs_path))
                else:
                    already_existing_photo += 1
                    util.logger.info("photo already exists in db")
            except Exception as e:
                # BUG FIX: the caught exception was discarded, making
                # failures undiagnosable; include it in the error log.
                util.logger.error(
                    "Could not load image {}: {}".format(image_path, e))
    util.logger.info("Added {}/{} photos".format(
        added_photo_count, len(image_paths) - already_existing_photo))
    return {"new_photo_count": added_photo_count, "status": True}
def scan_photos():
    """Walk `image_dirs` for .jpg files and add photos not already in the
    database (deduplicated by MD5 of file contents), printing per-step
    timings as it goes.

    Returns {"new_photo_count": <int>, "status": True}.
    """
    image_paths = []
    for image_dir in image_dirs:
        image_paths.extend([
            os.path.join(dp, f)
            for dp, dn, fn in os.walk(image_dir) for f in fn
        ])
    added_photo_count = 0
    for image_path in tqdm(image_paths):
        if image_path.lower().endswith('.jpg'):
            try:
                img_abs_path = image_path
                # Hash the file in 4 KiB chunks to dedupe on content.
                hash_md5 = hashlib.md5()
                with open(img_abs_path, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        hash_md5.update(chunk)
                image_hash = hash_md5.hexdigest()
                qs = Photo.objects.filter(image_hash=image_hash)
                if qs.count() < 1:
                    photo = Photo(image_path=img_abs_path)
                    photo.added_on = datetime.datetime.now()
                    photo.save()
                    photo._generate_md5()

                    start = datetime.datetime.now()
                    photo._generate_thumbnail()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    print('thumbnail get', elapsed)

                    start = datetime.datetime.now()
                    photo._save_image_to_db()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    print('image save', elapsed)

                    start = datetime.datetime.now()
                    photo._extract_exif()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    print('exif extraction', elapsed)

                    start = datetime.datetime.now()
                    photo._extract_faces()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    print('face extraction', elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_date()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    added_photo_count += 1
                    print(img_abs_path)
                else:
                    print("photo already exists in db")
            except Exception as e:
                print("could not load image %s" % image_path)
                # BUG FIX: `e.message` does not exist on Python 3
                # exceptions; the old `try: print(e.message) except: pass`
                # silently printed nothing. Print the exception itself.
                print(e)
    return {"new_photo_count": added_photo_count, "status": True}
# NOTE(review): orphaned fragment — this span duplicates the interior of
# the scan_photos() variant above and is truncated at both ends: it opens
# with a stray "])" closing a list comprehension built elsewhere, and it
# stops right after a bare timing `start`. Preserved token-for-token
# pending confirmation that it can be deleted.
        ])
    added_photo_count = 0
    for image_path in tqdm(image_paths):
        if image_path.lower().endswith('.jpg'):
            try:
                img_abs_path = image_path
                # MD5 of the file contents, read in 4 KiB chunks (dedupe key).
                hash_md5 = hashlib.md5()
                with open(img_abs_path, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        hash_md5.update(chunk)
                image_hash = hash_md5.hexdigest()
                qs = Photo.objects.filter(image_hash=image_hash)
                if qs.count() < 1:
                    photo = Photo(image_path=img_abs_path)
                    photo.added_on = datetime.datetime.now()
                    photo.save()
                    photo._generate_md5()
                    start = datetime.datetime.now()
                    photo._generate_thumbnail()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    print('thumbnail get', elapsed)
                    start = datetime.datetime.now()
                    photo._save_image_to_db()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    print('image save', elapsed)
                    start = datetime.datetime.now()
def scan_photos():
    """Scan `image_dirs` for new .jpg files and ingest them, tracking the
    run in a LongRunningJob row keyed on the current rq job id.

    Candidate files are first filtered by path against existing Photo
    rows, then deduplicated again by MD5 content hash before running the
    processing pipeline (thumbnail, captions, image save, EXIF,
    geolocation, albums, faces). Returns
    {"new_photo_count": <int>, "status": <bool>}.
    """
    # Record the job start. NOTE(review): started_at is a naive local
    # timestamp while photos below are stamped with pytz.utc — confirm
    # this mix is intended.
    lrj = LongRunningJob(
        job_id=rq.get_current_job().id,
        started_at=datetime.datetime.now(),
        job_type=LongRunningJob.JOB_SCAN_PHOTOS)
    lrj.save()

    # NOTE(review): this loop burns CPU on random dot products and `yy`
    # is never used — presumably a temporary delay / progress-bar smoke
    # test; confirm whether it can be removed.
    for _ in tqdm(range(100000)):
        yy = np.random.randn(1000).dot(np.random.randn(1000))

    # Bail out early if another scan is already in flight.
    if is_photos_being_added()['status']:
        return {"new_photo_count": 0, "status": False,
                'message': 'photos are being added'}

    # Collect every file under the configured image directories.
    image_paths = []
    for image_dir in image_dirs:
        image_paths.extend([os.path.join(dp, f)
                            for dp, dn, fn in os.walk(image_dir)
                            for f in fn])
    # Keep .jpg files only, excluding anything that looks like a thumbnail.
    image_paths = [p for p in image_paths
                   if p.lower().endswith('.jpg') and 'thumb' not in p.lower()]
    image_paths.sort()

    set_photo_scan_flag_on(1)

    # NOTE(review): computed but unused — only the path-based check below
    # is active; the hash-based variant is kept commented out.
    existing_hashes = [p.image_hash for p in Photo.objects.all()]

    image_paths_to_add = []
    for image_path in tqdm(image_paths):
        # hash_md5 = hashlib.md5()
        # with open(image_path, "rb") as f:
        #     for chunk in iter(lambda: f.read(4096), b""):
        #         hash_md5.update(chunk)
        # image_hash = hash_md5.hexdigest()
        # if image_hash not in existing_hashes:
        #     image_paths_to_add.append(image_path)
        if not Photo.objects.filter(image_path=image_path).exists():
            # ipdb.set_trace()
            image_paths_to_add.append(image_path)
    set_photo_scan_flag_on(len(image_paths_to_add))

    added_photo_count = 0
    already_existing_photo = 0
    counter = 0
    for image_path in tqdm(image_paths_to_add):
        # Publish progress before processing each candidate.
        set_num_photos_added(counter)
        counter += 1
        if image_path.lower().endswith('.jpg'):
            try:
                img_abs_path = image_path
                # MD5 of file contents, 4 KiB chunks: second-level dedupe.
                start = datetime.datetime.now()
                hash_md5 = hashlib.md5()
                with open(img_abs_path, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        hash_md5.update(chunk)
                image_hash = hash_md5.hexdigest()
                elapsed = (datetime.datetime.now() - start).total_seconds()
                util.logger.info('generating md5 took %.2f' % elapsed)
                # qs = Photo.objects.filter(image_hash=image_hash)
                photo_exists = Photo.objects.filter(
                    image_hash=image_hash).exists()
                if not photo_exists:
                    photo = Photo(image_path=img_abs_path)
                    # Photos are stamped with UTC-aware timestamps.
                    photo.added_on = datetime.datetime.now().replace(
                        tzinfo=pytz.utc)
                    photo.geolocation_json = {}
                    photo.save()
                    photo._generate_md5()

                    start = datetime.datetime.now()
                    photo._generate_thumbnail()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('thumbnail get took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._generate_captions()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('caption generation took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._save_image_to_db()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('image save took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._extract_exif()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('exif extraction took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._geolocate_mapbox()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('geolocation took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_place()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('add to AlbumPlace took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._extract_faces()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('face extraction took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_date()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumDate took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_thing()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumThing took %.2f' % elapsed)

                    added_photo_count += 1
                    util.logger.info(
                        "Image processed: {}".format(img_abs_path))
                else:
                    already_existing_photo += 1
                    util.logger.info("photo already exists in db")
                    print("photo already exists in db %s" % img_abs_path)
            except Exception as e:
                # NOTE(review): this log string was split across two
                # physical lines in the mangled source; rejoined here.
                try:
                    util.logger.error(
                        "Could not load image {}. reason: {}".format(
                            image_path, e.__repr__()))
                except:
                    util.logger.error(
                        "Could not load image {}".format(image_path))
    util.logger.info("Added {}/{} photos".format(
        added_photo_count, len(image_paths) - already_existing_photo))
    set_photo_scan_flag_off()
    # Mark the job row finished with the final count.
    lrj = LongRunningJob.objects.get(job_id=rq.get_current_job().id)
    lrj.finished = True
    lrj.finished_at = datetime.datetime.now()
    lrj.result = {"new_photo_count": added_photo_count}
    lrj.save()
    return {"new_photo_count": added_photo_count, "status": True}
def scan_photos():
    """Scan `image_dirs` for new .jpg files and ingest them.

    Refuses to run when another scan is in progress. New photos are
    detected by path, then deduplicated again by image path before the
    processing pipeline runs (thumbnail, captions, image save, EXIF,
    geolocation, albums, faces), with per-step timings logged.
    Returns {"new_photo_count": <int>, "status": True} on completion, or
    status False (with a message) when a scan is already running.
    """
    if is_photos_being_added()['status']:
        return {"new_photo_count": 0, "status": False,
                'message': 'photos are being added'}

    image_paths = []
    for image_dir in image_dirs:
        image_paths.extend([os.path.join(dp, f)
                            for dp, dn, fn in os.walk(image_dir)
                            for f in fn])
    image_paths = [p for p in image_paths if p.lower().endswith('.jpg')]
    image_paths.sort()

    set_photo_scan_flag_on(1)

    image_paths_to_add = []
    for image_path in tqdm(image_paths):
        if not Photo.objects.filter(image_path=image_path).exists():
            image_paths_to_add.append(image_path)
    set_photo_scan_flag_on(len(image_paths_to_add))

    added_photo_count = 0
    already_existing_photo = 0
    counter = 0
    for image_path in tqdm(image_paths_to_add):
        # Publish progress before processing each candidate.
        set_num_photos_added(counter)
        counter += 1
        if image_path.lower().endswith('.jpg'):
            try:
                img_abs_path = image_path
                photo_exists = Photo.objects.filter(
                    image_path=img_abs_path).exists()
                if not photo_exists:
                    photo = Photo(image_path=img_abs_path)
                    photo.added_on = datetime.datetime.now().replace(
                        tzinfo=pytz.utc)
                    photo.geolocation_json = {}
                    photo.save()
                    photo._generate_md5()

                    start = datetime.datetime.now()
                    photo._generate_thumbnail()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('thumbnail get took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._generate_captions()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('caption generation took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._save_image_to_db()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('image save took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._extract_exif()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('exif extraction took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._geolocate_mapbox()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('geolocation took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_place()
                    photo.save()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('add to AlbumPlace took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._extract_faces()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('face extraction took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_date()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumDate took %.2f' % elapsed)

                    start = datetime.datetime.now()
                    photo._add_to_album_thing()
                    elapsed = (datetime.datetime.now() - start).total_seconds()
                    util.logger.info('adding to AlbumThing took %.2f' % elapsed)

                    added_photo_count += 1
                    util.logger.info(
                        "Image processed: {}".format(img_abs_path))
                else:
                    already_existing_photo += 1
                    util.logger.info("photo already exists in db")
            except Exception as e:
                # BUG FIX: the caught exception was discarded, making
                # failures undiagnosable; include it in the error log.
                util.logger.error(
                    "Could not load image {}: {}".format(image_path, e))
    util.logger.info("Added {}/{} photos".format(
        added_photo_count, len(image_paths) - already_existing_photo))
    set_photo_scan_flag_off()
    return {"new_photo_count": added_photo_count, "status": True}