def save_on_s3(filename, remotename='', content_type='', bucket='', folder=''):
    """
    Save the contents of the local file named `filename` to `remotename` on S3.
    """
    b = bucket or get_s3_bucket()
    folder = get_s3_folder(folder)
    # Open in binary mode so image bytes are uploaded verbatim.
    with open(path_for(filename), 'rb') as fp:
        filename = remotename or filename
        k = b.new_key(folder + filename)
        headers = {
            'Cache-Control': 'max-age=31536000',  # 60*60*24*365
            # Prefer the caller-supplied content type; sniff the file otherwise.
            'Content-Type': content_type or get_file_type(fp),
        }
        k.set_contents_from_file(fp, policy='public-read', headers=headers)
    return filename
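
# Usage sketch (assumptions: 'abc123.jpeg' already exists in the local upload
# directory resolved by path_for(), and the S3 bucket/credentials are configured):
#
#     key_name = save_on_s3('abc123.jpeg', content_type='image/jpeg')
#     # key_name is the object name stored under the configured S3 folder,
#     # uploaded with a public-read ACL and the headers set above.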
def save(fp, profile, title=None):
    """
    Attach the uploaded image to the profile, write it to local disk and queue
    an upload to S3.
    """
    id_ = newid()
    title = title or secure_filename(fp.filename)
    content_type = get_file_type(fp)
    name, extn = os.path.splitext(fp.filename)
    extn = guess_extension(content_type, extn)
    img_name = "%s%s" % (id_, extn)
    local_path = path_for(img_name)
    # Write in binary mode so image bytes are saved verbatim.
    with open(local_path, 'wb') as img:
        img.write(fp.read())
    stored_file = save_img_in_db(name=id_, title=title, local_path=local_path,
                                 profile=profile, mimetype=content_type, orig_extn=extn)
    job = queueit('save_on_s3', img_name, content_type=content_type, taskid=img_name)
    return title, job, stored_file
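
# Usage sketch (assumptions: called from a Flask view; `app`, `request` and the
# `Profile` lookup below are illustrative, not part of this module):
#
#     @app.route('/<profile_name>/upload', methods=['POST'])
#     def upload(profile_name):
#         profile = Profile.query.filter_by(name=profile_name).first_or_404()
#         title, job, stored_file = save(request.files['file'], profile)
#         return jsonify(title=title, name=stored_file.name)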
def resize_and_save(img, size, is_thumbnail=False):
    """
    Fetch the original image from the local disk cache, downloading it from S3
    on a cache miss. Resize it, save the resized copy on S3 and record the size
    details in the db.
    """
    src_path = download_frm_s3(img.name + img.extn)
    if 'thumb_extn' in ALLOWED_MIMETYPES[img.mimetype]:
        fmt = ALLOWED_MIMETYPES[img.mimetype]['thumb_extn']
    else:
        fmt = img.extn
    fmt = fmt.lstrip('.')  # strip the leading dot, keeping only the bare format name
    resized_filename = get_resized_filename(img, size)
    if not resize_img(src_path, path_for(resized_filename), size, img.mimetype,
                      fmt, is_thumbnail=is_thumbnail):
        # Resizing failed; mark the image as having no previews.
        img.no_previews = True
        db.session.add(img)
        db.session.commit()
        return False
    save_on_s3(resized_filename, content_type=img.mimetype)
    return save_tn_in_db(img, resized_filename, size)
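
# Usage sketch (assumptions: `img` is a stored-file row as returned by save(),
# and `size` takes whatever form get_resized_filename()/resize_img() expect;
# the call below is illustrative only):
#
#     result = resize_and_save(img, size, is_thumbnail=True)
#     if result is False:
#         pass  # resizing failed; img.no_previews has been set and committed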