def delete(stored_file):
    """
    Delete all the thumbnails and images associated with a file, from
    local cache and S3. Wait for the upload/resize to complete if queued
    for the same image.
    """
    if not app.config.get('CELERY_ALWAYS_EAGER'):
        wait_for_asynctasks(stored_file)

    # Purge every locally cached variant; original and thumbnails share
    # the same name prefix, so a single glob catches them all.
    local_pattern = os.path.join(
        app.config.get('UPLOADED_FILES_DEST'), '%s*' % stored_file.name)
    for cached in glob(local_pattern):
        os.remove(cached)

    # Collect S3 keys for all thumbnails plus the original.
    # lazy loads don't work - so, no `stored_file.thumbnails`
    extn = stored_file.extn
    thumbs = Thumbnail.query.filter_by(stored_file=stored_file).all()
    s3_keys = [get_s3_folder() + thumb.name + extn for thumb in thumbs]
    s3_keys.append(get_s3_folder() + stored_file.name + extn)
    get_s3_bucket().delete_keys(s3_keys)

    # Remove from the db. Thumbnails are deleted explicitly because
    # cascade rules don't work as lazy loads don't work in async mode.
    Thumbnail.query.filter_by(stored_file=stored_file).delete()
    db.session.delete(stored_file)
    db.session.commit()
def save_on_s3(filename, remotename='', content_type='', bucket='', folder=''):
    """
    Save contents from file named `filename` to `remotename` on S3.

    :param filename: name of the local file, resolved via `path_for`
    :param remotename: key name to use on S3; defaults to `filename`
    :param content_type: MIME type for the Content-Type header; sniffed
        from the file via `get_file_type` when empty
    :param bucket: S3 bucket object to use; defaults to `get_s3_bucket()`
    :param folder: S3 folder prefix, resolved via `get_s3_folder`
    :returns: the key name stored on S3 (without the folder prefix)
    """
    b = bucket or get_s3_bucket()
    folder = get_s3_folder(folder)
    # Open in binary mode: uploads are typically images, and text mode
    # would corrupt (or, on Python 3, fail to read) binary data.
    with open(path_for(filename), 'rb') as fp:
        filename = remotename or filename
        k = b.new_key(folder + filename)
        headers = {
            'Cache-Control': 'max-age=31536000',  # 60*60*24*365
            # Honour an explicitly supplied content type; the original
            # accepted `content_type` but silently ignored it.
            'Content-Type': content_type or get_file_type(fp),
        }
        k.set_contents_from_file(fp, policy='public-read', headers=headers)
    return filename