def add(bkt, key, img, form='JPEG'):
    """Serialize a PIL image and upload it to S3 as a public object.

    Args:
        bkt: Name of the target S3 bucket (looked up via the module-level
            `conn` connection).
        key: Key (path) under which the image is stored.
        img: PIL Image instance to serialize.
        form: PIL save format passed to `img.save` (default 'JPEG').
    """
    bucket = conn.get_bucket(bkt)
    newKeyObj = Key(bucket)
    newKeyObj.key = key
    # Fix: 'image/jpg' is not a registered MIME type; the standard type for
    # JPEG is 'image/jpeg'. NOTE(review): this header is still wrong when a
    # non-JPEG `form` is passed — consider deriving it from `form`.
    newKeyObj.set_metadata('Content-Type', 'image/jpeg')
    buf = s.StringIO()
    img.save(buf, form)
    newKeyObj.set_contents_from_string(buf.getvalue())
    newKeyObj.set_acl('public-read')
def upload(self, local_filepath, aws_filepath):
    """Upload `local_filepath` to `aws_filepath` and make it public.

    Returns the published URL for the file.
    """
    logging.info('Publishing %s to %s' % (local_filepath, aws_filepath))
    key = Key(bucket=self.bucket, name=aws_filepath)
    key.key = aws_filepath
    key.set_contents_from_filename(local_filepath)
    key.set_acl('public-read')
    # Fix: the docstring promised the published URL, but the original
    # returned None. expires_in=0 + query_auth=False yields a plain,
    # non-signed URL, which matches the public-read ACL set above.
    return key.generate_url(expires_in=0, query_auth=False)
def upload_file(conn, full_path): b = Bucket(conn, BUCKET) k = Key(b) k.key = full_path expires = datetime.utcnow() + timedelta(days=(25 * 365)) expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT") k.set_metadata("Content-Type", mimetypes.guess_type(full_path)[0]) k.set_metadata("Expires", expires) k.set_metadata("Cache-Control", "max-age={0}, public".format(25 * 365 * 36400)) k.set_contents_from_filename(full_path) k.set_acl('public-read') print "{} -> http://s3.amazonaws.com/yaluandmike/{}".format(full_path, full_path)
def upload(self, local_filepath, aws_filepath):
    """Upload `local_filepath` to `aws_filepath` and make it public.

    Returns the published URL for the file.
    """
    logging.info('Publishing %s to %s' % (local_filepath, aws_filepath))
    key = Key(bucket=self.bucket, name=aws_filepath)
    key.key = aws_filepath
    key.set_contents_from_filename(local_filepath)
    key.set_acl('public-read')
    # Fix: the docstring promised the published URL, but the original
    # returned None. A zero-expiry, unsigned URL matches the public-read
    # ACL applied above.
    return key.generate_url(expires_in=0, query_auth=False)
def s3_put_directory(s3_dir, local_dir):
    """Recursively upload `local_dir` into `s3_dir` of `aws_bucket`,
    skipping .DS_Store files and files whose md5 already matches the
    remote ETag; every uploaded key is marked public-read.

    Honors settings.S3_DRY_RUN: when set, only logs what would happen.
    """
    # Fix: in the original, the trailing conditional expression bound
    # looser than `+`, i.e. `(msg + " (DRY RUN)") if dry else ""`, so the
    # non-dry-run path logged an empty string instead of the message.
    logger.info("Putting file://%s to s3://%s/%s" % (
        os.path.abspath(local_dir), aws_bucket.name, s3_dir)
        + (" (DRY RUN)" if settings.S3_DRY_RUN else ""))
    if settings.S3_DRY_RUN:
        return
    for dirname, dirnames, filenames in os.walk(local_dir):
        for filename in filenames:
            if filename == ".DS_Store":
                continue
            filepath = os.path.join(dirname, filename)
            relpath = os.path.relpath(filepath, local_dir)
            key = os.path.join(s3_dir, relpath)
            aws_key = aws_bucket.get_key(key)
            if aws_key:
                # Assume the content of file did not change if md5 hashes
                # are consistent with the remote ETag.
                if aws_key.etag.strip("\"") == calc_file_md5(filepath):
                    continue
            else:
                aws_key = Key(aws_bucket, key)
            aws_key.set_contents_from_filename(filepath)
            aws_key.set_acl('public-read')
def s3_put_directory(s3_dir, local_dir):
    """Mirror `local_dir` into `s3_dir` of `aws_bucket` with public-read
    ACLs, skipping .DS_Store and files whose local md5 matches the remote
    ETag. Respects settings.S3_DRY_RUN (log only, no uploads).
    """
    # Fix: the original's `A % (...) + " (DRY RUN)" if cond else ""`
    # parsed as `(A + suffix) if cond else ""`, so a real (non-dry) run
    # logged nothing. Parenthesize the conditional suffix instead.
    logger.info("Putting file://%s to s3://%s/%s"
                % (os.path.abspath(local_dir), aws_bucket.name, s3_dir)
                + (" (DRY RUN)" if settings.S3_DRY_RUN else ""))
    if settings.S3_DRY_RUN:
        return
    for dirname, dirnames, filenames in os.walk(local_dir):
        for filename in filenames:
            if filename == ".DS_Store":
                continue
            filepath = os.path.join(dirname, filename)
            relpath = os.path.relpath(filepath, local_dir)
            key = os.path.join(s3_dir, relpath)
            aws_key = aws_bucket.get_key(key)
            if aws_key:
                # Assume the content of file did not change if md5 hashes
                # are consistent with the remote ETag.
                if aws_key.etag.strip("\"") == calc_file_md5(filepath):
                    continue
            else:
                aws_key = Key(aws_bucket, key)
            aws_key.set_contents_from_filename(filepath)
            aws_key.set_acl('public-read')
def upload_image(file_path, file_name):
    """Upload an image file to the configured S3 bucket as public-read.

    The source path is `file_path + file_name` when `file_path` is given
    (the caller must include any trailing separator), otherwise a
    hard-coded media directory is used as the base.

    Returns:
        `file_name` — the S3 key the image was stored under.
        (Fix: the original docstring claimed a (key_name, url) pair was
        returned, which the code never did.)
    """
    conn = S3Connection(S3_ACCESS_KEY, S3_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(S3_BUCKET_NAME)

    def percent_cb(complete, total):
        # Upload-progress callback: one dot per invocation.
        sys.stdout.write('.')
        sys.stdout.flush()

    k = Key(bucket)
    k.key = file_name
    if file_path:
        # NOTE(review): plain concatenation — no separator is inserted;
        # callers appear to pass a trailing-slash path. Confirm before
        # switching to os.path.join.
        file_path = file_path + file_name
    else:
        file_path = "/home/saksham/startupportal/portal/media/" + (str(file_name))
    k.set_contents_from_filename(file_path, cb=percent_cb, num_cb=10)
    k.set_acl('public-read')
    win_botohelper_logger.debug("upload_image doc upload with %s name" % str(file_name))
    return file_name
def post():
    """Handle an authenticated image upload: verify the shared secret,
    store the uploaded file in S3 as public-read, refresh the cached URL
    list in redis, and return the stored filename as JSON.
    """
    data = request.form
    files = request.files
    # adds a modicum of security...
    code = data.get('code')
    if code != os.environ.get('SEKKRIT_CODE'):
        # Fix: the original body "{'error':'unauthorized'}" used single
        # quotes, which is not valid JSON despite the application/json
        # mimetype.
        err = Response(response='{"error": "unauthorized"}',
                       status=401,
                       mimetype="application/json")
        return err
    f = files.get('image')
    b = conn.get_bucket(S3_BUCKET)
    k = Key(b)
    path = data.get('filename')
    k.key = path
    k.set_contents_from_file(f)
    k.set_acl("public-read")
    # update the list of URLs stored in redis
    get_latest()
    # return the file name because reasons
    return json.dumps({"file": path})
def process_file(aws_conn, filepath): mtime = get_mtime(filepath) name_200 = add_size_name(filepath, '200') name_800 = add_size_name(filepath, '800') mtime_200 = get_mtime(name_200) mtime_800 = get_mtime(name_800) im = None if mtime_200 is None or mtime_200 < mtime: try: im = Image.open(filepath) except: return None generate_200(im, name_200) if mtime_800 is None or mtime_800 < mtime: if im is None: try: im = Image.open(filepath) except: return None generate_800(im, name_800) names = { 'original': filepath, 'thumbnail': name_200, 'display': name_800, } b = Bucket(aws_conn, BUCKET) image_result = {} for image_type, name in names.items(): aws_tag_path = add_size_name(name, 's3t') + '.meta' aws_key_path = name[len(GALLERY_DIR):].strip('/') image_result[image_type] = { 'url': 'http://s3.amazonaws.com/{}/{}'.format( BUCKET, aws_key_path) } if not is_newer(name, aws_tag_path): try: resolution = load_data(aws_tag_path) resolution['width'] except: resolution = get_resolution(name) save_data(aws_tag_path, resolution) image_result[image_type].update(resolution) continue resolution = get_resolution(name) image_result.update(resolution) save_data(aws_tag_path, resolution) s3key = b.get_key(aws_key_path) mtime = get_mtime(name) if s3key and s3key.last_modified: print datetime.datetime(*parsedate(s3key.last_modified)[:6]) print mtime if datetime.datetime(*parsedate(s3key.last_modified)[:6]) > mtime: with open(aws_tag_path, 'a'): os.utime(aws_tag_path, None) continue print 'Sending {} to S3'.format(name) k = Key(b) k.key = aws_key_path expires = datetime.datetime.utcnow() + datetime.timedelta(days=25 * 365) expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT") k.set_metadata("Content-Type", mimetypes.guess_type(name)[0]) k.set_metadata("Expires", expires) k.set_metadata("Cache-Control", "max-age={0}, public".format(86400 * 365 * 25)) k.set_contents_from_filename(name) k.set_acl('public-read') with open(aws_tag_path, 'a'): os.utime(aws_tag_path, None) photo_age = 
get_photo_age(filepath) image_result['caption'] = get_caption(filepath) return photo_age, image_result