def update_file(finfo, sample_info, config):
    """Update the file to an Amazon S3 bucket, using server side encryption.

    Runs the filesystem finalization first, then mirrors the resulting file
    (or every file under the resulting directory) into the configured bucket,
    skipping keys whose remote timestamp is already at least as new as the
    local mtime.
    """
    ffinal = filesystem.update_file(finfo, sample_info, config, pass_uptodate=True)
    # Key names are paths relative to the configured base directory.
    base_prefix = os.path.abspath(config["dir"]) + "/"
    if os.path.isdir(ffinal):
        transfers = [(os.path.join(root, name),
                      os.path.join(root, name).replace(base_prefix, ""))
                     for root, _dirs, names in os.walk(ffinal)
                     for name in names]
    else:
        transfers = [(ffinal, ffinal.replace(base_prefix, ""))]
    region = "@%s" % config["region"] if config.get("region") else ""
    # Build a representative s3:// URL so objectstore can pick the connection.
    fname = "s3://%s%s/%s" % (config["bucket"], region, transfers[0][1])
    conn = objectstore.connect(fname)
    bucket = conn.lookup(config["bucket"])
    if not bucket:
        bucket = conn.create_bucket(config["bucket"],
                                    location=config.get("region", "us-east-1"))
    for local_path, rel_key in transfers:
        keyname = os.path.join(config.get("folder", ""), rel_key)
        existing = bucket.get_key(keyname) if bucket else None
        remote_mtime = None
        if existing:
            remote_mtime = datetime.datetime.fromtimestamp(
                email.utils.mktime_tz(email.utils.parsedate_tz(existing.last_modified)))
        # Upload when the key is missing or strictly older than the local copy.
        if not existing or remote_mtime < finfo["mtime"]:
            _upload_file_aws_cli(local_path, config["bucket"], keyname, config, finfo)
def update_file(finfo, sample_info, config):
    """Update the file to an Amazon S3 bucket, using server side encryption.

    Finalizes the file locally, then uploads it (or all files in the
    finalized directory) to the configured bucket via boto, creating the
    bucket on demand and skipping already up-to-date keys.
    """
    conn = boto.connect_s3()
    ffinal = filesystem.update_file(finfo, sample_info, config, pass_uptodate=True)
    base_prefix = os.path.abspath(config["dir"]) + "/"
    if os.path.isdir(ffinal):
        transfers = []
        for root, _dirs, names in os.walk(ffinal):
            for name in names:
                abs_path = os.path.join(root, name)
                # Key name is the path relative to the configured base dir.
                transfers.append((abs_path, abs_path.replace(base_prefix, "")))
    else:
        transfers = [(ffinal, ffinal.replace(base_prefix, ""))]
    bucket = conn.lookup(config["bucket"])
    if not bucket:
        bucket = conn.create_bucket(config["bucket"])
    for local_path, rel_key in transfers:
        keyname = os.path.join(config.get("folder", ""), rel_key)
        existing = bucket.get_key(keyname) if bucket else None
        remote_mtime = None
        if existing:
            remote_mtime = datetime.datetime.fromtimestamp(
                email.utils.mktime_tz(email.utils.parsedate_tz(existing.last_modified)))
        # Only re-upload when the remote copy is missing or stale.
        if not existing or remote_mtime < finfo["mtime"]:
            upload_file(local_path, config["bucket"], keyname, finfo)
def update_file(finfo, sample_info, config):
    """Update the file to an iRODS repository.

    Runs the local filesystem finalization for its side effects, then pushes
    the configured directory into the iRODS folder via the icommands CLI.
    """
    # Called for its side effects; the returned path is not needed here.
    filesystem.update_file(finfo, sample_info, config, pass_uptodate=True)
    _upload_dir_icommands_cli(config.get("dir"), config.get("folder"), config)
def update_file(finfo, sample_info, config):
    """Update the file to an Amazon S3 bucket, using server side encryption.

    Finalizes locally via the filesystem backend, collects the file (or each
    file under the finalized directory) with its bucket-relative key, and
    uploads anything missing or older than the local mtime.
    """
    conn = boto.connect_s3()
    ffinal = filesystem.update_file(finfo, sample_info, config, pass_uptodate=True)

    def _rel_key(path):
        # Strip the configured base directory to form the S3 key.
        return path.replace(os.path.abspath(config["dir"]) + "/", "")

    if os.path.isdir(ffinal):
        transfers = [(os.path.join(root, name), _rel_key(os.path.join(root, name)))
                     for root, _dirs, names in os.walk(ffinal)
                     for name in names]
    else:
        transfers = [(ffinal, _rel_key(ffinal))]
    bucket = conn.lookup(config["bucket"])
    if not bucket:
        bucket = conn.create_bucket(config["bucket"])
    for local_path, rel_key in transfers:
        keyname = os.path.join(config.get("folder", ""), rel_key)
        existing = bucket.get_key(keyname) if bucket else None
        remote_mtime = None
        if existing:
            remote_mtime = datetime.datetime.fromtimestamp(
                email.utils.mktime_tz(email.utils.parsedate_tz(existing.last_modified)))
        # Skip keys that are already at least as new as the local file.
        if not existing or remote_mtime < finfo["mtime"]:
            upload_file(local_path, config["bucket"], keyname, finfo)
def update_file(finfo, sample_info, config):
    """Update the file to an Amazon S3 bucket, using server side encryption.

    Finalizes the file locally, then uploads the file (or every file under
    the finalized directory) to the configured bucket with gof3r, attaching
    the finfo metadata as x-amz-meta-* headers and requesting AES256
    server-side encryption. Keys whose remote timestamp is already at least
    as new as the local mtime are skipped.

    :param finfo: dict of file metadata; must contain "mtime".
    :param sample_info: sample metadata passed through to the filesystem backend.
    :param config: dict with "dir", "bucket" and optional "folder" keys.
    """
    conn = boto.connect_s3()
    ffinal = filesystem.update_file(finfo, sample_info, config, pass_uptodate=True)
    if os.path.isdir(ffinal):
        to_transfer = []
        for path, dirs, files in os.walk(ffinal):
            for f in files:
                full_f = os.path.join(path, f)
                # Key name is the path relative to the configured base dir.
                k = full_f.replace(os.path.abspath(config["dir"]) + "/", "")
                to_transfer.append((full_f, k))
    else:
        k = ffinal.replace(os.path.abspath(config["dir"]) + "/", "")
        to_transfer = [(ffinal, k)]
    bucket = conn.lookup(config["bucket"])
    if not bucket:
        bucket = conn.create_bucket(config["bucket"])
    for fname, orig_keyname in to_transfer:
        keyname = os.path.join(config.get("folder", ""), orig_keyname)
        key = bucket.get_key(keyname) if bucket else None
        modified = datetime.datetime.fromtimestamp(email.utils.mktime_tz(
            email.utils.parsedate_tz(key.last_modified))) if key else None
        no_upload = key and modified >= finfo["mtime"]
        if not no_upload:
            metadata = ["-m", "x-amz-server-side-encryption:AES256"]
            # BUGFIX: was finfo.iteritems(), which is Python-2-only and raises
            # AttributeError under Python 3; .items() behaves identically here.
            for name, val in finfo.items():
                val = _update_val(name, val)
                if val:
                    metadata += ["-m", "x-amz-meta-%s:%s" % (name, val)]
            cmd = ["gof3r", "put", "--no-md5", "-b", config["bucket"],
                   "-k", keyname, "-p", fname] + metadata
            do.run(cmd, "Upload to s3: %s %s" % (config["bucket"], keyname))
def update_file(finfo, sample_info, config):
    """Update the file to an Amazon S3 bucket, using server side encryption.

    Finalizes locally, optionally records a per-file checksum into finfo,
    then uploads each missing-or-stale key, routing through the AWS CLI for
    regions that require the newer permission model and through the direct
    uploader otherwise.
    """
    ffinal = filesystem.update_file(finfo, sample_info, config, pass_uptodate=True)
    base_prefix = os.path.abspath(config["dir"]) + "/"
    if os.path.isdir(ffinal):
        transfers = []
        for root, _dirs, names in os.walk(ffinal):
            for name in names:
                abs_path = os.path.join(root, name)
                transfers.append((abs_path, abs_path.replace(base_prefix, "")))
    else:
        transfers = [(ffinal, ffinal.replace(base_prefix, ""))]
    region = "@%s" % config["region"] if config.get("region") else ""
    # Representative s3:// URL lets objectstore choose the right connection.
    fname = "s3://%s%s/%s" % (config["bucket"], region, transfers[0][1])
    conn = objectstore.connect(fname)
    bucket = conn.lookup(config["bucket"])
    if not bucket:
        bucket = conn.create_bucket(config["bucket"],
                                    location=config.get("region", "us-east-1"))
    checksum_type = config.get("checksum", None)
    for local_path, rel_key in transfers:
        if checksum_type is not None:
            # Record the checksum so it travels with the upload metadata.
            finfo["checksum-%s" % checksum_type] = getattr(checksum, checksum_type)(local_path)
        keyname = os.path.join(config.get("folder", ""), rel_key)
        existing = bucket.get_key(keyname) if bucket else None
        remote_mtime = None
        if existing:
            remote_mtime = datetime.datetime.fromtimestamp(
                email.utils.mktime_tz(email.utils.parsedate_tz(existing.last_modified)))
        # Upload when the key is missing or strictly older than the local file.
        if not existing or remote_mtime < finfo["mtime"]:
            if config.get("region") in objectstore.REGIONS_NEWPERMS["s3"]:
                _upload_file_aws_cli(local_path, config["bucket"], keyname, config, finfo)
            else:
                _upload_file(local_path, config["bucket"], keyname, config, finfo)