def upload_directory(context, dir_path, upload_prefix, check_items=None):
    """
    Upload all the files in the directory 'dir_path' into the s3 bucket.

    The variable 'upload_prefix' is the path relative to the s3 bucket.
    The list 'check_items' is a list of existing s3 items at this path.
    If an item to be uploaded is found in check_items, it is skipped.
    """
    check_items = check_items or []
    items_by_name = {os.path.basename(item.name): item for item in check_items}

    # Upload RPMs:
    for filename in os.listdir(dir_path):
        filepath = os.path.join(dir_path, filename)
        remote_item = items_by_name.get(filename)

        # Skip any non-file entries:
        if not os.path.isfile(filepath):
            continue

        # Skip anything that doesn't need to be uploaded:
        if not should_upload(filepath, remote_item, context.opts.force_upload):
            verbose('File "%s" already exists in S3 location "%s"; skipping upload',
                    filename, upload_prefix)
            continue

        # Perform the upload:
        dest_path = s3join(upload_prefix, filename)
        item_key = boto.s3.key.Key(context.s3_bucket)
        item_key.key = dest_path
        if not context.opts.dry_run:
            item_key.set_contents_from_filename(
                filepath,
                cb=get_progress_fn(context.opts.verbose,
                                   "Uploading: %s" % dest_path))
        else:
            verbose("Uploading: %s" % dest_path)
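# A minimal usage sketch for upload_directory(), assuming 'context' is the
# command-line context object built elsewhere in this module (with an
# 's3_bucket' and an 'opts' namespace) and that the existing items come from
# boto's bucket.list(). The prefix and local path below are illustrative only.
def example_upload(context):
    upload_prefix = "rpms/x86_64"
    existing = list(context.s3_bucket.list(prefix=upload_prefix))
    upload_directory(context, "/tmp/build/RPMS/x86_64", upload_prefix,
                     check_items=existing)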
def download_items(context, items, dest_dir, force_download=False):
    """
    Download the s3 items given by 'items' into the destination directory
    given by 'dest_dir'.

    If force_download is true, download *everything* in the list. Otherwise,
    skip downloads for items which are already present in the working
    directory.
    """
    try:
        no_items = 0
        for item in items:
            # Skip folder keys:
            if item.name.find(FOLDER_SUFFIX) != -1:
                verbose("Not downloading: %s", item.name)
                continue

            filename = os.path.basename(item.name)
            filepath = os.path.join(dest_dir, filename)

            if should_download(item, filepath, force_download):
                # Write in binary mode so the downloaded bytes (and their
                # checksum) are identical on every platform:
                with open(filepath, 'wb') as f:
                    item.get_file(f, cb=get_progress_fn(
                        context.opts.verbose,
                        "Downloading %s" % item.name))

                # Verify the checksum of the downloaded item:
                if not md5_matches(filepath, get_s3item_md5(item)):
                    raise ServiceError(
                        "\nDownload failed: md5 mismatch for %s" % filename)
            else:
                verbose('File "%s" already exists in "%s"; skipping download',
                        filename, dest_dir)

            no_items += 1

        return no_items
    except IOError as ex:
        raise ServiceError("Error opening %s: %s (%i)"
                           % (ex.filename, ex.strerror, ex.errno))
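# A minimal usage sketch for download_items(), again assuming a 'context'
# object with an 's3_bucket' attribute; the prefix and destination directory
# are illustrative only. The return value is the number of non-folder keys
# processed (downloaded or skipped).
def example_download(context):
    items = list(context.s3_bucket.list(prefix="rpms/x86_64"))
    count = download_items(context, items, dest_dir="/tmp/downloads")
    verbose("Processed %i item(s)", count)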