def main(self):
    """Upload each file in self.args['sources'] to the destination.

    For every source file the destination bucket/key is computed
    (either literally from 'dest', or 'dest' treated as a prefix with
    the file's basename appended), the upload runs on a daemon worker
    thread via self.try_send, and the main thread drives a progress
    bar from the open file's position.

    Raises:
        ArgumentError: if 'literal_dest' is set but 'dest' contains no
            key name after the bucket.
        Exception: whatever error the worker thread recorded in
            self.last_upload_error, re-raised after the upload ends.
    """
    sources = list(self.args['sources'])
    label_template = build_progressbar_label_template(sources)
    for index, source_filename in enumerate(sources, 1):
        if self.args.get('literal_dest', False):
            (bucket, __, keyname) = self.args['dest'].partition('/')
            if not keyname:
                raise ArgumentError('destination must contain a key name')
        else:
            (bucket, __, prefix) = self.args['dest'].partition('/')
            keyname = prefix + os.path.basename(source_filename)
        self.path = bucket + '/' + keyname
        self.headers['Content-Length'] = os.path.getsize(source_filename)
        self.headers.pop('Content-Type', None)
        if self.args.get('acl'):
            self.headers['x-amz-acl'] = self.args['acl']
        if self.args.get('guess_mime_type', False):
            # mimetypes.guess_type returns a (type, encoding) tuple; the
            # original assigned the whole tuple (always truthy) as the
            # header value.  Use only the guessed type, and only when a
            # guess was actually made.
            content_type = mimetypes.guess_type(source_filename)[0]
            if content_type:
                self.headers['Content-Type'] = content_type
        self.log.info('uploading %s to %s', source_filename, self.path)
        with self._lock:
            self.last_upload_error = None
        # Binary mode: the file is streamed as a raw payload whose size
        # was measured with os.path.getsize; text-mode newline
        # translation would corrupt it (and break on Python 3).
        with open(source_filename, 'rb') as source:
            upload_thread = threading.Thread(
                target=self.try_send, args=(source,),
                kwargs={'retries_left': self.args['retries']})
            # The upload thread is daemonic so ^C will kill the program
            # more cleanly.
            upload_thread.daemon = True
            upload_thread.start()
            label = label_template.format(
                index=index, fname=os.path.basename(source_filename))
            pbar = self.get_progressbar(
                label=label, maxval=os.path.getsize(source_filename))
            pbar.start()
            # Poll the worker's read position to animate the bar.
            while upload_thread.is_alive():
                pbar.update(source.tell())
                time.sleep(0.01)
            pbar.finish()
            upload_thread.join()
        with self._lock:
            if self.last_upload_error is not None:
                # pylint: disable=E0702
                raise self.last_upload_error
def main(self):
    """Upload each file in self.args['sources'] to the destination.

    The destination bucket/key is computed per file (literal 'dest', or
    'dest' as a prefix plus the file's basename).  Each upload runs on
    a daemon worker thread via self.try_send while this thread updates
    a progress bar from the file position.

    Raises:
        ArgumentError: if 'literal_dest' is set but 'dest' contains no
            key name after the bucket.
        Exception: any error the worker stored in
            self.last_upload_error, re-raised once the upload finishes.
    """
    sources = list(self.args['sources'])
    label_template = build_progressbar_label_template(sources)
    for index, source_filename in enumerate(sources, 1):
        if self.args.get('literal_dest', False):
            (bucket, __, keyname) = self.args['dest'].partition('/')
            if not keyname:
                raise ArgumentError('destination must contain a key name')
        else:
            (bucket, __, prefix) = self.args['dest'].partition('/')
            keyname = prefix + os.path.basename(source_filename)
        self.path = bucket + '/' + keyname
        self.headers['Content-Length'] = os.path.getsize(source_filename)
        self.headers.pop('Content-Type', None)
        if self.args.get('acl'):
            self.headers['x-amz-acl'] = self.args['acl']
        if self.args.get('guess_mime_type', False):
            # mimetypes.guess_type returns (type, encoding); the original
            # stored the whole (always-truthy) tuple as the header value.
            # Keep just the type component, and skip the header when no
            # type could be guessed.
            content_type = mimetypes.guess_type(source_filename)[0]
            if content_type:
                self.headers['Content-Type'] = content_type
        self.log.info('uploading %s to %s', source_filename, self.path)
        with self._lock:
            self.last_upload_error = None
        # Binary mode: raw payload sized with os.path.getsize; text mode
        # would translate newlines (and fail outright on Python 3).
        with open(source_filename, 'rb') as source:
            upload_thread = threading.Thread(
                target=self.try_send, args=(source,),
                kwargs={'retries_left': self.args['retries']})
            # The upload thread is daemonic so ^C will kill the program
            # more cleanly.
            upload_thread.daemon = True
            upload_thread.start()
            label = label_template.format(
                index=index, fname=os.path.basename(source_filename))
            pbar = self.get_progressbar(
                label=label, maxval=os.path.getsize(source_filename))
            pbar.start()
            # Drive the progress bar off the worker's read position.
            while upload_thread.is_alive():
                pbar.update(source.tell())
                time.sleep(0.01)
            pbar.finish()
            upload_thread.join()
        with self._lock:
            if self.last_upload_error is not None:
                # pylint: disable=E0702
                raise self.last_upload_error
def main(self):
    """Download each key in self.args['paths'] to self.args['opath'].

    If opath is (or, by ending with '/', is meant to be) a directory,
    each key is written to its own file named after the key's basename;
    otherwise all keys are concatenated into the single file opath.
    A per-key progress bar is sized from the Content-Length response
    header when the server provides one.
    """
    opath = self.args['opath']
    label_template = build_progressbar_label_template(self.args['paths'])
    if opath.endswith('/') and not os.path.isdir(opath):
        # Ends with '/' and does not exist -> create it
        os.mkdir(opath)
    if os.path.isdir(opath):
        # Download one file per key into the directory
        for index, path in enumerate(self.args['paths'], 1):
            ofile_name = os.path.join(opath, path.rsplit('/', 1)[-1])
            self.path = path
            response = self.send()
            if 'Content-Length' in response.headers:
                maxval = int(response.headers['Content-Length'])
            else:
                # Unknown size; let the progress bar run unbounded.
                maxval = None
            label = label_template.format(index=index, fname=path)
            pbar = self.get_progressbar(label=label, maxval=maxval)
            pbar.start()
            # Binary mode: iter_content yields raw byte chunks; text
            # mode fails on Python 3 and mangles newlines on Windows.
            with open(ofile_name, 'wb') as ofile:
                for chunk in response.iter_content(chunk_size=16384):
                    ofile.write(chunk)
                    pbar.update(ofile.tell())
                ofile.flush()
            pbar.finish()
    else:
        # Download everything to one file
        with open(opath, 'wb') as ofile:
            for index, path in enumerate(self.args['paths'], 1):
                self.path = path
                response = self.send()
                # Track bytes for this key only, since ofile.tell()
                # would include previously-appended keys.
                bytes_written = 0
                if 'Content-Length' in response.headers:
                    maxval = int(response.headers['Content-Length'])
                else:
                    maxval = None
                label = label_template.format(index=index, fname=path)
                pbar = self.get_progressbar(label=label, maxval=maxval)
                pbar.start()
                for chunk in response.iter_content(chunk_size=16384):
                    ofile.write(chunk)
                    bytes_written += len(chunk)
                    pbar.update(bytes_written)
                pbar.finish()
                ofile.flush()
def main(self):
    """Download each key in self.args['paths'] and return their SHA-1s.

    If 'opath' is a directory (or ends with '/', in which case it is
    created), each key is written to its own file there; otherwise all
    keys are written sequentially to the single file 'opath', or to a
    caller-supplied 'fileobj' if one is in self.args.

    Returns:
        dict mapping each downloaded path to the sha1 checksum reported
        by self._download_to_fileobj.
    """
    sha1_dict = {}
    opath = self.args['opath']
    paths = self.args['paths']
    show_progress = self.args.get('show_progress')
    # Lazy %-style args: the message is only built if DEBUG is enabled.
    self.log.debug('GOT SHOW PROGRESS: %s', show_progress)
    label = None
    if show_progress:
        label_template = build_progressbar_label_template(paths)
    if opath and (os.path.isdir(opath) or opath.endswith('/')):
        # Download paths to individual files under provided directory...
        if not os.path.isdir(opath):
            # Ends with '/' and does not exist -> create it
            os.mkdir(opath)
        # Download one file per key
        for index, path in enumerate(paths, 1):
            ofile_name = os.path.join(opath, path.rsplit('/', 1)[-1])
            if show_progress:
                label = label_template.format(index=index, fname=path)
            # Binary mode: downloaded content is raw bytes; text mode
            # would corrupt it (and fail on Python 3).
            with open(ofile_name, 'wb') as ofile:
                sha1sum = (self._download_to_fileobj(
                    path=path, outfile=ofile,
                    show_progress=show_progress, pbar_label=label))
                sha1_dict[path] = sha1sum
    else:
        # Download everything to one file
        fileobj = self.args.get('fileobj')
        ofile = fileobj or open(opath, 'wb')
        try:
            for index, path in enumerate(paths, 1):
                if show_progress:
                    label = label_template.format(index=index, fname=path)
                sha1sum = (self._download_to_fileobj(
                    path=path, outfile=ofile,
                    show_progress=show_progress, pbar_label=label))
                sha1_dict[path] = sha1sum
        finally:
            # Only close the file if it was opened within this method.
            # (The original tested "opath and ofile", which would also
            # close a caller-supplied fileobj whenever opath was set.)
            if fileobj is None and ofile:
                ofile.close()
    return sha1_dict