# Python 2 module.  print_function gives the print(..., end='') form used
# below; BaseHTTPRequestHandler supplies the standard HTTP response strings.
# IECUnitConverter and the opts/stats objects are provided elsewhere in the
# tool.
from __future__ import print_function

import logging
import os
import urllib2
from BaseHTTPServer import BaseHTTPRequestHandler

log = logging.getLogger(__name__)


def fetch_contents(fpath, opts, root='', no_trim=False, for_filehash=None,
                   short_name=None, file_count_number=None,
                   file_count_total=None, remote_flag=True,
                   include_in_total=True):
    '''
    Wrap a fetch, which may be from a file or URL depending on the options.

    Returns None on a 404, re-raises the Exception otherwise.
    '''
    fullpath = fpath
    if not no_trim and opts.trim_path and root:
        fullpath = os.path.join(opts.source_url, fpath)

    log.debug("fetch_contents: %s", fullpath)

    fh = None
    if for_filehash is not None:
        fh = for_filehash

    # Try to get a friendly name.
    fname = fpath
    if short_name:
        fname = short_name
    elif fh is not None:
        fname = fh.fpath

    filecountstr = ''
    if file_count_number is not None:
        if file_count_total is not None:
            if file_count_total == 0:
                pct = 100.0
            else:
                pct = 100.0 * file_count_number / file_count_total
            filecountstr = ' [object %d/%d (%.0f%%)]' % (
                file_count_number, file_count_total, pct)
        else:
            filecountstr = ' [file %d]' % file_count_number

    # This part of the progress meter doesn't change, so cache it.
    pfx = "%s" % (filecountstr)
    if not opts.quiet:
        if opts.progress:
            progress_spacer = "\n\t"
            pfx = "\r" + pfx
        else:
            progress_spacer = ''
        print('F: %s%s%s' % (fname, progress_spacer, pfx), end='')

    outfile = ''
    try:
        if remote_flag and include_in_total:
            opts.stats.content_fetches += 1
        else:
            opts.stats.metadata_fetches += 1
        url = urllib2.urlopen(fullpath)
    except urllib2.HTTPError as e:
        if e.code == 404:
            resp = BaseHTTPRequestHandler.responses
            log.warn("Failed to retrieve '%s': %s", fullpath, resp[404][0])
            return None
        # Non-404 HTTP errors are re-raised, per the docstring.
        raise
    except urllib2.URLError as e:
        log.warn("Failed to retrieve '%s': %s", fullpath, e)
        return None

    size = 0
    size_is_known = False
    block_size = int(opts.fetch_blocksize)

    if fh is not None:
        size = fh.size
        size_is_known = True
    elif 'content-length' in url.info():
        size = int(url.info()['content-length'])
        size_is_known = True

    if opts.progress and size_is_known:
        sizestr = IECUnitConverter.bytes_to_unit(size)

    bytes_read = 0
    more_to_read = True

    def progstr():
        if size_is_known:
            if size == 0:
                pct = 100.0  # Seems logical.
            else:
                pct = 100.0 * bytes_read / size
            print("\r\t%s (file progress %s/%s [%.0f%%])" % (
                pfx, IECUnitConverter.bytes_to_unit(bytes_read),
                sizestr, pct), end='')
        else:
            print("\r%s (download %s/unknown)" % (
                pfx, IECUnitConverter.bytes_to_unit(bytes_read)), end='')

    if opts.progress:
        progstr()

    while more_to_read:
        # if log.isEnabledFor(logging.DEBUG):
        #     log.debug("Read: %d bytes (%d/%d)",
        #               block_size, bytes_read, size)
        try:
            new_bytes = url.read(block_size)
            nblen = len(new_bytes)
            if not new_bytes:
                more_to_read = False
            else:
                bytes_read += nblen
                if remote_flag:
                    if include_in_total:
                        opts.stats.bytes_transferred += nblen
                    else:
                        opts.stats.metadata_bytes_transferred += nblen
                outfile += new_bytes
                if opts.progress:
                    progstr()
        except urllib2.URLError as e:
            log.warn("'%s' fetch failed: %s", fname, str(e))
            raise e

    if not opts.progress:
        # In progress mode, give the caller a chance to add information.
        print('')

    if size_is_known and bytes_read != size:
        # That's an error.  No need for a cryptochecksum to tell that.
        log.warn("'%s': Fetched %d bytes, expected %d bytes",
                 fname, bytes_read, size)
        return None

    return outfile
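# IECUnitConverter is referenced above but defined elsewhere in this repo;
# the class below is a minimal, hypothetical stand-in (not the real
# implementation).  It only assumes that bytes_to_unit() renders a byte count
# with IEC binary prefixes (KiB, MiB, ...), which is what the progress
# strings above expect.
class IECUnitConverter(object):

    _UNITS = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB')

    @staticmethod
    def bytes_to_unit(nbytes):
        '''Render nbytes as a human-readable string using IEC prefixes.'''
        value = float(nbytes)
        for unit in IECUnitConverter._UNITS:
            # Stop once the value fits under 1024, or we run out of units.
            if value < 1024.0 or unit == IECUnitConverter._UNITS[-1]:
                return '%.1f%s' % (value, unit)
            value /= 1024.0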
def fetch_contents(fpath, opts, root='', no_trim=False, for_filehash=None, short_name=None, file_count_number=None, file_count_total=None, remote_flag=True, include_in_total=True): ''' Wrap a fetch, which may be from a file or URL depending on the options. Returns None on a 404, re-raises the Exception otherwise. ''' fullpath = fpath if not no_trim and opts.trim_path and root: fullpath = os.path.join(opts.source_url, fpath) log.debug("fetch_contents: %s", fullpath) fh = None if for_filehash is not None: fh = for_filehash # Try to get a friendly name. fname = fpath if short_name: fname = short_name elif fh is not None: fname = fh.fpath filecountstr = '' if file_count_number is not None: if file_count_total is not None: if file_count_total == 0: pct = 100.0 else: pct = 100.0 * file_count_number / file_count_total filecountstr = ' [object %d/%d (%.0f%%)]' % ( file_count_number, file_count_total, pct) else: filecountstr = ' [file %d]' % file_count_number # This part of the progress meter doesn't change, so cache it. pfx = "%s" % (filecountstr) if not opts.quiet: if opts.progress: progress_spacer = "\n\t" pfx = "\r" + pfx else: progress_spacer = '' print('F: %s%s%s' % (fname, progress_spacer, pfx), end='') outfile = '' try: if remote_flag and include_in_total: opts.stats.content_fetches += 1 else: opts.stats.metadata_fetches += 1 url = urllib2.urlopen(fullpath) except urllib2.HTTPError as e: if e.code == 404: resp = BaseHTTPRequestHandler.responses log.warn("Failed to retrieve '%s': %s", fullpath, resp[404][0]) return None except urllib2.URLError as e: log.warn("Failed to retrieve '%s': %s", fullpath, e) return None size = 0 size_is_known = False block_size = int(opts.fetch_blocksize) if fh is not None: size = fh.size size_is_known = True elif 'content-length' in url.info()['content-length']: size = int(url.info()['content-length']) size_is_known = True if opts.progress and size_is_known: sizestr = IECUnitConverter.bytes_to_unit(size) bytes_read = 0 more_to_read = True def progstr(): if size_is_known: if size == 0: pct = 100.0 # Seems logical. else: pct = 100.0 * bytes_read / size print ("\r\t%s (file progress %s/%s [%.0f%%])" % (pfx, IECUnitConverter.bytes_to_unit( bytes_read), sizestr, pct), end='') else: print("\r%s (download %s/unknown)" % (pfx, IECUnitConverter.bytes_to_unit(bytes_read)), end='') if opts.progress: progstr() while more_to_read: # if log.isEnabledFor(logging.DEBUG): # log.debug("Read: %d bytes (%d/%d)", # block_size,bytes_read, size) try: new_bytes = url.read(block_size) nblen = len(new_bytes) if not new_bytes: more_to_read = False else: bytes_read += nblen if remote_flag: if include_in_total: opts.stats.bytes_transferred += nblen else: opts.stats.metadata_bytes_transferred += nblen outfile += new_bytes if opts.progress: progstr() except urllib2.URLError as e: log.warn("'%s' fetch failed: %s", str(e)) raise e if not opts.progress: # In progress mode, give the caller a chance to add information. print('') if size_is_known and bytes_read != size: # That's an error. No need for a cryptochecksum to tell that. log.warn("'%s': Fetched %d bytes, expected %d bytes", fname, bytes_read, size) return None return outfile
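# A minimal, hypothetical usage sketch for fetch_contents().  The real tool
# presumably builds 'opts' from its command-line parser; the attribute names
# below are taken only from the references inside fetch_contents() itself,
# and the URL is purely illustrative.
if __name__ == '__main__':

    class _Stats(object):
        content_fetches = 0
        metadata_fetches = 0
        bytes_transferred = 0
        metadata_bytes_transferred = 0

    class _Opts(object):
        trim_path = False
        quiet = False
        progress = True
        fetch_blocksize = 64 * 1024
        source_url = ''
        stats = _Stats()

    data = fetch_contents('http://example.invalid/some/file', _Opts(),
                          file_count_number=1, file_count_total=1)
    if data is None:
        print("fetch failed")
    else:
        print("fetched %d bytes" % len(data))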