def post_threadpool_actions(pool, args, expected_filesize, SmartDL_obj):
    """Run post-download actions after the thread pool is done.

    Run this in a thread.

    :param pool: worker pool exposing ``done()`` and ``get_exceptions()``.
    :param args: arguments forwarded to ``utils.combine_files``;
        ``args[0]`` is the list of downloaded part files and ``args[-1]``
        is the destination path.
    :param expected_filesize: expected total size in bytes (0 if unknown).
    :param SmartDL_obj: the owning SmartDL task object (logger, retry,
        status flags, and hash-verification settings are read from it).
    """
    # Poll until every worker thread has finished.
    while not pool.done():
        time.sleep(0.1)

    # Hoist: get_exceptions() was previously called three times.
    exceptions = pool.get_exceptions()
    if exceptions:
        SmartDL_obj.logger.warning(unicode(exceptions[0]))
        SmartDL_obj.retry(unicode(exceptions[0]))

    if SmartDL_obj._killed:
        return

    if SmartDL_obj._failed:
        SmartDL_obj.logger.warning("Task has errors. Exiting...")
        return

    if expected_filesize:  # if zero, the expected filesize is not known
        threads = len(args[0])
        total_filesize = sum(os.path.getsize(x) for x in args[0])
        diff = abs(expected_filesize - total_filesize)

        # A thread may download up to 4KB extra because of NTFS's block
        # size, so tolerate a difference of 4KB per thread.  (The old
        # code compared bytes against `4 * threads`, contradicting its
        # own comment, and reported the byte count as "KB".)
        if diff > 4 * 1024 * threads:
            SmartDL_obj.logger.warning(
                'Diff between downloaded files and expected filesizes is %dKB. Retrying...' % (diff // 1024))
            SmartDL_obj.retry(
                'Diff between downloaded files and expected filesizes is %dKB.' % (diff // 1024))
            return

    SmartDL_obj.status = "combining"
    utils.combine_files(*args)

    if SmartDL_obj.verify_hash:
        dest_path = args[-1]

        # Hash the combined file in chunks so a multi-GB download does
        # not get slurped into memory in one read() call.  `digest` also
        # avoids shadowing the builtin `hash`.
        hasher = hashlib.new(SmartDL_obj.hash_algorithm)
        with open(dest_path, 'rb') as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b''):
                hasher.update(chunk)
        digest = hasher.hexdigest()

        if digest == SmartDL_obj.hash_code:
            SmartDL_obj.logger.debug('Hash verification succeeded.')
        else:
            SmartDL_obj.logger.debug('Hash verification failed.')
            SmartDL_obj.try_next_mirror(
                HashFailedException(os.path.basename(dest_path), digest, SmartDL_obj.hash_code))
def post_threadpool_actions(pool, args, expected_filesize, SmartDL_obj):
    """Finish up after the download thread pool completes; run in a thread."""
    # Block until every worker reports done.
    while not pool.done():
        time.sleep(0.1)

    if pool.get_exceptions():
        first_exc = pool.get_exceptions()[0]
        SmartDL_obj.logger.warning(unicode(first_exc))
        SmartDL_obj.retry(unicode(first_exc))

    # Bail out early on a killed or failed task.
    if SmartDL_obj._killed:
        return
    if SmartDL_obj._failed:
        SmartDL_obj.logger.warning("Task has errors. Exiting...")
        return

    if expected_filesize:  # zero means the expected filesize is unknown
        part_files = args[0]
        thread_count = len(part_files)
        downloaded_total = sum(os.path.getsize(p) for p in part_files)
        size_gap = math.fabs(expected_filesize - downloaded_total)

        # Tolerate a small gap per thread (a thread may download a bit
        # more because of NTFS's block size); otherwise retry.
        if size_gap > 4 * thread_count:
            SmartDL_obj.logger.warning('Diff between downloaded files and expected filesizes is %dKB. Retrying...' % size_gap)
            SmartDL_obj.retry('Diff between downloaded files and expected filesizes is %dKB.' % size_gap)
            return

    SmartDL_obj.status = "combining"
    utils.combine_files(*args)

    if SmartDL_obj.verify_hash:
        dest_path = args[-1]
        with open(dest_path, 'rb') as f:
            computed = hashlib.new(SmartDL_obj.hash_algorithm, f.read()).hexdigest()

        if computed == SmartDL_obj.hash_code:
            SmartDL_obj.logger.debug('Hash verification succeeded.')
        else:
            SmartDL_obj.logger.debug('Hash verification failed.')
            SmartDL_obj.try_next_mirror(HashFailedException(os.path.basename(dest_path), computed, SmartDL_obj.hash_code))