def async_job_loop(jobs, force, p):
    results = []

    for job in jobs:
        if 'target' not in job:
            job['target'] = None
        if 'dependency' not in job:
            job['dependency'] = None

        if force is True or check_dependency(job['target'], job['dependency']):
            if 'callback' in job:
                if isinstance(job['args'], dict):
                    results.append(p.apply_async(job['job'], kwds=job['args'],
                                                 callback=job['callback']))
                else:
                    results.append(p.apply_async(job['job'], args=job['args'],
                                                 callback=job['callback']))
            else:
                if isinstance(job['args'], dict):
                    results.append(p.apply_async(job['job'], kwds=job['args']))
                else:
                    results.append(p.apply_async(job['job'], args=job['args']))

    return results
def async_job_loop(jobs, force, p):
    results = []

    for job in jobs:
        # Fill in optional keys so check_dependency() always receives both values.
        if 'target' not in job:
            job['target'] = None
        if 'dependency' not in job:
            job['dependency'] = None

        # Only dispatch the job if forced or if the target is out of date.
        if force is True or check_dependency(job['target'], job['dependency']):
            # Dict arguments are passed as keywords, anything else positionally.
            if 'callback' in job:
                if isinstance(job['args'], dict):
                    r = p.apply_async(job['job'], kwds=job['args'], callback=job['callback'])
                else:
                    r = p.apply_async(job['job'], args=job['args'], callback=job['callback'])
            else:
                if isinstance(job['args'], dict):
                    r = p.apply_async(job['job'], kwds=job['args'])
                else:
                    r = p.apply_async(job['job'], args=job['args'])

            results.append((job, r))

    return results
def manual_single_html(input_file, output_file):
    # Don't rebuild this if it's not needed.
    if check_dependency(output_file, input_file) is False:
        logging.info('singlehtml not changed, not reprocessing.')
        return False
    else:
        text_lines = decode_lines_from_file(input_file)

        regexes = [(re.compile('href="contents.html'), 'href="index.html'),
                   (re.compile('name="robots" content="index"'), 'name="robots" content="noindex"'),
                   (re.compile('href="genindex.html'), 'href="../genindex/')]

        for regex, subst in regexes:
            text_lines = [regex.sub(subst, text) for text in text_lines]

        encode_lines_to_file(output_file, text_lines)
        logging.info('processed singlehtml file.')
def sync_runner(jobs, force):
    results = []

    for job in jobs:
        if 'target' not in job:
            job['target'] = None
        if 'dependency' not in job:
            job['dependency'] = None

        if force is True or check_dependency(job['target'], job['dependency']):
            if isinstance(job['args'], dict):
                r = job['job'](**job['args'])
            else:
                r = job['job'](*job['args'])

            results.append(r)

            if 'callback' in job:
                job['callback'](r)

    return results
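# The sketch below is an illustrative usage example, not part of the original
# source: it assumes check_dependency() and the runners above live in the same
# module, and the touch_file job and its paths are hypothetical. It shows how a
# job list might be dispatched synchronously or through a worker pool.

import multiprocessing

def touch_file(path, content=''):
    # Trivial stand-in job: write 'content' to 'path'.
    with open(path, 'w') as f:
        f.write(content)

def example_runner(parallel=True):
    jobs = [
        {
            'job': touch_file,
            'args': {'path': 'build/example.txt', 'content': 'rebuilt'},
            'target': 'build/example.txt',
            'dependency': 'source/example.txt',
        },
    ]

    if parallel:
        p = multiprocessing.Pool()
        # The later async_job_loop() definition returns (job, AsyncResult) pairs.
        results = async_job_loop(jobs, False, p)
        p.close()
        p.join()
        return [r.get() for job, r in results]
    else:
        return sync_runner(jobs, False)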