def main():
    """Render every post once, then rebuild whenever a source file changes.

    Collects ``*.rst`` and ``*.ipynb`` files from the sibling ``posts``
    directory, renders them into ``output``, and then loops forever
    waiting on filesystem changes, re-reading each changed file with the
    dependency cache disabled before asking the project to rebuild.
    """
    here = os.path.dirname(__file__)
    source_dir = os.path.normpath(os.path.join(here, '..', 'posts'))
    output_dir = os.path.normpath(os.path.join(here, '..', 'output'))
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    paths = tuple(glob(os.path.join(source_dir, '*.rst'))
                  + glob(os.path.join(source_dir, '*.ipynb')))
    for path in sorted_posts(paths):
        render(paths, path)

    project.verbose = True
    while True:
        print('=' * 72)
        print('Watching for files to change')
        changed_paths = looping_wait_on(paths)
        print('=' * 72)
        print('Reloading:', ' '.join(changed_paths))
        # Re-read changed inputs with caching off so stale content is
        # not served back to the rebuild.
        with project.cache_off():
            for path in changed_paths:
                read_text_file(path)
        project.rebuild()
def main():
    """Render the ``*.txt`` and ``*.out`` files beside this script.

    After the initial render, performs exactly one watch-and-rebuild
    pass (the watch body is kept but deliberately not an endless loop),
    re-reading changed files with the cache disabled before rebuilding.
    """
    here = os.path.dirname(__file__)
    paths = tuple(glob(os.path.join(here, '*.txt'))
                  + glob(os.path.join(here, '*.out')))
    print(paths)

    for path in sorted_files(paths):
        render(path)

    project.verbose = True
    # The original guarded this with a counter (`while count < 1`),
    # which executes the body exactly once — same behavior here.
    for _ in range(1):
        print('=' * 72)
        print('Watching for files to change')
        changed_paths = looping_wait_on(paths)
        print('=' * 72)
        print('Reloading:', ' '.join(changed_paths))
        with project.cache_off():
            for path in changed_paths:
                read_text_file(path)
        project.rebuild()
def main():
    """Build the whole site — texts, feeds, static files — then watch.

    Renders every matching text under ``texts/brandon`` into the
    ``output`` tree (rewriting each path to a ``.../index.html`` form),
    writes the RSS/Atom feeds, copies static assets, and then — unless
    any command-line argument was given — loops forever rebuilding
    whatever changes on disk.
    """
    global text_paths
    here = os.path.dirname(__file__)
    output_dir = os.path.normpath(os.path.join(here, '..', 'output'))
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    patterns = (
        'texts/brandon/*/*.md',
        'texts/brandon/*/*.rst',
        'texts/brandon/*/*.html',
        'texts/brandon/*/*.ipynb',
        'texts/brandon/*.html',
    )
    found = []
    for pattern in patterns:
        found.extend(glob(pattern))
    text_paths = tuple(found)

    for path in text_paths:
        # Drop the leading 'texts/' component, then give every page
        # its own directory with an index.html inside.
        outpath = os.path.join(output_dir, path.split('/', 1)[1])
        if not path.endswith('/index.html'):
            outpath = os.path.splitext(outpath)[0] + '/index.html'
        os.makedirs(os.path.dirname(outpath), exist_ok=True)
        save_text(text_paths, path, outpath)

    # NOTE(review): feed paths are hard-coded relative 'output/...'
    # strings rather than derived from output_dir — confirm the script
    # is always run from the project root.
    save_rss_feed(text_paths, 'output/brandon/feed/index.xml')
    save_atom_feed(text_paths, 'output/brandon/feed/atom/index.xml')
    save_rss_feed(text_paths, 'output/brandon/category/python/feed/index.xml')

    static_paths = tuple(find('static'))
    for path in static_paths:
        outpath = os.path.join(output_dir, path.split('/', 1)[1])
        os.makedirs(os.path.dirname(outpath), exist_ok=True)
        save_static(path, outpath)

    all_paths = text_paths + static_paths
    project.verbose = True
    if len(sys.argv) > 1:
        return  # one-shot build requested; skip the watch loop

    while True:
        print('=' * 72)
        print('Watching for files to change')
        changed_paths = looping_wait_on(all_paths)
        print('=' * 72)
        print('Reloading:', ' '.join(changed_paths))
        for path in changed_paths:
            project.invalidate((read_text_file, (path,)))
        project.rebuild()
def main():
    """Render texts, feeds, and static assets, then rebuild on change.

    Every text under ``texts/brandon`` is written into the ``output``
    tree as a ``.../index.html`` page; RSS and Atom feeds and static
    files follow. With any command-line argument the function returns
    after the one-shot build; otherwise it watches all inputs forever.
    """
    global text_paths
    here = os.path.dirname(__file__)
    output_dir = os.path.normpath(os.path.join(here, '..', 'output'))
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    text_paths = tuple(
        path
        for pattern in ('texts/brandon/*/*.md',
                        'texts/brandon/*/*.rst',
                        'texts/brandon/*/*.html',
                        'texts/brandon/*/*.ipynb',
                        'texts/brandon/*.html')
        for path in glob(pattern)
    )

    for path in text_paths:
        # Strip the leading 'texts/' component and normalize every
        # page to live at its own .../index.html.
        outpath = os.path.join(output_dir, path.split('/', 1)[1])
        if not path.endswith('/index.html'):
            outpath = os.path.splitext(outpath)[0] + '/index.html'
        os.makedirs(os.path.dirname(outpath), exist_ok=True)
        save_text(text_paths, path, outpath)

    save_rss_feed(text_paths, 'output/brandon/feed/index.xml')
    save_atom_feed(text_paths, 'output/brandon/feed/atom/index.xml')
    save_rss_feed(text_paths, 'output/brandon/category/python/feed/index.xml')

    static_paths = tuple(find('static'))
    for path in static_paths:
        outpath = os.path.join(output_dir, path.split('/', 1)[1])
        os.makedirs(os.path.dirname(outpath), exist_ok=True)
        save_static(path, outpath)

    all_paths = text_paths + static_paths
    project.verbose = True
    if len(sys.argv) > 1:
        return  # any argument means "build once and exit"

    while True:
        print('=' * 72)
        print('Watching for files to change')
        changed_paths = looping_wait_on(all_paths)
        print('=' * 72)
        print('Reloading:', ' '.join(changed_paths))
        for path in changed_paths:
            project.invalidate((read_text_file, (path,)))
        project.rebuild()
def main():
    """Trace a full build, dump the dependency graph, then watch.

    Renders every path with dependency tracing enabled, prints the
    trace, writes the project graph to ``chapter.dot`` in Graphviz
    form, and then loops forever re-reading changed files (cache off)
    and rebuilding, printing a fresh trace after each rebuild.
    """
    project.verbose = True
    project.start_tracing()
    for path in get_paths():
        render(path)
    print(project.stop_tracing(True))

    # Fix: the original did `open('chapter.dot', 'w').write(...)`,
    # leaking the handle and relying on GC to flush and close it.
    # A context manager closes (and flushes) deterministically.
    with open('chapter.dot', 'w') as dot_file:
        dot_file.write(as_graphviz(project._graph))

    while True:
        print('=' * 72)
        print('Watching for files to change')
        changed_paths = looping_wait_on(get_paths())
        print('=' * 72)
        print('Reloading:', ' '.join(changed_paths))
        # Re-read changed inputs with caching disabled so the rebuild
        # sees their fresh contents.
        with project.cache_off():
            for path in changed_paths:
                read_text_file(path)
        project.start_tracing()
        project.rebuild()
        print(project.stop_tracing(True))
def main():
    """Render all posts, then keep rebuilding as their sources change.

    Gathers ``*.rst`` and ``*.ipynb`` sources from the sibling
    ``posts`` directory, renders each into ``output``, and enters an
    endless watch loop: changed files are re-read with the cache off,
    then the project is rebuilt.
    """
    here = os.path.dirname(__file__)
    posts_dir = os.path.normpath(os.path.join(here, '..', 'posts'))
    out_dir = os.path.normpath(os.path.join(here, '..', 'output'))
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    rst_paths = glob(os.path.join(posts_dir, '*.rst'))
    notebook_paths = glob(os.path.join(posts_dir, '*.ipynb'))
    paths = tuple(rst_paths + notebook_paths)

    for path in sorted_posts(paths):
        render(paths, path)

    project.verbose = True
    while True:
        print('=' * 72)
        print('Watching for files to change')
        changed_paths = looping_wait_on(paths)
        print('=' * 72)
        print('Reloading:', ' '.join(changed_paths))
        with project.cache_off():
            for path in changed_paths:
                read_text_file(path)
        project.rebuild()