def slides_tarball(builder, conf):
    """Package the slides build into a branch-qualified tarball and refresh
    the stable ``<project>-slides.tar.gz`` link in the public output."""
    # Record the build hash next to the slides output before archiving.
    copy_if_needed(
        os.path.join(conf.paths.projectroot, conf.paths.includes, 'hash.rst'),
        os.path.join(conf.paths.projectroot, conf.paths.branch_output,
                     builder, 'release.txt'))

    archive_base = os.path.join(
        conf.paths.projectroot, conf.paths.public_site_output,
        hyph_concat(conf.project.name, conf.git.branches.current, builder))
    archive_path = archive_base + '.tar.gz'

    tarball(name=archive_path,
            path=builder,
            cdir=os.path.join(conf.paths.projectroot, conf.paths.branch_output),
            newp=os.path.basename(archive_base))

    # Re-point the unversioned alias at the freshly built archive.
    alias = os.path.join(conf.paths.projectroot, conf.paths.public_site_output,
                         hyph_concat(conf.project.name, 'slides') + '.tar.gz')
    if os.path.exists(alias):
        os.remove(alias)

    create_link(input_fn=os.path.basename(archive_path), output_fn=alias)
def _render_tex_into_pdf(fn, deployed_path, path):
    """Render a ``.tex`` file into a PDF via repeated pdflatex passes.

    Runs pdflatex, makeindex, then pdflatex twice more so the index and
    cross references resolve. Failures in the first two stages are tolerated
    (they frequently succeed on a later pass); later failures abort.

    :param fn: path of the ``.tex`` source file to render.
    :param deployed_path: destination handed to ``copy_if_needed`` for the
        finished PDF.
    :param path: build directory, used for ``TEXINPUTS`` and as
        ``--output-directory``.
    :returns: ``False`` on a fatal rendering error, otherwise ``None``.
    """
    pdflatex = 'TEXINPUTS=".:{0}:" pdflatex --interaction batchmode --output-directory {0} {1}'.format(
        path, fn)

    base_fn = os.path.basename(fn)

    # base_fn[:-4] strips the ".tex" suffix for the makeindex target.
    cmds = [
        pdflatex,
        "makeindex -s {0}/python.ist {0}/{1}.idx ".format(path, base_fn[:-4]),
        pdflatex, pdflatex
    ]

    for idx, cmd in enumerate(cmds):
        r = command(command=cmd, ignore=True)

        if r.succeeded is True:
            # Report 1-based stage numbers: previously this logged the raw
            # 0-based index ("stage 0 of 4"), unlike the sibling variant of
            # this function which logs idx + 1.
            logger.info(
                'pdf completed rendering stage {0} of {1} successfully.'.
                format(idx + 1, len(cmds)))
        else:
            if idx <= 1:
                logger.warning(
                    'pdf build encountered error early on {0}, continuing cautiously.'
                    .format(base_fn))
                continue
            else:
                logger.error(
                    'pdf build encountered error running pdflatex, investigate on {0}. terminating'
                    .format(base_fn))
                return False

    pdf_fn = os.path.splitext(fn)[0] + '.pdf'
    copy_if_needed(pdf_fn, deployed_path, 'pdf')
def finalize_single_html_jobs(builder, conf):
    """Assemble the single-page HTML output and yield copy jobs for its
    static assets.

    Generator: each yielded dict describes one ``copy_if_needed`` job for a
    file under the builder's ``_static`` directory.
    """
    join = os.path.join

    single_html_dir = get_single_html_dir(conf)
    if not os.path.exists(single_html_dir):
        os.makedirs(single_html_dir)

    # Sphinx names the root document contents.html or index.html depending
    # on configuration; fall back to the latter if the former is missing.
    try:
        manual_single_html(input_file=join(conf.paths.branch_output,
                                           builder, 'contents.html'),
                           output_file=join(single_html_dir, 'index.html'))
    except (IOError, OSError):
        manual_single_html(input_file=join(conf.paths.branch_output,
                                           builder, 'index.html'),
                           output_file=join(single_html_dir, 'index.html'))

    copy_if_needed(source_file=join(conf.paths.branch_output,
                                    builder, 'objects.inv'),
                   target_file=join(single_html_dir, 'objects.inv'))

    static_dest = join(single_html_dir, '_static')

    for static_fn in expand_tree(join(conf.paths.branch_output,
                                      builder, '_static'), None):
        yield {
            'job': copy_if_needed,
            'args': [static_fn, join(static_dest, os.path.basename(static_fn))],
            'target': None,
            'dependency': None
        }
def json_output(conf):
    """Stage the Sphinx json build output into the public site directory.

    Rsyncs the json builder output (edition-qualified when applicable) into
    ``public_site_output/json`` and publishes the generated file list.
    Errors from rsync are logged, not raised.
    """
    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_list_file = os.path.join(conf.paths.projectroot,
                                    conf.paths.public_site_output,
                                    'json', '.file_list')

    cmd = ('rsync --recursive --times --delete '
           '--exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" '
           '{src} {dst}')

    json_dst = os.path.join(conf.paths.projectroot,
                            conf.paths.public_site_output, 'json')
    safe_create_directory(json_dst)

    builder = 'json'
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        builder += '-' + conf.project.edition

    # Trailing '/' makes rsync copy the directory's contents, not the
    # directory itself.
    cmd_str = cmd.format(src=os.path.join(conf.paths.projectroot,
                                          conf.paths.branch_output,
                                          builder) + '/',
                         dst=json_dst)

    try:
        # shlex.split() (not str.split()) removes the shell quoting around
        # the --exclude patterns; a plain split() would hand rsync arguments
        # containing literal double quotes, so the exclusions never matched.
        subprocess.check_call(shlex.split(cmd_str))
        copy_if_needed(list_file, public_list_file)
        logger.info('deployed json files to local staging.')
    except subprocess.CalledProcessError:
        logger.error('error migrating json artifacts to local staging')
def json_output(conf):
    """Sync the json builder output into local public staging and publish
    the list of generated files."""
    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_list_file = os.path.join(conf.paths.projectroot,
                                    conf.paths.public_site_output,
                                    'json', '.file_list')

    rsync_template = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'

    staging_dir = os.path.join(conf.paths.projectroot,
                               conf.paths.public_site_output, 'json')
    if not os.path.exists(staging_dir):
        logger.debug('created directories for {0}'.format(staging_dir))
        os.makedirs(staging_dir)

    # Edition-specific projects build into an edition-suffixed directory.
    builder_name = 'json'
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        builder_name = builder_name + '-' + conf.project.edition

    source_dir = os.path.join(conf.paths.projectroot,
                              conf.paths.branch_output, builder_name) + '/'
    command(rsync_template.format(src=source_dir, dst=staging_dir))

    copy_if_needed(list_file, public_list_file)
    logger.info('deployed json files to local staging.')
def _render_tex_into_pdf(fn, deployed_path, path, output_format="pdf"):
    """
    Runs ``pdflatex`` operations, can generate ``dvi`` and ``pdf``. Runs pdflatex
    multiple times to correctly index and cross reference the PDF.

    :param fn: path of the ``.tex`` source file to render.
    :param deployed_path: destination handed to ``copy_if_needed`` for the
        finished PDF.
    :param path: build directory; exported via ``TEXINPUTS`` and used as the
        working directory and ``--output-directory`` of each subprocess.
    :param output_format: ``'pdf'`` (default) or ``'dvi'``; anything else
        logs an error and returns ``None``.
    :returns: ``False`` when a late-stage command fails, otherwise ``None``.
    """
    # Export TEXINPUTS for the child processes; the leading "." keeps the
    # current directory on the LaTeX search path.
    inputs_path = ".:{0}:".format(path)
    os.environ['TEXINPUTS'] = inputs_path

    if output_format == 'dvi':
        cmd = 'pdflatex --output-format dvi --interaction batchmode --output-directory {0} {1}'
        pdflatex = cmd.format(path, fn)
    elif output_format == 'pdf':
        pdflatex = 'pdflatex --interaction batchmode --output-directory {0} {1}'.format(
            path, fn)
    else:
        logger.error(
            'not rendering pdf because {0} is not an output format'.format(
                output_format))
        return

    base_fn = os.path.basename(fn)

    # latex -> makeindex -> latex -> latex: repeated runs resolve the index
    # and cross references. base_fn[:-4] strips the ".tex" suffix.
    cmds = [
        pdflatex,
        "makeindex -s {0}/python.ist {0}/{1}.idx ".format(path, base_fn[:-4]),
        pdflatex, pdflatex
    ]

    if output_format == 'dvi':
        cmds.append("dvipdf {0}.dvi".format(base_fn[:-4]))

    # Child output is discarded; success is judged solely by exit status.
    with open(os.devnull, 'w') as null:
        for idx, cmd in enumerate(cmds):
            ret = subprocess.call(args=shlex.split(cmd),
                                  cwd=path,
                                  stdout=null,
                                  stderr=null)
            if ret == 0:
                m = 'pdf completed rendering stage {0} of {1} successfully ({2}, {3}).'
                logger.info(m.format(idx + 1, len(cmds), base_fn, ret))
                continue
            else:
                # Early failures (first latex pass or makeindex, idx <= 1)
                # are tolerated; later failures abort the render.
                if idx <= 1:
                    m = 'pdf build encountered error early on {0}, continuing cautiously.'
                    logger.warning(m.format(base_fn))
                    continue
                else:
                    m = 'pdf build encountered error running pdflatex, investigate {0}. terminating'
                    logger.error(m.format(base_fn))
                    # Log a command line that can be re-run interactively
                    # (batchmode stripped) for debugging.
                    logger.error(' '.join([
                        'TEXINPUTS={0} '.format(inputs_path),
                        cmd.replace('--interaction batchmode', '')
                    ]))
                    return False

    pdf_fn = os.path.splitext(fn)[0] + '.pdf'
    copy_if_needed(pdf_fn, deployed_path, 'pdf')
def finalize_single_html(single_html_dir, artifact_dir, conf):
    """Render the single-page HTML index from whichever root document the
    builder emitted, then carry the intersphinx inventory alongside it."""
    # Sphinx names its root document contents.html or index.html depending
    # on configuration; process every candidate that exists.
    for candidate in ('contents.html', 'index.html'):
        source = os.path.join(conf.paths.projectroot,
                              conf.paths.branch_output,
                              artifact_dir, candidate)
        if os.path.exists(source):
            manual_single_html(input_file=source,
                               output_file=os.path.join(single_html_dir,
                                                        'index.html'))

    copy_if_needed(source_file=os.path.join(artifact_dir, 'objects.inv'),
                   target_file=os.path.join(single_html_dir, 'objects.inv'))
def _process_page(fn, output_fn, regex, copy, builder):
    """Munge one page through *regex* into a scratch file, then install it
    at *output_fn* using the requested copy policy."""
    scratch_fn = fn + '~'
    munge_page(fn=fn, out_fn=scratch_fn, regex=regex)

    # 'always' forces an unconditional copy; anything else copies only when
    # the content differs.
    copier = copy_always if copy == 'always' else copy_if_needed
    copier(source_file=scratch_fn, target_file=output_fn, name=builder)
def finalize_epub_build(builder, conf):
    """Publish the epub artifact under a branch-qualified name and create a
    stable unqualified link pointing at it."""
    slug = "-".join(conf.project.title.lower().split())
    versioned_name = slug + "-" + conf.git.branches.current + ".epub"
    plain_name = slug + ".epub"

    public_dir = os.path.join(conf.paths.projectroot,
                              conf.paths.public_site_output)

    copy_if_needed(
        source_file=os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output,
                                 builder, plain_name),
        target_file=os.path.join(public_dir, versioned_name),
    )
    create_link(
        input_fn=versioned_name,
        output_fn=os.path.join(public_dir, plain_name),
    )
def finalize_dirhtml_build(sconf, conf):
    """Migrate a finished dirhtml build into the public site output.

    Copies the builder's index page into the singlehtml directory as
    search.html, rsyncs the full build into public output, prunes any
    configured excluded files, and refreshes the sitemap for published
    branches.
    """
    builder = sconf.builder
    single_html_dir = get_single_html_dir(conf)

    # Reuse the dirhtml landing page as the single-page output's search page.
    search_page = os.path.join(conf.paths.branch_output, builder, 'index.html')

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=os.path.join(single_html_dir,
                                                'search.html'))

    dest = os.path.join(conf.paths.projectroot, conf.paths.public_site_output)
    cmd_str = 'rsync -a {source}/ {destination}'.format(
        source=sconf.fq_build_output, destination=dest)

    # rsync output is discarded; only the return code is reported below.
    with open(os.devnull, 'w') as f:
        return_code = subprocess.call(args=cmd_str.split(), stdout=f, stderr=f)

    m = '"{0}" migrated build from {1} to {2}, with result {3}'
    logger.info(
        m.format(sconf.name, sconf.fq_build_output, dest, return_code))

    # NOTE(review): the guard tests 'excluded_files' in sconf, but the list
    # is read from sconf['dirhtml']['excluded_files'] -- confirm this matches
    # the sconf schema.
    if 'excluded_files' in sconf:
        fns = [
            os.path.join(conf.paths.projectroot,
                         conf.paths.public_site_output, fn)
            for fn in sconf['dirhtml']['excluded_files']
        ]

        for fn in fns:
            if os.path.isdir(fn):
                shutil.rmtree(fn)
            elif os.path.isfile(fn):
                os.remove(fn)
            else:
                # Missing path: nothing removed, skip the log line.
                continue

            logger.info('removed file from dirhtml output directory: ' + fn)

    # Sitemap work happens only for branches that are actually published.
    if conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)

        legacy_sitemap_fn = os.path.join(conf.paths.projectroot,
                                         conf.paths.branch_output,
                                         'sitemap.xml.gz')

        if os.path.exists(legacy_sitemap_fn) and sitemap_exists is True:
            copy_if_needed(source_file=legacy_sitemap_fn,
                           target_file=os.path.join(
                               conf.paths.projectroot,
                               conf.paths.public_site_output,
                               'sitemap.xml.gz'))
def _render_tex_into_pdf(fn, deployed_path, path, output_format="pdf"):
    """
    Runs ``pdflatex`` operations, can generate ``dvi`` and ``pdf``. Runs pdflatex
    multiple times to correctly index and cross reference the PDF.

    :param fn: path of the ``.tex`` file to render.
    :param deployed_path: destination passed to ``copy_if_needed`` for the
        resulting PDF.
    :param path: build directory; exported in ``TEXINPUTS`` and used as the
        subprocess working directory / ``--output-directory``.
    :param output_format: ``'pdf'`` (default) or ``'dvi'``; any other value
        logs an error and returns ``None``.
    :returns: ``False`` when a late-stage command fails, otherwise ``None``.
    """
    # The leading "." keeps the current directory on the LaTeX search path.
    inputs_path = ".:{0}:".format(path)
    os.environ['TEXINPUTS'] = inputs_path

    if output_format == 'dvi':
        cmd = 'pdflatex --output-format dvi --interaction batchmode --output-directory {0} {1}'
        pdflatex = cmd.format(path, fn)
    elif output_format == 'pdf':
        pdflatex = 'pdflatex --interaction batchmode --output-directory {0} {1}'.format(path, fn)
    else:
        logger.error('not rendering pdf because {0} is not an output format'.format(output_format))
        return

    base_fn = os.path.basename(fn)

    # latex -> makeindex -> latex -> latex; the repeated passes resolve the
    # index and cross references. base_fn[:-4] drops the ".tex" suffix.
    cmds = [pdflatex,
            "makeindex -s {0}/python.ist {0}/{1}.idx ".format(path, base_fn[:-4]),
            pdflatex,
            pdflatex]

    if output_format == 'dvi':
        cmds.append("dvipdf {0}.dvi".format(base_fn[:-4]))

    # Discard child output; only the exit code decides success.
    with open(os.devnull, 'w') as null:
        for idx, cmd in enumerate(cmds):
            ret = subprocess.call(args=shlex.split(cmd), cwd=path,
                                  stdout=null, stderr=null)
            if ret == 0:
                m = 'pdf completed rendering stage {0} of {1} successfully ({2}, {3}).'
                logger.info(m.format(idx + 1, len(cmds), base_fn, ret))
                continue
            else:
                # Tolerate failures in the first latex pass and makeindex;
                # later failures are fatal.
                if idx <= 1:
                    m = 'pdf build encountered error early on {0}, continuing cautiously.'
                    logger.warning(m.format(base_fn))
                    continue
                else:
                    m = 'pdf build encountered error running pdflatex, investigate {0}. terminating'
                    logger.error(m.format(base_fn))
                    # Emit a reproducible command line (batchmode stripped)
                    # for interactive debugging.
                    logger.error(' '.join(['TEXINPUTS={0} '.format(inputs_path),
                                           cmd.replace('--interaction batchmode', '')]))
                    return False

    pdf_fn = os.path.splitext(fn)[0] + '.pdf'
    copy_if_needed(pdf_fn, deployed_path, 'pdf')
def _render_tex_into_pdf(fn, deployed_path, path, output_format="pdf"):
    """
    Runs ``pdflatex`` operations, can generate ``dvi`` and ``pdf``. Runs pdflatex
    multiple times to correctly index and cross reference the PDF.

    :param fn: path of the ``.tex`` source to render.
    :param deployed_path: destination passed to ``copy_if_needed`` for the
        resulting PDF.
    :param path: build directory, embedded in ``TEXINPUTS`` and
        ``--output-directory``.
    :param output_format: ``'pdf'`` (default) or ``'dvi'``; any other value
        logs an error and returns ``None``.
    :returns: ``False`` on a fatal rendering error, otherwise ``None``.
    """
    if output_format == 'dvi':
        pdflatex = 'TEXINPUTS=".:{0}:" pdflatex --output-format dvi --interaction batchmode --output-directory {0} {1}'.format(
            path, fn)
    elif output_format == 'pdf':
        pdflatex = 'TEXINPUTS=".:{0}:" pdflatex --interaction batchmode --output-directory {0} {1}'.format(
            path, fn)
    else:
        logger.error(
            'not rendering pdf because {0} is not an output format'.format(
                output_format))
        return

    base_fn = os.path.basename(fn)

    # latex -> makeindex -> latex -> latex; repeated passes resolve the
    # index and cross references. base_fn[:-4] strips the ".tex" suffix.
    cmds = [
        pdflatex,
        "makeindex -s {0}/python.ist {0}/{1}.idx ".format(path, base_fn[:-4]),
        pdflatex, pdflatex
    ]

    if output_format == 'dvi':
        cmds.append("cd {0}; dvipdf {1}.dvi".format(path, base_fn[:-4]))

    for idx, cmd in enumerate(cmds):
        r = command(command=cmd, ignore=True)

        if r.succeeded is True:
            # Report 1-based stage numbers: the raw loop index logged
            # "stage 0 of 4", unlike the subprocess-based variant of this
            # function which already logs idx + 1.
            logger.info(
                'pdf completed rendering stage {0} of {1} successfully ({2}).'.
                format(idx + 1, len(cmds), base_fn))
        else:
            # Tolerate early failures (first latex pass, makeindex); later
            # failures terminate the render.
            if idx <= 1:
                logger.warning(
                    'pdf build encountered error early on {0}, continuing cautiously.'
                    .format(base_fn))
                continue
            else:
                logger.error(
                    'pdf build encountered error running pdflatex, investigate on {0}. terminating'
                    .format(base_fn))
                logger.error(cmd)
                return False

    pdf_fn = os.path.splitext(fn)[0] + '.pdf'
    copy_if_needed(pdf_fn, deployed_path, 'pdf')
def finalize_epub_build(builder, conf):
    """Deploy the epub artifact: publish it under a branch-qualified filename
    and link the stable, unqualified name to that file."""
    title_slug = '-'.join(conf.project.title.lower().split())
    branched_fn = title_slug + '-' + conf.git.branches.current + '.epub'
    stable_fn = title_slug + '.epub'

    built_artifact = os.path.join(conf.paths.projectroot,
                                  conf.paths.branch_output,
                                  builder, stable_fn)
    site_dir = os.path.join(conf.paths.projectroot,
                            conf.paths.public_site_output)

    copy_if_needed(source_file=built_artifact,
                   target_file=os.path.join(site_dir, branched_fn))
    create_link(input_fn=branched_fn,
                output_fn=os.path.join(site_dir, stable_fn))
def finalize_single_html_tasks(builder, conf, app):
    """Assemble the single-page HTML output and queue static-file copy tasks.

    Searches the plain builder directory and the edition-qualified directory
    for a root document, renders it as ``index.html``, copies the intersphinx
    inventory, and adds one *app* task per static asset.

    :param builder: name of the Sphinx builder output directory.
    :param conf: project configuration object.
    :param app: task application; ``app.add('task')`` yields a task record.
    :raises FileNotFoundError: when no root document is found in either
        candidate directory.
    """
    single_html_dir = get_single_html_dir(conf)
    safe_create_directory(single_html_dir)

    found_src = False
    # Try the plain builder directory first, then the edition-qualified one.
    for base_path in (builder, hyph_concat(builder, conf.project.edition)):
        if found_src is True:
            break

        # Sphinx names its root document contents.html or index.html
        # depending on configuration.
        for fn in [
                os.path.join(base_path, f)
                for f in ('contents.html', 'index.html')
        ]:
            src_fn = os.path.join(conf.paths.projectroot,
                                  conf.paths.branch_output, fn)

            if os.path.exists(src_fn):
                manual_single_html(input_file=src_fn,
                                   output_file=os.path.join(
                                       single_html_dir, 'index.html'))
                copy_if_needed(
                    source_file=os.path.join(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             base_path, 'objects.inv'),
                    target_file=os.path.join(single_html_dir, 'objects.inv'))

                found_src = True
                break

    if found_src is not True:
        raise FileNotFoundError('singlehtml source file')

    single_path = os.path.join(single_html_dir, '_static')

    # src_fn is the root document found above; its sibling _static directory
    # holds the assets to migrate.
    for fn in expand_tree(os.path.join(os.path.dirname(src_fn), '_static'),
                          None):
        target_fn = os.path.join(single_path, os.path.basename(fn))

        task = app.add('task')
        task.job = copy_if_needed
        task.target = target_fn
        task.dependency = fn
        task.args = [fn, target_fn]
        task.description = "migrating static files to the HTML build"
def finalize_single_html(single_html_dir, artifact_dir, conf):
    """Build the consolidated single-page index from the builder's root
    document and copy the intersphinx inventory next to it."""
    candidates = [os.path.join(artifact_dir, name)
                  for name in ('contents.html', 'index.html')]

    # Either contents.html or index.html can be the root document; render
    # each one that is present.
    for rel_fn in candidates:
        full_fn = os.path.join(conf.paths.projectroot,
                               conf.paths.branch_output, rel_fn)
        if os.path.exists(full_fn):
            manual_single_html(input_file=full_fn,
                               output_file=os.path.join(single_html_dir,
                                                        'index.html'))

    copy_if_needed(source_file=os.path.join(artifact_dir, 'objects.inv'),
                   target_file=os.path.join(single_html_dir, 'objects.inv'))
def finalize_dirhtml_build(sconf, conf):
    """Migrate a completed dirhtml build into the public site output.

    Installs the builder's index page as the singlehtml search page, rsyncs
    the build into public output, removes configured excluded files, and
    refreshes the sitemap for published branches.
    """
    builder = sconf.builder
    single_html_dir = get_single_html_dir(conf)

    # The dirhtml landing page doubles as the single-page search page.
    search_page = os.path.join(conf.paths.branch_output, builder, "index.html")

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=os.path.join(single_html_dir, "search.html"))

    dest = os.path.join(conf.paths.projectroot, conf.paths.public_site_output)
    cmd_str = "rsync -a {source}/ {destination}".format(source=sconf.fq_build_output,
                                                        destination=dest)

    # rsync output is suppressed; only the return code is logged.
    with open(os.devnull, "w") as f:
        return_code = subprocess.call(args=cmd_str.split(), stdout=f, stderr=f)

    m = '"{0}" migrated build from {1} to {2}, with result {3}'
    logger.info(m.format(sconf.name, sconf.fq_build_output, dest, return_code))

    # NOTE(review): guard tests 'excluded_files' in sconf, but the list comes
    # from sconf["dirhtml"]["excluded_files"] -- verify against the sconf
    # schema.
    if "excluded_files" in sconf:
        fns = [
            os.path.join(conf.paths.projectroot,
                         conf.paths.public_site_output, fn)
            for fn in sconf["dirhtml"]["excluded_files"]
        ]

        for fn in fns:
            if os.path.isdir(fn):
                shutil.rmtree(fn)
            elif os.path.isfile(fn):
                os.remove(fn)
            else:
                # Path absent: nothing removed, skip the log line.
                continue

            logger.info("removed file from dirhtml output directory: " + fn)

    # Sitemap refresh applies only to published branches.
    if conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)

        legacy_sitemap_fn = os.path.join(conf.paths.projectroot,
                                         conf.paths.branch_output,
                                         "sitemap.xml.gz")

        if os.path.exists(legacy_sitemap_fn) and sitemap_exists is True:
            copy_if_needed(
                source_file=legacy_sitemap_fn,
                target_file=os.path.join(conf.paths.projectroot,
                                         conf.paths.public_site_output,
                                         "sitemap.xml.gz"),
            )
def slides_tarball(builder, conf):
    """Archive the slides build as a tarball and refresh the stable
    'link-slides' symlink pointing at it."""
    # Stamp the output with the build hash before packaging.
    hash_src = os.path.join(conf.paths.projectroot, conf.paths.includes,
                            'hash.rst')
    release_dst = os.path.join(conf.paths.projectroot,
                               conf.paths.branch_output,
                               builder, 'release.txt')
    copy_if_needed(hash_src, release_dst)

    archive = get_tarball_name('slides', conf)
    tarball(name=archive,
            path=builder,
            cdir=os.path.join(conf.paths.projectroot, conf.paths.branch_output),
            newp=os.path.splitext(os.path.basename(archive))[0])

    # Replace any stale link before creating the new one.
    alias = get_tarball_name('link-slides', conf)
    if os.path.exists(alias):
        os.remove(alias)

    create_link(input_fn=os.path.basename(archive), output_fn=alias)
def finalize_single_html_tasks(builder, conf, app):
    """Assemble the single-page HTML output and queue static-file copy tasks.

    Looks for a root document in the plain builder directory and then the
    edition-qualified directory, renders it as ``index.html``, copies the
    intersphinx inventory, and registers one *app* task per static asset.

    :param builder: name of the Sphinx builder output directory.
    :param conf: project configuration object.
    :param app: task application; ``app.add('task')`` yields a task record.
    :raises FileNotFoundError: when no root document exists in either
        candidate directory.
    """
    single_html_dir = get_single_html_dir(conf)
    if not os.path.exists(single_html_dir):
        os.makedirs(single_html_dir)

    found_src = False
    # Plain builder directory first, then the edition-qualified variant.
    for base_path in (builder, hyph_concat(builder, conf.project.edition)):
        if found_src is True:
            break

        # The root document is contents.html or index.html depending on the
        # Sphinx configuration.
        for fn in [
                os.path.join(base_path, f)
                for f in ('contents.html', 'index.html')
        ]:
            src_fn = os.path.join(conf.paths.projectroot,
                                  conf.paths.branch_output, fn)

            if os.path.exists(src_fn):
                manual_single_html(input_file=src_fn,
                                   output_file=os.path.join(single_html_dir,
                                                            'index.html'))
                copy_if_needed(source_file=os.path.join(conf.paths.projectroot,
                                                        conf.paths.branch_output,
                                                        base_path,
                                                        'objects.inv'),
                               target_file=os.path.join(single_html_dir,
                                                        'objects.inv'))

                found_src = True
                break

    if found_src is not True:
        raise FileNotFoundError('singlehtml source file')

    single_path = os.path.join(single_html_dir, '_static')

    # src_fn is the root document located above; migrate its sibling _static
    # directory's contents.
    for fn in expand_tree(os.path.join(os.path.dirname(src_fn), '_static'),
                          None):
        target_fn = os.path.join(single_path, os.path.basename(fn))

        task = app.add('task')
        task.job = copy_if_needed
        task.target = target_fn
        task.dependency = fn
        task.args = [fn, target_fn]
        task.description = "migrating static files to the HTML build"
def finalize_dirhtml_build(sconf, conf):
    """Migrate a completed dirhtml build into the public site output.

    Installs the builder's index page as the singlehtml search page, rsyncs
    the build into public output via the command() helper, removes any
    configured excluded files, and refreshes the sitemap on published
    branches.
    """
    builder = sconf.builder
    single_html_dir = get_single_html_dir(conf)
    search_page = os.path.join(conf.paths.branch_output, builder, 'index.html')

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=os.path.join(single_html_dir,
                                                'search.html'))

    dest = os.path.join(conf.paths.projectroot, conf.paths.public_site_output)
    m_cmd = command('rsync -a {source}/ {destination}'.format(
        source=sconf.fq_build_output, destination=dest))

    logger.info('"{0}" migrated build from {1} to {2}, with result {3}'.format(
        sconf.name, sconf.fq_build_output, dest, m_cmd.return_code))

    if 'excluded_files' in sconf:
        fns = [
            os.path.join(conf.paths.projectroot,
                         conf.paths.public_site_output, fn)
            for fn in sconf['dirhtml']['excluded_files']
        ]

        cleaner(fns)
        # Fixed: previously called logging.info() on the root logger; use
        # the module-level logger like the rest of this function.
        logger.info('removed excluded files from dirhtml output directory')

    # Sitemap handling is limited to published branches.
    if conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)

        legacy_sitemap_fn = os.path.join(conf.paths.projectroot,
                                         conf.paths.branch_output,
                                         'sitemap.xml.gz')

        if os.path.exists(legacy_sitemap_fn) and sitemap_exists is True:
            copy_if_needed(source_file=legacy_sitemap_fn,
                           target_file=os.path.join(
                               conf.paths.projectroot,
                               conf.paths.public_site_output,
                               'sitemap.xml.gz'))
def slides_tarball(builder, conf):
    """Bundle the slides build output into a tarball and maintain the
    'link-slides' alias that points to the latest archive."""
    # Write the build hash into the output tree before archiving it.
    copy_if_needed(
        os.path.join(conf.paths.projectroot, conf.paths.includes, 'hash.rst'),
        os.path.join(conf.paths.projectroot, conf.paths.branch_output,
                     builder, 'release.txt'))

    archive_fn = get_tarball_name('slides', conf)
    # The top-level directory inside the tarball is the archive's own
    # basename without the extension.
    top_dir = os.path.splitext(os.path.basename(archive_fn))[0]

    tarball(name=archive_fn,
            path=builder,
            cdir=os.path.join(conf.paths.projectroot,
                              conf.paths.branch_output),
            newp=top_dir)

    link_fn = get_tarball_name('link-slides', conf)
    if os.path.exists(link_fn):
        os.remove(link_fn)

    create_link(input_fn=os.path.basename(archive_fn), output_fn=link_fn)
def json_output(conf):
    """Stage the json builder output into the public output directory and
    publish the file list for downstream consumers."""
    manifest = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_manifest = os.path.join(conf.paths.public_site_output,
                                   'json', '.file_list')

    rsync_template = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'

    destination = os.path.join(conf.paths.public_site_output, 'json')
    if not os.path.exists(destination):
        logger.debug('created directories for {0}'.format(destination))
        os.makedirs(destination)

    # Edition builds write to an edition-suffixed builder directory.
    builder_name = 'json'
    has_edition = ('edition' in conf.project
                   and conf.project.edition != conf.project.name)
    if has_edition:
        builder_name = builder_name + '-' + conf.project.edition

    source = os.path.join(conf.paths.branch_output, builder_name) + '/'
    command(rsync_template.format(src=source, dst=destination))

    copy_if_needed(manifest, public_manifest)
    logger.info('deployed json files to local staging.')
def finalize_dirhtml_build(sconf, conf):
    """Migrate a completed dirhtml build into the public site output.

    Installs the builder's index page as the singlehtml search page, rsyncs
    the build into public output via the command() helper, removes any
    configured excluded files, and refreshes the sitemap on published
    branches.
    """
    pjoin = os.path.join

    builder = sconf.builder
    single_html_dir = get_single_html_dir(conf)
    search_page = pjoin(conf.paths.branch_output, builder, 'index.html')

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=pjoin(single_html_dir, 'search.html'))

    dest = pjoin(conf.paths.projectroot, conf.paths.public_site_output)
    m_cmd = command('rsync -a {source}/ {destination}'.format(
        source=sconf.build_output, destination=dest))

    logger.info('"{0}" migrated build from {1} to {2}, with result {3}'.format(
        sconf.name, sconf.build_output, dest, m_cmd.return_code))

    if 'excluded_files' in sconf:
        fns = [
            pjoin(conf.paths.projectroot, conf.paths.public_site_output, fn)
            for fn in sconf['dirhtml']['excluded_files']
        ]

        cleaner(fns)
        # Fixed: previously called logging.info() on the root logger; use
        # the module-level logger like the rest of this function.
        logger.info('removed excluded files from dirhtml output directory')

    # Only published branches receive a sitemap.
    if conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)

        if sitemap_exists is True:
            copy_if_needed(source_file=pjoin(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             'sitemap.xml.gz'),
                           target_file=pjoin(conf.paths.projectroot,
                                             conf.paths.public_site_output,
                                             'sitemap.xml.gz'))
def _render_tex_into_pdf(fn, deployed_path, path):
    """Render a ``.tex`` file into a PDF with repeated pdflatex passes.

    Runs pdflatex, makeindex, then pdflatex twice more so the index and
    cross references resolve. Failures in the first two stages are tolerated;
    later failures abort.

    :param fn: path of the ``.tex`` source file.
    :param deployed_path: destination for the finished PDF.
    :param path: build directory embedded in ``TEXINPUTS`` and
        ``--output-directory``.
    :returns: ``False`` on a fatal rendering error, otherwise ``None``.
    """
    pdflatex = 'TEXINPUTS=".:{0}:" pdflatex --interaction batchmode --output-directory {0} {1}'.format(path, fn)

    base_fn = os.path.basename(fn)

    # base_fn[:-4] strips the ".tex" suffix for the makeindex target.
    cmds = [
        pdflatex,
        "makeindex -s {0}/python.ist {0}/{1}.idx ".format(path, base_fn[:-4]),
        pdflatex,
        pdflatex
    ]

    for idx, cmd in enumerate(cmds):
        r = command(command=cmd, ignore=True)

        if r.succeeded is True:
            # Report 1-based stage numbers: the raw loop index logged
            # "stage 0 of 4", unlike the subprocess-based variant of this
            # function which logs idx + 1.
            logger.info('pdf completed rendering stage {0} of {1} successfully.'.format(idx + 1, len(cmds)))
        else:
            if idx <= 1:
                logger.warning('pdf build encountered error early on {0}, continuing cautiously.'.format(base_fn))
                continue
            else:
                logger.error('pdf build encountered error running pdflatex, investigate on {0}. terminating'.format(base_fn))
                return False

    pdf_fn = os.path.splitext(fn)[0] + '.pdf'
    copy_if_needed(pdf_fn, deployed_path, 'pdf')