def finalize_single_html_jobs(builder, conf):
    """Finish the single-page HTML build and yield copy jobs for static assets.

    Ensures the single-html output directory exists, renders the
    consolidated page (preferring 'contents.html', falling back to
    'index.html' when the former cannot be read), copies the Sphinx
    object inventory, and yields one job dict per file found under the
    builder's '_static' tree.
    """
    out_dir = get_single_html_dir(conf)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    build_root = os.path.join(conf.paths.branch_output, builder)
    index_target = os.path.join(out_dir, 'index.html')

    # Prefer 'contents.html'; fall back to 'index.html' if it is
    # missing or unreadable.
    try:
        manual_single_html(input_file=os.path.join(build_root, 'contents.html'),
                           output_file=index_target)
    except (IOError, OSError):
        manual_single_html(input_file=os.path.join(build_root, 'index.html'),
                           output_file=index_target)

    copy_if_needed(source_file=os.path.join(build_root, 'objects.inv'),
                   target_file=os.path.join(out_dir, 'objects.inv'))

    static_dest = os.path.join(out_dir, '_static')
    for static_fn in expand_tree(os.path.join(build_root, '_static'), None):
        yield {'job': copy_if_needed,
               'args': [static_fn,
                        os.path.join(static_dest, os.path.basename(static_fn))],
               'target': None,
               'dependency': None}
def html_tarball(builder, conf):
    """Package the html build as a branch-named tarball and refresh its link.

    Returns False without doing any work for the 'mms' project when
    migration is not required for this builder.
    """
    if conf.project.name == 'mms' and mms_should_migrate(builder, conf) is False:
        return False

    # Copy the hash include into the build output as 'release.txt' so the
    # tarball carries it.
    copy_if_needed(os.path.join(conf.paths.projectroot,
                                conf.paths.includes, 'hash.rst'),
                   os.path.join(conf.paths.projectroot,
                                conf.paths.branch_output,
                                'html', 'release.txt'))

    basename = os.path.join(conf.paths.projectroot,
                            conf.paths.public_site_output,
                            conf.project.name + '-' + conf.git.branches.current)
    tarball_name = basename + '.tar.gz'

    tarball(name=tarball_name,
            path='html',
            cdir=os.path.join(conf.paths.projectroot, conf.paths.branch_output),
            sourcep='html',
            newp=os.path.basename(basename))

    # Maintain a stable '<project>.tar.gz' link pointing at the current
    # branch tarball.
    create_link(input_fn=os.path.basename(tarball_name),
                output_fn=os.path.join(conf.paths.projectroot,
                                       conf.paths.public_site_output,
                                       conf.project.name + '.tar.gz'))
def html_tarball(builder, conf):
    """Build the branch-qualified html tarball and link it by project name.

    For the 'mms' project, bail out (returning False) when no migration
    is required for this builder.
    """
    if conf.project.name == 'mms' and mms_should_migrate(builder, conf) is False:
        return False

    pjoin = os.path.join
    root = conf.paths.projectroot

    # Ship the hash include inside the html output as 'release.txt'.
    copy_if_needed(pjoin(root, conf.paths.includes, 'hash.rst'),
                   pjoin(root, conf.paths.branch_output, 'html', 'release.txt'))

    archive_base = pjoin(root, conf.paths.public_site_output,
                         conf.project.name + '-' + conf.git.branches.current)
    archive_name = archive_base + '.tar.gz'

    tarball(name=archive_name,
            path='html',
            cdir=pjoin(root, conf.paths.branch_output),
            sourcep='html',
            newp=os.path.basename(archive_base))

    # Keep the unversioned '<project>.tar.gz' name pointing at this build.
    create_link(input_fn=os.path.basename(archive_name),
                output_fn=pjoin(root, conf.paths.public_site_output,
                                conf.project.name + '.tar.gz'))
def json_output(conf):
    """Sync the json builder output into the public staging directory.

    Rsyncs the (possibly edition-suffixed) json build tree into
    '<public_site_output>/json' and publishes the generated file list as
    '.file_list' next to it.
    """
    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_list_file = os.path.join(conf.paths.public_site_output,
                                    'json', '.file_list')

    cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'
    json_dst = os.path.join(conf.paths.public_site_output, 'json')

    if not os.path.exists(json_dst):
        logger.debug('created directories for {0}'.format(json_dst))
        os.makedirs(json_dst)

    builder = 'json'
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        # Best-effort suffixing: a malformed (non-string) edition value
        # should not abort the deploy. The original used a bare 'except:',
        # which also swallowed SystemExit/KeyboardInterrupt; narrowed to
        # TypeError, the only plausible failure of the concatenation.
        try:
            builder += '-' + conf.project.edition
        except TypeError:
            pass

    # Trailing '/' on src makes rsync copy directory *contents*.
    command(cmd.format(src=os.path.join(conf.paths.branch_output, builder) + '/',
                       dst=json_dst))

    copy_if_needed(list_file, public_list_file)
    logger.info('deployed json files to local staging.')
def json_output(conf):
    """Deploy the json build output to the local public staging area.

    Mirrors the (possibly edition-suffixed) json build tree into
    '<public_site_output>/json' via rsync and copies the build's file
    list to the public '.file_list'.
    """
    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_list_file = os.path.join(conf.paths.public_site_output,
                                    'json', '.file_list')

    cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'
    json_dst = os.path.join(conf.paths.public_site_output, 'json')

    if not os.path.exists(json_dst):
        logger.debug('created directories for {0}'.format(json_dst))
        os.makedirs(json_dst)

    builder = 'json'
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        try:
            builder += '-' + conf.project.edition
        except TypeError:
            # Narrowed from a bare 'except:' — only a non-string edition
            # can make the concatenation fail; keep the best-effort
            # behavior without masking unrelated errors.
            pass

    command(cmd.format(src=os.path.join(conf.paths.branch_output, builder) + '/',
                       dst=json_dst))

    copy_if_needed(list_file, public_list_file)
    logger.info('deployed json files to local staging.')
def finalize_epub_build(builder, conf):
    """Publish the built epub under a branch-qualified name plus a stable link.

    Copies '<title>.epub' out of the builder's output directory to
    '<title>-<branch>.epub' in the public site output, then links the
    unversioned name to it.
    """
    slug = '-'.join(conf.project.title.lower().split())
    branched_name = slug + '-' + conf.git.branches.current + '.epub'
    source_name = slug + '.epub'

    public_dir = os.path.join(conf.paths.projectroot,
                              conf.paths.public_site_output)

    copy_if_needed(source_file=os.path.join(conf.paths.projectroot,
                                            conf.paths.branch_output,
                                            builder, source_name),
                   target_file=os.path.join(public_dir, branched_name))

    # The unversioned filename always points at the current branch's epub.
    create_link(input_fn=branched_name,
                output_fn=os.path.join(public_dir, source_name))
def finalize_dirhtml_build(builder, conf):
    """Migrate the dirhtml build into the public site output.

    Regenerates error pages, copies the search page into the single-html
    directory, rsyncs the build tree into the public output, publishes
    the sitemap for published branches, and removes any files the
    'dirhtml.excluded_files' list in sphinx.yaml names.

    Returns False (skipping migration) for the 'mms' project when no
    migration is required for this builder.
    """
    error_pages(conf)

    single_html_dir = get_single_html_dir(conf)
    search_page = os.path.join(conf.paths.branch_output, builder, 'index.html')

    if conf.project.name == 'mms' and mms_should_migrate(builder, conf) is False:
        return False

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=os.path.join(single_html_dir, 'search.html'))

    dest = os.path.join(conf.paths.projectroot, conf.paths.public_site_output)
    command('rsync -a {source}/ {destination}'.format(
        source=os.path.join(conf.paths.projectroot,
                            conf.paths.branch_output, builder),
        destination=dest))
    print('[{0}]: migrated build to {1}'.format(builder, dest))

    # Sitemaps are only published for branches listed as published.
    if conf.git.branches.current in conf.git.branches.published:
        if sitemap(config_path=None, conf=conf) is True:
            copy_if_needed(source_file=os.path.join(conf.paths.projectroot,
                                                    conf.paths.branch_output,
                                                    'sitemap.xml.gz'),
                           target_file=os.path.join(conf.paths.projectroot,
                                                    conf.paths.public_site_output,
                                                    'sitemap.xml.gz'))

    sconf = BuildConfiguration('sphinx.yaml',
                               os.path.join(conf.paths.projectroot,
                                            conf.paths.builddata))
    if 'dirhtml' in sconf and 'excluded_files' in sconf.dirhtml:
        excluded = [os.path.join(conf.paths.projectroot,
                                 conf.paths.public_site_output, fn)
                    for fn in sconf.dirhtml.excluded_files]
        cleaner(excluded)
        print('[dirhtml] [clean]: removed excluded files from output directory')
def json_output(conf=None):
    """Rsync the json build output into local public staging.

    Resolves the configuration lazily when none is supplied, then
    mirrors '<branch_output>/json' into '<public_site_output>/json' and
    copies the json file list to the public '.file_list'.
    """
    conf = lazy_conf(conf)

    file_list = os.path.join(conf.paths.branch_staging, 'json-file-list')
    public_file_list = os.path.join(conf.paths.public_site_output,
                                    'json', '.file_list')

    rsync_cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'

    destination = os.path.join(conf.paths.public_site_output, 'json')
    if not os.path.exists(destination):
        os.makedirs(destination)

    # Trailing '/' makes rsync copy directory *contents*, not the dir itself.
    source = os.path.join(conf.paths.branch_output, 'json') + '/'
    command(rsync_cmd.format(src=source, dst=destination))

    copy_if_needed(file_list, public_file_list)
    print('[json]: deployed json files to local staging.')
def finalize_epub_build(conf):
    """Publish the epub build under a branch-qualified name plus a stable link.

    Copies '<title>.epub' from the 'epub' build output to
    '<title>-<branch>.epub' in the public site output, then links the
    unversioned name to it. For the 'mms' project, returns False without
    doing any work when no migration is required.
    """
    epub_name = '-'.join(conf.project.title.lower().split())
    epub_branched_filename = epub_name + '-' + conf.git.branches.current + '.epub'
    epub_src_filename = epub_name + '.epub'

    # BUG FIX: the original referenced an undefined name 'builder' here,
    # raising NameError whenever project.name == 'mms'. This function
    # always operates on the 'epub' builder (see the hard-coded 'epub'
    # path component below), so pass that literal.
    if conf.project.name == 'mms' and mms_should_migrate('epub', conf) is False:
        return False

    copy_if_needed(source_file=os.path.join(conf.paths.projectroot,
                                            conf.paths.branch_output,
                                            'epub', epub_src_filename),
                   target_file=os.path.join(conf.paths.projectroot,
                                            conf.paths.public_site_output,
                                            epub_branched_filename))

    # Keep the unversioned epub name pointing at the current branch's build.
    create_link(input_fn=epub_branched_filename,
                output_fn=os.path.join(conf.paths.projectroot,
                                       conf.paths.public_site_output,
                                       epub_src_filename))
def json_output(conf=None):
    """Deploy the json build output to the local public staging directory.

    Lazily resolves the configuration when called without one, mirrors
    the json build tree into '<public_site_output>/json' via rsync, and
    copies the file list to the public '.file_list'.
    """
    conf = lazy_conf(conf)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')
    public_list_file = os.path.join(conf.paths.public_site_output,
                                    'json', '.file_list')

    json_dst = os.path.join(conf.paths.public_site_output, 'json')
    if not os.path.exists(json_dst):
        os.makedirs(json_dst)

    cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'
    command(cmd.format(src=os.path.join(conf.paths.branch_output, 'json') + '/',
                       dst=json_dst))

    copy_if_needed(list_file, public_list_file)
    print('[json]: deployed json files to local staging.')
def build_sffms(conf):
    """Clean up sffms LaTeX output and hand the resulting pdf jobs off.

    Runs the 'sffms-cleanup' script over every .tex file in the sffms
    output tree, copies each pdf entry from pdfs.yaml to its output
    name, and delegates the actual pdf builds to pdf_processor.
    """
    munge_script = os.path.join(conf.paths.buildsystem, 'bin', 'sffms-cleanup')
    base_dir = os.path.join(conf.paths.projectroot, conf.paths.output, 'sffms')

    # One cleanup-command job per .tex file under the sffms output tree.
    preprocess = [{'job': command,
                   'args': [' '.join([munge_script, fn])]}
                  for fn in expand_tree(base_dir, 'tex')]

    pdfs = ingest_yaml_list(os.path.join(conf.paths.builddata, 'pdfs.yaml'))

    count = runner(preprocess)
    # Fixed log-message typo: was "prepossessed".
    print("[pdf] [sffms]: preprocessed {0} sffms files".format(count))

    for pdf in pdfs:
        copy_if_needed(source_file=os.path.join(base_dir, pdf['input']),
                       target_file=os.path.join(base_dir, pdf['output']),
                       name='sffms')

    pdf_processor(conf, pdfs, None)
def man_tarball(conf):
    """Tar up the built manpages and publish them with a stable link.

    Builds 'manpages-<branch>.tar.gz' from the 'man' build output,
    copies it into the public site output, and links 'manpages.tar.gz'
    to the branch-qualified archive.
    """
    basename = os.path.join(conf.paths.projectroot,
                            conf.paths.branch_output,
                            'manpages-' + conf.git.branches.current)
    tarball_name = basename + '.tar.gz'

    tarball(name=tarball_name,
            path='man',
            cdir=os.path.dirname(basename),
            sourcep='man',
            newp=conf.project.name + '-manpages')

    public_dir = os.path.join(conf.paths.projectroot,
                              conf.paths.public_site_output)
    copy_if_needed(tarball_name,
                   os.path.join(public_dir, os.path.basename(tarball_name)))

    # Stable unversioned name for the latest manpages archive.
    create_link(input_fn=os.path.basename(tarball_name),
                output_fn=os.path.join(public_dir, 'manpages' + '.tar.gz'))
def man_tarball(conf):
    """Archive the manpage build output and expose it in the public site.

    Produces 'manpages-<branch>.tar.gz', copies it into the public site
    output, and maintains an unversioned 'manpages.tar.gz' link.
    """
    archive_base = os.path.join(conf.paths.projectroot,
                                conf.paths.branch_output,
                                'manpages-' + conf.git.branches.current)
    archive_name = archive_base + '.tar.gz'
    archive_fn = os.path.basename(archive_name)

    tarball(name=archive_name,
            path='man',
            cdir=os.path.dirname(archive_base),
            sourcep='man',
            newp=conf.project.name + '-manpages')

    copy_if_needed(archive_name,
                   os.path.join(conf.paths.projectroot,
                                conf.paths.public_site_output,
                                archive_fn))

    create_link(input_fn=archive_fn,
                output_fn=os.path.join(conf.paths.projectroot,
                                       conf.paths.public_site_output,
                                       'manpages' + '.tar.gz'))
def finalize_dirhtml_build(builder, conf):
    """Migrate the dirhtml build into the public site output.

    Copies the search page into the single-html directory, rsyncs the
    build tree into the public output, publishes the sitemap for
    published branches, and removes any files named by
    'dirhtml.excluded_files' in sphinx.yaml.
    """
    pjoin = os.path.join
    single_html_dir = get_single_html_dir(conf)

    search_page = pjoin(conf.paths.branch_output, builder, 'index.html')
    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=pjoin(single_html_dir, 'search.html'))

    dest = pjoin(conf.paths.projectroot, conf.paths.public_site_output)
    command('rsync -a {source}/ {destination}'.format(
        source=pjoin(conf.paths.projectroot, conf.paths.branch_output, builder),
        destination=dest))
    logger.info('{0}: migrated build to {1}'.format(builder, dest))

    if 'published' in conf.git.branches and conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)
        if sitemap_exists is True:
            copy_if_needed(source_file=pjoin(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             'sitemap.xml.gz'),
                           target_file=pjoin(conf.paths.projectroot,
                                             conf.paths.public_site_output,
                                             'sitemap.xml.gz'))

    sconf = BuildConfiguration('sphinx.yaml',
                               pjoin(conf.paths.projectroot,
                                     conf.paths.builddata))
    if 'dirhtml' in sconf and 'excluded_files' in sconf.dirhtml:
        fns = [pjoin(conf.paths.projectroot,
                     conf.paths.public_site_output, fn)
               for fn in sconf.dirhtml.excluded_files]
        cleaner(fns)
        # Consistency fix: the original called the root 'logging' module
        # here while every other message in this function goes through
        # the module-level 'logger'.
        logger.info('removed excluded files from dirhtml output directory')
def finalize_dirhtml_build(builder, conf):
    """Finalize and stage the dirhtml build.

    Copies the search page into the single-html directory, rsyncs the
    build tree into the public site output, publishes the sitemap for
    published branches, and deletes files listed under
    'dirhtml.excluded_files' in sphinx.yaml.
    """
    pjoin = os.path.join
    single_html_dir = get_single_html_dir(conf)

    search_page = pjoin(conf.paths.branch_output, builder, 'index.html')
    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=pjoin(single_html_dir, 'search.html'))

    dest = pjoin(conf.paths.projectroot, conf.paths.public_site_output)
    command('rsync -a {source}/ {destination}'.format(
        source=pjoin(conf.paths.projectroot, conf.paths.branch_output, builder),
        destination=dest))
    logger.info('{0}: migrated build to {1}'.format(builder, dest))

    if 'published' in conf.git.branches and conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)
        if sitemap_exists is True:
            copy_if_needed(source_file=pjoin(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             'sitemap.xml.gz'),
                           target_file=pjoin(conf.paths.projectroot,
                                             conf.paths.public_site_output,
                                             'sitemap.xml.gz'))

    sconf = BuildConfiguration('sphinx.yaml',
                               pjoin(conf.paths.projectroot,
                                     conf.paths.builddata))
    if 'dirhtml' in sconf and 'excluded_files' in sconf.dirhtml:
        fns = [pjoin(conf.paths.projectroot,
                     conf.paths.public_site_output, fn)
               for fn in sconf.dirhtml.excluded_files]
        cleaner(fns)
        # Consistency fix: was 'logging.info' (root logger) while the
        # rest of this function uses the module-level 'logger'.
        logger.info('removed excluded files from dirhtml output directory')
def html_tarball(builder, conf):
    """Package the builder's html output as a branch tarball and link it.

    Copies the hash include into the build output as 'release.txt',
    archives the output under '<project>-<branch>.tar.gz' in the public
    site output, and links '<project>.tar.gz' to it.
    """
    root = conf.paths.projectroot

    copy_if_needed(os.path.join(root, conf.paths.includes, 'hash.rst'),
                   os.path.join(root, conf.paths.branch_output,
                                builder, 'release.txt'))

    archive_base = os.path.join(root, conf.paths.public_site_output,
                                conf.project.name + '-' + conf.git.branches.current)
    archive_name = archive_base + '.tar.gz'

    tarball(name=archive_name,
            path=builder,
            cdir=os.path.join(root, conf.paths.branch_output),
            newp=os.path.basename(archive_base))

    # Stable '<project>.tar.gz' name always refers to the current build.
    create_link(input_fn=os.path.basename(archive_name),
                output_fn=os.path.join(root, conf.paths.public_site_output,
                                       conf.project.name + '.tar.gz'))