def finalize_single_html_jobs(builder, conf):
    pjoin = os.path.join

    single_html_dir = get_single_html_dir(conf)
    if not os.path.exists(single_html_dir):
        os.makedirs(single_html_dir)

    # The root page may be named 'contents.html' or 'index.html' depending on
    # the Sphinx configuration; fall back to 'index.html' if the first copy fails.
    try:
        manual_single_html(input_file=pjoin(conf.paths.branch_output,
                                            builder, 'contents.html'),
                           output_file=pjoin(single_html_dir, 'index.html'))
    except (IOError, OSError):
        manual_single_html(input_file=pjoin(conf.paths.branch_output,
                                            builder, 'index.html'),
                           output_file=pjoin(single_html_dir, 'index.html'))

    copy_if_needed(source_file=pjoin(conf.paths.branch_output,
                                     builder, 'objects.inv'),
                   target_file=pjoin(single_html_dir, 'objects.inv'))

    single_path = pjoin(single_html_dir, '_static')

    # Emit one copy job per static asset so the caller can schedule them.
    for fn in expand_tree(pjoin(conf.paths.branch_output,
                                builder, '_static'), None):
        yield {
            'job': copy_if_needed,
            'args': [fn, pjoin(single_path, os.path.basename(fn))],
            'target': None,
            'dependency': None
        }
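# Illustrative only: a minimal sketch of how the job dictionaries yielded by
# finalize_single_html_jobs() might be consumed. The run_job() helper below is
# hypothetical and not part of this module; the real build system may dispatch
# these specs through its own task runner.
def run_job(spec):
    # Each spec pairs a callable ('job') with its positional arguments ('args');
    # 'target' and 'dependency' are carried along for dependency-aware runners.
    return spec['job'](*spec['args'])

# Example usage (assumes a builder name of 'singlehtml' and a conf object as above):
#
#   for spec in finalize_single_html_jobs('singlehtml', conf):
#       run_job(spec)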
def finalize_epub_build(builder, conf):
    epub_name = '-'.join(conf.project.title.lower().split())
    epub_branched_filename = epub_name + '-' + conf.git.branches.current + '.epub'
    epub_src_filename = epub_name + '.epub'

    copy_if_needed(source_file=os.path.join(conf.paths.projectroot,
                                            conf.paths.branch_output,
                                            builder, epub_src_filename),
                   target_file=os.path.join(conf.paths.projectroot,
                                            conf.paths.public_site_output,
                                            epub_branched_filename))
    create_link(input_fn=epub_branched_filename,
                output_fn=os.path.join(conf.paths.projectroot,
                                       conf.paths.public_site_output,
                                       epub_src_filename))
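# Illustrative only: a minimal sketch of how the epub filenames above are
# derived from the project title and the current branch. The default values
# are hypothetical stand-ins for conf.project.title and conf.git.branches.current.
def _epub_name_example(title='Example Manual', branch='master'):
    epub_name = '-'.join(title.lower().split())            # 'example-manual'
    branched = epub_name + '-' + branch + '.epub'          # 'example-manual-master.epub'
    source = epub_name + '.epub'                           # 'example-manual.epub'
    return branched, source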
def finalize_dirhtml_build(builder, conf):
    pjoin = os.path.join

    single_html_dir = get_single_html_dir(conf)
    search_page = pjoin(conf.paths.branch_output, builder, 'index.html')

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=pjoin(single_html_dir, 'search.html'))

    dest = pjoin(conf.paths.projectroot, conf.paths.public_site_output)
    command('rsync -a {source}/ {destination}'.format(source=pjoin(conf.paths.projectroot,
                                                                   conf.paths.branch_output,
                                                                   builder),
                                                      destination=dest))

    logger.info('{0}: migrated build to {1}'.format(builder, dest))

    if conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)

        if sitemap_exists is True:
            copy_if_needed(source_file=pjoin(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             'sitemap.xml.gz'),
                           target_file=pjoin(conf.paths.projectroot,
                                             conf.paths.public_site_output,
                                             'sitemap.xml.gz'))

    sconf_path = pjoin(conf.paths.projectroot, conf.paths.builddata, 'sphinx.yaml')
    sconf = ingest_yaml_doc(sconf_path)

    if 'dirhtml' in sconf and 'excluded_files' in sconf['dirhtml']:
        fns = [pjoin(conf.paths.projectroot,
                     conf.paths.public_site_output,
                     fn)
               for fn in sconf['dirhtml']['excluded_files']]

        cleaner(fns)
        logger.info('removed excluded files from dirhtml output directory')
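# Illustrative only: a hypothetical value that ingest_yaml_doc() might return
# for the sphinx.yaml file under conf.paths.builddata when a 'dirhtml' section
# with excluded files is present. The file names are examples, not taken from
# a real project.
_example_sphinx_yaml = {
    'dirhtml': {
        'excluded_files': [
            'meta/404/index.html',
            'draft/index.html',
        ],
    },
}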
def json_output(conf):
    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_list_file = os.path.join(conf.paths.public_site_output,
                                    'json', '.file_list')

    cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'

    json_dst = os.path.join(conf.paths.public_site_output, 'json')

    if not os.path.exists(json_dst):
        logger.debug('created directories for {0}'.format(json_dst))
        os.makedirs(json_dst)

    builder = 'json'
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        builder += '-' + conf.project.edition

    command(cmd.format(src=os.path.join(conf.paths.branch_output, builder) + '/',
                       dst=json_dst))

    copy_if_needed(list_file, public_list_file)

    logger.info('deployed json files to local staging.')
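# Illustrative only: the expanded rsync invocation that json_output() issues,
# assuming a hypothetical branch output of 'build/master', a project edition
# named 'saas', and a public staging path of 'public/json'.
_example_json_rsync = (
    'rsync --recursive --times --delete --exclude="*pickle" '
    '--exclude=".buildinfo" --exclude="*fjson" '
    'build/master/json-saas/ public/json'
)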
def html_tarball(builder, conf):
    copy_if_needed(os.path.join(conf.paths.projectroot,
                                conf.paths.includes, 'hash.rst'),
                   os.path.join(conf.paths.projectroot,
                                conf.paths.branch_output,
                                builder, 'release.txt'))

    basename = os.path.join(conf.paths.projectroot,
                            conf.paths.public_site_output,
                            conf.project.name + '-' + conf.git.branches.current)

    tarball_name = basename + '.tar.gz'
    tarball(name=tarball_name,
            path=builder,
            cdir=os.path.join(conf.paths.projectroot,
                              conf.paths.branch_output),
            newp=os.path.basename(basename))

    create_link(input_fn=os.path.basename(tarball_name),
                output_fn=os.path.join(conf.paths.projectroot,
                                       conf.paths.public_site_output,
                                       conf.project.name + '.tar.gz'))
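# Illustrative only: after html_tarball() runs, the public output directory
# should contain both a branch-versioned archive and a stable alias created by
# create_link(), so downstream consumers can always fetch the latest build
# under a fixed name. The names below assume a hypothetical project
# 'example-docs' built from branch 'v1.0'.
_expected_tarball_outputs = [
    'example-docs-v1.0.tar.gz',  # archive written by tarball()
    'example-docs.tar.gz',       # stable link pointing at the versioned archive
]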
def finalize_dirhtml_build(sconf, conf):
    pjoin = os.path.join

    builder = sconf.builder
    single_html_dir = get_single_html_dir(conf)
    search_page = pjoin(conf.paths.branch_output, builder, 'index.html')

    if os.path.exists(search_page):
        copy_if_needed(source_file=search_page,
                       target_file=pjoin(single_html_dir, 'search.html'))

    dest = pjoin(conf.paths.projectroot, conf.paths.public_site_output)
    m_cmd = command('rsync -a {source}/ {destination}'.format(source=sconf.build_output,
                                                              destination=dest))

    logger.info('"{0}" migrated build from {1} to {2}, with result {3}'.format(sconf.name,
                                                                               sconf.build_output,
                                                                               dest,
                                                                               m_cmd.return_code))

    if 'excluded_files' in sconf:
        fns = [pjoin(conf.paths.projectroot,
                     conf.paths.public_site_output,
                     fn)
               for fn in sconf['excluded_files']]

        cleaner(fns)
        logger.info('removed excluded files from dirhtml output directory')

    if conf.git.branches.current in conf.git.branches.published:
        sitemap_exists = sitemap(config_path=None, conf=conf)

        if sitemap_exists is True:
            copy_if_needed(source_file=pjoin(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             'sitemap.xml.gz'),
                           target_file=pjoin(conf.paths.projectroot,
                                             conf.paths.public_site_output,
                                             'sitemap.xml.gz'))