def toc_tasks(conf):
    """Return a list of Tasks that render table-of-contents output files.

    Iterates the toc source files tracked by ``conf``, producing one task
    per output: a plain toctree, a table-of-contents table, or a definition
    list, depending on the source file's name and spec status.

    :param Configuration conf: the top-level configuration object.
    :returns: list of Task objects ready for scheduling.
    """
    toc_sources = conf.system.content.toc.sources
    tocs = TocDataCache(toc_sources, conf)

    if len(toc_sources) > 0 and not os.path.isdir(conf.system.content.toc.output_dir):
        safe_create_directory(conf.system.content.toc.output_dir)

    tasks = []
    for dep_fn, toc_data in tocs.file_iter():
        deps = [dep_fn]

        # strip the filename prefix: 8 chars for "ref-toc-", 4 for "toc-"
        if 'ref-toc-' in dep_fn:
            base_offset = 8
        else:
            base_offset = 4

        fn_basename = os.path.basename(dep_fn)[base_offset:].replace('yaml', 'rst')

        toc_items = toc_data.ordered_items()

        if toc_data.is_spec() is False:
            out_fn = os.path.join(conf.system.content.toc.output_dir, fn_basename)

            t = Task(job=write_toc_tree_output,
                     target=out_fn,
                     dependency=dep_fn,
                     description="writing toctree to '{0}'".format(out_fn))
            t.args = (out_fn, toc_items)
            tasks.append(t)
        else:
            # spec files pull in additional dependencies
            deps.extend(toc_data.spec_deps())

            if 'ref-toc' in dep_fn:
                out_fn = os.path.join(conf.system.content.toc.output_dir,
                                      hyph_concat('table', fn_basename))
                reft = Task(job=write_toc_table,
                            target=out_fn,
                            dependency=deps,
                            # bug fix: the format string previously had no
                            # placeholder, so out_fn was never included.
                            description="write table of contents generator to '{0}'".format(out_fn))
                reft.args = (out_fn, toc_items)
                tasks.append(reft)
            else:
                out_fn = os.path.join(conf.system.content.toc.output_dir,
                                      hyph_concat('dfn-list', fn_basename))
                dt = Task(job=write_dfn_list_output,
                          target=out_fn,
                          dependency=deps,
                          description="write definition list toc to '{0}'".format(out_fn))
                dt.args = (out_fn, toc_items)
                tasks.append(dt)

    logger.info('added tasks for {0} toc generation tasks'.format(len(tasks)))
    return tasks
def slides_tarball(builder, conf):
    """Tar the slides build output and maintain a stable-named link to it."""
    project_root = conf.paths.projectroot
    build_dir = os.path.join(project_root, conf.paths.branch_output)
    public_dir = os.path.join(project_root, conf.paths.public_site_output)

    # record the build hash alongside the slides output
    copy_if_needed(os.path.join(project_root, conf.paths.includes, 'hash.rst'),
                   os.path.join(build_dir, builder, 'release.txt'))

    basename = os.path.join(public_dir,
                            hyph_concat(conf.project.name,
                                        conf.git.branches.current,
                                        builder))
    tarball_fn = basename + '.tar.gz'
    tarball(name=tarball_fn,
            path=builder,
            cdir=build_dir,
            newp=os.path.basename(basename))

    # refresh the branch-independent "<project>-slides.tar.gz" link
    link_name = os.path.join(public_dir,
                             hyph_concat(conf.project.name, 'slides') + '.tar.gz')
    if os.path.exists(link_name):
        os.remove(link_name)

    create_link(input_fn=os.path.basename(tarball_fn), output_fn=link_name)
def resolve_builder_path(builder, edition, language, conf):
    """Return the absolute output directory for a builder/edition/language combo.

    The edition suffix is omitted when it matches the project name, and the
    language suffix is omitted for English ('en') builds.
    """
    dirname = builder

    if edition is not None and edition != conf.project.name:
        dirname = hyph_concat(dirname, edition)

    if language not in (None, 'en'):
        dirname = hyph_concat(dirname, language)

    return os.path.join(conf.paths.projectroot,
                        conf.paths.branch_output,
                        dirname)
def resolve_builder_path(builder, edition, language, conf):
    """Return the relative directory name for a builder/edition/language combo.

    Unlike the absolute variant, this returns only the hyphenated directory
    name, without joining against the project's branch output path.
    """
    dirname = builder

    if edition is not None and edition != conf.project.name:
        dirname = hyph_concat(dirname, edition)

    if language not in (None, 'en'):
        dirname = hyph_concat(dirname, language)

    return dirname
def package_build_env(builders, editions, languages, conf):
    """Create a tar.gz archive of the build environment for the given
    builders/editions/languages, skipping creation entirely when an archive
    for the current git commit already exists in the build archive directory.
    """
    # archive name: cache-<project>-<branch>-<epoch-seconds>-<short-hash>.tar.gz
    arc_fn = hyph_concat('cache',
                         conf.project.name,
                         conf.git.branches.current,
                         datetime.datetime.utcnow().strftime('%s'),
                         conf.git.commit[:8]) + ".tar.gz"
    archive_path = os.path.join(conf.paths.buildarchive, arc_fn)

    safe_create_directory(conf.paths.buildarchive)

    # dedupe on the short commit hash: any existing archive containing it wins
    existing_archives = os.listdir(conf.paths.buildarchive)
    for arc in existing_archives:
        if conf.git.commit[:8] in arc:
            logger.warning(
                'archive "{0}" exists for current git hash, not recreating'.format(archive_path))
            return

    logger.debug("no archive for commit '{0}' continuing".format(conf.git.commit))

    with cd(conf.paths.projectroot):
        # collect into a set first so duplicate paths across the
        # edition/language/builder product are archived only once
        files_to_archive = set()

        for edition, language, builder in itertools.product(editions, languages, builders):
            # NOTE(review): sconf is unpacked but never used here — confirm intent
            rconf, sconf = get_sphinx_build_configuration(
                edition, language, builder, copy.deepcopy(conf.runstate))
            builder_dirname = resolve_builder_path(builder, edition, language, rconf)

            files_to_archive.add(rconf.paths.branch_source)
            files_to_archive.add(os.path.join(rconf.paths.branch_output, builder_dirname))
            files_to_archive.add(os.path.join(rconf.paths.branch_output,
                                              hyph_concat('doctrees', builder_dirname)))
            files_to_archive.add(rconf.system.dependency_cache_fn)

        files_to_archive = list(files_to_archive)
        logger.info('prepped build cache archive. writing file now.')

        # fail fast before opening the tarball if any input is missing
        for fn in files_to_archive:
            if not os.path.exists(fn):
                raise FileNotFoundError(fn)

        try:
            with tarfile.open(archive_path, 'w:gz') as t:
                for fn in files_to_archive:
                    # NOTE(review): the "exclude" kwarg of tarfile.add() is
                    # deprecated in favor of "filter" — confirm runtime version
                    t.add(fn, exclude=is_git_dir)
            logger.info("created build-cache archive: " + archive_path)
        except Exception as e:
            # remove the partial archive so a broken file is never reused
            os.remove(archive_path)
            logger.critical("failed to create archive: " + archive_path)
            logger.error(e)
def migrate_legacy_apiarg(task, fn, conf, silent=False):
    """Transform a legacy apiargs YAML file into the new format.

    :param str task: either ``'source'`` or ``'branch'``; selects the output
        directory for the migrated file.
    :param str fn: path to the legacy YAML file.
    :param Configuration conf: the top-level configuration object.
    :param bool silent: passed through to ``transform_data``.
    :returns: tuple of ``(new_data, new_fn)`` — the transformed content and
        its destination path.
    :raises ValueError: if ``task`` is not a recognized value.
    """
    legacy_data = ingest_yaml_list(fn)
    new_data, meta = transform_data(
        task, legacy_data,
        # pass the path relative to the branch output directory
        fn[len(os.path.join(conf.paths.projectroot, conf.paths.branch_output)) + 1:],
        silent, conf)

    old_base = os.path.basename(fn)
    # when the filename does not begin with the detected operation, fall
    # back to the first hyphen-delimited component (dropping ".yaml")
    if not old_base.startswith(meta['operation']):
        meta['operation'] = old_base[:-5].split('-', 1)[0]

    # the tag is whatever remains after the operation name
    tag = old_base[:-5][len(meta['operation']) + 1:]
    if tag.startswith('-'):
        tag = tag[1:]
    if tag == 'fields':
        tag = 'field'

    new_fn_base = hyph_concat('apiargs', meta['interface'],
                              meta['operation'], tag) + '.yaml'

    if task == 'source':
        new_fn = os.path.join(conf.paths.projectroot,
                              conf.paths.includes,
                              new_fn_base)
    elif task == 'branch':
        new_fn = os.path.join(conf.paths.projectroot,
                              conf.paths.branch_includes,
                              new_fn_base)
    else:
        # bug fix: previously an unrecognized task fell through and raised
        # UnboundLocalError on "new_fn"; fail with a clear error instead.
        raise ValueError('unknown migration task: ' + str(task))

    return new_data, new_fn
def option_tasks(conf):
    """Build a list of Tasks that render option reference files."""
    option_sources = conf.system.content.options.sources
    data_cache = OptionDataCache(option_sources, conf)

    if len(option_sources) > 0 and not os.path.isdir(conf.system.content.options.output_dir):
        safe_create_directory(conf.system.content.options.output_dir)

    tasks = []
    for dep_fn, option in data_cache.content_iter():
        # options for underscore-prefixed programs are internal: skip them
        if option.program.startswith('_'):
            continue

        out_fn = hyph_concat(option.directive, option.program, option.name) + '.rst'
        output_fn = os.path.join(conf.system.content.options.fn_prefix, out_fn)

        task = Task(job=write_options,
                    description='generating option file "{0}" from "{1}"'.format(output_fn, dep_fn),
                    target=output_fn,
                    dependency=[dep_fn])
        task.args = (option, output_fn, conf)
        tasks.append(task)

    logger.info("added tasks for {0} option generation tasks".format(len(tasks)))
    return tasks
def get_tarball_name(builder, conf):
    """Return the full path of the tarball produced for ``builder``."""
    project = conf.project.name
    branch = conf.git.branches.current

    if builder == 'link-html':
        fn = project + '.tar.gz'
    elif builder == 'link-man':
        fn = "manpages.tar.gz"
    elif builder == 'link-slides':
        fn = hyph_concat(project, 'slides') + '.tar.gz'
    elif builder.startswith('man'):
        fn = hyph_concat('manpages', branch) + '.tar.gz'
    elif builder.startswith('html'):
        fn = hyph_concat(project, branch) + '.tar.gz'
    else:
        fn = hyph_concat(project, branch, builder) + '.tar.gz'

    return os.path.join(conf.paths.projectroot,
                        conf.paths.public_site_output,
                        fn)
def finalize_single_html_tasks(builder, conf, app):
    """Assemble the final single-page HTML artifact and queue tasks on
    ``app`` that copy its static assets into place.

    :raises FileNotFoundError: if no singlehtml source file can be located.
    """
    single_html_dir = get_single_html_dir(conf)
    safe_create_directory(single_html_dir)

    found_src = False
    # sphinx may write output under "<builder>" or "<builder>-<edition>"
    for base_path in (builder, hyph_concat(builder, conf.project.edition)):
        if found_src is True:
            break

        # the entry page may be named either contents.html or index.html
        for fn in [os.path.join(base_path, f) for f in ('contents.html', 'index.html')]:
            src_fn = os.path.join(conf.paths.projectroot,
                                  conf.paths.branch_output, fn)

            if os.path.exists(src_fn):
                manual_single_html(input_file=src_fn,
                                   output_file=os.path.join(single_html_dir, 'index.html'))
                copy_if_needed(
                    source_file=os.path.join(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             base_path, 'objects.inv'),
                    target_file=os.path.join(single_html_dir, 'objects.inv'))
                found_src = True
                break

    if found_src is not True:
        raise FileNotFoundError('singlehtml source file')

    single_path = os.path.join(single_html_dir, '_static')

    # NOTE: src_fn deliberately leaks from the search loop above; at this
    # point it names the source page that was found.
    for fn in expand_tree(os.path.join(os.path.dirname(src_fn), '_static'), None):
        target_fn = os.path.join(single_path, os.path.basename(fn))

        task = app.add('task')
        task.job = copy_if_needed
        task.target = target_fn
        task.dependency = fn
        task.args = [fn, target_fn]
        task.description = "migrating static files to the HTML build"
def update(conf):
    """Upload translation artifacts via sphinx-intl (long running)."""
    logger.info('updating translation artifacts. Long running.')

    name_parts = conf.project.title.lower().split()
    edition = conf.project.edition
    if edition is not None and edition != conf.project.name:
        name_parts.append(edition)
    project_name = hyph_concat(*name_parts)

    logger.info('starting translation upload with sphinx-intl')

    update_txconfig_resources(transifex_project_name=project_name,
                              locale_dir=conf.paths.locale,
                              pot_dir=os.path.join(conf.paths.locale, 'pot'),
                              out=FileLogger(logger))

    logger.info('sphinx-intl: updated pot directory')
def start(args):
    """Start an HTTP server rooted in the build directory."""
    conf = fetch_config(args)

    if conf.runstate.is_publish_target():
        root = conf.paths.public_site_output
    elif conf.runstate.edition is not None:
        root = os.path.join(conf.paths.projectroot,
                            conf.paths.branch_output,
                            hyph_concat(args.builder[0], args.edition))
    else:
        root = os.path.join(conf.paths.projectroot,
                            conf.paths.branch_output,
                            args.builder[0])
    RequestHandler.root = root

    httpd = socket_server.TCPServer(('', conf.runstate.port), RequestHandler)

    logger.info('Hosting {0} at http://localhost:{1}/'.format(RequestHandler.root,
                                                              conf.runstate.port))
    httpd.serve_forever()
def image_clean(conf, app):
    """Queue tasks on ``app`` that remove generated image artifacts."""
    if 'images' not in conf.system.files.data:
        logger.info('no images to clean')
        return

    for image in conf.system.files.data.images:
        base = os.path.join(conf.paths.projectroot,
                            conf.paths.images,
                            image['name'])

        # remove the generated rst stub for this image
        rst_task = app.add('task')
        rst_task.job = verbose_remove
        rst_task.args = dot_concat(base, 'rst')

        # remove each rendered png (tagged variants get a hyphenated suffix)
        for output in image['output']:
            png_task = app.add('task')
            png_task.job = verbose_remove
            if 'tag' in output:
                png_task.args = dot_concat(hyph_concat(base, output['tag']), 'png')
            else:
                png_task.args = dot_concat(base, 'png')
def start(args):
    """Start an HTTP server rooted in the build directory."""
    conf = fetch_config(args)

    if conf.runstate.is_publish_target():
        RequestHandler.root = conf.paths.public_site_output
    else:
        if conf.runstate.edition is not None:
            leaf = hyph_concat(args.builder[0], args.edition)
        else:
            leaf = args.builder[0]
        RequestHandler.root = os.path.join(conf.paths.projectroot,
                                           conf.paths.branch_output,
                                           leaf)

    httpd = socket_server.TCPServer(('', conf.runstate.port), RequestHandler)

    logger.info('Hosting {0} at http://localhost:{1}/'.format(
        RequestHandler.root, conf.runstate.port))
    httpd.serve_forever()
def register(self, builder, language, edition):
    """Record the builder/language/edition selection and apply any matching
    per-target options ('excluded', 'tags', 'languages') from the raw data.

    :param str builder: the sphinx builder name.
    :param str language: the build language (may be None).
    :param str edition: the project edition (may be None).
    """
    self.language = language
    self.builder = builder
    self.edition = edition

    # lookup key is "<builder>" or "<builder>-<edition>"
    if edition is None:
        lookup = self.builder
    else:
        lookup = hyph_concat(self.builder, self.edition)

    self.name = lookup

    # robustness fix: previously self._raw[lookup] raised KeyError for
    # targets with no recorded options; the sibling implementation of this
    # method tolerates missing keys, so do the same here.
    if lookup in self._raw:
        base = self._raw[lookup]
    else:
        base = {}

    for i in ['excluded', 'tags', 'languages']:
        if i in base:
            setattr(self, i, base[i])

    m = 'registered language, builder, and edition options: ({0}, {1}, {2})'
    logger.debug(m.format(language, builder, edition))
def transfer_images(conf, sconf):
    """Copy image files into the latex build directory via rsync.

    Returns False when there is no image directory; otherwise only acts
    for latex builds.
    """
    image_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_images)
    if not os.path.isdir(image_dir):
        return False

    if sconf.builder == 'latex':
        # edition builds write into "<builder>-<edition>"
        if 'edition' in sconf and sconf.edition is not None:
            leaf = hyph_concat(sconf.builder, sconf.edition)
        else:
            leaf = sconf.builder

        builder_dir = os.path.join(conf.paths.projectroot,
                                   conf.paths.branch_output,
                                   leaf)
        safe_create_directory(builder_dir)

        cmd = 'rsync -am --include="*.png" --include="*.jpg" --include="*.eps" --exclude="*" {0}/ {1} '.format(image_dir, builder_dir)
        command(cmd)
        # also copy the parallel "figures" tree; missing source is tolerated
        command(cmd.replace('images', 'figures'), ignore=True)

        logger.info('migrated images for latex build')
def finalize_single_html_tasks(builder, conf, app):
    """Assemble the final single-page HTML artifact and queue tasks on
    ``app`` that copy its static assets into place.

    :raises FileNotFoundError: if no singlehtml source file can be located.
    """
    single_html_dir = get_single_html_dir(conf)

    if not os.path.exists(single_html_dir):
        os.makedirs(single_html_dir)

    found_src = False
    # sphinx may write output under "<builder>" or "<builder>-<edition>"
    for base_path in (builder, hyph_concat(builder, conf.project.edition)):
        if found_src is True:
            break

        # the entry page may be named either contents.html or index.html
        for fn in [os.path.join(base_path, f) for f in ('contents.html', 'index.html')]:
            src_fn = os.path.join(conf.paths.projectroot,
                                  conf.paths.branch_output, fn)

            if os.path.exists(src_fn):
                manual_single_html(input_file=src_fn,
                                   output_file=os.path.join(single_html_dir, 'index.html'))
                copy_if_needed(source_file=os.path.join(conf.paths.projectroot,
                                                        conf.paths.branch_output,
                                                        base_path, 'objects.inv'),
                               target_file=os.path.join(single_html_dir, 'objects.inv'))
                found_src = True
                break

    if found_src is not True:
        raise FileNotFoundError('singlehtml source file')

    single_path = os.path.join(single_html_dir, '_static')

    # NOTE: src_fn deliberately leaks from the search loop above; at this
    # point it names the source page that was found.
    for fn in expand_tree(os.path.join(os.path.dirname(src_fn), '_static'), None):
        target_fn = os.path.join(single_path, os.path.basename(fn))

        task = app.add('task')
        task.job = copy_if_needed
        task.target = target_fn
        task.dependency = fn
        task.args = [fn, target_fn]
        task.description = "migrating static files to the HTML build"
def add(self, doc):
    """Register an example document: collection docs become the shared
    collection object; all other docs become named example cases.

    :param dict doc: the ingested example document.
    :returns: the created ExampleData or ExampleCase object.
    :raises ExampleError: if a case with the same ref already exists.
    """
    if 'collection' in doc:
        self.collection = ExampleData(doc, self.conf)
        if not self.collection.is_resolved():
            self.collection.resolve(self.data)
        return self.collection
    else:
        op = ExampleCase(doc, self.conf)
        # edition-specific cases get an edition-suffixed ref so they do not
        # collide with the base case of the same name
        if 'edition' in op:
            op.ref = hyph_concat(str(op.ref), op.edition)

        if op.ref in self.content:
            m = 'example named {0} already exists'.format(op.ref)
            logger.error(m)
            raise ExampleError(m)
        else:
            self.content[op.ref] = op
            if not op.is_resolved():
                op.resolve(self.data)
            logger.debug('added operation {0}'.format(op.name))
            return op
def register(self, builder, language, edition):
    """Record the builder/language/edition selection and apply any matching
    per-target options ('excluded', 'tags', 'languages') from the raw data.
    """
    self.language = language
    self.builder = builder
    self.edition = edition

    # candidate lookup keys: "<builder>" or "<builder>-<edition>"
    candidates = []
    if edition is None:
        candidates.append(self.builder)
    else:
        candidates.append(hyph_concat(self.builder, self.edition))

    # first candidate present in the raw data wins; otherwise no options
    base = {}
    for candidate in candidates:
        if candidate in self._raw:
            base = self._raw[candidate]
            break

    for attr in ['excluded', 'tags', 'languages']:
        if attr in base:
            setattr(self, attr, base[attr])

    logger.debug(
        'registered language, builder, and edition options: ({0}, {1}, {2})'.format(
            language, builder, edition))
def run_make_operations(targets, conf):
    """
    :param list targets: A list of tuples in the form of ``(<action>,
       [option, option])`` that define build targets.

    :param Configuration conf: The top level configuration object.

    Parses the ``targets`` list and runs tasks defined, including all specified
    sphinx targets, all ``push`` deployment targets, and will create the
    ``env`` packages. Noteworthy behavior:

    - The order of options *except* for the action in the first option is not
      important.

    - If you run ``push`` target with the ``deploy`` option (i.e.
      ``push-deploy`` or ``push-<edition>-deploy``), ``giza`` will *not* run the
      ``publish`` Sphinx build.

    - This interface assumes that all deployment targets (defined in each
      project) begin with ``push-`` or ``stage-``. If you have a project with
      different deployment targets, you will need to call ``giza deploy``
      directly.

    - The ``env`` cache targets take the same options as the Sphinx builders
      and package the environment for only those builders. If you specify
      ``env`` after a Sphinx target, ``giza`` will build the cache for only
      that package.
    """
    # accumulator dicts; membership of a dict in "tasks" (identity-based)
    # decides which workers run in the second phase below
    sphinx_opts = {"worker": sphinx_publication,
                   "languages": set(),
                   "editions": set(),
                   "builders": set()}
    push_opts = {"worker": deploy_tasks, "targets": set(), "type": None}
    packaging_opts = {}

    sphinx_builders = avalible_sphinx_builders()
    deploy_configs = dict_from_list('target', conf.system.files.data.push)

    tasks = []
    for action, options in targets:
        if action in sphinx_builders:
            tasks.append(sphinx_opts)
            add_sphinx_build_options(sphinx_opts, action, options, conf)
        elif action in ('stage', 'push'):
            tasks.append(push_opts)
            push_opts['type'] = action

            # without an explicit "deploy" option, a stage/push implies a
            # full "publish" sphinx build first
            if 'deploy' not in options:
                sphinx_opts['builders'].add('publish')
                tasks.append(sphinx_opts)
                add_sphinx_build_options(sphinx_opts, action, options, conf)
                conf.runstate.fast = False

            if action in deploy_configs:
                push_opts['targets'].add(action)

            # options may name deploy targets directly, or combine with the
            # action as "<action>-<option>"
            for build_option in options:
                deploy_target_name = hyph_concat(action, build_option)

                if build_option in deploy_configs:
                    push_opts['targets'].add(build_option)
                elif deploy_target_name in deploy_configs:
                    push_opts['targets'].add(deploy_target_name)
        elif action.startswith('env'):
            # NOTE(review): this copies sphinx_opts only when packaging_opts
            # is already non-empty, which looks inverted — confirm intent
            if len(packaging_opts) > 0:
                packaging_opts = copy.copy(sphinx_opts)

            packaging_opts['worker'] = env_package_worker
            tasks.append(packaging_opts)
            add_sphinx_build_options(packaging_opts, False, options, conf)
        else:
            logger.error(
                'target: {0} not defined in the make interface'.format(action))

    with BuildApp.context(conf) as app:
        if sphinx_opts in tasks:
            conf.runstate.languages_to_build = list(sphinx_opts['languages'])
            conf.runstate.editions_to_build = list(sphinx_opts['editions'])
            conf.runstate.builder = list(sphinx_opts['builders'])

            if 'publish' in conf.runstate.builder:
                conf.runstate.fast = False

            derive_command('sphinx', conf)

            sphinx_opts['worker'](conf, conf.runstate, app)

        if push_opts in tasks:
            # no explicit targets: derive "<type>[-edition][-lang]" names
            # from every language/edition combination being built
            if len(push_opts['targets']) == 0:
                for lang, edition in itertools.product(
                        conf.runstate.languages_to_build,
                        conf.runstate.editions_to_build):
                    push_target_name = [push_opts['type']]
                    for opt in (edition, lang):
                        if opt is not None:
                            push_target_name.append(opt)
                    push_target_name = '-'.join(push_target_name)
                    push_opts['targets'].add(push_target_name)

            conf.runstate.push_targets = list(push_opts['targets'])
            push_opts['worker'](conf, app)
            derive_command('deploy', conf)

        if packaging_opts in tasks:
            derive_command('env', conf)

            task = app.add('task')
            task.job = env_package_worker
            task.args = (conf.runstate, conf)
            task.target = False
            task.dependency = False
def _get_list_table_output_name(fn):
    """Map a table source filename to its rendered list-table output name."""
    directory, leaf = os.path.split(os.path.splitext(fn)[0])
    # drop the 6-character filename prefix and nest under "table/"
    stem = os.path.join(directory, 'table', leaf[6:])
    return dot_concat(hyph_concat(stem, 'list'), 'rst')
def pdf_tasks(sconf, conf, app):
    """Populate ``app`` with the task graph that post-processes, renders,
    and links the project's PDF outputs."""
    target = sconf.builder
    if 'pdfs' not in conf.system.files.data:
        return

    # post-processing substitutions applied to sphinx's generated tex
    tex_regexes = [
        (re.compile(r'(index|bfcode)\{(.*)--(.*)\}'), r'\1\{\2-\{-\}\3\}'),
        (re.compile(r'\\PYGZsq{}'), "'"),
        (re.compile(
            r'\\code\{/(?!.*{}/|etc|usr|data|var|srv|data|bin|dev|opt|proc|24|private)'),
         r'\code{' + conf.project.url + r'/' + conf.project.tag + r'/')
    ]

    # sub-apps impose ordering: process tex, then render, then link
    process_app = app.add('app')
    render_app = app.add('app')
    migrate_app = app.add('app')  # NOTE(review): never used below — confirm intent
    link_app = app.add('app')

    for i in conf.system.files.data.pdfs:
        if edition_check(i, conf) is False:
            continue

        i = i.dict()

        tagged_name = i['output'][:-4] + '-' + i['tag']
        deploy_fn = tagged_name + '-' + conf.git.branches.current + '.pdf'
        # branch-independent name used for the stable link
        link_name = deploy_fn.replace('-' + conf.git.branches.current, '')

        # edition builds read tex from "<builder>-<edition>"
        if 'edition' in conf.project and conf.project.edition != conf.project.name:
            if 'edition' in i and conf.project.edition != i['edition']:
                continue

            latex_dir = os.path.join(conf.paths.projectroot,
                                     conf.paths.branch_output,
                                     hyph_concat(target, conf.project.edition))
        else:
            latex_dir = os.path.join(conf.paths.projectroot,
                                     conf.paths.branch_output,
                                     target)

        deploy_path = os.path.join(conf.paths.projectroot,
                                   conf.paths.public_site_output)

        i['source'] = os.path.join(latex_dir, i['output'])
        i['processed'] = os.path.join(latex_dir, tagged_name + '.tex')
        i['pdf'] = os.path.join(latex_dir, tagged_name + '.pdf')
        i['deployed'] = os.path.join(deploy_path, deploy_fn)
        i['link'] = os.path.join(deploy_path, link_name)
        i['path'] = latex_dir

        process_page(i['source'], i['processed'], tex_regexes, process_app,
                     builder='tex-munge', copy='ifNeeded')

        render_task = render_app.add('task')
        render_task.dependency = i['processed']
        render_task.target = i['pdf']
        render_task.job = _render_tex_into_pdf
        render_task.args = (i['processed'], i['deployed'], i['path'])

        # only create a stable-named link when it differs from the deploy name
        if i['link'] != i['deployed']:
            link_task = link_app.add('task')
            link_task.dependency = i['deployed']
            link_task.target = i['link']
            link_task.job = create_link
            link_task.args = (deploy_fn, i['link'])
def pdf_tasks(sconf, conf, app):
    """Adds tasks to a BuildApp() to generate all PDFs."""
    target = sconf.builder
    if 'pdfs' not in conf.system.files.data:
        return

    # a list of tuples in (compiledRegex, substitution) format.
    tex_regexes = [
        (re.compile(r'(index|bfcode)\{(.*)--(.*)\}'), r'\1\{\2-\{-\}\3\}'),
        (re.compile(r'\\PYGZsq{}'), "'"),
        (re.compile(
            r'\\code\{/(?!.*{}/|etc|usr|data|var|srv|data|bin|dev|opt|proc|24|private)'),
         r'\code{' + conf.project.url + r'/' + conf.project.tag + r'/')
    ]

    # the ordering of tasks. First post-process the tex generated by sphinx:
    process_app = app.add('app')
    # then convert tex to pdf
    render_app = app.add('app')
    # then migrate to build/public/
    migrate_app = app.add('app')  # NOTE(review): unused below — confirm intent
    # then create symlinks for alternate named files.
    link_app = app.add('app')

    # the paths that sphinx writes tex files to are different for editions.
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        latex_dir = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output,
                                 hyph_concat(target, conf.project.edition))
    else:
        latex_dir = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output,
                                 target)

    deploy_path = os.path.join(conf.paths.projectroot,
                               conf.paths.public_site_output)

    # special case operations on "offset pdfs", which use EPS images.
    if 'tags' in sconf and "offset" in sconf.tags:
        output_format = "dvi"
        sty_file = os.path.join(latex_dir, 'sphinx.sty')
        # rewrite sphinx.sty so graphicx does not force the pdftex driver
        process_page(fn=sty_file,
                     output_fn=sty_file,
                     regex=(re.compile(r'\\usepackage\[pdftex\]\{graphicx\}'),
                            r'\usepackage{graphicx}'),
                     app=process_app,
                     builder='sphinx-latex',
                     copy='ifNeeded')
    else:
        output_format = "pdf"

    for i in conf.system.files.data.pdfs:
        if edition_check(i, conf) is False:
            continue

        # compatibility shim for new/old image specifications
        i = i.dict()

        tagged_name = i['output'][:-4] + '-' + i['tag']
        deploy_fn = tagged_name + '-' + conf.git.branches.current + '.pdf'
        # branch-independent name used for the stable link
        link_name = deploy_fn.replace('-' + conf.git.branches.current, '')

        i['source'] = os.path.join(latex_dir, i['output'])
        i['processed'] = os.path.join(latex_dir, tagged_name + '.tex')
        i['pdf'] = os.path.join(latex_dir, tagged_name + '.pdf')
        i['deployed'] = os.path.join(deploy_path, deploy_fn)
        i['link'] = os.path.join(deploy_path, link_name)
        i['path'] = latex_dir

        # add the processing task
        process_page(i['source'], i['processed'], tex_regexes, process_app,
                     builder='tex-munge', copy='ifNeeded')

        # add task for changing TEX to PDF.
        render_task = render_app.add('task')
        # NOTE(review): dependency tracking is disabled here (was
        # i['processed']) — confirm whether this is intentional.
        render_task.dependency = None
        render_task.target = i['pdf']
        render_task.job = _render_tex_into_pdf
        render_task.args = (i['processed'], i['deployed'], i['path'], output_format)

        # if needed create links.
        if i['link'] != i['deployed']:
            link_task = link_app.add('task')
            link_task.dependency = i['deployed']
            link_task.target = i['link']
            link_task.job = create_link
            link_task.args = (deploy_fn, i['link'])
def build_makefile(m, conf):
    """Emit Makefile targets that bridge 'make' invocations to giza commands:
    content generation, sphinx builds (per edition/language), deploy, publish,
    and push targets.

    :param m: a Makefile builder object (``target``/``job``/``newline``/etc.).
    :param Configuration conf: the top level configuration object.
    :returns: the populated Makefile builder ``m``.
    """
    m.section_break('giza build integration')
    m.newline()
    m.section_break('content generation targets')

    # each generator gets a plain, a "giza-" prefixed, and "force" variants
    for gen_target in ['api', 'assets', 'images', 'intersphinx', 'options',
                       'primer', 'steps', 'tables', 'toc']:
        m.target([gen_target, hyph_concat('giza', gen_target)])
        m.job('giza generate ' + gen_target)

        m.target([hyph_concat('force', gen_target),
                  hyph_concat('giza', 'force', gen_target)])
        m.job('giza --force generate ' + gen_target)
        m.newline()

    m.section_break('sphinx targets')
    sconf = ingest_yaml_doc(os.path.join(conf.paths.projectroot,
                                         conf.paths.builddata,
                                         'sphinx.yaml'))

    # every top-level key is a builder except the bookkeeping entries
    builders = [b for b in sconf
                if not b.endswith('base') and b not in
                ('prerequisites', 'generated-source', 'languages',
                 'editions', 'sphinx_builders')]

    if 'editions' in sconf:
        editions = sconf['editions']
    else:
        editions = []

    if 'root-base' in sconf and 'languages' in sconf['root-base']:
        languages = sconf['root-base']['languages']
    else:
        languages = []

    complete = []

    for builder in builders:
        # "html-foo" style keys collapse to their base builder name
        if '-' in builder:
            builder = builder.split('-')[0]

        if builder in complete:
            continue

        m.comment(builder + ' targets')

        for edition in editions:
            m.target([hyph_concat(builder, edition),
                      hyph_concat('giza', builder, edition)])
            m.job('giza sphinx --builder {0} --edition {1}'.format(builder, edition))

            for language in languages:
                m.target([hyph_concat(builder, edition, language),
                          hyph_concat('giza', builder, edition, language)])
                m.job('giza sphinx --builder {0} --edition {1} --language {2}'.format(
                    builder, edition, language))

        if len(editions) == 0:
            m.target([hyph_concat(builder), hyph_concat('giza', builder)])
            m.job('giza sphinx --builder ' + builder)

            for language in languages:
                m.target([hyph_concat(builder, language),
                          hyph_concat('giza', builder, language)])
                m.job('giza sphinx --builder {0} --language {1}'.format(builder, language))
        else:
            # the bare builder target builds all editions at once
            m.target([hyph_concat(builder), hyph_concat('giza', builder)])
            m.job('giza sphinx --builder {0} --edition {1}'.format(
                builder, ' '.join(editions)))

        m.newline()
        complete.append(builder)

    m.section_break('deploy targets')
    if 'push' in conf.system.files.data:
        for ptarget in conf.system.files.data.push:
            name = ptarget['target']
            m.target(hyph_concat('deploy', name))
            m.job('giza deploy --target ' + name)
            m.newline()

    m.section_break('integration and publish targets')
    m.target(['giza-publish', 'publish'])

    base_job = 'giza sphinx --builder publish'
    if len(editions) > 0:
        base_job += " --serial_sphinx --edition " + ' '.join(editions)

    m.job(base_job)
    m.newline()

    for lang in languages:
        m.target([hyph_concat('publish', lang),
                  hyph_concat('giza', 'publish', lang)])
        m.job(base_job + ' --language ' + lang)
        m.newline()

    # following targets build a group of sphinx targets followed by running
    # one or more deploy actions.
    m.section_break('push targets')
    if 'push' in conf.system.files.data:
        for ptarget in conf.system.files.data.push:
            push_base_job = 'giza push --deploy {0} --builder publish'.format(
                ptarget['target'])

            if len(editions) > 0:
                push_base_job += " --serial_sphinx --edition " + ' '.join(editions)

            m.target([ptarget['target'], hyph_concat('giza', ptarget['target'])])
            m.job(push_base_job)
            m.newline()

            for lang in languages:
                m.target([hyph_concat(ptarget['target'], lang),
                          hyph_concat('giza', ptarget['target'], lang)])
                m.job(push_base_job + ' --language ' + lang)
                m.newline()

    return m
def pdf_tasks(sconf, conf, app):
    """Add PDF build tasks (clean, cache, render, migrate, link) to ``app``."""
    target = sconf.builder
    if 'pdfs' not in conf.system.files.data:
        return

    # PDF rendering tasks are I/O heavy; run them on the thread pool
    app.pool = 'thread'

    # post-processing substitutions applied to sphinx's generated tex
    tex_regexes = [
        (re.compile(r'(index|bfcode)\{(.*)--(.*)\}'), r'\1\{\2-\{-\}\3\}'),
        (re.compile(r'\\PYGZsq{}'), "'"),
        (re.compile(r'\\code\{/(?!.*{}/|etc|usr|data|var|srv)'),
         r'\code{' + conf.project.url + r'/' + conf.project.tag)
    ]

    # sub-apps impose ordering: clean, cache, render, migrate, then link
    clean_app = app.add('app')
    cache_app = app.add('app')
    render_app = app.add('app')
    migrate_app = app.add('app')
    link_app = app.add('app')

    for i in conf.system.files.data.pdfs:
        if edition_check(i, conf) is False:
            continue

        i = i.dict()

        tagged_name = i['output'][:-4] + '-' + i['tag']
        deploy_fn = tagged_name + '-' + conf.git.branches.current + '.pdf'
        # branch-independent name used for the stable link
        link_name = deploy_fn.replace('-' + conf.git.branches.current, '')

        # edition builds read tex from "<builder>-<edition>"
        if 'edition' in conf.project and conf.project.edition != conf.project.name:
            if 'edition' in i and conf.project.edition != i['edition']:
                continue

            latex_dir = os.path.join(conf.paths.branch_output,
                                     hyph_concat(target, conf.project.edition))
        else:
            latex_dir = os.path.join(conf.paths.branch_output, target)

        deploy_path = conf.paths.public_site_output

        i['source'] = os.path.join(latex_dir, i['output'])
        i['processed'] = os.path.join(latex_dir, tagged_name + '.tex')
        i['pdf'] = os.path.join(latex_dir, tagged_name + '.pdf')
        i['deployed'] = os.path.join(deploy_path, deploy_fn)
        i['link'] = os.path.join(deploy_path, link_name)
        i['path'] = latex_dir

        # apply the tex substitutions in place
        clean_task = clean_app.add('task')
        clean_task.target = i['source']
        clean_task.job = _clean_sphinx_latex
        clean_task.args = (i['source'], tex_regexes)

        # snapshot the cleaned tex under the tagged name
        cache_task = cache_app.add('task')
        cache_task.dependency = i['source']
        cache_task.target = i['processed']
        cache_task.job = copy_if_needed
        cache_task.args = (i['source'], i['processed'], 'pdf')

        # render tex into pdf
        render_task = render_app.add('task')
        render_task.dependency = i['processed']
        render_task.target = i['pdf']
        render_task.job = _render_tex_into_pdf
        render_task.args = (i['processed'], i['path'])

        # copy the pdf into the public site output
        migrate_task = migrate_app.add('task')
        migrate_task.dependency = i['pdf']
        migrate_task.target = i['deployed']
        migrate_task.job = copy_if_needed
        migrate_task.args = (i['pdf'], i['deployed'], 'pdf')

        # only create a stable-named link when it differs from the deploy name
        if i['link'] != i['deployed']:
            link_task = link_app.add('task')
            link_task.dependency = i['deployed']
            link_task.target = i['link']
            link_task.job = create_link
            link_task.args = (deploy_fn, i['link'])
def locale(self):
    """Return the locale name, suffixed with the edition for edition builds."""
    edition = self.conf.project.edition
    if edition is None or edition == self.conf.project.name:
        return self.state['locale']
    return hyph_concat(self.state['locale'], edition)
def _get_list_table_output_name(fn):
    """Compute the ``table/<name>-list.rst`` output path for a source file."""
    root = os.path.splitext(fn)[0]
    base, leaf = os.path.split(root)
    # drop the 6-character filename prefix and nest under "table/"
    listed = hyph_concat(os.path.join(base, 'table', leaf[6:]), 'list')
    return dot_concat(listed, 'rst')