def main(args): """ Removes build artifacts from ``build/`` directory. """ c = fetch_config(args) app = BuildApp(c) to_remove = set() if c.runstate.git_branch is not None: to_remove.add(os.path.join(c.paths.projectroot, c.paths.branch_output)) if c.runstate.builder != []: for edition, language, builder in get_builder_jobs(c): builder_path = resolve_builder_path(builder, edition, language, c) builder_path = os.path.join(conf.paths.projectroot, conf.paths.branch_output, builder_path) to_remove.add(builder_path) dirpath, base = os.path.split(builder_path) to_remove.add(os.path.join(dirpath, 'doctrees-' + base)) m = 'remove artifacts associated with the {0} builder in {1} ({2}, {3})' logger.debug( m.format(builder, c.git.branches.current, edition, language)) if c.runstate.days_to_save is not None: published_branches = [ 'docs-tools', 'archive', 'public', 'primer', c.git.branches.current ] published_branches.extend(c.git.branches.published) for build in os.listdir( os.path.join(c.paths.projectroot, c.paths.output)): build = os.path.join(c.paths.projectroot, c.paths.output, build) branch = os.path.split(build)[1] if branch in published_branches: continue elif not os.path.isdir(build): continue elif os.stat(build).st_mtime > c.runstate.days_to_save: to_remove.add(build) to_remove.add( os.path.join(c.paths.projectroot, c.paths.output, 'public', branch)) logger.debug( 'removed stale artifacts: "{0}" and "build/public/{0}"'. format(branch)) for fn in to_remove: t = app.add() t.job = rm_rf t.args = fn m = 'removing artifact: {0}'.format(fn) t.description = m logger.critical(m) app.run()
def main(args): """ Removes build artifacts from ``build/`` directory. """ c = fetch_config(args) app = BuildApp.new(pool_type=c.runstate.runner, pool_size=c.runstate.pool_size, force=c.runstate.force) to_remove = set() if c.runstate.git_branch is not None: to_remove.add(os.path.join(c.paths.projectroot, c.paths.branch_output)) if c.runstate.builder != []: for edition, language, builder in get_builder_jobs(c): builder_path = resolve_builder_path(builder, edition, language, c) builder_path = os.path.join(c.paths.projectroot, c.paths.branch_output, builder_path) to_remove.add(builder_path) dirpath, base = os.path.split(builder_path) to_remove.add(os.path.join(dirpath, 'doctrees-' + base)) m = 'remove artifacts associated with the {0} builder in {1} ({2}, {3})' logger.debug(m.format(builder, c.git.branches.current, edition, language)) if c.runstate.days_to_save is not None: published_branches = ['docs-tools', 'archive', 'public', 'primer', c.git.branches.current] published_branches.extend(c.git.branches.published) for build in os.listdir(os.path.join(c.paths.projectroot, c.paths.output)): build = os.path.join(c.paths.projectroot, c.paths.output, build) branch = os.path.split(build)[1] if branch in published_branches: continue elif not os.path.isdir(build): continue elif os.stat(build).st_mtime > c.runstate.days_to_save: to_remove.add(build) to_remove.add(os.path.join(c.paths.projectroot, c.paths.output, 'public', branch)) logger.debug('removed stale artifacts: "{0}" and "build/public/{0}"'.format(branch)) for fn in to_remove: if os.path.isdir(fn): job = shutil.rmtree else: job = os.remove t = app.add('task') t.job = job t.args = fn m = 'removing artifact: {0}'.format(fn) t.description = m logger.critical(m) app.run()

def sphinx_builder_tasks(app, conf):
    for ((edition, language, builder),
         (build_config, sconf)) in get_builder_jobs(conf):
        sphinx_job = sphinx_tasks(sconf, build_config)
        sphinx_job.finalizers = finalize_sphinx_build(sconf, build_config)
        app.extend_queue(sphinx_job)

        logger.info("adding builder job for {0} ({1}, {2})".format(builder, language, edition))

    logger.debug("sphinx build configured, running the build now.")
    app.run()
    logger.debug("sphinx build complete.")

    logger.info('builds finalized. sphinx output and errors to follow')

    # process the sphinx build. These operations allow us to de-duplicate
    # messages between builds.
    results = [o for o in app.results
               if isinstance(o, tuple) and len(o) == 2]

    if len(results) == 0:
        # this happens (rarely) if the deps of the sphinx task do *not*
        # trigger sphinx-build to run.
        output = []
        ret_code = 0
    else:
        # sum all builders' return codes. If they are all zero the sum is
        # zero and we can return success; otherwise, exit.
        ret_code = sum([o[0] for o in results])

        # Depending on the Python version, output may be a str or bytes
        # instance. Assume output is UTF-8 encoded.
        outputs = [o[1] for o in results]
        if outputs and not isinstance(outputs[0], str):
            outputs = [o.decode('utf-8') for o in outputs]

        output = [o.split('\n') for o in outputs if o != '']

    # the empty initializer keeps reduce() from raising when there is no
    # output at all.
    sphinx_output = list(reduce(itertools.chain, output, []))

    try:
        output_sphinx_stream(sphinx_output, conf)
    except Exception:
        logger.error('problem parsing sphinx output, exiting')
        raise SystemExit(1)

    if ret_code != 0:
        raise SystemExit(ret_code)

    return ret_code
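
# A self-contained illustration of how the per-builder outputs collapse into
# a single stream above: each result tuple is (return_code, output_text), the
# splits yield one list of lines per builder, and reduce/chain flattens them
# so output_sphinx_stream can de-duplicate repeated messages. The sample
# strings are made up.
def _flatten_demo():
    results = [(0, 'first builder line\ncommon warning'),
               (0, 'second builder line\ncommon warning')]
    output = [o[1].split('\n') for o in results if o[1] != '']
    # -> ['first builder line', 'common warning',
    #     'second builder line', 'common warning']
    return list(reduce(itertools.chain, output, []))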

def package_build_env(builders, editions, languages, conf):
    # '%s' (seconds since the epoch) is a platform-dependent strftime
    # extension.
    arc_fn = '-'.join(['cache',
                       conf.project.name,
                       conf.git.branches.current,
                       datetime.datetime.utcnow().strftime('%s'),
                       conf.git.commit[:8]]) + ".tar.gz"
    archive_path = os.path.join(conf.paths.buildarchive, arc_fn)

    safe_create_directory(conf.paths.buildarchive)

    existing_archives = os.listdir(conf.paths.buildarchive)

    for arc in existing_archives:
        if conf.git.commit[:8] in arc:
            m = 'archive "{0}" exists for the current git hash, not recreating'
            logger.warning(m.format(archive_path))
            return

    logger.debug("no archive for commit '{0}', continuing".format(conf.git.commit))

    with cd(conf.paths.projectroot):
        files_to_archive = set()

        for ((edition, language, builder),
             (rconf, sconf)) in get_builder_jobs(conf):
            files_to_archive.add(rconf.paths.branch_source)
            files_to_archive.add(os.path.join(rconf.paths.branch_output,
                                              sconf.build_output))
            files_to_archive.add(os.path.join(rconf.paths.branch_output,
                                              '-'.join(('doctrees', sconf.build_output))))
            files_to_archive.add(rconf.system.dependency_cache_fn)

        files_to_archive = list(files_to_archive)
        logger.info('prepped build cache archive. writing file now.')

        for fn in files_to_archive:
            if not os.path.exists(fn):
                raise FileNotFoundError(fn)

        try:
            with tarfile.open(archive_path, 'w:gz') as t:
                for fn in files_to_archive:
                    # ``exclude`` was removed from TarFile.add() in Python
                    # 3.7; see the note after this function for the
                    # ``filter``-based equivalent.
                    t.add(fn, exclude=is_git_dir)

            logger.info("created build-cache archive: " + archive_path)
        except Exception as e:
            os.remove(archive_path)
            logger.critical("failed to create archive: " + archive_path)
            logger.error(e)
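
# ``is_git_dir`` is imported from elsewhere in giza; a plausible sketch
# (hypothetical) of a predicate that keeps .git contents out of the archive:
def _is_git_dir_sketch(path):
    return '.git' in path.split(os.sep)

# On Python >= 3.7, where TarFile.add() no longer accepts ``exclude``, the
# equivalent call uses the ``filter`` callback, which receives a TarInfo
# rather than a path:
#
#     t.add(fn, filter=lambda info: None if is_git_dir(info.name) else info)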

def sphinx_builder_tasks(app, conf):
    for ((edition, language, builder),
         (build_config, sconf)) in get_builder_jobs(conf):
        sphinx_job = sphinx_tasks(sconf, build_config)
        sphinx_job.finalizers = finalize_sphinx_build(sconf, build_config)
        app.extend_queue(sphinx_job)

        logger.info("adding builder job for {0} ({1}, {2})".format(builder, language, edition))

    logger.info("sphinx build configured, running the build now.")
    app.run()
    logger.info("sphinx build complete.")

    logger.info('builds finalized. sphinx output and errors to follow')

    # process the sphinx build. These operations allow us to de-duplicate
    # messages between builds.
    results = [o for o in app.results
               if isinstance(o, tuple) and len(o) == 2]

    if len(results) == 0:
        # this happens (rarely) if the deps of the sphinx task do *not*
        # trigger sphinx-build to run.
        output = []
        ret_code = 0
    else:
        # sum all builders' return codes. If they are all zero the sum is
        # zero and we can return success; otherwise, exit.
        ret_code = sum([o[0] for o in results])

        # filter on the output string, not the (code, output) tuple.
        output = [o[1].split('\n') for o in results if o[1] != '']

    sphinx_output = list(reduce(itertools.chain, output, []))

    try:
        output_sphinx_stream(sphinx_output, conf)
    except Exception:
        logger.error('problem parsing sphinx output, exiting')
        raise SystemExit(1)

    if ret_code != 0:
        raise SystemExit(ret_code)

    return ret_code

def sphinx_publication(c, args, app):
    """
    :arg Configuration c: A :class:`giza.config.main.Configuration()` object.

    :arg RuntimeStateConfig args: A
         :class:`giza.config.runtime.RuntimeState()` object.

    :arg BuildApp app: A :class:`giza.core.app.BuildApp()` object.

    Adds all required tasks to build a Sphinx site. Specifically:

    1. Iterates through the (language * builder * edition) combinations and
       adds tasks to generate the content in the
       ``build/<branch>/source[-edition][-language]`` directory. There is one
       version of the ``build/<branch>/source`` directory for every
       language/edition combination, but multiple builders can use the same
       directory as needed.

    2. Adds a task to run the ``sphinx-build`` task.

    3. Runs all tasks in proper order.

    4. Processes and prints the output of ``sphinx-build``.

    :return: The sum of all return codes from all ``sphinx-build`` tasks. All
             non-zero statuses represent errors.
    :rtype: int
    """

    # sphinx-build tasks are separated into their own app.
    sphinx_app = BuildApp(c)
    sphinx_app.pool = app.pool

    # this loop produces an app for each language/edition/builder combination
    build_source_copies = set()
    for edition, language, builder in get_builder_jobs(c):
        build_config, sconf = get_sphinx_build_configuration(edition, language,
                                                             builder, args)

        # only do these tasks once per language+edition combination
        if build_config.paths.branch_source not in build_source_copies:
            build_source_copies.add(build_config.paths.branch_source)

            prep_app = app.add('app')
            prep_app.conf = build_config

            # this is where we add tasks to transfer the source into the
            # ``build/<branch>/source`` directory.
            source_tasks(build_config, sconf, prep_app)

            # these operation groups each execute in isolation of each other,
            # and should, so that the content generation tasks are created
            # properly.
            build_content_generation_tasks(build_config, prep_app.add('app'))
            refresh_dependency_tasks(build_config, prep_app.add('app'))

            # once the source is prepared, we dump a dict with md5 hashes of
            # all files, so we can do better dependency resolution the next
            # time.
            dump_file_hash_tasks(build_config, prep_app)

            # we transfer images to the latex directory directly because
            # offset images are included using raw latex, and Sphinx doesn't
            # know how to copy images in this case.
            latex_image_transfer_tasks(build_config, sconf, prep_app)

            msg = 'added source tasks for ({0}, {1}, {2}) in {3}'
            logger.info(msg.format(builder, language, edition,
                                   build_config.paths.branch_source))

        # add sphinx tasks for this builder/language/edition combination
        sphinx_tasks(sconf, build_config, sphinx_app)
        logger.info("adding builder job for {0} ({1}, {2})".format(builder, language, edition))

    # connect the sphinx-specific app to the main app.
    app.add(sphinx_app)

    logger.info("sphinx build configured, running the build now.")
    app.run()
    logger.info("sphinx build complete.")

    logger.info('builds finalized. sphinx output and errors to follow')

    # process the sphinx build. These operations allow us to de-duplicate
    # messages between builds.
    sphinx_output = '\n'.join([o[1] for o in sphinx_app.results])
    output_sphinx_stream(sphinx_output, c)

    # if entry points return this value, giza exits with the sum of the
    # Sphinx build return codes.
    ret_code = sum([o[0] for o in sphinx_app.results])
    return ret_code
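
# A hedged sketch of how a caller might drive sphinx_publication. The wiring
# below is hypothetical: the real giza CLI constructs the configuration and
# BuildApp objects through its own entry-point machinery.
def _run_sphinx_publication(args):
    c = fetch_config(args)
    app = BuildApp(c)
    ret_code = sphinx_publication(c, args, app)
    # all non-zero sphinx-build return codes accumulate in ret_code.
    raise SystemExit(ret_code)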