def test_add_existing_app_object(self):
    """Adding an existing BuildApp enqueues that exact object, not a copy."""
    # queue starts out empty
    self.assertEqual(self.app.queue, [])
    child = BuildApp(self.c)
    self.app.add(child)
    # the queued entry is the very same object we handed in...
    self.assertIs(child, self.app.queue[0])
    # ...and distinct from any freshly constructed BuildApp
    self.assertIsNot(child, BuildApp(self.c))
    self.assertIsNot(BuildApp(self.c), self.app.queue[0])
def main(args):
    """
    Removes build artifacts from ``build/`` directory.

    Collects a set of paths to delete -- the branch output directory, each
    requested builder's output and doctree directories, and stale unpublished
    branch builds -- then queues one ``rm_rf`` task per path and runs them.
    """
    c = fetch_config(args)
    app = BuildApp(c)

    to_remove = set()

    if c.runstate.git_branch is not None:
        to_remove.add(os.path.join(c.paths.projectroot, c.paths.branch_output))

    if c.runstate.builder:
        for edition, language, builder in get_builder_jobs(c):
            builder_path = resolve_builder_path(builder, edition, language, c)
            # FIX: this join previously referenced an undefined name ``conf``
            # (raising NameError); the configuration object in scope is ``c``.
            builder_path = os.path.join(c.paths.projectroot,
                                        c.paths.branch_output, builder_path)
            to_remove.add(builder_path)

            # the doctree cache lives beside the builder output directory
            dirpath, base = os.path.split(builder_path)
            to_remove.add(os.path.join(dirpath, 'doctrees-' + base))

            m = 'remove artifacts associated with the {0} builder in {1} ({2}, {3})'
            logger.debug(m.format(builder, c.git.branches.current, edition, language))

    if c.runstate.days_to_save is not None:
        # never remove build output for published/special branches
        published_branches = ['docs-tools', 'archive', 'public', 'primer',
                              c.git.branches.current]
        published_branches.extend(c.git.branches.published)

        for build in os.listdir(os.path.join(c.paths.projectroot, c.paths.output)):
            build = os.path.join(c.paths.projectroot, c.paths.output, build)
            branch = os.path.split(build)[1]

            if branch in published_branches:
                continue
            elif not os.path.isdir(build):
                continue
            elif os.stat(build).st_mtime > c.runstate.days_to_save:
                # NOTE(review): compares a raw mtime (epoch seconds) against
                # ``days_to_save`` -- presumably the caller converts a day
                # count into a timestamp cutoff; confirm upstream.
                to_remove.add(build)
                to_remove.add(os.path.join(c.paths.projectroot, c.paths.output,
                                           'public', branch))
                logger.debug(
                    'removed stale artifacts: "{0}" and "build/public/{0}"'.
                    format(branch))

    # one removal task per path; logged loudly since deletion is destructive
    for fn in to_remove:
        t = app.add()
        t.job = rm_rf
        t.args = fn
        m = 'removing artifact: {0}'.format(fn)
        t.description = m
        logger.critical(m)

    app.run()
def push_translations(args):
    """Queue translation push tasks, refresh translation state, then run."""
    configuration = fetch_config(args)
    worker = BuildApp(configuration)
    push_tasks(configuration, worker)
    update(configuration)
    worker.run()
def images(args):
    """
    Entry point for image generation: either cleans generated images or
    queues image-generation tasks, depending on ``--clean-generated``.
    """
    c = fetch_config(args)
    # FIX: removed a dead ``app = BuildApp(c)`` assignment that was
    # immediately shadowed by the context manager's ``as app`` binding.
    with BuildApp.context(c) as app:
        if c.runstate.clean_generated is True:
            image_clean(c, app)
        else:
            image_tasks(c, app)
def build_translation_model(args):
    """
    Build translation models for each parameter set, aggregate the results,
    and email a CSV summary of the run.

    Resolves the translate configuration (from the project data files or an
    explicit ``--t-translate-config`` path), requires an empty project
    directory, queues one ``build_model`` task per parameter set, then sends
    the aggregated ``data.csv`` via local SMTP.
    """
    conf = fetch_config(args)

    # resolve the translation configuration: project default, explicit file,
    # or error out if the given path does not exist.
    if args.t_translate_config is None:
        tconf = conf.system.files.data.translate
    elif os.path.isfile(args.t_translate_config):
        tconf = TranslateConfig(ingest_yaml_doc(args.t_translate_config), conf)
    else:
        logger.error(args.t_translate_config + " doesn't exist")
        return

    # the project directory must exist, be a directory, and be empty.
    if os.path.exists(tconf.paths.project) is False:
        os.makedirs(tconf.paths.project)
    elif os.path.isfile(tconf.paths.project):
        logger.error(tconf.paths.project + " is a file")
        sys.exit(1)
    elif os.listdir(tconf.paths.project) != []:
        logger.error(tconf.paths.project + " must be empty")
        sys.exit(1)

    # persist the resolved configuration alongside the model output.
    with open(os.path.join(tconf.paths.project, "translate.yaml"), 'w') as f:
        yaml.dump(tconf.dict(), f, default_flow_style=False)

    tconf.conf.runstate.pool_size = tconf.settings.pool_size
    run_args = get_run_args(tconf)
    app = BuildApp(conf)

    # IRSTLM location is consumed by the model-building toolchain.
    os.environ['IRSTLM'] = tconf.paths.irstlm
    setup_train(tconf)
    setup_tune(tconf)
    setup_test(tconf)

    # one build_model task per parameter set; the index and tconf are
    # appended to the positional argument list the job expects.
    for idx, parameter_set in enumerate(run_args):
        parameter_set = list(parameter_set)
        parameter_set.append(idx)
        parameter_set.append(tconf)
        t = app.add()
        t.job = build_model
        t.args = parameter_set
        # parameter_set[9] is used as the model identifier in the description
        t.description = "model_" + str(parameter_set[9])

    app.run()
    aggregate_model_data(tconf.paths.project)

    # email the aggregated results through the local SMTP relay.
    from_addr = "*****@*****.**"
    to_addr = [tconf.settings.email]
    with open(tconf.paths.project + "/data.csv") as data:
        msg = MIMEText(data.read())
        msg['Subject'] = "Model Complete"
        msg['From'] = from_addr
        msg['To'] = ", ".join(to_addr)
    server = smtplib.SMTP("localhost")
    server.sendmail(from_addr, to_addr, msg.as_string())
    server.quit()
def publish_and_deploy(args):
    """
    Run a sphinx publication and, if it succeeded (or the build is forced),
    hand off to the deploy worker.
    """
    c = fetch_config(args)
    app = BuildApp(c)

    sphinx_ret = sphinx_publication(c, args, app)
    if sphinx_ret == 0 or c.runstate.force is True:
        deploy_worker(c, app)
    else:
        # FIX: ``sphinx_ret`` is an int; concatenating it with a str raised
        # TypeError in the failure path. Use str.format instead.
        logger.warning(
            '{0} sphinx build(s) failed, and build not forced. not deploying.'.format(sphinx_ret))
def main(args):
    """
    Use Sphinx to generate build artifacts. Can generate artifacts for
    multiple output types, content editions and translations.
    """
    conf = fetch_config(args)
    builder = BuildApp(conf)
    with Timer("full sphinx build process"):
        return sphinx_publication(conf, args, builder)
def triage(args):
    """Run the jeerah triage query and pretty-print the resulting report."""
    conf = fetch_config(args)
    worker = BuildApp(conf)
    worker.pool = 'thread'

    client = JeerahClient(conf)
    client.connect()

    data = giza.jeerah.triage.query(client, worker, conf)
    pprint(giza.jeerah.triage.report(data, conf))
def test_single_runner_app(self):
    """A nested app holding one summing task yields a single result of 3."""
    self.assertEqual(self.app.queue, [])
    self.assertEqual(self.app.results, [])

    inner = BuildApp()
    task = inner.add('task')
    task.job = sum
    task.args = [[1, 2], 0]

    self.app._run_single(inner)
    self.assertEqual(self.app.results[0], 3)
def create_branch(args):
    """
    Takes a single branch name and (if necessary) creates a new branch. Then,
    populates the ``build/<branch>`` directory for the new branch using either
    the parent branch or ``master``. Safe to run multiple times (after a
    rebase) to update the build cache from master.

    Also calls :method:`~giza.operations.build_env.fix_build_environment()` to
    tweak the new build output to update hashes and on-disk copies of the
    environment to prevent unnecessary full-rebuilds from sphinx.
    """
    conf = fetch_config(args)
    g = GitRepo(conf.paths.projectroot)

    branch = conf.runstate.git_branch
    base_branch = g.current_branch()

    # if we're already on the target branch, seed from master instead of
    # copying the branch onto itself.
    if base_branch == branch:
        base_branch = 'master'
        logger.warning(
            'seeding build data for branch "{0}" from "master"'.format(branch))

    branch_builddir = os.path.join(conf.paths.projectroot,
                                   conf.paths.output, branch)
    base_builddir = os.path.join(conf.paths.projectroot,
                                 conf.paths.output, base_branch)

    # checkout_branch handles both cases; the branch check only selects the
    # log message.
    if g.branch_exists(branch):
        logger.info('checking out branch "{0}"'.format(branch))
    else:
        logger.info(
            'creating and checking out a branch named "{0}"'.format(branch))

    g.checkout_branch(branch)

    # seed the new branch's build cache from the base branch's build output.
    cmd = "rsync -r --times --checksum {0}/ {1}".format(
        base_builddir, branch_builddir)
    logger.info('seeding build directory for "{0}" from "{1}"'.format(
        branch, base_branch))
    command(cmd)
    logger.info('branch creation complete.')

    # get a new config here for the new branch
    conf = fetch_config(args)
    builders = get_existing_builders(conf)

    app = BuildApp(conf)
    app.pool = 'process'

    # fix up hashes/environment copies in the seeded output so sphinx does
    # not force a full rebuild.
    fix_build_env_tasks(builders, conf, app)
    app.run()
def planning(args):
    """Build the planning report from a jeerah progress query."""
    conf = fetch_config(args)
    worker = BuildApp(conf)
    worker.pool = 'thread'

    client = JeerahClient(conf)
    client.connect()

    # NOTE(review): the data comes from ``progress.query`` but is rendered
    # with ``planning.report`` -- looks intentional, but worth confirming.
    data = giza.jeerah.progress.query(client, worker, conf)
    pprint(giza.jeerah.planning.report(data, conf))
def actions(args):
    """Pretty-print the merge-safe pull requests mined from GitHub."""
    conf = fetch_config(args)
    worker = BuildApp(conf)
    gh = get_connection(conf)

    merge_safe = [pull
                  for pull in mine_github_pulls(gh, worker, conf)
                  if pull['merge_safe'] is True]
    pprint(merge_safe)
def main(args):
    """
    Translate make-style targets (e.g. ``html-saas-es``) into sphinx builder,
    edition, language, and deploy settings, then run the sphinx publication
    and/or deployment as appropriate.
    """
    conf = fetch_config(args)
    # each make target is split on '-' into its component parts.
    targets = [t.split('-') for t in args.make_target]

    build_sphinx = True
    deploy_action = []
    sphinx_targets = set()
    for t in targets:
        # determine_workload accumulates deploy actions (mutates
        # deploy_action) and reports the sphinx targets for this target.
        should_build, sp = determine_workload(deploy_action, t, conf)
        sphinx_targets.update(sp)
        # once any target says "don't build", building stays disabled.
        if build_sphinx is True and should_build is False:
            build_sphinx = should_build

    sphinx_targets = list(sphinx_targets)

    # extract editions and languages from the target components.
    editions = []
    languages = []
    for rt in targets:
        for t in rt:
            if t in conf.project.edition_list:
                editions.append(t)
        rtarget = '-'.join(rt)
        if rtarget in conf.system.files.data.integration:
            languages.append(rtarget)

    # default to every edition the project defines (or a single None
    # placeholder) when no target named one explicitly.
    if not editions:
        if len(conf.project.editions) > 0:
            editions = conf.project.edition_list
        else:
            editions = [None]

    if not languages:
        languages = [None]

    cmd = build_reporter(sphinx_targets, deploy_action, build_sphinx,
                         editions, languages, args)
    logger.info('running: ' + cmd)

    # fold the derived settings back into the runtime state so downstream
    # tasks see them.
    args.push_targets = deploy_action
    args.languages_to_build = languages
    args.editions_to_build = editions
    args.builder = sphinx_targets
    conf.runstate = args

    app = BuildApp(conf)

    if build_sphinx:
        sphinx_publication(conf, args, app)

    if deploy_action:
        deploy_worker(conf, app)
def main(args):
    """
    Uploads all build artifacts to the production environment. Does not build
    or render artifacts.
    """
    conf = fetch_config(args)
    uploader = BuildApp(conf)

    deploy_tasks(conf, uploader)

    # honor --dry-run: queue the tasks but never execute them.
    if conf.runstate.dry_run is False:
        uploader.run()
def sphinx_publication(c, args, app):
    """
    Adds all required tasks to build a Sphinx site: one source-preparation
    app per language/edition combination plus one sphinx-build task per
    (edition, language, builder) job, then runs everything and prints the
    de-duplicated sphinx output.

    :return: the sum of all ``sphinx-build`` return codes; any non-zero
       value indicates at least one failed build.
    :rtype: int
    """
    build_prep_tasks(c, app)

    # sphinx-build tasks run in their own app, sharing the parent's pool.
    sphinx_app = BuildApp(c)
    sphinx_app.pool = app.pool

    # this loop will produce an app for each language/edition/builder combination
    build_source_copies = set()
    jobs = itertools.product(args.editions_to_build,
                             args.languages_to_build, args.builder)
    for edition, language, builder in jobs:
        # mutate the runtime state so fetch_config resolves per-combination
        args.language = language
        args.edition = edition
        args.builder = builder

        build_config = fetch_config(args)

        prep_app = app.add('app')
        prep_app.conf = build_config

        primer_app = prep_app.add('app')
        primer_migration_tasks(build_config, primer_app)

        sconf = render_sconf(edition, builder, language, build_config)

        # only prepare each source copy once per language+edition combination
        if build_config.paths.branch_source not in build_source_copies:
            build_source_copies.add(build_config.paths.branch_source)
            source_tasks(build_config, sconf, prep_app)

            source_app = prep_app.add('app')
            build_content_generation_tasks(build_config, source_app)
            refresh_dependency_tasks(build_config, prep_app)

        sphinx_tasks(sconf, build_config, sphinx_app)
        logger.info("adding builder job for {0} ({1}, {2})".format(
            builder, language, edition))

    app.add(sphinx_app)

    logger.info("sphinx build setup, running now.")
    app.run()
    logger.info("sphinx build complete.")
    logger.info('builds finalized. sphinx output and errors to follow')

    sphinx_output = '\n'.join([o[1] for o in sphinx_app.results])
    ret_code = sum([o[0] for o in sphinx_app.results])
    output_sphinx_stream(sphinx_output, c)

    # FIX: the computed return code was previously clobbered with
    # ``ret_code = 0``, so callers always saw success even when builds
    # failed. Return the real sum of sphinx-build return codes instead.
    return ret_code
def test_single_runner_app_with_many_subtasks(self):
    """Ten identical summing tasks produce ten results totalling 30."""
    self.assertEqual(self.app.queue, [])
    self.assertEqual(self.app.results, [])

    inner = BuildApp()
    for _ in range(10):
        task = inner.add('task')
        task.job = sum
        task.args = [[1, 2], 0]

    self.app._run_single(inner)

    self.assertEqual(len(self.app.results), 10)
    self.assertEqual(self.app.results[0], 3)
    self.assertEqual(sum(self.app.results), 30)
def main(args):
    """
    Removes build artifacts: the output and doctree directories of a single
    builder (``--builder-to-delete``) and/or stale, unpublished branch builds
    (``--days-to-save``), queued as ``rm_rf`` tasks and run together.
    """
    c = Configuration()
    c.ingest(args.conf_path)
    c.runstate = args
    app = BuildApp(c)

    to_remove = []
    if c.runstate.builder_to_delete is not None:
        builder = c.runstate.builder_to_delete
        # remove both the doctree cache and the builder's output directory.
        to_remove.append(
            os.path.join(c.paths.branch_output, 'doctrees-' + builder))
        to_remove.append(os.path.join(c.paths.branch_output, builder))
        m = 'remove artifacts associated with the {0} builder in {1}'
        logger.debug(m.format(builder, c.git.branches.current))

    if c.runstate.days_to_save is not None:
        # never remove build output for published/special branches.
        published_branches = [
            'docs-tools', 'archive', 'public', 'primer',
            c.git.branches.current
        ]
        published_branches.extend(c.git.branches.published)

        for build in os.listdir(
                os.path.join(c.paths.projectroot, c.paths.output)):
            build = os.path.join(c.paths.projectroot, c.paths.output, build)
            branch = os.path.split(build)[1]

            if branch in published_branches:
                continue
            elif not os.path.isdir(build):
                continue
            elif os.stat(build).st_mtime > c.runstate.days_to_save:
                # NOTE(review): compares a raw mtime (epoch seconds) against
                # ``days_to_save`` -- presumably the caller converts a day
                # count into a timestamp cutoff; confirm upstream.
                to_remove.append(build)
                to_remove.append(
                    os.path.join(c.paths.projectroot, c.paths.output,
                                 'public', branch))
                logger.debug(
                    'removed stale artifacts: "{0}" and "build/public/{0}"'.
                    format(branch))

    # one removal task per path; logged loudly since deletion is destructive.
    for fn in to_remove:
        t = app.add()
        t.job = rm_rf
        t.args = fn
        m = 'removing artifact: {0}'.format(fn)
        t.description = m
        logger.critical(m)

    app.run()
def run_sphinx(builder, sconf, conf):
    """
    Run a single ``sphinx-build`` invocation for *builder* and then the
    finalize steps, returning ``(return_code, combined_output)``.
    """
    dirpath = os.path.join(conf.paths.projectroot,
                           conf.paths.branch_output, builder)
    if not os.path.exists(dirpath):
        os.makedirs(dirpath)
        logger.info('created directories "{1}" for sphinx builder {0}'.format(
            builder, dirpath))

    # translated builds need their PO files compiled before sphinx runs.
    if 'language' in sconf and sconf.language is not None:
        command('sphinx-intl build --language=' + sconf.language)
        logger.info('compiled all PO files for translated build.')

    logger.info('starting sphinx build {0} at {1}'.format(
        builder, timestamp()))

    cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'  # per-builder-doctree

    sphinx_cmd = cmd.format(
        get_sphinx_args(sconf, conf),
        os.path.join(conf.paths.projectroot, conf.paths.branch_output),
        os.path.basename(sconf.build_output),
        os.path.join(conf.paths.projectroot, conf.paths.branch_source),
        sconf.build_output)

    logger.debug(sphinx_cmd)
    # errors are captured rather than raised; the return code is reported to
    # the caller instead.
    out = command(sphinx_cmd, capture=True, ignore=True)
    # out = sphinx_native_worker(sphinx_cmd)
    logger.info('completed sphinx build {0} at {1}'.format(
        builder, timestamp()))

    # NOTE(review): the success check is deliberately disabled (``if True``),
    # so finalize always runs regardless of the sphinx return code. The
    # intended condition is preserved in the inline comment.
    if True:  # out.return_code == 0:
        logger.info('successfully completed {0} sphinx build at {1}'.format(
            builder, timestamp()))

        # finalize runs in its own non-root app.
        finalizer_app = BuildApp(conf)
        finalizer_app.root_app = False
        finalize_sphinx_build(sconf, conf, finalizer_app)
        finalizer_app.run()
    else:
        logger.warning(
            'the sphinx build {0} was not successful. not running finalize operation'
            .format(builder))

    output = '\n'.join([out.err, out.out])

    return out.return_code, output
def run_sphinx(builder, sconf, conf):
    """
    Run a single timed ``sphinx-build`` invocation for *builder* and then the
    finalize steps, returning ``(return_code, combined_output)``.
    """
    if safe_create_directory(sconf.fq_build_output):
        logger.info('created directory "{1}" for sphinx builder {0}'.format(
            builder, sconf.fq_build_output))

    # translated builds need their PO files compiled before sphinx runs.
    if 'language' in sconf and sconf.language is not None:
        command('sphinx-intl build --language=' + sconf.language)
        logger.info('compiled all PO files for translated build.')

    logger.info('starting sphinx build {0}'.format(builder))

    cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'  # per-builder-doctree

    sphinx_cmd = cmd.format(
        get_sphinx_args(sconf, conf),
        os.path.join(conf.paths.projectroot, conf.paths.branch_output),
        sconf.build_output,
        os.path.join(conf.paths.projectroot, conf.paths.branch_source),
        sconf.fq_build_output)

    logger.debug(sphinx_cmd)
    with Timer("running sphinx build for: {0}, {1}, {2}".format(
            builder, sconf.language, sconf.edition)):
        # errors are captured rather than raised; the return code is
        # reported to the caller instead.
        out = command(sphinx_cmd, capture=True, ignore=True)

    logger.info('completed sphinx build {0}'.format(builder))

    # NOTE(review): the success check is deliberately disabled (``if True``),
    # so finalize always runs regardless of the sphinx return code. The
    # intended condition is preserved in the inline comment.
    if True:  # out.return_code == 0:
        logger.info('successfully completed {0} sphinx build ({1})'.format(
            builder, out.return_code))

        # finalize runs in its own non-root, thread-pooled app.
        finalizer_app = BuildApp(conf)
        finalizer_app.pool = "thread"
        finalizer_app.root_app = False
        finalize_sphinx_build(sconf, conf, finalizer_app)

        with Timer("finalize sphinx {0} build".format(builder)):
            finalizer_app.run()
    else:
        logger.warning(
            'the sphinx build {0} was not successful. not running finalize operation'
            .format(builder))

    output = '\n'.join([out.err, out.out])

    return out.return_code, output
def stats(args):
    """Summarize mined GitHub pulls: total, merge-safe count, and users."""
    conf = fetch_config(args)
    worker = BuildApp(conf)
    gh = get_connection(conf)

    contributors = set()
    summary = {'merge_safe': 0, 'total': 0}

    for pull in mine_github_pulls(gh, worker, conf):
        summary['total'] += 1
        if pull['merge_safe'] is True:
            summary['merge_safe'] += 1
        contributors.add(pull['user'])

    summary['user_count'] = len(contributors)
    summary['users'] = list(contributors)

    pprint(summary)
def publish_and_deploy(args):
    """
    Combines the work of ``giza sphinx`` and ``giza deploy``, to produce
    build artifacts and then upload those artifacts to the servers.
    """
    c = fetch_config(args)
    app = BuildApp(c)

    sphinx_ret = sphinx_publication(c, args, app)
    if sphinx_ret == 0 or c.runstate.force is True:
        deploy_tasks(c, app)

        # honor --dry-run: queue deploy tasks but never execute them.
        if c.runstate.dry_run is False:
            app.run()
    else:
        # FIX: ``sphinx_ret`` is an int; concatenating it with a str raised
        # TypeError in the failure path. Use str.format instead.
        logger.warning(
            '{0} sphinx build(s) failed, and build not forced. not deploying.'.format(sphinx_ret))
def _weak_bootstrapping(args):
    """
    Bootstrap a new project: copy the quickstart skeleton into the current
    directory, run a sphinx publication (retrying once on failure), and
    regenerate the makefile system.
    """
    # FIX: the two attributes previously aliased a single shared list
    # (``a = b = []``); give each its own list so mutating one downstream
    # cannot silently change the other.
    args.languages_to_build = []
    args.editions_to_build = []
    args.builder = 'html'
    conf = fetch_config(args)
    app = BuildApp(conf)

    mod_path = os.path.dirname(inspect.getfile(giza))
    qstart_path = os.path.join(mod_path, 'quickstart')

    # --ignore-existing keeps re-runs from clobbering user edits.
    command('rsync --ignore-existing --recursive {0}/. {1}'.format(
        qstart_path, os.getcwd()))
    logger.info('migrated new site files')

    # deliberate best-effort retry: the first publication can fail on a
    # fresh skeleton, so attempt it once more before giving up.
    # FIX: narrowed the bare ``except:`` (which also swallowed
    # KeyboardInterrupt/SystemExit) to ``except Exception``.
    try:
        sphinx_publication(conf, args, app)
    except Exception:
        logger.exception('initial sphinx publication failed; retrying once')
        sphinx_publication(conf, args, app)

    shutil.rmtree('docs-tools')

    command('python build/docs-tools/makecloth/meta.py build/makefile.meta')
    logger.info('bootstrapped makefile system')

    logger.info('updated project skeleton in current directory.')
def main(args):
    """Entry point: queue and run a full sphinx publication."""
    conf = fetch_config(args)
    builder = BuildApp(conf)
    sphinx_publication(conf, args, builder)
def setUp(self):
    """Provide a bare BuildApp and no configuration object."""
    self.c = None
    self.app = BuildApp()
def setUp(self):
    """Provide a BuildApp backed by a minimal runtime configuration."""
    config = Configuration()
    config.runstate = RuntimeStateConfig()
    self.c = config
    self.app = BuildApp(config)
def mine(args):
    """Pretty-print every pull request mined from GitHub."""
    conf = fetch_config(args)
    worker = BuildApp(conf)
    gh = get_connection(conf)
    pprint(mine_github_pulls(gh, worker, conf))
def main(args):
    """Entry point: hand a fresh BuildApp to the deploy worker."""
    conf = fetch_config(args)
    worker = BuildApp(conf)
    deploy_worker(conf, worker)
def sphinx_publication(c, args, app):
    """
    :arg Configuration c: A :class:`giza.config.main.Configuration()` object.

    :arg RuntimeStateConfig args: A :class:`giza.config.runtime.RuntimeState()` object.

    :arg BuildApp app: A :class:`giza.core.app.BuildApp()` object.

    Adds all required tasks to build a Sphinx site. Specifically:

    1. Iterates through the (language * builder * edition) combination and
       adds tasks to generate the content in the
       <build>/<branch>/source<-edition<-language>> directory. There is one
       version of the <build>/<branch>/source directory for every
       language/edition combination, but multiple builders can use the same
       directory as needed.

    2. Add a task to run the ``sphinx-build`` task.

    3. Run all tasks in proper order.

    4. Process and print the output of ``sphinx-build``.

    :return: The sum of all return codes from all ``sphinx-build`` tasks. All
       non-zero statuses represent errors.

    :rtype: int
    """
    # sphinx-build tasks are separated into their own app.
    sphinx_app = BuildApp(c)
    sphinx_app.pool = app.pool

    # this loop will produce an app for each language/edition/builder combination
    build_source_copies = set()
    for edition, language, builder in get_builder_jobs(c):
        build_config, sconf = get_sphinx_build_configuration(
            edition, language, builder, args)

        # only do these tasks once per-language+edition combination
        if build_config.paths.branch_source not in build_source_copies:
            build_source_copies.add(build_config.paths.branch_source)

            prep_app = app.add('app')
            prep_app.conf = build_config

            # this is where we add tasks to transfer the source into the
            # ``build/<branch>/source`` directory.
            source_tasks(build_config, sconf, prep_app)

            # this function runs the entire prep_app compiled until now, so that
            # the content generation tasks are created properly

            # these operation groups each execute in isolation of each-other
            # and should.
            build_content_generation_tasks(build_config, prep_app.add('app'))
            refresh_dependency_tasks(build_config, prep_app.add('app'))

            # once the source is prepared, we dump a dict with md5 hashes of all
            # files, so we can do better dependency resolution the next time.
            dump_file_hash_tasks(build_config, prep_app)

            # we transfer images to the latex directory directly because offset
            # images are included using raw latex, and Sphinx doesn't know how
            # to copy images in this case.
            latex_image_transfer_tasks(build_config, sconf, prep_app)

            msg = 'added source tasks for ({0}, {1}, {2}) in {3}'
            logger.info(
                msg.format(builder, language, edition,
                           build_config.paths.branch_source))

        # Add sphinx tasks for this builder/language/edition combination
        sphinx_tasks(sconf, build_config, sphinx_app)
        logger.info("adding builder job for {0} ({1}, {2})".format(
            builder, language, edition))

    # Connect the special sphinx app to the main app.
    app.add(sphinx_app)

    logger.info("sphinx build configured, running the build now.")
    app.run()
    logger.info("sphinx build complete.")

    logger.info('builds finalized. sphinx output and errors to follow')

    # process the sphinx build. These operations allow us to de-duplicate
    # messages between builds.
    sphinx_output = '\n'.join([o[1] for o in sphinx_app.results])
    output_sphinx_stream(sphinx_output, c)

    # if entry points return this value, giza will inherit the sum of the
    # Sphinx build return codes.
    ret_code = sum([o[0] for o in sphinx_app.results])

    return ret_code