def examples(args):
    """Render example/steps content for the current project."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    example_tasks(cfg, builder)
    builder.run()
def mine(args):
    """Mine pull-request data from GitHub and pretty-print the result."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    # network-bound work: a thread pool overlaps the API calls
    builder.pool = 'thread'
    connection = get_connection(cfg)
    pprint(mine_github_pulls(connection, builder, cfg))
class TestBuildAppStandardConfig(CommonAppSuite, TestCase):
    """Verify that BuildApp propagates its Configuration to queued items."""

    def setUp(self):
        # BUG FIX: setUp was decorated with @classmethod, which attached the
        # fixture objects to the class and shared them between test methods.
        # unittest expects a plain instance method so each test gets a fresh
        # Configuration and BuildApp.
        self.c = Configuration()
        self.c.runstate = RuntimeStateConfig()
        self.app = BuildApp(self.c)

    def test_conf_objet_consistent_in_task(self):
        self.assertEqual(self.app.queue, [])
        t = self.app.add('task')
        self.assertIs(self.c, t.conf)
        self.assertIs(self.c, self.app.queue[0].conf)

    def test_conf_objet_consistent_in_app(self):
        self.assertEqual(self.app.queue, [])
        app = self.app.add('app')
        self.assertIs(self.c, app.conf)
        self.assertIs(self.c, self.app.queue[0].conf)

    def test_conf_objet_consistent_in_new_task(self):
        self.assertEqual(self.app.queue, [])
        t = Task()
        self.assertIsNone(t.conf)
        self.app.add(t)
        self.assertIsNotNone(t.conf)
        self.assertIs(self.c, self.app.queue[0].conf)
        self.assertIs(self.c, t.conf)
def main(args):
    """
    Removes build artifacts from ``build/`` directory.
    """
    c = fetch_config(args)
    app = BuildApp(c)
    to_remove = set()

    if c.runstate.git_branch is not None:
        to_remove.add(os.path.join(c.paths.projectroot, c.paths.branch_output))

    if c.runstate.builder != []:
        for edition, language, builder in get_builder_jobs(c):
            builder_path = resolve_builder_path(builder, edition, language, c)
            # BUG FIX: this previously read ``conf.paths.projectroot`` /
            # ``conf.paths.branch_output`` -- ``conf`` is undefined in this
            # function, so any run with a builder raised NameError.
            builder_path = os.path.join(c.paths.projectroot,
                                        c.paths.branch_output, builder_path)
            to_remove.add(builder_path)
            dirpath, base = os.path.split(builder_path)
            to_remove.add(os.path.join(dirpath, 'doctrees-' + base))
            m = 'remove artifacts associated with the {0} builder in {1} ({2}, {3})'
            logger.debug(
                m.format(builder, c.git.branches.current, edition, language))

    if c.runstate.days_to_save is not None:
        published_branches = ['docs-tools', 'archive', 'public', 'primer',
                              c.git.branches.current]
        published_branches.extend(c.git.branches.published)

        for build in os.listdir(
                os.path.join(c.paths.projectroot, c.paths.output)):
            build = os.path.join(c.paths.projectroot, c.paths.output, build)
            branch = os.path.split(build)[1]

            if branch in published_branches:
                continue
            elif not os.path.isdir(build):
                continue
            elif os.stat(build).st_mtime > c.runstate.days_to_save:
                # NOTE(review): compares a raw st_mtime (epoch seconds)
                # against ``days_to_save`` -- confirm the runstate value is
                # an absolute timestamp cutoff, not a day count.
                to_remove.add(build)
                to_remove.add(
                    os.path.join(c.paths.projectroot, c.paths.output,
                                 'public', branch))
                logger.debug(
                    'removed stale artifacts: "{0}" and "build/public/{0}"'.
                    format(branch))

    for fn in to_remove:
        t = app.add()
        t.job = rm_rf
        t.args = fn
        m = 'removing artifact: {0}'.format(fn)
        t.description = m
        logger.critical(m)

    app.run()
def test_add_existing_app_object(self):
    """Adding a pre-built BuildApp enqueues that exact object."""
    self.assertEqual(self.app.queue, [])
    child = BuildApp(self.c)
    self.app.add(child)
    self.assertIs(child, self.app.queue[0])
    self.assertIsNot(child, BuildApp(self.c))
    self.assertIsNot(BuildApp(self.c), self.app.queue[0])
def push_translations(args):
    """Upload translation artifacts and refresh the translation state."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    push_tasks(cfg, builder)
    update(cfg)
    builder.run()
def robots(args):
    """Generate the project's robots.txt artifacts."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    # robots.txt generation is trivial; run it serially
    builder.pool = 'serial'
    robots_txt_tasks(cfg, builder)
    builder.run()
def primer(args):
    """Migrate (or clean) primer content for the project."""
    cfg = fetch_config(args)
    if cfg.runstate.clean_generated is True:
        primer_clean(cfg)
        return
    builder = BuildApp(cfg)
    primer_migration_tasks(cfg, builder)
    builder.run()
def build_translation_model(args):
    """
    Train a translation model from the project's translate configuration.

    Resolves the translate config (project data file or an explicit YAML
    path), prepares an empty project directory, fans the parameter grid out
    across a BuildApp, aggregates the resulting model data, and emails a
    summary report.
    """
    conf = fetch_config(args)

    # Resolve the translate configuration.
    if args.t_translate_config is None:
        tconf = conf.system.files.data.translate
    elif os.path.isfile(args.t_translate_config):
        tconf = TranslateConfig(ingest_yaml_doc(args.t_translate_config), conf)
    else:
        logger.error(args.t_translate_config + " doesn't exist")
        return

    # The project directory must exist and be empty before training.
    # IMPROVED: use truthiness idioms instead of ``is False`` / ``!= []``.
    if not os.path.exists(tconf.paths.project):
        os.makedirs(tconf.paths.project)
    elif os.path.isfile(tconf.paths.project):
        logger.error(tconf.paths.project + " is a file")
        sys.exit(1)
    elif os.listdir(tconf.paths.project):
        logger.error(tconf.paths.project + " must be empty")
        sys.exit(1)

    # Record the effective configuration alongside the model output.
    with open(os.path.join(tconf.paths.project, "translate.yaml"), 'w') as f:
        yaml.dump(tconf.dict(), f, default_flow_style=False)

    tconf.conf.runstate.pool_size = tconf.settings.pool_size
    run_args = get_run_args(tconf)
    app = BuildApp(conf)
    os.environ['IRSTLM'] = tconf.paths.irstlm

    setup_train(tconf)
    setup_tune(tconf)
    setup_test(tconf)

    # One task per parameter set; the index and tconf are appended so
    # build_model can identify its run.
    for idx, parameter_set in enumerate(run_args):
        parameter_set = list(parameter_set)
        parameter_set.append(idx)
        parameter_set.append(tconf)
        t = app.add()
        t.job = build_model
        t.args = parameter_set
        # parameter_set[9] -- presumably the run identifier within the
        # grid; TODO confirm against get_run_args().
        t.description = "model_" + str(parameter_set[9])

    app.run()
    aggregate_model_data(tconf.paths.project)

    # Email the aggregated results to the configured recipient.
    from_addr = "*****@*****.**"
    to_addr = [tconf.settings.email]
    # IMPROVED: build the report path with os.path.join, not concatenation.
    with open(os.path.join(tconf.paths.project, "data.csv")) as data:
        msg = MIMEText(data.read())
    msg['Subject'] = "Model Complete"
    msg['From'] = from_addr
    msg['To'] = ", ".join(to_addr)
    server = smtplib.SMTP("localhost")
    server.sendmail(from_addr, to_addr, msg.as_string())
    server.quit()
def images(args):
    """Generate (or clean) image build artifacts."""
    c = fetch_config(args)
    # BUG FIX: a throwaway ``BuildApp(c)`` was previously constructed here
    # and immediately shadowed by the context manager's app, so it was
    # never used or run.
    with BuildApp.context(c) as app:
        if c.runstate.clean_generated is True:
            image_clean(c, app)
        else:
            image_tasks(c, app)
def build_translation_model(args):
    """
    Train a translation model from the project's translate configuration,
    then aggregate the model data and email a completion report.

    NOTE(review): this function duplicates another build_translation_model
    in this codebase -- consider consolidating.
    """
    conf = fetch_config(args)

    if args.t_translate_config is None:
        tconf = conf.system.files.data.translate
    elif os.path.isfile(args.t_translate_config):
        tconf = TranslateConfig(ingest_yaml_doc(args.t_translate_config), conf)
    else:
        logger.error(args.t_translate_config + " doesn't exist")
        return

    # Require an empty project directory for the training output.
    # IMPROVED: truthiness idioms replace ``is False`` and ``!= []``.
    if not os.path.exists(tconf.paths.project):
        os.makedirs(tconf.paths.project)
    elif os.path.isfile(tconf.paths.project):
        logger.error(tconf.paths.project + " is a file")
        sys.exit(1)
    elif os.listdir(tconf.paths.project):
        logger.error(tconf.paths.project + " must be empty")
        sys.exit(1)

    # Persist the resolved configuration next to the outputs.
    with open(os.path.join(tconf.paths.project, "translate.yaml"), 'w') as f:
        yaml.dump(tconf.dict(), f, default_flow_style=False)

    tconf.conf.runstate.pool_size = tconf.settings.pool_size
    run_args = get_run_args(tconf)
    app = BuildApp(conf)
    os.environ['IRSTLM'] = tconf.paths.irstlm

    setup_train(tconf)
    setup_tune(tconf)
    setup_test(tconf)

    # Queue one build_model task per parameter set.
    for idx, parameter_set in enumerate(run_args):
        parameter_set = list(parameter_set)
        parameter_set.append(idx)
        parameter_set.append(tconf)
        t = app.add()
        t.job = build_model
        t.args = parameter_set
        # parameter_set[9] -- presumably a run identifier; TODO confirm.
        t.description = "model_" + str(parameter_set[9])

    app.run()
    aggregate_model_data(tconf.paths.project)

    # Email the aggregated CSV to the configured recipient.
    from_addr = "*****@*****.**"
    to_addr = [tconf.settings.email]
    # IMPROVED: os.path.join instead of string concatenation.
    with open(os.path.join(tconf.paths.project, "data.csv")) as data:
        msg = MIMEText(data.read())
    msg['Subject'] = "Model Complete"
    msg['From'] = from_addr
    msg['To'] = ", ".join(to_addr)
    server = smtplib.SMTP("localhost")
    server.sendmail(from_addr, to_addr, msg.as_string())
    server.quit()
def toc(args):
    """Generate (or clean) table-of-contents content."""
    cfg = fetch_config(args)
    if cfg.runstate.clean_generated is True:
        toc_clean(cfg)
        return
    builder = BuildApp(cfg)
    toc_tasks(cfg, builder)
    builder.run()
def options(args):
    """Generate (or clean) option reference content."""
    cfg = fetch_config(args)
    if cfg.runstate.clean_generated is True:
        option_clean(cfg)
        return
    builder = BuildApp(cfg)
    option_tasks(cfg, builder)
    builder.run()
def intersphinx(args):
    """Fetch (or clean) intersphinx inventories."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    if cfg.runstate.clean_generated is True:
        intersphinx_clean(cfg, builder)
    else:
        intersphinx_tasks(cfg, builder)
    builder.run()
def redirects(args):
    """Generate redirect rules, or print them on a dry run."""
    cfg = fetch_config(args)
    if args.dry_run is True:
        # dry run: emit the rules without touching the filesystem
        print("".join(make_redirect(cfg)))
        return
    builder = BuildApp(cfg)
    redirect_tasks(cfg, builder)
    builder.run()
def assets(args):
    """Update (or clean) external asset repositories."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    if cfg.runstate.clean_generated is True:
        assets_clean(cfg, builder)
    else:
        assets_tasks(cfg, builder)
    builder.run()
def images(args):
    """Generate (or clean) image build artifacts."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    if cfg.runstate.clean_generated is True:
        image_clean(cfg, builder)
    else:
        image_tasks(cfg, builder)
    builder.run()
def test_single_runner_app(self):
    """A nested app run through _run_single reports its task's result."""
    self.assertEqual(self.app.queue, [])
    self.assertEqual(self.app.results, [])
    inner = BuildApp()
    task = inner.add('task')
    task.job = sum
    task.args = [[1, 2], 0]
    self.app._run_single(inner)
    self.assertEqual(self.app.results[0], 3)
def triage(args):
    """Query Jira for triage data and pretty-print the report."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    builder.pool = 'thread'
    client = JeerahClient(cfg)
    client.connect()
    query_data = giza.jeerah.triage.query(client, builder, cfg)
    pprint(giza.jeerah.triage.report(query_data, cfg))
def progress(args):
    """Query Jira for sprint progress data and pretty-print the report."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    builder.pool = 'thread'
    client = JeerahClient(cfg)
    client.connect()
    query_data = giza.jeerah.progress.query(client, builder, cfg)
    pprint(giza.jeerah.progress.report(query_data, cfg))
def planning(args):
    """Query Jira progress data and pretty-print the planning report."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    builder.pool = 'thread'
    client = JeerahClient(cfg)
    client.connect()
    # planning reuses the progress query, with its own report formatter
    query_data = giza.jeerah.progress.query(client, builder, cfg)
    pprint(giza.jeerah.planning.report(query_data, cfg))
def create_branch(args):
    """
    Takes a single branch name and (if necessary) creates a new branch.
    Then, populates the ``build/<branch>`` directory for the new branch
    using either the parent branch or ``master``. Safe to run multiple
    times (after a rebase) to update the build cache from master.

    Also calls :method:`~giza.operations.build_env.fix_build_environment()`
    to tweak the new build output to update hashes and on-disk copies of
    the environment to prevent unnecessary full-rebuilds from sphinx.
    """
    conf = fetch_config(args)
    repo = GitRepo(conf.paths.projectroot)

    branch = conf.runstate.git_branch
    base_branch = repo.current_branch()

    if base_branch == branch:
        # seeding from the branch itself is meaningless; fall back to master
        base_branch = 'master'
        logger.warning(
            'seeding build data for branch "{0}" from "master"'.format(branch))

    branch_builddir = os.path.join(conf.paths.projectroot,
                                   conf.paths.output, branch)
    base_builddir = os.path.join(conf.paths.projectroot,
                                 conf.paths.output, base_branch)

    if repo.branch_exists(branch):
        logger.info('checking out branch "{0}"'.format(branch))
    else:
        logger.info(
            'creating and checking out a branch named "{0}"'.format(branch))

    repo.checkout_branch(branch)

    cmd = "rsync -r --times --checksum {0}/ {1}".format(base_builddir,
                                                        branch_builddir)
    logger.info('seeding build directory for "{0}" from "{1}"'.format(
        branch, base_branch))
    command(cmd)
    logger.info('branch creation complete.')

    # get a new config here for the new branch
    conf = fetch_config(args)

    builders = get_existing_builders(conf)
    app = BuildApp(conf)
    app.pool = 'process'
    fix_build_env_tasks(builders, conf, app)
    app.run()
def test_single_runner_app(self):
    """Running a nested configured app via _run_single collects its result."""
    self.assertEqual(self.app.queue, [])
    self.assertEqual(self.app.results, [])
    inner = BuildApp(self.c)
    task = inner.add('task')
    task.job = sum
    task.args = [[1, 2], 0]
    self.app._run_single(inner)
    self.assertEqual(self.app.results[0], 3)
def actions(args):
    """Pretty-print the merge-safe pull requests mined from GitHub."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    builder.pool = 'thread'
    gh = get_connection(cfg)
    # keep only pulls explicitly flagged merge-safe
    results = [pull for pull in mine_github_pulls(gh, builder, cfg)
               if pull['merge_safe'] is True]
    pprint(results)
def main(args):
    """
    Uploads all build artifacts to the production environment. Does not
    build or render artifacts.
    """
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    deploy_tasks(cfg, builder)
    # a dry run queues the work but never executes it
    if cfg.runstate.dry_run is False:
        builder.run()
def sphinx_publication(c, args, app):
    """
    Run the full sphinx build pipeline for every requested
    (edition, language, builder) combination.

    Queues prep, source-staging, and content-generation tasks per
    combination, runs the shared sphinx app, and streams the collected
    sphinx output. Always returns 0 (see note at the bottom).
    """
    build_prep_tasks(c, app)

    # this loop will produce an app for each language/edition/builder combination
    build_source_copies = set()
    sphinx_app = BuildApp(c)
    sphinx_app.pool = app.pool

    jobs = itertools.product(args.editions_to_build,
                             args.languages_to_build,
                             args.builder)
    for edition, language, builder in jobs:
        args.language = language
        args.edition = edition
        args.builder = builder

        build_config = fetch_config(args)

        prep_app = app.add('app')
        prep_app.conf = build_config

        primer_app = prep_app.add('app')
        primer_migration_tasks(build_config, primer_app)

        sconf = render_sconf(edition, builder, language, build_config)

        # only stage each distinct source tree once, no matter how many
        # builder combinations share it
        if build_config.paths.branch_source not in build_source_copies:
            build_source_copies.add(build_config.paths.branch_source)
            source_tasks(build_config, sconf, prep_app)

            source_app = prep_app.add('app')
            build_content_generation_tasks(build_config, source_app)
            refresh_dependency_tasks(build_config, prep_app)

        sphinx_tasks(sconf, build_config, sphinx_app)
        logger.info("adding builder job for {0} ({1}, {2})".format(
            builder, language, edition))

    app.add(sphinx_app)

    logger.info("sphinx build setup, running now.")
    app.run()
    logger.info("sphinx build complete.")

    logger.info('builds finalized. sphinx output and errors to follow')

    sphinx_output = '\n'.join([o[1] for o in sphinx_app.results])
    output_sphinx_stream(sphinx_output, c)

    # IMPROVED: removed a dead ``ret_code = sum(...)`` assignment that was
    # immediately overwritten below. NOTE(review): the per-builder return
    # codes in sphinx_app.results are deliberately ignored and success is
    # always reported -- confirm whether failures should propagate.
    ret_code = 0

    return ret_code
def test_single_runner_app_with_many_subtasks(self):
    """_run_single collects a result for each of a nested app's tasks."""
    self.assertEqual(self.app.queue, [])
    self.assertEqual(self.app.results, [])
    inner = BuildApp(self.c)
    for _ in range(10):
        task = inner.add('task')
        task.job = sum
        task.args = [[1, 2], 0]
    self.app._run_single(inner)
    self.assertEqual(len(self.app.results), 10)
    self.assertEqual(self.app.results[0], 3)
    self.assertEqual(sum(self.app.results), 30)
def test_single_runner_app_with_many_subtasks(self):
    """An unconfigured nested app still yields one result per task."""
    self.assertEqual(self.app.queue, [])
    self.assertEqual(self.app.results, [])
    inner = BuildApp()
    for _ in range(10):
        task = inner.add('task')
        task.job = sum
        task.args = [[1, 2], 0]
    self.app._run_single(inner)
    self.assertEqual(len(self.app.results), 10)
    self.assertEqual(self.app.results[0], 3)
    self.assertEqual(sum(self.app.results), 30)
def main(args):
    """Queue and run removal of stale or builder-specific build artifacts."""
    c = Configuration()
    c.ingest(args.conf_path)
    c.runstate = args
    app = BuildApp(c)

    targets = []

    # a specific builder's output (and its doctrees) was requested
    if c.runstate.builder_to_delete is not None:
        builder = c.runstate.builder_to_delete
        targets.append(
            os.path.join(c.paths.branch_output, 'doctrees-' + builder))
        targets.append(os.path.join(c.paths.branch_output, builder))
        m = 'remove artifacts associated with the {0} builder in {1}'
        logger.debug(m.format(builder, c.git.branches.current))

    # sweep unpublished branch output older than the cutoff
    if c.runstate.days_to_save is not None:
        published_branches = ['docs-tools', 'archive', 'public', 'primer',
                              c.git.branches.current]
        published_branches.extend(c.git.branches.published)

        output_root = os.path.join(c.paths.projectroot, c.paths.output)
        for entry in os.listdir(output_root):
            build = os.path.join(c.paths.projectroot, c.paths.output, entry)
            branch = os.path.split(build)[1]

            if branch in published_branches:
                continue
            if not os.path.isdir(build):
                continue
            # NOTE(review): st_mtime (epoch seconds) is compared against
            # days_to_save -- confirm the runstate holds a timestamp cutoff.
            if os.stat(build).st_mtime > c.runstate.days_to_save:
                targets.append(build)
                targets.append(os.path.join(c.paths.projectroot,
                                            c.paths.output, 'public', branch))
                logger.debug(
                    'removed stale artifacts: "{0}" and "build/public/{0}"'.
                    format(branch))

    for fn in targets:
        t = app.add()
        t.job = rm_rf
        t.args = fn
        m = 'removing artifact: {0}'.format(fn)
        t.description = m
        logger.critical(m)

    app.run()
def tables(args):
    """Generate (or clean) table content."""
    cfg = fetch_config(args)
    with BuildApp.context(cfg) as builder:
        if cfg.runstate.clean_generated is True:
            table_clean(cfg, builder)
        else:
            table_tasks(cfg, builder)
def run_sphinx(builder, sconf, conf):
    """
    Invoke sphinx-build for one builder, then finalize its output.

    Returns a ``(return_code, combined_output)`` tuple for the caller to
    aggregate.
    """
    build_dir = os.path.join(conf.paths.projectroot,
                             conf.paths.branch_output, builder)
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
        logger.info('created directories "{1}" for sphinx builder {0}'.format(
            builder, build_dir))

    # translated builds need their PO files compiled first
    if 'language' in sconf and sconf.language is not None:
        command('sphinx-intl build --language=' + sconf.language)
        logger.info('compiled all PO files for translated build.')

    logger.info('starting sphinx build {0} at {1}'.format(builder, timestamp()))

    cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'  # per-builder-doctreea
    sphinx_cmd = cmd.format(
        get_sphinx_args(sconf, conf),
        os.path.join(conf.paths.projectroot, conf.paths.branch_output),
        os.path.basename(sconf.build_output),
        os.path.join(conf.paths.projectroot, conf.paths.branch_source),
        sconf.build_output)
    logger.debug(sphinx_cmd)

    result = command(sphinx_cmd, capture=True, ignore=True)
    # result = sphinx_native_worker(sphinx_cmd)

    logger.info('completed sphinx build {0} at {1}'.format(builder, timestamp()))

    if True:  # result.return_code == 0:
        logger.info('successfully completed {0} sphinx build at {1}'.format(
            builder, timestamp()))

        finalizer_app = BuildApp(conf)
        finalizer_app.root_app = False
        finalize_sphinx_build(sconf, conf, finalizer_app)
        finalizer_app.run()
    else:
        logger.warning(
            'the sphinx build {0} was not successful. not running finalize operation'
            .format(builder))

    return result.return_code, '\n'.join([result.err, result.out])
def intersphinx(args):
    """Fetch (or clean) intersphinx inventories."""
    cfg = fetch_config(args)
    with BuildApp.context(cfg) as builder:
        if cfg.runstate.clean_generated is True:
            intersphinx_clean(cfg, builder)
        else:
            intersphinx_tasks(cfg, builder)
def primer(args):
    """Migrate (or clean) primer content."""
    cfg = fetch_config(args)
    with BuildApp.context(cfg) as builder:
        if cfg.runstate.clean_generated is True:
            primer_clean(cfg, builder)
        else:
            primer_migration_tasks(cfg, builder)
def redirects(args):
    """Generate redirect rules, or print them on a dry run."""
    cfg = fetch_config(args)
    if args.dry_run is True:
        # dry run: show the rules without writing anything
        print(''.join(make_redirect(cfg)))
        return
    with BuildApp.context(cfg) as builder:
        redirect_tasks(cfg, builder)
def steps(args):
    """Generate (or clean) step/procedure content."""
    cfg = fetch_config(args)
    with BuildApp.context(cfg) as builder:
        if cfg.runstate.clean_generated is True:
            step_clean(cfg, builder)
        else:
            builder.extend_queue(step_tasks(cfg))
def options(args):
    """Generate (or clean) option reference content."""
    cfg = fetch_config(args)
    if cfg.runstate.clean_generated is True:
        option_clean(cfg)
        return
    with BuildApp.context(cfg) as builder:
        builder.extend_queue(option_tasks(cfg))
def publish_and_deploy(args):
    """
    Build the sphinx artifacts and, when the build succeeds (or is
    forced), hand them off to the deploy worker.
    """
    c = fetch_config(args)
    app = BuildApp(c)

    sphinx_ret = sphinx_publication(c, args, app)
    if sphinx_ret == 0 or c.runstate.force is True:
        deploy_worker(c, app)
    else:
        # BUG FIX: ``sphinx_ret`` is an int; concatenating it to the
        # message string raised TypeError instead of logging the warning.
        logger.warning(
            '{0} sphinx build(s) failed, and build not forced. not deploying.'
            .format(sphinx_ret))
def source(args):
    """Stage the source tree and queue content-generation tasks."""
    cfg = fetch_config(args)
    sconf = render_sconf(args.edition, 'html', args.language, cfg)

    with BuildApp.context(cfg) as builder:
        # staging runs in a nested context so it presumably completes
        # before the generation tasks below -- confirm against BuildApp
        with builder.context(cfg) as prep_app:
            source_tasks(cfg, sconf, prep_app)

        build_content_generation_tasks(cfg, builder.add('app'))
        refresh_dependency_tasks(cfg, builder.add('app'))
def stats(args):
    """Summarize mined GitHub pull requests: totals, merge-safe count, users."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    builder.pool = 'thread'
    gh = get_connection(cfg)

    users = set()
    result = {'merge_safe': 0, 'total': 0}
    for pull in mine_github_pulls(gh, builder, cfg):
        result['total'] += 1
        if pull['merge_safe'] is True:
            result['merge_safe'] += 1
        users.add(pull['user'])

    result['user_count'] = len(users)
    result['users'] = list(users)
    pprint(result)
def main(args):
    """
    Use Sphinx to generate build artifacts. Can generate artifacts for
    multiple output types, content editions and translations.
    """
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    with Timer("full sphinx build process"):
        return sphinx_publication(cfg, args, builder)
def publish_and_deploy(args):
    """
    Combines the work of ``giza sphinx`` and ``giza deploy``, to produce
    build artifacts and then upload those artifacts to the servers.
    """
    c = fetch_config(args)
    app = BuildApp(c)

    sphinx_ret = sphinx_publication(c, args, app)
    if sphinx_ret == 0 or c.runstate.force is True:
        deploy_tasks(c, app)
        # a dry run queues the deploy work but never executes it
        if c.runstate.dry_run is False:
            app.run()
    else:
        # BUG FIX: ``sphinx_ret`` is an int; concatenating it to the
        # message string raised TypeError instead of logging the warning.
        logger.warning(
            '{0} sphinx build(s) failed, and build not forced. not deploying.'
            .format(sphinx_ret))
def actions(args):
    """Pretty-print the merge-safe pull requests mined from GitHub."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    gh = get_connection(cfg)
    # keep only pulls explicitly flagged merge-safe
    results = [pull for pull in mine_github_pulls(gh, builder, cfg)
               if pull['merge_safe'] is True]
    pprint(results)
def clean(args):
    """Remove builder-specific and stale branch build artifacts."""
    c = Configuration()
    c.ingest(args.conf_path)
    c.runstate = args
    app = BuildApp(c)

    to_remove = []

    # a single builder's output (plus its doctrees) was requested
    if c.runstate.builder_to_delete is not None:
        builder = c.runstate.builder_to_delete
        to_remove.append(
            os.path.join(c.paths.branch_output, 'doctrees-' + builder))
        to_remove.append(os.path.join(c.paths.branch_output, builder))
        m = 'remove artifacts associated with the {0} builder in {1}'
        logger.debug(m.format(builder, c.git.branches.current))

    # sweep unpublished branch output older than the cutoff
    if c.runstate.days_to_save is not None:
        published_branches = ['docs-tools', 'archive', 'public', 'primer',
                              c.git.branches.current]
        published_branches.extend(c.git.branches.published)

        output_root = os.path.join(c.paths.projectroot, c.paths.output)
        for entry in os.listdir(output_root):
            build = os.path.join(c.paths.projectroot, c.paths.output, entry)
            branch = os.path.split(build)[1]

            if branch in published_branches:
                continue
            if not os.path.isdir(build):
                continue
            # NOTE(review): st_mtime (epoch seconds) is compared against
            # days_to_save -- confirm the runstate holds a timestamp cutoff.
            if os.stat(build).st_mtime > c.runstate.days_to_save:
                to_remove.append(build)
                to_remove.append(os.path.join(c.paths.projectroot,
                                              c.paths.output,
                                              'public', branch))
                logger.debug(
                    'removed stale artifacts: "{0}" and "build/public/{0}"'.
                    format(branch))

    for fn in to_remove:
        t = app.add()
        t.job = rm_rf
        t.args = fn
        m = 'removing artifact: {0}'.format(fn)
        t.description = m
        logger.critical(m)

    app.run()
def main(args):
    """Translate legacy make targets into giza build/deploy operations."""
    conf = fetch_config(args)
    targets = [t.split('-') for t in args.make_target]

    build_sphinx = True
    deploy_action = []
    sphinx_targets = set()
    for target in targets:
        should_build, sp = determine_workload(deploy_action, target, conf)
        sphinx_targets.update(sp)
        # one non-building target disables the sphinx phase for the run
        if build_sphinx is True and should_build is False:
            build_sphinx = should_build

    sphinx_targets = list(sphinx_targets)

    editions = []
    languages = []
    for raw_target in targets:
        for part in raw_target:
            if part in conf.project.edition_list:
                editions.append(part)
        joined = '-'.join(raw_target)
        if joined in conf.system.files.data.integration:
            languages.append(joined)

    if not editions:
        if len(conf.project.editions) > 0:
            editions = conf.project.edition_list
        else:
            editions = [None]

    if not languages:
        languages = [None]

    cmd = build_reporter(sphinx_targets, deploy_action, build_sphinx,
                         editions, languages, args)
    logger.info('running: ' + cmd)

    args.push_targets = deploy_action
    args.languages_to_build = languages
    args.editions_to_build = editions
    args.builder = sphinx_targets
    conf.runstate = args

    app = BuildApp(conf)
    if build_sphinx:
        sphinx_publication(conf, args, app)
    if deploy_action:
        deploy_worker(conf, app)
def run_sphinx(builder, sconf, conf):
    """
    Run sphinx-build for one builder and finalize its output, returning a
    ``(return_code, combined_output)`` tuple.
    """
    dirpath = os.path.join(conf.paths.projectroot,
                           conf.paths.branch_output,
                           builder)
    if not os.path.exists(dirpath):
        os.makedirs(dirpath)
        logger.info('created directories "{1}" for sphinx builder {0}'.format(
            builder, dirpath))

    # compile PO files before a localized build
    if 'language' in sconf and sconf.language is not None:
        command('sphinx-intl build --language=' + sconf.language)
        logger.info('compiled all PO files for translated build.')

    logger.info('starting sphinx build {0} at {1}'.format(builder, timestamp()))

    cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'  # per-builder-doctreea
    sphinx_cmd = cmd.format(
        get_sphinx_args(sconf, conf),
        os.path.join(conf.paths.projectroot, conf.paths.branch_output),
        os.path.basename(sconf.build_output),
        os.path.join(conf.paths.projectroot, conf.paths.branch_source),
        sconf.build_output)
    logger.debug(sphinx_cmd)

    build_result = command(sphinx_cmd, capture=True, ignore=True)
    # build_result = sphinx_native_worker(sphinx_cmd)

    logger.info('completed sphinx build {0} at {1}'.format(builder, timestamp()))

    if True:  # build_result.return_code == 0:
        logger.info('successfully completed {0} sphinx build at {1}'.format(
            builder, timestamp()))

        finalizer_app = BuildApp(conf)
        finalizer_app.root_app = False
        finalize_sphinx_build(sconf, conf, finalizer_app)
        finalizer_app.run()
    else:
        logger.warning(
            'the sphinx build {0} was not successful. not running finalize operation'
            .format(builder))

    output = '\n'.join([build_result.err, build_result.out])
    return build_result.return_code, output
def stats(args):
    """Summarize mined GitHub pull requests: totals, merge-safe count, users."""
    cfg = fetch_config(args)
    builder = BuildApp(cfg)
    gh = get_connection(cfg)

    users = set()
    result = {'merge_safe': 0, 'total': 0}
    for pull in mine_github_pulls(gh, builder, cfg):
        result['total'] += 1
        if pull['merge_safe'] is True:
            result['merge_safe'] += 1
        users.add(pull['user'])

    result['user_count'] = len(users)
    result['users'] = list(users)
    pprint(result)
def run_sphinx(builder, sconf, conf):
    """
    Run a single sphinx builder and finalize its output, timing both the
    build and finalize phases. Returns ``(return_code, combined_output)``.
    """
    if safe_create_directory(sconf.fq_build_output):
        logger.info('created directory "{1}" for sphinx builder {0}'.format(
            builder, sconf.fq_build_output))

    # compile PO files before a localized build
    if 'language' in sconf and sconf.language is not None:
        command('sphinx-intl build --language=' + sconf.language)
        logger.info('compiled all PO files for translated build.')

    logger.info('starting sphinx build {0}'.format(builder))

    cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'  # per-builder-doctree
    sphinx_cmd = cmd.format(
        get_sphinx_args(sconf, conf),
        os.path.join(conf.paths.projectroot, conf.paths.branch_output),
        sconf.build_output,
        os.path.join(conf.paths.projectroot, conf.paths.branch_source),
        sconf.fq_build_output)
    logger.debug(sphinx_cmd)

    with Timer("running sphinx build for: {0}, {1}, {2}".format(
            builder, sconf.language, sconf.edition)):
        result = command(sphinx_cmd, capture=True, ignore=True)

    logger.info('completed sphinx build {0}'.format(builder))

    if True:  # result.return_code == 0:
        logger.info('successfully completed {0} sphinx build ({1})'.format(
            builder, result.return_code))

        finalizer_app = BuildApp(conf)
        finalizer_app.pool = "thread"
        finalizer_app.root_app = False
        finalize_sphinx_build(sconf, conf, finalizer_app)

        with Timer("finalize sphinx {0} build".format(builder)):
            finalizer_app.run()
    else:
        logger.warning(
            'the sphinx build {0} was not successful. not running finalize operation'
            .format(builder))

    return result.return_code, '\n'.join([result.err, result.out])
def _weak_bootstrapping(args):
    """
    Bootstrap a giza quickstart project skeleton into the current directory
    and run an initial html build to seed the build system.
    """
    args.languages_to_build = args.editions_to_build = []
    args.builder = 'html'
    conf = fetch_config(args)
    app = BuildApp(conf)

    mod_path = os.path.dirname(inspect.getfile(giza))
    qstart_path = os.path.join(mod_path, 'quickstart')

    command('rsync --ignore-existing --recursive {0}/. {1}'.format(
        qstart_path, os.getcwd()))
    logger.info('migrated new site files')

    # The first build may fail on a fresh skeleton; retry once.
    # BUG FIX: narrowed the bare ``except:`` -- which also swallowed
    # KeyboardInterrupt and SystemExit -- to ``except Exception``.
    try:
        sphinx_publication(conf, args, app)
    except Exception:
        sphinx_publication(conf, args, app)

    shutil.rmtree('docs-tools')
    command('python build/docs-tools/makecloth/meta.py build/makefile.meta')
    logger.info('bootstrapped makefile system')

    logger.info('updated project skeleton in current directory.')
class TestBuildApp(TestCase): @classmethod def setUp(self): self.c = Configuration() self.c.runstate = RuntimeStateConfig() self.app = BuildApp(self.c) def test_add_make_test_default(self): self.assertEqual(self.app.queue, []) self.app.add() self.assertTrue(len(self.app.queue) == 1) self.assertTrue(isinstance(self.app.queue[0], Task)) def test_add_make_test_task(self): self.assertEqual(self.app.queue, []) self.app.add('task') self.assertTrue(len(self.app.queue) == 1) self.assertTrue(isinstance(self.app.queue[0], Task)) def test_add_make_test_app(self): self.assertEqual(self.app.queue, []) self.app.add('app') self.assertTrue(len(self.app.queue) == 1) self.assertIsInstance(self.app.queue[0], BuildApp) def test_add_existing_task_object(self): self.assertEqual(self.app.queue, []) t = Task() self.app.add(t) self.assertIs(t, self.app.queue[0]) self.assertIsNot(t, Task()) self.assertIsNot(Task(), self.app.queue[0]) def test_add_existing_app_object(self): self.assertEqual(self.app.queue, []) app = BuildApp(self.c) self.app.add(app) self.assertIs(app, self.app.queue[0]) self.assertIsNot(app, BuildApp(self.c)) self.assertIsNot(BuildApp(self.c), self.app.queue[0]) def test_conf_objet_consistent_in_task(self): self.assertEqual(self.app.queue, []) t = self.app.add('task') self.assertIs(self.c, t.conf) self.assertIs(self.c, self.app.queue[0].conf) def test_conf_objet_consistent_in_app(self): self.assertEqual(self.app.queue, []) app = self.app.add('app') self.assertIs(self.c, app.conf) self.assertIs(self.c, self.app.queue[0].conf) def test_conf_objet_consistent_in_task(self): self.assertEqual(self.app.queue, []) t = Task() self.assertIsNone(t.conf) self.app.add(t) self.assertIsNotNone(t.conf) self.assertIs(self.c, self.app.queue[0].conf) self.assertIs(self.c, t.conf) def test_pool_setter_default(self): self.assertIsNone(self.app.worker_pool) self.app.pool = None self.assertIsNotNone(self.app.worker_pool) self.assertIsInstance(self.app.pool, ProcessPool) def 
test_pool_setter_process(self): self.assertIsNone(self.app.worker_pool) a = self.app.pool = 'process' self.assertIsNotNone(self.app.worker_pool) self.assertIsInstance(self.app.pool, ProcessPool) def test_pool_setter_thread(self): self.assertIsNone(self.app.worker_pool) a = self.app.pool = 'thread' self.assertIsNotNone(self.app.worker_pool) self.assertIsInstance(self.app.pool, ThreadPool) def test_pool_setter_serial(self): self.assertIsNone(self.app.worker_pool) a = self.app.pool = 'serial' self.assertIsNotNone(self.app.worker_pool) self.assertIsInstance(self.app.pool, SerialPool) def test_pool_setter_process_by_ref(self): self.assertIsNone(self.app.worker_pool) a = self.app.pool = ProcessPool self.assertIsNotNone(self.app.worker_pool) self.assertIsInstance(self.app.pool, ProcessPool) def test_pool_setter_thread_by_ref(self): self.assertIsNone(self.app.worker_pool) self.app.pool = ThreadPool self.assertIsNotNone(self.app.worker_pool) self.assertIsInstance(self.app.pool, ThreadPool) def test_pool_setter_serial_by_ref(self): self.assertIsNone(self.app.worker_pool) self.app.pool = SerialPool self.assertIsNotNone(self.app.worker_pool) self.assertIsInstance(self.app.pool, SerialPool) def test_pool_setter_invalid_input(self): self.assertIsNone(self.app.worker_pool) a = self.app.pool = 1 self.assertIsInstance(self.app.pool, ProcessPool) def test_pool_setter_existing_pool_thread(self): self.assertIsNone(self.app.worker_pool) p = ThreadPool(self.c) self.app.pool = p self.assertIs(self.app.pool, p) def test_pool_setter_existing_pool_process(self): self.assertIsNone(self.app.worker_pool) p = ProcessPool(self.c) self.app.pool = p self.assertIs(self.app.pool, p) def test_pool_setter_existing_pool_serial(self): self.assertIsNone(self.app.worker_pool) p = SerialPool(self.c) self.app.pool = p self.assertIs(self.app.pool, p) def test_pool_closer(self): self.assertIsNone(self.app.worker_pool) self.app.pool = 'thread' self.assertIsInstance(self.app.pool, ThreadPool) 
self.app.close_pool() self.assertIsNone(self.app.worker_pool) def test_pool_type_checker_thread(self): self.assertTrue(self.app.is_pool_type('thread')) def test_pool_type_checker_process(self): self.assertTrue(self.app.is_pool_type('process')) def test_pool_type_checker_serial(self): self.assertTrue(self.app.is_pool_type('serial')) def test_pool_type_checker_serial_invalid(self): self.assertFalse(self.app.is_pool_type('serialized')) def test_pool_type_checker_process_invalid(self): self.assertFalse(self.app.is_pool_type('proc')) def test_pool_type_checker_thread_invalid(self): self.assertFalse(self.app.is_pool_type('threaded')) def test_is_pool_predicate_thead(self): self.assertTrue(self.app.is_pool(ThreadPool(self.c))) def test_is_pool_predicate_process(self): self.assertTrue(self.app.is_pool(ProcessPool(self.c))) def test_is_pool_predicate_serial(self): self.assertTrue(self.app.is_pool(SerialPool())) def test_is_pool_predicate_invalid(self): self.assertFalse(self.app.is_pool(self.c)) self.assertFalse(self.app.is_pool(self.app)) def test_add_invalid_object(self): with self.assertRaises(TypeError): self.app.add(1) def test_run_invalid_task(self): self.app.queue.append(1) with self.assertRaises(TypeError): self.app.run() def test_single_runner_task(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) t = Task() t.job = sum t.args = [[ 1 , 2 ], 0] self.app._run_single(t) self.assertEqual(self.app.results[0], 3) def test_single_runner_task_integrated(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) t = self.app.add('task') t.job = sum t.args = [[ 1 , 2 ], 0] self.app.run() self.assertEqual(self.app.results[0], 3) def test_single_runner_app(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) app = BuildApp(self.c) t = app.add('task') t.job = sum t.args = [[ 1 , 2 ], 0] self.app._run_single(app) self.assertEqual(self.app.results[0], 3) def 
test_single_runner_app_integrated(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) app = self.app.add('app') t = app.add('task') t.job = sum t.args = [[ 1 , 2 ], 0] self.app.run() self.assertEqual(self.app.results[0], 3) def test_single_runner_app_with_many_subtasks(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) app = BuildApp(self.c) for _ in range(10): t = app.add('task') t.job = sum t.args = [[ 1 , 2 ], 0] self.app._run_single(app) self.assertEqual(len(self.app.results), 10) self.assertEqual(self.app.results[0], 3) self.assertEqual(sum(self.app.results), 30) def test_results_ordering(self): expected_results = [12, 13, 14, 15, 7, 17, 18, 10, 20, 12] self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) for inc in range(10): t = self.app.add('task') t.job = sum if inc in (4, 7, 9): t.args = [[ 1 , 2, inc ], 0] else: t.args = [[ 20 , 2, inc - 10 ], 0] self.app.run() self.assertEqual(self.app.results, expected_results) def test_single_runner_app_integrated_with_many_subtasks(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) app = self.app.add('app') for _ in range(10): t = app.add('task') t.job = sum t.args = [[ 1 , 2 ], 0] self.app.run() self.assertEqual(len(self.app.results), 10) self.assertEqual(self.app.results[0], 3) self.assertEqual(sum(self.app.results), 30) def test_has_apps_predicate_single(self): self.assertEqual(self.app.queue, []) self.app.queue.append(None) self.assertFalse(self.app.queue_has_apps) def test_has_apps_predicate_empty(self): self.assertEqual(self.app.queue, []) self.assertFalse(self.app.queue_has_apps) def test_has_apps_predicate_all_tasks(self): self.assertEqual(self.app.queue, []) for _ in range(10): self.app.add('task') self.assertEqual(len(self.app.queue), 10) self.assertFalse(self.app.queue_has_apps) def test_has_apps_predicate_all_apps(self): self.assertEqual(self.app.queue, []) for _ in range(10): 
self.app.add('app') self.assertEqual(len(self.app.queue), 10) self.assertTrue(self.app.queue_has_apps) def test_has_apps_predicate_mixed(self): self.assertEqual(self.app.queue, []) for _ in range(10): self.app.add('task') for _ in range(10): self.app.add('app') self.assertEqual(len(self.app.queue), 20) self.assertTrue(self.app.queue_has_apps) def test_running_mixed_queue_all_apps_integrated(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) self.app.pool = 'serial' for _ in range(10): app = self.app.add('app') for _ in range(10): t = app.add('task') t.job = sum t.args = [[1,2], 0] self.app.run() self.assertEqual(len(self.app.queue), 0) self.assertEqual(sum(self.app.results), 300) def test_running_mixed_queue_mixed_queue_integrated(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) self.app.pool = 'serial' for _ in range(10): t = self.app.add('task') t.job = sum t.args = [[1,2], 0] for _ in range(10): app = self.app.add('app') for _ in range(10): t = app.add('task') t.job = sum t.args = [[1,2], 0] self.app.run() self.assertEqual(len(self.app.queue), 0) self.assertEqual(sum(self.app.results), 330) def test_running_mixed_queue_all_apps_direct(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) self.app.pool = 'serial' for _ in range(10): app = self.app.add('app') for _ in range(10): t = app.add('task') t.job = sum t.args = [[1,2], 0] self.app._run_mixed_queue() self.assertEqual(sum(self.app.results), 300) self.assertEqual(len(self.app.queue), 10) def test_running_mixed_queue_mixed_queue_direct(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) self.app.pool = 'serial' for _ in range(10): t = self.app.add('task') t.job = sum t.args = [[1,2], 0] for _ in range(10): app = self.app.add('app') for _ in range(10): t = app.add('task') t.job = sum t.args = [[1,2], 0] self.app._run_mixed_queue() self.assertEqual(len(self.app.queue), 20) 
self.assertEqual(sum(self.app.results), 330) def test_running_tasks_ordering_serial(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) self.app.pool = 'serial' for _ in range(5): t = self.app.add('task') t.job = sum t.args = [[1,2], 0] for _ in range(5): t = self.app.add('task') t.job = sum t.args = [[2,2], 0] self.app.run() self.assertEqual(len(self.app.queue), 0) self.assertEqual(self.app.results, [ 3, 3, 3, 3, 3, 4, 4, 4, 4, 4 ]) def test_task_results_ordering_with_apps(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) for _ in range(3): app = self.app.add('app') for _ in range(5): t = app.add('task') t.job = sum t.args = [[1,2], 0] for _ in range(5): t = app.add('task') t.job = sum t.args = [[2,2], 0] self.app.run() self.assertEqual(self.app.results, [ 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4 ]) def test_task_results_ordering_varried_with_apps(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) app = self.app.add('app') t = app.add('task') t.job = sum t.args = [[1, 8], 0] for _ in range(3): app = self.app.add('app') for _ in range(5): t = app.add('task') t.job = sum t.args = [[1,2], 0] for _ in range(5): t = app.add('task') t.job = sum t.args = [[2,2], 0] app = self.app.add('app') t = app.add('task') t.job = sum t.args = [[2, 8], 0] for _ in range(5): t = app.add('task') t.job = sum t.args = [[2,2], 0] self.app.run() self.assertEqual(self.app.results, [ 9, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 10, 4, 4, 4, 4, 4 ]) def test_task_results_lack_of_order(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) for _ in range(5): t = self.app.add('task') t.job = sum t.args = [[1,2], 0] for _ in range(5): t = self.app.add('task') t.job = sum t.args = [[2,2], 0] self.app.run() # there's a small chance that this could randomly fail without # indicating 
a correctness bug. self.assertNotEqual(self.app.results, [ 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4 ]) def test_task_results_task_and_apps0(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) for _ in range(6): t = self.app.add('task') t.job = sum t.args = [[1,1], 0] for _ in range(3): app0 = self.app.add('app') for _ in range(5): t = app0.add('task') t.job = sum t.args = [[1,2], 0] t = self.app.add('task') t.job = sum t.args = [[1,1], 0] app1 = self.app.add('app') for _ in range(5): t = app1.add('task') t.job = sum t.args = [[2,2], 0] for _ in range(10): t = self.app.add('task') t.job = sum t.args = [[1,1], 0] self.app.run() print(self.app.results) self.assertEqual(self.app.results, [ 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 2, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 2, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 2, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 ]) def test_task_results_task_and_apps1(self): self.assertEqual(self.app.queue, []) self.assertEqual(self.app.results, []) for _ in range(6): t = self.app.add('task') t.job = sum t.args = [[1,1], 0] for _ in range(3): app = self.app.add('app') for _ in range(5): t = app.add('task') t.job = sum t.args = [[1,2], 0] t = self.app.add('task') t.job = sum t.args = [[1,1], 0] for _ in range(5): t = app.add('task') t.job = sum t.args = [[2,2], 0] for _ in range(10): t = self.app.add('task') t.job = sum t.args = [[1,1], 0] self.app.run() print(self.app.results) self.assertEqual(self.app.results, [2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 ])
def setUp(self): self.c = Configuration() self.c.runstate = RuntimeStateConfig() self.app = BuildApp(self.c)