def generate_meta(conf):
    """Build the top-level 'meta' makefile content.

    Emits branch/release variables, filesystem path variables, includes
    for static makefiles, and one regeneration target per generated
    makefile (each target re-runs its makecloth script with ``python``).
    Finally folds in the giza build-integration makefile.

    :param conf: build configuration (``paths``, ``git``, ``project``,
        ``system`` sub-objects).
    :returns: the populated ``MakefileCloth`` instance.
    """
    builder = MakefileCloth()

    builder.section_break('branch/release meta', block='rel')
    # Manual-branch variables only apply to projects that track a manual
    # branch; the primer project is explicitly excluded.
    if 'manual' in conf.git.branches and conf.project.name != 'primer':
        builder.var('manual-branch', conf.git.branches.manual, block='rel')
        builder.var('current-if-not-manual', conf.git.branches.manual, block='rel')
    builder.var('current-branch', str(get_branch()), block='rel')
    builder.var('last-commit', str(get_commit()), block='rel')

    builder.section_break('file system paths', block='paths')
    # Emission order matters for readability of the generated makefile;
    # keep this table in the original order.
    path_vars = (('output', conf.paths.output),
                 ('public-output', conf.paths.public),
                 ('branch-output', conf.paths.branch_output),
                 ('rst-include', conf.paths.includes),
                 ('branch-source', conf.paths.branch_source),
                 ('public-branch-output', conf.paths.branch_staging))
    for var_name, var_value in path_vars:
        builder.var(var_name, var_value, block='paths')

    makefile_paths = []

    if 'static' in conf.system.make:
        builder.section_break('static makefile includes')
        # Relative entries resolve against the makefiles/ directory that
        # ships two levels above this module.
        static_base = os.path.abspath(os.path.join(__file__, '../../makefiles'))
        for static_mfile in conf.system.make.static:
            if static_mfile.startswith('/'):
                # Leading slash marks a project-root-relative include.
                builder.include(static_mfile[1:], ignore=False)
            else:
                builder.include(os.path.join(static_base, static_mfile))
    builder.newline()

    builder.section_break('generated makefiles')
    for target in conf.system.make.generated:
        # The sphinx makefile is produced by the sphinx_builders script,
        # not a script named after the target itself.
        generator_fn = 'sphinx_builders' if target == 'sphinx' else target

        makefile_path = os.path.sep.join([conf.paths.output, "makefile." + target])
        cloth_script = os.path.join(conf.paths.buildsystem, "makecloth", generator_fn + '.py')
        makefile_paths.append(makefile_path)

        # Don't include the meta makefile into itself.
        if target != 'meta':
            builder.include(conf.paths.output + '/makefile.' + target, ignore=True)

        builder.target(target=makefile_path, dependency=cloth_script, block='makefiles')
        builder.job(' '.join(['python', cloth_script, makefile_path]))
        builder.newline()
    builder.newline()

    builder.target('.PHONY', makefile_paths)
    builder.newline(3)

    # Append the giza integration targets before handing the result back.
    return giza_build.build_makefile(builder, conf)
def generate_meta(conf):
    """Build the 'meta' makefile content for this variant of the toolchain.

    Emits branch/release variables, filesystem path variables, static
    makefile includes, and a regeneration target per generated makefile
    (run via the configured ``conf.system.python`` interpreter).

    :param conf: build configuration (``paths``, ``git``, ``project``,
        ``system`` sub-objects).
    :returns: the populated ``MakefileCloth`` instance.
    """
    builder = MakefileCloth()

    builder.section_break('branch/release meta', block='rel')
    # Manual-branch variables only apply to projects that track a manual
    # branch; the primer project is explicitly excluded.
    if 'manual' in conf.git.branches and conf.project.name != 'primer':
        builder.var('manual-branch', conf.git.branches.manual, block='rel')
        builder.var('current-if-not-manual', conf.git.branches.manual, block='rel')
    builder.var('current-branch', str(get_branch()), block='rel')
    builder.var('last-commit', str(get_commit()), block='rel')

    builder.section_break('file system paths', block='paths')
    # Table-driven emission; order is preserved deliberately.
    for var_name, var_value in (('output', conf.paths.output),
                                ('public-output', conf.paths.public),
                                ('branch-output', conf.paths.branch_output),
                                ('rst-include', conf.paths.includes),
                                ('branch-source', conf.paths.branch_source),
                                ('public-branch-output', conf.paths.branch_staging)):
        builder.var(var_name, var_value, block='paths')

    makefile_paths = []

    if 'static' in conf.system.make:
        builder.section_break('static makefile includes')
        static_base = os.path.abspath(os.path.join(__file__, '../../makefiles'))
        for static_mfile in conf.system.make.static:
            if static_mfile.startswith('/'):
                # Leading slash marks a project-root-relative include.
                builder.include(static_mfile[1:], ignore=False)
            else:
                builder.include(os.path.join(static_base, static_mfile))
    builder.newline()

    builder.section_break('generated makefiles')
    for target in conf.system.make.generated:
        makefile_path = os.path.sep.join([conf.paths.output, "makefile." + target])
        cloth_script = os.path.join(conf.paths.buildsystem, "makecloth", target + '.py')
        makefile_paths.append(makefile_path)

        # '-include' keeps make quiet when the file does not exist yet;
        # the meta makefile never includes itself.
        if target != 'meta':
            builder.raw(['-include ' + conf.paths.output + '/makefile.' + target])

        builder.target(target=makefile_path, dependency=cloth_script, block='makefiles')
        builder.job(' '.join([conf.system.python, cloth_script, makefile_path]))
        builder.newline()
    builder.newline()

    builder.target('.PHONY', makefile_paths)

    return builder
def generate_meta(conf):
    """Build the 'meta' makefile content (``conf.build``-schema variant).

    Emits branch/release variables, filesystem path variables, static
    makefile includes, a ``meta.yaml`` regeneration target, and one
    regeneration target per generated makefile.

    :param conf: build configuration exposing ``git`` and ``build``
        (``build.paths`` mixes attribute and item access — kept as-is).
    :returns: the populated ``MakefileCloth`` instance.
    """
    builder = MakefileCloth()

    builder.section_break('branch/release meta', block='rel')
    builder.var('manual-branch', conf.git.branches.manual, block='rel')
    builder.var('current-branch', str(utils.get_branch()), block='rel')
    builder.var('last-commit', str(utils.get_commit()), block='rel')
    builder.var('current-if-not-manual', conf.git.branches.manual, block='rel')

    builder.section_break('file system paths', block='paths')
    # Table-driven emission; order preserved deliberately. Hyphenated
    # keys require item access on the paths mapping.
    for var_name, var_value in (('output', conf.build.paths.output),
                                ('public-output', conf.build.paths.public),
                                ('branch-output', conf.build.paths['branch-output']),
                                ('rst-include', conf.build.paths.includes),
                                ('branch-source', conf.build.paths['branch-source']),
                                ('public-branch-output', conf.build.paths['branch-staging'])):
        builder.var(var_name, var_value, block='paths')

    makefile_paths = []

    if 'static' in conf.build.system:
        builder.section_break('static makefile includes')
        static_base = os.path.abspath(os.path.join(__file__, '../../makefiles'))
        for static_mfile in conf.build.system.static:
            if static_mfile.startswith('/'):
                # Leading slash marks a project-root-relative include.
                builder.include(static_mfile[1:], ignore=False)
            else:
                builder.include(os.path.join(static_base, static_mfile))
    builder.newline()

    # meta.yaml is regenerated through fabric rather than a makecloth
    # script. NOTE(review): block name 'metaymal' looks like a typo for
    # 'metayaml' but is preserved — it is a runtime identifier.
    builder.target('.PHONY', 'meta.yaml')
    builder.target('meta.yaml', block='metaymal')
    builder.job('fab process.output:meta.yaml process.meta', block='metaymal')
    builder.msg('[meta]: regenerated "meta.yaml"', block='metaymal')

    builder.section_break('generated makefiles')
    for target in conf.build.system.files:
        makefile_path = os.path.sep.join([conf.build.paths.output, "makefile." + target])
        cloth_script = os.path.join(conf.build.paths.buildsystem, "makecloth", target + '.py')
        makefile_paths.append(makefile_path)

        # '-include' keeps make quiet when the file does not exist yet;
        # the meta makefile never includes itself.
        if target != 'meta':
            builder.raw(['-include ' + conf.build.paths.output + '/makefile.' + target])

        builder.target(target=makefile_path, dependency=cloth_script, block='makefiles')
        builder.job(' '.join([conf.build.system.python, cloth_script, makefile_path]))
        builder.newline()
    builder.newline()

    builder.target('.PHONY', makefile_paths)

    return builder
def generate_json_output_meta():
    """Build the makefile fragment that coordinates JSON output.

    Produces targets to migrate built ``.json`` files into the public (or
    branch-staging) tree via rsync, to rebuild the json file-list
    inventory via fabric, and to clean all processed json artifacts.

    :returns: the populated ``MakefileCloth`` instance.
    """
    mk = MakefileCloth()
    mk.section_break('json output coordination.')

    path_map = render_paths('dict')

    # Ecosystem repositories publish json at the top of the public tree;
    # every other repository stages it under branch-staging.
    if get_conf().git.remote.upstream.endswith('ecosystem'):
        public_json = os.path.join(path_map['public'], 'json')
    else:
        public_json = os.path.join(path_map['branch-staging'], 'json')

    built_json = os.path.join(path_map['branch-output'], 'json')
    branch_list_file = os.path.join(path_map['branch-output'], 'json-file-list')
    public_list_file = os.path.join(public_json, '.file_list')

    mk.section_break('meta')
    mk.target('json-output', ['json'])
    mk.job('fab process.json_output')
    rsync_cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {0}/ {1}'
    mk.job(rsync_cmd.format(built_json, public_json))
    mk.msg('[json]: migrated all .json files to staging.')
    mk.msg('[json]: processed all json files.')

    mk.section_break('list file')
    mk.comment('the meta build system generates "{0}" when it generates this file'.format(branch_list_file))
    fab_cmd = 'fab process.input:{0} process.output:{1} process.copy_if_needed:json'
    mk.target('json-file-list', public_list_file)
    mk.target(public_list_file, 'json-output')
    mk.job(fab_cmd.format(branch_list_file, public_list_file))
    mk.msg('[json]: rebuilt inventory of json output.')

    mk.target(built_json, 'json')
    mk.target('.PHONY', ['clean-json-output', 'clean-json', 'json-output', 'json-file-list'])
    mk.target('clean-json-output', 'clean-json')
    # NOTE(review): 'rm -rf ' carries a trailing space, so the join emits
    # a double space — harmless to the shell, preserved as-is.
    mk.job(' '.join(['rm -rf ', public_list_file, branch_list_file, public_json]))
    mk.msg('[json]: removed all processed json.')

    return mk
def generate_json_output_meta():
    """Build the JSON-output coordination makefile (duplicate variant).

    Same contract as the sibling definition: emits migration (rsync),
    file-list inventory (fab), and cleanup targets for the built json
    tree.

    :returns: the populated ``MakefileCloth`` instance.
    """
    makefile = MakefileCloth()
    makefile.section_break('json output coordination.')

    paths = render_paths('dict')

    # Destination depends on whether the upstream remote is the
    # 'ecosystem' repository.
    upstream = get_conf().git.remote.upstream
    if upstream.endswith('ecosystem'):
        public_out = os.path.join(paths['public'], 'json')
    else:
        public_out = os.path.join(paths['branch-staging'], 'json')

    build_out = os.path.join(paths['branch-output'], 'json')
    branch_listing = os.path.join(paths['branch-output'], 'json-file-list')
    public_listing = os.path.join(public_out, '.file_list')

    makefile.section_break('meta')
    makefile.target('json-output', ['json'])
    makefile.job('fab process.json_output')
    sync_template = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {0}/ {1}'
    makefile.job(sync_template.format(build_out, public_out))
    makefile.msg('[json]: migrated all .json files to staging.')
    makefile.msg('[json]: processed all json files.')

    makefile.section_break('list file')
    makefile.comment('the meta build system generates "{0}" when it generates this file'.format(branch_listing))
    process_template = 'fab process.input:{0} process.output:{1} process.copy_if_needed:json'
    makefile.target('json-file-list', public_listing)
    makefile.target(public_listing, 'json-output')
    makefile.job(process_template.format(branch_listing, public_listing))
    makefile.msg('[json]: rebuilt inventory of json output.')

    makefile.target(build_out, 'json')
    makefile.target('.PHONY', ['clean-json-output', 'clean-json', 'json-output', 'json-file-list'])
    makefile.target('clean-json-output', 'clean-json')
    # NOTE(review): trailing space in 'rm -rf ' yields a doubled space in
    # the emitted command; harmless, kept byte-for-byte.
    makefile.job(' '.join(['rm -rf ', public_listing, branch_listing, public_out]))
    makefile.msg('[json]: removed all processed json.')

    return makefile
def main():
    """Generate the giza build-integration makefile and write it out.

    Reads ``sphinx.yaml`` (builders, editions, languages) and, when
    present, ``integration.yaml`` plus the push configuration, then
    emits make targets that shell out to the corresponding ``giza``
    subcommands. Writes the result to ``<output>/makefile.giza_build``.
    """
    mk = MakefileCloth()
    conf = new_config()
    output_file = os.path.join(conf.paths.projectroot, conf.paths.output, 'makefile.giza_build')

    mk.section_break('giza build integration')
    mk.newline()

    mk.section_break('content generation targets')
    # Each content type gets a normal and a --force regeneration target.
    for content in ['api', 'assets', 'images', 'intersphinx', 'options',
                    'primer', 'steps', 'tables', 'toc']:
        mk.target(hyph_concat('giza', content))
        mk.job('giza generate ' + content)
        mk.target(hyph_concat('giza', 'force', content))
        mk.job('giza --force generate ' + content)
        mk.newline()

    mk.section_break('sphinx targets')
    sconf = ingest_yaml_doc(os.path.join(conf.paths.projectroot,
                                         conf.paths.builddata,
                                         'sphinx.yaml'))

    # Keys that are configuration, not builder names.
    reserved = ('prerequisites', 'generated-source', 'languages',
                'editions', 'sphinx_builders')
    builder_keys = [key for key in sconf
                    if not key.endswith('base') and key not in reserved]

    editions = sconf['editions'] if 'editions' in sconf else []
    if 'root-base' in sconf and 'languages' in sconf['root-base']:
        languages = sconf['root-base']['languages']
    else:
        languages = []

    emitted = []
    for builder_name in builder_keys:
        # 'html-foo' style keys collapse to their base builder name;
        # each base builder is emitted only once.
        if '-' in builder_name:
            builder_name = builder_name.split('-')[0]
        if builder_name in emitted:
            continue

        mk.comment(builder_name + ' targets')
        for edition in editions:
            mk.target(hyph_concat('giza', builder_name, edition))
            mk.job('giza sphinx --builder {0} --edition {1}'.format(builder_name, edition))
            for language in languages:
                mk.target(hyph_concat('giza', builder_name, edition, language))
                mk.job('giza sphinx --builder {0} --edition {1} --language {2}'.format(builder_name, edition, language))

        if len(editions) == 0:
            mk.target(hyph_concat('giza', builder_name))
            mk.job('giza sphinx --builder ' + builder_name)
            for language in languages:
                mk.target(hyph_concat('giza', builder_name, language))
                mk.job('giza sphinx --builder {0} --language {1}'.format(builder_name, language))
        else:
            # Aggregate target: build every edition with one invocation.
            mk.target(hyph_concat('giza', builder_name))
            mk.job('giza sphinx --builder {0} --edition {1}'.format(builder_name, ' '.join(editions)))

        mk.newline()
        emitted.append(builder_name)

    mk.section_break('deploy targets')
    if 'push' in conf.system.files.data:
        for push_cfg in conf.system.files.data.push:
            push_name = push_cfg['target']
            mk.target(hyph_concat('giza-deploy', push_name))
            mk.job('giza deploy --target ' + push_name)
            mk.newline()

    integration_path = os.path.join(conf.paths.projectroot,
                                    conf.paths.builddata,
                                    'integration.yaml')
    if os.path.exists(integration_path):
        mk.section_break('integration and publish targets')
        iconf = ingest_yaml_doc(integration_path)

        if 'base' in iconf:
            # Per-language integration configs live beside the 'base' key.
            languages = [key for key in iconf.keys() if not key.endswith('base')]
            iconf = iconf['base']
        else:
            languages = []

        # Collapse 'html-foo' style entries; skip paths and htaccess rules.
        targets = set([entry.split('-')[0]
                       for entry in iconf['targets']
                       if '/' not in entry and not entry.startswith('htaccess')])

        base_job = 'giza sphinx --builder {0}'.format(' '.join(targets))
        if len(editions) > 0:
            base_job += " --serial_sphinx --edition " + ' '.join(editions)

        mk.target('giza-publish')
        mk.job(base_job)
        mk.newline()
        for lang in languages:
            mk.target('giza-publish-' + lang)
            mk.job(base_job + ' --language ' + lang)
            mk.newline()

        # following targets build a group of sphinx targets followed by
        # running one or more deploy actions.
        mk.section_break('push targets')
        if 'push' in conf.system.files.data:
            for push_cfg in conf.system.files.data.push:
                push_job = 'giza push --deploy {0} --builder {1}'.format(push_cfg['target'], ' '.join(targets))
                if len(editions) > 0:
                    push_job += " --serial_sphinx --edition " + ' '.join(editions)

                mk.target('giza-' + push_cfg['target'])
                mk.job(push_job)
                mk.newline()
                for lang in languages:
                    mk.target('giza-{0}-{1}'.format(push_cfg['target'], lang))
                    mk.job(push_job + ' --language ' + lang)
                    mk.newline()

    mk.write(output_file)
    print('[meta-build]: built "build/makefile.giza_build" to integrate giza')