def discover_config_file():
    root_dirs = [
        os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')),
        os.path.abspath(os.path.join(os.path.dirname(__file__), '..')),
        os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')),
        os.path.abspath(os.path.dirname(__file__)),
    ]

    conf_dirs = ['config', 'bin']
    conf_file_names = ['build_conf.yaml', 'docs_meta.yaml']

    cur_branch = get_branch()

    for project_root_dir in root_dirs:
        cached_file = os.path.join(project_root_dir, 'build', cur_branch, 'conf-cache.json')

        if os.path.exists(cached_file):
            cached_conf = BuildConfiguration(cached_file)

            if cached_conf.git.commit == get_commit():
                return None, None, cached_conf, True

        for path, filename in itertools.product(conf_dirs, conf_file_names):
            conf_file = os.path.join(path, filename)
            abs_conf_file = os.path.join(project_root_dir, conf_file)

            if not os.path.exists(abs_conf_file):
                continue

            conf = BuildConfiguration(abs_conf_file)
            return project_root_dir, conf_file, conf, False

    raise ConfigurationError('no conf file found')
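# Minimal usage sketch (not taken from the source): discover_config_file()
# returns a four-tuple, and the final boolean reports whether the configuration
# came from a conf-cache.json written on a previous run. On a cache hit the
# first two elements are None.
root, conf_path, conf, from_cache = discover_config_file()
if from_cache:
    print('using cached configuration for commit', conf.git.commit)
else:
    print('loaded', conf_path, 'relative to', root)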
def generate_hash_file(fn):
    r = RstCloth()

    if os.path.exists(fn):
        with open(fn, 'r') as f:
            existing = f.read()
    else:
        existing = []

    commit = get_commit()

    r.directive('|commit| replace', '``{0}``'.format(commit))

    try:
        if r.data == existing[:-1]:
            logger.info('no new commit(s), not updating {0} ({1})'.format(fn, commit[:10]))
            return True
    except TypeError:
        logger.warning('problem generating {0}, continuing'.format(fn))
        # touch the file so downstream targets treat it as up to date
        with open(fn, 'a'):
            os.utime(fn, None)
    else:
        r.write(fn)
        logger.info('regenerated {0} with new commit hash: {1}'.format(fn, commit[:10]))
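# Illustrative sketch (an assumption, not confirmed by this excerpt): the
# RstCloth directive above should render a reStructuredText substitution
# definition roughly like
#
#   .. |commit| replace:: ``e4b1c2d3a5...``
#
# which other documents can reference as |commit|. The path below is a
# hypothetical example, not a path from the source.
generate_hash_file('meta/commit-hash.txt')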
def generate_meta(conf):
    m = MakefileCloth()

    m.section_break('branch/release meta', block='rel')

    if 'manual' in conf.git.branches and conf.project.name != 'primer':
        m.var('manual-branch', conf.git.branches.manual, block='rel')
        m.var('current-if-not-manual', conf.git.branches.manual, block='rel')

    m.var('current-branch', str(get_branch()), block='rel')
    m.var('last-commit', str(get_commit()), block='rel')

    m.section_break('file system paths', block='paths')
    m.var('output', conf.paths.output, block='paths')
    m.var('public-output', conf.paths.public, block='paths')
    m.var('branch-output', conf.paths.branch_output, block='paths')
    m.var('rst-include', conf.paths.includes, block='paths')
    m.var('branch-source', conf.paths.branch_source, block='paths')
    m.var('public-branch-output', conf.paths.branch_staging, block='paths')

    generated_makefiles = []

    if 'static' in conf.system.make:
        m.section_break('static makefile includes')

        for mfile in conf.system.make.static:
            if mfile.startswith('/'):
                m.include(mfile[1:], ignore=False)
            else:
                m.include(os.path.join(os.path.abspath(os.path.join(__file__, '../../makefiles')), mfile))

    m.newline()

    m.section_break('generated makefiles')

    for target in conf.system.make.generated:
        if target == 'sphinx':
            generator_fn = 'sphinx_builders'
        else:
            generator_fn = target

        fn = os.path.sep.join([conf.paths.output, "makefile." + target])
        cloth = os.path.join(conf.paths.buildsystem, "makecloth", generator_fn + '.py')

        generated_makefiles.append(fn)

        if target != 'meta':
            m.include(conf.paths.output + '/makefile.' + target, ignore=True)

        m.target(target=fn, dependency=cloth, block='makefiles')
        m.job(' '.join(['python', cloth, fn]))
        m.newline()

    m.newline()

    m.target('.PHONY', generated_makefiles)

    m.newline(3)

    m = giza_build.build_makefile(m, conf)

    return m
def generate_meta(conf):
    m = MakefileCloth()

    m.section_break('branch/release meta', block='rel')

    if 'manual' in conf.git.branches and conf.project.name != 'primer':
        m.var('manual-branch', conf.git.branches.manual, block='rel')
        m.var('current-if-not-manual', conf.git.branches.manual, block='rel')

    m.var('current-branch', str(get_branch()), block='rel')
    m.var('last-commit', str(get_commit()), block='rel')

    m.section_break('file system paths', block='paths')
    m.var('output', conf.paths.output, block='paths')
    m.var('public-output', conf.paths.public, block='paths')
    m.var('branch-output', conf.paths.branch_output, block='paths')
    m.var('rst-include', conf.paths.includes, block='paths')
    m.var('branch-source', conf.paths.branch_source, block='paths')
    m.var('public-branch-output', conf.paths.branch_staging, block='paths')

    generated_makefiles = []

    if 'static' in conf.system.make:
        m.section_break('static makefile includes')

        for mfile in conf.system.make.static:
            if mfile.startswith('/'):
                m.include(mfile[1:], ignore=False)
            else:
                m.include(os.path.join(os.path.abspath(os.path.join(__file__, '../../makefiles')), mfile))

    m.newline()

    m.section_break('generated makefiles')

    for target in conf.system.make.generated:
        fn = os.path.sep.join([conf.paths.output, "makefile." + target])
        cloth = os.path.join(conf.paths.buildsystem, "makecloth", target + '.py')

        generated_makefiles.append(fn)

        if target != 'meta':
            m.raw(['-include ' + conf.paths.output + '/makefile.' + target])

        m.target(target=fn, dependency=cloth, block='makefiles')
        m.job(' '.join([conf.system.python, cloth, fn]))
        m.newline()

    m.newline()

    m.target('.PHONY', generated_makefiles)

    return m
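# Minimal sketch of driving generate_meta() (assumptions: that MakefileCloth
# exposes a write() method analogous to RstCloth's, and that 'makefile.meta'
# is the conventional output name; neither is confirmed by this excerpt).
if __name__ == '__main__':
    conf = get_conf()
    m = generate_meta(conf)
    m.write(os.path.join(conf.paths.output, 'makefile.meta'))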
def render_git_info(conf):
    if is_processed('git_info', conf):
        return conf
    else:
        if 'branches' not in conf.git:
            conf.git.branches = AttributeDict()

        conf.git.branches.current = get_branch()
        conf.git.commit = get_commit()

        conf.system.processed.git_info = True

        return conf
def get_conf():
    project_root_dir, conf_file, conf, cached = discover_config_file()

    if cached is True:
        return conf

    conf = schema_migration_0(conf)

    conf_file = crawl_up_tree(conf_file, 2)

    conf.paths.projectroot = os.path.abspath(os.path.join(os.path.dirname(conf_file), '..'))
    conf.system.conf_file = conf_file

    if os.path.exists('/etc/arch-release'):
        conf.system.python = 'python2'
    else:
        conf.system.python = 'python'

    conf.system.processed = AttributeDict()
    conf.system.processed.paths = False
    conf.system.processed.edition = False
    conf.system.processed.project_paths = False
    conf.system.processed.project_conf = False
    conf.system.processed.versions = False

    conf = mangle_configuration(conf)
    conf = render_versions(conf)

    conf.git.branches.current = get_branch()
    conf.git.commit = get_commit()
    conf.project.basepath = get_manual_path(conf)

    conf = render_paths(conf)
    conf = mangle_paths(conf)

    conf.system.dependency_cache = os.path.join(conf.paths.projectroot,
                                                conf.paths.branch_output,
                                                'dependencies.json')

    conf_cache_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_output)
    conf_cache = os.path.join(conf_cache_dir, 'conf-cache.json')

    if not os.path.exists(conf_cache_dir):
        os.makedirs(conf_cache_dir)

    with open(conf_cache, 'w') as f:
        json.dump(conf, f)

    return conf
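# Minimal usage sketch (not taken from the source): a later call within the
# same branch and commit should short-circuit through the conf-cache.json
# written above via discover_config_file().
conf = get_conf()
print(conf.paths.projectroot)
print(conf.git.branches.current)
print(conf.system.dependency_cache)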
def generate_hash_file(fn):
    r = RstCloth()

    if os.path.exists(fn):
        with open(fn, 'r') as f:
            existing = f.read()
    else:
        existing = []

    commit = get_commit()

    r.directive('|commit| replace', '``{0}``'.format(commit))

    try:
        if r.get_block('_all')[0] == existing[:-1]:
            logger.info('no new commit(s), not updating {0} ({1})'.format(fn, commit[:10]))
            return True
    except TypeError:
        logger.warning('problem generating {0}, continuing'.format(fn))
        # touch the file so downstream targets treat it as up to date
        with open(fn, 'a'):
            os.utime(fn, None)
    else:
        r.write(fn)
        logger.info('regenerated {0} with new commit hash: {1}'.format(fn, commit[:10]))
def generate_hash_file(fn):
    r = RstCloth()

    if os.path.exists(fn):
        with open(fn, 'r') as f:
            existing = f.read()
    else:
        existing = []

    commit = get_commit()

    r.directive('|commit| replace', '``{0}``'.format(commit))

    try:
        if r.get_block('_all')[0] == existing[:-1]:
            print('[build]: no new commit(s), not updating {0} ({1})'.format(fn, commit[:10]))
            return True
    except TypeError:
        print('[ERROR] [build]: problem generating {0}, continuing'.format(fn))
        # touch the file so downstream targets treat it as up to date
        with open(fn, 'a'):
            os.utime(fn, None)
    else:
        r.write(fn)
        print('[build]: regenerated {0} with new commit hash: {1}'.format(fn, commit[:10]))