def main():
    """Build the deploy-system dependency file named by ``sys.argv[1]``.

    Reads the push configuration from the build-data directory and renders
    the generated deploy-system document to the requested output path.
    """
    push_conf = ingest_yaml_list(get_conf_file(file=__file__, directory=conf.paths.builddata))

    # BUG FIX: the original discarded the return value of
    # generate_new_deploy_system() and then wrote an undefined name ``m``,
    # which raised NameError at runtime. Capture the generated document.
    m = generate_new_deploy_system(push_conf)

    m.write(sys.argv[1])

    print('[meta-build]: built "' + sys.argv[1] + '" to specify dependencies files.')
def image_jobs(conf=None):
    """Yield build-job dicts for image generation.

    Emits one job per image to render its ``.rst`` include page, plus one
    job per configured output to rasterize the ``.svg`` source to ``.png``
    via inkscape. Yields nothing when the image metadata file is absent or
    empty.
    """
    # NOTE(review): the ``conf`` parameter is ignored and rebuilt with
    # lazy_conf(None) -- presumably intentional, but confirm with callers.
    conf = lazy_conf(None)
    paths = conf.paths

    meta_file = os.path.join(paths.images, 'metadata') + '.yaml'

    # BUG FIX (PEP 479): ``raise StopIteration`` inside a generator becomes
    # a RuntimeError on Python 3.7+; a bare ``return`` ends the generator.
    if not os.path.exists(meta_file):
        return

    images_meta = ingest_yaml_list(meta_file)

    if images_meta is None:
        return

    for image in images_meta:
        image['dir'] = paths.images
        source_base = os.path.join(image['dir'], image['name'])
        source_file = dot_concat(source_base, 'svg')
        rst_file = dot_concat(source_base, 'rst')
        image['conf'] = conf

        yield {
            'target': rst_file,
            'dependency': [meta_file,
                           os.path.join(paths.buildsystem, 'utils', 'rstcloth', 'images.py')],
            'job': generate_image_pages,
            'args': image
        }

        for output in image['output']:
            if 'tag' in output:
                tag = '-' + output['tag']
            else:
                tag = ''

            target_img = source_base + tag + '.png'
            inkscape_cmd = '{cmd} -z -d {dpi} -w {width} -y 0.0 -e >/dev/null {target} {source}'

            yield {
                'target': target_img,
                'dependency': [source_file, meta_file],
                'job': _generate_images,
                'args': [inkscape_cmd, output['dpi'], output['width'], target_img, source_file],
            }
def build_pdfs(conf):
    """Process every PDF listed in ``pdfs.yaml`` through the PDF pipeline.

    The regexes clean up LaTeX output: escape double hyphens inside
    index/bfcode macros, restore single quotes mangled by Pygments, and
    rewrite absolute ``\\code{/...}`` paths into project URLs.
    """
    latex_cleanup = [
        (re.compile(r'(index|bfcode)\{(.*)--(.*)\}'), r'\1\{\2-\{-\}\3\}'),
        (re.compile(r'\\PYGZsq{}'), "'"),
        (re.compile(r'\\code\{/(?!.*{}/|etc|usr|data|var|srv)'),
         r'\code{' + conf.project.url + r'/' + conf.project.tag),
    ]

    pdf_spec = os.path.join(conf.paths.builddata, 'pdfs.yaml')

    pdf_processor(conf=conf, pdfs=ingest_yaml_list(pdf_spec), regexes=latex_cleanup)
def __init__(self, fn, cache=None):
    """Load the step file ``fn`` and resolve any 'source'/'inherit' refs.

    ``cache`` maps already-parsed step file names to their Steps objects so
    shared sources are only read once; it is also used to detect recursive
    includes. Steps without an explicit ``stepnum`` are numbered by file
    order; if any step declares its own number, the list is re-sorted.
    """
    if cache is None: cache = dict()
    self.source_fn = fn
    self.agg_sources = cache  # shared cache of file name -> Steps
    self.source_list = ingest_yaml_list(self.source_fn)
    self.source_dir = os.path.dirname(self.source_fn)
    self.source = dict()  # ref -> resolved step document
    sort_needed = False
    for idx, step in enumerate(self.source_list):
        if 'stepnum' not in step:
            # default ordering follows position in the file, 1-indexed
            step['stepnum'] = idx+1
        else:
            # explicit numbering present: sort the whole list at the end
            sort_needed = True
        if 'source' in step or 'inherit' in step:
            # 'source' and 'inherit' are treated identically: both name a
            # (file, ref) pair that supplies the base step content.
            if 'source' in step:
                source_file = step['source']['file']
                source_ref = step['source']['ref']
            elif 'inherit' in step:
                source_file = step['inherit']['file']
                source_ref = step['inherit']['ref']
            if source_file in self.agg_sources:
                # cache hit: reuse the previously parsed step file
                current_step = self.agg_sources[source_file].get_step(source_ref)
            else:
                # NOTE(review): this checks the existence of the *current*
                # file ``fn`` rather than ``source_file`` -- looks
                # intentional in this codebase, but worth confirming.
                if not os.path.exists(fn):
                    raise Exception('[ERROR]: file {0} does not exist'.format(fn))
                elif fn in self.agg_sources or source_file in self.agg_sources:
                    raise Exception('[ERROR]: hitting recursion issue on {0}'.format(fn))
                # recursively parse the referenced step file, then merge its
                # cache into ours so siblings can reuse it
                steps = Steps(os.path.join(self.source_dir, source_file), self.agg_sources)
                current_step = steps.get_step(source_ref)
                self.agg_sources[source_file] = steps
                self.agg_sources.update(steps.agg_sources)
            if current_step is None:
                # the referenced ref does not exist in the source file
                msg = 'Missing ref for {0}:"{1}" in step file "{2}"'.format(source_file, source_ref, os.path.basename(self.source_fn))
                print("[steps]: " + msg)
                raise InvalidStep(msg)
            # local keys override the inherited base step
            current_step.update(step)
            self.source_list[idx] = current_step
            self.source[source_ref] = current_step
        else:
            # self-contained step: index it by its own ref
            self.source[step['ref']] = step
    if sort_needed is True:
        self.source_list.sort(key=lambda k:k['stepnum'])
def image_jobs(conf=None):
    """Yield build-job dicts (with human-readable descriptions) for images.

    For each image in the metadata file this emits a job that renders the
    ``.rst`` include page and, per configured output, a job that rasterizes
    the ``.svg`` source into a ``.png`` with inkscape. Yields nothing when
    the metadata file is absent or empty.
    """
    # NOTE(review): the ``conf`` parameter is ignored and rebuilt with
    # lazy_conf(None) -- presumably intentional, but confirm with callers.
    conf = lazy_conf(None)
    paths = conf.paths

    meta_file = os.path.join(paths.images, 'metadata') + '.yaml'

    # BUG FIX (PEP 479): ``raise StopIteration`` inside a generator becomes
    # a RuntimeError on Python 3.7+; a bare ``return`` ends the generator.
    if not os.path.exists(meta_file):
        return

    images_meta = ingest_yaml_list(meta_file)

    if images_meta is None:
        return

    for image in images_meta:
        image['dir'] = paths.images
        source_base = os.path.join(image['dir'], image['name'])
        source_file = dot_concat(source_base, 'svg')
        rst_file = dot_concat(source_base, 'rst')
        image['conf'] = conf

        yield {
            'target': rst_file,
            'dependency': [meta_file,
                           os.path.join(paths.buildsystem, 'utils', 'rstcloth', 'images.py')],
            'job': generate_image_pages,
            'args': image,
            'description': "generating rst include file {0} for {1}".format(rst_file, source_file)
        }

        for output in image['output']:
            if 'tag' in output:
                tag = '-' + output['tag']
            else:
                tag = ''

            target_img = source_base + tag + '.png'
            inkscape_cmd = '{cmd} -z -d {dpi} -w {width} -y 0.0 -e >/dev/null {target} {source}'

            yield {
                'target': target_img,
                'dependency': [source_file, meta_file],
                'job': _generate_images,
                'args': [inkscape_cmd, output['dpi'], output['width'], target_img, source_file],
                'description': 'generating image file {0} from {1}'.format(target_img, source_file)
            }
def populate_external_param(fn, basename, projectdir, sourcedir):
    """Load an external parameter file, trying several candidate locations.

    Tries ``fn`` (absolute paths are re-rooted under ``sourcedir``), then a
    path relative to ``basename``, then one under ``projectdir/sourcedir``.

    Returns a ``(resolved_path, mapping)`` tuple where the mapping is keyed
    by each parameter's ``name`` field.
    """
    if fn.startswith('/'):
        fn = os.path.join(sourcedir, fn[1:])

    # BUG FIX: the original had two *sibling* ``except OSError`` clauses on
    # one try statement; the second clause was unreachable, so the third
    # fallback location was dead code. Nesting the retries makes every
    # fallback reachable.
    try:
        ext_param = ingest_yaml_list(fn)
    except OSError:
        fn = os.path.join(basename, fn)
        try:
            ext_param = ingest_yaml_list(fn)
        except OSError:
            fn = os.path.join(projectdir, sourcedir, fn)
            ext_param = ingest_yaml_list(fn)

    o = {}

    for param in ext_param:
        # leaving the object sub-document unmodified if we use it at some point,
        # we might need to modify here.
        o[param['name']] = param

    return fn, o
def populate_external_param(fn, basename, projectdir, sourcedir):
    """Load an external parameter file, trying several candidate locations.

    Tries ``fn`` (absolute paths are re-rooted under ``sourcedir``), then a
    path relative to ``basename``, then one under ``projectdir/sourcedir``.

    Returns a ``(resolved_path, mapping)`` tuple where the mapping is keyed
    by each parameter's ``name`` field.
    """
    if fn.startswith('/'):
        fn = os.path.join(sourcedir, fn[1:])

    # BUG FIX: the original had two *sibling* ``except OSError`` clauses on
    # one try statement; the second clause was unreachable, so the third
    # fallback location was dead code. Nesting the retries makes every
    # fallback reachable.
    try:
        ext_param = ingest_yaml_list(fn)
    except OSError:
        fn = os.path.join(basename, fn)
        try:
            ext_param = ingest_yaml_list(fn)
        except OSError:
            fn = os.path.join(projectdir, sourcedir, fn)
            ext_param = ingest_yaml_list(fn)

    o = {}

    for param in ext_param:
        # leaving the object sub-document unmodified if we use it at some point,
        # we might need to modify here.
        o[param['name']] = param

    return fn, o
def ingest(self, fn):
    """Read the option source file ``fn``, cache its options, and resolve.

    The first ingested file fixes ``source_dirname``; later files are read
    relative to that directory by basename.
    """
    if self.source_dirname is None:
        # anchor all subsequent lookups to the first file's directory
        self.source_dirname = os.path.dirname(os.path.abspath(fn))

    self.source_files.append(fn)

    resolved_path = os.path.join(self.source_dirname, os.path.basename(fn))
    raw_options = ingest_yaml_list(resolved_path)

    self.cache[fn] = dict()

    for raw in raw_options:
        self.cache_option(Option(raw), fn)

    self.resolve(fn)
def ingest(self, fn):
    """Parse an option file, register each option, then resolve references.

    Uses the directory of the first file ever ingested as the base
    directory for all later files (matched by basename).
    """
    if self.source_dirname is None:
        self.source_dirname = os.path.dirname(os.path.abspath(fn))

    self.source_files.append(fn)

    source_path = os.path.join(self.source_dirname, os.path.basename(fn))
    documents = ingest_yaml_list(source_path)

    self.cache[fn] = dict()

    for document in documents:
        self.cache_option(Option(document), fn)

    self.resolve(fn)
def __init__(self, fn, cache=None):
    """Load the step file ``fn`` and resolve any 'source'/'inherit' refs.

    ``cache`` maps already-parsed step file names to Steps objects so shared
    sources are only parsed once and recursive includes are detected. Steps
    without an explicit ``stepnum`` are numbered by file order; if any step
    declares its own number, the list is re-sorted at the end.
    """
    if cache is None:
        cache = dict()

    self.source_fn = fn
    self.agg_sources = cache  # shared cache of file name -> Steps
    self.source_list = ingest_yaml_list(self.source_fn)
    self.source_dir = os.path.dirname(self.source_fn)
    self.source = dict()  # ref -> resolved step document

    sort_needed = False
    for idx, step in enumerate(self.source_list):
        if 'stepnum' not in step:
            # default ordering follows position in the file, 1-indexed
            step['stepnum'] = idx + 1
        else:
            sort_needed = True

        if 'source' in step or 'inherit' in step:
            # 'source' and 'inherit' both name a (file, ref) pair that
            # supplies the base step content.
            if 'source' in step:
                source_file = step['source']['file']
                source_ref = step['source']['ref']
            elif 'inherit' in step:
                source_file = step['inherit']['file']
                source_ref = step['inherit']['ref']

            if source_file in self.agg_sources:
                current_step = self.agg_sources[source_file].get_step(source_ref)
            else:
                if not os.path.exists(fn):
                    raise Exception('[ERROR]: file {0} does not exist'.format(fn))
                elif fn in self.agg_sources or source_file in self.agg_sources:
                    raise Exception('[ERROR]: hitting recursion issue on {0}'.format(fn))

                steps = Steps(os.path.join(self.source_dir, source_file), self.agg_sources)
                current_step = steps.get_step(source_ref)
                self.agg_sources[source_file] = steps
                self.agg_sources.update(steps.agg_sources)

            # BUG FIX: get_step() can return None for a missing ref, and the
            # original then crashed with AttributeError on .update(). Fail
            # loudly with a useful message instead, matching the sibling
            # implementation of this constructor.
            if current_step is None:
                raise Exception('[ERROR]: missing ref "{0}" from file "{1}" in "{2}"'.format(
                    source_ref, source_file, os.path.basename(self.source_fn)))

            # local keys override the inherited base step
            current_step.update(step)
            self.source_list[idx] = current_step
            self.source[source_ref] = current_step
        else:
            self.source[step['ref']] = step

    if sort_needed is True:
        self.source_list.sort(key=lambda k: k['stepnum'])
def error_pages(builder, conf):
    """Rewrite relative links in each rendered error page to project URLs.

    Returns None without doing anything when ``errors.yaml`` is absent.
    """
    error_conf = os.path.join(conf.paths.builddata, 'errors.yaml')

    # nothing to do if the project does not define error pages
    if not os.path.exists(error_conf):
        return None

    pages = ingest_yaml_list(error_conf)

    link_rewrite = (re.compile(r'\.\./\.\./'),
                    conf.project.url + r'/' + conf.project.tag + r'/')

    for error in pages:
        rendered_page = os.path.join(conf.paths.projectroot,
                                     conf.paths.branch_output,
                                     builder, 'meta', error, 'index.html')
        munge_page(fn=rendered_page, regex=link_rewrite, tag='error-pages')

    logging.info('error-pages: rendered {0} error pages'.format(len(pages)))
def robots_txt_builder(fn, conf, override=False):
    """Generate a robots.txt at ``fn`` from the ``robots.yaml`` build data.

    Refuses to run on non-master branches unless ``override`` is set.
    Returns False when generation is skipped; otherwise writes the file
    (creating its directory if needed).
    """
    if override is False:
        if conf.git.branches.current != 'master':
            print('[robots]: cowardly refusing to regenerate robots.txt on non-master branch.')
            return False
    else:
        print('[robots]: regenerating robots.txt on non-master branch with override.')

    input_fn = os.path.join(conf.paths.projectroot, conf.paths.builddata, 'robots.yaml')

    if not os.path.exists(input_fn):
        print('[robots]: {0} does not exist. not generating robots.txt'.format(input_fn))
        return False

    suppressed = ingest_yaml_list(input_fn)

    robots_txt_dir = os.path.dirname(fn)
    if not os.path.exists(robots_txt_dir):
        os.makedirs(robots_txt_dir)

    with open(fn, 'w') as f:
        f.write('User-agent: *\n')

        for record in suppressed:
            page = record['file']

            # with no branch restriction, disallow the page path verbatim
            if 'branches' not in record:
                f.write('Disallow: {0}\n'.format(page))
                continue

            for branch in record['branches']:
                # '{{published}}' expands to every published branch
                if branch == '{{published}}':
                    targets = conf.git.branches.published
                else:
                    targets = [branch]
                for target in targets:
                    f.write('Disallow: /{0}{1}\n'.format(target, page))

    print('[robots]: regenerated robots.txt file.')
def build_sffms(conf):
    """Clean up sffms LaTeX output and run the configured PDF builds.

    Runs the ``sffms-cleanup`` script over every ``.tex`` file in the sffms
    output tree, copies each configured input to its output name, then hands
    the PDF list to the shared PDF processor.
    """
    munge_script = os.path.join(conf.paths.buildsystem, 'bin', 'sffms-cleanup')
    base_dir = os.path.join(conf.paths.projectroot, conf.paths.output, 'sffms')

    cleanup_jobs = []
    for tex_file in expand_tree(base_dir, 'tex'):
        cleanup_jobs.append({'job': command,
                             'args': [' '.join([munge_script, tex_file])]})

    pdfs = ingest_yaml_list(os.path.join(conf.paths.builddata, 'pdfs.yaml'))

    count = runner(cleanup_jobs)
    print("[pdf] [sffms]: prepossessed {0} sffms files".format(count))

    for pdf in pdfs:
        copy_if_needed(source_file=os.path.join(base_dir, pdf['input']),
                       target_file=os.path.join(base_dir, pdf['output']),
                       name='sffms')

    pdf_processor(conf, pdfs, None)
def error_pages(builder, conf):
    """Fix up cross-references in the rendered per-builder error pages.

    Does nothing (returns None) when the project has no ``errors.yaml``.
    """
    error_conf = os.path.join(conf.paths.builddata, 'errors.yaml')

    if not os.path.exists(error_conf):
        return None

    pages = ingest_yaml_list(error_conf)

    # replace relative '../../' links with absolute project URLs
    substitution = (re.compile(r'\.\./\.\./'),
                    conf.project.url + r'/' + conf.project.tag + r'/')

    for error in pages:
        target = os.path.join(conf.paths.projectroot, conf.paths.branch_output,
                              builder, 'meta', error, 'index.html')
        munge_page(fn=target, regex=substitution, tag='error-pages')

    logging.info('error-pages: rendered {0} error pages'.format(len(pages)))
def robots_txt_builder(fn, conf, override=False):
    """Write a robots.txt file at ``fn`` based on ``robots.yaml``.

    Skips generation (returning False) on non-master branches without
    ``override``, or when the input file is missing. Creates the output
    directory when necessary.
    """
    if override is False:
        if conf.git.branches.current != 'master':
            print('[robots]: cowardly refusing to regenerate robots.txt on non-master branch.')
            return False
    else:
        print('[robots]: regenerating robots.txt on non-master branch with override.')

    input_fn = os.path.join(conf.paths.projectroot, conf.paths.builddata, 'robots.yaml')

    if not os.path.exists(input_fn):
        print('[robots]: {0} does not exist. not generating robots.txt'.format(input_fn))
        return False

    suppressed = ingest_yaml_list(input_fn)

    output_dir = os.path.dirname(fn)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(fn, 'w') as f:
        f.write('User-agent: *\n')

        for record in suppressed:
            page = record['file']

            if 'branches' not in record:
                # no branch qualifier: disallow the raw path
                f.write('Disallow: {0}\n'.format(page))
                continue

            for branch in record['branches']:
                # the '{{published}}' token stands for all published branches
                if branch == '{{published}}':
                    expanded = conf.git.branches.published
                else:
                    expanded = [branch]
                for b in expanded:
                    f.write('Disallow: /{0}{1}\n'.format(b, page))

    print('[robots]: regenerated robots.txt file.')
def generated_includes(conf):
    """Return a mapping from generated include files to their dependencies.

    Scans the includes tree for ``toc-spec``/``ref-spec`` and ``steps`` YAML
    files; keys are include paths relative to the source directory, values
    are lists of dependency paths under the include prefix.
    """
    toc_spec_files = []
    step_files = []

    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        base = os.path.basename(fn)
        if base.startswith(('toc-spec', 'ref-spec')):
            toc_spec_files.append(fn)
        elif base.startswith('steps'):
            step_files.append(fn)

    # strip the source-directory prefix from keys; prepend the include
    # prefix (includes path relative to source) to every dependency
    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]

    mapping = {}

    for spec_file in toc_spec_files:
        if not os.path.exists(spec_file):
            continue
        data = ingest_yaml_doc(spec_file)
        mapping[spec_file[maskl:]] = [os.path.join(path_prefix, src)
                                      for src in data['sources']]

    for step_def in step_files:
        deps = [step['source']['file']
                for step in ingest_yaml_list(step_def)
                if 'source' in step]
        if len(deps) != 0:
            mapping[step_def[maskl:]] = [os.path.join(path_prefix, d) for d in deps]

    return mapping
def generated_includes(conf):
    """Build the dependency map for generated include files.

    Collects ``toc-spec``/``ref-spec`` and ``steps`` YAML files from the
    includes tree and maps each (relative to the source directory) to the
    include-prefixed list of files it is generated from.
    """
    toc_spec_files = []
    step_files = []

    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        name = os.path.basename(fn)
        if name.startswith('toc-spec') or name.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif name.startswith('steps'):
            step_files.append(fn)

    # keys drop the source-directory prefix; deps gain the include prefix
    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]

    mapping = {}

    for spec_file in toc_spec_files:
        if not os.path.exists(spec_file):
            continue
        spec = ingest_yaml_doc(spec_file)
        mapping[spec_file[maskl:]] = [os.path.join(path_prefix, source)
                                      for source in spec['sources']]

    for step_def in step_files:
        step_sources = []
        for step in ingest_yaml_list(step_def):
            if 'source' in step:
                step_sources.append(step['source']['file'])
        if step_sources:
            mapping[step_def[maskl:]] = [os.path.join(path_prefix, source)
                                         for source in step_sources]

    return mapping
def _generate_api_param(source, target, conf):
    """Render the API parameter table from ``source`` YAML into ``target``."""
    rendered = generate_params(ingest_yaml_list(source), source, conf)
    rendered.write(target)

    print('[api]: rebuilt {0}'.format(target))
def __init__(self, fn, cache=None):
    """Load the step file ``fn`` and resolve 'source'/'inherit' references.

    ``cache`` maps already-parsed step file names to Steps objects so that
    shared sources are read once and recursive includes are detected.
    Steps without an explicit ``stepnum`` get 1-indexed file order; if any
    step declares its own number the list is re-sorted at the end. Raises
    InvalidStep for missing files, recursion, or unresolvable refs.
    """
    if cache is None: cache = dict()
    self.source_fn = fn
    self.agg_sources = cache  # shared cache of file name -> Steps
    self.source_list = ingest_yaml_list(self.source_fn)
    self.source_dir = os.path.dirname(self.source_fn)
    self.source = dict()  # ref -> resolved step document
    sort_needed = False
    for idx, step in enumerate(self.source_list):
        if 'stepnum' not in step:
            # default ordering follows position in the file, 1-indexed
            step['stepnum'] = idx+1
        else:
            # explicit numbering present: sort the whole list at the end
            sort_needed = True
        if 'source' in step or 'inherit' in step:
            # 'source' and 'inherit' both name a (file, ref) pair that
            # supplies the base step content.
            if 'source' in step:
                source_file = step['source']['file']
                source_ref = step['source']['ref']
            elif 'inherit' in step:
                source_file = step['inherit']['file']
                source_ref = step['inherit']['ref']
            if source_file in self.agg_sources:
                # cache hit: reuse the previously parsed step file
                current_step = self.agg_sources[source_file].get_step(source_ref)
                msg = 'resolved ref "{0}" from file "{1}" using step cache'
                logger.debug(msg.format(source_ref, source_file))
            else:
                msg = 'could *not* resolved ref "{0}" from file "{1}" with step cache'
                logger.debug(msg.format(source_ref, source_file))
                # NOTE(review): this checks the existence of the *current*
                # file ``fn`` rather than ``source_file`` -- looks
                # intentional in this codebase, but worth confirming.
                if not os.path.exists(fn):
                    msg = 'file {0} does not exist'.format(fn)
                    logger.error(msg)
                    raise InvalidStep(msg)
                elif fn in self.agg_sources or source_file in self.agg_sources:
                    msg = 'hitting recursion issue on {0}'.format(fn)
                    logger.error(msg)
                    raise InvalidStep(msg)
                else:
                    msg = "reading and caching step {0} from {1} and caching"
                    logger.debug(msg.format(source_ref, source_file))
                    if source_file == os.path.basename(self.source_fn):
                        # self-reference: resolve against this file's own
                        # already-parsed steps rather than re-parsing it
                        current_step = self.get_step(source_ref)
                    else:
                        # recursively parse the referenced step file, then
                        # merge its cache into ours for sibling reuse
                        steps = Steps(os.path.join(self.source_dir, source_file), self.agg_sources)
                        current_step = steps.get_step(source_ref)
                        self.agg_sources[source_file] = steps
                        self.agg_sources.update(steps.agg_sources)
                    logger.debug('successfully cached {0}'.format(source_file))
            if current_step is None:
                # the referenced ref does not exist in the source file
                msg = 'Missing ref for {0}:"{1}" in step file "{2}"'.format(source_file, source_ref, os.path.basename(self.source_fn))
                logger.error(msg)
                raise InvalidStep(msg)
            # local keys override the inherited base step
            current_step.update(step)
            self._validate_step(current_step, ['ref', 'title'])
            self.source[source_ref] = current_step
            self.source_list[idx] = current_step
        else:
            # self-contained step: validate and index it by its own ref
            self._validate_step(step, ['ref', 'title'])
            self.source[step['ref']] = step
    if sort_needed is True:
        self.source_list.sort(key=lambda k:k['stepnum'])
from bootstrap import buildsystem

# Make the vendored build system's Sphinx extensions and helper scripts
# importable alongside this configuration file.
sys.path.append(
    os.path.abspath(
        os.path.join(os.path.dirname(__file__), buildsystem, 'sphinxext')))
sys.path.append(
    os.path.abspath(os.path.join(os.path.dirname(__file__), buildsystem, 'bin')))

from utils.serialization import ingest_yaml, ingest_yaml_list
from utils.config import get_conf
from utils.project import get_versions, get_manual_path

# project-wide build configuration plus the PDF and intersphinx build data
conf = get_conf()

pdfs = ingest_yaml_list(os.path.join(conf.paths.builddata, 'pdfs.yaml'))
intersphinx_libs = ingest_yaml_list(
    os.path.join(conf.paths.builddata, 'intersphinx.yaml'))

# -- General configuration ----------------------------------------------------

needs_sphinx = '1.0'

extensions = [
    'sphinx.ext.extlinks',
    'sphinx.ext.todo',
    'mongodb',
    'directives',
    'intermanual'
]

templates_path = ['.templates']
exclude_patterns = []
from bootstrap import buildsystem

# Make the vendored build system's Sphinx extensions and helper scripts
# importable alongside this configuration file.
sys.path.append(os.path.join(project_root, buildsystem, "sphinxext"))
sys.path.append(os.path.join(project_root, buildsystem, "bin"))

from utils.config import get_conf
from utils.project import get_versions, get_manual_path
from utils.serialization import ingest_yaml, ingest_yaml_list
from utils.structures import BuildConfiguration
from utils.strings import dot_concat

# project-wide build configuration, re-rooted at this file's project root
conf = get_conf()
conf.paths.projectroot = project_root

intersphinx_libs = ingest_yaml_list(os.path.join(conf.paths.builddata, "intersphinx.yaml"))
sconf = BuildConfiguration(os.path.join(conf.paths.builddata, "sphinx-local.yaml"))

# -- General configuration ----------------------------------------------------

needs_sphinx = "1.0"

extensions = ["sphinx.ext.intersphinx", "sphinx.ext.extlinks", "sphinx.ext.todo", "mongodb", "directives"]

# translation catalogs live under the configured locale directory
locale_dirs = [conf.paths.locale]
gettext_compact = False

templates_path = [".templates"]
exclude_patterns = []

source_suffix = ".txt"
project_root = os.path.join(os.path.abspath(os.path.dirname(__file__)))
sys.path.append(project_root)

from bootstrap import buildsystem

try:
    # Legacy path: load configuration through the vendored buildsystem tree.
    sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), buildsystem, 'sphinxext')))
    sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), buildsystem, 'bin')))

    from utils.serialization import ingest_yaml, ingest_yaml_list
    from utils.structures import BuildConfiguration
    from utils.config import get_conf
    from utils.project import get_versions, get_manual_path

    conf = get_conf()

    pdfs = ingest_yaml_list(os.path.join(conf.paths.projectroot, conf.paths.builddata, 'pdfs.yaml'))
    sconf = BuildConfiguration(os.path.join(conf.paths.projectroot, conf.paths.builddata, 'sphinx_local.yaml'))
    intersphinx_libs = ingest_yaml_list(os.path.join(conf.paths.projectroot, conf.paths.builddata, 'intersphinx.yaml'))
except Exception:
    # BUG FIX: this was a bare ``except:``, which also swallows SystemExit
    # and KeyboardInterrupt; ``except Exception`` still triggers the
    # giza-based fallback on any real failure without masking interpreter
    # shutdown.
    from giza.config.runtime import RuntimeStateConfig
    from giza.config.helper import fetch_config, get_versions, get_manual_path
    from giza.tools.strings import dot_concat

    conf = fetch_config(RuntimeStateConfig())

    intersphinx_libs = conf.system.files.data.intersphinx
    pdfs = conf.system.files.data.pdfs
    sconf = conf.system.files.data.sphinx_local

    sys.path.append(os.path.join(conf.paths.projectroot, conf.paths.buildsystem, 'sphinxext'))

# -- General configuration ----------------------------------------------------
def _generate_api_param(source, target, conf):
    """Rebuild the API parameter listing for ``source`` and write ``target``."""
    params = ingest_yaml_list(source)
    document = generate_params(params, source, conf)
    document.write(target)

    logger.info('rebuilt {0}'.format(target))
from bootstrap import buildsystem

# Make the vendored build system's Sphinx extensions and helper scripts
# importable alongside this configuration file.
sys.path.append(os.path.join(project_root, buildsystem, 'sphinxext'))
sys.path.append(os.path.join(project_root, buildsystem, 'bin'))

from utils.config import get_conf
from utils.project import get_versions, get_manual_path
from utils.serialization import ingest_yaml, ingest_yaml_list
from utils.structures import BuildConfiguration
from utils.strings import dot_concat

# project-wide build configuration, re-rooted at this file's project root
conf = get_conf()
conf.paths.projectroot = project_root

intersphinx_libs = ingest_yaml_list(os.path.join(conf.paths.builddata, 'intersphinx.yaml'))
sconf = BuildConfiguration(os.path.join(conf.paths.builddata, 'sphinx-local.yaml'))

# -- General configuration ----------------------------------------------------

needs_sphinx = '1.0'

extensions = [
    'sphinx.ext.extlinks',
    'sphinx.ext.todo',
    'mongodb',
    'directives',
    'intermanual',
]

# translation catalogs live under the configured locale directory
locale_dirs = [ os.path.join(conf.paths.projectroot, conf.paths.locale) ]
def __init__(self, fn, cache=None):
    """Load the step file ``fn`` and resolve 'source'/'inherit' references.

    ``cache`` maps already-parsed step file names to Steps objects so that
    shared sources are read once and recursive includes are detected.
    Steps without an explicit ``stepnum`` get 1-indexed file order; if any
    step declares its own number the list is re-sorted at the end. Raises
    InvalidStep for missing files, recursion, or unresolvable refs.
    """
    if cache is None:
        cache = dict()
    self.source_fn = fn
    self.agg_sources = cache  # shared cache of file name -> Steps
    self.source_list = ingest_yaml_list(self.source_fn)
    self.source_dir = os.path.dirname(self.source_fn)
    self.source = dict()  # ref -> resolved step document
    sort_needed = False
    for idx, step in enumerate(self.source_list):
        if 'stepnum' not in step:
            # default ordering follows position in the file, 1-indexed
            step['stepnum'] = idx + 1
        else:
            # explicit numbering present: sort the whole list at the end
            sort_needed = True
        if 'source' in step or 'inherit' in step:
            # 'source' and 'inherit' both name a (file, ref) pair that
            # supplies the base step content.
            if 'source' in step:
                source_file = step['source']['file']
                source_ref = step['source']['ref']
            elif 'inherit' in step:
                source_file = step['inherit']['file']
                source_ref = step['inherit']['ref']
            if source_file in self.agg_sources:
                # cache hit: reuse the previously parsed step file
                current_step = self.agg_sources[source_file].get_step(
                    source_ref)
                msg = 'resolved ref "{0}" from file "{1}" using step cache'
                logger.debug(msg.format(source_ref, source_file))
            else:
                msg = 'could *not* resolved ref "{0}" from file "{1}" with step cache'
                logger.debug(msg.format(source_ref, source_file))
                # NOTE(review): this checks the existence of the *current*
                # file ``fn`` rather than ``source_file`` -- looks
                # intentional in this codebase, but worth confirming.
                if not os.path.exists(fn):
                    msg = 'file {0} does not exist'.format(fn)
                    logger.error(msg)
                    raise InvalidStep(msg)
                elif fn in self.agg_sources or source_file in self.agg_sources:
                    msg = 'hitting recursion issue on {0}'.format(fn)
                    logger.error(msg)
                    raise InvalidStep(msg)
                else:
                    msg = "reading and caching step {0} from {1} and caching"
                    logger.debug(msg.format(source_ref, source_file))
                    if source_file == os.path.basename(self.source_fn):
                        # self-reference: resolve against this file's own
                        # already-parsed steps rather than re-parsing it
                        current_step = self.get_step(source_ref)
                    else:
                        # recursively parse the referenced step file, then
                        # merge its cache into ours for sibling reuse
                        steps = Steps(
                            os.path.join(self.source_dir, source_file),
                            self.agg_sources)
                        current_step = steps.get_step(source_ref)
                        self.agg_sources[source_file] = steps
                        self.agg_sources.update(steps.agg_sources)
                    logger.debug(
                        'successfully cached {0}'.format(source_file))
            if current_step is None:
                # the referenced ref does not exist in the source file
                msg = 'Missing ref for {0}:"{1}" in step file "{2}"'.format(
                    source_file, source_ref,
                    os.path.basename(self.source_fn))
                logger.error(msg)
                raise InvalidStep(msg)
            # local keys override the inherited base step
            current_step.update(step)
            self._validate_step(current_step, ['ref', 'title'])
            self.source[source_ref] = current_step
            self.source_list[idx] = current_step
        else:
            # self-contained step: validate and index it by its own ref
            self._validate_step(step, ['ref', 'title'])
            self.source[step['ref']] = step
    if sort_needed is True:
        self.source_list.sort(key=lambda k: k['stepnum'])