Ejemplo n.º 1
0
def finalize_single_html_jobs(builder, conf):
    """Finalize single-page HTML output and yield copy tasks for static assets.

    Renders the built HTML into the single-html directory, copies the
    Sphinx inventory file, and yields one copy job per file found under
    the builder's ``_static`` directory.
    """
    out_dir = get_single_html_dir(conf)
    build_dir = os.path.join(conf.paths.branch_output, builder)

    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    index_target = os.path.join(out_dir, 'index.html')
    try:
        # Some builder configurations name the root document 'contents.html'.
        manual_single_html(input_file=os.path.join(build_dir, 'contents.html'),
                           output_file=index_target)
    except (IOError, OSError):
        # Fall back to the conventional 'index.html' root document.
        manual_single_html(input_file=os.path.join(build_dir, 'index.html'),
                           output_file=index_target)

    copy_if_needed(source_file=os.path.join(build_dir, 'objects.inv'),
                   target_file=os.path.join(out_dir, 'objects.inv'))

    static_dir = os.path.join(out_dir, '_static')

    for asset in expand_tree(os.path.join(build_dir, '_static'), None):
        yield {
            'job': copy_if_needed,
            'args': [asset, os.path.join(static_dir, os.path.basename(asset))],
            'target': None,
            'dependency': None
        }
Ejemplo n.º 2
0
def toc_jobs(conf):
    """Yield build tasks that regenerate table-of-contents output files."""
    paths = copy(conf.paths)

    table_prefix = os.path.join(paths.includes, 'table')
    step_prefix = os.path.join(paths.includes, 'step')

    for fn in expand_tree(paths.includes, 'yaml'):
        # Tables and steps are handled by their own job generators.
        if fn.startswith(table_prefix) or fn.startswith(step_prefix):
            continue
        if len(fn) < 24:
            continue

        base_name = _get_toc_base_name(fn)

        # The format tag is encoded at a fixed offset in the path: 'spec'
        # occupies [20:24], other tags ('toc', 'ref') occupy [16:19].
        fmt = fn[20:24]
        if fmt != 'spec':
            fmt = fn[16:19]

        task = {
            'dependency': os.path.join(paths.projectroot, fn),
            'job': _generate_toc_tree,
            'target': [],
            'args': [fn, fmt, base_name, paths, conf]
        }

        if fmt != 'spec':
            task['target'].append(_get_toc_output_name(base_name, 'toc', paths))

        is_ref_spec = fn.startswith(os.path.join(os.path.dirname(fn), 'ref-spec'))

        if not is_ref_spec and fmt in ('toc', 'spec'):
            task['target'].append(_get_toc_output_name(base_name, 'dfn-list', paths))
        elif fmt == 'ref' or is_ref_spec:
            task['target'].append(_get_toc_output_name(base_name, 'table', paths))

        yield task
Ejemplo n.º 3
0
def finalize_single_html_jobs(builder, conf):
    """Assemble single-page HTML output; yield copy jobs for static assets."""
    target_dir = get_single_html_dir(conf)

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    source_dir = os.path.join(conf.paths.branch_output, builder)
    index_out = os.path.join(target_dir, 'index.html')

    try:
        # Prefer the 'contents.html' root document when the builder emits one.
        manual_single_html(input_file=os.path.join(source_dir, 'contents.html'),
                           output_file=index_out)
    except (IOError, OSError):
        manual_single_html(input_file=os.path.join(source_dir, 'index.html'),
                           output_file=index_out)

    copy_if_needed(source_file=os.path.join(source_dir, 'objects.inv'),
                   target_file=os.path.join(target_dir, 'objects.inv'))

    static_out = os.path.join(target_dir, '_static')

    for static_fn in expand_tree(os.path.join(source_dir, '_static'), None):
        yield {
            'dependency': None,
            'target': None,
            'job': copy_if_needed,
            'args': [static_fn,
                     os.path.join(static_out, os.path.basename(static_fn))]
        }
Ejemplo n.º 4
0
def dump_file_hashes(conf=None):
    """Write a JSON cache mapping each source file to its md5 hash."""
    conf = lazy_conf(conf)

    cache_path = conf.system.dependency_cache
    source_root = os.path.join(conf.paths.projectroot, conf.paths.source)

    payload = {
        'conf': conf,
        # epoch timestamp ("%s" is a platform strftime extension)
        'time': datetime.datetime.utcnow().strftime("%s"),
        'files': {fn: md5_file(fn)
                  for fn in expand_tree(source_root, None)
                  if os.path.exists(fn)}
    }

    cache_dir = os.path.dirname(cache_path)
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    with open(cache_path, 'w') as f:
        json.dump(payload, f)

    logger.info('wrote dependency cache to: {0}'.format(cache_path))
Ejemplo n.º 5
0
def manpage_url_jobs(conf):
    """Yield tasks that rewrite documentation links inside built man pages.

    Builds one regex matching all top-level documentation paths and yields
    a ``manpage_url`` task for every man page in the current branch output.
    """
    project_source = os.path.join(conf.paths.projectroot, conf.paths.source)

    top_level_items = set()
    for fs_obj in os.listdir(project_source):
        # Skip static assets and the index page itself.
        if fs_obj.startswith('.static') or fs_obj == 'index.txt':
            continue
        if os.path.isdir(os.path.join(project_source, fs_obj)):
            top_level_items.add(fs_obj)
        if fs_obj.endswith('.txt'):
            top_level_items.add(fs_obj[:-4])

    pattern_body = '/' + r'[^\s]*|/'.join(top_level_items) + r'[^\s]*'

    re_string = r'(\\fB({0})\\fP)'.format(pattern_body).replace(r'-', r'\-')
    subst = conf.project.url + '/' + conf.project.tag + r'\2'

    regex_obj = (re.compile(re_string), subst)

    man_root = os.path.join(conf.paths.projectroot, conf.paths.output,
                            conf.git.branches.current, 'man')

    for manpage in expand_tree(man_root, ['1', '5']):
        yield {'target': manpage,
               'dependency': None,
               'job': manpage_url,
               'args': [regex_obj, manpage]}
Ejemplo n.º 6
0
def dump_file_hashes(conf=None):
    """Hash every existing source file and persist the result as JSON."""
    conf = lazy_conf(conf)

    output = conf.system.dependency_cache

    record = {'conf': conf,
              # epoch timestamp ("%s" is a platform strftime extension)
              'time': datetime.datetime.utcnow().strftime("%s"),
              'files': {}}

    source_root = os.path.join(conf.paths.projectroot, conf.paths.source)
    for fn in expand_tree(source_root, None):
        if os.path.exists(fn):
            record['files'][fn] = md5_file(fn)

    out_dir = os.path.dirname(output)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    with open(output, 'w') as f:
        json.dump(record, f)

    logger.info('wrote dependency cache to: {0}'.format(output))
Ejemplo n.º 7
0
def include_files(conf=None):
    """Map each included file to the list of files that include it."""
    conf = lazy_config(conf)

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    inc_re = re.compile(r'\.\. include:: /(.*)')

    matches = grep(inc_re, expand_tree(source_dir, None))
    # groupby requires its input sorted by the grouping key (include target).
    matches.sort(key=operator.itemgetter(1))

    files = dict()
    for include_target, group in groupby(matches, operator.itemgetter(1)):
        # De-duplicate sources and drop editor backup files ('~' suffix).
        files[include_target] = list({src[0] for src in group
                                      if not src[0].endswith('~')})

    files.update(generated_includes(conf))

    return files
Ejemplo n.º 8
0
def manpage_url_jobs(builder, conf):
    """Yield tasks that rewrite cross-reference URLs in built man pages."""
    project_source = os.path.join(conf.paths.projectroot, conf.paths.source)

    top_level_items = set()
    for entry in os.listdir(project_source):
        # Ignore static assets and the index page.
        if entry.startswith('.static') or entry == 'index.txt':
            continue
        if os.path.isdir(os.path.join(project_source, entry)):
            top_level_items.add(entry)
        if entry.endswith('.txt'):
            top_level_items.add(entry[:-4])

    body = '/' + r'[^\s]*|/'.join(top_level_items) + r'[^\s]*'

    re_string = r'(\\fB({0})\\fP)'.format(body).replace(r'-', r'\-')
    subst = conf.project.url + '/' + conf.project.tag + r'\2'
    regex_obj = (re.compile(re_string), subst)

    man_root = os.path.join(conf.paths.projectroot, conf.paths.output,
                            conf.git.branches.current, builder)

    for manpage in expand_tree(man_root, ['1', '5']):
        yield {'target': manpage,
               'dependency': None,
               'job': manpage_url,
               'args': [regex_obj, manpage]}
Ejemplo n.º 9
0
def toc_jobs(conf):
    """Yield build tasks that regenerate table-of-contents output files.

    Walks the yaml files under the includes directory (skipping table and
    step files, which have their own generators) and yields one
    ``_generate_toc_tree`` task per toc/ref/spec source file.
    """
    paths = copy(conf.paths)

    for fn in expand_tree(paths.includes, "yaml"):
        if fn.startswith(os.path.join(paths.includes, "table")):
            continue
        elif fn.startswith(os.path.join(paths.includes, "step")):
            continue
        elif len(fn) >= 24:
            base_name = _get_toc_base_name(fn)

            # The format tag is encoded at a fixed offset in the path:
            # 'spec' occupies [20:24], other tags ('toc', 'ref') [16:19].
            fmt = fn[20:24]
            if fmt != "spec":
                fmt = fn[16:19]

            o = {
                "dependency": os.path.join(paths.projectroot, fn),
                "job": _generate_toc_tree,
                "target": [],
                "args": [fn, fmt, base_name, paths, conf],
                # BUG FIX: second placeholder was {0}, repeating the format
                # tag instead of naming the source file.
                "description": "generating {0} toc from {1}".format(fmt, fn),
            }

            if fmt != "spec":
                o["target"].append(_get_toc_output_name(base_name, "toc", paths))

            is_ref_spec = fn.startswith(os.path.join(os.path.dirname(fn), "ref-spec"))

            if not is_ref_spec and (fmt == "toc" or fmt == "spec"):
                o["target"].append(_get_toc_output_name(base_name, "dfn-list", paths))
            elif fmt == "ref" or is_ref_spec:
                o["target"].append(_get_toc_output_name(base_name, "table", paths))

            yield o
Ejemplo n.º 10
0
def main(files):
    """Ingest all yaml option files under './' and render each option.

    NOTE(review): the ``files`` argument is currently unused; sources are
    discovered by walking the working directory.
    """
    options = Options()

    # BUG FIX: the original relied on the for-loop variable leaking out of
    # the loop and raised NameError if no yaml files were found. Track the
    # output directory (the directory of the last ingested file) explicitly.
    output_dir = None
    for fn in expand_tree('./', 'yaml'):
        options.ingest(fn)
        output_dir = os.path.dirname(fn)

    for opt in options.iterator():
        renderer = OptionRendered(opt)
        renderer.render(output_dir)
Ejemplo n.º 11
0
def include_file_data(conf):
    """Build a metadata record for every file in the includes tree.

    Returns a dict keyed by the include path relative to the source
    directory. Each value records the file's id, name, path, whether it is
    generated or recursively included, its client (including) files, and
    its content lines. Files matching suppressed_page_prefixes or editor
    backups ('~' suffix) are skipped.
    """
    inc_path = os.path.join(conf.paths.includes)
    include_file_list = expand_tree(path=inc_path, input_extension=None)
    # Maps each include target to the set of files that include it.
    include_graph = include_files(conf=conf)

    recursive_use = included_recusively(include_graph)
    generated = generated_includes(conf)

    omni = {}
    for idx, fn in enumerate(include_file_list):
        # Path relative to the source tree; keys of the include graph.
        incf = fn[len(conf.paths.source) :]

        if fn.endswith("~"):
            continue

        # for/else: process only files matching no suppressed prefix.
        for prefix in suppressed_page_prefixes:
            if incf.startswith(prefix):
                break
        else:
            omni[incf] = {"id": idx, "name": os.path.splitext(incf)[0], "path": incf}

            if incf in generated:
                omni[incf]["generated"] = True
            else:
                omni[incf]["generated"] = False

            if incf in recursive_use:
                omni[incf]["recursive"] = True
            else:
                omni[incf]["recursive"] = False

            if incf in include_graph:
                omni[incf]["num_clients"] = len(include_graph[incf])

                omni[incf]["clients"] = []
                for cl in include_graph[incf]:
                    cl, ext = os.path.splitext(cl)

                    # NOTE(review): splitext returns extensions with a
                    # leading dot ('.yaml'), so this comparison can never
                    # match -- confirm whether '.yaml' was intended.
                    if ext == "yaml":
                        continue
                    if cl.startswith("/includes/generated/overview") or cl.startswith("/includes/manpage-"):
                        continue

                    omni[incf]["clients"].append(cl)

                if len(omni[incf]["clients"]) == 0:
                    omni[incf]["yaml_only"] = True
                else:
                    omni[incf]["yaml_only"] = False
            else:
                # No recorded clients; note the empty placeholder is a
                # dict here but a list in the branch above.
                omni[incf]["clients"] = dict()
                omni[incf]["num_clients"] = 0

            with open(fn, "r") as f:
                omni[incf]["content"] = [ln.rstrip() for ln in f.readlines()]

    return omni
Ejemplo n.º 12
0
def main(files):
    """Discover yaml option files under './', ingest them, and render options.

    NOTE(review): ``files`` is currently unused by this entry point.
    """
    options = Options()

    # BUG FIX: rendering previously used the leaked loop variable 'fn',
    # which is a NameError when the walk finds no yaml files. Record the
    # directory of the last ingested source instead.
    render_dir = None
    for fn in expand_tree('./', 'yaml'):
        options.ingest(fn)
        render_dir = os.path.dirname(fn)

    for opt in options.iterator():
        renderer = OptionRendered(opt)
        renderer.render(render_dir)
Ejemplo n.º 13
0
def steps_jobs(conf):
    """Yield a render task for each step yaml file under the includes tree."""
    paths = conf.paths
    includes_root = os.path.join(paths.projectroot, paths.includes)
    step_prefix = os.path.join(includes_root, 'step')

    for fn in expand_tree(includes_root, 'yaml'):
        # Only files under includes/step* are step sources.
        if not fn.startswith(step_prefix):
            continue

        out_fn = _get_steps_output_fn(fn, paths)

        yield {'dependency': fn,
               'target': out_fn,
               'job': render_step_file,
               'args': [fn, out_fn]}
Ejemplo n.º 14
0
def api_jobs(conf=None):
    """Yield tasks that render each reference yaml source into an rst file."""
    conf = lazy_conf(conf)

    reference_dir = os.path.join(conf.paths.projectroot,
                                 conf.paths.source, 'reference')

    for source in expand_tree(reference_dir, 'yaml'):
        # Output lives alongside the source, with an .rst extension.
        target = dot_concat(os.path.splitext(source)[0], 'rst')

        yield {'target': target,
               'dependency': source,
               'job': _generate_api_param,
               'args': [source, target, conf]}
Ejemplo n.º 15
0
def include_files_unused(conf, inc_files=None):
    """Return include files that no source file actually includes.

    Args:
        conf: build configuration.
        inc_files: optional pre-computed list of include paths (with the
            leading source-directory prefix already stripped). When None,
            the list is derived by walking the includes tree.

    Returns:
        List of include-file paths absent from the include graph.
    """
    # BUG FIX: the passed-in inc_files was previously ignored and always
    # recomputed; honor it when the caller provides one.
    if inc_files is None:
        # [6:] strips the leading 'source' prefix from each path.
        inc_files = [fn[6:] for fn in
                     expand_tree(os.path.join(conf.paths.includes), None)]

    mapping = include_files(conf=conf)

    results = []
    for fn in inc_files:
        # yaml files feed generators; '~' files are editor backups.
        if fn.endswith('yaml') or fn.endswith('~'):
            continue
        if fn not in mapping:
            results.append(fn)

    return results
Ejemplo n.º 16
0
def steps_jobs(conf):
    """Yield described tasks rendering step yaml files to their outputs."""
    paths = conf.paths
    includes_root = os.path.join(paths.projectroot, paths.includes)
    step_prefix = os.path.join(includes_root, 'step')

    for fn in expand_tree(includes_root, 'yaml'):
        if not fn.startswith(step_prefix):
            continue

        out_fn = _get_steps_output_fn(fn, paths)

        yield {'dependency': fn,
               'target': out_fn,
               'job': render_step_file,
               'args': [fn, out_fn, conf],
               'description': 'generating step file {0} from {1}'.format(out_fn, fn)}
Ejemplo n.º 17
0
def jobs(conf):
    """Yield render tasks for every module spec in the kernel-module tree."""
    conf.km = AttributeDict()
    # Spec files live in a sibling checkout named after the release version.
    conf.km.spec = os.path.abspath(
        os.path.join(conf.paths.projectroot, '..',
                     'mongo-' + conf.version.release, 'modules'))

    spec_tasks = [{'job': BuildConfiguration, 'args': [fn]}
                  for fn in expand_tree(conf.km.spec)]
    data = runner(spec_tasks, force=False, pool=8)

    for item in data:
        yield {'job': render_page, 'args': [item, conf]}
Ejemplo n.º 18
0
def include_files_unused(conf, inc_files=None):
    """Return include files with no entry in the include graph.

    Args:
        conf: build configuration.
        inc_files: optional pre-computed include-path list (leading source
            prefix stripped). Derived from the includes tree when None.
    """
    # BUG FIX: the caller-supplied inc_files was previously discarded and
    # unconditionally recomputed; use it when provided.
    if inc_files is None:
        # [6:] strips the leading 'source' prefix from each path.
        inc_files = [
            fn[6:] for fn in expand_tree(os.path.join(conf.paths.includes), None)
        ]

    mapping = include_files(conf=conf)

    results = []
    for fn in inc_files:
        # yaml sources feed generators; '~' files are editor backups.
        if fn.endswith('yaml') or fn.endswith('~'):
            continue
        if fn not in mapping:
            results.append(fn)

    return results
Ejemplo n.º 19
0
def table_jobs(conf=None):
    """Yield tasks that render each 'table*' yaml include into its outputs."""
    conf = lazy_conf(conf)

    includes_root = os.path.join(conf.paths.projectroot, conf.paths.includes)

    for source in expand_tree(includes_root, 'yaml'):
        if not os.path.basename(source).startswith('table'):
            continue

        target = _get_table_output_name(source)
        list_target = _get_list_table_output_name(source)

        yield {'target': [target, list_target],
               'dependency': source,
               'job': _generate_tables,
               'args': [source, target, list_target]}
Ejemplo n.º 20
0
def steps_jobs(conf):
    """Yield one render task per step yaml file under the includes tree."""
    paths = conf.paths
    base = os.path.join(paths.projectroot, paths.includes)

    for src in expand_tree(base, 'yaml'):
        # Only files under includes/step* are step sources.
        if src.startswith(os.path.join(base, 'step')):
            target = _get_steps_output_fn(src, paths)

            yield {'dependency': src,
                   'target': target,
                   'job': render_step_file,
                   'args': [src, target]}
Ejemplo n.º 21
0
def gettext_jobs(conf=None):
    """Yield copy jobs migrating generated pot files into the locale tree."""
    conf = lazy_conf(conf)

    pot_dir = os.path.join(conf.paths.projectroot, conf.paths.locale, 'pot')
    build_dir = os.path.join(conf.paths.projectroot,
                             conf.paths.branch_output, 'gettext')

    # +1 skips the path separator after the build-directory prefix.
    offset = len(build_dir) + 1

    for fn in expand_tree(build_dir, None):
        yield {'job': copy_if_needed,
               'args': [fn, os.path.join(pot_dir, fn[offset:]), None]}
Ejemplo n.º 22
0
def option_jobs(conf):
    """Ingest option yaml files and yield one render task per option."""
    paths = conf.paths

    base_path = os.path.join(paths.projectroot, paths.includes)
    output_path = os.path.join(base_path, 'option')

    options = Options()
    options.source_dirname = base_path

    # Only yaml files under includes/option feed the options registry.
    for src in expand_tree(base_path, 'yaml'):
        if src.startswith(output_path):
            options.ingest(src)

    if not os.path.exists(output_path):
        os.makedirs(output_path)

    for opt in options.iterator():
        yield {'job': render_option_page, 'args': [opt, output_path]}
Ejemplo n.º 23
0
def gettext_jobs(conf=None):
    """Yield described copy jobs migrating pot files to the locale tree."""
    conf = lazy_conf(conf)

    locale_dirs = os.path.join(conf.paths.projectroot, conf.paths.locale, 'pot')
    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output, 'gettext')

    # +1 skips the separator after the branch-output prefix.
    path_offset = len(branch_output) + 1

    for fn in expand_tree(branch_output, None):
        destination = os.path.join(locale_dirs, fn[path_offset:])

        yield {'job': copy_if_needed,
               'args': [fn, destination, None],
               'description': "migrating po file {0} if needed".format(fn)}
Ejemplo n.º 24
0
def json_output_jobs(conf):
    """Yield tasks converting Sphinx fjson output into cleaned json files.

    Strips header links, markup tags, and HTML entities from each fjson
    file, yields one processing task per file plus a final task writing
    the json file list, then triggers ``json_output``.
    """
    # Substitutions applied to every json file: strip markup and entities.
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        path = os.path.join(conf.paths.branch_output,
                            'json', os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        fjson = dot_concat(path, 'fjson')
        # Renamed from 'json' to avoid shadowing the json module.
        json_fn = dot_concat(path, 'json')

        # mms builds only a subset of files; skip missing fjson inputs.
        if conf.project.name == 'mms':
            if not os.path.exists(fjson):
                continue

        yield {'target': json_fn,
               'dependency': fjson,
               'job': process_json_file,
               # BUG FIX: the format string had no placeholder, so the
               # file name was silently dropped from the description.
               'description': "processing json file {0}".format(json_fn),
               'args': (fjson, json_fn, regexes, conf)}

        outputs.append(json_fn)

    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')

    yield {'target': list_file,
           'dependency': None,
           'description': 'generating json index list {0}'.format(list_file),
           'job': generate_list_file,
           'args': (outputs, list_file, conf)}

    json_output(conf)
Ejemplo n.º 25
0
def gettext_jobs(conf=None):
    """Yield copy jobs that sync generated pot files into the locale tree."""
    conf = lazy_conf(conf)

    pot_root = os.path.join(conf.paths.projectroot, conf.paths.locale, 'pot')
    gettext_root = os.path.join(conf.paths.projectroot,
                                conf.paths.branch_output, 'gettext')

    # +1 skips the path separator following the gettext root.
    prefix_len = len(gettext_root) + 1

    for source in expand_tree(gettext_root, None):
        yield {'job': copy_if_needed,
               'args': [source,
                        os.path.join(pot_root, source[prefix_len:]),
                        None],
               'description': "migrating po file {0} if needed".format(source)}
Ejemplo n.º 26
0
def json_output_jobs(conf):
    """Yield tasks that post-process Sphinx fjson output into json files.

    Yields one cleanup task per source file, a task generating the json
    file list, and finally invokes ``json_output``.
    """
    # Markup/entity substitutions applied to every json file.
    regexes = [(re.compile(r'<a class=\"headerlink\"'), '<a'),
               (re.compile(r'<[^>]*>'), ''), (re.compile(r'&#8220;'), '"'),
               (re.compile(r'&#8221;'), '"'), (re.compile(r'&#8216;'), "'"),
               (re.compile(r'&#8217;'), "'"), (re.compile(r'&#\d{4};'), ''),
               (re.compile(r'&nbsp;'), ''), (re.compile(r'&gt;'), '>'),
               (re.compile(r'&lt;'), '<')]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        path = os.path.join(conf.paths.branch_output, 'json',
                            os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        fjson = dot_concat(path, 'fjson')
        # Renamed from 'json' to avoid shadowing the json module.
        json_fn = dot_concat(path, 'json')

        # mms builds only a subset of files; skip missing fjson inputs.
        if conf.project.name == 'mms':
            if not os.path.exists(fjson):
                continue

        yield {
            'target': json_fn,
            'dependency': fjson,
            'job': process_json_file,
            # BUG FIX: the format string lacked a placeholder, so the file
            # name was silently dropped from the description.
            'description': "processing json file {0}".format(json_fn),
            'args': (fjson, json_fn, regexes, conf)
        }

        outputs.append(json_fn)

    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')

    yield {
        'target': list_file,
        'dependency': None,
        'description': 'generating json index list {0}'.format(list_file),
        'job': generate_list_file,
        'args': (outputs, list_file, conf)
    }

    json_output(conf)
Ejemplo n.º 27
0
def build_sffms(conf):
    """Run the sffms pdf build: clean tex sources, copy outputs, process pdfs."""
    munge_script = os.path.join(conf.paths.buildsystem, 'bin', 'sffms-cleanup')
    base_dir = os.path.join(conf.paths.projectroot, conf.paths.output, 'sffms')

    # One cleanup command per tex file in the sffms output tree.
    preprocess = [{'job': command,
                   'args': [' '.join([munge_script, tex_fn])]}
                  for tex_fn in expand_tree(base_dir, 'tex')]

    pdfs = ingest_yaml_list(os.path.join(conf.paths.builddata, 'pdfs.yaml'))

    count = runner(preprocess)
    print("[pdf] [sffms]: prepossessed {0} sffms files".format(count))

    for pdf in pdfs:
        copy_if_needed(source_file=os.path.join(base_dir, pdf['input']),
                       target_file=os.path.join(base_dir, pdf['output']),
                       name='sffms')

    pdf_processor(conf, pdfs, None)
Ejemplo n.º 28
0
def option_jobs(conf):
    """Collect option yaml sources and yield one render task per option."""
    paths = conf.paths

    include_root = os.path.join(paths.projectroot, paths.includes)
    option_dir = os.path.join(include_root, 'option')

    options = Options()
    options.source_dirname = include_root

    for src in expand_tree(include_root, 'yaml'):
        # Only yaml files under includes/option feed the registry.
        if src.startswith(option_dir):
            options.ingest(src)

    if not os.path.exists(option_dir):
        os.makedirs(option_dir)

    for opt in options.iterator():
        yield {'job': render_option_page,
               'args': [opt, option_dir]}
Ejemplo n.º 29
0
def json_output_jobs(conf=None):
    """Yield tasks that clean Sphinx fjson output into json files.

    Also yields a final task that writes the json file list to the
    branch staging directory.
    """
    conf = lazy_conf(conf)

    # Markup and HTML-entity substitutions applied to every file.
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        relative = os.path.splitext(fn.split(os.path.sep, 1)[1])[0]
        path = os.path.join(conf.paths.branch_output, 'json', relative)
        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        # mms builds only a subset of files; skip missing fjson inputs.
        if conf.project.name == 'mms' and not os.path.exists(fjson):
            continue

        yield {'target': json,
               'dependency': fjson,
               'job': process_json_file,
               'args': (fjson, json, regexes, conf)}

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')

    yield {'target': list_file,
           'dependency': None,
           'job': generate_list_file,
           'args': (outputs, list_file, conf)}
Ejemplo n.º 30
0
def steps_jobs(conf):
    """Yield a described render task for every step yaml source file."""
    paths = conf.paths
    includes_dir = os.path.join(paths.projectroot, paths.includes)

    for fn in expand_tree(includes_dir, 'yaml'):
        # Only files under includes/step* are step sources.
        if not fn.startswith(os.path.join(includes_dir, 'step')):
            continue

        out_fn = _get_steps_output_fn(fn, paths)
        description = 'generating step file {0} from {1}'.format(out_fn, fn)

        yield {'dependency': fn,
               'target': out_fn,
               'job': render_step_file,
               'args': [fn, out_fn, conf],
               'description': description}
Ejemplo n.º 31
0
def generated_includes(conf):
    """Map generated include files (toc/ref specs, steps) to their sources."""
    toc_spec_files = []
    step_files = []
    for fn in expand_tree(os.path.join(conf.paths.includes),
                          input_extension='yaml'):
        base = os.path.basename(fn)

        # toc-spec and ref-spec files share the same handling.
        if base.startswith('toc-spec') or base.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('steps'):
            step_files.append(fn)

    maskl = len(conf.paths.source)
    # Includes path relative to the source tree; prefixes each dependency.
    path_prefix = conf.paths.includes[maskl:]

    mapping = {}
    for spec_file in toc_spec_files:
        if not os.path.exists(spec_file):
            continue

        data = ingest_yaml_doc(spec_file)
        mapping[spec_file[maskl:]] = [os.path.join(path_prefix, src)
                                      for src in data['sources']]

    for step_def in step_files:
        data = ingest_yaml_list(step_def)

        deps = [step['source']['file'] for step in data if 'source' in step]

        if deps:
            mapping[step_def[maskl:]] = [os.path.join(path_prefix, dep)
                                         for dep in deps]

    return mapping
Ejemplo n.º 32
0
def option_jobs(conf):
    """Ingest option yaml sources and yield described render tasks."""
    paths = conf.paths

    base_path = os.path.join(paths.projectroot, paths.includes)
    output_path = os.path.join(base_path, "option")

    options = Options()
    options.source_dirname = base_path

    for src in expand_tree(base_path, "yaml"):
        # Only yaml files under includes/option feed the registry.
        if src.startswith(output_path):
            options.ingest(src)

    if not os.path.exists(output_path):
        os.makedirs(output_path)

    for opt in options.iterator():
        yield {"job": render_option_page,
               "args": [opt, output_path],
               "description": 'generating option "{0}" for "{1}"'.format(opt.name, opt.program)}
Ejemplo n.º 33
0
def generate_report(mask, output_file=None, conf=None, data=None):
    """Collect report output for json docs matching mask and return it.

    When ``data`` is supplied it is returned unchanged; otherwise report
    jobs are run over the json documents in the current branch output.
    """
    conf = lazy_config(conf)

    base_path = os.path.join(conf.build.paths.output,
                             conf.git.branches.current, 'json')

    if mask is not None:
        # Normalize to a path rooted at the json output directory.
        if mask.startswith('/'):
            mask = mask[1:]
        mask = os.path.join(base_path, mask)

    if data is not None:
        return data

    docs = expand_tree(base_path, '.json')
    return runner(jobs=report_jobs(docs, mask), retval='results')
Ejemplo n.º 34
0
def generated_includes(conf):
    """Return a mapping of generated include files to their source deps."""
    toc_spec_files = []
    step_files = []

    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        basename = os.path.basename(fn)

        if basename.startswith('toc-spec'):
            toc_spec_files.append(fn)
        elif basename.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif basename.startswith('steps'):
            step_files.append(fn)

    maskl = len(conf.paths.source)
    # Includes directory relative to the source tree.
    path_prefix = conf.paths.includes[maskl:]

    mapping = {}

    for spec_file in toc_spec_files:
        if not os.path.exists(spec_file):
            continue

        data = ingest_yaml_doc(spec_file)
        mapping[spec_file[maskl:]] = [os.path.join(path_prefix, src)
                                      for src in data['sources']]

    for step_def in step_files:
        step_deps = [step['source']['file']
                     for step in ingest_yaml_list(step_def)
                     if 'source' in step]

        if step_deps:
            mapping[step_def[maskl:]] = [os.path.join(path_prefix, dep)
                                         for dep in step_deps]

    return mapping
Ejemplo n.º 35
0
def toc_jobs(conf):
    """Yield build tasks that regenerate table-of-contents output files.

    Skips table and step yaml files (handled by their own generators) and
    yields one ``_generate_toc_tree`` task per toc/ref/spec source.
    """
    paths = copy(conf.paths)

    for fn in expand_tree(paths.includes, 'yaml'):
        if fn.startswith(os.path.join(paths.includes, 'table')):
            continue
        elif fn.startswith(os.path.join(paths.includes, 'step')):
            continue
        elif len(fn) >= 24:
            base_name = _get_toc_base_name(fn)

            # Format tag at a fixed path offset: 'spec' at [20:24],
            # other tags ('toc', 'ref') at [16:19].
            fmt = fn[20:24]
            if fmt != 'spec':
                fmt = fn[16:19]

            o = {
                'dependency': os.path.join(paths.projectroot, fn),
                'job': _generate_toc_tree,
                'target': [],
                'args': [fn, fmt, base_name, paths, conf],
                # BUG FIX: second placeholder was {0}, repeating the format
                # tag instead of naming the source file.
                'description': 'generating {0} toc from {1}'.format(fmt, fn)
            }

            if fmt != 'spec':
                o['target'].append(
                    _get_toc_output_name(base_name, 'toc', paths))

            is_ref_spec = fn.startswith(
                os.path.join(os.path.dirname(fn), 'ref-spec'))

            if not is_ref_spec and (fmt == 'toc' or fmt == 'spec'):
                o['target'].append(
                    _get_toc_output_name(base_name, 'dfn-list', paths))
            elif fmt == 'ref' or is_ref_spec:
                o['target'].append(
                    _get_toc_output_name(base_name, 'table', paths))

            yield o
Ejemplo n.º 36
0
def external_jobs(conf=None):
    """Yield tasks from pre-stage job modules found under 'local/'.

    Loads every python module under the project's ``local`` directory and
    yields the tasks produced by each module that defines ``jobs`` and a
    ``stage`` starting with 'pre'.
    """
    conf = lazy_conf(conf)

    ext_mod_path = os.path.join(conf.paths.projectroot, 'local')
    if not os.path.exists(ext_mod_path):
        # BUG FIX: 'raise StopIteration' inside a generator is a
        # RuntimeError under PEP 479 (Python 3.7+); use 'return'.
        return

    external_mods = []

    for mod in expand_tree(ext_mod_path, 'py'):
        path, name = os.path.split(mod)
        name, _ = os.path.splitext(name)

        # BUG FIX: find_module returns an open file handle that the
        # original leaked (and bound to the name 'file', shadowing the
        # builtin); close it once the module is loaded.
        mod_file, filename, data = imp.find_module(name, [path])
        try:
            imp.load_module(name, mod_file, mod, data)
        finally:
            if mod_file is not None:
                mod_file.close()
        external_mods.append(name)

    for name in external_mods:
        mod = sys.modules[name]
        if 'jobs' in dir(mod) and 'stage' in dir(mod) and mod.stage.startswith('pre'):
            for task in mod.jobs(conf):
                yield task
Ejemplo n.º 37
0
def json_output_jobs(conf=None):
    """Yield tasks cleaning fjson output plus a json file-list task."""
    conf = lazy_conf(conf)

    # Substitutions that strip markup and decode HTML entities.
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        stem = os.path.splitext(fn.split(os.path.sep, 1)[1])[0]
        path = os.path.join(conf.paths.branch_output, 'json', stem)
        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        # mms builds only a subset of files; skip missing fjson inputs.
        if conf.project.name == 'mms' and not os.path.exists(fjson):
            continue

        yield {'target': json,
               'dependency': fjson,
               'job': process_json_file,
               'args': (fjson, json, regexes, conf)}

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')

    yield {'target': list_file,
           'dependency': None,
           'job': generate_list_file,
           'args': (outputs, list_file, conf)}
Ejemplo n.º 38
0
def external_jobs(conf=None):
    """Yield tasks defined by pre-stage job modules under 'local/'.

    Each python module in the project's ``local`` directory is imported;
    modules exposing ``jobs`` and a ``stage`` beginning with 'pre' have
    their tasks yielded.
    """
    conf = lazy_conf(conf)

    ext_mod_path = os.path.join(conf.paths.projectroot, 'local')
    if not os.path.exists(ext_mod_path):
        # BUG FIX: raising StopIteration inside a generator is a
        # RuntimeError under PEP 479 (Python 3.7+); 'return' ends it cleanly.
        return

    external_mods = []

    for mod in expand_tree(ext_mod_path, 'py'):
        path, name = os.path.split(mod)
        name, _ = os.path.splitext(name)

        # BUG FIX: close the file handle returned by find_module (the
        # original leaked it and shadowed the 'file' builtin).
        mod_file, filename, data = imp.find_module(name, [path])
        try:
            imp.load_module(name, mod_file, mod, data)
        finally:
            if mod_file is not None:
                mod_file.close()
        external_mods.append(name)

    for name in external_mods:
        mod = sys.modules[name]
        if 'jobs' in dir(mod) and 'stage' in dir(mod) and mod.stage.startswith(
                'pre'):
            for task in mod.jobs(conf):
                yield task
Ejemplo n.º 39
0
def include_file_data(conf):
    """Build a metadata map describing every include file in the project.

    Returns a dict keyed by each include file's path relative to the source
    directory. Each entry records an id, base name, whether the file is
    generated, whether it is included recursively, its client files (the
    files that include it), and its text content.

    :param conf: configuration object providing ``paths.includes`` and
        ``paths.source``.
    """
    inc_path = os.path.join(conf.paths.includes)
    include_file_list = expand_tree(path=inc_path, input_extension=None)
    include_graph = include_files(conf=conf)

    recursive_use = included_recusively(include_graph)
    generated = generated_includes(conf)

    omni = {}
    for idx, fn in enumerate(include_file_list):
        # path relative to the source directory, e.g. '/includes/foo.rst'
        incf = fn[len(conf.paths.source):]

        if fn.endswith('~'):
            # skip editor backup files
            continue

        for prefix in suppressed_page_prefixes:
            if incf.startswith(prefix):
                break
        else:
            omni[incf] = {
                'id': idx,
                'name': os.path.splitext(incf)[0],
                'path': incf,
                'generated': incf in generated,
                'recursive': incf in recursive_use,
            }

            if incf in include_graph:
                omni[incf]['num_clients'] = len(include_graph[incf])

                omni[incf]['clients'] = []
                for cl in include_graph[incf]:
                    cl, ext = os.path.splitext(cl)

                    # os.path.splitext() keeps the leading dot, so the old
                    # comparison against 'yaml' never matched and yaml
                    # clients were never skipped.
                    if ext == '.yaml':
                        continue
                    if (cl.startswith('/includes/generated/overview') or
                        cl.startswith('/includes/manpage-')):
                        continue

                    omni[incf]['clients'].append(cl)

                omni[incf]['yaml_only'] = len(omni[incf]['clients']) == 0
            else:
                # use a list here too, for consistency with the branch above
                omni[incf]['clients'] = []
                omni[incf]['num_clients'] = 0

            with open(fn, 'r') as f:
                omni[incf]['content'] = [ln.rstrip() for ln in f.readlines()]

    return omni
Ejemplo n.º 40
0
def include_file_data(conf):
    """Return per-include-file metadata for the project's include directory.

    The result maps each include file's source-relative path to a record
    containing: a sequential ``id``, the extension-less ``name``, the
    ``path`` itself, ``generated`` and ``recursive`` flags, client-file
    information (``clients``, ``num_clients``, ``yaml_only``), and the
    file's ``content`` as a list of right-stripped lines.

    :param conf: configuration object providing ``paths.includes`` and
        ``paths.source``.
    """
    inc_path = os.path.join(conf.paths.includes)
    include_file_list = expand_tree(path=inc_path, input_extension=None)
    include_graph = include_files(conf=conf)

    recursive_use = included_recusively(include_graph)
    generated = generated_includes(conf)

    omni = {}
    for idx, fn in enumerate(include_file_list):
        # strip the source-directory prefix to get a source-relative path
        incf = fn[len(conf.paths.source):]

        if fn.endswith('~'):
            # ignore editor backup files
            continue

        for prefix in suppressed_page_prefixes:
            if incf.startswith(prefix):
                break
        else:
            record = {
                'id': idx,
                'name': os.path.splitext(incf)[0],
                'path': incf,
                'generated': incf in generated,
                'recursive': incf in recursive_use,
            }
            omni[incf] = record

            if incf in include_graph:
                record['num_clients'] = len(include_graph[incf])
                record['clients'] = []

                for cl in include_graph[incf]:
                    cl, ext = os.path.splitext(cl)

                    # splitext() returns the extension with its leading
                    # dot; comparing to 'yaml' (without the dot) never
                    # matched, so yaml clients leaked into the list.
                    if ext == '.yaml':
                        continue
                    if (cl.startswith('/includes/generated/overview')
                            or cl.startswith('/includes/manpage-')):
                        continue

                    record['clients'].append(cl)

                record['yaml_only'] = len(record['clients']) == 0
            else:
                # keep the empty-clients value a list, matching the branch above
                record['clients'] = []
                record['num_clients'] = 0

            with open(fn, 'r') as f:
                record['content'] = [ln.rstrip() for ln in f.readlines()]

    return omni