Example #1
def dump_file_hashes(conf):
    output = conf.system.dependency_cache

    o = { 'conf': conf.dict(),
          'time': datetime.datetime.utcnow().strftime("%s"),
          'files': { }
        }

    files = expand_tree(os.path.join(conf.paths.projectroot, conf.paths.source), None)

    fmap = o['files']

    for fn in files:
        if os.path.exists(fn):
            fmap[fn] = md5_file(fn)

    output_dir = os.path.dirname(output)

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(output, 'w') as f:
        json.dump(o, f)

    logger.info('wrote dependency cache to: {0}'.format(output))
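The md5_file() helper used above is not shown in any of these examples; a minimal sketch of such a helper, assuming it simply returns the hex digest of a file's contents, might look like this:

import hashlib

def md5_file(fn, block_size=2 ** 20):
    # hash the file in chunks so large files do not need to fit in memory
    hasher = hashlib.md5()
    with open(fn, 'rb') as f:
        for block in iter(lambda: f.read(block_size), b''):
            hasher.update(block)
    return hasher.hexdigest()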
Example #2
def example_tasks(conf, app):
    include_dir = os.path.join(conf.paths.projectroot, conf.paths.includes)
    fn_prefix = os.path.join(include_dir, 'example')

    example_sources = [ fn for fn in
                        expand_tree(include_dir, 'yaml')
                        if fn.startswith(fn_prefix) ]

    d = ExampleDataCache(example_sources, conf)

    if not os.path.isdir(fn_prefix):
        os.makedirs(fn_prefix)

    for fn in d.cache.keys():
        exmpf = d.cache[fn]
        basename = fn[len(fn_prefix)+1:-5]

        out_fn = os.path.join(conf.paths.projectroot,
                              conf.paths.branch_source,
                              'includes', 'examples', basename) + '.rst'

        t = app.add('task')
        t.target = out_fn
        t.dependency = fn
        t.job = write_full_example
        t.args = (exmpf.collection, exmpf.examples, out_fn)
        t.description = 'generate an example for ' + basename

    logger.debug('added all tasks for example generation')
Example #3
def finalize_single_html_jobs(builder, conf):
    pjoin = os.path.join

    single_html_dir = get_single_html_dir(conf)

    if not os.path.exists(single_html_dir):
        os.makedirs(single_html_dir)

    try:
        manual_single_html(input_file=pjoin(conf.paths.branch_output, builder,
                                            'contents.html'),
                           output_file=pjoin(single_html_dir, 'index.html'))
    except (IOError, OSError):
        manual_single_html(input_file=pjoin(conf.paths.branch_output, builder,
                                            'index.html'),
                           output_file=pjoin(single_html_dir, 'index.html'))
    copy_if_needed(source_file=pjoin(conf.paths.branch_output, builder,
                                     'objects.inv'),
                   target_file=pjoin(single_html_dir, 'objects.inv'))

    single_path = pjoin(single_html_dir, '_static')

    for fn in expand_tree(pjoin(conf.paths.branch_output, builder, '_static'),
                          None):

        yield {
            'job': copy_if_needed,
            'args': [fn, pjoin(single_path, os.path.basename(fn))],
            'target': None,
            'dependency': None
        }
Example #4
def gettext_tasks(conf):
    locale_dirs = os.path.join(conf.paths.projectroot,
                               conf.paths.locale, 'pot')

    builder_name = resolve_builder_path('gettext', conf.project.edition, None, conf)

    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output,
                                 builder_name)

    path_offset = len(branch_output) + 1

    tasks = []
    for fn in expand_tree(branch_output, None):
        target = os.path.join(locale_dirs, fn[path_offset:])
        source = fn

        t = libgiza.task.Task(job=copy_if_needed,
                              args=(source, target, None),
                              target=target,
                              dependency=source,
                              description="migrating po file {0} if needed".format(fn))
        tasks.append(t)

    return tasks
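The path_offset = len(branch_output) + 1 idiom above strips the build-directory prefix plus its trailing separator, so each file keeps only its path relative to the builder output before being re-rooted under the locale tree. A standalone sketch of the same slicing, using hypothetical paths:

import os

branch_output = os.path.join('build', 'master', 'gettext')
locale_dirs = os.path.join('locale', 'pot')

fn = os.path.join(branch_output, 'reference', 'index.pot')
path_offset = len(branch_output) + 1           # also skips the separator

relative = fn[path_offset:]                    # 'reference/index.pot' on POSIX
target = os.path.join(locale_dirs, relative)   # 'locale/pot/reference/index.pot'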
Example #5
def example_tasks(conf, app):
    include_dir = os.path.join(conf.paths.projectroot, conf.paths.includes)
    fn_prefix = os.path.join(include_dir, 'example')

    example_sources = [ fn for fn in
                        expand_tree(include_dir, 'yaml')
                        if fn.startswith(fn_prefix) ]

    d = ExampleDataCache(example_sources, conf)

    if not os.path.isdir(fn_prefix):
        os.makedirs(fn_prefix)

    for fn in d.cache.keys():
        exmpf = d.cache[fn]
        basename = fn[len(fn_prefix)+1:-5]

        out_fn = os.path.join(conf.paths.projectroot,
                              conf.paths.branch_source,
                              'includes', 'examples', basename) + '.rst'

        t = app.add('task')
        t.target = out_fn
        t.dependency = fn
        t.job = write_full_example
        t.args = (exmpf.collection, exmpf.examples, out_fn)
        t.description = 'generate an example for ' + basename

    logger.debug('added all tasks for example generation')
Example #6
def finalize_single_html_jobs(builder, conf):
    pjoin = os.path.join

    single_html_dir = get_single_html_dir(conf)

    if not os.path.exists(single_html_dir):
        os.makedirs(single_html_dir)

    try:
        manual_single_html(input_file=pjoin(conf.paths.branch_output,
                                            builder, 'contents.html'),
                           output_file=pjoin(single_html_dir, 'index.html'))
    except (IOError, OSError):
        manual_single_html(input_file=pjoin(conf.paths.branch_output,
                                            builder, 'index.html'),
                           output_file=pjoin(single_html_dir, 'index.html'))
    copy_if_needed(source_file=pjoin(conf.paths.branch_output,
                                     builder, 'objects.inv'),
                   target_file=pjoin(single_html_dir, 'objects.inv'))

    single_path = pjoin(single_html_dir, '_static')

    for fn in expand_tree(pjoin(conf.paths.branch_output,
                                builder, '_static'), None):

        yield {
            'job': copy_if_needed,
            'args': [fn, pjoin(single_path, os.path.basename(fn))],
            'target': None,
            'dependency': None
        }
Example #7
def finalize_single_html_tasks(builder, conf):
    single_html_dir = get_single_html_dir(conf)

    # create directory when registering tasks.
    safe_create_directory(single_html_dir)
    safe_create_directory(os.path.join(single_html_dir, '_static'))

    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        artifact_dir = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output,
                                    '-'.join((builder, conf.project.edition)))
    else:
        artifact_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_output, builder)

    tasks = [giza.libgiza.task.Task(job=finalize_single_html,
                                    args=(single_html_dir, artifact_dir, conf),
                                    target=True,
                                    dependency=None,
                                    description="migrating singlehtml")]

    for fn in expand_tree(os.path.join(artifact_dir, '_static'), None):
        target_fn = os.path.join(single_html_dir, '_static', os.path.basename(fn))

        tasks.append(giza.libgiza.task.Task(job=copy_if_needed,
                                            args=(fn, target_fn),
                                            target=target_fn,
                                            dependency=fn,
                                            description="moving static files to the singlehtml build"))

    return tasks
Example #8
def gettext_tasks(conf):
    locale_dirs = os.path.join(conf.paths.projectroot,
                               conf.paths.locale, 'pot')

    builder_name = resolve_builder_path('gettext', conf.project.edition, None, conf)

    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output,
                                 builder_name)

    path_offset = len(branch_output) + 1

    tasks = []
    for fn in expand_tree(branch_output, None):
        target = os.path.join(locale_dirs, fn[path_offset:])
        source = fn

        t = giza.libgiza.task.Task(job=copy_if_needed,
                                   args=(source, target, None),
                                   target=target,
                                   dependency=source,
                                   description="migrating po file {0} if needed".format(fn))
        tasks.append(t)

    logger.info("if you added files to the corpus since your last gettext build, "
                "please run the gettext build a second time for complete finalization.")

    return tasks
Example #9
def gettext_tasks(conf):
    locale_dirs = os.path.join(conf.paths.projectroot, conf.paths.locale,
                               'pot')

    builder_name = resolve_builder_path('gettext', conf.project.edition, None,
                                        conf)

    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output, builder_name)

    path_offset = len(branch_output) + 1

    tasks = []
    for fn in expand_tree(branch_output, None):
        target = os.path.join(locale_dirs, fn[path_offset:])
        source = fn

        t = libgiza.task.Task(
            job=copy_if_needed,
            args=(source, target, None),
            target=target,
            dependency=source,
            description="migrating po file {0} if needed".format(fn))
        tasks.append(t)

    logger.info(
        "if you added files to the corpus since your last gettext build, "
        "please run the gettext build a second time for complete finalization."
    )

    return tasks
Example #10
def toc_tasks(conf, app):
    paths = conf.paths

    for fn in expand_tree(paths.includes, 'yaml'):
        if not (fn.startswith(os.path.join(paths.includes, 'toc')) or
                fn.startswith(os.path.join(paths.includes, 'ref-toc')) or
                fn.startswith(os.path.join(paths.includes, 'ref-spec'))):
            continue
        elif len(fn) >= 24:
            task = app.add('task')
            base_name = _get_toc_base_name(fn)
            target = []

            fmt = fn[20:24]
            if fmt != 'spec':
                fmt = fn[16:19]

            task.dependency = os.path.join(paths.projectroot, fn)
            task.job = _generate_toc_tree
            task.args = [fn, fmt, base_name, paths, conf]
            task.description = 'generating {0} from {1}'.format(fmt, fn)

            if fmt != 'spec':
                target.append(_get_toc_output_name(base_name, 'toc', paths))

            is_ref_spec = fn.startswith(os.path.join(os.path.dirname(fn), 'ref-spec'))

            if not is_ref_spec and (fmt == 'toc' or fmt == 'spec'):
                target.append(_get_toc_output_name(base_name, 'dfn-list', paths))
            elif fmt == 'ref' or is_ref_spec:
                target.append(_get_toc_output_name(base_name, 'table', paths))

            task.target = target

            logger.debug('added task for generating toc from {0}'.format(fn))
Example #11
def manpage_url_tasks(builder, conf):
    project_source = os.path.join(conf.paths.projectroot,
                                  conf.paths.source)

    top_level_items = set()
    for fs_obj in os.listdir(project_source):
        if fs_obj.startswith('.static') or fs_obj == 'index.txt':
            continue
        if os.path.isdir(os.path.join(project_source, fs_obj)):
            top_level_items.add(fs_obj)
        if fs_obj.endswith('.txt'):
            top_level_items.add(fs_obj[:-4])

    top_level_items = '/' + r'[^\s]*|/'.join(top_level_items) + r'[^\s]*'

    re_string = r'(\\fB({0})\\fP)'.format(top_level_items).replace(r'-', r'\-')
    subst = conf.project.url + '/' + conf.project.tag + r'\2'

    regex_obj = (re.compile(re_string), subst)

    tasks = []
    for manpage in expand_tree(os.path.join(conf.paths.projectroot,
                                            conf.paths.output,
                                            conf.git.branches.current,
                                            builder), ['1', '5']):

        description = 'processing urls in manpage file: {0}'.format(manpage)
        tasks.append(libgiza.task.Task(job=manpage_url,
                                       args=(regex_obj, manpage),
                                       target=manpage,
                                       dependency=None,
                                       description=description))

    return tasks
Example #12
def task(task, conf):
    if task == 'source':
        legacy_tables = expand_tree(
            os.path.join(conf.paths.projectroot, conf.paths.source,
                         'reference'), 'yaml')
        dirname = os.path.join(conf.paths.projectroot, conf.paths.includes,
                               'apiargs')
        safe_create_directory(dirname)
        offset = len(os.path.join(conf.paths.projectroot, conf.paths.source))
    elif task == 'branch':
        legacy_tables = expand_tree(
            os.path.join(conf.paths.projectroot, conf.paths.branch_source,
                         'reference'), 'yaml')
        safe_create_directory(conf.system.content.apiargs.output_dir)
        offset = len(
            os.path.join(conf.paths.projectroot, conf.paths.branch_source))
    else:
        logger.critical('cannot perform apiarg migration for: ' + str(task))
        return

    new_apiarg = []
    new_fns = []
    for fn in legacy_tables:
        new_data, new_fn = migrate_legacy_apiarg(task, fn, conf)
        if new_fn in new_fns:
            logger.error("duplicate: {0}, from: {1}".format(
                os.path.basename(new_fn), os.path.basename(fn)))
        else:
            new_fns.append(new_fn[offset:])
            new_apiarg.append((new_fn, new_data))

    for fn, data in new_apiarg:
        write_yaml(data, fn)
    # for fn in legacy_tables:
    #     os.remove(fn)

    new_sources = conf.system.content.apiargs.sources

    if len(new_sources) != len(legacy_tables) and len(legacy_tables) != len(
            new_fns):
        logger.critical('problem in apiargs table migration.')
    else:
        logger.info('legacy apiargs tables migrated successfully.')

    legacy_tables = [fn[offset:] for fn in legacy_tables]
    return zip(legacy_tables, new_fns)
Example #13
    def sources(self):
        files = expand_tree(self.dir, 'yaml')

        sources = set()
        for prefix, fn in itertools.product(self.prefixes, files):
            if fn.startswith(self.output_dir[:-1]):
                sources.add(fn)

        return list(sources)
Example #14
def json_output_tasks(conf):
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []

    tasks = []

    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>

        if 'edition' in conf.project and conf.project.edition != conf.project.name:
            path = os.path.join(conf.paths.branch_output,
                                'json-' + conf.project.edition,
                                os.path.splitext(fn.split(os.path.sep, 1)[1])[0])

        else:
            path = os.path.join(conf.paths.branch_output,
                                'json', os.path.splitext(fn.split(os.path.sep, 1)[1])[0])

        fjson = path + '.fjson'
        jsonf = path + '.json'

        task = libgiza.task.Task(job=process_json_file,
                                 args=(fjson, jsonf, regexes, conf),
                                 target=jsonf,
                                 dependency=fjson,
                                 description="processing json file: {0}".format(jsonf))
        tasks.append(task)
        outputs.append(jsonf)

    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    tasks.append(libgiza.task.Task(job=generate_list_file,
                                   args=(outputs, list_file, conf),
                                   target=list_file,
                                   dependency=None,
                                   description="generating list of json files"))

    transfer = libgiza.task.Task(job=json_output,
                                 args=[conf],
                                 target=True,
                                 dependency=None,
                                 description='transfer json output to public directory')

    return tasks, transfer
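To illustrate the path = build/<branch>/json/<filename> comment above: the expression drops the leading source/ component of each file name and its extension before re-rooting it under the json builder output. A worked sketch with a hypothetical source file, assuming conf.paths.branch_output resolves to something like 'build/master':

import os

fn = os.path.join('source', 'tutorial', 'install.txt')

relative = fn.split(os.path.sep, 1)[1]     # 'tutorial/install.txt'
stem = os.path.splitext(relative)[0]       # 'tutorial/install'

path = os.path.join('build', 'master', 'json', stem)
# path  -> 'build/master/json/tutorial/install'
# fjson -> path + '.fjson', jsonf -> path + '.json'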
Example #15
def example_clean(conf, app):
    include_dir = os.path.join(conf.paths.projectroot, conf.paths.includes)
    fn_prefix = os.path.join(include_dir, "example")

    example_sources = [fn for fn in expand_tree(include_dir, "yaml") if fn.startswith(fn_prefix)]

    for fn in example_sources:
        basename = fn[len(fn_prefix) + 1 : -5]

        out_fn = (
            os.path.join(conf.paths.projectroot, conf.paths.branch_source, "includes", "examples", basename) + ".rst"
        )
Example #16
def json_output_tasks(conf, app):
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>

        if 'edition' in conf.project and conf.project.edition != conf.project.name:
            path = os.path.join(conf.paths.branch_output,
                                'json-' + conf.project.edition,
                                os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        else:
            path = os.path.join(conf.paths.branch_output,
                                'json', os.path.splitext(fn.split(os.path.sep, 1)[1])[0])

        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        task = app.add('task')
        task.target = json
        task.dependency = fjson
        task.job = process_json_file
        task.description = "processing json file: {0}".format(json)
        task.args = [fjson, json, regexes, conf]

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')

    list_task = app.add('task')
    list_task.target = list_file
    list_task.job = generate_list_file
    list_task.args = [outputs, list_file, conf]

    output = app.add('app')
    out_task = output.add('task')
    out_task.job = json_output
    out_task.args = [conf]
    out_task.description = 'transfer json output to public directory'
Example #17
def json_output_tasks(conf, app):
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>

        if 'edition' in conf.project and conf.project.edition != conf.project.name:
            path = os.path.join(conf.paths.branch_output,
                                'json-' + conf.project.edition,
                                os.path.splitext(fn.split(os.path.sep, 1)[1])[0])

        else:
            path = os.path.join(conf.paths.branch_output,
                                'json', os.path.splitext(fn.split(os.path.sep, 1)[1])[0])

        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        task = app.add('task')
        task.target = json
        task.dependency = fjson
        task.job = process_json_file
        task.description = "processing json file: {0}".format(json)
        task.args = [fjson, json, regexes, conf]

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')

    list_task = app.add('task')
    list_task.target = list_file
    list_task.job = generate_list_file
    list_task.args = [outputs, list_file, conf]

    output = app.add('app')
    out_task = output.add('task')
    out_task.job = json_output
    out_task.args = [conf]
    out_task.description = 'transfer json output to public directory'
Example #18
def include_files_unused(conf, inc_files=None):
    inc_files = [ fn[6:] for fn in expand_tree(os.path.join(conf.paths.includes), None) ]
    mapping = include_files(conf=conf)

    results = []
    for fn in inc_files:
        if fn.endswith('yaml') or fn.endswith('~'):
            continue
        if fn not in mapping.keys():
            results.append(fn)

    return results
Example #19
def example_clean(conf, app):
    include_dir = os.path.join(conf.paths.projectroot, conf.paths.includes)
    fn_prefix = os.path.join(include_dir, 'example')

    example_sources = [ fn for fn in
                        expand_tree(include_dir, 'yaml')
                        if fn.startswith(fn_prefix) ]

    for fn in example_sources:
        basename = fn[len(fn_prefix)+1:-5]

        out_fn = os.path.join(conf.paths.projectroot,
                              conf.paths.branch_source,
                              'includes', 'examples', basename) + '.rst'
Example #20
def steps_tasks(conf, app):
    paths = conf.paths

    for fn in expand_tree(os.path.join(paths.projectroot, paths.includes), 'yaml'):
        if fn.startswith(os.path.join(paths.projectroot, paths.includes, 'step')):
            out_fn = _get_steps_output_fn(fn, paths)

            task = app.add('task')
            task.dependency = fn
            task.target = out_fn
            task.job = render_step_file
            task.args = [fn, out_fn, conf]
            logger.debug('added task to generate step: {0}'.format(out_fn))
Example #21
def include_files_unused(conf, inc_files=None):
    inc_files = [
        fn[6:] for fn in expand_tree(os.path.join(conf.paths.includes), None)
    ]
    keys = set(include_files(conf=conf).keys())

    results = []
    for fn in inc_files:
        if fn.endswith('yaml') or fn.endswith('~'):
            continue
        if fn not in keys:
            results.append(fn)

    return results
Example #22
def sphinx_tasks(sconf, conf):
    # Projects that use the append functionality in extracts or similar content
    # generators will rebuild this task every time.

    deps = [os.path.join(conf.paths.projectroot, 'conf.py')]
    deps.extend(conf.system.files.get_configs('sphinx_local'))
    deps.extend(expand_tree(os.path.join(conf.paths.projectroot, conf.paths.branch_source), 'txt'))

    return Task(job=run_sphinx,
                args=(sconf.builder, sconf, conf),
                target=os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output,
                                    sconf.builder),
                dependency=deps,
                description='building {0} with sphinx'.format(sconf.builder))
Example #23
def sphinx_tasks(sconf, conf):
    # Projects that use the append functionality in extracts or similar content
    # generators will rebuild this task every time.

    deps = [os.path.join(conf.paths.projectroot, 'conf.py')]
    deps.extend(conf.system.files.get_configs('sphinx_local'))
    deps.extend(expand_tree(os.path.join(conf.paths.projectroot, conf.paths.branch_source), 'txt'))

    return Task(job=run_sphinx,
                args=(sconf.builder, sconf, conf),
                target=os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output,
                                    sconf.builder),
                dependency=deps,
                description='building {0} with sphinx'.format(sconf.builder))
Example #24
def steps_tasks(conf, app):
    paths = conf.paths

    for fn in expand_tree(os.path.join(paths.projectroot, paths.includes),
                          'yaml'):
        if fn.startswith(
                os.path.join(paths.projectroot, paths.includes, 'step')):
            out_fn = _get_steps_output_fn(fn, paths)

            task = app.add('task')
            task.dependency = fn
            task.target = out_fn
            task.job = render_step_file
            task.args = [fn, out_fn, conf]
            logger.debug('added task to generate step: {0}'.format(out_fn))
Example #25
def finalize_single_html_tasks(builder, conf, app):
    single_html_dir = get_single_html_dir(conf)

    safe_create_directory(single_html_dir)

    found_src = False
    for base_path in (builder, hyph_concat(builder, conf.project.edition)):
        if found_src is True:
            break

        for fn in [
                os.path.join(base_path, f)
                for f in ('contents.html', 'index.html')
        ]:
            src_fn = os.path.join(conf.paths.projectroot,
                                  conf.paths.branch_output, fn)

            if os.path.exists(src_fn):
                manual_single_html(input_file=src_fn,
                                   output_file=os.path.join(
                                       single_html_dir, 'index.html'))

                copy_if_needed(
                    source_file=os.path.join(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             base_path, 'objects.inv'),
                    target_file=os.path.join(single_html_dir, 'objects.inv'))

                found_src = True

                break

    if found_src is not True:
        raise FileNotFoundError('singlehtml source file')

    single_path = os.path.join(single_html_dir, '_static')

    for fn in expand_tree(os.path.join(os.path.dirname(src_fn), '_static'),
                          None):
        target_fn = os.path.join(single_path, os.path.basename(fn))

        task = app.add('task')
        task.job = copy_if_needed
        task.target = target_fn
        task.dependency = fn
        task.args = [fn, target_fn]
        task.description = "migrating static files to the HTML build"
Example #26
def generated_includes(conf):
    toc_spec_files = []
    step_files = []
    for fn in expand_tree(os.path.join(conf.paths.includes),
                          input_extension='yaml'):
        base = os.path.basename(fn)

        if base.startswith('toc-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('steps'):
            step_files.append(fn)
        elif base.startswith('example'):
            # example files have, for our purposes, the same structure as
            # steps, so we can treat them the same:
            step_files.append(fn)

    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]
    mapping = {}
    for spec_file in toc_spec_files:
        if os.path.exists(spec_file):
            data = ingest_yaml_doc(spec_file)
        else:
            continue

        deps = [os.path.join(path_prefix, i) for i in data['sources']]

        mapping[spec_file[maskl:]] = deps

    for step_def in step_files:
        data = ingest_yaml_list(step_def)

        deps = []
        for step in data:
            if 'source' in step:
                deps.append(step['source']['file'])

        if len(deps) != 0:
            deps = [os.path.join(path_prefix, i) for i in deps]

            mapping[step_def[maskl:]] = deps

    return mapping
Example #27
def get_file_list(path, input_extension):
    '''Wrap expand_tree() so that a path to a single file returns a list
    containing just that file; given a directory, it behaves exactly like
    expand_tree().
    :param string path: path to a file or directory
    :param input_extension: a single extension or a list of acceptable extensions
    '''
    if os.path.isfile(path):
        if input_extension is not None:
            if isinstance(input_extension, list):
                if os.path.splitext(path)[1][1:] not in input_extension:
                    return []
            else:
                if not path.endswith(input_extension):
                    return []
        return [path]
    else:
        return expand_tree(path, input_extension)
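A short usage sketch for get_file_list(); the paths here are hypothetical and would need to exist on disk:

# a directory behaves exactly like expand_tree()
yaml_files = get_file_list('source/includes', ['yaml'])

# a single file whose extension matches comes back as a one-element list
single = get_file_list('source/includes/steps-install.yaml', ['yaml'])
# -> ['source/includes/steps-install.yaml']

# a single file whose extension does not match yields an empty list
empty = get_file_list('source/includes/steps-install.yaml', ['txt'])
# -> []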
Example #28
def directory_expansion(source_path, page, conf):
    new_page = {'sources': expand_tree(source_path, None)}
    del page['source']

    if 'source_dir' in page:
        new_page['source_dir'] = page['source_dir']

    pages = []

    for p in convert_multi_source(new_page):
        p.update(page)
        p['target'] = os.path.join(conf.paths.projectroot, conf.paths.source,
                                   p['target'],
                                   p['source'][len(source_path) + 1:])

        pages.append(p)

    return pages
Example #29
def get_file_list(path, input_extension):
    '''Wrap expand_tree() so that a path to a single file returns a list
    containing just that file; given a directory, it behaves exactly like
    expand_tree().
    :param string path: path to a file or directory
    :param input_extension: a single extension or a list of acceptable extensions
    '''
    if os.path.isfile(path):
        if input_extension is not None:
            if isinstance(input_extension, list):
                if os.path.splitext(path)[1][1:] not in input_extension:
                    return []
            else:
                if not path.endswith(input_extension):
                    return []
        return [path]
    else:
        return expand_tree(path, input_extension)
Example #30
def gettext_tasks(conf, app):
    locale_dirs = os.path.join(conf.paths.projectroot,
                               conf.paths.locale, 'pot')

    builder_name = resolve_builder_path('gettext', conf.project.edition, None, conf)

    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output,
                                 builder_name)

    path_offset = len(branch_output) + 1

    for fn in expand_tree(branch_output, None):
        task = app.add('task')
        task.target = fn
        task.job = copy_if_needed
        task.args = [ fn, os.path.join(locale_dirs, fn[path_offset:]), None]
        task.description = "migrating po file {0} if needed".format(fn)
Example #31
def generated_includes(conf):
    toc_spec_files = []
    step_files = []
    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        base = os.path.basename(fn)

        if base.startswith('toc-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif base.startswith('steps'):
            step_files.append(fn)
        elif base.startswith('example'):
            # example files have, for our purposes, the same structure as
            # steps, so we can treat them the same:
            step_files.append(fn)

    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]
    mapping = {}
    for spec_file in toc_spec_files:
        if os.path.exists(spec_file):
            data = ingest_yaml_doc(spec_file)
        else:
            continue

        deps = [ os.path.join(path_prefix, i ) for i in data['sources']]

        mapping[spec_file[maskl:]] = deps

    for step_def in step_files:
        data = ingest_yaml_list(step_def)

        deps = []
        for step in data:
            if 'source' in step:
                deps.append(step['source']['file'])

        if len(deps) != 0:
            deps = [ os.path.join(path_prefix, i ) for i in deps ]

            mapping[step_def[maskl:]] = deps

    return mapping
Example #32
def directory_expansion(source_path, page, conf):
    new_page = { 'sources': expand_tree(source_path, None)}
    del page['source']

    if 'source_dir' in page:
        new_page['source_dir'] = page['source_dir']

    pages = []

    for p in convert_multi_source(new_page):
        p.update(page)
        p['target'] = os.path.join(conf.paths.projectroot,
                                   conf.paths.source,
                                   p['target'],
                                   p['source'][len(source_path)+1:])

        pages.append(p)

    return pages
Example #33
def dump_file_hashes(conf):
    output = conf.system.dependency_cache

    o = {'time': datetime.datetime.utcnow().strftime("%s"),
         'files': {}}

    files = expand_tree(os.path.join(conf.paths.projectroot, conf.paths.branch_source), None)

    fmap = o['files']

    for fn in files:
        if os.path.exists(fn):
            fmap[fn] = md5_file(fn)

    safe_create_directory(os.path.dirname(output))

    with open(output, 'w') as f:
        json.dump(o, f)

    logger.debug('wrote dependency cache to: {0}'.format(output))
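safe_create_directory() replaces the explicit os.path.exists()/os.makedirs() guard used in Example #1. Its implementation is not shown in these snippets; a minimal sketch under that assumption:

import os

def safe_create_directory(path):
    # create the directory (and any parents) unless it already exists
    if not os.path.exists(path):
        os.makedirs(path)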
Example #34
def dump_file_hashes(conf):
    output = conf.system.dependency_cache

    o = {'time': datetime.datetime.utcnow().strftime("%s"), 'files': {}}

    files = expand_tree(
        os.path.join(conf.paths.projectroot, conf.paths.branch_source), None)

    fmap = o['files']

    for fn in files:
        if os.path.exists(fn):
            fmap[fn] = md5_file(fn)

    safe_create_directory(os.path.dirname(output))

    with open(output, 'w') as f:
        json.dump(o, f)

    logger.info('wrote dependency cache to: {0}'.format(output))
Example #35
def finalize_single_html_tasks(builder, conf, app):
    single_html_dir = get_single_html_dir(conf)

    if not os.path.exists(single_html_dir):
        os.makedirs(single_html_dir)

    found_src = False
    for base_path in (builder, hyph_concat(builder, conf.project.edition)):
        if found_src is True:
            break

        for fn in [ os.path.join(base_path, f) for f in ('contents.html', 'index.html') ]:
            src_fn = os.path.join(conf.paths.projectroot, conf.paths.branch_output, fn)

            if os.path.exists(src_fn):
                manual_single_html(input_file=src_fn,
                                   output_file=os.path.join(single_html_dir, 'index.html'))

                copy_if_needed(source_file=os.path.join(conf.paths.projectroot,
                                                 conf.paths.branch_output,
                                                 base_path, 'objects.inv'),
                               target_file=os.path.join(single_html_dir, 'objects.inv'))

                found_src = True

                break

    if found_src is not True:
        raise FileNotFoundError('singlehtml source file')

    single_path = os.path.join(single_html_dir, '_static')

    for fn in expand_tree(os.path.join(os.path.dirname(src_fn), '_static'), None):
        target_fn = os.path.join(single_path, os.path.basename(fn))

        task = app.add('task')
        task.job = copy_if_needed
        task.target = target_fn
        task.dependency = fn
        task.args = [fn, target_fn]
        task.description = "migrating static files to the HTML build"
Example #36
def gettext_tasks(conf, app):
    locale_dirs = os.path.join(conf.paths.projectroot, conf.paths.locale,
                               'pot')

    builder_name = resolve_builder_path('gettext', conf.project.edition, None,
                                        conf)

    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output, builder_name)

    path_offset = len(branch_output) + 1

    for fn in expand_tree(branch_output, None):
        target = os.path.join(locale_dirs, fn[path_offset:])
        source = fn

        task = app.add('task')
        task.target = target
        task.dependency = source
        task.job = copy_if_needed
        task.args = [source, target, None]
        task.description = "migrating po file {0} if needed".format(fn)
Example #37
def generated_includes(conf):
    step_files = []
    mapping = {}

    content_prefixes = []
    for _, prefixes in conf.system.content.content_prefixes:
        content_prefixes.extend(prefixes)

    for fn in expand_tree(os.path.join(conf.paths.includes),
                          input_extension='yaml'):
        base = os.path.basename(fn)

        # example/toc-specs files have, for our purposes, the same structure
        # as steps, so we can treat them the same
        for prefix in content_prefixes:
            if base.startswith(prefix):
                step_files.append(fn)
                break

    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]

    for step_def in step_files:
        deps = []

        with open(step_def, 'r') as f:
            data = yaml.safe_load_all(f)

            for step in data:
                if 'source' in step:
                    deps.append(step['source']['file'])

        if len(deps) != 0:
            deps = [os.path.join(path_prefix, i) for i in deps]

            mapping[step_def[maskl:]] = deps

    return mapping
Example #38
def toc_tasks(conf, app):
    paths = conf.paths

    for fn in expand_tree(paths.includes, 'yaml'):
        if not (fn.startswith(os.path.join(paths.includes, 'toc'))
                or fn.startswith(os.path.join(paths.includes, 'ref-toc'))
                or fn.startswith(os.path.join(paths.includes, 'ref-spec'))):
            continue
        elif len(fn) >= 24:
            task = app.add('task')
            base_name = _get_toc_base_name(fn)
            target = []

            fmt = fn[20:24]
            if fmt != 'spec':
                fmt = fn[16:19]

            task.dependency = os.path.join(paths.projectroot, fn)
            task.job = _generate_toc_tree
            task.args = [fn, fmt, base_name, paths, conf]
            task.description = 'generating {0} from {1}'.format(fmt, fn)

            if fmt != 'spec':
                target.append(_get_toc_output_name(base_name, 'toc', paths))

            is_ref_spec = fn.startswith(
                os.path.join(os.path.dirname(fn), 'ref-spec'))

            if not is_ref_spec and (fmt == 'toc' or fmt == 'spec'):
                target.append(
                    _get_toc_output_name(base_name, 'dfn-list', paths))
            elif fmt == 'ref' or is_ref_spec:
                target.append(_get_toc_output_name(base_name, 'table', paths))

            task.target = target

            logger.debug('added task for generating toc from {0}'.format(fn))
Example #39
def generated_includes(conf):
    step_files = []
    mapping = {}

    content_prefixes = []
    for _, prefixes in conf.system.content.content_prefixes:
        content_prefixes.extend(prefixes)

    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        base = os.path.basename(fn)

        # example/toc-specs files have, for our purposes, the same structure
        # as steps, so we can treat them the same
        for prefix in content_prefixes:
            if base.startswith(prefix):
                step_files.append(fn)
                break

    maskl = len(conf.paths.source)
    path_prefix = conf.paths.includes[len(conf.paths.source):]

    for step_def in step_files:
        deps = []

        with open(step_def, 'r') as f:
            data = yaml.safe_load_all(f)

            for step in data:
                if 'source' in step:
                    deps.append(step['source']['file'])

        if len(deps) != 0:
            deps = [os.path.join(path_prefix, i) for i in deps]

            mapping[step_def[maskl:]] = deps

    return mapping
Example #40
def finalize_single_html_tasks(builder, conf):
    single_html_dir = get_single_html_dir(conf)

    # create directory when registering tasks.
    safe_create_directory(single_html_dir)
    safe_create_directory(os.path.join(single_html_dir, '_static'))

    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        artifact_dir = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output, '-'.join(
                                        (builder, conf.project.edition)))
    else:
        artifact_dir = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output, builder)

    tasks = [
        libgiza.task.Task(job=finalize_single_html,
                          args=(single_html_dir, artifact_dir, conf),
                          target=True,
                          dependency=None,
                          description="migrating singlehtml")
    ]

    for fn in expand_tree(os.path.join(artifact_dir, '_static'), None):
        target_fn = os.path.join(single_html_dir, '_static',
                                 os.path.basename(fn))

        tasks.append(
            libgiza.task.Task(
                job=copy_if_needed,
                args=(fn, target_fn),
                target=target_fn,
                dependency=fn,
                description="moving static files to the singlehtml build"))

    return tasks
Example #41
def get_migration_specifications(conf):
    return [ fn for fn in expand_tree(os.path.join(conf.paths.projectroot,
                                                   conf.paths.builddata))
             if conf.project.name in os.path.basename(fn) and 'migrations' in fn ]
Example #42
def include_file_data(conf):
    inc_path = os.path.join(conf.paths.includes)
    include_file_list = expand_tree(path=inc_path, input_extension=None)
    include_graph = include_files(conf=conf)

    recursive_use = included_recusively(conf, include_graph)
    generated = generated_includes(conf)

    omni = {}
    for idx, fn in enumerate(include_file_list):
        incf = fn[len(conf.paths.source):]

        if fn.endswith('~'):
            continue

        for prefix in suppressed_page_prefixes:
            if incf.startswith(prefix):
                break
        else:
            omni[incf] = {
                'id': idx,
                'name': os.path.splitext(incf)[0],
                'path': incf,
            }

            if incf in generated:
                omni[incf]['generated'] = True
            else:
                omni[incf]['generated'] = False

            if incf in recursive_use:
                omni[incf]['recursive'] = True
            else:
                omni[incf]['recursive'] = False

            if incf in include_graph:
                omni[incf]['num_clients'] = len(include_graph[incf])

                omni[incf]['clients'] = []
                for cl in include_graph[incf]:
                    cl, ext = os.path.splitext(cl)

                    if ext == '.yaml':
                        continue
                    if (cl.startswith('/includes/generated/overview') or
                        cl.startswith('/includes/manpage-')):
                        continue

                    omni[incf]['clients'].append(cl)

                if len(omni[incf]['clients']) == 0:
                    omni[incf]['yaml_only'] = True
                else:
                    omni[incf]['yaml_only'] = False
            else:
                omni[incf]['clients'] = dict()
                omni[incf]['num_clients'] = 0

            with open(fn, 'r') as f:
                omni[incf]['content'] = [ ln.rstrip() for ln in f.readlines() ]

    return omni
Example #43
def option_sources(conf):
    output_path = get_option_path(conf)

    for fn in expand_tree(get_base_path(conf), 'yaml'):
        if fn.startswith(output_path):
            yield fn
Example #44
def main():
    files = expand_tree('source/includes/', 'yaml')

    spec_fns = []
    toc_fns = []

    for fn in files:
        if 'ref-spec' in fn:
            spec_fns.append(fn)
        elif 'toc' not in fn:
            continue
        elif 'spec-new' in fn:
            continue
        elif 'spec' in fn:
            spec_fns.append(fn)
        else:
            toc_fns.append(fn)

    logger.info('have {0} spec files'.format(len(spec_fns)))
    logger.info('have {0} toc files'.format(len(toc_fns)))

    specs = {}
    tocs = {}

    for fn in spec_fns:
        try:
            with open(fn, 'r') as f:
                specs[fn] = [ d for d in yaml.safe_load_all(f) if d is not None][0]
        except:
            continue

    for fn in toc_fns:
        with open(fn, 'r') as f:
            tocs[fn] = [ d for d in yaml.safe_load_all(f) if d is not None]

    logger.info('have {0} spec files'.format(len(specs)))
    logger.info('have {0} toc files'.format(len(tocs)))

    new_specs = {}
    for fn in specs:
        new_spec = []
        spec = specs[fn]
        new_specs[fn] = new_spec

        for entry in spec['files']:
            new_entry = {}
            new_spec.append(new_entry)

            if 'level' in entry:
                new_entry['level'] = entry['level']
            else:
                new_entry['level'] = 1

            if isinstance(entry, basestring):
                entry = { 'file': entry }

            if 'file' in entry:
                new_entry['source'] = {
                    'file': get_enclosing_file(tocs, entry['file']),
                    'ref': entry['file']
                }
            else:
                new_entry['text_only'] = True
                new_entry['description'] = entry['text']
                new_entry['ref'] = "".join( [random.choice(string.letters) for i in xrange(15)] )

        with open(fn, 'w') as f:
            yaml.safe_dump_all(new_spec, f, default_flow_style=False)
            logger.info('wrote: ' + fn)
Example #45
def step_sources(paths):
    for fn in expand_tree(os.path.join(paths.projectroot, paths.includes), 'yaml'):
        if fn.startswith(os.path.join(paths.projectroot, paths.includes, 'step')):
            yield fn
Example #46
def table_sources(conf):
    for source in expand_tree(
            os.path.join(conf.paths.projectroot, conf.paths.branch_includes),
            'yaml'):
        if os.path.basename(source).startswith('table'):
            yield source
Example #47
def api_sources(conf):
    return expand_tree(
        os.path.join(conf.paths.projectroot, conf.paths.branch_source,
                     'reference'), 'yaml')
Example #48
def include_file_data(conf):
    inc_path = os.path.join(conf.paths.includes)
    include_file_list = expand_tree(path=inc_path, input_extension=None)
    include_graph = include_files(conf=conf)

    recursive_use = included_recusively(conf, include_graph)
    generated = generated_includes(conf)

    omni = {}
    for idx, fn in enumerate(include_file_list):
        incf = fn[len(conf.paths.source):]

        if fn.endswith('~'):
            continue

        for prefix in suppressed_page_prefixes:
            if incf.startswith(prefix):
                break
        else:
            omni[incf] = {
                'id': idx,
                'name': os.path.splitext(incf)[0],
                'path': incf,
            }

            if incf in generated:
                omni[incf]['generated'] = True
            else:
                omni[incf]['generated'] = False

            if incf in recursive_use:
                omni[incf]['recursive'] = True
            else:
                omni[incf]['recursive'] = False

            if incf in include_graph:
                omni[incf]['num_clients'] = len(include_graph[incf])

                omni[incf]['clients'] = []
                for cl in include_graph[incf]:
                    cl, ext = os.path.splitext(cl)

                    if ext == '.yaml':
                        continue
                    if (cl.startswith('/includes/generated/overview') or
                        cl.startswith('/includes/manpage-')):
                        continue

                    omni[incf]['clients'].append(cl)

                if len(omni[incf]['clients']) == 0:
                    omni[incf]['yaml_only'] = True
                else:
                    omni[incf]['yaml_only'] = False
            else:
                omni[incf]['clients'] = dict()
                omni[incf]['num_clients'] = 0

            with open(fn, 'r') as f:
                omni[incf]['content'] = [ ln.rstrip() for ln in f.readlines() ]

    return omni
Example #49
def get_migration_specifications(conf):
    return [
        fn for fn in expand_tree(
            os.path.join(conf.paths.projectroot, conf.paths.builddata))
        if conf.project.name in os.path.basename(fn) and 'migrations' in fn
    ]
Example #50
def main():
    files = expand_tree('source/includes/', 'yaml')

    spec_fns = []
    toc_fns = []

    for fn in files:
        if 'ref-spec' in fn:
            spec_fns.append(fn)
        elif 'toc' not in fn:
            continue
        elif 'spec-new' in fn:
            continue
        elif 'spec' in fn:
            spec_fns.append(fn)
        else:
            toc_fns.append(fn)

    logger.info('have {0} spec files'.format(len(spec_fns)))
    logger.info('have {0} toc files'.format(len(toc_fns)))

    specs = {}
    tocs = {}

    for fn in spec_fns:
        try:
            with open(fn, 'r') as f:
                specs[fn] = [
                    d for d in yaml.safe_load_all(f) if d is not None
                ][0]
        except:
            continue

    for fn in toc_fns:
        with open(fn, 'r') as f:
            tocs[fn] = [d for d in yaml.safe_load_all(f) if d is not None]

    logger.info('have {0} spec files'.format(len(specs)))
    logger.info('have {0} toc files'.format(len(tocs)))

    new_specs = {}
    for fn in specs:
        new_spec = []
        spec = specs[fn]
        new_specs[fn] = new_spec

        for entry in spec['files']:
            new_entry = {}
            new_spec.append(new_entry)

            if 'level' in entry:
                new_entry['level'] = entry['level']
            else:
                new_entry['level'] = 1

            if isinstance(entry, basestring):
                entry = {'file': entry}

            if 'file' in entry:
                new_entry['source'] = {
                    'file': get_enclosing_file(tocs, entry['file']),
                    'ref': entry['file']
                }
            else:
                new_entry['text_only'] = True
                new_entry['description'] = entry['text']
                new_entry['ref'] = "".join(
                    [random.choice(string.letters) for i in xrange(15)])

        with open(fn, 'w') as f:
            yaml.safe_dump_all(new_spec, f, default_flow_style=False)
            logger.info('wrote: ' + fn)
Example #51
def table_sources(conf):
    for source in expand_tree(os.path.join(conf.paths.projectroot, conf.paths.includes), 'yaml'):
        if os.path.basename(source).startswith('table'):
            yield source
Example #52
def api_sources(conf):
    return expand_tree(os.path.join(conf.paths.projectroot, conf.paths.source, 'reference'), 'yaml')
Example #53
def step_sources(paths):
    for fn in expand_tree(os.path.join(paths.projectroot, paths.includes),
                          'yaml'):
        if fn.startswith(
                os.path.join(paths.projectroot, paths.includes, 'step')):
            yield fn
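expand_tree() itself is never defined in these snippets. Judging only from how it is called (a root directory plus an optional extension or list of extensions, with None meaning "match everything"), a minimal sketch could look like the following; this is an assumption about its behavior, not the actual implementation:

import os

def expand_tree(path, input_extension='yaml'):
    # walk the tree and collect file paths, optionally filtered by extension
    if input_extension is not None and not isinstance(input_extension, list):
        input_extension = [input_extension]

    results = []
    for root, _, files in os.walk(path):
        for f in files:
            if input_extension is not None:
                ext = os.path.splitext(f)[1][1:]
                if ext not in input_extension:
                    continue
            results.append(os.path.join(root, f))

    return results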