Example #1
def apply(obj, repo=None):
    if repo is None:
        repo = get_conf().git.remote.upstream

    cmd = ['curl',
           'https://github.com/{0}/'.format(repo),
           '|', 'git', 'am',
           '--signoff --3way' if env.sign else '--3way' ]

    if obj.startswith('http'):
        cmd[1] = obj
        if not obj.endswith('.patch'):
            cmd[1] += '.patch'
        local(' '.join(cmd))
    elif re.search('[a-zA-Z]+', obj):
        cmd[1] = cmd[1] + 'commit/' + obj + '.patch'

        local(' '.join(cmd))
        puts('[git]: merged commit {0} for {1} into {2}'.format(obj, repo, get_branch()))
    else:
        cmd[1] = cmd[1] + 'pull/' + obj + '.patch'

        local(' '.join(cmd))
        puts('[git]: merged pull request #{0} for {1} into {2}'.format(obj, repo, get_branch()))
Example #2
def manpage_jobs():
    conf = get_conf()

    jobs = [
        (
            os.path.join(conf.build.paths.includes, "manpage-options-auth.rst"),
            os.path.join(conf.build.paths.includes, 'manpage-options-auth-mongo.rst'),
            (re.compile('fact-authentication-source-tool'), 'fact-authentication-source-mongo')
        ),
        (
            os.path.join(conf.build.paths.includes, 'manpage-options-ssl.rst'),
            os.path.join(conf.build.paths.includes, 'manpage-options-ssl-settings.rst'),
            [ (re.compile(r'\.\. option:: --'), r'.. setting:: ' ),
              (re.compile(r'setting:: (\w+) .*'), r'setting:: \1'),
              (re.compile(r':option:`--'), r':setting:`') ]
        )
    ]

    for input_fn, output_fn, regex in jobs:
        yield {
                'target': output_fn,
                'dependency': input_fn,
                'job': _process_page,
                'args': [ input_fn, output_fn, regex, 'manpage' ],
              }
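Most of the generator functions in this listing yield build-job dictionaries with 'target', 'dependency', 'job', and 'args' keys; the runner that consumes them is not shown here. The following is a minimal sketch, resting only on that layout, of how such a generator could be driven (run_jobs is a hypothetical helper, not part of the original tooling):

def run_jobs(job_iter):
    # Hypothetical consumer for the job dicts yielded above; not the
    # project's actual runner. It relies only on the 'job'/'args' layout
    # visible in these examples.
    for spec in job_iter:
        func = spec['job']
        args = spec.get('args', [])
        if isinstance(args, dict):
            func(**args)   # some generators (e.g. image_jobs) pass a dict
        else:
            func(*args)    # most pass a list or tuple of positional args

# Usage sketch: run_jobs(manpage_jobs())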
Example #3
def validate_branch(branch):
    if branch == 'override':
        pass
    elif branch is None:
        abort('must specify a branch')
    elif branch not in get_conf().git.branches.published:
        abort('must specify a published branch.')
Example #4
def buildinfo_hash():
    conf = get_conf()

    fn = os.path.join(conf.build.paths.projectroot,
                      conf.build.paths.includes,
                      'hash.rst')

    generate_hash_file(fn)

    if conf.project.name == 'manual':
        release_fn = os.path.join(conf.build.paths.projectroot,
                                  conf.build.paths.branch_staging,
                                  'release.txt')
    else:
        release_fn = os.path.join(conf.build.paths.projectroot,
                                  conf.build.paths.public, 'release.txt')


    if not os.path.exists(os.path.dirname(release_fn)):
        os.makedirs(os.path.dirname(release_fn))

    with open(release_fn, 'w') as f:
        f.write(conf.git.commit)

    puts('[build]: generated "{0}" with current release hash.'.format(release_fn))
Example #5
def manpage_url_jobs():
    conf = get_conf()

    project_source = os.path.join(conf.build.paths.projectroot, conf.build.paths.source)

    top_level_items = set()
    for fs_obj in os.listdir(project_source):
        if fs_obj.startswith('.static') or fs_obj == 'index.txt':
            continue
        if os.path.isdir(os.path.join(project_source, fs_obj)):
            top_level_items.add(fs_obj)
        if fs_obj.endswith('.txt'):
            top_level_items.add(fs_obj[:-4])

    top_level_items = '/'+ r'[^\s]*|/'.join(top_level_items) + r'[^\s]*'

    re_string = r'(\\fB({0})\\fP)'.format(top_level_items).replace(r'-', r'\-')
    subst = conf.project.url + '/' + conf.project.tag + r'\2'

    regex_obj = (re.compile(re_string), subst)

    for manpage in expand_tree(os.path.join(conf.build.paths.projectroot,
                                            conf.build.paths.output,
                                            conf.git.branches.current,
                                            'man'), ['1', '5']):
        yield dict(target=manpage,
                   dependency=None,
                   job=manpage_url,
                   args=[regex_obj, manpage])
Example #6
def generate_list_file(outputs, path, conf=None):
    dirname = os.path.dirname(path)

    if conf is None:
        conf = get_conf()

    if conf.project.name == 'ecosystem':
        url = 'http://docs.mongodb.org/ecosystem'
    elif conf.project.name == 'mms':
        if conf.project.edition == 'hosted':
            url = '/'.join(['http://mms.mongodb.com/help-hosted', get_manual_path()])
        else:
            url = 'http://mms.mongodb.com/help'
    else:
        url = '/'.join(['http://docs.mongodb.org', get_manual_path()])

    if not os.path.exists(dirname):
        os.mkdir(dirname)

    with open(path, 'w') as f:
        for fn in outputs:
            f.write( '/'.join([ url, 'json', fn.split('/', 3)[3:][0]]) )
            f.write('\n')

    puts('[json]: rebuilt inventory of json output.')
Example #7
def tags():
    conf = get_conf()

    regexp_fn = os.path.join(conf.build.paths.projectroot,
                             conf.build.paths.tools, 'etags.regexp')

    if not os.path.exists(regexp_fn):
        abort('[dev]: cannot regenerate TAGS: no {0} file'.format(regexp_fn))

    source = expand_tree(os.path.join(conf.build.paths.projectroot,
                                      conf.build.paths.source), 'txt')

    if len(source) == 0:
        abort('[dev]: no source files in {0}'.format(source))

    source = ' '.join(source)

    local('etags -I --language=none --regex=@{0} {1}'.format(regexp_fn, source))

    regexps = [
        (re.compile(r'\.\. (.*):: \$*(.*)'), r'\1.\2'),
        (re.compile(r'\.\. _(.*)'), r'ref.\1')
    ]

    munge_page(fn=os.path.join(conf.build.paths.projectroot, 'TAGS'),
               regex=regexps,
               tag='dev')
Example #8
def main():
    conf = get_conf()

    m = generate_meta(conf)

    m.write(sys.argv[1])
    print('[meta-build]: built "' + sys.argv[1] + '" to seed build metadata.')
Example #9
def _make(target):
    with lcd(get_conf().build.paths.projectroot):
        if isinstance(target, list):
            target_str = ' '.join(['make'] + target)
        elif isinstance(target, basestring):
            target_str = ' '.join(['make', target])

        local(target_str)
Example #10
def sweep(mask=None):
    puts('[stats]: starting full sweep of docs content.')
    conf = get_conf()

    out_file = _fn_output(mask, conf)

    _generate_report(mask, output_file=out_file, conf=conf)

    puts('[stats]: wrote full manual sweep to {0}'.format(out_file))
Example #11
def release_jobs():
    conf = get_conf()
    data_file = os.path.join(conf.build.paths.builddata, 'releases') + '.yaml'

    # shim to allow backwards compatibility on older branches for a while.
    try:
        release_version = conf.version.release
    except AttributeError:
        from conf import release as release_version
    except IOError:
        print('[ERROR]: cannot determine current release.')
        exit(1)

    if not os.path.exists(data_file):
        return

    rel_data = ingest_yaml(data_file)

    for rel in rel_data['source-files']:
        target = os.path.join(conf.build.paths.projectroot,
                              conf.build.paths.includes,
                              'install-curl-release-{0}.rst'.format(rel))
        yield {
                'target': target,
                'dependency': [
                                os.path.join(conf.build.paths.projectroot, 'conf.py'),
                                os.path.join(conf.build.paths.projectroot,
                                             conf.build.paths.buildsystem,
                                             'rstcloth', 'releases.py')
                              ],
                'job': _generate_release_core,
                'args': [
                          rel,
                          target,
                          release_version,
                        ]
              }

    for rel in rel_data['subscription-build']:
        target = 'source/includes/install-curl-release-ent-{0}.rst'.format(rel['system'])

        yield {
                'target': target,
                'dependency': [
                                os.path.join(conf.build.paths.projectroot, 'conf.py'),
                                os.path.join(conf.build.paths.projectroot,
                                             conf.build.paths.buildsystem,
                                             'rstcloth', 'releases.py')
                              ],
                'job': _generate_release_ent,
                'args': [
                          rel,
                          target,
                          release_version
                        ]
              }
Example #12
def get_conf_file(file, directory=None):
    if directory is None:
        from docs_meta import get_conf
        conf = get_conf()

        directory = conf.paths.builddata

    conf_file = os.path.split(file)[1].rsplit('.', 1)[0] + '.yaml'

    return os.path.join(directory, conf_file)
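For a quick illustration of the mapping this helper performs (file names here are made up): the basename of the input file is split from its extension, re-suffixed with '.yaml', and joined onto the directory.

# Hypothetical usage of get_conf_file(); paths are illustrative only.
# os.path.split('bin/makefile_builder.py')[1]        -> 'makefile_builder.py'
# 'makefile_builder.py'.rsplit('.', 1)[0] + '.yaml'  -> 'makefile_builder.yaml'
print(get_conf_file('bin/makefile_builder.py', directory='config'))
# -> 'config/makefile_builder.yaml'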
Example #13
def bootstrap(action='setup'):
    cmd = ['python bootstrap.py']

    if action in ['setup', 'clean']:
        cmd.append(action)
    else:
        abort('[docs-tools]: invalid bootstrap action')

    with lcd(get_conf().build.paths.projectroot):
        local(' '.join(cmd))
Example #14
def sphinx(builder='html', conf=None):
    if conf is None:
        conf = docs_meta.get_conf()

    root = conf.build.paths.branch_output

    cleaner([ os.path.join(root, 'doctrees' + '-' + builder),
              os.path.join(root, builder) ] )

    puts('[clean-{0}]: removed all files supporting the {0} build'.format(builder))
Example #15
def get_conf_file(file, directory=None):
    if directory is None:
        from docs_meta import get_conf
        conf = get_conf()

        directory = conf.paths.builddata

    conf_file = os.path.split(file)[1].rsplit('.', 1)[0] + '.yaml'

    return os.path.join(directory, conf_file)
Example #16
def _fn_output(tag, conf=None):
    if conf is None:
        conf = get_conf()

    fn = ['stats', 'sweep' ]
    if tag is not None:
        fn.append(tag.replace('/', '-'))
    fn.extend([conf.git.branches.current, conf.git.commit[:6]])

    out_fn = '.'.join(['-'.join(fn), 'yaml'])
    return os.path.join(conf.build.paths.output, out_fn)
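For reference, the name assembled here has the following shape (the branch and commit hash are made up for the illustration):

# _fn_output(None, conf)               -> <output>/stats-sweep-master-a1b2c3.yaml
# _fn_output('reference/method', conf) -> <output>/stats-sweep-reference-method-master-a1b2c3.yaml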
Example #17
def generate_image_pages(dir, name, alt, output, conf=None):
    r = RstCloth()
    if conf is None:
        conf = get_conf()

    image = '/'.join([dir, name])
    alt = alt
    b = name

    for img_output in output:
        if img_output['type'] == 'print':
            r.directive('only', 'latex', wrap=False, block=b)
            html = False
        else:
            html = True
            r.directive('only', 'not latex', wrap=False, block=b)
            img_output['width'] = str(img_output['width']) + 'px'

        r.newline()

        if 'tag' in img_output:
            tag = '-' + img_output['tag'] + '.png'
        else:
            tag = '.png'

        options = [('alt', alt), ('align', 'center'), ('figwidth', img_output['width'])]

        if 'scale' in img_output:
            options.append(('scale', img_output['scale']))

        if html is False:
            r.directive(name='figure',
                        arg='/images/{0}{1}'.format(name, tag),
                        fields=options,
                        indent=3,
                        content=alt,
                        block=b)
        elif html is True:
            img_tags = ['<div class="figure align-center" style="max-width:{5};">',
                        '<img src="{0}/{1}/_images/{2}{3}" alt="{4}">', '</img>',
                        '<p class="caption">{4}</p></div>' ]
            img_str = ''.join(img_tags)
            r.directive(name='raw', arg='html',
                        content=img_str.format(conf.project.url,
                                               conf.git.branches.current, name, tag, alt,
                                               img_output['width']),
                        indent=3,
                        block=b)

        r.newline(block=b)

    r.write(image + '.rst')
    print('[image]: generated include file {0}.rst'.format(image))
Example #18
def build(builder='publish'):
    if env.logfile is None:
        env.logfile = os.path.join(get_conf().build.paths.output, 'docs-staging-delegated.log')

    if builder in env.builders:
        with hide('running'):
            env.repo.set_branch(env.branch)
            env.repo.set_path(env.repo.delegated_path)
            env.repo.update_repo(logfile=env.logfile, branch=env.branch)

        build_branch(logfile=env.logfile, branch=env.branch, target=builder, wait=env.wait)
Example #19
def api_jobs(conf=None):
    if conf is None:
        conf = get_conf()

    for source in expand_tree(os.path.join(conf.build.paths.projectroot, conf.build.paths.source, 'reference'), 'yaml'):
        target = dot_concat(os.path.splitext(source)[0], 'rst')

        yield {
                'target': target,
                'dependency': source,
                'job': _generate_api_param,
                'args': [source, target, conf]
              }
Example #20
def generate_json_output_meta():
    m = MakefileCloth()

    m.section_break('json output coordination.')
    paths = render_paths('dict')

    if get_conf().git.remote.upstream.endswith('ecosystem'):
        public_json_output = os.path.join(paths['public'], 'json')
    else:
        public_json_output = os.path.join(paths['branch-staging'], 'json')

    build_json_output = os.path.join(paths['branch-output'], 'json')
    branch_json_list_file = os.path.join(paths['branch-output'],
                                         'json-file-list')
    public_json_list_file = os.path.join(public_json_output, '.file_list')

    m.section_break('meta')

    m.target('json-output', ['json'])
    m.job('fab process.json_output')

    rsync_cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {0}/ {1}'
    m.job(rsync_cmd.format(build_json_output, public_json_output))
    m.msg('[json]: migrated all .json files to staging.')
    m.msg('[json]: processed all json files.')

    m.section_break('list file')

    m.comment(
        'the meta build system generates "{0}" when it generates this file'.
        format(branch_json_list_file))

    fab_cmd = 'fab process.input:{0} process.output:{1} process.copy_if_needed:json'
    m.target('json-file-list', public_json_list_file)
    m.target(public_json_list_file, 'json-output')
    m.job(fab_cmd.format(branch_json_list_file, public_json_list_file))
    m.msg('[json]: rebuilt inventory of json output.')

    m.target(build_json_output, 'json')

    m.target(
        '.PHONY',
        ['clean-json-output', 'clean-json', 'json-output', 'json-file-list'])
    m.target('clean-json-output', 'clean-json')
    m.job(' '.join([
        'rm -rf ', public_json_list_file, branch_json_list_file,
        public_json_output
    ]))
    m.msg('[json]: removed all processed json.')

    return m
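The function above only assembles the Makefile fragment; writing it to disk is left to the caller. A minimal, hypothetical driver, assuming the same write() call used on the generated object in Example #8:

import sys

# Hypothetical driver: build the json-output Makefile fragment defined
# above and write it to the path given on the command line.
if __name__ == '__main__':
    m = generate_json_output_meta()
    m.write(sys.argv[1])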
Example #21
def table_jobs():
    paths = get_conf().build.paths

    for source in expand_tree(os.path.join(paths.projectroot, paths.includes), 'yaml'):
        if os.path.basename(source).startswith('table'):
            target = _get_table_output_name(source)
            list_target = _get_list_table_output_name(source)

            yield {
                    'target': [ target, list_target ],
                    'dependency': source,
                    'job': _generate_tables,
                    'args': [ source, target, list_target ]
                  }
Example #22
def image_jobs():
    conf = get_conf()
    paths = conf.build.paths

    meta_file = os.path.join(paths.images, 'metadata') + '.yaml'

    if not os.path.exists(meta_file):
        return

    images_meta = ingest_yaml_list(meta_file)

    for image in images_meta:
        image['dir'] = paths.images
        source_base = os.path.join(image['dir'], image['name'])
        source_file = source_base + '.svg'
        rst_file = source_base + '.rst'
        image['conf'] = conf

        yield {
                'target': rst_file,
                'dependency': [ meta_file, os.path.join(paths.buildsystem, 'rstcloth', 'images.py') ],
                'job': generate_image_pages,
                'args': image
              }

        for output in image['output']:
            if 'tag' in output:
                tag = '-' + output['tag']
            else:
                tag = ''

            target_img = source_base + tag + '.png'

            inkscape_cmd = '{cmd} -z -d {dpi} -w {width} -y 0.0 -e >/dev/null {target} {source}'

            yield {
                    'target': target_img,
                    'dependency': source_file,
                    'job': _generate_images,
                    'args': [
                              inkscape_cmd,
                              output['dpi'],
                              output['width'],
                              target_img,
                              source_file
                            ],
                  }
Example #23
def error_pages():
    conf = get_conf()

    error_conf = os.path.join(conf.build.paths.builddata, 'errors.yaml')

    if not os.path.exists(error_conf):
        return None
    else:
        error_pages = ingest_yaml_list(error_conf)

        sub = (re.compile(r'\.\./\.\./'), conf.project.url + r'/' + conf.project.tag + r'/')

        for error in error_pages:
            page = os.path.join(conf.build.paths.projectroot, conf.build.paths['branch-output'], 'dirhtml', 'meta', error, 'index.html')
            munge_page(fn=page, regex=sub, tag='error-pages')

        puts('[error-pages]: rendered {0} error pages'.format(len(error_pages)))
Example #24
def main():
    ui = user_input()

    conf = get_conf()

    lines = []
    for doc in utils.ingest_yaml(ui.data):
        if doc['type'] == 'redirect':
            lines.append(generate_redirects(process_redirect(doc, conf=conf), match=ui.match, conf=conf))
        if doc['type'] == 'redirect-draft':
            print(generate_redirects(process_redirect(doc, conf=conf), match=ui.match, conf=conf))

    if lines:
        with open(ui.filename, 'w') as f:
            for line in lines:
                f.write(line)

        print('[redirect]: regenerated ' + ui.filename + ' file.' )
Example #25
    def __init__(self, path=None, branch='master'):
        self.delegated_path = os.path.join(os.getcwd(), 'build/.docs-staging')
        self.delegated_build_path = os.path.join(self.delegated_path, 'build')
        self.b = 'git'

        self.current_branch = get_branch(os.getcwd())
        self.branch = self.current_branch

        self.branches = set()
        self.branches.add(self.current_branch)
        for branch in get_conf().git.branches.published:
            if branch is not None:
                self.branches.add(branch)

        self.set_branch(branch)

        self.path = None
        self.set_path(path)
Example #26
def _job_transfer(migration, block):
    conf = get_conf()

    if 'branch' not in migration or migration['branch'] == conf.git.branches.current:
        m.job('mkdir -p {0}'.format(migration['target']))
        m.job('rsync -a {0}/ {1}/'.format(migration['dependency'], migration['target']))

        if migration.get('filter'):
            fsobjs = [ ]
            for obj in migration['filter']:
                fsobjs.append(migration['target'] + obj)
            m.job('rm -rf {0}'.format(' '.join(fsobjs)))

        m.job('touch {0}'.format(migration['target']), block=block)
        m.msg('[build]: migrated "{0}" to "{1}"'.format(migration['dependency'],
                                                        migration['target']))
    else:
        m.msg('[build]: doing nothing for {0} in this branch'.format(migration['target']))
Example #27
def generate_json_output_meta():
    m = MakefileCloth()

    m.section_break('json output coordination.')
    paths = render_paths('dict')

    if get_conf().git.remote.upstream.endswith('ecosystem'):
        public_json_output = os.path.join(paths['public'], 'json')
    else:
        public_json_output = os.path.join(paths['branch-staging'], 'json')

    build_json_output = os.path.join(paths['branch-output'], 'json')
    branch_json_list_file = os.path.join(paths['branch-output'], 'json-file-list')
    public_json_list_file = os.path.join(public_json_output, '.file_list')

    m.section_break('meta')

    m.target('json-output', ['json'])
    m.job('fab process.json_output')

    rsync_cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {0}/ {1}'
    m.job(rsync_cmd.format(build_json_output, public_json_output))
    m.msg('[json]: migrated all .json files to staging.')
    m.msg('[json]: processed all json files.')

    m.section_break('list file')

    m.comment('the meta build system generates "{0}" when it generates this file'.format(branch_json_list_file))

    fab_cmd = 'fab process.input:{0} process.output:{1} process.copy_if_needed:json'
    m.target('json-file-list', public_json_list_file)
    m.target(public_json_list_file, 'json-output')
    m.job(fab_cmd.format(branch_json_list_file , public_json_list_file))
    m.msg('[json]: rebuilt inventory of json output.')

    m.target(build_json_output, 'json')

    m.target('.PHONY', ['clean-json-output', 'clean-json', 'json-output', 'json-file-list'])
    m.target('clean-json-output', 'clean-json')
    m.job(' '.join(['rm -rf ', public_json_list_file, branch_json_list_file, public_json_output]))
    m.msg('[json]: removed all processed json.')

    return m
Example #28
def _generate_report(mask, output_file=None, conf=None):
    if conf is None:
        conf = get_conf()

    base_path = os.path.join(conf.build.paths.output, conf.git.branches.current, 'json')
    docs = expand_tree(base_path, '.json')

    if mask is not None and mask.startswith('/'):
        mask = mask[1:]

    output = []

    p = Pool()

    for doc in docs:
        if doc.endswith('searchindex.json') or doc.endswith('globalcontext.json'):
            continue
        elif mask is None:
            output.append(p.apply_async( _render_report, kwds=dict(fn=doc)))
        else:
            if doc.startswith(os.path.join(base_path, mask)):
                output.append(p.apply_async( _render_report, args=(doc,)))

    p.close()
    p.join()

    stats = [ _output_report_yaml(o.get()) for o in output ]

    # only trim the first entry when there is at least one result to index
    if len(stats) > 0:
        stats[0] = stats[0][4:]

    stats.append('...\n')

    if output_file is None:
        return (o.get() for o in output )
    elif output_file == 'print':
        for ln in stats:
            print(ln[:-1])
    else:
        with open(output_file, 'w') as f:
            for ln in stats:
                f.write(ln)
Example #29
def builds(days=14):
    days = time.time() - 60*60*24 * int(days)

    path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../build/')) + '/'

    builds = [ path + o for o in os.listdir(path) if os.path.isdir(path + o)]

    for build in builds:
        branch = build.rsplit('/', 1)[1]

        if branch in docs_meta.get_conf().git.branches.published:
            continue
        elif branch == docs_meta.get_branch():
            continue
        elif branch == 'public':
            continue
        elif os.stat(build).st_mtime < days:
            _rm_rf(build)
            _rm_rf(path + "public/" + branch)
            print('[clean]: removed stale build artifact: ' + build)
Example #30
def json_output_jobs(conf=None):
    if conf is None:
        conf = get_conf()

    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        if conf.project.name == 'mms':
            path = os.path.join(conf.build.paths.branch_staging,
                                'json', os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        else:
            path = os.path.join(conf.build.paths.branch_output,
                                'json', os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        yield dict(target=json,
                   dependency=fjson,
                   job=process_json_file,
                   args=(fjson, json, regexes, conf))
        outputs.append(json)

    list_file = os.path.join(conf.build.paths.branch_staging, 'json-file-list')

    yield dict(target=list_file,
               dependency=None,
               job=generate_list_file,
               args=(outputs, list_file, conf))
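The regexes defined above are ordered (compiled pattern, replacement) pairs intended to strip Sphinx markup and HTML entities from the extracted text; process_json_file itself is not shown in this listing. A minimal sketch of applying such a pair list (strip_markup is a hypothetical stand-in, not the project's implementation):

def strip_markup(text, regexes):
    # Apply each (compiled_pattern, replacement) pair in order, e.g. the
    # `regexes` list built in json_output_jobs() above.
    for pattern, replacement in regexes:
        text = pattern.sub(replacement, text)
    return text

# Example: '&#8220;word&#8221;' becomes '"word"' with the pairs above.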
Example #31
def source(conf=None):
    if conf is None:
        conf = get_conf()

    target = os.path.join(conf.build.paths.projectroot, conf.build.paths.branch_output)

    if not os.path.exists(target):
        os.makedirs(target)
        puts('[sphinx-prep]: created ' + target)
    elif not os.path.isdir(target):
        abort('[sphinx-prep]: {0} exists and is not a directory'.format(target))

    source_dir = os.path.join(conf.build.paths.projectroot, conf.build.paths.source)

    local('rsync --recursive --times --delete {0} {1}'.format(source_dir, target))
    puts('[sphinx-prep]: updated source in {0}'.format(target))

    with quiet():
        local(build_platform_notification('Sphinx', 'Build in progress past critical phase.'))

    puts('[sphinx-prep]: INFO - Build in progress past critical phase.')
Example #32
def post_process_jobs(source_fn=None, tasks=None, conf=None):
    if tasks is None:
        if conf is None:
            conf = get_conf()

        if source_fn is None:
            source_fn = os.path.join(conf.build.paths.projectroot,
                                     conf.build.paths.builddata,
                                     'processing.yaml')
        tasks = ingest_yaml(source_fn)
    elif not isinstance(tasks, collections.Iterable):
        abort('[ERROR]: cannot parse post processing specification.')

    def rjob(fn, regex, type):
        return {
                 'target': fn,
                 'dependency': None,
                 'job': _process_page,
                 'args': dict(fn=fn, output_fn=fn, regex=regex, builder=type)
               }

    for job in tasks:
        if not isinstance(job, dict):
            abort('[ERROR]: invalid replacement specification.')
        elif 'file' not in job or 'transform' not in job:
            abort('[ERROR]: replacement specification incomplete.')

        if 'type' not in job:
            job['type'] = 'processor'

        if isinstance(job['transform'], list):
            regex = [ (re.compile(rs['regex']), rs['replace']) for rs in job['transform'] ]
        else:
            regex = ( re.compile(job['transform']['regex']), job['transform']['replace'])

        if isinstance(job['file'], list):
            for fn in job['file']:
                yield rjob(fn, regex, job['type'])
        else:
            yield rjob(job['file'], regex, job['type'])
Example #33
#!/usr/bin/python

import sys
import os.path

sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
sys.path.append(
    os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'bin')))
import utils
from makecloth import MakefileCloth
from docs_meta import render_paths, get_manual_path, get_conf

m = MakefileCloth()

paths = render_paths('dict')
conf = get_conf()
correction = "'s/(index|bfcode)\{(.*!*)*--(.*)\}/\\1\{\\2-\{-\}\\3\}/g'"
pdf_latex_command = 'TEXINPUTS=".:{0}/latex/:" pdflatex --interaction batchmode --output-directory {0}/latex/ $(LATEXOPTS)'.format(
    paths['branch-output'])


def pdf_makefile(name, tag=None, edition=None):
    if tag is None:
        name_tagged = name
        name_tagged_branch_pdf = '-'.join([name, utils.get_branch()]) + '.pdf'
    else:
        name_tagged = '-'.join([name, tag])
        name_tagged_branch_pdf = '-'.join([name, tag,
                                           utils.get_branch()]) + '.pdf'

    if conf.git.remote.upstream.endswith('mms-docs'):