Exemple #1
0
def manpage_jobs(conf=None):
    """Yield build jobs that derive setting-focused manpage include files.

    Each job rewrites an ``option``-style include into its ``setting``-style
    variant via process_page().
    """
    conf = lazy_conf(conf)

    # Rewrite ":option:"/".. option::" markup into the "setting" domain.
    options_compat_re = [
        (re.compile(r'\.\. option:: --'), r'.. setting:: '),
        (re.compile(r'setting:: (\w+) .*'), r'setting:: \1'),
        (re.compile(r':option:`--'), r':setting:`'),
    ]

    def _include(name):
        # All sources and targets live in the includes directory.
        return os.path.join(conf.paths.includes, name)

    jobs = [
        (_include("manpage-options-auth.rst"),
         _include('manpage-options-auth-mongo.rst'),
         (re.compile('fact-authentication-source-tool'),
          'fact-authentication-source-mongo')),
        (_include('manpage-options-ssl.rst'),
         _include('manpage-options-ssl-settings.rst'),
         options_compat_re),
        (_include('manpage-options-audit.rst'),
         _include('manpage-options-audit-settings.rst'),
         options_compat_re),
    ]

    for input_fn, output_fn, regex in jobs:
        # Only emit jobs for sources that exist in this project.
        if not os.path.exists(input_fn):
            continue
        yield {
            'target': output_fn,
            'dependency': input_fn,
            'job': process_page,
            'args': [input_fn, output_fn, regex, 'manpage'],
            'description':
            "generating manpage {0} from {1}".format(output_fn, input_fn)
        }
Exemple #2
0
def main():
    """Regenerate the redirect file from the redirect YAML documents.

    'redirect' documents are accumulated and written to ``ui.filename``;
    'draft' documents are only previewed on stdout.
    """
    ui = user_input()
    conf = lazy_conf()

    lines = []
    for doc in ingest_yaml(ui.data):
        doc_type = doc['type']
        if doc_type == 'redirect':
            lines.append(generate_redirects(process_redirect(doc, conf=conf),
                                            match=ui.match, conf=conf))
        if doc_type == 'draft':
            # Drafts are previewed rather than written to disk.
            print(generate_redirects(process_redirect(doc, conf=conf),
                                     match=ui.match, conf=conf))

    if not lines:
        return

    dirname = os.path.dirname(ui.filename)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(ui.filename, 'w') as f:
        f.writelines(lines)

    print('[redirect]: regenerated ' + ui.filename + ' file.')
Exemple #3
0
def generate_redirects(redirect, match=False, conf=None):
    """Render one processed redirect document as Apache directive text.

    Returns a newline-terminated string of Redirect/RedirectMatch lines
    with duplicate path slashes collapsed.
    """
    conf = lazy_conf(conf)

    multi = determine_is_multi(redirect['outputs'])

    if 'external' in redirect:
        o = ''.join(generate_external_rule(redirect, out, conf) + '\n'
                    for out in redirect['outputs'])
    elif multi and match is True:
        # Collapse all output paths into one alternation for RedirectMatch.
        base = '|'.join(redirect['outputs'])
        o = generate_match_rule(redirect, base, conf) + '\n'
    elif multi is True and match is False:
        o = ''.join(generate_simple_rule(redirect, out, conf) + '\n'
                    for out in redirect['outputs'])
    elif multi is False:
        o = generate_simple_rule(redirect, conf=conf) + '\n'

    # Order matters: widening 'http:/' first means the '//' collapse below
    # de-duplicates path slashes while leaving exactly 'http://' intact.
    o = o.replace('http:/', 'http://')
    o = o.replace('//', '/')

    return o
Exemple #4
0
def generate_match_rule(redir, base, conf=None):
    """Build a RedirectMatch directive covering several output paths.

    ``base`` is a regex alternation of output prefixes; the matched prefix
    is re-inserted in the destination via the ``$1`` back-reference.
    """
    conf = lazy_conf(conf)

    template = 'RedirectMatch {0} /({1}){2} {3}/$1{4}'
    return template.format(redir['code'], base, redir['from'],
                           conf.project.url, redir['to'])
def process_redirect(redirect, conf=None):
    """Expand branch macros in a redirect document and validate its code.

    Replaces 'all', 'after-<branch>' and 'before-<branch>' entries in
    ``redirect['outputs']`` with the matching published branches and
    stringifies the HTTP status code.

    Raises:
        Exception: if the code is not one of 301, 302, 303.
    """
    conf = lazy_conf(conf)

    if 'all' in redirect['outputs']:
        redirect['outputs'].remove('all')
        redirect['outputs'].extend(conf.git.branches.published)

    # Iterate over a snapshot: the original looped over the very list it
    # was mutating, so each remove() skipped the following element.
    for output in list(redirect['outputs']):
        if isinstance(output, dict):
            continue
        elif output.startswith('after-'):
            idx = conf.git.branches.published.index(output.split('-', 1)[1])

            redirect['outputs'].remove(output)
            redirect['outputs'].extend(conf.git.branches.published[:idx])
        elif output.startswith('before-'):
            idx = conf.git.branches.published.index(output.split('-', 1)[1])

            redirect['outputs'].remove(output)
            redirect['outputs'].extend(conf.git.branches.published[idx:])

    if redirect['code'] in (301, 302, 303):
        redirect['code'] = str(redirect['code'])
    else:
        raise Exception(
            str(redirect['code']) + ' is not a supported redirect code')

    return redirect
def generate_match_rule(redir, base, conf=None):
    """Build a RedirectMatch directive over several output prefixes.

    ``base`` is a regex alternation; ``$1`` re-inserts whichever prefix
    matched into the destination URL.
    """
    conf = lazy_conf(conf)

    rule = 'RedirectMatch {0} /({1}){2} {3}/$1{4}'
    return rule.format(redir['code'], base, redir['redirect-path'],
                       conf.project.url, redir['url-base'])
Exemple #7
0
def include_files(files=None, conf=None):
    """Map each ``.. include::`` target to the files that include it.

    If ``files`` is given it is returned unchanged; otherwise the source
    tree is grepped for include directives.
    """
    if files is not None:
        return files

    conf = lazy_conf(conf)

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    # '|| exit 0' keeps grep's no-match exit status from failing the command.
    grep = command(
        'grep -R ".. include:: /" {0} || exit 0'.format(source_dir),
        capture=True).out

    rx = re.compile(source_dir + r'(.*):.*\.\. include:: (.*)')

    matches = (rx.match(line) for line in grep.split('\n'))
    pairs = [m.groups() for m in matches if m is not None]

    # groupby() requires its input sorted on the grouping key (the target).
    pairs.sort(key=itemgetter(1))

    files = dict()
    for target, group in groupby(pairs, itemgetter(1)):
        # Drop editor backup files ('~' suffix) and de-duplicate sources.
        sources = {src for src, _ in group if not src.endswith('~')}
        files[target] = list(sources)

    files.update(generated_includes(conf))

    return files
def process_redirect(redirect, conf=None):
    """Expand branch macros in a redirect document and validate its code.

    'all' expands to every published branch; 'after-X'/'before-X' expand
    to the published branches on either side of X. The HTTP status code
    is converted to a string.

    Raises:
        Exception: if the code is not one of 301, 302, 303.
    """
    conf = lazy_conf(conf)

    if 'all' in redirect['outputs']:
        redirect['outputs'].remove('all')
        redirect['outputs'].extend(conf.git.branches.published)

    # Iterate a snapshot: the original mutated the list while iterating,
    # which makes remove() skip the element that follows.
    for output in list(redirect['outputs']):
        if isinstance(output, dict):
            continue
        elif output.startswith('after-'):
            idx = conf.git.branches.published.index(output.split('-', 1)[1])

            redirect['outputs'].remove(output)
            redirect['outputs'].extend(conf.git.branches.published[:idx])
        elif output.startswith('before-'):
            idx = conf.git.branches.published.index(output.split('-', 1)[1])

            redirect['outputs'].remove(output)
            redirect['outputs'].extend(conf.git.branches.published[idx:])

    if redirect['code'] in (301, 302, 303):
        redirect['code'] = str(redirect['code'])
    else:
        raise Exception(str(redirect['code']) + ' is not a supported redirect code')

    return redirect
def generate_redirects(redirect, match=False, conf=None):
    """Render one processed redirect document as Apache directive text."""
    conf = lazy_conf(conf)

    multi = determine_is_multi(redirect['outputs'])

    if 'external' in redirect:
        o = ''.join(generate_external_rule(redirect, out, conf) + '\n'
                    for out in redirect['outputs'])
    elif multi and match is True:
        # One RedirectMatch line covering every output via alternation.
        base = '|'.join(redirect['outputs'])
        o = generate_match_rule(redirect, base, conf) + '\n'
    elif multi is True and match is False:
        o = ''.join(generate_simple_rule(redirect, out, conf) + '\n'
                    for out in redirect['outputs'])
    elif multi is False:
        o = generate_simple_rule(redirect, conf=conf) + '\n'

    return o
Exemple #10
0
def dump_file_hashes(conf=None):
    """Write an md5 hash of every source file to the dependency cache.

    The cache is a JSON document holding the conf object, a UNIX
    timestamp, and a filename -> md5 map.
    """
    conf = lazy_conf(conf)

    output = conf.system.dependency_cache

    hashes = {}
    cache = {'conf': conf,
             'time': datetime.datetime.utcnow().strftime("%s"),
             'files': hashes}

    source_tree = os.path.join(conf.paths.projectroot, conf.paths.source)
    for fn in expand_tree(source_tree, None):
        if os.path.exists(fn):
            hashes[fn] = md5_file(fn)

    output_dir = os.path.dirname(output)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(output, 'w') as f:
        json.dump(cache, f)

    logger.info('wrote dependency cache to: {0}'.format(output))
Exemple #11
0
def manpage_jobs(conf=None):
    """Yield jobs converting option-based includes to setting-based ones."""
    conf = lazy_conf(conf)

    # Translate ":option:"/".. option::" directives to the setting domain.
    options_compat_re = [
        (re.compile(r'\.\. option:: --'), r'.. setting:: '),
        (re.compile(r'setting:: (\w+) .*'), r'setting:: \1'),
        (re.compile(r':option:`--'), r':setting:`'),
    ]

    includes = conf.paths.includes
    jobs = [
        (os.path.join(includes, "manpage-options-auth.rst"),
         os.path.join(includes, 'manpage-options-auth-mongo.rst'),
         (re.compile('fact-authentication-source-tool'),
          'fact-authentication-source-mongo')),
        (os.path.join(includes, 'manpage-options-ssl.rst'),
         os.path.join(includes, 'manpage-options-ssl-settings.rst'),
         options_compat_re),
        (os.path.join(includes, 'manpage-options-audit.rst'),
         os.path.join(includes, 'manpage-options-audit-settings.rst'),
         options_compat_re),
    ]

    for input_fn, output_fn, regex in jobs:
        # Sources that do not exist in this project produce no job.
        if not os.path.exists(input_fn):
            continue
        yield {
            'target': output_fn,
            'dependency': input_fn,
            'job': process_page,
            'args': [input_fn, output_fn, regex, 'manpage'],
        }
Exemple #12
0
def include_files(files=None, conf=None):
    """Return a mapping of include targets to the files including them.

    A pre-computed ``files`` mapping short-circuits the scan; otherwise
    the source tree is grepped for ``.. include::`` directives.
    """
    if files is not None:
        return files

    conf = lazy_conf(conf)

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    # '|| exit 0' masks grep's non-zero exit when nothing matches.
    grep_cmd = 'grep -R ".. include:: /" {0} || exit 0'.format(source_dir)
    grep = command(grep_cmd, capture=True).out

    rx = re.compile(source_dir + r'(.*):.*\.\. include:: (.*)')

    pairs = [m.groups()
             for m in (rx.match(line) for line in grep.split('\n'))
             if m is not None]

    # groupby() only groups adjacent items: sort on the same key first.
    pairs.sort(key=itemgetter(1))

    files = dict()
    for target, group in groupby(pairs, itemgetter(1)):
        # Skip editor backups; the set removes duplicate source entries.
        files[target] = list({src for src, _ in group
                              if not src.endswith('~')})

    files.update(generated_includes(conf))

    return files
Exemple #13
0
def dump_file_hashes(conf=None):
    """Dump an md5 checksum of each source file into the dependency cache.

    The cache is JSON with the conf object, a UNIX timestamp, and a
    filename -> md5 mapping.
    """
    conf = lazy_conf(conf)

    output = conf.system.dependency_cache

    file_hashes = {}
    payload = {'conf': conf,
               'time': datetime.datetime.utcnow().strftime("%s"),
               'files': file_hashes}

    tree = os.path.join(conf.paths.projectroot, conf.paths.source)
    for fn in expand_tree(tree, None):
        if os.path.exists(fn):
            file_hashes[fn] = md5_file(fn)

    output_dir = os.path.dirname(output)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    with open(output, 'w') as f:
        json.dump(payload, f)

    logger.info('wrote dependency cache to: {0}'.format(output))
Exemple #14
0
def refresh_dependencies(conf=None):
    """Run the dependency-refresh jobs in a 4-way process pool.

    Returns the sum of the individual job results.
    """
    conf = lazy_conf(conf)

    jobs = refresh_dependency_jobs(conf)
    results = runner(jobs, pool=4, parallel='process', force=False)

    return sum(results)
Exemple #15
0
def post_process_jobs(source_fn=None, tasks=None, conf=None):
    """
    input documents should be:

    {
      'transform': {
                     'regex': str,
                     'replace': str
                   }
      'type': <str>
      'file': <str|list>
    }

    ``transform`` can be either a document or a list of documents.

    Yields one process_page() job per target file, applying the compiled
    regex transforms in place.
    """
    # collections.Iterable was removed in Python 3.10; use the ABC module.
    from collections.abc import Iterable

    if tasks is None:
        conf = lazy_conf(conf)

        if source_fn is None:
            source_fn = os.path.join(conf.paths.project.root,
                                     conf.paths.builddata, 'processing.yaml')
        tasks = ingest_yaml(source_fn)
    elif not isinstance(tasks, Iterable):
        raise ProcessingError(
            '[ERROR]: cannot parse post processing specification.')

    def rjob(fn, regex, type):
        # One in-place rewrite job for a single file.
        return {
            'target': fn,
            'dependency': None,
            'job': process_page,
            'args': dict(fn=fn, output_fn=fn, regex=regex, builder=type)
        }

    for job in tasks:
        if not isinstance(job, dict):
            raise ProcessingError(
                '[ERROR]: invalid replacement specification.')
        elif 'file' not in job or 'transform' not in job:
            # Both keys are required below; the original only rejected specs
            # missing *both*, letting half-complete specs raise KeyError later.
            raise ProcessingError(
                '[ERROR]: replacement specification incomplete.')

        if 'type' not in job:
            job['type'] = 'processor'

        if isinstance(job['transform'], list):
            regex = [(re.compile(rs['regex']), rs['replace'])
                     for rs in job['transform']]
        else:
            regex = (re.compile(job['transform']['regex']),
                     job['transform']['replace'])

        if isinstance(job['file'], list):
            for fn in job['file']:
                yield rjob(fn, regex, job['type'])
        else:
            yield rjob(job['file'], regex, job['type'])
Exemple #16
0
def release_jobs(conf=None):
    """Yield jobs that regenerate the per-release install include files.

    For every release in releases.yaml, emits curl/untar/copy install
    snippets, plus enterprise curl snippets for subscription builds.
    Yields nothing when the releases data file does not exist.
    """
    conf = lazy_conf(conf)

    data_file = os.path.join(conf.paths.builddata, 'releases') + '.yaml'

    if 'release' in conf.version:
        release_version = conf.version.release
    else:
        # Fall back to the most recently published version.
        release_version = conf.version.published[0]

    if not os.path.exists(data_file):
        return

    # Reuse the path computed above rather than rebuilding it (the
    # original recomputed the identical join expression here).
    rel_data = ingest_yaml(data_file)

    deps = [os.path.join(conf.paths.projectroot, conf.system.conf_file)]

    def _job(target, generator, rel):
        # Common job shape shared by every release target.
        return {
            'target': target,
            'dependency': deps,
            'job': generator,
            'args': [rel, target, release_version],
        }

    for rel in rel_data['source-files']:
        # Emit curl, untar, and copy variants in that order.
        for kind, generator in (('curl', _generate_release_core),
                                ('untar', _generate_untar_core),
                                ('copy', _generate_copy_core)):
            target = os.path.join(
                conf.paths.projectroot, conf.paths.includes,
                'install-{0}-release-{1}.rst'.format(kind, rel))
            yield _job(target, generator, rel)

    for rel in rel_data['subscription-build']:
        target = 'source/includes/install-curl-release-ent-{0}.rst'.format(rel['system'])
        yield _job(target, _generate_release_ent, rel)
Exemple #17
0
def image_jobs(conf=None):
    """Yield jobs that build image artifacts from SVG sources.

    For each image in the metadata file, emits one job that generates the
    rst include page and one inkscape rasterization job per output size.
    """
    # Bug fix: the conf argument was previously discarded (lazy_conf(None)).
    conf = lazy_conf(conf)
    paths = conf.paths

    meta_file = os.path.join(paths.images, 'metadata') + '.yaml'

    # PEP 479: 'raise StopIteration' inside a generator is a RuntimeError
    # on Python 3.7+; a bare return ends iteration cleanly.
    if not os.path.exists(meta_file):
        return

    images_meta = ingest_yaml_list(meta_file)

    if images_meta is None:
        return

    for image in images_meta:
        image['dir'] = paths.images
        source_base = os.path.join(image['dir'], image['name'])
        source_file = dot_concat(source_base, 'svg')
        rst_file = dot_concat(source_base, 'rst')
        image['conf'] = conf

        yield {
            'target': rst_file,
            'dependency': [
                meta_file,
                os.path.join(paths.buildsystem, 'utils', 'rstcloth',
                             'images.py')
            ],
            'job': generate_image_pages,
            'args': image
        }

        for output in image['output']:
            # Optional tag distinguishes multiple renderings of one source.
            tag = '-' + output['tag'] if 'tag' in output else ''

            target_img = source_base + tag + '.png'

            inkscape_cmd = '{cmd} -z -d {dpi} -w {width} -y 0.0 -e >/dev/null {target} {source}'

            yield {
                'target': target_img,
                'dependency': [source_file, meta_file],
                'job': _generate_images,
                'args': [
                    inkscape_cmd, output['dpi'], output['width'], target_img,
                    source_file
                ],
            }
Exemple #18
0
def generate_external_rule(redir, base=None, conf=None):
    """Build a Redirect directive that targets an external location."""
    conf = lazy_conf(conf)

    if base is None:
        # Default to the first declared output path.
        base = redir['outputs'][0]

    template = 'Redirect {0} /{1}{2} {3}/{4}{5}'
    return template.format(redir['code'], base, redir['redirect-path'],
                           conf.project.url, redir['external'],
                           redir['url-base'])
Exemple #19
0
    def __init__(self, steps, conf=None):
        """Wrap a Steps collection for rendering.

        Raises:
            TypeError: if ``steps`` is not a Steps instance.
        """
        if not isinstance(steps, Steps):
            raise TypeError
        self.steps = steps

        self.conf = lazy_conf(conf)
        self.current_step = 1  # 1-based counter for rendered steps
        self.rst = RstCloth()
        self.hook()  # subclass customization point
Exemple #20
0
def generate_external_rule(redir, base=None, conf=None):
    """Render a Redirect directive pointing at an external destination."""
    conf = lazy_conf(conf)

    if base is None:
        # Fall back to the first output path.
        base = redir['outputs'][0]

    rule = 'Redirect {0} /{1}{2} {3}/{4}{5}'
    return rule.format(redir['code'], base, redir['redirect-path'],
                       conf.project.url, redir['external'], redir['url-base'])
Exemple #21
0
    def __init__(self, steps, conf=None):
        """Initialize the renderer around a Steps collection.

        Raises:
            TypeError: if ``steps`` is not a Steps instance.
        """
        if not isinstance(steps, Steps):
            raise TypeError
        self.steps = steps

        self.conf = lazy_conf(conf)
        self.current_step = 1  # steps are numbered from 1
        self.rst = RstCloth()
        self.hook()  # allow subclasses to customize setup
Exemple #22
0
def post_process_jobs(source_fn=None, tasks=None, conf=None):
    """
    input documents should be:

    {
      'transform': {
                     'regex': str,
                     'replace': str
                   }
      'type': <str>
      'file': <str|list>
    }

    ``transform`` can be either a document or a list of documents.

    Yields one process_page() job per target file.
    """
    # collections.Iterable was removed in Python 3.10; use the ABC module.
    from collections.abc import Iterable

    if tasks is None:
        conf = lazy_conf(conf)

        if source_fn is None:
            source_fn = os.path.join(conf.paths.project.root,
                                     conf.paths.builddata,
                                     'processing.yaml')
        tasks = ingest_yaml(source_fn)
    elif not isinstance(tasks, Iterable):
        raise ProcessingError('[ERROR]: cannot parse post processing specification.')

    def rjob(fn, regex, type):
        # One in-place rewrite job for a single file.
        return {
                 'target': fn,
                 'dependency': None,
                 'job': process_page,
                 'args': dict(fn=fn, output_fn=fn, regex=regex, builder=type)
               }

    for job in tasks:
        if not isinstance(job, dict):
            raise ProcessingError('[ERROR]: invalid replacement specification.')
        elif 'file' not in job or 'transform' not in job:
            # Both keys are needed below; rejecting only when *both* were
            # missing (the original 'and') let broken specs KeyError later.
            raise ProcessingError('[ERROR]: replacement specification incomplete.')

        if 'type' not in job:
            job['type'] = 'processor'

        if isinstance(job['transform'], list):
            regex = [(re.compile(rs['regex']), rs['replace'])
                     for rs in job['transform']]
        else:
            regex = (re.compile(job['transform']['regex']), job['transform']['replace'])

        if isinstance(job['file'], list):
            for fn in job['file']:
                yield rjob(fn, regex, job['type'])
        else:
            yield rjob(job['file'], regex, job['type'])
Exemple #23
0
def process_redirect(redirect, conf=None):
    """Given a redirection document, returns a properly formatted string for an
    Apache htaccess redirect statement."""

    conf = lazy_conf(conf)

    redirect = redirect_schema_migration(redirect)

    if 'all' in redirect['outputs']:
        redirect['outputs'].remove('all')
        redirect['outputs'].extend(conf.git.branches.published)

    # Iterate over a snapshot: the loop mutates redirect['outputs'], and
    # removing from the list being iterated skips the following element.
    for output in list(redirect['outputs']):
        if isinstance(output, dict):
            # dict.items() is a non-indexable view on Python 3 (the
            # original's items()[0] raised TypeError); take the single
            # (source, target) pair explicitly.
            source, target = next(iter(output.items()))

            if isinstance(target, dict):
                left, right = next(iter(target.items()))

                if source.startswith('after-'):
                    redirect['outputs'].remove(output)
                    idx = conf.git.branches.published.index(source.split('-', 1)[1])

                    for out in conf.git.branches.published[:idx]:
                        redirect['outputs'].append({ slash_concat(left, out): slash_concat(right, out) })
                elif source.startswith('before-'):
                    redirect['outputs'].remove(output)
                    idx = conf.git.branches.published.index(source.split('-', 1)[1])

                    for out in conf.git.branches.published[idx:]:
                        redirect['outputs'].append({ slash_concat(left, out): slash_concat(right, out) })
                else:
                    logger.error("{0} is invalid source for redirect: {1}".format(source, redirect))
            else:
                continue
        elif output.startswith('after-'):
            idx = conf.git.branches.published.index(output.split('-', 1)[1])

            redirect['outputs'].remove(output)
            redirect['outputs'].extend(conf.git.branches.published[:idx])
        elif output.startswith('before-'):
            idx = conf.git.branches.published.index(output.split('-', 1)[1])

            redirect['outputs'].remove(output)
            redirect['outputs'].extend(conf.git.branches.published[idx:])

    if redirect['code'] in (301, 302, 303):
        redirect['code'] = str(redirect['code'])
    else:
        msg = str(redirect['code']) + ' is not a supported redirect code'
        logger.critical(msg)
        raise Exception(msg)

    return redirect
Exemple #24
0
def api_jobs(conf=None):
    """Yield jobs that render each reference YAML spec into rst."""
    conf = lazy_conf(conf)

    reference_dir = os.path.join(conf.paths.projectroot, conf.paths.source,
                                 'reference')
    for source in expand_tree(reference_dir, 'yaml'):
        # foo.yaml -> foo.rst
        target = dot_concat(os.path.splitext(source)[0], 'rst')

        yield {
            'target': target,
            'dependency': source,
            'job': _generate_api_param,
            'args': [source, target, conf],
        }
def post_process_jobs(source_fn=None, tasks=None, conf=None):
    """
    input documents should be:

    {
      'transform': {
                     'regex': str,
                     'replace': str
                   }
      'type': <str>
      'file': <str|list>
    }

    ``transform`` can be either a document or a list of documents.

    Yields one process_page() job per target file.
    """
    # collections.Iterable was removed in Python 3.10; use the ABC module.
    from collections.abc import Iterable

    if tasks is None:
        conf = lazy_conf(conf)

        if source_fn is None:
            source_fn = os.path.join(conf.paths.project.root, conf.paths.builddata, "processing.yaml")
        tasks = ingest_yaml(source_fn)
    elif not isinstance(tasks, Iterable):
        raise ProcessingError("[ERROR]: cannot parse post processing specification.")

    def rjob(fn, regex, type):
        # One in-place rewrite job for a single file.
        return {
            "target": fn,
            "dependency": None,
            "job": process_page,
            "args": dict(fn=fn, output_fn=fn, regex=regex, builder=type),
        }

    for job in tasks:
        if not isinstance(job, dict):
            raise ProcessingError("[ERROR]: invalid replacement specification.")
        elif "file" not in job or "transform" not in job:
            # Both keys are required below; the original 'and' only caught
            # specs missing both, letting partial specs KeyError later.
            raise ProcessingError("[ERROR]: replacement specification incomplete.")

        if "type" not in job:
            job["type"] = "processor"

        if isinstance(job["transform"], list):
            regex = [(re.compile(rs["regex"]), rs["replace"]) for rs in job["transform"]]
        else:
            regex = (re.compile(job["transform"]["regex"]), job["transform"]["replace"])

        if isinstance(job["file"], list):
            for fn in job["file"]:
                yield rjob(fn, regex, job["type"])
        else:
            yield rjob(job["file"], regex, job["type"])
Exemple #26
0
def image_jobs(conf=None):
    """Yield jobs that build image artifacts from SVG sources.

    Emits one rst-include generation job per image plus one inkscape
    rasterization job per declared output size.
    """
    # Bug fix: the conf argument was previously discarded (lazy_conf(None)).
    conf = lazy_conf(conf)
    paths = conf.paths

    meta_file = os.path.join(paths.images, 'metadata') + '.yaml'

    # PEP 479: 'raise StopIteration' inside a generator is a RuntimeError
    # on Python 3.7+; a bare return ends iteration cleanly.
    if not os.path.exists(meta_file):
        return

    images_meta = ingest_yaml_list(meta_file)

    if images_meta is None:
        return

    for image in images_meta:
        image['dir'] = paths.images
        source_base = os.path.join(image['dir'], image['name'])
        source_file = dot_concat(source_base, 'svg')
        rst_file = dot_concat(source_base, 'rst')
        image['conf'] = conf

        yield {
                'target': rst_file,
                'dependency': [ meta_file, os.path.join(paths.buildsystem, 'utils', 'rstcloth', 'images.py') ],
                'job': generate_image_pages,
                'args': image,
                'description': "generating rst include file {0} for {1}".format(rst_file, source_file)
              }

        for output in image['output']:
            # Optional tag distinguishes multiple renderings of one source.
            tag = '-' + output['tag'] if 'tag' in output else ''

            target_img = source_base + tag + '.png'

            inkscape_cmd = '{cmd} -z -d {dpi} -w {width} -y 0.0 -e >/dev/null {target} {source}'

            yield {
                    'target': target_img,
                    'dependency': [ source_file, meta_file ],
                    'job': _generate_images,
                    'args': [
                              inkscape_cmd,
                              output['dpi'],
                              output['width'],
                              target_img,
                              source_file
                            ],
                    'description': 'generating image file {0} from {1}'.format(target_img, source_file)
                  }
Exemple #27
0
def table_jobs(conf=None):
    """Yield jobs that render 'table*' YAML specs into rst tables."""
    conf = lazy_conf(conf)

    includes_dir = os.path.join(conf.paths.projectroot, conf.paths.includes)
    for source in expand_tree(includes_dir, 'yaml'):
        # Only files named table*.yaml are table specifications.
        if not os.path.basename(source).startswith('table'):
            continue

        target = _get_table_output_name(source)
        list_target = _get_list_table_output_name(source)

        yield {
            'target': [target, list_target],
            'dependency': source,
            'job': _generate_tables,
            'args': [source, target, list_target],
        }
Exemple #28
0
def include_files_unused(inc_files=None, conf=None):
    """Return include files that no source file actually includes.

    Editor backups ('~') and yaml specs are ignored. ``inc_files`` may
    supply a pre-computed candidate list; otherwise the includes tree is
    scanned.
    """
    conf = lazy_conf(conf)

    if inc_files is None:
        # Strip the leading 'source' prefix (6 chars) from each path.
        # (The original unconditionally rebuilt this, ignoring the arg.)
        inc_files = [fn[6:] for fn in
                     expand_tree(os.path.join(conf.paths.includes), None)]

    # Bug fix: include_files(conf) bound conf to the *files* parameter,
    # which returned conf itself; pass by keyword to get the real mapping.
    mapping = include_files(conf=conf)

    results = []
    for fn in inc_files:
        if fn.endswith('yaml') or fn.endswith('~'):
            continue
        if fn not in mapping:
            results.append(fn)

    return results
Exemple #29
0
def generate_external_rule(redir, base=None, conf=None):
    """Build a Redirect directive whose destination is external.

    Absolute 'external' values (http...) are used verbatim; otherwise the
    project URL is prepended.
    """
    conf = lazy_conf(conf)

    if base is None:
        base = redir['outputs'][0]

    if redir['external'].startswith('http'):
        template = 'Redirect {0} /{1}{2} {3}{4}'
        return template.format(redir['code'], base, redir['from'],
                               redir['external'], redir['to'])

    template = 'Redirect {0} /{1}{2} {3}{4}{5}'
    return template.format(redir['code'], base, redir['from'],
                           conf.project.url, redir['external'], redir['to'])
Exemple #30
0
    def __init__(self, filename, conf=None, sort=False):
        """Load a toc spec from ``filename``.

        ref-toc files are flagged so rendering can treat them specially.
        """
        self.spec = self._process_spec(filename, sort)
        self.conf = lazy_conf(conf)

        # ref-toc files get reference-style rendering.
        self._is_ref = "ref-toc" in filename

        self.table = None
        self.contents = None
        self.dfn = None
        self.final = False
Exemple #31
0
def generate_external_rule(redir, base=None, conf=None):
    """Render a Redirect directive with an external destination.

    If 'external' is already an absolute URL it is used as-is; otherwise
    it is appended to the project URL.
    """
    conf = lazy_conf(conf)

    if base is None:
        # Default to the first declared output path.
        base = redir['outputs'][0]

    external = redir['external']
    if external.startswith('http'):
        rule = 'Redirect {0} /{1}{2} {3}{4}'
        return rule.format(redir['code'], base, redir['from'], external,
                           redir['to'])

    rule = 'Redirect {0} /{1}{2} {3}{4}{5}'
    return rule.format(redir['code'], base, redir['from'], conf.project.url,
                       external, redir['to'])
Exemple #32
0
    def __init__(self, filename, conf=None, sort=False):
        """Parse the toc spec in ``filename`` and set up render state.

        Filenames containing "ref-toc" are marked for reference-style
        rendering.
        """
        self.spec = self._process_spec(filename, sort)
        self.conf = lazy_conf(conf)

        self._is_ref = "ref-toc" in filename

        # Render products, populated later.
        self.table = None
        self.contents = None
        self.dfn = None
        self.final = False
Exemple #33
0
def generate_list_file(outputs, path, conf=None):
    """Write the json inventory file: one public URL per output file."""
    dirname = os.path.dirname(path)

    conf = lazy_conf(conf)

    url = '/'.join([conf.project.url, conf.project.basepath, 'json'])

    if not os.path.exists(dirname):
        os.mkdir(dirname)

    with open(path, 'w') as f:
        for fn in outputs:
            # Drop the first three path components (the build prefix).
            suffix = fn.split('/', 3)[3:][0]
            f.write('/'.join([url, suffix]) + '\n')

    logger.info('rebuilt inventory of json output.')
def generate_list_file(outputs, path, conf=None):
    """Write one public json URL per output file to ``path``."""
    dirname = os.path.dirname(path)

    conf = lazy_conf(conf)

    base_url = '/'.join([conf.project.url, conf.project.basepath, 'json'])

    if not os.path.exists(dirname):
        os.mkdir(dirname)

    with open(path, 'w') as f:
        for fn in outputs:
            # Strip the leading three path components (build prefix).
            relative = fn.split('/', 3)[3:][0]
            f.write('/'.join([base_url, relative]) + '\n')

    print('[json]: rebuilt inventory of json output.')
Exemple #35
0
def include_files_unused(inc_files=None, conf=None):
    """Return include files that nothing in the source tree includes.

    Editor backups ('~') and yaml specs are skipped. ``inc_files`` may
    supply a pre-computed candidate list; otherwise the includes tree is
    scanned.
    """
    conf = lazy_conf(conf)

    if inc_files is None:
        # Strip the leading 'source' prefix (6 chars) from each path.
        # (The original unconditionally rebuilt this, ignoring the arg.)
        inc_files = [
            fn[6:] for fn in expand_tree(os.path.join(conf.paths.includes), None)
        ]

    # Bug fix: include_files(conf) bound conf to the *files* parameter and
    # returned it unchanged; pass by keyword to obtain the real mapping.
    mapping = include_files(conf=conf)

    results = []
    for fn in inc_files:
        if fn.endswith('yaml') or fn.endswith('~'):
            continue
        if fn not in mapping:
            results.append(fn)

    return results
Exemple #36
0
def gettext_jobs(conf=None):
    """Yield jobs that sync generated gettext files into the locale tree."""
    conf = lazy_conf(conf)

    locale_dirs = os.path.join(conf.paths.projectroot, conf.paths.locale,
                               'pot')
    branch_output = os.path.join(conf.paths.projectroot,
                                 conf.paths.branch_output, 'gettext')

    # Length of the prefix to strip, including the trailing separator.
    path_offset = len(branch_output) + 1

    for fn in expand_tree(branch_output, None):
        relative = fn[path_offset:]
        yield {
            'job': copy_if_needed,
            'args': [fn, os.path.join(locale_dirs, relative), None],
            'description': "migrating po file {0} if needed".format(fn)
        }
Exemple #37
0
def gettext_jobs(conf=None):
    """Yield copy jobs migrating built gettext output into the pot tree."""
    conf = lazy_conf(conf)

    pot_dir = os.path.join(conf.paths.projectroot, conf.paths.locale, 'pot')
    gettext_dir = os.path.join(conf.paths.projectroot,
                               conf.paths.branch_output, 'gettext')

    # Prefix length to strip, including the path separator.
    prefix_len = len(gettext_dir) + 1

    for fn in expand_tree(gettext_dir, None):
        yield {
            'job': copy_if_needed,
            'args': [fn, os.path.join(pot_dir, fn[prefix_len:]), None]
        }
Exemple #38
0
def generate_simple_rule(redir, base=None, conf=None):
    """Render one Apache ``Redirect`` directive for the *redir* document.

    :param redir: redirect spec dict with 'code', 'from', 'to', 'outputs'.
    :param base: output base (string, or single-entry {left: right} dict for
        asymmetric source/target bases); defaults to redir['outputs'][0].
    :param conf: optional configuration object; lazily resolved when ``None``.
    :returns: the formatted ``Redirect`` directive string.
    """
    conf = lazy_conf(conf)

    if base is None:
        base = redir['outputs'][0]

    if isinstance(base, dict):
        # bug fix: dict.items() is a view on Python 3 and does not support
        # indexing; next(iter(...)) fetches the single pair portably.
        left, right = next(iter(base.items()))

        o = 'Redirect {0} /{1}{2} {3}/{4}{5}'

        return o.format(redir['code'], left, redir['from'], conf.project.url,
                        right, redir['to'])
    else:
        o = 'Redirect {0} /{1}{2} {3}/{1}{4}'

        return o.format(redir['code'], base, redir['from'], conf.project.url,
                        redir['to'])
def json_output(conf=None):
    """Sync built json artifacts and the file list into public staging."""
    conf = lazy_conf(conf)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')
    public_list_file = os.path.join(conf.paths.public_site_output,
                                    'json', '.file_list')

    json_dst = os.path.join(conf.paths.public_site_output, 'json')
    if not os.path.exists(json_dst):
        os.makedirs(json_dst)

    # trailing slash on src makes rsync copy the directory *contents*
    src = os.path.join(conf.paths.branch_output, 'json') + '/'
    cmd = ('rsync --recursive --times --delete --exclude="*pickle" '
           '--exclude=".buildinfo" --exclude="*fjson" {src} {dst}')
    command(cmd.format(src=src, dst=json_dst))

    copy_if_needed(list_file, public_list_file)
    print('[json]: deployed json files to local staging.')
Exemple #40
0
def gettext_jobs(conf=None):
    """Emit a copy task for each generated .pot file in the gettext build."""
    conf = lazy_conf(conf)

    target_root = os.path.join(conf.paths.projectroot,
                               conf.paths.locale, 'pot')
    build_root = os.path.join(conf.paths.projectroot,
                              conf.paths.branch_output, 'gettext')

    # the +1 accounts for the path separator after the build root
    strip = len(build_root) + 1

    for fn in expand_tree(build_root, None):
        destination = os.path.join(target_root, fn[strip:])
        yield {
            'job': copy_if_needed,
            'args': [fn, destination, None],
            'description': "migrating po file {0} if needed".format(fn)
        }
Exemple #41
0
def generate_simple_rule(redir, base=None, conf=None):
    """Render one Apache ``Redirect`` directive for the *redir* document.

    This variant reads 'redirect-path'/'url-base' keys from the spec.

    :param redir: redirect spec dict with 'code', 'redirect-path',
        'url-base', 'outputs'.
    :param base: output base (string, or single-entry {left: right} dict);
        defaults to redir['outputs'][0].
    :param conf: optional configuration object; lazily resolved when ``None``.
    :returns: the formatted ``Redirect`` directive string.
    """
    conf = lazy_conf(conf)

    if base is None:
        base = redir['outputs'][0]

    if isinstance(base, dict):
        # bug fix: dict.items() is a view on Python 3 and cannot be indexed;
        # next(iter(...)) retrieves the single (left, right) pair portably.
        left, right = next(iter(base.items()))

        o = 'Redirect {0} /{1}{2} {3}/{4}{5}'

        return o.format(redir['code'], left, redir['redirect-path'],
                        conf.project.url, right, redir['url-base'])
    else:
        o = 'Redirect {0} /{1}{2} {3}/{1}{4}'

        return o.format(redir['code'], base, redir['redirect-path'],
                        conf.project.url, redir['url-base'])
Exemple #42
0
def json_output_jobs(conf=None):
    """Yield jobs that strip markup/entities from Sphinx fjson output, plus a
    final job that writes the json file list.

    :param conf: optional configuration object; lazily resolved when ``None``.
    """
    conf = lazy_conf(conf)

    # strip header anchors, any remaining tags, and common html entities
    regexes = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>

        path = os.path.join(conf.paths.branch_output,
                            'json', os.path.splitext(fn.split(os.path.sep, 1)[1])[0])
        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        # mms builds only a subset of pages; skip sources with no fjson output
        if conf.project.name == 'mms':
            if not os.path.exists(fjson):
                continue

        yield { 'target': json,
                'dependency': fjson,
                'job': process_json_file,
                # bug fix: the format string lacked a placeholder, so the
                # filename was silently dropped from the description
                'description': "processing json file {0}".format(json),
                'args': (fjson, json, regexes, conf) }

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')

    yield { 'target': list_file,
            'dependency': None,
            'description': 'generating json index list {0}'.format(list_file),
            'job': generate_list_file,
            'args': (outputs, list_file, conf) }
def json_output(conf=None):
    """Deploy processed json output and its file list to local staging."""
    conf = lazy_conf(conf)

    staging_list = os.path.join(conf.paths.branch_staging, 'json-file-list')
    public_list = os.path.join(conf.paths.public_site_output, 'json',
                               '.file_list')

    destination = os.path.join(conf.paths.public_site_output, 'json')
    if not os.path.exists(destination):
        os.makedirs(destination)

    cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'
    # trailing slash: rsync copies the directory contents, not the directory
    command(cmd.format(src=os.path.join(conf.paths.branch_output, 'json') + '/',
                       dst=destination))

    copy_if_needed(staging_list, public_list)
    print('[json]: deployed json files to local staging.')
Exemple #44
0
def error_pages(conf=None):
    """Rewrite relative links in rendered error pages to absolute URLs."""
    conf = lazy_conf(conf)

    error_conf = os.path.join(conf.paths.builddata, 'errors.yaml')

    if not os.path.exists(error_conf):
        return None

    pages = ingest_yaml_list(error_conf)

    # replace '../../' prefixes with the project's absolute url + tag
    sub = (re.compile(r'\.\./\.\./'),
           conf.project.url + r'/' + conf.project.tag + r'/')

    for error in pages:
        page = os.path.join(conf.paths.projectroot,
                            conf.paths.branch_output, 'dirhtml',
                            'meta', error, 'index.html')
        munge_page(fn=page, regex=sub, tag='error-pages')

    print('[error-pages]: rendered {0} error pages'.format(len(pages)))
def json_output_jobs(conf=None):
    """Generate jobs that post-process Sphinx json output files."""
    conf = lazy_conf(conf)

    # strip header anchors, leftover tags, and common html entities
    substitutions = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for txt in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        stem = os.path.splitext(txt.split(os.path.sep, 1)[1])[0]
        path = os.path.join(conf.paths.branch_output, 'json', stem)
        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        # mms builds only a subset of pages; skip sources without fjson
        if conf.project.name == 'mms' and not os.path.exists(fjson):
            continue

        yield {'target': json,
               'dependency': fjson,
               'job': process_json_file,
               'args': (fjson, json, substitutions, conf)}

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')

    yield {'target': list_file,
           'dependency': None,
           'job': generate_list_file,
           'args': (outputs, list_file, conf)}
Exemple #46
0
def output_sphinx_stream(out, conf=None):
    """Filter, normalize, dedupe, and print Sphinx build output.

    :param out: raw Sphinx output as a single newline-separated string.
    :param conf: optional configuration object; lazily resolved when ``None``.
    """
    if conf is None:
        conf = lazy_conf(conf)

    out = [o for o in out.split('\n') if o != '']

    full_path = os.path.join(conf.paths.projectroot, conf.paths.branch_output)

    regx = re.compile(
        r'(.*):[0-9]+: WARNING: duplicate object description of ".*", other instance in (.*)'
    )

    printable = []
    for idx, l in enumerate(out):
        if is_msg_worthy(l) is not True:
            # keep positional alignment with 'out' so idx-1 lookups work
            printable.append(None)
            continue

        f1 = regx.match(l)
        if f1 is not None:
            g = f1.groups()

            # suppress the duplicate-description warning when it points back
            # at the same file
            if g[1].endswith(g[0]):
                printable.append(None)
                continue

        l = path_normalization(l, full_path, conf)

        if l.startswith('InputError: [Errno 2] No such file or directory'):
            # fold the missing-file path into the previous message
            l = path_normalization(
                l.split(' ')[-1].strip()[1:-2], full_path, conf)
            # bug fix: the previous entry can be None (suppressed line), and
            # None += str raises TypeError
            if printable[idx - 1] is not None:
                printable[idx - 1] += ' ' + l
            l = None

        printable.append(l)

    # bug fix: drop suppressed (None) entries before sorting -- on Python 3,
    # None is not orderable against str and list.sort() raises TypeError
    printable = sorted(set(p for p in printable if p is not None))

    print_build_messages(printable)
Exemple #47
0
def main():
    """Entry point: build redirect rules from a YAML specification file."""
    ui = user_input()
    conf = lazy_conf()

    lines = []
    for doc in ingest_yaml(ui.data):
        if doc['type'] == 'redirect':
            lines.append(generate_redirects(process_redirect(doc, conf=conf),
                                            match=ui.match, conf=conf))
        if doc['type'] == 'redirect-draft':
            # drafts are previewed on stdout instead of written to the file
            print(generate_redirects(process_redirect(doc, conf=conf),
                                     match=ui.match, conf=conf))

    if not lines:
        return

    target_dir = os.path.dirname(ui.filename)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    with open(ui.filename, 'w') as f:
        f.writelines(lines)

    print('[redirect]: regenerated ' + ui.filename + ' file.')
def makefile_meta():
    """(Re)generate makefile.meta by running the makecloth meta script.

    Best-effort: if the configured script fails, retry with the in-tree
    docs-tools copy; any failure there is deliberately swallowed.
    """
    # because this is typically called by bootstrap.py, projectroot is the
    # doctools directory.
    conf = lazy_conf(None)

    # re/generate the makefile.meta

    script_path = os.path.join(conf.paths.projectroot, conf.paths.buildsystem,
                               'makecloth', 'meta.py')
    makefn_path = os.path.join(conf.paths.projectroot, conf.paths.output,
                               'makefile.meta')

    cmd = 'python {0} {1}'.format(script_path, makefn_path).split()

    try:
        subprocess.check_call(cmd)
    except Exception:
        # bug fix: was a bare 'except:', which also traps SystemExit and
        # KeyboardInterrupt; bug fix: the fallback command was passed as one
        # string (shell=False), which cannot resolve to an executable --
        # split it into an argv list like the primary call above.
        subprocess.check_call('python {0} {1}'.format(
            os.path.join('build', 'docs-tools', 'makecloth', 'meta.py'),
            os.path.join('build', 'makefile.meta')).split())
    finally:
        # 'return' in finally swallows any exception raised by the fallback
        # call, preserving the original best-effort behavior --
        # NOTE(review): confirm this is intended.
        return
Exemple #49
0
def generated_includes(conf=None):
    """Map generated include files to the source files they depend on."""
    conf = lazy_conf(conf)

    spec_files = []
    step_files = []
    for fn in expand_tree(os.path.join(conf.paths.includes),
                          input_extension='yaml'):
        base = os.path.basename(fn)

        # toc-spec and ref-spec files share the same dependency format
        if base.startswith(('toc-spec', 'ref-spec')):
            spec_files.append(fn)
        elif base.startswith('steps'):
            step_files.append(fn)

    # strip the source-directory prefix so keys are source-relative paths
    source_len = len(conf.paths.source)
    path_prefix = conf.paths.includes[source_len:]
    mapping = {}

    for spec_file in spec_files:
        data = ingest_yaml_doc(spec_file)
        mapping[spec_file[source_len:]] = [
            os.path.join(path_prefix, src) for src in data['sources']
        ]

    for step_def in step_files:
        deps = [step['source']['file']
                for step in ingest_yaml_list(step_def)
                if 'source' in step]

        if deps:
            mapping[step_def[source_len:]] = [
                os.path.join(path_prefix, d) for d in deps
            ]

    return mapping
Exemple #50
0
def generated_includes(conf=None):
    """Build the dependency mapping for toc/ref-spec and steps include files."""
    conf = lazy_conf(conf)

    toc_spec_files, step_files = [], []
    for fn in expand_tree(os.path.join(conf.paths.includes), input_extension='yaml'):
        name = os.path.basename(fn)
        # toc-spec and ref-spec documents use the same 'sources' layout
        if name.startswith('toc-spec') or name.startswith('ref-spec'):
            toc_spec_files.append(fn)
        elif name.startswith('steps'):
            step_files.append(fn)

    # keys are stored relative to the source directory
    maskl = len(conf.paths.source)
    prefix = conf.paths.includes[maskl:]

    mapping = {}
    for spec in toc_spec_files:
        sources = ingest_yaml_doc(spec)['sources']
        mapping[spec[maskl:]] = [os.path.join(prefix, s) for s in sources]

    for step_def in step_files:
        deps = []
        for step in ingest_yaml_list(step_def):
            if 'source' in step:
                deps.append(step['source']['file'])

        if len(deps) > 0:
            mapping[step_def[maskl:]] = [os.path.join(prefix, d) for d in deps]

    return mapping
Exemple #51
0
def external_jobs(conf=None):
    """Yield tasks from pre-stage job modules in the project's 'local' dir.

    :param conf: optional configuration object; lazily resolved when ``None``.
    """
    conf = lazy_conf(conf)

    ext_mod_path = os.path.join(conf.paths.projectroot, 'local')
    if not os.path.exists(ext_mod_path):
        # bug fix (PEP 479): 'raise StopIteration' inside a generator raises
        # RuntimeError on Python 3.7+; a bare return ends iteration cleanly.
        return

    external_mods = []

    for mod in expand_tree(ext_mod_path, 'py'):
        path, name = os.path.split(mod)
        name, _ = os.path.splitext(name)

        # renamed from 'file' to avoid shadowing the builtin
        fp, filename, data = imp.find_module(name, [path])

        try:
            imp.load_module(name, fp, mod, data)
        finally:
            # bug fix: find_module returns an open file object that
            # load_module does not close for us
            if fp is not None:
                fp.close()

        external_mods.append(name)

    for name in external_mods:
        mod = sys.modules[name]
        # only modules that declare jobs for a 'pre*' stage contribute tasks
        if 'jobs' in dir(mod) and 'stage' in dir(mod) and mod.stage.startswith('pre'):
            for task in mod.jobs(conf):
                yield task
Exemple #52
0
def changed_includes(conf=None):
    """List json build reports whose source .txt files have uncommitted edits."""
    from pygit2 import Repository, GIT_STATUS_CURRENT, GIT_STATUS_IGNORED
    conf = lazy_conf(conf)

    repo = Repository(conf.paths.projectroot)

    unchanged_flags = (GIT_STATUS_CURRENT, GIT_STATUS_IGNORED)
    changed = [path[6:]  # drop the leading 'source' component
               for path, flag in repo.status().items()
               if flag not in unchanged_flags
               and path.startswith('source/')
               and path.endswith('.txt')]

    source_path = os.path.join(conf.paths.source, conf.paths.output,
                               conf.git.branches.current, 'json')

    return [report for report in _generate_report(None)
            if report['source'][len(source_path):] in changed]
def bootstrap(**kwargs):
    """
    The bootstrap file calls this function. Use this as a site for future
    extension.
    """

    try:
        # expected keys: 'build_tools_path' and 'conf_path'; a KeyError here
        # means the calling bootstrap.py predates this interface
        build_tools_path = kwargs['build_tools_path']
        conf_path = kwargs['conf_path']

        # expose the build tools' 'utils' package via a symlink so the
        # 'import utils.bootstrap' below can resolve it
        symlink(name=os.path.join(build_tools_path, 'bin', 'utils'),
                target=os.path.join(os.path.abspath(build_tools_path),
                                    'utils'))

        import utils.bootstrap

        utils.bootstrap.fabric(build_tools_path, conf_path)
        utils.bootstrap.config(build_tools_path, conf_path)
        utils.bootstrap.utils(build_tools_path, conf_path)

    except KeyError:
        print(
            '[bootstrap] [warning]: your bootstrap.py is probably out of date. '
            'Please update as soon as possible.')

    # imported again outside the try so these names exist even when the
    # KeyError path was taken (relies on the symlink already being in place)
    import utils.bootstrap
    from utils.config import lazy_conf

    conf = lazy_conf(None)

    utils.bootstrap.pin_tools(conf)
    utils.bootstrap.makefile_meta(conf)

    # NOTE(review): os.path.split() returns a (head, tail) pair, so this only
    # fires when a whole component equals 'primer' -- confirm that matching
    # the directory basename (not a substring) is the intent.
    if 'primer' in os.path.split(os.getcwd()):
        utils.bootstrap.primer()

    print('[bootstrap]: initialized fabfiles and dependencies.')
Exemple #54
0
def changed_includes(conf=None):
    """Return build reports for source files modified in the working tree."""
    from pygit2 import Repository, GIT_STATUS_CURRENT, GIT_STATUS_IGNORED
    conf = lazy_conf(conf)

    r = Repository(conf.paths.projectroot)

    changed = []
    for fname, status in r.status().items():
        # unchanged and ignored files cannot affect the build output
        if status in (GIT_STATUS_CURRENT, GIT_STATUS_IGNORED):
            continue
        if fname.startswith('source/') and fname.endswith('.txt'):
            changed.append(fname[6:])  # strip the 'source' prefix

    source_path = os.path.join(conf.paths.source, conf.paths.output,
                               conf.git.branches.current, 'json')

    changed_report = []
    for report in _generate_report(None):
        if report['source'][len(source_path):] in changed:
            changed_report.append(report)

    return changed_report
Exemple #55
0
def external_jobs(conf=None):
    conf = lazy_conf(conf)

    ext_mod_path = os.path.join(conf.paths.projectroot, 'local')
    if not os.path.exists(ext_mod_path):
        raise StopIteration

    external_mods = []

    for mod in expand_tree(ext_mod_path, 'py'):
        path, name = os.path.split(mod)
        name, _ = os.path.splitext(name)

        file, filename, data = imp.find_module(name, [path])

        imp.load_module(name, file, mod, data)
        external_mods.append(name)

    for name in external_mods:
        mod = sys.modules[name]
        if 'jobs' in dir(mod) and 'stage' in dir(mod) and mod.stage.startswith(
                'pre'):
            for task in mod.jobs(conf):
                yield task
def json_output_jobs(conf=None):
    """Yield jobs that clean Sphinx fjson output into plain-text json files."""
    conf = lazy_conf(conf)

    # strip header anchors, remaining tags, and common html entities
    cleanup_patterns = [
        (re.compile(r'<a class=\"headerlink\"'), '<a'),
        (re.compile(r'<[^>]*>'), ''),
        (re.compile(r'&#8220;'), '"'),
        (re.compile(r'&#8221;'), '"'),
        (re.compile(r'&#8216;'), "'"),
        (re.compile(r'&#8217;'), "'"),
        (re.compile(r'&#\d{4};'), ''),
        (re.compile(r'&nbsp;'), ''),
        (re.compile(r'&gt;'), '>'),
        (re.compile(r'&lt;'), '<')
    ]

    outputs = []
    for fn in expand_tree('source', 'txt'):
        # path = build/<branch>/json/<filename>
        rel = os.path.splitext(fn.split(os.path.sep, 1)[1])[0]
        path = os.path.join(conf.paths.branch_output, 'json', rel)
        fjson = dot_concat(path, 'fjson')
        json = dot_concat(path, 'json')

        # mms builds only a subset of pages; skip sources without fjson
        if conf.project.name == 'mms' and not os.path.exists(fjson):
            continue

        yield dict(target=json,
                   dependency=fjson,
                   job=process_json_file,
                   args=(fjson, json, cleanup_patterns, conf))

        outputs.append(json)

    list_file = os.path.join(conf.paths.branch_staging, 'json-file-list')

    yield dict(target=list_file,
               dependency=None,
               job=generate_list_file,
               args=(outputs, list_file, conf))