Example #1
def htaccess(fn='.htaccess'):
    conf = lazy_conf()

    in_files = ( i
                 for i in expand_tree(conf.paths.builddata, 'yaml')
                 if os.path.basename(i).startswith('htaccess') )

    sources = []
    for i in in_files:
        sources.extend(ingest_yaml_list(i))

    dirname = os.path.dirname(fn)
    if dirname and not os.path.exists(dirname):
        os.makedirs(dirname)

    lines = set()

    for redir in sources:
        lines.add(generate_redirects(process_redirect(redir, conf), conf=conf, match=False))

    with open(fn, 'w') as f:
        f.writelines(lines)
        f.write('\n')
        f.writelines( [r'<FilesMatch "\.(ttf|otf|eot|woff)$">', '\n',
                       '   Header set Access-Control-Allow-Origin "*"', '\n',
                       '</FilesMatch>', '\n'] )

    logging.info('redirect: regenerated {0} with {1} redirects ({2} lines)'.format(fn, len(sources), len(lines)))
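
Every example on this page feeds YAML files through ingest_yaml_list(). The helper itself is never shown; the following is a minimal sketch of what these call sites imply, assuming PyYAML and that each file may hold either a list or a stream of documents:

import yaml

def ingest_yaml_list(*files):
    # Hypothetical sketch: flatten one or more YAML files into a single
    # list of records, whether each file holds a list or a document stream.
    o = []
    for fn in files:
        with open(fn) as f:
            for doc in yaml.safe_load_all(f):
                if isinstance(doc, list):
                    o.extend(doc)
                else:
                    o.append(doc)
    return o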
Example #2
def htaccess(fn='.htaccess'):
    conf = lazy_conf()

    in_files = (i for i in expand_tree(conf.paths.builddata, 'yaml')
                if os.path.basename(i).startswith('htaccess'))

    sources = []
    for i in in_files:
        sources.extend(ingest_yaml_list(i))

    dirname = os.path.dirname(fn)
    if dirname and not os.path.exists(dirname):
        os.makedirs(dirname)

    lines = set()

    for redir in sources:
        lines.add(
            generate_redirects(process_redirect(redir, conf),
                               conf=conf,
                               match=False))

    with open(fn, 'w') as f:
        f.writelines(lines)
        f.write('\n')
        f.writelines([
            r'<FilesMatch "\.(ttf|otf|eot|woff)$">', '\n',
            '   Header set Access-Control-Allow-Origin "*"', '\n',
            '</FilesMatch>', '\n'
        ])

    logging.info(
        'redirect: regenerated {0} with {1} redirects ({2} lines)'.format(
            fn, len(sources), len(lines)))
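
Both htaccess() variants discover their inputs with expand_tree(). A minimal sketch under the assumption that it walks a directory tree and filters by file extension (the extension handling is a guess inferred from the 'yaml' argument above):

import os

def expand_tree(path, extension=None):
    # Hypothetical sketch: yield every file beneath 'path', keeping only
    # files with the given extension (passed without the leading dot).
    for root, _, files in os.walk(path):
        for name in files:
            if extension is None or name.endswith('.' + extension):
                yield os.path.join(root, name)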
Example #3
def migrate(conf=None):
    "Migrates all manual files to primer according to the spec. As needed."

    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)

    if conf.project.name != 'primer':
        return False
    elif len(migration_paths) == 0:
        return False
    else:
        migrations = ingest_yaml_list(*migration_paths)

        truncate_jobs = []
        munge_jobs = []
        migration_jobs = []
        append_jobs = []

        for page in migrations:
            if 'sources' in page:
                migrations.extend(convert_multi_source(page))
                continue

            page = fix_migration_paths(page)

            fq_target = os.path.join(conf.paths.projectroot, conf.paths.source,
                                     page['target'])
            fq_source = os.path.join(conf.paths.manual_source, page['source'])

            migration_jobs.append(build_migration_job(fq_target, fq_source))

            if 'truncate' in page:
                truncate_jobs.append(
                    build_truncate_job(page['truncate'], fq_target))

            if 'transform' in page:
                migration_jobs[-1]['job'] = copy_always
                munge_jobs.append(
                    build_transform_job(page['transform'], fq_target))

            if 'append' in page:
                migration_jobs[-1]['job'] = copy_always
                append_jobs.append(
                    build_append_job(page, fq_target, migration_paths))

        with ProcessPool() as p:
            migration_res = p.runner(migration_jobs)
            munge_res = p.runner(post_process_jobs(tasks=munge_jobs))
            truncate_res = p.runner(truncate_jobs)
            append_res = p.runner(append_jobs)

        msg = '[primer]: migrated {0}, munged {1}, truncated {2}, and appended to {3} pages.'
        print(
            msg.format(len(migration_res), len(munge_res), len(truncate_res),
                       len(append_res)))

        return True
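
The job records that migrate() queues all share one shape: a target, a dependency, a callable, and its arguments. Below is a hedged sketch of build_migration_job() along those lines; the record layout matches the intersphinx_jobs() example further down this page, but the default copy behavior is an assumption:

def build_migration_job(target, source):
    # Hypothetical sketch: copy 'source' to 'target' only when needed;
    # callers swap in copy_always when a later transform or append step
    # will rewrite the target regardless.
    return {'target': target,
            'dependency': source,
            'job': copy_if_needed,
            'args': [source, target, 'migration']}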
Example #4
def package(target=None, conf=None):
    "Builds a package from the current build output."

    if conf is None:
        conf = get_conf()

    archive_path = os.path.join(conf.paths.projectroot, conf.paths.buildarchive)
    fn = package_filename(archive_path, target, conf)

    pconf = conf_from_list('target', ingest_yaml_list(os.path.join(conf.paths.projectroot,
                                                                   conf.paths.builddata,
                                                                   'push.yaml')))
    if target is None:
        pconf = pconf[list(pconf.keys())[0]]
    else:
        pconf = pconf[target]

    if not os.path.exists(archive_path):
        os.makedirs(archive_path)
        puts('[deploy] [tarball]: creating {0} directory'.format(archive_path))
    elif not os.path.isdir(archive_path):
        abort('[ERROR]: {0} exists and is not a directory.'.format(archive_path))

    arc_conf = os.path.join(conf.paths.projectroot,
                            conf.paths.branch_output,
                            'conf.json')

    with open(arc_conf, 'w') as f:
        json.dump(get_build_metadata(conf), f, indent=2)

    with tarfile.open(fn, 'w:gz') as t:
        if 'branched' in pconf.options:
            input_path = os.path.join(conf.paths.projectroot,
                                      conf.paths.output,
                                      pconf.paths.local,
                                      conf.git.branches.current)
            output_path_name = conf.git.branches.current
        else:
            input_path = os.path.join(conf.paths.projectroot,
                                      conf.paths.output,
                                      pconf.paths.local)
            output_path_name = os.path.split(pconf.paths.local)[-1]

        t.add(name=input_path,
              arcname=output_path_name)

        t.add(arc_conf, arcname='conf.json')

        if 'static' in pconf.paths:
            for path in pconf.paths.static:
                rendered_path = os.path.join(conf.paths.projectroot,
                                             conf.paths.public, path)
                if os.path.exists(rendered_path):
                    t.add(name=rendered_path,
                          arcname=path)

    puts('[deploy] [tarball]: created {0} as archive of current build artifacts.'.format(fn))
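
package_filename() is not shown anywhere on this page; the following is a plausible sketch, assuming the name encodes project, target, branch, and a timestamp so successive archives never collide:

import os
import time

def package_filename(archive_path, target, conf):
    # Hypothetical sketch: derive a unique tarball name inside archive_path.
    parts = [conf.project.name]
    if target is not None:
        parts.append(target)
    parts.append(conf.git.branches.current)
    parts.append(time.strftime('%Y-%m-%d_%H-%M-%S'))
    return os.path.join(archive_path, '-'.join(parts) + '.tar.gz')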
Example #5
def migrate(conf=None):
    "Migrates all manual files to primer according to the spec. As needed."

    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)

    if conf.project.name != 'primer':
        return False
    elif len(migration_paths) == 0:
        return False
    else:
        migrations = ingest_yaml_list(*migration_paths)

        truncate_jobs = []
        munge_jobs = []
        migration_jobs = []
        append_jobs = []

        for page in migrations:
            if 'sources' in page:
                migrations.extend(convert_multi_source(page))
                continue

            page = fix_migration_paths(page)

            fq_target = os.path.join(conf.paths.projectroot, conf.paths.source, page['target'])
            fq_source = os.path.join(conf.paths.manual_source, page['source'])

            migration_jobs.append(build_migration_job(fq_target, fq_source))

            if 'truncate' in page:
                truncate_jobs.append(build_truncate_job(page['truncate'], fq_target))

            if 'transform' in page:
                migration_jobs[-1]['job'] = copy_always
                munge_jobs.append(build_transform_job(page['transform'], fq_target))

            if 'append' in page:
                migration_jobs[-1]['job'] = copy_always
                append_jobs.append(build_append_job(page, fq_target, migration_paths))

        with ProcessPool() as p:
            migration_res = p.runner(migration_jobs)
            munge_res = p.runner(post_process_jobs(tasks=munge_jobs))
            truncate_res = p.runner(truncate_jobs)
            append_res = p.runner(append_jobs)

        msg = '[primer]: migrated {0}, munged {1}, truncated {2}, and appended to {3} pages.'
        print(msg.format(len(migration_res), len(munge_res), len(truncate_res), len(append_res)))

        return True
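
Both migrate() variants hand their job lists to a ProcessPool context manager. Only the context-manager protocol and runner() appear in these examples; the sketch below fills in the rest with a thin, assumed wrapper over multiprocessing.Pool:

import multiprocessing

class ProcessPool(object):
    # Hypothetical sketch: run each job dict's 'job' callable with its
    # 'args' in a worker process and collect the results in order.
    def __enter__(self):
        self.pool = multiprocessing.Pool()
        return self

    def __exit__(self, *exc):
        self.pool.close()
        self.pool.join()

    def runner(self, jobs):
        results = [self.pool.apply_async(job['job'], tuple(job['args']))
                   for job in jobs]
        return [r.get() for r in results]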
Example #6
def primer_migrate_pages(conf=None):
    "Migrates all manual files to primer according to the spec. As needed."

    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)

    if conf.project.name != "primer":
        return False
    elif len(migration_paths) == 0:
        return False
    else:
        migrations = ingest_yaml_list(*migration_paths)

        truncate_jobs = []
        munge_jobs = []
        migration_jobs = []
        append_jobs = []

        for page in migrations:
            if "sources" in page:
                migrations.extend(convert_multi_source(page))
                continue

            page = fix_migration_paths(page)

            fq_target = os.path.join(conf.paths.projectroot, conf.paths.source, page["target"])
            fq_source = os.path.abspath(os.path.join(conf.paths.projectroot, "..", "source", page["source"]))

            migration_jobs.append(build_migration_job(fq_target, fq_source))

            if "truncate" in page:
                truncate_jobs.append(build_truncate_job(page["truncate"], fq_target))

            if "transform" in page:
                migration_jobs[-1]["job"] = copy_always
                munge_jobs.append(build_transform_job(page["transform"], fq_target))

            if "append" in page:
                migration_jobs[-1]["job"] = copy_always
                append_jobs.append(build_append_job(page, fq_target, migration_paths))

        with ProcessPool() as p:
            migration_res = p.runner(migration_jobs)
            munge_res = p.runner(post_process_jobs(tasks=munge_jobs))
            truncate_res = p.runner(truncate_jobs)
            append_res = p.runner(append_jobs)

        msg = "migrated {0}, munged {1}, truncated {2}, and appended to {3} pages."
        logger.info(msg.format(len(migration_res), len(munge_res), len(truncate_res), len(append_res)))

        return True
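
migrate(), primer_migrate_pages(), and clean() all expand multi-source spec entries in place while iterating over the list. For that loop to terminate, convert_multi_source() must drop the 'sources' key from the entries it emits; a minimal sketch along those lines:

def convert_multi_source(page):
    # Hypothetical sketch: turn one entry with a 'sources' list into one
    # single-'source' entry per source. Dropping 'sources' keeps the
    # calling loop from re-expanding the new entries forever.
    base = dict((k, v) for k, v in page.items() if k != 'sources')
    return [dict(base, source=src) for src in page['sources']]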
Example #7
def package(target=None, conf=None):
    "Builds a package from the current build output."

    if conf is None:
        conf = get_conf()

    archive_path = os.path.join(conf.paths.projectroot, conf.paths.buildarchive)
    fn = package_filename(archive_path, target, conf)

    pconf = conf_from_list(
        "target", ingest_yaml_list(os.path.join(conf.paths.projectroot, conf.paths.builddata, "push.yaml"))
    )
    if target is None:
        pconf = pconf[list(pconf.keys())[0]]
    else:
        pconf = pconf[target]

    if not os.path.exists(archive_path):
        os.makedirs(archive_path)
        puts("[deploy] [tarball]: creating {0} directory".format(archive_path))
    elif not os.path.isdir(archive_path):
        abort("[ERROR]: {0} exists and is not a directory.".format(archive_path))

    arc_conf = os.path.join(conf.paths.projectroot, conf.paths.branch_output, "conf.json")

    with open(arc_conf, "w") as f:
        json.dump(get_build_metadata(conf), f, indent=2)

    with tarfile.open(fn, "w:gz") as t:
        if "branched" in pconf.options:
            input_path = os.path.join(
                conf.paths.projectroot, conf.paths.output, pconf.paths.local, conf.git.branches.current
            )
            output_path_name = conf.git.branches.current
        else:
            input_path = os.path.join(conf.paths.projectroot, conf.paths.output, pconf.paths.local)
            output_path_name = os.path.split(pconf.paths.local)[-1]

        t.add(name=input_path, arcname=output_path_name)

        t.add(arc_conf, arcname="conf.json")

        if "static" in pconf.paths:
            for path in pconf.paths.static:
                rendered_path = os.path.join(conf.paths.projectroot, conf.paths.public, path)
                if os.path.exists(rendered_path):
                    t.add(name=rendered_path, arcname=path)

    puts("[deploy] [tarball]: created {0} as archive of current build artifacts.".format(fn))
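
Both package() variants index push.yaml by target name with conf_from_list(). A minimal sketch, assuming it simply keys a list of records by one field; note that the real records evidently also support attribute access (pconf.options, pconf.paths.local), which plain dicts do not:

def conf_from_list(key, source_list):
    # Hypothetical sketch: build a {'production': {...}, 'staging': {...}}
    # style index of the records by the named field.
    return dict((item[key], item) for item in source_list)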
Example #8
def get_build_metadata(conf=None):
    if conf is None:
        conf = get_conf()

    o = dict(push=dict(), conf=conf, meta=dict())

    for target in ingest_yaml_list(os.path.join(conf.paths.projectroot, conf.paths.builddata, "push.yaml")):
        o["push"][target["target"]] = target

    o["meta"]["user"] = pwd.getpwuid(os.getuid())[0]
    o["meta"]["platform"] = sys.platform
    o["meta"]["time"] = datetime.datetime.utcnow().isoformat()
    o["meta"]["host"] = os.uname()[1]
    return o
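
A short usage sketch of the resulting structure; the comments only restate what the function above records, and no new fields are assumed:

metadata = get_build_metadata()
# metadata['push'] maps each push target name to its push.yaml record,
# metadata['conf'] embeds the full build configuration, and
# metadata['meta'] captures the user, platform, UTC timestamp, and host.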
Example #9
def get_build_metadata(conf=None):
    if conf is None:
        conf = get_conf()

    o = dict(push=dict(), conf=conf, meta=dict())

    for target in ingest_yaml_list(os.path.join(conf.paths.projectroot,
                                                conf.paths.builddata,
                                                'push.yaml')):
        o['push'][target['target']] = target

    o['meta']['user'] = pwd.getpwuid(os.getuid())[0]
    o['meta']['platform'] = sys.platform
    o['meta']['time'] = datetime.datetime.utcnow().isoformat()
    o['meta']['host'] = os.uname()[1]
    return o
Example #10
def deploy(target, conf=None, pconf=None):
    """Deploys a site. Specifies the deployment target defined in 'push.yaml'"""

    conf = lazy_conf(conf)

    push_conf = ingest_yaml_list(os.path.join(conf.paths.projectroot,
                                              conf.paths.builddata,
                                              'push.yaml'))

    pconf = conf_from_list('target', push_conf)[target]

    if pconf['target'] != target:
        abort('[deploy] [ERROR]: this build environment does not support the {0} target'.format(target))

    res = runner(deploy_jobs(target, conf, pconf), pool=2)
    puts('[deploy]: pushed {0} targets'.format(len(res)))
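
A hedged usage sketch; the 'production' target name is hypothetical and would have to exist in push.yaml:

if __name__ == '__main__':
    deploy('production')   # conf is resolved lazily inside deploy()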
Example #11
def clean(conf=None):
    "Removes all migrated primer files according to the current spec."

    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)
    migrations = ingest_yaml_list(*migration_paths)

    targets = []
    for page in migrations:
        if 'sources' in page:
            migrations.extend(convert_multi_source(page))
            continue

        page = fix_migration_paths(page)

        targets.append(os.path.join(conf.paths.projectroot, conf.paths.source, page['target']))

    for target in targets:
        verbose_remove(target)
    print('[clean] [primer]: removed {0} files'.format(len(targets)))
Example #12
def clean(conf=None):
    "Removes all migrated primer files according to the current spec."

    conf = lazy_conf(conf)

    migration_paths = get_migration_specifications(conf)
    migrations = ingest_yaml_list(*migration_paths)

    targets = []
    for page in migrations:
        if 'sources' in page:
            migrations.extend(convert_multi_source(page))
            continue

        page = fix_migration_paths(page)

        targets.append(os.path.join(conf.paths.projectroot, conf.paths.source, page['target']))

    for target in targets:
        verbose_remove(target)
    logger.info('clean: removed {0} files'.format(len(targets)))
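
Both clean() variants delegate deletion to verbose_remove(). A minimal sketch, assuming it is nothing more than a logged, existence-checked os.remove():

import logging
import os

logger = logging.getLogger(__name__)

def verbose_remove(path):
    # Hypothetical sketch: delete 'path' if it exists and say so.
    if os.path.exists(path):
        logger.info('removing %s', path)
        os.remove(path)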
Example #13
def intersphinx_jobs(conf=None):
    conf = lazy_conf(conf)
    data_file = os.path.join(conf.paths.projectroot,
                             conf.paths.builddata,
                             'intersphinx.yaml')

    if not os.path.exists(data_file):
        return

    intersphinx_mapping = ingest_yaml_list(data_file)

    for i in intersphinx_mapping:
        f = os.path.join(conf.paths.projectroot,
                         conf.paths.output, i['path'])

        s = i['url'] + 'objects.inv'
        yield {'target': f,
               'dependency': None,
               'job': download,
               'args': {'f': f, 's': s}}
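
The yielded job dicts name a download callable taking f (the local file) and s (the source URL). A minimal sketch with that signature, using only the standard library:

try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2

def download(f, s):
    # Hypothetical sketch: fetch URL 's' and write the body to path 'f'.
    with open(f, 'wb') as out:
        out.write(urlopen(s).read())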
Example #14
def pdf_jobs(target, conf):
    pdfs = ingest_yaml_list(os.path.join(conf.paths.builddata, 'pdfs.yaml'))
    tex_regexes = [ ( re.compile(r'(index|bfcode)\{(.*)--(.*)\}'),
                      r'\1\{\2-\{-\}\3\}'),
                    ( re.compile(r'\\PYGZsq{}'), "'"),
                    ( re.compile(r'\\code\{/(?!.*{}/|etc|usr|data|var|srv)'),
                      r'\code{' + conf.project.url + r'/' + conf.project.tag) ]

    # this is temporary
    queue = ( [], [], [], [], [] )

    for i in pdfs:
        tagged_name = i['output'][:-4] + '-' + i['tag']
        deploy_fn = tagged_name + '-' + conf.git.branches.current + '.pdf'
        link_name = deploy_fn.replace('-' + conf.git.branches.current, '')

        latex_dir = os.path.join(conf.paths.branch_output, target)

        if 'edition' in i:
            deploy_path = conf.paths.mms[i['edition']]

            target_split = target.split('-')

            if len(target_split) > 1:
                if target_split[1] != i['edition']:
                    logger.debug('not making pdf {0}, because {1} is not {2}'.format(
                        link_name, target_split[1], i['edition']))
                    continue

            if i['edition'] != 'hosted':
                deploy_fn = tagged_name + '.pdf'
                link_name = deploy_fn
        else:
            deploy_path = conf.paths.public_site_output

        i['source'] = os.path.join(latex_dir, i['output'])
        i['processed'] = os.path.join(latex_dir, tagged_name + '.tex')
        i['pdf'] = os.path.join(latex_dir, tagged_name + '.pdf')
        i['deployed'] = os.path.join(deploy_path, deploy_fn)
        i['link'] = os.path.join(deploy_path, link_name)
        i['path'] = latex_dir

        # these appends will become yields, once runner() can be dependency
        # aware.
        queue[0].append(dict(dependency=None,
                             target=i['source'],
                             job=_clean_sphinx_latex,
                             args=(i['source'], tex_regexes)))

        queue[1].append(dict(dependency=i['source'],
                             target=i['processed'],
                             job=copy_if_needed,
                             args=(i['source'], i['processed'], 'pdf')))

        queue[2].append(dict(dependency=i['processed'],
                             target=i['pdf'],
                             job=_render_tex_into_pdf,
                             args=(i['processed'], i['path'])))

        queue[3].append(dict(dependency=i['pdf'],
                             target=i['deployed'],
                             job=copy_if_needed,
                             args=(i['pdf'], i['deployed'], 'pdf')))

        if i['link'] != i['deployed']:
            queue[4].append(dict(dependency=i['deployed'],
                                 target=i['link'],
                                 job=create_link,
                                 args=(deploy_fn, i['link'])))

    return queue
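
Several stages above use copy_if_needed as their job. A plausible sketch of the contract the name implies, namely an mtime-guarded copy; the third argument appears only to label log output, as far as these examples show:

import os
import shutil

def copy_if_needed(source, target, name=None):
    # Hypothetical sketch: copy only when 'target' is missing or older
    # than 'source', creating the destination directory as needed.
    if (not os.path.exists(target)
            or os.path.getmtime(source) > os.path.getmtime(target)):
        target_dir = os.path.dirname(target)
        if target_dir and not os.path.exists(target_dir):
            os.makedirs(target_dir)
        shutil.copyfile(source, target)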
Example #15
def pdf_jobs(target, conf):
    pdfs = ingest_yaml_list(os.path.join(conf.paths.builddata, 'pdfs.yaml'))
    tex_regexes = [(re.compile(r'(index|bfcode)\{(.*)--(.*)\}'),
                    r'\1\{\2-\{-\}\3\}'), (re.compile(r'\\PYGZsq{}'), "'"),
                   (re.compile(r'\\code\{/(?!.*{}/|etc|usr|data|var|srv)'),
                    r'\code{' + conf.project.url + r'/' + conf.project.tag)]

    # this is temporary
    queue = ([], [], [], [], [])

    for i in pdfs:
        tagged_name = i['output'][:-4] + '-' + i['tag']
        deploy_fn = tagged_name + '-' + conf.git.branches.current + '.pdf'
        link_name = deploy_fn.replace('-' + conf.git.branches.current, '')

        latex_dir = os.path.join(conf.paths.branch_output, target)

        if 'edition' in i:
            deploy_path = os.path.join(conf.paths.public, i['edition'])

            target_split = target.split('-')

            if len(target_split) > 1:
                if target_split[1] != i['edition']:
                    continue

            if i['edition'] == 'hosted':
                deploy_path = os.path.join(deploy_path,
                                           conf.git.branches.current)
            else:
                deploy_fn = tagged_name + '.pdf'
                link_name = deploy_fn
        else:
            deploy_path = conf.paths.branch_staging

        i['source'] = os.path.join(latex_dir, i['output'])
        i['processed'] = os.path.join(latex_dir, tagged_name + '.tex')
        i['pdf'] = os.path.join(latex_dir, tagged_name + '.pdf')
        i['deployed'] = os.path.join(deploy_path, deploy_fn)
        i['link'] = os.path.join(deploy_path, link_name)
        i['path'] = latex_dir

        # these appends will become yields, once runner() can be dependency
        # aware.
        queue[0].append(
            dict(dependency=None,
                 target=i['source'],
                 job=_clean_sphinx_latex,
                 args=(i['source'], tex_regexes)))

        queue[1].append(
            dict(dependency=i['source'],
                 target=i['processed'],
                 job=copy_if_needed,
                 args=(i['source'], i['processed'], 'pdf')))

        queue[2].append(
            dict(dependency=i['processed'],
                 target=i['pdf'],
                 job=_render_tex_into_pdf,
                 args=(i['processed'], i['path'])))

        if (conf.project.name != 'mms'
                or mms_should_migrate(target, conf) is not False):
            queue[3].append(
                dict(dependency=i['pdf'],
                     target=i['deployed'],
                     job=copy_if_needed,
                     args=(i['pdf'], i['deployed'], 'pdf')))

            if i['link'] != i['deployed']:
                queue[4].append(
                    dict(dependency=i['deployed'],
                         target=i['link'],
                         job=create_link,
                         args=(deploy_fn, i['link'])))

    return queue
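
The final stage links each branch-stamped PDF to an unversioned alias via create_link(deploy_fn, i['link']). A minimal sketch, assuming a replace-if-present symlink:

import os

def create_link(input_fn, output_fn):
    # Hypothetical sketch: (re)point the symlink at 'output_fn' to 'input_fn'.
    if os.path.islink(output_fn):
        os.remove(output_fn)
    os.symlink(input_fn, output_fn)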