Example #1
def generate_meta():
    m.section_break('branch/release meta', block='rel')
    m.var('manual-branch', MANUAL_BRANCH, block='rel')
    m.var('current-branch', str(utils.get_branch()), block='rel')
    m.var('last-commit', str(utils.get_commit()), block='rel')
    m.var('current-if-not-manual', str(get_manual_path()), block='rel')

    paths = render_paths(True)

    m.section_break('file system paths', block='paths')
    m.var('output', paths['output'], block='paths')
    m.var('public-output', paths['public'], block='paths')
    m.var('branch-output', paths['branch-output'], block='paths')
    m.var('rst-include', paths['includes'], block='paths')
    m.var('branch-source', paths['branch-source'], block='paths')
    m.var('public-branch-output', paths['branch-staging'], block='paths')

    generated_makefiles = []

    m.newline()
    for target in dynamic_makefiles():
        file = '/'.join([paths['output'], "makefile." + target])
        cloth = '/'.join([paths['tools'], "makecloth", target + '.py'])

        generated_makefiles.append(file)
        m.raw(['-include ' + paths['output'] + '/makefile.' + target])

        m.target(target=file, dependency=cloth, block='makefiles')
        m.job(' '.join(["$(PYTHONBIN)", cloth, file]))
        m.newline()

    m.newline()

    m.target('.PHONY', generated_makefiles)
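
This function (like Examples #2, #8, and #9 below) writes into a module-level MakefileCloth instance named m rather than creating its own. A minimal prelude for running it, borrowing the imports that Examples #14 and #16 actually show, might look like the sketch below; MANUAL_BRANCH and dynamic_makefiles() would also have to be defined in the enclosing module and are not reproduced here.

import utils
from makecloth import MakefileCloth
from docs_meta import render_paths, get_manual_path

# Module-level builder object that generate_meta() above appends to.
m = MakefileCloth()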
Example #2
def generate_meta():
    m.section_break('branch/release meta', block='rel')
    m.var('manual-branch', MANUAL_BRANCH, block='rel')
    m.var('current-branch', str(utils.get_branch()), block='rel')
    m.var('last-commit', str(utils.get_commit()), block='rel')
    m.var('current-if-not-manual', str(get_manual_path()), block='rel')

    paths = render_paths(True)

    m.section_break('file system paths', block='paths')
    m.var('output', paths['output'], block='paths')
    m.var('public-output', paths['public'], block='paths')
    m.var('branch-output', paths['branch-output'], block='paths')
    m.var('rst-include', paths['includes'], block='paths')
    m.var('branch-source', paths['branch-source'], block='paths')
    m.var('public-branch-output', paths['branch-staging'], block='paths')

    generated_makefiles = []

    m.newline()
    for target in dynamic_makefiles():
        file = '/'.join([paths['output'], "makefile." + target])
        cloth = '/'.join([paths['tools'], "makecloth", target + '.py'])
        
        generated_makefiles.append(file)
        m.raw(['-include ' + paths['output'] + '/makefile.' + target])

        m.target(target=file, dependency=cloth, block='makefiles')
        m.job(' '.join(["$(PYTHONBIN)", cloth, file]))
        m.newline()

    m.newline()

    m.target('.PHONY', generated_makefiles)
Example #3
def toc_jobs():
    paths = render_paths('obj')

    for fn in expand_tree(paths.includes, 'yaml'):
        if fn.startswith(os.path.join(paths.includes, 'table')):
            pass
        elif len(fn) >= 24:
            base_name = _get_toc_base_name(fn)

            fmt = fn[20:24]
            if fmt != 'spec':
                fmt = fn[16:19]

            o = {
                  'dependency': fn,
                  'job': _generate_toc_tree,
                  'target': [],
                  'args': [fn, fmt, base_name, paths]
                }

            if fmt != 'spec':
                o['target'].append(_get_toc_output_name(base_name, 'toc', paths))

            is_ref_spec = fn.startswith(os.path.join(os.path.dirname(fn), 'ref-spec'))

            if not is_ref_spec and (fmt == 'toc' or fmt == 'spec'):
                o['target'].append(_get_toc_output_name(base_name, 'dfn-list', paths))
            elif fmt == 'ref' or is_ref_spec:
                o['target'].append(_get_toc_output_name(base_name, 'table', paths))

            yield o
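
toc_jobs() only describes work: each yielded dictionary names a dependency, zero or more targets, a callable, and its arguments, and the other *_jobs generators below (steps_jobs, api_jobs, table_jobs, image_jobs) follow the same pattern. A minimal sketch of a consumer, assuming a simple rebuild-if-stale policy; run_jobs and that policy are illustrative, not the project's actual runner:

import os

def run_jobs(jobs):
    # Hypothetical consumer for the job dictionaries yielded above.
    for job in jobs:
        targets = job['target'] if isinstance(job['target'], list) else [job['target']]
        deps = job['dependency'] if isinstance(job['dependency'], list) else [job['dependency']]
        # Rebuild when a target is missing or older than its newest dependency (assumed policy).
        stale = any(not os.path.exists(t) or
                    os.path.getmtime(t) < max(os.path.getmtime(d) for d in deps)
                    for t in targets)
        if stale:
            job['job'](*job['args'])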
Example #4
def steps_jobs():
    paths = render_paths('obj')

    for fn in expand_tree(os.path.join(paths.projectroot, paths.includes), 'yaml'):
        if fn.startswith(os.path.join(paths.projectroot, paths.includes, 'step')):
            out_fn = _get_steps_output_fn(fn, paths)

            yield { 'dependency': fn,
                    'target': out_fn,
                    'job': render_step_file,
                    'args': [fn, out_fn] }
Example #5
def api_jobs():
    paths = render_paths('obj')

    for source in expand_tree(os.path.join(paths.source, 'reference'), 'yaml'):
        target = dot_concat(os.path.splitext(source)[0], 'rst')

        yield {
                'target': target,
                'dependency': source,
                'job': _generate_api_param,
                'args': [source, target]
              }
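
dot_concat is not defined anywhere in this listing; judging from its use above (re-attaching an 'rst' extension to a name stripped by os.path.splitext), it presumably behaves like the following sketch, which is an assumption rather than the helper's actual source:

def dot_concat(*args):
    # Presumed behaviour: join the pieces with a dot, e.g.
    # dot_concat('reference/method/find', 'rst') -> 'reference/method/find.rst'
    return '.'.join(args)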
Example #6
def generate_json_output_meta():
    m = MakefileCloth()

    m.section_break('json output coordination.')
    paths = render_paths('dict')

    if get_conf().git.remote.upstream.endswith('ecosystem'):
        public_json_output = os.path.join(paths['public'], 'json')
    else:
        public_json_output = os.path.join(paths['branch-staging'], 'json')

    build_json_output = os.path.join(paths['branch-output'], 'json')
    branch_json_list_file = os.path.join(paths['branch-output'],
                                         'json-file-list')
    public_json_list_file = os.path.join(public_json_output, '.file_list')

    m.section_break('meta')

    m.target('json-output', ['json'])
    m.job('fab process.json_output')

    rsync_cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {0}/ {1}'
    m.job(rsync_cmd.format(build_json_output, public_json_output))
    m.msg('[json]: migrated all .json files to staging.')
    m.msg('[json]: processed all json files.')

    m.section_break('list file')

    m.comment(
        'the meta build system generates "{0}" when it generates this file'.
        format(branch_json_list_file))

    fab_cmd = 'fab process.input:{0} process.output:{1} process.copy_if_needed:json'
    m.target('json-file-list', public_json_list_file)
    m.target(public_json_list_file, 'json-output')
    m.job(fab_cmd.format(branch_json_list_file, public_json_list_file))
    m.msg('[json]: rebuilt inventory of json output.')

    m.target(build_json_output, 'json')

    m.target(
        '.PHONY',
        ['clean-json-output', 'clean-json', 'json-output', 'json-file-list'])
    m.target('clean-json-output', 'clean-json')
    m.job(' '.join([
        'rm -rf ', public_json_list_file, branch_json_list_file,
        public_json_output
    ]))
    m.msg('[json]: removed all processed json.')

    return m
Example #7
def table_jobs():
    paths = render_paths('obj')

    for source in expand_tree(paths.includes, 'yaml'):
        if os.path.basename(source).startswith('table'):
            target = _get_table_output_name(source)
            list_target = _get_list_table_output_name(source)

            yield {
                    'target': [ target, list_target ],
                    'dependency': source,
                    'job': _generate_tables,
                    'args': [ source, target, list_target ]
                  }
Example #8
def generate_meta():
    m.section_break('branch/release meta', block='rel')
    m.var('manual-branch', docs_meta.MANUAL_BRANCH, block='rel')
    m.var('current-branch', str(docs_meta.get_branch()), block='rel')
    m.var('last-commit', str(docs_meta.get_commit()), block='rel')
    m.var('current-if-not-manual', str(docs_meta.get_manual_path()), block='rel')

    paths = docs_meta.render_paths(True)

    m.section_break('file system paths', block='paths')
    m.var('output', paths['output'], block='paths')
    m.var('public-output', paths['public'], block='paths')
    m.var('branch-output', paths['branch-output'], block='paths')
    m.var('rst-include', paths['includes'], block='paths')
    m.var('branch-source', paths['branch-source'], block='paths')
    m.var('public-branch-output', paths['branch-staging'], block='paths')
Example #9
def generate_meta():
    m.section_break("branch/release meta", block="rel")
    m.var("manual-branch", MANUAL_BRANCH, block="rel")
    m.var("current-branch", str(utils.get_branch()), block="rel")
    m.var("last-commit", str(utils.get_commit()), block="rel")
    m.var("current-if-not-manual", str(get_manual_path()), block="rel")

    paths = render_paths(True)

    m.section_break("file system paths", block="paths")
    m.var("output", paths["output"], block="paths")
    m.var("public-output", paths["public"], block="paths")
    m.var("branch-output", paths["branch-output"], block="paths")
    m.var("rst-include", paths["includes"], block="paths")
    m.var("branch-source", paths["branch-source"], block="paths")
    m.var("public-branch-output", paths["branch-staging"], block="paths")
Example #10
def image_jobs():
    paths = render_paths('obj')

    meta_file = os.path.join(paths.images, 'metadata') + '.yaml'

    if not os.path.exists(meta_file):
        return

    images_meta = ingest_yaml_list(meta_file)

    for image in images_meta:
        image['dir'] = paths.images
        source_base = os.path.join(image['dir'], image['name'])
        source_file = source_base + '.svg'
        rst_file = source_base + '.rst'

        yield {
                'target': rst_file,
                'dependency': [ meta_file, os.path.join(paths.buildsystem, 'rstcloth', 'images.py') ],
                'job': generate_image_pages,
                'args': image
              }

        for output in image['output']:
            if 'tag' in output:
                tag = '-' + output['tag']
            else:
                tag = ''

            target_img = source_base + tag + '.png'

            inkscape_cmd = '{cmd} -z -d {dpi} -w {width} -y 0.0 -e >/dev/null {target} {source}'

            yield {
                    'target': target_img,
                    'dependency': source_file,
                    'job': _generate_images,
                    'args': [
                              inkscape_cmd,
                              output['dpi'],
                              output['width'],
                              target_img,
                              source_file
                            ],
                  }
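
_generate_images is not shown in this listing. Given the argument order in the yielded dictionary (command template, dpi, width, target, source) and the shell redirection embedded in inkscape_cmd, a plausible sketch follows; the 'inkscape' binary name and the use of subprocess are assumptions, not the project's actual helper:

import subprocess

def _generate_images(cmd, dpi, width, target, source):
    # Fill the command template yielded above and shell out to render the PNG.
    subprocess.check_call(
        cmd.format(cmd='inkscape', dpi=dpi, width=width,
                   target=target, source=source),
        shell=True)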
Example #11
def generate_json_output_meta():
    m = MakefileCloth()

    m.section_break('json output coordination.')
    paths = render_paths('dict')

    if get_conf().git.remote.upstream.endswith('ecosystem'):
        public_json_output = os.path.join(paths['public'], 'json')
    else:
        public_json_output = os.path.join(paths['branch-staging'], 'json')

    build_json_output = os.path.join(paths['branch-output'], 'json')
    branch_json_list_file = os.path.join(paths['branch-output'], 'json-file-list')
    public_json_list_file = os.path.join(public_json_output, '.file_list')

    m.section_break('meta')

    m.target('json-output', ['json'])
    m.job('fab process.json_output')

    rsync_cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {0}/ {1}'
    m.job(rsync_cmd.format(build_json_output, public_json_output))
    m.msg('[json]: migrated all .json files to staging.')
    m.msg('[json]: processed all json files.')

    m.section_break('list file')

    m.comment('the meta build system generates "{0}" when it generates this file'.format(branch_json_list_file))

    fab_cmd = 'fab process.input:{0} process.output:{1} process.copy_if_needed:json'
    m.target('json-file-list', public_json_list_file)
    m.target(public_json_list_file, 'json-output')
    m.job(fab_cmd.format(branch_json_list_file, public_json_list_file))
    m.msg('[json]: rebuilt inventory of json output.')

    m.target(build_json_output, 'json')

    m.target('.PHONY', ['clean-json-output', 'clean-json', 'json-output', 'json-file-list'])
    m.target('clean-json-output', 'clean-json')
    m.job(' '.join(['rm -rf ', public_json_list_file, branch_json_list_file, public_json_output]))
    m.msg('[json]: removed all processed json.')

    return m
Example #12
def sitemap(config_path=None):
    paths = render_paths('obj')

    sys.path.append(os.path.join(paths.projectroot, paths.buildsystem, 'bin'))
    import sitemap_gen

    if config_path is None:
        config_path = os.path.join(paths.projectroot, 'conf-sitemap.xml')

    if not os.path.exists(config_path):
        puts('[ERROR] [sitemap]: configuration file {0} does not exist. Returning early'.format(config_path))
        return False

    sitemap = sitemap_gen.CreateSitemapFromFile(configpath=config_path,
                                                suppress_notify=True)
    if sitemap is None:
        puts('[ERROR] [sitemap]: failed to generate the sitemap due to encountered errors.')
        return False

    sitemap.Generate()

    puts('[sitemap]: generated sitemap according to the config file {0}'.format(config_path))
    return True
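
Two hedged invocation examples; the explicit path in the second call is purely illustrative:

# Use the default conf-sitemap.xml in the project root.
sitemap()

# Or point at an explicit configuration file (illustrative path).
sitemap(config_path='config/sitemap.xml')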
Example #13
from fabric.api import cd, local, task, env, hide, settings
from fabric.utils import puts
from multiprocessing import cpu_count
import pkg_resources
import docs_meta
import datetime

paths = docs_meta.render_paths(True)

def get_tags(target):
    if target.startswith('html') or target.startswith('dirhtml'):
        return 'website'
    else:
        return 'print'

def timestamp(form='filename'):
    if form == 'filename':
        return datetime.datetime.now().strftime("%Y-%m-%d, %H:%M %p")
    else:
        return datetime.datetime.now().strftime("%Y%m%d%H:%M%p")

def get_sphinx_args(nitpick=None):
    o = ''

    if pkg_resources.get_distribution("sphinx").version.startswith('1.2'):
        o += '-j ' + str(cpu_count() + 1) + ' '

    if nitpick is not None:
        o += '-n -w $(branch-output)/build.{0}.log'.format(timestamp('filename'))

    return o
Example #14
#!/usr/bin/python

import sys
import os.path

sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
                                             '../')))
import utils
from makecloth import MakefileCloth
from docs_meta import render_paths

m = MakefileCloth()

paths = render_paths('dict')


def pdf_makefile(name, tag):
    name_tagged = '-'.join([name, tag])
    name_tagged_pdf = name_tagged + '.pdf'
    name_tagged_branch_pdf = '-'.join([name, tag, utils.get_branch()]) + '.pdf'

    generated_latex = '{0}/latex/{1}.tex'.format(paths['branch-output'], name)
    built_tex = '{0}/latex/{1}.tex'.format(paths['branch-output'], name_tagged)

    built_pdf = '{0}/latex/{1}'.format(paths['branch-output'], name_tagged_pdf)
    staged_pdf_branch = '{0}/{1}'.format(paths['branch-staging'],
                                         name_tagged_branch_pdf)
    staged_pdf = '{0}/{1}'.format(paths['branch-staging'], name_tagged_pdf)

    m.section_break(name)
    m.target(target=generated_latex, dependency='latex')
Example #15
from fabric.api import cd, local, task, env, hide, settings
from fabric.utils import puts
from multiprocessing import cpu_count
import pkg_resources
import docs_meta
import datetime

paths = docs_meta.render_paths(True)

def get_tags(target):
    if target.startswith('html') or target.startswith('dirhtml'):
        return 'website'
    else:
        return 'print'

def timestamp(form='filename'):
    if form == 'filename':
        return datetime.datetime.now().strftime("%Y-%m-%d.%H-%M")
    else:
        return datetime.datetime.now().strftime("%Y-%m-%d, %H:%M %p")

def get_sphinx_args(nitpick=None):
    o = ''

    if pkg_resources.get_distribution("sphinx").version == '1.2b1-xgen-dev-20130529':
        o += '-j ' + str(cpu_count() + 1) + ' '

    if nitpick is not None:
        o += '-n -w {0}/build.{1}.log'.format(paths['branch-output'], timestamp('filename'))

    return o
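
Unlike the variant in Example #13, whose -w path embeds the Make variable $(branch-output) and is presumably destined for a Makefile, this version builds a plain shell-ready flag string. A minimal sketch of passing it to sphinx-build through Fabric's local(), where the builder name and directory layout are illustrative assumptions:

# Hypothetical call site; 'dirhtml' and the directories are assumptions.
local('sphinx-build {0} -b dirhtml source/ build/dirhtml'.format(
    get_sphinx_args(nitpick=True)))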
Example #16
#!/usr/bin/python
import sys
import os.path
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
                                             '../')))
sys.path.append(
    os.path.abspath(os.path.join(os.path.dirname(__file__), '../../')))

from makecloth import MakefileCloth
from conf import intersphinx_mapping
import docs_meta

paths = docs_meta.render_paths('dict')
m = MakefileCloth()


def intersphinx_builders():
    invs = []
    for i in intersphinx_mapping:
        output = '{0}/{1}.inv'.format(paths['output'], i)
        m.target(output, block=i)
        m.job('@fab {0}.url:{1}objects.inv {0}.file:{2} {0}.download'.format(
            'intersphinx', intersphinx_mapping[i][0], output),
              block=i)
        m.newline(block=i)
        invs.append(output)

    m.newline(block='control')
    m.target('intersphinx', invs, block='control')

    invs = ' '.join(invs)
Example #17
#!/usr/bin/python
import sys
import os.path
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../../')))

from makecloth import MakefileCloth
from conf import intersphinx_mapping
import docs_meta

paths = docs_meta.render_paths('dict')
m = MakefileCloth()

def intersphinx_builders():
    invs = []
    for i in intersphinx_mapping:
        output = '{0}/{1}.inv'.format(paths['output'], i)
        m.target(output, block=i)
        m.job('@fab {0}.url:{1}objects.inv {0}.file:{2} {0}.download'.format('intersphinx', intersphinx_mapping[i][0], output), block=i)
        m.newline(block=i)
        invs.append(output)

    m.newline(block='control')
    m.target('intersphinx', invs, block='control')

    invs = ' '.join(invs)
    m.target('clean-intersphinx', block='control')
    m.job('rm -f ' + invs, block='control')
    m.msg('[intersphinx]: all existing intersphinx inv files removed.', block='control')
    m.target('.PHONY', 'intersphinx clean-intersphinx ' + invs, block='control')
Example #18
import sys
import os.path
import argparse
import json
from multiprocessing import cpu_count

sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../bin/')))
from utils import ingest_yaml_list
from docs_meta import render_paths
from makecloth import MakefileCloth

m = MakefileCloth()
paths = render_paths('dict')

def generate_targets(images):
    image_files = []
    image_rst_files = []

    for image in images:
        b = image['name']
        source_base = '/'.join([image['dir'], image['name']])
        source_file = source_base + '.svg'

        m.section_break(image['name'], block=b)
        m.newline(block=b)

        for output in image['output']:
            if 'tag' in output:
                tag = '-' + output['tag']
            else:
Example #19
#!/usr/bin/python

import sys
import os.path

sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "bin")))
import utils
from makecloth import MakefileCloth
from docs_meta import render_paths, get_manual_path, get_conf

m = MakefileCloth()

paths = render_paths("dict")
conf = get_conf()
correction = "'s/(index|bfcode)\{(.*!*)*--(.*)\}/\\1\{\\2-\{-\}\\3\}/g'"
pdf_latex_command = 'TEXINPUTS=".:{0}/latex/:" pdflatex --interaction batchmode --output-directory {0}/latex/ $(LATEXOPTS)'.format(
    paths["branch-output"]
)


def pdf_makefile(name, tag=None, edition=None):
    if tag is None:
        name_tagged = name
        name_tagged_branch_pdf = "-".join([name, utils.get_branch()]) + ".pdf"
    else:
        name_tagged = "-".join([name, tag])
        name_tagged_branch_pdf = "-".join([name, tag, utils.get_branch()]) + ".pdf"

    if conf.git.remote.upstream.endswith("mms-docs"):
        site_url = "http://mms.10gen.com"