Example #1
def option_tasks(conf):
    option_sources = conf.system.content.options.sources
    o = OptionDataCache(option_sources, conf)

    if len(option_sources) > 0 and not os.path.isdir(
            conf.system.content.options.output_dir):
        safe_create_directory(conf.system.content.options.output_dir)

    tasks = []
    for dep_fn, option in o.content_iter():
        if option.program.startswith('_'):
            continue

        out_fn = hyph_concat(option.directive, option.program,
                             option.name) + '.rst'
        output_fn = os.path.join(conf.system.content.options.fn_prefix, out_fn)

        t = Task(job=write_options,
                 description='generating option file "{0}" from "{1}"'.format(
                     output_fn, dep_fn),
                 target=output_fn,
                 dependency=[dep_fn])
        t.args = (option, output_fn, conf)

        tasks.append(t)

    logger.info("added tasks for {0} option generation tasks".format(
        len(tasks)))
    return tasks
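
All of these task-builder functions share one pattern: build a Task around a job callable, a target, and its dependencies, attach positional arguments through t.args, and let a runner decide later whether the target is stale. A minimal sketch of that contract follows; the real Task class lives in libgiza and carries more machinery, so everything here is illustrative:

import os

class SimpleTask(object):
    def __init__(self, job, description, target, dependency):
        self.job = job
        self.description = description
        self.target = target
        self.dependency = dependency
        self.args = ()

    def needs_rebuild(self):
        # rebuild when the target is missing or older than any dependency
        if not os.path.exists(self.target):
            return True
        deps = self.dependency if isinstance(self.dependency, list) else [self.dependency]
        target_mtime = os.path.getmtime(self.target)
        return any(os.path.getmtime(dep) > target_mtime
                   for dep in deps
                   if dep is not None and os.path.exists(dep))

    def run(self):
        if self.needs_rebuild():
            return self.job(*self.args)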
Example #2
def step_tasks(conf):
    step_sources = conf.system.content.steps.sources
    s = StepDataCache(step_sources, conf)

    if len(step_sources) > 0 and not os.path.isdir(
            conf.system.content.steps.output_dir):
        safe_create_directory(conf.system.content.steps.output_dir)

    tasks = []
    for fn, stepf in s.file_iter():
        basename = conf.system.content.steps.get_basename(fn)

        out_fn = os.path.join(conf.system.content.steps.output_dir,
                              basename) + '.rst'

        t = Task(job=write_steps,
                 description='generate a stepfile for ' + fn,
                 target=out_fn,
                 dependency=fn)
        t.args = (stepf, out_fn, conf)

        tasks.append(t)

    logger.info("added tasks for {0} step generation tasks".format(len(tasks)))
    return tasks
Example #3
def example_tasks(conf):
    # In the beginning of this operation, which executes in the main thread, we
    # read all files in "source/includes/" and sub-directories that start with
    # "example-*"

    example_sources = conf.system.content.examples.sources

    # process the corpus of example data.
    d = ExampleDataCache(example_sources, conf)

    if len(example_sources) > 0 and not os.path.isdir(
            conf.system.content.examples.output_dir):
        safe_create_directory(conf.system.content.examples.output_dir)

    tasks = []
    for fn, exmpf in d.file_iter():
        if exmpf.collection is None or exmpf.collection.options.base_file is True:
            continue
        basename = conf.system.content.examples.get_basename(fn)
        out_fn = os.path.join(conf.system.content.examples.output_dir,
                              basename) + '.rst'

        t = Task(
            job=write_full_example,
            description='generate an example for ' + fn,
            target=out_fn,
            dependency=fn,
        )
        t.args = (exmpf.collection, exmpf.examples, out_fn)

        tasks.append(t)

    logger.info("added tasks for {0} example generation tasks".format(
        len(tasks)))
    return tasks
Example #4
def finalize_single_html_tasks(builder, conf):
    single_html_dir = get_single_html_dir(conf)

    # create directory when registering tasks.
    safe_create_directory(single_html_dir)
    safe_create_directory(os.path.join(single_html_dir, '_static'))

    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        artifact_dir = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output,
                                    '-'.join((builder, conf.project.edition)))
    else:
        artifact_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_output, builder)

    tasks = [giza.libgiza.task.Task(job=finalize_single_html,
                                    args=(single_html_dir, artifact_dir, conf),
                                    target=True,
                                    dependency=None,
                                    description="migrating singlehtml")]

    for fn in expand_tree(os.path.join(artifact_dir, '_static'), None):
        target_fn = os.path.join(single_html_dir, '_static', os.path.basename(fn))

        tasks.append(giza.libgiza.task.Task(job=copy_if_needed,
                                            args=(fn, target_fn),
                                            target=target_fn,
                                            dependency=fn,
                                            description="moving static files to the singlehtml build"))

    return tasks
Example #5
def download(path, url, conf):
    try:
        mtime = os.stat(path).st_mtime
    except OSError:
        mtime = -1

    now = time.time()
    if now < (mtime + MAX_AGE):
        logger.debug('Intersphinx file still young: %s', url)
        return

    request = urllib.request.Request(
        url, headers={'If-Modified-Since': email.utils.formatdate(mtime)})

    safe_create_directory(os.path.dirname(path))

    try:
        response = urllib.request.urlopen(request, timeout=TIMEOUT_SECONDS)
        with open(path, 'wb') as f:
            f.write(response.read())
    except urllib.error.HTTPError as err:
        if err.code == 304:
            logger.debug('Not modified: %s', url)
            return
        logger.error('Error downloading %s: Got %d', url, err.code)
    except urllib.error.URLError as err:
        logger.error('Error downloading %s: %s', url, str(err))
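
download() relies on module-level MAX_AGE and TIMEOUT_SECONDS constants that are defined elsewhere in the module; a plausible setup and invocation, with purely illustrative values and paths:

# illustrative values; the real constants are defined elsewhere in the module
MAX_AGE = 60 * 60 * 24 * 3     # re-fetch inventories older than three days
TIMEOUT_SECONDS = 15

# hypothetical invocation; 'conf' comes from the build configuration
# download('build/intersphinx/python3.inv',
#          'https://docs.python.org/3/objects.inv', conf)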
Example #6
def transfer_images(conf, sconf):
    image_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_images)
    if not os.path.isdir(image_dir):
        return False
    elif sconf.builder == 'latex':

        if 'edition' in sconf and sconf.edition is not None:
            builder_dir = '-'.join((sconf.builder, sconf.edition))
        else:
            builder_dir = sconf.builder

        builder_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_output, builder_dir)
        safe_create_directory(builder_dir)

        cmd = ('rsync -am '
               '--include="*.png" --include="*.jpg" --include="*.eps" '
               '--exclude="*" {0}/ {1}')
        cmd = cmd.format(image_dir, builder_dir)

        with open(os.devnull, 'w') as null:
            for img_cmd in (shlex.split(cmd.replace('images', 'figures')),
                            shlex.split(cmd)):

                subprocess.call(img_cmd, stdout=null, stderr=null)

        logger.info('migrated images for latex build')
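
The rsync filter above copies only image files while leaving everything else in place. A rough pure-stdlib approximation of that filter, for readers without rsync at hand (illustrative only; rsync -am also prunes directories that end up empty):

import fnmatch
import os
import shutil

def copy_image_files(src, dst, patterns=('*.png', '*.jpg', '*.eps')):
    # walk the source tree and copy only files matching the include patterns
    for root, _, files in os.walk(src):
        for name in files:
            if any(fnmatch.fnmatch(name, pattern) for pattern in patterns):
                target_dir = os.path.join(dst, os.path.relpath(root, src))
                if not os.path.isdir(target_dir):
                    os.makedirs(target_dir)
                shutil.copy2(os.path.join(root, name), target_dir)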
Example #7
def json_output(conf):
    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_list_file = os.path.join(conf.paths.projectroot,
                                    conf.paths.public_site_output,
                                    'json', '.file_list')

    cmd = ('rsync --recursive --times --delete '
           '--exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" '
           '{src} {dst}')

    json_dst = os.path.join(conf.paths.projectroot, conf.paths.public_site_output, 'json')
    safe_create_directory(json_dst)

    builder = 'json'
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        builder += '-' + conf.project.edition

    cmd_str = cmd.format(src=os.path.join(conf.paths.projectroot,
                                          conf.paths.branch_output, builder) + '/',
                         dst=json_dst)

    try:
        subprocess.check_call(cmd_str.split())
        copy_if_needed(list_file, public_list_file)
        logger.info('deployed json files to local staging.')
    except subprocess.CalledProcessError:
        logger.error('error migrating json artifacts to local staging')
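
copy_if_needed appears throughout these examples. A plausible sketch, assuming the helper copies only when the target is missing or its contents differ from the source:

import filecmp
import os
import shutil

def copy_if_needed(source_file, target_file):
    # skip the copy when the target already matches the source byte-for-byte
    if (os.path.isfile(target_file) and
            filecmp.cmp(source_file, target_file, shallow=False)):
        return
    target_dir = os.path.dirname(target_file)
    if target_dir and not os.path.isdir(target_dir):
        os.makedirs(target_dir)
    shutil.copyfile(source_file, target_file)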
Example #8
def download(path, url, conf):
    try:
        mtime = os.stat(path).st_mtime
    except OSError:
        mtime = -1

    now = time.time()
    if now < (mtime + MAX_AGE):
        logger.debug('Intersphinx file still young: %s', url)
        return

    request = urllib.request.Request(url, headers={
        'If-Modified-Since': email.utils.formatdate(mtime)
    })

    safe_create_directory(os.path.dirname(path))

    try:
        response = urllib.request.urlopen(request, timeout=TIMEOUT_SECONDS)
        with open(path, 'wb') as f:
            f.write(response.read())
    except urllib.error.HTTPError as err:
        if err.code == 304:
            logger.debug('Not modified: %s', url)
            return
        logger.error('Error downloading %s: Got %d', url, err.code)
    except urllib.error.URLError as err:
        logger.error('Error downloading %s: %s', url, str(err))
Example #9
def transfer_images(conf, sconf):
    image_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_images)
    if not os.path.isdir(image_dir):
        return False
    elif sconf.builder == 'latex':

        if 'edition' in sconf and sconf.edition is not None:
            builder_dir = '-'.join((sconf.builder, sconf.edition))
        else:
            builder_dir = sconf.builder

        builder_dir = os.path.join(conf.paths.projectroot,
                                   conf.paths.branch_output, builder_dir)
        safe_create_directory(builder_dir)

        cmd = ('rsync -am '
               '--include="*.png" --include="*.jpg" --include="*.eps" '
               '--exclude="*" {0}/ {1}')
        cmd = cmd.format(image_dir, builder_dir)

        with open(os.devnull, 'w') as null:
            for img_cmd in (shlex.split(cmd.replace('images', 'figures')),
                            shlex.split(cmd)):

                subprocess.call(img_cmd, stdout=null, stderr=null)

        logger.info('migrated images for latex build')
Example #10
def toc_tasks(conf):
    toc_sources = conf.system.content.toc.sources

    tocs = TocDataCache(toc_sources, conf)

    if len(toc_sources) > 0 and not os.path.isdir(
            conf.system.content.toc.output_dir):
        safe_create_directory(conf.system.content.toc.output_dir)

    tasks = []
    for dep_fn, toc_data in tocs.file_iter():
        deps = [dep_fn]
        if 'ref-toc-' in dep_fn:
            base_offset = 8
        else:
            base_offset = 4

        fn_basename = os.path.basename(dep_fn)[base_offset:].replace(
            'yaml', 'rst')

        toc_items = toc_data.ordered_items()

        if toc_data.is_spec() is False:
            out_fn = os.path.join(conf.system.content.toc.output_dir,
                                  fn_basename)

            t = Task(job=write_toc_tree_output,
                     target=out_fn,
                     dependency=dep_fn,
                     description="writing toctree to '{0}'".format(out_fn))
            t.args = (out_fn, toc_items)
            tasks.append(t)
        else:
            deps.extend(toc_data.spec_deps())

        if 'ref-toc' in dep_fn:
            out_fn = os.path.join(conf.system.content.toc.output_dir,
                                  hyph_concat('table', fn_basename))

            reft = Task(
                job=write_toc_table,
                target=out_fn,
                dependency=deps,
                description="write table of contents generator".format(out_fn))
            reft.args = (out_fn, toc_items)
            tasks.append(reft)
        else:
            out_fn = os.path.join(conf.system.content.toc.output_dir,
                                  hyph_concat('dfn-list', fn_basename))
            dt = Task(job=write_dfn_list_output,
                      target=out_fn,
                      dependency=deps,
                      description="write definition list toc to '{0}'".format(
                          out_fn))
            dt.args = (out_fn, toc_items)
            tasks.append(dt)

    logger.info('added tasks for {0} toc generation tasks'.format(len(tasks)))

    return tasks
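
The base_offset arithmetic in this function strips the 'ref-toc-' (8 characters) or 'toc-' (4 characters) filename prefix before swapping the extension. An equivalent, magic-number-free spelling, assuming the prefix always starts the basename:

import os

def toc_output_basename(dep_fn):
    # strip the 'ref-toc-' or 'toc-' prefix that the base_offset slicing
    # above encodes, then swap the yaml extension for rst
    basename = os.path.basename(dep_fn)
    for prefix in ('ref-toc-', 'toc-'):
        if basename.startswith(prefix):
            basename = basename[len(prefix):]
            break
    return basename.replace('yaml', 'rst')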
Example #11
def create_archive(files_to_archive, tarball_name):
    # ready to write the tarball

    safe_create_directory(os.path.dirname(tarball_name))

    with tarfile.open(tarball_name, 'w:gz') as t:
        for fn, arc_fn in files_to_archive:
            t.add(name=fn, arcname=arc_fn)
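
create_archive() takes (source_path, archive_name) pairs, which lets callers rename files as they enter the tarball. A hypothetical invocation (the paths are examples only):

files_to_archive = [('build/master/html/index.html', 'html/index.html'),
                    ('build/master/html/objects.inv', 'html/objects.inv')]
create_archive(files_to_archive, 'build/archive/html-build.tar.gz')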
Example #12
def download_file(file, url):
    cmd = ['curl', '-s', '--remote-time', url, '-o', file]

    safe_create_directory(os.path.dirname(file))

    try:
        subprocess.check_call(cmd)
        logger.info('downloaded {0}'.format(file))
        return True
    except subprocess.CalledProcessError:
        logger.error('trouble downloading intersphinx inventory: ' + file)
        return False
Example #13
def package_build_env(builders, editions, languages, conf):
    arc_fn = hyph_concat('cache', conf.project.name, conf.git.branches.current,
                         datetime.datetime.utcnow().strftime('%s'),
                         conf.git.commit[:8]) + ".tar.gz"
    archive_path = os.path.join(conf.paths.buildarchive, arc_fn)
    safe_create_directory(conf.paths.buildarchive)

    existing_archives = os.listdir(conf.paths.buildarchive)

    for arc in existing_archives:
        if conf.git.commit[:8] in arc:
            logger.warning(
                'archive "{0}" exists for current git hash, not recreating'.
                format(archive_path))
            return

    logger.debug("no archive for commit '{0}' continuing".format(
        conf.git.commit))

    with cd(conf.paths.projectroot):
        files_to_archive = set()

        for edition, language, builder in itertools.product(
                editions, languages, builders):
            rconf, sconf = get_sphinx_build_configuration(
                edition, language, builder, copy.deepcopy(conf.runstate))
            builder_dirname = resolve_builder_path(builder, edition, language,
                                                   rconf)

            files_to_archive.add(rconf.paths.branch_source)
            files_to_archive.add(
                os.path.join(rconf.paths.branch_output, builder_dirname))
            files_to_archive.add(
                os.path.join(rconf.paths.branch_output,
                             hyph_concat('doctrees', builder_dirname)))
            files_to_archive.add(rconf.system.dependency_cache_fn)

        files_to_archive = list(files_to_archive)
        logger.info('prepped build cache archive. writing file now.')

        for fn in files_to_archive:
            if not os.path.exists(fn):
                raise FileNotFoundError(fn)

        try:
            with tarfile.open(archive_path, 'w:gz') as t:
                for fn in files_to_archive:
                    t.add(fn, exclude=is_git_dir)
            logger.info("created build-cache archive: " + archive_path)
        except Exception as e:
            os.remove(archive_path)
            logger.critical("failed to create archive: " + archive_path)
            logger.error(e)
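
tarfile's exclude hook, used above, is called once per candidate path and skips the entry when it returns true; note that the parameter is deprecated and was removed in newer Python 3 releases in favor of filter=. A plausible is_git_dir, assuming it only needs to prune .git directories:

import os

def is_git_dir(path):
    # exclude anything that lives inside a .git directory
    return '.git' in path.split(os.path.sep)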
Example #14
def download_file(file, url):
    cmd = ['curl', '--silent', '--location', '--remote-time', url, '-o', file]

    safe_create_directory(os.path.dirname(file))

    try:
        subprocess.check_call(cmd)
        logger.info('downloaded {0}'.format(file))
        return True
    except subprocess.CalledProcessError:
        logger.error('trouble downloading intersphinx inventory: ' + file)
        return False
Example #15
def generate_list_file(outputs, path, conf):
    dirname = os.path.dirname(path)
    safe_create_directory(dirname)

    url = get_site_url(conf)
    url.append('json')
    url = '/'.join(url)

    with open(path, 'w') as f:
        for fn in outputs:
            if os.path.isfile(fn) is True:
                line = '/'.join([url, fn.split('/', 3)[3:][0]])
                f.write(line)
                f.write('\n')

    logger.info('rebuilt inventory of json output.')
Example #16
def finalize_single_html_tasks(builder, conf, app):
    single_html_dir = get_single_html_dir(conf)

    safe_create_directory(single_html_dir)

    found_src = False
    for base_path in (builder, hyph_concat(builder, conf.project.edition)):
        if found_src is True:
            break

        for fn in [
                os.path.join(base_path, f)
                for f in ('contents.html', 'index.html')
        ]:
            src_fn = os.path.join(conf.paths.projectroot,
                                  conf.paths.branch_output, fn)

            if os.path.exists(src_fn):
                manual_single_html(input_file=src_fn,
                                   output_file=os.path.join(
                                       single_html_dir, 'index.html'))

                copy_if_needed(
                    source_file=os.path.join(conf.paths.projectroot,
                                             conf.paths.branch_output,
                                             base_path, 'objects.inv'),
                    target_file=os.path.join(single_html_dir, 'objects.inv'))

                found_src = True

                break

    if found_src is not True:
        raise FileNotFoundError('singlehtml source file')

    single_path = os.path.join(single_html_dir, '_static')

    for fn in expand_tree(os.path.join(os.path.dirname(src_fn), '_static'),
                          None):
        target_fn = os.path.join(single_path, os.path.basename(fn))

        task = app.add('task')
        task.job = copy_if_needed
        task.target = target_fn
        task.dependency = fn
        task.args = [fn, target_fn]
        task.description = "migrating static files to the HTML build"
Example #17
def task(task, conf):
    if task == 'source':
        legacy_tables = expand_tree(
            os.path.join(conf.paths.projectroot, conf.paths.source,
                         'reference'), 'yaml')
        dirname = os.path.join(conf.paths.projectroot, conf.paths.includes,
                               'apiargs')
        safe_create_directory(dirname)
        offset = len(os.path.join(conf.paths.projectroot, conf.paths.source))
    elif task == 'branch':
        legacy_tables = expand_tree(
            os.path.join(conf.paths.projectroot, conf.paths.branch_source,
                         'reference'), 'yaml')
        safe_create_directory(conf.system.content.apiargs.output_dir)
        offset = len(
            os.path.join(conf.paths.projectroot, conf.paths.branch_source))
    else:
        logger.critical('cannot perform apiarg migration for: ' + str(task))
        return

    new_apiarg = []
    new_fns = []
    for fn in legacy_tables:
        new_data, new_fn = migrate_legacy_apiarg(task, fn, conf)
        if new_fn in new_fns:
            logger.error("duplicate: {0}, from: {1}".format(
                os.path.basename(new_fn), os.path.basename(fn)))
        else:
            new_fns.append(new_fn[offset:])
            new_apiarg.append((new_fn, new_data))

    for fn, data in new_apiarg:
        write_yaml(data, fn)
    # for fn in legacy_tables:
    #     os.remove(fn)

    new_sources = conf.system.content.apiargs.sources

    if len(new_sources) != len(legacy_tables) and len(legacy_tables) != len(
            new_fns):
        logger.critical('problem in apiargs table migration.')
    else:
        logger.info('legacy apiargs tables migrated successfully.')

    legacy_tables = [fn[offset:] for fn in legacy_tables]
    return zip(legacy_tables, new_fns)
Example #18
def package_build_env(builders, editions, languages, conf):
    arc_fn = '-'.join(['cache',
                       conf.project.name,
                       conf.git.branches.current,
                       datetime.datetime.utcnow().strftime('%s'),
                       conf.git.commit[:8]]) + ".tar.gz"
    archive_path = os.path.join(conf.paths.buildarchive, arc_fn)
    safe_create_directory(conf.paths.buildarchive)

    existing_archives = os.listdir(conf.paths.buildarchive)

    for arc in existing_archives:
        if conf.git.commit[:8] in arc:
            m = 'archive "{0}" exists for current git hash, not recreating'
            logger.warning(m.format(archive_path))
            return

    logger.debug("no archive for commit '{0}' continuing".format(conf.git.commit))

    with cd(conf.paths.projectroot):
        files_to_archive = set()

        for ((edition, language, builder), (rconf, sconf)) in get_builder_jobs(conf):
            files_to_archive.add(rconf.paths.branch_source)
            files_to_archive.add(os.path.join(rconf.paths.branch_output,
                                              sconf.build_output))
            files_to_archive.add(os.path.join(rconf.paths.branch_output,
                                              '-'.join(('doctrees', sconf.build_output))))
            files_to_archive.add(rconf.system.dependency_cache_fn)

        files_to_archive = list(files_to_archive)
        logger.info('prepped build cache archive. writing file now.')

        for fn in files_to_archive:
            if not os.path.exists(fn):
                raise FileNotFoundError(fn)

        try:
            with tarfile.open(archive_path, 'w:gz') as t:
                for fn in files_to_archive:
                    t.add(fn, exclude=is_git_dir)
            logger.info("created build-cache archive: " + archive_path)
        except Exception as e:
            os.remove(archive_path)
            logger.critical("failed to create archive: " + archive_path)
            logger.error(e)
Example #19
def apiarg_tasks(conf):
    with Timer('apiargs migrations'):
        name_changes = migration_task(task='branch', conf=conf)

    apiarg_sources = conf.system.content.apiargs.sources
    a = ApiArgDataCache(apiarg_sources, conf)

    if len(apiarg_sources) > 0 and not os.path.isdir(
            conf.system.content.apiargs.output_dir):
        safe_create_directory(conf.system.content.apiargs.output_dir)

    tasks = []
    # for dep_fn, table in a.file_iter():
    #     print dep_fn

    logger.info(
        'new apiargs not yet implemented, but there are {0} of them'.format(
            str(len(conf.system.content.apiargs.sources))))
    return []
Example #20
def extract_tasks(conf):
    extract_sources = conf.system.content.extracts.sources

    extracts = ExtractDataCache(extract_sources, conf)

    if len(extract_sources) > 0 and not os.path.isdir(
            conf.system.content.extracts.output_dir):
        safe_create_directory(conf.system.content.extracts.output_dir)

    tasks = []
    for dep_fn, extract in extracts.content_iter():
        t = Task(job=write_extract_file,
                 description="generating extract file: " + extract.target,
                 target=extract.target,
                 dependency=dep_fn)
        t.args = (extract, extract.target)
        tasks.append(t)

        include_statement = get_include_statement(extract.target_project_path)

        for verb, adjc, noun in [(prepend_to_file, 'prepend', extract.prepend),
                                 (append_to_file, 'append', extract.append)]:
            if noun:
                if not isinstance(noun, list):
                    files = [noun]
                else:
                    files = noun

                for fn in files:
                    t = Task(
                        job=verb,
                        target=fn,
                        dependency=dep_fn,
                        description="{0} extract include for '{0}' to '{1}'".
                        format(adjc, extract.target, fn))
                    t.args = (fn, include_statement)
                    tasks.append(t)

    logger.info("added tasks for {0} extract generation tasks".format(
        len(tasks)))

    return tasks
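
get_include_statement() is not shown here; a plausible sketch, assuming it emits a reStructuredText include directive pointing at the generated extract:

def get_include_statement(target_project_path):
    return '.. include:: {0}\n'.format(target_project_path)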
Example #21
def test_build_site(args):
    args.languages_to_build = args.editions_to_build = []
    args.builder = 'html'

    conf = fetch_config(args)

    safe_create_directory('build')
    with BuildApp.new(pool_type=conf.runstate.runner,
                      pool_size=conf.runstate.pool_size,
                      force=conf.runstate.force).context() as app:
        try:
            sphinx_publication(conf, args, app)
        except:
            sphinx_publication(conf, args, app)
            if os.path.exists('docs-tools'):
                shutil.rmtree('docs-tools')

    logger.info('bootstrapped makefile system')

    logger.info('updated project skeleton in current directory.')
Example #22
def dump_file_hashes(conf):
    output = conf.system.dependency_cache

    o = {'time': datetime.datetime.utcnow().strftime("%s"), 'files': {}}

    files = expand_tree(
        os.path.join(conf.paths.projectroot, conf.paths.branch_source), None)

    fmap = o['files']

    for fn in files:
        if os.path.exists(fn):
            fmap[fn] = md5_file(fn)

    safe_create_directory(os.path.dirname(output))

    with open(output, 'w') as f:
        json.dump(o, f)

    logger.info('wrote dependency cache to: {0}'.format(output))
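
md5_file() hashes each source file for the dependency cache; a minimal sketch using hashlib, assuming the cache only needs a stable content fingerprint:

import hashlib

def md5_file(fn, block_size=2 ** 20):
    # hash in chunks so large files never load into memory all at once
    digest = hashlib.md5()
    with open(fn, 'rb') as f:
        for chunk in iter(lambda: f.read(block_size), b''):
            digest.update(chunk)
    return digest.hexdigest()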
Example #23
def transfer_source(conf, sconf):
    target = os.path.join(conf.paths.projectroot, conf.paths.branch_source)

    dir_exists = safe_create_directory(target)

    # this operation is just for messaging the above operation, and error'ing
    # appropriately.
    if dir_exists is True:
        logger.info('created directory for sphinx build: {0}'.format(target))
    elif not os.path.isdir(target):
        msg = '"{0}" exists and is not a directory'.format(target)
        logger.error(msg)
        raise InvalidFile(msg)

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    exclusions = [
        os.path.join('includes', 'table'),
        os.path.join('includes', 'generated')
    ]

    if conf.paths.images is not None:
        image_dir = os.path.join(conf.paths.images[len(conf.paths.source) +
                                                   1:])
        exclusions.append(image_dir + os.path.sep + '*.png')
        exclusions.append(image_dir + os.path.sep + '*.rst')

    prefix_len = len(
        os.path.join(conf.paths.projectroot, conf.paths.branch_source)) + 1

    exclusions.extend([
        o for o in conf.system.content.output_directories(prefix_len)
        if o != "includes/changelogs"
    ])

    # we don't want rsync to delete directories that hold generated content in
    # the target so we can have more incremental builds.
    exclusions = "--exclude=" + ' --exclude='.join(exclusions)

    cmd = 'rsync --links --checksum --recursive {2} --delete {0}/ {1}'
    cmd = cmd.format(source_dir, target, exclusions)

    try:
        subprocess.check_call(shlex.split(cmd))
    except subprocess.CalledProcessError as e:
        logger.error('source transfer rsync had error: ' + str(e.returncode))
        logger.info(cmd)

    # remove files from the source tree specified in the sphinx config for this
    # build.
    source_exclusion(conf, sconf)
    os.utime(target, None)

    logger.info(
        'prepared and migrated source for sphinx build in {0}'.format(target))
Example #24
def dump_file_hashes(conf):
    output = conf.system.dependency_cache

    o = {'time': datetime.datetime.utcnow().strftime("%s"),
         'files': {}}

    files = expand_tree(os.path.join(conf.paths.projectroot, conf.paths.branch_source), None)

    fmap = o['files']

    for fn in files:
        if os.path.exists(fn):
            fmap[fn] = md5_file(fn)

    safe_create_directory(os.path.dirname(output))

    with open(output, 'w') as f:
        json.dump(o, f)

    logger.debug('wrote dependency cache to: {0}'.format(output))
Example #25
def transfer_images(conf, sconf):
    image_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_images)
    if not os.path.isdir(image_dir):
        return False
    elif sconf.builder == 'latex':

        if 'edition' in sconf and sconf.edition is not None:
            builder_dir = hyph_concat(sconf.builder, sconf.edition)
        else:
            builder_dir = sconf.builder

        builder_dir = os.path.join(conf.paths.projectroot, conf.paths.branch_output, builder_dir)

        safe_create_directory(builder_dir)
        cmd = 'rsync -am --include="*.png" --include="*.jpg" --include="*.eps" --exclude="*" {0}/ {1} '.format(image_dir, builder_dir)

        command(cmd)
        command(cmd.replace('images', 'figures'), ignore=True)

        logger.info('migrated images for latex build')
Example #26
def json_output(conf):
    list_file = os.path.join(conf.paths.branch_output, 'json-file-list')
    public_list_file = os.path.join(conf.paths.projectroot,
                                    conf.paths.public_site_output,
                                    'json', '.file_list')

    cmd = 'rsync --recursive --times --delete --exclude="*pickle" --exclude=".buildinfo" --exclude="*fjson" {src} {dst}'

    json_dst = os.path.join(conf.paths.projectroot, conf.paths.public_site_output, 'json')
    safe_create_directory(json_dst)

    builder = 'json'
    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        builder += '-' + conf.project.edition

    command(cmd.format(src=os.path.join(conf.paths.projectroot,
                                        conf.paths.branch_output, builder) + '/',
                       dst=json_dst))

    copy_if_needed(list_file, public_list_file)
    logger.info('deployed json files to local staging.')
Example #27
def release_tasks(conf):
    release_sources = conf.system.content.releases.sources

    rel = ReleaseDataCache(release_sources, conf)

    if len(release_sources) > 0 and not os.path.isdir(
            conf.system.content.releases.output_dir):
        safe_create_directory(conf.system.content.releases.output_dir)

    tasks = []

    for dep_fn, release in rel.content_iter():
        t = Task(job=write_release_file,
                 description='generating release spec file: ' + release.target,
                 target=release.target,
                 dependency=dep_fn)
        t.args = (release, release.target, conf)

        tasks.append(t)

    logger.info("added tasks for {0} release generation tasks".format(
        len(tasks)))
    return tasks
Example #28
def run_sphinx(builder, sconf, conf):
    if safe_create_directory(sconf.fq_build_output):
        m = 'created directory "{1}" for sphinx builder {0}'
        logger.info(m.format(builder, sconf.fq_build_output))

    if 'language' in sconf and sconf.language is not None:
        cmd_str = 'sphinx-intl build --language=' + sconf.language
        try:
            subprocess.check_call(shlex.split(cmd_str))
            logger.info('compiled all PO files for translated build.')
        except subprocess.CalledProcessError as e:
            logger.error('sphinx-intl encountered error: ' + str(e.returncode))
            logger.info(cmd_str)

    logger.info('starting sphinx build {0}'.format(builder))

    cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'  # per-builder-doctree

    sphinx_cmd = cmd.format(
        get_sphinx_args(sconf, conf),
        os.path.join(conf.paths.projectroot,
                     conf.paths.branch_output), sconf.build_output,
        os.path.join(conf.paths.projectroot, conf.paths.branch_source),
        sconf.fq_build_output)

    logger.debug(sphinx_cmd)
    m = "running sphinx build for: {0}, {1}, {2}"

    with Timer(m.format(builder, sconf.language, sconf.edition)):
        try:
            output = subprocess.check_output(shlex.split(sphinx_cmd),
                                             stderr=subprocess.STDOUT)
            return_code = 0
        except subprocess.CalledProcessError as e:
            output = e.output
            return_code = e.returncode
            logger.info(sphinx_cmd)
    try:
        os.utime(sconf.fq_build_output, None)
    except:
        pass

    m = 'completed {0} sphinx build for {1}.{2}.{3} ({4})'

    logger.info(
        m.format(builder, conf.project.name, conf.project.edition,
                 conf.git.branches.current, return_code))

    return return_code, output
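
For reference, expanding the command template with made-up values shows the shape of the final invocation (every path below is a hypothetical example, not a value from a real configuration):

cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'
print(cmd.format('-b html -q',
                 'build/master', 'html',
                 'build/master/source', 'build/master/html'))
# sphinx-build -b html -q -d build/master/doctrees-html
#     build/master/source build/master/html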
Example #29
def finalize_single_html_tasks(builder, conf):
    single_html_dir = get_single_html_dir(conf)

    # create directory when registering tasks.
    safe_create_directory(single_html_dir)
    safe_create_directory(os.path.join(single_html_dir, '_static'))

    if 'edition' in conf.project and conf.project.edition != conf.project.name:
        artifact_dir = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output, '-'.join(
                                        (builder, conf.project.edition)))
    else:
        artifact_dir = os.path.join(conf.paths.projectroot,
                                    conf.paths.branch_output, builder)

    tasks = [
        libgiza.task.Task(job=finalize_single_html,
                          args=(single_html_dir, artifact_dir, conf),
                          target=True,
                          dependency=None,
                          description="migrating singlehtml")
    ]

    for fn in expand_tree(os.path.join(artifact_dir, '_static'), None):
        target_fn = os.path.join(single_html_dir, '_static',
                                 os.path.basename(fn))

        tasks.append(
            libgiza.task.Task(
                job=copy_if_needed,
                args=(fn, target_fn),
                target=target_fn,
                dependency=fn,
                description="moving static files to the singlehtml build"))

    return tasks
Example #30
def run_sphinx(builder, sconf, conf):
    if safe_create_directory(sconf.fq_build_output):
        m = 'created directory "{1}" for sphinx builder {0}'
        logger.info(m.format(builder, sconf.fq_build_output))

    if 'language' in sconf and sconf.language is not None:
        cmd_str = 'sphinx-intl build --language=' + sconf.language
        try:
            subprocess.check_call(shlex.split(cmd_str))
            logger.info('compiled all PO files for translated build.')
        except subprocess.CalledProcessError as e:
            logger.error('sphinx-intl encountered error: ' + str(e.returncode))
            logger.info(cmd_str)

    logger.info('starting sphinx build {0}'.format(builder))

    cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'  # per-builder-doctree

    sphinx_cmd = cmd.format(get_sphinx_args(sconf, conf),
                            os.path.join(conf.paths.projectroot, conf.paths.branch_output),
                            sconf.build_output,
                            os.path.join(conf.paths.projectroot, conf.paths.branch_source),
                            sconf.fq_build_output)

    logger.debug(sphinx_cmd)
    m = "running sphinx build for: {0}, {1}, {2}"

    with Timer(m.format(builder, sconf.language, sconf.edition)):
        try:
            output = subprocess.check_output(shlex.split(sphinx_cmd), stderr=subprocess.STDOUT)
            return_code = 0
        except subprocess.CalledProcessError as e:
            output = e.output
            return_code = e.returncode
            logger.info(sphinx_cmd)
    try:
        os.utime(sconf.fq_build_output, None)
    except:
        pass

    m = 'completed {0} sphinx build for {1}.{2}.{3} ({4})'

    logger.info(m.format(builder, conf.project.name, conf.project.edition,
                         conf.git.branches.current, return_code))

    return return_code, output
Example #31
def run_sphinx(builder, sconf, conf):
    if safe_create_directory(sconf.fq_build_output):
        logger.info('created directory "{1}" for sphinx builder {0}'.format(
            builder, sconf.fq_build_output))

    if 'language' in sconf and sconf.language is not None:
        command('sphinx-intl build --language=' + sconf.language)
        logger.info('compiled all PO files for translated build.')

    logger.info('starting sphinx build {0}'.format(builder))

    cmd = 'sphinx-build {0} -d {1}/doctrees-{2} {3} {4}'  # per-builder-doctree

    sphinx_cmd = cmd.format(
        get_sphinx_args(sconf, conf),
        os.path.join(conf.paths.projectroot,
                     conf.paths.branch_output), sconf.build_output,
        os.path.join(conf.paths.projectroot, conf.paths.branch_source),
        sconf.fq_build_output)

    logger.debug(sphinx_cmd)
    with Timer("running sphinx build for: {0}, {1}, {2}".format(
            builder, sconf.language, sconf.edition)):
        out = command(sphinx_cmd, capture=True, ignore=True)

    logger.info('completed sphinx build {0}'.format(builder))

    if True:  # out.return_code == 0:
        logger.info('successfully completed {0} sphinx build ({1})'.format(
            builder, out.return_code))

        finalizer_app = BuildApp(conf)
        finalizer_app.pool = "thread"
        finalizer_app.root_app = False
        finalize_sphinx_build(sconf, conf, finalizer_app)

        with Timer("finalize sphinx {0} build".format(builder)):
            finalizer_app.run()
    else:
        logger.warning(
            'the sphinx build {0} was not successful. not running finalize operation'
            .format(builder))

    output = '\n'.join([out.err, out.out])

    return out.return_code, output
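
This variant delegates to a command() helper instead of calling subprocess directly. A plausible sketch of that helper, assuming it wraps subprocess and returns an object exposing out, err, and return_code (the attributes used above):

import shlex
import subprocess

class CommandResult(object):
    def __init__(self, out, err, return_code):
        self.out = out
        self.err = err
        self.return_code = return_code

def command(cmd, capture=False, ignore=False):
    # run a shell-style command string, optionally capturing its output
    pipe = subprocess.PIPE if capture else None
    proc = subprocess.Popen(shlex.split(cmd), stdout=pipe, stderr=pipe)
    out, err = proc.communicate()
    if proc.returncode != 0 and not ignore:
        raise RuntimeError('command failed: ' + cmd)
    return CommandResult(out.decode('utf-8') if out else '',
                         err.decode('utf-8') if err else '',
                         proc.returncode)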
Example #32
def transfer_source(conf, sconf):
    target = os.path.join(conf.paths.projectroot, conf.paths.branch_source)

    dir_exists = safe_create_directory(target)

    # this operation is just for messaging the above operation, and error'ing
    # appropriately.
    if dir_exists is True:
        logger.info('created directory for sphinx build: {0}'.format(target))
    elif not os.path.isdir(target):
        msg = '"{0}" exists and is not a directory'.format(target)
        logger.error(msg)
        raise InvalidFile(msg)

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    image_dir = os.path.join(conf.paths.images[len(conf.paths.source) + 1:])

    exclusions = [os.path.join('includes', 'table'),
                  os.path.join('includes', 'generated'),
                  image_dir + os.path.sep + "*.png",
                  image_dir + os.path.sep + "*.rst",
                  image_dir + os.path.sep + "*.eps"]

    prefix_len = len(os.path.join(conf.paths.projectroot, conf.paths.branch_source)) + 1

    exclusions.extend([o for o in conf.system.content.output_directories(prefix_len) if o != "includes/changelogs"])

    # we don't want rsync to delete directories that hold generated content in
    # the target so we can have more incremental builds.
    exclusions = "--exclude=" + ' --exclude='.join(exclusions)

    cmd = 'rsync --links --checksum --recursive {2} --delete {0}/ {1}'
    cmd = cmd.format(source_dir, target, exclusions)

    try:
        subprocess.check_call(shlex.split(cmd))
    except subprocess.CalledProcessError as e:
        logger.error('source transfer rsync had error: ' + str(e.returncode))
        logger.info(cmd)

    # remove files from the source tree specified in the sphinx config for this
    # build.
    source_exclusion(conf, sconf)
    os.utime(target, None)

    logger.info('prepared and migrated source for sphinx build in {0}'.format(target))
Example #33
def transfer_source(conf, sconf):
    target = os.path.join(conf.paths.projectroot, conf.paths.branch_source)

    dir_exists = safe_create_directory(target)

    # this operation is just for messaging the above operation, and error'ing
    # appropriately.
    if dir_exists is True:
        logger.info('created directory for sphinx build: {0}'.format(target))
    elif not os.path.isdir(target):
        msg = '"{0}" exists and is not a directory'.format(target)
        logger.error(msg)
        raise InvalidFile(msg)

    source_dir = os.path.join(conf.paths.projectroot, conf.paths.source)
    image_dir = os.path.join(conf.paths.images[len(conf.paths.source)+1:])
    ref_dir = 'reference'

    exclusions = [ os.path.join('includes', 'table'),
                   os.path.join('includes', 'generated'),
                   os.path.join(ref_dir, 'method') + os.path.sep + "*.rst",
                   os.path.join(ref_dir, 'command') + os.path.sep + "*.rst",
                   os.path.join(ref_dir, 'operator', 'query') + os.path.sep + "*.rst",
                   os.path.join(ref_dir, 'operator', 'aggregation') + os.path.sep + "*.rst",
                   ref_dir + os.path.sep + "*.rst",
                   image_dir + os.path.sep + "*.png",
                   image_dir + os.path.sep + "*.rst",
                   image_dir + os.path.sep + "*.eps" ]

    prefix_len = len(os.path.join(conf.paths.projectroot, conf.paths.branch_source)) + 1
    exclusions.extend([ o for o in conf.system.content.output_directories(prefix_len) ])

    # we don't want rsync to delete directories that hold generated content in
    # the target so we can have more incremental builds.
    exclusions = "--exclude=" + ' --exclude='.join(exclusions)

    cmd = 'rsync --checksum --recursive {2} --delete {0}/ {1}'.format(source_dir, target, exclusions)
    command(cmd)

    # remove files from the source tree specified in the sphinx config for this
    # build.
    source_exclusion(conf, sconf)
    os.utime(target, None)

    logger.info('prepared and migrated source for sphinx build in {0}'.format(target))
Example #34
def make_parent_dirs(*paths):
    for path in paths:
        safe_create_directory(os.path.dirname(path))
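
safe_create_directory() is the common thread through all of these examples. A minimal sketch consistent with how callers use its return value (transfer_source and run_sphinx treat True as "this call created the directory"):

import os

def safe_create_directory(path):
    # create path if needed; return True only when this call created it
    if not path or os.path.isdir(path):
        return False
    try:
        os.makedirs(path)
        return True
    except OSError:
        # another process may have created it between the check and makedirs
        if os.path.isdir(path):
            return False
        raise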