Example #1
def create_branch(args):
    """
    Takes a single branch name and (if necessary) creates a new branch. Then,
    populates the ``build/<branch>`` directory for the new branch using either
    the parent branch or ``master``. Safe to run multiple times (after a rebase)
    to update the build cache from master.

    Also calls :meth:`~giza.operations.build_env.fix_build_environment()` to
    tweak the new build output to update hashes and on-disk copies of the
    environment to prevent unnecessary full-rebuilds from sphinx.
    """

    conf = fetch_config(args)

    g = GitRepo(conf.paths.projectroot)

    branch = conf.runstate.git_branch
    base_branch = g.current_branch()

    if base_branch == branch:
        base_branch = 'master'
        logger.warning('seeding build data for branch "{0}" from "master"'.format(branch))

    branch_builddir = os.path.join(conf.paths.projectroot,
                                   conf.paths.output, branch)

    base_builddir = os.path.join(conf.paths.projectroot,
                                 conf.paths.output, base_branch)

    if g.branch_exists(branch):
        logger.info('checking out branch "{0}"'.format(branch))
    else:
        logger.info('creating and checking out a branch named "{0}"'.format(branch))

    g.checkout_branch(branch)

    cmd = "rsync -r --times --checksum {0}/ {1}".format(base_builddir, branch_builddir)
    logger.info('seeding build directory for "{0}" from "{1}"'.format(branch, base_branch))

    try:
        subprocess.check_call(args=cmd.split())
        logger.info('branch creation complete.')
    except subprocess.CalledProcessError:
        logger.error(cmd)

    # get a new config here for the new branch
    conf = fetch_config(args)
    builders = get_existing_builders(conf)

    with BuildApp.new(pool_type='process',
                      pool_size=conf.runstate.pool_size,
                      force=conf.runstate.force).context() as app:
        app.extend_queue(fix_build_env_tasks(builders, conf))
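
The rsync flags used above are what make the re-seeding safe to repeat: ``--checksum`` copies only files whose contents differ and ``--times`` preserves modification times, so an already seeded build directory is largely left alone. A minimal sketch of that seeding step in isolation (``seed_build_dir`` is a hypothetical helper, not part of giza):

import logging
import subprocess

logger = logging.getLogger(__name__)


def seed_build_dir(base_builddir, branch_builddir):
    # -r recurses, --times preserves mtimes, and --checksum skips files whose
    # contents already match, so repeated runs copy only what changed.
    cmd = ['rsync', '-r', '--times', '--checksum',
           base_builddir + '/', branch_builddir]
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError:
        logger.error('seeding failed: ' + ' '.join(cmd))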
Example #2
def examples(args):
    c = fetch_config(args)
    app = BuildApp(c)

    example_tasks(c, app)

    app.run()
Example #3
def update_translations_transifex(args):
    """Updates translation and uploads them to the transifex service."""

    conf = fetch_config(args)

    update_transifex(conf)
    check_for_orphaned_tx_files(conf)
Example #4
def pull_translations(args):
    conf = fetch_config(args)

    with libgiza.app.BuildApp.new(pool_type=conf.runstate.runner,
                                  pool_size=conf.runstate.pool_size,
                                  force=conf.runstate.force).context() as app:
        app.extend_queue(pull_tasks(conf))
Example #5
def unwind(args):
    conf = fetch_config(args)

    conf.runstate.package_path = fetch_package(conf.runstate.package_path, conf)
    logger.info('extracting package: ' + conf.runstate.package_path)
    extract_package(conf)
    logger.info('extracted package')
Example #6
def build_translation_model(args):
    conf = fetch_config(args)
    if args.t_translate_config is None:
        tconf = conf.system.files.data.translate
    elif os.path.isfile(args.t_translate_config):
        tconf = TranslateConfig(args.t_translate_config, conf)
    else:
        logger.error(args.t_translate_config + " doesn't exist")
        return

    if os.path.exists(tconf.paths.project) is False:
        os.makedirs(tconf.paths.project)
    elif os.path.isfile(tconf.paths.project):
        logger.error(tconf.paths.project + " is a file")
        sys.exit(1)
    elif os.listdir(tconf.paths.project) != []:
        logger.error(tconf.paths.project + " must be empty")
        sys.exit(1)

    with open(os.path.join(tconf.paths.project, "translate.yaml"), 'w') as f:
        yaml.dump(tconf.dict(), f, default_flow_style=False)

    tconf.conf.runstate.pool_size = tconf.settings.pool_size
    run_args = get_run_args(tconf)

    app = BuildApp.new(pool_type=conf.runstate.runner,
                       pool_size=conf.runstate.pool_size,
                       force=conf.runstate.force)
    os.environ['IRSTLM'] = tconf.paths.irstlm

    setup_train(tconf)
    setup_tune(tconf)
    setup_test(tconf)

    for idx, parameter_set in enumerate(run_args):
        parameter_set = list(parameter_set)
        parameter_set.append(idx)
        parameter_set.append(tconf)
        t = app.add()
        t.job = build_model
        t.args = parameter_set
        t.description = "model_" + str(parameter_set[9])

    app.run()

    aggregate_model_data(tconf.paths.project)

    from_addr = "*****@*****.**"
    to_addr = [tconf.settings.email]

    with open(tconf.paths.project + "/data.csv") as data:
        msg = MIMEText(data.read())

    msg['Subject'] = "Model Complete"
    msg['From'] = from_addr
    msg['To'] = ", ".join(to_addr)

    server = smtplib.SMTP("localhost")
    server.sendmail(from_addr, to_addr, msg.as_string())
    server.quit()
Example #7
    def __init__(self, args, destage, dry_run=False):
        self.args = args
        self.conf = fetch_config(args)
        self.destage = destage
        self.dry_run = dry_run

        self.branch = GitRepo().current_branch()
Example #8
def package(args):
    conf = fetch_config(args)

    package_build_env(builders=conf.runstate.builder,
                      editions=conf.runstate.editions_to_build,
                      languages=conf.runstate.languages_to_build,
                      conf=conf)
Example #9
def render_config(args):
    """Returns configuration object for diagnostic purposes."""

    c = fetch_config(args)

    # the following values are rendered lazily. we list them here so that the
    # final object will be useful to inspect.
    [c.git.commit, c.paths.public, c.git.branches.current,
     c.git.branches.manual, c.git.branches.published,
     c.paths.branch_output, c.paths.buildarchive,
     c.paths.branch_source, c.paths.branch_staging,
     c.paths.branch_images, c.paths.branch_includes,
     c.version.published, c.version.stable, c.version.upcoming,
     c.project.edition, c.deploy, c.paths.global_config,
     c.project.branched, c.system.dependency_cache,
     c.system.dependency_cache_fn, c.paths.public_site_output,
     c.system.content, c.runstate.runner, c.runstate.force,
     c.system.files, c.system.files.paths, c.system.files.data,
     c.paths.htaccess]

    print('--- ' + "str of config object >>>")
    print(json.dumps(c.dict(), indent=3))
    print('---  <<<')

    print(c.system.files.data.jira)
Example #10
def api(args):
    c = fetch_config(args)

    with BuildApp.new(pool_type=c.runstate.runner,
                      pool_size=c.runstate.pool_size,
                      force=c.runstate.force).context() as app:
        app.extend_queue(apiarg_tasks(c))
Example #11
def fetch(args):
    conf = fetch_config(args)

    if conf.runstate.package_path.startswith('http'):
        fetch_package(conf.runstate.package_path, conf)
    else:
        logger.error('{0} is not a url'.format(conf.runstate.package_path))
        raise SystemExit
Example #12
def source(args):
    args.builder = 'html'
    conf = fetch_config(args)

    with BuildApp.new(pool_type=conf.runstate.runner,
                      pool_size=conf.runstate.pool_size,
                      force=conf.runstate.force).context() as app:
        sphinx_content_preperation(app, conf)
Example #13
def robots(args):
    c = fetch_config(args)
    app = BuildApp(c)
    app.pool = "serial"

    robots_txt_tasks(c, app)

    app.run()
Example #14
def robots(args):
    c = fetch_config(args)

    with BuildApp.new(pool_type=c.runstate.runner,
                      pool_size=c.runstate.pool_size,
                      force=c.runstate.force).context() as app:
        app.pool = 'serial'
        app.extend_queue(robots_txt_tasks(c))
Example #15
def push_translations(args):
    conf = fetch_config(args)
    app = BuildApp(conf)

    push_tasks(conf, app)

    update(conf)
    app.run()
Example #16
def main(args):
    """
    Removes build artifacts from the ``build/`` directory.
    """

    c = fetch_config(args)
    app = BuildApp.new(pool_type=c.runstate.runner,
                       pool_size=c.runstate.pool_size,
                       force=c.runstate.force)

    to_remove = set()

    if c.runstate.git_branch is not None:
        to_remove.add(os.path.join(c.paths.projectroot, c.paths.branch_output))

    if c.runstate.builder != []:
        for edition, language, builder in get_builder_jobs(c):
            builder_path = resolve_builder_path(builder, edition, language, c)
            builder_path = os.path.join(c.paths.projectroot, c.paths.branch_output, builder_path)

            to_remove.add(builder_path)
            dirpath, base = os.path.split(builder_path)
            to_remove.add(os.path.join(dirpath, 'doctrees-' + base))

            m = 'remove artifacts associated with the {0} builder in {1} ({2}, {3})'
            logger.debug(m.format(builder, c.git.branches.current, edition, language))

    if c.runstate.days_to_save is not None:
        published_branches = ['docs-tools', 'archive', 'public', 'primer', c.git.branches.current]
        published_branches.extend(c.git.branches.published)

        for build in os.listdir(os.path.join(c.paths.projectroot, c.paths.output)):
            build = os.path.join(c.paths.projectroot, c.paths.output, build)
            branch = os.path.split(build)[1]

            if branch in published_branches:
                continue
            elif not os.path.isdir(build):
                continue
            elif os.stat(build).st_mtime > c.runstate.days_to_save:
                to_remove.add(build)
                to_remove.add(os.path.join(c.paths.projectroot, c.paths.output, 'public', branch))
                logger.debug('removed stale artifacts: "{0}" and "build/public/{0}"'.format(branch))

    for fn in to_remove:
        if os.path.isdir(fn):
            job = shutil.rmtree
        else:
            job = os.remove

        t = app.add('task')
        t.job = job
        t.args = fn
        m = 'removing artifact: {0}'.format(fn)
        t.description = m
        logger.critical(m)

    app.run()
Example #17
def main(args):
    """
    Provides a way to specify make-like targets to invoke giza
    operations. Targets take the ``<action>-<option>[-<option>...]`` form.
    """
    targets = [(t[0], t[1:]) for t in [t.split("-") for t in args.make_target]]

    conf = fetch_config(args)
    run_make_operations(targets, conf)
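
To make the comprehension above concrete, here is how the split behaves; the target strings are made up for illustration, and ``args.make_target`` is assumed to be a list of strings, as the comprehension implies:

# Illustration only: make-like targets decompose into (action, [options]) pairs.
make_target = ['sphinx-html-publish', 'push']
targets = [(t[0], t[1:]) for t in [t.split('-') for t in make_target]]
print(targets)
# -> [('sphinx', ['html', 'publish']), ('push', [])]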
Example #18
def toc(args):
    c = fetch_config(args)

    if c.runstate.clean_generated is True:
        toc_clean(c)
    else:
        app = BuildApp(c)
        toc_tasks(c, app)
        app.run()
Example #19
def primer(args):
    c = fetch_config(args)

    if c.runstate.clean_generated is True:
        primer_clean(c)
    else:
        app = BuildApp(c)
        primer_migration_tasks(c, app)
        app.run()
Example #20
def push(args):
    c = fetch_config(args)
    app = BuildApp(c)

    sphinx_ret = sphinx_publication(c, args, app)
    if sphinx_ret == 0 or c.runstate.force is True:
        deploy_worker(c, app)
    else:
        logger.warning('a sphinx build failed, and build not forced. not deploying.')
Example #21
def options(args):
    c = fetch_config(args)

    if c.runstate.clean_generated is True:
        option_clean(c)
    else:
        app = BuildApp(c)
        option_tasks(c, app)
        app.run()
Example #22
def pull_rebase(args):
    c = fetch_config(args)

    g = GitRepo(c.paths.projectroot)

    if c.runstate.git_branch is None:
        c.runstate.git_branch = g.current_branch()

    with g.branch(c.runstate.git_branch):
        g.update()
Example #23
def steps(args):
    c = fetch_config(args)

    with BuildApp.new(pool_type=c.runstate.runner,
                      pool_size=c.runstate.pool_size,
                      force=c.runstate.force).context() as app:
        if c.runstate.clean_generated is True:
            app.extend_queue(step_clean(c))
        else:
            app.extend_queue(step_tasks(c))
Example #24
def redirects(args):
    c = fetch_config(args)

    if args.dry_run is True:
        print(''.join(make_redirect(c)))
    else:
        with BuildApp.new(pool_type=c.runstate.runner,
                          pool_size=c.runstate.pool_size,
                          force=c.runstate.force).context() as app:
            app.extend_queue(redirect_tasks(c))
Example #25
def translate_text_doc(args):
    conf = fetch_config(args)
    if args.t_translate_config is None:
        tconf = conf.system.files.data.translate
    elif os.path.isfile(args.t_translate_config):
        tconf = TranslateConfig(ingest_yaml_doc(args.t_translate_config), conf)
    else:
        logger.error(args.t_translate_config + " doesn't exist")
        return
    translate_file(args.t_input_file, args.t_output_file, tconf, args.t_protected_regex)
Example #26
def cherry_pick(args):
    c = fetch_config(args)

    g = GitRepo(c.paths.projectroot)

    if c.runstate.git_branch is None:
        c.runstate.git_branch = g.current_branch()

    with g.branch(c.runstate.git_branch):
        g.cherry_pick(c.runstate.git_objects)
Example #27
def images(args):
    c = fetch_config(args)
    app = BuildApp(c)

    if c.runstate.clean_generated is True:
        image_clean(c, app)
    else:
        image_tasks(c, app)

    app.run()
Example #28
def intersphinx(args):
    c = fetch_config(args)
    app = BuildApp(c)

    if c.runstate.clean_generated is True:
        intersphinx_clean(c, app)
    else:
        intersphinx_tasks(c, app)

    app.run()
Example #29
def clean(args):
    c = fetch_config(args)

    for fn in include_files_unused(conf=c):
        fn = os.path.join(c.paths.source, fn[1:])
        if os.path.exists(fn):
            os.remove(fn)
            logger.info("removed {0}, which was an unused include file.".format(fn))
        else:
            logger.error('{0} does not exist'.format(fn))
Example #30
def assets(args):
    c = fetch_config(args)
    app = BuildApp(c)

    if c.runstate.clean_generated is True:
        assets_clean(c, app)
    else:
        assets_tasks(c, app)

    app.run()
Example #31
def create(args):
    conf = fetch_config(args)

    for target in conf.runstate.push_targets:
        create_package(target, conf)