Example #1
0
def sync_repository_state(ctx,
                          push=False,
                          no_profile=False,
                          bucket_prefix=None):
    """
    Performs a pull or push from local filesystem to a production S3 bucket.

    A pull needs to be performed in order to be able to run tests or make builds.
    APPLICATION_MODE needs to be production in order to run this command.

    :param ctx: invoke context providing ``config.env`` and ``run``
    :param push: when True, upload local files to the bucket instead of pulling
    :param no_profile: when True, omit the AWS_PROFILE environment prefix
    :param bucket_prefix: bucket name prefix; defaults to REPOSITORY_AWS_PROFILE
    :raises Exit: when pushing outside production or when the user aborts
    """
    if push and ctx.config.env != "production":
        # BUGFIX: Exit was instantiated but never raised, so this guard
        # silently allowed pushing to non-production environments.
        raise Exit("Can't push to environment other than production")
    elif push:
        sure = input(
            "You are about to overwrite production configuration with your local configuration. Are you sure?"
        )
        if sure.lower() not in ["y", "yes"]:
            # BUGFIX: raise so an aborted push actually stops here.
            raise Exit("Aborted push of local configuration files to production")
    profile = f"AWS_PROFILE={REPOSITORY_AWS_PROFILE}" if not no_profile else ""

    local_directory = "."
    bucket_prefix = bucket_prefix or REPOSITORY_AWS_PROFILE
    repository_state_bucket = f"s3://{bucket_prefix}-repository-state"
    # Direction of transfer flips between pull (default) and push.
    source = repository_state_bucket if not push else local_directory
    destination = local_directory if not push else repository_state_bucket
    targets = [("cp", os.path.join("nginx", "ip-whitelist.conf")),
               ("sync",
                os.path.join("harvester", "sources", "factories", "fixtures"))]
    for operation, path in targets:
        source_path = os.path.join(source, path)
        destination_path = os.path.join(destination, path)
        ctx.run(
            f"{profile} aws s3 {operation} {source_path} {destination_path}",
            echo=True)
Example #2
0
def release(ctx, release_type):
    """Releases the project in one swift command!"""
    valid_types = ('patch', 'minor', 'major')
    if release_type not in valid_types:
        raise Exit(
            'The release type parameter is invalid.\nMust be one of: major, minor, patch'
        )

    # Verify the project is in a releasable state first.
    ctx.run('invoke check test')

    # Bump the version number and create the matching git tag.
    ctx.run('bump2version %s --verbose' % release_type)

    # Produce source and wheel distributions.
    ctx.run('python setup.py clean --all sdist bdist_wheel')

    # Push the artifacts to PyPI only after explicit confirmation.
    if not confirm(
            'You are about to upload the release to pypi.org. Are you sure? [y/N]'
    ):
        raise Exit('Aborted release')

    matches = []
    for pattern in ['dist/*.whl', 'dist/*.gz', 'dist/*.zip']:
        matches.extend(glob.glob(pattern))
    dist_files = ' '.join(matches)

    if not dist_files:
        raise Exit('No files found to release')

    ctx.run('twine upload --skip-existing %s' % dist_files)
    prepare_changelog(ctx)
Example #3
0
def tests(ctx, delphi_version=DEFAULT_DELPHI_VERSION):
    """Builds and executes the unit tests.

    Builds the test client and the test server projects, starts the server
    in the background, runs the client suite against it, and always kills
    the server afterwards.

    :raises Exit: when the unit test run exits with a non-zero code.
    """
    testclient = r"unittests\general\Several\DMVCFrameworkTests.dproj"
    testserver = r"unittests\general\TestServer\TestServer.dproj"

    print("\nBuilding Unit Test client")
    build_delphi_project(ctx, testclient, config="CI", delphi_version=delphi_version)
    print("\nBuilding Test Server")
    build_delphi_project(ctx, testserver, config="CI", delphi_version=delphi_version)

    import subprocess

    print("\nExecuting tests...")
    # Start the test server in the background, then run the client suite.
    subprocess.Popen([r"unittests\general\TestServer\bin\TestServer.exe"])
    try:
        r = subprocess.run([r"unittests\general\Several\bin\DMVCFrameworkTests.exe"])
    finally:
        # BUGFIX: always tear the server down; previously an early return
        # skipped the taskkill and left TestServer.exe running.
        subprocess.run(["taskkill", "/f", "/im", "TestServer.exe"])
    if r.returncode != 0:
        print(r)
        print("Unit Tests Failed")
        # BUGFIX: previously `return Exit(...)` which discarded the error and
        # reported success; the duplicate second returncode check was dead code.
        raise Exit("Unit tests failed")
Example #4
0
def release(ctx, release_type):
    """Releases the project in one swift command!"""
    allowed = ('patch', 'minor', 'major')
    if release_type not in allowed:
        raise Exit('The release type parameter is invalid.\nMust be one of: major, minor, patch')

    # Checks and tests must pass before anything is tagged.
    ctx.run('invoke check test')

    # Bump the version number and create the matching git tag.
    ctx.run('bump2version %s --verbose' % release_type)

    # Build source and wheel distributions.
    ctx.run('python setup.py clean --all sdist bdist_wheel')

    # Start the changelog for the next release cycle.
    prepare_changelog(ctx)

    # Remove local build artifacts.
    clean(ctx)

    # Pushing the tags triggers the pypi release pipeline.
    if not confirm('Everything is ready. You are about to push to git which will trigger a release to pypi.org. Are you sure? [y/N]'):
        raise Exit('You need to manually revert the tag/commits created.')
    ctx.run('git push --tags && git push')
Example #5
0
def verify_drills(spec):
    """
    Verify that every design in *spec* has the expected drill holes and
    that all designs share exactly the same hole coordinates.

    :param spec: mapping of design slug -> dict with keys 'drill_file',
        'tool_diameter', 'holes_number' and optionally 'filter_out_f'
        (a predicate selecting holes to discard).
    :raises Exit: when a design has an unexpected hole count or when the
        designs' hole sets differ.
    """
    designs_holes = {}

    for slug, data in spec.items():
        holes = get_drills_of_dia(data['drill_file'], data['tool_diameter'])

        if 'filter_out_f' in data:
            holes = [h for h in holes if not data['filter_out_f'](h)]
            # Re-anchor coordinates at the origin if filtering removed the
            # hole that previously sat at (0, 0).
            if holes[0] != (0, 0):
                holes = normalize_coords(holes)

        if len(holes) != data['holes_number']:
            raise Exit(
                dedent(f"""\
                Found {len(holes)} holes in {slug}, expected {data["holes_number"]}:
                {holes}
            """))

        designs_holes[slug] = sorted(holes)

    # NOTE: assumes spec contains exactly two designs to compare.
    a, b = designs_holes.keys()
    if designs_holes[a] != designs_holes[b]:
        raise Exit(
            dedent(f"""\
            Holes did not match:
            {a} = {designs_holes[a]}
            {b} = {designs_holes[b]}
        """))

    print('PASS!')
def add_to_rhino(ctx):
    """Adds the current project to Rhino Python search paths."""
    try:
        src_path = os.path.join(os.getcwd(), 'src')
        settings_by_version = [('5.0', 'settings.xml'),
                               ('6.0', 'settings-Scheme__Default.xml')]
        updated_count = 0

        for rhino_version, settings_name in settings_by_version:
            ironpython_path = get_ironpython_path(rhino_version)
            if not ironpython_path:
                continue

            settings_file = os.path.join(ironpython_path, settings_name)
            if os.path.isfile(settings_file):
                updateSearchPaths(settings_file, src_path)
                log.write('Updated search path for Rhino ' + rhino_version)
                updated_count += 1
            else:
                log.warn('IronPython settings for Rhino ' + rhino_version +
                         ' not found')

        # No settings file touched means Rhino integration could not be set up.
        if updated_count == 0:
            raise Exit(
                '[ERROR] No Rhino settings file found\n'
                'Could not automatically make this project available to IronPython\n'
                'To add manually, open EditPythonScript on Rhinoceros, go to Tools -> Options\n'
                'and add the project path to the module search paths')

    except RuntimeError as error:
        raise Exit(error)
Example #7
0
def release(ctx, release_type, bump_version=False):
    """Releases the project in one swift command!"""
    if release_type not in ('patch', 'minor', 'major'):
        raise Exit(
            'The release type parameter is invalid.\nMust be one of: major, minor, patch'
        )

    with chdir(BASE_FOLDER):
        if bump_version:
            # Optionally bump the version and tag it first.
            ctx.run('bumpversion %s --verbose' % release_type)
        # Docs and tests must pass before building distributions.
        ctx.run('invoke docs test')
        ctx.run('python setup.py clean --all sdist bdist_wheel')
        if not confirm(
                'You are about to upload the release to pypi.org. Are you sure? [y/N]'
        ):
            raise Exit('Aborted release')

        collected = []
        for pattern in ['dist/*.whl', 'dist/*.gz', 'dist/*.zip']:
            collected.extend(glob.glob(pattern))
        dist_files = ' '.join(collected)

        if not dist_files:
            raise Exit('No files found to release')
        ctx.run('twine upload --skip-existing %s' % dist_files)
Example #8
0
def deploy(ctx):
    """
    Create or update the CloudFormation stack. Note: you must run `package` first.

    :raises Exit: when the packaged zipfile is missing from s3, when an
        existing stack lacks a VpcCidrBlock output, or when the aws cli
        fails for any reason other than "No updates".
    """
    template_path = join(dirname(__file__), 'template.yml')

    if not s3_zipfile_exists(ctx):
        print("No zipfile found in s3!")
        print("Did you run the `package` command?")
        raise Exit(1)

    current_stack = existing_stack(ctx)

    if current_stack is None:
        create_or_update = "create"
        cidr_block = find_cidr_base(ctx)
    else:
        create_or_update = "update"
        # Reuse the cidr block the stack was originally created with.
        try:
            cidr_block = next(
                x["OutputValue"] for x in current_stack["Outputs"]
                if x["OutputKey"] == "VpcCidrBlock"
            )
        except StopIteration:
            print("Existing stack doesn't have a cidr block?!?!")
            raise Exit(1)

    cmd = ("aws {} cloudformation {}-stack "
           "--stack-name {} "
           "--capabilities CAPABILITY_NAMED_IAM "
           "--template-body file://{} "
           "--tags Key=Project,Value=MH Key=OU,Value=DE Key=TranscriptIndexer,Value=1 "
           "--parameters "
           "ParameterKey=CidrBlock,ParameterValue='{}' "
           "ParameterKey=LambdaCodeBucket,ParameterValue='{}' "
           "ParameterKey=NotificationEmail,ParameterValue='{}' "
           "ParameterKey=ElasticsearchInstanceType,ParameterValue='{}' "
           ).format(
        profile_arg(),
        create_or_update,
        getenv("STACK_NAME"),
        template_path,
        cidr_block,
        getenv('LAMBDA_CODE_BUCKET'),
        getenv('NOTIFICATION_EMAIL'),
        getenv('ES_INSTANCE_TYPE')
    )

    res = ctx.run(cmd, warn=True, hide=True)
    if res.exited != 0 and "No updates" in res.stderr:
        print("Stack is up-to-date!")
        return
    elif res.exited != 0:
        raise Exit(res.stderr)

    print("Waiting for deployment/update to complete...")
    # BUGFIX: use profile_arg() like the command above instead of the
    # hardcoded `--profile test`, which broke deploys under other profiles.
    cmd = ("aws {} cloudformation wait stack-{}-complete "
           "--stack-name {}").format(profile_arg(), create_or_update,
                                     getenv('STACK_NAME'))
    ctx.run(cmd)
    print("Done")
Example #9
0
def _check_stack(stack):
    if stack is None:
        raise Exit("No stack name specified")

    stack_path = os.path.abspath(stack)
    if not os.path.isdir(stack_path):
        raise Exit(f"No stack directory named {stack}")

    return stack_path
Example #10
0
 def install(cls, cxn, args, is_master):
     """Install the cluster software on a node over SSH.

     First validates that the *_HOME environment variable required by the
     selected cluster type is set locally, then copies the artifacts, installs
     the RPM, and applies the rubix operation on the node.

     :param cxn: connection to the target node
     :param args: parsed CLI arguments; ``args.cluster_type`` selects the engine
     :param is_master: whether the target node is the cluster master
     :raises Exit: when a required *_HOME environment variable is missing
     """
     if "HADOOP_HOME" not in os.environ:
         raise Exit("HADOOP_HOME must be set.")
     elif args.cluster_type == "spark" and "SPARK_HOME" not in os.environ:
         raise Exit("SPARK_HOME must be set.")
     elif args.cluster_type == "presto" and "PRESTO_HOME" not in os.environ:
         raise Exit("PRESTO_HOME must be set.")
     cls._scp(cxn, args)
     cls._rpm_install(cxn, args)
     cls._rubix_op(cxn, args, is_master)
Example #11
0
def upgrade_all_packages(ctx, skip=False, patch=False, packages=None):
    """
    Upgrade all the packages listed in all ``requirements/*.txt`` files.

    Only the versions written in the text files change; nothing is installed
    into the running environment. Used to keep Read the Docs up to date, to
    surface incompatibilities with newer versions, and to pick up the latest
    security releases.

    :param skip: pass ``--skip`` to pur (also the default behaviour).
    :param patch: pass ``--patch`` to pur; mutually exclusive with *skip*.
    :param packages: comma-separated package names; defaults to a pinned
        set that is only ever upgraded at the patch level.
    :raises Exit: when pur is not installed or both flags are given.
    """
    try:
        import pur
    except ImportError:
        print('You need to install `pur` package: "pip install pur"')
        raise Exit(1)

    if skip and patch:
        print("You can't use --patch and --skip together.")
        raise Exit(1)
    # --skip is the default when neither flag is given.
    method = '--patch' if patch else '--skip'

    command_template = 'pur {method} {packages} --requirement {reqfile}'

    # These packages are only upgraded at patch level because later versions
    # have known incompatibilities. See each .txt file for the reasons.
    if packages is None:
        packages = ','.join((
            'redis',
            'commonmark',
            'django',
            'docker',
            'celery',
            'gitpython',
            'elasticsearch',
            'pyelasticsearch',
            'mercurial',
        ))

    for reqfile in glob.glob('requirements/*.txt'):
        cmd = command_template.format(
            packages=packages, reqfile=reqfile, method=method)
        print('Running: {}'.format(cmd))
        ctx.run(cmd)
Example #12
0
def restore_snapshot(conn, source_profile, snapshot_name=None):
    """
    Loads a particular snapshot into the database on AWS through a bastion host

    :param conn: fabric connection; must point at the configured bastion host
    :param source_profile: AWS profile used to download the snapshot
    :param snapshot_name: snapshot to restore; behaviour for None is decided
        by download_snapshot (presumably "latest") -- TODO confirm
    :raises Exit: when not connected to the bastion or when the target
        environment is production
    """
    if conn.host != conn.config.aws.bastion:
        raise Exit(
            f"Did not expect the host {conn.host} while the bastion is {conn.config.aws.bastion}"
        )
    if conn.config.env == "production":
        raise Exit("Cowardly refusing to restore the production database")

    snapshot_file_path = download_snapshot(
        snapshot_name, conn.config.aws.search_content_bucket, source_profile)
    database = conn.config.postgres.database

    print("Restoring snapshot")
    # Setup auto-responder
    postgres_user = conn.config.postgres.user
    postgres_password = conn.config.secrets.postgres.password
    # NOTE(review): the line below appears mangled (likely a scrubbed
    # credential) and is not valid Python as-is -- restore the original
    # Responder(pattern=..., response=...) call before running this.
    postgres_password_responder = Responder(pattern=r"Password: "******"\n")

    # Run Postgres command with port forwarding
    with conn.forward_local(local_port=1111,
                            remote_host=conn.config.postgres.host,
                            remote_port=5432):
        # Restore actual database
        conn.local(
            f"psql -h localhost -p 1111 -U {postgres_user} -W -d {database} -f {snapshot_file_path}",
            echo=True,
            watchers=[postgres_password_responder],
            pty=True)
        # Create generic superuser named supersurf
        admin_password = conn.config.secrets.django.admin_password
        harvester_key = conn.config.secrets.harvester.api_key
        insert_user = insert_django_user_statement(
            "supersurf",
            admin_password,
            harvester_key,
            is_search_service=conn.config.service.name == "service")
        conn.local(
            f'psql -h localhost -p 1111 -U {postgres_user} -d {conn.config.postgres.database} -W -c "{insert_user}"',
            echo=True,
            pty=True,
            warn=True,
            watchers=[postgres_password_responder],
        )

    # Best-effort cleanup of the downloaded snapshot artifact.
    conn.local(f"rm {snapshot_file_path}.bak", warn=True)
    print("Done")
Example #13
0
def deploy(c, user=None, mode="test", branch="master"):
    '''
    Deploy latest code on our server and set it 'live'
    (you need admin rights for this on the server)
    Be sure to use --user=you for sudo commands to succeed.
    Also, use --mode=production to update the production app.
    Finally, fabric accepts a few parameters so you handle your ssh key
    correctly. For instance, you might need to use:
    --identity=path/to/your/provate/key
    --prompt-for-passphrase
    See Fabrics's (>=2.0) documentation for more details.

    :param branch: git branch to deploy (default: master)
    :raises Exit: when no user is given or the user aborts at confirmation
    '''
    if not user:
        raise Exit("Please provide a user name for use on the server")

    # make sure we don't deploy untested or outdated code
    #test(c, standalone=False)  # Here, c is a Connection, not a Context, so this would need work
    #push(c)
    if not confirm(
            "Have you tested and pushed your code?? (You can use `fab prepare_deploy`)"
    ):
        raise Exit("Aborting at user request.")

    code_dir = "/var/voko/git-repo"
    # NOTE(review): app_dir is only referenced by commented-out code below.
    app_dir = "/var/www"

    # make sure we have the code cloned on the server
    if not c.run(f"test -d {code_dir}", warn=True):
        c.run(f"git clone [email protected]:vokomokum/vkmkm-erp.git {code_dir}",
              user=user)

    # make sure sudo command can be used
    sudo_pass = getpass.getpass(f"What's the sudo password for user {user}?")
    c.config = Config(overrides={'sudo': {'password': sudo_pass}})

    #with cd(code_dir):  # not yet implemented in fabric2 and sudo does not remember cd
    # Run the server-side update script for the requested mode and branch.
    c.sudo(
        f"{code_dir}/dev-tools/update-members-site-from-git {mode} {branch}")

    # No need to restart apache, simply touch wsgi file (when in daemon mode)
    # (see http://code.google.com/p/modwsgi/wiki/ReloadingSourceCode)
    #if mode == 'production':
    #    c.sudo(f"touch {app_dir}/members/pyramid.wsgi")
    #else:
    #    c.sudo(f"touch {app_dir}/memberstest/pyramid.wsgi")

    # The above is not working anymore, we do restart directly now
    #c.sudo("/etc/init.d/apache2 restart")
    c.sudo("systemctl restart httpd")
def create_local_archive(conn, config, src_commit):
    """
    Create local archive and return its path.

    Checks out the source branch (or *src_commit*), reveals git-secret
    protected files when present, strips secret/template artifacts on a
    throwaway "<branch>-archive" branch, produces a tgz via ``git archive``
    and finally restores the original branch.

    :param conn: connection used to run the git commands
    :param config: project config (working tree, branch, secrets file name)
    :param src_commit: commit/branch to archive; None means the config branch
    :returns: path of the generated archive (a mktemp file)
    :raises Exit: when the working tree is missing, or is dirty and the
        user declines to reset it
    """
    wt = config.get_working_tree()
    if src_commit is None:
        src_branch = config.get_config_branch()
    else:
        src_branch = src_commit
    if not os.path.exists(wt):
        raise Exit("Working tree '{}' does not exist!".format(wt))
    has_secrets = os.path.exists(os.path.join(wt, ".gitsecret"))
    with conn.cd(wt):
        # Refuse (or hard-reset) when the working tree has local changes.
        result = conn.run("git diff-index --quiet HEAD --", warn=True)
        if result.failed:
            if confirm(
                    "There are uncommited changes in the working tree.  Reset to HEAD?"
            ):
                conn.run("git reset --hard HEAD")
            else:
                raise Exit(
                    "Can't use working tree with uncommitted changes.  Stash, commit, or reset."
                )
        conn.run("git checkout {}".format(shellquote(src_branch)))
        if has_secrets:
            # Decrypt secret files and render the templates depending on them.
            conn.run("git secret reveal")
            ttools.fill_templates(config)
        archive_branch = "{}-archive".format(src_branch)
        # Recreate the archive branch from scratch (warn: it may not exist yet).
        conn.run("git branch -D {}".format(shellquote(archive_branch)),
                 warn=True)
        conn.run("git checkout -b {}".format(shellquote(archive_branch)))
        filter_files_for_archival(conn, config, ".secret")
        filter_files_for_archival(conn, config, ".template")
        if has_secrets:
            secrets_file_name = config.get_secrets_file_name()
            if secrets_file_name is not None:
                conn.run("git rm -f {}".format(shellquote(secrets_file_name)))
        if os.path.exists(os.path.join(wt, '.gitignore')):
            conn.run("git rm -f .gitignore")
        if os.path.exists(os.path.join(wt, '.gitsecret')):
            conn.run("git rm -rf .gitsecret")
        conn.run("git commit -m 'Decrypted for deployment.'", warn=True)
        archive_path = conn.run("mktemp").stdout.rstrip()
        conn.run("git archive --format tgz -o {} HEAD".format(
            shellquote(archive_path)))
        # Restore the original branch and drop the temporary archive branch.
        conn.run("git checkout {}".format(shellquote(src_branch)))
        conn.run("git branch -D {}".format(shellquote(archive_branch)))
    return archive_path
Example #15
0
def deploy_poen(c):
    """
    Deploy the poen app on SERVER: pull the latest code from GitHub,
    compile the assets in the node container, upgrade the database and
    reload the uwsgi application.

    :raises Exit: when the node container is not running.
    """
    sudo_pass = getpass.getpass("Enter your sudo password on %s: " % SERVER)
    config = Config(overrides={'sudo': {'password': sudo_pass}})
    # Rebind c to a fresh connection that carries the sudo password.
    c = Connection(SERVER, config=config)

    # Pull from GitHub
    c.run('cd %s && git pull [email protected]:openstate/%s.git' %
          (DIR, GIT_REPO))

    # Compile assets
    output = c.sudo('docker inspect --format="{{.State.Status}}" %s' %
                    (NODE_CONTAINER))
    if output.stdout.strip() != 'running':
        raise Exit(
            '\n*** ERROR: The %s container, used to compile the assets, is '
            'not running. Please build/run/start the container.' %
            (NODE_CONTAINER))
    c.sudo('docker exec %s yarn' % (NODE_CONTAINER))
    c.sudo('docker exec %s yarn prod' % (NODE_CONTAINER))

    # Upgrade database
    c.sudo('docker exec %s flask db upgrade' % (APP_CONTAINER))

    # Reload app
    c.run('cd %s && touch uwsgi-touch-reload' % (DIR))
Example #16
0
def deploy(c):
    """
    Deploy the app on SERVER: pull the latest code, rebuild and start the
    docker containers, compile the landing page and assets, upgrade the
    database and push the elasticsearch templates.

    :raises Exit: when the node container is not running.
    """
    sudo_pass = getpass.getpass("Enter your sudo password on %s: " % SERVER)
    config = Config(overrides={'sudo': {'password': sudo_pass}})
    # Rebind c to a fresh connection that carries the sudo password.
    c = Connection(SERVER, config=config)

    # Pull from GitHub
    c.run('cd %s && git pull [email protected]:openstate/%s.git' %
          (DIR, GIT_REPO))

    # build & start new containers
    c.sudo("sh -c 'cd %s && docker-compose build'" %
           (os.path.join(DIR, 'docker'), ))
    c.sudo("sh -c 'cd %s && docker-compose up -d'" %
           (os.path.join(DIR, 'docker'), ))

    # compile web landing page
    c.sudo("docker exec %s ./makesite.py" % (MAKESITE_CONTAINER, ))
    # Compile assets
    output = c.sudo('docker inspect --format="{{.State.Status}}" %s' %
                    (NODE_CONTAINER))
    if output.stdout.strip() != 'running':
        raise Exit(
            '\n*** ERROR: The %s container, used to compile the assets, is '
            'not running. Please build/run/start the container.' %
            (NODE_CONTAINER))
    c.sudo('docker exec %s yarn' % (NODE_CONTAINER))
    c.sudo('docker exec %s yarn build' % (NODE_CONTAINER))

    # Upgrade database
    c.sudo('docker exec %s alembic upgrade head' % (APP_CONTAINER))

    # put elasticsearch mapings
    c.sudo('docker exec %s python manage.py elasticsearch put_templates' %
           (APP_CONTAINER))
Example #17
0
def tag(ctx, tag=None, major=False, minor=False, patch=False):
    """Tag or bump release with a semver tag, prefixed with 'v'. Makes a signed tag."""
    latest = None
    if tag is not None:
        # Explicit tag: strip a leading 'v' and validate it as semver.
        if tag.startswith("v"):
            tag = tag[1:]
        try:
            nextver = semver.parse_version_info(tag)
        except ValueError:
            raise Exit("Invalid semver tag.", 2)
    else:
        existing = get_tags()
        # Start from 0.0.0 when no tags exist yet.
        latest = existing[-1] if existing else semver.VersionInfo(0, 0, 0)
        if patch:
            nextver = latest.bump_patch()
        elif minor:
            nextver = latest.bump_minor()
        elif major:
            nextver = latest.bump_major()
        else:
            # No flag given: default to a patch bump.
            nextver = latest.bump_patch()

    print(latest, "->", nextver)
    # Sign the tag when the current user is a known signer.
    tagopt = "-s" if CURRENT_USER in SIGNERS else "-a"
    ctx.run(f'git tag {tagopt} -m "Release v{nextver}" v{nextver}')
Example #18
0
def is_branch(ctx: Context, branch: str):
    """Abort (Exit code=1) unless HEAD is currently on *branch*."""
    head = ctx.run("git rev-parse --abbrev-ref HEAD", hide=True)
    current_branch: str = head.stdout.strip("\n ")
    if current_branch == branch:
        return
    raise Exit(
        f"invalid branch for this action: {current_branch} (requrired: {branch})",
        code=1)
Example #19
0
def _parse_label(line):
    tokens = re.split('\s{2,}', line)
    if len(tokens) == 2:
        tokens.append('')
    if len(tokens) != 3 or len(tokens[1]) != 6:
        raise Exit(f'Invalid label information:\n{line}')
    return tokens
Example #20
0
def _check_exe(exe, instructions_url):
    exe_path = which(exe)
    if not exe_path:
        msg = ("Couldn't find `{}`.\n"
               "This tool can be found here:\n\n"
               ">  {}".format(exe, instructions_url))
        raise Exit(msg)
Example #21
0
    def create_structure(self):
        """
        Create the basic directory structure on the remote server.

        Creates the project/backups/static/media roots, then ensures the
        project root contains a git repository (initializing one with an
        empty first commit and a `last_master` branch when missing).

        :raises Exit: when git is not available on the remote host.
        """
        command = " ".join([
            "mkdir -p",
            self.project_root,
            self.backups_root,
            self.static_root,
            self.media_root,
        ])
        self.run(command)

        # Initialize empty git repository for project
        with self.cd(self.project_root):
            try:
                self.git("--version", hide=True)
            except UnexpectedExit:
                raise Exit("Provisioning not finished, git not available!")

            try:
                # Succeeds only when a repository already exists here.
                self.git("rev-parse --git-dir", hide=True)
            except UnexpectedExit:
                self.git("init")
                self.git("commit --allow-empty -m empty-commit")
                self.git("branch -f last_master master")
Example #22
0
def build_core(ctx, version="DEBUG", delphi_version=DEFAULT_DELPHI_VERSION):
    """Builds core packages extensions"""
    init_build(version)
    projects = get_delphi_projects_to_build("core", delphi_version)
    succeeded = build_delphi_project_list(ctx, projects, version, "",
                                          delphi_version)
    if not succeeded:
        raise Exit("Build failed")
Example #23
0
def docs(c):
    """Run doctests."""
    pytest_args = [
        "-v",
        "--doctest-modules",
        "--ignore",
        "ubermagutil/tests",
        "ubermagutil",
    ]
    # Propagate pytest's exit status to the invoking shell.
    raise Exit(code=pytest.main(pytest_args))
Example #24
0
def deploy(c, branchname='master'):
    """Deploy *branchname* of the app on the remote host.

    Pulls the repo, checks out the requested branch, copies shared JS
    libs into static, installs requirements into the venv, runs flask db
    migrations and touches the WSGI script to reload the app.

    :raises Exit: when the requested branch does not exist.
    """
    print('c.user={} c.host={} branchname={}'.format(c.user, c.host,
                                                     branchname))

    venv_dir = '/var/www/{server}/venv'.format(server=c.host)
    project_dir = '/var/www/{server}/{appname}/{appname}'.format(
        server=c.host, appname=APP_NAME)

    c.run('cd {} && git pull'.format(project_dir))

    # Ensure the requested branch exists before checking it out.
    if not c.run('cd {} && git show-ref --verify --quiet refs/heads/{}'.format(
            project_dir, branchname),
                 warn=True):
        raise Exit('branchname {} does not exist'.format(branchname))

    c.run('cd {} && git checkout {}'.format(project_dir, branchname))
    c.run('cd {} && cp -R ../../libs/js  {}/static'.format(
        project_dir, APP_NAME))
    # must source bin/activate before each command which must be done under venv
    # because each is a separate process
    c.run('cd {} && source {}/bin/activate && pip install -r requirements.txt'.
          format(project_dir, venv_dir))

    versions_dir = '{}/migrations/versions'.format(project_dir)
    if not c.run('test -d {}'.format(versions_dir), warn=True):
        c.run('mkdir {}'.format(versions_dir))

    # BUGFIX: removed a stray third format() argument (APP_NAME) that the
    # two-placeholder format string never consumed.
    c.run('cd {} && source {}/bin/activate && flask db upgrade'.format(
        project_dir, venv_dir))
    c.run('cd {} && touch {}'.format(project_dir, WSGI_SCRIPT))
Example #25
0
def require_build_config_is_valid(ctx, build_config, strict=True):
    """
    Check that *build_config* names a known build configuration.

    Falls back to the configured default when *build_config* is falsy and
    maps the "host_debug"/"host_release"/"auto" aliases to the concrete
    host configurations before validating against the build-config map.

    :param build_config: requested build config name (may be falsy)
    :param strict: when True an unknown config raises; otherwise the
        error message is printed and False is returned.
    :returns: True when the config is known, False when unknown and not strict.
    :raises Exit: (code=10) when the config is unknown and strict is True.
    """
    if not build_config:
        build_config = ctx.config.build_config or BUILD_CONFIG_DEFAULT

    # Resolve host aliases to concrete host configurations.
    if build_config == "host_debug" or build_config == "auto":
        build_config = HOST_BUILD_CONFIG_DEBUG
    elif build_config == "host_release":
        build_config = HOST_BUILD_CONFIG_RELEASE

    build_configs = ctx.config.build_configs or []
    build_configs_map = ctx.config.get("build_configs_map", None)
    # Lazily (re)build and cache the map when missing or still the default.
    if not build_configs_map or build_configs_map == BUILD_CONFIG_DEFAULT_MAP:
        build_configs_map = make_build_configs_map(build_configs)
        ctx.config.build_configs_map = build_configs_map

    build_config_data = build_configs_map.get(build_config)
    if build_config_data is None:
        # Fall back to the static host-config map.
        build_config_data = BUILD_CONFIG_HOST_MAP.get(build_config)
    if build_config_data is not None:
        return True

    expected = ", ".join(sorted(list(build_configs_map.keys())))
    message = "UNKNOWN BUILD-CONFIG: %s (expected: %s)" % (build_config,
                                                           expected)
    if not strict:
        print(message)
        return False

    # -- STRICT MODE: Here and no further.
    raise Exit(message, code=10)
Example #26
0
def promote_dataset_version(ctx,
                            mode,
                            dataset=None,
                            version=None,
                            version_id=None,
                            legacy_system=True):
    """
    Starts a task on the AWS container cluster or localhost to promote a DatasetVersion index to latest

    :raises Exit: when neither a dataset nor a dataset version id is given.
    """
    command = [
        "python",
        "manage.py",
        "promote_dataset_version",
    ]
    if version_id:
        command += [f"--dataset-version-id={version_id}"]
    elif dataset:
        command += [f"--dataset={dataset}"]
        if version:
            command += [f"--harvester-version={version}"]
    else:
        # BUGFIX: Exit was instantiated but never raised, so the task ran
        # with an incomplete command; also fixed the "of" -> "or" typo.
        raise Exit("Either specify a dataset or a dataset version id")
    run_harvester_task(ctx,
                       mode,
                       command,
                       version=version,
                       legacy_system=legacy_system)
Example #27
0
def fill_templates(config):
    """
    Inspect `secrets.yml` in the working tree and replace the
    placeholders contained in the files described with actual
    secrets.

    :param config: project config providing the working tree path and
        the secrets file name.
    :raises Exit: when the configured secrets file does not exist.
    """
    basedir = config.get_working_tree()
    secrets = config.get_secrets_file_name()
    if secrets is None:
        # No secrets file configured; nothing to fill in.
        return
    secrets_path = os.path.join(basedir, secrets)
    if not os.path.exists(secrets_path):
        raise Exit("Secrets file '{}' does not exist.".format(secrets_path))
    jinja2_env = jinja2.Environment(trim_blocks=True, lstrip_blocks=True)
    with open(secrets_path, "r") as f:
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input and requires a Loader argument in PyYAML >= 6;
        # consider yaml.safe_load here.
        doc = yaml.load(f)
    for fname, info in doc['files'].items():
        path = os.path.join(basedir, fname)
        with open(path) as f:
            try:
                t = jinja2_env.from_string(f.read())
            except Exception as ex:  # noqa: F841 -- `ex` is unused
                fabutils.warn("Error processing template '{0}'.".format(path))
                raise
            # Write the rendered file next to the template, minus its extension.
            transformed = os.path.splitext(path)[0]
            with open(transformed, "w") as fout:
                print(t.render(info['secrets']), file=fout)
Example #28
0
def storeids(number, username):
    """
    Get the files for the archive. However, since this is potentially just a
    local test which is unable to process large amounts of files, save only
    a small subset.

    If the number is less than 1 return the full set

    :raises Exit: when listing the oids on the data server fails.
    """
    connection = Connection(host=DATA_SERVER, user=username)
    try:
        head_command = ''
        if int(number) > 0:
            head_command = ' | head -n ' + str(number)
        indata = StringIO()
        errdata = StringIO()
        data = connection.run('find {} -name "*.xml" {}'.format(
            CORPUS_ROOT, head_command),
                              echo=False,
                              err_stream=errdata,
                              out_stream=indata)
        if data.return_code != 0:
            print('Error in retrieving list of oids')
            errdata.seek(SEEK_SET)
            print(errdata.read())
            # BUGFIX: Exit was instantiated but never raised, so a failed
            # listing silently went on to write an empty/invalid oid file.
            raise Exit(-1)
        indata.seek(SEEK_SET)
        with open(join(LOCAL_DEPLOY_DIR, OID_FILE), 'w') as oid_file:
            while True:
                line = indata.readline()
                if not line:
                    break
                oid_file.write(line)
    finally:
        # Always release the SSH connection, even on the failure path.
        connection.close()
Example #29
0
def build(ctx, version="DEBUG", delphi_version=DEFAULT_DELPHI_VERSION):
    """Builds LoggerPro"""
    projects = get_delphi_projects_to_build(delphi_version)
    succeeded = build_delphi_project_list(ctx, projects, version,
                                          delphi_version)
    if not succeeded:
        raise Exit("Build failed")
Example #30
0
    def __init__(self, version=None, path=None, pattern=VERSION_PATTERN):
        """Initialize from an explicit version or one read from file.

        :param version: Version to use. When not given, the version is read
            from *path*. The special string 'dev' reads the version from
            file and bumps it to the latest suitable development version
            (e.g. 3.0 -> 3.0.1.dev1, 3.1.1 -> 3.1.2.dev1,
            3.2a1 -> 3.2a2.dev1, 3.2.dev1 -> 3.2.dev2).
        :param path: File the version is read from when not given explicitly.
            Also used by :meth:`write`.
        :param pattern: Pattern to use when reading/writing version information.

        Explicit version numbers must be PEP-440 compatible:
        - Stable version in 'X.Y' or 'X.Y.Z' format (e.g. 3.0, 3.2.1)
        - Pre-releases with 'aN', 'bN' or 'rcN' postfix (e.g. 3.0a1, 3.1.1rc2)
        - Development releases with '.devN' postfix (e.g. 3.2.1.dev1 or
          3.2a1.dev2).

        :raises Exit: when the resulting version string is not valid.
        """
        if version == 'dev':
            # Derive the next development version from the one on file.
            version = Version.from_file(path, pattern).to_dev().version
        elif not version:
            version = Version.from_file(path, pattern).version
        match = self.match(version)
        if not match:
            raise Exit(f'Invalid version {version!r}.')
        self.release, self.preview, self.dev = match.group(
            'release', 'pre', 'dev')
        self.path = path
        self.pattern = pattern