Example #1
def _insert_test_data(repo_path, host, port, dbuser, dbname):
    # check if an 'insert_test_data' script exists in repo_path/database/
    repo_insert_test_data_path = norm_abs_join_path(
        repo_path, "./database/insert_test_data")
    if os.path.exists(repo_insert_test_data_path):
        # TODO -- add support for running this script
        raise NotImplementedError("We have not implemented running insert_test_data from the repo.")
    else:
        logger.debug(
            f"The repo at '{repo_path}' does not contain './database/insert_test_data' script")

    repo_insert_test_data_sql_path = norm_abs_join_path(
        repo_path, "./database/insert_test_data.sql")
    if os.path.exists(repo_insert_test_data_sql_path):
        logger.info(f"Inserting data in '{repo_insert_test_data_sql_path}'.")
        with open(repo_insert_test_data_sql_path) as ifp:
            run_and_log(
                f"psql -h {host} -p {port} -U {dbuser} {dbname}",
                input=ifp.read().encode()
            )
        return
    else:
        logger.info(
            f"The repo at '{repo_path}' does not contain './database/insert_test_data.sql' script")

    raise ValueError(f"'{repo_path}' does not contain an insert test data script or sql file.")
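Every example on this page calls a norm_abs_join_path helper that is not shown here. A minimal sketch of what such a helper might look like, assuming it simply joins the path components and normalizes the result to an absolute path (the implementation below is inferred from how the examples use it, not taken from the original project):

import os

def norm_abs_join_path(*path_parts):
    """Join path components and return the normalized, absolute result.

    Assumed implementation, e.g. norm_abs_join_path(repo_path, "./database/setup.sql")
    would resolve to an absolute path under repo_path.
    """
    return os.path.abspath(os.path.normpath(os.path.join(*path_parts)))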
Example #2
def setup_development_environment(path):
    version = version_utils.version(path)
    logger.debug('version: %s', version)

    # get package name
    try:
        package_name = get_package_name_from_meta_yaml(path)
    except FileNotFoundError:
        with change_directory(path):
            package_name = repo_name().replace("_", "-")

    logger.debug('package name: %s', package_name)

    # build the package.
    # ( we need to do this to install the dependencies -- which is super hacky but required
    #   because of the jinja templating -- see https://github.com/conda/conda/issues/5126   )
    output_file_path = build_package(path,
                                     version=version,
                                     skip_existing=False)
    logger.debug('build output written to %s', output_file_path)

    # conda install this package, which installs all of the dependencies
    logger.debug("installing package at '%s'", output_file_path)
    # extract the local channel path from the returned output_file_path,
    # which is of the form: /tmp/nboley/conda/linux-64/balrog-10-0.tar.bz2
    local_channel = "file://" + os.path.split(
        os.path.split(output_file_path)[0])[0]
    run_and_log(
        f"conda install {package_name}=={version} --only-deps --yes -c {local_channel}"
    )
    # install the package itself in development mode
    run_and_log(f"cd {path} && python setup.py develop")
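Example #2 (and Example #4 below) relies on a change_directory context manager that is not defined on this page. A minimal sketch of such a helper, assuming it temporarily switches the working directory and always restores the original one:

import contextlib
import os

@contextlib.contextmanager
def change_directory(path):
    """Temporarily change the working directory, restoring it on exit.

    Assumed implementation; the project's actual helper is not shown in these examples.
    """
    original_directory = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(original_directory)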
Example #3
def _setup_db(image_name, repo_path, host, port):
    # check if 'setup.sql' exists in repo_path/database/
    repo_setup_sql_path = norm_abs_join_path(repo_path, "./database/setup.sql")
    if os.path.exists(repo_setup_sql_path):
        with open(repo_setup_sql_path) as ifp:
            setup_sql = ifp.read()
    # if this doesn't exist, revert to the default
    else:
        setup_sql_template_path = norm_abs_join_path(
            os.path.dirname(__file__), "./database_template/scripts/setup.sql")
        logger.debug(f"The repo at '{repo_path}' does not contain './database/setup.sql'."
                     f"\nDefaulting to {setup_sql_template_path} with "
                     f"USER='******', DATABASE='{image_name}'")
        # get the new database password from the environment
        if 'PGPASSWORD' not in os.environ:
            raise ValueError("PGPASSWORD must be in the environment to setup a new database.")
        with open(setup_sql_template_path) as ifp:
            setup_sql = ifp.read().format(
                PGUSER=image_name, PGDATABASE=image_name, PGPASSWORD=os.environ['PGPASSWORD']
            )

    # execute the setup.sql script
    run_and_log(f"psql -h {host} -p {port} -U postgres", input=setup_sql.encode())

    return
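The setup.sql template that Example #3 falls back to is not included on this page. Judging from the PGUSER, PGDATABASE, and PGPASSWORD placeholders it fills in with str.format, and from the docstring in Example #5 below, the template might look roughly like the following (a hypothetical sketch, not the project's actual file):

# Hypothetical contents of database_template/scripts/setup.sql, written as a
# Python string only to show the placeholders that Example #3 substitutes.
SETUP_SQL_TEMPLATE = """
CREATE USER {PGUSER} WITH PASSWORD '{PGPASSWORD}';
CREATE DATABASE {PGDATABASE} OWNER {PGUSER};
"""

setup_sql = SETUP_SQL_TEMPLATE.format(
    PGUSER="myproject", PGDATABASE="myproject", PGPASSWORD="example-password")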
Example #4
def _run_migrations(repo_path, host, port, dbname, dbuser):
    # check if 'migrate' exists in repo_path/database/
    repo_migrate_path = norm_abs_join_path(repo_path, "./database/migrate")
    if os.path.exists(repo_migrate_path):
        # TODO -- add support for running this script
        raise NotImplementedError("We have not implemented running migrate from the repo.")

    logger.info(f"The repo at '{repo_path}' does not contain './database/migrate' script"
                "\nDefaulting to running sqitch migrations in './database/sqitch'")
    sqitch_path = norm_abs_join_path(repo_path, "./database/sqitch")
    if not os.path.exists(sqitch_path):
        raise RuntimeError(
            f"Sqitch migration files must exist at '{sqitch_path}' "
            "if a migration script is not provided.")

    with change_directory(sqitch_path):
        try:
            run_and_log(
                f"sqitch --engine pg deploy db:pg://postgres@{host}:{port}/{dbname}")
        except subprocess.CalledProcessError as inst:
            # we don't care if there's nothing to deploy
            if inst.stderr.decode().strip() == 'Nothing to deploy (empty plan)':
                pass
            else:
                raise
    return
Example #5
def start_test_database(
        repo_path, project_name, host=DEFAULT_TEST_DB_HOST, port=DEFAULT_TEST_DB_PORT):
    """Start a test database in a docker container.

    This starts a new test database in a docker container. This function:
    1) builds the postgres server docker image
    2) starts the docker container on port 'port'
    3) waits for the database cluster to start
    4) runs the DB setup script
       - create a new db user $dbname
       - create a new db $dbname owned by $dbname
    5) runs the database migrations
    """
    # TODO (nb): add a check to ensure that 'port' is free
    # host is always localhost because we are running it in a local Docker container
    if host != 'localhost':
        raise NotImplementedError('Non localhost test databases are not supported.')

    # set the path to the Postgres Dockerfile
    docker_file_path = norm_abs_join_path(repo_path, "./database/Dockerfile")
    # if the repo doesn't have a Dockerfile in the database sub-directory, then
    # default to the template Dockerfile
    if not os.path.exists(docker_file_path):
        docker_file_path = norm_abs_join_path(
            os.path.dirname(__file__), "./database_template/Dockerfile")
        logger.info(f"Setting DB docker file path to '{docker_file_path}'")

    docker_file_dir = os.path.dirname(docker_file_path)

    # build
    cmd = f"docker build --rm -t {project_name}:latest {docker_file_dir}"
    run_and_log(cmd)

    # starting-db
    cmd = f"docker run -d -p {port}:5432 --name {project_name}_{port} {project_name}:latest"
    run_and_log(cmd)
    # the database cluster needs some time to start, so try to connect periodically until we can
    _wait_for_db_cluster_to_start(host, port)

    # setup-db
    # we need to connect to the 'postgres' database to create a new database
    _setup_db(project_name, repo_path, host, port)

    # run-migrations
    _run_migrations(
        repo_path=repo_path, host=host, port=port, dbname=project_name, dbuser=project_name)

    # insert test data
    _insert_test_data(
        repo_path=repo_path, host=host, port=port, dbname=project_name, dbuser=project_name)

    # log the connection command
    connection_cmd = f"psql -h {host} -p {port} -U {project_name} {project_name}"
    logger.info(f"Database is up! You can connect by running:\n{connection_cmd}")
Example #6
def stop_test_database(project_name, host=DEFAULT_TEST_DB_HOST, port=DEFAULT_TEST_DB_PORT):
    if host != 'localhost':
        raise NotImplementedError('Non localhost test databases are not supported.')
    image_name = f"{project_name}_{port}"
    cmd = f"docker kill {image_name}"
    try:
        run_and_log(cmd)
    except subprocess.CalledProcessError as inst:
        # if the error is because the container does not exist, raise a
        # custom error type
        pat = (f"Error response from daemon: Cannot kill container:"
               f" {image_name}: No such container: {image_name}")
        if inst.stderr.decode().strip() == pat:
            raise ContainerDoesNotExistError(inst)
        # otherwise just propagate the error
        else:
            raise

    cmd = f"docker rm -f {image_name}"
    run_and_log(cmd)
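Example #6 raises a ContainerDoesNotExistError that is defined elsewhere in the project. A minimal sketch of the exception, assuming it is just a named wrapper used to signal this one docker failure mode:

class ContainerDoesNotExistError(Exception):
    """Raised when 'docker kill' is asked to stop a container that does not exist.

    Assumed definition; the original class is not shown in these examples.
    """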
def test_run_and_log_with_bytes_input():
    run_and_log('cat', input=b'BYTES')


def test_run_and_log():
    run_and_log('echo HELLO')


def test_run_and_log_lots_of_data():
    """Ensure that we don't deadlock when the command produces a lot of output."""
    with tempfile.NamedTemporaryFile() as ofp:
        ofp.write(b'a' * 100_000_000)
        ofp.flush()
        run_and_log(f'cat {ofp.name}')


def test_run_and_log_with_file_input():
    with tempfile.TemporaryFile() as ofp:
        ofp.write(b"FILE")
        # rewind so the subprocess reads the data that was just written
        ofp.seek(0)
        run_and_log('cat', input=ofp)


def test_run_and_log_with_str_input():
    run_and_log('cat', input='STRING')
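The tests above exercise run_and_log with no input, bytes input, str input, a file object, and a command that produces a large amount of output, and Examples #4 and #6 inspect CalledProcessError.stderr raised from it. A minimal sketch of a run_and_log that would satisfy those call sites, assuming it shells out via subprocess.run, captures output, and raises on a non-zero exit status (the exact logging behaviour is guessed):

import logging
import subprocess

logger = logging.getLogger(__name__)

def run_and_log(cmd, input=None):
    """Run a shell command, log its output, and raise on failure.

    Assumed implementation inferred from the tests and examples above:
    'input' may be None, bytes, str, or an open file object.
    """
    logger.info("running: %s", cmd)
    kwargs = {"shell": True, "check": True, "capture_output": True}
    if hasattr(input, "read"):
        # pass file objects through as stdin rather than reading them into memory
        kwargs["stdin"] = input
    elif isinstance(input, str):
        kwargs["input"] = input.encode()
    elif input is not None:
        kwargs["input"] = input
    result = subprocess.run(cmd, **kwargs)
    if result.stdout:
        logger.debug(result.stdout.decode(errors="replace"))
    if result.stderr:
        logger.debug(result.stderr.decode(errors="replace"))
    return result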