def test_command():
    """Smoke test: a one-shot alpine container must echo back "hey".

    Streams the container logs and checks only the first emitted line;
    the loop exits immediately after the first assertion.
    """
    with new_container(image_name='alpine:latest',
                       command='echo "hey"',
                       docker_api_version='1.24') as container:
        for log_line in container.logs(stream=True):
            # Only the first streamed line matters for this check.
            assert log_line == b'hey\n'
            break
def postgres_port():
    """Yield a host port bound to a ready ``postgres:alpine`` container.

    Honors an explicit ``DEVPORT`` environment override; otherwise picks
    any unused port. The container is torn down when the generator closes.
    """
    host_port = os.environ.get('DEVPORT', None) or portpicker.pick_unused_port()
    with dockerctx.new_container(
            image_name='postgres:alpine',
            ports={'5432/tcp': host_port},
            # Block until the server accepts connections. NOTE(review): the
            # unit of `timeout=6000` depends on dockerctx.pg_ready — confirm
            # whether this is seconds (100 min) or something shorter.
            ready_test=lambda: dockerctx.pg_ready(
                host='localhost', port=host_port, timeout=6000)) as container:
        yield host_port
def f_redis():
    """Start a throwaway ``redis:latest`` container.

    Yields ``(container, host_port)``; the container is cleaned up when the
    generator is closed.
    """
    host_port = get_open_port()
    with new_container(
            image_name='redis:latest',
            ports={'6379/tcp': host_port},
            # Redis starts quickly; a fixed half-second pause is treated as
            # "ready" (the `or True` makes the lambda always succeed).
            ready_test=lambda: time.sleep(0.5) or True,
            docker_api_version='1.24') as container:
        yield container, host_port
def hms(hive_data_dir):
    """Run a Hive/Spark container with *hive_data_dir* mounted at /data/hive.

    Yields the container object; after the consumer is done, the container's
    logs are printed to aid debugging of failed test runs.
    """
    with new_container(
            'fpin/docker-hive-spark',
            # None lets docker assign free host ports for the metastore (9083)
            # and hiveserver2 (10000) endpoints.
            ports={
                '9083/tcp': None,
                '10000/tcp': None,
            },
            entrypoint='/start.sh',
            environment={
                'USER': getpass.getuser(),
                'USER_ID': os.getuid(),
            },
            volumes={hive_data_dir: '/data/hive'}) as container:
        # according to the container this needs a bit of sleep time.
        time.sleep(30)
        yield container
        # Teardown path: dump whatever the container logged during the test.
        captured_logs = container.logs()
        print("\n\nLogs from docker process")
        print(captured_logs.decode('utf-8'))
def db_fixture():
    """Provision a TimescaleDB container, bootstrap a service DB, yield its port.

    Steps, in order: start the container, create the service role/database as
    the superuser, install required extensions, run alembic migrations as the
    service user, export DB_* env vars for the code under test, then yield.

    NOTE(review): the '******' credential literals look redacted in this
    source — the comment says they are the container's default POSTGRES_USER
    values; confirm the real values before relying on this fixture.
    """
    db_username = '******'  # default POSTGRES_USER value inside container
    db_password = '******'
    db_host = 'localhost'
    # Explicit DEV_DBPORT wins; 0 is falsy so an unset/zero value falls back
    # to picking a free port.
    db_port = biodome.environ.get('DEV_DBPORT', 0) or dockerctx.get_open_port()
    db_name = 'postgres'
    image_name = 'timescale/timescaledb:latest-pg10'
    with dockerctx.new_container(
            image_name=image_name,
            ports={'5432/tcp': db_port},
            # tmpfs-backed data dir: fast, and discarded with the container.
            tmpfs=['/tmp', '/var/lib/postgresql/data:rw'],
            ready_test=lambda: dockerctx.pg_ready(host=db_host, port=db_port),
            # Keep the container around after the test when TEST_DB_PERSIST set.
            persist=lambda: biodome.environ.get('TEST_DB_PERSIST', False),
            environment=['POSTGRES_PASSWORD=password'],
    ) as container:
        logger.info(
            f'Started {image_name} container with name {container.name}')
        service_db_username = biodome.environ.get('DB_USERNAME', 'venus')
        service_db_password = biodome.environ.get('DB_PASSWORD', 'venus')
        service_db_name = biodome.environ.get('DB_NAME', 'venus')
        logger.debug(f'Creating database {service_db_name}')
        # Connect to the DB and add slump user + perms.
        # We still run all operations as postgres user, but connect as the
        # slump user during tests.
        db_connection_url = (f'postgres://{db_username}:{db_password}@'
                             f'{db_host}:{db_port}/{db_name}')
        engine = sqlalchemy.create_engine(db_connection_url)
        from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
        # NOTE(review): forcing autocommit on the pooled DBAPI connection so
        # CREATE DATABASE can run outside a transaction; this assumes the
        # same pooled connection is reused by the engine.execute() calls
        # below — fragile, verify against the SQLAlchemy pool behavior.
        engine.raw_connection().set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        # CREATE DATABASE needs to happen outside of a transaction, so run the
        # other operations that need to run as user postgres on db postgres at
        # the same time.
        engine.execute(f"""CREATE ROLE {service_db_username} WITH LOGIN;""")
        engine.execute(
            f"""ALTER ROLE {service_db_username} WITH PASSWORD '{service_db_password}';"""
        )
        engine.execute(f"""CREATE DATABASE {service_db_name}""")
        engine.dispose()
        # Reconnect (still as the superuser) to the freshly created service DB.
        db_connection_url = (f'postgres://{db_username}:{db_password}@'
                             f'{db_host}:{db_port}/{service_db_name}')
        with sqlalchemy.create_engine(db_connection_url,
                                      echo=True).connect() as connection:
            connection.execute(
                f"""CREATE SCHEMA {service_db_username} AUTHORIZATION {service_db_username};"""
            )
            # Have to run these here because only superuser can install
            # these extensions.
            connection.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')
            connection.execute('CREATE EXTENSION IF NOT EXISTS "pg_trgm";')
            connection.execute(
                'CREATE EXTENSION IF NOT EXISTS "timescaledb" CASCADE;')
        logger.debug(f'Running migrations on: {db_connection_url}')
        # Change to the venus user instead of the postgres user to run the
        # migrations.
        db_connection_url = (
            f'postgres://{service_db_username}:{service_db_password}@'
            f'{db_host}:{db_port}/{service_db_name}')
        alembic.config.main(
            argv=['-x', f'url={db_connection_url}', 'upgrade', 'head'])
        # Expose connection details to the application code under test.
        os.environ['DB_USERNAME'] = service_db_username
        os.environ['DB_PASSWORD'] = service_db_password
        os.environ['DB_HOST'] = db_host
        os.environ['DB_PORT'] = str(db_port)
        os.environ['DB_NAME'] = service_db_name
        yield db_port