class TestPullCrifTable(unittest.TestCase):
    """Integration test for CrifTable against a throwaway Postgres container.

    setUp spins up a postgres:9.5 container, loads a sample DataFrame into the
    ``mart_compliance`` schema, and constructs a CrifTable pointed at it;
    tearDown stops the container.
    """

    def setUp(self) -> None:
        self.sample_table_name = 'crif_sample'
        self.file_type = 'customers'
        # sample_dict is defined elsewhere in this module — presumably the
        # fixture rows asserted on in test_pull_table; verify against source.
        self.sample_df = pd.DataFrame(sample_dict)
        # PostgresContainer.start() returns the container itself, so this
        # keeps a handle to the running container for tearDown.
        self.postgres_container = PostgresContainer("postgres:9.5").start()
        with create_engine(self.postgres_container.get_connection_url()
                           ).connect() as conn, conn.begin():
            # Schema must exist before to_sql targets it.
            conn.execute("create schema if not exists mart_compliance ")
            self.sample_df.to_sql(self.sample_table_name, conn,
                                  schema='mart_compliance')
        # NOTE(review): this CrifTable is never used by the test below (it
        # builds its own) and is never closed — possibly dead setup code.
        self.crif_table = CrifTable(
            self.sample_table_name, 'customers', datetime.now(),
            self.postgres_container.get_connection_url())

    def tearDown(self) -> None:
        # Stop the container so the test leaves no running docker state.
        self.postgres_container.stop()

    def test_pull_table(self):
        """Pull the table as of two cutoff months and check the row counts.

        The expected counts (6 and 8) depend on the dates inside sample_dict —
        presumably more rows become visible at the later cutoff; confirm
        against the fixture data.
        """
        month_a = datetime.strptime('2020-02-01', '%Y-%m-%d')
        month_b = datetime.strptime('2020-04-01', '%Y-%m-%d')
        with CrifTable(
                self.sample_table_name, 'customers', month_a,
                self.postgres_container.get_connection_url()) as crif_table:
            crif_table.pull_table_as_dicts()
            assert len(crif_table.table) == 6
        with CrifTable(
                self.sample_table_name, 'customers', month_b,
                self.postgres_container.get_connection_url()) as crif_table:
            crif_table.pull_table_as_dicts()
            assert len(crif_table.table) == 8
def psql():
    """Start a throwaway Postgres 9.6 container and return test plumbing.

    Returns a dict with the running container, a bound SQLAlchemy session,
    and a request header whose "port" entry points at the container, so the
    web service under test can reach this database.

    !TODO We need to modify the Flask Application to accept a separate
    sessionmaker for testing.
    """
    container = PostgresContainer("postgres:9.6")
    container.POSTGRES_USER = "******"
    container.POSTGRES_PASSWORD = "******"
    container.POSTGRES_DB = "test"
    container.start()

    engine = sqlalchemy.create_engine(container.get_connection_url())
    Session = sessionmaker(bind=engine)
    session = Session()

    # The container maps Postgres to a random host port; pull it out of the
    # connection URL and advertise it to the web service via the header.
    mapped_port = container.get_connection_url().split(":")[-1].split("/")[0]
    header = copy.copy(TEST_HEADER)
    header["port"] = mapped_port

    return {"container": container, "session": session, "header": header}
def main(sqs):
    """Fixture generator: boot Postgres + an SQS FIFO queue, yield the app.

    Exports the container's URL via the DB_URL environment variable before
    importing the application (so the app picks it up at import time), and
    guarantees the container is stopped on teardown.
    """
    container = None
    try:
        print("Starting postgres db...")
        container = PostgresContainer("postgres:11.6-alpine")
        container.start()
        # The app reads its database URL from the environment at import time.
        os.environ['DB_URL'] = container.get_connection_url()
        sqs.create_queue(QueueName="test-queue.fifo",
                         Attributes={"FifoQueue": "true"})
        # Deferred import: must happen only after DB_URL is set.
        from app import main
        yield main
    finally:
        # Tear the container down even if setup or the test body raised.
        if container is not None:
            container.stop()
def db_session(request):
    """Pytest fixture body: a scoped session bound to a fresh Postgres.

    Starts a default-image Postgres container, creates every table declared
    on ``Base``, wires ``Base.query`` to the session, and registers a
    finalizer that stops the container when the requesting test is done.
    """
    container = PostgresContainer()
    container.start()

    engine = create_engine(container.get_connection_url())
    session = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=engine))

    # Give declarative models the Flask-style Model.query interface and
    # materialize the schema in the fresh database.
    Base.query = session.query_property()
    Base.metadata.create_all(engine)

    def _teardown():
        container.stop()

    request.addfinalizer(_teardown)
    return session
import sys
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from testcontainers.postgres import PostgresContainer

# Resolved at import time below: either the testcontainer's URL (under
# pytest) or the local development database.
SQLALCHEMY_DATABASE_URI = None

# somewhat hacky setup to run a testcontainer when running tests
testing = "pytest" in sys.modules
postgres_container = None


def stop_postgres_container():
    """Stop the test database container, if one was started."""
    # PEP 8: compare against None with identity, not equality.
    if postgres_container is not None:
        postgres_container.stop()


if testing:
    postgres_container = PostgresContainer("ratestask-db:latest")
    postgres_container.start()
    SQLALCHEMY_DATABASE_URI = postgres_container.get_connection_url()
else:
    SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://postgres:ratestask@localhost/postgres"

database = SQLAlchemy()
migrate = Migrate(db=database)
class CrossLanguageJdbcIOTest(unittest.TestCase):
    """Integration tests for Beam's cross-language JDBC transforms.

    Each test runs against a Postgres testcontainer, writing/reading via the
    Java JDBC expansion (WriteToJdbc / ReadFromJdbc) and verifying the data
    with a direct SQLAlchemy connection.
    """

    def setUp(self):
        self.postgres = PostgresContainer('postgres:latest')
        self.postgres.start()
        # Direct engine used to create tables and verify results outside Beam.
        self.engine = sqlalchemy.create_engine(
            self.postgres.get_connection_url())
        self.username = '******'
        self.password = '******'
        # Host/port as seen from outside the container — this is what the
        # Java expansion service must use, not the in-container address.
        self.host = self.postgres.get_container_host_ip()
        self.port = self.postgres.get_exposed_port(5432)
        # 'test' is the testcontainers Postgres default database name —
        # presumably matching the container defaults; verify if image changes.
        self.database_name = 'test'
        self.driver_class_name = 'org.postgresql.Driver'
        self.jdbc_url = 'jdbc:postgresql://{}:{}/{}'.format(
            self.host, self.port, self.database_name)

    def tearDown(self):
        self.postgres.stop()

    def test_xlang_jdbc_write(self):
        """Write rows through WriteToJdbc, then verify them via SQLAlchemy."""
        table_name = 'jdbc_external_test_write'
        self.engine.execute(
            "CREATE TABLE {}(f_id INTEGER, f_real REAL, f_string VARCHAR)".
            format(table_name))
        inserted_rows = [
            JdbcWriteTestRow(i, i + 0.1, 'Test{}'.format(i))
            for i in range(ROW_COUNT)
        ]

        with TestPipeline() as p:
            p.not_use_test_runner_api = True
            _ = (p
                 | beam.Create(inserted_rows).with_output_types(JdbcWriteTestRow)
                 | 'Write to jdbc' >> WriteToJdbc(
                     driver_class_name=self.driver_class_name,
                     jdbc_url=self.jdbc_url,
                     username=self.username,
                     password=self.password,
                     statement='INSERT INTO {} VALUES(?, ?, ?)'.format(
                         table_name),
                 ))

        # Re-read through JDBC-agnostic SQLAlchemy and normalize types so the
        # set comparison is order-independent.
        fetched_data = self.engine.execute(
            "SELECT * FROM {}".format(table_name))
        fetched_rows = [
            JdbcWriteTestRow(int(row[0]), float(row[1]), str(row[2]))
            for row in fetched_data
        ]

        self.assertEqual(
            set(fetched_rows),
            set(inserted_rows),
            'Inserted data does not fit data fetched from table',
        )

    def test_xlang_jdbc_read(self):
        """Seed rows via SQLAlchemy, then read them back with ReadFromJdbc."""
        table_name = 'jdbc_external_test_read'
        self.engine.execute(
            "CREATE TABLE {}(f_int INTEGER)".format(table_name))
        for i in range(ROW_COUNT):
            self.engine.execute("INSERT INTO {} VALUES({})".format(
                table_name, i))

        with TestPipeline() as p:
            p.not_use_test_runner_api = True
            result = (p
                      | 'Read from jdbc' >> ReadFromJdbc(
                          driver_class_name=self.driver_class_name,
                          jdbc_url=self.jdbc_url,
                          username=self.username,
                          password=self.password,
                          query='SELECT f_int FROM {}'.format(table_name),
                      ))

            assert_that(
                result,
                equal_to([JdbcReadTestRow(i) for i in range(ROW_COUNT)]))