def db(request):
    """
    Connect to a running Postgres database, and return the Session object.

    request.param tells whether the db should be built from alembic migrations
    or using metadata.create_all()
    """
    # running in non-UTC catches some timezone errors
    # os.environ["TZ"] = "Etc/UTC"
    os.environ["TZ"] = "America/New_York"

    db_engine = create_engine(config["DATABASE_CONNECTION_STRING"], poolclass=NullPool)

    # drop everything currently in the database
    db_engine.execute("DROP SCHEMA public CASCADE; CREATE SCHEMA public;")

    if request.param != "migrations":
        # create everything from the current models, not incrementally through migrations
        Base.metadata.create_all(db_engine)
    else:
        # rebuild it with alembic migrations
        apply_migrations()

    return sessionmaker(bind=db_engine)
def db_impl(param):
    """
    Connect to a running Postgres database.

    param tells whether the db should be built from alembic migrations or using
    metadata.create_all()
    """
    # running in non-UTC catches some timezone errors
    # os.environ["TZ"] = "Etc/UTC"
    os.environ["TZ"] = "America/New_York"

    # wipe whatever is currently in the database, then reinstall the extensions
    with session_scope() as session:
        session.execute(
            "DROP SCHEMA public CASCADE; CREATE SCHEMA public; CREATE EXTENSION postgis; CREATE EXTENSION pg_trgm;"
        )

    if param == "migrations":
        # rebuild it with alembic migrations
        apply_migrations()
        return

    # create the slugify function
    slugify_sql = Path(__file__).parent / "slugify.sql"
    with open(slugify_sql) as f, session_scope() as session:
        session.execute(f.read())

    # create everything from the current models, not incrementally through migrations
    Base.metadata.create_all(get_engine())
def test_migrations(testconfig):
    """Compares the database schema built up from migrations, with the schema
    built by models.py.

    Both scenarios are started from an empty database, and dumped with pg_dump.
    Any unexplainable differences in the output are reported in unified diff
    format and fails the test.
    """
    # scenario 1: schema produced by running all alembic migrations
    drop_all()
    apply_migrations()
    with_migrations = pg_dump()

    # scenario 2: schema produced directly from the current models
    drop_all()
    create_schema_from_models()
    from_scratch = pg_dump()

    def normalize(dump):
        dump = sort_pg_dump_output(dump)
        # filter out alembic tables
        sections = dump.split("\n-- ")
        kept = [sec for sec in sections if not sec.startswith("Name: alembic_")]
        return strip_leading_whitespace("\n-- ".join(kept).splitlines())

    diff_lines = difflib.unified_diff(
        normalize(with_migrations),
        normalize(from_scratch),
        fromfile="migrations",
        tofile="model",
    )
    diff = "\n".join(diff_lines)
    # printed so the full diff shows up in the pytest output on failure
    print(diff)

    assert diff == ""
    # NOTE(review): this line is the tail of a function (presumably
    # `def log_unhandled_exception(exc_type, exc_value, exc_traceback)`) whose
    # signature lies above this chunk — it routes uncaught exceptions to the logger.
    logger.critical("Unhandled exception", exc_info=(exc_type, exc_value, exc_traceback))

# install the hook so unhandled exceptions end up in the logs instead of stderr
sys.excepthook = log_unhandled_exception

# sanity-check the DB connection before doing any real work
logger.info(f"Checking DB connection")

with session_scope() as session:
    res = session.execute(text("SELECT 42;"))
    if list(res) != [(42, )]:
        raise Exception("Failed to connect to DB")

logger.info(f"Running DB migrations")
apply_migrations()

# optionally seed the database (presumably for dev/test environments — confirm)
if config.config["ADD_DUMMY_DATA"]:
    add_dummy_data()

logger.info(f"Starting")

# ROLE selects which services this process runs; "all" runs everything
if config.config["ROLE"] in ["api", "all"]:
    server = create_main_server(port=1751)
    server.start()
    media_server = create_media_server(port=1753)
    media_server.start()
    logger.info(f"Serving on 1751 (secure) and 1753 (media)")

if config.config["ROLE"] in ["scheduler", "all"]:
    scheduler = start_jobs_scheduler()