def teardown():
    app = create_app('test')
    with app.app_context():
        db.session.remove()
        # Clean up objects that db.drop_all() leaves behind: the supplier id
        # sequence, the Alembic version table and PostgreSQL enum types.
        db.engine.execute("drop sequence suppliers_supplier_id_seq cascade")
        db.drop_all()
        db.engine.execute("drop table alembic_version")
        insp = inspect(db.engine)
        for enum in insp.get_enums():
            db.Enum(name=enum['name']).drop(db.engine)
        db.get_engine(app).dispose()
def teardown():
    app = create_app('test')
    with app.app_context():
        db.session.remove()
        db.engine.execute("drop sequence suppliers_supplier_id_seq cascade")
        db.drop_all()
        db.engine.execute("drop table alembic_version")
        insp = inspect(db.engine)
        for enum in insp.get_enums():
            db.Enum(name=enum['name']).drop(db.engine)
        db.get_engine(app).dispose()
    app_env_var_mock.stop()
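# A hedged, minimal sketch (not from the original project): Engine.execute()
# was removed in SQLAlchemy 2.0, so on a current Flask-SQLAlchemy/SQLAlchemy
# stack the same teardown would issue the raw DDL through a connection and
# text(). The sequence name and the 'test' config key are carried over
# unchanged from the snippet above.
from sqlalchemy import inspect, text

def teardown_sqlalchemy2():
    app = create_app('test')
    with app.app_context():
        db.session.remove()
        with db.engine.begin() as conn:
            conn.execute(text("drop sequence suppliers_supplier_id_seq cascade"))
        db.drop_all()
        with db.engine.begin() as conn:
            conn.execute(text("drop table alembic_version"))
        # get_enums() is PostgreSQL-specific; drop any enum types left behind.
        insp = inspect(db.engine)
        for enum in insp.get_enums():
            db.Enum(name=enum['name']).drop(db.engine)
        db.engine.dispose()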
def management_index():
    admin_count = Role.query.filter_by(
        name='Administrator').limit(1).first().users.count()
    eng = db.get_engine(current_app)
    now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    ts = time.time()
    ret = render_template('management-system/index.html',
                          user=current_user,
                          admin_count=admin_count,
                          now=now,
                          platform=platform(),
                          python_version=python_version(),
                          remote_addr=request.remote_addr,
                          db_version=eng.dialect.server_version_info,
                          render_time=lambda: round(time.time() - ts, 2))
    return ret
pairing="McMahon", rule_set="AGA", time_controls="required field", basic_time="required field", overtime_format="required field", overtime_conditions="required field", komi="7", tie_break1="SOS", tie_break2="SODOS") return t print("Tournament...") t = make_tournament() print("Saving tournament...") db.session.add(t) db.session.commit() if __name__ == '__main__': app = get_app('config.DockerConfiguration') with app.app_context(): db.session.remove() db.drop_all() db.get_engine(app).dispose() print('Creating tables...') db.create_all() print('Creating test data...') create_test_data() print('Creating extra data...') create_extra_data()
def create_app(runtime_environment):
    connexion_options = {"swagger_ui": True}

    # This feels like a hack but it is needed. The logging configuration
    # needs to be set up before the flask app is initialized.
    configure_logging()

    app_config = Config(runtime_environment)
    app_config.log_configuration()

    connexion_app = connexion.App(
        "inventory", specification_dir="./swagger/", options=connexion_options)

    # Read the swagger.yml file to configure the endpoints
    parser = ResolvingParser(SPECIFICATION_FILE, resolve_types=RESOLVE_FILES)
    parser.parse()

    for api_url in app_config.api_urls:
        if api_url:
            connexion_app.add_api(
                parser.specification,
                arguments={"title": "RestyResolver Example"},
                resolver=RestyResolver("api"),
                validate_responses=True,
                strict_validation=True,
                base_path=api_url,
            )
            logger.info("Listening on API: %s", api_url)

    # Add an error handler that will convert our top level exceptions
    # into error responses
    connexion_app.add_error_handler(InventoryException, render_exception)

    flask_app = connexion_app.app

    flask_app.config["SQLALCHEMY_ECHO"] = False
    flask_app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    flask_app.config["SQLALCHEMY_DATABASE_URI"] = app_config.db_uri
    flask_app.config["SQLALCHEMY_POOL_SIZE"] = app_config.db_pool_size
    flask_app.config["SQLALCHEMY_POOL_TIMEOUT"] = app_config.db_pool_timeout
    flask_app.config["INVENTORY_CONFIG"] = app_config

    db.init_app(flask_app)
    register_shutdown(db.get_engine(flask_app).dispose, "Closing database")

    flask_app.register_blueprint(monitoring_blueprint,
                                 url_prefix=app_config.mgmt_url_path_prefix)

    @flask_app.before_request
    def set_request_id():
        threadctx.request_id = request.headers.get(REQUEST_ID_HEADER,
                                                   UNKNOWN_REQUEST_ID_VALUE)

    if runtime_environment.event_producer_enabled:
        flask_app.event_producer = EventProducer(app_config)
        register_shutdown(flask_app.event_producer.close, "Closing EventProducer")
    else:
        logger.warning(
            "WARNING: The event producer has been disabled. "
            "The message queue based event notifications have been disabled.")

    payload_tracker_producer = None
    if not runtime_environment.payload_tracker_enabled:
        # If we are running in "testing" mode, then inject the NullProducer.
        payload_tracker_producer = payload_tracker.NullProducer()
        logger.warning(
            "WARNING: Using the NullProducer for the payload tracker producer. "
            "No payload tracker events will be sent to the payload tracker.")

    payload_tracker.init_payload_tracker(app_config, producer=payload_tracker_producer)

    # HTTP request metrics
    if runtime_environment.metrics_endpoint_enabled:
        PrometheusMetrics(
            flask_app,
            defaults_prefix="inventory",
            group_by="url_rule",
            path=None,
            excluded_paths=[
                "^/metrics$", "^/health$", "^/version$", r"^/favicon\.ico$"
            ],
        )

    # initialize metrics to zero
    initialize_metrics(app_config)

    return flask_app
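# A minimal usage sketch, not part of the factory above: building the app and
# serving it locally. RuntimeEnvironment.SERVER and the port are illustrative
# placeholders for whatever configuration object the project actually passes
# in as runtime_environment.
if __name__ == "__main__":
    flask_app = create_app(RuntimeEnvironment.SERVER)
    flask_app.run(host="0.0.0.0", port=8080)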
            rated=False,
            result=result,
            game_record=sgf_data,
            date_played=datetime.datetime.now() - datetime.timedelta(
                seconds=random.randint(0, 10000000)),
        )
        return g

    print("Games...")
    games = [make_game() for i in range(2000)]
    print("Saving games...")
    for g in games:
        db.session.add(g)
    db.session.commit()

    strongest = max(p_priors, key=lambda k: p_priors[k])
    strongest_games = [str(g) for g in games
                       if g.white.user_id == strongest or g.black.user_id == strongest]
    print("Strongest, %d (%f):\n%s" % (strongest, p_priors[strongest], strongest_games))


if __name__ == "__main__":
    app = get_app("config.DockerConfiguration")
    with app.app_context():
        db.session.remove()
        db.drop_all()
        db.get_engine(app).dispose()
        print("Creating tables...")
        db.create_all()
        print("Creating test data...")
        create_test_data()
        print("Creating extra data...")
        create_extra_data()
def initialize_db_connections():
    # Drop the scoped session and close every pooled connection so the
    # current process starts over with a fresh connection pool.
    db.session.remove()
    db.get_engine(current_app).dispose()
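# A hedged sketch (not from the original code) of one common use for such a
# helper: after a worker process forks, database connections inherited from
# the parent must not be reused, so each worker discards them and lets the
# pool rebuild lazily. The gunicorn post_fork hook and the create_app call
# below are illustrative assumptions, not the project's actual wiring.
def post_fork(server, worker):
    app = create_app('production')   # assumed application factory
    with app.app_context():
        initialize_db_connections()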
def test_creating_tables(app):
    create_tables(app)
    inspector = inspect(db.get_engine(app))
    assert sorted(inspector.get_table_names()) == [
        'clients', 'productions', 'requests', 'roles', 'users'
    ]
def test_empty_database(app):
    remove_tables(app)
    inspector = inspect(db.get_engine(app))
    assert len(inspector.get_table_names()) == 0
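# A minimal sketch of the pieces the two tests above rely on but do not show:
# an `app` fixture plus create_tables/remove_tables helpers. The 'test' config
# name and the factory call are assumptions; only the helper and fixture names
# mirror what the tests use.
import pytest

@pytest.fixture
def app():
    app = create_app('test')   # assumed application factory
    yield app

def create_tables(app):
    with app.app_context():
        db.create_all()

def remove_tables(app):
    with app.app_context():
        db.drop_all()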