def rollback(transaction):
    """Undo all database work done during a test.

    Rolls back the scoped session, removes it from the registry, then rolls
    the outer (savepoint-level) transaction back and closes every session.

    :param transaction: the outer SQLAlchemy transaction opened by the
        test harness; must not be None.
    """
    # Discard any uncommitted ORM state first, then drop the scoped session.
    pyc_session.session.rollback()
    pyc_session.Session.remove()
    # Rollback the outer transaction to the savepoint
    assert transaction is not None
    transaction.rollback()
    # Ensure no session keeps a connection checked out afterwards.
    close_all_sessions()
def dbm(request):
    """Fixture: create the database for a test, tear it down afterwards.

    :param request: pytest fixture request object (unused directly).
    """
    db.create_db()
    yield
    # Teardown: close sessions before dropping so no connection is held open.
    close_all_sessions()
    db.drop_all()
    db.delete_db()
    # Sanity check that the on-disk SQLite file is really gone.
    assert not os.path.exists(db.sqlite_path)
def setUp(self):
    """Reset the schema and register three users through the API.

    Closes any leftover sessions, recreates all tables, then POSTs one
    teacher and two students to ``/courses/user``.
    """
    close_all_sessions()
    Base.metadata.create_all(engine)
    cl = app.test_client()
    # The three request payloads differ only in their field values, so a
    # single loop replaces the triplicated open() calls (the unused `rr`
    # result variables are dropped).
    users = [
        {"name": "user1", "email": "*****@*****.**", "password": "pass", "status": "teacher"},
        {"name": "user2", "email": "*****@*****.**", "password": "pass", "status": "student"},
        {"name": "user3", "email": "*****@*****.**", "password": "pass", "status": "student"},
    ]
    for user_data in users:
        cl.open(path='/courses/user', method='POST',
                data=json.dumps(user_data).encode('utf-8'),
                headers={'Content-Type': 'application/json'})
def invalid_db_session():
    """Fixture: yield a session bound to an unreachable host.

    Used to exercise connection-failure handling; the host name is
    deliberately invalid, so any query on the yielded session should fail.
    """
    engine = create_engine(
        'postgresql://*****:*****@invalid_host:5432/db')
    session = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    yield session()
    # Teardown: make sure nothing keeps the (broken) session alive.
    close_all_sessions()
def session(testapp):
    """Fixture: yield the shared db_session inside an app context.

    :param testapp: upstream app fixture (dependency only; not used here).
    """
    ctx = app.app_context()
    ctx.push()
    yield db_session
    #db_session.close_all()
    # close_all_sessions() supersedes the deprecated close_all() above.
    close_all_sessions()
    ctx.pop()
def __handle_entity_event(  # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    self,
    item: ConsumeEntityMessageQueueItem,
) -> None:
    """React to a consumed entity message from the queue.

    For property-value updates, re-validates the conditions and actions
    bound to that property and fires the trigger when its conditions are
    fulfilled. For trigger-related entity changes, clears all ORM sessions
    so stale entities are not served from the identity map.
    """
    if (
        item.routing_key
        in (
            RoutingKey.DEVICE_PROPERTY_ENTITY_CREATED,
            RoutingKey.DEVICE_PROPERTY_ENTITY_UPDATED,
            RoutingKey.CHANNEL_PROPERTY_ENTITY_CREATED,
            RoutingKey.CHANNEL_PROPERTY_ENTITY_UPDATED,
        )
        and "actual_value" in item.data.keys()
        and item.data.get("actual_value") is not None
    ):
        # A property value changed: re-evaluate every condition watching it.
        conditions = self.__conditions_repository.get_all_by_property_identifier(
            property_id=uuid.UUID(item.data.get("id"), version=4),
        )

        for condition in conditions:
            self.__validate_condition_property_item(condition=condition, value=str(item.data.get("actual_value")))

            is_fulfilled = self.__check_conditions(trigger_id=condition.trigger.id)

            if is_fulfilled:
                trigger = self.__triggers_repository.get_by_id(trigger_id=condition.trigger.id)

                if trigger is None or not trigger.enabled:
                    # NOTE(review): this `return` (not `continue`) aborts the
                    # remaining conditions AND skips the actions validation
                    # below when one trigger is missing/disabled — confirm
                    # this early exit is intentional.
                    return

                self.__process_trigger_actions(trigger_id=trigger.id)

        # Also re-validate actions bound to the same property value.
        actions = self.__actions_repository.get_all_by_property_identifier(
            property_id=uuid.UUID(item.data.get("id"), version=4),
        )

        for action in actions:
            self.__validate_action_property_item(action=action, value=str(item.data.get("actual_value")))

    if item.routing_key in (
        RoutingKey.TRIGGER_ENTITY_CREATED,
        RoutingKey.TRIGGER_ENTITY_UPDATED,
        RoutingKey.TRIGGER_ENTITY_DELETED,
        RoutingKey.TRIGGER_CONTROL_ENTITY_CREATED,
        RoutingKey.TRIGGER_CONTROL_ENTITY_UPDATED,
        RoutingKey.TRIGGER_CONTROL_ENTITY_DELETED,
        RoutingKey.TRIGGER_ACTION_ENTITY_CREATED,
        RoutingKey.TRIGGER_ACTION_ENTITY_UPDATED,
        RoutingKey.TRIGGER_ACTION_ENTITY_DELETED,
        RoutingKey.TRIGGER_NOTIFICATION_ENTITY_CREATED,
        RoutingKey.TRIGGER_NOTIFICATION_ENTITY_UPDATED,
        RoutingKey.TRIGGER_NOTIFICATION_ENTITY_DELETED,
        RoutingKey.TRIGGER_CONDITION_ENTITY_CREATED,
        RoutingKey.TRIGGER_CONDITION_ENTITY_UPDATED,
        RoutingKey.TRIGGER_CONDITION_ENTITY_DELETED,
    ):
        # Clear all session after entity changes
        close_all_sessions()
def drop_test_db() -> None:
    """Forcefully drop the test database.

    Runs on an AUTOCOMMIT connection (DROP DATABASE cannot run inside a
    transaction) and closes every ORM session first so no connection blocks
    the drop.
    """
    import sqlalchemy
    from sqlalchemy.orm import close_all_sessions

    engine = sqlalchemy.create_engine(
        ConfigTest.TEST_DATABASE_SERVER, isolation_level="AUTOCOMMIT")
    with engine.connect() as connection:
        close_all_sessions()
        connection.execute(
            f'drop database {ConfigTest.TEST_DB_NAME} WITH (FORCE)')
def shutdown() -> None:
    """Close every open session and dispose of the engine, if any."""
    # Find all Sessions in memory and close them.
    close_all_sessions()
    logger.info("All sessions closed.")
    # Each connection was released on execution, so just formally
    # dispose of the db connection if it's been instantiated
    if db:
        db.dispose()
        logger.info("Database connection disposed.")
def db_session():
    """Fixture: yield a fresh session on a newly created schema.

    The schema is created before the test and dropped afterwards; every
    session is force-closed before the drop so no connection is held open.
    """
    engine = create_engine(settings.SQLALCHEMY_DATABASE_URI)
    Base.metadata.create_all(engine)
    factory = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    sess = factory()
    yield sess
    # Teardown
    close_all_sessions()
    Base.metadata.drop_all(engine)
def before_all(trans):
    """Hook run once before all transactions: set up a test app context.

    Pushes an application context (left pushed for later hooks), stashes the
    app, and starts from an empty database.

    :param trans: transaction list from the test runner (unused here).
    """
    app = create_app('test')
    ctx = app.app_context()
    ctx.push()
    stash['app'] = app
    # Start clean: close any stray sessions before dropping all tables.
    close_all_sessions()
    db.drop_all()
def set_session(sess, sessid='db'):
    """Install *sess* as the module-wide db_session and rebind repositories.

    :param sess: the new SQLAlchemy session to use globally.
    :param sessid: registry key under which the session is registered.
    """
    global db_session
    # Close existing sessions before swapping in the new one.
    close_all_sessions()
    db_session = sess
    register_session(sessid, db_session)
    # todo: put this into eme's register_session (if new)
    for repo in data_access.repositories.values():
        repo.session = db_session
def after_each(trans):
    """Hook run after each transaction: reset the DB and flag empty results.

    Marks the transaction as failed when the response body is an empty JSON
    array, since nothing would have been verified.
    """
    close_all_sessions()
    db.drop_all()
    # Only inspect transactions that actually carry a response body.
    if 'real' in trans and 'body' in trans['real']:
        data = json.loads(trans['real']['body'])
        if isinstance(data, list) and not data:
            trans['fail'] = "Empty array returned, nothing was verified"
def testdb(mocker):
    """Fixture: provide a scoped DBSession bound to a throwaway Postgres DB.

    Creates the test database from scratch, binds the session registry and
    schema to it, and tears everything down afterwards.

    :param mocker: pytest-mock fixture (dependency only; not used directly).
    """
    # Recreate the test database from a clean slate.
    subprocess.check_call(['dropdb', '-U', 'postgres', '--if-exists', TEST_DB])
    subprocess.check_call(['createdb', '-U', 'postgres', TEST_DB])
    engine = create_engine('postgresql://postgres@/' + TEST_DB)
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    yield DBSession
    # Teardown: close/remove sessions and dispose the pool BEFORE dropdb,
    # otherwise lingering connections would block the drop.
    close_all_sessions()
    DBSession.remove()
    engine.dispose()
    subprocess.check_call(['dropdb', '-U', 'postgres', '--if-exists', TEST_DB])
def test_multithreading(self):
    """Ensure spans are captured correctly in a multithreading scenario

    We also expect no logged warnings about calling end() on an ended span.
    """
    # SQLite connections are not shared across threads in this setup.
    if self.VENDOR == "sqlite":
        return

    def insert_player(session):
        # One player via its own scoped session.
        _session = session()
        player = Player(name="Player")
        _session.add(player)
        _session.commit()
        _session.query(Player).all()

    def insert_players(session):
        # Three players in one add_all/commit batch.
        _session = session()
        players = []
        for player_number in range(3):
            players.append(Player(name=f"Player {player_number}"))
        _session.add_all(players)
        _session.commit()

    session_factory = sessionmaker(bind=self.engine)
    # pylint: disable=invalid-name
    Session = scoped_session(session_factory)
    thread_one = threading.Thread(target=insert_player, args=(Session,))
    thread_two = threading.Thread(target=insert_players, args=(Session,))
    logger = logging.getLogger("opentelemetry.sdk.trace")
    # assertLogs raises AssertionError when NO warning is logged, so the
    # outer assertRaises asserts that no "end() on ended span" warning fired.
    with self.assertRaises(AssertionError):
        with self.assertLogs(logger, level="WARNING"):
            thread_one.start()
            thread_two.start()
            thread_one.join()
            thread_two.join()
    # NOTE(review): indentation reconstructed — the statements from here on
    # are assumed to sit outside both context managers; confirm.
    close_all_sessions()
    spans = self.memory_exporter.get_finished_spans()
    # SQLAlchemy 1.4 uses the `execute_values` extension of the psycopg2 dialect to
    # batch inserts together which means `insert_players` only generates one span.
    # See https://docs.sqlalchemy.org/en/14/changelog/migration_14.html#orm-batch-inserts-with-psycopg2-now-batch-statements-with-returning-in-most-cases
    self.assertEqual(
        len(spans), 5 if self.VENDOR not in ["postgresql"] else 3
    )
def setUp(self):
    """Reset the schema, register three users, and create one course.

    POSTs one teacher and two students to ``/courses/user``, then creates a
    course owned by user 1 via ``/courses/course`` with basic auth.
    """
    close_all_sessions()
    Base.metadata.create_all(engine)
    cl = app.test_client()
    # The three user payloads differ only in their values, so a single loop
    # replaces the triplicated open() calls (unused `rr` results dropped).
    users = [
        {"name": "user1", "email": "*****@*****.**", "password": "pass", "status": "teacher"},
        {"name": "user2", "email": "*****@*****.**", "password": "pass", "status": "student"},
        {"name": "user3", "email": "*****@*****.**", "password": "pass", "status": "student"},
    ]
    for user_data in users:
        cl.open(path='/courses/user', method='POST',
                data=json.dumps(user_data).encode('utf-8'),
                headers={'Content-Type': 'application/json'})
    course_data = {
        "name": "Meth",
        "title": "Intense course",
        "owner_id": 1,
        "students": [],
    }
    auth = base64.b64encode('[email protected]:pass'.encode()).decode()
    cl.open(path='/courses/course', method='POST',
            data=json.dumps(course_data).encode('utf-8'),
            headers={'Content-Type': 'application/json',
                     'Authorization': 'Basic ' + auth})
def close(self):
    """Close all ORM sessions and dispose of the engine, retrying once.

    Logs a warning on the first failure and retries; on a second failure
    logs an error and re-raises.
    """
    try:
        close_all_sessions()
        self.engine.dispose()  # NOTE: close required before dispose!
    except Exception as firstException:
        log.warning(
            self.close,
            'not possible to close connections. Going for a second attempt',
            exception=firstException)
        try:
            close_all_sessions()
            self.engine.dispose()  # NOTE: close required before dispose!
        except Exception as secondException:
            # Pass the exception via keyword for consistency with the
            # log.warning call above (originally positional).
            log.error(
                self.close,
                'not possible to close connections at the second attempt either',
                exception=secondException)
            # Bare raise preserves the original traceback (instead of
            # `raise secondException`, which would restart it here).
            raise
    log.debug(self.close, 'Connections closed')
def setUp(self):
    """Reset the schema and register two users through the music API."""
    close_all_sessions()
    Base.metadata.create_all(engine)
    cl = app.test_client()
    # The two payloads share the same structure, so a loop replaces the
    # duplicated open() calls (unused `rr` results dropped).
    users = [
        {"username": "******", "email": "*****@*****.**", "password": "******"},
        {"username": "******", "email": "*****@*****.**", "password": "******"},
    ]
    for user_data in users:
        cl.open(path='/music/user', method='POST',
                data=json.dumps(user_data).encode('utf-8'),
                headers={'Content-Type': 'application/json'})
def teardown():
    """Detach all pending instances and close every session after a test."""
    # Expunge first so no instance stays attached to a closing session.
    db.session.expunge_all()
    close_all_sessions()
def db(engine) -> Generator:
    """Yield a fresh Session bound to *engine*; close all sessions after.

    :param engine: the SQLAlchemy engine the session factory binds to.
    """
    factory = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    db_session: Session = factory()
    yield db_session
    # Teardown: force-close every session opened during the test.
    close_all_sessions()
def tearDown(self):
    """Remove the scoped session, close all sessions, drop all tables."""
    app.db_session.remove()
    close_all_sessions()
    app.Base.metadata.drop_all(app.engine)
def setUp(self):
    """Start each test from a pristine schema.

    Removes/closes any leftover sessions, drops all tables, then recreates
    them so the test sees an empty database.
    """
    app.db_session.remove()
    close_all_sessions()
    app.Base.metadata.drop_all(app.engine)
    app.Base.metadata.create_all(app.engine)
def teardown_test(self):
    """Close sessions, clear classical mappers, and drop the test schema."""
    close_all_sessions()
    clear_mappers()
    Base.metadata.drop_all(testing.db)
def database(db_context: DataBaseContext) -> Iterator[None]:
    """Fixture: provide a clean schema for a test, close sessions after.

    Drops any existing tables first so leftovers from aborted runs cannot
    leak into this test, then creates the schema fresh.

    :param db_context: holds the MetaData the schema is built from.
    """
    db_context.metadata.drop_all()
    db_context.metadata.create_all()
    yield
    close_all_sessions()
def drop_db(self):
    """Commit pending work, close every session, and drop all tables."""
    self.session.commit()
    # `Session.close_all()` is deprecated (removed in SQLAlchemy 1.4);
    # `close_all_sessions()` is its documented replacement, so the old
    # redundant call has been dropped.
    close_all_sessions()
    Base.metadata.drop_all(self.engine)
def tearDown(self):
    """Close all sessions, then drop every table of the test schema."""
    close_all_sessions()
    Base.metadata.drop_all(engine)
def setUp(self):
    """Close leftover sessions and (re)create the full schema."""
    close_all_sessions()
    Base.metadata.create_all(engine)
def teardown(self):
    """Close sessions, clear classical mappers, and drop all tables."""
    close_all_sessions()
    clear_mappers()
    Base.metadata.drop_all()
def drop_db(self, yes_i_am_sure=False):
    """Drop all tables, guarded by an explicit confirmation flag.

    :param yes_i_am_sure: must be True for anything to happen — a
        safeguard against accidentally wiping the database.
    """
    if yes_i_am_sure:
        self.session.commit()
        # `Session.close_all()` is deprecated (removed in SQLAlchemy 1.4);
        # `close_all_sessions()` is its documented replacement, so the old
        # redundant call has been dropped.
        close_all_sessions()
        Base.metadata.drop_all(self.engine)
def db_session():
    """Fixture: provide a SQLite database file for a test, drop tables after.

    NOTE(review): this fixture yields None (not a session object) — callers
    receiving the fixture value get None; confirm this is intentional.
    """
    # echo=True logs all emitted SQL, useful when debugging test failures.
    engine = create_engine("sqlite:///database.sqlite3", echo=True)
    yield
    close_all_sessions()
    Base.metadata.drop_all(bind=engine)
# Script entry: run the bot, then flush and close the database.
# (A large commented-out prompt/retry harness referencing undefined helpers
# `main()` and `yn()` was removed as dead code; restore from VCS history if
# that flow is ever revived.)
bot.run(nf_configs['discord_token'])
# bot.run() blocks until the bot disconnects; persist any pending DB work
# and release every connection before the process exits.
DBSession.commit()
close_all_sessions()  # for SQLAlchemy
print("THAT'S IT! REHABILITATION! FIRST COMES REHABILITATION!")