def get_db() -> Generator:
    """Yield a request-scoped database session, closing it on teardown.

    The session is created *before* the ``try`` so that ``db`` is always
    bound when ``finally`` runs; previously a failing ``SessionLocal()``
    would raise ``NameError`` from ``db.close()``.
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
def clear_db():
    """Wipe test data: empty every static model table (keeping the superuser)
    and drop any reflected tables that do not belong to a known model.
    """
    db = SessionLocal()
    # Collect every mapped model class exported by ``base`` (skip the Base class itself).
    models = [
        cls
        for name, cls in base.__dict__.items()
        if isinstance(cls, type) and name != "Base"
    ]
    model_table_names = [m.__tablename__ for m in models]
    # alembic_version is not a model but must survive the dynamic-table drop below.
    model_table_names.append("alembic_version")
    # Drop records from the static tables
    for model in models:
        try:
            if model.__tablename__ == "user":
                # leave the superuser
                db.query(model).filter(model.id > 1).delete()
            else:
                db.query(model).delete()
            # print(f'Deleted {n} {model.__name__}s')
            # Commit per model so one failure does not undo earlier deletes.
            db.commit()
        except Exception as e:
            print(f"Failed to delete {model}s")
            print(e)
            db.rollback()
    # Drop the dynamically created interface tables
    metadata = MetaData()
    metadata.bind = engine  # NOTE(review): bound metadata lets table.drop() below omit an engine
    metadata.reflect(bind=engine)
    all_tables = metadata.tables
    for name, table in all_tables.items():
        # Anything not in the model list (nor alembic_version) was created dynamically.
        if name not in model_table_names:
            table.drop()
def init_db() -> None:
    """Verify the database is reachable by running a trivial query.

    Raises whatever ``SessionLocal``/``execute`` raised if the DB is down.
    """
    db = SessionLocal()
    try:
        # Try to create session to check if DB is awake
        db.execute("SELECT 1")
    except Exception as e:
        logger.error(e)
        # Bare raise preserves the original traceback ("raise e" rewrote it).
        raise
    finally:
        db.close()  # previously the session was leaked
def db() -> Generator:
    """Yield a session against a freshly created schema, tearing it all down after.

    Fixes two defects in the original: the yielded session was never closed,
    and teardown created an extra throwaway session just to call the
    deprecated ``close_all()`` on it.
    """
    session = SessionLocal()
    try:
        base_model.BASE.metadata.create_all(engine)
        yield session
    finally:
        session.close()
        engine.dispose()
        base_model.BASE.metadata.drop_all(engine)
def check_db() -> None:
    """Fail fast if the database cannot be reached."""
    try:
        # A trivial query proves the connection is actually alive.
        session = SessionLocal()
        session.execute("SELECT 1")
    except Exception:
        logger.exception("Init failed")
        raise
def end_auction(cls, id: int):
    """End the English auction identified by ``id``.

    The session is created outside the ``try`` so ``db`` is guaranteed to be
    bound in ``finally``; previously a failing ``SessionLocal()`` would raise
    ``NameError`` from ``db.close()``.
    """
    db = SessionLocal()
    try:
        english = EnglishAuction()
        auction = auction_repo.get(db, id=id)
        english.end(db, db_obj=auction)
    finally:
        db.close()
def get_db():
    """Yield a database session for one request and always close it."""
    session = SessionLocal()
    logging.debug('Creating DB Session.')
    try:
        yield session
    finally:
        logging.debug('Closing DB Session.')
        session.close()
def setup_db() -> None:
    """Create the database if missing, otherwise reset its schema, then seed it.

    The session is now closed in ``finally`` — previously it leaked whenever
    anything between creation and ``session.close()`` raised.
    """
    session = SessionLocal()
    try:
        if not database_exists(engine.url):
            create_database(engine.url)
        else:
            # Database already present: wipe the schema so init_db starts clean.
            Base.metadata.drop_all(engine)
        init_db(session)
    finally:
        session.close()
def get_db() -> Generator:
    """Yield a database session, closing it on generator teardown.

    Session creation moved out of the ``try`` so a failing ``SessionLocal()``
    cannot trigger ``NameError`` in ``finally``. NOTE(review): errors are
    deliberately not rolled back here (the original carried a TODO about it)
    — callers own transaction control.
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
def get_db() -> Generator:
    """Get a database session; close it when the consumer is done.

    ``SessionLocal()`` is called before the ``try`` so that ``db`` is always
    defined in ``finally`` (previously a construction failure would raise
    ``NameError`` from ``db.close()``).
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
def init():
    """Verify database connectivity with a trivial query; raise if unreachable."""
    db = SessionLocal()
    try:
        # Try to create session to check if DB is awake
        db.execute("SELECT 1")
        # Wait for API to be awake, run one simple tests to authenticate
    except Exception as e:
        logger.error(e)
        raise  # bare raise keeps the original traceback ("raise e" rewrote it)
    finally:
        db.close()  # previously the session was leaked
def get_db() -> Generator:
    """Yield a SQLAlchemy session object and close it when the request ends.

    :return: generator yielding the session

    The session is created before the ``try`` so ``db`` is always bound when
    ``finally`` runs (the original risked ``NameError`` on close).
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
def init() -> None:
    """Log startup info and verify the database is reachable.

    NOTE(review): the two startup messages are logged at ERROR level — looks
    like leftover debugging; confirm before changing the level.
    """
    logger.error(f'{settings.SQLALCHEMY_DATABASE_URI}')
    logger.error('hello c pre start')
    db = None
    try:
        # Try to create session to check if DB is awake
        db = SessionLocal()
        db.execute("SELECT 1")
    except Exception as e:
        logger.error(e)
        raise  # bare raise keeps the original traceback
    finally:
        if db is not None:
            db.close()  # previously the session was leaked
async def authenticate_user(self, environ: Dict) -> schemas.UserInDB:
    """Resolve the connecting user from the ``HTTP_AUTHORIZATION`` token.

    Raises ``ConnectionRefusedError`` when the token is missing or when the
    dependency layer rejects it.
    """
    token = environ.get("HTTP_AUTHORIZATION")
    if token is None:
        raise ConnectionRefusedError("Not authenticated")
    session = SessionLocal()
    try:
        user = await deps.get_current_user(session, self.server.cache, token)
        return user
    except HTTPException as e:
        # Surface auth failures as socket-level refusals.
        raise ConnectionRefusedError(e.detail)
    finally:
        session.close()
def init() -> None:
    """Probe database connectivity and record the result on ``ctx``.

    Best-effort by design: failures are logged and flagged, never raised.
    The session is now closed on both paths — previously it leaked.
    """
    db = None
    try:
        db = SessionLocal()
        # Try to create session to check if DB is awake
        db.execute("SELECT 1")
        ctx.database_connection = True
        logger.info("Database connection is working")
    except Exception as e:
        ctx.database_connection = False
        logger.error(e)
        logger.error("Could not connect to database")
    finally:
        if db is not None:
            db.close()
def init() -> None:
    """Check DB connectivity and ensure the uploaded-files folder exists."""
    db = SessionLocal()
    try:
        # Try to create session to check if DB is awake
        db.execute("SELECT 1")
        if not os.path.isdir(settings.UPLOADED_FILES_FOLDER):
            create_uploaded_files_folder(settings.UPLOADED_FILES_FOLDER)
    except Exception as e:
        logger.error(e)
        raise  # bare raise keeps the original traceback ("raise e" rewrote it)
    finally:
        db.close()  # previously the session was leaked
def get_db():
    """Yield a SQLAlchemy session object and close it afterwards.

    :return: generator yielding the session

    The session is created before the ``try`` so ``db`` is always bound when
    ``finally`` runs (the original risked ``NameError`` on close).
    """
    db = SessionLocal()
    try:
        print('获取数据库会话')
        yield db
    finally:
        db.close()
        print('数据库关闭')
def create_providers_task(providers: List[Dict[str, str]]):
    """Persist the given providers in one bulk create and return them JSON-encoded.

    Each item must carry ``name`` and ``url`` keys.
    """
    session = SessionLocal()
    try:
        objs_in = [
            schemas.ProviderCreate(name=item["name"], url=item["url"])
            for item in providers
        ]
        created = crud.provider.create_multi(db=session, objs_in=objs_in)
        return jsonable_encoder(created)
    finally:
        session.close()
def delete_donations(ctx):
    """Remove every Donation and Donator record (maintenance task entry point).

    Rolls back on failure and always closes the session — previously the
    session was leaked and a failed delete left the transaction open.
    """
    from app.db.session import SessionLocal
    from app.models.donation import Donator, Donation
    db = SessionLocal()
    try:
        # Delete donations first so no rows still reference a donator.
        db.query(Donation).delete()
        db.query(Donator).delete()
        db.commit()
    except Exception:
        db.rollback()
        raise
    finally:
        db.close()
def login_access_token(login: Login):
    """Exchange an app id / secret key pair for an access token.

    :param login: credentials payload carrying ``appId`` and ``secretKey``
    :return: dict with ``success``/``data``/``message`` keys — on success
             ``data`` contains ``access_token``

    Fixes: the original silently returned ``None`` when no matching active
    invoker was found, and leaked the session.
    """
    appid = login.appId
    secret_key = login.secretKey
    db: Session = SessionLocal()
    try:
        invoker: Invoker = (
            db.query(Invoker)
            .filter(Invoker.app_id == appid)
            .filter(Invoker.secret_key == secret_key)
            .filter(Invoker.active == 1)
            .first()
        )
        if invoker:
            token = create_access_token(appid)
            return {
                'success': True,
                'data': {
                    'access_token': token
                },
                'message': '成功'
            }
        # Credentials did not match an active invoker: explicit failure
        # response instead of falling through and returning None.
        return {'success': False, 'data': {}, 'message': '非法请求'}
    except Exception as e:
        logger.exception(e)
        return {'success': False, 'data': {}, 'message': '非法请求'}
    finally:
        db.close()
async def handle_consume(message: IncomingMessage) -> None:
    """Consume a transaction message: persist it and, for transfers, the mirror leg.

    ``message.process()`` acks the message only if this block completes
    without raising.
    """
    async with message.process():
        logger.info(f"{message.body = }")
        # Parse the raw AMQP payload into the input schema.
        data = schemas.TransactionInputSchemas.parse_raw(message.body)
        async with SessionLocal() as session:
            # Persist the primary transaction leg.
            transaction = await commit_db(
                session,
                obj_callable=add_transaction,
                kwargs={
                    "uuid": data.uuid,
                    "amount": data.amount,
                    "wallet_id": data.wallet_id,
                    "transaction_type": data.transaction_type,
                    "currency": data.currency,
                },
                async_callback=after_transaction_create)
            if data.transaction_type == schemas.TransactionType.TRANSFER:
                # Transfers get a second leg on the destination wallet, with a
                # fresh uuid, linked back to the first via transfer_transaction.
                await commit_db(session,
                                obj_callable=add_transaction,
                                kwargs={
                                    "uuid": uuid.uuid4(),
                                    "amount": data.amount,
                                    "wallet_id": data.transfer_wallet_id,
                                    "transaction_type": data.transaction_type,
                                    "currency": data.currency,
                                    "transfer_transaction": transaction
                                },
                                async_callback=after_transaction_create)
def run() -> None:
    """
    This service prepares a flatten dataset with only the most important words
    and it's audio clips. This is the last step to prepare the data for the
    front-end.
    """
    # TODO this is terrible. We shouldn't be getting anything related to DB connection here
    db = SessionLocal()
    try:
        # Get video_captions from unique important words
        captions_with_important_words = get_video_captions(
            db=db,
            words=get_unique_important_words(),
            video_titles=get_processed_video_ids(),
        )
        # Check which captions were properly processed and prepare result
        available_sentence_audios = _get_available_sentence_audios(
            captions_with_important_words
        )
        logger.info(f"{len(available_sentence_audios)} available sentence audios")
        # Merge with words + topics
        words_by_topics_dataset = _prepare_words_and_audios_dataset(
            available_sentence_audios
        )
        # save prepared data as json
        save_as_json(words_by_topics_dataset, FINAL_DATASET_PATH)
        _move_audios_to_static_folder()
    finally:
        # The session was previously never closed; close it once the whole
        # pipeline is done (downstream steps may still lazy-load via it).
        db.close()
def iptables_runner(
    port_id: int,
    server_id: int,
    local_port: int,
    remote_ip: str = None,
    remote_port: int = None,
    forward_type: str = None,
    update_status: bool = False,
):
    """Launch the iptables ansible playbook for one forwarded port.

    Returns the ansible artifact dir of the async run, or ``None`` when
    ``update_status`` is falsy.
    """
    # NOTE(review): the whole run is skipped unless update_status is set —
    # confirm callers rely on this short-circuit.
    if not update_status:
        return
    # NOTE(review): this session is handed to get_server and never closed.
    server = get_server(SessionLocal(), server_id)
    priv_data_dir = prepare_priv_dir(server)
    # Build the CLI args: no forward_type means delete the rule; a full
    # remote endpoint means (re)create the forward; otherwise just list.
    if not forward_type:
        args = f" delete {local_port}"
    elif remote_ip and remote_port:
        args = f" -t={forward_type} forward {local_port} {remote_ip} {remote_port}"
    else:
        args = f" list {local_port}"
    extra_vars = {
        "host": server.ansible_name,
        "local_port": local_port,
        "iptables_args": args,
    }
    t = ansible_runner.run_async(
        private_data_dir=priv_data_dir,
        project_dir="ansible/project",
        playbook="iptables.yml",
        extravars=extra_vars,
        # Status events are forwarded asynchronously to the celery handler.
        status_handler=lambda s, **k: forward_rule_status_handler.delay(
            port_id, s, update_status),
        finished_callback=iptables_finished_handler(server, True),
    )
    # run_async returns (thread, runner); the runner holds the artifact dir.
    return t[1].config.artifact_dir
def gost_runner(
    port_id: int,
    server_id: int,
    port_num: int,
    gost_config: t.Dict,
    remote_ip: str = None,
    update_gost: bool = False,
    update_status: bool = False,
):
    """Write the per-port gost config file and launch the gost ansible playbook.

    Returns the ansible artifact dir of the async run.
    """
    # NOTE(review): this session is handed to get_server and never closed.
    server = get_server(SessionLocal(), server_id)
    priv_data_dir = prepare_priv_dir(server)
    # Persist the gost config where the ansible role expects to find it.
    with open(f"ansible/project/roles/gost/files/{port_id}.json", "w") as f:
        f.write(json.dumps(gost_config, indent=4))
    extra_vars = {
        "host": server.ansible_name,
        "port_id": port_id,
        "local_port": port_num,
        "remote_ip": remote_ip,
        "update_gost": update_gost,
        "update_status": update_status,
    }
    r = ansible_runner.run_async(
        private_data_dir=priv_data_dir,
        project_dir="ansible/project",
        playbook="gost.yml",
        extravars=extra_vars,
        # Status events are forwarded asynchronously to the celery handler.
        status_handler=lambda s, **k: gost_status_handler.delay(
            port_id, s, update_status
        ),
        # Only trigger the iptables finished handler when status updates matter.
        finished_callback=iptables_finished_handler(server, True)
        if update_status
        else lambda r: None,
    )
    # run_async returns (thread, runner); the runner holds the artifact dir.
    return r[1].config.artifact_dir
def test_farm():
    """Yield a throwaway farm record; always clean it up afterwards.

    Cleanup now runs in ``finally`` so it also happens when the consuming
    test raises (pytest throws the failure into the generator at ``yield``,
    which previously skipped the delete), and the session is closed instead
    of leaked.
    """
    db = SessionLocal()
    farm = get_test_farm_instance(db)
    try:
        yield farm
    finally:
        # Delete the test farm from the DB for cleanup.
        delete_test_farm_instance(db, farm.id)
        db.close()
def get_warehouses() -> Dict[str, Any]:
    """NOTE: this assumes Zillion Web DB is same as Zillion DB"""
    global warehouses
    # Serve the module-level cache when it has already been populated.
    if warehouses:
        # TODO: cache control?
        return warehouses
    print("Building warehouses...")
    session = SessionLocal()
    try:
        for row in session.query(Warehouses).all():
            warehouses[row.id] = Warehouse.load(row.id)
        pp(warehouses)
        return warehouses
    finally:
        session.close()
async def db_session_middleware(request: Request, call_next):
    """Attach a DB session to ``request.state`` for one request; always close it.

    The 500 response is a fallback returned only if ``call_next`` raises
    before producing a response. The session is now created *before* the
    ``try``: previously a failing ``SessionLocal()`` left ``request.state.db``
    unset and ``finally`` raised ``AttributeError``, masking the real error.
    """
    response = Response("Internal server error", status_code=500)
    request.state.db = SessionLocal()
    try:
        response = await call_next(request)
    finally:
        request.state.db.close()
    return response
def init() -> None:
    """Verify backend database connectivity with a trivial query."""
    db = None
    try:
        # Try to create session to check if DB is awake
        db = SessionLocal()
        db.execute("SELECT 1")
        logger.success("Backend Services Connected")
    except Exception as e:
        logger.error(e)
        raise  # bare raise keeps the original traceback ("raise e" rewrote it)
    finally:
        if db is not None:
            db.close()  # previously the session was leaked
def create_table():
    """(Re)create the ``video_caption`` table and add a JSONB expression index.

    Destructive: drops the table first. Fixes: the index DDL is now committed
    (Session.execute runs inside a transaction that was previously never
    committed) and the session is closed instead of leaked.
    """
    engine = create_engine(config.SQLALCHEMY_DATABASE_URI, )
    metadata = MetaData()
    video_caption = Table(
        "video_caption",
        metadata,
        Column("title", String, primary_key=True),
        Column("caption", JSONB),
    )
    print("Creating tables")
    metadata.drop_all(engine)
    metadata.create_all(engine)
    db = SessionLocal()
    try:
        # Expression index on the nested 'caption' key of the JSONB column.
        db.execute(
            "CREATE INDEX ON video_caption((video_caption.caption->'caption'));")
        db.commit()
    finally:
        db.close()
def get_db() -> Generator:
    """Yield a session; on teardown commit, rolling back (and logging) on failure.

    The session is created before the ``try`` so a failing ``SessionLocal()``
    cannot trigger ``NameError`` from the ``finally`` block.
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        try:
            db.commit()
        except exc.SQLAlchemyError:
            db.rollback()
            backend_logger.error("ERROR DB COMMIT", exc_info=True)
        db.close()