def __init__(self,
             database_url: str = None,
             replica_database_url=None,
             sentry_dsn: str = None,
             auth_token_verify_user_callback=None,
             cors=True,
             service_layer: typing.Dict[str, typing.Any] = None,
             **kwargs):
    self.database = None
    self.sentry_dsn = sentry_dsn
    if database_url:
        import databases

        self.database = databases.Database(database_url)
    self.replica_database = None
    if replica_database_url:
        self.replica_database = databases.Database(str(replica_database_url))
    self.is_serverless = kwargs.pop("serverless", False)
    self.model_initializer = kwargs.pop("model_initializer", None)
    additional_middlewares = kwargs.pop("middleware", []) or []
    middlewares = self.populate_middlewares(
        auth_token_verify_user_callback,
        cors=cors,
        debug=kwargs.get("debug") or False,
    )
    middlewares.extend(additional_middlewares)
    exception_handlers = kwargs.pop("exception_handlers", {})
    exception_handlers = {403: not_authorized, **exception_handlers}
    self.redis = None
    routes = kwargs.pop("routes", [])
    on_startup = kwargs.pop("on_startup", [])
    on_shutdown = kwargs.pop("on_shutdown", [])
    on_startup.append(self.startup)
    on_shutdown.append(self.shutdown)
    if service_layer:
        additional_routes = [
            self.build_view(key, **value)
            for key, value in service_layer.items()
        ]
        if self.is_serverless:
            additional_routes.extend([
                Route(x.path, serverless_function(x.endpoint), methods=x.methods)
                for x in routes
            ])
        routes = additional_routes
        # routes.extend(additional_routes)
    super().__init__(routes=routes,
                     middleware=middlewares,
                     exception_handlers=exception_handlers,
                     on_startup=on_startup,
                     on_shutdown=on_shutdown,
                     **kwargs)
def init_db(host, db_name, user, password, table_name):
    database_url = f"mysql://{user}:{password}@{host}/{db_name}"
    # The original unconditionally overwrote the MySQL URL with a local
    # SQLite file, making host/user/password dead arguments; keep the
    # override commented out for local testing instead.
    # database_url = f'sqlite:///{db_name}'
    # logger.debug(f"DB: {database_url}")
    database = databases.Database(database_url)
    metadata = sqlalchemy.MetaData()
    service_table = sqlalchemy.Table(
        table_name,
        metadata,
        sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
        sqlalchemy.Column("name", sqlalchemy.String(32)),
        sqlalchemy.Column("stage", sqlalchemy.String(10)),
        sqlalchemy.Column("host", sqlalchemy.String(32)),
        sqlalchemy.Column("port", sqlalchemy.Integer),
        sqlalchemy.Column("active", sqlalchemy.Boolean),
        UniqueConstraint('host', 'port', name='host_port'),
        UniqueConstraint('name', 'stage', name='name_stage'),
    )
    mapper(Service, service_table)
    engine = sqlalchemy.create_engine(database_url)
    metadata.create_all(engine)
    Session = sessionmaker(bind=engine)
    session_db = scoped_session(Session)
    # set_utf8_for_tables(session_db)
    return session_db, service_table, engine, database
async def insert_records(vals):
    async with databases.Database(
            'postgresql://*****:*****@localhost:5432/omicidx') as db:
        # The raw SQL was immediately replaced by the equivalent SQLAlchemy
        # Core insert; keep it commented for reference.
        # stmt = 'insert into geo_jsonb (accession, doc) values (:accession, :doc)'
        stmt = geo_jsonb.insert()
        res = await db.execute_many(stmt, vals)
def get_app(engine):
    create_test_database(engine)
    DATABASE_URL = "sqlite:///test.db"
    database = databases.Database(DATABASE_URL)
    metadata = sqlalchemy.MetaData(bind=engine)
    metadata.reflect()
    notes_table = metadata.tables["notes"]

    async def list_notes(request):
        if not engine:
            raise RuntimeError("Server error")
        query = "SELECT * FROM NOTES"
        with engine.connect() as connection:
            result = connection.execute(query)
            # Build one dict per row; the original merged every row into a
            # shared dict, carrying stale keys between iterations.
            rows = [dict(rowproxy.items()) for rowproxy in result]
        return PlainTextResponse(str(rows))

    async def add_note(request):
        if not engine:
            raise RuntimeError("Server error")
        request_json = await request.json()
        with engine.connect() as connection:
            with connection.begin():
                connection.execute(notes_table.insert(), request_json)
        return PlainTextResponse("Success")

    routes = [
        Route("/", endpoint=homepage, name="homepage", methods=["GET"]),
        Route("/200", endpoint=success, name="200", methods=["GET"]),
        Route("/201", endpoint=create, name="201", methods=["POST"]),
        Route("/500", endpoint=error, name="500", methods=["GET"]),
        Route("/stream", endpoint=stream, name="stream", methods=["GET"]),
        Route("/file", endpoint=file, name="file", methods=["GET"]),
        Route("/users/{userid:int}", endpoint=success, name="path_params", methods=["GET"]),
        Route("/users/{userid:int}/info", endpoint=success, name="multi_path_params", methods=["GET"]),
        Route("/users/{userid:int}/{attribute:str}", endpoint=success, name="multi_path_params", methods=["GET"]),
        Route("/notes", endpoint=list_notes, methods=["GET"]),
        Route("/notes", endpoint=add_note, methods=["POST"]),
    ]
    app = Starlette(routes=routes,
                    on_startup=[database.connect],
                    on_shutdown=[database.disconnect])
    return app
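# A smoke-test sketch for get_app above, assuming the referenced endpoints
# (homepage, success, create, etc.) and create_test_database are defined
# elsewhere in the module; TestClient ships with Starlette, while the note
# payload is illustrative only.
import sqlalchemy
from starlette.testclient import TestClient

engine = sqlalchemy.create_engine("sqlite:///test.db")
client = TestClient(get_app(engine))
client.post("/notes", json={"text": "buy milk", "completed": False})
print(client.get("/notes").text)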
def gethosts():
    # NOTE: "databases" here appears to be a project-local DB helper
    # (connectdb/executedb/closedb are not part of the `databases` package
    # on PyPI).
    database = databases.Database('zhihu_info')
    database.connectdb()
    sql = 'SELECT * FROM ip_proxys WHERE status = 1 ORDER BY RAND() LIMIT 10;'
    database.executedb(sql)
    hosts = []
    host_infos = database.value
    if host_infos:
        for item in host_infos:
            id = item[0]
            protocol = item[1]
            ip = item[2]
            port = item[3]
            status = item[4]
            if ipm.is_validity(protocol, ip, port):
                hosts.append({protocol: f"{protocol}://{ip}:{port}"})
            else:
                # Mark dead proxies as inactive.
                sql = "UPDATE ip_proxys SET status = 0 WHERE id = %s;" % (id,)
                database.executedb(sql)
    else:
        print("Failed to fetch proxy IPs...")
    database.closedb()
    if len(hosts) > 1:
        return hosts
    else:
        # The original recursed without returning, so callers got None;
        # propagate the retry's result.
        return gethosts()
async def search_downloads(resource_name, discord_user_id=None):
    database = databases.Database(global_config.databases.tf2maps_site)
    await database.connect()

    results = []
    if discord_user_id:
        query = "SELECT user_id FROM xf_user_connected_account WHERE provider = :field_id AND provider_key = :field_value"
        values = {"field_id": "th_cap_discord", "field_value": discord_user_id}
        result = await database.fetch_one(query=query, values=values)
        if not result:
            raise ForumUserNotFoundException
        forum_user_id = result[0]

        query = 'SELECT title,resource_id from xf_rm_resource where user_id=:field_user_id AND title LIKE :field_title'
        values = {"field_user_id": forum_user_id, "field_title": f"%{resource_name}%"}
        results = await database.fetch_all(query=query, values=values)
    else:
        query = 'SELECT title,resource_id from xf_rm_resource where title LIKE :field_title ORDER BY resource_id DESC'
        values = {"field_title": f"%{resource_name}%"}
        results = await database.fetch_all(query=query, values=values)

    links = []
    for name, map_id in results:
        # "[A-z]" also matches the ASCII punctuation between "Z" and "a";
        # use an explicit character class instead.
        name = re.sub("[^A-Za-z0-9_]", "-", name)
        name = re.sub("-+$", "", name)
        name = re.sub("-{2,}", "-", name)
        name = name.lower()
        links.append(f"https://tf2maps.net/downloads/{name}.{map_id}/")
    return links
def __new__(cls, *args, **kwargs):
    # Lazily create the single shared instance; later calls reuse it and
    # ignore any new database_url.
    if not cls.__instance:
        cls.__instance = super().__new__(cls)
        cls.database_url = kwargs['database_url']
        cls.database = databases.Database(cls.database_url)
    return cls.__instance
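# A usage sketch for the singleton __new__ above, assuming it lives on a
# hypothetical class named Connection: the first call creates and caches
# the instance, and later calls return it unchanged, ignoring any new
# database_url.
conn_a = Connection(database_url="sqlite:///app.db")
conn_b = Connection(database_url="postgresql://other-host/ignored")
assert conn_a is conn_b
assert str(conn_a.database.url) == "sqlite:///app.db"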
def __init__(self, database_url):
    # "db" is assumed to be the databases package (import databases as db);
    # min_size/max_size bound the connection pool.
    self.conn = db.Database(database_url, min_size=5, max_size=20)
    self.config_cache = {}
    self.infractions_cache = {}
    self.last_infraction_id_cache = {}
    self.role_persist_cache = {}
    self.user_history_cache = {}
async def upgrade(self, ctx):
    database = databases.Database(global_config.databases.tf2maps_site)
    await database.connect()

    query = "SELECT user_id FROM xf_user_connected_account WHERE provider_key = :field_value"
    values = {"field_value": ctx.author.id}
    result = await database.fetch_one(query=query, values=values)
    if not result:
        await ctx.reply(
            f"{error} You don't seem to have a Discord User ID # set in your TF2Maps.net profile.\nSee here on how to get started: http://bot.tf2maps.net/faq.php"
        )
        return

    query = "SELECT secondary_group_ids FROM xf_user WHERE user_id = :user_id AND find_in_set(:vip_gid, secondary_group_ids)"
    values = {"user_id": result[0], "vip_gid": 19}
    result = await database.fetch_one(query=query, values=values)
    if not result:
        await ctx.reply(
            f"{error} You must be a VIP user on TF2Maps.net for the discord VIP Role."
        )
        return

    vip_role = discord.utils.get(ctx.guild.roles, name="VIP")
    if vip_role in ctx.author.roles:
        await ctx.send(f"{warning} You're already a VIP, go away.")
        return

    await ctx.author.add_roles(vip_role)
    await ctx.reply(f"{info} You are now a :star2: TF2Maps Discord VIP :star2:")
async def update_metadata(db_string, generation, update_dict):
    db = databases.Database(db_string)
    await db.connect()
    md_table = get_table(db_string, 'metadata')
    async with db.transaction():
        # Lock the row for this generation while we merge in the update.
        md = await db.fetch_one(
            md_table.select(md_table.c.generation == generation, for_update=True))
        if md is None:
            await db.disconnect()
            return False
        metadata = md['data']

        # Recursively merge update_dict into the stored metadata; see
        # https://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
        import collections.abc

        def update(d, u):
            for k, v in u.items():
                if isinstance(v, collections.abc.Mapping):
                    d[k] = update(d.get(k, {}), v)
                else:
                    d[k] = v
            return d

        metadata = update(metadata, update_dict)
        await db.execute(
            md_table.update().where(md_table.c.generation == generation).values(data=metadata))
    await db.disconnect()
    return True
async def get_db():
    db = databases.Database(uri)
    await db.connect()
    try:
        yield db
    finally:
        await db.disconnect()
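# get_db above is shaped like a FastAPI dependency: an async generator that
# yields a connected database and always disconnects afterwards. A minimal
# wiring sketch; the route and the "items" table are illustrative
# assumptions:
import databases
from fastapi import Depends, FastAPI

app = FastAPI()

@app.get("/items/count")
async def count_items(db: databases.Database = Depends(get_db)):
    # fetch_val returns a single scalar from the first row.
    return {"count": await db.fetch_val("SELECT COUNT(*) FROM items")}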
async def dump_data(data, db_string, table):
    db = databases.Database(db_string)
    await db.connect()
    query = table.insert()
    async with db.transaction():
        await db.execute_many(query, data)
    await db.disconnect()
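# A usage sketch for dump_data, assuming the target table already exists in
# the database and matches this SQLAlchemy Core definition (table name,
# columns, and rows are illustrative):
import asyncio
import sqlalchemy

metadata = sqlalchemy.MetaData()
events = sqlalchemy.Table(
    "events", metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
    sqlalchemy.Column("name", sqlalchemy.String(64)),
)

rows = [{"id": 1, "name": "start"}, {"id": 2, "name": "stop"}]
asyncio.run(dump_data(rows, "sqlite:///events.db", events))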
async def db_conn() -> AsyncGenerator[databases.Database, None]:
    """Async db connection."""
    db = databases.Database(TEST_DB)
    await db.connect()
    yield db
    if db.is_connected:
        await db.disconnect()
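# db_conn above reads like a pytest-asyncio fixture (once decorated with
# @pytest.fixture); a sketch of a test consuming it:
import pytest

@pytest.mark.asyncio
async def test_select_one(db_conn):
    # fetch_val returns the first column of the first row.
    assert await db_conn.fetch_val("SELECT 1") == 1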
async def dump_from_db_to_json(database='science', output_dir='./temp/'):
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    from database import host, user, port, password

    DATABASE_URL = (
        f'mysql://{user}{f":{password}" if password else ""}'
        f'@{host}{f":{port}" if port else ""}/{database}'
    )
    database_ = databases.Database(DATABASE_URL)
    await database_.connect()
    try:
        tables = [
            t[f'Tables_in_{database}']
            for t in await database_.fetch_all('SHOW TABLES')
        ]
        for table in tables:
            res = await database_.fetch_all(f'SELECT * FROM {table}')
            # Serialise datetimes as strings so the rows are JSON-safe.
            data = [{
                k: v if not isinstance(v, datetime) else v.strftime('%Y-%m-%d %H:%M:%S')
                for k, v in d.items()
            } for d in res]
            with open(os.path.join(output_dir, f'{table}.json'), 'w', encoding='utf-8') as f:
                json.dump(data, f, ensure_ascii=False, indent='\t')
    finally:
        await database_.disconnect()
def __init__(self, database_url: str):
    self._connection = databases.Database(database_url)
    metadata = sqlalchemy.MetaData()
    real_property_table = sqlalchemy.Table(
        "properties",
        metadata,
        sqlalchemy.Column("id", sqlalchemy.String, primary_key=True),
        sqlalchemy.Column("geocode_geo",
                          Geography(geometry_type='POINT', srid=4326),
                          nullable=True),
        sqlalchemy.Column("parcel_geo",
                          Geography(geometry_type='POLYGON', srid=4326),
                          nullable=True),
        sqlalchemy.Column("building_geo",
                          Geography(geometry_type='POLYGON', srid=4326),
                          nullable=True),
        sqlalchemy.Column("image_bounds",
                          postgresql.ARRAY(postgresql.DOUBLE_PRECISION),
                          nullable=True),
        sqlalchemy.Column("image_url", sqlalchemy.String, nullable=True),
    )
    self._real_property_queries = RealPropertyQueries(
        self._connection, real_property_table)
    self._real_property_commands = RealPropertyCommands(
        self._connection, real_property_table)
def __init__(
    self,
    sqlite_db_path: Path,
) -> None:
    self._sqlite_db_path = sqlite_db_path
    self._database_url = f'sqlite:///{sqlite_db_path}'
    self.db = databases.Database(self._database_url)
async def connect_to_postgres():
    logging.info("Connecting to database")
    db.database = databases.Database(DATABASE_URL,
                                     min_size=MIN_CONNECTIONS_COUNT,
                                     max_size=MAX_CONNECTIONS_COUNT)
    await db.database.connect()
    logging.info("Connected to database")
async def create_test_user():
    import databases

    db = databases.Database(CONFIG.DATABASES.DEFAULT)
    await db.connect()

    from fastapi_contrib.auth import crud, schemas

    user = schemas.UserCreate(email='*****@*****.**', password='******')
    await crud.user.create(db, obj_in=user)
    await db.disconnect()
def setup(self, db_url: str):
    self.db_url = db_url
    self.engine = sqlalchemy.create_engine(
        db_url, connect_args={"check_same_thread": False})

    # Create the DB schema, if necessary
    Base.metadata.create_all(self.engine)

    # Setup databases
    self.database = databases.Database(db_url)
async def __aenter__(self):
    import traceback
    try:
        # databases expects a plain scheme (e.g. mysql://), so strip the
        # SQLAlchemy-style "+pymysql" driver suffix from the URL.
        db = databases.Database(DB_URL.replace('+pymysql', ''))
        await db.connect()
    except Exception:
        # The original swallowed the error and then hit a NameError on the
        # unbound "db"; log and re-raise instead.
        traceback.print_exc()
        raise
    self.db = db
    return db
def __init__(self, url: str, drop_all: bool = False):
    self.metadata = sqlalchemy.MetaData()
    self.database = databases.Database(url)
    self.engine = sqlalchemy.create_engine(url)
    # SQLite-specific: enable write-ahead logging for better concurrency.
    self.engine.execute("PRAGMA journal_mode=WAL")  # auto_vacuum=FULL
    self.init_table_model()
    if drop_all:
        self.metadata.drop_all(self.engine, checkfirst=True)
    self.metadata.create_all(self.engine, checkfirst=True)
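# A note on the WAL pragma above: journal_mode=WAL persists in the SQLite
# file itself, but most other pragmas (foreign_keys, for example) are
# per-connection, so the usual pattern is a connect-time event listener that
# runs on every pooled connection. A sketch of that alternative, with an
# illustrative URL:
import sqlalchemy

engine = sqlalchemy.create_engine("sqlite:///app.db")

@sqlalchemy.event.listens_for(engine, "connect")
def _set_sqlite_pragmas(dbapi_connection, connection_record):
    cursor = dbapi_connection.cursor()
    cursor.execute("PRAGMA journal_mode=WAL")
    cursor.execute("PRAGMA foreign_keys=ON")
    cursor.close()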
def __init__(self):
    self.database = databases.Database(self.dburl)
    self.engine = sqlalchemy.create_engine(self.dburl)

    # Model config (orm-style Meta) bound to this instance's database and
    # metadata.
    class Meta:
        database = self.database
        metadata = self.metadata

    self.meta = Meta
def main():
    urls = [
        'https://www.kuaidaili.com/free/inha/',
        'http://www.xicidaili.com/nn/'
    ]
    # kuaidlSoup(urls[0], 1, 1)
    database = databases.Database('zhihu_info')
    database.connectdb()
    xicidlSoup(urls[1], 1, 1, database)
    database.closedb()
async def connect_db(app: FastAPI) -> None:
    """Connect pg."""
    database = databases.Database(
        conf.postgres.POSTGRES_URI,
        min_size=conf.postgres.POSTGRES_POOL_MIN,
        max_size=conf.postgres.POSTGRES_POOL_MAX,
    )
    await database.connect()
    app.state.db = database
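# connect_db above receives the app explicitly, so it can be attached as a
# startup hook with functools.partial; a sketch assuming the same conf
# object is importable:
from functools import partial

from fastapi import FastAPI

app = FastAPI()
app.add_event_handler("startup", partial(connect_db, app))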
def __init__(self) -> None:
    if DB.__instance is None:
        LOGGER.info("Creating Database instance")
        self.database = databases.Database(os.environ.get("DB_URL"))
        DB.__instance = self
    else:
        LOGGER.error("Explicit call to Database constructor")
        raise Exception("Singleton class. Use get_instance method")
async def _write_all_json_to_database():
    DATABASE_URL = 'mysql://root@localhost/science3'
    database = databases.Database(DATABASE_URL)
    engine = sqlalchemy.create_engine(DATABASE_URL)
    metadata.create_all(engine)
    await database.connect()

    with open('data/topic.json', 'r', encoding='utf-8') as f:
        topic_data = json.load(f)
    await database.execute_many(topics.insert(), topic_data)

    # Map each topic name to its (possibly multiple) topic ids.
    topicname2id = {}
    for t in topic_data:
        if t['topic'] not in topicname2id:
            topicname2id[t['topic']] = [t['id']]
        else:
            topicname2id[t['topic']].append(t['id'])

    with open('data/strategy.json', 'r', encoding='utf-8') as f:
        strategy_data = json.load(f)
    await database.execute_many(strategies.insert(), strategy_data)

    temp = set()
    with open('data/news.json', 'r', encoding='utf-8') as f:
        # news.json is one JSON document per line.
        line = f.readline().strip()
        while line:
            data = json.loads(line)
            for topic_id in topicname2id[data['topic']]:
                await database.execute(
                    news.insert(), {
                        'topic_id': topic_id,
                        'publish_time': datetime.strptime(data['publish_time'], '%Y-%m-%d %H:%M:%S'),
                        'title': data['title'],
                        'url': data['url'],
                        'site': data['site'],
                        'source': data['source'],
                        'content': data['content'],
                        'strategy_id': json.loads(data['strategy_id']),
                        'predict_strategy_id': [],
                    })
            for sid in json.loads(data['strategy_id']):
                temp.add(sid)
            line = f.readline().strip()

    await database.disconnect()
def _setup_database():
    if database_exists(DATABASE_URL):
        drop_database(DATABASE_URL)
    create_database(DATABASE_URL)
    database = databases.Database(DATABASE_URL)
    engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False})
    return engine, database
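# _setup_database drops and recreates DATABASE_URL on every call, which
# makes it test-only; a sketch of wrapping it as a pytest fixture (the
# fixture name is an assumption):
import pytest

@pytest.fixture
def engine_and_database():
    engine, database = _setup_database()
    yield engine, database
    engine.dispose()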
def __init__(self):
    """Initialize."""
    config = Config('./.env')
    self.debug = config('DEBUG', cast=bool, default=False)
    self.database = databases.Database(
        config('DATABASE_URL', cast=databases.DatabaseURL))
    self.image_path = config('IMAGE_PATH', cast=pathlib.Path)
    self.emulate = config('EMULATE', cast=str, default=None)
    self.cache = aiocache.Cache(aiocache.Cache.MEMORY)
    self.metadata = {}
async def connect_to_sdb():
    try:
        db = databases.Database(sdb_conn, **(sdb_options or {}))
        await db.connect()
    except Exception:
        self.log.exception("Failed to connect to the state DB at %s", sdb_conn)
        raise GracefulExit() from None
    self.log.info("Connected to the server state DB on %s", sdb_conn)
    self.sdb = db
def _set_database(cls, *, metadata: MetaData, models: List[ModelMetaclass],
                  app_name: str, environment: Environment):
    repository_directory = cls._get_repository_directory(app_name=app_name,
                                                         environment=environment)
    database_file = os.path.join(repository_directory, cls._DATABASE_FILE)
    database = databases.Database("sqlite:///" + database_file)
    for model in models:
        model.__database__ = database
    engine = sqlalchemy.create_engine(str(database.url),
                                      connect_args={'timeout': 6000000})
    metadata.create_all(engine, checkfirst=True)