async def make_data():
    async with create_engine(get_db_url()) as e:
        async with e.acquire() as conn:
            # Build 10 chains of 10 posts each: every insert uses the id
            # of the previously inserted row as its parent_id.
            for i in range(10):
                parent_id = 0
                for j in range(10):
                    res = await conn.execute(
                        post_table.insert().values(parent_id=parent_id))
                    row = await res.first()
                    parent_id = row.id
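To run the generator as a one-off script, the coroutine can be driven from a plain entry point. A minimal sketch; the __main__ guard is not part of the original code:

import asyncio

if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(make_data())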
async def test_pubsub():
    async with create_pool(get_db_url()) as e:
        async with e.acquire() as listen_conn:
            # Run the listener coroutine and the notifying insert
            # concurrently on separate connections.
            listener = listen_helper(listen_conn)
            db = Database()
            await db.startup()
            await asyncio.gather(
                listener,
                db.insert_post(parent_id=290, content='testing notify'))
            print("listen/notify done!")
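listen_helper is not shown in this excerpt. A minimal sketch of what it might look like, assuming listen_conn is a raw aiopg connection (whose incoming notifications arrive on the connection's notifies queue) and a hypothetical channel name 'posts':

async def listen_helper(listen_conn):
    # Hypothetical: subscribe to a channel, then wait for one notification.
    async with listen_conn.cursor() as cur:
        await cur.execute('LISTEN posts')
    msg = await listen_conn.notifies.get()
    print('got notification:', msg.channel, msg.payload)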
def __init__(self, app, db_url=None, channel_prefix="pubsub_", loop=None):
    super().__init__(app, loop=loop)
    self.db_url = db_url or get_db_url()
    self.engine = None
    self.channel_prefix = channel_prefix
    # Subscribers per channel.
    self.subs = defaultdict(set)
    # Pending LISTEN/UNLISTEN requests, consumed by the listening task.
    self.to_listen = asyncio.Queue()
    self.to_unlisten = asyncio.Queue()
    # Dedicated connection used for LISTEN, guarded by the lock.
    self._conn = None
    self._lock = asyncio.Lock()
    self._futures = []
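A plausible consumer for those queues (a hypothetical sketch, not necessarily the class's actual method): a background task drains to_listen and issues LISTEN on the dedicated connection, serialized by _lock:

async def _listen_loop(self):
    # Hypothetical: drain the queue and LISTEN on the prefixed channel.
    while True:
        channel = await self.to_listen.get()
        async with self._lock:
            await self._conn.execute(
                'LISTEN "{}{}"'.format(self.channel_prefix, channel))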
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # NOTE: overridden so we can have database credentials in one place
    # connectable = engine_from_config(
    #     config.get_section(config.config_ini_section),
    #     prefix='sqlalchemy.',
    #     poolclass=pool.NullPool)
    connectable = create_engine(get_db_url())

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    # NOTE: overridden so we can have database credentials in one place
    # url = config.get_main_option("sqlalchemy.url")
    url = get_db_url()
    context.configure(url=url, target_metadata=target_metadata,
                      literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()
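For context, an Alembic env.py conventionally ends by dispatching between the two modes; this standard boilerplate is not shown in the excerpt above:

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()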
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-lnk', '--linkage_only',
                        required=False,
                        action='store_true',
                        help='Extract linkage data only')
    args = parser.parse_args()

    url = utils.get_db_url(db_host=DB_HOST, db_name=DB_NAME,
                           db_user=DB_USER, db_pass=DB_PASS)
    print("url: {}".format(url))
    conn = db.create_engine(url)

    if args.linkage_only:
        if not utils.ask_yes_no("Extract linkage data to: {}".format(OUT_LNK)):
            sys.exit('Got it.')
        df_linkage = read_linkage(conn)
        print("Writing [{}] lines to: {}".format(len(df_linkage), OUT_LNK))
        df_linkage.to_csv(OUT_LNK, sep=OUT_SEP, index=False)
        sys.exit()

    if not utils.ask_yes_no("Extract raw_patid_to_uuid maps for UFH and FLM?"):
        sys.exit('Got it.')

    print("Reading UFH...")
    df_ufh = read_ufh(conn)
    print("Writing [{}] lines to: {}".format(len(df_ufh), OUT_UFH))
    df_ufh.to_csv(OUT_UFH, sep=OUT_SEP, index=False)

    print("Reading FLM...")
    df_flm = read_flm(conn)
    print("Writing [{}] lines to: {}".format(len(df_flm), OUT_FLM))
    df_flm.to_csv(OUT_FLM, sep=OUT_SEP, index=False)
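utils.ask_yes_no is referenced above but not shown. A minimal sketch of such a helper; the project's actual version may differ:

def ask_yes_no(question):
    """Prompt until the user answers yes or no; return True for yes."""
    while True:
        answer = input("{} [y/n]: ".format(question)).strip().lower()
        if answer in ('y', 'yes'):
            return True
        if answer in ('n', 'no'):
            return False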
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-lnk', '--linkage_only',
                        required=False,
                        action='store_true',
                        help='Extract linkage data only')
    parser.add_argument(
        '-p', '--partners',
        required=False,
        help='the name of the partner(s) for which we extract the data.' +
        ' Valid partners: {}'.format(VALID_PARTNERS))
    parser.add_argument('-s', '--separator',
                        required=False,
                        default='\t',
                        help='record separator')
    args = parser.parse_args()

    url = utils.get_db_url(db_host=DB_HOST, db_name=DB_NAME,
                           db_user=DB_USER, db_pass=DB_PASS)
    print("Database connection url: {}".format(url))
    conn = db.create_engine(url)

    try:
        os.makedirs(OUT_DIR, exist_ok=True)
        print("Created output folder: {}".format(OUT_DIR))
    except Exception as exc:
        sys.exit("Unable to create folder [{}] due to: {}".format(OUT_DIR, exc))

    if args.linkage_only:
        out_file = os.path.join(OUT_DIR, OUT_LNK)
        if not utils.ask_yes_no(
                "Extract linkage data to: {} ?".format(out_file)):
            sys.exit('Got it.')
        df_linkage = read_linkage(conn)
        print("Writing [{}] lines to: {} using separator [{}]".format(
            len(df_linkage), out_file, args.separator))
        df_linkage.to_csv(out_file, sep=args.separator, index=False)
        sys.exit()

    if args.partners is not None and 'None' != args.partners:
        partners = args.partners.split(",")
    else:
        partners = []

    if not partners:
        parser.print_help(sys.stderr)
        sys.exit(1)

    if not utils.ask_yes_no(
            "Extract raw_patid_to_uuid map for: {}?".format(partners)):
        sys.exit('Got it.')

    for partner in partners:
        if partner not in VALID_PARTNERS:
            print("Skip invalid partner: {}".format(partner))
            continue
        print("{}: Reading RAW_PATID -> UUID mapping...".format(partner))
        df = read_partner(conn, partner)
        out_file = os.path.join(OUT_DIR, "{}{}".format(partner, OUT_SUFFIX))
        print(
            "{}: Writing [{}] output lines to {} using separator [{}]".format(
                partner, len(df), out_file, args.separator))
        df.to_csv(out_file, sep=args.separator, index=False)

    print("All done!")
async def test_subtree():
    async with create_engine(get_db_url()) as e:
        async with e.acquire() as conn:
            res = await conn.execute(
                select(['*']).select_from(func.subtree(0, 5)))
            pprint([dict(row) async for row in res])
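func.subtree(0, 5) calls a server-side function that is not defined in this excerpt. A hypothetical definition, assuming it selects descendants of a root post up to a maximum depth via a recursive CTE over a table named post; the real DDL presumably lives in a migration and may differ:

SUBTREE_DDL = """
CREATE OR REPLACE FUNCTION subtree(root_id integer, max_depth integer)
RETURNS SETOF post AS $$
    WITH RECURSIVE tree(id, depth) AS (
        SELECT p.id, 1 FROM post p WHERE p.parent_id = root_id
        UNION ALL
        SELECT p.id, t.depth + 1
        FROM post p JOIN tree t ON p.parent_id = t.id
        WHERE t.depth < max_depth
    )
    SELECT p.* FROM post p JOIN tree t ON p.id = t.id
$$ LANGUAGE sql STABLE;
"""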
def __init__(self, app, db_url=None, loop=None):
    super().__init__(app, loop=loop)
    self.db_url = db_url or get_db_url()
    self.engine = None
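Since self.engine starts as None, the engine is presumably created later in the component's lifecycle. A minimal sketch of such startup/shutdown hooks, assuming aiopg.sa; the method names are hypothetical:

import aiopg.sa

async def startup(self):
    # Create the engine lazily, once the event loop is running.
    self.engine = await aiopg.sa.create_engine(self.db_url)

async def shutdown(self):
    # Close the pool and wait for in-flight connections to be released.
    if self.engine is not None:
        self.engine.close()
        await self.engine.wait_closed()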