def last_block(ctx):
    """Return the highest block stored in the database"""
    engine = ctx.obj['engine']
    database_url = ctx.obj['database_url']
    metadata = ctx.obj['metadata']

    # Make sure the schema exists before querying.
    init_tables(database_url, metadata)

    # Bind the session factory to this engine and open a session.
    Session.configure(bind=engine)
    db_session = Session()

    click.echo(Block.highest_block(db_session))
def insert_blocks(ctx, blocks):
    """Insert blocks into the database"""
    engine = ctx.obj['engine']
    database_url = ctx.obj['database_url']
    metadata = ctx.obj['metadata']

    # Make sure the schema exists before inserting.
    init_tables(database_url, metadata)

    # Bind the session factory to this engine and open a session.
    Session.configure(bind=engine)
    db_session = Session()

    # Plain row-at-a-time inserts; no merge or bulk paths.
    add_blocks(
        blocks,
        db_session,
        insert=True,
        merge_insert=False,
        insert_many=False)
def task_find_missing_block_nums(database_url, last_chain_block, task_num=4):
    """Collect the block numbers missing from the db up to *last_chain_block*."""
    click.echo(
        fmt_task_message(
            'Finding blocks missing from db',
            emoji_code_point=u'\U0001F52D',
            task_num=task_num))
    with isolated_engine(database_url) as engine:
        db_session = Session(bind=engine)
        # Each item yielded is a callable producing one chunk of block nums.
        chunk_iter = Block.get_missing_block_num_iterator(
            db_session, last_chain_block, chunksize=1000000)
        missing = []
        with click.progressbar(
                chunk_iter,
                label='Finding missing block_nums',
                **progress_bar_kwargs) as bar:
            for chunk_fn in bar:
                missing.extend(chunk_fn())
        click.echo(
            fmt_success_message('found %s missing blocks', len(missing)))
        engine.dispose()
        return missing
def find_missing_blocks(ctx, url):
    """Return JSON array of block_nums from missing blocks"""
    from sbds.storages.db.tables import Block
    engine = ctx.obj['engine']
    database_url = ctx.obj['database_url']
    metadata = ctx.obj['metadata']
    rpc = SimpleSteemAPIClient(url)

    # Make sure the schema exists before querying.
    init_tables(database_url, metadata)

    # Bind the session factory to this engine and open a session.
    Session.configure(bind=engine)
    db_session = Session()

    last_chain_block = rpc.last_irreversible_block_num()
    missing = Block.find_missing(db_session, last_chain_block=last_chain_block)
    click.echo(json.dumps(missing))
def block_adder_process_worker(database_url, rpc_url, block_nums, max_threads=5):
    """Fetch the given block numbers over RPC and bulk-insert them in 1000-block chunks."""
    with isolated_engine(database_url) as engine:
        db_session = Session(bind=engine)
        fetched = block_fetcher_thread_worker(
            rpc_url, block_nums, max_threads=max_threads)
        for chunk in chunkify(fetched, 1000):
            # pylint: disable=unused-variable
            # we could do something here with results, like retry failures
            results = bulk_add(chunk, db_session)
def get_db_plugin(database_url):
    """Build a bottle-sqlalchemy plugin bound to *database_url*."""
    engine_config = configure_engine(database_url)
    Session.configure(bind=engine_config.engine)

    # pylint: disable=undefined-variable
    plugin = sqlalchemy.Plugin(
        # SQLAlchemy engine created with create_engine function.
        engine_config.engine,
        # SQLAlchemy metadata, required only if create=True.
        Base.metadata,
        # Keyword used to inject session database in a route (default 'db').
        keyword='db',
        # If it is true, execute `metadata.create_all(engine)` when plugin is applied (default False).
        create=True,
        # If it is true, plugin commit changes after route is executed (default True).
        commit=False,
        # If it is true and keyword is not defined, plugin uses **kwargs argument to inject session database (default False).
        use_kwargs=False,
        create_session=Session)
    return plugin
def task_stream_blocks(database_url, steemd_http_url, task_num=6):
    """Stream new blocks from steemd and append them to the database.

    Resumes streaming from the highest block already stored. When an
    insert fails, the pending blocks are kept in ``blocks_to_add`` and
    retried together with the next streamed block; on success the
    pending list is cleared.

    :param database_url: SQLAlchemy database URL
    :param steemd_http_url: steemd HTTP RPC endpoint
    :param task_num: task index used in the progress message
    """
    task_message = fmt_task_message(
        'Streaming blocks', emoji_code_point=u'\U0001F4DD', task_num=task_num)
    click.echo(task_message)
    with isolated_engine(database_url, pool_recycle=3600) as engine:
        session = Session(bind=engine)
        highest_db_block = Block.highest_block(session)
        rpc = SimpleSteemAPIClient(steemd_http_url)
        blocks = rpc.stream(highest_db_block)
        blocks_to_add = []
        for block in blocks:
            blocks_to_add.append(block)
            # keep the try body to the one call that can fail
            try:
                add_blocks(blocks_to_add, session, insert=True)
            except Exception:
                # logger.exception already records the traceback; the
                # pending blocks stay queued for retry on the next block
                logger.exception('failed to add block')
            else:
                blocks_to_add = []
def bulk_add_blocks(ctx, blocks, chunksize):
    """Insert many blocks in the database

    Blocks are inserted in chunks of *chunksize* via ``bulk_add``.
    Any exception propagates to the caller; the session is closed
    either way.
    """
    engine = ctx.obj['engine']
    database_url = ctx.obj['database_url']
    metadata = ctx.obj['metadata']

    # init tables first
    init_tables(database_url, metadata)

    # configure session
    Session.configure(bind=engine)
    session = Session()

    # Raise InnoDB's lock wait timeout so large bulk inserts are not
    # aborted by the default limit.
    click.echo("SQL: 'SET SESSION innodb_lock_wait_timeout=150'", err=True)
    session.execute('SET SESSION innodb_lock_wait_timeout=150')
    try:
        for chunk in chunkify(blocks, chunksize):
            bulk_add(chunk, session)
    finally:
        # NOTE(review): Session.close_all() is deprecated in modern
        # SQLAlchemy; session.close() is likely sufficient here — confirm
        session.close_all()
# ORM table mappers, one per supported transaction/operation type.
from sbds.storages.db.tables import TxEscrowApprove
from sbds.storages.db.tables import TxEscrowDispute
from sbds.storages.db.tables import TxEscrowRelease
from sbds.storages.db.tables import TxEscrowTransfer
from sbds.storages.db.tables import TxFeedPublish
from sbds.storages.db.tables import TxLimitOrderCancel
from sbds.storages.db.tables import TxLimitOrderCreate
from sbds.storages.db.tables import TxPow
from sbds.storages.db.tables import TxPow2
from sbds.storages.db.tables import TxRecoverAccount
from sbds.storages.db.tables import TxRequestAccountRecovery
from sbds.storages.db.tables import TxWithdrawVestingRoute
from sbds.storages.db.tables import TxTransfer
from sbds.storages.db.tables import TxTransferFromSavings
from sbds.storages.db.tables import TxTransferToSavings
from sbds.storages.db.tables import TxTransferToVesting
from sbds.storages.db.tables import TxVote
from sbds.storages.db.tables import TxWithdrawVesting
from sbds.storages.db.tables import TxWitnessUpdate
from sbds.storages.db.utils import configure_engine
from sbds.http_client import SimpleSteemAPIClient

# Module-level setup: connection targets come from the environment.
# KeyError is raised here if either variable is unset.
db_url = os.environ['DATABASE_URL']
rpc_url = os.environ['STEEMD_HTTP_URL']

# Build an engine from the database URL and bind a session to it.
engine_config = configure_engine(db_url)
engine = engine_config.engine
session = Session(bind=engine)

# HTTP RPC client for the steemd endpoint.
client = SimpleSteemAPIClient(url=rpc_url)
def sqlitedb_session(sqlite_db_url=None):
    """Return a Session bound to a SQLite engine (in-memory when no URL is given)."""
    url = sqlite_db_url if sqlite_db_url else 'sqlite://'
    config = configure_engine(url)
    return Session(bind=config.engine)