Exemplo n.º 1
0
def db(ctx, database_url, echo):
    """Interact with an SQL storage backend
        Typical usage would be reading blocks in JSON format from STDIN
        and then storing those blocks in the database:

        \b
            sbds | db insert-blocks

        In the example above, the "sbds" command streams new blocks to STDOUT, which are piped to STDIN of
        the "db insert-blocks" command by default. The "database_url" was read from the "DATABASE_URL"
        ENV var, though it may optionally be provided on the command line:

        \b
        db --database_url 'dialect[+driver]://user:password@host/dbname[?key=value..]' tests

    """
    # Build the engine configuration once and stash everything subcommands
    # might need on the click context object.
    engine_config = configure_engine(database_url, echo=echo)

    ctx.obj = {
        'database_url': engine_config.database_url,
        'url': engine_config.url,
        'engine_kwargs': engine_config.engine_kwargs,
        'engine': engine_config.engine,
        'base': Base,
        'metadata': Base.metadata,
        'Session': Session,
    }
Exemplo n.º 2
0
def block_adder_process_worker(database_url,
                               rpc_url,
                               block_nums,
                               max_threads=5):
    """Fetch ``block_nums`` from ``rpc_url`` and bulk-insert them into the db.

    Blocks are fetched with a thread pool, then inserted in chunks of 1000.
    Any exception is logged rather than propagated; the session registry is
    always closed on exit so the worker process leaves no open connections.
    """
    try:
        config = configure_engine(database_url)
        db_session = Session(bind=config.engine)
        fetched = block_fetcher_thread_worker(
            rpc_url, block_nums, max_threads=max_threads)
        for chunk in chunkify(fetched, 1000):
            # pylint: disable=unused-variable
            # results are currently ignored; they could drive failure retries
            results = bulk_add(chunk, db_session)

    except Exception as e:
        logger.exception(e)
    finally:
        Session.close_all()
Exemplo n.º 3
0
def get_db_plugin(database_url):
    """Bind the shared Session to ``database_url`` and return a bottle
    sqlalchemy plugin configured for it."""
    engine_config = configure_engine(database_url)
    Session.configure(bind=engine_config.engine)

    # pylint: disable=undefined-variable
    return sqlalchemy.Plugin(
        engine_config.engine,    # engine created by configure_engine above
        Base.metadata,           # required because create=True below
        keyword='db',            # inject the session as the `db` route arg
        create=True,             # run metadata.create_all(engine) on install
        commit=False,            # routes manage their own commits
        use_kwargs=False,        # keyword injection only, no **kwargs
        create_session=Session)
Exemplo n.º 4
0
from sbds.storages.db.tables import TxEscrowApprove
from sbds.storages.db.tables import TxEscrowDispute
from sbds.storages.db.tables import TxEscrowRelease
from sbds.storages.db.tables import TxEscrowTransfer
from sbds.storages.db.tables import TxFeedPublish
from sbds.storages.db.tables import TxLimitOrderCancel
from sbds.storages.db.tables import TxLimitOrderCreate
from sbds.storages.db.tables import TxPow
from sbds.storages.db.tables import TxPow2
from sbds.storages.db.tables import TxRecoverAccount
from sbds.storages.db.tables import TxRequestAccountRecovery
from sbds.storages.db.tables import TxWithdrawVestingRoute
from sbds.storages.db.tables import TxTransfer
from sbds.storages.db.tables import TxTransferFromSavings
from sbds.storages.db.tables import TxTransferToSavings
from sbds.storages.db.tables import TxTransferToVesting
from sbds.storages.db.tables import TxVote
from sbds.storages.db.tables import TxWithdrawVesting
from sbds.storages.db.tables import TxWitnessUpdate

from sbds.storages.db.utils import configure_engine
from sbds.http_client import SimpleSteemAPIClient

# Connection settings come from the environment; both variables are required
# (a missing one raises KeyError at import time).
db_url = os.environ['DATABASE_URL']
rpc_url = os.environ['STEEMD_HTTP_URL']

# Build the SQLAlchemy engine/session and the steemd RPC client used below.
engine_config = configure_engine(db_url)
engine = engine_config.engine
session = Session(bind=engine)
client = SimpleSteemAPIClient(url=rpc_url)
Exemplo n.º 5
0
def sqlitedb_engine_config(sqlite_db_url=None):
    """Return an engine configuration for a SQLite database.

    Args:
        sqlite_db_url: SQLAlchemy SQLite URL; defaults to the in-memory
            database URL ``'sqlite://'`` when omitted or falsy.

    Returns:
        The configuration object produced by ``configure_engine``.
    """
    sqlite_db_url = sqlite_db_url or 'sqlite://'
    # BUG FIX: the original assigned the config to a local and never returned
    # it, so callers always received None.
    return configure_engine(sqlite_db_url)
Exemplo n.º 6
0
def sqlitedb_session(sqlite_db_url=None):
    """Return a Session bound to a SQLite engine.

    Uses the in-memory database URL ``'sqlite://'`` when no URL is given.
    """
    url = sqlite_db_url if sqlite_db_url else 'sqlite://'
    config = configure_engine(url)
    return Session(bind=config.engine)
Exemplo n.º 7
0
from sbds.storages.db import Base, Session
from sbds.storages.db.tables.core import Block
from sbds.storages.db.tables.tx import tx_class_map
from sbds.storages.db.utils import configure_engine

# Cap on rows returned by queries; the query limit is one higher so handlers
# can detect that results were truncated.
MAX_DB_ROW_RESULTS = 100000
DB_QUERY_LIMIT = MAX_DB_ROW_RESULTS + 1

logger = sbds.logging.getLogger(__name__, level=logging.DEBUG)

# Endpoints come from the environment, with development-friendly fallbacks
# (public steemd dev node; on-disk sqlite db).
rpc_url = os.environ.get('STEEMD_HTTP_URL', 'https://steemd.steemitdev.com')
database_url = os.environ.get('DATABASE_URL', 'sqlite:///')

rpc = SimpleSteemAPIClient(rpc_url)

# NOTE(review): configure_engine's result is unpacked as a 4-tuple here,
# while other call sites access it via attributes (engine_config.engine) —
# presumably it returns a namedtuple; confirm against its definition.
database_url, url, engine_kwargs, engine = configure_engine(database_url,
                                                            echo=True)
Session.configure(bind=engine)

app = bottle.Bottle()
# pylint: disable=undefined-variable
plugin = sqlalchemy.Plugin(
    engine,  # SQLAlchemy engine created with create_engine function.
    Base.metadata,  # SQLAlchemy metadata, required only if create=True.
    keyword='db',
    # Keyword used to inject session database in a route (default 'db').
    create=True,
    # If it is true, execute `metadata.create_all(engine)` when plugin is applied (default False).
    commit=False,
    # If it is true, plugin commit changes after route is executed (default True).
    use_kwargs=False,
    # If it is true and keyword is not defined, plugin uses **kwargs argument to inject session database (default False).
Exemplo n.º 8
0
def _populate(database_url, steemd_http_url, steemd_websocket_url, max_procs,
              max_threads):
    """Backfill the database from the blockchain, then stream new blocks.

    Runs seven phases, each announced on stdout: (1) verify db connectivity,
    (2) kill active db threads, (3) initialise tables if needed, (4) find the
    highest irreversible block, (5) compute missing block numbers, (6) backfill
    them with a process pool, (7) stream live blocks over websockets into the
    db (blocks until interrupted).

    Args:
        database_url: SQLAlchemy database URL.
        steemd_http_url: steemd HTTP JSON-RPC endpoint used for backfill.
        steemd_websocket_url: steemd websocket endpoint used for streaming.
        max_procs: max worker processes for backfill; falls back to the CPU
            count (or 1) when falsy.
        max_threads: max fetcher threads per worker process.

    Raises:
        Exception: if the database connectivity check fails.
    """
    # pylint: disable=too-many-locals, too-many-statements
    rpc = SimpleSteemAPIClient(steemd_http_url)
    engine_config = configure_engine(database_url)

    db_name = engine_config.url.database
    db_user_name = engine_config.url.username

    Session.configure(bind=engine_config.engine)
    session = Session()

    # [1/7] confirm db connectivity
    task_message = fmt_task_message(
        'Confirm database connectivity',
        emoji_code_point=u'\U0001F4DE',
        counter=1)
    click.echo(task_message)

    url, table_count = test_connection(database_url)
    if url:
        success_msg = fmt_success_message(
            'connected to %s and found %s tables', url.__repr__(), table_count)
        click.echo(success_msg)

    if not url:
        raise Exception('Unable to connect to database')
    # drop references we no longer need before the long-running work below
    del url
    del table_count

    # [2/7] kill existing db threads
    task_message = fmt_task_message(
        'Killing active db threads', emoji_code_point='\U0001F4A5', counter=2)
    click.echo(task_message)
    all_procs, killed_procs = kill_db_processes(database_url, db_name,
                                                db_user_name)
    if len(killed_procs) > 0:
        success_msg = fmt_success_message('killed %s processes',
                                          len(killed_procs))
        click.echo(success_msg)
    del all_procs
    del killed_procs

    # [3/7] init db if required
    task_message = fmt_task_message(
        'Initialising db if required',
        emoji_code_point=u'\U0001F50C',
        counter=3)
    click.echo(task_message)
    init_tables(database_url, Base.metadata)

    # [4/7] find last irreversible block
    last_chain_block = rpc.last_irreversible_block_num()
    task_message = fmt_task_message(
        'Finding highest blockchain block',
        emoji_code_point='\U0001F50E',
        counter=4)
    click.echo(task_message)

    success_msg = fmt_success_message(
        'learned highest irreversible block is %s', last_chain_block)
    click.echo(success_msg)

    # [5/7] get missing block_nums
    task_message = fmt_task_message(
        'Finding blocks missing from db',
        emoji_code_point=u'\U0001F52D',
        counter=5)
    click.echo(task_message)
    # yields callables in 100k-block chunks; each call queries the db for the
    # block numbers missing from that range
    missing_block_nums_gen = Block.get_missing_block_num_iterator(
        session, last_chain_block, chunksize=100000)

    with click.progressbar(
            missing_block_nums_gen,
            label='Finding missing block_nums',
            color=True,
            show_eta=False,
            show_percent=False,
            empty_char='░',
            fill_char='█',
            show_pos=True,
            bar_template='%(bar)s  %(info)s') as pbar:
        all_missing_block_nums = []
        for missing_gen in pbar:
            all_missing_block_nums.extend(missing_gen())

    success_msg = fmt_success_message('found %s missing blocks',
                                      len(all_missing_block_nums))
    click.echo(success_msg)
    del missing_block_nums_gen
    del pbar
    # invalidate the session's connection before forking worker processes
    session.invalidate()

    # [6/7] adding missing blocks
    task_message = fmt_task_message(
        'Adding missing blocks to db, this may take a while',
        emoji_code_point=u'\U0001F4DD',
        counter=6)
    click.echo(task_message)

    max_workers = max_procs or os.cpu_count() or 1

    # NOTE(review): this chunksize is computed but never used — chunkify below
    # uses a hard-coded 10000 and executor.map uses chunksize=1. Looks like
    # dead code; confirm before removing.
    chunksize = len(all_missing_block_nums) // max_workers
    if chunksize <= 0:
        chunksize = 1

    # each worker process receives (database_url, steemd_http_url, chunk)
    map_func = partial(
        block_adder_process_worker,
        database_url,
        steemd_http_url,
        max_threads=max_threads)

    chunks = chunkify(all_missing_block_nums, 10000)

    with concurrent.futures.ProcessPoolExecutor(
            max_workers=max_workers) as executor:
        executor.map(map_func, chunks , chunksize=1)

    success_msg = fmt_success_message('added missing blocks')
    click.echo(success_msg)
    del all_missing_block_nums

    # [7/7] stream blocks
    task_message = fmt_task_message(
        'Streaming blocks', emoji_code_point=u'\U0001F4DD', counter=7)
    click.echo(task_message)

    # follow the chain from the highest block now in the db
    highest_db_block = Block.highest_block(session)
    ws_rpc = SteemNodeRPC(steemd_websocket_url)
    blocks = ws_rpc.block_stream(highest_db_block)
    add_blocks(blocks, session)