Example #1
def __init__(self, db_path, rpc_server, mint_addr):
    Thread.__init__(self)
    self.db_path = db_path
    started = False
    logger.info('transactions db worker starting')
    # block until the RPC client can be started, retrying every 10 seconds
    while not started:
        try:
            start_rpc_client_instance(rpc_server, mint_addr)
            started = True
        except Exception:
            sleep(10)
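
The snippet above is only the `__init__` of a worker thread, so it is not runnable on its own. Below is a minimal, hypothetical sketch of how such a class might be completed and started; the stand-in `start_rpc_client_instance`, the `run()` body, and all argument values are assumptions for illustration, not part of the original project.

import logging
from threading import Thread
from time import sleep

logger = logging.getLogger(__name__)


def start_rpc_client_instance(rpc_server, mint_addr):
    # Stand-in for the project's real RPC bootstrap function (assumption).
    logger.info('connecting to %s with mint account %s', rpc_server, mint_addr)


class TxDBWorker(Thread):
    def __init__(self, db_path, rpc_server, mint_addr):
        Thread.__init__(self)
        self.db_path = db_path
        started = False
        logger.info('transactions db worker starting')
        # block until the RPC client can be started, retrying every 10 seconds
        while not started:
            try:
                start_rpc_client_instance(rpc_server, mint_addr)
                started = True
            except Exception:
                sleep(10)

    def run(self):
        # The real worker would poll the ledger and write rows into the DB here.
        logger.info('worker running against %s', self.db_path)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    worker = TxDBWorker('transactions.db', 'localhost:8000', 'mint.key')
    worker.start()
    worker.join()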
Example #2
def __init__(self, config):
    Thread.__init__(self)
    # build the SQLAlchemy URL from the DB_* keys in the config dict
    self.url = "{DB_DIALECT}+{DB_DRIVER}://{DB_USERNAME}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}".format(**config)
    logger.info('sqlalchemy.url: {}'.format(self.url))
    self.db_backup_path = config['DB_BACKUP_PATH']
    running = False
    # block until the RPC client can be started, retrying every 10 seconds
    while not running:
        try:
            start_rpc_client_instance(config['RPC_SERVER'], config['MINT_ACCOUNT'])
            running = True
        except Exception:
            sleep(10)
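
A quick way to see what the `.format(**config)` call produces: keyword expansion fills each `{...}` placeholder in the URL template by name from the config dict. The values below are made-up examples, not ones from the original project.

# Made-up values, just to show how str.format(**config) fills the URL template.
config = {
    'DB_DIALECT': 'postgresql',
    'DB_DRIVER': 'psycopg2',
    'DB_USERNAME': 'explorer',
    'DB_PASSWORD': 'secret',
    'DB_HOST': 'localhost',
    'DB_PORT': 5432,
    'DB_NAME': 'transactions',
}

url = "{DB_DIALECT}+{DB_DRIVER}://{DB_USERNAME}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}".format(**config)
print(url)
# postgresql+psycopg2://explorer:secret@localhost:5432/transactions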
Example #3

########
# Main #
########
if __name__ == '__main__':
    with open('config.json', 'r') as f:
        config = json.load(f)

    # select the config section named by the BROWSER env var,
    # falling back to PRODUCTION when it is unset or unknown
    try:
        config = config[os.getenv("BROWSER")]
    except KeyError:
        config = config["PRODUCTION"]

    app.logger.info("system configuration: {}".format(
        json.dumps(config, indent=4)))

    TxDBWorker(config['DB_PATH'], config['RPC_SERVER'],
               config['MINT_ACCOUNT']).start()

    start_rpc_client_instance(config['RPC_SERVER'], config['MINT_ACCOUNT'])

    start_client_instance(config['CLIENT_PATH'], config['ACCOUNT_FILE'])

    sleep(1)

    app.run(port=config['FLASK_PORT'],
            threaded=config['FLASK_THREADED'],
            host=config['FLASK_HOST'],
            debug=config['FLASK_DEBUG'])
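
Example #3 expects a `config.json` with one section per environment and the keys read above. The sketch below writes such a file with placeholder values; the key names mirror what the example reads, but every value here is an assumption.

# Placeholder values only; the key names mirror what Example #3 reads.
import json

sample = {
    "PRODUCTION": {
        "DB_PATH": "transactions.db",
        "RPC_SERVER": "localhost:8000",
        "MINT_ACCOUNT": "mint.key",
        "CLIENT_PATH": "./client",
        "ACCOUNT_FILE": "accounts.json",
        "FLASK_PORT": 5000,
        "FLASK_THREADED": True,
        "FLASK_HOST": "0.0.0.0",
        "FLASK_DEBUG": False,
    }
}

with open('config.json', 'w') as f:
    json.dump(sample, f, indent=4)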
Example #4
def tx_db_worker(db_path, rpc_server, mint_addr):
    while True:
        try:
            logger.info('transactions db worker starting')

            # create the RPC connection; retry once after a short pause if the first attempt fails
            try:
                start_rpc_client_instance(rpc_server, mint_addr)
            except Exception:
                sleep(10)
                start_rpc_client_instance(rpc_server, mint_addr)

            # connect to DB
            c, conn = connect_to_db(db_path)  # returns cursor object
            init_db(c)

            # get latest version in the db
            cur_ver = get_latest_version(c)
            cur_ver += 1  # TODO: later handle genesis
            logger.info('starting update at version {}'.format(cur_ver))

            # start the main loop
            while True:
                # fetch the ledger's latest version; back off briefly on failure
                try:
                    bver = get_latest_version_from_ledger()
                except Exception:
                    sleep(1)
                    continue
                if cur_ver > bver:
                    sleep(1)
                    continue

                # batch update
                num = min(1000, bver - cur_ver)  # at most 1000 records at once
                tx_data = get_raw_tx_lst(cur_ver, num)

                # read records
                res = parse_raw_tx_lst(*tx_data)
                if len(res) == 0:
                    sleep(5)
                    continue

                # do the insertion
                db_data = [tuple(x.values()) for x in res]
                c.executemany(
                    "INSERT INTO transactions VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
                    db_data)

                # update counter to the latest version we inserted
                cur_ver = res[-1]['version']
                logger.debug('update to version: {} - success'.format(cur_ver))

                # Save (commit) the changes
                conn.commit()

                # update latest version to next
                cur_ver += 1

                # sleep in proportion to the number of rows fetched so we don't hit a 429 (rate-limit) error
                sleep(0.001 * num)

        except Exception:
            logger.exception('Major error in tx_db_worker')
            sleep(2)
            logger.info('restarting tx_db_worker')
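
Unlike Examples #1 and #2, this version is a plain function rather than a `Thread` subclass, so the caller is responsible for putting it on a thread. A minimal way to do that, assuming the same kind of arguments as in Example #3 (the path and addresses below are placeholders):

# Placeholder arguments; tx_db_worker is the function defined above.
from threading import Thread

worker = Thread(
    target=tx_db_worker,
    args=('transactions.db', 'localhost:8000', 'mint.key'),
    daemon=True,  # don't block interpreter shutdown on exit
)
worker.start()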