Example #1
    def run(self):
        global engine
        while True:
            logger.info('transactions db worker starting')
            engine = create_engine(self.url)
            metadata.create_all(engine)
            try:
                # get latest version in the db
                cur_ver = get_latest_version()
                cur_ver += 1  # TODO: later handle genesis
                logger.info('starting update at version {}'.format(cur_ver))
                # start the main loop
                while True:
                    try:
                        bver = get_latest_version_from_ledger()
                    except:
                        sleep(1)
                        continue
                    if cur_ver > bver:
                        if cur_ver > bver + 50:  # more than 50 versions ahead of the ledger: wait and re-check
                            sleep(1)
                            continue
                        file_path = '{}_{}.gz'.format(self.db_backup_path, strftime('%Y%m%d%H%M%S'))
                        logger.info('saving database to {}'.format(file_path))
                        with gzip.open(file_path, 'wb') as f:
                            f.write(dumps(engine.execute(select([txs])).fetchall()))
                        metadata.drop_all(engine)
                        metadata.create_all(engine)
                        break

                    # batch update
                    num = min(1000, bver - cur_ver)  # at most 1000 records at once
                    tx_data = get_raw_tx_lst(cur_ver, num)

                    # read records
                    res = parse_raw_tx_lst(*tx_data)
                    if not res:
                        sleep(5)
                        continue

                    # do the insertion
                    engine.execute(txs.insert(), res)

                    # update counter to the latest version we inserted
                    cur_ver = res[-1]['version']
                    logger.debug('update to version: {} - success'.format(cur_ver))

                    # update latest version to next
                    cur_ver += 1

                    # sleep relative to amount of rows fetched so we don't get a 429 error
                    sleep(0.001 * num)

            except:
                logger.exception('Major error in tx_db_worker')
                sleep(2)
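
Example #1 above references several module-level names that are not shown on this page (the global engine, the metadata/txs table pair, the logger, and the thread class the method belongs to). Below is a minimal sketch of what that scaffolding could look like, assuming SQLAlchemy Core and pickle-based backups; the class name, column list, and the dumps import are guesses, not the project's actual code.

import gzip
import logging
import threading
from pickle import dumps
from time import sleep, strftime

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, select

logger = logging.getLogger(__name__)

# module-level SQLAlchemy Core objects the worker expects to find
metadata = MetaData()
txs = Table(
    'transactions', metadata,
    Column('version', Integer, primary_key=True),  # placeholder columns only
    Column('raw', String),
)

class TxDBWorker(threading.Thread):
    def __init__(self, url, db_backup_path):
        super().__init__(daemon=True)
        self.url = url                        # database URL, e.g. 'sqlite:///txs.db'
        self.db_backup_path = db_backup_path  # prefix for the gzip backup files

    # run() as in Example #1 above

A caller would then start the worker with something like TxDBWorker('sqlite:///txs.db', '/tmp/txs').start().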
Example #2
    def run(self):
        while True:
            try:
                # get latest version in the db
                cur_ver = get_latest_version()
                cur_ver += 1  # TODO: later handle genesis
                logger.info('starting update at version {}'.format(cur_ver))
                # start the main loop
                while True:
                    try:
                        bver = get_latest_version_from_ledger()
                    except:
                        sleep(1)
                        continue
                    if cur_ver > bver:
                        sleep(1)
                        continue

                    # batch update
                    num = min(1000, bver - cur_ver)  # at most 1000 records at once
                    tx_data = get_raw_tx_lst(cur_ver, num)

                    # read records
                    res = parse_raw_tx_lst(*tx_data)
                    if len(res) == 0:
                        sleep(5)
                        continue

                    # do the insertion
                    engine.execute(txs.insert(), res)

                    # update counter to the latest version we inserted
                    cur_ver = res[-1]['version']
                    logger.debug('update to version: {} - success'.format(cur_ver))

                    # update latest version to next
                    cur_ver += 1

                    # sleep relative to amount of rows fetched so we don't get a 429 error
                    sleep(0.001 * num)

            except:
                logger.exception('Major error in tx_db_worker')
                sleep(2)
                logger.info('restarting tx_db_worker')
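
Example #2 drops the backup/reset branch entirely and simply waits whenever the local version runs ahead of the ledger. The helper functions it calls are defined elsewhere; the stubs below only document the signatures and return shapes the loop appears to assume (the names come from the calls above, but the docstrings are interpretation, not the project's documentation).

def get_latest_version():
    """Highest transaction version already stored in the local DB (0 if empty)."""

def get_latest_version_from_ledger():
    """Latest version reported by the blockchain RPC endpoint; may raise on network errors."""

def get_raw_tx_lst(start_version, count):
    """Fetch up to `count` raw transactions from the ledger, starting at `start_version`."""

def parse_raw_tx_lst(*tx_data):
    """Decode the raw transactions into a list of dicts, each carrying a 'version' key."""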
Example #3
    def run(self):
        while True:
            logger.info('transactions db worker starting')
            engine = create_engine(self.url)
            Session.configure(bind=engine)
            Base.metadata.create_all(engine)

            # get latest version in the db
            with session_scope() as session:
                cur_ver = session.query(func.max(Transaction.version)).scalar()
            cur_ver = (cur_ver + 1) if cur_ver else 1  # TODO: later handle genesis

            try:
                logger.info('starting update at version {}'.format(cur_ver))
                # start the main loop
                while True:
                    try:
                        bver = get_latest_version_from_ledger()
                    except:
                        sleep(1)
                        continue
                    if cur_ver > bver + 50:
                        # +50 for safety due to chance we're not in sync with latest blockchain ver
                        file_path = '{}_{}.gz'.format(self.db_backup_path,
                                                      strftime('%Y%m%d%H%M%S'))
                        logger.info('saving database to {}'.format(file_path))
                        with gzip.open(file_path,
                                       'wb') as f, session_scope() as session:
                            f.write(dumps(session.query(Transaction).all()))  # materialize rows before pickling
                        Base.metadata.drop_all(engine)
                        Base.metadata.create_all(engine)
                        break
                    elif cur_ver > bver:
                        sleep(1)
                        continue

                    # batch update
                    num = min(1000, bver - cur_ver)  # at most 1000 records at once
                    tx_data = get_raw_tx_lst(cur_ver, num)

                    # read records
                    res = parse_raw_tx_lst(*tx_data)
                    if not res:
                        sleep(5)
                        continue

                    # do the insertion
                    with session_scope() as session:
                        session.add_all(Transaction(**v) for v in res)
                    # update counter to the latest version we inserted
                    cur_ver = res[-1]['version']
                    logger.debug(
                        'update to version: {} - success'.format(cur_ver))

                    # update latest version to next
                    cur_ver = cur_ver + 1

                    # sleep relative to amount of rows fetched so we don't get a 429 error
                    sleep(0.001 * num)
                    self.running = True

            except:
                logger.exception('Major error in tx_db_worker')
                sleep(2)
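
Example #3 switches to the SQLAlchemy ORM: a mapped Transaction class, a Session factory that gets bound to the engine at startup, and a session_scope() context manager. None of these are shown here; the sketch below follows the standard SQLAlchemy session_scope recipe and uses a placeholder model, since the real column list is unknown.

from contextlib import contextmanager

from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()
Session = sessionmaker()  # bound later via Session.configure(bind=engine)

class Transaction(Base):
    __tablename__ = 'transactions'
    version = Column(Integer, primary_key=True)  # placeholder columns only
    raw = Column(String)

@contextmanager
def session_scope():
    """Provide a transactional scope around a series of operations."""
    session = Session()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()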
Example #4
def tx_db_worker(db_path, rpc_server, mint_addr):
    while True:
        try:
            logger.info('transactions db worker starting')

            # create rpc connection
            try:
                start_rpc_client_instance(rpc_server, mint_addr)
            except:
                sleep(10)
                start_rpc_client_instance(rpc_server, mint_addr)

            # connect to DB
            c, conn = connect_to_db(db_path)  # returns cursor object
            init_db(c)

            # get latest version in the db
            cur_ver = get_latest_version(c)
            cur_ver += 1  # TODO: later handle genesis
            logger.info('starting update at version {}'.format(cur_ver))

            # start the main loop
            while True:
                try:
                    bver = get_latest_version_from_ledger()
                except:
                    sleep(1)
                    continue
                if cur_ver > bver:
                    sleep(1)
                    continue

                # batch update
                num = min(1000, bver - cur_ver)  # at most 1000 records at once
                tx_data = get_raw_tx_lst(cur_ver, num)

                # read records
                res = parse_raw_tx_lst(*tx_data)
                if len(res) == 0:
                    sleep(5)
                    continue

                # do the insertion
                db_data = [tuple(x.values()) for x in res]
                c.executemany(
                    "INSERT INTO transactions VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);",
                    db_data)

                # update counter to the latest version we inserted
                cur_ver = res[-1]['version']
                logger.debug('update to version: {} - success'.format(cur_ver))

                # Save (commit) the changes
                conn.commit()

                # update latest version to next
                cur_ver += 1

                # sleep relative to amount of rows fetched so we don't get a 429 error
                sleep(0.001 * num)

        except:
            logger.exception('Major error in tx_db_worker')
            sleep(2)
            logger.info('restarting tx_db_worker')
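
Example #4 is the oldest variant and talks to SQLite directly through a cursor. Below is a rough sketch of the connect_to_db/init_db helpers it assumes and of launching the worker in a background thread; the abbreviated schema and the connection arguments are placeholders (the real table evidently has 18 columns, matching the placeholders in the INSERT above).

import sqlite3
import threading

def connect_to_db(db_path):
    """Open (or create) the SQLite database and return (cursor, connection)."""
    conn = sqlite3.connect(db_path)
    return conn.cursor(), conn

def init_db(c):
    """Create the transactions table if it does not already exist."""
    # the real schema has 18 columns (one per '?' in the INSERT above); abbreviated here
    c.execute('CREATE TABLE IF NOT EXISTS transactions (version INTEGER PRIMARY KEY, raw TEXT)')

# hypothetical wiring: run the worker without blocking the main program
worker = threading.Thread(
    target=tx_db_worker,
    args=('txs.db', 'http://localhost:8080', '<mint-address>'),  # placeholder arguments
    daemon=True,
)
worker.start()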