Example #1
0
def main():
    start_time = datetime.now()
    #block_id, block_hash, depth = (0, '\0' * 32, 0)
    #block_id, block_hash, depth = (257339, binascii.unhexlify('610ebdcb90530fe6f5c7911479ce8b257738f048390adb5d102cf91200000000'), 12054)

    db_session = db.Session()

    res = db.engine.execute('update block set prev_block_id = p.id from block p where block.prev_block_hash = p.block_hash and block.prev_block_id is null')
    print('%r prev_block_id set' % (res.rowcount,))
    db_session.commit()

    # The genesis block (all-zero previous hash) anchors the chain at depth 0.
    db_session.query(db.Block).filter(db.Block.prev_block_hash == 32 * '\x00').update(values={'depth': 0})

    ChildBlock = aliased(db.Block, name='child_block')
    ParentBlock = aliased(db.Block, name='parent_block')
    prev = None
    found = True
    # Fixpoint loop: as long as some block with a known depth still has a
    # child with an unknown depth, walk down that chain assigning depths.
    while found:
        found = False
        for block_id, block_hash, depth in (
            db_session.query(
                ParentBlock.id, ParentBlock.block_hash, ParentBlock.depth
            ).join(
                ChildBlock, ChildBlock.prev_block_id == ParentBlock.id
            ).filter(
                (ChildBlock.depth.is_(None)) & (ParentBlock.depth.isnot(None))
            )
        ):
            if prev == block_id:
                break
            prev = block_id
            found = True
            while True:
                depth += 1
                res = db_session.query(db.Block.id, db.Block.block_hash).filter(db.Block.prev_block_id == block_id).first()
                if res is None:
                    break
                #prev_block_id = block_id
                block_id, block_hash = res
                db_session.query(db.Block).filter(db.Block.id == block_id).update(values={'depth': depth})  # , 'prev_block_id': prev_block_id})
                #if depth % 20 == 0:
                #    db_session.commit()
                db_session.commit()
                print('%7i %7i %s' % (depth, block_id, binascii.hexlify(block_hash)))
            #db_session.commit()

    # The loop above worked the last time I tried it, but previously the SQL below was needed to update some blocks.
    while True:
        res = db.engine.execute('update block set depth = p.depth + 1 from block p where block.prev_block_id = p.id and block.depth is null and p.depth is not null returning block.id, block.depth, block.block_hash')
        if res.rowcount == 0:
            break
        blk_id, depth, blk_hash = res.first()
        print('%i %7i %7i %s' % (res.rowcount, blk_id, depth, binascii.hexlify(blk_hash)))
        db_session.commit()
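For orientation: these examples query a SQLAlchemy `Block` model through the project's `db` module, which is not part of the excerpt. The sketch below lists only the columns the code above actually touches; the types, keys and indexes are assumptions.

# Rough sketch of the assumed Block model; column names are taken from the
# queries above, everything else is a guess.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Block(Base):
    __tablename__ = 'block'

    id = sa.Column(sa.Integer, primary_key=True)
    block_hash = sa.Column(sa.LargeBinary(32), unique=True, index=True)
    prev_block_hash = sa.Column(sa.LargeBinary(32), index=True)
    prev_block_id = sa.Column(sa.Integer, sa.ForeignKey('block.id'), nullable=True)
    depth = sa.Column(sa.Integer, nullable=True, index=True)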
Example #2
0
 def __init__(self):
     self.db_session = db.Session()
     self.queue = multiprocessing.Queue()
     self.num_processed = multiprocessing.Value('i', 0)
     self.total_to_process = self.db_session.query(db.TxIn.id).filter(
         db.TxIn.txout_id.is_(None)
         &
         (db.TxIn.previous_output_transaction_hash != 32 * '\x00')).count()
     self.total_blocks = self.total_to_process / BLOCK
     self.blocks_processed = multiprocessing.Value('i', 0)
     self.shutdown_event = multiprocessing.Event()
     self.queued_blocks = 0
     self.start_time = datetime.now()
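The constructor above only sets up shared state (a work queue, counters, a shutdown event); the worker processes that drain the queue are not shown in this excerpt. A consumer roughly like the following sketch would fit that state. The function name and the batch format are assumptions, and the actual TxIn/TxOut lookup is left as a placeholder.

import Queue  # Python 2; the module is named queue on Python 3


def resolve_worker(work_queue, num_processed, blocks_processed, shutdown_event):
    # Hypothetical consumer: each queue item is assumed to be one batch
    # ("block") of TxIn ids whose txout_id still has to be resolved.
    while not shutdown_event.is_set():
        try:
            batch = work_queue.get(timeout=1)
        except Queue.Empty:
            continue
        for txin_id in batch:
            pass  # placeholder for looking up and storing the matching TxOut
        with num_processed.get_lock():
            num_processed.value += len(batch)
        with blocks_processed.get_lock():
            blocks_processed.value += 1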
Example #3
0
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('txin_txout',
                    sa.Column('txin_id', sa.Integer(), nullable=False),
                    sa.Column('txout_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['txin_id'],
                        ['txin.id'],
                    ), sa.ForeignKeyConstraint(
                        ['txout_id'],
                        ['txout.id'],
                    ), sa.PrimaryKeyConstraint('txin_id', 'txout_id'))

    # INSERT INTO txin_txout (txin_id, txout_id) SELECT id, txout_id FROM txin WHERE txout_id IS NOT NULL
    conn = op.get_bind()
    session = db.Session(bind=conn)
    num_to_insert = session.query(sa.func.count(db.TxIn.id)).filter(
        db.TxIn.txout_id.isnot(None)).scalar()
    print('Going to insert %u records into txin_txout' % (num_to_insert, ))
    start_time = datetime.now()
    num_inserted = 0
    for chunk in chunked(session.query(db.TxIn.id, db.TxIn.txout_id).filter(
            db.TxIn.txout_id.isnot(None)).yield_per(CHUNK_SIZE),
                         chunk_size=CHUNK_SIZE):
        query_start = datetime.now()
        conn.execute(db.TxIn_TxOut.__table__.insert().values(chunk))
        query_end = datetime.now()
        num_inserted += len(chunk)
        tot_time = query_end - start_time
        avg_time = tot_time / num_inserted
        print(
            '%u / %u %.3f%% done, %u inserted, %s for query, %s total, %s avg, ~%s remaining'
            % (num_inserted, num_to_insert, num_inserted * 100.0 /
               num_to_insert, len(chunk), query_end - query_start, tot_time,
               avg_time, avg_time * (num_to_insert - num_inserted)))

    # Create indexes after inserting data
    op.create_index(op.f('ix_txin_txout_txin_id'),
                    'txin_txout', ['txin_id'],
                    unique=False)
    op.create_index('ix_txin_txout_txin_id_txout_id',
                    'txin_txout', ['txin_id', 'txout_id'],
                    unique=True)
    op.create_index(op.f('ix_txin_txout_txout_id'),
                    'txin_txout', ['txout_id'],
                    unique=False)
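`chunked` is used in the migration above but is not defined in this excerpt. A minimal implementation along the lines below (built on `itertools.islice`) matches the call site, which passes a `yield_per()` query and `chunk_size=CHUNK_SIZE`; the project's real helper may differ.

import itertools


def chunked(iterable, chunk_size):
    # Yield successive lists of up to chunk_size items from any iterable,
    # so the streamed query above is consumed without materialising all rows.
    iterator = iter(iterable)
    while True:
        chunk = list(itertools.islice(iterator, chunk_size))
        if not chunk:
            break
        yield chunk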
Example #4
0
 def run(self):
     self.db_session = db.Session()
     try:
         while not self.db_write_thread_stop_event.is_set():
             try:
                 if self.block_queue.empty():
                     time.sleep(1)
                 else:
                     try:
                         blktmpfilename = self.block_queue.get(timeout=1)
                     except Queue.Empty:
                         continue
                     log.info('Reading block file %s', blktmpfilename)
                     try:
                         with open(blktmpfilename, 'rb') as blktmpfile:
                             data = blktmpfile.read()
                     except IOError:
                         log.exception('IOError reading blockfile %s',
                                       blktmpfilename)
                         continue
                     (msg, _) = protocol.Message.parse(data)
                     assert not _, _  # parse() should consume the entire file
                     self.write_block_to_db(msg)
                     os.remove(blktmpfilename)
             except Exception:
                 log.exception(
                     'Exception in db_write_loop, creating a new session')
                 self.block_queue.put(blktmpfilename)
                 try:
                     db.reconnect()
                 except Exception:
                     log.exception('Exception reconnecting, ignoring')
                 log.info('Reconnected')
             except:
                 self.block_queue.put(blktmpfilename)
                 raise
     except:
         log.exception('exception in db_write_loop')
         raise
     finally:
         log.info('db_write_loop shutting down IOLoop')
         self.ioloop.shutdown()
         log.info('db_write_loop finished')
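Example #6 below hands work to this loop through `queue_block`, which is not included in the excerpt. Given the queue and stop event that `run()` polls, the producer-facing side presumably looks roughly like the skeleton below; apart from `queue_block`, the names here are assumptions.

import Queue  # Python 2; the module is named queue on Python 3
import threading


class DBWriteLoopSketch(object):
    # Skeleton of the writer's producer-facing side assumed by run() above.
    def __init__(self, ioloop):
        self.ioloop = ioloop
        self.block_queue = Queue.Queue()
        self.db_write_thread_stop_event = threading.Event()

    def queue_block(self, blktmpfilename):
        # Hand a raw-block temp file to the writer; run() pops the filename,
        # parses the message and removes the file once the block is stored.
        self.block_queue.put(blktmpfilename)

    def stop(self):
        # Matches the stop event checked at the top of run().
        self.db_write_thread_stop_event.set()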
Example #5
0
 def __init__(self):
     self.db_session = db.Session()
     #STATEFILE = 'txout_id'
     #if not os.path.exists(STATEFILE):
     #    with open(STATEFILE, 'w') as f:
     #        f.write(str(self.db_session.query(sql_functions.min(db.TxOut.id)).scalar()))
     #with open(STATEFILE, 'r') as f:
     #    self.min_id = int(f.read())
     #self.min_id = 97465110 - BLOCK * 100
     #self.min_id = self.db_session.query(sql_functions.min(db.TxOut.id)).scalar()
     self.queue = multiprocessing.Queue()
     self.num_processed = multiprocessing.Value('i', 0)
     self.total_to_process = self.db_session.query(db.TxOut.id).filter(
         # db.TxOut.id >= self.min_id
         db.TxOut.spent.is_(None) | db.TxOut.spent.is_(False)).count()
     self.total_blocks = self.total_to_process / BLOCK
     self.blocks_processed = multiprocessing.Value('i', 0)
     self.shutdown_event = multiprocessing.Event()
     self.queued_blocks = 0
     self.start_time = datetime.now()
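A small detail shared with Example #2: with Python 2 integers, `self.total_to_process / BLOCK` floors, so a final partial batch is not reflected in `total_blocks`. If that matters for the progress figures, ceiling division is a one-line change (assuming `BLOCK` is a positive integer batch size):

     # Count the final partial batch as well (ceiling division).
     self.total_blocks = (self.total_to_process + BLOCK - 1) // BLOCK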
Example #6
0
    def __init__(self, read_blocktmp_files=True):
        super(IOLoop, self).__init__()
        self.sock = None
        self.out_queue = Queue.Queue()
        self.waiting_for = {}
        self.stored = {}
        self.max_height = multiprocessing.Value(ctypes.c_ulong, 0)

        self.db_session = db.Session()

        self.db_write_loop = DBWriteLoop(self)

        max_height = self.db_session.query(sql_functions.max(
            db.Block.depth)).scalar()
        if max_height is not None:
            self.max_height.value = max_height
        self.known_blocks = set(
            block.block_hash
            for block in self.db_session.query(db.Block.block_hash).all())
        self._prev_block_hashes = set()
        if read_blocktmp_files:
            for blktmpfilename in glob.glob('blocktmp/*.rawblk'):
                self.db_write_loop.queue_block(blktmpfilename)
                log.info('Reading blockfile %s', blktmpfilename)
                try:
                    with open(blktmpfilename, 'rb') as blktmpfile:
                        data = blktmpfile.read()
                except IOError:
                    log.exception('IOError reading blockfile %s',
                                  blktmpfilename)
                    continue
                (msg, _) = protocol.Message.parse(data)
                assert not _, _  # parse() should consume the entire file
                self.known_blocks.add(msg.block_hash)
                self._prev_block_hashes.add(msg.prev_block_hash)

        self.num_blocks = multiprocessing.Value(ctypes.c_ulonglong,
                                                len(self.known_blocks))
        log.info('Block database starting with %r blocks',
                 self.num_blocks.value)

        self.process_queue = Queue.Queue()

        self.process_thread = None
        self.write_thread = None
        self.read_thread = None

        self.shutdown_event = multiprocessing.Event()
        self._internal_shutdown_event = threading.Event()

        self.message_timeout = MESSAGE_TIMEOUT
        self.ping_timing = SECONDS_BETWEEN_PINGS
        self.last_ping = None
        self.last_pong = None
        self.last_message = None

        # Note: the second assignment overrides the first.
        self.remote_addr = ('10.0.42.253', 8333)
        self.remote_addr = ('127.0.0.1', 8333)
        # Pick the first IPv4 address on any interface that is not a
        # point-to-point entry (those carry a 'peer' key); a readable,
        # unrolled version follows after this example.
        local_addr = [
            addrs for i, addrs in ((i, [
                addr for addr in addrs[netifaces.AF_INET] if 'peer' not in addr
            ]) for i, addrs in ((i, netifaces.ifaddresses(i))
                                for i in netifaces.interfaces())
                                   if netifaces.AF_INET in addrs) if addrs
        ][0][0]
        self.local_addr = local_addr['addr']
        self.local_port = 8334
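The nested generator expression that selects `local_addr` above is dense; the helper below is an unrolled, behaviour-equivalent sketch of the same selection (the first non-point-to-point IPv4 address on any interface), included purely for readability. The function name is an assumption.

import netifaces


def first_local_ipv4():
    # Return the first IPv4 address entry (a dict with an 'addr' key) of any
    # interface, skipping point-to-point entries, which carry a 'peer' key.
    for iface in netifaces.interfaces():
        addrs = netifaces.ifaddresses(iface)
        for addr in addrs.get(netifaces.AF_INET, []):
            if 'peer' not in addr:
                return addr
    return None

# Usage matching the code above:
#     self.local_addr = first_local_ipv4()['addr']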