def Header(self):
    """Return this block's Header, constructing and caching it on first access."""
    header = self.__header
    if not header:
        # Build the header once from the block's own fields and memoize it.
        header = Header(self.PrevHash, self.MerkleRoot, self.Timestamp,
                        self.Index, self.ConsensusData, self.NextConsensus,
                        self.Script)
        self.__header = header
    return header
def GetHeader(self, hash):
    """
    Look up a block header in the database by its hash.

    Args:
        hash (bytes): block hash used as the key suffix under
            DBPrefix.DATA_Block.

    Returns:
        Header: the deserialized header, or None when the block is not
        present or the record cannot be read.
    """
    try:
        out = bytearray(self._db.get(DBPrefix.DATA_Block + hash))
        # Skip the first 8 bytes of the stored record before hex-decoding
        # -- presumably a fixed record prefix; confirm against Persist.
        out = out[8:]
        outhex = binascii.unhexlify(out)
        return Header.FromTrimmedData(outhex, 0)
    except TypeError:
        # _db.get returned None (key not found); bytearray(None) raises.
        pass
    except Exception as e:
        # Fixed typo in the log message ("ERRROR" -> "ERROR").
        logger.info("OTHER ERROR %s " % e)
    return None
def Header(self):
    """
    Get the block header.

    Returns:
        neo.Core.Header: the header for this block, built lazily and cached.
    """
    cached = self.__header
    if not cached:
        cached = Header(self.PrevHash, self.MerkleRoot, self.Timestamp,
                        self.Index, self.ConsensusData, self.NextConsensus,
                        self.Script)
        self.__header = cached
    return cached
def GetHeader(self, hash):
    """
    Look up a block header by hash.

    Args:
        hash (UInt256 or bytes): block hash; a UInt256 is converted to the
            encoded hex-string form used as the DB key suffix.

    Returns:
        Header: the deserialized header, or None when not found or unreadable.
    """
    if isinstance(hash, UInt256):
        hash = hash.ToString().encode()
    try:
        out = bytearray(self._db.get(DBPrefix.DATA_Block + hash))
        # Skip the first 8 bytes of the stored record before hex-decoding
        # -- presumably a fixed record prefix; confirm against Persist.
        out = out[8:]
        outhex = binascii.unhexlify(out)
        return Header.FromTrimmedData(outhex, 0)
    except TypeError:
        # _db.get returned None (key not found); bytearray(None) raises.
        pass
    except Exception as e:
        # Fixed typo in the log message ("ERRROR" -> "ERROR").
        logger.info("OTHER ERROR %s " % e)
    return None
def __init__(self, path):
    """
    Open (or create) the LevelDB database at `path` and rebuild the
    in-memory header index from persisted state.

    Args:
        path (str): filesystem path of the LevelDB directory.

    Raises:
        Exception: when the LevelDB store cannot be opened (e.g. another
            process already holds it).
    """
    super(LevelDBBlockchain, self).__init__()
    self._path = path
    self.__log.debug('Initialized LEVELDB')

    # The header index always starts with the genesis block hash.
    self._header_index = []
    self._header_index.append(
        Blockchain.GenesisBlock().Header().HashToByteString())

    try:
        self._db = plyvel.DB(self._path, create_if_missing=True)
    except Exception as e:
        print(
            "leveldb unavailable, you may already be running this process: %s " % e)
        raise Exception('Leveldb Unavailable')

    version = self._db.get_property(SYS_Version)
    self.__log.debug("current version %s " % version)

    self._current_block_height = self._db.get(SYS_CurrentBlock, 0)
    # Read but currently unused beyond logging context below.
    current_header_height = self._db.get(SYS_CurrentHeader,
                                         self._current_block_height)
    self.__log.debug("current header height, hashes %s %s " %
                     (self._current_block_height, self._header_index))

    hashes = []
    for key, value in self._db.iterator(start=IX_HeaderHashList):
        hashes.append({'index': key, 'hash': value})
    # BUG FIX: sorted() returns a new list and the original discarded its
    # result, leaving `hashes` in raw DB iteration order. Sort in place.
    hashes.sort(key=lambda i: i['index'])

    for h in hashes:
        # Genesis is already in the index; skip it but still count it.
        if not h['hash'] == Blockchain.GenesisBlock().Hash():
            self._header_index.append(h['hash'])
        self._stored_header_count += 1

    if self._stored_header_count == 0:
        # No stored header hash list: rebuild headers from raw block records.
        headers = []
        for key, value in self._db.iterator(start=DATA_Block):
            headers.append(Header.FromTrimmedData(bytearray(value)[4:], 0))
        # BUG FIX: same discarded-sorted() defect; sort in place by height.
        headers.sort(key=lambda h: h.Index)
        for h in headers:
            self._header_index.append(h.HashToByteString())
def GetHeader(self, hash):
    """
    Fetch a block header by hash, consulting the in-memory header cache
    first when caching is enabled.

    Args:
        hash (bytes): block hash used as the key suffix under DATA_Block.

    Returns:
        Header: the deserialized header, or None on any failure.
    """
    if self._do_cache:
        # lock header cache
        if hash in self._header_cache:
            return self._header_cache[hash]
        # end lock header cache
        # Fixed typo in log message: "implementet" -> "implemented".
        self.__log.debug("get header from db not implemented yet")
    try:
        # Skip the first 4 bytes of the stored record, then hex-decode.
        out = bytearray(self._db.get(DATA_Block + hash))[4:]
        outhex = binascii.unhexlify(out)
        # Fixed typo in log message: "heade" -> "header".
        self.__log.debug("header hash from db out %s " % outhex)
        return Header.FromTrimmedData(outhex, 0)
    except Exception as e:
        # Fixed typo in message: "retrive" -> "retrieve".
        print("could not retrieve header hash %s" % e)
    return None
def __init__(self, path, skip_version_check=False):
    """
    Open (or create) the LevelDB blockchain database at `path` and restore
    in-memory state: current block height and the header hash index.

    Args:
        path (str): filesystem path for the LevelDB database.
        skip_version_check (bool): when True, stamp the DB with the current
            schema version instead of validating the stored one.

    Raises:
        Exception: when the database cannot be opened, or when its schema
            version differs and the user declines a rebuild at the prompt.
    """
    super(LevelDBBlockchain, self).__init__()
    self._path = path

    # The header index always begins with the genesis block hash.
    self._header_index = []
    self._header_index.append(Blockchain.GenesisBlock().Header.Hash.ToBytes())
    self.TXProcessed = 0
    # BUG FIX: the original called print(TXProcessed) on the bare name
    # (NameError -- the value lives on self) and littered the constructor
    # with leftover debug prints; those have been removed.

    try:
        self._db = plyvel.DB(self._path, create_if_missing=True)
        logger.info("Created Blockchain DB at %s " % self._path)
    except Exception as e:
        logger.info("leveldb unavailable, you may already be running this process: %s " % e)
        raise Exception('Leveldb Unavailable')

    version = self._db.get(DBPrefix.SYS_Version)

    if skip_version_check:
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
        version = self._sysversion

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # Stored height records end with a 4-byte little-endian counter.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')

        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
        current_header_height = int.from_bytes(ba[-4:], 'little')
        # First 64 bytes hold the current header hash as UTF-8 text
        # (presumably a hex digest -- confirm against header persistence).
        current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

        # Load the batched header-hash list records.
        hashes = []
        try:
            for key, value in self._db.iterator(prefix=DBPrefix.IX_HeaderHashList):
                ms = StreamManager.GetStream(value)
                reader = BinaryReader(ms)
                hlist = reader.Read2000256List()
                # Record keys end with a little-endian sequence number.
                key = int.from_bytes(key[-4:], 'little')
                hashes.append({'k': key, 'v': hlist})
                StreamManager.ReleaseStream(ms)
        except Exception as e:
            logger.info("Could not get stored header hash list: %s " % e)

        if len(hashes):
            hashes.sort(key=lambda x: x['k'])
            genstr = Blockchain.GenesisBlock().Hash.ToBytes()
            for hlist in hashes:
                for hash in hlist['v']:
                    # Genesis is already in the index; skip it but count it.
                    if hash != genstr:
                        self._header_index.append(hash)
                    self._stored_header_count += 1

        if self._stored_header_count == 0:
            # No stored hash lists: rebuild headers from raw block records.
            headers = []
            for key, value in self._db.iterator(prefix=DBPrefix.DATA_Block):
                dbhash = bytearray(value)[8:]
                headers.append(Header.FromTrimmedData(binascii.unhexlify(dbhash), 0))
            headers.sort(key=lambda h: h.Index)
            for h in headers:
                if h.Index > 0:
                    self._header_index.append(h.Hash.ToBytes())
        elif current_header_height > self._stored_header_count:
            # Headers exist beyond the stored lists: walk back from the
            # current header to the last indexed hash, then add the rest.
            try:
                hash = current_header_hash
                targethash = self._header_index[-1]
                newhashes = []
                while hash != targethash:
                    header = self.GetHeader(hash)
                    newhashes.insert(0, header)
                    hash = header.PrevHash.ToBytes()
                self.AddHeaders(newhashes)
            except Exception as e:
                # Best-effort catch-up; log instead of silently swallowing.
                logger.info("Could not recover missing headers: %s " % e)
    elif version is None:
        # Fresh database: persist genesis and stamp the schema version.
        self.Persist(Blockchain.GenesisBlock())
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
    else:
        logger.error("\n\n")
        logger.warning("Database schema has changed from %s to %s.\n" % (version, self._sysversion))
        logger.warning("You must either resync from scratch, or use the np-bootstrap command to bootstrap the chain.")
        res = prompt("Type 'continue' to erase your current database and sync from new. Otherwise this program will exit:\n> ")
        if res == 'continue':
            # Wipe every key, then re-persist genesis under the new schema.
            with self._db.write_batch() as wb:
                for key, value in self._db.iterator():
                    wb.delete(key)
            self.Persist(Blockchain.GenesisBlock())
            self._db.put(DBPrefix.SYS_Version, self._sysversion)
        else:
            raise Exception("Database schema changed")
def __init__(self, path):
    """
    Open (or create) the LevelDB database at `path` and restore in-memory
    chain state: current block height and the header hash index.

    Args:
        path (str): filesystem path of the LevelDB directory.

    Raises:
        Exception: when the LevelDB store cannot be opened.
    """
    super(LevelDBBlockchain, self).__init__()
    self._path = path

    # The header index always starts with the genesis block header hash.
    self._header_index = []
    self._header_index.append(
        Blockchain.GenesisBlock().Header.Hash.ToBytes())
    try:
        self._db = plyvel.DB(self._path, create_if_missing=True)
        # self._db = plyvel.DB(self._path, create_if_missing=True, bloom_filter_bits=16, compression=None)
    except Exception as e:
        logger.info(
            "leveldb unavailable, you may already be running this process: %s " % e)
        raise Exception('Leveldb Unavailable')

    version = self._db.get(DBPrefix.SYS_Version)

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # Stored height records end with a 4-byte little-endian counter.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')

        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
        current_header_height = int.from_bytes(ba[-4:], 'little')
        # First 64 bytes hold the current header hash as UTF-8 text
        # (presumably a hex digest -- confirm against header persistence).
        current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

        # logger.info("current header hash!! %s " % current_header_hash)
        # logger.info("current header height, hashes %s %s %s" %(self._current_block_height, self._header_index, current_header_height) )

        # Load the batched header-hash list records.
        hashes = []
        try:
            for key, value in self._db.iterator(
                    prefix=DBPrefix.IX_HeaderHashList):
                ms = StreamManager.GetStream(value)
                reader = BinaryReader(ms)
                hlist = reader.Read2000256List()
                # Record keys end with a little-endian sequence number.
                key = int.from_bytes(key[-4:], 'little')
                hashes.append({'k': key, 'v': hlist})
                StreamManager.ReleaseStream(ms)
                # hashes.append({'index':int.from_bytes(key, 'little'), 'hash':value})
        except Exception as e:
            logger.info("Could not get stored header hash list: %s " % e)

        if len(hashes):
            hashes.sort(key=lambda x: x['k'])
            genstr = Blockchain.GenesisBlock().Hash.ToBytes()
            for hlist in hashes:
                for hash in hlist['v']:
                    # Genesis is already in the index; skip it but count it.
                    if hash != genstr:
                        self._header_index.append(hash)
                    self._stored_header_count += 1

        if self._stored_header_count == 0:
            # No stored hash lists: rebuild headers from raw block records.
            headers = []
            for key, value in self._db.iterator(
                    prefix=DBPrefix.DATA_Block):
                dbhash = bytearray(value)[8:]
                headers.append(
                    Header.FromTrimmedData(binascii.unhexlify(dbhash), 0))
            headers.sort(key=lambda h: h.Index)
            for h in headers:
                if h.Index > 0:
                    self._header_index.append(h.Hash.ToBytes())
        elif current_header_height > self._stored_header_count:
            # Headers exist beyond the stored lists: walk back from the
            # current header to the last indexed hash, then add the rest.
            try:
                hash = current_header_hash
                targethash = self._header_index[-1]
                newhashes = []
                while hash != targethash:
                    header = self.GetHeader(hash)
                    newhashes.insert(0, header)
                    hash = header.PrevHash.ToBytes()
                self.AddHeaders(newhashes)
            except Exception as e:
                # Best-effort catch-up; failures are deliberately ignored.
                pass
    else:
        # Version mismatch or fresh DB: erase all keys, persist genesis and
        # stamp the current schema version. NOTE(review): unlike newer
        # variants, this wipes without prompting the user.
        with self._db.write_batch() as wb:
            for key, value in self._db.iterator():
                wb.delete(key)
        self.Persist(Blockchain.GenesisBlock())
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
def __init__(self, path):
    """
    Open (or create) the RocksDB database at `path` and restore in-memory
    chain state: current block height and the header hash index.

    Args:
        path (str): filesystem path of the RocksDB directory.

    Raises:
        Exception: when the RocksDB store cannot be opened.
    """
    super(RocksDBBlockchain, self).__init__()
    self._path = path

    # The header index always starts with the genesis block header hash.
    self._header_index = []
    self._header_index.append(
        Blockchain.GenesisBlock().Header.Hash.ToBytes())

    logger.info("start to create database")
    # BUG FIX: the original passed values as extra positional args with no
    # %s placeholder in the format string, so logging's lazy %-formatting
    # failed and the values were never rendered. Fixed throughout.
    logger.info("db path is: %s", self._path)
    try:
        opts = rocksdb.Options()
        opts.create_if_missing = True
        opts.prefix_extractor = StaticPrefix()
        self._db = rocksdb.DB(self._path, opts)
        logger.info("rocksdb is created successfully")
    except Exception as e:
        logger.info(
            "RocksDB unavailable, you may already be running this process: %s " % e)
        raise Exception('RocksDB Unavailable')

    version = self._db.get(DBPrefix.SYS_Version)
    logger.info("database is created successfully, version is: %s", version)
    logger.info("database is created successfully, self_sysversion is: %s",
                self._sysversion)

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # Stored height records end with a 4-byte little-endian counter.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        logger.debug("current block %s", ba)
        logger.debug("current block %s", ba[-4:])
        self._current_block_height = int.from_bytes(ba[-4:], 'little')
        logger.debug("current block height: %s", self._current_block_height)

        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
        logger.debug("current header %s", ba)
        current_header_height = int.from_bytes(ba[-4:], 'little')
        # First 64 bytes hold the current header hash as UTF-8 text
        # (presumably a hex digest -- confirm against header persistence).
        current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8')

        # Load the batched header-hash list records. RocksDB has no prefix
        # iterator here, so seek then take while keys match the prefix.
        hashes = []
        try:
            it = self._db.iteritems()
            it.seek(DBPrefix.IX_HeaderHashList)
            # Iterate takewhile directly; the original materialized a
            # throwaway dict just to call .items() on it.
            for key, value in itertools.takewhile(
                    lambda item: item[0].startswith(DBPrefix.IX_HeaderHashList),
                    it):
                ms = StreamManager.GetStream(value)
                reader = BinaryReader(ms)
                hlist = reader.Read2000256List()
                # Record keys end with a little-endian sequence number.
                key = int.from_bytes(key[-4:], 'little')
                hashes.append({'k': key, 'v': hlist})
                StreamManager.ReleaseStream(ms)
        except Exception as e:
            # Fixed typo: "Couldnt" -> "Couldn't".
            logger.info("Couldn't get stored header hash list: %s " % e)

        if len(hashes):
            hashes.sort(key=lambda x: x['k'])
            genstr = Blockchain.GenesisBlock().Hash.ToBytes()
            for hlist in hashes:
                for hash in hlist['v']:
                    # Genesis is already in the index; skip it but count it.
                    if hash != genstr:
                        self._header_index.append(hash)
                    self._stored_header_count += 1

        if self._stored_header_count == 0:
            # No stored hash lists: rebuild headers from raw block records.
            headers = []
            it = self._db.iteritems()
            logger.debug("seek key prefix: %s", DBPrefix.DATA_Block)
            it.seek(DBPrefix.DATA_Block)
            for key, value in itertools.takewhile(
                    lambda item: item[0].startswith(DBPrefix.DATA_Block),
                    it):
                dbhash = bytearray(value)[8:]
                headers.append(
                    Header.FromTrimmedData(binascii.unhexlify(dbhash), 0))
            headers.sort(key=lambda h: h.Index)
            for h in headers:
                if h.Index > 0:
                    self._header_index.append(h.Hash.ToBytes())
        elif current_header_height > self._stored_header_count:
            # Headers exist beyond the stored lists: walk back from the
            # current header to the last indexed hash, then add the rest.
            try:
                hash = current_header_hash
                targethash = self._header_index[-1]
                newhashes = []
                while hash != targethash:
                    header = self.GetHeader(hash)
                    newhashes.insert(0, header)
                    hash = header.PrevHash.ToBytes()
                self.AddHeaders(newhashes)
            except Exception as e:
                # Best-effort catch-up; log instead of silently swallowing.
                logger.info("Could not recover missing headers: %s", e)
    else:
        # Version mismatch or fresh DB: erase all keys, persist genesis and
        # stamp the current schema version.
        wb = rocksdb.WriteBatch()
        # NOTE(review): python-rocksdb iterators normally require a
        # seek_to_first() before yielding -- verify this loop visits keys.
        for key in self._db.iterkeys():
            wb.delete(key)
        self._db.write(wb)
        self.Persist(Blockchain.GenesisBlock())
        self._db.put(DBPrefix.SYS_Version, self._sysversion)
def __init__(self, db, skip_version_check=False, skip_header_check=False):
    """
    Initialize the blockchain over an already-opened database wrapper and
    restore in-memory state: current block height and the header hash index.

    Args:
        db: database abstraction providing get/write/openIter/getBatch.
        skip_version_check (bool): when True, stamp the DB with the current
            schema version instead of validating the stored one.
        skip_header_check (bool): when True, skip rebuilding/validating the
            header index entirely.

    Raises:
        Exception: when the schema version differs and the user declines a
            rebuild at the prompt.
    """
    self._db = db

    # The header index always starts with the genesis block header hash.
    self._header_index = []
    self._header_index.append(
        Blockchain.GenesisBlock().Header.Hash.ToBytes())
    self.TXProcessed = 0

    version = self._db.get(DBPrefix.SYS_Version)
    if skip_version_check:
        self._db.write(DBPrefix.SYS_Version, self._sysversion)
        version = self._sysversion

    if version == self._sysversion:  # or in the future, if version doesn't equal the current version...
        # Stored height records end with a 4-byte little-endian counter.
        ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0))
        self._current_block_height = int.from_bytes(ba[-4:], 'little')

        if not skip_header_check:
            ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0))
            current_header_height = int.from_bytes(ba[-4:], 'little')
            # First 64 bytes hold the current header hash as UTF-8 text
            # (presumably a hex digest -- confirm against persistence).
            current_header_hash = bytes(ba[:64].decode('utf-8'),
                                        encoding='utf-8')

            # Load the batched header-hash list records.
            hashes = []
            try:
                with self._db.openIter(
                        DBProperties(DBPrefix.IX_HeaderHashList)) as it:
                    for key, value in it:
                        ms = StreamManager.GetStream(value)
                        reader = BinaryReader(ms)
                        hlist = reader.Read2000256List()
                        # Keys end with a little-endian sequence number.
                        key = int.from_bytes(key[-4:], 'little')
                        hashes.append({'k': key, 'v': hlist})
                        StreamManager.ReleaseStream(ms)
            except Exception as e:
                logger.info("Could not get stored header hash list: %s " % e)

            if len(hashes):
                hashes.sort(key=lambda x: x['k'])
                genstr = Blockchain.GenesisBlock().Hash.ToBytes()
                for hlist in hashes:
                    for hash in hlist['v']:
                        # Genesis is already indexed; skip it but count it.
                        if hash != genstr:
                            self._header_index.append(hash)
                        self._stored_header_count += 1

            if self._stored_header_count == 0:
                logger.info(
                    "Current stored headers empty, re-creating from stored blocks..."
                )
                # No stored hash lists: rebuild from raw block records.
                headers = []
                logger.info('Recreate headers')
                with self._db.openIter(DBProperties(
                        DBPrefix.DATA_Block)) as it:
                    for key, value in it:
                        dbhash = bytearray(value)[8:]
                        headers.append(
                            Header.FromTrimmedData(
                                binascii.unhexlify(dbhash), 0))
                headers.sort(key=lambda h: h.Index)
                for h in headers:
                    if h.Index > 0:
                        self._header_index.append(h.Hash.ToBytes())
                if len(headers):
                    self.OnAddHeader(headers[-1])
            elif current_header_height > self._stored_header_count:
                # Headers exist beyond the stored lists: walk back from the
                # current header to the last indexed hash, add the rest.
                try:
                    hash = current_header_hash
                    targethash = self._header_index[-1]
                    newhashes = []
                    while hash != targethash:
                        header = self.GetHeader(hash)
                        newhashes.insert(0, header)
                        hash = header.PrevHash.ToBytes()
                    self.AddHeaders(newhashes)
                except Exception as e:
                    # Best-effort catch-up; failures deliberately ignored.
                    pass
    elif version is None:
        # Fresh database: persist genesis and stamp the schema version.
        # wait_for suggests Persist is awaitable here -- confirm.
        wait_for(self.Persist(Blockchain.GenesisBlock()))
        self._db.write(DBPrefix.SYS_Version, self._sysversion)
    else:
        logger.error("\n\n")
        logger.warning("Database schema has changed from %s to %s.\n" %
                       (version, self._sysversion))
        logger.warning(
            "You must either resync from scratch, or use the np-bootstrap command to bootstrap the chain."
        )
        res = prompt(
            "Type 'continue' to erase your current database and sync from new. Otherwise this program will exit:\n> "
        )
        if res == 'continue':
            # Wipe every key, then re-persist genesis under the new schema.
            with self._db.getBatch() as wb:
                with self._db.openIter(
                        DBProperties(include_value=False)) as it:
                    for key in it:
                        wb.delete(key)
            wait_for(self.Persist(Blockchain.GenesisBlock()))
            self._db.write(DBPrefix.SYS_Version, self._sysversion)
        else:
            raise Exception("Database schema changed")